mirror of
https://github.com/simstudioai/sim.git
synced 2026-02-22 03:01:08 -05:00
Compare commits
10 Commits
fix/build
...
feat/mothe
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
03908edcbb | ||
|
|
3112485c31 | ||
|
|
459c2930ae | ||
|
|
3338b25c30 | ||
|
|
4c3002f97d | ||
|
|
632e0e0762 | ||
|
|
7599774974 | ||
|
|
471e58a2d0 | ||
|
|
231ddc59a0 | ||
|
|
b197f68828 |
@@ -1,12 +1,22 @@
|
||||
import { db } from '@sim/db'
|
||||
import { settings } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { SIM_AGENT_API_URL } from '@/lib/copilot/constants'
|
||||
import { env } from '@/lib/core/config/env'
|
||||
|
||||
const logger = createLogger('CopilotAutoAllowedToolsAPI')
|
||||
|
||||
/** Headers for server-to-server calls to the Go copilot backend. */
|
||||
function copilotHeaders(): Record<string, string> {
|
||||
const headers: Record<string, string> = {
|
||||
'Content-Type': 'application/json',
|
||||
}
|
||||
if (env.COPILOT_API_KEY) {
|
||||
headers['x-api-key'] = env.COPILOT_API_KEY
|
||||
}
|
||||
return headers
|
||||
}
|
||||
|
||||
/**
|
||||
* GET - Fetch user's auto-allowed integration tools
|
||||
*/
|
||||
@@ -20,24 +30,18 @@ export async function GET() {
|
||||
|
||||
const userId = session.user.id
|
||||
|
||||
const [userSettings] = await db
|
||||
.select()
|
||||
.from(settings)
|
||||
.where(eq(settings.userId, userId))
|
||||
.limit(1)
|
||||
const res = await fetch(
|
||||
`${SIM_AGENT_API_URL}/api/tool-preferences/auto-allowed?userId=${encodeURIComponent(userId)}`,
|
||||
{ method: 'GET', headers: copilotHeaders() }
|
||||
)
|
||||
|
||||
if (userSettings) {
|
||||
const autoAllowedTools = (userSettings.copilotAutoAllowedTools as string[]) || []
|
||||
return NextResponse.json({ autoAllowedTools })
|
||||
if (!res.ok) {
|
||||
logger.warn('Go backend returned error for list auto-allowed', { status: res.status })
|
||||
return NextResponse.json({ autoAllowedTools: [] })
|
||||
}
|
||||
|
||||
await db.insert(settings).values({
|
||||
id: userId,
|
||||
userId,
|
||||
copilotAutoAllowedTools: [],
|
||||
})
|
||||
|
||||
return NextResponse.json({ autoAllowedTools: [] })
|
||||
const payload = await res.json()
|
||||
return NextResponse.json({ autoAllowedTools: payload?.autoAllowedTools || [] })
|
||||
} catch (error) {
|
||||
logger.error('Failed to fetch auto-allowed tools', { error })
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
@@ -62,38 +66,22 @@ export async function POST(request: NextRequest) {
|
||||
return NextResponse.json({ error: 'toolId must be a string' }, { status: 400 })
|
||||
}
|
||||
|
||||
const toolId = body.toolId
|
||||
|
||||
const [existing] = await db.select().from(settings).where(eq(settings.userId, userId)).limit(1)
|
||||
|
||||
if (existing) {
|
||||
const currentTools = (existing.copilotAutoAllowedTools as string[]) || []
|
||||
|
||||
if (!currentTools.includes(toolId)) {
|
||||
const updatedTools = [...currentTools, toolId]
|
||||
await db
|
||||
.update(settings)
|
||||
.set({
|
||||
copilotAutoAllowedTools: updatedTools,
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where(eq(settings.userId, userId))
|
||||
|
||||
logger.info('Added tool to auto-allowed list', { userId, toolId })
|
||||
return NextResponse.json({ success: true, autoAllowedTools: updatedTools })
|
||||
}
|
||||
|
||||
return NextResponse.json({ success: true, autoAllowedTools: currentTools })
|
||||
}
|
||||
|
||||
await db.insert(settings).values({
|
||||
id: userId,
|
||||
userId,
|
||||
copilotAutoAllowedTools: [toolId],
|
||||
const res = await fetch(`${SIM_AGENT_API_URL}/api/tool-preferences/auto-allowed`, {
|
||||
method: 'POST',
|
||||
headers: copilotHeaders(),
|
||||
body: JSON.stringify({ userId, toolId: body.toolId }),
|
||||
})
|
||||
|
||||
logger.info('Created settings and added tool to auto-allowed list', { userId, toolId })
|
||||
return NextResponse.json({ success: true, autoAllowedTools: [toolId] })
|
||||
if (!res.ok) {
|
||||
logger.warn('Go backend returned error for add auto-allowed', { status: res.status })
|
||||
return NextResponse.json({ error: 'Failed to add tool' }, { status: 500 })
|
||||
}
|
||||
|
||||
const payload = await res.json()
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
autoAllowedTools: payload?.autoAllowedTools || [],
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Failed to add auto-allowed tool', { error })
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
@@ -119,25 +107,21 @@ export async function DELETE(request: NextRequest) {
|
||||
return NextResponse.json({ error: 'toolId query parameter is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
const [existing] = await db.select().from(settings).where(eq(settings.userId, userId)).limit(1)
|
||||
const res = await fetch(
|
||||
`${SIM_AGENT_API_URL}/api/tool-preferences/auto-allowed?userId=${encodeURIComponent(userId)}&toolId=${encodeURIComponent(toolId)}`,
|
||||
{ method: 'DELETE', headers: copilotHeaders() }
|
||||
)
|
||||
|
||||
if (existing) {
|
||||
const currentTools = (existing.copilotAutoAllowedTools as string[]) || []
|
||||
const updatedTools = currentTools.filter((t) => t !== toolId)
|
||||
|
||||
await db
|
||||
.update(settings)
|
||||
.set({
|
||||
copilotAutoAllowedTools: updatedTools,
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where(eq(settings.userId, userId))
|
||||
|
||||
logger.info('Removed tool from auto-allowed list', { userId, toolId })
|
||||
return NextResponse.json({ success: true, autoAllowedTools: updatedTools })
|
||||
if (!res.ok) {
|
||||
logger.warn('Go backend returned error for remove auto-allowed', { status: res.status })
|
||||
return NextResponse.json({ error: 'Failed to remove tool' }, { status: 500 })
|
||||
}
|
||||
|
||||
return NextResponse.json({ success: true, autoAllowedTools: [] })
|
||||
const payload = await res.json()
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
autoAllowedTools: payload?.autoAllowedTools || [],
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Failed to remove auto-allowed tool', { error })
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
|
||||
@@ -264,6 +264,7 @@ export async function POST(req: NextRequest) {
|
||||
{
|
||||
message,
|
||||
workflowId,
|
||||
workflowName: resolved.workflowName,
|
||||
userId: authenticatedUserId,
|
||||
userMessageId: userMessageIdToUse,
|
||||
mode,
|
||||
|
||||
130
apps/sim/app/api/copilot/workspace-chat/route.ts
Normal file
130
apps/sim/app/api/copilot/workspace-chat/route.ts
Normal file
@@ -0,0 +1,130 @@
|
||||
import { db } from '@sim/db'
|
||||
import { copilotChats } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { resolveOrCreateChat } from '@/lib/copilot/chat-lifecycle'
|
||||
import { SIM_AGENT_VERSION } from '@/lib/copilot/constants'
|
||||
import { orchestrateCopilotStream } from '@/lib/copilot/orchestrator'
|
||||
import type { SSEEvent } from '@/lib/copilot/orchestrator/types'
|
||||
// Workspace prompt is now generated by the Go copilot backend (detected via source: 'workspace-chat')
|
||||
|
||||
const logger = createLogger('WorkspaceChatAPI')
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
export const runtime = 'nodejs'
|
||||
export const maxDuration = 300
|
||||
|
||||
const WorkspaceChatSchema = z.object({
|
||||
message: z.string().min(1, 'Message is required'),
|
||||
workspaceId: z.string().min(1, 'workspaceId is required'),
|
||||
chatId: z.string().optional(),
|
||||
model: z.string().optional().default('claude-opus-4-5'),
|
||||
})
|
||||
|
||||
export async function POST(req: NextRequest) {
|
||||
try {
|
||||
const session = await getSession()
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const body = await req.json()
|
||||
const { message, workspaceId, chatId, model } = WorkspaceChatSchema.parse(body)
|
||||
|
||||
const chatResult = await resolveOrCreateChat({
|
||||
chatId,
|
||||
userId: session.user.id,
|
||||
workspaceId,
|
||||
model,
|
||||
})
|
||||
|
||||
const requestPayload: Record<string, unknown> = {
|
||||
message,
|
||||
userId: session.user.id,
|
||||
model,
|
||||
mode: 'agent',
|
||||
headless: true,
|
||||
messageId: crypto.randomUUID(),
|
||||
version: SIM_AGENT_VERSION,
|
||||
source: 'workspace-chat',
|
||||
stream: true,
|
||||
...(chatResult.chatId ? { chatId: chatResult.chatId } : {}),
|
||||
}
|
||||
|
||||
const encoder = new TextEncoder()
|
||||
const stream = new ReadableStream({
|
||||
async start(controller) {
|
||||
const pushEvent = (event: Record<string, unknown>) => {
|
||||
try {
|
||||
controller.enqueue(encoder.encode(`data: ${JSON.stringify(event)}\n\n`))
|
||||
} catch {
|
||||
// Client disconnected
|
||||
}
|
||||
}
|
||||
|
||||
if (chatResult.chatId) {
|
||||
pushEvent({ type: 'chat_id', chatId: chatResult.chatId })
|
||||
}
|
||||
|
||||
try {
|
||||
const result = await orchestrateCopilotStream(requestPayload, {
|
||||
userId: session.user.id,
|
||||
workspaceId,
|
||||
chatId: chatResult.chatId || undefined,
|
||||
autoExecuteTools: true,
|
||||
interactive: false,
|
||||
onEvent: async (event: SSEEvent) => {
|
||||
pushEvent(event as unknown as Record<string, unknown>)
|
||||
},
|
||||
})
|
||||
|
||||
if (chatResult.chatId && result.conversationId) {
|
||||
await db
|
||||
.update(copilotChats)
|
||||
.set({
|
||||
updatedAt: new Date(),
|
||||
conversationId: result.conversationId,
|
||||
})
|
||||
.where(eq(copilotChats.id, chatResult.chatId))
|
||||
}
|
||||
|
||||
pushEvent({
|
||||
type: 'done',
|
||||
success: result.success,
|
||||
content: result.content,
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Workspace chat orchestration failed', { error })
|
||||
pushEvent({
|
||||
type: 'error',
|
||||
error: error instanceof Error ? error.message : 'Chat failed',
|
||||
})
|
||||
} finally {
|
||||
controller.close()
|
||||
}
|
||||
},
|
||||
})
|
||||
|
||||
return new Response(stream, {
|
||||
headers: {
|
||||
'Content-Type': 'text/event-stream',
|
||||
'Cache-Control': 'no-cache',
|
||||
Connection: 'keep-alive',
|
||||
'X-Accel-Buffering': 'no',
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid request', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.error('Workspace chat error', { error })
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
@@ -8,7 +8,10 @@ import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { env } from '@/lib/core/config/env'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { extractAndPersistCustomTools } from '@/lib/workflows/persistence/custom-tools-persistence'
|
||||
import { saveWorkflowToNormalizedTables } from '@/lib/workflows/persistence/utils'
|
||||
import {
|
||||
loadWorkflowFromNormalizedTables,
|
||||
saveWorkflowToNormalizedTables,
|
||||
} from '@/lib/workflows/persistence/utils'
|
||||
import { sanitizeAgentToolsInBlocks } from '@/lib/workflows/sanitization/validation'
|
||||
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
|
||||
import type { BlockState } from '@/stores/workflows/workflow/types'
|
||||
@@ -108,6 +111,49 @@ const WorkflowStateSchema = z.object({
|
||||
variables: z.any().optional(), // Workflow variables
|
||||
})
|
||||
|
||||
/**
|
||||
* GET /api/workflows/[id]/state
|
||||
* Fetch the current workflow state from normalized tables.
|
||||
* Used by the client after server-side edits (edit_workflow) to stay in sync.
|
||||
*/
|
||||
export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
|
||||
const { id: workflowId } = await params
|
||||
|
||||
try {
|
||||
const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const authorization = await authorizeWorkflowByWorkspacePermission({
|
||||
workflowId,
|
||||
userId: auth.userId,
|
||||
action: 'read',
|
||||
})
|
||||
if (!authorization.allowed) {
|
||||
return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
|
||||
}
|
||||
|
||||
const normalized = await loadWorkflowFromNormalizedTables(workflowId)
|
||||
if (!normalized) {
|
||||
return NextResponse.json({ error: 'Workflow state not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
return NextResponse.json({
|
||||
blocks: normalized.blocks,
|
||||
edges: normalized.edges,
|
||||
loops: normalized.loops || {},
|
||||
parallels: normalized.parallels || {},
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Failed to fetch workflow state', {
|
||||
workflowId,
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
})
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* PUT /api/workflows/[id]/state
|
||||
* Save complete workflow state to normalized database tables
|
||||
|
||||
259
apps/sim/app/workspace/[workspaceId]/chat/chat.tsx
Normal file
259
apps/sim/app/workspace/[workspaceId]/chat/chat.tsx
Normal file
@@ -0,0 +1,259 @@
|
||||
'use client'
|
||||
|
||||
import { useCallback, useRef, useState } from 'react'
|
||||
import { Check, CircleAlert, Loader2, Send, Square, Zap } from 'lucide-react'
|
||||
import { useParams } from 'next/navigation'
|
||||
import ReactMarkdown from 'react-markdown'
|
||||
import remarkGfm from 'remark-gfm'
|
||||
import { Button } from '@/components/emcn'
|
||||
import { cn } from '@/lib/core/utils/cn'
|
||||
import type { ContentBlock, ToolCallInfo, ToolCallStatus } from './hooks/use-workspace-chat'
|
||||
import { useWorkspaceChat } from './hooks/use-workspace-chat'
|
||||
|
||||
const REMARK_PLUGINS = [remarkGfm]
|
||||
|
||||
/** Status icon for a tool call. */
|
||||
function ToolStatusIcon({ status }: { status: ToolCallStatus }) {
|
||||
switch (status) {
|
||||
case 'executing':
|
||||
return <Loader2 className='h-3 w-3 animate-spin text-[var(--text-tertiary)]' />
|
||||
case 'success':
|
||||
return <Check className='h-3 w-3 text-emerald-500' />
|
||||
case 'error':
|
||||
return <CircleAlert className='h-3 w-3 text-red-400' />
|
||||
}
|
||||
}
|
||||
|
||||
/** Formats a tool name for display: "edit_workflow" → "Edit Workflow". */
|
||||
function formatToolName(name: string): string {
|
||||
return name
|
||||
.replace(/_v\d+$/, '')
|
||||
.split('_')
|
||||
.map((w) => w.charAt(0).toUpperCase() + w.slice(1))
|
||||
.join(' ')
|
||||
}
|
||||
|
||||
/** Compact inline rendering of a single tool call. */
|
||||
function ToolCallItem({ toolCall }: { toolCall: ToolCallInfo }) {
|
||||
const label = toolCall.displayTitle || formatToolName(toolCall.name)
|
||||
|
||||
return (
|
||||
<div className='flex items-center gap-2 rounded-md border border-[var(--border)] bg-[var(--surface-2)] px-3 py-1.5'>
|
||||
<Zap className='h-3 w-3 flex-shrink-0 text-[var(--text-tertiary)]' />
|
||||
<span className='min-w-0 flex-1 truncate text-xs text-[var(--text-secondary)]'>{label}</span>
|
||||
<ToolStatusIcon status={toolCall.status} />
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
/** Renders a subagent activity label. */
|
||||
function SubagentLabel({ label }: { label: string }) {
|
||||
return (
|
||||
<div className='flex items-center gap-2 py-0.5'>
|
||||
<Loader2 className='h-3 w-3 animate-spin text-[var(--text-tertiary)]' />
|
||||
<span className='text-xs text-[var(--text-tertiary)]'>{label}</span>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
/** Renders structured content blocks for an assistant message. */
|
||||
function AssistantContent({ blocks, isStreaming }: { blocks: ContentBlock[]; isStreaming: boolean }) {
|
||||
return (
|
||||
<div className='space-y-2'>
|
||||
{blocks.map((block, i) => {
|
||||
switch (block.type) {
|
||||
case 'text': {
|
||||
if (!block.content?.trim()) return null
|
||||
return (
|
||||
<div key={`text-${i}`} className='prose-sm prose-invert max-w-none'>
|
||||
<ReactMarkdown remarkPlugins={REMARK_PLUGINS}>{block.content}</ReactMarkdown>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
case 'tool_call': {
|
||||
if (!block.toolCall) return null
|
||||
return <ToolCallItem key={block.toolCall.id} toolCall={block.toolCall} />
|
||||
}
|
||||
case 'subagent': {
|
||||
if (!block.content) return null
|
||||
// Only show the subagent label if it's the last subagent block and we're streaming
|
||||
const isLastSubagent =
|
||||
isStreaming &&
|
||||
blocks.slice(i + 1).every((b) => b.type !== 'subagent')
|
||||
if (!isLastSubagent) return null
|
||||
return <SubagentLabel key={`sub-${i}`} label={block.content} />
|
||||
}
|
||||
default:
|
||||
return null
|
||||
}
|
||||
})}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
export function Chat() {
|
||||
const { workspaceId } = useParams<{ workspaceId: string }>()
|
||||
const [inputValue, setInputValue] = useState('')
|
||||
const inputRef = useRef<HTMLTextAreaElement>(null)
|
||||
const messagesEndRef = useRef<HTMLDivElement>(null)
|
||||
|
||||
const { messages, isSending, error, sendMessage, abortMessage } = useWorkspaceChat({
|
||||
workspaceId,
|
||||
})
|
||||
|
||||
const scrollToBottom = useCallback(() => {
|
||||
messagesEndRef.current?.scrollIntoView({ behavior: 'smooth' })
|
||||
}, [])
|
||||
|
||||
const handleSubmit = useCallback(async () => {
|
||||
const trimmed = inputValue.trim()
|
||||
if (!trimmed || !workspaceId) return
|
||||
|
||||
setInputValue('')
|
||||
await sendMessage(trimmed)
|
||||
scrollToBottom()
|
||||
}, [inputValue, workspaceId, sendMessage, scrollToBottom])
|
||||
|
||||
const handleKeyDown = useCallback(
|
||||
(e: React.KeyboardEvent<HTMLTextAreaElement>) => {
|
||||
if (e.key === 'Enter' && !e.shiftKey) {
|
||||
e.preventDefault()
|
||||
handleSubmit()
|
||||
}
|
||||
},
|
||||
[handleSubmit]
|
||||
)
|
||||
|
||||
return (
|
||||
<div className='flex h-full flex-col'>
|
||||
{/* Header */}
|
||||
<div className='flex flex-shrink-0 items-center border-b border-[var(--border)] px-6 py-3'>
|
||||
<h1 className='font-medium text-[16px] text-[var(--text-primary)]'>Mothership</h1>
|
||||
</div>
|
||||
|
||||
{/* Messages area */}
|
||||
<div className='flex-1 overflow-y-auto px-6 py-4'>
|
||||
{messages.length === 0 && !isSending ? (
|
||||
<div className='flex h-full items-center justify-center'>
|
||||
<div className='flex flex-col items-center gap-3 text-center'>
|
||||
<p className='text-[var(--text-secondary)] text-sm'>
|
||||
Ask anything about your workspace — build workflows, manage resources, get help.
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
) : (
|
||||
<div className='mx-auto max-w-3xl space-y-4'>
|
||||
{messages.map((msg) => {
|
||||
const isStreamingEmpty =
|
||||
isSending &&
|
||||
msg.role === 'assistant' &&
|
||||
!msg.content &&
|
||||
(!msg.contentBlocks || msg.contentBlocks.length === 0)
|
||||
if (isStreamingEmpty) {
|
||||
return (
|
||||
<div key={msg.id} className='flex justify-start'>
|
||||
<div className='flex items-center gap-2 rounded-lg bg-[var(--surface-3)] px-4 py-2 text-sm text-[var(--text-secondary)]'>
|
||||
<Loader2 className='h-3 w-3 animate-spin' />
|
||||
Thinking...
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
// Skip empty assistant messages
|
||||
if (
|
||||
msg.role === 'assistant' &&
|
||||
!msg.content &&
|
||||
(!msg.contentBlocks || msg.contentBlocks.length === 0)
|
||||
)
|
||||
return null
|
||||
|
||||
// User messages
|
||||
if (msg.role === 'user') {
|
||||
return (
|
||||
<div key={msg.id} className='flex justify-end'>
|
||||
<div className='max-w-[85%] rounded-lg bg-[var(--accent)] px-4 py-2 text-sm text-[var(--accent-foreground)]'>
|
||||
<p className='whitespace-pre-wrap'>{msg.content}</p>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
// Assistant messages with content blocks
|
||||
const hasBlocks = msg.contentBlocks && msg.contentBlocks.length > 0
|
||||
const isThisMessageStreaming = isSending && msg === messages[messages.length - 1]
|
||||
|
||||
return (
|
||||
<div key={msg.id} className='flex justify-start'>
|
||||
<div className='max-w-[85%] rounded-lg bg-[var(--surface-3)] px-4 py-2 text-sm text-[var(--text-primary)]'>
|
||||
{hasBlocks ? (
|
||||
<AssistantContent
|
||||
blocks={msg.contentBlocks!}
|
||||
isStreaming={isThisMessageStreaming}
|
||||
/>
|
||||
) : (
|
||||
<div className='prose-sm prose-invert max-w-none'>
|
||||
<ReactMarkdown remarkPlugins={REMARK_PLUGINS}>
|
||||
{msg.content}
|
||||
</ReactMarkdown>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
})}
|
||||
<div ref={messagesEndRef} />
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Error display */}
|
||||
{error && (
|
||||
<div className='px-6 pb-2'>
|
||||
<p className='text-xs text-red-500'>{error}</p>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Input area */}
|
||||
<div className='flex-shrink-0 border-t border-[var(--border)] px-6 py-4'>
|
||||
<div className='mx-auto flex max-w-3xl items-end gap-2'>
|
||||
<textarea
|
||||
ref={inputRef}
|
||||
value={inputValue}
|
||||
onChange={(e) => setInputValue(e.target.value)}
|
||||
onKeyDown={handleKeyDown}
|
||||
placeholder='Send a message...'
|
||||
rows={1}
|
||||
className='flex-1 resize-none rounded-lg border border-[var(--border)] bg-[var(--surface-2)] px-4 py-2.5 text-sm text-[var(--text-primary)] placeholder:text-[var(--text-tertiary)] focus:border-[var(--accent)] focus:outline-none'
|
||||
style={{ maxHeight: '120px' }}
|
||||
onInput={(e) => {
|
||||
const target = e.target as HTMLTextAreaElement
|
||||
target.style.height = 'auto'
|
||||
target.style.height = `${Math.min(target.scrollHeight, 120)}px`
|
||||
}}
|
||||
/>
|
||||
{isSending ? (
|
||||
<Button
|
||||
variant='ghost'
|
||||
size='sm'
|
||||
onClick={abortMessage}
|
||||
className='h-[38px] w-[38px] flex-shrink-0 p-0'
|
||||
>
|
||||
<Square className='h-4 w-4' />
|
||||
</Button>
|
||||
) : (
|
||||
<Button
|
||||
variant='ghost'
|
||||
size='sm'
|
||||
onClick={handleSubmit}
|
||||
disabled={!inputValue.trim()}
|
||||
className='h-[38px] w-[38px] flex-shrink-0 p-0'
|
||||
>
|
||||
<Send className='h-4 w-4' />
|
||||
</Button>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
@@ -0,0 +1,346 @@
|
||||
'use client'
|
||||
|
||||
import { useCallback, useRef, useState } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
|
||||
const logger = createLogger('useWorkspaceChat')
|
||||
|
||||
/** Status of a tool call as it progresses through execution. */
|
||||
export type ToolCallStatus = 'executing' | 'success' | 'error'
|
||||
|
||||
/** Lightweight info about a single tool call rendered in the chat. */
|
||||
export interface ToolCallInfo {
|
||||
id: string
|
||||
name: string
|
||||
status: ToolCallStatus
|
||||
/** Human-readable title from the backend ToolUI metadata. */
|
||||
displayTitle?: string
|
||||
}
|
||||
|
||||
/** A content block inside an assistant message. */
|
||||
export type ContentBlockType = 'text' | 'tool_call' | 'subagent'
|
||||
|
||||
export interface ContentBlock {
|
||||
type: ContentBlockType
|
||||
/** Text content (for 'text' and 'subagent' blocks). */
|
||||
content?: string
|
||||
/** Tool call info (for 'tool_call' blocks). */
|
||||
toolCall?: ToolCallInfo
|
||||
}
|
||||
|
||||
export interface ChatMessage {
|
||||
id: string
|
||||
role: 'user' | 'assistant'
|
||||
content: string
|
||||
timestamp: string
|
||||
/** Structured content blocks for rich rendering. When present, prefer over `content`. */
|
||||
contentBlocks?: ContentBlock[]
|
||||
/** Name of the currently active subagent (shown as a label while streaming). */
|
||||
activeSubagent?: string | null
|
||||
}
|
||||
|
||||
interface UseWorkspaceChatProps {
|
||||
workspaceId: string
|
||||
}
|
||||
|
||||
interface UseWorkspaceChatReturn {
|
||||
messages: ChatMessage[]
|
||||
isSending: boolean
|
||||
error: string | null
|
||||
sendMessage: (message: string) => Promise<void>
|
||||
abortMessage: () => void
|
||||
clearMessages: () => void
|
||||
}
|
||||
|
||||
/** Maps subagent IDs to human-readable labels. */
|
||||
const SUBAGENT_LABELS: Record<string, string> = {
|
||||
build: 'Building',
|
||||
deploy: 'Deploying',
|
||||
auth: 'Connecting credentials',
|
||||
research: 'Researching',
|
||||
knowledge: 'Managing knowledge base',
|
||||
custom_tool: 'Creating tool',
|
||||
superagent: 'Executing action',
|
||||
plan: 'Planning',
|
||||
debug: 'Debugging',
|
||||
edit: 'Editing workflow',
|
||||
}
|
||||
|
||||
export function useWorkspaceChat({ workspaceId }: UseWorkspaceChatProps): UseWorkspaceChatReturn {
|
||||
const [messages, setMessages] = useState<ChatMessage[]>([])
|
||||
const [isSending, setIsSending] = useState(false)
|
||||
const [error, setError] = useState<string | null>(null)
|
||||
const abortControllerRef = useRef<AbortController | null>(null)
|
||||
const chatIdRef = useRef<string | undefined>(undefined)
|
||||
|
||||
const sendMessage = useCallback(
|
||||
async (message: string) => {
|
||||
if (!message.trim() || !workspaceId) return
|
||||
|
||||
setError(null)
|
||||
setIsSending(true)
|
||||
|
||||
const userMessage: ChatMessage = {
|
||||
id: crypto.randomUUID(),
|
||||
role: 'user',
|
||||
content: message,
|
||||
timestamp: new Date().toISOString(),
|
||||
}
|
||||
|
||||
const assistantMessage: ChatMessage = {
|
||||
id: crypto.randomUUID(),
|
||||
role: 'assistant',
|
||||
content: '',
|
||||
timestamp: new Date().toISOString(),
|
||||
contentBlocks: [],
|
||||
activeSubagent: null,
|
||||
}
|
||||
|
||||
setMessages((prev) => [...prev, userMessage, assistantMessage])
|
||||
|
||||
const abortController = new AbortController()
|
||||
abortControllerRef.current = abortController
|
||||
|
||||
// Mutable refs for the streaming context so we can build content blocks
|
||||
// without relying on stale React state closures.
|
||||
const blocksRef: ContentBlock[] = []
|
||||
const toolCallMapRef = new Map<string, number>() // toolCallId → index in blocksRef
|
||||
|
||||
/** Ensure the last block is a text block and return it. */
|
||||
const ensureTextBlock = (): ContentBlock => {
|
||||
const last = blocksRef[blocksRef.length - 1]
|
||||
if (last && last.type === 'text') return last
|
||||
const newBlock: ContentBlock = { type: 'text', content: '' }
|
||||
blocksRef.push(newBlock)
|
||||
return newBlock
|
||||
}
|
||||
|
||||
/** Push updated blocks + content into the assistant message. */
|
||||
const flushBlocks = (extra?: Partial<ChatMessage>) => {
|
||||
const fullText = blocksRef
|
||||
.filter((b) => b.type === 'text')
|
||||
.map((b) => b.content ?? '')
|
||||
.join('')
|
||||
setMessages((prev) =>
|
||||
prev.map((msg) =>
|
||||
msg.id === assistantMessage.id
|
||||
? {
|
||||
...msg,
|
||||
content: fullText,
|
||||
contentBlocks: [...blocksRef],
|
||||
...extra,
|
||||
}
|
||||
: msg
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
try {
|
||||
const response = await fetch('/api/copilot/workspace-chat', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
message,
|
||||
workspaceId,
|
||||
...(chatIdRef.current ? { chatId: chatIdRef.current } : {}),
|
||||
}),
|
||||
signal: abortController.signal,
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const errorData = await response.json().catch(() => ({}))
|
||||
throw new Error(errorData.error || `Request failed: ${response.status}`)
|
||||
}
|
||||
|
||||
if (!response.body) {
|
||||
throw new Error('No response body')
|
||||
}
|
||||
|
||||
const reader = response.body.getReader()
|
||||
const decoder = new TextDecoder()
|
||||
let buffer = ''
|
||||
|
||||
while (true) {
|
||||
const { done, value } = await reader.read()
|
||||
if (done) break
|
||||
|
||||
buffer += decoder.decode(value, { stream: true })
|
||||
|
||||
const lines = buffer.split('\n')
|
||||
buffer = lines.pop() || ''
|
||||
|
||||
for (const line of lines) {
|
||||
if (!line.startsWith('data: ')) continue
|
||||
|
||||
try {
|
||||
const event = JSON.parse(line.slice(6))
|
||||
|
||||
switch (event.type) {
|
||||
case 'chat_id': {
|
||||
if (event.chatId) {
|
||||
chatIdRef.current = event.chatId
|
||||
}
|
||||
break
|
||||
}
|
||||
|
||||
case 'content': {
|
||||
if (event.content || event.data) {
|
||||
const chunk =
|
||||
typeof event.data === 'string' ? event.data : event.content || ''
|
||||
if (chunk) {
|
||||
const textBlock = ensureTextBlock()
|
||||
textBlock.content = (textBlock.content ?? '') + chunk
|
||||
flushBlocks()
|
||||
}
|
||||
}
|
||||
break
|
||||
}
|
||||
|
||||
case 'tool_generating':
|
||||
case 'tool_call': {
|
||||
const toolCallId = event.toolCallId
|
||||
const toolName = event.toolName || event.data?.name || 'unknown'
|
||||
if (!toolCallId) break
|
||||
|
||||
const ui = event.ui || event.data?.ui
|
||||
const displayTitle = ui?.title || ui?.phaseLabel
|
||||
|
||||
if (!toolCallMapRef.has(toolCallId)) {
|
||||
const toolBlock: ContentBlock = {
|
||||
type: 'tool_call',
|
||||
toolCall: {
|
||||
id: toolCallId,
|
||||
name: toolName,
|
||||
status: 'executing',
|
||||
displayTitle,
|
||||
},
|
||||
}
|
||||
toolCallMapRef.set(toolCallId, blocksRef.length)
|
||||
blocksRef.push(toolBlock)
|
||||
} else {
|
||||
const idx = toolCallMapRef.get(toolCallId)!
|
||||
const existing = blocksRef[idx]
|
||||
if (existing.toolCall) {
|
||||
existing.toolCall.name = toolName
|
||||
if (displayTitle) existing.toolCall.displayTitle = displayTitle
|
||||
}
|
||||
}
|
||||
flushBlocks()
|
||||
break
|
||||
}
|
||||
|
||||
case 'tool_result': {
|
||||
const toolCallId = event.toolCallId || event.data?.id
|
||||
if (!toolCallId) break
|
||||
const idx = toolCallMapRef.get(toolCallId)
|
||||
if (idx !== undefined) {
|
||||
const block = blocksRef[idx]
|
||||
if (block.toolCall) {
|
||||
block.toolCall.status = event.success ? 'success' : 'error'
|
||||
}
|
||||
flushBlocks()
|
||||
}
|
||||
break
|
||||
}
|
||||
|
||||
case 'tool_error': {
|
||||
const toolCallId = event.toolCallId || event.data?.id
|
||||
if (!toolCallId) break
|
||||
const idx = toolCallMapRef.get(toolCallId)
|
||||
if (idx !== undefined) {
|
||||
const block = blocksRef[idx]
|
||||
if (block.toolCall) {
|
||||
block.toolCall.status = 'error'
|
||||
}
|
||||
flushBlocks()
|
||||
}
|
||||
break
|
||||
}
|
||||
|
||||
case 'subagent_start': {
|
||||
const subagentName = event.subagent || event.data?.agent
|
||||
if (subagentName) {
|
||||
const label = SUBAGENT_LABELS[subagentName] || subagentName
|
||||
const subBlock: ContentBlock = {
|
||||
type: 'subagent',
|
||||
content: label,
|
||||
}
|
||||
blocksRef.push(subBlock)
|
||||
flushBlocks({ activeSubagent: label })
|
||||
}
|
||||
break
|
||||
}
|
||||
|
||||
case 'subagent_end': {
|
||||
flushBlocks({ activeSubagent: null })
|
||||
break
|
||||
}
|
||||
|
||||
case 'error': {
|
||||
setError(event.error || 'An error occurred')
|
||||
break
|
||||
}
|
||||
|
||||
case 'done': {
|
||||
if (event.content && typeof event.content === 'string') {
|
||||
setMessages((prev) =>
|
||||
prev.map((msg) =>
|
||||
msg.id === assistantMessage.id && !msg.content
|
||||
? { ...msg, content: event.content }
|
||||
: msg
|
||||
)
|
||||
)
|
||||
}
|
||||
break
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
// Skip malformed SSE lines
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
if (err instanceof Error && err.name === 'AbortError') {
|
||||
logger.info('Message aborted by user')
|
||||
return
|
||||
}
|
||||
|
||||
const errorMessage = err instanceof Error ? err.message : 'Failed to send message'
|
||||
logger.error('Failed to send workspace chat message', { error: errorMessage })
|
||||
setError(errorMessage)
|
||||
|
||||
setMessages((prev) =>
|
||||
prev.map((msg) =>
|
||||
msg.id === assistantMessage.id && !msg.content
|
||||
? { ...msg, content: 'Sorry, something went wrong. Please try again.' }
|
||||
: msg
|
||||
)
|
||||
)
|
||||
} finally {
|
||||
setIsSending(false)
|
||||
abortControllerRef.current = null
|
||||
}
|
||||
},
|
||||
[workspaceId]
|
||||
)
|
||||
|
||||
const abortMessage = useCallback(() => {
|
||||
abortControllerRef.current?.abort()
|
||||
setIsSending(false)
|
||||
}, [])
|
||||
|
||||
const clearMessages = useCallback(() => {
|
||||
setMessages([])
|
||||
setError(null)
|
||||
chatIdRef.current = undefined
|
||||
}, [])
|
||||
|
||||
return {
|
||||
messages,
|
||||
isSending,
|
||||
error,
|
||||
sendMessage,
|
||||
abortMessage,
|
||||
clearMessages,
|
||||
}
|
||||
}
|
||||
7
apps/sim/app/workspace/[workspaceId]/chat/layout.tsx
Normal file
7
apps/sim/app/workspace/[workspaceId]/chat/layout.tsx
Normal file
@@ -0,0 +1,7 @@
|
||||
export default function ChatLayout({ children }: { children: React.ReactNode }) {
|
||||
return (
|
||||
<div className='flex h-full flex-1 flex-col overflow-hidden pl-[var(--sidebar-width)]'>
|
||||
{children}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
26
apps/sim/app/workspace/[workspaceId]/chat/page.tsx
Normal file
26
apps/sim/app/workspace/[workspaceId]/chat/page.tsx
Normal file
@@ -0,0 +1,26 @@
|
||||
import { redirect } from 'next/navigation'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { verifyWorkspaceMembership } from '@/app/api/workflows/utils'
|
||||
import { Chat } from './chat'
|
||||
|
||||
interface ChatPageProps {
|
||||
params: Promise<{
|
||||
workspaceId: string
|
||||
}>
|
||||
}
|
||||
|
||||
export default async function ChatPage({ params }: ChatPageProps) {
|
||||
const { workspaceId } = await params
|
||||
const session = await getSession()
|
||||
|
||||
if (!session?.user?.id) {
|
||||
redirect('/')
|
||||
}
|
||||
|
||||
const hasPermission = await verifyWorkspaceMembership(session.user.id, workspaceId)
|
||||
if (!hasPermission) {
|
||||
redirect('/')
|
||||
}
|
||||
|
||||
return <Chat />
|
||||
}
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
import { memo, useCallback, useEffect, useMemo, useRef, useState } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { Database, HelpCircle, Layout, Plus, Search, Settings } from 'lucide-react'
|
||||
import { Database, HelpCircle, Layout, MessageSquare, Plus, Search, Settings } from 'lucide-react'
|
||||
import Link from 'next/link'
|
||||
import { useParams, usePathname, useRouter } from 'next/navigation'
|
||||
import { Button, Download, FolderPlus, Library, Loader, Tooltip } from '@/components/emcn'
|
||||
@@ -248,6 +248,12 @@ export const Sidebar = memo(function Sidebar() {
|
||||
const footerNavigationItems = useMemo(
|
||||
() =>
|
||||
[
|
||||
{
|
||||
id: 'chat',
|
||||
label: 'Mothership',
|
||||
icon: MessageSquare,
|
||||
href: `/workspace/${workspaceId}/chat`,
|
||||
},
|
||||
{
|
||||
id: 'logs',
|
||||
label: 'Logs',
|
||||
|
||||
@@ -15,14 +15,16 @@ export interface ChatLoadResult {
|
||||
/**
|
||||
* Resolve or create a copilot chat session.
|
||||
* If chatId is provided, loads the existing chat. Otherwise creates a new one.
|
||||
* Supports both workflow-scoped and workspace-scoped chats.
|
||||
*/
|
||||
export async function resolveOrCreateChat(params: {
|
||||
chatId?: string
|
||||
userId: string
|
||||
workflowId: string
|
||||
workflowId?: string
|
||||
workspaceId?: string
|
||||
model: string
|
||||
}): Promise<ChatLoadResult> {
|
||||
const { chatId, userId, workflowId, model } = params
|
||||
const { chatId, userId, workflowId, workspaceId, model } = params
|
||||
|
||||
if (chatId) {
|
||||
const [chat] = await db
|
||||
@@ -43,7 +45,8 @@ export async function resolveOrCreateChat(params: {
|
||||
.insert(copilotChats)
|
||||
.values({
|
||||
userId,
|
||||
workflowId,
|
||||
...(workflowId ? { workflowId } : {}),
|
||||
...(workspaceId ? { workspaceId } : {}),
|
||||
title: null,
|
||||
model,
|
||||
messages: [],
|
||||
@@ -51,7 +54,7 @@ export async function resolveOrCreateChat(params: {
|
||||
.returning()
|
||||
|
||||
if (!newChat) {
|
||||
logger.warn('Failed to create new copilot chat row', { userId, workflowId })
|
||||
logger.warn('Failed to create new copilot chat row', { userId, workflowId, workspaceId })
|
||||
return {
|
||||
chatId: '',
|
||||
chat: null,
|
||||
|
||||
@@ -10,6 +10,7 @@ const logger = createLogger('CopilotChatPayload')
|
||||
export interface BuildPayloadParams {
|
||||
message: string
|
||||
workflowId: string
|
||||
workflowName?: string
|
||||
userId: string
|
||||
userMessageId: string
|
||||
mode: string
|
||||
@@ -152,6 +153,7 @@ export async function buildCopilotRequestPayload(
|
||||
return {
|
||||
message,
|
||||
workflowId,
|
||||
...(params.workflowName ? { workflowName: params.workflowName } : {}),
|
||||
userId,
|
||||
model: selectedModel,
|
||||
...(provider ? { provider } : {}),
|
||||
|
||||
@@ -269,7 +269,7 @@ export const sseHandlers: Record<string, SSEHandler> = {
|
||||
updatedMap[toolCallId] = {
|
||||
...current,
|
||||
state: targetState,
|
||||
display: resolveToolDisplay(current.name, targetState, current.id, current.params),
|
||||
display: resolveToolDisplay(current.name, targetState, current.id, current.params, current.serverUI),
|
||||
}
|
||||
set({ toolCallsById: updatedMap })
|
||||
|
||||
@@ -317,22 +317,45 @@ export const sseHandlers: Record<string, SSEHandler> = {
|
||||
const resultPayload = asRecord(
|
||||
data?.result || eventData.result || eventData.data || data?.data
|
||||
)
|
||||
const workflowState = asRecord(resultPayload?.workflowState)
|
||||
const hasWorkflowState = !!resultPayload?.workflowState
|
||||
logger.info('[SSE] edit_workflow result received', {
|
||||
hasWorkflowState,
|
||||
blockCount: hasWorkflowState ? Object.keys(workflowState.blocks ?? {}).length : 0,
|
||||
edgeCount: Array.isArray(workflowState.edges) ? workflowState.edges.length : 0,
|
||||
})
|
||||
if (hasWorkflowState) {
|
||||
const diffStore = useWorkflowDiffStore.getState()
|
||||
diffStore
|
||||
.setProposedChanges(resultPayload.workflowState as WorkflowState)
|
||||
.catch((err) => {
|
||||
logger.error('[SSE] Failed to apply edit_workflow diff', {
|
||||
error: err instanceof Error ? err.message : String(err),
|
||||
const input = asRecord(current.params || current.input)
|
||||
const workflowId =
|
||||
(input?.workflowId as string) ||
|
||||
useWorkflowRegistry.getState().activeWorkflowId
|
||||
|
||||
if (!workflowId) {
|
||||
logger.warn('[SSE] edit_workflow result has no workflowId, skipping diff')
|
||||
} else {
|
||||
// Re-fetch the state the server just wrote to DB.
|
||||
// Never use the response's workflowState directly — that would
|
||||
// mean client and server independently track state, creating
|
||||
// race conditions when the build agent makes sequential calls.
|
||||
logger.info('[SSE] edit_workflow success, fetching state from DB', { workflowId })
|
||||
fetch(`/api/workflows/${workflowId}/state`)
|
||||
.then((res) => {
|
||||
if (!res.ok) throw new Error(`State fetch failed: ${res.status}`)
|
||||
return res.json()
|
||||
})
|
||||
.then((freshState) => {
|
||||
const diffStore = useWorkflowDiffStore.getState()
|
||||
return diffStore.setProposedChanges(freshState as WorkflowState, undefined, {
|
||||
skipPersist: true,
|
||||
})
|
||||
})
|
||||
.catch((err) => {
|
||||
logger.error('[SSE] Failed to fetch/apply edit_workflow state', {
|
||||
error: err instanceof Error ? err.message : String(err),
|
||||
workflowId,
|
||||
})
|
||||
// Fallback: use the response's workflowState if DB fetch failed
|
||||
if (resultPayload?.workflowState) {
|
||||
const diffStore = useWorkflowDiffStore.getState()
|
||||
diffStore
|
||||
.setProposedChanges(resultPayload.workflowState as WorkflowState, undefined, {
|
||||
skipPersist: true,
|
||||
})
|
||||
.catch(() => {})
|
||||
}
|
||||
})
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error('[SSE] edit_workflow result handling failed', {
|
||||
@@ -469,7 +492,8 @@ export const sseHandlers: Record<string, SSEHandler> = {
|
||||
b.toolCall?.name,
|
||||
targetState,
|
||||
toolCallId,
|
||||
b.toolCall?.params
|
||||
b.toolCall?.params,
|
||||
b.toolCall?.serverUI
|
||||
),
|
||||
},
|
||||
}
|
||||
@@ -507,7 +531,7 @@ export const sseHandlers: Record<string, SSEHandler> = {
|
||||
updatedMap[toolCallId] = {
|
||||
...current,
|
||||
state: targetState,
|
||||
display: resolveToolDisplay(current.name, targetState, current.id, current.params),
|
||||
display: resolveToolDisplay(current.name, targetState, current.id, current.params, current.serverUI),
|
||||
}
|
||||
set({ toolCallsById: updatedMap })
|
||||
}
|
||||
@@ -532,7 +556,8 @@ export const sseHandlers: Record<string, SSEHandler> = {
|
||||
b.toolCall?.name,
|
||||
targetState,
|
||||
toolCallId,
|
||||
b.toolCall?.params
|
||||
b.toolCall?.params,
|
||||
b.toolCall?.serverUI
|
||||
),
|
||||
},
|
||||
}
|
||||
@@ -579,6 +604,16 @@ export const sseHandlers: Record<string, SSEHandler> = {
|
||||
const isPartial = toolData.partial === true
|
||||
const { toolCallsById } = get()
|
||||
|
||||
// Extract copilot-provided UI metadata for fallback display
|
||||
const rawUI = (toolData.ui || data?.ui) as Record<string, unknown> | undefined
|
||||
const serverUI = rawUI
|
||||
? {
|
||||
title: rawUI.title as string | undefined,
|
||||
phaseLabel: rawUI.phaseLabel as string | undefined,
|
||||
icon: rawUI.icon as string | undefined,
|
||||
}
|
||||
: undefined
|
||||
|
||||
const existing = toolCallsById[id]
|
||||
const toolName = name || existing?.name || 'unknown_tool'
|
||||
const isAutoAllowed = get().isToolAutoAllowed(toolName)
|
||||
@@ -592,20 +627,24 @@ export const sseHandlers: Record<string, SSEHandler> = {
|
||||
initialState = ClientToolCallState.executing
|
||||
}
|
||||
|
||||
const effectiveServerUI = serverUI || existing?.serverUI
|
||||
|
||||
const next: CopilotToolCall = existing
|
||||
? {
|
||||
...existing,
|
||||
name: toolName,
|
||||
state: initialState,
|
||||
...(args ? { params: args } : {}),
|
||||
display: resolveToolDisplay(toolName, initialState, id, args || existing.params),
|
||||
...(effectiveServerUI ? { serverUI: effectiveServerUI } : {}),
|
||||
display: resolveToolDisplay(toolName, initialState, id, args || existing.params, effectiveServerUI),
|
||||
}
|
||||
: {
|
||||
id,
|
||||
name: toolName,
|
||||
state: initialState,
|
||||
...(args ? { params: args } : {}),
|
||||
display: resolveToolDisplay(toolName, initialState, id, args),
|
||||
...(serverUI ? { serverUI } : {}),
|
||||
display: resolveToolDisplay(toolName, initialState, id, args, serverUI),
|
||||
}
|
||||
const updated = { ...toolCallsById, [id]: next }
|
||||
set({ toolCallsById: updated })
|
||||
|
||||
@@ -178,7 +178,7 @@ function setToolState(toolCallId: string, state: ClientToolCallState): void {
|
||||
[toolCallId]: {
|
||||
...current,
|
||||
state,
|
||||
display: resolveToolDisplay(current.name, state, toolCallId, current.params),
|
||||
display: resolveToolDisplay(current.name, state, toolCallId, current.params, current.serverUI),
|
||||
},
|
||||
}
|
||||
useCopilotStore.setState({ toolCallsById: updated })
|
||||
|
||||
@@ -285,7 +285,7 @@ export const subAgentSSEHandlers: Record<string, SSEHandler> = {
|
||||
const updatedSubAgentToolCall = {
|
||||
...existing,
|
||||
state: targetState,
|
||||
display: resolveToolDisplay(existing.name, targetState, toolCallId, existing.params),
|
||||
display: resolveToolDisplay(existing.name, targetState, toolCallId, existing.params, existing.serverUI),
|
||||
}
|
||||
context.subAgentToolCalls[parentToolCallId][existingIndex] = updatedSubAgentToolCall
|
||||
|
||||
|
||||
@@ -34,7 +34,7 @@ export function clearStreamingFlags(toolCall: CopilotToolCall): void {
|
||||
? ClientToolCallState.success
|
||||
: ClientToolCallState.aborted
|
||||
toolCall.state = normalized
|
||||
toolCall.display = resolveToolDisplay(toolCall.name, normalized, toolCall.id, toolCall.params)
|
||||
toolCall.display = resolveToolDisplay(toolCall.name, normalized, toolCall.id, toolCall.params, toolCall.serverUI)
|
||||
}
|
||||
|
||||
if (Array.isArray(toolCall.subAgentBlocks)) {
|
||||
|
||||
@@ -10,6 +10,9 @@ export const INTERRUPT_TOOL_NAMES = [
|
||||
'deploy_chat',
|
||||
'deploy_api',
|
||||
'create_workspace_mcp_server',
|
||||
'update_workspace_mcp_server',
|
||||
'delete_workspace_mcp_server',
|
||||
'delete_workflow',
|
||||
'set_environment_variables',
|
||||
'make_api_request',
|
||||
'oauth_request_access',
|
||||
|
||||
@@ -1,15 +1,21 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { SIM_AGENT_API_URL } from '@/lib/copilot/constants'
|
||||
import { prepareExecutionContext } from '@/lib/copilot/orchestrator/tool-executor'
|
||||
import type { OrchestratorOptions, OrchestratorResult } from '@/lib/copilot/orchestrator/types'
|
||||
import type {
|
||||
ExecutionContext,
|
||||
OrchestratorOptions,
|
||||
OrchestratorResult,
|
||||
} from '@/lib/copilot/orchestrator/types'
|
||||
import { env } from '@/lib/core/config/env'
|
||||
import { getEffectiveDecryptedEnv } from '@/lib/environment/utils'
|
||||
import { buildToolCallSummaries, createStreamingContext, runStreamLoop } from './stream-core'
|
||||
|
||||
const logger = createLogger('CopilotOrchestrator')
|
||||
|
||||
export interface OrchestrateStreamOptions extends OrchestratorOptions {
|
||||
userId: string
|
||||
workflowId: string
|
||||
workflowId?: string
|
||||
workspaceId?: string
|
||||
chatId?: string
|
||||
}
|
||||
|
||||
@@ -17,8 +23,20 @@ export async function orchestrateCopilotStream(
|
||||
requestPayload: Record<string, unknown>,
|
||||
options: OrchestrateStreamOptions
|
||||
): Promise<OrchestratorResult> {
|
||||
const { userId, workflowId, chatId } = options
|
||||
const execContext = await prepareExecutionContext(userId, workflowId)
|
||||
const { userId, workflowId, workspaceId, chatId } = options
|
||||
|
||||
let execContext: ExecutionContext
|
||||
if (workflowId) {
|
||||
execContext = await prepareExecutionContext(userId, workflowId)
|
||||
} else {
|
||||
const decryptedEnvVars = await getEffectiveDecryptedEnv(userId, workspaceId)
|
||||
execContext = {
|
||||
userId,
|
||||
workflowId: '',
|
||||
workspaceId,
|
||||
decryptedEnvVars,
|
||||
}
|
||||
}
|
||||
|
||||
const payloadMsgId = requestPayload?.messageId
|
||||
const context = createStreamingContext({
|
||||
|
||||
@@ -62,7 +62,7 @@ describe('sse-handlers tool lifecycle', () => {
|
||||
await sseHandlers.tool_call(
|
||||
{
|
||||
type: 'tool_call',
|
||||
data: { id: 'tool-1', name: 'get_user_workflow', arguments: { workflowId: 'workflow-1' } },
|
||||
data: { id: 'tool-1', name: 'read', arguments: { workflowId: 'workflow-1' } },
|
||||
} as any,
|
||||
context,
|
||||
execContext,
|
||||
@@ -90,7 +90,7 @@ describe('sse-handlers tool lifecycle', () => {
|
||||
|
||||
const event = {
|
||||
type: 'tool_call',
|
||||
data: { id: 'tool-dup', name: 'get_user_workflow', arguments: { workflowId: 'workflow-1' } },
|
||||
data: { id: 'tool-dup', name: 'read', arguments: { workflowId: 'workflow-1' } },
|
||||
}
|
||||
|
||||
await sseHandlers.tool_call(event as any, context, execContext, { interactive: false })
|
||||
|
||||
@@ -217,13 +217,6 @@ export async function executeDeployMcp(
|
||||
return { success: false, error: 'workspaceId is required' }
|
||||
}
|
||||
|
||||
if (!workflowRecord.isDeployed) {
|
||||
return {
|
||||
success: false,
|
||||
error: 'Workflow must be deployed before adding as an MCP tool. Use deploy_api first.',
|
||||
}
|
||||
}
|
||||
|
||||
const serverId = params.serverId
|
||||
if (!serverId) {
|
||||
return {
|
||||
@@ -232,6 +225,34 @@ export async function executeDeployMcp(
|
||||
}
|
||||
}
|
||||
|
||||
// Handle undeploy action — remove workflow from MCP server
|
||||
if (params.action === 'undeploy') {
|
||||
const deleted = await db
|
||||
.delete(workflowMcpTool)
|
||||
.where(
|
||||
and(eq(workflowMcpTool.serverId, serverId), eq(workflowMcpTool.workflowId, workflowId))
|
||||
)
|
||||
.returning({ id: workflowMcpTool.id })
|
||||
|
||||
if (deleted.length === 0) {
|
||||
return { success: false, error: 'Workflow is not deployed to this MCP server' }
|
||||
}
|
||||
|
||||
mcpPubSub?.publishWorkflowToolsChanged({ serverId, workspaceId })
|
||||
|
||||
return {
|
||||
success: true,
|
||||
output: { workflowId, serverId, action: 'undeploy', removed: true },
|
||||
}
|
||||
}
|
||||
|
||||
if (!workflowRecord.isDeployed) {
|
||||
return {
|
||||
success: false,
|
||||
error: 'Workflow must be deployed before adding as an MCP tool. Use deploy_api first.',
|
||||
}
|
||||
}
|
||||
|
||||
const existingTool = await db
|
||||
.select()
|
||||
.from(workflowMcpTool)
|
||||
|
||||
@@ -11,7 +11,9 @@ import { ensureWorkflowAccess } from '../access'
|
||||
import type {
|
||||
CheckDeploymentStatusParams,
|
||||
CreateWorkspaceMcpServerParams,
|
||||
DeleteWorkspaceMcpServerParams,
|
||||
ListWorkspaceMcpServersParams,
|
||||
UpdateWorkspaceMcpServerParams,
|
||||
} from '../param-types'
|
||||
|
||||
export async function executeCheckDeploymentStatus(
|
||||
@@ -231,3 +233,82 @@ export async function executeCreateWorkspaceMcpServer(
|
||||
return { success: false, error: error instanceof Error ? error.message : String(error) }
|
||||
}
|
||||
}
|
||||
|
||||
export async function executeUpdateWorkspaceMcpServer(
|
||||
params: UpdateWorkspaceMcpServerParams,
|
||||
context: ExecutionContext
|
||||
): Promise<ToolCallResult> {
|
||||
try {
|
||||
const serverId = params.serverId
|
||||
if (!serverId) {
|
||||
return { success: false, error: 'serverId is required' }
|
||||
}
|
||||
|
||||
const updates: Record<string, unknown> = { updatedAt: new Date() }
|
||||
|
||||
if (typeof params.name === 'string') {
|
||||
const name = params.name.trim()
|
||||
if (!name) return { success: false, error: 'name cannot be empty' }
|
||||
updates.name = name
|
||||
}
|
||||
if (typeof params.description === 'string') {
|
||||
updates.description = params.description.trim() || null
|
||||
}
|
||||
if (typeof params.isPublic === 'boolean') {
|
||||
updates.isPublic = params.isPublic
|
||||
}
|
||||
|
||||
if (Object.keys(updates).length <= 1) {
|
||||
return { success: false, error: 'At least one of name, description, or isPublic is required' }
|
||||
}
|
||||
|
||||
const [existing] = await db
|
||||
.select({ id: workflowMcpServer.id, createdBy: workflowMcpServer.createdBy })
|
||||
.from(workflowMcpServer)
|
||||
.where(eq(workflowMcpServer.id, serverId))
|
||||
.limit(1)
|
||||
|
||||
if (!existing) {
|
||||
return { success: false, error: 'MCP server not found' }
|
||||
}
|
||||
|
||||
await db
|
||||
.update(workflowMcpServer)
|
||||
.set(updates)
|
||||
.where(eq(workflowMcpServer.id, serverId))
|
||||
|
||||
return { success: true, output: { serverId, ...updates, updatedAt: undefined } }
|
||||
} catch (error) {
|
||||
return { success: false, error: error instanceof Error ? error.message : String(error) }
|
||||
}
|
||||
}
|
||||
|
||||
export async function executeDeleteWorkspaceMcpServer(
|
||||
params: DeleteWorkspaceMcpServerParams,
|
||||
context: ExecutionContext
|
||||
): Promise<ToolCallResult> {
|
||||
try {
|
||||
const serverId = params.serverId
|
||||
if (!serverId) {
|
||||
return { success: false, error: 'serverId is required' }
|
||||
}
|
||||
|
||||
const [existing] = await db
|
||||
.select({ id: workflowMcpServer.id, name: workflowMcpServer.name, workspaceId: workflowMcpServer.workspaceId })
|
||||
.from(workflowMcpServer)
|
||||
.where(eq(workflowMcpServer.id, serverId))
|
||||
.limit(1)
|
||||
|
||||
if (!existing) {
|
||||
return { success: false, error: 'MCP server not found' }
|
||||
}
|
||||
|
||||
await db.delete(workflowMcpServer).where(eq(workflowMcpServer.id, serverId))
|
||||
|
||||
mcpPubSub?.publishWorkflowToolsChanged({ serverId, workspaceId: existing.workspaceId })
|
||||
|
||||
return { success: true, output: { serverId, name: existing.name, deleted: true } }
|
||||
} catch (error) {
|
||||
return { success: false, error: error instanceof Error ? error.message : String(error) }
|
||||
}
|
||||
}
|
||||
|
||||
@@ -17,18 +17,29 @@ import { getTool, resolveToolId } from '@/tools/utils'
|
||||
import {
|
||||
executeCheckDeploymentStatus,
|
||||
executeCreateWorkspaceMcpServer,
|
||||
executeDeleteWorkspaceMcpServer,
|
||||
executeDeployApi,
|
||||
executeDeployChat,
|
||||
executeDeployMcp,
|
||||
executeListWorkspaceMcpServers,
|
||||
executeRedeploy,
|
||||
executeUpdateWorkspaceMcpServer,
|
||||
} from './deployment-tools'
|
||||
import { executeIntegrationToolDirect } from './integration-tools'
|
||||
import {
|
||||
executeVfsGlob,
|
||||
executeVfsGrep,
|
||||
executeVfsList,
|
||||
executeVfsRead,
|
||||
} from './vfs-tools'
|
||||
import type {
|
||||
CheckDeploymentStatusParams,
|
||||
CreateFolderParams,
|
||||
CreateWorkflowParams,
|
||||
CreateWorkspaceMcpServerParams,
|
||||
DeleteFolderParams,
|
||||
DeleteWorkflowParams,
|
||||
DeleteWorkspaceMcpServerParams,
|
||||
DeployApiParams,
|
||||
DeployChatParams,
|
||||
DeployMcpParams,
|
||||
@@ -36,43 +47,44 @@ import type {
|
||||
GetBlockOutputsParams,
|
||||
GetBlockUpstreamReferencesParams,
|
||||
GetDeployedWorkflowStateParams,
|
||||
GetUserWorkflowParams,
|
||||
GetWorkflowDataParams,
|
||||
GetWorkflowFromNameParams,
|
||||
ListFoldersParams,
|
||||
ListUserWorkflowsParams,
|
||||
ListWorkspaceMcpServersParams,
|
||||
MoveFolderParams,
|
||||
MoveWorkflowParams,
|
||||
RenameFolderParams,
|
||||
RenameWorkflowParams,
|
||||
RunBlockParams,
|
||||
RunFromBlockParams,
|
||||
RunWorkflowParams,
|
||||
RunWorkflowUntilBlockParams,
|
||||
SetGlobalWorkflowVariablesParams,
|
||||
UpdateWorkflowParams,
|
||||
UpdateWorkspaceMcpServerParams,
|
||||
} from './param-types'
|
||||
import { PLATFORM_ACTIONS_CONTENT } from './platform-actions'
|
||||
import {
|
||||
executeCreateFolder,
|
||||
executeCreateWorkflow,
|
||||
executeDeleteFolder,
|
||||
executeDeleteWorkflow,
|
||||
executeGenerateApiKey,
|
||||
executeGetBlockOutputs,
|
||||
executeGetBlockUpstreamReferences,
|
||||
executeGetDeployedWorkflowState,
|
||||
executeGetUserWorkflow,
|
||||
executeGetWorkflowData,
|
||||
executeGetWorkflowFromName,
|
||||
executeListFolders,
|
||||
executeListUserWorkflows,
|
||||
executeListUserWorkspaces,
|
||||
executeMoveFolder,
|
||||
executeMoveWorkflow,
|
||||
executeRenameFolder,
|
||||
executeRenameWorkflow,
|
||||
executeRunBlock,
|
||||
executeRunFromBlock,
|
||||
executeRunWorkflow,
|
||||
executeRunWorkflowUntilBlock,
|
||||
executeSetGlobalWorkflowVariables,
|
||||
executeUpdateWorkflow,
|
||||
} from './workflow-tools'
|
||||
|
||||
const logger = createLogger('CopilotToolExecutor')
|
||||
@@ -319,17 +331,13 @@ async function executeManageCustomTool(
|
||||
}
|
||||
|
||||
const SERVER_TOOLS = new Set<string>([
|
||||
'get_blocks_and_tools',
|
||||
'get_blocks_metadata',
|
||||
'get_block_options',
|
||||
'get_block_config',
|
||||
'get_trigger_blocks',
|
||||
'edit_workflow',
|
||||
'get_workflow_console',
|
||||
'search_documentation',
|
||||
'search_online',
|
||||
'set_environment_variables',
|
||||
'get_credentials',
|
||||
'make_api_request',
|
||||
'knowledge_base',
|
||||
])
|
||||
@@ -338,16 +346,17 @@ const SIM_WORKFLOW_TOOL_HANDLERS: Record<
|
||||
string,
|
||||
(params: Record<string, unknown>, context: ExecutionContext) => Promise<ToolCallResult>
|
||||
> = {
|
||||
get_user_workflow: (p, c) => executeGetUserWorkflow(p as GetUserWorkflowParams, c),
|
||||
get_workflow_from_name: (p, c) => executeGetWorkflowFromName(p as GetWorkflowFromNameParams, c),
|
||||
list_user_workflows: (p, c) => executeListUserWorkflows(p as ListUserWorkflowsParams, c),
|
||||
list_user_workspaces: (_p, c) => executeListUserWorkspaces(c),
|
||||
list_folders: (p, c) => executeListFolders(p as ListFoldersParams, c),
|
||||
create_workflow: (p, c) => executeCreateWorkflow(p as CreateWorkflowParams, c),
|
||||
create_folder: (p, c) => executeCreateFolder(p as CreateFolderParams, c),
|
||||
rename_workflow: (p, c) => executeRenameWorkflow(p as unknown as RenameWorkflowParams, c),
|
||||
update_workflow: (p, c) => executeUpdateWorkflow(p as unknown as UpdateWorkflowParams, c),
|
||||
delete_workflow: (p, c) => executeDeleteWorkflow(p as unknown as DeleteWorkflowParams, c),
|
||||
move_workflow: (p, c) => executeMoveWorkflow(p as unknown as MoveWorkflowParams, c),
|
||||
move_folder: (p, c) => executeMoveFolder(p as unknown as MoveFolderParams, c),
|
||||
rename_folder: (p, c) => executeRenameFolder(p as unknown as RenameFolderParams, c),
|
||||
delete_folder: (p, c) => executeDeleteFolder(p as unknown as DeleteFolderParams, c),
|
||||
get_workflow_data: (p, c) => executeGetWorkflowData(p as GetWorkflowDataParams, c),
|
||||
get_block_outputs: (p, c) => executeGetBlockOutputs(p as GetBlockOutputsParams, c),
|
||||
get_block_upstream_references: (p, c) =>
|
||||
@@ -377,6 +386,10 @@ const SIM_WORKFLOW_TOOL_HANDLERS: Record<
|
||||
executeListWorkspaceMcpServers(p as ListWorkspaceMcpServersParams, c),
|
||||
create_workspace_mcp_server: (p, c) =>
|
||||
executeCreateWorkspaceMcpServer(p as CreateWorkspaceMcpServerParams, c),
|
||||
update_workspace_mcp_server: (p, c) =>
|
||||
executeUpdateWorkspaceMcpServer(p as unknown as UpdateWorkspaceMcpServerParams, c),
|
||||
delete_workspace_mcp_server: (p, c) =>
|
||||
executeDeleteWorkspaceMcpServer(p as unknown as DeleteWorkspaceMcpServerParams, c),
|
||||
oauth_get_auth_link: async (p, _c) => {
|
||||
const providerName = (p.providerName || p.provider_name || 'the provider') as string
|
||||
try {
|
||||
@@ -416,6 +429,11 @@ const SIM_WORKFLOW_TOOL_HANDLERS: Record<
|
||||
}
|
||||
},
|
||||
manage_custom_tool: (p, c) => executeManageCustomTool(p, c),
|
||||
// VFS tools
|
||||
grep: (p, c) => executeVfsGrep(p, c),
|
||||
glob: (p, c) => executeVfsGlob(p, c),
|
||||
read: (p, c) => executeVfsRead(p, c),
|
||||
list: (p, c) => executeVfsList(p, c),
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -5,19 +5,6 @@
|
||||
|
||||
// === Workflow Query Params ===
|
||||
|
||||
export interface GetUserWorkflowParams {
|
||||
workflowId?: string
|
||||
}
|
||||
|
||||
export interface GetWorkflowFromNameParams {
|
||||
workflow_name?: string
|
||||
}
|
||||
|
||||
export interface ListUserWorkflowsParams {
|
||||
workspaceId?: string
|
||||
folderId?: string
|
||||
}
|
||||
|
||||
export interface GetWorkflowDataParams {
|
||||
workflowId?: string
|
||||
data_type?: string
|
||||
@@ -176,6 +163,16 @@ export interface RenameWorkflowParams {
|
||||
name: string
|
||||
}
|
||||
|
||||
export interface UpdateWorkflowParams {
|
||||
workflowId: string
|
||||
name?: string
|
||||
description?: string
|
||||
}
|
||||
|
||||
export interface DeleteWorkflowParams {
|
||||
workflowId: string
|
||||
}
|
||||
|
||||
export interface MoveWorkflowParams {
|
||||
workflowId: string
|
||||
folderId: string | null
|
||||
@@ -185,3 +182,23 @@ export interface MoveFolderParams {
|
||||
folderId: string
|
||||
parentId: string | null
|
||||
}
|
||||
|
||||
export interface RenameFolderParams {
|
||||
folderId: string
|
||||
name: string
|
||||
}
|
||||
|
||||
export interface DeleteFolderParams {
|
||||
folderId: string
|
||||
}
|
||||
|
||||
export interface UpdateWorkspaceMcpServerParams {
|
||||
serverId: string
|
||||
name?: string
|
||||
description?: string
|
||||
isPublic?: boolean
|
||||
}
|
||||
|
||||
export interface DeleteWorkspaceMcpServerParams {
|
||||
serverId: string
|
||||
}
|
||||
|
||||
128
apps/sim/lib/copilot/orchestrator/tool-executor/vfs-tools.ts
Normal file
128
apps/sim/lib/copilot/orchestrator/tool-executor/vfs-tools.ts
Normal file
@@ -0,0 +1,128 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import type { ExecutionContext, ToolCallResult } from '@/lib/copilot/orchestrator/types'
|
||||
import { getOrMaterializeVFS } from '@/lib/copilot/vfs'
|
||||
|
||||
const logger = createLogger('VfsTools')
|
||||
|
||||
export async function executeVfsGrep(
|
||||
params: Record<string, unknown>,
|
||||
context: ExecutionContext
|
||||
): Promise<ToolCallResult> {
|
||||
const pattern = params.pattern as string | undefined
|
||||
if (!pattern) {
|
||||
return { success: false, error: "Missing required parameter 'pattern'" }
|
||||
}
|
||||
|
||||
const workspaceId = context.workspaceId
|
||||
if (!workspaceId) {
|
||||
return { success: false, error: 'No workspace context available' }
|
||||
}
|
||||
|
||||
try {
|
||||
const vfs = await getOrMaterializeVFS(workspaceId, context.userId)
|
||||
const result = vfs.grep(
|
||||
pattern,
|
||||
params.path as string | undefined,
|
||||
{
|
||||
maxResults: (params.maxResults as number) ?? 50,
|
||||
outputMode: (params.output_mode as 'content' | 'files_with_matches' | 'count') ?? 'content',
|
||||
ignoreCase: (params.ignoreCase as boolean) ?? false,
|
||||
lineNumbers: (params.lineNumbers as boolean) ?? true,
|
||||
context: (params.context as number) ?? 0,
|
||||
}
|
||||
)
|
||||
const outputMode = (params.output_mode as string) ?? 'content'
|
||||
const key = outputMode === 'files_with_matches' ? 'files' : outputMode === 'count' ? 'counts' : 'matches'
|
||||
return { success: true, output: { [key]: result } }
|
||||
} catch (err) {
|
||||
logger.error('vfs_grep failed', {
|
||||
error: err instanceof Error ? err.message : String(err),
|
||||
})
|
||||
return { success: false, error: err instanceof Error ? err.message : 'vfs_grep failed' }
|
||||
}
|
||||
}
|
||||
|
||||
export async function executeVfsGlob(
|
||||
params: Record<string, unknown>,
|
||||
context: ExecutionContext
|
||||
): Promise<ToolCallResult> {
|
||||
const pattern = params.pattern as string | undefined
|
||||
if (!pattern) {
|
||||
return { success: false, error: "Missing required parameter 'pattern'" }
|
||||
}
|
||||
|
||||
const workspaceId = context.workspaceId
|
||||
if (!workspaceId) {
|
||||
return { success: false, error: 'No workspace context available' }
|
||||
}
|
||||
|
||||
try {
|
||||
const vfs = await getOrMaterializeVFS(workspaceId, context.userId)
|
||||
const files = vfs.glob(pattern)
|
||||
return { success: true, output: { files } }
|
||||
} catch (err) {
|
||||
logger.error('vfs_glob failed', {
|
||||
error: err instanceof Error ? err.message : String(err),
|
||||
})
|
||||
return { success: false, error: err instanceof Error ? err.message : 'vfs_glob failed' }
|
||||
}
|
||||
}
|
||||
|
||||
export async function executeVfsRead(
|
||||
params: Record<string, unknown>,
|
||||
context: ExecutionContext
|
||||
): Promise<ToolCallResult> {
|
||||
const path = params.path as string | undefined
|
||||
if (!path) {
|
||||
return { success: false, error: "Missing required parameter 'path'" }
|
||||
}
|
||||
|
||||
const workspaceId = context.workspaceId
|
||||
if (!workspaceId) {
|
||||
return { success: false, error: 'No workspace context available' }
|
||||
}
|
||||
|
||||
try {
|
||||
const vfs = await getOrMaterializeVFS(workspaceId, context.userId)
|
||||
const result = vfs.read(
|
||||
path,
|
||||
params.offset as number | undefined,
|
||||
params.limit as number | undefined
|
||||
)
|
||||
if (!result) {
|
||||
return { success: false, error: `File not found: ${path}` }
|
||||
}
|
||||
return { success: true, output: result }
|
||||
} catch (err) {
|
||||
logger.error('vfs_read failed', {
|
||||
error: err instanceof Error ? err.message : String(err),
|
||||
})
|
||||
return { success: false, error: err instanceof Error ? err.message : 'vfs_read failed' }
|
||||
}
|
||||
}
|
||||
|
||||
export async function executeVfsList(
|
||||
params: Record<string, unknown>,
|
||||
context: ExecutionContext
|
||||
): Promise<ToolCallResult> {
|
||||
const path = params.path as string | undefined
|
||||
if (!path) {
|
||||
return { success: false, error: "Missing required parameter 'path'" }
|
||||
}
|
||||
|
||||
const workspaceId = context.workspaceId
|
||||
if (!workspaceId) {
|
||||
return { success: false, error: 'No workspace context available' }
|
||||
}
|
||||
|
||||
try {
|
||||
const vfs = await getOrMaterializeVFS(workspaceId, context.userId)
|
||||
const entries = vfs.list(path)
|
||||
return { success: true, output: { entries } }
|
||||
} catch (err) {
|
||||
logger.error('vfs_list failed', {
|
||||
error: err instanceof Error ? err.message : String(err),
|
||||
})
|
||||
return { success: false, error: err instanceof Error ? err.message : 'vfs_list failed' }
|
||||
}
|
||||
}
|
||||
@@ -18,15 +18,19 @@ import { ensureWorkflowAccess, ensureWorkspaceAccess, getDefaultWorkspaceId } fr
|
||||
import type {
|
||||
CreateFolderParams,
|
||||
CreateWorkflowParams,
|
||||
DeleteFolderParams,
|
||||
DeleteWorkflowParams,
|
||||
GenerateApiKeyParams,
|
||||
MoveFolderParams,
|
||||
MoveWorkflowParams,
|
||||
RenameFolderParams,
|
||||
RenameWorkflowParams,
|
||||
RunBlockParams,
|
||||
RunFromBlockParams,
|
||||
RunWorkflowParams,
|
||||
RunWorkflowUntilBlockParams,
|
||||
SetGlobalWorkflowVariablesParams,
|
||||
UpdateWorkflowParams,
|
||||
VariableOperation,
|
||||
} from '../param-types'
|
||||
|
||||
@@ -566,6 +570,142 @@ export async function executeRunFromBlock(
|
||||
}
|
||||
}
|
||||
|
||||
export async function executeUpdateWorkflow(
|
||||
params: UpdateWorkflowParams,
|
||||
context: ExecutionContext
|
||||
): Promise<ToolCallResult> {
|
||||
try {
|
||||
const workflowId = params.workflowId
|
||||
if (!workflowId) {
|
||||
return { success: false, error: 'workflowId is required' }
|
||||
}
|
||||
|
||||
const updates: Record<string, unknown> = { updatedAt: new Date() }
|
||||
|
||||
if (typeof params.name === 'string') {
|
||||
const name = params.name.trim()
|
||||
if (!name) return { success: false, error: 'name cannot be empty' }
|
||||
if (name.length > 200) return { success: false, error: 'Workflow name must be 200 characters or less' }
|
||||
updates.name = name
|
||||
}
|
||||
|
||||
if (typeof params.description === 'string') {
|
||||
if (params.description.length > 2000) {
|
||||
return { success: false, error: 'Description must be 2000 characters or less' }
|
||||
}
|
||||
updates.description = params.description
|
||||
}
|
||||
|
||||
if (Object.keys(updates).length <= 1) {
|
||||
return { success: false, error: 'At least one of name or description is required' }
|
||||
}
|
||||
|
||||
await ensureWorkflowAccess(workflowId, context.userId)
|
||||
|
||||
await db.update(workflow).set(updates).where(eq(workflow.id, workflowId))
|
||||
|
||||
return {
|
||||
success: true,
|
||||
output: { workflowId, ...updates, updatedAt: undefined },
|
||||
}
|
||||
} catch (error) {
|
||||
return { success: false, error: error instanceof Error ? error.message : String(error) }
|
||||
}
|
||||
}
|
||||
|
||||
export async function executeDeleteWorkflow(
|
||||
params: DeleteWorkflowParams,
|
||||
context: ExecutionContext
|
||||
): Promise<ToolCallResult> {
|
||||
try {
|
||||
const workflowId = params.workflowId
|
||||
if (!workflowId) {
|
||||
return { success: false, error: 'workflowId is required' }
|
||||
}
|
||||
|
||||
const { workflow: workflowRecord } = await ensureWorkflowAccess(workflowId, context.userId)
|
||||
|
||||
await db.delete(workflow).where(eq(workflow.id, workflowId))
|
||||
|
||||
return {
|
||||
success: true,
|
||||
output: { workflowId, name: workflowRecord.name, deleted: true },
|
||||
}
|
||||
} catch (error) {
|
||||
return { success: false, error: error instanceof Error ? error.message : String(error) }
|
||||
}
|
||||
}
|
||||
|
||||
export async function executeRenameFolder(
|
||||
params: RenameFolderParams,
|
||||
context: ExecutionContext
|
||||
): Promise<ToolCallResult> {
|
||||
try {
|
||||
const folderId = params.folderId
|
||||
if (!folderId) {
|
||||
return { success: false, error: 'folderId is required' }
|
||||
}
|
||||
const name = typeof params.name === 'string' ? params.name.trim() : ''
|
||||
if (!name) {
|
||||
return { success: false, error: 'name is required' }
|
||||
}
|
||||
if (name.length > 200) {
|
||||
return { success: false, error: 'Folder name must be 200 characters or less' }
|
||||
}
|
||||
|
||||
await db
|
||||
.update(workflowFolder)
|
||||
.set({ name, updatedAt: new Date() })
|
||||
.where(eq(workflowFolder.id, folderId))
|
||||
|
||||
return { success: true, output: { folderId, name } }
|
||||
} catch (error) {
|
||||
return { success: false, error: error instanceof Error ? error.message : String(error) }
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Deletes a workflow folder, re-parenting its direct children first.
 *
 * Child workflows and child folders are moved up to the deleted folder's
 * parent (presumably the root when parentId is null — confirm against the
 * folder schema), so nothing is orphaned by the delete.
 *
 * NOTE(review): the three writes below run sequentially without a transaction,
 * so a mid-sequence failure can leave children moved while the folder still
 * exists — confirm whether db.transaction should wrap them.
 * NOTE(review): no per-user access check is performed before mutating —
 * verify authorization is enforced upstream.
 *
 * @param params - Contains the folderId to delete.
 * @param context - Execution context (userId is not consulted here).
 * @returns Success with { folderId, deleted: true }, or an error result.
 */
export async function executeDeleteFolder(
  params: DeleteFolderParams,
  context: ExecutionContext
): Promise<ToolCallResult> {
  try {
    const folderId = params.folderId
    if (!folderId) {
      return { success: false, error: 'folderId is required' }
    }

    // Get the folder to find its parent
    const [folder] = await db
      .select({ parentId: workflowFolder.parentId })
      .from(workflowFolder)
      .where(eq(workflowFolder.id, folderId))
      .limit(1)

    if (!folder) {
      return { success: false, error: 'Folder not found' }
    }

    // Move child workflows to parent folder
    await db
      .update(workflow)
      .set({ folderId: folder.parentId, updatedAt: new Date() })
      .where(eq(workflow.folderId, folderId))

    // Move child folders to parent folder
    await db
      .update(workflowFolder)
      .set({ parentId: folder.parentId, updatedAt: new Date() })
      .where(eq(workflowFolder.parentId, folderId))

    // Delete the folder
    await db.delete(workflowFolder).where(eq(workflowFolder.id, folderId))

    return { success: true, output: { folderId, deleted: true } }
  } catch (error) {
    return { success: false, error: error instanceof Error ? error.message : String(error) }
  }
}
|
||||
|
||||
export async function executeRunBlock(
|
||||
params: RunBlockParams,
|
||||
context: ExecutionContext
|
||||
|
||||
@@ -2,10 +2,7 @@ import { db } from '@sim/db'
|
||||
import { customTools, permissions, workflow, workflowFolder, workspace } from '@sim/db/schema'
|
||||
import { and, asc, desc, eq, isNull, or } from 'drizzle-orm'
|
||||
import type { ExecutionContext, ToolCallResult } from '@/lib/copilot/orchestrator/types'
|
||||
import {
|
||||
formatNormalizedWorkflowForCopilot,
|
||||
normalizeWorkflowName,
|
||||
} from '@/lib/copilot/tools/shared/workflow-utils'
|
||||
import { formatNormalizedWorkflowForCopilot } from '@/lib/copilot/tools/shared/workflow-utils'
|
||||
import { mcpService } from '@/lib/mcp/service'
|
||||
import { listWorkspaceFiles } from '@/lib/uploads/contexts/workspace'
|
||||
import { getEffectiveBlockOutputPaths } from '@/lib/workflows/blocks/block-outputs'
|
||||
@@ -22,116 +19,16 @@ import type { Loop, Parallel } from '@/stores/workflows/workflow/types'
|
||||
import {
|
||||
ensureWorkflowAccess,
|
||||
ensureWorkspaceAccess,
|
||||
getAccessibleWorkflowsForUser,
|
||||
getDefaultWorkspaceId,
|
||||
} from '../access'
|
||||
import type {
|
||||
GetBlockOutputsParams,
|
||||
GetBlockUpstreamReferencesParams,
|
||||
GetDeployedWorkflowStateParams,
|
||||
GetUserWorkflowParams,
|
||||
GetWorkflowDataParams,
|
||||
GetWorkflowFromNameParams,
|
||||
ListFoldersParams,
|
||||
ListUserWorkflowsParams,
|
||||
} from '../param-types'
|
||||
|
||||
export async function executeGetUserWorkflow(
|
||||
params: GetUserWorkflowParams,
|
||||
context: ExecutionContext
|
||||
): Promise<ToolCallResult> {
|
||||
try {
|
||||
const workflowId = params.workflowId || context.workflowId
|
||||
if (!workflowId) {
|
||||
return { success: false, error: 'workflowId is required' }
|
||||
}
|
||||
|
||||
const { workflow: workflowRecord, workspaceId } = await ensureWorkflowAccess(
|
||||
workflowId,
|
||||
context.userId
|
||||
)
|
||||
|
||||
const normalized = await loadWorkflowFromNormalizedTables(workflowId)
|
||||
const userWorkflow = formatNormalizedWorkflowForCopilot(normalized)
|
||||
if (!userWorkflow) {
|
||||
return { success: false, error: 'Workflow has no normalized data' }
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
output: {
|
||||
workflowId,
|
||||
workflowName: workflowRecord.name || '',
|
||||
workspaceId,
|
||||
userWorkflow,
|
||||
},
|
||||
}
|
||||
} catch (error) {
|
||||
return { success: false, error: error instanceof Error ? error.message : String(error) }
|
||||
}
|
||||
}
|
||||
|
||||
export async function executeGetWorkflowFromName(
|
||||
params: GetWorkflowFromNameParams,
|
||||
context: ExecutionContext
|
||||
): Promise<ToolCallResult> {
|
||||
try {
|
||||
const workflowName = typeof params.workflow_name === 'string' ? params.workflow_name.trim() : ''
|
||||
if (!workflowName) {
|
||||
return { success: false, error: 'workflow_name is required' }
|
||||
}
|
||||
|
||||
const workflows = await getAccessibleWorkflowsForUser(context.userId)
|
||||
|
||||
const targetName = normalizeWorkflowName(workflowName)
|
||||
const match = workflows.find((w) => normalizeWorkflowName(w.name) === targetName)
|
||||
if (!match) {
|
||||
return { success: false, error: `Workflow not found: ${workflowName}` }
|
||||
}
|
||||
|
||||
const normalized = await loadWorkflowFromNormalizedTables(match.id)
|
||||
const userWorkflow = formatNormalizedWorkflowForCopilot(normalized)
|
||||
if (!userWorkflow) {
|
||||
return { success: false, error: 'Workflow has no normalized data' }
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
output: {
|
||||
workflowId: match.id,
|
||||
workflowName: match.name || '',
|
||||
workspaceId: match.workspaceId,
|
||||
userWorkflow,
|
||||
},
|
||||
}
|
||||
} catch (error) {
|
||||
return { success: false, error: error instanceof Error ? error.message : String(error) }
|
||||
}
|
||||
}
|
||||
|
||||
export async function executeListUserWorkflows(
|
||||
params: ListUserWorkflowsParams,
|
||||
context: ExecutionContext
|
||||
): Promise<ToolCallResult> {
|
||||
try {
|
||||
const workspaceId = params?.workspaceId as string | undefined
|
||||
const folderId = params?.folderId as string | undefined
|
||||
|
||||
const workflows = await getAccessibleWorkflowsForUser(context.userId, { workspaceId, folderId })
|
||||
|
||||
const workflowList = workflows.map((w) => ({
|
||||
workflowId: w.id,
|
||||
workflowName: w.name || '',
|
||||
workspaceId: w.workspaceId,
|
||||
folderId: w.folderId,
|
||||
}))
|
||||
|
||||
return { success: true, output: { workflows: workflowList } }
|
||||
} catch (error) {
|
||||
return { success: false, error: error instanceof Error ? error.message : String(error) }
|
||||
}
|
||||
}
|
||||
|
||||
export async function executeListUserWorkspaces(
|
||||
context: ExecutionContext
|
||||
): Promise<ToolCallResult> {
|
||||
|
||||
@@ -35,6 +35,8 @@ export interface SSEEvent {
|
||||
phase?: string
|
||||
/** Set on tool_result events */
|
||||
failedDependency?: boolean
|
||||
/** UI metadata from copilot (title, icon, phaseLabel) */
|
||||
ui?: Record<string, unknown>
|
||||
}
|
||||
|
||||
/** Lifecycle states a copilot tool call can occupy on the client. */
export type ToolCallStatus = 'pending' | 'executing' | 'success' | 'error' | 'skipped' | 'rejected'
|
||||
|
||||
@@ -1,5 +1,28 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { Loader2 } from 'lucide-react'
|
||||
import type { LucideIcon } from 'lucide-react'
|
||||
import {
|
||||
BookOpen,
|
||||
Bug,
|
||||
Cloud,
|
||||
Code,
|
||||
FileText,
|
||||
Folder,
|
||||
Globe,
|
||||
HelpCircle,
|
||||
Key,
|
||||
Loader2,
|
||||
Lock,
|
||||
Pencil,
|
||||
Play,
|
||||
Plus,
|
||||
Rocket,
|
||||
Search,
|
||||
Server,
|
||||
Settings,
|
||||
Terminal,
|
||||
Wrench,
|
||||
Zap,
|
||||
} from 'lucide-react'
|
||||
import {
|
||||
ClientToolCallState,
|
||||
type ClientToolDisplay,
|
||||
@@ -16,16 +39,62 @@ type StoreSet = (
|
||||
/**
 * Respond tools are internal to copilot subagents and should never be shown in
 * the UI; any tool whose name ends with this suffix is filtered out of display.
 */
const HIDDEN_TOOL_SUFFIX = '_respond'
|
||||
|
||||
/** UI metadata sent by the copilot on SSE tool_call events. */
export interface ServerToolUI {
  // Human-readable tool title; used as a display fallback when the tool has no
  // local registry entry.
  title?: string
  // Presumably a label for the phase this tool belongs to — confirm semantics
  // with the copilot backend.
  phaseLabel?: string
  // Icon name string, mapped to a Lucide component via ICON_MAP.
  icon?: string
}
|
||||
|
||||
/**
 * Maps copilot icon name strings to Lucide icon components.
 *
 * NOTE(review): several names alias to the same component (hammer/wrench →
 * Wrench, brain/book → BookOpen, workflow/settings → Settings) — this appears
 * intentional as the nearest visual match, but confirm before adding entries.
 */
const ICON_MAP: Record<string, LucideIcon> = {
  search: Search,
  globe: Globe,
  hammer: Wrench,
  rocket: Rocket,
  lock: Lock,
  book: BookOpen,
  wrench: Wrench,
  zap: Zap,
  play: Play,
  cloud: Cloud,
  key: Key,
  pencil: Pencil,
  terminal: Terminal,
  workflow: Settings,
  settings: Settings,
  server: Server,
  bug: Bug,
  brain: BookOpen,
  code: Code,
  help: HelpCircle,
  plus: Plus,
  file: FileText,
  folder: Folder,
}
|
||||
|
||||
function resolveIcon(iconName: string | undefined): LucideIcon {
|
||||
if (!iconName) return Loader2
|
||||
return ICON_MAP[iconName] || Loader2
|
||||
}
|
||||
|
||||
export function resolveToolDisplay(
|
||||
toolName: string | undefined,
|
||||
state: ClientToolCallState,
|
||||
_toolCallId?: string,
|
||||
params?: Record<string, any>
|
||||
params?: Record<string, unknown>,
|
||||
serverUI?: ServerToolUI
|
||||
): ClientToolDisplay | undefined {
|
||||
if (!toolName) return undefined
|
||||
if (toolName.endsWith(HIDDEN_TOOL_SUFFIX)) return undefined
|
||||
const entry = TOOL_DISPLAY_REGISTRY[toolName]
|
||||
if (!entry) return humanizedFallback(toolName, state)
|
||||
if (!entry) {
|
||||
// Use copilot-provided UI as a better fallback than humanized name
|
||||
if (serverUI?.title) {
|
||||
return serverUIFallback(serverUI, state)
|
||||
}
|
||||
return humanizedFallback(toolName, state)
|
||||
}
|
||||
|
||||
if (entry.uiConfig?.dynamicText && params) {
|
||||
const dynamicText = entry.uiConfig.dynamicText(params, state)
|
||||
@@ -51,6 +120,28 @@ export function resolveToolDisplay(
|
||||
return humanizedFallback(toolName, state)
|
||||
}
|
||||
|
||||
/** Generates display from copilot-provided UI metadata. */
|
||||
function serverUIFallback(
|
||||
serverUI: ServerToolUI,
|
||||
state: ClientToolCallState
|
||||
): ClientToolDisplay {
|
||||
const icon = resolveIcon(serverUI.icon)
|
||||
const title = serverUI.title!
|
||||
|
||||
switch (state) {
|
||||
case ClientToolCallState.success:
|
||||
return { text: `Completed ${title.toLowerCase()}`, icon }
|
||||
case ClientToolCallState.error:
|
||||
return { text: `Failed ${title.toLowerCase()}`, icon }
|
||||
case ClientToolCallState.rejected:
|
||||
return { text: `Skipped ${title.toLowerCase()}`, icon }
|
||||
case ClientToolCallState.aborted:
|
||||
return { text: `Aborted ${title.toLowerCase()}`, icon }
|
||||
default:
|
||||
return { text: title, icon: Loader2 }
|
||||
}
|
||||
}
|
||||
|
||||
export function humanizedFallback(
|
||||
toolName: string,
|
||||
state: ClientToolCallState
|
||||
@@ -121,7 +212,7 @@ export function abortAllInProgressTools(set: StoreSet, get: () => CopilotStore)
|
||||
...tc,
|
||||
state: resolved,
|
||||
subAgentStreaming: false,
|
||||
display: resolveToolDisplay(tc.name, resolved, id, tc.params),
|
||||
display: resolveToolDisplay(tc.name, resolved, id, tc.params, tc.serverUI),
|
||||
}
|
||||
hasUpdates = true
|
||||
} else if (tc.subAgentStreaming) {
|
||||
@@ -150,7 +241,7 @@ export function abortAllInProgressTools(set: StoreSet, get: () => CopilotStore)
|
||||
toolCall: {
|
||||
...prev,
|
||||
state: resolved,
|
||||
display: resolveToolDisplay(prev?.name, resolved, prev?.id, prev?.params),
|
||||
display: resolveToolDisplay(prev?.name, resolved, prev?.id, prev?.params, prev?.serverUI),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,35 +1,24 @@
|
||||
import type { LucideIcon } from 'lucide-react'
|
||||
import {
|
||||
Blocks,
|
||||
BookOpen,
|
||||
Bug,
|
||||
Check,
|
||||
CheckCircle,
|
||||
CheckCircle2,
|
||||
ClipboardCheck,
|
||||
Compass,
|
||||
Database,
|
||||
FileCode,
|
||||
FileText,
|
||||
FlaskConical,
|
||||
GitBranch,
|
||||
Globe,
|
||||
Globe2,
|
||||
Grid2x2,
|
||||
Grid2x2Check,
|
||||
Grid2x2X,
|
||||
Info,
|
||||
Key,
|
||||
KeyRound,
|
||||
ListChecks,
|
||||
ListFilter,
|
||||
ListTodo,
|
||||
Loader2,
|
||||
MessageSquare,
|
||||
MinusCircle,
|
||||
Moon,
|
||||
Navigation,
|
||||
Pencil,
|
||||
PencilLine,
|
||||
Play,
|
||||
PlugZap,
|
||||
@@ -41,13 +30,11 @@ import {
|
||||
Sparkles,
|
||||
Tag,
|
||||
TerminalSquare,
|
||||
WorkflowIcon,
|
||||
Wrench,
|
||||
X,
|
||||
XCircle,
|
||||
Zap,
|
||||
} from 'lucide-react'
|
||||
import { getLatestBlock } from '@/blocks/registry'
|
||||
import { getCustomTool } from '@/hooks/queries/custom-tools'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
|
||||
@@ -340,46 +327,6 @@ const META_build: ToolMetadata = {
|
||||
},
|
||||
}
|
||||
|
||||
/** Display metadata for the copilot debug subagent: per-state labels/icons plus collapsed subagent rendering. */
const META_debug: ToolMetadata = {
  displayNames: {
    [ClientToolCallState.generating]: { text: 'Debugging', icon: Loader2 },
    [ClientToolCallState.pending]: { text: 'Debugging', icon: Loader2 },
    [ClientToolCallState.executing]: { text: 'Debugging', icon: Loader2 },
    [ClientToolCallState.success]: { text: 'Debugged', icon: Bug },
    [ClientToolCallState.error]: { text: 'Failed to debug', icon: XCircle },
    [ClientToolCallState.rejected]: { text: 'Skipped debug', icon: XCircle },
    [ClientToolCallState.aborted]: { text: 'Aborted debug', icon: XCircle },
  },
  uiConfig: {
    subagent: {
      streamingLabel: 'Debugging',
      completedLabel: 'Debugged',
      shouldCollapse: true,
      outputArtifacts: [],
    },
  },
}
|
||||
|
||||
/** Display metadata for the copilot discovery subagent: per-state labels/icons plus collapsed subagent rendering. */
const META_discovery: ToolMetadata = {
  displayNames: {
    [ClientToolCallState.generating]: { text: 'Discovering', icon: Loader2 },
    [ClientToolCallState.pending]: { text: 'Discovering', icon: Loader2 },
    [ClientToolCallState.executing]: { text: 'Discovering', icon: Loader2 },
    [ClientToolCallState.success]: { text: 'Discovered', icon: Search },
    [ClientToolCallState.error]: { text: 'Failed to discover', icon: XCircle },
    [ClientToolCallState.rejected]: { text: 'Skipped discovery', icon: XCircle },
    [ClientToolCallState.aborted]: { text: 'Aborted discovery', icon: XCircle },
  },
  uiConfig: {
    subagent: {
      streamingLabel: 'Discovering',
      completedLabel: 'Discovered',
      shouldCollapse: true,
      outputArtifacts: [],
    },
  },
}
|
||||
|
||||
const META_deploy: ToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: { text: 'Deploying', icon: Loader2 },
|
||||
@@ -570,28 +517,6 @@ const META_deploy_mcp: ToolMetadata = {
|
||||
},
|
||||
}
|
||||
|
||||
/**
 * Display metadata for the copilot edit subagent. Marked isSpecial and kept
 * expanded (shouldCollapse: false) so its edit_summary artifact stays visible.
 */
const META_edit: ToolMetadata = {
  displayNames: {
    [ClientToolCallState.generating]: { text: 'Editing', icon: Loader2 },
    [ClientToolCallState.pending]: { text: 'Editing', icon: Loader2 },
    [ClientToolCallState.executing]: { text: 'Editing', icon: Loader2 },
    [ClientToolCallState.success]: { text: 'Edited', icon: Pencil },
    [ClientToolCallState.error]: { text: 'Failed to apply edit', icon: XCircle },
    [ClientToolCallState.rejected]: { text: 'Skipped edit', icon: XCircle },
    [ClientToolCallState.aborted]: { text: 'Aborted edit', icon: XCircle },
  },
  uiConfig: {
    isSpecial: true,
    subagent: {
      streamingLabel: 'Editing',
      completedLabel: 'Edited',
      shouldCollapse: false, // Edit subagent stays expanded
      outputArtifacts: ['edit_summary'],
      hideThinkingText: true, // We show WorkflowEditSummary instead
    },
  },
}
|
||||
|
||||
const META_edit_workflow: ToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: { text: 'Editing your workflow', icon: Loader2 },
|
||||
@@ -609,106 +534,6 @@ const META_edit_workflow: ToolMetadata = {
|
||||
},
|
||||
}
|
||||
|
||||
/** Display metadata for the copilot evaluate subagent: per-state labels/icons plus collapsed subagent rendering. */
const META_evaluate: ToolMetadata = {
  displayNames: {
    [ClientToolCallState.generating]: { text: 'Evaluating', icon: Loader2 },
    [ClientToolCallState.pending]: { text: 'Evaluating', icon: Loader2 },
    [ClientToolCallState.executing]: { text: 'Evaluating', icon: Loader2 },
    [ClientToolCallState.success]: { text: 'Evaluated', icon: ClipboardCheck },
    [ClientToolCallState.error]: { text: 'Failed to evaluate', icon: XCircle },
    [ClientToolCallState.rejected]: { text: 'Skipped evaluation', icon: XCircle },
    [ClientToolCallState.aborted]: { text: 'Aborted evaluation', icon: XCircle },
  },
  uiConfig: {
    subagent: {
      streamingLabel: 'Evaluating',
      completedLabel: 'Evaluated',
      shouldCollapse: true,
      outputArtifacts: [],
    },
  },
}
|
||||
|
||||
const META_get_block_config: ToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: { text: 'Getting block config', icon: Loader2 },
|
||||
[ClientToolCallState.pending]: { text: 'Getting block config', icon: Loader2 },
|
||||
[ClientToolCallState.executing]: { text: 'Getting block config', icon: Loader2 },
|
||||
[ClientToolCallState.success]: { text: 'Retrieved block config', icon: FileCode },
|
||||
[ClientToolCallState.error]: { text: 'Failed to get block config', icon: XCircle },
|
||||
[ClientToolCallState.aborted]: { text: 'Aborted getting block config', icon: XCircle },
|
||||
[ClientToolCallState.rejected]: {
|
||||
text: 'Skipped getting block config',
|
||||
icon: MinusCircle,
|
||||
},
|
||||
},
|
||||
getDynamicText: (params, state) => {
|
||||
if (params?.blockType && typeof params.blockType === 'string') {
|
||||
const blockConfig = getLatestBlock(params.blockType)
|
||||
const blockName = (blockConfig?.name ?? params.blockType.replace(/_/g, ' ')).toLowerCase()
|
||||
const opSuffix = params.operation ? ` (${params.operation})` : ''
|
||||
|
||||
switch (state) {
|
||||
case ClientToolCallState.success:
|
||||
return `Retrieved ${blockName}${opSuffix} config`
|
||||
case ClientToolCallState.executing:
|
||||
case ClientToolCallState.generating:
|
||||
case ClientToolCallState.pending:
|
||||
return `Retrieving ${blockName}${opSuffix} config`
|
||||
case ClientToolCallState.error:
|
||||
return `Failed to retrieve ${blockName}${opSuffix} config`
|
||||
case ClientToolCallState.aborted:
|
||||
return `Aborted retrieving ${blockName}${opSuffix} config`
|
||||
case ClientToolCallState.rejected:
|
||||
return `Skipped retrieving ${blockName}${opSuffix} config`
|
||||
}
|
||||
}
|
||||
return undefined
|
||||
},
|
||||
}
|
||||
|
||||
const META_get_block_options: ToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: { text: 'Getting block operations', icon: Loader2 },
|
||||
[ClientToolCallState.pending]: { text: 'Getting block operations', icon: Loader2 },
|
||||
[ClientToolCallState.executing]: { text: 'Getting block operations', icon: Loader2 },
|
||||
[ClientToolCallState.success]: { text: 'Retrieved block operations', icon: ListFilter },
|
||||
[ClientToolCallState.error]: { text: 'Failed to get block operations', icon: XCircle },
|
||||
[ClientToolCallState.aborted]: { text: 'Aborted getting block operations', icon: XCircle },
|
||||
[ClientToolCallState.rejected]: {
|
||||
text: 'Skipped getting block operations',
|
||||
icon: MinusCircle,
|
||||
},
|
||||
},
|
||||
getDynamicText: (params, state) => {
|
||||
const blockId =
|
||||
(params as any)?.blockId ||
|
||||
(params as any)?.blockType ||
|
||||
(params as any)?.block_id ||
|
||||
(params as any)?.block_type
|
||||
if (typeof blockId === 'string') {
|
||||
const blockConfig = getLatestBlock(blockId)
|
||||
const blockName = (blockConfig?.name ?? blockId.replace(/_/g, ' ')).toLowerCase()
|
||||
|
||||
switch (state) {
|
||||
case ClientToolCallState.success:
|
||||
return `Retrieved ${blockName} operations`
|
||||
case ClientToolCallState.executing:
|
||||
case ClientToolCallState.generating:
|
||||
case ClientToolCallState.pending:
|
||||
return `Retrieving ${blockName} operations`
|
||||
case ClientToolCallState.error:
|
||||
return `Failed to retrieve ${blockName} operations`
|
||||
case ClientToolCallState.aborted:
|
||||
return `Aborted retrieving ${blockName} operations`
|
||||
case ClientToolCallState.rejected:
|
||||
return `Skipped retrieving ${blockName} operations`
|
||||
}
|
||||
}
|
||||
return undefined
|
||||
},
|
||||
}
|
||||
|
||||
const META_get_block_outputs: ToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: { text: 'Getting block outputs', icon: Loader2 },
|
||||
@@ -767,81 +592,6 @@ const META_get_block_upstream_references: ToolMetadata = {
|
||||
},
|
||||
}
|
||||
|
||||
/** Display metadata for get_blocks_and_tools; no interrupt handling (runs without user confirmation). */
const META_get_blocks_and_tools: ToolMetadata = {
  displayNames: {
    [ClientToolCallState.generating]: { text: 'Exploring available options', icon: Loader2 },
    [ClientToolCallState.pending]: { text: 'Exploring available options', icon: Loader2 },
    [ClientToolCallState.executing]: { text: 'Exploring available options', icon: Loader2 },
    [ClientToolCallState.success]: { text: 'Explored available options', icon: Blocks },
    [ClientToolCallState.error]: { text: 'Failed to explore options', icon: XCircle },
    [ClientToolCallState.aborted]: { text: 'Aborted exploring options', icon: MinusCircle },
    [ClientToolCallState.rejected]: { text: 'Skipped exploring options', icon: MinusCircle },
  },
  interrupt: undefined,
}
|
||||
|
||||
/**
 * Display metadata for get_blocks_metadata. Dynamic text lists up to three
 * humanized block ids (with '...' when more were requested).
 */
const META_get_blocks_metadata: ToolMetadata = {
  displayNames: {
    [ClientToolCallState.generating]: { text: 'Searching block choices', icon: Loader2 },
    [ClientToolCallState.pending]: { text: 'Searching block choices', icon: Loader2 },
    [ClientToolCallState.executing]: { text: 'Searching block choices', icon: Loader2 },
    [ClientToolCallState.success]: { text: 'Searched block choices', icon: ListFilter },
    [ClientToolCallState.error]: { text: 'Failed to search block choices', icon: XCircle },
    [ClientToolCallState.aborted]: { text: 'Aborted searching block choices', icon: XCircle },
    [ClientToolCallState.rejected]: {
      text: 'Skipped searching block choices',
      icon: MinusCircle,
    },
  },
  getDynamicText: (params, state) => {
    if (params?.blockIds && Array.isArray(params.blockIds) && params.blockIds.length > 0) {
      // NOTE(review): assumes every element of blockIds is a string — a
      // non-string element would throw on .replace; confirm the param schema.
      const blockList = params.blockIds
        .slice(0, 3)
        .map((blockId) => blockId.replace(/_/g, ' '))
        .join(', ')
      const more = params.blockIds.length > 3 ? '...' : ''
      const blocks = `${blockList}${more}`

      switch (state) {
        case ClientToolCallState.success:
          return `Searched ${blocks}`
        case ClientToolCallState.executing:
        case ClientToolCallState.generating:
        case ClientToolCallState.pending:
          return `Searching ${blocks}`
        case ClientToolCallState.error:
          return `Failed to search ${blocks}`
        case ClientToolCallState.aborted:
          return `Aborted searching ${blocks}`
        case ClientToolCallState.rejected:
          return `Skipped searching ${blocks}`
      }
    }
    return undefined
  },
}
|
||||
|
||||
/** Display metadata for get_credentials (fetching the user's connected integrations); static labels only. */
const META_get_credentials: ToolMetadata = {
  displayNames: {
    [ClientToolCallState.generating]: { text: 'Fetching connected integrations', icon: Loader2 },
    [ClientToolCallState.pending]: { text: 'Fetching connected integrations', icon: Loader2 },
    [ClientToolCallState.executing]: { text: 'Fetching connected integrations', icon: Loader2 },
    [ClientToolCallState.success]: { text: 'Fetched connected integrations', icon: Key },
    [ClientToolCallState.error]: {
      text: 'Failed to fetch connected integrations',
      icon: XCircle,
    },
    [ClientToolCallState.aborted]: {
      text: 'Aborted fetching connected integrations',
      icon: MinusCircle,
    },
    [ClientToolCallState.rejected]: {
      text: 'Skipped fetching connected integrations',
      icon: MinusCircle,
    },
  },
}
|
||||
|
||||
const META_get_examples_rag: ToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: { text: 'Fetching examples', icon: Loader2 },
|
||||
@@ -963,19 +713,6 @@ const META_get_page_contents: ToolMetadata = {
|
||||
},
|
||||
}
|
||||
|
||||
/** Display metadata for get_trigger_blocks; no interrupt handling (runs without user confirmation). */
const META_get_trigger_blocks: ToolMetadata = {
  displayNames: {
    [ClientToolCallState.generating]: { text: 'Finding trigger blocks', icon: Loader2 },
    [ClientToolCallState.pending]: { text: 'Finding trigger blocks', icon: Loader2 },
    [ClientToolCallState.executing]: { text: 'Finding trigger blocks', icon: Loader2 },
    [ClientToolCallState.success]: { text: 'Found trigger blocks', icon: ListFilter },
    [ClientToolCallState.error]: { text: 'Failed to find trigger blocks', icon: XCircle },
    [ClientToolCallState.aborted]: { text: 'Aborted finding trigger blocks', icon: MinusCircle },
    [ClientToolCallState.rejected]: { text: 'Skipped finding trigger blocks', icon: MinusCircle },
  },
  interrupt: undefined,
}
|
||||
|
||||
const META_get_trigger_examples: ToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: { text: 'Selecting a trigger', icon: Loader2 },
|
||||
@@ -989,41 +726,6 @@ const META_get_trigger_examples: ToolMetadata = {
|
||||
interrupt: undefined,
|
||||
}
|
||||
|
||||
/**
 * Display metadata for get_user_workflow. Dynamic text resolves the workflow
 * name from the client-side workflow registry store, falling back to the
 * currently active workflow when no workflowId param is given.
 */
const META_get_user_workflow: ToolMetadata = {
  displayNames: {
    [ClientToolCallState.generating]: { text: 'Reading your workflow', icon: Loader2 },
    [ClientToolCallState.pending]: { text: 'Reading your workflow', icon: WorkflowIcon },
    [ClientToolCallState.executing]: { text: 'Reading your workflow', icon: Loader2 },
    [ClientToolCallState.aborted]: { text: 'Aborted reading your workflow', icon: XCircle },
    [ClientToolCallState.success]: { text: 'Read your workflow', icon: WorkflowIcon },
    [ClientToolCallState.error]: { text: 'Failed to read your workflow', icon: X },
    [ClientToolCallState.rejected]: { text: 'Skipped reading your workflow', icon: XCircle },
  },
  getDynamicText: (params, state) => {
    // Fall back to the active workflow so "Reading <name>" works with no params.
    const workflowId = params?.workflowId || useWorkflowRegistry.getState().activeWorkflowId
    if (workflowId) {
      const workflowName = useWorkflowRegistry.getState().workflows[workflowId]?.name
      if (workflowName) {
        switch (state) {
          case ClientToolCallState.success:
            return `Read ${workflowName}`
          case ClientToolCallState.executing:
          case ClientToolCallState.generating:
          case ClientToolCallState.pending:
            return `Reading ${workflowName}`
          case ClientToolCallState.error:
            return `Failed to read ${workflowName}`
          case ClientToolCallState.aborted:
            return `Aborted reading ${workflowName}`
          case ClientToolCallState.rejected:
            return `Skipped reading ${workflowName}`
        }
      }
    }
    // Unknown workflow or unhandled state: let the static displayNames apply.
    return undefined
  },
}
|
||||
|
||||
const META_get_workflow_console: ToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: { text: 'Fetching execution logs', icon: Loader2 },
|
||||
@@ -1106,59 +808,6 @@ const META_get_workflow_data: ToolMetadata = {
|
||||
},
|
||||
}
|
||||
|
||||
const META_get_workflow_from_name: ToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: { text: 'Reading workflow', icon: Loader2 },
|
||||
[ClientToolCallState.pending]: { text: 'Reading workflow', icon: FileText },
|
||||
[ClientToolCallState.executing]: { text: 'Reading workflow', icon: Loader2 },
|
||||
[ClientToolCallState.aborted]: { text: 'Aborted reading workflow', icon: XCircle },
|
||||
[ClientToolCallState.success]: { text: 'Read workflow', icon: FileText },
|
||||
[ClientToolCallState.error]: { text: 'Failed to read workflow', icon: X },
|
||||
[ClientToolCallState.rejected]: { text: 'Skipped reading workflow', icon: XCircle },
|
||||
},
|
||||
getDynamicText: (params, state) => {
|
||||
if (params?.workflow_name && typeof params.workflow_name === 'string') {
|
||||
const workflowName = params.workflow_name
|
||||
|
||||
switch (state) {
|
||||
case ClientToolCallState.success:
|
||||
return `Read ${workflowName}`
|
||||
case ClientToolCallState.executing:
|
||||
case ClientToolCallState.generating:
|
||||
case ClientToolCallState.pending:
|
||||
return `Reading ${workflowName}`
|
||||
case ClientToolCallState.error:
|
||||
return `Failed to read ${workflowName}`
|
||||
case ClientToolCallState.aborted:
|
||||
return `Aborted reading ${workflowName}`
|
||||
case ClientToolCallState.rejected:
|
||||
return `Skipped reading ${workflowName}`
|
||||
}
|
||||
}
|
||||
return undefined
|
||||
},
|
||||
}
|
||||
|
||||
const META_info: ToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: { text: 'Getting info', icon: Loader2 },
|
||||
[ClientToolCallState.pending]: { text: 'Getting info', icon: Loader2 },
|
||||
[ClientToolCallState.executing]: { text: 'Getting info', icon: Loader2 },
|
||||
[ClientToolCallState.success]: { text: 'Retrieved info', icon: Info },
|
||||
[ClientToolCallState.error]: { text: 'Failed to get info', icon: XCircle },
|
||||
[ClientToolCallState.rejected]: { text: 'Skipped info', icon: XCircle },
|
||||
[ClientToolCallState.aborted]: { text: 'Aborted info', icon: XCircle },
|
||||
},
|
||||
uiConfig: {
|
||||
subagent: {
|
||||
streamingLabel: 'Getting info',
|
||||
completedLabel: 'Info retrieved',
|
||||
shouldCollapse: true,
|
||||
outputArtifacts: [],
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
const META_knowledge: ToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: { text: 'Managing knowledge', icon: Loader2 },
|
||||
@@ -1230,18 +879,6 @@ const META_knowledge_base: ToolMetadata = {
|
||||
},
|
||||
}
|
||||
|
||||
const META_list_user_workflows: ToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: { text: 'Listing your workflows', icon: Loader2 },
|
||||
[ClientToolCallState.pending]: { text: 'Listing your workflows', icon: ListChecks },
|
||||
[ClientToolCallState.executing]: { text: 'Listing your workflows', icon: Loader2 },
|
||||
[ClientToolCallState.aborted]: { text: 'Aborted listing workflows', icon: XCircle },
|
||||
[ClientToolCallState.success]: { text: 'Listed your workflows', icon: ListChecks },
|
||||
[ClientToolCallState.error]: { text: 'Failed to list workflows', icon: X },
|
||||
[ClientToolCallState.rejected]: { text: 'Skipped listing workflows', icon: XCircle },
|
||||
},
|
||||
}
|
||||
|
||||
const META_list_workspace_mcp_servers: ToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: {
|
||||
@@ -1604,26 +1241,6 @@ const META_oauth_request_access: ToolMetadata = {
|
||||
},
|
||||
}
|
||||
|
||||
const META_plan: ToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: { text: 'Planning', icon: Loader2 },
|
||||
[ClientToolCallState.pending]: { text: 'Planning', icon: Loader2 },
|
||||
[ClientToolCallState.executing]: { text: 'Planning', icon: Loader2 },
|
||||
[ClientToolCallState.success]: { text: 'Planned', icon: ListTodo },
|
||||
[ClientToolCallState.error]: { text: 'Failed to plan', icon: XCircle },
|
||||
[ClientToolCallState.rejected]: { text: 'Skipped plan', icon: XCircle },
|
||||
[ClientToolCallState.aborted]: { text: 'Aborted plan', icon: XCircle },
|
||||
},
|
||||
uiConfig: {
|
||||
subagent: {
|
||||
streamingLabel: 'Planning',
|
||||
completedLabel: 'Planned',
|
||||
shouldCollapse: true,
|
||||
outputArtifacts: ['plan'],
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
const META_redeploy: ToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: { text: 'Redeploying workflow', icon: Loader2 },
|
||||
@@ -2466,66 +2083,6 @@ const META_superagent: ToolMetadata = {
|
||||
},
|
||||
}
|
||||
|
||||
const META_test: ToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: { text: 'Testing', icon: Loader2 },
|
||||
[ClientToolCallState.pending]: { text: 'Testing', icon: Loader2 },
|
||||
[ClientToolCallState.executing]: { text: 'Testing', icon: Loader2 },
|
||||
[ClientToolCallState.success]: { text: 'Tested', icon: FlaskConical },
|
||||
[ClientToolCallState.error]: { text: 'Failed to test', icon: XCircle },
|
||||
[ClientToolCallState.rejected]: { text: 'Skipped test', icon: XCircle },
|
||||
[ClientToolCallState.aborted]: { text: 'Aborted test', icon: XCircle },
|
||||
},
|
||||
uiConfig: {
|
||||
subagent: {
|
||||
streamingLabel: 'Testing',
|
||||
completedLabel: 'Tested',
|
||||
shouldCollapse: true,
|
||||
outputArtifacts: [],
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
const META_tour: ToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: { text: 'Touring', icon: Loader2 },
|
||||
[ClientToolCallState.pending]: { text: 'Touring', icon: Loader2 },
|
||||
[ClientToolCallState.executing]: { text: 'Touring', icon: Loader2 },
|
||||
[ClientToolCallState.success]: { text: 'Completed tour', icon: Compass },
|
||||
[ClientToolCallState.error]: { text: 'Failed tour', icon: XCircle },
|
||||
[ClientToolCallState.rejected]: { text: 'Skipped tour', icon: XCircle },
|
||||
[ClientToolCallState.aborted]: { text: 'Aborted tour', icon: XCircle },
|
||||
},
|
||||
uiConfig: {
|
||||
subagent: {
|
||||
streamingLabel: 'Touring',
|
||||
completedLabel: 'Tour complete',
|
||||
shouldCollapse: true,
|
||||
outputArtifacts: [],
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
const META_workflow: ToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: { text: 'Managing workflow', icon: Loader2 },
|
||||
[ClientToolCallState.pending]: { text: 'Managing workflow', icon: Loader2 },
|
||||
[ClientToolCallState.executing]: { text: 'Managing workflow', icon: Loader2 },
|
||||
[ClientToolCallState.success]: { text: 'Managed workflow', icon: GitBranch },
|
||||
[ClientToolCallState.error]: { text: 'Failed to manage workflow', icon: XCircle },
|
||||
[ClientToolCallState.rejected]: { text: 'Skipped workflow', icon: XCircle },
|
||||
[ClientToolCallState.aborted]: { text: 'Aborted workflow', icon: XCircle },
|
||||
},
|
||||
uiConfig: {
|
||||
subagent: {
|
||||
streamingLabel: 'Managing workflow',
|
||||
completedLabel: 'Workflow managed',
|
||||
shouldCollapse: true,
|
||||
outputArtifacts: [],
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
const TOOL_METADATA_BY_ID: Record<string, ToolMetadata> = {
|
||||
auth: META_auth,
|
||||
check_deployment_status: META_check_deployment_status,
|
||||
@@ -2534,37 +2091,23 @@ const TOOL_METADATA_BY_ID: Record<string, ToolMetadata> = {
|
||||
create_workspace_mcp_server: META_create_workspace_mcp_server,
|
||||
build: META_build,
|
||||
custom_tool: META_custom_tool,
|
||||
debug: META_debug,
|
||||
deploy: META_deploy,
|
||||
discovery: META_discovery,
|
||||
deploy_api: META_deploy_api,
|
||||
deploy_chat: META_deploy_chat,
|
||||
deploy_mcp: META_deploy_mcp,
|
||||
edit: META_edit,
|
||||
edit_workflow: META_edit_workflow,
|
||||
evaluate: META_evaluate,
|
||||
get_block_config: META_get_block_config,
|
||||
get_block_options: META_get_block_options,
|
||||
get_block_outputs: META_get_block_outputs,
|
||||
get_block_upstream_references: META_get_block_upstream_references,
|
||||
get_blocks_and_tools: META_get_blocks_and_tools,
|
||||
get_blocks_metadata: META_get_blocks_metadata,
|
||||
get_credentials: META_get_credentials,
|
||||
generate_api_key: META_generate_api_key,
|
||||
get_examples_rag: META_get_examples_rag,
|
||||
get_operations_examples: META_get_operations_examples,
|
||||
get_page_contents: META_get_page_contents,
|
||||
get_platform_actions: META_get_platform_actions,
|
||||
get_trigger_blocks: META_get_trigger_blocks,
|
||||
get_trigger_examples: META_get_trigger_examples,
|
||||
get_user_workflow: META_get_user_workflow,
|
||||
get_workflow_console: META_get_workflow_console,
|
||||
get_workflow_data: META_get_workflow_data,
|
||||
get_workflow_from_name: META_get_workflow_from_name,
|
||||
info: META_info,
|
||||
knowledge: META_knowledge,
|
||||
knowledge_base: META_knowledge_base,
|
||||
list_user_workflows: META_list_user_workflows,
|
||||
list_workspace_mcp_servers: META_list_workspace_mcp_servers,
|
||||
make_api_request: META_make_api_request,
|
||||
manage_custom_tool: META_manage_custom_tool,
|
||||
@@ -2572,7 +2115,6 @@ const TOOL_METADATA_BY_ID: Record<string, ToolMetadata> = {
|
||||
mark_todo_in_progress: META_mark_todo_in_progress,
|
||||
navigate_ui: META_navigate_ui,
|
||||
oauth_request_access: META_oauth_request_access,
|
||||
plan: META_plan,
|
||||
redeploy: META_redeploy,
|
||||
remember_debug: META_remember_debug,
|
||||
research: META_research,
|
||||
@@ -2591,9 +2133,6 @@ const TOOL_METADATA_BY_ID: Record<string, ToolMetadata> = {
|
||||
sleep: META_sleep,
|
||||
summarize_conversation: META_summarize_conversation,
|
||||
superagent: META_superagent,
|
||||
test: META_test,
|
||||
tour: META_tour,
|
||||
workflow: META_workflow,
|
||||
}
|
||||
|
||||
export const TOOL_DISPLAY_REGISTRY: Record<string, ToolDisplayEntry> = Object.fromEntries(
|
||||
|
||||
@@ -27,25 +27,6 @@ export const DIRECT_TOOL_DEFS: DirectToolDef[] = [
|
||||
properties: {},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'list_workflows',
|
||||
toolId: 'list_user_workflows',
|
||||
description:
|
||||
'List all workflows the user has access to. Returns workflow IDs, names, workspace, and folder info. Use workspaceId/folderId to scope results.',
|
||||
inputSchema: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
workspaceId: {
|
||||
type: 'string',
|
||||
description: 'Optional workspace ID to filter workflows.',
|
||||
},
|
||||
folderId: {
|
||||
type: 'string',
|
||||
description: 'Optional folder ID to filter workflows.',
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'list_folders',
|
||||
toolId: 'list_folders',
|
||||
@@ -62,22 +43,6 @@ export const DIRECT_TOOL_DEFS: DirectToolDef[] = [
|
||||
required: ['workspaceId'],
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'get_workflow',
|
||||
toolId: 'get_user_workflow',
|
||||
description:
|
||||
'Get a workflow by ID. Returns the full workflow definition including all blocks, connections, and configuration.',
|
||||
inputSchema: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
workflowId: {
|
||||
type: 'string',
|
||||
description: 'Workflow ID to retrieve.',
|
||||
},
|
||||
},
|
||||
required: ['workflowId'],
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'create_workflow',
|
||||
toolId: 'create_workflow',
|
||||
|
||||
@@ -1,493 +0,0 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'
|
||||
import {
|
||||
GetBlockConfigInput,
|
||||
type GetBlockConfigInputType,
|
||||
GetBlockConfigResult,
|
||||
type GetBlockConfigResultType,
|
||||
} from '@/lib/copilot/tools/shared/schemas'
|
||||
import { registry as blockRegistry, getLatestBlock } from '@/blocks/registry'
|
||||
import { isHiddenFromDisplay, type SubBlockConfig } from '@/blocks/types'
|
||||
import { getUserPermissionConfig } from '@/ee/access-control/utils/permission-check'
|
||||
import { PROVIDER_DEFINITIONS } from '@/providers/models'
|
||||
import { tools as toolsRegistry } from '@/tools/registry'
|
||||
import { getTrigger, isTriggerValid } from '@/triggers'
|
||||
|
||||
interface InputFieldSchema {
|
||||
type: string
|
||||
description?: string
|
||||
placeholder?: string
|
||||
required?: boolean
|
||||
options?: string[]
|
||||
default?: any
|
||||
min?: number
|
||||
max?: number
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets all available models from PROVIDER_DEFINITIONS as static options.
|
||||
* This provides fallback data when store state is not available server-side.
|
||||
*/
|
||||
function getStaticModelOptions(): string[] {
|
||||
const models: string[] = []
|
||||
|
||||
for (const provider of Object.values(PROVIDER_DEFINITIONS)) {
|
||||
// Skip providers with dynamic/fetched models
|
||||
if (provider.id === 'ollama' || provider.id === 'vllm' || provider.id === 'openrouter') {
|
||||
continue
|
||||
}
|
||||
if (provider?.models) {
|
||||
for (const model of provider.models) {
|
||||
models.push(model.id)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return models
|
||||
}
|
||||
|
||||
/**
|
||||
* Attempts to call a dynamic options function with fallback data injected.
|
||||
*/
|
||||
function callOptionsWithFallback(optionsFn: () => any[]): any[] | undefined {
|
||||
const staticModels = getStaticModelOptions()
|
||||
|
||||
const mockProvidersState = {
|
||||
providers: {
|
||||
base: { models: staticModels },
|
||||
ollama: { models: [] },
|
||||
vllm: { models: [] },
|
||||
openrouter: { models: [] },
|
||||
},
|
||||
}
|
||||
|
||||
let originalGetState: (() => any) | undefined
|
||||
let store: any
|
||||
|
||||
try {
|
||||
// eslint-disable-next-line @typescript-eslint/no-require-imports
|
||||
store = require('@/stores/providers')
|
||||
if (store?.useProvidersStore?.getState) {
|
||||
originalGetState = store.useProvidersStore.getState
|
||||
store.useProvidersStore.getState = () => mockProvidersState
|
||||
}
|
||||
} catch {
|
||||
// Store module not available
|
||||
}
|
||||
|
||||
try {
|
||||
return optionsFn()
|
||||
} finally {
|
||||
if (store?.useProvidersStore && originalGetState) {
|
||||
store.useProvidersStore.getState = originalGetState
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolves options from a subBlock, handling both static arrays and dynamic functions
|
||||
*/
|
||||
function resolveSubBlockOptions(sb: SubBlockConfig): string[] | undefined {
|
||||
// Skip if subblock uses fetchOptions (async network calls)
|
||||
if (sb.fetchOptions) {
|
||||
return undefined
|
||||
}
|
||||
|
||||
let rawOptions: any[] | undefined
|
||||
|
||||
try {
|
||||
if (typeof sb.options === 'function') {
|
||||
rawOptions = callOptionsWithFallback(sb.options)
|
||||
} else {
|
||||
rawOptions = sb.options
|
||||
}
|
||||
} catch {
|
||||
return undefined
|
||||
}
|
||||
|
||||
if (!Array.isArray(rawOptions) || rawOptions.length === 0) {
|
||||
return undefined
|
||||
}
|
||||
|
||||
// Return the actual option ID/value that edit_workflow expects, not the display label
|
||||
return rawOptions
|
||||
.map((opt: any) => {
|
||||
if (!opt) return undefined
|
||||
if (typeof opt === 'object') {
|
||||
return opt.id || opt.label // Prefer id (actual value) over label (display name)
|
||||
}
|
||||
return String(opt)
|
||||
})
|
||||
.filter((o): o is string => o !== undefined)
|
||||
}
|
||||
|
||||
interface OutputFieldSchema {
|
||||
type: string
|
||||
description?: string
|
||||
properties?: Record<string, OutputFieldSchema>
|
||||
items?: { type: string }
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolves the condition to check if it matches the given operation
|
||||
*/
|
||||
function matchesOperation(condition: any, operation: string): boolean {
|
||||
if (!condition) return false
|
||||
|
||||
const cond = typeof condition === 'function' ? condition() : condition
|
||||
if (!cond) return false
|
||||
|
||||
if (cond.field === 'operation' && !cond.not) {
|
||||
const values = Array.isArray(cond.value) ? cond.value : [cond.value]
|
||||
return values.includes(operation)
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts input schema from subBlocks
|
||||
*/
|
||||
function extractInputsFromSubBlocks(
|
||||
subBlocks: SubBlockConfig[],
|
||||
operation?: string,
|
||||
triggerMode?: boolean
|
||||
): Record<string, InputFieldSchema> {
|
||||
const inputs: Record<string, InputFieldSchema> = {}
|
||||
|
||||
for (const sb of subBlocks) {
|
||||
// Handle trigger vs non-trigger mode filtering
|
||||
if (triggerMode) {
|
||||
// In trigger mode, only include subBlocks with mode: 'trigger'
|
||||
if (sb.mode !== 'trigger') continue
|
||||
} else {
|
||||
// In non-trigger mode, skip trigger-mode subBlocks
|
||||
if (sb.mode === 'trigger') continue
|
||||
}
|
||||
|
||||
// Skip hidden subBlocks
|
||||
if (sb.hidden) continue
|
||||
|
||||
// If operation is specified, only include subBlocks that:
|
||||
// 1. Have no condition (common parameters)
|
||||
// 2. Have a condition matching the operation
|
||||
if (operation) {
|
||||
const condition = typeof sb.condition === 'function' ? sb.condition() : sb.condition
|
||||
if (condition) {
|
||||
if (condition.field === 'operation' && !condition.not) {
|
||||
// This is an operation-specific field
|
||||
const values = Array.isArray(condition.value) ? condition.value : [condition.value]
|
||||
if (!values.includes(operation)) {
|
||||
continue // Skip if doesn't match our operation
|
||||
}
|
||||
} else if (!matchesOperation(condition, operation)) {
|
||||
// Other condition that doesn't match
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const field: InputFieldSchema = {
|
||||
type: mapSubBlockTypeToSchemaType(sb.type),
|
||||
}
|
||||
|
||||
if (sb.description) field.description = sb.description
|
||||
if (sb.title && !sb.description) field.description = sb.title
|
||||
if (sb.placeholder) field.placeholder = sb.placeholder
|
||||
|
||||
// Handle required
|
||||
if (typeof sb.required === 'boolean') {
|
||||
field.required = sb.required
|
||||
} else if (typeof sb.required === 'object') {
|
||||
field.required = true // Has conditional requirement
|
||||
}
|
||||
|
||||
// Handle options using the resolver that handles dynamic model lists
|
||||
const resolvedOptions = resolveSubBlockOptions(sb)
|
||||
if (resolvedOptions && resolvedOptions.length > 0) {
|
||||
field.options = resolvedOptions
|
||||
}
|
||||
|
||||
// Handle default value
|
||||
if (sb.defaultValue !== undefined) {
|
||||
field.default = sb.defaultValue
|
||||
}
|
||||
|
||||
// Handle numeric constraints
|
||||
if (sb.min !== undefined) field.min = sb.min
|
||||
if (sb.max !== undefined) field.max = sb.max
|
||||
|
||||
inputs[sb.id] = field
|
||||
}
|
||||
|
||||
return inputs
|
||||
}
|
||||
|
||||
/**
|
||||
* Maps subBlock type to a simplified schema type
|
||||
*/
|
||||
function mapSubBlockTypeToSchemaType(type: string): string {
|
||||
const typeMap: Record<string, string> = {
|
||||
'short-input': 'string',
|
||||
'long-input': 'string',
|
||||
code: 'string',
|
||||
dropdown: 'string',
|
||||
combobox: 'string',
|
||||
slider: 'number',
|
||||
switch: 'boolean',
|
||||
'tool-input': 'json',
|
||||
'checkbox-list': 'array',
|
||||
'grouped-checkbox-list': 'array',
|
||||
'condition-input': 'json',
|
||||
'eval-input': 'json',
|
||||
'time-input': 'string',
|
||||
'oauth-input': 'credential',
|
||||
'file-selector': 'string',
|
||||
'project-selector': 'string',
|
||||
'channel-selector': 'string',
|
||||
'user-selector': 'string',
|
||||
'folder-selector': 'string',
|
||||
'knowledge-base-selector': 'string',
|
||||
'document-selector': 'string',
|
||||
'mcp-server-selector': 'string',
|
||||
'mcp-tool-selector': 'string',
|
||||
table: 'json',
|
||||
'file-upload': 'file',
|
||||
'messages-input': 'array',
|
||||
}
|
||||
|
||||
return typeMap[type] || 'string'
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts a single output field schema, including nested properties
|
||||
*/
|
||||
function extractOutputField(def: any): OutputFieldSchema {
|
||||
if (typeof def === 'string') {
|
||||
return { type: def }
|
||||
}
|
||||
|
||||
if (typeof def !== 'object' || def === null) {
|
||||
return { type: 'any' }
|
||||
}
|
||||
|
||||
const field: OutputFieldSchema = {
|
||||
type: def.type || 'any',
|
||||
}
|
||||
|
||||
if (def.description) {
|
||||
field.description = def.description
|
||||
}
|
||||
|
||||
// Include nested properties if present
|
||||
if (def.properties && typeof def.properties === 'object') {
|
||||
field.properties = {}
|
||||
for (const [propKey, propDef] of Object.entries(def.properties)) {
|
||||
field.properties[propKey] = extractOutputField(propDef)
|
||||
}
|
||||
}
|
||||
|
||||
// Include items schema for arrays
|
||||
if (def.items && typeof def.items === 'object') {
|
||||
field.items = { type: def.items.type || 'any' }
|
||||
}
|
||||
|
||||
return field
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts trigger outputs from the first available trigger
|
||||
*/
|
||||
function extractTriggerOutputs(blockConfig: any): Record<string, OutputFieldSchema> {
|
||||
const outputs: Record<string, OutputFieldSchema> = {}
|
||||
|
||||
if (!blockConfig.triggers?.enabled || !blockConfig.triggers?.available?.length) {
|
||||
return outputs
|
||||
}
|
||||
|
||||
// Get the first available trigger's outputs as a baseline
|
||||
const triggerId = blockConfig.triggers.available[0]
|
||||
if (triggerId && isTriggerValid(triggerId)) {
|
||||
const trigger = getTrigger(triggerId)
|
||||
if (trigger.outputs) {
|
||||
for (const [key, def] of Object.entries(trigger.outputs)) {
|
||||
if (isHiddenFromDisplay(def)) continue
|
||||
outputs[key] = extractOutputField(def)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return outputs
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts output schema from block config or tool
|
||||
*/
|
||||
function extractOutputs(
|
||||
blockConfig: any,
|
||||
operation?: string,
|
||||
triggerMode?: boolean
|
||||
): Record<string, OutputFieldSchema> {
|
||||
const outputs: Record<string, OutputFieldSchema> = {}
|
||||
|
||||
// In trigger mode, return trigger outputs
|
||||
if (triggerMode && blockConfig.triggers?.enabled) {
|
||||
return extractTriggerOutputs(blockConfig)
|
||||
}
|
||||
|
||||
// If operation is specified, try to get outputs from the specific tool
|
||||
if (operation) {
|
||||
try {
|
||||
const toolSelector = blockConfig.tools?.config?.tool
|
||||
if (typeof toolSelector === 'function') {
|
||||
const toolId = toolSelector({ operation })
|
||||
const tool = toolsRegistry[toolId]
|
||||
if (tool?.outputs) {
|
||||
for (const [key, def] of Object.entries(tool.outputs)) {
|
||||
if (isHiddenFromDisplay(def)) continue
|
||||
outputs[key] = extractOutputField(def)
|
||||
}
|
||||
return outputs
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
// Fall through to block-level outputs
|
||||
}
|
||||
}
|
||||
|
||||
// Use block-level outputs
|
||||
if (blockConfig.outputs) {
|
||||
for (const [key, def] of Object.entries(blockConfig.outputs)) {
|
||||
if (isHiddenFromDisplay(def)) continue
|
||||
outputs[key] = extractOutputField(def)
|
||||
}
|
||||
}
|
||||
|
||||
return outputs
|
||||
}
|
||||
|
||||
export const getBlockConfigServerTool: BaseServerTool<
|
||||
GetBlockConfigInputType,
|
||||
GetBlockConfigResultType
|
||||
> = {
|
||||
name: 'get_block_config',
|
||||
inputSchema: GetBlockConfigInput,
|
||||
outputSchema: GetBlockConfigResult,
|
||||
async execute(
|
||||
{ blockType, operation, trigger }: GetBlockConfigInputType,
|
||||
context?: { userId: string }
|
||||
): Promise<GetBlockConfigResultType> {
|
||||
const logger = createLogger('GetBlockConfigServerTool')
|
||||
logger.debug('Executing get_block_config', { blockType, operation, trigger })
|
||||
|
||||
if (blockType === 'loop') {
|
||||
const result = {
|
||||
blockType,
|
||||
blockName: 'Loop',
|
||||
operation,
|
||||
trigger,
|
||||
inputs: {
|
||||
loopType: {
|
||||
type: 'string',
|
||||
description: 'Loop type',
|
||||
options: ['for', 'forEach', 'while', 'doWhile'],
|
||||
default: 'for',
|
||||
},
|
||||
iterations: {
|
||||
type: 'number',
|
||||
description: 'Number of iterations (for loop type "for")',
|
||||
},
|
||||
collection: {
|
||||
type: 'string',
|
||||
description: 'Collection to iterate (for loop type "forEach")',
|
||||
},
|
||||
condition: {
|
||||
type: 'string',
|
||||
description: 'Loop condition (for loop types "while" and "doWhile")',
|
||||
},
|
||||
},
|
||||
outputs: {},
|
||||
}
|
||||
return GetBlockConfigResult.parse(result)
|
||||
}
|
||||
|
||||
if (blockType === 'parallel') {
|
||||
const result = {
|
||||
blockType,
|
||||
blockName: 'Parallel',
|
||||
operation,
|
||||
trigger,
|
||||
inputs: {
|
||||
parallelType: {
|
||||
type: 'string',
|
||||
description: 'Parallel type',
|
||||
options: ['count', 'collection'],
|
||||
default: 'count',
|
||||
},
|
||||
count: {
|
||||
type: 'number',
|
||||
description: 'Number of parallel branches (for parallel type "count")',
|
||||
},
|
||||
collection: {
|
||||
type: 'string',
|
||||
description: 'Collection to branch over (for parallel type "collection")',
|
||||
},
|
||||
},
|
||||
outputs: {},
|
||||
}
|
||||
return GetBlockConfigResult.parse(result)
|
||||
}
|
||||
|
||||
const permissionConfig = context?.userId ? await getUserPermissionConfig(context.userId) : null
|
||||
const allowedIntegrations = permissionConfig?.allowedIntegrations
|
||||
|
||||
if (allowedIntegrations != null && !allowedIntegrations.includes(blockType)) {
|
||||
throw new Error(`Block "${blockType}" is not available`)
|
||||
}
|
||||
|
||||
const blockConfig = blockRegistry[blockType]
|
||||
if (!blockConfig) {
|
||||
throw new Error(`Block not found: ${blockType}`)
|
||||
}
|
||||
|
||||
// Validate trigger mode is supported for this block
|
||||
if (trigger && !blockConfig.triggers?.enabled && !blockConfig.triggerAllowed) {
|
||||
throw new Error(
|
||||
`Block "${blockType}" does not support trigger mode. Only blocks with triggers.enabled or triggerAllowed can be used in trigger mode.`
|
||||
)
|
||||
}
|
||||
|
||||
// If operation is specified, validate it exists
|
||||
if (operation) {
|
||||
const operationSubBlock = blockConfig.subBlocks?.find((sb) => sb.id === 'operation')
|
||||
if (operationSubBlock && Array.isArray(operationSubBlock.options)) {
|
||||
const validOperations = operationSubBlock.options.map((o) =>
|
||||
typeof o === 'object' ? o.id : o
|
||||
)
|
||||
if (!validOperations.includes(operation)) {
|
||||
throw new Error(
|
||||
`Invalid operation "${operation}" for block "${blockType}". Valid operations: ${validOperations.join(', ')}`
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const subBlocks = Array.isArray(blockConfig.subBlocks) ? blockConfig.subBlocks : []
|
||||
const inputs = extractInputsFromSubBlocks(subBlocks, operation, trigger)
|
||||
const outputs = extractOutputs(blockConfig, operation, trigger)
|
||||
|
||||
const latestBlock = getLatestBlock(blockType)
|
||||
const displayName = latestBlock?.name ?? blockConfig.name
|
||||
|
||||
const result = {
|
||||
blockType,
|
||||
blockName: displayName,
|
||||
operation,
|
||||
trigger,
|
||||
inputs,
|
||||
outputs,
|
||||
}
|
||||
|
||||
return GetBlockConfigResult.parse(result)
|
||||
},
|
||||
}
|
||||
@@ -1,130 +0,0 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'
|
||||
import {
|
||||
GetBlockOptionsInput,
|
||||
type GetBlockOptionsInputType,
|
||||
GetBlockOptionsResult,
|
||||
type GetBlockOptionsResultType,
|
||||
} from '@/lib/copilot/tools/shared/schemas'
|
||||
import { registry as blockRegistry, getLatestBlock } from '@/blocks/registry'
|
||||
import { getUserPermissionConfig } from '@/ee/access-control/utils/permission-check'
|
||||
import { tools as toolsRegistry } from '@/tools/registry'
|
||||
|
||||
export const getBlockOptionsServerTool: BaseServerTool<
|
||||
GetBlockOptionsInputType,
|
||||
GetBlockOptionsResultType
|
||||
> = {
|
||||
name: 'get_block_options',
|
||||
inputSchema: GetBlockOptionsInput,
|
||||
outputSchema: GetBlockOptionsResult,
|
||||
async execute(
|
||||
{ blockId }: GetBlockOptionsInputType,
|
||||
context?: { userId: string }
|
||||
): Promise<GetBlockOptionsResultType> {
|
||||
const logger = createLogger('GetBlockOptionsServerTool')
|
||||
logger.debug('Executing get_block_options', { blockId })
|
||||
|
||||
if (blockId === 'loop') {
|
||||
const result = {
|
||||
blockId,
|
||||
blockName: 'Loop',
|
||||
operations: [
|
||||
{ id: 'for', name: 'For', description: 'Run a fixed number of iterations.' },
|
||||
{ id: 'forEach', name: 'For each', description: 'Iterate over a collection.' },
|
||||
{ id: 'while', name: 'While', description: 'Repeat while a condition is true.' },
|
||||
{
|
||||
id: 'doWhile',
|
||||
name: 'Do while',
|
||||
description: 'Run once, then repeat while a condition is true.',
|
||||
},
|
||||
],
|
||||
}
|
||||
return GetBlockOptionsResult.parse(result)
|
||||
}
|
||||
|
||||
if (blockId === 'parallel') {
|
||||
const result = {
|
||||
blockId,
|
||||
blockName: 'Parallel',
|
||||
operations: [
|
||||
{ id: 'count', name: 'Count', description: 'Run a fixed number of parallel branches.' },
|
||||
{
|
||||
id: 'collection',
|
||||
name: 'Collection',
|
||||
description: 'Run one branch per collection item.',
|
||||
},
|
||||
],
|
||||
}
|
||||
return GetBlockOptionsResult.parse(result)
|
||||
}
|
||||
|
||||
const permissionConfig = context?.userId ? await getUserPermissionConfig(context.userId) : null
|
||||
const allowedIntegrations = permissionConfig?.allowedIntegrations
|
||||
|
||||
if (allowedIntegrations != null && !allowedIntegrations.includes(blockId)) {
|
||||
throw new Error(`Block "${blockId}" is not available`)
|
||||
}
|
||||
|
||||
const blockConfig = blockRegistry[blockId]
|
||||
if (!blockConfig) {
|
||||
throw new Error(`Block not found: ${blockId}`)
|
||||
}
|
||||
|
||||
const operations: { id: string; name: string; description?: string }[] = []
|
||||
|
||||
// Check if block has an operation dropdown to determine available operations
|
||||
const operationSubBlock = blockConfig.subBlocks?.find((sb) => sb.id === 'operation')
|
||||
if (operationSubBlock && Array.isArray(operationSubBlock.options)) {
|
||||
// Block has operations - get tool info for each operation
|
||||
for (const option of operationSubBlock.options) {
|
||||
const opId = typeof option === 'object' ? option.id : option
|
||||
const opLabel = typeof option === 'object' ? option.label : option
|
||||
|
||||
// Try to resolve the tool for this operation
|
||||
let toolDescription: string | undefined
|
||||
try {
|
||||
const toolSelector = blockConfig.tools?.config?.tool
|
||||
if (typeof toolSelector === 'function') {
|
||||
const toolId = toolSelector({ operation: opId })
|
||||
const tool = toolsRegistry[toolId]
|
||||
if (tool) {
|
||||
toolDescription = tool.description
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
// Tool resolution failed, continue without description
|
||||
}
|
||||
|
||||
operations.push({
|
||||
id: opId,
|
||||
name: opLabel || opId,
|
||||
description: toolDescription,
|
||||
})
|
||||
}
|
||||
} else {
|
||||
// No operation dropdown - list all accessible tools
|
||||
const accessibleTools = blockConfig.tools?.access || []
|
||||
for (const toolId of accessibleTools) {
|
||||
const tool = toolsRegistry[toolId]
|
||||
if (tool) {
|
||||
operations.push({
|
||||
id: toolId,
|
||||
name: tool.name || toolId,
|
||||
description: tool.description,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const latestBlock = getLatestBlock(blockId)
|
||||
const displayName = latestBlock?.name ?? blockConfig.name
|
||||
|
||||
const result = {
|
||||
blockId,
|
||||
blockName: displayName,
|
||||
operations,
|
||||
}
|
||||
|
||||
return GetBlockOptionsResult.parse(result)
|
||||
},
|
||||
}
|
||||
@@ -1,68 +0,0 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'
|
||||
import { GetBlocksAndToolsInput, GetBlocksAndToolsResult } from '@/lib/copilot/tools/shared/schemas'
|
||||
import { registry as blockRegistry } from '@/blocks/registry'
|
||||
import type { BlockConfig } from '@/blocks/types'
|
||||
import { getUserPermissionConfig } from '@/ee/access-control/utils/permission-check'
|
||||
|
||||
export const getBlocksAndToolsServerTool: BaseServerTool<
|
||||
ReturnType<typeof GetBlocksAndToolsInput.parse>,
|
||||
ReturnType<typeof GetBlocksAndToolsResult.parse>
|
||||
> = {
|
||||
name: 'get_blocks_and_tools',
|
||||
inputSchema: GetBlocksAndToolsInput,
|
||||
outputSchema: GetBlocksAndToolsResult,
|
||||
async execute(_args: unknown, context?: { userId: string }) {
|
||||
const logger = createLogger('GetBlocksAndToolsServerTool')
|
||||
logger.debug('Executing get_blocks_and_tools')
|
||||
|
||||
const permissionConfig = context?.userId ? await getUserPermissionConfig(context.userId) : null
|
||||
const allowedIntegrations = permissionConfig?.allowedIntegrations
|
||||
|
||||
type BlockListItem = {
|
||||
type: string
|
||||
name: string
|
||||
description?: string
|
||||
triggerAllowed?: boolean
|
||||
}
|
||||
const blocks: BlockListItem[] = []
|
||||
|
||||
Object.entries(blockRegistry)
|
||||
.filter(([blockType, blockConfig]: [string, BlockConfig]) => {
|
||||
if (blockConfig.hideFromToolbar) return false
|
||||
if (allowedIntegrations != null && !allowedIntegrations.includes(blockType)) return false
|
||||
return true
|
||||
})
|
||||
.forEach(([blockType, blockConfig]: [string, BlockConfig]) => {
|
||||
blocks.push({
|
||||
type: blockType,
|
||||
name: blockConfig.name,
|
||||
description: blockConfig.longDescription,
|
||||
triggerAllowed: 'triggerAllowed' in blockConfig ? !!blockConfig.triggerAllowed : false,
|
||||
})
|
||||
})
|
||||
|
||||
const specialBlocks: Record<string, { name: string; description: string }> = {
|
||||
loop: {
|
||||
name: 'Loop',
|
||||
description:
|
||||
'Control flow block for iterating over collections or repeating actions in a loop',
|
||||
},
|
||||
parallel: {
|
||||
name: 'Parallel',
|
||||
description: 'Control flow block for executing multiple branches simultaneously',
|
||||
},
|
||||
}
|
||||
Object.entries(specialBlocks).forEach(([blockType, info]) => {
|
||||
if (!blocks.some((b) => b.type === blockType)) {
|
||||
blocks.push({
|
||||
type: blockType,
|
||||
name: info.name,
|
||||
description: info.description,
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
return GetBlocksAndToolsResult.parse({ blocks })
|
||||
},
|
||||
}
|
||||
@@ -4,8 +4,10 @@ import type { KnowledgeBaseArgs, KnowledgeBaseResult } from '@/lib/copilot/tools
|
||||
import { generateSearchEmbedding } from '@/lib/knowledge/embeddings'
|
||||
import {
|
||||
createKnowledgeBase,
|
||||
deleteKnowledgeBase,
|
||||
getKnowledgeBaseById,
|
||||
getKnowledgeBases,
|
||||
updateKnowledgeBase,
|
||||
} from '@/lib/knowledge/service'
|
||||
import {
|
||||
createTagDefinition,
|
||||
@@ -221,6 +223,83 @@ export const knowledgeBaseServerTool: BaseServerTool<KnowledgeBaseArgs, Knowledg
|
||||
}
|
||||
}
|
||||
|
||||
case 'update': {
|
||||
if (!args.knowledgeBaseId) {
|
||||
return {
|
||||
success: false,
|
||||
message: 'Knowledge base ID is required for update operation',
|
||||
}
|
||||
}
|
||||
|
||||
const updates: { name?: string; description?: string; chunkingConfig?: { maxSize: number; minSize: number; overlap: number } } = {}
|
||||
if (args.name) updates.name = args.name
|
||||
if (args.description !== undefined) updates.description = args.description
|
||||
if (args.chunkingConfig) updates.chunkingConfig = args.chunkingConfig
|
||||
|
||||
if (!updates.name && updates.description === undefined && !updates.chunkingConfig) {
|
||||
return {
|
||||
success: false,
|
||||
message: 'At least one of name, description, or chunkingConfig is required for update',
|
||||
}
|
||||
}
|
||||
|
||||
const requestId = crypto.randomUUID().slice(0, 8)
|
||||
const updatedKb = await updateKnowledgeBase(args.knowledgeBaseId, updates, requestId)
|
||||
|
||||
logger.info('Knowledge base updated via copilot', {
|
||||
knowledgeBaseId: args.knowledgeBaseId,
|
||||
userId: context.userId,
|
||||
})
|
||||
|
||||
return {
|
||||
success: true,
|
||||
message: `Knowledge base "${updatedKb.name}" updated successfully`,
|
||||
data: {
|
||||
id: updatedKb.id,
|
||||
name: updatedKb.name,
|
||||
description: updatedKb.description,
|
||||
workspaceId: updatedKb.workspaceId,
|
||||
docCount: updatedKb.docCount,
|
||||
updatedAt: updatedKb.updatedAt,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
case 'delete': {
|
||||
if (!args.knowledgeBaseId) {
|
||||
return {
|
||||
success: false,
|
||||
message: 'Knowledge base ID is required for delete operation',
|
||||
}
|
||||
}
|
||||
|
||||
const kbToDelete = await getKnowledgeBaseById(args.knowledgeBaseId)
|
||||
if (!kbToDelete) {
|
||||
return {
|
||||
success: false,
|
||||
message: `Knowledge base with ID "${args.knowledgeBaseId}" not found`,
|
||||
}
|
||||
}
|
||||
|
||||
const requestId = crypto.randomUUID().slice(0, 8)
|
||||
await deleteKnowledgeBase(args.knowledgeBaseId, requestId)
|
||||
|
||||
logger.info('Knowledge base deleted via copilot', {
|
||||
knowledgeBaseId: args.knowledgeBaseId,
|
||||
name: kbToDelete.name,
|
||||
userId: context.userId,
|
||||
})
|
||||
|
||||
return {
|
||||
success: true,
|
||||
message: `Knowledge base "${kbToDelete.name}" deleted successfully`,
|
||||
data: {
|
||||
id: args.knowledgeBaseId,
|
||||
name: kbToDelete.name,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
case 'list_tags': {
|
||||
if (!args.knowledgeBaseId) {
|
||||
return {
|
||||
@@ -391,7 +470,7 @@ export const knowledgeBaseServerTool: BaseServerTool<KnowledgeBaseArgs, Knowledg
|
||||
default:
|
||||
return {
|
||||
success: false,
|
||||
message: `Unknown operation: ${operation}. Supported operations: create, list, get, query, list_tags, create_tag, update_tag, delete_tag, get_tag_usage`,
|
||||
message: `Unknown operation: ${operation}. Supported operations: create, list, get, query, update, delete, list_tags, create_tag, update_tag, delete_tag, get_tag_usage`,
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
|
||||
@@ -1,8 +1,5 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import type { BaseServerTool, ServerToolContext } from '@/lib/copilot/tools/server/base-tool'
|
||||
import { getBlockConfigServerTool } from '@/lib/copilot/tools/server/blocks/get-block-config'
|
||||
import { getBlockOptionsServerTool } from '@/lib/copilot/tools/server/blocks/get-block-options'
|
||||
import { getBlocksAndToolsServerTool } from '@/lib/copilot/tools/server/blocks/get-blocks-and-tools'
|
||||
import { getBlocksMetadataServerTool } from '@/lib/copilot/tools/server/blocks/get-blocks-metadata-tool'
|
||||
import { getTriggerBlocksServerTool } from '@/lib/copilot/tools/server/blocks/get-trigger-blocks'
|
||||
import { searchDocumentationServerTool } from '@/lib/copilot/tools/server/docs/search-documentation'
|
||||
@@ -22,10 +19,7 @@ const logger = createLogger('ServerToolRouter')
|
||||
|
||||
/** Registry of all server tools. Tools self-declare their validation schemas. */
|
||||
const serverToolRegistry: Record<string, BaseServerTool> = {
|
||||
[getBlocksAndToolsServerTool.name]: getBlocksAndToolsServerTool,
|
||||
[getBlocksMetadataServerTool.name]: getBlocksMetadataServerTool,
|
||||
[getBlockOptionsServerTool.name]: getBlockOptionsServerTool,
|
||||
[getBlockConfigServerTool.name]: getBlockConfigServerTool,
|
||||
[getTriggerBlocksServerTool.name]: getTriggerBlocksServerTool,
|
||||
[editWorkflowServerTool.name]: editWorkflowServerTool,
|
||||
[getWorkflowConsoleServerTool.name]: getWorkflowConsoleServerTool,
|
||||
|
||||
@@ -7,22 +7,6 @@ export const ExecuteResponseSuccessSchema = z.object({
|
||||
})
|
||||
export type ExecuteResponseSuccess = z.infer<typeof ExecuteResponseSuccessSchema>
|
||||
|
||||
// get_blocks_and_tools
|
||||
export const GetBlocksAndToolsInput = z.object({})
|
||||
export const GetBlocksAndToolsResult = z.object({
|
||||
blocks: z.array(
|
||||
z
|
||||
.object({
|
||||
type: z.string(),
|
||||
name: z.string(),
|
||||
triggerAllowed: z.boolean().optional(),
|
||||
longDescription: z.string().optional(),
|
||||
})
|
||||
.passthrough()
|
||||
),
|
||||
})
|
||||
export type GetBlocksAndToolsResultType = z.infer<typeof GetBlocksAndToolsResult>
|
||||
|
||||
// get_blocks_metadata
|
||||
export const GetBlocksMetadataInput = z.object({ blockIds: z.array(z.string()).min(1) })
|
||||
export const GetBlocksMetadataResult = z.object({ metadata: z.record(z.any()) })
|
||||
@@ -35,41 +19,6 @@ export const GetTriggerBlocksResult = z.object({
|
||||
})
|
||||
export type GetTriggerBlocksResultType = z.infer<typeof GetTriggerBlocksResult>
|
||||
|
||||
// get_block_options
|
||||
export const GetBlockOptionsInput = z.object({
|
||||
blockId: z.string(),
|
||||
})
|
||||
export const GetBlockOptionsResult = z.object({
|
||||
blockId: z.string(),
|
||||
blockName: z.string(),
|
||||
operations: z.array(
|
||||
z.object({
|
||||
id: z.string(),
|
||||
name: z.string(),
|
||||
description: z.string().optional(),
|
||||
})
|
||||
),
|
||||
})
|
||||
export type GetBlockOptionsInputType = z.infer<typeof GetBlockOptionsInput>
|
||||
export type GetBlockOptionsResultType = z.infer<typeof GetBlockOptionsResult>
|
||||
|
||||
// get_block_config
|
||||
export const GetBlockConfigInput = z.object({
|
||||
blockType: z.string(),
|
||||
operation: z.string().optional(),
|
||||
trigger: z.boolean().optional(),
|
||||
})
|
||||
export const GetBlockConfigResult = z.object({
|
||||
blockType: z.string(),
|
||||
blockName: z.string(),
|
||||
operation: z.string().optional(),
|
||||
trigger: z.boolean().optional(),
|
||||
inputs: z.record(z.any()),
|
||||
outputs: z.record(z.any()),
|
||||
})
|
||||
export type GetBlockConfigInputType = z.infer<typeof GetBlockConfigInput>
|
||||
export type GetBlockConfigResultType = z.infer<typeof GetBlockConfigResult>
|
||||
|
||||
// knowledge_base - shared schema used by client tool, server tool, and registry
|
||||
export const KnowledgeBaseArgsSchema = z.object({
|
||||
operation: z.enum([
|
||||
@@ -77,6 +26,8 @@ export const KnowledgeBaseArgsSchema = z.object({
|
||||
'list',
|
||||
'get',
|
||||
'query',
|
||||
'update',
|
||||
'delete',
|
||||
'list_tags',
|
||||
'create_tag',
|
||||
'update_tag',
|
||||
|
||||
17
apps/sim/lib/copilot/vfs/index.ts
Normal file
17
apps/sim/lib/copilot/vfs/index.ts
Normal file
@@ -0,0 +1,17 @@
|
||||
export { WorkspaceVFS, getOrMaterializeVFS } from '@/lib/copilot/vfs/workspace-vfs'
|
||||
export type {
|
||||
GrepMatch,
|
||||
GrepOptions,
|
||||
GrepOutputMode,
|
||||
GrepCountEntry,
|
||||
ReadResult,
|
||||
DirEntry,
|
||||
} from '@/lib/copilot/vfs/operations'
|
||||
export {
|
||||
serializeBlockSchema,
|
||||
serializeDocuments,
|
||||
serializeIntegrationSchema,
|
||||
serializeKBMeta,
|
||||
serializeRecentExecutions,
|
||||
serializeWorkflowMeta,
|
||||
} from '@/lib/copilot/vfs/serializers'
|
||||
256
apps/sim/lib/copilot/vfs/operations.ts
Normal file
256
apps/sim/lib/copilot/vfs/operations.ts
Normal file
@@ -0,0 +1,256 @@
|
||||
export interface GrepMatch {
|
||||
path: string
|
||||
line: number
|
||||
content: string
|
||||
}
|
||||
|
||||
export type GrepOutputMode = 'content' | 'files_with_matches' | 'count'
|
||||
|
||||
export interface GrepOptions {
|
||||
maxResults?: number
|
||||
outputMode?: GrepOutputMode
|
||||
ignoreCase?: boolean
|
||||
lineNumbers?: boolean
|
||||
context?: number
|
||||
}
|
||||
|
||||
export interface GrepCountEntry {
|
||||
path: string
|
||||
count: number
|
||||
}
|
||||
|
||||
export interface ReadResult {
|
||||
content: string
|
||||
totalLines: number
|
||||
}
|
||||
|
||||
export interface DirEntry {
|
||||
name: string
|
||||
type: 'file' | 'dir'
|
||||
}
|
||||
|
||||
/**
|
||||
* Regex search over VFS file contents.
|
||||
* Supports multiple output modes: content (default), files_with_matches, count.
|
||||
*/
|
||||
export function grep(
|
||||
files: Map<string, string>,
|
||||
pattern: string,
|
||||
path?: string,
|
||||
opts?: GrepOptions
|
||||
): GrepMatch[] | string[] | GrepCountEntry[] {
|
||||
const maxResults = opts?.maxResults ?? 100
|
||||
const outputMode = opts?.outputMode ?? 'content'
|
||||
const ignoreCase = opts?.ignoreCase ?? false
|
||||
const showLineNumbers = opts?.lineNumbers ?? true
|
||||
const contextLines = opts?.context ?? 0
|
||||
|
||||
const flags = ignoreCase ? 'gi' : 'g'
|
||||
let regex: RegExp
|
||||
try {
|
||||
regex = new RegExp(pattern, flags)
|
||||
} catch {
|
||||
return []
|
||||
}
|
||||
|
||||
if (outputMode === 'files_with_matches') {
|
||||
const matchingFiles: string[] = []
|
||||
for (const [filePath, content] of files) {
|
||||
if (path && !filePath.startsWith(path)) continue
|
||||
regex.lastIndex = 0
|
||||
if (regex.test(content)) {
|
||||
matchingFiles.push(filePath)
|
||||
if (matchingFiles.length >= maxResults) break
|
||||
}
|
||||
}
|
||||
return matchingFiles
|
||||
}
|
||||
|
||||
if (outputMode === 'count') {
|
||||
const counts: GrepCountEntry[] = []
|
||||
for (const [filePath, content] of files) {
|
||||
if (path && !filePath.startsWith(path)) continue
|
||||
const lines = content.split('\n')
|
||||
let count = 0
|
||||
for (const line of lines) {
|
||||
regex.lastIndex = 0
|
||||
if (regex.test(line)) count++
|
||||
}
|
||||
if (count > 0) {
|
||||
counts.push({ path: filePath, count })
|
||||
if (counts.length >= maxResults) break
|
||||
}
|
||||
}
|
||||
return counts
|
||||
}
|
||||
|
||||
// Default: 'content' mode
|
||||
const matches: GrepMatch[] = []
|
||||
for (const [filePath, content] of files) {
|
||||
if (path && !filePath.startsWith(path)) continue
|
||||
|
||||
const lines = content.split('\n')
|
||||
for (let i = 0; i < lines.length; i++) {
|
||||
regex.lastIndex = 0
|
||||
if (regex.test(lines[i])) {
|
||||
if (contextLines > 0) {
|
||||
const start = Math.max(0, i - contextLines)
|
||||
const end = Math.min(lines.length - 1, i + contextLines)
|
||||
for (let j = start; j <= end; j++) {
|
||||
matches.push({
|
||||
path: filePath,
|
||||
line: showLineNumbers ? j + 1 : 0,
|
||||
content: lines[j],
|
||||
})
|
||||
}
|
||||
} else {
|
||||
matches.push({
|
||||
path: filePath,
|
||||
line: showLineNumbers ? i + 1 : 0,
|
||||
content: lines[i],
|
||||
})
|
||||
}
|
||||
if (matches.length >= maxResults) return matches
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return matches
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert a glob pattern to a RegExp.
|
||||
* Supports *, **, and ? wildcards.
|
||||
*/
|
||||
function globToRegExp(pattern: string): RegExp {
|
||||
let regexStr = '^'
|
||||
let i = 0
|
||||
while (i < pattern.length) {
|
||||
const ch = pattern[i]
|
||||
if (ch === '*') {
|
||||
if (pattern[i + 1] === '*') {
|
||||
// ** matches any number of path segments
|
||||
if (pattern[i + 2] === '/') {
|
||||
regexStr += '(?:.+/)?'
|
||||
i += 3
|
||||
} else {
|
||||
regexStr += '.*'
|
||||
i += 2
|
||||
}
|
||||
} else {
|
||||
// * matches anything except /
|
||||
regexStr += '[^/]*'
|
||||
i++
|
||||
}
|
||||
} else if (ch === '?') {
|
||||
regexStr += '[^/]'
|
||||
i++
|
||||
} else if ('.+^${}()|[]\\'.includes(ch)) {
|
||||
regexStr += '\\' + ch
|
||||
i++
|
||||
} else {
|
||||
regexStr += ch
|
||||
i++
|
||||
}
|
||||
}
|
||||
regexStr += '$'
|
||||
return new RegExp(regexStr)
|
||||
}
|
||||
|
||||
/**
|
||||
* Glob pattern matching against VFS file paths and virtual directories.
|
||||
* Returns matching paths (both files and directory prefixes), just like a real filesystem.
|
||||
*/
|
||||
export function glob(files: Map<string, string>, pattern: string): string[] {
|
||||
const regex = globToRegExp(pattern)
|
||||
const result = new Set<string>()
|
||||
|
||||
// Collect all virtual directory paths from file paths
|
||||
const directories = new Set<string>()
|
||||
for (const filePath of files.keys()) {
|
||||
const parts = filePath.split('/')
|
||||
for (let i = 1; i < parts.length; i++) {
|
||||
directories.add(parts.slice(0, i).join('/'))
|
||||
}
|
||||
}
|
||||
|
||||
// Match file paths
|
||||
for (const filePath of files.keys()) {
|
||||
if (regex.test(filePath)) {
|
||||
result.add(filePath)
|
||||
}
|
||||
}
|
||||
|
||||
// Match virtual directory paths
|
||||
for (const dir of directories) {
|
||||
if (regex.test(dir)) {
|
||||
result.add(dir)
|
||||
}
|
||||
}
|
||||
|
||||
return Array.from(result).sort()
|
||||
}
|
||||
|
||||
/**
|
||||
* Read a VFS file's content, optionally with offset and limit.
|
||||
* Returns null if the file does not exist.
|
||||
*/
|
||||
export function read(
|
||||
files: Map<string, string>,
|
||||
path: string,
|
||||
offset?: number,
|
||||
limit?: number
|
||||
): ReadResult | null {
|
||||
const content = files.get(path)
|
||||
if (content === undefined) return null
|
||||
|
||||
const lines = content.split('\n')
|
||||
const totalLines = lines.length
|
||||
|
||||
if (offset !== undefined || limit !== undefined) {
|
||||
const start = offset ?? 0
|
||||
const end = limit !== undefined ? start + limit : lines.length
|
||||
return {
|
||||
content: lines.slice(start, end).join('\n'),
|
||||
totalLines,
|
||||
}
|
||||
}
|
||||
|
||||
return { content, totalLines }
|
||||
}
|
||||
|
||||
/**
|
||||
* List entries in a VFS directory path.
|
||||
* Returns files and subdirectories at the given path level.
|
||||
*/
|
||||
export function list(files: Map<string, string>, path: string): DirEntry[] {
|
||||
const normalizedPath = path.endsWith('/') ? path : path + '/'
|
||||
const seen = new Set<string>()
|
||||
const entries: DirEntry[] = []
|
||||
|
||||
for (const filePath of files.keys()) {
|
||||
if (!filePath.startsWith(normalizedPath)) continue
|
||||
|
||||
const remainder = filePath.slice(normalizedPath.length)
|
||||
if (!remainder) continue
|
||||
|
||||
const slashIndex = remainder.indexOf('/')
|
||||
if (slashIndex === -1) {
|
||||
if (!seen.has(remainder)) {
|
||||
seen.add(remainder)
|
||||
entries.push({ name: remainder, type: 'file' })
|
||||
}
|
||||
} else {
|
||||
const dirName = remainder.slice(0, slashIndex)
|
||||
if (!seen.has(dirName)) {
|
||||
seen.add(dirName)
|
||||
entries.push({ name: dirName, type: 'dir' })
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return entries.sort((a, b) => {
|
||||
if (a.type !== b.type) return a.type === 'dir' ? -1 : 1
|
||||
return a.name.localeCompare(b.name)
|
||||
})
|
||||
}
|
||||
474
apps/sim/lib/copilot/vfs/serializers.ts
Normal file
474
apps/sim/lib/copilot/vfs/serializers.ts
Normal file
@@ -0,0 +1,474 @@
|
||||
import type { BlockConfig, SubBlockConfig } from '@/blocks/types'
|
||||
import { PROVIDER_DEFINITIONS } from '@/providers/models'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
|
||||
/**
|
||||
* Serialize workflow metadata for VFS meta.json
|
||||
*/
|
||||
export function serializeWorkflowMeta(wf: {
|
||||
id: string
|
||||
name: string
|
||||
description?: string | null
|
||||
isDeployed: boolean
|
||||
deployedAt?: Date | null
|
||||
runCount: number
|
||||
lastRunAt?: Date | null
|
||||
createdAt: Date
|
||||
updatedAt: Date
|
||||
}): string {
|
||||
return JSON.stringify(
|
||||
{
|
||||
id: wf.id,
|
||||
name: wf.name,
|
||||
description: wf.description || undefined,
|
||||
isDeployed: wf.isDeployed,
|
||||
deployedAt: wf.deployedAt?.toISOString(),
|
||||
runCount: wf.runCount,
|
||||
lastRunAt: wf.lastRunAt?.toISOString(),
|
||||
createdAt: wf.createdAt.toISOString(),
|
||||
updatedAt: wf.updatedAt.toISOString(),
|
||||
},
|
||||
null,
|
||||
2
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Serialize execution logs for VFS executions.json.
|
||||
* Takes recent execution log rows and produces a summary.
|
||||
*/
|
||||
export function serializeRecentExecutions(
|
||||
executions: Array<{
|
||||
id: string
|
||||
executionId: string
|
||||
status: string
|
||||
trigger: string
|
||||
startedAt: Date
|
||||
endedAt?: Date | null
|
||||
totalDurationMs?: number | null
|
||||
}>
|
||||
): string {
|
||||
return JSON.stringify(
|
||||
executions.map((e) => ({
|
||||
executionId: e.executionId,
|
||||
status: e.status,
|
||||
trigger: e.trigger,
|
||||
startedAt: e.startedAt.toISOString(),
|
||||
endedAt: e.endedAt?.toISOString(),
|
||||
durationMs: e.totalDurationMs,
|
||||
})),
|
||||
null,
|
||||
2
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Serialize knowledge base metadata for VFS meta.json
|
||||
*/
|
||||
export function serializeKBMeta(kb: {
|
||||
id: string
|
||||
name: string
|
||||
description?: string | null
|
||||
embeddingModel: string
|
||||
embeddingDimension: number
|
||||
tokenCount: number
|
||||
createdAt: Date
|
||||
updatedAt: Date
|
||||
documentCount: number
|
||||
}): string {
|
||||
return JSON.stringify(
|
||||
{
|
||||
id: kb.id,
|
||||
name: kb.name,
|
||||
description: kb.description || undefined,
|
||||
embeddingModel: kb.embeddingModel,
|
||||
embeddingDimension: kb.embeddingDimension,
|
||||
tokenCount: kb.tokenCount,
|
||||
documentCount: kb.documentCount,
|
||||
createdAt: kb.createdAt.toISOString(),
|
||||
updatedAt: kb.updatedAt.toISOString(),
|
||||
},
|
||||
null,
|
||||
2
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Serialize documents list for VFS documents.json (metadata only, no content)
|
||||
*/
|
||||
export function serializeDocuments(
|
||||
docs: Array<{
|
||||
id: string
|
||||
filename: string
|
||||
fileSize: number
|
||||
mimeType: string
|
||||
chunkCount: number
|
||||
tokenCount: number
|
||||
processingStatus: string
|
||||
enabled: boolean
|
||||
uploadedAt: Date
|
||||
}>
|
||||
): string {
|
||||
return JSON.stringify(
|
||||
docs.map((d) => ({
|
||||
id: d.id,
|
||||
filename: d.filename,
|
||||
fileSize: d.fileSize,
|
||||
mimeType: d.mimeType,
|
||||
chunkCount: d.chunkCount,
|
||||
tokenCount: d.tokenCount,
|
||||
processingStatus: d.processingStatus,
|
||||
enabled: d.enabled,
|
||||
uploadedAt: d.uploadedAt.toISOString(),
|
||||
})),
|
||||
null,
|
||||
2
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the static model list from PROVIDER_DEFINITIONS for VFS serialization.
|
||||
* Excludes dynamic providers (ollama, vllm, openrouter) whose models are user-configured.
|
||||
* Includes provider ID and whether the model is hosted by Sim (no API key required).
|
||||
*/
|
||||
function getStaticModelOptionsForVFS(): Array<{
|
||||
id: string
|
||||
provider: string
|
||||
hosted: boolean
|
||||
}> {
|
||||
const hostedProviders = new Set(['openai', 'anthropic', 'google'])
|
||||
const dynamicProviders = new Set(['ollama', 'vllm', 'openrouter'])
|
||||
|
||||
const models: Array<{ id: string; provider: string; hosted: boolean }> = []
|
||||
|
||||
for (const [providerId, def] of Object.entries(PROVIDER_DEFINITIONS)) {
|
||||
if (dynamicProviders.has(providerId)) continue
|
||||
for (const model of def.models) {
|
||||
models.push({
|
||||
id: model.id,
|
||||
provider: providerId,
|
||||
hosted: hostedProviders.has(providerId),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
return models
|
||||
}
|
||||
|
||||
/**
|
||||
* Serialize a SubBlockConfig for the VFS component schema.
|
||||
* Strips functions and UI-only fields. Includes static options arrays.
|
||||
*/
|
||||
function serializeSubBlock(sb: SubBlockConfig): Record<string, unknown> {
|
||||
const result: Record<string, unknown> = {
|
||||
id: sb.id,
|
||||
type: sb.type,
|
||||
}
|
||||
if (sb.title) result.title = sb.title
|
||||
if (sb.required === true) result.required = true
|
||||
if (sb.defaultValue !== undefined) result.defaultValue = sb.defaultValue
|
||||
if (sb.mode) result.mode = sb.mode
|
||||
if (sb.canonicalParamId) result.canonicalParamId = sb.canonicalParamId
|
||||
|
||||
// Include static options arrays for dropdowns
|
||||
if (Array.isArray(sb.options)) {
|
||||
result.options = sb.options
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
/**
|
||||
* Serialize a block schema for VFS components/blocks/{type}.json
|
||||
*/
|
||||
export function serializeBlockSchema(block: BlockConfig): string {
|
||||
const subBlocks = block.subBlocks.map((sb) => {
|
||||
const serialized = serializeSubBlock(sb)
|
||||
|
||||
// For model comboboxes with function options, inject static model data with hosting info
|
||||
if (sb.id === 'model' && sb.type === 'combobox' && typeof sb.options === 'function') {
|
||||
serialized.options = getStaticModelOptionsForVFS()
|
||||
}
|
||||
|
||||
return serialized
|
||||
})
|
||||
|
||||
return JSON.stringify(
|
||||
{
|
||||
type: block.type,
|
||||
name: block.name,
|
||||
description: block.description,
|
||||
category: block.category,
|
||||
longDescription: block.longDescription || undefined,
|
||||
bestPractices: block.bestPractices || undefined,
|
||||
triggerAllowed: block.triggerAllowed || undefined,
|
||||
singleInstance: block.singleInstance || undefined,
|
||||
tools: block.tools.access,
|
||||
subBlocks,
|
||||
inputs: block.inputs,
|
||||
outputs: Object.fromEntries(
|
||||
Object.entries(block.outputs)
|
||||
.filter(([key, val]) => key !== 'visualization' && val != null)
|
||||
.map(([key, val]) => [
|
||||
key,
|
||||
typeof val === 'string'
|
||||
? { type: val }
|
||||
: { type: val.type, description: (val as { description?: string }).description },
|
||||
])
|
||||
),
|
||||
},
|
||||
null,
|
||||
2
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Serialize OAuth credentials for VFS environment/credentials.json.
|
||||
* Shows which integrations are connected — IDs and scopes, NOT tokens.
|
||||
*/
|
||||
export function serializeCredentials(
|
||||
accounts: Array<{
|
||||
providerId: string
|
||||
scope: string | null
|
||||
createdAt: Date
|
||||
}>
|
||||
): string {
|
||||
return JSON.stringify(
|
||||
accounts.map((a) => ({
|
||||
provider: a.providerId,
|
||||
scope: a.scope || undefined,
|
||||
connectedAt: a.createdAt.toISOString(),
|
||||
})),
|
||||
null,
|
||||
2
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Serialize API keys for VFS environment/api-keys.json.
|
||||
* Shows key names and types — NOT the actual key values.
|
||||
*/
|
||||
export function serializeApiKeys(
|
||||
keys: Array<{
|
||||
id: string
|
||||
name: string
|
||||
type: string
|
||||
lastUsed: Date | null
|
||||
createdAt: Date
|
||||
expiresAt: Date | null
|
||||
}>
|
||||
): string {
|
||||
return JSON.stringify(
|
||||
keys.map((k) => ({
|
||||
id: k.id,
|
||||
name: k.name,
|
||||
type: k.type,
|
||||
lastUsed: k.lastUsed?.toISOString(),
|
||||
createdAt: k.createdAt.toISOString(),
|
||||
expiresAt: k.expiresAt?.toISOString(),
|
||||
})),
|
||||
null,
|
||||
2
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Serialize environment variables for VFS environment/variables.json.
|
||||
* Shows variable NAMES only — NOT values.
|
||||
*/
|
||||
export function serializeEnvironmentVariables(
|
||||
personalVarNames: string[],
|
||||
workspaceVarNames: string[]
|
||||
): string {
|
||||
return JSON.stringify(
|
||||
{
|
||||
personal: personalVarNames,
|
||||
workspace: workspaceVarNames,
|
||||
},
|
||||
null,
|
||||
2
|
||||
)
|
||||
}
|
||||
|
||||
/** Input types for deployment serialization. */
|
||||
export interface DeploymentData {
|
||||
workflowId: string
|
||||
isDeployed: boolean
|
||||
deployedAt?: Date | null
|
||||
needsRedeployment?: boolean
|
||||
api?: {
|
||||
version: number
|
||||
createdAt: Date
|
||||
} | null
|
||||
chat?: {
|
||||
id: string
|
||||
identifier: string
|
||||
title: string
|
||||
description?: string | null
|
||||
authType: string
|
||||
customizations: unknown
|
||||
isActive: boolean
|
||||
} | null
|
||||
form?: {
|
||||
id: string
|
||||
identifier: string
|
||||
title: string
|
||||
description?: string | null
|
||||
authType: string
|
||||
showBranding: boolean
|
||||
customizations: unknown
|
||||
isActive: boolean
|
||||
} | null
|
||||
mcp: Array<{
|
||||
serverId: string
|
||||
serverName: string
|
||||
toolId: string
|
||||
toolName: string
|
||||
toolDescription?: string | null
|
||||
}>
|
||||
a2a?: {
|
||||
id: string
|
||||
name: string
|
||||
description?: string | null
|
||||
version: string
|
||||
isPublished: boolean
|
||||
capabilities: unknown
|
||||
} | null
|
||||
}
|
||||
|
||||
/**
|
||||
* Serialize all deployment configurations for VFS deployment.json.
|
||||
* Only includes keys for active deployment types.
|
||||
*/
|
||||
export function serializeDeployments(data: DeploymentData): string {
|
||||
const result: Record<string, unknown> = {}
|
||||
|
||||
if (data.needsRedeployment !== undefined) {
|
||||
result.needsRedeployment = data.needsRedeployment
|
||||
}
|
||||
|
||||
if (data.isDeployed) {
|
||||
result.api = {
|
||||
isDeployed: true,
|
||||
deployedAt: data.deployedAt?.toISOString(),
|
||||
apiEndpoint: `/api/workflows/${data.workflowId}/run`,
|
||||
...(data.api ? { version: data.api.version } : {}),
|
||||
}
|
||||
}
|
||||
|
||||
if (data.chat) {
|
||||
result.chat = {
|
||||
id: data.chat.id,
|
||||
identifier: data.chat.identifier,
|
||||
chatUrl: `/chat/${data.chat.identifier}`,
|
||||
title: data.chat.title,
|
||||
description: data.chat.description || undefined,
|
||||
authType: data.chat.authType,
|
||||
customizations: data.chat.customizations,
|
||||
isActive: data.chat.isActive,
|
||||
}
|
||||
}
|
||||
|
||||
if (data.form) {
|
||||
result.form = {
|
||||
id: data.form.id,
|
||||
identifier: data.form.identifier,
|
||||
formUrl: `/form/${data.form.identifier}`,
|
||||
title: data.form.title,
|
||||
description: data.form.description || undefined,
|
||||
authType: data.form.authType,
|
||||
showBranding: data.form.showBranding,
|
||||
customizations: data.form.customizations,
|
||||
isActive: data.form.isActive,
|
||||
}
|
||||
}
|
||||
|
||||
if (data.mcp.length > 0) {
|
||||
result.mcp = data.mcp.map((m) => ({
|
||||
serverId: m.serverId,
|
||||
serverName: m.serverName,
|
||||
toolId: m.toolId,
|
||||
toolName: m.toolName,
|
||||
toolDescription: m.toolDescription || undefined,
|
||||
}))
|
||||
}
|
||||
|
||||
if (data.a2a) {
|
||||
result.a2a = {
|
||||
id: data.a2a.id,
|
||||
name: data.a2a.name,
|
||||
description: data.a2a.description || undefined,
|
||||
version: data.a2a.version,
|
||||
isPublished: data.a2a.isPublished,
|
||||
capabilities: data.a2a.capabilities,
|
||||
agentUrl: `/api/a2a/serve/${data.a2a.id}`,
|
||||
}
|
||||
}
|
||||
|
||||
return JSON.stringify(result, null, 2)
|
||||
}
|
||||
|
||||
/**
|
||||
* Serialize a custom tool for VFS custom-tools/{name}.json
|
||||
*/
|
||||
export function serializeCustomTool(tool: {
|
||||
id: string
|
||||
title: string
|
||||
schema: unknown
|
||||
code: string
|
||||
}): string {
|
||||
return JSON.stringify(
|
||||
{
|
||||
id: tool.id,
|
||||
title: tool.title,
|
||||
schema: tool.schema,
|
||||
codePreview: tool.code.length > 500 ? tool.code.slice(0, 500) + '...' : tool.code,
|
||||
},
|
||||
null,
|
||||
2
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Serialize an integration/tool schema for VFS components/integrations/{service}/{operation}.json
|
||||
*/
|
||||
export function serializeIntegrationSchema(tool: ToolConfig): string {
|
||||
return JSON.stringify(
|
||||
{
|
||||
id: tool.id,
|
||||
name: tool.name,
|
||||
description: tool.description,
|
||||
version: tool.version,
|
||||
oauth: tool.oauth
|
||||
? { required: tool.oauth.required, provider: tool.oauth.provider }
|
||||
: undefined,
|
||||
params: tool.params
|
||||
? Object.fromEntries(
|
||||
Object.entries(tool.params)
|
||||
.filter(([, val]) => val != null)
|
||||
.map(([key, val]) => [
|
||||
key,
|
||||
{
|
||||
type: val.type,
|
||||
required: val.required,
|
||||
description: val.description,
|
||||
default: val.default,
|
||||
},
|
||||
])
|
||||
)
|
||||
: undefined,
|
||||
outputs: tool.outputs
|
||||
? Object.fromEntries(
|
||||
Object.entries(tool.outputs)
|
||||
.filter(([, val]) => val != null)
|
||||
.map(([key, val]) => [
|
||||
key,
|
||||
{ type: val.type, description: val.description },
|
||||
])
|
||||
)
|
||||
: undefined,
|
||||
},
|
||||
null,
|
||||
2
|
||||
)
|
||||
}
|
||||
602
apps/sim/lib/copilot/vfs/workspace-vfs.ts
Normal file
602
apps/sim/lib/copilot/vfs/workspace-vfs.ts
Normal file
@@ -0,0 +1,602 @@
|
||||
import { db } from '@sim/db'
|
||||
import {
|
||||
a2aAgent,
|
||||
account,
|
||||
apiKey,
|
||||
chat as chatTable,
|
||||
customTools,
|
||||
document,
|
||||
environment,
|
||||
form,
|
||||
knowledgeBase,
|
||||
workflow,
|
||||
workflowDeploymentVersion,
|
||||
workflowMcpServer,
|
||||
workflowMcpTool,
|
||||
workspaceEnvironment,
|
||||
workflowExecutionLogs,
|
||||
} from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, count, desc, eq, isNull } from 'drizzle-orm'
|
||||
import { getAllBlocks } from '@/blocks/registry'
|
||||
import { getLatestVersionTools, stripVersionSuffix } from '@/tools/utils'
|
||||
import { tools as toolRegistry } from '@/tools/registry'
|
||||
import { hasWorkflowChanged } from '@/lib/workflows/comparison'
|
||||
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'
|
||||
import { sanitizeForCopilot } from '@/lib/workflows/sanitization/json-sanitizer'
|
||||
import type { GrepMatch, GrepOptions, ReadResult, DirEntry } from '@/lib/copilot/vfs/operations'
|
||||
import * as ops from '@/lib/copilot/vfs/operations'
|
||||
import {
|
||||
serializeApiKeys,
|
||||
serializeBlockSchema,
|
||||
serializeCredentials,
|
||||
serializeCustomTool,
|
||||
serializeDeployments,
|
||||
serializeDocuments,
|
||||
serializeEnvironmentVariables,
|
||||
serializeIntegrationSchema,
|
||||
serializeKBMeta,
|
||||
serializeRecentExecutions,
|
||||
serializeWorkflowMeta,
|
||||
} from '@/lib/copilot/vfs/serializers'
|
||||
import type { DeploymentData } from '@/lib/copilot/vfs/serializers'
|
||||
|
||||
const logger = createLogger('WorkspaceVFS')
|
||||
|
||||
/** Static component files, computed once and shared across all VFS instances */
|
||||
let staticComponentFiles: Map<string, string> | null = null
|
||||
|
||||
/**
|
||||
* Build the static component files from block and tool registries.
|
||||
* This only needs to happen once per process.
|
||||
*
|
||||
* Integration paths are derived deterministically from the block registry's
|
||||
* `tools.access` arrays rather than splitting tool IDs on underscores.
|
||||
* Each block declares which tools it owns, and the block type (minus version
|
||||
* suffix) becomes the service directory name.
|
||||
*/
|
||||
function getStaticComponentFiles(): Map<string, string> {
|
||||
if (staticComponentFiles) return staticComponentFiles
|
||||
|
||||
const files = new Map<string, string>()
|
||||
|
||||
const allBlocks = getAllBlocks()
|
||||
const visibleBlocks = allBlocks.filter((b) => !b.hideFromToolbar)
|
||||
|
||||
let blocksFiltered = 0
|
||||
for (const block of visibleBlocks) {
|
||||
const path = `components/blocks/${block.type}.json`
|
||||
files.set(path, serializeBlockSchema(block))
|
||||
}
|
||||
blocksFiltered = allBlocks.length - visibleBlocks.length
|
||||
|
||||
// Build a reverse index: tool ID → service name from block registry.
|
||||
// The block type (stripped of version suffix) is used as the service directory.
|
||||
const toolToService = new Map<string, string>()
|
||||
for (const block of visibleBlocks) {
|
||||
if (!block.tools?.access) continue
|
||||
const service = stripVersionSuffix(block.type)
|
||||
for (const toolId of block.tools.access) {
|
||||
toolToService.set(toolId, service)
|
||||
}
|
||||
}
|
||||
|
||||
const latestTools = getLatestVersionTools(toolRegistry)
|
||||
let integrationCount = 0
|
||||
for (const [toolId, tool] of Object.entries(latestTools)) {
|
||||
const baseName = stripVersionSuffix(toolId)
|
||||
const service = toolToService.get(toolId) ?? toolToService.get(baseName)
|
||||
if (!service) {
|
||||
logger.debug('Tool not associated with any block, skipping VFS entry', { toolId })
|
||||
continue
|
||||
}
|
||||
|
||||
// Derive operation name by stripping the service prefix
|
||||
const prefix = `${service}_`
|
||||
const operation = baseName.startsWith(prefix)
|
||||
? baseName.slice(prefix.length)
|
||||
: baseName
|
||||
|
||||
const path = `components/integrations/${service}/${operation}.json`
|
||||
files.set(path, serializeIntegrationSchema(tool))
|
||||
integrationCount++
|
||||
}
|
||||
|
||||
// Add synthetic component files for subflow containers (not in block registry)
|
||||
files.set('components/blocks/loop.json', JSON.stringify({
|
||||
type: 'loop',
|
||||
name: 'Loop',
|
||||
description: 'Iterate over a collection or repeat a fixed number of times. Blocks inside the loop run once per iteration.',
|
||||
inputs: {
|
||||
loopType: { type: 'string', enum: ['for', 'forEach', 'while', 'doWhile'], description: 'Loop strategy' },
|
||||
iterations: { type: 'number', description: 'Number of iterations (for loopType "for")' },
|
||||
collection: { type: 'string', description: 'Collection expression to iterate (for loopType "forEach")' },
|
||||
condition: { type: 'string', description: 'Condition expression (for loopType "while" or "doWhile")' },
|
||||
},
|
||||
sourceHandles: ['loop-start-source', 'source'],
|
||||
notes: 'Use "loop-start-source" to connect to blocks inside the loop. Use "source" for the edge that runs after the loop completes. Blocks inside the loop must have parentId set to the loop block ID.',
|
||||
}, null, 2))
|
||||
|
||||
files.set('components/blocks/parallel.json', JSON.stringify({
|
||||
type: 'parallel',
|
||||
name: 'Parallel',
|
||||
description: 'Run blocks in parallel branches. All branches execute concurrently.',
|
||||
inputs: {
|
||||
parallelType: { type: 'string', enum: ['count', 'collection'], description: 'Parallel strategy' },
|
||||
count: { type: 'number', description: 'Number of parallel branches (for parallelType "count")' },
|
||||
collection: { type: 'string', description: 'Collection to distribute (for parallelType "collection")' },
|
||||
},
|
||||
sourceHandles: ['parallel-start-source', 'source'],
|
||||
notes: 'Use "parallel-start-source" to connect to blocks inside the parallel container. Use "source" for the edge after all branches complete. Blocks inside must have parentId set to the parallel block ID.',
|
||||
}, null, 2))
|
||||
|
||||
logger.info('Static component files built', {
|
||||
blocks: visibleBlocks.length,
|
||||
blocksFiltered,
|
||||
integrations: integrationCount,
|
||||
})
|
||||
|
||||
// Only cache after successful completion to avoid poisoning with partial results
|
||||
staticComponentFiles = files
|
||||
return staticComponentFiles
|
||||
}
|
||||
|
||||
/**
|
||||
* Virtual Filesystem that materializes workspace data into an in-memory Map.
|
||||
*
|
||||
* Structure:
|
||||
* workflows/{name}/meta.json
|
||||
* workflows/{name}/state.json (sanitized blocks with embedded connections)
|
||||
* workflows/{name}/executions.json
|
||||
* workflows/{name}/deployment.json
|
||||
* knowledgebases/{name}/meta.json
|
||||
* knowledgebases/{name}/documents.json
|
||||
* custom-tools/{name}.json
|
||||
* environment/credentials.json
|
||||
* environment/api-keys.json
|
||||
* environment/variables.json
|
||||
* components/blocks/{type}.json
|
||||
* components/integrations/{service}/{operation}.json
|
||||
*/
|
||||
export class WorkspaceVFS {
|
||||
private files: Map<string, string> = new Map()
|
||||
|
||||
/**
|
||||
* Materialize workspace data from DB into the VFS.
|
||||
* Queries workflows, knowledge bases, and merges static component schemas.
|
||||
*/
|
||||
async materialize(workspaceId: string, userId: string): Promise<void> {
|
||||
const start = Date.now()
|
||||
this.files = new Map()
|
||||
|
||||
await Promise.all([
|
||||
this.materializeWorkflows(workspaceId, userId),
|
||||
this.materializeKnowledgeBases(workspaceId),
|
||||
this.materializeEnvironment(workspaceId, userId),
|
||||
this.materializeCustomTools(workspaceId),
|
||||
])
|
||||
|
||||
// Merge static component files
|
||||
for (const [path, content] of getStaticComponentFiles()) {
|
||||
this.files.set(path, content)
|
||||
}
|
||||
|
||||
logger.info('VFS materialized', {
|
||||
workspaceId,
|
||||
fileCount: this.files.size,
|
||||
durationMs: Date.now() - start,
|
||||
})
|
||||
}
|
||||
|
||||
grep(
|
||||
pattern: string,
|
||||
path?: string,
|
||||
options?: GrepOptions
|
||||
): GrepMatch[] | string[] | ops.GrepCountEntry[] {
|
||||
return ops.grep(this.files, pattern, path, options)
|
||||
}
|
||||
|
||||
glob(pattern: string): string[] {
|
||||
return ops.glob(this.files, pattern)
|
||||
}
|
||||
|
||||
read(path: string, offset?: number, limit?: number): ReadResult | null {
|
||||
return ops.read(this.files, path, offset, limit)
|
||||
}
|
||||
|
||||
list(path: string): DirEntry[] {
|
||||
return ops.list(this.files, path)
|
||||
}
|
||||
|
||||
/**
|
||||
* Materialize all workflows in the workspace.
|
||||
*/
|
||||
private async materializeWorkflows(workspaceId: string, userId: string): Promise<void> {
|
||||
const workflowRows = await db
|
||||
.select({
|
||||
id: workflow.id,
|
||||
name: workflow.name,
|
||||
description: workflow.description,
|
||||
isDeployed: workflow.isDeployed,
|
||||
deployedAt: workflow.deployedAt,
|
||||
runCount: workflow.runCount,
|
||||
lastRunAt: workflow.lastRunAt,
|
||||
createdAt: workflow.createdAt,
|
||||
updatedAt: workflow.updatedAt,
|
||||
})
|
||||
.from(workflow)
|
||||
.where(eq(workflow.workspaceId, workspaceId))
|
||||
|
||||
// Load normalized data + executions in parallel for all workflows
|
||||
await Promise.all(
|
||||
workflowRows.map(async (wf) => {
|
||||
const safeName = sanitizeName(wf.name)
|
||||
const prefix = `workflows/${safeName}/`
|
||||
|
||||
// Meta
|
||||
this.files.set(`${prefix}meta.json`, serializeWorkflowMeta(wf))
|
||||
|
||||
// Workflow state (blocks with embedded connections, nested loops/parallels)
|
||||
let normalized: Awaited<ReturnType<typeof loadWorkflowFromNormalizedTables>> = null
|
||||
try {
|
||||
normalized = await loadWorkflowFromNormalizedTables(wf.id)
|
||||
if (normalized) {
|
||||
const sanitized = sanitizeForCopilot({
|
||||
blocks: normalized.blocks,
|
||||
edges: normalized.edges,
|
||||
loops: normalized.loops,
|
||||
parallels: normalized.parallels,
|
||||
} as any)
|
||||
this.files.set(`${prefix}state.json`, JSON.stringify(sanitized, null, 2))
|
||||
}
|
||||
} catch (err) {
|
||||
logger.warn('Failed to load workflow state', {
|
||||
workflowId: wf.id,
|
||||
error: err instanceof Error ? err.message : String(err),
|
||||
})
|
||||
}
|
||||
|
||||
// Recent executions (last 5)
|
||||
try {
|
||||
const execRows = await db
|
||||
.select({
|
||||
id: workflowExecutionLogs.id,
|
||||
executionId: workflowExecutionLogs.executionId,
|
||||
status: workflowExecutionLogs.status,
|
||||
trigger: workflowExecutionLogs.trigger,
|
||||
startedAt: workflowExecutionLogs.startedAt,
|
||||
endedAt: workflowExecutionLogs.endedAt,
|
||||
totalDurationMs: workflowExecutionLogs.totalDurationMs,
|
||||
})
|
||||
.from(workflowExecutionLogs)
|
||||
.where(eq(workflowExecutionLogs.workflowId, wf.id))
|
||||
.orderBy(desc(workflowExecutionLogs.startedAt))
|
||||
.limit(5)
|
||||
|
||||
if (execRows.length > 0) {
|
||||
this.files.set(`${prefix}executions.json`, serializeRecentExecutions(execRows))
|
||||
}
|
||||
} catch (err) {
|
||||
logger.warn('Failed to load execution logs', {
|
||||
workflowId: wf.id,
|
||||
error: err instanceof Error ? err.message : String(err),
|
||||
})
|
||||
}
|
||||
|
||||
// Deployment configuration
|
||||
try {
|
||||
const deploymentData = await this.getWorkflowDeployments(wf.id, workspaceId, wf.isDeployed, wf.deployedAt, normalized)
|
||||
if (deploymentData) {
|
||||
this.files.set(`${prefix}deployment.json`, serializeDeployments(deploymentData))
|
||||
}
|
||||
} catch (err) {
|
||||
logger.warn('Failed to load deployment data', {
|
||||
workflowId: wf.id,
|
||||
error: err instanceof Error ? err.message : String(err),
|
||||
})
|
||||
}
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Materialize all knowledge bases in the workspace.
|
||||
*/
|
||||
private async materializeKnowledgeBases(workspaceId: string): Promise<void> {
|
||||
const kbRows = await db
|
||||
.select({
|
||||
id: knowledgeBase.id,
|
||||
name: knowledgeBase.name,
|
||||
description: knowledgeBase.description,
|
||||
embeddingModel: knowledgeBase.embeddingModel,
|
||||
embeddingDimension: knowledgeBase.embeddingDimension,
|
||||
tokenCount: knowledgeBase.tokenCount,
|
||||
createdAt: knowledgeBase.createdAt,
|
||||
updatedAt: knowledgeBase.updatedAt,
|
||||
})
|
||||
.from(knowledgeBase)
|
||||
.where(and(eq(knowledgeBase.workspaceId, workspaceId), isNull(knowledgeBase.deletedAt)))
|
||||
|
||||
await Promise.all(
|
||||
kbRows.map(async (kb) => {
|
||||
const safeName = sanitizeName(kb.name)
|
||||
const prefix = `knowledgebases/${safeName}/`
|
||||
|
||||
// Get document count
|
||||
const [docCountRow] = await db
|
||||
.select({ count: count() })
|
||||
.from(document)
|
||||
.where(and(eq(document.knowledgeBaseId, kb.id), isNull(document.deletedAt)))
|
||||
|
||||
this.files.set(
|
||||
`${prefix}meta.json`,
|
||||
serializeKBMeta({
|
||||
...kb,
|
||||
documentCount: docCountRow?.count ?? 0,
|
||||
})
|
||||
)
|
||||
|
||||
// Documents metadata
|
||||
const docRows = await db
|
||||
.select({
|
||||
id: document.id,
|
||||
filename: document.filename,
|
||||
fileSize: document.fileSize,
|
||||
mimeType: document.mimeType,
|
||||
chunkCount: document.chunkCount,
|
||||
tokenCount: document.tokenCount,
|
||||
processingStatus: document.processingStatus,
|
||||
enabled: document.enabled,
|
||||
uploadedAt: document.uploadedAt,
|
||||
})
|
||||
.from(document)
|
||||
.where(and(eq(document.knowledgeBaseId, kb.id), isNull(document.deletedAt)))
|
||||
|
||||
if (docRows.length > 0) {
|
||||
this.files.set(`${prefix}documents.json`, serializeDocuments(docRows))
|
||||
}
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Query all deployment configurations for a single workflow.
|
||||
* Returns null if the workflow has no deployments of any kind.
|
||||
*/
|
||||
private async getWorkflowDeployments(
|
||||
workflowId: string,
|
||||
workspaceId: string,
|
||||
isDeployed: boolean,
|
||||
deployedAt: Date | null,
|
||||
currentNormalized?: Awaited<ReturnType<typeof loadWorkflowFromNormalizedTables>>
|
||||
): Promise<DeploymentData | null> {
|
||||
const [chatRows, formRows, mcpRows, a2aRows, versionRows] = await Promise.all([
|
||||
db
|
||||
.select({
|
||||
id: chatTable.id,
|
||||
identifier: chatTable.identifier,
|
||||
title: chatTable.title,
|
||||
description: chatTable.description,
|
||||
authType: chatTable.authType,
|
||||
customizations: chatTable.customizations,
|
||||
isActive: chatTable.isActive,
|
||||
})
|
||||
.from(chatTable)
|
||||
.where(eq(chatTable.workflowId, workflowId)),
|
||||
db
|
||||
.select({
|
||||
id: form.id,
|
||||
identifier: form.identifier,
|
||||
title: form.title,
|
||||
description: form.description,
|
||||
authType: form.authType,
|
||||
showBranding: form.showBranding,
|
||||
customizations: form.customizations,
|
||||
isActive: form.isActive,
|
||||
})
|
||||
.from(form)
|
||||
.where(eq(form.workflowId, workflowId)),
|
||||
db
|
||||
.select({
|
||||
serverId: workflowMcpTool.serverId,
|
||||
serverName: workflowMcpServer.name,
|
||||
toolId: workflowMcpTool.id,
|
||||
toolName: workflowMcpTool.toolName,
|
||||
toolDescription: workflowMcpTool.toolDescription,
|
||||
})
|
||||
.from(workflowMcpTool)
|
||||
.innerJoin(workflowMcpServer, eq(workflowMcpTool.serverId, workflowMcpServer.id))
|
||||
.where(eq(workflowMcpTool.workflowId, workflowId)),
|
||||
db
|
||||
.select({
|
||||
id: a2aAgent.id,
|
||||
name: a2aAgent.name,
|
||||
description: a2aAgent.description,
|
||||
version: a2aAgent.version,
|
||||
isPublished: a2aAgent.isPublished,
|
||||
capabilities: a2aAgent.capabilities,
|
||||
})
|
||||
.from(a2aAgent)
|
||||
.where(
|
||||
and(eq(a2aAgent.workflowId, workflowId), eq(a2aAgent.workspaceId, workspaceId))
|
||||
),
|
||||
isDeployed
|
||||
? db
|
||||
.select({
|
||||
version: workflowDeploymentVersion.version,
|
||||
state: workflowDeploymentVersion.state,
|
||||
createdAt: workflowDeploymentVersion.createdAt,
|
||||
})
|
||||
.from(workflowDeploymentVersion)
|
||||
.where(
|
||||
and(
|
||||
eq(workflowDeploymentVersion.workflowId, workflowId),
|
||||
eq(workflowDeploymentVersion.isActive, true)
|
||||
)
|
||||
)
|
||||
.limit(1)
|
||||
: Promise.resolve([]),
|
||||
])
|
||||
|
||||
const hasAnyDeployment =
|
||||
isDeployed || chatRows.length > 0 || formRows.length > 0 || mcpRows.length > 0 || a2aRows.length > 0
|
||||
if (!hasAnyDeployment) return null
|
||||
|
||||
// Compute needsRedeployment by comparing current state to deployed state
|
||||
let needsRedeployment: boolean | undefined
|
||||
const deployedVersion = versionRows[0]
|
||||
if (isDeployed && deployedVersion?.state && currentNormalized) {
|
||||
try {
|
||||
const currentState = {
|
||||
blocks: currentNormalized.blocks,
|
||||
edges: currentNormalized.edges,
|
||||
loops: currentNormalized.loops,
|
||||
parallels: currentNormalized.parallels,
|
||||
}
|
||||
needsRedeployment = hasWorkflowChanged(currentState as any, deployedVersion.state as any)
|
||||
} catch (err) {
|
||||
logger.warn('Failed to compute needsRedeployment', {
|
||||
workflowId,
|
||||
error: err instanceof Error ? err.message : String(err),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
workflowId,
|
||||
isDeployed,
|
||||
deployedAt,
|
||||
needsRedeployment,
|
||||
api: deployedVersion ? { version: deployedVersion.version, createdAt: deployedVersion.createdAt } : null,
|
||||
chat: chatRows[0] ?? null,
|
||||
form: formRows[0] ?? null,
|
||||
mcp: mcpRows,
|
||||
a2a: a2aRows[0] ?? null,
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Materialize all custom tools in the workspace.
|
||||
*/
|
||||
private async materializeCustomTools(workspaceId: string): Promise<void> {
|
||||
try {
|
||||
const toolRows = await db
|
||||
.select({
|
||||
id: customTools.id,
|
||||
title: customTools.title,
|
||||
schema: customTools.schema,
|
||||
code: customTools.code,
|
||||
})
|
||||
.from(customTools)
|
||||
.where(eq(customTools.workspaceId, workspaceId))
|
||||
|
||||
for (const tool of toolRows) {
|
||||
const safeName = sanitizeName(tool.title)
|
||||
this.files.set(
|
||||
`custom-tools/${safeName}.json`,
|
||||
serializeCustomTool({
|
||||
id: tool.id,
|
||||
title: tool.title,
|
||||
schema: tool.schema,
|
||||
code: tool.code,
|
||||
})
|
||||
)
|
||||
}
|
||||
} catch (err) {
|
||||
logger.warn('Failed to materialize custom tools', {
|
||||
workspaceId,
|
||||
error: err instanceof Error ? err.message : String(err),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Materialize environment data: credentials, API keys, env variable names.
|
||||
*/
|
||||
private async materializeEnvironment(workspaceId: string, userId: string): Promise<void> {
|
||||
try {
|
||||
// OAuth credentials — which integrations are connected (no tokens)
|
||||
const oauthRows = await db
|
||||
.select({
|
||||
providerId: account.providerId,
|
||||
scope: account.scope,
|
||||
createdAt: account.createdAt,
|
||||
})
|
||||
.from(account)
|
||||
.where(eq(account.userId, userId))
|
||||
|
||||
this.files.set('environment/credentials.json', serializeCredentials(oauthRows))
|
||||
|
||||
// API keys — names and types (no key values)
|
||||
const apiKeyRows = await db
|
||||
.select({
|
||||
id: apiKey.id,
|
||||
name: apiKey.name,
|
||||
type: apiKey.type,
|
||||
lastUsed: apiKey.lastUsed,
|
||||
createdAt: apiKey.createdAt,
|
||||
expiresAt: apiKey.expiresAt,
|
||||
})
|
||||
.from(apiKey)
|
||||
.where(eq(apiKey.workspaceId, workspaceId))
|
||||
|
||||
this.files.set('environment/api-keys.json', serializeApiKeys(apiKeyRows))
|
||||
|
||||
// Environment variables — names only (no values)
|
||||
let personalVarNames: string[] = []
|
||||
let workspaceVarNames: string[] = []
|
||||
|
||||
const [personalEnv] = await db
|
||||
.select({ variables: environment.variables })
|
||||
.from(environment)
|
||||
.where(eq(environment.userId, userId))
|
||||
|
||||
if (personalEnv?.variables && typeof personalEnv.variables === 'object') {
|
||||
personalVarNames = Object.keys(personalEnv.variables as Record<string, unknown>)
|
||||
}
|
||||
|
||||
const [workspaceEnv] = await db
|
||||
.select({ variables: workspaceEnvironment.variables })
|
||||
.from(workspaceEnvironment)
|
||||
.where(eq(workspaceEnvironment.workspaceId, workspaceId))
|
||||
|
||||
if (workspaceEnv?.variables && typeof workspaceEnv.variables === 'object') {
|
||||
workspaceVarNames = Object.keys(workspaceEnv.variables as Record<string, unknown>)
|
||||
}
|
||||
|
||||
this.files.set(
|
||||
'environment/variables.json',
|
||||
serializeEnvironmentVariables(personalVarNames, workspaceVarNames)
|
||||
)
|
||||
} catch (err) {
|
||||
logger.warn('Failed to materialize environment data', {
|
||||
workspaceId,
|
||||
error: err instanceof Error ? err.message : String(err),
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a fresh VFS for a workspace.
|
||||
* Dynamic data (workflows, KBs, env) is always fetched fresh.
|
||||
* Static component files (blocks, integrations) are cached per-process.
|
||||
*/
|
||||
export async function getOrMaterializeVFS(
|
||||
workspaceId: string,
|
||||
userId: string
|
||||
): Promise<WorkspaceVFS> {
|
||||
const vfs = new WorkspaceVFS()
|
||||
await vfs.materialize(workspaceId, userId)
|
||||
return vfs
|
||||
}
|
||||
|
||||
/**
|
||||
* Sanitize a name for use as a VFS path segment.
|
||||
* Uses the raw name as-is — only trims whitespace and replaces forward
|
||||
* slashes (which would break path hierarchy).
|
||||
*/
|
||||
function sanitizeName(name: string): string {
|
||||
return name.trim().replace(/\//g, '-')
|
||||
}
|
||||
|
||||
114
apps/sim/lib/copilot/workspace-prompt.ts
Normal file
114
apps/sim/lib/copilot/workspace-prompt.ts
Normal file
@@ -0,0 +1,114 @@
|
||||
/**
|
||||
* System prompt for workspace-level chat.
|
||||
*
|
||||
* Sent as `systemPrompt` in the Go request payload, which overrides the
|
||||
* default agent prompt (see copilot/internal/chat/service.go:300-303).
|
||||
*
|
||||
* Only references subagents available in agent mode (build and discovery
|
||||
* are excluded from agent mode tools in the Go backend).
|
||||
*/
|
||||
export function getWorkspaceChatSystemPrompt(): string {
|
||||
const currentDate = new Date().toISOString().split('T')[0]
|
||||
return `# Sim Workspace Assistant
|
||||
|
||||
Current Date: ${currentDate}
|
||||
|
||||
You are the Sim workspace assistant — a helpful AI that manages an entire workspace of workflows. The user is chatting from the workspace level, not from within a specific workflow.
|
||||
|
||||
## Your Role
|
||||
|
||||
You help users with their workspace: answering questions, building and debugging workflows, managing integrations, and providing guidance. You delegate complex tasks to specialized subagents.
|
||||
|
||||
## Platform Knowledge
|
||||
|
||||
Sim is a workflow automation platform. Workflows are visual pipelines of blocks (Agent, Function, Condition, Router, API, etc.). Workflows can be triggered manually, via API, webhooks, or schedules. They can be deployed as APIs, Chat UIs, or MCP tools.
|
||||
|
||||
## Subagents
|
||||
|
||||
You have access to these specialized subagents. Call them by name to delegate tasks:
|
||||
|
||||
| Subagent | Purpose | When to Use |
|
||||
|----------|---------|-------------|
|
||||
| **plan** | Gather info, create execution plans | Building new workflows, planning fixes |
|
||||
| **edit** | Execute plans, make workflow changes | ONLY after plan returns steps |
|
||||
| **debug** | Investigate errors, provide diagnosis | User reports something broken |
|
||||
| **test** | Run workflow, verify results | After edits to validate |
|
||||
| **deploy** | Deploy/undeploy workflows | Publish as API, Chat, or MCP |
|
||||
| **workflow** | Env vars, settings, list workflows | Configuration and workflow discovery |
|
||||
| **auth** | Connect OAuth integrations | Slack, Gmail, Google Sheets, etc. |
|
||||
| **knowledge** | Create/query knowledge bases | RAG, document search |
|
||||
| **research** | External API docs, best practices | Stripe, Twilio, etc. |
|
||||
| **info** | Block details, outputs, variables | Quick lookups about workflow state |
|
||||
| **superagent** | Interact with external services NOW | Read emails, send Slack, check calendar |
|
||||
|
||||
## Direct Tools
|
||||
|
||||
- **search_online** — Search the web for information.
|
||||
- **memory_file_read(file_path)** — Read a persistent memory file.
|
||||
- **memory_file_write(file_path, content)** — Write/update a persistent memory file.
|
||||
- **memory_file_list()** — List all memory files.
|
||||
- **grep(pattern, path?)** — Search workspace VFS file contents.
|
||||
- **glob(pattern)** — Find workspace VFS files by path pattern.
|
||||
- **read(path)** — Read a workspace VFS file.
|
||||
- **list(path)** — List workspace VFS directory entries.
|
||||
- **create_workflow(name, description?)** — Create a new workflow.
|
||||
- **update_workflow(workflowId, name?, description?)** — Update workflow name or description.
|
||||
- **delete_workflow(workflowId)** — Delete a workflow.
|
||||
- **rename_folder(folderId, name)** — Rename a folder.
|
||||
- **delete_folder(folderId)** — Delete a folder (moves contents to parent).
|
||||
|
||||
## Workspace Virtual Filesystem (VFS)
|
||||
|
||||
Your workspace data is available as a virtual filesystem. Use grep/glob/read/list to explore it before taking action.
|
||||
|
||||
\`\`\`
|
||||
workflows/{name}/
|
||||
meta.json — name, description, id, run stats
|
||||
blocks.json — workflow block graph (sanitized)
|
||||
edges.json — block connections
|
||||
executions.json — last 5 run results
|
||||
deployment.json — all deployment configs (api, chat, form, mcp, a2a)
|
||||
knowledgebases/{name}/
|
||||
meta.json — KB identity, embedding config, stats
|
||||
documents.json — document metadata
|
||||
custom-tools/{name}.json — custom tool schema + code preview
|
||||
environment/
|
||||
credentials.json — connected OAuth providers
|
||||
api-keys.json — API key metadata (names, not values)
|
||||
variables.json — env variable names (not values)
|
||||
components/
|
||||
blocks/{type}.json — block type schemas
|
||||
integrations/{svc}/{op}.json — integration tool schemas
|
||||
\`\`\`
|
||||
|
||||
**Tips**: Use \`glob("workflows/*/deployment.json")\` to see which workflows are deployed and how. Use \`grep("error", "workflows/")\` to find workflows with recent errors.
|
||||
|
||||
## Memory Management
|
||||
|
||||
You have persistent memory files that survive across conversations:
|
||||
- **SOUL.md** — Your personality and behavioral guidelines. Read this at the start of conversations.
|
||||
- **USER.md** — Information about the user. Update as you learn preferences and context.
|
||||
- **MEMORY.md** — Key learnings, decisions, and important context. Update after significant interactions.
|
||||
|
||||
**At conversation start**: Read SOUL.md and MEMORY.md to load your persistent context.
|
||||
**During conversation**: When the user shares important preferences or you make key decisions, update the relevant file.
|
||||
**Important**: Only write to files when there's genuinely new, important information. Don't update on every message.
|
||||
|
||||
## Decision Flow
|
||||
|
||||
- User says something broke → **debug()** first, then plan() → edit()
|
||||
- User wants to build/automate something → **plan()** → edit() → test()
|
||||
- User wants to DO something NOW (send email, check calendar) → **superagent()**
|
||||
- User wants to deploy → **deploy()**
|
||||
- User asks about their workflows → **workflow()** or **info()**
|
||||
- User needs OAuth → **auth()**
|
||||
|
||||
## Important
|
||||
|
||||
- **You work at the workspace level.** When a user mentions a workflow, ask for the workflow name or ID if not provided.
|
||||
- **Always delegate complex work** to the appropriate subagent.
|
||||
- **Debug first** when something doesn't work — don't guess.
|
||||
- Be concise and results-focused.
|
||||
- Think internally, speak to the user only when the task is complete or you need input.
|
||||
`
|
||||
}
|
||||
@@ -30,7 +30,8 @@ export async function resolveWorkflowIdForUser(
|
||||
if (!authorization.allowed) {
|
||||
return null
|
||||
}
|
||||
return { workflowId }
|
||||
const wf = await getWorkflowById(workflowId)
|
||||
return { workflowId, workflowName: wf?.name || undefined }
|
||||
}
|
||||
|
||||
const workspaceIds = await db
|
||||
|
||||
@@ -1650,7 +1650,7 @@ export const useCopilotStore = create<CopilotStore>()(
|
||||
map[id] = {
|
||||
...current,
|
||||
state: norm,
|
||||
display: resolveToolDisplay(current.name, norm, id, current.params),
|
||||
display: resolveToolDisplay(current.name, norm, id, current.params, current.serverUI),
|
||||
}
|
||||
set({ toolCallsById: map })
|
||||
} catch (error) {
|
||||
@@ -1671,7 +1671,7 @@ export const useCopilotStore = create<CopilotStore>()(
|
||||
map[toolCallId] = {
|
||||
...current,
|
||||
params: updatedParams,
|
||||
display: resolveToolDisplay(current.name, current.state, toolCallId, updatedParams),
|
||||
display: resolveToolDisplay(current.name, current.state, toolCallId, updatedParams, current.serverUI),
|
||||
}
|
||||
set({ toolCallsById: map })
|
||||
} catch (error) {
|
||||
@@ -1728,7 +1728,7 @@ export const useCopilotStore = create<CopilotStore>()(
|
||||
|
||||
// Update store map
|
||||
const updatedMap = { ...toolCallsById }
|
||||
const updatedDisplay = resolveToolDisplay(current.name, targetState, id, current.params)
|
||||
const updatedDisplay = resolveToolDisplay(current.name, targetState, id, current.params, current.serverUI)
|
||||
updatedMap[id] = {
|
||||
...current,
|
||||
state: targetState,
|
||||
|
||||
@@ -4,6 +4,7 @@ import type { AvailableModel } from '@/lib/copilot/types'
|
||||
export type { CopilotMode, CopilotModelId } from '@/lib/copilot/models'
|
||||
|
||||
import type { ClientContentBlock } from '@/lib/copilot/client-sse/types'
|
||||
import type { ServerToolUI } from '@/lib/copilot/store-utils'
|
||||
import type { ClientToolCallState, ClientToolDisplay } from '@/lib/copilot/tools/client/base-tool'
|
||||
import type { WorkflowState } from '@/stores/workflows/workflow/types'
|
||||
|
||||
@@ -26,6 +27,8 @@ export interface CopilotToolCall {
|
||||
params?: Record<string, unknown>
|
||||
input?: Record<string, unknown>
|
||||
display?: ClientToolDisplay
|
||||
/** UI metadata from the copilot SSE event (used as fallback for unregistered tools) */
|
||||
serverUI?: ServerToolUI
|
||||
/** Content streamed from a subagent (e.g., debug agent) */
|
||||
subAgentContent?: string
|
||||
/** Tool calls made by the subagent */
|
||||
|
||||
@@ -178,33 +178,34 @@ export const useWorkflowDiffStore = create<WorkflowDiffState & WorkflowDiffActio
|
||||
edges: candidateState.edges?.length || 0,
|
||||
})
|
||||
|
||||
// BACKGROUND: Broadcast and persist without blocking
|
||||
// These operations happen after the UI has already updated
|
||||
const cleanState = stripWorkflowDiffMarkers(cloneWorkflowState(candidateState))
|
||||
// When skipPersist is set, the server tool (edit_workflow) already
|
||||
// saved to DB. Both the Socket.IO broadcast and HTTP persist would
|
||||
// race with subsequent edit_workflow calls and overwrite newer state,
|
||||
// causing block IDs to thrash.
|
||||
if (!options?.skipPersist) {
|
||||
const cleanState = stripWorkflowDiffMarkers(cloneWorkflowState(candidateState))
|
||||
|
||||
// Fire and forget: broadcast to other users (don't await)
|
||||
enqueueReplaceWorkflowState({
|
||||
workflowId: activeWorkflowId,
|
||||
state: cleanState,
|
||||
}).catch((error) => {
|
||||
logger.warn('Failed to broadcast workflow state (non-blocking)', { error })
|
||||
})
|
||||
enqueueReplaceWorkflowState({
|
||||
workflowId: activeWorkflowId,
|
||||
state: cleanState,
|
||||
}).catch((error) => {
|
||||
logger.warn('Failed to broadcast workflow state (non-blocking)', { error })
|
||||
})
|
||||
|
||||
// Fire and forget: persist to database (don't await)
|
||||
persistWorkflowStateToServer(activeWorkflowId, candidateState)
|
||||
.then((persisted) => {
|
||||
if (!persisted) {
|
||||
logger.warn('Failed to persist copilot edits (state already applied locally)')
|
||||
// Don't revert - user can retry or state will sync on next save
|
||||
} else {
|
||||
logger.info('Workflow diff persisted to database', {
|
||||
workflowId: activeWorkflowId,
|
||||
})
|
||||
}
|
||||
})
|
||||
.catch((error) => {
|
||||
logger.warn('Failed to persist workflow state (non-blocking)', { error })
|
||||
})
|
||||
persistWorkflowStateToServer(activeWorkflowId, candidateState)
|
||||
.then((persisted) => {
|
||||
if (!persisted) {
|
||||
logger.warn('Failed to persist copilot edits (state already applied locally)')
|
||||
} else {
|
||||
logger.info('Workflow diff persisted to database', {
|
||||
workflowId: activeWorkflowId,
|
||||
})
|
||||
}
|
||||
})
|
||||
.catch((error) => {
|
||||
logger.warn('Failed to persist workflow state (non-blocking)', { error })
|
||||
})
|
||||
}
|
||||
|
||||
// Emit event for undo/redo recording
|
||||
if (!options?.skipRecording) {
|
||||
|
||||
@@ -16,6 +16,8 @@ export interface WorkflowDiffState {
|
||||
export interface DiffActionOptions {
|
||||
/** Skip recording this operation for undo/redo. Used during undo/redo replay. */
|
||||
skipRecording?: boolean
|
||||
/** Skip persisting to DB. Use when the server tool already saved (e.g. edit_workflow). */
|
||||
skipPersist?: boolean
|
||||
}
|
||||
|
||||
export interface WorkflowDiffActions {
|
||||
|
||||
4
packages/db/migrations/0155_cuddly_slapstick.sql
Normal file
4
packages/db/migrations/0155_cuddly_slapstick.sql
Normal file
@@ -0,0 +1,4 @@
|
||||
ALTER TABLE "copilot_chats" ALTER COLUMN "workflow_id" DROP NOT NULL;--> statement-breakpoint
|
||||
ALTER TABLE "copilot_chats" ADD COLUMN "workspace_id" text;--> statement-breakpoint
|
||||
ALTER TABLE "copilot_chats" ADD CONSTRAINT "copilot_chats_workspace_id_workspace_id_fk" FOREIGN KEY ("workspace_id") REFERENCES "public"."workspace"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
||||
CREATE INDEX "copilot_chats_user_workspace_idx" ON "copilot_chats" USING btree ("user_id","workspace_id");
|
||||
11512
packages/db/migrations/meta/0155_snapshot.json
Normal file
11512
packages/db/migrations/meta/0155_snapshot.json
Normal file
File diff suppressed because it is too large
Load Diff
@@ -1079,6 +1079,13 @@
|
||||
"when": 1770869658697,
|
||||
"tag": "0154_bumpy_living_mummy",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 155,
|
||||
"version": "7",
|
||||
"when": 1771370340147,
|
||||
"tag": "0155_cuddly_slapstick",
|
||||
"breakpoints": true
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
@@ -1499,9 +1499,8 @@ export const copilotChats = pgTable(
|
||||
userId: text('user_id')
|
||||
.notNull()
|
||||
.references(() => user.id, { onDelete: 'cascade' }),
|
||||
workflowId: text('workflow_id')
|
||||
.notNull()
|
||||
.references(() => workflow.id, { onDelete: 'cascade' }),
|
||||
workflowId: text('workflow_id').references(() => workflow.id, { onDelete: 'cascade' }),
|
||||
workspaceId: text('workspace_id').references(() => workspace.id, { onDelete: 'cascade' }),
|
||||
title: text('title'),
|
||||
messages: jsonb('messages').notNull().default('[]'),
|
||||
model: text('model').notNull().default('claude-3-7-sonnet-latest'),
|
||||
@@ -1518,6 +1517,12 @@ export const copilotChats = pgTable(
|
||||
workflowIdIdx: index('copilot_chats_workflow_id_idx').on(table.workflowId),
|
||||
userWorkflowIdx: index('copilot_chats_user_workflow_idx').on(table.userId, table.workflowId),
|
||||
|
||||
// Workspace access pattern
|
||||
userWorkspaceIdx2: index('copilot_chats_user_workspace_idx').on(
|
||||
table.userId,
|
||||
table.workspaceId
|
||||
),
|
||||
|
||||
// Ordering indexes
|
||||
createdAtIdx: index('copilot_chats_created_at_idx').on(table.createdAt),
|
||||
updatedAtIdx: index('copilot_chats_updated_at_idx').on(table.updatedAt),
|
||||
|
||||
Reference in New Issue
Block a user