mirror of
https://github.com/simstudioai/sim.git
synced 2026-02-09 14:14:57 -05:00
Compare commits
50 Commits
v0.5.84
...
feat/the-c
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
cd473ecc24 | ||
|
|
9f656ee8b8 | ||
|
|
79babe05b9 | ||
|
|
13919e8c47 | ||
|
|
c51d2bcc42 | ||
|
|
c8e9aee55a | ||
|
|
38e2aa0efa | ||
|
|
2a7ebfb396 | ||
|
|
ca76e38e8c | ||
|
|
dad6fd68fa | ||
|
|
d1a2d661c9 | ||
|
|
f63ed61bc8 | ||
|
|
0f5eb9d351 | ||
|
|
665cc6a3d1 | ||
|
|
a7341cdcd3 | ||
|
|
92efd817d2 | ||
|
|
3d5321d9a1 | ||
|
|
13c8621513 | ||
|
|
529d382d49 | ||
|
|
fe70beb751 | ||
|
|
529233bfb6 | ||
|
|
43a32a627f | ||
|
|
ea7a07a0d0 | ||
|
|
3f3d5b276d | ||
|
|
ef4cae48f2 | ||
|
|
460935c032 | ||
|
|
5fc5f46733 | ||
|
|
8d70132a4b | ||
|
|
c045580230 | ||
|
|
bff3f03ba6 | ||
|
|
c20a5633bf | ||
|
|
1c23805782 | ||
|
|
bb6b182d24 | ||
|
|
b7aaa53300 | ||
|
|
8d477c0bed | ||
|
|
01371c8809 | ||
|
|
490b6bde08 | ||
|
|
9073c1a0bf | ||
|
|
d0329e14e5 | ||
|
|
d1b2e6c757 | ||
|
|
decc19e73b | ||
|
|
d79fcab659 | ||
|
|
c72e244655 | ||
|
|
7bb3dd6103 | ||
|
|
75b62423bc | ||
|
|
565167d3b3 | ||
|
|
9ff5237a2e | ||
|
|
e9b80c566c | ||
|
|
664ce3168c | ||
|
|
5d82f7ae73 |
@@ -56,7 +56,7 @@ Switch between modes using the mode selector at the bottom of the input area.
|
|||||||
Select your preferred AI model using the model selector at the bottom right of the input area.
|
Select your preferred AI model using the model selector at the bottom right of the input area.
|
||||||
|
|
||||||
**Available Models:**
|
**Available Models:**
|
||||||
- Claude 4.5 Opus, Sonnet (default), Haiku
|
- Claude 4.6 Opus (default), 4.5 Opus, Sonnet, Haiku
|
||||||
- GPT 5.2 Codex, Pro
|
- GPT 5.2 Codex, Pro
|
||||||
- Gemini 3 Pro
|
- Gemini 3 Pro
|
||||||
|
|
||||||
@@ -190,3 +190,99 @@ Copilot usage is billed per token from the underlying LLM. If you reach your usa
|
|||||||
<Callout type="info">
|
<Callout type="info">
|
||||||
See the [Cost Calculation page](/execution/costs) for billing details.
|
See the [Cost Calculation page](/execution/costs) for billing details.
|
||||||
</Callout>
|
</Callout>
|
||||||
|
## Copilot MCP
|
||||||
|
|
||||||
|
You can use Copilot as an MCP server in your favorite editor or AI client. This lets you build, test, deploy, and manage Sim workflows directly from tools like Cursor, Claude Code, Claude Desktop, and VS Code.
|
||||||
|
|
||||||
|
### Generating a Copilot API Key
|
||||||
|
|
||||||
|
To connect to the Copilot MCP server, you need a **Copilot API key**:
|
||||||
|
|
||||||
|
1. Go to [sim.ai](https://sim.ai) and sign in
|
||||||
|
2. Navigate to **Settings** → **Copilot**
|
||||||
|
3. Click **Generate API Key**
|
||||||
|
4. Copy the key — it is only shown once
|
||||||
|
|
||||||
|
The key will look like `sk-sim-copilot-...`. You will use this in the configuration below.
|
||||||
|
|
||||||
|
### Cursor
|
||||||
|
|
||||||
|
Add the following to your `.cursor/mcp.json` (project-level) or global Cursor MCP settings:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"mcpServers": {
|
||||||
|
"sim-copilot": {
|
||||||
|
"url": "https://www.sim.ai/api/mcp/copilot",
|
||||||
|
"headers": {
|
||||||
|
"X-API-Key": "YOUR_COPILOT_API_KEY"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Replace `YOUR_COPILOT_API_KEY` with the key you generated above.
|
||||||
|
|
||||||
|
### Claude Code
|
||||||
|
|
||||||
|
Run the following command to add the Copilot MCP server:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
claude mcp add sim-copilot \
|
||||||
|
--transport http \
|
||||||
|
https://www.sim.ai/api/mcp/copilot \
|
||||||
|
--header "X-API-Key: YOUR_COPILOT_API_KEY"
|
||||||
|
```
|
||||||
|
|
||||||
|
Replace `YOUR_COPILOT_API_KEY` with your key.
|
||||||
|
|
||||||
|
### Claude Desktop
|
||||||
|
|
||||||
|
Claude Desktop requires [`mcp-remote`](https://www.npmjs.com/package/mcp-remote) to connect to HTTP-based MCP servers. Add the following to your Claude Desktop config file (`~/Library/Application Support/Claude/claude_desktop_config.json` on macOS):
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"mcpServers": {
|
||||||
|
"sim-copilot": {
|
||||||
|
"command": "npx",
|
||||||
|
"args": [
|
||||||
|
"-y",
|
||||||
|
"mcp-remote",
|
||||||
|
"https://www.sim.ai/api/mcp/copilot",
|
||||||
|
"--header",
|
||||||
|
"X-API-Key: YOUR_COPILOT_API_KEY"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Replace `YOUR_COPILOT_API_KEY` with your key.
|
||||||
|
|
||||||
|
### VS Code
|
||||||
|
|
||||||
|
Add the following to your VS Code `settings.json` or workspace `.vscode/settings.json`:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"mcp": {
|
||||||
|
"servers": {
|
||||||
|
"sim-copilot": {
|
||||||
|
"type": "http",
|
||||||
|
"url": "https://www.sim.ai/api/mcp/copilot",
|
||||||
|
"headers": {
|
||||||
|
"X-API-Key": "YOUR_COPILOT_API_KEY"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Replace `YOUR_COPILOT_API_KEY` with your key.
|
||||||
|
|
||||||
|
<Callout type="info">
|
||||||
|
For self-hosted deployments, replace `https://www.sim.ai` with your self-hosted Sim URL.
|
||||||
|
</Callout>
|
||||||
|
|
||||||
|
|||||||
@@ -0,0 +1,6 @@
|
|||||||
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
|
import { createMcpAuthorizationServerMetadataResponse } from '@/lib/mcp/oauth-discovery'
|
||||||
|
|
||||||
|
export async function GET(request: NextRequest): Promise<NextResponse> {
|
||||||
|
return createMcpAuthorizationServerMetadataResponse(request)
|
||||||
|
}
|
||||||
@@ -0,0 +1,6 @@
|
|||||||
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
|
import { createMcpAuthorizationServerMetadataResponse } from '@/lib/mcp/oauth-discovery'
|
||||||
|
|
||||||
|
export async function GET(request: NextRequest): Promise<NextResponse> {
|
||||||
|
return createMcpAuthorizationServerMetadataResponse(request)
|
||||||
|
}
|
||||||
@@ -0,0 +1,6 @@
|
|||||||
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
|
import { createMcpAuthorizationServerMetadataResponse } from '@/lib/mcp/oauth-discovery'
|
||||||
|
|
||||||
|
export async function GET(request: NextRequest): Promise<NextResponse> {
|
||||||
|
return createMcpAuthorizationServerMetadataResponse(request)
|
||||||
|
}
|
||||||
@@ -0,0 +1,6 @@
|
|||||||
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
|
import { createMcpProtectedResourceMetadataResponse } from '@/lib/mcp/oauth-discovery'
|
||||||
|
|
||||||
|
export async function GET(request: NextRequest): Promise<NextResponse> {
|
||||||
|
return createMcpProtectedResourceMetadataResponse(request)
|
||||||
|
}
|
||||||
@@ -0,0 +1,6 @@
|
|||||||
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
|
import { createMcpProtectedResourceMetadataResponse } from '@/lib/mcp/oauth-discovery'
|
||||||
|
|
||||||
|
export async function GET(request: NextRequest): Promise<NextResponse> {
|
||||||
|
return createMcpProtectedResourceMetadataResponse(request)
|
||||||
|
}
|
||||||
@@ -18,6 +18,7 @@ const UpdateCostSchema = z.object({
|
|||||||
model: z.string().min(1, 'Model is required'),
|
model: z.string().min(1, 'Model is required'),
|
||||||
inputTokens: z.number().min(0).default(0),
|
inputTokens: z.number().min(0).default(0),
|
||||||
outputTokens: z.number().min(0).default(0),
|
outputTokens: z.number().min(0).default(0),
|
||||||
|
source: z.enum(['copilot', 'mcp_copilot']).default('copilot'),
|
||||||
})
|
})
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -75,12 +76,14 @@ export async function POST(req: NextRequest) {
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
const { userId, cost, model, inputTokens, outputTokens } = validation.data
|
const { userId, cost, model, inputTokens, outputTokens, source } = validation.data
|
||||||
|
const isMcp = source === 'mcp_copilot'
|
||||||
|
|
||||||
logger.info(`[${requestId}] Processing cost update`, {
|
logger.info(`[${requestId}] Processing cost update`, {
|
||||||
userId,
|
userId,
|
||||||
cost,
|
cost,
|
||||||
model,
|
model,
|
||||||
|
source,
|
||||||
})
|
})
|
||||||
|
|
||||||
// Check if user stats record exists (same as ExecutionLogger)
|
// Check if user stats record exists (same as ExecutionLogger)
|
||||||
@@ -96,7 +99,7 @@ export async function POST(req: NextRequest) {
|
|||||||
return NextResponse.json({ error: 'User stats record not found' }, { status: 500 })
|
return NextResponse.json({ error: 'User stats record not found' }, { status: 500 })
|
||||||
}
|
}
|
||||||
|
|
||||||
const updateFields = {
|
const updateFields: Record<string, unknown> = {
|
||||||
totalCost: sql`total_cost + ${cost}`,
|
totalCost: sql`total_cost + ${cost}`,
|
||||||
currentPeriodCost: sql`current_period_cost + ${cost}`,
|
currentPeriodCost: sql`current_period_cost + ${cost}`,
|
||||||
totalCopilotCost: sql`total_copilot_cost + ${cost}`,
|
totalCopilotCost: sql`total_copilot_cost + ${cost}`,
|
||||||
@@ -105,17 +108,24 @@ export async function POST(req: NextRequest) {
|
|||||||
lastActive: new Date(),
|
lastActive: new Date(),
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Also increment MCP-specific counters when source is mcp_copilot
|
||||||
|
if (isMcp) {
|
||||||
|
updateFields.totalMcpCopilotCost = sql`total_mcp_copilot_cost + ${cost}`
|
||||||
|
updateFields.currentPeriodMcpCopilotCost = sql`current_period_mcp_copilot_cost + ${cost}`
|
||||||
|
}
|
||||||
|
|
||||||
await db.update(userStats).set(updateFields).where(eq(userStats.userId, userId))
|
await db.update(userStats).set(updateFields).where(eq(userStats.userId, userId))
|
||||||
|
|
||||||
logger.info(`[${requestId}] Updated user stats record`, {
|
logger.info(`[${requestId}] Updated user stats record`, {
|
||||||
userId,
|
userId,
|
||||||
addedCost: cost,
|
addedCost: cost,
|
||||||
|
source,
|
||||||
})
|
})
|
||||||
|
|
||||||
// Log usage for complete audit trail
|
// Log usage for complete audit trail
|
||||||
await logModelUsage({
|
await logModelUsage({
|
||||||
userId,
|
userId,
|
||||||
source: 'copilot',
|
source: isMcp ? 'mcp_copilot' : 'copilot',
|
||||||
model,
|
model,
|
||||||
inputTokens,
|
inputTokens,
|
||||||
outputTokens,
|
outputTokens,
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
import { type NextRequest, NextResponse } from 'next/server'
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { getSession } from '@/lib/auth'
|
import { getSession } from '@/lib/auth'
|
||||||
import { SIM_AGENT_API_URL_DEFAULT } from '@/lib/copilot/constants'
|
import { SIM_AGENT_API_URL } from '@/lib/copilot/constants'
|
||||||
import { env } from '@/lib/core/config/env'
|
import { env } from '@/lib/core/config/env'
|
||||||
|
|
||||||
const GenerateApiKeySchema = z.object({
|
const GenerateApiKeySchema = z.object({
|
||||||
@@ -17,9 +17,6 @@ export async function POST(req: NextRequest) {
|
|||||||
|
|
||||||
const userId = session.user.id
|
const userId = session.user.id
|
||||||
|
|
||||||
// Move environment variable access inside the function
|
|
||||||
const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT
|
|
||||||
|
|
||||||
const body = await req.json().catch(() => ({}))
|
const body = await req.json().catch(() => ({}))
|
||||||
const validationResult = GenerateApiKeySchema.safeParse(body)
|
const validationResult = GenerateApiKeySchema.safeParse(body)
|
||||||
|
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
import { type NextRequest, NextResponse } from 'next/server'
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
import { getSession } from '@/lib/auth'
|
import { getSession } from '@/lib/auth'
|
||||||
import { SIM_AGENT_API_URL_DEFAULT } from '@/lib/copilot/constants'
|
import { SIM_AGENT_API_URL } from '@/lib/copilot/constants'
|
||||||
import { env } from '@/lib/core/config/env'
|
import { env } from '@/lib/core/config/env'
|
||||||
|
|
||||||
export async function GET(request: NextRequest) {
|
export async function GET(request: NextRequest) {
|
||||||
@@ -12,8 +12,6 @@ export async function GET(request: NextRequest) {
|
|||||||
|
|
||||||
const userId = session.user.id
|
const userId = session.user.id
|
||||||
|
|
||||||
const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT
|
|
||||||
|
|
||||||
const res = await fetch(`${SIM_AGENT_API_URL}/api/validate-key/get-api-keys`, {
|
const res = await fetch(`${SIM_AGENT_API_URL}/api/validate-key/get-api-keys`, {
|
||||||
method: 'POST',
|
method: 'POST',
|
||||||
headers: {
|
headers: {
|
||||||
@@ -68,8 +66,6 @@ export async function DELETE(request: NextRequest) {
|
|||||||
return NextResponse.json({ error: 'id is required' }, { status: 400 })
|
return NextResponse.json({ error: 'id is required' }, { status: 400 })
|
||||||
}
|
}
|
||||||
|
|
||||||
const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT
|
|
||||||
|
|
||||||
const res = await fetch(`${SIM_AGENT_API_URL}/api/validate-key/delete`, {
|
const res = await fetch(`${SIM_AGENT_API_URL}/api/validate-key/delete`, {
|
||||||
method: 'POST',
|
method: 'POST',
|
||||||
headers: {
|
headers: {
|
||||||
|
|||||||
@@ -5,10 +5,18 @@ import { and, desc, eq } from 'drizzle-orm'
|
|||||||
import { type NextRequest, NextResponse } from 'next/server'
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { getSession } from '@/lib/auth'
|
import { getSession } from '@/lib/auth'
|
||||||
|
import { buildConversationHistory } from '@/lib/copilot/chat-context'
|
||||||
|
import { resolveOrCreateChat } from '@/lib/copilot/chat-lifecycle'
|
||||||
|
import { buildCopilotRequestPayload } from '@/lib/copilot/chat-payload'
|
||||||
import { generateChatTitle } from '@/lib/copilot/chat-title'
|
import { generateChatTitle } from '@/lib/copilot/chat-title'
|
||||||
import { getCopilotModel } from '@/lib/copilot/config'
|
import { getCopilotModel } from '@/lib/copilot/config'
|
||||||
import { SIM_AGENT_API_URL_DEFAULT, SIM_AGENT_VERSION } from '@/lib/copilot/constants'
|
|
||||||
import { COPILOT_MODEL_IDS, COPILOT_REQUEST_MODES } from '@/lib/copilot/models'
|
import { COPILOT_MODEL_IDS, COPILOT_REQUEST_MODES } from '@/lib/copilot/models'
|
||||||
|
import { orchestrateCopilotStream } from '@/lib/copilot/orchestrator'
|
||||||
|
import {
|
||||||
|
createStreamEventWriter,
|
||||||
|
resetStreamBuffer,
|
||||||
|
setStreamMeta,
|
||||||
|
} from '@/lib/copilot/orchestrator/stream-buffer'
|
||||||
import {
|
import {
|
||||||
authenticateCopilotRequestSessionOnly,
|
authenticateCopilotRequestSessionOnly,
|
||||||
createBadRequestResponse,
|
createBadRequestResponse,
|
||||||
@@ -16,18 +24,12 @@ import {
|
|||||||
createRequestTracker,
|
createRequestTracker,
|
||||||
createUnauthorizedResponse,
|
createUnauthorizedResponse,
|
||||||
} from '@/lib/copilot/request-helpers'
|
} from '@/lib/copilot/request-helpers'
|
||||||
import { getCredentialsServerTool } from '@/lib/copilot/tools/server/user/get-credentials'
|
|
||||||
import type { CopilotProviderConfig } from '@/lib/copilot/types'
|
import type { CopilotProviderConfig } from '@/lib/copilot/types'
|
||||||
import { env } from '@/lib/core/config/env'
|
import { env } from '@/lib/core/config/env'
|
||||||
import { CopilotFiles } from '@/lib/uploads'
|
import { resolveWorkflowIdForUser } from '@/lib/workflows/utils'
|
||||||
import { createFileContent } from '@/lib/uploads/utils/file-utils'
|
|
||||||
import { tools } from '@/tools/registry'
|
|
||||||
import { getLatestVersionTools, stripVersionSuffix } from '@/tools/utils'
|
|
||||||
|
|
||||||
const logger = createLogger('CopilotChatAPI')
|
const logger = createLogger('CopilotChatAPI')
|
||||||
|
|
||||||
const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT
|
|
||||||
|
|
||||||
const FileAttachmentSchema = z.object({
|
const FileAttachmentSchema = z.object({
|
||||||
id: z.string(),
|
id: z.string(),
|
||||||
key: z.string(),
|
key: z.string(),
|
||||||
@@ -40,8 +42,9 @@ const ChatMessageSchema = z.object({
|
|||||||
message: z.string().min(1, 'Message is required'),
|
message: z.string().min(1, 'Message is required'),
|
||||||
userMessageId: z.string().optional(), // ID from frontend for the user message
|
userMessageId: z.string().optional(), // ID from frontend for the user message
|
||||||
chatId: z.string().optional(),
|
chatId: z.string().optional(),
|
||||||
workflowId: z.string().min(1, 'Workflow ID is required'),
|
workflowId: z.string().optional(),
|
||||||
model: z.enum(COPILOT_MODEL_IDS).optional().default('claude-4.5-opus'),
|
workflowName: z.string().optional(),
|
||||||
|
model: z.enum(COPILOT_MODEL_IDS).optional().default('claude-4.6-opus'),
|
||||||
mode: z.enum(COPILOT_REQUEST_MODES).optional().default('agent'),
|
mode: z.enum(COPILOT_REQUEST_MODES).optional().default('agent'),
|
||||||
prefetch: z.boolean().optional(),
|
prefetch: z.boolean().optional(),
|
||||||
createNewChat: z.boolean().optional().default(false),
|
createNewChat: z.boolean().optional().default(false),
|
||||||
@@ -100,7 +103,8 @@ export async function POST(req: NextRequest) {
|
|||||||
message,
|
message,
|
||||||
userMessageId,
|
userMessageId,
|
||||||
chatId,
|
chatId,
|
||||||
workflowId,
|
workflowId: providedWorkflowId,
|
||||||
|
workflowName,
|
||||||
model,
|
model,
|
||||||
mode,
|
mode,
|
||||||
prefetch,
|
prefetch,
|
||||||
@@ -113,6 +117,20 @@ export async function POST(req: NextRequest) {
|
|||||||
contexts,
|
contexts,
|
||||||
commands,
|
commands,
|
||||||
} = ChatMessageSchema.parse(body)
|
} = ChatMessageSchema.parse(body)
|
||||||
|
|
||||||
|
// Resolve workflowId - if not provided, use first workflow or find by name
|
||||||
|
const resolved = await resolveWorkflowIdForUser(
|
||||||
|
authenticatedUserId,
|
||||||
|
providedWorkflowId,
|
||||||
|
workflowName
|
||||||
|
)
|
||||||
|
if (!resolved) {
|
||||||
|
return createBadRequestResponse(
|
||||||
|
'No workflows found. Create a workflow first or provide a valid workflowId.'
|
||||||
|
)
|
||||||
|
}
|
||||||
|
const workflowId = resolved.workflowId
|
||||||
|
|
||||||
// Ensure we have a consistent user message ID for this request
|
// Ensure we have a consistent user message ID for this request
|
||||||
const userMessageIdToUse = userMessageId || crypto.randomUUID()
|
const userMessageIdToUse = userMessageId || crypto.randomUUID()
|
||||||
try {
|
try {
|
||||||
@@ -157,116 +175,21 @@ export async function POST(req: NextRequest) {
|
|||||||
let conversationHistory: any[] = []
|
let conversationHistory: any[] = []
|
||||||
let actualChatId = chatId
|
let actualChatId = chatId
|
||||||
|
|
||||||
if (chatId) {
|
if (chatId || createNewChat) {
|
||||||
// Load existing chat
|
const defaultsForChatRow = getCopilotModel('chat')
|
||||||
const [chat] = await db
|
const chatResult = await resolveOrCreateChat({
|
||||||
.select()
|
chatId,
|
||||||
.from(copilotChats)
|
userId: authenticatedUserId,
|
||||||
.where(and(eq(copilotChats.id, chatId), eq(copilotChats.userId, authenticatedUserId)))
|
workflowId,
|
||||||
.limit(1)
|
model: defaultsForChatRow.model,
|
||||||
|
})
|
||||||
if (chat) {
|
currentChat = chatResult.chat
|
||||||
currentChat = chat
|
actualChatId = chatResult.chatId || chatId
|
||||||
conversationHistory = Array.isArray(chat.messages) ? chat.messages : []
|
const history = buildConversationHistory(
|
||||||
}
|
chatResult.conversationHistory,
|
||||||
} else if (createNewChat && workflowId) {
|
(chatResult.chat?.conversationId as string | undefined) || conversationId
|
||||||
// Create new chat
|
|
||||||
const { provider, model } = getCopilotModel('chat')
|
|
||||||
const [newChat] = await db
|
|
||||||
.insert(copilotChats)
|
|
||||||
.values({
|
|
||||||
userId: authenticatedUserId,
|
|
||||||
workflowId,
|
|
||||||
title: null,
|
|
||||||
model,
|
|
||||||
messages: [],
|
|
||||||
})
|
|
||||||
.returning()
|
|
||||||
|
|
||||||
if (newChat) {
|
|
||||||
currentChat = newChat
|
|
||||||
actualChatId = newChat.id
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Process file attachments if present
|
|
||||||
const processedFileContents: any[] = []
|
|
||||||
if (fileAttachments && fileAttachments.length > 0) {
|
|
||||||
const processedAttachments = await CopilotFiles.processCopilotAttachments(
|
|
||||||
fileAttachments,
|
|
||||||
tracker.requestId
|
|
||||||
)
|
)
|
||||||
|
conversationHistory = history.history
|
||||||
for (const { buffer, attachment } of processedAttachments) {
|
|
||||||
const fileContent = createFileContent(buffer, attachment.media_type)
|
|
||||||
if (fileContent) {
|
|
||||||
processedFileContents.push(fileContent)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Build messages array for sim agent with conversation history
|
|
||||||
const messages: any[] = []
|
|
||||||
|
|
||||||
// Add conversation history (need to rebuild these with file support if they had attachments)
|
|
||||||
for (const msg of conversationHistory) {
|
|
||||||
if (msg.fileAttachments && msg.fileAttachments.length > 0) {
|
|
||||||
// This is a message with file attachments - rebuild with content array
|
|
||||||
const content: any[] = [{ type: 'text', text: msg.content }]
|
|
||||||
|
|
||||||
const processedHistoricalAttachments = await CopilotFiles.processCopilotAttachments(
|
|
||||||
msg.fileAttachments,
|
|
||||||
tracker.requestId
|
|
||||||
)
|
|
||||||
|
|
||||||
for (const { buffer, attachment } of processedHistoricalAttachments) {
|
|
||||||
const fileContent = createFileContent(buffer, attachment.media_type)
|
|
||||||
if (fileContent) {
|
|
||||||
content.push(fileContent)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
messages.push({
|
|
||||||
role: msg.role,
|
|
||||||
content,
|
|
||||||
})
|
|
||||||
} else {
|
|
||||||
// Regular text-only message
|
|
||||||
messages.push({
|
|
||||||
role: msg.role,
|
|
||||||
content: msg.content,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Add implicit feedback if provided
|
|
||||||
if (implicitFeedback) {
|
|
||||||
messages.push({
|
|
||||||
role: 'system',
|
|
||||||
content: implicitFeedback,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
// Add current user message with file attachments
|
|
||||||
if (processedFileContents.length > 0) {
|
|
||||||
// Message with files - use content array format
|
|
||||||
const content: any[] = [{ type: 'text', text: message }]
|
|
||||||
|
|
||||||
// Add file contents
|
|
||||||
for (const fileContent of processedFileContents) {
|
|
||||||
content.push(fileContent)
|
|
||||||
}
|
|
||||||
|
|
||||||
messages.push({
|
|
||||||
role: 'user',
|
|
||||||
content,
|
|
||||||
})
|
|
||||||
} else {
|
|
||||||
// Text-only message
|
|
||||||
messages.push({
|
|
||||||
role: 'user',
|
|
||||||
content: message,
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
|
|
||||||
const defaults = getCopilotModel('chat')
|
const defaults = getCopilotModel('chat')
|
||||||
@@ -311,239 +234,94 @@ export async function POST(req: NextRequest) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
const effectiveMode = mode === 'agent' ? 'build' : mode
|
const effectiveMode = mode === 'agent' ? 'build' : mode
|
||||||
const transportMode = effectiveMode === 'build' ? 'agent' : effectiveMode
|
|
||||||
|
|
||||||
// Determine conversationId to use for this request
|
|
||||||
const effectiveConversationId =
|
const effectiveConversationId =
|
||||||
(currentChat?.conversationId as string | undefined) || conversationId
|
(currentChat?.conversationId as string | undefined) || conversationId
|
||||||
|
|
||||||
// For agent/build mode, fetch credentials and build tool definitions
|
const requestPayload = await buildCopilotRequestPayload(
|
||||||
let integrationTools: any[] = []
|
{
|
||||||
let baseTools: any[] = []
|
message,
|
||||||
let credentials: {
|
workflowId,
|
||||||
oauth: Record<
|
userId: authenticatedUserId,
|
||||||
string,
|
userMessageId: userMessageIdToUse,
|
||||||
{ accessToken: string; accountId: string; name: string; expiresAt?: string }
|
mode,
|
||||||
>
|
model: selectedModel,
|
||||||
apiKeys: string[]
|
conversationHistory,
|
||||||
metadata?: {
|
contexts: agentContexts,
|
||||||
connectedOAuth: Array<{ provider: string; name: string; scopes?: string[] }>
|
fileAttachments,
|
||||||
configuredApiKeys: string[]
|
commands,
|
||||||
|
chatId: actualChatId,
|
||||||
|
implicitFeedback,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
selectedModel,
|
||||||
}
|
}
|
||||||
} | null = null
|
)
|
||||||
|
|
||||||
if (effectiveMode === 'build') {
|
|
||||||
// Build base tools (executed locally, not deferred)
|
|
||||||
// Include function_execute for code execution capability
|
|
||||||
baseTools = [
|
|
||||||
{
|
|
||||||
name: 'function_execute',
|
|
||||||
description:
|
|
||||||
'Execute JavaScript code to perform calculations, data transformations, API calls, or any programmatic task. Code runs in a secure sandbox with fetch() available. Write plain statements (not wrapped in functions). Example: const res = await fetch(url); const data = await res.json(); return data;',
|
|
||||||
input_schema: {
|
|
||||||
type: 'object',
|
|
||||||
properties: {
|
|
||||||
code: {
|
|
||||||
type: 'string',
|
|
||||||
description:
|
|
||||||
'Raw JavaScript statements to execute. Code is auto-wrapped in async context. Use fetch() for HTTP requests. Write like: const res = await fetch(url); return await res.json();',
|
|
||||||
},
|
|
||||||
},
|
|
||||||
required: ['code'],
|
|
||||||
},
|
|
||||||
executeLocally: true,
|
|
||||||
},
|
|
||||||
]
|
|
||||||
// Fetch user credentials (OAuth + API keys) - pass workflowId to get workspace env vars
|
|
||||||
try {
|
|
||||||
const rawCredentials = await getCredentialsServerTool.execute(
|
|
||||||
{ workflowId },
|
|
||||||
{ userId: authenticatedUserId }
|
|
||||||
)
|
|
||||||
|
|
||||||
// Transform OAuth credentials to map format: { [provider]: { accessToken, accountId, ... } }
|
|
||||||
const oauthMap: Record<
|
|
||||||
string,
|
|
||||||
{ accessToken: string; accountId: string; name: string; expiresAt?: string }
|
|
||||||
> = {}
|
|
||||||
const connectedOAuth: Array<{ provider: string; name: string; scopes?: string[] }> = []
|
|
||||||
for (const cred of rawCredentials?.oauth?.connected?.credentials || []) {
|
|
||||||
if (cred.accessToken) {
|
|
||||||
oauthMap[cred.provider] = {
|
|
||||||
accessToken: cred.accessToken,
|
|
||||||
accountId: cred.id,
|
|
||||||
name: cred.name,
|
|
||||||
}
|
|
||||||
connectedOAuth.push({
|
|
||||||
provider: cred.provider,
|
|
||||||
name: cred.name,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
credentials = {
|
|
||||||
oauth: oauthMap,
|
|
||||||
apiKeys: rawCredentials?.environment?.variableNames || [],
|
|
||||||
metadata: {
|
|
||||||
connectedOAuth,
|
|
||||||
configuredApiKeys: rawCredentials?.environment?.variableNames || [],
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
logger.info(`[${tracker.requestId}] Fetched credentials for build mode`, {
|
|
||||||
oauthProviders: Object.keys(oauthMap),
|
|
||||||
apiKeyCount: credentials.apiKeys.length,
|
|
||||||
})
|
|
||||||
} catch (error) {
|
|
||||||
logger.warn(`[${tracker.requestId}] Failed to fetch credentials`, {
|
|
||||||
error: error instanceof Error ? error.message : String(error),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
// Build tool definitions (schemas only)
|
|
||||||
try {
|
|
||||||
const { createUserToolSchema } = await import('@/tools/params')
|
|
||||||
|
|
||||||
const latestTools = getLatestVersionTools(tools)
|
|
||||||
|
|
||||||
integrationTools = Object.entries(latestTools).map(([toolId, toolConfig]) => {
|
|
||||||
const userSchema = createUserToolSchema(toolConfig)
|
|
||||||
const strippedName = stripVersionSuffix(toolId)
|
|
||||||
return {
|
|
||||||
name: strippedName,
|
|
||||||
description: toolConfig.description || toolConfig.name || strippedName,
|
|
||||||
input_schema: userSchema,
|
|
||||||
defer_loading: true, // Anthropic Advanced Tool Use
|
|
||||||
...(toolConfig.oauth?.required && {
|
|
||||||
oauth: {
|
|
||||||
required: true,
|
|
||||||
provider: toolConfig.oauth.provider,
|
|
||||||
},
|
|
||||||
}),
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
logger.info(`[${tracker.requestId}] Built tool definitions for build mode`, {
|
|
||||||
integrationToolCount: integrationTools.length,
|
|
||||||
})
|
|
||||||
} catch (error) {
|
|
||||||
logger.warn(`[${tracker.requestId}] Failed to build tool definitions`, {
|
|
||||||
error: error instanceof Error ? error.message : String(error),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const requestPayload = {
|
|
||||||
message: message, // Just send the current user message text
|
|
||||||
workflowId,
|
|
||||||
userId: authenticatedUserId,
|
|
||||||
stream: stream,
|
|
||||||
streamToolCalls: true,
|
|
||||||
model: selectedModel,
|
|
||||||
mode: transportMode,
|
|
||||||
messageId: userMessageIdToUse,
|
|
||||||
version: SIM_AGENT_VERSION,
|
|
||||||
...(providerConfig ? { provider: providerConfig } : {}),
|
|
||||||
...(effectiveConversationId ? { conversationId: effectiveConversationId } : {}),
|
|
||||||
...(typeof prefetch === 'boolean' ? { prefetch: prefetch } : {}),
|
|
||||||
...(session?.user?.name && { userName: session.user.name }),
|
|
||||||
...(agentContexts.length > 0 && { context: agentContexts }),
|
|
||||||
...(actualChatId ? { chatId: actualChatId } : {}),
|
|
||||||
...(processedFileContents.length > 0 && { fileAttachments: processedFileContents }),
|
|
||||||
// For build/agent mode, include tools and credentials
|
|
||||||
...(integrationTools.length > 0 && { tools: integrationTools }),
|
|
||||||
...(baseTools.length > 0 && { baseTools }),
|
|
||||||
...(credentials && { credentials }),
|
|
||||||
...(commands && commands.length > 0 && { commands }),
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
try {
|
||||||
logger.info(`[${tracker.requestId}] About to call Sim Agent`, {
|
logger.info(`[${tracker.requestId}] About to call Sim Agent`, {
|
||||||
hasContext: agentContexts.length > 0,
|
hasContext: agentContexts.length > 0,
|
||||||
contextCount: agentContexts.length,
|
contextCount: agentContexts.length,
|
||||||
hasConversationId: !!effectiveConversationId,
|
hasConversationId: !!effectiveConversationId,
|
||||||
hasFileAttachments: processedFileContents.length > 0,
|
hasFileAttachments: Array.isArray(requestPayload.fileAttachments),
|
||||||
messageLength: message.length,
|
messageLength: message.length,
|
||||||
mode: effectiveMode,
|
mode: effectiveMode,
|
||||||
hasTools: integrationTools.length > 0,
|
hasTools: Array.isArray(requestPayload.tools),
|
||||||
toolCount: integrationTools.length,
|
toolCount: Array.isArray(requestPayload.tools) ? requestPayload.tools.length : 0,
|
||||||
hasBaseTools: baseTools.length > 0,
|
hasBaseTools: Array.isArray(requestPayload.baseTools),
|
||||||
baseToolCount: baseTools.length,
|
baseToolCount: Array.isArray(requestPayload.baseTools)
|
||||||
hasCredentials: !!credentials,
|
? requestPayload.baseTools.length
|
||||||
|
: 0,
|
||||||
|
hasCredentials: !!requestPayload.credentials,
|
||||||
})
|
})
|
||||||
} catch {}
|
} catch {}
|
||||||
|
|
||||||
const simAgentResponse = await fetch(`${SIM_AGENT_API_URL}/api/chat-completion-streaming`, {
|
if (stream) {
|
||||||
method: 'POST',
|
const streamId = userMessageIdToUse
|
||||||
headers: {
|
let eventWriter: ReturnType<typeof createStreamEventWriter> | null = null
|
||||||
'Content-Type': 'application/json',
|
let clientDisconnected = false
|
||||||
...(env.COPILOT_API_KEY ? { 'x-api-key': env.COPILOT_API_KEY } : {}),
|
|
||||||
},
|
|
||||||
body: JSON.stringify(requestPayload),
|
|
||||||
})
|
|
||||||
|
|
||||||
if (!simAgentResponse.ok) {
|
|
||||||
if (simAgentResponse.status === 401 || simAgentResponse.status === 402) {
|
|
||||||
// Rethrow status only; client will render appropriate assistant message
|
|
||||||
return new NextResponse(null, { status: simAgentResponse.status })
|
|
||||||
}
|
|
||||||
|
|
||||||
const errorText = await simAgentResponse.text().catch(() => '')
|
|
||||||
logger.error(`[${tracker.requestId}] Sim agent API error:`, {
|
|
||||||
status: simAgentResponse.status,
|
|
||||||
error: errorText,
|
|
||||||
})
|
|
||||||
|
|
||||||
return NextResponse.json(
|
|
||||||
{ error: `Sim agent API error: ${simAgentResponse.statusText}` },
|
|
||||||
{ status: simAgentResponse.status }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
// If streaming is requested, forward the stream and update chat later
|
|
||||||
if (stream && simAgentResponse.body) {
|
|
||||||
// Create user message to save
|
|
||||||
const userMessage = {
|
|
||||||
id: userMessageIdToUse, // Consistent ID used for request and persistence
|
|
||||||
role: 'user',
|
|
||||||
content: message,
|
|
||||||
timestamp: new Date().toISOString(),
|
|
||||||
...(fileAttachments && fileAttachments.length > 0 && { fileAttachments }),
|
|
||||||
...(Array.isArray(contexts) && contexts.length > 0 && { contexts }),
|
|
||||||
...(Array.isArray(contexts) &&
|
|
||||||
contexts.length > 0 && {
|
|
||||||
contentBlocks: [{ type: 'contexts', contexts: contexts as any, timestamp: Date.now() }],
|
|
||||||
}),
|
|
||||||
}
|
|
||||||
|
|
||||||
// Create a pass-through stream that captures the response
|
|
||||||
const transformedStream = new ReadableStream({
|
const transformedStream = new ReadableStream({
|
||||||
async start(controller) {
|
async start(controller) {
|
||||||
const encoder = new TextEncoder()
|
const encoder = new TextEncoder()
|
||||||
let assistantContent = ''
|
|
||||||
const toolCalls: any[] = []
|
|
||||||
let buffer = ''
|
|
||||||
const isFirstDone = true
|
|
||||||
let responseIdFromStart: string | undefined
|
|
||||||
let responseIdFromDone: string | undefined
|
|
||||||
// Track tool call progress to identify a safe done event
|
|
||||||
const announcedToolCallIds = new Set<string>()
|
|
||||||
const startedToolExecutionIds = new Set<string>()
|
|
||||||
const completedToolExecutionIds = new Set<string>()
|
|
||||||
let lastDoneResponseId: string | undefined
|
|
||||||
let lastSafeDoneResponseId: string | undefined
|
|
||||||
|
|
||||||
// Send chatId as first event
|
await resetStreamBuffer(streamId)
|
||||||
if (actualChatId) {
|
await setStreamMeta(streamId, { status: 'active', userId: authenticatedUserId })
|
||||||
const chatIdEvent = `data: ${JSON.stringify({
|
eventWriter = createStreamEventWriter(streamId)
|
||||||
type: 'chat_id',
|
|
||||||
chatId: actualChatId,
|
const shouldFlushEvent = (event: Record<string, any>) =>
|
||||||
})}\n\n`
|
event.type === 'tool_call' ||
|
||||||
controller.enqueue(encoder.encode(chatIdEvent))
|
event.type === 'tool_result' ||
|
||||||
logger.debug(`[${tracker.requestId}] Sent initial chatId event to client`)
|
event.type === 'tool_error' ||
|
||||||
|
event.type === 'subagent_end' ||
|
||||||
|
event.type === 'structured_result' ||
|
||||||
|
event.type === 'subagent_result' ||
|
||||||
|
event.type === 'done' ||
|
||||||
|
event.type === 'error'
|
||||||
|
|
||||||
|
const pushEvent = async (event: Record<string, any>) => {
|
||||||
|
if (!eventWriter) return
|
||||||
|
const entry = await eventWriter.write(event)
|
||||||
|
if (shouldFlushEvent(event)) {
|
||||||
|
await eventWriter.flush()
|
||||||
|
}
|
||||||
|
const payload = {
|
||||||
|
...event,
|
||||||
|
eventId: entry.eventId,
|
||||||
|
streamId,
|
||||||
|
}
|
||||||
|
try {
|
||||||
|
if (!clientDisconnected) {
|
||||||
|
controller.enqueue(encoder.encode(`data: ${JSON.stringify(payload)}\n\n`))
|
||||||
|
}
|
||||||
|
} catch {
|
||||||
|
clientDisconnected = true
|
||||||
|
await eventWriter.flush()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (actualChatId) {
|
||||||
|
await pushEvent({ type: 'chat_id', chatId: actualChatId })
|
||||||
}
|
}
|
||||||
|
|
||||||
// Start title generation in parallel if needed
|
|
||||||
if (actualChatId && !currentChat?.title && conversationHistory.length === 0) {
|
if (actualChatId && !currentChat?.title && conversationHistory.length === 0) {
|
||||||
generateChatTitle(message)
|
generateChatTitle(message)
|
||||||
.then(async (title) => {
|
.then(async (title) => {
|
||||||
@@ -555,311 +333,64 @@ export async function POST(req: NextRequest) {
|
|||||||
updatedAt: new Date(),
|
updatedAt: new Date(),
|
||||||
})
|
})
|
||||||
.where(eq(copilotChats.id, actualChatId!))
|
.where(eq(copilotChats.id, actualChatId!))
|
||||||
|
await pushEvent({ type: 'title_updated', title })
|
||||||
const titleEvent = `data: ${JSON.stringify({
|
|
||||||
type: 'title_updated',
|
|
||||||
title: title,
|
|
||||||
})}\n\n`
|
|
||||||
controller.enqueue(encoder.encode(titleEvent))
|
|
||||||
logger.info(`[${tracker.requestId}] Generated and saved title: ${title}`)
|
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
.catch((error) => {
|
.catch((error) => {
|
||||||
logger.error(`[${tracker.requestId}] Title generation failed:`, error)
|
logger.error(`[${tracker.requestId}] Title generation failed:`, error)
|
||||||
})
|
})
|
||||||
} else {
|
|
||||||
logger.debug(`[${tracker.requestId}] Skipping title generation`)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Forward the sim agent stream and capture assistant response
|
|
||||||
const reader = simAgentResponse.body!.getReader()
|
|
||||||
const decoder = new TextDecoder()
|
|
||||||
|
|
||||||
try {
|
try {
|
||||||
while (true) {
|
const result = await orchestrateCopilotStream(requestPayload, {
|
||||||
const { done, value } = await reader.read()
|
userId: authenticatedUserId,
|
||||||
if (done) {
|
workflowId,
|
||||||
break
|
chatId: actualChatId,
|
||||||
}
|
autoExecuteTools: true,
|
||||||
|
interactive: true,
|
||||||
// Decode and parse SSE events for logging and capturing content
|
onEvent: async (event) => {
|
||||||
const decodedChunk = decoder.decode(value, { stream: true })
|
await pushEvent(event)
|
||||||
buffer += decodedChunk
|
},
|
||||||
|
|
||||||
const lines = buffer.split('\n')
|
|
||||||
buffer = lines.pop() || '' // Keep incomplete line in buffer
|
|
||||||
|
|
||||||
for (const line of lines) {
|
|
||||||
if (line.trim() === '') continue // Skip empty lines
|
|
||||||
|
|
||||||
if (line.startsWith('data: ') && line.length > 6) {
|
|
||||||
try {
|
|
||||||
const jsonStr = line.slice(6)
|
|
||||||
|
|
||||||
// Check if the JSON string is unusually large (potential streaming issue)
|
|
||||||
if (jsonStr.length > 50000) {
|
|
||||||
// 50KB limit
|
|
||||||
logger.warn(`[${tracker.requestId}] Large SSE event detected`, {
|
|
||||||
size: jsonStr.length,
|
|
||||||
preview: `${jsonStr.substring(0, 100)}...`,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
const event = JSON.parse(jsonStr)
|
|
||||||
|
|
||||||
// Log different event types comprehensively
|
|
||||||
switch (event.type) {
|
|
||||||
case 'content':
|
|
||||||
if (event.data) {
|
|
||||||
assistantContent += event.data
|
|
||||||
}
|
|
||||||
break
|
|
||||||
|
|
||||||
case 'reasoning':
|
|
||||||
logger.debug(
|
|
||||||
`[${tracker.requestId}] Reasoning chunk received (${(event.data || event.content || '').length} chars)`
|
|
||||||
)
|
|
||||||
break
|
|
||||||
|
|
||||||
case 'tool_call':
|
|
||||||
if (!event.data?.partial) {
|
|
||||||
toolCalls.push(event.data)
|
|
||||||
if (event.data?.id) {
|
|
||||||
announcedToolCallIds.add(event.data.id)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
break
|
|
||||||
|
|
||||||
case 'tool_generating':
|
|
||||||
if (event.toolCallId) {
|
|
||||||
startedToolExecutionIds.add(event.toolCallId)
|
|
||||||
}
|
|
||||||
break
|
|
||||||
|
|
||||||
case 'tool_result':
|
|
||||||
if (event.toolCallId) {
|
|
||||||
completedToolExecutionIds.add(event.toolCallId)
|
|
||||||
}
|
|
||||||
break
|
|
||||||
|
|
||||||
case 'tool_error':
|
|
||||||
logger.error(`[${tracker.requestId}] Tool error:`, {
|
|
||||||
toolCallId: event.toolCallId,
|
|
||||||
toolName: event.toolName,
|
|
||||||
error: event.error,
|
|
||||||
success: event.success,
|
|
||||||
})
|
|
||||||
if (event.toolCallId) {
|
|
||||||
completedToolExecutionIds.add(event.toolCallId)
|
|
||||||
}
|
|
||||||
break
|
|
||||||
|
|
||||||
case 'start':
|
|
||||||
if (event.data?.responseId) {
|
|
||||||
responseIdFromStart = event.data.responseId
|
|
||||||
}
|
|
||||||
break
|
|
||||||
|
|
||||||
case 'done':
|
|
||||||
if (event.data?.responseId) {
|
|
||||||
responseIdFromDone = event.data.responseId
|
|
||||||
lastDoneResponseId = responseIdFromDone
|
|
||||||
|
|
||||||
// Mark this done as safe only if no tool call is currently in progress or pending
|
|
||||||
const announced = announcedToolCallIds.size
|
|
||||||
const completed = completedToolExecutionIds.size
|
|
||||||
const started = startedToolExecutionIds.size
|
|
||||||
const hasToolInProgress = announced > completed || started > completed
|
|
||||||
if (!hasToolInProgress) {
|
|
||||||
lastSafeDoneResponseId = responseIdFromDone
|
|
||||||
}
|
|
||||||
}
|
|
||||||
break
|
|
||||||
|
|
||||||
case 'error':
|
|
||||||
break
|
|
||||||
|
|
||||||
default:
|
|
||||||
}
|
|
||||||
|
|
||||||
// Emit to client: rewrite 'error' events into user-friendly assistant message
|
|
||||||
if (event?.type === 'error') {
|
|
||||||
try {
|
|
||||||
const displayMessage: string =
|
|
||||||
(event?.data && (event.data.displayMessage as string)) ||
|
|
||||||
'Sorry, I encountered an error. Please try again.'
|
|
||||||
const formatted = `_${displayMessage}_`
|
|
||||||
// Accumulate so it persists to DB as assistant content
|
|
||||||
assistantContent += formatted
|
|
||||||
// Send as content chunk
|
|
||||||
try {
|
|
||||||
controller.enqueue(
|
|
||||||
encoder.encode(
|
|
||||||
`data: ${JSON.stringify({ type: 'content', data: formatted })}\n\n`
|
|
||||||
)
|
|
||||||
)
|
|
||||||
} catch (enqueueErr) {
|
|
||||||
reader.cancel()
|
|
||||||
break
|
|
||||||
}
|
|
||||||
// Then close this response cleanly for the client
|
|
||||||
try {
|
|
||||||
controller.enqueue(
|
|
||||||
encoder.encode(`data: ${JSON.stringify({ type: 'done' })}\n\n`)
|
|
||||||
)
|
|
||||||
} catch (enqueueErr) {
|
|
||||||
reader.cancel()
|
|
||||||
break
|
|
||||||
}
|
|
||||||
} catch {}
|
|
||||||
// Do not forward the original error event
|
|
||||||
} else {
|
|
||||||
// Forward original event to client
|
|
||||||
try {
|
|
||||||
controller.enqueue(encoder.encode(`data: ${jsonStr}\n\n`))
|
|
||||||
} catch (enqueueErr) {
|
|
||||||
reader.cancel()
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} catch (e) {
|
|
||||||
// Enhanced error handling for large payloads and parsing issues
|
|
||||||
const lineLength = line.length
|
|
||||||
const isLargePayload = lineLength > 10000
|
|
||||||
|
|
||||||
if (isLargePayload) {
|
|
||||||
logger.error(
|
|
||||||
`[${tracker.requestId}] Failed to parse large SSE event (${lineLength} chars)`,
|
|
||||||
{
|
|
||||||
error: e,
|
|
||||||
preview: `${line.substring(0, 200)}...`,
|
|
||||||
size: lineLength,
|
|
||||||
}
|
|
||||||
)
|
|
||||||
} else {
|
|
||||||
logger.warn(
|
|
||||||
`[${tracker.requestId}] Failed to parse SSE event: "${line.substring(0, 200)}..."`,
|
|
||||||
e
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} else if (line.trim() && line !== 'data: [DONE]') {
|
|
||||||
logger.debug(`[${tracker.requestId}] Non-SSE line from sim agent: "${line}"`)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Process any remaining buffer
|
|
||||||
if (buffer.trim()) {
|
|
||||||
logger.debug(`[${tracker.requestId}] Processing remaining buffer: "${buffer}"`)
|
|
||||||
if (buffer.startsWith('data: ')) {
|
|
||||||
try {
|
|
||||||
const jsonStr = buffer.slice(6)
|
|
||||||
const event = JSON.parse(jsonStr)
|
|
||||||
if (event.type === 'content' && event.data) {
|
|
||||||
assistantContent += event.data
|
|
||||||
}
|
|
||||||
// Forward remaining event, applying same error rewrite behavior
|
|
||||||
if (event?.type === 'error') {
|
|
||||||
const displayMessage: string =
|
|
||||||
(event?.data && (event.data.displayMessage as string)) ||
|
|
||||||
'Sorry, I encountered an error. Please try again.'
|
|
||||||
const formatted = `_${displayMessage}_`
|
|
||||||
assistantContent += formatted
|
|
||||||
try {
|
|
||||||
controller.enqueue(
|
|
||||||
encoder.encode(
|
|
||||||
`data: ${JSON.stringify({ type: 'content', data: formatted })}\n\n`
|
|
||||||
)
|
|
||||||
)
|
|
||||||
controller.enqueue(
|
|
||||||
encoder.encode(`data: ${JSON.stringify({ type: 'done' })}\n\n`)
|
|
||||||
)
|
|
||||||
} catch (enqueueErr) {
|
|
||||||
reader.cancel()
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
try {
|
|
||||||
controller.enqueue(encoder.encode(`data: ${jsonStr}\n\n`))
|
|
||||||
} catch (enqueueErr) {
|
|
||||||
reader.cancel()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} catch (e) {
|
|
||||||
logger.warn(`[${tracker.requestId}] Failed to parse final buffer: "${buffer}"`)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Log final streaming summary
|
|
||||||
logger.info(`[${tracker.requestId}] Streaming complete summary:`, {
|
|
||||||
totalContentLength: assistantContent.length,
|
|
||||||
toolCallsCount: toolCalls.length,
|
|
||||||
hasContent: assistantContent.length > 0,
|
|
||||||
toolNames: toolCalls.map((tc) => tc?.name).filter(Boolean),
|
|
||||||
})
|
})
|
||||||
|
|
||||||
// NOTE: Messages are saved by the client via update-messages endpoint with full contentBlocks.
|
if (currentChat && result.conversationId) {
|
||||||
// Server only updates conversationId here to avoid overwriting client's richer save.
|
await db
|
||||||
if (currentChat) {
|
.update(copilotChats)
|
||||||
// Persist only a safe conversationId to avoid continuing from a state that expects tool outputs
|
.set({
|
||||||
const previousConversationId = currentChat?.conversationId as string | undefined
|
updatedAt: new Date(),
|
||||||
const responseId = lastSafeDoneResponseId || previousConversationId || undefined
|
conversationId: result.conversationId,
|
||||||
|
})
|
||||||
if (responseId) {
|
.where(eq(copilotChats.id, actualChatId!))
|
||||||
await db
|
|
||||||
.update(copilotChats)
|
|
||||||
.set({
|
|
||||||
updatedAt: new Date(),
|
|
||||||
conversationId: responseId,
|
|
||||||
})
|
|
||||||
.where(eq(copilotChats.id, actualChatId!))
|
|
||||||
|
|
||||||
logger.info(
|
|
||||||
`[${tracker.requestId}] Updated conversationId for chat ${actualChatId}`,
|
|
||||||
{
|
|
||||||
updatedConversationId: responseId,
|
|
||||||
}
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
await eventWriter.close()
|
||||||
|
await setStreamMeta(streamId, { status: 'complete', userId: authenticatedUserId })
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error(`[${tracker.requestId}] Error processing stream:`, error)
|
logger.error(`[${tracker.requestId}] Orchestration error:`, error)
|
||||||
|
await eventWriter.close()
|
||||||
// Send an error event to the client before closing so it knows what happened
|
await setStreamMeta(streamId, {
|
||||||
try {
|
status: 'error',
|
||||||
const errorMessage =
|
userId: authenticatedUserId,
|
||||||
error instanceof Error && error.message === 'terminated'
|
error: error instanceof Error ? error.message : 'Stream error',
|
||||||
? 'Connection to AI service was interrupted. Please try again.'
|
})
|
||||||
: 'An unexpected error occurred while processing the response.'
|
await pushEvent({
|
||||||
const encoder = new TextEncoder()
|
type: 'error',
|
||||||
|
data: {
|
||||||
// Send error as content so it shows in the chat
|
displayMessage: 'An unexpected error occurred while processing the response.',
|
||||||
controller.enqueue(
|
},
|
||||||
encoder.encode(
|
})
|
||||||
`data: ${JSON.stringify({ type: 'content', data: `\n\n_${errorMessage}_` })}\n\n`
|
|
||||||
)
|
|
||||||
)
|
|
||||||
// Send done event to properly close the stream on client
|
|
||||||
controller.enqueue(encoder.encode(`data: ${JSON.stringify({ type: 'done' })}\n\n`))
|
|
||||||
} catch (enqueueError) {
|
|
||||||
// Stream might already be closed, that's ok
|
|
||||||
logger.warn(
|
|
||||||
`[${tracker.requestId}] Could not send error event to client:`,
|
|
||||||
enqueueError
|
|
||||||
)
|
|
||||||
}
|
|
||||||
} finally {
|
} finally {
|
||||||
try {
|
controller.close()
|
||||||
controller.close()
|
}
|
||||||
} catch {
|
},
|
||||||
// Controller might already be closed
|
async cancel() {
|
||||||
}
|
clientDisconnected = true
|
||||||
|
if (eventWriter) {
|
||||||
|
await eventWriter.flush()
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
|
|
||||||
const response = new Response(transformedStream, {
|
return new Response(transformedStream, {
|
||||||
headers: {
|
headers: {
|
||||||
'Content-Type': 'text/event-stream',
|
'Content-Type': 'text/event-stream',
|
||||||
'Cache-Control': 'no-cache',
|
'Cache-Control': 'no-cache',
|
||||||
@@ -867,43 +398,34 @@ export async function POST(req: NextRequest) {
|
|||||||
'X-Accel-Buffering': 'no',
|
'X-Accel-Buffering': 'no',
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
|
|
||||||
logger.info(`[${tracker.requestId}] Returning streaming response to client`, {
|
|
||||||
duration: tracker.getDuration(),
|
|
||||||
chatId: actualChatId,
|
|
||||||
headers: {
|
|
||||||
'Content-Type': 'text/event-stream',
|
|
||||||
'Cache-Control': 'no-cache',
|
|
||||||
Connection: 'keep-alive',
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
return response
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// For non-streaming responses
|
const nonStreamingResult = await orchestrateCopilotStream(requestPayload, {
|
||||||
const responseData = await simAgentResponse.json()
|
userId: authenticatedUserId,
|
||||||
logger.info(`[${tracker.requestId}] Non-streaming response from sim agent:`, {
|
workflowId,
|
||||||
|
chatId: actualChatId,
|
||||||
|
autoExecuteTools: true,
|
||||||
|
interactive: true,
|
||||||
|
})
|
||||||
|
|
||||||
|
const responseData = {
|
||||||
|
content: nonStreamingResult.content,
|
||||||
|
toolCalls: nonStreamingResult.toolCalls,
|
||||||
|
model: selectedModel,
|
||||||
|
provider:
|
||||||
|
(requestPayload?.provider as Record<string, unknown>)?.provider ||
|
||||||
|
env.COPILOT_PROVIDER ||
|
||||||
|
'openai',
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info(`[${tracker.requestId}] Non-streaming response from orchestrator:`, {
|
||||||
hasContent: !!responseData.content,
|
hasContent: !!responseData.content,
|
||||||
contentLength: responseData.content?.length || 0,
|
contentLength: responseData.content?.length || 0,
|
||||||
model: responseData.model,
|
model: responseData.model,
|
||||||
provider: responseData.provider,
|
provider: responseData.provider,
|
||||||
toolCallsCount: responseData.toolCalls?.length || 0,
|
toolCallsCount: responseData.toolCalls?.length || 0,
|
||||||
hasTokens: !!responseData.tokens,
|
|
||||||
})
|
})
|
||||||
|
|
||||||
// Log tool calls if present
|
|
||||||
if (responseData.toolCalls?.length > 0) {
|
|
||||||
responseData.toolCalls.forEach((toolCall: any) => {
|
|
||||||
logger.info(`[${tracker.requestId}] Tool call in response:`, {
|
|
||||||
id: toolCall.id,
|
|
||||||
name: toolCall.name,
|
|
||||||
success: toolCall.success,
|
|
||||||
result: `${JSON.stringify(toolCall.result).substring(0, 200)}...`,
|
|
||||||
})
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
// Save messages if we have a chat
|
// Save messages if we have a chat
|
||||||
if (currentChat && responseData.content) {
|
if (currentChat && responseData.content) {
|
||||||
const userMessage = {
|
const userMessage = {
|
||||||
@@ -955,6 +477,9 @@ export async function POST(req: NextRequest) {
|
|||||||
.set({
|
.set({
|
||||||
messages: updatedMessages,
|
messages: updatedMessages,
|
||||||
updatedAt: new Date(),
|
updatedAt: new Date(),
|
||||||
|
...(nonStreamingResult.conversationId
|
||||||
|
? { conversationId: nonStreamingResult.conversationId }
|
||||||
|
: {}),
|
||||||
})
|
})
|
||||||
.where(eq(copilotChats.id, actualChatId!))
|
.where(eq(copilotChats.id, actualChatId!))
|
||||||
}
|
}
|
||||||
@@ -1006,10 +531,7 @@ export async function GET(req: NextRequest) {
|
|||||||
try {
|
try {
|
||||||
const { searchParams } = new URL(req.url)
|
const { searchParams } = new URL(req.url)
|
||||||
const workflowId = searchParams.get('workflowId')
|
const workflowId = searchParams.get('workflowId')
|
||||||
|
const chatId = searchParams.get('chatId')
|
||||||
if (!workflowId) {
|
|
||||||
return createBadRequestResponse('workflowId is required')
|
|
||||||
}
|
|
||||||
|
|
||||||
// Get authenticated user using consolidated helper
|
// Get authenticated user using consolidated helper
|
||||||
const { userId: authenticatedUserId, isAuthenticated } =
|
const { userId: authenticatedUserId, isAuthenticated } =
|
||||||
@@ -1018,6 +540,47 @@ export async function GET(req: NextRequest) {
|
|||||||
return createUnauthorizedResponse()
|
return createUnauthorizedResponse()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// If chatId is provided, fetch a single chat
|
||||||
|
if (chatId) {
|
||||||
|
const [chat] = await db
|
||||||
|
.select({
|
||||||
|
id: copilotChats.id,
|
||||||
|
title: copilotChats.title,
|
||||||
|
model: copilotChats.model,
|
||||||
|
messages: copilotChats.messages,
|
||||||
|
planArtifact: copilotChats.planArtifact,
|
||||||
|
config: copilotChats.config,
|
||||||
|
createdAt: copilotChats.createdAt,
|
||||||
|
updatedAt: copilotChats.updatedAt,
|
||||||
|
})
|
||||||
|
.from(copilotChats)
|
||||||
|
.where(and(eq(copilotChats.id, chatId), eq(copilotChats.userId, authenticatedUserId)))
|
||||||
|
.limit(1)
|
||||||
|
|
||||||
|
if (!chat) {
|
||||||
|
return NextResponse.json({ success: false, error: 'Chat not found' }, { status: 404 })
|
||||||
|
}
|
||||||
|
|
||||||
|
const transformedChat = {
|
||||||
|
id: chat.id,
|
||||||
|
title: chat.title,
|
||||||
|
model: chat.model,
|
||||||
|
messages: Array.isArray(chat.messages) ? chat.messages : [],
|
||||||
|
messageCount: Array.isArray(chat.messages) ? chat.messages.length : 0,
|
||||||
|
planArtifact: chat.planArtifact || null,
|
||||||
|
config: chat.config || null,
|
||||||
|
createdAt: chat.createdAt,
|
||||||
|
updatedAt: chat.updatedAt,
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info(`Retrieved chat ${chatId}`)
|
||||||
|
return NextResponse.json({ success: true, chat: transformedChat })
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!workflowId) {
|
||||||
|
return createBadRequestResponse('workflowId or chatId is required')
|
||||||
|
}
|
||||||
|
|
||||||
// Fetch chats for this user and workflow
|
// Fetch chats for this user and workflow
|
||||||
const chats = await db
|
const chats = await db
|
||||||
.select({
|
.select({
|
||||||
|
|||||||
130
apps/sim/app/api/copilot/chat/stream/route.ts
Normal file
130
apps/sim/app/api/copilot/chat/stream/route.ts
Normal file
@@ -0,0 +1,130 @@
|
|||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
|
import {
|
||||||
|
getStreamMeta,
|
||||||
|
readStreamEvents,
|
||||||
|
type StreamMeta,
|
||||||
|
} from '@/lib/copilot/orchestrator/stream-buffer'
|
||||||
|
import { authenticateCopilotRequestSessionOnly } from '@/lib/copilot/request-helpers'
|
||||||
|
import { SSE_HEADERS } from '@/lib/core/utils/sse'
|
||||||
|
|
||||||
|
const logger = createLogger('CopilotChatStreamAPI')
|
||||||
|
const POLL_INTERVAL_MS = 250
|
||||||
|
const MAX_STREAM_MS = 10 * 60 * 1000
|
||||||
|
|
||||||
|
function encodeEvent(event: Record<string, any>): Uint8Array {
|
||||||
|
return new TextEncoder().encode(`data: ${JSON.stringify(event)}\n\n`)
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function GET(request: NextRequest) {
|
||||||
|
const { userId: authenticatedUserId, isAuthenticated } =
|
||||||
|
await authenticateCopilotRequestSessionOnly()
|
||||||
|
|
||||||
|
if (!isAuthenticated || !authenticatedUserId) {
|
||||||
|
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||||
|
}
|
||||||
|
|
||||||
|
const url = new URL(request.url)
|
||||||
|
const streamId = url.searchParams.get('streamId') || ''
|
||||||
|
const fromParam = url.searchParams.get('from') || '0'
|
||||||
|
const fromEventId = Number(fromParam || 0)
|
||||||
|
// If batch=true, return buffered events as JSON instead of SSE
|
||||||
|
const batchMode = url.searchParams.get('batch') === 'true'
|
||||||
|
const toParam = url.searchParams.get('to')
|
||||||
|
const toEventId = toParam ? Number(toParam) : undefined
|
||||||
|
|
||||||
|
if (!streamId) {
|
||||||
|
return NextResponse.json({ error: 'streamId is required' }, { status: 400 })
|
||||||
|
}
|
||||||
|
|
||||||
|
const meta = (await getStreamMeta(streamId)) as StreamMeta | null
|
||||||
|
logger.info('[Resume] Stream lookup', {
|
||||||
|
streamId,
|
||||||
|
fromEventId,
|
||||||
|
toEventId,
|
||||||
|
batchMode,
|
||||||
|
hasMeta: !!meta,
|
||||||
|
metaStatus: meta?.status,
|
||||||
|
})
|
||||||
|
if (!meta) {
|
||||||
|
return NextResponse.json({ error: 'Stream not found' }, { status: 404 })
|
||||||
|
}
|
||||||
|
if (meta.userId && meta.userId !== authenticatedUserId) {
|
||||||
|
return NextResponse.json({ error: 'Unauthorized' }, { status: 403 })
|
||||||
|
}
|
||||||
|
|
||||||
|
// Batch mode: return all buffered events as JSON
|
||||||
|
if (batchMode) {
|
||||||
|
const events = await readStreamEvents(streamId, fromEventId)
|
||||||
|
const filteredEvents = toEventId ? events.filter((e) => e.eventId <= toEventId) : events
|
||||||
|
logger.info('[Resume] Batch response', {
|
||||||
|
streamId,
|
||||||
|
fromEventId,
|
||||||
|
toEventId,
|
||||||
|
eventCount: filteredEvents.length,
|
||||||
|
})
|
||||||
|
return NextResponse.json({
|
||||||
|
success: true,
|
||||||
|
events: filteredEvents,
|
||||||
|
status: meta.status,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
const startTime = Date.now()
|
||||||
|
|
||||||
|
const stream = new ReadableStream({
|
||||||
|
async start(controller) {
|
||||||
|
let lastEventId = Number.isFinite(fromEventId) ? fromEventId : 0
|
||||||
|
|
||||||
|
const flushEvents = async () => {
|
||||||
|
const events = await readStreamEvents(streamId, lastEventId)
|
||||||
|
if (events.length > 0) {
|
||||||
|
logger.info('[Resume] Flushing events', {
|
||||||
|
streamId,
|
||||||
|
fromEventId: lastEventId,
|
||||||
|
eventCount: events.length,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
for (const entry of events) {
|
||||||
|
lastEventId = entry.eventId
|
||||||
|
const payload = {
|
||||||
|
...entry.event,
|
||||||
|
eventId: entry.eventId,
|
||||||
|
streamId: entry.streamId,
|
||||||
|
}
|
||||||
|
controller.enqueue(encodeEvent(payload))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
await flushEvents()
|
||||||
|
|
||||||
|
while (Date.now() - startTime < MAX_STREAM_MS) {
|
||||||
|
const currentMeta = await getStreamMeta(streamId)
|
||||||
|
if (!currentMeta) break
|
||||||
|
|
||||||
|
await flushEvents()
|
||||||
|
|
||||||
|
if (currentMeta.status === 'complete' || currentMeta.status === 'error') {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
if (request.signal.aborted) {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
await new Promise((resolve) => setTimeout(resolve, POLL_INTERVAL_MS))
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
logger.warn('Stream replay failed', {
|
||||||
|
streamId,
|
||||||
|
error: error instanceof Error ? error.message : String(error),
|
||||||
|
})
|
||||||
|
} finally {
|
||||||
|
controller.close()
|
||||||
|
}
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
return new Response(stream, { headers: SSE_HEADERS })
|
||||||
|
}
|
||||||
@@ -1,6 +1,7 @@
|
|||||||
import { createLogger } from '@sim/logger'
|
import { createLogger } from '@sim/logger'
|
||||||
import { type NextRequest, NextResponse } from 'next/server'
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
|
import { REDIS_TOOL_CALL_PREFIX, REDIS_TOOL_CALL_TTL_SECONDS } from '@/lib/copilot/constants'
|
||||||
import {
|
import {
|
||||||
authenticateCopilotRequestSessionOnly,
|
authenticateCopilotRequestSessionOnly,
|
||||||
createBadRequestResponse,
|
createBadRequestResponse,
|
||||||
@@ -23,7 +24,8 @@ const ConfirmationSchema = z.object({
|
|||||||
})
|
})
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Update tool call status in Redis
|
* Write the user's tool decision to Redis. The server-side orchestrator's
|
||||||
|
* waitForToolDecision() polls Redis for this value.
|
||||||
*/
|
*/
|
||||||
async function updateToolCallStatus(
|
async function updateToolCallStatus(
|
||||||
toolCallId: string,
|
toolCallId: string,
|
||||||
@@ -32,57 +34,24 @@ async function updateToolCallStatus(
|
|||||||
): Promise<boolean> {
|
): Promise<boolean> {
|
||||||
const redis = getRedisClient()
|
const redis = getRedisClient()
|
||||||
if (!redis) {
|
if (!redis) {
|
||||||
logger.warn('updateToolCallStatus: Redis client not available')
|
logger.warn('Redis client not available for tool confirmation')
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const key = `tool_call:${toolCallId}`
|
const key = `${REDIS_TOOL_CALL_PREFIX}${toolCallId}`
|
||||||
const timeout = 600000 // 10 minutes timeout for user confirmation
|
const payload = {
|
||||||
const pollInterval = 100 // Poll every 100ms
|
|
||||||
const startTime = Date.now()
|
|
||||||
|
|
||||||
logger.info('Polling for tool call in Redis', { toolCallId, key, timeout })
|
|
||||||
|
|
||||||
// Poll until the key exists or timeout
|
|
||||||
while (Date.now() - startTime < timeout) {
|
|
||||||
const exists = await redis.exists(key)
|
|
||||||
if (exists) {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
|
|
||||||
// Wait before next poll
|
|
||||||
await new Promise((resolve) => setTimeout(resolve, pollInterval))
|
|
||||||
}
|
|
||||||
|
|
||||||
// Final check if key exists after polling
|
|
||||||
const exists = await redis.exists(key)
|
|
||||||
if (!exists) {
|
|
||||||
logger.warn('Tool call not found in Redis after polling timeout', {
|
|
||||||
toolCallId,
|
|
||||||
key,
|
|
||||||
timeout,
|
|
||||||
pollDuration: Date.now() - startTime,
|
|
||||||
})
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
// Store both status and message as JSON
|
|
||||||
const toolCallData = {
|
|
||||||
status,
|
status,
|
||||||
message: message || null,
|
message: message || null,
|
||||||
timestamp: new Date().toISOString(),
|
timestamp: new Date().toISOString(),
|
||||||
}
|
}
|
||||||
|
await redis.set(key, JSON.stringify(payload), 'EX', REDIS_TOOL_CALL_TTL_SECONDS)
|
||||||
await redis.set(key, JSON.stringify(toolCallData), 'EX', 86400) // Keep 24 hour expiry
|
|
||||||
|
|
||||||
return true
|
return true
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error('Failed to update tool call status in Redis', {
|
logger.error('Failed to update tool call status', {
|
||||||
toolCallId,
|
toolCallId,
|
||||||
status,
|
status,
|
||||||
message,
|
error: error instanceof Error ? error.message : String(error),
|
||||||
error: error instanceof Error ? error.message : 'Unknown error',
|
|
||||||
})
|
})
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
|||||||
28
apps/sim/app/api/copilot/credentials/route.ts
Normal file
28
apps/sim/app/api/copilot/credentials/route.ts
Normal file
@@ -0,0 +1,28 @@
|
|||||||
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
|
import { authenticateCopilotRequestSessionOnly } from '@/lib/copilot/request-helpers'
|
||||||
|
import { routeExecution } from '@/lib/copilot/tools/server/router'
|
||||||
|
|
||||||
|
/**
|
||||||
|
* GET /api/copilot/credentials
|
||||||
|
* Returns connected OAuth credentials for the authenticated user.
|
||||||
|
* Used by the copilot store for credential masking.
|
||||||
|
*/
|
||||||
|
export async function GET(_req: NextRequest) {
|
||||||
|
const { userId, isAuthenticated } = await authenticateCopilotRequestSessionOnly()
|
||||||
|
if (!isAuthenticated || !userId) {
|
||||||
|
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const result = await routeExecution('get_credentials', {}, { userId })
|
||||||
|
return NextResponse.json({ success: true, result })
|
||||||
|
} catch (error) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: error instanceof Error ? error.message : 'Failed to load credentials',
|
||||||
|
},
|
||||||
|
{ status: 500 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,54 +0,0 @@
|
|||||||
import { createLogger } from '@sim/logger'
|
|
||||||
import { type NextRequest, NextResponse } from 'next/server'
|
|
||||||
import { z } from 'zod'
|
|
||||||
import {
|
|
||||||
authenticateCopilotRequestSessionOnly,
|
|
||||||
createBadRequestResponse,
|
|
||||||
createInternalServerErrorResponse,
|
|
||||||
createRequestTracker,
|
|
||||||
createUnauthorizedResponse,
|
|
||||||
} from '@/lib/copilot/request-helpers'
|
|
||||||
import { routeExecution } from '@/lib/copilot/tools/server/router'
|
|
||||||
|
|
||||||
const logger = createLogger('ExecuteCopilotServerToolAPI')
|
|
||||||
|
|
||||||
const ExecuteSchema = z.object({
|
|
||||||
toolName: z.string(),
|
|
||||||
payload: z.unknown().optional(),
|
|
||||||
})
|
|
||||||
|
|
||||||
export async function POST(req: NextRequest) {
|
|
||||||
const tracker = createRequestTracker()
|
|
||||||
try {
|
|
||||||
const { userId, isAuthenticated } = await authenticateCopilotRequestSessionOnly()
|
|
||||||
if (!isAuthenticated || !userId) {
|
|
||||||
return createUnauthorizedResponse()
|
|
||||||
}
|
|
||||||
|
|
||||||
const body = await req.json()
|
|
||||||
try {
|
|
||||||
const preview = JSON.stringify(body).slice(0, 300)
|
|
||||||
logger.debug(`[${tracker.requestId}] Incoming request body preview`, { preview })
|
|
||||||
} catch {}
|
|
||||||
|
|
||||||
const { toolName, payload } = ExecuteSchema.parse(body)
|
|
||||||
|
|
||||||
logger.info(`[${tracker.requestId}] Executing server tool`, { toolName })
|
|
||||||
const result = await routeExecution(toolName, payload, { userId })
|
|
||||||
|
|
||||||
try {
|
|
||||||
const resultPreview = JSON.stringify(result).slice(0, 300)
|
|
||||||
logger.debug(`[${tracker.requestId}] Server tool result preview`, { toolName, resultPreview })
|
|
||||||
} catch {}
|
|
||||||
|
|
||||||
return NextResponse.json({ success: true, result })
|
|
||||||
} catch (error) {
|
|
||||||
if (error instanceof z.ZodError) {
|
|
||||||
logger.debug(`[${tracker.requestId}] Zod validation error`, { issues: error.issues })
|
|
||||||
return createBadRequestResponse('Invalid request body for execute-copilot-server-tool')
|
|
||||||
}
|
|
||||||
logger.error(`[${tracker.requestId}] Failed to execute server tool:`, error)
|
|
||||||
const errorMessage = error instanceof Error ? error.message : 'Failed to execute server tool'
|
|
||||||
return createInternalServerErrorResponse(errorMessage)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,247 +0,0 @@
|
|||||||
import { db } from '@sim/db'
|
|
||||||
import { account, workflow } from '@sim/db/schema'
|
|
||||||
import { createLogger } from '@sim/logger'
|
|
||||||
import { and, eq } from 'drizzle-orm'
|
|
||||||
import { type NextRequest, NextResponse } from 'next/server'
|
|
||||||
import { z } from 'zod'
|
|
||||||
import { getSession } from '@/lib/auth'
|
|
||||||
import {
|
|
||||||
createBadRequestResponse,
|
|
||||||
createInternalServerErrorResponse,
|
|
||||||
createRequestTracker,
|
|
||||||
createUnauthorizedResponse,
|
|
||||||
} from '@/lib/copilot/request-helpers'
|
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
|
||||||
import { getEffectiveDecryptedEnv } from '@/lib/environment/utils'
|
|
||||||
import { refreshTokenIfNeeded } from '@/app/api/auth/oauth/utils'
|
|
||||||
import { resolveEnvVarReferences } from '@/executor/utils/reference-validation'
|
|
||||||
import { executeTool } from '@/tools'
|
|
||||||
import { getTool, resolveToolId } from '@/tools/utils'
|
|
||||||
|
|
||||||
const logger = createLogger('CopilotExecuteToolAPI')
|
|
||||||
|
|
||||||
const ExecuteToolSchema = z.object({
|
|
||||||
toolCallId: z.string(),
|
|
||||||
toolName: z.string(),
|
|
||||||
arguments: z.record(z.any()).optional().default({}),
|
|
||||||
workflowId: z.string().optional(),
|
|
||||||
})
|
|
||||||
|
|
||||||
export async function POST(req: NextRequest) {
|
|
||||||
const tracker = createRequestTracker()
|
|
||||||
|
|
||||||
try {
|
|
||||||
const session = await getSession()
|
|
||||||
if (!session?.user?.id) {
|
|
||||||
return createUnauthorizedResponse()
|
|
||||||
}
|
|
||||||
|
|
||||||
const userId = session.user.id
|
|
||||||
const body = await req.json()
|
|
||||||
|
|
||||||
try {
|
|
||||||
const preview = JSON.stringify(body).slice(0, 300)
|
|
||||||
logger.debug(`[${tracker.requestId}] Incoming execute-tool request`, { preview })
|
|
||||||
} catch {}
|
|
||||||
|
|
||||||
const { toolCallId, toolName, arguments: toolArgs, workflowId } = ExecuteToolSchema.parse(body)
|
|
||||||
|
|
||||||
const resolvedToolName = resolveToolId(toolName)
|
|
||||||
|
|
||||||
logger.info(`[${tracker.requestId}] Executing tool`, {
|
|
||||||
toolCallId,
|
|
||||||
toolName,
|
|
||||||
resolvedToolName,
|
|
||||||
workflowId,
|
|
||||||
hasArgs: Object.keys(toolArgs).length > 0,
|
|
||||||
})
|
|
||||||
|
|
||||||
const toolConfig = getTool(resolvedToolName)
|
|
||||||
if (!toolConfig) {
|
|
||||||
// Find similar tool names to help debug
|
|
||||||
const { tools: allTools } = await import('@/tools/registry')
|
|
||||||
const allToolNames = Object.keys(allTools)
|
|
||||||
const prefix = toolName.split('_').slice(0, 2).join('_')
|
|
||||||
const similarTools = allToolNames
|
|
||||||
.filter((name) => name.startsWith(`${prefix.split('_')[0]}_`))
|
|
||||||
.slice(0, 10)
|
|
||||||
|
|
||||||
logger.warn(`[${tracker.requestId}] Tool not found in registry`, {
|
|
||||||
toolName,
|
|
||||||
prefix,
|
|
||||||
similarTools,
|
|
||||||
totalToolsInRegistry: allToolNames.length,
|
|
||||||
})
|
|
||||||
return NextResponse.json(
|
|
||||||
{
|
|
||||||
success: false,
|
|
||||||
error: `Tool not found: ${toolName}. Similar tools: ${similarTools.join(', ')}`,
|
|
||||||
toolCallId,
|
|
||||||
},
|
|
||||||
{ status: 404 }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Get the workspaceId from the workflow (env vars are stored at workspace level)
|
|
||||||
let workspaceId: string | undefined
|
|
||||||
if (workflowId) {
|
|
||||||
const workflowResult = await db
|
|
||||||
.select({ workspaceId: workflow.workspaceId })
|
|
||||||
.from(workflow)
|
|
||||||
.where(eq(workflow.id, workflowId))
|
|
||||||
.limit(1)
|
|
||||||
workspaceId = workflowResult[0]?.workspaceId ?? undefined
|
|
||||||
}
|
|
||||||
|
|
||||||
// Get decrypted environment variables early so we can resolve all {{VAR}} references
|
|
||||||
const decryptedEnvVars = await getEffectiveDecryptedEnv(userId, workspaceId)
|
|
||||||
|
|
||||||
logger.info(`[${tracker.requestId}] Fetched environment variables`, {
|
|
||||||
workflowId,
|
|
||||||
workspaceId,
|
|
||||||
envVarCount: Object.keys(decryptedEnvVars).length,
|
|
||||||
envVarKeys: Object.keys(decryptedEnvVars),
|
|
||||||
})
|
|
||||||
|
|
||||||
// Build execution params starting with LLM-provided arguments
|
|
||||||
// Resolve all {{ENV_VAR}} references in the arguments (deep for nested objects)
|
|
||||||
const executionParams: Record<string, any> = resolveEnvVarReferences(
|
|
||||||
toolArgs,
|
|
||||||
decryptedEnvVars,
|
|
||||||
{ deep: true }
|
|
||||||
) as Record<string, any>
|
|
||||||
|
|
||||||
logger.info(`[${tracker.requestId}] Resolved env var references in arguments`, {
|
|
||||||
toolName,
|
|
||||||
originalArgKeys: Object.keys(toolArgs),
|
|
||||||
resolvedArgKeys: Object.keys(executionParams),
|
|
||||||
})
|
|
||||||
|
|
||||||
// Resolve OAuth access token if required
|
|
||||||
if (toolConfig.oauth?.required && toolConfig.oauth.provider) {
|
|
||||||
const provider = toolConfig.oauth.provider
|
|
||||||
logger.info(`[${tracker.requestId}] Resolving OAuth token`, { provider })
|
|
||||||
|
|
||||||
try {
|
|
||||||
// Find the account for this provider and user
|
|
||||||
const accounts = await db
|
|
||||||
.select()
|
|
||||||
.from(account)
|
|
||||||
.where(and(eq(account.providerId, provider), eq(account.userId, userId)))
|
|
||||||
.limit(1)
|
|
||||||
|
|
||||||
if (accounts.length > 0) {
|
|
||||||
const acc = accounts[0]
|
|
||||||
const requestId = generateRequestId()
|
|
||||||
const { accessToken } = await refreshTokenIfNeeded(requestId, acc as any, acc.id)
|
|
||||||
|
|
||||||
if (accessToken) {
|
|
||||||
executionParams.accessToken = accessToken
|
|
||||||
logger.info(`[${tracker.requestId}] OAuth token resolved`, { provider })
|
|
||||||
} else {
|
|
||||||
logger.warn(`[${tracker.requestId}] No access token available`, { provider })
|
|
||||||
return NextResponse.json(
|
|
||||||
{
|
|
||||||
success: false,
|
|
||||||
error: `OAuth token not available for ${provider}. Please reconnect your account.`,
|
|
||||||
toolCallId,
|
|
||||||
},
|
|
||||||
{ status: 400 }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
logger.warn(`[${tracker.requestId}] No account found for provider`, { provider })
|
|
||||||
return NextResponse.json(
|
|
||||||
{
|
|
||||||
success: false,
|
|
||||||
error: `No ${provider} account connected. Please connect your account first.`,
|
|
||||||
toolCallId,
|
|
||||||
},
|
|
||||||
{ status: 400 }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
logger.error(`[${tracker.requestId}] Failed to resolve OAuth token`, {
|
|
||||||
provider,
|
|
||||||
error: error instanceof Error ? error.message : String(error),
|
|
||||||
})
|
|
||||||
return NextResponse.json(
|
|
||||||
{
|
|
||||||
success: false,
|
|
||||||
error: `Failed to get OAuth token for ${provider}`,
|
|
||||||
toolCallId,
|
|
||||||
},
|
|
||||||
{ status: 500 }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check if tool requires an API key that wasn't resolved via {{ENV_VAR}} reference
|
|
||||||
const needsApiKey = toolConfig.params?.apiKey?.required
|
|
||||||
|
|
||||||
if (needsApiKey && !executionParams.apiKey) {
|
|
||||||
logger.warn(`[${tracker.requestId}] No API key found for tool`, { toolName })
|
|
||||||
return NextResponse.json(
|
|
||||||
{
|
|
||||||
success: false,
|
|
||||||
error: `API key not provided for ${toolName}. Use {{YOUR_API_KEY_ENV_VAR}} to reference your environment variable.`,
|
|
||||||
toolCallId,
|
|
||||||
},
|
|
||||||
{ status: 400 }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Add execution context
|
|
||||||
executionParams._context = {
|
|
||||||
workflowId,
|
|
||||||
userId,
|
|
||||||
}
|
|
||||||
|
|
||||||
// Special handling for function_execute - inject environment variables
|
|
||||||
if (toolName === 'function_execute') {
|
|
||||||
executionParams.envVars = decryptedEnvVars
|
|
||||||
executionParams.workflowVariables = {} // No workflow variables in copilot context
|
|
||||||
executionParams.blockData = {} // No block data in copilot context
|
|
||||||
executionParams.blockNameMapping = {} // No block mapping in copilot context
|
|
||||||
executionParams.language = executionParams.language || 'javascript'
|
|
||||||
executionParams.timeout = executionParams.timeout || 30000
|
|
||||||
|
|
||||||
logger.info(`[${tracker.requestId}] Injected env vars for function_execute`, {
|
|
||||||
envVarCount: Object.keys(decryptedEnvVars).length,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
// Execute the tool
|
|
||||||
logger.info(`[${tracker.requestId}] Executing tool with resolved credentials`, {
|
|
||||||
toolName,
|
|
||||||
hasAccessToken: !!executionParams.accessToken,
|
|
||||||
hasApiKey: !!executionParams.apiKey,
|
|
||||||
})
|
|
||||||
|
|
||||||
const result = await executeTool(resolvedToolName, executionParams)
|
|
||||||
|
|
||||||
logger.info(`[${tracker.requestId}] Tool execution complete`, {
|
|
||||||
toolName,
|
|
||||||
success: result.success,
|
|
||||||
hasOutput: !!result.output,
|
|
||||||
})
|
|
||||||
|
|
||||||
return NextResponse.json({
|
|
||||||
success: true,
|
|
||||||
toolCallId,
|
|
||||||
result: {
|
|
||||||
success: result.success,
|
|
||||||
output: result.output,
|
|
||||||
error: result.error,
|
|
||||||
},
|
|
||||||
})
|
|
||||||
} catch (error) {
|
|
||||||
if (error instanceof z.ZodError) {
|
|
||||||
logger.debug(`[${tracker.requestId}] Zod validation error`, { issues: error.issues })
|
|
||||||
return createBadRequestResponse('Invalid request body for execute-tool')
|
|
||||||
}
|
|
||||||
logger.error(`[${tracker.requestId}] Failed to execute tool:`, error)
|
|
||||||
const errorMessage = error instanceof Error ? error.message : 'Failed to execute tool'
|
|
||||||
return createInternalServerErrorResponse(errorMessage)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,6 +1,6 @@
|
|||||||
import { type NextRequest, NextResponse } from 'next/server'
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { SIM_AGENT_API_URL_DEFAULT } from '@/lib/copilot/constants'
|
import { SIM_AGENT_API_URL } from '@/lib/copilot/constants'
|
||||||
import {
|
import {
|
||||||
authenticateCopilotRequestSessionOnly,
|
authenticateCopilotRequestSessionOnly,
|
||||||
createBadRequestResponse,
|
createBadRequestResponse,
|
||||||
@@ -10,8 +10,6 @@ import {
|
|||||||
} from '@/lib/copilot/request-helpers'
|
} from '@/lib/copilot/request-helpers'
|
||||||
import { env } from '@/lib/core/config/env'
|
import { env } from '@/lib/core/config/env'
|
||||||
|
|
||||||
const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT
|
|
||||||
|
|
||||||
const BodySchema = z.object({
|
const BodySchema = z.object({
|
||||||
messageId: z.string(),
|
messageId: z.string(),
|
||||||
diffCreated: z.boolean(),
|
diffCreated: z.boolean(),
|
||||||
|
|||||||
@@ -1,123 +0,0 @@
|
|||||||
import { createLogger } from '@sim/logger'
|
|
||||||
import { type NextRequest, NextResponse } from 'next/server'
|
|
||||||
import { z } from 'zod'
|
|
||||||
import { SIM_AGENT_API_URL_DEFAULT } from '@/lib/copilot/constants'
|
|
||||||
import {
|
|
||||||
authenticateCopilotRequestSessionOnly,
|
|
||||||
createBadRequestResponse,
|
|
||||||
createInternalServerErrorResponse,
|
|
||||||
createRequestTracker,
|
|
||||||
createUnauthorizedResponse,
|
|
||||||
} from '@/lib/copilot/request-helpers'
|
|
||||||
import { env } from '@/lib/core/config/env'
|
|
||||||
|
|
||||||
const logger = createLogger('CopilotMarkToolCompleteAPI')
|
|
||||||
|
|
||||||
const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT
|
|
||||||
|
|
||||||
const MarkCompleteSchema = z.object({
|
|
||||||
id: z.string(),
|
|
||||||
name: z.string(),
|
|
||||||
status: z.number().int(),
|
|
||||||
message: z.any().optional(),
|
|
||||||
data: z.any().optional(),
|
|
||||||
})
|
|
||||||
|
|
||||||
/**
|
|
||||||
* POST /api/copilot/tools/mark-complete
|
|
||||||
* Proxy to Sim Agent: POST /api/tools/mark-complete
|
|
||||||
*/
|
|
||||||
export async function POST(req: NextRequest) {
|
|
||||||
const tracker = createRequestTracker()
|
|
||||||
|
|
||||||
try {
|
|
||||||
const { userId, isAuthenticated } = await authenticateCopilotRequestSessionOnly()
|
|
||||||
if (!isAuthenticated || !userId) {
|
|
||||||
return createUnauthorizedResponse()
|
|
||||||
}
|
|
||||||
|
|
||||||
const body = await req.json()
|
|
||||||
|
|
||||||
// Log raw body shape for diagnostics (avoid dumping huge payloads)
|
|
||||||
try {
|
|
||||||
const bodyPreview = JSON.stringify(body).slice(0, 300)
|
|
||||||
logger.debug(`[${tracker.requestId}] Incoming mark-complete raw body preview`, {
|
|
||||||
preview: `${bodyPreview}${bodyPreview.length === 300 ? '...' : ''}`,
|
|
||||||
})
|
|
||||||
} catch {}
|
|
||||||
|
|
||||||
const parsed = MarkCompleteSchema.parse(body)
|
|
||||||
|
|
||||||
const messagePreview = (() => {
|
|
||||||
try {
|
|
||||||
const s =
|
|
||||||
typeof parsed.message === 'string' ? parsed.message : JSON.stringify(parsed.message)
|
|
||||||
return s ? `${s.slice(0, 200)}${s.length > 200 ? '...' : ''}` : undefined
|
|
||||||
} catch {
|
|
||||||
return undefined
|
|
||||||
}
|
|
||||||
})()
|
|
||||||
|
|
||||||
logger.info(`[${tracker.requestId}] Forwarding tool mark-complete`, {
|
|
||||||
userId,
|
|
||||||
toolCallId: parsed.id,
|
|
||||||
toolName: parsed.name,
|
|
||||||
status: parsed.status,
|
|
||||||
hasMessage: parsed.message !== undefined,
|
|
||||||
hasData: parsed.data !== undefined,
|
|
||||||
messagePreview,
|
|
||||||
agentUrl: `${SIM_AGENT_API_URL}/api/tools/mark-complete`,
|
|
||||||
})
|
|
||||||
|
|
||||||
const agentRes = await fetch(`${SIM_AGENT_API_URL}/api/tools/mark-complete`, {
|
|
||||||
method: 'POST',
|
|
||||||
headers: {
|
|
||||||
'Content-Type': 'application/json',
|
|
||||||
...(env.COPILOT_API_KEY ? { 'x-api-key': env.COPILOT_API_KEY } : {}),
|
|
||||||
},
|
|
||||||
body: JSON.stringify(parsed),
|
|
||||||
})
|
|
||||||
|
|
||||||
// Attempt to parse agent response JSON
|
|
||||||
let agentJson: any = null
|
|
||||||
let agentText: string | null = null
|
|
||||||
try {
|
|
||||||
agentJson = await agentRes.json()
|
|
||||||
} catch (_) {
|
|
||||||
try {
|
|
||||||
agentText = await agentRes.text()
|
|
||||||
} catch {}
|
|
||||||
}
|
|
||||||
|
|
||||||
logger.info(`[${tracker.requestId}] Agent responded to mark-complete`, {
|
|
||||||
status: agentRes.status,
|
|
||||||
ok: agentRes.ok,
|
|
||||||
responseJsonPreview: agentJson ? JSON.stringify(agentJson).slice(0, 300) : undefined,
|
|
||||||
responseTextPreview: agentText ? agentText.slice(0, 300) : undefined,
|
|
||||||
})
|
|
||||||
|
|
||||||
if (agentRes.ok) {
|
|
||||||
return NextResponse.json({ success: true })
|
|
||||||
}
|
|
||||||
|
|
||||||
const errorMessage =
|
|
||||||
agentJson?.error || agentText || `Agent responded with status ${agentRes.status}`
|
|
||||||
const status = agentRes.status >= 500 ? 500 : 400
|
|
||||||
|
|
||||||
logger.warn(`[${tracker.requestId}] Mark-complete failed`, {
|
|
||||||
status,
|
|
||||||
error: errorMessage,
|
|
||||||
})
|
|
||||||
|
|
||||||
return NextResponse.json({ success: false, error: errorMessage }, { status })
|
|
||||||
} catch (error) {
|
|
||||||
if (error instanceof z.ZodError) {
|
|
||||||
logger.warn(`[${tracker.requestId}] Invalid mark-complete request body`, {
|
|
||||||
issues: error.issues,
|
|
||||||
})
|
|
||||||
return createBadRequestResponse('Invalid request body for mark-complete')
|
|
||||||
}
|
|
||||||
logger.error(`[${tracker.requestId}] Failed to proxy mark-complete:`, error)
|
|
||||||
return createInternalServerErrorResponse('Failed to mark tool as complete')
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -28,6 +28,7 @@ const DEFAULT_ENABLED_MODELS: Record<CopilotModelId, boolean> = {
|
|||||||
'claude-4-sonnet': false,
|
'claude-4-sonnet': false,
|
||||||
'claude-4.5-haiku': true,
|
'claude-4.5-haiku': true,
|
||||||
'claude-4.5-sonnet': true,
|
'claude-4.5-sonnet': true,
|
||||||
|
'claude-4.6-opus': true,
|
||||||
'claude-4.5-opus': true,
|
'claude-4.5-opus': true,
|
||||||
'claude-4.1-opus': false,
|
'claude-4.1-opus': false,
|
||||||
'gemini-3-pro': true,
|
'gemini-3-pro': true,
|
||||||
|
|||||||
@@ -0,0 +1,6 @@
|
|||||||
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
|
import { createMcpAuthorizationServerMetadataResponse } from '@/lib/mcp/oauth-discovery'
|
||||||
|
|
||||||
|
export async function GET(request: NextRequest): Promise<NextResponse> {
|
||||||
|
return createMcpAuthorizationServerMetadataResponse(request)
|
||||||
|
}
|
||||||
@@ -0,0 +1,6 @@
|
|||||||
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
|
import { createMcpProtectedResourceMetadataResponse } from '@/lib/mcp/oauth-discovery'
|
||||||
|
|
||||||
|
export async function GET(request: NextRequest): Promise<NextResponse> {
|
||||||
|
return createMcpProtectedResourceMetadataResponse(request)
|
||||||
|
}
|
||||||
790
apps/sim/app/api/mcp/copilot/route.ts
Normal file
790
apps/sim/app/api/mcp/copilot/route.ts
Normal file
@@ -0,0 +1,790 @@
|
|||||||
|
import { Server } from '@modelcontextprotocol/sdk/server/index.js'
|
||||||
|
import { StreamableHTTPServerTransport } from '@modelcontextprotocol/sdk/server/streamableHttp.js'
|
||||||
|
import {
|
||||||
|
CallToolRequestSchema,
|
||||||
|
type CallToolResult,
|
||||||
|
ErrorCode,
|
||||||
|
type JSONRPCError,
|
||||||
|
type ListToolsResult,
|
||||||
|
ListToolsRequestSchema,
|
||||||
|
McpError,
|
||||||
|
type RequestId,
|
||||||
|
} from '@modelcontextprotocol/sdk/types.js'
|
||||||
|
import { db } from '@sim/db'
|
||||||
|
import { userStats } from '@sim/db/schema'
|
||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { randomUUID } from 'node:crypto'
|
||||||
|
import { eq, sql } from 'drizzle-orm'
|
||||||
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
|
import { getHighestPrioritySubscription } from '@/lib/billing/core/subscription'
|
||||||
|
import { getCopilotModel } from '@/lib/copilot/config'
|
||||||
|
import {
|
||||||
|
ORCHESTRATION_TIMEOUT_MS,
|
||||||
|
SIM_AGENT_API_URL,
|
||||||
|
SIM_AGENT_VERSION,
|
||||||
|
} from '@/lib/copilot/constants'
|
||||||
|
import { RateLimiter } from '@/lib/core/rate-limiter'
|
||||||
|
import { env } from '@/lib/core/config/env'
|
||||||
|
import { orchestrateCopilotStream } from '@/lib/copilot/orchestrator'
|
||||||
|
import { orchestrateSubagentStream } from '@/lib/copilot/orchestrator/subagent'
|
||||||
|
import {
|
||||||
|
executeToolServerSide,
|
||||||
|
prepareExecutionContext,
|
||||||
|
} from '@/lib/copilot/orchestrator/tool-executor'
|
||||||
|
import { DIRECT_TOOL_DEFS, SUBAGENT_TOOL_DEFS } from '@/lib/copilot/tools/mcp/definitions'
|
||||||
|
import { resolveWorkflowIdForUser } from '@/lib/workflows/utils'
|
||||||
|
|
||||||
|
const logger = createLogger('CopilotMcpAPI')
|
||||||
|
const mcpRateLimiter = new RateLimiter()
|
||||||
|
|
||||||
|
export const dynamic = 'force-dynamic'
|
||||||
|
export const runtime = 'nodejs'
|
||||||
|
export const maxDuration = 300
|
||||||
|
|
||||||
|
interface CopilotKeyAuthResult {
|
||||||
|
success: boolean
|
||||||
|
userId?: string
|
||||||
|
error?: string
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Validates a copilot API key by forwarding it to the Go copilot service's
|
||||||
|
* `/api/validate-key` endpoint. Returns the associated userId on success.
|
||||||
|
*/
|
||||||
|
async function authenticateCopilotApiKey(apiKey: string): Promise<CopilotKeyAuthResult> {
|
||||||
|
try {
|
||||||
|
const internalSecret = env.INTERNAL_API_SECRET
|
||||||
|
if (!internalSecret) {
|
||||||
|
logger.error('INTERNAL_API_SECRET not configured')
|
||||||
|
return { success: false, error: 'Server configuration error' }
|
||||||
|
}
|
||||||
|
|
||||||
|
const res = await fetch(`${SIM_AGENT_API_URL}/api/validate-key`, {
|
||||||
|
method: 'POST',
|
||||||
|
headers: {
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
'x-api-key': internalSecret,
|
||||||
|
},
|
||||||
|
body: JSON.stringify({ targetApiKey: apiKey }),
|
||||||
|
signal: AbortSignal.timeout(10_000),
|
||||||
|
})
|
||||||
|
|
||||||
|
if (!res.ok) {
|
||||||
|
const body = await res.json().catch(() => null)
|
||||||
|
const upstream = (body as Record<string, unknown>)?.message
|
||||||
|
const status = res.status
|
||||||
|
|
||||||
|
if (status === 401 || status === 403) {
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
error: `Invalid Copilot API key. Generate a new key in Settings → Copilot and set it in the x-api-key header.`,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (status === 402) {
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
error: `Usage limit exceeded for this Copilot API key. Upgrade your plan or wait for your quota to reset.`,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return { success: false, error: String(upstream ?? 'Copilot API key validation failed') }
|
||||||
|
}
|
||||||
|
|
||||||
|
const data = (await res.json()) as { ok?: boolean; userId?: string }
|
||||||
|
if (!data.ok || !data.userId) {
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
error: 'Invalid Copilot API key. Generate a new key in Settings → Copilot.',
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return { success: true, userId: data.userId }
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('Copilot API key validation failed', { error })
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
error: 'Could not validate Copilot API key — the authentication service is temporarily unreachable. This is NOT a problem with the API key itself; please retry shortly.',
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* MCP Server instructions that guide LLMs on how to use the Sim copilot tools.
|
||||||
|
* This is included in the initialize response to help external LLMs understand
|
||||||
|
* the workflow lifecycle and best practices.
|
||||||
|
*/
|
||||||
|
const MCP_SERVER_INSTRUCTIONS = `
|
||||||
|
## Sim Workflow Copilot
|
||||||
|
|
||||||
|
Sim is a workflow automation platform. Workflows are visual pipelines of connected blocks (Agent, Function, Condition, API, integrations, etc.). The Agent block is the core — an LLM with tools, memory, structured output, and knowledge bases.
|
||||||
|
|
||||||
|
### Workflow Lifecycle (Happy Path)
|
||||||
|
|
||||||
|
1. \`list_workspaces\` → know where to work
|
||||||
|
2. \`create_workflow(name, workspaceId)\` → get a workflowId
|
||||||
|
3. \`sim_build(request, workflowId)\` → plan and build in one pass
|
||||||
|
4. \`sim_test(request, workflowId)\` → verify it works
|
||||||
|
5. \`sim_deploy("deploy as api", workflowId)\` → make it accessible externally (optional)
|
||||||
|
|
||||||
|
For fine-grained control, use \`sim_plan\` → \`sim_edit\` instead of \`sim_build\`. Pass the plan object from sim_plan EXACTLY as-is to sim_edit's context.plan field.
|
||||||
|
|
||||||
|
### Working with Existing Workflows
|
||||||
|
|
||||||
|
When the user refers to a workflow by name or description ("the email one", "my Slack bot"):
|
||||||
|
1. Use \`sim_discovery\` to find it by functionality
|
||||||
|
2. Or use \`list_workflows\` and match by name
|
||||||
|
3. Then pass the workflowId to other tools
|
||||||
|
|
||||||
|
### Organization
|
||||||
|
|
||||||
|
- \`rename_workflow\` — rename a workflow
|
||||||
|
- \`move_workflow\` — move a workflow into a folder (or root with null)
|
||||||
|
- \`move_folder\` — nest a folder inside another (or root with null)
|
||||||
|
- \`create_folder(name, parentId)\` — create nested folder hierarchies
|
||||||
|
|
||||||
|
### Key Rules
|
||||||
|
|
||||||
|
- You can test workflows immediately after building — deployment is only needed for external access (API, chat, MCP).
|
||||||
|
- All copilot tools (build, plan, edit, deploy, test, debug) require workflowId.
|
||||||
|
- If the user reports errors → use \`sim_debug\` first, don't guess.
|
||||||
|
- Variable syntax: \`<blockname.field>\` for block outputs, \`{{ENV_VAR}}\` for env vars.
|
||||||
|
`
|
||||||
|
|
||||||
|
type HeaderMap = Record<string, string | string[] | undefined>
|
||||||
|
|
||||||
|
function createError(id: RequestId, code: ErrorCode | number, message: string): JSONRPCError {
|
||||||
|
return {
|
||||||
|
jsonrpc: '2.0',
|
||||||
|
id,
|
||||||
|
error: { code, message },
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function normalizeRequestHeaders(request: NextRequest): HeaderMap {
|
||||||
|
const headers: HeaderMap = {}
|
||||||
|
|
||||||
|
request.headers.forEach((value, key) => {
|
||||||
|
headers[key.toLowerCase()] = value
|
||||||
|
})
|
||||||
|
|
||||||
|
return headers
|
||||||
|
}
|
||||||
|
|
||||||
|
function readHeader(headers: HeaderMap | undefined, name: string): string | undefined {
|
||||||
|
if (!headers) return undefined
|
||||||
|
const value = headers[name.toLowerCase()]
|
||||||
|
if (Array.isArray(value)) {
|
||||||
|
return value[0]
|
||||||
|
}
|
||||||
|
return value
|
||||||
|
}
|
||||||
|
|
||||||
|
class NextResponseCapture {
|
||||||
|
private _status = 200
|
||||||
|
private _headers = new Headers()
|
||||||
|
private _controller: ReadableStreamDefaultController<Uint8Array> | null = null
|
||||||
|
private _pendingChunks: Uint8Array[] = []
|
||||||
|
private _closeHandlers: Array<() => void> = []
|
||||||
|
private _errorHandlers: Array<(error: Error) => void> = []
|
||||||
|
private _headersWritten = false
|
||||||
|
private _ended = false
|
||||||
|
private _headersPromise: Promise<void>
|
||||||
|
private _resolveHeaders: (() => void) | null = null
|
||||||
|
private _endedPromise: Promise<void>
|
||||||
|
private _resolveEnded: (() => void) | null = null
|
||||||
|
readonly readable: ReadableStream<Uint8Array>
|
||||||
|
|
||||||
|
constructor() {
|
||||||
|
this._headersPromise = new Promise<void>((resolve) => {
|
||||||
|
this._resolveHeaders = resolve
|
||||||
|
})
|
||||||
|
|
||||||
|
this._endedPromise = new Promise<void>((resolve) => {
|
||||||
|
this._resolveEnded = resolve
|
||||||
|
})
|
||||||
|
|
||||||
|
this.readable = new ReadableStream<Uint8Array>({
|
||||||
|
start: (controller) => {
|
||||||
|
this._controller = controller
|
||||||
|
if (this._pendingChunks.length > 0) {
|
||||||
|
for (const chunk of this._pendingChunks) {
|
||||||
|
controller.enqueue(chunk)
|
||||||
|
}
|
||||||
|
this._pendingChunks = []
|
||||||
|
}
|
||||||
|
},
|
||||||
|
cancel: () => {
|
||||||
|
this._ended = true
|
||||||
|
this._resolveEnded?.()
|
||||||
|
this.triggerCloseHandlers()
|
||||||
|
},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
private markHeadersWritten(): void {
|
||||||
|
if (this._headersWritten) return
|
||||||
|
this._headersWritten = true
|
||||||
|
this._resolveHeaders?.()
|
||||||
|
}
|
||||||
|
|
||||||
|
private triggerCloseHandlers(): void {
|
||||||
|
for (const handler of this._closeHandlers) {
|
||||||
|
try {
|
||||||
|
handler()
|
||||||
|
} catch (error) {
|
||||||
|
this.triggerErrorHandlers(error instanceof Error ? error : new Error(String(error)))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private triggerErrorHandlers(error: Error): void {
|
||||||
|
for (const errorHandler of this._errorHandlers) {
|
||||||
|
errorHandler(error)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private normalizeChunk(chunk: unknown): Uint8Array | null {
|
||||||
|
if (typeof chunk === 'string') {
|
||||||
|
return new TextEncoder().encode(chunk)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (chunk instanceof Uint8Array) {
|
||||||
|
return chunk
|
||||||
|
}
|
||||||
|
|
||||||
|
if (chunk === undefined || chunk === null) {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
return new TextEncoder().encode(String(chunk))
|
||||||
|
}
|
||||||
|
|
||||||
|
writeHead(status: number, headers?: Record<string, string | number | string[]>): this {
|
||||||
|
this._status = status
|
||||||
|
|
||||||
|
if (headers) {
|
||||||
|
Object.entries(headers).forEach(([key, value]) => {
|
||||||
|
if (Array.isArray(value)) {
|
||||||
|
this._headers.set(key, value.join(', '))
|
||||||
|
} else {
|
||||||
|
this._headers.set(key, String(value))
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
this.markHeadersWritten()
|
||||||
|
return this
|
||||||
|
}
|
||||||
|
|
||||||
|
flushHeaders(): this {
|
||||||
|
this.markHeadersWritten()
|
||||||
|
return this
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Node-style write shim. Normalizes the chunk to bytes and either enqueues it
 * on the live stream controller or buffers it until the stream starts.
 * Always returns true (no backpressure is modeled).
 */
write(chunk: unknown): boolean {
  const bytes = this.normalizeChunk(chunk)
  if (!bytes) return true

  // First body byte implies headers are committed.
  this.markHeadersWritten()

  if (!this._controller) {
    // Stream not started yet — buffer until the controller becomes available.
    this._pendingChunks.push(bytes)
    return true
  }

  try {
    this._controller.enqueue(bytes)
  } catch (thrown) {
    this.triggerErrorHandlers(thrown instanceof Error ? thrown : new Error(String(thrown)))
  }
  return true
}
|
||||||
|
|
||||||
|
/**
 * Node-style end shim: optionally writes a final chunk, then finalizes the
 * response exactly once (idempotent). Order matters: headers are committed
 * before the ended flag flips, and the stream is closed before close handlers
 * run, mirroring Node's ServerResponse lifecycle.
 */
end(chunk?: unknown): this {
  // Flush any trailing payload through the normal write path first.
  if (chunk !== undefined) this.write(chunk)
  this.markHeadersWritten()
  // Idempotent: a second end() is a no-op.
  if (this._ended) return this

  this._ended = true
  // Wake anyone blocked in waitForEnd().
  this._resolveEnded?.()

  if (this._controller) {
    try {
      this._controller.close()
    } catch (error) {
      this.triggerErrorHandlers(error instanceof Error ? error : new Error(String(error)))
    }
  }

  this.triggerCloseHandlers()

  return this
}
|
||||||
|
|
||||||
|
async waitForHeaders(timeoutMs = 30000): Promise<void> {
|
||||||
|
if (this._headersWritten) return
|
||||||
|
|
||||||
|
await Promise.race([
|
||||||
|
this._headersPromise,
|
||||||
|
new Promise<void>((resolve) => {
|
||||||
|
setTimeout(resolve, timeoutMs)
|
||||||
|
}),
|
||||||
|
])
|
||||||
|
}
|
||||||
|
|
||||||
|
async waitForEnd(timeoutMs = 30000): Promise<void> {
|
||||||
|
if (this._ended) return
|
||||||
|
|
||||||
|
await Promise.race([
|
||||||
|
this._endedPromise,
|
||||||
|
new Promise<void>((resolve) => {
|
||||||
|
setTimeout(resolve, timeoutMs)
|
||||||
|
}),
|
||||||
|
])
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Minimal EventEmitter-compatible subscription: only 'close' and 'error' are
 * supported, matching what the MCP transport attaches to a ServerResponse.
 */
on(event: 'close' | 'error', handler: (() => void) | ((error: Error) => void)): this {
  switch (event) {
    case 'close':
      this._closeHandlers.push(handler as () => void)
      break
    case 'error':
      this._errorHandlers.push(handler as (error: Error) => void)
      break
  }
  return this
}
|
||||||
|
|
||||||
|
/** Converts the captured status/headers/body stream into a NextResponse. */
toNextResponse(): NextResponse {
  const init = {
    status: this._status,
    headers: this._headers,
  }
  return new NextResponse(this.readable, init)
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Constructs a stateless MCP server exposing the copilot's direct and subagent
 * tools. Every tools/call is authenticated (x-api-key), rate-limited, and
 * usage-tracked per request; auth failures are returned as isError results
 * (not thrown) so MCP clients surface the message instead of retrying blindly.
 *
 * @param abortSignal forwarded to tool execution so client disconnects cancel work
 */
function buildMcpServer(abortSignal?: AbortSignal): Server {
  const server = new Server(
    {
      name: 'sim-copilot',
      version: '1.0.0',
    },
    {
      capabilities: { tools: {} },
      instructions: MCP_SERVER_INSTRUCTIONS,
    }
  )

  // tools/list: advertise both direct tools and subagent tools as one flat list.
  server.setRequestHandler(ListToolsRequestSchema, async () => {
    const directTools = DIRECT_TOOL_DEFS.map((tool) => ({
      name: tool.name,
      description: tool.description,
      inputSchema: tool.inputSchema,
    }))

    const subagentTools = SUBAGENT_TOOL_DEFS.map((tool) => ({
      name: tool.name,
      description: tool.description,
      inputSchema: tool.inputSchema,
    }))

    const result: ListToolsResult = {
      tools: [...directTools, ...subagentTools],
    }

    return result
  })

  // tools/call: auth → rate limit → dispatch → usage tracking.
  server.setRequestHandler(CallToolRequestSchema, async (request, extra) => {
    // Transport-level HTTP headers are exposed via extra.requestInfo.
    const headers = (extra.requestInfo?.headers || {}) as HeaderMap
    const apiKeyHeader = readHeader(headers, 'x-api-key')

    if (!apiKeyHeader) {
      // Returned as a tool error (not an exception) so the model/client reads
      // the guidance instead of treating it as a transient failure.
      return {
        content: [
          {
            type: 'text' as const,
            text: 'AUTHENTICATION ERROR: No Copilot API key provided. The user must set their Copilot API key in the x-api-key header. They can generate one in the Sim app under Settings → Copilot. Do NOT retry — this will fail until the key is configured.',
          },
        ],
        isError: true,
      }
    }

    const authResult = await authenticateCopilotApiKey(apiKeyHeader)
    if (!authResult.success || !authResult.userId) {
      logger.warn('MCP copilot key auth failed', { method: request.method })
      return {
        content: [
          {
            type: 'text' as const,
            text: `AUTHENTICATION ERROR: ${authResult.error} Do NOT retry — this will fail until the user fixes their Copilot API key.`,
          },
        ],
        isError: true,
      }
    }

    // Rate limit is subscription-aware; 'api-endpoint' bucket, non-streaming.
    const rateLimitResult = await mcpRateLimiter.checkRateLimitWithSubscription(
      authResult.userId,
      await getHighestPrioritySubscription(authResult.userId),
      'api-endpoint',
      false
    )

    if (!rateLimitResult.allowed) {
      return {
        content: [
          {
            type: 'text' as const,
            text: `RATE LIMIT: Too many requests. Please wait and retry after ${rateLimitResult.resetAt.toISOString()}.`,
          },
        ],
        isError: true,
      }
    }

    const params = request.params as { name?: string; arguments?: Record<string, unknown> } | undefined
    if (!params?.name) {
      // Protocol-level violation → proper JSON-RPC error rather than tool error.
      throw new McpError(ErrorCode.InvalidParams, 'Tool name required')
    }

    const result = await handleToolsCall(
      {
        name: params.name,
        arguments: params.arguments,
      },
      authResult.userId,
      abortSignal
    )

    // Fire-and-forget usage counter; does not delay the response.
    trackMcpCopilotCall(authResult.userId)

    return result
  })

  return server
}
|
||||||
|
|
||||||
|
/**
 * Bridges a Next.js request to the MCP SDK's Node-style HTTP transport.
 *
 * A fresh server + stateless transport is built per request
 * (sessionIdGenerator: undefined), the SDK writes into a NextResponseCapture
 * shim, and once the captured response has ended it is converted back into a
 * NextResponse. Server/transport are always torn down in finally.
 */
async function handleMcpRequestWithSdk(
  request: NextRequest,
  parsedBody: unknown
): Promise<NextResponse> {
  const server = buildMcpServer(request.signal)
  const transport = new StreamableHTTPServerTransport({
    sessionIdGenerator: undefined,
    enableJsonResponse: true,
  })

  const responseCapture = new NextResponseCapture()
  // Minimal IncomingMessage-like object: the SDK only needs method + headers.
  const requestAdapter = {
    method: request.method,
    headers: normalizeRequestHeaders(request),
  }

  await server.connect(transport)

  try {
    // Casts are unavoidable: the SDK expects Node req/res, we pass duck-typed shims.
    await transport.handleRequest(requestAdapter as any, responseCapture as any, parsedBody)
    await responseCapture.waitForHeaders()
    // Must exceed the longest possible tool execution (build = 5 min).
    // Using ORCHESTRATION_TIMEOUT_MS + 60 s buffer so the orchestrator can
    // finish or time-out on its own before the transport is torn down.
    await responseCapture.waitForEnd(ORCHESTRATION_TIMEOUT_MS + 60_000)
    return responseCapture.toNextResponse()
  } finally {
    // Best-effort cleanup; close errors are irrelevant once the response is built.
    await server.close().catch(() => {})
    await transport.close().catch(() => {})
  }
}
|
||||||
|
|
||||||
|
/**
 * GET handler for the MCP endpoint.
 *
 * Returns 405 to signal that server-initiated SSE notifications are not
 * supported. Without this, clients like mcp-remote will repeatedly
 * reconnect trying to open an SSE stream, flooding the logs with GETs.
 */
export async function GET() {
  return new NextResponse(null, { status: 405 })
}
|
||||||
|
|
||||||
|
export async function POST(request: NextRequest) {
|
||||||
|
try {
|
||||||
|
let parsedBody: unknown
|
||||||
|
|
||||||
|
try {
|
||||||
|
parsedBody = await request.json()
|
||||||
|
} catch {
|
||||||
|
return NextResponse.json(createError(0, ErrorCode.ParseError, 'Invalid JSON body'), {
|
||||||
|
status: 400,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
return await handleMcpRequestWithSdk(request, parsedBody)
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('Error handling MCP request', { error })
|
||||||
|
return NextResponse.json(createError(0, ErrorCode.InternalError, 'Internal error'), {
|
||||||
|
status: 500,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function DELETE(request: NextRequest) {
|
||||||
|
void request
|
||||||
|
return NextResponse.json(createError(0, -32000, 'Method not allowed.'), { status: 405 })
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Increment MCP copilot call counter in userStats (fire-and-forget).
 *
 * The update is intentionally not awaited: tracking must never delay or fail
 * the tool response. Failures are only logged.
 */
function trackMcpCopilotCall(userId: string): void {
  db.update(userStats)
    .set({
      // Raw SQL increment avoids a read-modify-write race on the counter.
      totalMcpCopilotCalls: sql`total_mcp_copilot_calls + 1`,
      lastActive: new Date(),
    })
    .where(eq(userStats.userId, userId))
    // .then() kicks off execution of the lazy query builder.
    .then(() => {})
    .catch((error) => {
      logger.error('Failed to track MCP copilot call', { error, userId })
    })
}
|
||||||
|
|
||||||
|
async function handleToolsCall(
|
||||||
|
params: { name: string; arguments?: Record<string, unknown> },
|
||||||
|
userId: string,
|
||||||
|
abortSignal?: AbortSignal
|
||||||
|
): Promise<CallToolResult> {
|
||||||
|
const args = params.arguments || {}
|
||||||
|
|
||||||
|
const directTool = DIRECT_TOOL_DEFS.find((tool) => tool.name === params.name)
|
||||||
|
if (directTool) {
|
||||||
|
return handleDirectToolCall(directTool, args, userId)
|
||||||
|
}
|
||||||
|
|
||||||
|
const subagentTool = SUBAGENT_TOOL_DEFS.find((tool) => tool.name === params.name)
|
||||||
|
if (subagentTool) {
|
||||||
|
return handleSubagentToolCall(subagentTool, args, userId, abortSignal)
|
||||||
|
}
|
||||||
|
|
||||||
|
throw new McpError(ErrorCode.MethodNotFound, `Tool not found: ${params.name}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
async function handleDirectToolCall(
|
||||||
|
toolDef: (typeof DIRECT_TOOL_DEFS)[number],
|
||||||
|
args: Record<string, unknown>,
|
||||||
|
userId: string
|
||||||
|
): Promise<CallToolResult> {
|
||||||
|
try {
|
||||||
|
const execContext = await prepareExecutionContext(userId, (args.workflowId as string) || '')
|
||||||
|
|
||||||
|
const toolCall = {
|
||||||
|
id: randomUUID(),
|
||||||
|
name: toolDef.toolId,
|
||||||
|
status: 'pending' as const,
|
||||||
|
params: args as Record<string, any>,
|
||||||
|
startTime: Date.now(),
|
||||||
|
}
|
||||||
|
|
||||||
|
const result = await executeToolServerSide(toolCall, execContext)
|
||||||
|
|
||||||
|
return {
|
||||||
|
content: [
|
||||||
|
{
|
||||||
|
type: 'text',
|
||||||
|
text: JSON.stringify(result.output ?? result, null, 2),
|
||||||
|
},
|
||||||
|
],
|
||||||
|
isError: !result.success,
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('Direct tool execution failed', { tool: toolDef.name, error })
|
||||||
|
return {
|
||||||
|
content: [
|
||||||
|
{
|
||||||
|
type: 'text',
|
||||||
|
text: `Tool execution failed: ${error instanceof Error ? error.message : String(error)}`,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
isError: true,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Build mode uses the main chat orchestrator with the 'fast' command instead of
 * the subagent endpoint. In Go, 'build' is not a registered subagent — it's a mode
 * (ModeFast) on the main chat processor that bypasses subagent orchestration and
 * executes all tools directly.
 *
 * @param args tool arguments; `request` is the user prompt, `workflowId` optional
 * @param userId authenticated owner of the workflow
 * @param abortSignal cancels orchestration if the MCP client disconnects
 */
async function handleBuildToolCall(
  args: Record<string, unknown>,
  userId: string,
  abortSignal?: AbortSignal
): Promise<CallToolResult> {
  try {
    // Fall back to the raw args blob if no explicit request text was given.
    const requestText = (args.request as string) || JSON.stringify(args)
    const { model } = getCopilotModel('chat')
    const workflowId = args.workflowId as string | undefined

    // Explicit workflowId wins; otherwise resolve one from the user's workflows.
    const resolved = workflowId ? { workflowId } : await resolveWorkflowIdForUser(userId)

    if (!resolved?.workflowId) {
      return {
        content: [
          {
            type: 'text',
            text: JSON.stringify(
              {
                success: false,
                error: 'workflowId is required for build. Call create_workflow first.',
              },
              null,
              2
            ),
          },
        ],
        isError: true,
      }
    }

    // Fresh chat per build call; no conversation continuity over MCP.
    const chatId = randomUUID()

    const requestPayload = {
      message: requestText,
      workflowId: resolved.workflowId,
      userId,
      model,
      // 'agent' mode + 'fast' command = build behavior on the backend.
      mode: 'agent',
      commands: ['fast'],
      messageId: randomUUID(),
      version: SIM_AGENT_VERSION,
      headless: true,
      chatId,
      source: 'mcp',
    }

    const result = await orchestrateCopilotStream(requestPayload, {
      userId,
      workflowId: resolved.workflowId,
      chatId,
      // Headless: tools auto-execute, no interactive confirmation, 5 min cap.
      autoExecuteTools: true,
      timeout: 300000,
      interactive: false,
      abortSignal,
    })

    const responseData = {
      success: result.success,
      content: result.content,
      toolCalls: result.toolCalls,
      error: result.error,
    }

    return {
      content: [{ type: 'text', text: JSON.stringify(responseData, null, 2) }],
      isError: !result.success,
    }
  } catch (error) {
    logger.error('Build tool call failed', { error })
    return {
      content: [
        {
          type: 'text',
          text: `Build failed: ${error instanceof Error ? error.message : String(error)}`,
        },
      ],
      isError: true,
    }
  }
}
|
||||||
|
|
||||||
|
/**
 * Runs a subagent tool via the subagent orchestrator and converts the result
 * into an MCP text payload. The 'build' agent is special-cased onto the main
 * chat orchestrator (see handleBuildToolCall). Errors are returned as isError
 * results, never thrown.
 */
async function handleSubagentToolCall(
  toolDef: (typeof SUBAGENT_TOOL_DEFS)[number],
  args: Record<string, unknown>,
  userId: string,
  abortSignal?: AbortSignal
): Promise<CallToolResult> {
  // 'build' is a chat mode, not a real subagent — route it separately.
  if (toolDef.agentId === 'build') {
    return handleBuildToolCall(args, userId, abortSignal)
  }

  try {
    // Different subagents name their prompt field differently; try each,
    // then fall back to the raw args blob.
    const requestText =
      (args.request as string) ||
      (args.message as string) ||
      (args.error as string) ||
      JSON.stringify(args)

    const context = (args.context as Record<string, unknown>) || {}
    // Promote a top-level plan into context unless the caller already set one.
    if (args.plan && !context.plan) {
      context.plan = args.plan
    }

    const { model } = getCopilotModel('chat')

    const result = await orchestrateSubagentStream(
      toolDef.agentId,
      {
        message: requestText,
        workflowId: args.workflowId,
        workspaceId: args.workspaceId,
        context,
        model,
        headless: true,
        source: 'mcp',
      },
      {
        userId,
        workflowId: args.workflowId as string | undefined,
        workspaceId: args.workspaceId as string | undefined,
        abortSignal,
      }
    )

    let responseData: unknown

    // Prefer the structured result when the subagent produced one; otherwise
    // fall back to the error shape, then to plain content.
    if (result.structuredResult) {
      responseData = {
        success: result.structuredResult.success ?? result.success,
        type: result.structuredResult.type,
        summary: result.structuredResult.summary,
        data: result.structuredResult.data,
      }
    } else if (result.error) {
      responseData = {
        success: false,
        error: result.error,
        errors: result.errors,
      }
    } else {
      responseData = {
        success: result.success,
        content: result.content,
      }
    }

    return {
      content: [
        {
          type: 'text',
          text: JSON.stringify(responseData, null, 2),
        },
      ],
      isError: !result.success,
    }
  } catch (error) {
    logger.error('Subagent tool call failed', {
      tool: toolDef.name,
      agentId: toolDef.agentId,
      error,
    })

    return {
      content: [
        {
          type: 'text',
          text: `Subagent call failed: ${error instanceof Error ? error.message : String(error)}`,
        },
      ],
      isError: true,
    }
  }
}
|
||||||
114
apps/sim/app/api/v1/copilot/chat/route.ts
Normal file
114
apps/sim/app/api/v1/copilot/chat/route.ts
Normal file
@@ -0,0 +1,114 @@
|
|||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
|
import { z } from 'zod'
|
||||||
|
import { getCopilotModel } from '@/lib/copilot/config'
|
||||||
|
import { SIM_AGENT_VERSION } from '@/lib/copilot/constants'
|
||||||
|
import { COPILOT_REQUEST_MODES } from '@/lib/copilot/models'
|
||||||
|
import { orchestrateCopilotStream } from '@/lib/copilot/orchestrator'
|
||||||
|
import { resolveWorkflowIdForUser } from '@/lib/workflows/utils'
|
||||||
|
import { authenticateV1Request } from '@/app/api/v1/auth'
|
||||||
|
|
||||||
|
const logger = createLogger('CopilotHeadlessAPI')
|
||||||
|
|
||||||
|
// Zod schema for POST /api/v1/copilot/chat request bodies.
const RequestSchema = z.object({
  message: z.string().min(1, 'message is required'),
  // Target workflow; optional — see POST doc for the resolution order.
  workflowId: z.string().optional(),
  // Alternative lookup by name when no workflowId is given.
  workflowName: z.string().optional(),
  // Reuse a chatId for conversation continuity; generated when absent.
  chatId: z.string().optional(),
  mode: z.enum(COPILOT_REQUEST_MODES).optional().default('agent'),
  // Overrides the configured default copilot model when provided.
  model: z.string().optional(),
  autoExecuteTools: z.boolean().optional().default(true),
  // Orchestration timeout in milliseconds (default 5 minutes).
  timeout: z.number().optional().default(300000),
})
|
||||||
|
|
||||||
|
/**
 * POST /api/v1/copilot/chat
 * Headless copilot endpoint for server-side orchestration.
 *
 * workflowId is optional - if not provided:
 * - If workflowName is provided, finds that workflow
 * - Otherwise uses the user's first workflow as context
 * - The copilot can still operate on any workflow using list_user_workflows
 */
export async function POST(req: NextRequest) {
  const auth = await authenticateV1Request(req)
  if (!auth.authenticated || !auth.userId) {
    return NextResponse.json(
      { success: false, error: auth.error || 'Unauthorized' },
      { status: 401 }
    )
  }

  try {
    const body = await req.json()
    // Throws ZodError on invalid input — handled in the catch below.
    const parsed = RequestSchema.parse(body)
    const defaults = getCopilotModel('chat')
    const selectedModel = parsed.model || defaults.model

    // Resolve workflow ID
    const resolved = await resolveWorkflowIdForUser(
      auth.userId,
      parsed.workflowId,
      parsed.workflowName
    )
    if (!resolved) {
      return NextResponse.json(
        {
          success: false,
          error: 'No workflows found. Create a workflow first or provide a valid workflowId.',
        },
        { status: 400 }
      )
    }

    // Transform mode to transport mode (same as client API)
    // build and agent both map to 'agent' on the backend
    // NOTE(review): 'agent' → 'build' → 'agent' is a round-trip no-op; only
    // other modes pass through unchanged. Presumably kept to mirror the
    // client API's mapping — confirm before simplifying.
    const effectiveMode = parsed.mode === 'agent' ? 'build' : parsed.mode
    const transportMode = effectiveMode === 'build' ? 'agent' : effectiveMode

    // Always generate a chatId - required for artifacts system to work with subagents
    const chatId = parsed.chatId || crypto.randomUUID()

    const requestPayload = {
      message: parsed.message,
      workflowId: resolved.workflowId,
      userId: auth.userId,
      model: selectedModel,
      mode: transportMode,
      messageId: crypto.randomUUID(),
      version: SIM_AGENT_VERSION,
      headless: true,
      chatId,
    }

    const result = await orchestrateCopilotStream(requestPayload, {
      userId: auth.userId,
      workflowId: resolved.workflowId,
      chatId,
      autoExecuteTools: parsed.autoExecuteTools,
      timeout: parsed.timeout,
      interactive: false,
    })

    return NextResponse.json({
      success: result.success,
      content: result.content,
      toolCalls: result.toolCalls,
      chatId: result.chatId || chatId, // Return the chatId for conversation continuity
      conversationId: result.conversationId,
      error: result.error,
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      return NextResponse.json(
        { success: false, error: 'Invalid request', details: error.errors },
        { status: 400 }
      )
    }

    logger.error('Headless copilot request failed', {
      error: error instanceof Error ? error.message : String(error),
    })
    return NextResponse.json({ success: false, error: 'Internal server error' }, { status: 500 })
  }
}
|
||||||
@@ -211,7 +211,7 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
|
|||||||
if (block.type === 'text') {
|
if (block.type === 'text') {
|
||||||
const isLastTextBlock =
|
const isLastTextBlock =
|
||||||
index === message.contentBlocks!.length - 1 && block.type === 'text'
|
index === message.contentBlocks!.length - 1 && block.type === 'text'
|
||||||
const parsed = parseSpecialTags(block.content)
|
const parsed = parseSpecialTags(block.content ?? '')
|
||||||
// Mask credential IDs in the displayed content
|
// Mask credential IDs in the displayed content
|
||||||
const cleanBlockContent = maskCredentialValue(
|
const cleanBlockContent = maskCredentialValue(
|
||||||
parsed.cleanContent.replace(/\n{3,}/g, '\n\n')
|
parsed.cleanContent.replace(/\n{3,}/g, '\n\n')
|
||||||
@@ -243,7 +243,7 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
|
|||||||
return (
|
return (
|
||||||
<div key={blockKey} className='w-full'>
|
<div key={blockKey} className='w-full'>
|
||||||
<ThinkingBlock
|
<ThinkingBlock
|
||||||
content={maskCredentialValue(block.content)}
|
content={maskCredentialValue(block.content ?? '')}
|
||||||
isStreaming={isActivelyStreaming}
|
isStreaming={isActivelyStreaming}
|
||||||
hasFollowingContent={hasFollowingContent}
|
hasFollowingContent={hasFollowingContent}
|
||||||
hasSpecialTags={hasSpecialTags}
|
hasSpecialTags={hasSpecialTags}
|
||||||
@@ -251,7 +251,7 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
|
|||||||
</div>
|
</div>
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
if (block.type === 'tool_call') {
|
if (block.type === 'tool_call' && block.toolCall) {
|
||||||
const blockKey = `tool-${block.toolCall.id}`
|
const blockKey = `tool-${block.toolCall.id}`
|
||||||
|
|
||||||
return (
|
return (
|
||||||
|
|||||||
@@ -1,20 +1,15 @@
|
|||||||
'use client'
|
'use client'
|
||||||
|
|
||||||
import { memo, useEffect, useMemo, useRef, useState } from 'react'
|
import { memo, useEffect, useMemo, useRef, useState } from 'react'
|
||||||
|
import { createLogger } from '@sim/logger'
|
||||||
import clsx from 'clsx'
|
import clsx from 'clsx'
|
||||||
import { ChevronUp, LayoutList } from 'lucide-react'
|
import { ChevronUp, LayoutList } from 'lucide-react'
|
||||||
import Editor from 'react-simple-code-editor'
|
import Editor from 'react-simple-code-editor'
|
||||||
import { Button, Code, getCodeEditorProps, highlight, languages } from '@/components/emcn'
|
import { Button, Code, getCodeEditorProps, highlight, languages } from '@/components/emcn'
|
||||||
import { ClientToolCallState } from '@/lib/copilot/tools/client/base-tool'
|
|
||||||
import { getClientTool } from '@/lib/copilot/tools/client/manager'
|
|
||||||
import { getRegisteredTools } from '@/lib/copilot/tools/client/registry'
|
|
||||||
import '@/lib/copilot/tools/client/init-tool-configs'
|
|
||||||
import {
|
import {
|
||||||
getSubagentLabels as getSubagentLabelsFromConfig,
|
ClientToolCallState,
|
||||||
getToolUIConfig,
|
TOOL_DISPLAY_REGISTRY,
|
||||||
hasInterrupt as hasInterruptFromConfig,
|
} from '@/lib/copilot/tools/client/tool-display-registry'
|
||||||
isSpecialTool as isSpecialToolFromConfig,
|
|
||||||
} from '@/lib/copilot/tools/client/ui-config'
|
|
||||||
import { formatDuration } from '@/lib/core/utils/formatting'
|
import { formatDuration } from '@/lib/core/utils/formatting'
|
||||||
import { CopilotMarkdownRenderer } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/markdown-renderer'
|
import { CopilotMarkdownRenderer } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/markdown-renderer'
|
||||||
import { SmoothStreamingText } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/smooth-streaming'
|
import { SmoothStreamingText } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/smooth-streaming'
|
||||||
@@ -25,7 +20,6 @@ import { getDisplayValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/co
|
|||||||
import { getBlock } from '@/blocks/registry'
|
import { getBlock } from '@/blocks/registry'
|
||||||
import type { CopilotToolCall } from '@/stores/panel'
|
import type { CopilotToolCall } from '@/stores/panel'
|
||||||
import { useCopilotStore } from '@/stores/panel'
|
import { useCopilotStore } from '@/stores/panel'
|
||||||
import { CLASS_TOOL_METADATA } from '@/stores/panel/copilot/store'
|
|
||||||
import type { SubAgentContentBlock } from '@/stores/panel/copilot/types'
|
import type { SubAgentContentBlock } from '@/stores/panel/copilot/types'
|
||||||
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
|
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
|
||||||
|
|
||||||
@@ -710,8 +704,8 @@ const ShimmerOverlayText = memo(function ShimmerOverlayText({
|
|||||||
* @returns The completion label from UI config, defaults to 'Thought'
|
* @returns The completion label from UI config, defaults to 'Thought'
|
||||||
*/
|
*/
|
||||||
function getSubagentCompletionLabel(toolName: string): string {
|
function getSubagentCompletionLabel(toolName: string): string {
|
||||||
const labels = getSubagentLabelsFromConfig(toolName, false)
|
const labels = TOOL_DISPLAY_REGISTRY[toolName]?.uiConfig?.subagentLabels
|
||||||
return labels?.completed ?? 'Thought'
|
return labels?.completed || 'Thought'
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -943,7 +937,7 @@ const SubagentContentRenderer = memo(function SubagentContentRenderer({
|
|||||||
* Determines if a tool call should display with special gradient styling.
|
* Determines if a tool call should display with special gradient styling.
|
||||||
*/
|
*/
|
||||||
function isSpecialToolCall(toolCall: CopilotToolCall): boolean {
|
function isSpecialToolCall(toolCall: CopilotToolCall): boolean {
|
||||||
return isSpecialToolFromConfig(toolCall.name)
|
return TOOL_DISPLAY_REGISTRY[toolCall.name]?.uiConfig?.isSpecial === true
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -1223,28 +1217,11 @@ const WorkflowEditSummary = memo(function WorkflowEditSummary({
|
|||||||
|
|
||||||
/** Checks if a tool is server-side executed (not a client tool) */
|
/** Checks if a tool is server-side executed (not a client tool) */
|
||||||
function isIntegrationTool(toolName: string): boolean {
|
function isIntegrationTool(toolName: string): boolean {
|
||||||
return !CLASS_TOOL_METADATA[toolName]
|
return !TOOL_DISPLAY_REGISTRY[toolName]
|
||||||
}
|
}
|
||||||
|
|
||||||
function shouldShowRunSkipButtons(toolCall: CopilotToolCall): boolean {
|
function shouldShowRunSkipButtons(toolCall: CopilotToolCall): boolean {
|
||||||
if (hasInterruptFromConfig(toolCall.name) && toolCall.state === 'pending') {
|
const hasInterrupt = TOOL_DISPLAY_REGISTRY[toolCall.name]?.uiConfig?.interrupt === true
|
||||||
return true
|
|
||||||
}
|
|
||||||
|
|
||||||
const instance = getClientTool(toolCall.id)
|
|
||||||
let hasInterrupt = !!instance?.getInterruptDisplays?.()
|
|
||||||
if (!hasInterrupt) {
|
|
||||||
try {
|
|
||||||
const def = getRegisteredTools()[toolCall.name]
|
|
||||||
if (def) {
|
|
||||||
hasInterrupt =
|
|
||||||
typeof def.hasInterrupt === 'function'
|
|
||||||
? !!def.hasInterrupt(toolCall.params || {})
|
|
||||||
: !!def.hasInterrupt
|
|
||||||
}
|
|
||||||
} catch {}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (hasInterrupt && toolCall.state === 'pending') {
|
if (hasInterrupt && toolCall.state === 'pending') {
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
@@ -1257,109 +1234,50 @@ function shouldShowRunSkipButtons(toolCall: CopilotToolCall): boolean {
|
|||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const toolCallLogger = createLogger('CopilotToolCall')
|
||||||
|
|
||||||
|
async function sendToolDecision(
|
||||||
|
toolCallId: string,
|
||||||
|
status: 'accepted' | 'rejected' | 'background'
|
||||||
|
) {
|
||||||
|
try {
|
||||||
|
await fetch('/api/copilot/confirm', {
|
||||||
|
method: 'POST',
|
||||||
|
headers: { 'Content-Type': 'application/json' },
|
||||||
|
body: JSON.stringify({ toolCallId, status }),
|
||||||
|
})
|
||||||
|
} catch (error) {
|
||||||
|
toolCallLogger.warn('Failed to send tool decision', {
|
||||||
|
toolCallId,
|
||||||
|
status,
|
||||||
|
error: error instanceof Error ? error.message : String(error),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
async function handleRun(
|
async function handleRun(
|
||||||
toolCall: CopilotToolCall,
|
toolCall: CopilotToolCall,
|
||||||
setToolCallState: any,
|
setToolCallState: any,
|
||||||
onStateChange?: any,
|
onStateChange?: any,
|
||||||
editedParams?: any
|
editedParams?: any
|
||||||
) {
|
) {
|
||||||
const instance = getClientTool(toolCall.id)
|
setToolCallState(toolCall, 'executing', editedParams ? { params: editedParams } : undefined)
|
||||||
|
onStateChange?.('executing')
|
||||||
if (!instance && isIntegrationTool(toolCall.name)) {
|
await sendToolDecision(toolCall.id, 'accepted')
|
||||||
onStateChange?.('executing')
|
|
||||||
try {
|
|
||||||
await useCopilotStore.getState().executeIntegrationTool(toolCall.id)
|
|
||||||
} catch (e) {
|
|
||||||
setToolCallState(toolCall, 'error', { error: e instanceof Error ? e.message : String(e) })
|
|
||||||
onStateChange?.('error')
|
|
||||||
try {
|
|
||||||
await fetch('/api/copilot/tools/mark-complete', {
|
|
||||||
method: 'POST',
|
|
||||||
headers: { 'Content-Type': 'application/json' },
|
|
||||||
body: JSON.stringify({
|
|
||||||
id: toolCall.id,
|
|
||||||
name: toolCall.name,
|
|
||||||
status: 500,
|
|
||||||
message: e instanceof Error ? e.message : 'Tool execution failed',
|
|
||||||
data: { error: e instanceof Error ? e.message : String(e) },
|
|
||||||
}),
|
|
||||||
})
|
|
||||||
} catch {
|
|
||||||
console.error('[handleRun] Failed to notify backend of tool error:', toolCall.id)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!instance) return
|
|
||||||
try {
|
|
||||||
const mergedParams =
|
|
||||||
editedParams ||
|
|
||||||
(toolCall as any).params ||
|
|
||||||
(toolCall as any).parameters ||
|
|
||||||
(toolCall as any).input ||
|
|
||||||
{}
|
|
||||||
await instance.handleAccept?.(mergedParams)
|
|
||||||
onStateChange?.('executing')
|
|
||||||
} catch (e) {
|
|
||||||
setToolCallState(toolCall, 'error', { error: e instanceof Error ? e.message : String(e) })
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
async function handleSkip(toolCall: CopilotToolCall, setToolCallState: any, onStateChange?: any) {
|
async function handleSkip(toolCall: CopilotToolCall, setToolCallState: any, onStateChange?: any) {
|
||||||
const instance = getClientTool(toolCall.id)
|
|
||||||
|
|
||||||
if (!instance && isIntegrationTool(toolCall.name)) {
|
|
||||||
setToolCallState(toolCall, 'rejected')
|
|
||||||
onStateChange?.('rejected')
|
|
||||||
|
|
||||||
let notified = false
|
|
||||||
for (let attempt = 0; attempt < 3 && !notified; attempt++) {
|
|
||||||
try {
|
|
||||||
const res = await fetch('/api/copilot/tools/mark-complete', {
|
|
||||||
method: 'POST',
|
|
||||||
headers: { 'Content-Type': 'application/json' },
|
|
||||||
body: JSON.stringify({
|
|
||||||
id: toolCall.id,
|
|
||||||
name: toolCall.name,
|
|
||||||
status: 400,
|
|
||||||
message: 'Tool execution skipped by user',
|
|
||||||
data: { skipped: true, reason: 'user_skipped' },
|
|
||||||
}),
|
|
||||||
})
|
|
||||||
if (res.ok) {
|
|
||||||
notified = true
|
|
||||||
}
|
|
||||||
} catch (e) {
|
|
||||||
if (attempt < 2) {
|
|
||||||
await new Promise((resolve) => setTimeout(resolve, 500))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!notified) {
|
|
||||||
console.error('[handleSkip] Failed to notify backend after 3 attempts:', toolCall.id)
|
|
||||||
}
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
if (instance) {
|
|
||||||
try {
|
|
||||||
await instance.handleReject?.()
|
|
||||||
} catch {}
|
|
||||||
}
|
|
||||||
setToolCallState(toolCall, 'rejected')
|
setToolCallState(toolCall, 'rejected')
|
||||||
onStateChange?.('rejected')
|
onStateChange?.('rejected')
|
||||||
|
await sendToolDecision(toolCall.id, 'rejected')
|
||||||
}
|
}
|
||||||
|
|
||||||
function getDisplayName(toolCall: CopilotToolCall): string {
|
function getDisplayName(toolCall: CopilotToolCall): string {
|
||||||
const fromStore = (toolCall as any).display?.text
|
const fromStore = (toolCall as any).display?.text
|
||||||
if (fromStore) return fromStore
|
if (fromStore) return fromStore
|
||||||
try {
|
const registryEntry = TOOL_DISPLAY_REGISTRY[toolCall.name]
|
||||||
const def = getRegisteredTools()[toolCall.name] as any
|
const byState = registryEntry?.displayNames?.[toolCall.state as ClientToolCallState]
|
||||||
const byState = def?.metadata?.displayNames?.[toolCall.state]
|
if (byState?.text) return byState.text
|
||||||
if (byState?.text) return byState.text
|
|
||||||
} catch {}
|
|
||||||
|
|
||||||
const stateVerb = getStateVerb(toolCall.state)
|
const stateVerb = getStateVerb(toolCall.state)
|
||||||
const formattedName = formatToolName(toolCall.name)
|
const formattedName = formatToolName(toolCall.name)
|
||||||
@@ -1509,7 +1427,7 @@ export function ToolCall({
|
|||||||
// Check if this integration tool is auto-allowed
|
// Check if this integration tool is auto-allowed
|
||||||
// Subscribe to autoAllowedTools so we re-render when it changes
|
// Subscribe to autoAllowedTools so we re-render when it changes
|
||||||
const autoAllowedTools = useCopilotStore((s) => s.autoAllowedTools)
|
const autoAllowedTools = useCopilotStore((s) => s.autoAllowedTools)
|
||||||
const { removeAutoAllowedTool } = useCopilotStore()
|
const { removeAutoAllowedTool, setToolCallState } = useCopilotStore()
|
||||||
const isAutoAllowed = isIntegrationTool(toolCall.name) && autoAllowedTools.includes(toolCall.name)
|
const isAutoAllowed = isIntegrationTool(toolCall.name) && autoAllowedTools.includes(toolCall.name)
|
||||||
|
|
||||||
// Update edited params when toolCall params change (deep comparison to avoid resetting user edits on ref change)
|
// Update edited params when toolCall params change (deep comparison to avoid resetting user edits on ref change)
|
||||||
@@ -1526,34 +1444,12 @@ export function ToolCall({
|
|||||||
toolCall.name === 'mark_todo_in_progress' ||
|
toolCall.name === 'mark_todo_in_progress' ||
|
||||||
toolCall.name === 'tool_search_tool_regex' ||
|
toolCall.name === 'tool_search_tool_regex' ||
|
||||||
toolCall.name === 'user_memory' ||
|
toolCall.name === 'user_memory' ||
|
||||||
toolCall.name === 'edit_respond' ||
|
toolCall.name.endsWith('_respond')
|
||||||
toolCall.name === 'debug_respond' ||
|
|
||||||
toolCall.name === 'plan_respond' ||
|
|
||||||
toolCall.name === 'research_respond' ||
|
|
||||||
toolCall.name === 'info_respond' ||
|
|
||||||
toolCall.name === 'deploy_respond' ||
|
|
||||||
toolCall.name === 'superagent_respond'
|
|
||||||
)
|
)
|
||||||
return null
|
return null
|
||||||
|
|
||||||
// Special rendering for subagent tools - show as thinking text with tool calls at top level
|
// Special rendering for subagent tools - show as thinking text with tool calls at top level
|
||||||
const SUBAGENT_TOOLS = [
|
const isSubagentTool = TOOL_DISPLAY_REGISTRY[toolCall.name]?.uiConfig?.subagent === true
|
||||||
'plan',
|
|
||||||
'edit',
|
|
||||||
'debug',
|
|
||||||
'test',
|
|
||||||
'deploy',
|
|
||||||
'evaluate',
|
|
||||||
'auth',
|
|
||||||
'research',
|
|
||||||
'knowledge',
|
|
||||||
'custom_tool',
|
|
||||||
'tour',
|
|
||||||
'info',
|
|
||||||
'workflow',
|
|
||||||
'superagent',
|
|
||||||
]
|
|
||||||
const isSubagentTool = SUBAGENT_TOOLS.includes(toolCall.name)
|
|
||||||
|
|
||||||
// For ALL subagent tools, don't show anything until we have blocks with content
|
// For ALL subagent tools, don't show anything until we have blocks with content
|
||||||
if (isSubagentTool) {
|
if (isSubagentTool) {
|
||||||
@@ -1593,17 +1489,18 @@ export function ToolCall({
|
|||||||
stateStr === 'aborted'
|
stateStr === 'aborted'
|
||||||
|
|
||||||
// Allow rendering if:
|
// Allow rendering if:
|
||||||
// 1. Tool is in CLASS_TOOL_METADATA (client tools), OR
|
// 1. Tool is in TOOL_DISPLAY_REGISTRY (client tools), OR
|
||||||
// 2. We're in build mode (integration tools are executed server-side), OR
|
// 2. We're in build mode (integration tools are executed server-side), OR
|
||||||
// 3. Tool call is already completed (historical - should always render)
|
// 3. Tool call is already completed (historical - should always render)
|
||||||
const isClientTool = !!CLASS_TOOL_METADATA[toolCall.name]
|
const isClientTool = !!TOOL_DISPLAY_REGISTRY[toolCall.name]
|
||||||
const isIntegrationToolInBuildMode = mode === 'build' && !isClientTool
|
const isIntegrationToolInBuildMode = mode === 'build' && !isClientTool
|
||||||
|
|
||||||
if (!isClientTool && !isIntegrationToolInBuildMode && !isCompletedToolCall) {
|
if (!isClientTool && !isIntegrationToolInBuildMode && !isCompletedToolCall) {
|
||||||
return null
|
return null
|
||||||
}
|
}
|
||||||
|
const toolUIConfig = TOOL_DISPLAY_REGISTRY[toolCall.name]?.uiConfig
|
||||||
// Check if tool has params table config (meaning it's expandable)
|
// Check if tool has params table config (meaning it's expandable)
|
||||||
const hasParamsTable = !!getToolUIConfig(toolCall.name)?.paramsTable
|
const hasParamsTable = !!toolUIConfig?.paramsTable
|
||||||
const isRunWorkflow = toolCall.name === 'run_workflow'
|
const isRunWorkflow = toolCall.name === 'run_workflow'
|
||||||
const isExpandableTool =
|
const isExpandableTool =
|
||||||
hasParamsTable ||
|
hasParamsTable ||
|
||||||
@@ -1613,7 +1510,6 @@ export function ToolCall({
|
|||||||
const showButtons = isCurrentMessage && shouldShowRunSkipButtons(toolCall)
|
const showButtons = isCurrentMessage && shouldShowRunSkipButtons(toolCall)
|
||||||
|
|
||||||
// Check UI config for secondary action - only show for current message tool calls
|
// Check UI config for secondary action - only show for current message tool calls
|
||||||
const toolUIConfig = getToolUIConfig(toolCall.name)
|
|
||||||
const secondaryAction = toolUIConfig?.secondaryAction
|
const secondaryAction = toolUIConfig?.secondaryAction
|
||||||
const showSecondaryAction = secondaryAction?.showInStates.includes(
|
const showSecondaryAction = secondaryAction?.showInStates.includes(
|
||||||
toolCall.state as ClientToolCallState
|
toolCall.state as ClientToolCallState
|
||||||
@@ -2211,16 +2107,9 @@ export function ToolCall({
|
|||||||
<div className='mt-[10px]'>
|
<div className='mt-[10px]'>
|
||||||
<Button
|
<Button
|
||||||
onClick={async () => {
|
onClick={async () => {
|
||||||
try {
|
setToolCallState(toolCall, ClientToolCallState.background)
|
||||||
const instance = getClientTool(toolCall.id)
|
onStateChange?.('background')
|
||||||
instance?.setState?.((ClientToolCallState as any).background)
|
await sendToolDecision(toolCall.id, 'background')
|
||||||
await instance?.markToolComplete?.(
|
|
||||||
200,
|
|
||||||
'The user has chosen to move the workflow execution to the background. Check back with them later to know when the workflow execution is complete'
|
|
||||||
)
|
|
||||||
forceUpdate({})
|
|
||||||
onStateChange?.('background')
|
|
||||||
} catch {}
|
|
||||||
}}
|
}}
|
||||||
variant='tertiary'
|
variant='tertiary'
|
||||||
title='Move to Background'
|
title='Move to Background'
|
||||||
@@ -2232,21 +2121,9 @@ export function ToolCall({
|
|||||||
<div className='mt-[10px]'>
|
<div className='mt-[10px]'>
|
||||||
<Button
|
<Button
|
||||||
onClick={async () => {
|
onClick={async () => {
|
||||||
try {
|
setToolCallState(toolCall, ClientToolCallState.background)
|
||||||
const instance = getClientTool(toolCall.id)
|
onStateChange?.('background')
|
||||||
const elapsedSeconds = instance?.getElapsedSeconds?.() || 0
|
await sendToolDecision(toolCall.id, 'background')
|
||||||
instance?.setState?.((ClientToolCallState as any).background, {
|
|
||||||
result: { _elapsedSeconds: elapsedSeconds },
|
|
||||||
})
|
|
||||||
const { updateToolCallParams } = useCopilotStore.getState()
|
|
||||||
updateToolCallParams?.(toolCall.id, { _elapsedSeconds: Math.round(elapsedSeconds) })
|
|
||||||
await instance?.markToolComplete?.(
|
|
||||||
200,
|
|
||||||
`User woke you up after ${Math.round(elapsedSeconds)} seconds`
|
|
||||||
)
|
|
||||||
forceUpdate({})
|
|
||||||
onStateChange?.('background')
|
|
||||||
} catch {}
|
|
||||||
}}
|
}}
|
||||||
variant='tertiary'
|
variant='tertiary'
|
||||||
title='Wake'
|
title='Wake'
|
||||||
|
|||||||
@@ -246,6 +246,7 @@ export function getCommandDisplayLabel(commandId: string): string {
|
|||||||
* Model configuration options
|
* Model configuration options
|
||||||
*/
|
*/
|
||||||
export const MODEL_OPTIONS = [
|
export const MODEL_OPTIONS = [
|
||||||
|
{ value: 'claude-4.6-opus', label: 'Claude 4.6 Opus' },
|
||||||
{ value: 'claude-4.5-opus', label: 'Claude 4.5 Opus' },
|
{ value: 'claude-4.5-opus', label: 'Claude 4.5 Opus' },
|
||||||
{ value: 'claude-4.5-sonnet', label: 'Claude 4.5 Sonnet' },
|
{ value: 'claude-4.5-sonnet', label: 'Claude 4.5 Sonnet' },
|
||||||
{ value: 'claude-4.5-haiku', label: 'Claude 4.5 Haiku' },
|
{ value: 'claude-4.5-haiku', label: 'Claude 4.5 Haiku' },
|
||||||
|
|||||||
@@ -107,13 +107,13 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
|
|||||||
currentChat,
|
currentChat,
|
||||||
selectChat,
|
selectChat,
|
||||||
deleteChat,
|
deleteChat,
|
||||||
areChatsFresh,
|
|
||||||
workflowId: copilotWorkflowId,
|
workflowId: copilotWorkflowId,
|
||||||
setPlanTodos,
|
setPlanTodos,
|
||||||
closePlanTodos,
|
closePlanTodos,
|
||||||
clearPlanArtifact,
|
clearPlanArtifact,
|
||||||
savePlanArtifact,
|
savePlanArtifact,
|
||||||
loadAutoAllowedTools,
|
loadAutoAllowedTools,
|
||||||
|
resumeActiveStream,
|
||||||
} = useCopilotStore()
|
} = useCopilotStore()
|
||||||
|
|
||||||
// Initialize copilot
|
// Initialize copilot
|
||||||
@@ -126,6 +126,7 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
|
|||||||
loadAutoAllowedTools,
|
loadAutoAllowedTools,
|
||||||
currentChat,
|
currentChat,
|
||||||
isSendingMessage,
|
isSendingMessage,
|
||||||
|
resumeActiveStream,
|
||||||
})
|
})
|
||||||
|
|
||||||
// Handle scroll management (80px stickiness for copilot)
|
// Handle scroll management (80px stickiness for copilot)
|
||||||
@@ -140,7 +141,6 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
|
|||||||
activeWorkflowId,
|
activeWorkflowId,
|
||||||
copilotWorkflowId,
|
copilotWorkflowId,
|
||||||
loadChats,
|
loadChats,
|
||||||
areChatsFresh,
|
|
||||||
isSendingMessage,
|
isSendingMessage,
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
@@ -421,8 +421,8 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
|
|||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
{/* Show loading state until fully initialized */}
|
{/* Show loading state until fully initialized, but skip if actively streaming (resume case) */}
|
||||||
{!isInitialized ? (
|
{!isInitialized && !isSendingMessage ? (
|
||||||
<div className='flex h-full w-full items-center justify-center'>
|
<div className='flex h-full w-full items-center justify-center'>
|
||||||
<div className='flex flex-col items-center gap-3'>
|
<div className='flex flex-col items-center gap-3'>
|
||||||
<p className='text-muted-foreground text-sm'>Loading copilot</p>
|
<p className='text-muted-foreground text-sm'>Loading copilot</p>
|
||||||
|
|||||||
@@ -10,7 +10,6 @@ interface UseChatHistoryProps {
|
|||||||
activeWorkflowId: string | null
|
activeWorkflowId: string | null
|
||||||
copilotWorkflowId: string | null
|
copilotWorkflowId: string | null
|
||||||
loadChats: (forceRefresh: boolean) => Promise<void>
|
loadChats: (forceRefresh: boolean) => Promise<void>
|
||||||
areChatsFresh: (workflowId: string) => boolean
|
|
||||||
isSendingMessage: boolean
|
isSendingMessage: boolean
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -21,8 +20,7 @@ interface UseChatHistoryProps {
|
|||||||
* @returns Chat history utilities
|
* @returns Chat history utilities
|
||||||
*/
|
*/
|
||||||
export function useChatHistory(props: UseChatHistoryProps) {
|
export function useChatHistory(props: UseChatHistoryProps) {
|
||||||
const { chats, activeWorkflowId, copilotWorkflowId, loadChats, areChatsFresh, isSendingMessage } =
|
const { chats, activeWorkflowId, copilotWorkflowId, loadChats, isSendingMessage } = props
|
||||||
props
|
|
||||||
|
|
||||||
/** Groups chats by time period (Today, Yesterday, This Week, etc.) */
|
/** Groups chats by time period (Today, Yesterday, This Week, etc.) */
|
||||||
const groupedChats = useMemo(() => {
|
const groupedChats = useMemo(() => {
|
||||||
@@ -80,7 +78,7 @@ export function useChatHistory(props: UseChatHistoryProps) {
|
|||||||
/** Handles history dropdown opening and loads chats if needed (non-blocking) */
|
/** Handles history dropdown opening and loads chats if needed (non-blocking) */
|
||||||
const handleHistoryDropdownOpen = useCallback(
|
const handleHistoryDropdownOpen = useCallback(
|
||||||
(open: boolean) => {
|
(open: boolean) => {
|
||||||
if (open && activeWorkflowId && !isSendingMessage && !areChatsFresh(activeWorkflowId)) {
|
if (open && activeWorkflowId && !isSendingMessage) {
|
||||||
loadChats(false).catch((error) => {
|
loadChats(false).catch((error) => {
|
||||||
logger.error('Failed to load chat history:', error)
|
logger.error('Failed to load chat history:', error)
|
||||||
})
|
})
|
||||||
@@ -90,7 +88,7 @@ export function useChatHistory(props: UseChatHistoryProps) {
|
|||||||
logger.info('Chat history opened during stream - showing cached data only')
|
logger.info('Chat history opened during stream - showing cached data only')
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
[activeWorkflowId, areChatsFresh, isSendingMessage, loadChats]
|
[activeWorkflowId, isSendingMessage, loadChats]
|
||||||
)
|
)
|
||||||
|
|
||||||
return {
|
return {
|
||||||
|
|||||||
@@ -14,6 +14,7 @@ interface UseCopilotInitializationProps {
|
|||||||
loadAutoAllowedTools: () => Promise<void>
|
loadAutoAllowedTools: () => Promise<void>
|
||||||
currentChat: any
|
currentChat: any
|
||||||
isSendingMessage: boolean
|
isSendingMessage: boolean
|
||||||
|
resumeActiveStream: () => Promise<boolean>
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -32,11 +33,13 @@ export function useCopilotInitialization(props: UseCopilotInitializationProps) {
|
|||||||
loadAutoAllowedTools,
|
loadAutoAllowedTools,
|
||||||
currentChat,
|
currentChat,
|
||||||
isSendingMessage,
|
isSendingMessage,
|
||||||
|
resumeActiveStream,
|
||||||
} = props
|
} = props
|
||||||
|
|
||||||
const [isInitialized, setIsInitialized] = useState(false)
|
const [isInitialized, setIsInitialized] = useState(false)
|
||||||
const lastWorkflowIdRef = useRef<string | null>(null)
|
const lastWorkflowIdRef = useRef<string | null>(null)
|
||||||
const hasMountedRef = useRef(false)
|
const hasMountedRef = useRef(false)
|
||||||
|
const hasResumedRef = useRef(false)
|
||||||
|
|
||||||
/** Initialize on mount - loads chats if needed. Never loads during streaming */
|
/** Initialize on mount - loads chats if needed. Never loads during streaming */
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
@@ -105,6 +108,16 @@ export function useCopilotInitialization(props: UseCopilotInitializationProps) {
|
|||||||
isSendingMessage,
|
isSendingMessage,
|
||||||
])
|
])
|
||||||
|
|
||||||
|
/** Try to resume active stream on mount - runs early, before waiting for chats */
|
||||||
|
useEffect(() => {
|
||||||
|
if (hasResumedRef.current || isSendingMessage) return
|
||||||
|
hasResumedRef.current = true
|
||||||
|
// Resume immediately on mount - don't wait for isInitialized
|
||||||
|
resumeActiveStream().catch((err) => {
|
||||||
|
logger.warn('[Copilot] Failed to resume active stream', err)
|
||||||
|
})
|
||||||
|
}, [isSendingMessage, resumeActiveStream])
|
||||||
|
|
||||||
/** Load auto-allowed tools once on mount - runs immediately, independent of workflow */
|
/** Load auto-allowed tools once on mount - runs immediately, independent of workflow */
|
||||||
const hasLoadedAutoAllowedToolsRef = useRef(false)
|
const hasLoadedAutoAllowedToolsRef = useRef(false)
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
|
|||||||
@@ -18,7 +18,7 @@ import 'reactflow/dist/style.css'
|
|||||||
import { createLogger } from '@sim/logger'
|
import { createLogger } from '@sim/logger'
|
||||||
import { useShallow } from 'zustand/react/shallow'
|
import { useShallow } from 'zustand/react/shallow'
|
||||||
import { useSession } from '@/lib/auth/auth-client'
|
import { useSession } from '@/lib/auth/auth-client'
|
||||||
import type { OAuthConnectEventDetail } from '@/lib/copilot/tools/client/other/oauth-request-access'
|
import type { OAuthConnectEventDetail } from '@/lib/copilot/tools/client/base-tool'
|
||||||
import type { OAuthProvider } from '@/lib/oauth'
|
import type { OAuthProvider } from '@/lib/oauth'
|
||||||
import { BLOCK_DIMENSIONS, CONTAINER_DIMENSIONS } from '@/lib/workflows/blocks/block-dimensions'
|
import { BLOCK_DIMENSIONS, CONTAINER_DIMENSIONS } from '@/lib/workflows/blocks/block-dimensions'
|
||||||
import { TriggerUtils } from '@/lib/workflows/triggers/triggers'
|
import { TriggerUtils } from '@/lib/workflows/triggers/triggers'
|
||||||
|
|||||||
@@ -5,10 +5,43 @@ import { CheckCircle, ChevronDown, ChevronRight, Loader2, Settings, XCircle } fr
|
|||||||
import { Badge } from '@/components/emcn'
|
import { Badge } from '@/components/emcn'
|
||||||
import { Button } from '@/components/ui/button'
|
import { Button } from '@/components/ui/button'
|
||||||
import { Collapsible, CollapsibleContent, CollapsibleTrigger } from '@/components/ui/collapsible'
|
import { Collapsible, CollapsibleContent, CollapsibleTrigger } from '@/components/ui/collapsible'
|
||||||
import type { ToolCallGroup, ToolCallState } from '@/lib/copilot/types'
|
|
||||||
import { cn } from '@/lib/core/utils/cn'
|
import { cn } from '@/lib/core/utils/cn'
|
||||||
import { formatDuration } from '@/lib/core/utils/formatting'
|
import { formatDuration } from '@/lib/core/utils/formatting'
|
||||||
|
|
||||||
|
interface ToolCallState {
|
||||||
|
id: string
|
||||||
|
name: string
|
||||||
|
displayName?: string
|
||||||
|
parameters?: Record<string, unknown>
|
||||||
|
state:
|
||||||
|
| 'detecting'
|
||||||
|
| 'pending'
|
||||||
|
| 'executing'
|
||||||
|
| 'completed'
|
||||||
|
| 'error'
|
||||||
|
| 'rejected'
|
||||||
|
| 'applied'
|
||||||
|
| 'ready_for_review'
|
||||||
|
| 'aborted'
|
||||||
|
| 'skipped'
|
||||||
|
| 'background'
|
||||||
|
startTime?: number
|
||||||
|
endTime?: number
|
||||||
|
duration?: number
|
||||||
|
result?: unknown
|
||||||
|
error?: string
|
||||||
|
progress?: string
|
||||||
|
}
|
||||||
|
|
||||||
|
interface ToolCallGroup {
|
||||||
|
id: string
|
||||||
|
toolCalls: ToolCallState[]
|
||||||
|
status: 'pending' | 'in_progress' | 'completed' | 'error'
|
||||||
|
startTime?: number
|
||||||
|
endTime?: number
|
||||||
|
summary?: string
|
||||||
|
}
|
||||||
|
|
||||||
interface ToolCallProps {
|
interface ToolCallProps {
|
||||||
toolCall: ToolCallState
|
toolCall: ToolCallState
|
||||||
isCompact?: boolean
|
isCompact?: boolean
|
||||||
|
|||||||
@@ -4,6 +4,7 @@ import { BlockType } from '@/executor/constants'
|
|||||||
import type { DAG } from '@/executor/dag/builder'
|
import type { DAG } from '@/executor/dag/builder'
|
||||||
import type { EdgeManager } from '@/executor/execution/edge-manager'
|
import type { EdgeManager } from '@/executor/execution/edge-manager'
|
||||||
import { serializePauseSnapshot } from '@/executor/execution/snapshot-serializer'
|
import { serializePauseSnapshot } from '@/executor/execution/snapshot-serializer'
|
||||||
|
import type { SerializableExecutionState } from '@/executor/execution/types'
|
||||||
import type { NodeExecutionOrchestrator } from '@/executor/orchestrators/node'
|
import type { NodeExecutionOrchestrator } from '@/executor/orchestrators/node'
|
||||||
import type {
|
import type {
|
||||||
ExecutionContext,
|
ExecutionContext,
|
||||||
@@ -135,6 +136,7 @@ export class ExecutionEngine {
|
|||||||
success: false,
|
success: false,
|
||||||
output: this.finalOutput,
|
output: this.finalOutput,
|
||||||
logs: this.context.blockLogs,
|
logs: this.context.blockLogs,
|
||||||
|
executionState: this.getSerializableExecutionState(),
|
||||||
metadata: this.context.metadata,
|
metadata: this.context.metadata,
|
||||||
status: 'cancelled',
|
status: 'cancelled',
|
||||||
}
|
}
|
||||||
@@ -144,6 +146,7 @@ export class ExecutionEngine {
|
|||||||
success: true,
|
success: true,
|
||||||
output: this.finalOutput,
|
output: this.finalOutput,
|
||||||
logs: this.context.blockLogs,
|
logs: this.context.blockLogs,
|
||||||
|
executionState: this.getSerializableExecutionState(),
|
||||||
metadata: this.context.metadata,
|
metadata: this.context.metadata,
|
||||||
}
|
}
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
@@ -157,6 +160,7 @@ export class ExecutionEngine {
|
|||||||
success: false,
|
success: false,
|
||||||
output: this.finalOutput,
|
output: this.finalOutput,
|
||||||
logs: this.context.blockLogs,
|
logs: this.context.blockLogs,
|
||||||
|
executionState: this.getSerializableExecutionState(),
|
||||||
metadata: this.context.metadata,
|
metadata: this.context.metadata,
|
||||||
status: 'cancelled',
|
status: 'cancelled',
|
||||||
}
|
}
|
||||||
@@ -459,6 +463,7 @@ export class ExecutionEngine {
|
|||||||
success: true,
|
success: true,
|
||||||
output: this.collectPauseResponses(),
|
output: this.collectPauseResponses(),
|
||||||
logs: this.context.blockLogs,
|
logs: this.context.blockLogs,
|
||||||
|
executionState: this.getSerializableExecutionState(snapshotSeed),
|
||||||
metadata: this.context.metadata,
|
metadata: this.context.metadata,
|
||||||
status: 'paused',
|
status: 'paused',
|
||||||
pausePoints,
|
pausePoints,
|
||||||
@@ -466,6 +471,24 @@ export class ExecutionEngine {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
private getSerializableExecutionState(snapshotSeed?: {
|
||||||
|
snapshot: string
|
||||||
|
}): SerializableExecutionState | undefined {
|
||||||
|
try {
|
||||||
|
const serializedSnapshot =
|
||||||
|
snapshotSeed?.snapshot ?? serializePauseSnapshot(this.context, [], this.dag).snapshot
|
||||||
|
const parsedSnapshot = JSON.parse(serializedSnapshot) as {
|
||||||
|
state?: SerializableExecutionState
|
||||||
|
}
|
||||||
|
return parsedSnapshot.state
|
||||||
|
} catch (error) {
|
||||||
|
logger.warn('Failed to serialize execution state', {
|
||||||
|
error: error instanceof Error ? error.message : String(error),
|
||||||
|
})
|
||||||
|
return undefined
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
private collectPauseResponses(): NormalizedBlockOutput {
|
private collectPauseResponses(): NormalizedBlockOutput {
|
||||||
const responses = Array.from(this.pausedBlocks.values()).map((pause) => pause.response)
|
const responses = Array.from(this.pausedBlocks.values()).map((pause) => pause.response)
|
||||||
|
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
import type { TraceSpan } from '@/lib/logs/types'
|
import type { TraceSpan } from '@/lib/logs/types'
|
||||||
import type { PermissionGroupConfig } from '@/lib/permission-groups/types'
|
import type { PermissionGroupConfig } from '@/lib/permission-groups/types'
|
||||||
import type { BlockOutput } from '@/blocks/types'
|
import type { BlockOutput } from '@/blocks/types'
|
||||||
|
import type { SerializableExecutionState } from '@/executor/execution/types'
|
||||||
import type { RunFromBlockContext } from '@/executor/utils/run-from-block'
|
import type { RunFromBlockContext } from '@/executor/utils/run-from-block'
|
||||||
import type { SerializedBlock, SerializedWorkflow } from '@/serializer/types'
|
import type { SerializedBlock, SerializedWorkflow } from '@/serializer/types'
|
||||||
|
|
||||||
@@ -302,6 +303,7 @@ export interface ExecutionResult {
|
|||||||
output: NormalizedBlockOutput
|
output: NormalizedBlockOutput
|
||||||
error?: string
|
error?: string
|
||||||
logs?: BlockLog[]
|
logs?: BlockLog[]
|
||||||
|
executionState?: SerializableExecutionState
|
||||||
metadata?: ExecutionMetadata
|
metadata?: ExecutionMetadata
|
||||||
status?: 'completed' | 'paused' | 'cancelled'
|
status?: 'completed' | 'paused' | 'cancelled'
|
||||||
pausePoints?: PausePoint[]
|
pausePoints?: PausePoint[]
|
||||||
|
|||||||
@@ -1,5 +1,12 @@
|
|||||||
import { useCallback } from 'react'
|
import { useCallback } from 'react'
|
||||||
import { createLogger } from '@sim/logger'
|
import { createLogger } from '@sim/logger'
|
||||||
|
|
||||||
|
declare global {
|
||||||
|
interface Window {
|
||||||
|
__skipDiffRecording?: boolean
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
import type { Edge } from 'reactflow'
|
import type { Edge } from 'reactflow'
|
||||||
import { useSession } from '@/lib/auth/auth-client'
|
import { useSession } from '@/lib/auth/auth-client'
|
||||||
import { enqueueReplaceWorkflowState } from '@/lib/workflows/operations/socket-operations'
|
import { enqueueReplaceWorkflowState } from '@/lib/workflows/operations/socket-operations'
|
||||||
@@ -908,7 +915,7 @@ export function useUndoRedo() {
|
|||||||
|
|
||||||
// Set flag to skip recording during this operation
|
// Set flag to skip recording during this operation
|
||||||
|
|
||||||
;(window as any).__skipDiffRecording = true
|
window.__skipDiffRecording = true
|
||||||
try {
|
try {
|
||||||
// Restore baseline state and broadcast to everyone
|
// Restore baseline state and broadcast to everyone
|
||||||
if (baselineSnapshot && activeWorkflowId) {
|
if (baselineSnapshot && activeWorkflowId) {
|
||||||
@@ -945,7 +952,7 @@ export function useUndoRedo() {
|
|||||||
logger.info('Clearing diff UI state')
|
logger.info('Clearing diff UI state')
|
||||||
useWorkflowDiffStore.getState().clearDiff({ restoreBaseline: false })
|
useWorkflowDiffStore.getState().clearDiff({ restoreBaseline: false })
|
||||||
} finally {
|
} finally {
|
||||||
;(window as any).__skipDiffRecording = false
|
window.__skipDiffRecording = false
|
||||||
}
|
}
|
||||||
|
|
||||||
logger.info('Undid apply-diff operation successfully')
|
logger.info('Undid apply-diff operation successfully')
|
||||||
@@ -965,7 +972,7 @@ export function useUndoRedo() {
|
|||||||
|
|
||||||
// Set flag to skip recording during this operation
|
// Set flag to skip recording during this operation
|
||||||
|
|
||||||
;(window as any).__skipDiffRecording = true
|
window.__skipDiffRecording = true
|
||||||
try {
|
try {
|
||||||
// Apply the before-accept state (with markers for this user)
|
// Apply the before-accept state (with markers for this user)
|
||||||
useWorkflowStore.getState().replaceWorkflowState(beforeAccept)
|
useWorkflowStore.getState().replaceWorkflowState(beforeAccept)
|
||||||
@@ -1004,7 +1011,7 @@ export function useUndoRedo() {
|
|||||||
diffAnalysis: diffAnalysis,
|
diffAnalysis: diffAnalysis,
|
||||||
})
|
})
|
||||||
} finally {
|
} finally {
|
||||||
;(window as any).__skipDiffRecording = false
|
window.__skipDiffRecording = false
|
||||||
}
|
}
|
||||||
|
|
||||||
logger.info('Undid accept-diff operation - restored diff view')
|
logger.info('Undid accept-diff operation - restored diff view')
|
||||||
@@ -1018,7 +1025,7 @@ export function useUndoRedo() {
|
|||||||
const { useWorkflowStore } = await import('@/stores/workflows/workflow/store')
|
const { useWorkflowStore } = await import('@/stores/workflows/workflow/store')
|
||||||
const { useSubBlockStore } = await import('@/stores/workflows/subblock/store')
|
const { useSubBlockStore } = await import('@/stores/workflows/subblock/store')
|
||||||
|
|
||||||
;(window as any).__skipDiffRecording = true
|
window.__skipDiffRecording = true
|
||||||
try {
|
try {
|
||||||
// Apply the before-reject state (with markers for this user)
|
// Apply the before-reject state (with markers for this user)
|
||||||
useWorkflowStore.getState().replaceWorkflowState(beforeReject)
|
useWorkflowStore.getState().replaceWorkflowState(beforeReject)
|
||||||
@@ -1055,7 +1062,7 @@ export function useUndoRedo() {
|
|||||||
diffAnalysis: diffAnalysis,
|
diffAnalysis: diffAnalysis,
|
||||||
})
|
})
|
||||||
} finally {
|
} finally {
|
||||||
;(window as any).__skipDiffRecording = false
|
window.__skipDiffRecording = false
|
||||||
}
|
}
|
||||||
|
|
||||||
logger.info('Undid reject-diff operation - restored diff view')
|
logger.info('Undid reject-diff operation - restored diff view')
|
||||||
@@ -1526,7 +1533,7 @@ export function useUndoRedo() {
|
|||||||
|
|
||||||
// Set flag to skip recording during this operation
|
// Set flag to skip recording during this operation
|
||||||
|
|
||||||
;(window as any).__skipDiffRecording = true
|
window.__skipDiffRecording = true
|
||||||
try {
|
try {
|
||||||
// Manually apply the proposed state and set up diff store (similar to setProposedChanges but with original baseline)
|
// Manually apply the proposed state and set up diff store (similar to setProposedChanges but with original baseline)
|
||||||
const diffStore = useWorkflowDiffStore.getState()
|
const diffStore = useWorkflowDiffStore.getState()
|
||||||
@@ -1567,7 +1574,7 @@ export function useUndoRedo() {
|
|||||||
diffAnalysis: diffAnalysis,
|
diffAnalysis: diffAnalysis,
|
||||||
})
|
})
|
||||||
} finally {
|
} finally {
|
||||||
;(window as any).__skipDiffRecording = false
|
window.__skipDiffRecording = false
|
||||||
}
|
}
|
||||||
|
|
||||||
logger.info('Redid apply-diff operation')
|
logger.info('Redid apply-diff operation')
|
||||||
@@ -1583,7 +1590,7 @@ export function useUndoRedo() {
|
|||||||
|
|
||||||
// Set flag to skip recording during this operation
|
// Set flag to skip recording during this operation
|
||||||
|
|
||||||
;(window as any).__skipDiffRecording = true
|
window.__skipDiffRecording = true
|
||||||
try {
|
try {
|
||||||
// Clear diff state FIRST to prevent flash of colors (local UI only)
|
// Clear diff state FIRST to prevent flash of colors (local UI only)
|
||||||
// Use setState directly to ensure synchronous clearing
|
// Use setState directly to ensure synchronous clearing
|
||||||
@@ -1621,7 +1628,7 @@ export function useUndoRedo() {
|
|||||||
operationId: opId,
|
operationId: opId,
|
||||||
})
|
})
|
||||||
} finally {
|
} finally {
|
||||||
;(window as any).__skipDiffRecording = false
|
window.__skipDiffRecording = false
|
||||||
}
|
}
|
||||||
|
|
||||||
logger.info('Redid accept-diff operation - cleared diff view')
|
logger.info('Redid accept-diff operation - cleared diff view')
|
||||||
@@ -1635,7 +1642,7 @@ export function useUndoRedo() {
|
|||||||
const { useWorkflowStore } = await import('@/stores/workflows/workflow/store')
|
const { useWorkflowStore } = await import('@/stores/workflows/workflow/store')
|
||||||
const { useSubBlockStore } = await import('@/stores/workflows/subblock/store')
|
const { useSubBlockStore } = await import('@/stores/workflows/subblock/store')
|
||||||
|
|
||||||
;(window as any).__skipDiffRecording = true
|
window.__skipDiffRecording = true
|
||||||
try {
|
try {
|
||||||
// Clear diff state FIRST to prevent flash of colors (local UI only)
|
// Clear diff state FIRST to prevent flash of colors (local UI only)
|
||||||
// Use setState directly to ensure synchronous clearing
|
// Use setState directly to ensure synchronous clearing
|
||||||
@@ -1673,7 +1680,7 @@ export function useUndoRedo() {
|
|||||||
operationId: opId,
|
operationId: opId,
|
||||||
})
|
})
|
||||||
} finally {
|
} finally {
|
||||||
;(window as any).__skipDiffRecording = false
|
window.__skipDiffRecording = false
|
||||||
}
|
}
|
||||||
|
|
||||||
logger.info('Redid reject-diff operation - cleared diff view')
|
logger.info('Redid reject-diff operation - cleared diff view')
|
||||||
|
|||||||
@@ -14,7 +14,7 @@ export type UsageLogCategory = 'model' | 'fixed'
|
|||||||
/**
|
/**
|
||||||
* Usage log source types
|
* Usage log source types
|
||||||
*/
|
*/
|
||||||
export type UsageLogSource = 'workflow' | 'wand' | 'copilot'
|
export type UsageLogSource = 'workflow' | 'wand' | 'copilot' | 'mcp_copilot'
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Metadata for 'model' category charges
|
* Metadata for 'model' category charges
|
||||||
|
|||||||
@@ -1,4 +1,5 @@
|
|||||||
import { createLogger } from '@sim/logger'
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { COPILOT_CHAT_API_PATH, COPILOT_CHAT_STREAM_API_PATH } from '@/lib/copilot/constants'
|
||||||
import type { CopilotMode, CopilotModelId, CopilotTransportMode } from '@/lib/copilot/models'
|
import type { CopilotMode, CopilotModelId, CopilotTransportMode } from '@/lib/copilot/models'
|
||||||
|
|
||||||
const logger = createLogger('CopilotAPI')
|
const logger = createLogger('CopilotAPI')
|
||||||
@@ -82,6 +83,7 @@ export interface SendMessageRequest {
|
|||||||
executionId?: string
|
executionId?: string
|
||||||
}>
|
}>
|
||||||
commands?: string[]
|
commands?: string[]
|
||||||
|
resumeFromEventId?: number
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -120,7 +122,7 @@ export async function sendStreamingMessage(
|
|||||||
request: SendMessageRequest
|
request: SendMessageRequest
|
||||||
): Promise<StreamingResponse> {
|
): Promise<StreamingResponse> {
|
||||||
try {
|
try {
|
||||||
const { abortSignal, ...requestBody } = request
|
const { abortSignal, resumeFromEventId, ...requestBody } = request
|
||||||
try {
|
try {
|
||||||
const preview = Array.isArray((requestBody as any).contexts)
|
const preview = Array.isArray((requestBody as any).contexts)
|
||||||
? (requestBody as any).contexts.map((c: any) => ({
|
? (requestBody as any).contexts.map((c: any) => ({
|
||||||
@@ -136,9 +138,56 @@ export async function sendStreamingMessage(
|
|||||||
? (requestBody as any).contexts.length
|
? (requestBody as any).contexts.length
|
||||||
: 0,
|
: 0,
|
||||||
contextsPreview: preview,
|
contextsPreview: preview,
|
||||||
|
resumeFromEventId,
|
||||||
})
|
})
|
||||||
} catch {}
|
} catch (error) {
|
||||||
const response = await fetch('/api/copilot/chat', {
|
logger.warn('Failed to log streaming message context preview', {
|
||||||
|
error: error instanceof Error ? error.message : String(error),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
const streamId = request.userMessageId
|
||||||
|
if (typeof resumeFromEventId === 'number') {
|
||||||
|
if (!streamId) {
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
error: 'streamId is required to resume a stream',
|
||||||
|
status: 400,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
const url = `${COPILOT_CHAT_STREAM_API_PATH}?streamId=${encodeURIComponent(
|
||||||
|
streamId
|
||||||
|
)}&from=${encodeURIComponent(String(resumeFromEventId))}`
|
||||||
|
const response = await fetch(url, {
|
||||||
|
method: 'GET',
|
||||||
|
signal: abortSignal,
|
||||||
|
credentials: 'include',
|
||||||
|
})
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
const errorMessage = await handleApiError(response, 'Failed to resume streaming message')
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
error: errorMessage,
|
||||||
|
status: response.status,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!response.body) {
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
error: 'No response body received',
|
||||||
|
status: 500,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
success: true,
|
||||||
|
stream: response.body,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const response = await fetch(COPILOT_CHAT_API_PATH, {
|
||||||
method: 'POST',
|
method: 'POST',
|
||||||
headers: { 'Content-Type': 'application/json' },
|
headers: { 'Content-Type': 'application/json' },
|
||||||
body: JSON.stringify({ ...requestBody, stream: true }),
|
body: JSON.stringify({ ...requestBody, stream: true }),
|
||||||
|
|||||||
66
apps/sim/lib/copilot/chat-context.ts
Normal file
66
apps/sim/lib/copilot/chat-context.ts
Normal file
@@ -0,0 +1,66 @@
|
|||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { CopilotFiles } from '@/lib/uploads'
|
||||||
|
import { createFileContent } from '@/lib/uploads/utils/file-utils'
|
||||||
|
|
||||||
|
const logger = createLogger('CopilotChatContext')
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Build conversation history from stored chat messages.
|
||||||
|
*/
|
||||||
|
export function buildConversationHistory(
|
||||||
|
messages: unknown[],
|
||||||
|
conversationId?: string
|
||||||
|
): { history: unknown[]; conversationId?: string } {
|
||||||
|
const history = Array.isArray(messages) ? messages : []
|
||||||
|
return {
|
||||||
|
history,
|
||||||
|
...(conversationId ? { conversationId } : {}),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface FileAttachmentInput {
|
||||||
|
id: string
|
||||||
|
key: string
|
||||||
|
name?: string
|
||||||
|
filename?: string
|
||||||
|
mimeType?: string
|
||||||
|
media_type?: string
|
||||||
|
size: number
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface FileContent {
|
||||||
|
type: string
|
||||||
|
[key: string]: unknown
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Process file attachments into content for the payload.
|
||||||
|
*/
|
||||||
|
export async function processFileAttachments(
|
||||||
|
fileAttachments: FileAttachmentInput[],
|
||||||
|
userId: string
|
||||||
|
): Promise<FileContent[]> {
|
||||||
|
if (!Array.isArray(fileAttachments) || fileAttachments.length === 0) return []
|
||||||
|
|
||||||
|
const processedFileContents: FileContent[] = []
|
||||||
|
const requestId = `copilot-${userId}-${Date.now()}`
|
||||||
|
const processedAttachments = await CopilotFiles.processCopilotAttachments(
|
||||||
|
fileAttachments as Parameters<typeof CopilotFiles.processCopilotAttachments>[0],
|
||||||
|
requestId
|
||||||
|
)
|
||||||
|
|
||||||
|
for (const { buffer, attachment } of processedAttachments) {
|
||||||
|
const fileContent = createFileContent(buffer, attachment.media_type)
|
||||||
|
if (fileContent) {
|
||||||
|
processedFileContents.push(fileContent as FileContent)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.debug('Processed file attachments for payload', {
|
||||||
|
userId,
|
||||||
|
inputCount: fileAttachments.length,
|
||||||
|
outputCount: processedFileContents.length,
|
||||||
|
})
|
||||||
|
|
||||||
|
return processedFileContents
|
||||||
|
}
|
||||||
69
apps/sim/lib/copilot/chat-lifecycle.ts
Normal file
69
apps/sim/lib/copilot/chat-lifecycle.ts
Normal file
@@ -0,0 +1,69 @@
|
|||||||
|
import { db } from '@sim/db'
|
||||||
|
import { copilotChats } from '@sim/db/schema'
|
||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { and, eq } from 'drizzle-orm'
|
||||||
|
|
||||||
|
const logger = createLogger('CopilotChatLifecycle')
|
||||||
|
|
||||||
|
export interface ChatLoadResult {
|
||||||
|
chatId: string
|
||||||
|
chat: typeof copilotChats.$inferSelect | null
|
||||||
|
conversationHistory: unknown[]
|
||||||
|
isNew: boolean
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Resolve or create a copilot chat session.
|
||||||
|
* If chatId is provided, loads the existing chat. Otherwise creates a new one.
|
||||||
|
*/
|
||||||
|
export async function resolveOrCreateChat(params: {
|
||||||
|
chatId?: string
|
||||||
|
userId: string
|
||||||
|
workflowId: string
|
||||||
|
model: string
|
||||||
|
}): Promise<ChatLoadResult> {
|
||||||
|
const { chatId, userId, workflowId, model } = params
|
||||||
|
|
||||||
|
if (chatId) {
|
||||||
|
const [chat] = await db
|
||||||
|
.select()
|
||||||
|
.from(copilotChats)
|
||||||
|
.where(and(eq(copilotChats.id, chatId), eq(copilotChats.userId, userId)))
|
||||||
|
.limit(1)
|
||||||
|
|
||||||
|
return {
|
||||||
|
chatId,
|
||||||
|
chat: chat ?? null,
|
||||||
|
conversationHistory: chat && Array.isArray(chat.messages) ? chat.messages : [],
|
||||||
|
isNew: false,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const [newChat] = await db
|
||||||
|
.insert(copilotChats)
|
||||||
|
.values({
|
||||||
|
userId,
|
||||||
|
workflowId,
|
||||||
|
title: null,
|
||||||
|
model,
|
||||||
|
messages: [],
|
||||||
|
})
|
||||||
|
.returning()
|
||||||
|
|
||||||
|
if (!newChat) {
|
||||||
|
logger.warn('Failed to create new copilot chat row', { userId, workflowId })
|
||||||
|
return {
|
||||||
|
chatId: '',
|
||||||
|
chat: null,
|
||||||
|
conversationHistory: [],
|
||||||
|
isNew: true,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
chatId: newChat.id,
|
||||||
|
chat: newChat,
|
||||||
|
conversationHistory: [],
|
||||||
|
isNew: true,
|
||||||
|
}
|
||||||
|
}
|
||||||
237
apps/sim/lib/copilot/chat-payload.ts
Normal file
237
apps/sim/lib/copilot/chat-payload.ts
Normal file
@@ -0,0 +1,237 @@
|
|||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { processFileAttachments } from '@/lib/copilot/chat-context'
|
||||||
|
import { getCopilotModel } from '@/lib/copilot/config'
|
||||||
|
import { SIM_AGENT_VERSION } from '@/lib/copilot/constants'
|
||||||
|
import { getCredentialsServerTool } from '@/lib/copilot/tools/server/user/get-credentials'
|
||||||
|
import type { CopilotProviderConfig } from '@/lib/copilot/types'
|
||||||
|
import { env } from '@/lib/core/config/env'
|
||||||
|
import { tools } from '@/tools/registry'
|
||||||
|
import { getLatestVersionTools, stripVersionSuffix } from '@/tools/utils'
|
||||||
|
|
||||||
|
const logger = createLogger('CopilotChatPayload')
|
||||||
|
|
||||||
|
export interface BuildPayloadParams {
|
||||||
|
message: string
|
||||||
|
workflowId: string
|
||||||
|
userId: string
|
||||||
|
userMessageId: string
|
||||||
|
mode: string
|
||||||
|
model: string
|
||||||
|
conversationHistory?: unknown[]
|
||||||
|
contexts?: Array<{ type: string; content: string }>
|
||||||
|
fileAttachments?: Array<{ id: string; key: string; size: number; [key: string]: unknown }>
|
||||||
|
commands?: string[]
|
||||||
|
chatId?: string
|
||||||
|
implicitFeedback?: string
|
||||||
|
}
|
||||||
|
|
||||||
|
interface ToolSchema {
|
||||||
|
name: string
|
||||||
|
description: string
|
||||||
|
input_schema: Record<string, unknown>
|
||||||
|
defer_loading?: boolean
|
||||||
|
executeLocally?: boolean
|
||||||
|
oauth?: { required: boolean; provider: string }
|
||||||
|
}
|
||||||
|
|
||||||
|
interface CredentialsPayload {
|
||||||
|
oauth: Record<
|
||||||
|
string,
|
||||||
|
{ accessToken: string; accountId: string; name: string; expiresAt?: string }
|
||||||
|
>
|
||||||
|
apiKeys: string[]
|
||||||
|
metadata?: {
|
||||||
|
connectedOAuth: Array<{ provider: string; name: string; scopes?: string[] }>
|
||||||
|
configuredApiKeys: string[]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
type MessageContent = string | Array<{ type: string; text?: string; [key: string]: unknown }>
|
||||||
|
|
||||||
|
interface ConversationMessage {
|
||||||
|
role: string
|
||||||
|
content: MessageContent
|
||||||
|
}
|
||||||
|
|
||||||
|
function buildProviderConfig(selectedModel: string): CopilotProviderConfig | undefined {
|
||||||
|
const defaults = getCopilotModel('chat')
|
||||||
|
const envModel = env.COPILOT_MODEL || defaults.model
|
||||||
|
const providerEnv = env.COPILOT_PROVIDER
|
||||||
|
|
||||||
|
if (!providerEnv) return undefined
|
||||||
|
|
||||||
|
if (providerEnv === 'azure-openai') {
|
||||||
|
return {
|
||||||
|
provider: 'azure-openai',
|
||||||
|
model: envModel,
|
||||||
|
apiKey: env.AZURE_OPENAI_API_KEY,
|
||||||
|
apiVersion: 'preview',
|
||||||
|
endpoint: env.AZURE_OPENAI_ENDPOINT,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (providerEnv === 'vertex') {
|
||||||
|
return {
|
||||||
|
provider: 'vertex',
|
||||||
|
model: envModel,
|
||||||
|
apiKey: env.COPILOT_API_KEY,
|
||||||
|
vertexProject: env.VERTEX_PROJECT,
|
||||||
|
vertexLocation: env.VERTEX_LOCATION,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
provider: providerEnv as Exclude<string, 'azure-openai' | 'vertex'>,
|
||||||
|
model: selectedModel,
|
||||||
|
apiKey: env.COPILOT_API_KEY,
|
||||||
|
} as CopilotProviderConfig
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Build the request payload for the copilot backend.
|
||||||
|
*/
|
||||||
|
export async function buildCopilotRequestPayload(
|
||||||
|
params: BuildPayloadParams,
|
||||||
|
options: {
|
||||||
|
providerConfig?: CopilotProviderConfig
|
||||||
|
selectedModel: string
|
||||||
|
}
|
||||||
|
): Promise<Record<string, unknown>> {
|
||||||
|
const {
|
||||||
|
message,
|
||||||
|
workflowId,
|
||||||
|
userId,
|
||||||
|
userMessageId,
|
||||||
|
mode,
|
||||||
|
conversationHistory = [],
|
||||||
|
contexts,
|
||||||
|
fileAttachments,
|
||||||
|
commands,
|
||||||
|
chatId,
|
||||||
|
implicitFeedback,
|
||||||
|
} = params
|
||||||
|
|
||||||
|
const selectedModel = options.selectedModel
|
||||||
|
const providerConfig = options.providerConfig ?? buildProviderConfig(selectedModel)
|
||||||
|
|
||||||
|
const effectiveMode = mode === 'agent' ? 'build' : mode
|
||||||
|
const transportMode = effectiveMode === 'build' ? 'agent' : effectiveMode
|
||||||
|
|
||||||
|
const processedFileContents = await processFileAttachments(fileAttachments ?? [], userId)
|
||||||
|
|
||||||
|
const messages: ConversationMessage[] = []
|
||||||
|
for (const msg of conversationHistory as Array<Record<string, unknown>>) {
|
||||||
|
const msgAttachments = msg.fileAttachments as Array<Record<string, unknown>> | undefined
|
||||||
|
if (Array.isArray(msgAttachments) && msgAttachments.length > 0) {
|
||||||
|
const content: Array<{ type: string; text?: string; [key: string]: unknown }> = [
|
||||||
|
{ type: 'text', text: msg.content as string },
|
||||||
|
]
|
||||||
|
const processedHistoricalAttachments = await processFileAttachments(
|
||||||
|
(msgAttachments as BuildPayloadParams['fileAttachments']) ?? [],
|
||||||
|
userId
|
||||||
|
)
|
||||||
|
for (const fileContent of processedHistoricalAttachments) {
|
||||||
|
content.push(fileContent)
|
||||||
|
}
|
||||||
|
messages.push({ role: msg.role as string, content })
|
||||||
|
} else {
|
||||||
|
messages.push({ role: msg.role as string, content: msg.content as string })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (implicitFeedback) {
|
||||||
|
messages.push({ role: 'system', content: implicitFeedback })
|
||||||
|
}
|
||||||
|
|
||||||
|
if (processedFileContents.length > 0) {
|
||||||
|
const content: Array<{ type: string; text?: string; [key: string]: unknown }> = [
|
||||||
|
{ type: 'text', text: message },
|
||||||
|
]
|
||||||
|
for (const fileContent of processedFileContents) {
|
||||||
|
content.push(fileContent)
|
||||||
|
}
|
||||||
|
messages.push({ role: 'user', content })
|
||||||
|
} else {
|
||||||
|
messages.push({ role: 'user', content: message })
|
||||||
|
}
|
||||||
|
|
||||||
|
let integrationTools: ToolSchema[] = []
|
||||||
|
let credentials: CredentialsPayload | null = null
|
||||||
|
|
||||||
|
if (effectiveMode === 'build') {
|
||||||
|
// function_execute sandbox tool is now defined in Go — no need to send it
|
||||||
|
|
||||||
|
try {
|
||||||
|
const rawCredentials = await getCredentialsServerTool.execute({ workflowId }, { userId })
|
||||||
|
|
||||||
|
const oauthMap: CredentialsPayload['oauth'] = {}
|
||||||
|
const connectedOAuth: Array<{ provider: string; name: string; scopes?: string[] }> = []
|
||||||
|
for (const cred of rawCredentials?.oauth?.connected?.credentials ?? []) {
|
||||||
|
if (cred.accessToken) {
|
||||||
|
oauthMap[cred.provider] = {
|
||||||
|
accessToken: cred.accessToken,
|
||||||
|
accountId: cred.id,
|
||||||
|
name: cred.name,
|
||||||
|
}
|
||||||
|
connectedOAuth.push({ provider: cred.provider, name: cred.name })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
credentials = {
|
||||||
|
oauth: oauthMap,
|
||||||
|
apiKeys: rawCredentials?.environment?.variableNames ?? [],
|
||||||
|
metadata: {
|
||||||
|
connectedOAuth,
|
||||||
|
configuredApiKeys: rawCredentials?.environment?.variableNames ?? [],
|
||||||
|
},
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
logger.warn('Failed to fetch credentials for build payload', {
|
||||||
|
error: error instanceof Error ? error.message : String(error),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const { createUserToolSchema } = await import('@/tools/params')
|
||||||
|
const latestTools = getLatestVersionTools(tools)
|
||||||
|
|
||||||
|
integrationTools = Object.entries(latestTools).map(([toolId, toolConfig]) => {
|
||||||
|
const userSchema = createUserToolSchema(toolConfig)
|
||||||
|
const strippedName = stripVersionSuffix(toolId)
|
||||||
|
return {
|
||||||
|
name: strippedName,
|
||||||
|
description: toolConfig.description || toolConfig.name || strippedName,
|
||||||
|
input_schema: userSchema as unknown as Record<string, unknown>,
|
||||||
|
defer_loading: true,
|
||||||
|
...(toolConfig.oauth?.required && {
|
||||||
|
oauth: {
|
||||||
|
required: true,
|
||||||
|
provider: toolConfig.oauth.provider,
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
}
|
||||||
|
})
|
||||||
|
} catch (error) {
|
||||||
|
logger.warn('Failed to build tool schemas for payload', {
|
||||||
|
error: error instanceof Error ? error.message : String(error),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
message,
|
||||||
|
workflowId,
|
||||||
|
userId,
|
||||||
|
model: selectedModel,
|
||||||
|
mode: transportMode,
|
||||||
|
messageId: userMessageId,
|
||||||
|
version: SIM_AGENT_VERSION,
|
||||||
|
...(providerConfig ? { provider: providerConfig } : {}),
|
||||||
|
...(contexts && contexts.length > 0 ? { context: contexts } : {}),
|
||||||
|
...(chatId ? { chatId } : {}),
|
||||||
|
...(processedFileContents.length > 0 ? { fileAttachments: processedFileContents } : {}),
|
||||||
|
...(integrationTools.length > 0 ? { integrationTools } : {}),
|
||||||
|
...(credentials ? { credentials } : {}),
|
||||||
|
...(commands && commands.length > 0 ? { commands } : {}),
|
||||||
|
}
|
||||||
|
}
|
||||||
147
apps/sim/lib/copilot/client-sse/content-blocks.ts
Normal file
147
apps/sim/lib/copilot/client-sse/content-blocks.ts
Normal file
@@ -0,0 +1,147 @@
|
|||||||
|
import type {
|
||||||
|
ChatContext,
|
||||||
|
CopilotMessage,
|
||||||
|
MessageFileAttachment,
|
||||||
|
} from '@/stores/panel/copilot/types'
|
||||||
|
import type { ClientContentBlock, ClientStreamingContext } from './types'
|
||||||
|
|
||||||
|
const TEXT_BLOCK_TYPE = 'text'
|
||||||
|
const THINKING_BLOCK_TYPE = 'thinking'
|
||||||
|
const CONTINUE_OPTIONS_TAG = '<options>{"1":"Continue"}</options>'
|
||||||
|
|
||||||
|
export function createUserMessage(
|
||||||
|
content: string,
|
||||||
|
fileAttachments?: MessageFileAttachment[],
|
||||||
|
contexts?: ChatContext[],
|
||||||
|
messageId?: string
|
||||||
|
): CopilotMessage {
|
||||||
|
return {
|
||||||
|
id: messageId || crypto.randomUUID(),
|
||||||
|
role: 'user',
|
||||||
|
content,
|
||||||
|
timestamp: new Date().toISOString(),
|
||||||
|
...(fileAttachments && fileAttachments.length > 0 && { fileAttachments }),
|
||||||
|
...(contexts && contexts.length > 0 && { contexts }),
|
||||||
|
...(contexts &&
|
||||||
|
contexts.length > 0 && {
|
||||||
|
contentBlocks: [{ type: 'contexts', contexts, timestamp: Date.now() }],
|
||||||
|
}),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function createStreamingMessage(): CopilotMessage {
|
||||||
|
return {
|
||||||
|
id: crypto.randomUUID(),
|
||||||
|
role: 'assistant',
|
||||||
|
content: '',
|
||||||
|
timestamp: new Date().toISOString(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function createErrorMessage(
|
||||||
|
messageId: string,
|
||||||
|
content: string,
|
||||||
|
errorType?: 'usage_limit' | 'unauthorized' | 'forbidden' | 'rate_limit' | 'upgrade_required'
|
||||||
|
): CopilotMessage {
|
||||||
|
return {
|
||||||
|
id: messageId,
|
||||||
|
role: 'assistant',
|
||||||
|
content,
|
||||||
|
timestamp: new Date().toISOString(),
|
||||||
|
contentBlocks: [
|
||||||
|
{
|
||||||
|
type: 'text',
|
||||||
|
content,
|
||||||
|
timestamp: Date.now(),
|
||||||
|
},
|
||||||
|
],
|
||||||
|
errorType,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function appendTextBlock(context: ClientStreamingContext, text: string) {
|
||||||
|
if (!text) return
|
||||||
|
context.accumulatedContent += text
|
||||||
|
if (context.currentTextBlock && context.contentBlocks.length > 0) {
|
||||||
|
const lastBlock = context.contentBlocks[context.contentBlocks.length - 1]
|
||||||
|
if (lastBlock.type === TEXT_BLOCK_TYPE && lastBlock === context.currentTextBlock) {
|
||||||
|
lastBlock.content += text
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
const newBlock: ClientContentBlock = { type: 'text', content: text, timestamp: Date.now() }
|
||||||
|
context.currentTextBlock = newBlock
|
||||||
|
context.contentBlocks.push(newBlock)
|
||||||
|
}
|
||||||
|
|
||||||
|
export function appendContinueOption(content: string): string {
|
||||||
|
if (/<options>/i.test(content)) return content
|
||||||
|
const suffix = content.trim().length > 0 ? '\n\n' : ''
|
||||||
|
return `${content}${suffix}${CONTINUE_OPTIONS_TAG}`
|
||||||
|
}
|
||||||
|
|
||||||
|
export function appendContinueOptionBlock(blocks: ClientContentBlock[]): ClientContentBlock[] {
|
||||||
|
if (!Array.isArray(blocks)) return blocks
|
||||||
|
const hasOptions = blocks.some(
|
||||||
|
(block) =>
|
||||||
|
block?.type === TEXT_BLOCK_TYPE &&
|
||||||
|
typeof block.content === 'string' &&
|
||||||
|
/<options>/i.test(block.content)
|
||||||
|
)
|
||||||
|
if (hasOptions) return blocks
|
||||||
|
return [
|
||||||
|
...blocks,
|
||||||
|
{
|
||||||
|
type: TEXT_BLOCK_TYPE,
|
||||||
|
content: CONTINUE_OPTIONS_TAG,
|
||||||
|
timestamp: Date.now(),
|
||||||
|
},
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
export function stripContinueOption(content: string): string {
|
||||||
|
if (!content || !content.includes(CONTINUE_OPTIONS_TAG)) return content
|
||||||
|
const next = content.replace(CONTINUE_OPTIONS_TAG, '')
|
||||||
|
return next.replace(/\n{2,}\s*$/g, '\n').trimEnd()
|
||||||
|
}
|
||||||
|
|
||||||
|
export function stripContinueOptionFromBlocks(blocks: ClientContentBlock[]): ClientContentBlock[] {
|
||||||
|
if (!Array.isArray(blocks)) return blocks
|
||||||
|
return blocks.flatMap((block) => {
|
||||||
|
if (
|
||||||
|
block?.type === TEXT_BLOCK_TYPE &&
|
||||||
|
typeof block.content === 'string' &&
|
||||||
|
block.content.includes(CONTINUE_OPTIONS_TAG)
|
||||||
|
) {
|
||||||
|
const nextContent = stripContinueOption(block.content)
|
||||||
|
if (!nextContent.trim()) return []
|
||||||
|
return [{ ...block, content: nextContent }]
|
||||||
|
}
|
||||||
|
return [block]
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
export function beginThinkingBlock(context: ClientStreamingContext) {
|
||||||
|
if (!context.currentThinkingBlock) {
|
||||||
|
const newBlock: ClientContentBlock = {
|
||||||
|
type: 'thinking',
|
||||||
|
content: '',
|
||||||
|
timestamp: Date.now(),
|
||||||
|
startTime: Date.now(),
|
||||||
|
}
|
||||||
|
context.currentThinkingBlock = newBlock
|
||||||
|
context.contentBlocks.push(newBlock)
|
||||||
|
}
|
||||||
|
context.isInThinkingBlock = true
|
||||||
|
context.currentTextBlock = null
|
||||||
|
}
|
||||||
|
|
||||||
|
export function finalizeThinkingBlock(context: ClientStreamingContext) {
|
||||||
|
if (context.currentThinkingBlock) {
|
||||||
|
context.currentThinkingBlock.duration =
|
||||||
|
Date.now() - (context.currentThinkingBlock.startTime || Date.now())
|
||||||
|
}
|
||||||
|
context.isInThinkingBlock = false
|
||||||
|
context.currentThinkingBlock = null
|
||||||
|
context.currentTextBlock = null
|
||||||
|
}
|
||||||
850
apps/sim/lib/copilot/client-sse/handlers.ts
Normal file
850
apps/sim/lib/copilot/client-sse/handlers.ts
Normal file
@@ -0,0 +1,850 @@
|
|||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { STREAM_STORAGE_KEY } from '@/lib/copilot/constants'
|
||||||
|
import { asRecord } from '@/lib/copilot/orchestrator/sse-utils'
|
||||||
|
import type { SSEEvent } from '@/lib/copilot/orchestrator/types'
|
||||||
|
import {
|
||||||
|
isBackgroundState,
|
||||||
|
isRejectedState,
|
||||||
|
isReviewState,
|
||||||
|
resolveToolDisplay,
|
||||||
|
} from '@/lib/copilot/store-utils'
|
||||||
|
import { ClientToolCallState } from '@/lib/copilot/tools/client/tool-display-registry'
|
||||||
|
import type { CopilotStore, CopilotStreamInfo, CopilotToolCall } from '@/stores/panel/copilot/types'
|
||||||
|
import { useEnvironmentStore } from '@/stores/settings/environment/store'
|
||||||
|
import { useVariablesStore } from '@/stores/panel/variables/store'
|
||||||
|
import { useWorkflowDiffStore } from '@/stores/workflow-diff/store'
|
||||||
|
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||||
|
import type { WorkflowState } from '@/stores/workflows/workflow/types'
|
||||||
|
import { appendTextBlock, beginThinkingBlock, finalizeThinkingBlock } from './content-blocks'
|
||||||
|
import type { ClientContentBlock, ClientStreamingContext } from './types'
|
||||||
|
|
||||||
|
const logger = createLogger('CopilotClientSseHandlers')
|
||||||
|
const TEXT_BLOCK_TYPE = 'text'
|
||||||
|
const MAX_BATCH_INTERVAL = 50
|
||||||
|
const MIN_BATCH_INTERVAL = 16
|
||||||
|
const MAX_QUEUE_SIZE = 5
|
||||||
|
|
||||||
|
function writeActiveStreamToStorage(info: CopilotStreamInfo | null): void {
|
||||||
|
if (typeof window === 'undefined') return
|
||||||
|
try {
|
||||||
|
if (!info) {
|
||||||
|
window.sessionStorage.removeItem(STREAM_STORAGE_KEY)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
window.sessionStorage.setItem(STREAM_STORAGE_KEY, JSON.stringify(info))
|
||||||
|
} catch (error) {
|
||||||
|
logger.warn('Failed to write active stream to storage', {
|
||||||
|
error: error instanceof Error ? error.message : String(error),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
type StoreSet = (
|
||||||
|
partial: Partial<CopilotStore> | ((state: CopilotStore) => Partial<CopilotStore>)
|
||||||
|
) => void
|
||||||
|
|
||||||
|
export type SSEHandler = (
|
||||||
|
data: SSEEvent,
|
||||||
|
context: ClientStreamingContext,
|
||||||
|
get: () => CopilotStore,
|
||||||
|
set: StoreSet
|
||||||
|
) => Promise<void> | void
|
||||||
|
|
||||||
|
const streamingUpdateQueue = new Map<string, ClientStreamingContext>()
|
||||||
|
let streamingUpdateRAF: number | null = null
|
||||||
|
let lastBatchTime = 0
|
||||||
|
|
||||||
|
export function stopStreamingUpdates() {
|
||||||
|
if (streamingUpdateRAF !== null) {
|
||||||
|
cancelAnimationFrame(streamingUpdateRAF)
|
||||||
|
streamingUpdateRAF = null
|
||||||
|
}
|
||||||
|
streamingUpdateQueue.clear()
|
||||||
|
}
|
||||||
|
|
||||||
|
function createOptimizedContentBlocks(contentBlocks: ClientContentBlock[]): ClientContentBlock[] {
|
||||||
|
const result: ClientContentBlock[] = new Array(contentBlocks.length)
|
||||||
|
for (let i = 0; i < contentBlocks.length; i++) {
|
||||||
|
const block = contentBlocks[i]
|
||||||
|
result[i] = { ...block }
|
||||||
|
}
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
|
export function flushStreamingUpdates(set: StoreSet) {
|
||||||
|
if (streamingUpdateRAF !== null) {
|
||||||
|
cancelAnimationFrame(streamingUpdateRAF)
|
||||||
|
streamingUpdateRAF = null
|
||||||
|
}
|
||||||
|
if (streamingUpdateQueue.size === 0) return
|
||||||
|
|
||||||
|
const updates = new Map(streamingUpdateQueue)
|
||||||
|
streamingUpdateQueue.clear()
|
||||||
|
|
||||||
|
set((state: CopilotStore) => {
|
||||||
|
if (updates.size === 0) return state
|
||||||
|
return {
|
||||||
|
messages: state.messages.map((msg) => {
|
||||||
|
const update = updates.get(msg.id)
|
||||||
|
if (update) {
|
||||||
|
return {
|
||||||
|
...msg,
|
||||||
|
content: '',
|
||||||
|
contentBlocks:
|
||||||
|
update.contentBlocks.length > 0
|
||||||
|
? createOptimizedContentBlocks(update.contentBlocks)
|
||||||
|
: [],
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return msg
|
||||||
|
}),
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
export function updateStreamingMessage(set: StoreSet, context: ClientStreamingContext) {
|
||||||
|
if (context.suppressStreamingUpdates) return
|
||||||
|
const now = performance.now()
|
||||||
|
streamingUpdateQueue.set(context.messageId, context)
|
||||||
|
const timeSinceLastBatch = now - lastBatchTime
|
||||||
|
const shouldFlushImmediately =
|
||||||
|
streamingUpdateQueue.size >= MAX_QUEUE_SIZE || timeSinceLastBatch > MAX_BATCH_INTERVAL
|
||||||
|
|
||||||
|
if (streamingUpdateRAF === null) {
|
||||||
|
const scheduleUpdate = () => {
|
||||||
|
streamingUpdateRAF = requestAnimationFrame(() => {
|
||||||
|
const updates = new Map(streamingUpdateQueue)
|
||||||
|
streamingUpdateQueue.clear()
|
||||||
|
streamingUpdateRAF = null
|
||||||
|
lastBatchTime = performance.now()
|
||||||
|
set((state: CopilotStore) => {
|
||||||
|
if (updates.size === 0) return state
|
||||||
|
const messages = state.messages
|
||||||
|
const lastMessage = messages[messages.length - 1]
|
||||||
|
const lastMessageUpdate = lastMessage ? updates.get(lastMessage.id) : null
|
||||||
|
if (updates.size === 1 && lastMessageUpdate) {
|
||||||
|
const newMessages = [...messages]
|
||||||
|
newMessages[messages.length - 1] = {
|
||||||
|
...lastMessage,
|
||||||
|
content: '',
|
||||||
|
contentBlocks:
|
||||||
|
lastMessageUpdate.contentBlocks.length > 0
|
||||||
|
? createOptimizedContentBlocks(lastMessageUpdate.contentBlocks)
|
||||||
|
: [],
|
||||||
|
}
|
||||||
|
return { messages: newMessages }
|
||||||
|
}
|
||||||
|
return {
|
||||||
|
messages: messages.map((msg) => {
|
||||||
|
const update = updates.get(msg.id)
|
||||||
|
if (update) {
|
||||||
|
return {
|
||||||
|
...msg,
|
||||||
|
content: '',
|
||||||
|
contentBlocks:
|
||||||
|
update.contentBlocks.length > 0
|
||||||
|
? createOptimizedContentBlocks(update.contentBlocks)
|
||||||
|
: [],
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return msg
|
||||||
|
}),
|
||||||
|
}
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
||||||
|
if (shouldFlushImmediately) scheduleUpdate()
|
||||||
|
else setTimeout(scheduleUpdate, Math.max(0, MIN_BATCH_INTERVAL - timeSinceLastBatch))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function upsertToolCallBlock(context: ClientStreamingContext, toolCall: CopilotToolCall) {
|
||||||
|
let found = false
|
||||||
|
for (let i = 0; i < context.contentBlocks.length; i++) {
|
||||||
|
const b = context.contentBlocks[i]
|
||||||
|
if (b.type === 'tool_call' && b.toolCall?.id === toolCall.id) {
|
||||||
|
context.contentBlocks[i] = { ...b, toolCall }
|
||||||
|
found = true
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (!found) {
|
||||||
|
context.contentBlocks.push({ type: 'tool_call', toolCall, timestamp: Date.now() })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function stripThinkingTags(text: string): string {
|
||||||
|
return text.replace(/<\/?thinking[^>]*>/gi, '').replace(/<\/?thinking[^&]*>/gi, '')
|
||||||
|
}
|
||||||
|
|
||||||
|
function appendThinkingContent(context: ClientStreamingContext, text: string) {
|
||||||
|
if (!text) return
|
||||||
|
const cleanedText = stripThinkingTags(text)
|
||||||
|
if (!cleanedText) return
|
||||||
|
if (context.currentThinkingBlock) {
|
||||||
|
context.currentThinkingBlock.content += cleanedText
|
||||||
|
} else {
|
||||||
|
const newBlock: ClientContentBlock = {
|
||||||
|
type: 'thinking',
|
||||||
|
content: cleanedText,
|
||||||
|
timestamp: Date.now(),
|
||||||
|
startTime: Date.now(),
|
||||||
|
}
|
||||||
|
context.currentThinkingBlock = newBlock
|
||||||
|
context.contentBlocks.push(newBlock)
|
||||||
|
}
|
||||||
|
context.isInThinkingBlock = true
|
||||||
|
context.currentTextBlock = null
|
||||||
|
}
|
||||||
|
|
||||||
|
export const sseHandlers: Record<string, SSEHandler> = {
|
||||||
|
chat_id: async (data, context, get, set) => {
|
||||||
|
context.newChatId = data.chatId
|
||||||
|
const { currentChat, activeStream } = get()
|
||||||
|
if (!currentChat && context.newChatId) {
|
||||||
|
await get().handleNewChatCreation(context.newChatId)
|
||||||
|
}
|
||||||
|
if (activeStream && context.newChatId && !activeStream.chatId) {
|
||||||
|
const updatedStream = { ...activeStream, chatId: context.newChatId }
|
||||||
|
set({ activeStream: updatedStream })
|
||||||
|
writeActiveStreamToStorage(updatedStream)
|
||||||
|
}
|
||||||
|
},
|
||||||
|
title_updated: (_data, _context, get, set) => {
|
||||||
|
const title = _data.title
|
||||||
|
if (!title) return
|
||||||
|
const { currentChat, chats } = get()
|
||||||
|
if (currentChat) {
|
||||||
|
set({
|
||||||
|
currentChat: { ...currentChat, title },
|
||||||
|
chats: chats.map((c) => (c.id === currentChat.id ? { ...c, title } : c)),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
},
|
||||||
|
  // Handles a `tool_result` SSE event. Resolves the target client-side state
  // (success / rejected / error), updates the tool call both in the store's
  // `toolCallsById` map and in the streaming message's content blocks, then
  // runs per-tool side effects (todo status, workflow diff, deployment status,
  // env/workflow variable reloads). Tool calls already in a rejected, review,
  // or background state are left untouched so late server events cannot
  // clobber a user decision or backgrounded work.
  tool_result: (data, context, get, set) => {
    try {
      const eventData = asRecord(data?.data)
      // The tool-call id may arrive at the top level or nested in the payload.
      const toolCallId: string | undefined =
        data?.toolCallId || (eventData.id as string | undefined)
      const success: boolean | undefined = data?.success
      const failedDependency: boolean = data?.failedDependency === true
      const resultObj = asRecord(data?.result)
      const skipped: boolean = resultObj.skipped === true
      if (!toolCallId) return
      const { toolCallsById } = get()
      const current = toolCallsById[toolCallId]
      if (current) {
        // Terminal/user-owned states win over late results.
        if (
          isRejectedState(current.state) ||
          isReviewState(current.state) ||
          isBackgroundState(current.state)
        ) {
          return
        }
        // success -> success; failed dependency or explicit skip -> rejected;
        // anything else -> error.
        const targetState = success
          ? ClientToolCallState.success
          : failedDependency || skipped
            ? ClientToolCallState.rejected
            : ClientToolCallState.error
        const updatedMap = { ...toolCallsById }
        updatedMap[toolCallId] = {
          ...current,
          state: targetState,
          display: resolveToolDisplay(current.name, targetState, current.id, current.params),
        }
        set({ toolCallsById: updatedMap })

        // checkoff_todo: mirror the completed todo into the plan view.
        // NOTE(review): if asRecord always returns an object (truthy), the
        // `|| asRecord(eventData.result)` fallback never triggers — confirm
        // asRecord's behavior for undefined input.
        if (targetState === ClientToolCallState.success && current.name === 'checkoff_todo') {
          try {
            const result = asRecord(data?.result) || asRecord(eventData.result)
            const input = asRecord(current.params || current.input)
            // Accept both camelCase and bare `id` from either input or result.
            const todoId = (input.id || input.todoId || result.id || result.todoId) as
              | string
              | undefined
            if (todoId) {
              get().updatePlanTodoStatus(todoId, 'completed')
            }
          } catch (error) {
            logger.warn('Failed to process checkoff_todo tool result', {
              error: error instanceof Error ? error.message : String(error),
              toolCallId,
            })
          }
        }

        // mark_todo_in_progress: mirror the in-progress todo into the plan view.
        if (
          targetState === ClientToolCallState.success &&
          current.name === 'mark_todo_in_progress'
        ) {
          try {
            const result = asRecord(data?.result) || asRecord(eventData.result)
            const input = asRecord(current.params || current.input)
            const todoId = (input.id || input.todoId || result.id || result.todoId) as
              | string
              | undefined
            if (todoId) {
              get().updatePlanTodoStatus(todoId, 'executing')
            }
          } catch (error) {
            logger.warn('Failed to process mark_todo_in_progress tool result', {
              error: error instanceof Error ? error.message : String(error),
              toolCallId,
            })
          }
        }

        // edit_workflow: feed the returned workflow state into the diff store
        // as proposed changes (runs regardless of success/error state).
        if (current.name === 'edit_workflow') {
          try {
            // The payload location varies by producer; probe in priority order.
            const resultPayload = asRecord(
              data?.result || eventData.result || eventData.data || data?.data
            )
            const workflowState = asRecord(resultPayload?.workflowState)
            const hasWorkflowState = !!resultPayload?.workflowState
            logger.info('[SSE] edit_workflow result received', {
              hasWorkflowState,
              blockCount: hasWorkflowState ? Object.keys(workflowState.blocks ?? {}).length : 0,
              edgeCount: Array.isArray(workflowState.edges) ? workflowState.edges.length : 0,
            })
            if (hasWorkflowState) {
              const diffStore = useWorkflowDiffStore.getState()
              // Fire-and-forget: diff application is async; failures are logged.
              diffStore
                .setProposedChanges(resultPayload.workflowState as WorkflowState)
                .catch((err) => {
                  logger.error('[SSE] Failed to apply edit_workflow diff', {
                    error: err instanceof Error ? err.message : String(err),
                  })
                })
            }
          } catch (err) {
            logger.error('[SSE] edit_workflow result handling failed', {
              error: err instanceof Error ? err.message : String(err),
            })
          }
        }

        // Deploy tools: update deployment status in workflow registry
        if (
          targetState === ClientToolCallState.success &&
          (current.name === 'deploy_api' ||
            current.name === 'deploy_chat' ||
            current.name === 'deploy_mcp' ||
            current.name === 'redeploy')
        ) {
          try {
            const resultPayload = asRecord(
              data?.result || eventData.result || eventData.data || data?.data
            )
            const input = asRecord(current.params)
            // Prefer the id from the result, then the tool input, then the
            // currently active workflow.
            const workflowId =
              (resultPayload?.workflowId as string) ||
              (input?.workflowId as string) ||
              useWorkflowRegistry.getState().activeWorkflowId
            // Treat anything other than an explicit `false` as deployed.
            const isDeployed = resultPayload?.isDeployed !== false
            if (workflowId) {
              useWorkflowRegistry
                .getState()
                .setDeploymentStatus(workflowId, isDeployed, isDeployed ? new Date() : undefined)
              logger.info('[SSE] Updated deployment status from tool result', {
                toolName: current.name,
                workflowId,
                isDeployed,
              })
            }
          } catch (err) {
            logger.warn('[SSE] Failed to hydrate deployment status', {
              error: err instanceof Error ? err.message : String(err),
            })
          }
        }

        // Environment variables: reload store after successful set
        if (
          targetState === ClientToolCallState.success &&
          current.name === 'set_environment_variables'
        ) {
          try {
            useEnvironmentStore.getState().loadEnvironmentVariables()
            logger.info('[SSE] Triggered environment variables reload')
          } catch (err) {
            logger.warn('[SSE] Failed to reload environment variables', {
              error: err instanceof Error ? err.message : String(err),
            })
          }
        }

        // Workflow variables: reload store after successful set
        if (
          targetState === ClientToolCallState.success &&
          current.name === 'set_global_workflow_variables'
        ) {
          try {
            const input = asRecord(current.params)
            const workflowId =
              (input?.workflowId as string) || useWorkflowRegistry.getState().activeWorkflowId
            if (workflowId) {
              useVariablesStore.getState().loadForWorkflow(workflowId)
              logger.info('[SSE] Triggered workflow variables reload', { workflowId })
            }
          } catch (err) {
            logger.warn('[SSE] Failed to reload workflow variables', {
              error: err instanceof Error ? err.message : String(err),
            })
          }
        }
      }

      // Mirror the same state transition into the streaming message's content
      // blocks, with the same rejected/review/background guard.
      for (let i = 0; i < context.contentBlocks.length; i++) {
        const b = context.contentBlocks[i]
        if (b?.type === 'tool_call' && b?.toolCall?.id === toolCallId) {
          if (
            isRejectedState(b.toolCall?.state) ||
            isReviewState(b.toolCall?.state) ||
            isBackgroundState(b.toolCall?.state)
          )
            break
          const targetState = success
            ? ClientToolCallState.success
            : failedDependency || skipped
              ? ClientToolCallState.rejected
              : ClientToolCallState.error
          context.contentBlocks[i] = {
            ...b,
            toolCall: {
              ...b.toolCall,
              state: targetState,
              display: resolveToolDisplay(
                b.toolCall?.name,
                targetState,
                toolCallId,
                b.toolCall?.params
              ),
            },
          }
          break
        }
      }
      updateStreamingMessage(set, context)
    } catch (error) {
      // Best-effort: a malformed event must not kill the stream.
      logger.warn('Failed to process tool_result SSE event', {
        error: error instanceof Error ? error.message : String(error),
      })
    }
  },
|
||||||
|
  // Handles a `tool_error` SSE event: transitions the referenced tool call to
  // `rejected` (when a dependency failed) or `error`, both in the store map
  // and in the streaming message's content blocks. Calls already in a
  // rejected, review, or background state are left untouched, matching the
  // guard in `tool_result`.
  tool_error: (data, context, get, set) => {
    try {
      const errorData = asRecord(data?.data)
      // The tool-call id may arrive top-level or nested in the payload.
      const toolCallId: string | undefined =
        data?.toolCallId || (errorData.id as string | undefined)
      const failedDependency: boolean = data?.failedDependency === true
      if (!toolCallId) return
      const { toolCallsById } = get()
      const current = toolCallsById[toolCallId]
      if (current) {
        // Terminal/user-owned states win over late errors.
        if (
          isRejectedState(current.state) ||
          isReviewState(current.state) ||
          isBackgroundState(current.state)
        ) {
          return
        }
        const targetState = failedDependency
          ? ClientToolCallState.rejected
          : ClientToolCallState.error
        const updatedMap = { ...toolCallsById }
        updatedMap[toolCallId] = {
          ...current,
          state: targetState,
          display: resolveToolDisplay(current.name, targetState, current.id, current.params),
        }
        set({ toolCallsById: updatedMap })
      }
      // Mirror the state transition into the message's content blocks.
      for (let i = 0; i < context.contentBlocks.length; i++) {
        const b = context.contentBlocks[i]
        if (b?.type === 'tool_call' && b?.toolCall?.id === toolCallId) {
          if (
            isRejectedState(b.toolCall?.state) ||
            isReviewState(b.toolCall?.state) ||
            isBackgroundState(b.toolCall?.state)
          )
            break
          const targetState = failedDependency
            ? ClientToolCallState.rejected
            : ClientToolCallState.error
          context.contentBlocks[i] = {
            ...b,
            toolCall: {
              ...b.toolCall,
              state: targetState,
              display: resolveToolDisplay(
                b.toolCall?.name,
                targetState,
                toolCallId,
                b.toolCall?.params
              ),
            },
          }
          break
        }
      }
      updateStreamingMessage(set, context)
    } catch (error) {
      // Best-effort: a malformed event must not kill the stream.
      logger.warn('Failed to process tool_error SSE event', {
        error: error instanceof Error ? error.message : String(error),
      })
    }
  },
|
||||||
|
tool_generating: (data, context, get, set) => {
|
||||||
|
const { toolCallId, toolName } = data
|
||||||
|
if (!toolCallId || !toolName) return
|
||||||
|
const { toolCallsById } = get()
|
||||||
|
|
||||||
|
if (!toolCallsById[toolCallId]) {
|
||||||
|
const initialState = ClientToolCallState.pending
|
||||||
|
const tc: CopilotToolCall = {
|
||||||
|
id: toolCallId,
|
||||||
|
name: toolName,
|
||||||
|
state: initialState,
|
||||||
|
display: resolveToolDisplay(toolName, initialState, toolCallId),
|
||||||
|
}
|
||||||
|
const updated = { ...toolCallsById, [toolCallId]: tc }
|
||||||
|
set({ toolCallsById: updated })
|
||||||
|
logger.info('[toolCallsById] map updated', updated)
|
||||||
|
|
||||||
|
upsertToolCallBlock(context, tc)
|
||||||
|
updateStreamingMessage(set, context)
|
||||||
|
}
|
||||||
|
},
|
||||||
|
  // Handles a `tool_call` SSE event (possibly partial, streamed repeatedly as
  // arguments accumulate). Creates or refreshes the call in `pending` state in
  // both the store map and the message's content blocks. Once the call is
  // complete (not partial), runs tool-specific UI side effects — currently the
  // OAuth connect modal for `oauth_request_access`.
  tool_call: (data, context, get, set) => {
    const toolData = asRecord(data?.data)
    // id/name may arrive nested in the payload or at the top level.
    const id: string | undefined = (toolData.id as string | undefined) || data?.toolCallId
    const name: string | undefined = (toolData.name as string | undefined) || data?.toolName
    if (!id) return
    const args = toolData.arguments as Record<string, unknown> | undefined
    const isPartial = toolData.partial === true
    const { toolCallsById } = get()

    const existing = toolCallsById[id]
    // NOTE(review): when updating an existing call, `name` from this event
    // (possibly undefined) is passed to resolveToolDisplay rather than
    // existing.name — confirm that is intended.
    const next: CopilotToolCall = existing
      ? {
          ...existing,
          state: ClientToolCallState.pending,
          ...(args ? { params: args } : {}),
          display: resolveToolDisplay(name, ClientToolCallState.pending, id, args),
        }
      : {
          id,
          name: name || 'unknown_tool',
          state: ClientToolCallState.pending,
          ...(args ? { params: args } : {}),
          display: resolveToolDisplay(name, ClientToolCallState.pending, id, args),
        }
    const updated = { ...toolCallsById, [id]: next }
    set({ toolCallsById: updated })
    logger.info('[toolCallsById] → pending', { id, name, params: args })

    upsertToolCallBlock(context, next)
    updateStreamingMessage(set, context)

    // Side effects below only fire once the call's arguments are complete.
    if (isPartial) {
      return
    }

    // OAuth: dispatch event to open the OAuth connect modal
    if (name === 'oauth_request_access' && args && typeof window !== 'undefined') {
      try {
        // Accept both camelCase and snake_case argument spellings.
        window.dispatchEvent(
          new CustomEvent('open-oauth-connect', {
            detail: {
              providerName: (args.providerName || args.provider_name || '') as string,
              serviceId: (args.serviceId || args.service_id || '') as string,
              providerId: (args.providerId || args.provider_id || '') as string,
              requiredScopes: (args.requiredScopes || args.required_scopes || []) as string[],
              newScopes: (args.newScopes || args.new_scopes || []) as string[],
            },
          })
        )
        logger.info('[SSE] Dispatched OAuth connect event', {
          providerId: args.providerId || args.provider_id,
          providerName: args.providerName || args.provider_name,
        })
      } catch (err) {
        logger.warn('[SSE] Failed to dispatch OAuth connect event', {
          error: err instanceof Error ? err.message : String(err),
        })
      }
    }

    return
  },
|
||||||
|
reasoning: (data, context, _get, set) => {
|
||||||
|
const phase = (data && (data.phase || data?.data?.phase)) as string | undefined
|
||||||
|
if (phase === 'start') {
|
||||||
|
beginThinkingBlock(context)
|
||||||
|
updateStreamingMessage(set, context)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if (phase === 'end') {
|
||||||
|
finalizeThinkingBlock(context)
|
||||||
|
updateStreamingMessage(set, context)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
const chunk: string = typeof data?.data === 'string' ? data.data : data?.content || ''
|
||||||
|
if (!chunk) return
|
||||||
|
appendThinkingContent(context, chunk)
|
||||||
|
updateStreamingMessage(set, context)
|
||||||
|
},
|
||||||
|
  // Handles a `content` SSE event: incremental model text that may contain
  // inline control markup. Buffers text in `context.pendingContent` and runs a
  // small state machine over it, recognizing:
  //   - <design_workflow>…</design_workflow>  -> streamed into the plan panel
  //   - <thinking>…</thinking>                -> streamed into thinking blocks
  //   - <marktodo>id</marktodo> / <checkofftodo>id</checkofftodo>
  //                                           -> todo status side effects
  // Any text that could be the start of a tag split across chunks is kept in
  // the buffer until the next event resolves it. Statement order here is
  // deliberate and state-dependent; do not reorder casually.
  content: (data, context, get, set) => {
    if (!data.data) return
    context.pendingContent += data.data

    let contentToProcess = context.pendingContent
    let hasProcessedContent = false

    const thinkingStartRegex = /<thinking>/
    const thinkingEndRegex = /<\/thinking>/
    const designWorkflowStartRegex = /<design_workflow>/
    const designWorkflowEndRegex = /<\/design_workflow>/

    // Splits off a trailing '<…' fragment that could still grow into one of
    // `tags`, so it can be held back until more data arrives.
    const splitTrailingPartialTag = (
      text: string,
      tags: string[]
    ): { text: string; remaining: string } => {
      const partialIndex = text.lastIndexOf('<')
      if (partialIndex < 0) {
        return { text, remaining: '' }
      }
      const possibleTag = text.substring(partialIndex)
      const matchesTagStart = tags.some((tag) => tag.startsWith(possibleTag))
      if (!matchesTagStart) {
        return { text, remaining: '' }
      }
      return {
        text: text.substring(0, partialIndex),
        remaining: possibleTag,
      }
    }

    while (contentToProcess.length > 0) {
      // --- Inside a design_workflow block: everything goes to plan content ---
      if (context.isInDesignWorkflowBlock) {
        const endMatch = designWorkflowEndRegex.exec(contentToProcess)
        if (endMatch) {
          const designContent = contentToProcess.substring(0, endMatch.index)
          context.designWorkflowContent += designContent
          context.isInDesignWorkflowBlock = false

          logger.info('[design_workflow] Tag complete, setting plan content', {
            contentLength: context.designWorkflowContent.length,
          })
          set({ streamingPlanContent: context.designWorkflowContent })

          contentToProcess = contentToProcess.substring(endMatch.index + endMatch[0].length)
          hasProcessedContent = true
        } else {
          // No closing tag yet: stream what we have, hold back a possible
          // partial '</design_workflow>' fragment.
          const { text, remaining } = splitTrailingPartialTag(contentToProcess, [
            '</design_workflow>',
          ])
          context.designWorkflowContent += text

          set({ streamingPlanContent: context.designWorkflowContent })

          contentToProcess = remaining
          hasProcessedContent = true
          if (remaining) {
            break
          }
        }
        continue
      }

      // --- Plain-text mode: look for design_workflow and todo tags first ---
      if (!context.isInThinkingBlock && !context.isInDesignWorkflowBlock) {
        const designStartMatch = designWorkflowStartRegex.exec(contentToProcess)
        if (designStartMatch) {
          const textBeforeDesign = contentToProcess.substring(0, designStartMatch.index)
          if (textBeforeDesign) {
            appendTextBlock(context, textBeforeDesign)
            hasProcessedContent = true
          }
          context.isInDesignWorkflowBlock = true
          context.designWorkflowContent = ''
          contentToProcess = contentToProcess.substring(
            designStartMatch.index + designStartMatch[0].length
          )
          hasProcessedContent = true
          continue
        }

        // Find the earliest todo tag of either kind.
        const nextMarkIndex = contentToProcess.indexOf('<marktodo>')
        const nextCheckIndex = contentToProcess.indexOf('<checkofftodo>')
        const hasMark = nextMarkIndex >= 0
        const hasCheck = nextCheckIndex >= 0

        const nextTagIndex =
          hasMark && hasCheck
            ? Math.min(nextMarkIndex, nextCheckIndex)
            : hasMark
              ? nextMarkIndex
              : hasCheck
                ? nextCheckIndex
                : -1

        if (nextTagIndex >= 0) {
          const isMarkTodo = hasMark && nextMarkIndex === nextTagIndex
          const tagStart = isMarkTodo ? '<marktodo>' : '<checkofftodo>'
          const tagEnd = isMarkTodo ? '</marktodo>' : '</checkofftodo>'
          const closingIndex = contentToProcess.indexOf(tagEnd, nextTagIndex + tagStart.length)

          // Closing tag not streamed yet: wait for more data.
          if (closingIndex === -1) {
            break
          }

          const todoId = contentToProcess
            .substring(nextTagIndex + tagStart.length, closingIndex)
            .trim()
          logger.info(
            isMarkTodo ? '[TODO] Detected marktodo tag' : '[TODO] Detected checkofftodo tag',
            { todoId }
          )

          if (todoId) {
            try {
              get().updatePlanTodoStatus(todoId, isMarkTodo ? 'executing' : 'completed')
              logger.info(
                isMarkTodo
                  ? '[TODO] Successfully marked todo in progress'
                  : '[TODO] Successfully checked off todo',
                { todoId }
              )
            } catch (e) {
              logger.error(
                isMarkTodo
                  ? '[TODO] Failed to mark todo in progress'
                  : '[TODO] Failed to checkoff todo',
                { todoId, error: e }
              )
            }
          } else {
            logger.warn('[TODO] Empty todoId extracted from todo tag', { tagType: tagStart })
          }

          // Excise the tag. Collapse the surrounding newlines so the removal
          // does not leave a double blank line (keep one if both sides had one).
          let beforeTag = contentToProcess.substring(0, nextTagIndex)
          let afterTag = contentToProcess.substring(closingIndex + tagEnd.length)

          const hadNewlineBefore = /(\r?\n)+$/.test(beforeTag)
          const hadNewlineAfter = /^(\r?\n)+/.test(afterTag)

          beforeTag = beforeTag.replace(/(\r?\n)+$/, '')
          afterTag = afterTag.replace(/^(\r?\n)+/, '')

          contentToProcess =
            beforeTag + (hadNewlineBefore && hadNewlineAfter ? '\n' : '') + afterTag
          context.currentTextBlock = null
          hasProcessedContent = true
          continue
        }
      }

      // --- Thinking-tag handling ---
      if (context.isInThinkingBlock) {
        const endMatch = thinkingEndRegex.exec(contentToProcess)
        if (endMatch) {
          const thinkingContent = contentToProcess.substring(0, endMatch.index)
          appendThinkingContent(context, thinkingContent)
          finalizeThinkingBlock(context)
          contentToProcess = contentToProcess.substring(endMatch.index + endMatch[0].length)
          hasProcessedContent = true
        } else {
          // Stream available thinking text; hold back a partial '</thinking>'.
          const { text, remaining } = splitTrailingPartialTag(contentToProcess, ['</thinking>'])
          if (text) {
            appendThinkingContent(context, text)
            hasProcessedContent = true
          }
          contentToProcess = remaining
          if (remaining) {
            break
          }
        }
      } else {
        const startMatch = thinkingStartRegex.exec(contentToProcess)
        if (startMatch) {
          const textBeforeThinking = contentToProcess.substring(0, startMatch.index)
          if (textBeforeThinking) {
            appendTextBlock(context, textBeforeThinking)
            hasProcessedContent = true
          }
          context.isInThinkingBlock = true
          context.currentTextBlock = null
          contentToProcess = contentToProcess.substring(startMatch.index + startMatch[0].length)
          hasProcessedContent = true
        } else {
          // No tags found: emit text, but hold back a trailing fragment that
          // could be the start of any tag (only within the last 50 chars).
          let partialTagIndex = contentToProcess.lastIndexOf('<')

          const partialMarkTodo = contentToProcess.lastIndexOf('<marktodo')
          const partialCheckoffTodo = contentToProcess.lastIndexOf('<checkofftodo')

          if (partialMarkTodo > partialTagIndex) {
            partialTagIndex = partialMarkTodo
          }
          if (partialCheckoffTodo > partialTagIndex) {
            partialTagIndex = partialCheckoffTodo
          }

          let textToAdd = contentToProcess
          let remaining = ''
          if (partialTagIndex >= 0 && partialTagIndex > contentToProcess.length - 50) {
            textToAdd = contentToProcess.substring(0, partialTagIndex)
            remaining = contentToProcess.substring(partialTagIndex)
          }
          if (textToAdd) {
            appendTextBlock(context, textToAdd)
            hasProcessedContent = true
          }
          contentToProcess = remaining
          break
        }
      }
    }

    // Whatever could not be resolved yet stays buffered for the next event.
    context.pendingContent = contentToProcess
    if (hasProcessedContent) {
      updateStreamingMessage(set, context)
    }
  },
|
||||||
|
done: (_data, context) => {
|
||||||
|
logger.info('[SSE] DONE EVENT RECEIVED', {
|
||||||
|
doneEventCount: context.doneEventCount,
|
||||||
|
data: _data,
|
||||||
|
})
|
||||||
|
context.doneEventCount++
|
||||||
|
if (context.doneEventCount >= 1) {
|
||||||
|
logger.info('[SSE] Setting streamComplete = true, stream will terminate')
|
||||||
|
context.streamComplete = true
|
||||||
|
}
|
||||||
|
},
|
||||||
|
error: (data, context, _get, set) => {
|
||||||
|
logger.error('Stream error:', data.error)
|
||||||
|
set((state: CopilotStore) => ({
|
||||||
|
messages: state.messages.map((msg) =>
|
||||||
|
msg.id === context.messageId
|
||||||
|
? {
|
||||||
|
...msg,
|
||||||
|
content: context.accumulatedContent || 'An error occurred.',
|
||||||
|
error: data.error,
|
||||||
|
}
|
||||||
|
: msg
|
||||||
|
),
|
||||||
|
}))
|
||||||
|
context.streamComplete = true
|
||||||
|
},
|
||||||
|
stream_end: (_data, context, _get, set) => {
|
||||||
|
if (context.pendingContent) {
|
||||||
|
if (context.isInThinkingBlock && context.currentThinkingBlock) {
|
||||||
|
appendThinkingContent(context, context.pendingContent)
|
||||||
|
} else if (context.pendingContent.trim()) {
|
||||||
|
appendTextBlock(context, context.pendingContent)
|
||||||
|
}
|
||||||
|
context.pendingContent = ''
|
||||||
|
}
|
||||||
|
finalizeThinkingBlock(context)
|
||||||
|
updateStreamingMessage(set, context)
|
||||||
|
},
|
||||||
|
  // Fallback for unrecognized SSE event types — intentionally a no-op.
  default: () => {},
|
||||||
|
}
|
||||||
3
apps/sim/lib/copilot/client-sse/index.ts
Normal file
3
apps/sim/lib/copilot/client-sse/index.ts
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
export type { SSEHandler } from './handlers'
|
||||||
|
export { sseHandlers } from './handlers'
|
||||||
|
export { applySseEvent, subAgentSSEHandlers } from './subagent-handlers'
|
||||||
385
apps/sim/lib/copilot/client-sse/subagent-handlers.ts
Normal file
385
apps/sim/lib/copilot/client-sse/subagent-handlers.ts
Normal file
@@ -0,0 +1,385 @@
|
|||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import {
|
||||||
|
asRecord,
|
||||||
|
normalizeSseEvent,
|
||||||
|
shouldSkipToolCallEvent,
|
||||||
|
shouldSkipToolResultEvent,
|
||||||
|
} from '@/lib/copilot/orchestrator/sse-utils'
|
||||||
|
import type { SSEEvent } from '@/lib/copilot/orchestrator/types'
|
||||||
|
import { resolveToolDisplay } from '@/lib/copilot/store-utils'
|
||||||
|
import { ClientToolCallState } from '@/lib/copilot/tools/client/tool-display-registry'
|
||||||
|
import type { CopilotStore, CopilotToolCall } from '@/stores/panel/copilot/types'
|
||||||
|
import { type SSEHandler, sseHandlers, updateStreamingMessage } from './handlers'
|
||||||
|
import type { ClientStreamingContext } from './types'
|
||||||
|
|
||||||
|
const logger = createLogger('CopilotClientSubagentHandlers')
|
||||||
|
|
||||||
|
// Narrowed signature of the copilot zustand store's `set`: accepts either a
// partial state object or an updater function producing one.
type StoreSet = (
  partial: Partial<CopilotStore> | ((state: CopilotStore) => Partial<CopilotStore>)
) => void
|
||||||
|
|
||||||
|
export function appendSubAgentContent(
|
||||||
|
context: ClientStreamingContext,
|
||||||
|
parentToolCallId: string,
|
||||||
|
text: string
|
||||||
|
) {
|
||||||
|
if (!context.subAgentContent[parentToolCallId]) {
|
||||||
|
context.subAgentContent[parentToolCallId] = ''
|
||||||
|
}
|
||||||
|
if (!context.subAgentBlocks[parentToolCallId]) {
|
||||||
|
context.subAgentBlocks[parentToolCallId] = []
|
||||||
|
}
|
||||||
|
context.subAgentContent[parentToolCallId] += text
|
||||||
|
const blocks = context.subAgentBlocks[parentToolCallId]
|
||||||
|
const lastBlock = blocks[blocks.length - 1]
|
||||||
|
if (lastBlock && lastBlock.type === 'subagent_text') {
|
||||||
|
lastBlock.content = (lastBlock.content || '') + text
|
||||||
|
} else {
|
||||||
|
blocks.push({
|
||||||
|
type: 'subagent_text',
|
||||||
|
content: text,
|
||||||
|
timestamp: Date.now(),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Mirrors the subagent data accumulated in `context` (transcript, blocks,
 * nested tool calls) onto the parent tool call — in the store's
 * `toolCallsById` map and in the streaming message's content blocks — then
 * re-renders the streaming message. Marks the parent as actively streaming
 * subagent output (`subAgentStreaming: true`). Logs and bails if the parent
 * is unknown; logs (but still renders) if the parent has no content block.
 */
export function updateToolCallWithSubAgentData(
  context: ClientStreamingContext,
  get: () => CopilotStore,
  set: StoreSet,
  parentToolCallId: string
) {
  const { toolCallsById } = get()
  const parentToolCall = toolCallsById[parentToolCallId]
  if (!parentToolCall) {
    logger.warn('[SubAgent] updateToolCallWithSubAgentData: parent tool call not found', {
      parentToolCallId,
      availableToolCallIds: Object.keys(toolCallsById),
    })
    return
  }

  const blocks = context.subAgentBlocks[parentToolCallId] ?? []

  // Snapshot the accumulated subagent state onto the parent call.
  const updatedToolCall: CopilotToolCall = {
    ...parentToolCall,
    subAgentContent: context.subAgentContent[parentToolCallId] || '',
    subAgentToolCalls: context.subAgentToolCalls[parentToolCallId] ?? [],
    subAgentBlocks: blocks,
    subAgentStreaming: true,
  }

  logger.info('[SubAgent] Updating tool call with subagent data', {
    parentToolCallId,
    parentToolName: parentToolCall.name,
    subAgentContentLength: updatedToolCall.subAgentContent?.length,
    subAgentBlocksCount: updatedToolCall.subAgentBlocks?.length,
    subAgentToolCallsCount: updatedToolCall.subAgentToolCalls?.length,
  })

  const updatedMap = { ...toolCallsById, [parentToolCallId]: updatedToolCall }
  set({ toolCallsById: updatedMap })

  // Mirror into the streaming message's content blocks so the rendered
  // message stays in sync with the store.
  let foundInContentBlocks = false
  for (let i = 0; i < context.contentBlocks.length; i++) {
    const b = context.contentBlocks[i]
    if (b.type === 'tool_call' && b.toolCall?.id === parentToolCallId) {
      context.contentBlocks[i] = { ...b, toolCall: updatedToolCall }
      foundInContentBlocks = true
      break
    }
  }

  if (!foundInContentBlocks) {
    logger.warn('[SubAgent] Parent tool call not found in contentBlocks', {
      parentToolCallId,
      contentBlocksCount: context.contentBlocks.length,
      toolCallBlockIds: context.contentBlocks
        .filter((b) => b.type === 'tool_call')
        .map((b) => b.toolCall?.id),
    })
  }

  updateStreamingMessage(set, context)
}
|
||||||
|
|
||||||
|
export const subAgentSSEHandlers: Record<string, SSEHandler> = {
|
||||||
|
start: () => {
|
||||||
|
// Subagent start event - no action needed, parent is already tracked from subagent_start
|
||||||
|
},
|
||||||
|
|
||||||
|
content: (data, context, get, set) => {
|
||||||
|
const parentToolCallId = context.subAgentParentToolCallId
|
||||||
|
const contentStr = typeof data.data === 'string' ? data.data : data.content || ''
|
||||||
|
logger.info('[SubAgent] content event', {
|
||||||
|
parentToolCallId,
|
||||||
|
hasData: !!contentStr,
|
||||||
|
dataPreview: contentStr ? contentStr.substring(0, 50) : null,
|
||||||
|
})
|
||||||
|
if (!parentToolCallId || !contentStr) {
|
||||||
|
logger.warn('[SubAgent] content missing parentToolCallId or data', {
|
||||||
|
parentToolCallId,
|
||||||
|
hasData: !!contentStr,
|
||||||
|
})
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
appendSubAgentContent(context, parentToolCallId, contentStr)
|
||||||
|
|
||||||
|
updateToolCallWithSubAgentData(context, get, set, parentToolCallId)
|
||||||
|
},
|
||||||
|
|
||||||
|
reasoning: (data, context, get, set) => {
|
||||||
|
const parentToolCallId = context.subAgentParentToolCallId
|
||||||
|
const dataObj = asRecord(data?.data)
|
||||||
|
const phase = data?.phase || (dataObj.phase as string | undefined)
|
||||||
|
if (!parentToolCallId) return
|
||||||
|
|
||||||
|
if (phase === 'start' || phase === 'end') return
|
||||||
|
|
||||||
|
const chunk = typeof data?.data === 'string' ? data.data : data?.content || ''
|
||||||
|
if (!chunk) return
|
||||||
|
|
||||||
|
appendSubAgentContent(context, parentToolCallId, chunk)
|
||||||
|
|
||||||
|
updateToolCallWithSubAgentData(context, get, set, parentToolCallId)
|
||||||
|
},
|
||||||
|
|
||||||
|
tool_generating: () => {
|
||||||
|
// Tool generating event - no action needed, we'll handle the actual tool_call
|
||||||
|
},
|
||||||
|
|
||||||
|
tool_call: async (data, context, get, set) => {
|
||||||
|
const parentToolCallId = context.subAgentParentToolCallId
|
||||||
|
if (!parentToolCallId) return
|
||||||
|
|
||||||
|
const toolData = asRecord(data?.data)
|
||||||
|
const id: string | undefined = (toolData.id as string | undefined) || data?.toolCallId
|
||||||
|
const name: string | undefined = (toolData.name as string | undefined) || data?.toolName
|
||||||
|
if (!id || !name) return
|
||||||
|
const isPartial = toolData.partial === true
|
||||||
|
|
||||||
|
let args: Record<string, unknown> | undefined = (toolData.arguments || toolData.input) as
|
||||||
|
| Record<string, unknown>
|
||||||
|
| undefined
|
||||||
|
|
||||||
|
if (typeof args === 'string') {
|
||||||
|
try {
|
||||||
|
args = JSON.parse(args) as Record<string, unknown>
|
||||||
|
} catch {
|
||||||
|
logger.warn('[SubAgent] Failed to parse arguments string', { args })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info('[SubAgent] tool_call received', {
|
||||||
|
id,
|
||||||
|
name,
|
||||||
|
hasArgs: !!args,
|
||||||
|
argsKeys: args ? Object.keys(args) : [],
|
||||||
|
toolDataKeys: Object.keys(toolData),
|
||||||
|
dataKeys: Object.keys(data ?? {}),
|
||||||
|
})
|
||||||
|
|
||||||
|
if (!context.subAgentToolCalls[parentToolCallId]) {
|
||||||
|
context.subAgentToolCalls[parentToolCallId] = []
|
||||||
|
}
|
||||||
|
if (!context.subAgentBlocks[parentToolCallId]) {
|
||||||
|
context.subAgentBlocks[parentToolCallId] = []
|
||||||
|
}
|
||||||
|
|
||||||
|
const existingIndex = context.subAgentToolCalls[parentToolCallId].findIndex(
|
||||||
|
(tc: CopilotToolCall) => tc.id === id
|
||||||
|
)
|
||||||
|
const subAgentToolCall: CopilotToolCall = {
|
||||||
|
id,
|
||||||
|
name,
|
||||||
|
state: ClientToolCallState.pending,
|
||||||
|
...(args ? { params: args } : {}),
|
||||||
|
display: resolveToolDisplay(name, ClientToolCallState.pending, id, args),
|
||||||
|
}
|
||||||
|
|
||||||
|
if (existingIndex >= 0) {
|
||||||
|
context.subAgentToolCalls[parentToolCallId][existingIndex] = subAgentToolCall
|
||||||
|
} else {
|
||||||
|
context.subAgentToolCalls[parentToolCallId].push(subAgentToolCall)
|
||||||
|
|
||||||
|
context.subAgentBlocks[parentToolCallId].push({
|
||||||
|
type: 'subagent_tool_call',
|
||||||
|
toolCall: subAgentToolCall,
|
||||||
|
timestamp: Date.now(),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
const { toolCallsById } = get()
|
||||||
|
const updated = { ...toolCallsById, [id]: subAgentToolCall }
|
||||||
|
set({ toolCallsById: updated })
|
||||||
|
|
||||||
|
updateToolCallWithSubAgentData(context, get, set, parentToolCallId)
|
||||||
|
|
||||||
|
if (isPartial) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
tool_result: (data, context, get, set) => {
|
||||||
|
const parentToolCallId = context.subAgentParentToolCallId
|
||||||
|
if (!parentToolCallId) return
|
||||||
|
|
||||||
|
const resultData = asRecord(data?.data)
|
||||||
|
const toolCallId: string | undefined = data?.toolCallId || (resultData.id as string | undefined)
|
||||||
|
// Determine success: explicit `success` field takes priority; otherwise
|
||||||
|
// infer from presence of result data vs error (same logic as server-side
|
||||||
|
// inferToolSuccess). The Go backend uses `*bool` with omitempty so
|
||||||
|
// `success` is present when explicitly set, and absent for non-tool events.
|
||||||
|
const hasExplicitSuccess =
|
||||||
|
data?.success !== undefined || resultData.success !== undefined
|
||||||
|
const explicitSuccess = data?.success ?? resultData.success
|
||||||
|
const hasResultData = data?.result !== undefined || resultData.result !== undefined
|
||||||
|
const hasError = !!data?.error || !!resultData.error
|
||||||
|
const success: boolean = hasExplicitSuccess
|
||||||
|
? !!explicitSuccess
|
||||||
|
: hasResultData && !hasError
|
||||||
|
if (!toolCallId) return
|
||||||
|
|
||||||
|
if (!context.subAgentToolCalls[parentToolCallId]) return
|
||||||
|
if (!context.subAgentBlocks[parentToolCallId]) return
|
||||||
|
|
||||||
|
const targetState = success ? ClientToolCallState.success : ClientToolCallState.error
|
||||||
|
const existingIndex = context.subAgentToolCalls[parentToolCallId].findIndex(
|
||||||
|
(tc: CopilotToolCall) => tc.id === toolCallId
|
||||||
|
)
|
||||||
|
|
||||||
|
if (existingIndex >= 0) {
|
||||||
|
const existing = context.subAgentToolCalls[parentToolCallId][existingIndex]
|
||||||
|
const updatedSubAgentToolCall = {
|
||||||
|
...existing,
|
||||||
|
state: targetState,
|
||||||
|
display: resolveToolDisplay(existing.name, targetState, toolCallId, existing.params),
|
||||||
|
}
|
||||||
|
context.subAgentToolCalls[parentToolCallId][existingIndex] = updatedSubAgentToolCall
|
||||||
|
|
||||||
|
for (const block of context.subAgentBlocks[parentToolCallId]) {
|
||||||
|
if (block.type === 'subagent_tool_call' && block.toolCall?.id === toolCallId) {
|
||||||
|
block.toolCall = updatedSubAgentToolCall
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const { toolCallsById } = get()
|
||||||
|
if (toolCallsById[toolCallId]) {
|
||||||
|
const updatedMap = {
|
||||||
|
...toolCallsById,
|
||||||
|
[toolCallId]: updatedSubAgentToolCall,
|
||||||
|
}
|
||||||
|
set({ toolCallsById: updatedMap })
|
||||||
|
logger.info('[SubAgent] Updated subagent tool call state in toolCallsById', {
|
||||||
|
toolCallId,
|
||||||
|
name: existing.name,
|
||||||
|
state: targetState,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
updateToolCallWithSubAgentData(context, get, set, parentToolCallId)
|
||||||
|
},
|
||||||
|
|
||||||
|
done: (_data, context, get, set) => {
|
||||||
|
const parentToolCallId = context.subAgentParentToolCallId
|
||||||
|
if (!parentToolCallId) return
|
||||||
|
|
||||||
|
updateToolCallWithSubAgentData(context, get, set, parentToolCallId)
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function applySseEvent(
|
||||||
|
rawData: SSEEvent,
|
||||||
|
context: ClientStreamingContext,
|
||||||
|
get: () => CopilotStore,
|
||||||
|
set: (next: Partial<CopilotStore> | ((state: CopilotStore) => Partial<CopilotStore>)) => void
|
||||||
|
): Promise<boolean> {
|
||||||
|
const normalizedEvent = normalizeSseEvent(rawData)
|
||||||
|
if (shouldSkipToolCallEvent(normalizedEvent) || shouldSkipToolResultEvent(normalizedEvent)) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
const data = normalizedEvent
|
||||||
|
|
||||||
|
if (data.type === 'subagent_start') {
|
||||||
|
const startData = asRecord(data.data)
|
||||||
|
const toolCallId = startData.tool_call_id as string | undefined
|
||||||
|
if (toolCallId) {
|
||||||
|
context.subAgentParentToolCallId = toolCallId
|
||||||
|
const { toolCallsById } = get()
|
||||||
|
const parentToolCall = toolCallsById[toolCallId]
|
||||||
|
if (parentToolCall) {
|
||||||
|
const updatedToolCall: CopilotToolCall = {
|
||||||
|
...parentToolCall,
|
||||||
|
subAgentStreaming: true,
|
||||||
|
}
|
||||||
|
const updatedMap = { ...toolCallsById, [toolCallId]: updatedToolCall }
|
||||||
|
set({ toolCallsById: updatedMap })
|
||||||
|
}
|
||||||
|
logger.info('[SSE] Subagent session started', {
|
||||||
|
subagent: data.subagent,
|
||||||
|
parentToolCallId: toolCallId,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
if (data.type === 'subagent_end') {
|
||||||
|
const parentToolCallId = context.subAgentParentToolCallId
|
||||||
|
if (parentToolCallId) {
|
||||||
|
const { toolCallsById } = get()
|
||||||
|
const parentToolCall = toolCallsById[parentToolCallId]
|
||||||
|
if (parentToolCall) {
|
||||||
|
const updatedToolCall: CopilotToolCall = {
|
||||||
|
...parentToolCall,
|
||||||
|
subAgentContent: context.subAgentContent[parentToolCallId] || '',
|
||||||
|
subAgentToolCalls: context.subAgentToolCalls[parentToolCallId] ?? [],
|
||||||
|
subAgentBlocks: context.subAgentBlocks[parentToolCallId] ?? [],
|
||||||
|
subAgentStreaming: false,
|
||||||
|
}
|
||||||
|
const updatedMap = { ...toolCallsById, [parentToolCallId]: updatedToolCall }
|
||||||
|
set({ toolCallsById: updatedMap })
|
||||||
|
logger.info('[SSE] Subagent session ended', {
|
||||||
|
subagent: data.subagent,
|
||||||
|
parentToolCallId,
|
||||||
|
contentLength: context.subAgentContent[parentToolCallId]?.length || 0,
|
||||||
|
toolCallCount: context.subAgentToolCalls[parentToolCallId]?.length || 0,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
context.subAgentParentToolCallId = undefined
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
if (data.subagent) {
|
||||||
|
const parentToolCallId = context.subAgentParentToolCallId
|
||||||
|
if (!parentToolCallId) {
|
||||||
|
logger.warn('[SSE] Subagent event without parent tool call ID', {
|
||||||
|
type: data.type,
|
||||||
|
subagent: data.subagent,
|
||||||
|
})
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info('[SSE] Processing subagent event', {
|
||||||
|
type: data.type,
|
||||||
|
subagent: data.subagent,
|
||||||
|
parentToolCallId,
|
||||||
|
hasHandler: !!subAgentSSEHandlers[data.type],
|
||||||
|
})
|
||||||
|
|
||||||
|
const subAgentHandler = subAgentSSEHandlers[data.type]
|
||||||
|
if (subAgentHandler) {
|
||||||
|
await subAgentHandler(data, context, get, set)
|
||||||
|
} else {
|
||||||
|
logger.warn('[SSE] No handler for subagent event type', { type: data.type })
|
||||||
|
}
|
||||||
|
return !context.streamComplete
|
||||||
|
}
|
||||||
|
|
||||||
|
const handler = sseHandlers[data.type] || sseHandlers.default
|
||||||
|
await handler(data, context, get, set)
|
||||||
|
return !context.streamComplete
|
||||||
|
}
|
||||||
45
apps/sim/lib/copilot/client-sse/types.ts
Normal file
45
apps/sim/lib/copilot/client-sse/types.ts
Normal file
@@ -0,0 +1,45 @@
|
|||||||
|
import type {
|
||||||
|
ChatContext,
|
||||||
|
CopilotToolCall,
|
||||||
|
SubAgentContentBlock,
|
||||||
|
} from '@/stores/panel/copilot/types'
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A content block used in copilot messages and during streaming.
|
||||||
|
* Uses a literal type union for `type` to stay compatible with CopilotMessage.
|
||||||
|
*/
|
||||||
|
export type ContentBlockType = 'text' | 'thinking' | 'tool_call' | 'contexts'
|
||||||
|
|
||||||
|
export interface ClientContentBlock {
|
||||||
|
type: ContentBlockType
|
||||||
|
content?: string
|
||||||
|
timestamp: number
|
||||||
|
toolCall?: CopilotToolCall | null
|
||||||
|
startTime?: number
|
||||||
|
duration?: number
|
||||||
|
contexts?: ChatContext[]
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface StreamingContext {
|
||||||
|
messageId: string
|
||||||
|
accumulatedContent: string
|
||||||
|
contentBlocks: ClientContentBlock[]
|
||||||
|
currentTextBlock: ClientContentBlock | null
|
||||||
|
isInThinkingBlock: boolean
|
||||||
|
currentThinkingBlock: ClientContentBlock | null
|
||||||
|
isInDesignWorkflowBlock: boolean
|
||||||
|
designWorkflowContent: string
|
||||||
|
pendingContent: string
|
||||||
|
newChatId?: string
|
||||||
|
doneEventCount: number
|
||||||
|
streamComplete?: boolean
|
||||||
|
wasAborted?: boolean
|
||||||
|
suppressContinueOption?: boolean
|
||||||
|
subAgentParentToolCallId?: string
|
||||||
|
subAgentContent: Record<string, string>
|
||||||
|
subAgentToolCalls: Record<string, CopilotToolCall[]>
|
||||||
|
subAgentBlocks: Record<string, SubAgentContentBlock[]>
|
||||||
|
suppressStreamingUpdates?: boolean
|
||||||
|
}
|
||||||
|
|
||||||
|
export type ClientStreamingContext = StreamingContext
|
||||||
@@ -109,14 +109,14 @@ function parseBooleanEnv(value: string | undefined): boolean | null {
|
|||||||
export const DEFAULT_COPILOT_CONFIG: CopilotConfig = {
|
export const DEFAULT_COPILOT_CONFIG: CopilotConfig = {
|
||||||
chat: {
|
chat: {
|
||||||
defaultProvider: 'anthropic',
|
defaultProvider: 'anthropic',
|
||||||
defaultModel: 'claude-3-7-sonnet-latest',
|
defaultModel: 'claude-4.6-opus',
|
||||||
temperature: 0.1,
|
temperature: 0.1,
|
||||||
maxTokens: 8192,
|
maxTokens: 8192,
|
||||||
systemPrompt: AGENT_MODE_SYSTEM_PROMPT,
|
systemPrompt: AGENT_MODE_SYSTEM_PROMPT,
|
||||||
},
|
},
|
||||||
rag: {
|
rag: {
|
||||||
defaultProvider: 'anthropic',
|
defaultProvider: 'anthropic',
|
||||||
defaultModel: 'claude-3-7-sonnet-latest',
|
defaultModel: 'claude-4.6-opus',
|
||||||
temperature: 0.1,
|
temperature: 0.1,
|
||||||
maxTokens: 2000,
|
maxTokens: 2000,
|
||||||
embeddingModel: 'text-embedding-3-small',
|
embeddingModel: 'text-embedding-3-small',
|
||||||
|
|||||||
@@ -1,2 +1,115 @@
|
|||||||
|
import { env } from '@/lib/core/config/env'
|
||||||
|
|
||||||
export const SIM_AGENT_API_URL_DEFAULT = 'https://copilot.sim.ai'
|
export const SIM_AGENT_API_URL_DEFAULT = 'https://copilot.sim.ai'
|
||||||
export const SIM_AGENT_VERSION = '1.0.3'
|
export const SIM_AGENT_VERSION = '1.0.3'
|
||||||
|
|
||||||
|
/** Resolved copilot backend URL — reads from env with fallback to default. */
|
||||||
|
const rawAgentUrl = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT
|
||||||
|
export const SIM_AGENT_API_URL =
|
||||||
|
rawAgentUrl.startsWith('http://') || rawAgentUrl.startsWith('https://')
|
||||||
|
? rawAgentUrl
|
||||||
|
: SIM_AGENT_API_URL_DEFAULT
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Redis key prefixes
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/** Redis key prefix for tool call confirmation payloads (polled by waitForToolDecision). */
|
||||||
|
export const REDIS_TOOL_CALL_PREFIX = 'tool_call:'
|
||||||
|
|
||||||
|
/** Redis key prefix for copilot SSE stream buffers. */
|
||||||
|
export const REDIS_COPILOT_STREAM_PREFIX = 'copilot_stream:'
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Timeouts
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/** Default timeout for the copilot orchestration stream loop (5 min). */
|
||||||
|
export const ORCHESTRATION_TIMEOUT_MS = 300_000
|
||||||
|
|
||||||
|
/** Timeout for the client-side streaming response handler (10 min). */
|
||||||
|
export const STREAM_TIMEOUT_MS = 600_000
|
||||||
|
|
||||||
|
/** TTL for Redis tool call confirmation entries (24 h). */
|
||||||
|
export const REDIS_TOOL_CALL_TTL_SECONDS = 86_400
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Tool decision polling
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/** Initial poll interval when waiting for a user tool decision. */
|
||||||
|
export const TOOL_DECISION_INITIAL_POLL_MS = 100
|
||||||
|
|
||||||
|
/** Maximum poll interval when waiting for a user tool decision. */
|
||||||
|
export const TOOL_DECISION_MAX_POLL_MS = 3_000
|
||||||
|
|
||||||
|
/** Backoff multiplier for the tool decision poll interval. */
|
||||||
|
export const TOOL_DECISION_POLL_BACKOFF = 1.5
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Stream resume
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/** Maximum number of resume attempts before giving up. */
|
||||||
|
export const MAX_RESUME_ATTEMPTS = 3
|
||||||
|
|
||||||
|
/** SessionStorage key for persisting active stream metadata across page reloads. */
|
||||||
|
export const STREAM_STORAGE_KEY = 'copilot_active_stream'
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Client-side streaming batching
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/** Delay (ms) before processing the next queued message after stream completion. */
|
||||||
|
export const QUEUE_PROCESS_DELAY_MS = 100
|
||||||
|
|
||||||
|
/** Delay (ms) before invalidating subscription queries after stream completion. */
|
||||||
|
export const SUBSCRIPTION_INVALIDATE_DELAY_MS = 1_000
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// UI helpers
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/** Maximum character length for an optimistic chat title derived from a user message. */
|
||||||
|
export const OPTIMISTIC_TITLE_MAX_LENGTH = 50
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Copilot API paths (client-side fetch targets)
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/** POST — send a chat message to the copilot. */
|
||||||
|
export const COPILOT_CHAT_API_PATH = '/api/copilot/chat'
|
||||||
|
|
||||||
|
/** GET — resume/replay a copilot SSE stream. */
|
||||||
|
export const COPILOT_CHAT_STREAM_API_PATH = '/api/copilot/chat/stream'
|
||||||
|
|
||||||
|
/** POST — persist chat messages / plan artifact / config. */
|
||||||
|
export const COPILOT_UPDATE_MESSAGES_API_PATH = '/api/copilot/chat/update-messages'
|
||||||
|
|
||||||
|
/** DELETE — delete a copilot chat. */
|
||||||
|
export const COPILOT_DELETE_CHAT_API_PATH = '/api/copilot/chat/delete'
|
||||||
|
|
||||||
|
/** POST — confirm or reject a tool call. */
|
||||||
|
export const COPILOT_CONFIRM_API_PATH = '/api/copilot/confirm'
|
||||||
|
|
||||||
|
/** POST — forward diff-accepted/rejected stats to the copilot backend. */
|
||||||
|
export const COPILOT_STATS_API_PATH = '/api/copilot/stats'
|
||||||
|
|
||||||
|
/** GET — load checkpoints for a chat. */
|
||||||
|
export const COPILOT_CHECKPOINTS_API_PATH = '/api/copilot/checkpoints'
|
||||||
|
|
||||||
|
/** POST — revert to a checkpoint. */
|
||||||
|
export const COPILOT_CHECKPOINTS_REVERT_API_PATH = '/api/copilot/checkpoints/revert'
|
||||||
|
|
||||||
|
/** GET/POST/DELETE — manage auto-allowed tools. */
|
||||||
|
export const COPILOT_AUTO_ALLOWED_TOOLS_API_PATH = '/api/copilot/auto-allowed-tools'
|
||||||
|
|
||||||
|
/** GET — fetch user credentials for masking. */
|
||||||
|
export const COPILOT_CREDENTIALS_API_PATH = '/api/copilot/credentials'
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Dedup limits
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/** Maximum entries in the in-memory SSE tool-event dedup cache. */
|
||||||
|
export const STREAM_BUFFER_MAX_DEDUP_ENTRIES = 1_000
|
||||||
|
|||||||
129
apps/sim/lib/copilot/messages/checkpoints.ts
Normal file
129
apps/sim/lib/copilot/messages/checkpoints.ts
Normal file
@@ -0,0 +1,129 @@
|
|||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { COPILOT_CHECKPOINTS_API_PATH } from '@/lib/copilot/constants'
|
||||||
|
import type { CopilotMessage, CopilotStore, CopilotToolCall } from '@/stores/panel/copilot/types'
|
||||||
|
import { mergeSubblockState } from '@/stores/workflows/utils'
|
||||||
|
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
|
||||||
|
import type { WorkflowState } from '@/stores/workflows/workflow/types'
|
||||||
|
|
||||||
|
const logger = createLogger('CopilotMessageCheckpoints')
|
||||||
|
|
||||||
|
export function buildCheckpointWorkflowState(workflowId: string): WorkflowState | null {
|
||||||
|
const rawState = useWorkflowStore.getState().getWorkflowState()
|
||||||
|
if (!rawState) return null
|
||||||
|
|
||||||
|
const blocksWithSubblockValues = mergeSubblockState(rawState.blocks, workflowId)
|
||||||
|
|
||||||
|
const filteredBlocks = Object.entries(blocksWithSubblockValues).reduce(
|
||||||
|
(acc, [blockId, block]) => {
|
||||||
|
if (block?.type && block?.name) {
|
||||||
|
acc[blockId] = {
|
||||||
|
...block,
|
||||||
|
id: block.id || blockId,
|
||||||
|
enabled: block.enabled !== undefined ? block.enabled : true,
|
||||||
|
horizontalHandles: block.horizontalHandles !== undefined ? block.horizontalHandles : true,
|
||||||
|
height: block.height !== undefined ? block.height : 90,
|
||||||
|
subBlocks: block.subBlocks ?? {},
|
||||||
|
outputs: block.outputs ?? {},
|
||||||
|
data: block.data ?? {},
|
||||||
|
position: block.position || { x: 0, y: 0 },
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return acc
|
||||||
|
},
|
||||||
|
{} as WorkflowState['blocks']
|
||||||
|
)
|
||||||
|
|
||||||
|
return {
|
||||||
|
blocks: filteredBlocks,
|
||||||
|
edges: rawState.edges ?? [],
|
||||||
|
loops: rawState.loops ?? {},
|
||||||
|
parallels: rawState.parallels ?? {},
|
||||||
|
lastSaved: rawState.lastSaved || Date.now(),
|
||||||
|
deploymentStatuses: rawState.deploymentStatuses ?? {},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function saveMessageCheckpoint(
|
||||||
|
messageId: string,
|
||||||
|
get: () => CopilotStore,
|
||||||
|
set: (partial: Partial<CopilotStore> | ((state: CopilotStore) => Partial<CopilotStore>)) => void
|
||||||
|
): Promise<boolean> {
|
||||||
|
const { workflowId, currentChat, messageSnapshots, messageCheckpoints } = get()
|
||||||
|
if (!workflowId || !currentChat?.id) return false
|
||||||
|
|
||||||
|
const snapshot = messageSnapshots[messageId]
|
||||||
|
if (!snapshot) return false
|
||||||
|
|
||||||
|
const nextSnapshots = { ...messageSnapshots }
|
||||||
|
delete nextSnapshots[messageId]
|
||||||
|
set({ messageSnapshots: nextSnapshots })
|
||||||
|
|
||||||
|
try {
|
||||||
|
const response = await fetch(COPILOT_CHECKPOINTS_API_PATH, {
|
||||||
|
method: 'POST',
|
||||||
|
headers: { 'Content-Type': 'application/json' },
|
||||||
|
body: JSON.stringify({
|
||||||
|
workflowId,
|
||||||
|
chatId: currentChat.id,
|
||||||
|
messageId,
|
||||||
|
workflowState: JSON.stringify(snapshot),
|
||||||
|
}),
|
||||||
|
})
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
throw new Error(`Failed to create checkpoint: ${response.statusText}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
const result = await response.json()
|
||||||
|
const newCheckpoint = result.checkpoint
|
||||||
|
if (newCheckpoint) {
|
||||||
|
const existingCheckpoints = messageCheckpoints[messageId] ?? []
|
||||||
|
const updatedCheckpoints = {
|
||||||
|
...messageCheckpoints,
|
||||||
|
[messageId]: [newCheckpoint, ...existingCheckpoints],
|
||||||
|
}
|
||||||
|
set({ messageCheckpoints: updatedCheckpoints })
|
||||||
|
}
|
||||||
|
|
||||||
|
return true
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('Failed to create checkpoint from snapshot:', error)
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function extractToolCallsRecursively(
|
||||||
|
toolCall: CopilotToolCall,
|
||||||
|
map: Record<string, CopilotToolCall>
|
||||||
|
): void {
|
||||||
|
if (!toolCall?.id) return
|
||||||
|
map[toolCall.id] = toolCall
|
||||||
|
|
||||||
|
if (Array.isArray(toolCall.subAgentBlocks)) {
|
||||||
|
for (const block of toolCall.subAgentBlocks) {
|
||||||
|
if (block?.type === 'subagent_tool_call' && block.toolCall?.id) {
|
||||||
|
extractToolCallsRecursively(block.toolCall, map)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (Array.isArray(toolCall.subAgentToolCalls)) {
|
||||||
|
for (const subTc of toolCall.subAgentToolCalls) {
|
||||||
|
extractToolCallsRecursively(subTc, map)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function buildToolCallsById(messages: CopilotMessage[]): Record<string, CopilotToolCall> {
|
||||||
|
const toolCallsById: Record<string, CopilotToolCall> = {}
|
||||||
|
for (const msg of messages) {
|
||||||
|
if (msg.contentBlocks) {
|
||||||
|
for (const block of msg.contentBlocks) {
|
||||||
|
if (block?.type === 'tool_call' && block.toolCall?.id) {
|
||||||
|
extractToolCallsRecursively(block.toolCall, toolCallsById)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return toolCallsById
|
||||||
|
}
|
||||||
28
apps/sim/lib/copilot/messages/credential-masking.ts
Normal file
28
apps/sim/lib/copilot/messages/credential-masking.ts
Normal file
@@ -0,0 +1,28 @@
|
|||||||
|
export function maskCredentialIdsInValue<T>(value: T, credentialIds: Set<string>): T {
|
||||||
|
if (!value || credentialIds.size === 0) return value
|
||||||
|
|
||||||
|
if (typeof value === 'string') {
|
||||||
|
let masked = value as string
|
||||||
|
const sortedIds = Array.from(credentialIds).sort((a, b) => b.length - a.length)
|
||||||
|
for (const id of sortedIds) {
|
||||||
|
if (id && masked.includes(id)) {
|
||||||
|
masked = masked.split(id).join('••••••••')
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return masked as unknown as T
|
||||||
|
}
|
||||||
|
|
||||||
|
if (Array.isArray(value)) {
|
||||||
|
return value.map((item) => maskCredentialIdsInValue(item, credentialIds)) as T
|
||||||
|
}
|
||||||
|
|
||||||
|
if (typeof value === 'object') {
|
||||||
|
const masked: Record<string, unknown> = {}
|
||||||
|
for (const key of Object.keys(value as Record<string, unknown>)) {
|
||||||
|
masked[key] = maskCredentialIdsInValue((value as Record<string, unknown>)[key], credentialIds)
|
||||||
|
}
|
||||||
|
return masked as T
|
||||||
|
}
|
||||||
|
|
||||||
|
return value
|
||||||
|
}
|
||||||
4
apps/sim/lib/copilot/messages/index.ts
Normal file
4
apps/sim/lib/copilot/messages/index.ts
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
export * from './checkpoints'
|
||||||
|
export * from './credential-masking'
|
||||||
|
export * from './persist'
|
||||||
|
export * from './serialization'
|
||||||
43
apps/sim/lib/copilot/messages/persist.ts
Normal file
43
apps/sim/lib/copilot/messages/persist.ts
Normal file
@@ -0,0 +1,43 @@
|
|||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { COPILOT_UPDATE_MESSAGES_API_PATH } from '@/lib/copilot/constants'
|
||||||
|
import type { CopilotMessage } from '@/stores/panel/copilot/types'
|
||||||
|
import { serializeMessagesForDB } from './serialization'
|
||||||
|
|
||||||
|
const logger = createLogger('CopilotMessagePersistence')
|
||||||
|
|
||||||
|
export async function persistMessages(params: {
|
||||||
|
chatId: string
|
||||||
|
messages: CopilotMessage[]
|
||||||
|
sensitiveCredentialIds?: Set<string>
|
||||||
|
planArtifact?: string | null
|
||||||
|
mode?: string
|
||||||
|
model?: string
|
||||||
|
conversationId?: string
|
||||||
|
}): Promise<boolean> {
|
||||||
|
try {
|
||||||
|
const dbMessages = serializeMessagesForDB(
|
||||||
|
params.messages,
|
||||||
|
params.sensitiveCredentialIds ?? new Set<string>()
|
||||||
|
)
|
||||||
|
const response = await fetch(COPILOT_UPDATE_MESSAGES_API_PATH, {
|
||||||
|
method: 'POST',
|
||||||
|
headers: { 'Content-Type': 'application/json' },
|
||||||
|
body: JSON.stringify({
|
||||||
|
chatId: params.chatId,
|
||||||
|
messages: dbMessages,
|
||||||
|
...(params.planArtifact !== undefined ? { planArtifact: params.planArtifact } : {}),
|
||||||
|
...(params.mode || params.model
|
||||||
|
? { config: { mode: params.mode, model: params.model } }
|
||||||
|
: {}),
|
||||||
|
...(params.conversationId ? { conversationId: params.conversationId } : {}),
|
||||||
|
}),
|
||||||
|
})
|
||||||
|
return response.ok
|
||||||
|
} catch (error) {
|
||||||
|
logger.warn('Failed to persist messages', {
|
||||||
|
chatId: params.chatId,
|
||||||
|
error: error instanceof Error ? error.message : String(error),
|
||||||
|
})
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
172
apps/sim/lib/copilot/messages/serialization.ts
Normal file
172
apps/sim/lib/copilot/messages/serialization.ts
Normal file
@@ -0,0 +1,172 @@
|
|||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import type { CopilotMessage, CopilotToolCall } from '@/stores/panel/copilot/types'
|
||||||
|
import { maskCredentialIdsInValue } from './credential-masking'
|
||||||
|
|
||||||
|
const logger = createLogger('CopilotMessageSerialization')
|
||||||
|
|
||||||
|
export function clearStreamingFlags(toolCall: CopilotToolCall): void {
|
||||||
|
if (!toolCall) return
|
||||||
|
|
||||||
|
toolCall.subAgentStreaming = false
|
||||||
|
|
||||||
|
if (Array.isArray(toolCall.subAgentBlocks)) {
|
||||||
|
for (const block of toolCall.subAgentBlocks) {
|
||||||
|
if (block?.type === 'subagent_tool_call' && block.toolCall) {
|
||||||
|
clearStreamingFlags(block.toolCall)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (Array.isArray(toolCall.subAgentToolCalls)) {
|
||||||
|
for (const subTc of toolCall.subAgentToolCalls) {
|
||||||
|
clearStreamingFlags(subTc)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function normalizeMessagesForUI(messages: CopilotMessage[]): CopilotMessage[] {
|
||||||
|
try {
|
||||||
|
for (const message of messages) {
|
||||||
|
if (message.role === 'assistant') {
|
||||||
|
logger.debug('[normalizeMessagesForUI] Loading assistant message', {
|
||||||
|
id: message.id,
|
||||||
|
hasContent: !!message.content?.trim(),
|
||||||
|
contentBlockCount: message.contentBlocks?.length || 0,
|
||||||
|
contentBlockTypes: message.contentBlocks?.map((b) => b?.type) ?? [],
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for (const message of messages) {
|
||||||
|
if (message.contentBlocks) {
|
||||||
|
for (const block of message.contentBlocks) {
|
||||||
|
if (block?.type === 'tool_call' && block.toolCall) {
|
||||||
|
clearStreamingFlags(block.toolCall)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (message.toolCalls) {
|
||||||
|
for (const toolCall of message.toolCalls) {
|
||||||
|
clearStreamingFlags(toolCall)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return messages
|
||||||
|
} catch (error) {
|
||||||
|
logger.warn('[normalizeMessagesForUI] Failed to normalize messages', {
|
||||||
|
error: error instanceof Error ? error.message : String(error),
|
||||||
|
})
|
||||||
|
return messages
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function deepClone<T>(obj: T): T {
|
||||||
|
try {
|
||||||
|
const json = JSON.stringify(obj)
|
||||||
|
if (!json || json === 'undefined') {
|
||||||
|
logger.warn('[deepClone] JSON.stringify returned empty for object', {
|
||||||
|
type: typeof obj,
|
||||||
|
isArray: Array.isArray(obj),
|
||||||
|
length: Array.isArray(obj) ? obj.length : undefined,
|
||||||
|
})
|
||||||
|
return obj
|
||||||
|
}
|
||||||
|
const parsed = JSON.parse(json)
|
||||||
|
if (Array.isArray(obj) && (!Array.isArray(parsed) || parsed.length !== obj.length)) {
|
||||||
|
logger.warn('[deepClone] Array clone mismatch', {
|
||||||
|
originalLength: obj.length,
|
||||||
|
clonedLength: Array.isArray(parsed) ? parsed.length : 'not array',
|
||||||
|
})
|
||||||
|
}
|
||||||
|
return parsed
|
||||||
|
} catch (err) {
|
||||||
|
logger.error('[deepClone] Failed to clone object', {
|
||||||
|
error: String(err),
|
||||||
|
type: typeof obj,
|
||||||
|
isArray: Array.isArray(obj),
|
||||||
|
})
|
||||||
|
return obj
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Converts in-memory copilot messages into a DB-safe shape:
 * - normalizes `timestamp` to an ISO string,
 * - deep-clones only the non-empty optional arrays (contentBlocks, toolCalls,
 *   fileAttachments, contexts, citations) so DB rows never share references
 *   with live UI state,
 * - masks any credential ids appearing in the serialized value,
 * - drops assistant messages that carry no content, tool calls, or blocks.
 *
 * @param messages      Messages to serialize (input array is not mutated).
 * @param credentialIds Credential ids to mask wherever they appear in values.
 * @returns A new, filtered array of serialized messages.
 */
export function serializeMessagesForDB(
  messages: CopilotMessage[],
  credentialIds: Set<string>
): CopilotMessage[] {
  const result = messages
    .map((msg) => {
      // Timestamps may arrive as Date objects rather than strings
      // (the instanceof check below handles that case); anything else
      // unexpected falls back to "now".
      let timestamp: string = msg.timestamp
      if (typeof timestamp !== 'string') {
        const ts = timestamp as unknown
        timestamp = ts instanceof Date ? ts.toISOString() : new Date().toISOString()
      }

      // Base row: required fields only; optional arrays are added below
      // when non-empty to keep stored rows compact.
      const serialized: CopilotMessage = {
        id: msg.id,
        role: msg.role,
        content: msg.content || '',
        timestamp,
      }

      if (Array.isArray(msg.contentBlocks) && msg.contentBlocks.length > 0) {
        serialized.contentBlocks = deepClone(msg.contentBlocks)
      }

      if (Array.isArray(msg.toolCalls) && msg.toolCalls.length > 0) {
        serialized.toolCalls = deepClone(msg.toolCalls)
      }

      if (Array.isArray(msg.fileAttachments) && msg.fileAttachments.length > 0) {
        serialized.fileAttachments = deepClone(msg.fileAttachments)
      }

      if (Array.isArray(msg.contexts) && msg.contexts.length > 0) {
        serialized.contexts = deepClone(msg.contexts)
      }

      if (Array.isArray(msg.citations) && msg.citations.length > 0) {
        serialized.citations = deepClone(msg.citations)
      }

      if (msg.errorType) {
        serialized.errorType = msg.errorType
      }

      // Masking runs last so it covers every field copied above.
      return maskCredentialIdsInValue(serialized, credentialIds)
    })
    .filter((msg) => {
      // Drop empty assistant messages: no trimmed content, no tool calls,
      // no content blocks. All other roles are kept unconditionally.
      if (msg.role === 'assistant') {
        const hasContent = typeof msg.content === 'string' && msg.content.trim().length > 0
        const hasTools = Array.isArray(msg.toolCalls) && msg.toolCalls.length > 0
        const hasBlocks = Array.isArray(msg.contentBlocks) && msg.contentBlocks.length > 0
        return hasContent || hasTools || hasBlocks
      }
      return true
    })

  // Debug-only: summarize the INPUT assistant messages (runs after the
  // mapping above, but reads the untouched input array).
  for (const msg of messages) {
    if (msg.role === 'assistant') {
      logger.debug('[serializeMessagesForDB] Input assistant message', {
        id: msg.id,
        hasContent: !!msg.content?.trim(),
        contentBlockCount: msg.contentBlocks?.length || 0,
        contentBlockTypes: msg.contentBlocks?.map((b) => b?.type) ?? [],
      })
    }
  }

  // Debug-only: summarize the output, sampling the last serialized message.
  logger.debug('[serializeMessagesForDB] Serialized messages', {
    inputCount: messages.length,
    outputCount: result.length,
    sample:
      result.length > 0
        ? {
            role: result[result.length - 1].role,
            hasContent: !!result[result.length - 1].content,
            contentBlockCount: result[result.length - 1].contentBlocks?.length || 0,
            toolCallCount: result[result.length - 1].toolCalls?.length || 0,
          }
        : null,
  })

  return result
}
|
||||||
@@ -18,6 +18,7 @@ export const COPILOT_MODEL_IDS = [
|
|||||||
'claude-4-sonnet',
|
'claude-4-sonnet',
|
||||||
'claude-4.5-haiku',
|
'claude-4.5-haiku',
|
||||||
'claude-4.5-sonnet',
|
'claude-4.5-sonnet',
|
||||||
|
'claude-4.6-opus',
|
||||||
'claude-4.5-opus',
|
'claude-4.5-opus',
|
||||||
'claude-4.1-opus',
|
'claude-4.1-opus',
|
||||||
'gemini-3-pro',
|
'gemini-3-pro',
|
||||||
|
|||||||
67
apps/sim/lib/copilot/orchestrator/config.ts
Normal file
67
apps/sim/lib/copilot/orchestrator/config.ts
Normal file
@@ -0,0 +1,67 @@
|
|||||||
|
export const INTERRUPT_TOOL_NAMES = [
|
||||||
|
'set_global_workflow_variables',
|
||||||
|
'run_workflow',
|
||||||
|
'run_workflow_until_block',
|
||||||
|
'run_from_block',
|
||||||
|
'run_block',
|
||||||
|
'manage_mcp_tool',
|
||||||
|
'manage_custom_tool',
|
||||||
|
'deploy_mcp',
|
||||||
|
'deploy_chat',
|
||||||
|
'deploy_api',
|
||||||
|
'create_workspace_mcp_server',
|
||||||
|
'set_environment_variables',
|
||||||
|
'make_api_request',
|
||||||
|
'oauth_request_access',
|
||||||
|
'navigate_ui',
|
||||||
|
'knowledge_base',
|
||||||
|
'generate_api_key',
|
||||||
|
] as const
|
||||||
|
|
||||||
|
export const INTERRUPT_TOOL_SET = new Set<string>(INTERRUPT_TOOL_NAMES)
|
||||||
|
|
||||||
|
export const SUBAGENT_TOOL_NAMES = [
|
||||||
|
'debug',
|
||||||
|
'edit',
|
||||||
|
'build',
|
||||||
|
'plan',
|
||||||
|
'test',
|
||||||
|
'deploy',
|
||||||
|
'auth',
|
||||||
|
'research',
|
||||||
|
'knowledge',
|
||||||
|
'custom_tool',
|
||||||
|
'tour',
|
||||||
|
'info',
|
||||||
|
'workflow',
|
||||||
|
'evaluate',
|
||||||
|
'superagent',
|
||||||
|
'discovery',
|
||||||
|
] as const
|
||||||
|
|
||||||
|
export const SUBAGENT_TOOL_SET = new Set<string>(SUBAGENT_TOOL_NAMES)
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Respond tools are internal to the copilot's subagent system.
|
||||||
|
* They're used by subagents to signal completion and should NOT be executed by the sim side.
|
||||||
|
* The copilot backend handles these internally.
|
||||||
|
*/
|
||||||
|
export const RESPOND_TOOL_NAMES = [
|
||||||
|
'plan_respond',
|
||||||
|
'edit_respond',
|
||||||
|
'build_respond',
|
||||||
|
'debug_respond',
|
||||||
|
'info_respond',
|
||||||
|
'research_respond',
|
||||||
|
'deploy_respond',
|
||||||
|
'superagent_respond',
|
||||||
|
'discovery_respond',
|
||||||
|
'tour_respond',
|
||||||
|
'auth_respond',
|
||||||
|
'workflow_respond',
|
||||||
|
'knowledge_respond',
|
||||||
|
'custom_tool_respond',
|
||||||
|
'test_respond',
|
||||||
|
] as const
|
||||||
|
|
||||||
|
export const RESPOND_TOOL_SET = new Set<string>(RESPOND_TOOL_NAMES)
|
||||||
70
apps/sim/lib/copilot/orchestrator/index.ts
Normal file
70
apps/sim/lib/copilot/orchestrator/index.ts
Normal file
@@ -0,0 +1,70 @@
|
|||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { SIM_AGENT_API_URL } from '@/lib/copilot/constants'
|
||||||
|
import { prepareExecutionContext } from '@/lib/copilot/orchestrator/tool-executor'
|
||||||
|
import type { OrchestratorOptions, OrchestratorResult } from '@/lib/copilot/orchestrator/types'
|
||||||
|
import { env } from '@/lib/core/config/env'
|
||||||
|
import { buildToolCallSummaries, createStreamingContext, runStreamLoop } from './stream-core'
|
||||||
|
|
||||||
|
const logger = createLogger('CopilotOrchestrator')
|
||||||
|
|
||||||
|
export interface OrchestrateStreamOptions extends OrchestratorOptions {
|
||||||
|
userId: string
|
||||||
|
workflowId: string
|
||||||
|
chatId?: string
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function orchestrateCopilotStream(
|
||||||
|
requestPayload: Record<string, unknown>,
|
||||||
|
options: OrchestrateStreamOptions
|
||||||
|
): Promise<OrchestratorResult> {
|
||||||
|
const { userId, workflowId, chatId } = options
|
||||||
|
const execContext = await prepareExecutionContext(userId, workflowId)
|
||||||
|
|
||||||
|
const payloadMsgId = requestPayload?.messageId
|
||||||
|
const context = createStreamingContext({
|
||||||
|
chatId,
|
||||||
|
messageId: typeof payloadMsgId === 'string' ? payloadMsgId : crypto.randomUUID(),
|
||||||
|
})
|
||||||
|
|
||||||
|
try {
|
||||||
|
await runStreamLoop(
|
||||||
|
`${SIM_AGENT_API_URL}/api/chat-completion-streaming`,
|
||||||
|
{
|
||||||
|
method: 'POST',
|
||||||
|
headers: {
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
...(env.COPILOT_API_KEY ? { 'x-api-key': env.COPILOT_API_KEY } : {}),
|
||||||
|
},
|
||||||
|
body: JSON.stringify(requestPayload),
|
||||||
|
},
|
||||||
|
context,
|
||||||
|
execContext,
|
||||||
|
options
|
||||||
|
)
|
||||||
|
|
||||||
|
const result: OrchestratorResult = {
|
||||||
|
success: context.errors.length === 0,
|
||||||
|
content: context.accumulatedContent,
|
||||||
|
contentBlocks: context.contentBlocks,
|
||||||
|
toolCalls: buildToolCallSummaries(context),
|
||||||
|
chatId: context.chatId,
|
||||||
|
conversationId: context.conversationId,
|
||||||
|
errors: context.errors.length ? context.errors : undefined,
|
||||||
|
}
|
||||||
|
await options.onComplete?.(result)
|
||||||
|
return result
|
||||||
|
} catch (error) {
|
||||||
|
const err = error instanceof Error ? error : new Error('Copilot orchestration failed')
|
||||||
|
logger.error('Copilot orchestration failed', { error: err.message })
|
||||||
|
await options.onError?.(err)
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
content: '',
|
||||||
|
contentBlocks: [],
|
||||||
|
toolCalls: [],
|
||||||
|
chatId: context.chatId,
|
||||||
|
conversationId: context.conversationId,
|
||||||
|
error: err.message,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
29
apps/sim/lib/copilot/orchestrator/persistence.ts
Normal file
29
apps/sim/lib/copilot/orchestrator/persistence.ts
Normal file
@@ -0,0 +1,29 @@
|
|||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { REDIS_TOOL_CALL_PREFIX } from '@/lib/copilot/constants'
|
||||||
|
import { getRedisClient } from '@/lib/core/config/redis'
|
||||||
|
|
||||||
|
const logger = createLogger('CopilotOrchestratorPersistence')
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get a tool call confirmation status from Redis.
|
||||||
|
*/
|
||||||
|
export async function getToolConfirmation(toolCallId: string): Promise<{
|
||||||
|
status: string
|
||||||
|
message?: string
|
||||||
|
timestamp?: string
|
||||||
|
} | null> {
|
||||||
|
const redis = getRedisClient()
|
||||||
|
if (!redis) return null
|
||||||
|
|
||||||
|
try {
|
||||||
|
const data = await redis.get(`${REDIS_TOOL_CALL_PREFIX}${toolCallId}`)
|
||||||
|
if (!data) return null
|
||||||
|
return JSON.parse(data) as { status: string; message?: string; timestamp?: string }
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('Failed to read tool confirmation', {
|
||||||
|
toolCallId,
|
||||||
|
error: error instanceof Error ? error.message : String(error),
|
||||||
|
})
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
}
|
||||||
95
apps/sim/lib/copilot/orchestrator/sse-handlers.test.ts
Normal file
95
apps/sim/lib/copilot/orchestrator/sse-handlers.test.ts
Normal file
@@ -0,0 +1,95 @@
|
|||||||
|
/**
|
||||||
|
* @vitest-environment node
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { loggerMock } from '@sim/testing'
|
||||||
|
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||||
|
|
||||||
|
vi.mock('@sim/logger', () => loggerMock)
|
||||||
|
|
||||||
|
const executeToolServerSide = vi.fn()
|
||||||
|
const markToolComplete = vi.fn()
|
||||||
|
|
||||||
|
vi.mock('@/lib/copilot/orchestrator/tool-executor', () => ({
|
||||||
|
executeToolServerSide,
|
||||||
|
markToolComplete,
|
||||||
|
}))
|
||||||
|
|
||||||
|
import { sseHandlers } from '@/lib/copilot/orchestrator/sse-handlers'
|
||||||
|
import type { ExecutionContext, StreamingContext } from '@/lib/copilot/orchestrator/types'
|
||||||
|
|
||||||
|
describe('sse-handlers tool lifecycle', () => {
  let context: StreamingContext
  let execContext: ExecutionContext

  beforeEach(() => {
    vi.clearAllMocks()
    // Fresh, empty streaming context for every test; mirrors the shape
    // produced by the orchestrator's createStreamingContext.
    context = {
      chatId: undefined,
      conversationId: undefined,
      messageId: 'msg-1',
      accumulatedContent: '',
      contentBlocks: [],
      toolCalls: new Map(),
      currentThinkingBlock: null,
      isInThinkingBlock: false,
      subAgentParentToolCallId: undefined,
      subAgentContent: {},
      subAgentToolCalls: {},
      pendingContent: '',
      streamComplete: false,
      wasAborted: false,
      errors: [],
    }
    execContext = {
      userId: 'user-1',
      workflowId: 'workflow-1',
    }
  })

  it('executes tool_call and emits tool_result + mark-complete', async () => {
    // Non-interactive mode: the handler should auto-execute the tool,
    // report completion, emit a tool_result event, and update state.
    executeToolServerSide.mockResolvedValueOnce({ success: true, output: { ok: true } })
    markToolComplete.mockResolvedValueOnce(true)
    const onEvent = vi.fn()

    await sseHandlers.tool_call(
      {
        type: 'tool_call',
        data: { id: 'tool-1', name: 'get_user_workflow', arguments: { workflowId: 'workflow-1' } },
      } as any,
      context,
      execContext,
      { onEvent, interactive: false, timeout: 1000 }
    )

    expect(executeToolServerSide).toHaveBeenCalledTimes(1)
    expect(markToolComplete).toHaveBeenCalledTimes(1)
    expect(onEvent).toHaveBeenCalledWith(
      expect.objectContaining({
        type: 'tool_result',
        toolCallId: 'tool-1',
        success: true,
      })
    )

    // The tool call tracked in the streaming context reflects the outcome.
    const updated = context.toolCalls.get('tool-1')
    expect(updated?.status).toBe('success')
    expect(updated?.result?.output).toEqual({ ok: true })
  })

  it('skips duplicate tool_call after result', async () => {
    // Replaying the identical tool_call event must not re-execute the tool:
    // dedupe state recorded after the first run suppresses the second.
    executeToolServerSide.mockResolvedValueOnce({ success: true, output: { ok: true } })
    markToolComplete.mockResolvedValueOnce(true)

    const event = {
      type: 'tool_call',
      data: { id: 'tool-dup', name: 'get_user_workflow', arguments: { workflowId: 'workflow-1' } },
    }

    await sseHandlers.tool_call(event as any, context, execContext, { interactive: false })
    await sseHandlers.tool_call(event as any, context, execContext, { interactive: false })

    expect(executeToolServerSide).toHaveBeenCalledTimes(1)
    expect(markToolComplete).toHaveBeenCalledTimes(1)
  })
})
|
||||||
431
apps/sim/lib/copilot/orchestrator/sse-handlers/handlers.ts
Normal file
431
apps/sim/lib/copilot/orchestrator/sse-handlers/handlers.ts
Normal file
@@ -0,0 +1,431 @@
|
|||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { STREAM_TIMEOUT_MS } from '@/lib/copilot/constants'
|
||||||
|
import { RESPOND_TOOL_SET, SUBAGENT_TOOL_SET } from '@/lib/copilot/orchestrator/config'
|
||||||
|
import {
|
||||||
|
asRecord,
|
||||||
|
getEventData,
|
||||||
|
markToolResultSeen,
|
||||||
|
wasToolResultSeen,
|
||||||
|
} from '@/lib/copilot/orchestrator/sse-utils'
|
||||||
|
import {
|
||||||
|
isToolAvailableOnSimSide,
|
||||||
|
markToolComplete,
|
||||||
|
} from '@/lib/copilot/orchestrator/tool-executor'
|
||||||
|
import type {
|
||||||
|
ContentBlock,
|
||||||
|
ExecutionContext,
|
||||||
|
OrchestratorOptions,
|
||||||
|
SSEEvent,
|
||||||
|
StreamingContext,
|
||||||
|
ToolCallState,
|
||||||
|
} from '@/lib/copilot/orchestrator/types'
|
||||||
|
import { executeToolAndReport, isInterruptToolName, waitForToolDecision } from './tool-execution'
|
||||||
|
|
||||||
|
const logger = createLogger('CopilotSseHandlers')
|
||||||
|
|
||||||
|
// Normalization + dedupe helpers live in sse-utils to keep server/client in sync.
|
||||||
|
|
||||||
|
function inferToolSuccess(data: Record<string, unknown> | undefined): {
|
||||||
|
success: boolean
|
||||||
|
hasResultData: boolean
|
||||||
|
hasError: boolean
|
||||||
|
} {
|
||||||
|
const resultObj = asRecord(data?.result)
|
||||||
|
const hasExplicitSuccess = data?.success !== undefined || resultObj.success !== undefined
|
||||||
|
const explicitSuccess = data?.success ?? resultObj.success
|
||||||
|
const hasResultData = data?.result !== undefined || data?.data !== undefined
|
||||||
|
const hasError = !!data?.error || !!resultObj.error
|
||||||
|
const success = hasExplicitSuccess ? !!explicitSuccess : hasResultData && !hasError
|
||||||
|
return { success, hasResultData, hasError }
|
||||||
|
}
|
||||||
|
|
||||||
|
export type SSEHandler = (
|
||||||
|
event: SSEEvent,
|
||||||
|
context: StreamingContext,
|
||||||
|
execContext: ExecutionContext,
|
||||||
|
options: OrchestratorOptions
|
||||||
|
) => void | Promise<void>
|
||||||
|
|
||||||
|
function addContentBlock(context: StreamingContext, block: Omit<ContentBlock, 'timestamp'>): void {
|
||||||
|
context.contentBlocks.push({
|
||||||
|
...block,
|
||||||
|
timestamp: Date.now(),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Handlers for top-level (non-subagent) SSE event types, keyed by event type.
 * Each handler mutates the shared StreamingContext; `tool_call` additionally
 * drives server-side execution and user-confirmation flow.
 */
export const sseHandlers: Record<string, SSEHandler> = {
  // Records the chat id announced by the backend.
  chat_id: (event, context) => {
    context.chatId = asRecord(event.data).chatId as string | undefined
  },
  // Intentionally a no-op: title changes carry no state this stream tracks.
  title_updated: () => {},
  // Applies a tool result to an already-tracked tool call (status, timing,
  // output, error). Unknown/untracked tool call ids are ignored.
  tool_result: (event, context) => {
    const data = getEventData(event)
    const toolCallId = event.toolCallId || (data?.id as string | undefined)
    if (!toolCallId) return
    const current = context.toolCalls.get(toolCallId)
    if (!current) return

    const { success, hasResultData, hasError } = inferToolSuccess(data)

    current.status = success ? 'success' : 'error'
    current.endTime = Date.now()
    if (hasResultData) {
      current.result = {
        success,
        output: data?.result || data?.data,
      }
    }
    if (hasError) {
      const resultObj = asRecord(data?.result)
      current.error = (data?.error || resultObj.error) as string | undefined
    }
  },
  // Marks a tracked tool call as failed with the reported (or default) error.
  tool_error: (event, context) => {
    const data = getEventData(event)
    const toolCallId = event.toolCallId || (data?.id as string | undefined)
    if (!toolCallId) return
    const current = context.toolCalls.get(toolCallId)
    if (!current) return
    current.status = 'error'
    current.error = (data?.error as string | undefined) || 'Tool execution failed'
    current.endTime = Date.now()
  },
  // Registers a pending tool call as soon as the backend starts generating
  // its arguments, so the UI can show it before the full tool_call arrives.
  tool_generating: (event, context) => {
    const data = getEventData(event)
    const toolCallId =
      event.toolCallId ||
      (data?.toolCallId as string | undefined) ||
      (data?.id as string | undefined)
    const toolName =
      event.toolName || (data?.toolName as string | undefined) || (data?.name as string | undefined)
    if (!toolCallId || !toolName) return
    if (!context.toolCalls.has(toolCallId)) {
      context.toolCalls.set(toolCallId, {
        id: toolCallId,
        name: toolName,
        status: 'pending',
        startTime: Date.now(),
      })
    }
  },
  // Full tool-call lifecycle: dedupe, track, then either skip (subagent /
  // respond tools), wait for a user decision (interrupt tools in interactive
  // mode), or auto-execute server-side.
  tool_call: async (event, context, execContext, options) => {
    const toolData = getEventData(event) || ({} as Record<string, unknown>)
    const toolCallId = (toolData.id as string | undefined) || event.toolCallId
    const toolName = (toolData.name as string | undefined) || event.toolName
    if (!toolCallId || !toolName) return

    const args = (toolData.arguments || toolData.input || asRecord(event.data).input) as
      | Record<string, unknown>
      | undefined
    const isPartial = toolData.partial === true
    const existing = context.toolCalls.get(toolCallId)

    // If we've already completed this tool call, ignore late/duplicate tool_call events
    // to avoid resetting UI/state back to pending and re-executing.
    if (
      existing?.endTime ||
      (existing && existing.status !== 'pending' && existing.status !== 'executing')
    ) {
      // Still backfill args if the completed record never got them.
      if (!existing.params && args) {
        existing.params = args
      }
      return
    }

    if (existing) {
      if (args && !existing.params) existing.params = args
    } else {
      // First sighting: track it and surface a tool_call content block.
      context.toolCalls.set(toolCallId, {
        id: toolCallId,
        name: toolName,
        status: 'pending',
        params: args,
        startTime: Date.now(),
      })
      const created = context.toolCalls.get(toolCallId)!
      addContentBlock(context, { type: 'tool_call', toolCall: created })
    }

    // Partial events only carry incremental argument data — never execute.
    if (isPartial) return
    if (wasToolResultSeen(toolCallId)) return

    const toolCall = context.toolCalls.get(toolCallId)
    if (!toolCall) return

    // Subagent tools are executed by the copilot backend, not sim side.
    if (SUBAGENT_TOOL_SET.has(toolName)) {
      return
    }

    // Respond tools are internal to copilot's subagent system - skip execution.
    // The copilot backend handles these internally to signal subagent completion.
    if (RESPOND_TOOL_SET.has(toolName)) {
      toolCall.status = 'success'
      toolCall.endTime = Date.now()
      toolCall.result = {
        success: true,
        output: 'Internal respond tool - handled by copilot backend',
      }
      return
    }

    const isInterruptTool = isInterruptToolName(toolName)
    const isInteractive = options.interactive === true

    // Interrupt tools in interactive mode block until the user decides.
    if (isInterruptTool && isInteractive) {
      const decision = await waitForToolDecision(
        toolCallId,
        options.timeout || STREAM_TIMEOUT_MS,
        options.abortSignal
      )
      if (decision?.status === 'accepted' || decision?.status === 'success') {
        await executeToolAndReport(toolCallId, context, execContext, options)
        return
      }

      // User rejected (or decision errored): report a skipped failure.
      if (decision?.status === 'rejected' || decision?.status === 'error') {
        toolCall.status = 'rejected'
        toolCall.endTime = Date.now()
        await markToolComplete(
          toolCall.id,
          toolCall.name,
          400,
          decision.message || 'Tool execution rejected',
          { skipped: true, reason: 'user_rejected' }
        )
        markToolResultSeen(toolCall.id)
        await options.onEvent?.({
          type: 'tool_result',
          toolCallId: toolCall.id,
          data: {
            id: toolCall.id,
            name: toolCall.name,
            success: false,
            result: { skipped: true, reason: 'user_rejected' },
          },
        })
        return
      }

      // User moved the tool to background: report success with 202.
      if (decision?.status === 'background') {
        toolCall.status = 'skipped'
        toolCall.endTime = Date.now()
        await markToolComplete(
          toolCall.id,
          toolCall.name,
          202,
          decision.message || 'Tool execution moved to background',
          { background: true }
        )
        markToolResultSeen(toolCall.id)
        await options.onEvent?.({
          type: 'tool_result',
          toolCallId: toolCall.id,
          data: {
            id: toolCall.id,
            name: toolCall.name,
            success: true,
            result: { background: true },
          },
        })
        return
      }
      // NOTE(review): a timed-out/undefined decision falls through to the
      // auto-execute branch below — confirm this is the intended behavior.
    }

    if (options.autoExecuteTools !== false) {
      await executeToolAndReport(toolCallId, context, execContext, options)
    }
  },
  // Accumulates model "thinking" text between start/end phases into a single
  // thinking content block, which is flushed on the end phase.
  reasoning: (event, context) => {
    const d = asRecord(event.data)
    const phase = d.phase || asRecord(d.data).phase
    if (phase === 'start') {
      context.isInThinkingBlock = true
      context.currentThinkingBlock = {
        type: 'thinking',
        content: '',
        timestamp: Date.now(),
      }
      return
    }
    if (phase === 'end') {
      if (context.currentThinkingBlock) {
        context.contentBlocks.push(context.currentThinkingBlock)
      }
      context.isInThinkingBlock = false
      context.currentThinkingBlock = null
      return
    }
    // Middle-of-stream chunk: append to the open thinking block, if any.
    const chunk = (d.data || d.content || event.content) as string | undefined
    if (!chunk || !context.currentThinkingBlock) return
    context.currentThinkingBlock.content = `${context.currentThinkingBlock.content || ''}${chunk}`
  },
  // Appends an assistant text chunk to both the accumulated content string
  // and the content-block list.
  content: (event, context) => {
    // Go backend sends content as a plain string in event.data, not wrapped in an object.
    let chunk: string | undefined
    if (typeof event.data === 'string') {
      chunk = event.data
    } else {
      const d = asRecord(event.data)
      chunk = (d.content || d.data || event.content) as string | undefined
    }
    if (!chunk) return
    context.accumulatedContent += chunk
    addContentBlock(context, { type: 'text', content: chunk })
  },
  // Stream finished: capture the conversation id and mark completion.
  done: (event, context) => {
    const d = asRecord(event.data)
    if (d.responseId) {
      context.conversationId = d.responseId as string
    }
    context.streamComplete = true
  },
  // Stream opened: capture the conversation id if announced up front.
  start: (event, context) => {
    const d = asRecord(event.data)
    if (d.responseId) {
      context.conversationId = d.responseId as string
    }
  },
  // Stream-level error: record the message and end the stream.
  error: (event, context) => {
    const d = asRecord(event.data)
    const message = (d.message || d.error || event.error) as string | undefined
    if (message) {
      context.errors.push(message)
    }
    context.streamComplete = true
  },
}
|
||||||
|
|
||||||
|
/**
 * Handlers for SSE events emitted while a subagent is active. All of them
 * require context.subAgentParentToolCallId to be set; subagent content and
 * tool calls are bucketed under that parent tool call id.
 */
export const subAgentHandlers: Record<string, SSEHandler> = {
  // Appends a subagent text chunk to the parent's accumulated content and
  // records it as a subagent_text content block.
  content: (event, context) => {
    const parentToolCallId = context.subAgentParentToolCallId
    if (!parentToolCallId || !event.data) return
    // Go backend sends content as a plain string in event.data
    let chunk: string | undefined
    if (typeof event.data === 'string') {
      chunk = event.data
    } else {
      const d = asRecord(event.data)
      chunk = (d.content || d.data || event.content) as string | undefined
    }
    if (!chunk) return
    context.subAgentContent[parentToolCallId] =
      (context.subAgentContent[parentToolCallId] || '') + chunk
    addContentBlock(context, { type: 'subagent_text', content: chunk })
  },
  // Tracks a subagent-issued tool call (in both the per-parent bucket and the
  // main tool-call map) and executes it server-side when appropriate.
  tool_call: async (event, context, execContext, options) => {
    const parentToolCallId = context.subAgentParentToolCallId
    if (!parentToolCallId) return
    const toolData = getEventData(event) || ({} as Record<string, unknown>)
    const toolCallId = (toolData.id as string | undefined) || event.toolCallId
    const toolName = (toolData.name as string | undefined) || event.toolName
    if (!toolCallId || !toolName) return
    const isPartial = toolData.partial === true
    const args = (toolData.arguments || toolData.input || asRecord(event.data).input) as
      | Record<string, unknown>
      | undefined

    const existing = context.toolCalls.get(toolCallId)
    // Ignore late/duplicate tool_call events once we already have a result.
    if (wasToolResultSeen(toolCallId) || existing?.endTime) {
      return
    }

    const toolCall: ToolCallState = {
      id: toolCallId,
      name: toolName,
      status: 'pending',
      params: args,
      startTime: Date.now(),
    }

    // Store in both places - but do NOT overwrite existing tool call state for the same id.
    if (!context.subAgentToolCalls[parentToolCallId]) {
      context.subAgentToolCalls[parentToolCallId] = []
    }
    if (!context.subAgentToolCalls[parentToolCallId].some((tc) => tc.id === toolCallId)) {
      context.subAgentToolCalls[parentToolCallId].push(toolCall)
    }
    if (!context.toolCalls.has(toolCallId)) {
      context.toolCalls.set(toolCallId, toolCall)
    }

    // Partial events only stream argument fragments — never execute.
    if (isPartial) return

    // Respond tools are internal to copilot's subagent system - skip execution.
    if (RESPOND_TOOL_SET.has(toolName)) {
      toolCall.status = 'success'
      toolCall.endTime = Date.now()
      toolCall.result = {
        success: true,
        output: 'Internal respond tool - handled by copilot backend',
      }
      return
    }

    // Tools that only exist on the Go backend (e.g. search_patterns,
    // search_errors, remember_debug) should NOT be re-executed on the Sim side.
    // The Go backend already executed them and will send its own tool_result
    // SSE event with the real outcome. Trying to execute them here would fail
    // with "Tool not found" and incorrectly mark the tool as failed.
    if (!isToolAvailableOnSimSide(toolName)) {
      return
    }

    if (options.autoExecuteTools !== false) {
      await executeToolAndReport(toolCallId, context, execContext, options)
    }
  },
  // Applies a subagent tool result to BOTH bookkeeping locations: the
  // per-parent subagent list and the main tool-call map.
  tool_result: (event, context) => {
    const parentToolCallId = context.subAgentParentToolCallId
    if (!parentToolCallId) return
    const data = getEventData(event)
    const toolCallId = event.toolCallId || (data?.id as string | undefined)
    if (!toolCallId) return

    // Update in subAgentToolCalls.
    const toolCalls = context.subAgentToolCalls[parentToolCallId] || []
    const subAgentToolCall = toolCalls.find((tc) => tc.id === toolCallId)

    // Also update in main toolCalls (where we added it for execution).
    const mainToolCall = context.toolCalls.get(toolCallId)

    const { success, hasResultData, hasError } = inferToolSuccess(data)

    const status = success ? 'success' : 'error'
    const endTime = Date.now()
    const result = hasResultData ? { success, output: data?.result || data?.data } : undefined

    if (subAgentToolCall) {
      subAgentToolCall.status = status
      subAgentToolCall.endTime = endTime
      if (result) subAgentToolCall.result = result
      if (hasError) {
        const resultObj = asRecord(data?.result)
        subAgentToolCall.error = (data?.error || resultObj.error) as string | undefined
      }
    }

    if (mainToolCall) {
      mainToolCall.status = status
      mainToolCall.endTime = endTime
      if (result) mainToolCall.result = result
      if (hasError) {
        const resultObj = asRecord(data?.result)
        mainToolCall.error = (data?.error || resultObj.error) as string | undefined
      }
    }
  },
}
|
||||||
|
|
||||||
|
export function handleSubagentRouting(event: SSEEvent, context: StreamingContext): boolean {
|
||||||
|
if (!event.subagent) return false
|
||||||
|
if (!context.subAgentParentToolCallId) {
|
||||||
|
logger.warn('Subagent event missing parent tool call', {
|
||||||
|
type: event.type,
|
||||||
|
subagent: event.subagent,
|
||||||
|
})
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
2
apps/sim/lib/copilot/orchestrator/sse-handlers/index.ts
Normal file
2
apps/sim/lib/copilot/orchestrator/sse-handlers/index.ts
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
export type { SSEHandler } from './handlers'
|
||||||
|
export { handleSubagentRouting, sseHandlers, subAgentHandlers } from './handlers'
|
||||||
146
apps/sim/lib/copilot/orchestrator/sse-handlers/tool-execution.ts
Normal file
146
apps/sim/lib/copilot/orchestrator/sse-handlers/tool-execution.ts
Normal file
@@ -0,0 +1,146 @@
|
|||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import {
|
||||||
|
TOOL_DECISION_INITIAL_POLL_MS,
|
||||||
|
TOOL_DECISION_MAX_POLL_MS,
|
||||||
|
TOOL_DECISION_POLL_BACKOFF,
|
||||||
|
} from '@/lib/copilot/constants'
|
||||||
|
import { INTERRUPT_TOOL_SET } from '@/lib/copilot/orchestrator/config'
|
||||||
|
import { getToolConfirmation } from '@/lib/copilot/orchestrator/persistence'
|
||||||
|
import {
|
||||||
|
asRecord,
|
||||||
|
markToolResultSeen,
|
||||||
|
wasToolResultSeen,
|
||||||
|
} from '@/lib/copilot/orchestrator/sse-utils'
|
||||||
|
import { executeToolServerSide, markToolComplete } from '@/lib/copilot/orchestrator/tool-executor'
|
||||||
|
import type {
|
||||||
|
ExecutionContext,
|
||||||
|
OrchestratorOptions,
|
||||||
|
SSEEvent,
|
||||||
|
StreamingContext,
|
||||||
|
} from '@/lib/copilot/orchestrator/types'
|
||||||
|
|
||||||
|
const logger = createLogger('CopilotSseToolExecution')
|
||||||
|
|
||||||
|
export function isInterruptToolName(toolName: string): boolean {
|
||||||
|
return INTERRUPT_TOOL_SET.has(toolName)
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function executeToolAndReport(
|
||||||
|
toolCallId: string,
|
||||||
|
context: StreamingContext,
|
||||||
|
execContext: ExecutionContext,
|
||||||
|
options?: OrchestratorOptions
|
||||||
|
): Promise<void> {
|
||||||
|
const toolCall = context.toolCalls.get(toolCallId)
|
||||||
|
if (!toolCall) return
|
||||||
|
|
||||||
|
if (toolCall.status === 'executing') return
|
||||||
|
if (wasToolResultSeen(toolCall.id)) return
|
||||||
|
|
||||||
|
toolCall.status = 'executing'
|
||||||
|
try {
|
||||||
|
const result = await executeToolServerSide(toolCall, execContext)
|
||||||
|
toolCall.status = result.success ? 'success' : 'error'
|
||||||
|
toolCall.result = result
|
||||||
|
toolCall.error = result.error
|
||||||
|
toolCall.endTime = Date.now()
|
||||||
|
|
||||||
|
// If create_workflow was successful, update the execution context with the new workflowId.
|
||||||
|
// This ensures subsequent tools in the same stream have access to the workflowId.
|
||||||
|
const output = asRecord(result.output)
|
||||||
|
if (
|
||||||
|
toolCall.name === 'create_workflow' &&
|
||||||
|
result.success &&
|
||||||
|
output.workflowId &&
|
||||||
|
!execContext.workflowId
|
||||||
|
) {
|
||||||
|
execContext.workflowId = output.workflowId as string
|
||||||
|
if (output.workspaceId) {
|
||||||
|
execContext.workspaceId = output.workspaceId as string
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
markToolResultSeen(toolCall.id)
|
||||||
|
|
||||||
|
// Fire-and-forget: notify the copilot backend that the tool completed.
|
||||||
|
// IMPORTANT: We must NOT await this — the Go backend may block on the
|
||||||
|
// mark-complete handler until it can write back on the SSE stream, but
|
||||||
|
// the SSE reader (our for-await loop) is paused while we're in this
|
||||||
|
// handler. Awaiting here would deadlock: sim waits for Go's response,
|
||||||
|
// Go waits for sim to drain the SSE stream.
|
||||||
|
markToolComplete(
|
||||||
|
toolCall.id,
|
||||||
|
toolCall.name,
|
||||||
|
result.success ? 200 : 500,
|
||||||
|
result.error || (result.success ? 'Tool completed' : 'Tool failed'),
|
||||||
|
result.output
|
||||||
|
).catch((err) => {
|
||||||
|
logger.error('markToolComplete fire-and-forget failed', {
|
||||||
|
toolCallId: toolCall.id,
|
||||||
|
toolName: toolCall.name,
|
||||||
|
error: err instanceof Error ? err.message : String(err),
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
const resultEvent: SSEEvent = {
|
||||||
|
type: 'tool_result',
|
||||||
|
toolCallId: toolCall.id,
|
||||||
|
toolName: toolCall.name,
|
||||||
|
success: result.success,
|
||||||
|
result: result.output,
|
||||||
|
data: {
|
||||||
|
id: toolCall.id,
|
||||||
|
name: toolCall.name,
|
||||||
|
success: result.success,
|
||||||
|
result: result.output,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
await options?.onEvent?.(resultEvent)
|
||||||
|
} catch (error) {
|
||||||
|
toolCall.status = 'error'
|
||||||
|
toolCall.error = error instanceof Error ? error.message : String(error)
|
||||||
|
toolCall.endTime = Date.now()
|
||||||
|
|
||||||
|
markToolResultSeen(toolCall.id)
|
||||||
|
|
||||||
|
// Fire-and-forget (same reasoning as above).
|
||||||
|
markToolComplete(toolCall.id, toolCall.name, 500, toolCall.error).catch((err) => {
|
||||||
|
logger.error('markToolComplete fire-and-forget failed', {
|
||||||
|
toolCallId: toolCall.id,
|
||||||
|
toolName: toolCall.name,
|
||||||
|
error: err instanceof Error ? err.message : String(err),
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
const errorEvent: SSEEvent = {
|
||||||
|
type: 'tool_error',
|
||||||
|
toolCallId: toolCall.id,
|
||||||
|
data: {
|
||||||
|
id: toolCall.id,
|
||||||
|
name: toolCall.name,
|
||||||
|
error: toolCall.error,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
await options?.onEvent?.(errorEvent)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function waitForToolDecision(
|
||||||
|
toolCallId: string,
|
||||||
|
timeoutMs: number,
|
||||||
|
abortSignal?: AbortSignal
|
||||||
|
): Promise<{ status: string; message?: string } | null> {
|
||||||
|
const start = Date.now()
|
||||||
|
let interval = TOOL_DECISION_INITIAL_POLL_MS
|
||||||
|
const maxInterval = TOOL_DECISION_MAX_POLL_MS
|
||||||
|
while (Date.now() - start < timeoutMs) {
|
||||||
|
if (abortSignal?.aborted) return null
|
||||||
|
const decision = await getToolConfirmation(toolCallId)
|
||||||
|
if (decision?.status) {
|
||||||
|
return decision
|
||||||
|
}
|
||||||
|
await new Promise((resolve) => setTimeout(resolve, interval))
|
||||||
|
interval = Math.min(interval * TOOL_DECISION_POLL_BACKOFF, maxInterval)
|
||||||
|
}
|
||||||
|
return null
|
||||||
|
}
|
||||||
71
apps/sim/lib/copilot/orchestrator/sse-parser.ts
Normal file
71
apps/sim/lib/copilot/orchestrator/sse-parser.ts
Normal file
@@ -0,0 +1,71 @@
|
|||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import type { SSEEvent } from '@/lib/copilot/orchestrator/types'
|
||||||
|
|
||||||
|
const logger = createLogger('CopilotSseParser')
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Parses SSE streams from the copilot backend into typed events.
|
||||||
|
*/
|
||||||
|
export async function* parseSSEStream(
|
||||||
|
reader: ReadableStreamDefaultReader<Uint8Array>,
|
||||||
|
decoder: TextDecoder,
|
||||||
|
abortSignal?: AbortSignal
|
||||||
|
): AsyncGenerator<SSEEvent> {
|
||||||
|
let buffer = ''
|
||||||
|
|
||||||
|
try {
|
||||||
|
while (true) {
|
||||||
|
if (abortSignal?.aborted) {
|
||||||
|
logger.info('SSE stream aborted by signal')
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
const { done, value } = await reader.read()
|
||||||
|
if (done) break
|
||||||
|
|
||||||
|
buffer += decoder.decode(value, { stream: true })
|
||||||
|
const lines = buffer.split('\n')
|
||||||
|
buffer = lines.pop() || ''
|
||||||
|
|
||||||
|
for (const line of lines) {
|
||||||
|
if (!line.trim()) continue
|
||||||
|
if (!line.startsWith('data: ')) continue
|
||||||
|
|
||||||
|
const jsonStr = line.slice(6)
|
||||||
|
if (jsonStr === '[DONE]') continue
|
||||||
|
|
||||||
|
try {
|
||||||
|
const event = JSON.parse(jsonStr) as SSEEvent
|
||||||
|
if (event?.type) {
|
||||||
|
yield event
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
logger.warn('Failed to parse SSE event', {
|
||||||
|
preview: jsonStr.slice(0, 200),
|
||||||
|
error: error instanceof Error ? error.message : String(error),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (buffer.trim() && buffer.startsWith('data: ')) {
|
||||||
|
try {
|
||||||
|
const event = JSON.parse(buffer.slice(6)) as SSEEvent
|
||||||
|
if (event?.type) {
|
||||||
|
yield event
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
logger.warn('Failed to parse final SSE buffer', {
|
||||||
|
preview: buffer.slice(0, 200),
|
||||||
|
error: error instanceof Error ? error.message : String(error),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} finally {
|
||||||
|
try {
|
||||||
|
reader.releaseLock()
|
||||||
|
} catch {
|
||||||
|
logger.warn('Failed to release SSE reader lock')
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
42
apps/sim/lib/copilot/orchestrator/sse-utils.test.ts
Normal file
42
apps/sim/lib/copilot/orchestrator/sse-utils.test.ts
Normal file
@@ -0,0 +1,42 @@
|
|||||||
|
/**
|
||||||
|
* @vitest-environment node
|
||||||
|
*/
|
||||||
|
import { describe, expect, it } from 'vitest'
|
||||||
|
import {
|
||||||
|
normalizeSseEvent,
|
||||||
|
shouldSkipToolCallEvent,
|
||||||
|
shouldSkipToolResultEvent,
|
||||||
|
} from '@/lib/copilot/orchestrator/sse-utils'
|
||||||
|
|
||||||
|
describe('sse-utils', () => {
|
||||||
|
it.concurrent('normalizes tool fields from string data', () => {
|
||||||
|
const event = {
|
||||||
|
type: 'tool_result',
|
||||||
|
data: JSON.stringify({
|
||||||
|
id: 'tool_1',
|
||||||
|
name: 'edit_workflow',
|
||||||
|
success: true,
|
||||||
|
result: { ok: true },
|
||||||
|
}),
|
||||||
|
}
|
||||||
|
|
||||||
|
const normalized = normalizeSseEvent(event as any)
|
||||||
|
|
||||||
|
expect(normalized.toolCallId).toBe('tool_1')
|
||||||
|
expect(normalized.toolName).toBe('edit_workflow')
|
||||||
|
expect(normalized.success).toBe(true)
|
||||||
|
expect(normalized.result).toEqual({ ok: true })
|
||||||
|
})
|
||||||
|
|
||||||
|
it.concurrent('dedupes tool_call events', () => {
|
||||||
|
const event = { type: 'tool_call', data: { id: 'tool_call_1', name: 'plan' } }
|
||||||
|
expect(shouldSkipToolCallEvent(event as any)).toBe(false)
|
||||||
|
expect(shouldSkipToolCallEvent(event as any)).toBe(true)
|
||||||
|
})
|
||||||
|
|
||||||
|
it.concurrent('dedupes tool_result events', () => {
|
||||||
|
const event = { type: 'tool_result', data: { id: 'tool_result_1', name: 'plan' } }
|
||||||
|
expect(shouldSkipToolResultEvent(event as any)).toBe(false)
|
||||||
|
expect(shouldSkipToolResultEvent(event as any)).toBe(true)
|
||||||
|
})
|
||||||
|
})
|
||||||
124
apps/sim/lib/copilot/orchestrator/sse-utils.ts
Normal file
124
apps/sim/lib/copilot/orchestrator/sse-utils.ts
Normal file
@@ -0,0 +1,124 @@
|
|||||||
|
import { STREAM_BUFFER_MAX_DEDUP_ENTRIES } from '@/lib/copilot/constants'
|
||||||
|
import type { SSEEvent } from '@/lib/copilot/orchestrator/types'
|
||||||
|
|
||||||
|
type EventDataObject = Record<string, unknown> | undefined
|
||||||
|
|
||||||
|
/** Safely cast event.data to a record for property access. */
|
||||||
|
export const asRecord = (data: unknown): Record<string, unknown> =>
|
||||||
|
(data && typeof data === 'object' && !Array.isArray(data) ? data : {}) as Record<string, unknown>
|
||||||
|
|
||||||
|
/**
|
||||||
|
* In-memory tool event dedupe with bounded size.
|
||||||
|
*
|
||||||
|
* NOTE: Process-local only. In a multi-instance setup (e.g., ECS),
|
||||||
|
* each task maintains its own dedupe cache.
|
||||||
|
*/
|
||||||
|
const seenToolCalls = new Set<string>()
|
||||||
|
const seenToolResults = new Set<string>()
|
||||||
|
|
||||||
|
function addToSet(set: Set<string>, id: string): void {
|
||||||
|
if (set.size >= STREAM_BUFFER_MAX_DEDUP_ENTRIES) {
|
||||||
|
const first = set.values().next().value
|
||||||
|
if (first) set.delete(first)
|
||||||
|
}
|
||||||
|
set.add(id)
|
||||||
|
}
|
||||||
|
|
||||||
|
const parseEventData = (data: unknown): EventDataObject => {
|
||||||
|
if (!data) return undefined
|
||||||
|
if (typeof data !== 'string') {
|
||||||
|
return data as EventDataObject
|
||||||
|
}
|
||||||
|
try {
|
||||||
|
return JSON.parse(data) as EventDataObject
|
||||||
|
} catch {
|
||||||
|
return undefined
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const hasToolFields = (data: EventDataObject): boolean => {
|
||||||
|
if (!data) return false
|
||||||
|
return (
|
||||||
|
data.id !== undefined ||
|
||||||
|
data.toolCallId !== undefined ||
|
||||||
|
data.name !== undefined ||
|
||||||
|
data.success !== undefined ||
|
||||||
|
data.result !== undefined ||
|
||||||
|
data.arguments !== undefined
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
export const getEventData = (event: SSEEvent): EventDataObject => {
|
||||||
|
const topLevel = parseEventData(event.data)
|
||||||
|
if (!topLevel) return undefined
|
||||||
|
if (hasToolFields(topLevel)) return topLevel
|
||||||
|
const nested = parseEventData(topLevel.data)
|
||||||
|
return nested || topLevel
|
||||||
|
}
|
||||||
|
|
||||||
|
function getToolCallIdFromEvent(event: SSEEvent): string | undefined {
|
||||||
|
const data = getEventData(event)
|
||||||
|
return (
|
||||||
|
event.toolCallId || (data?.id as string | undefined) || (data?.toolCallId as string | undefined)
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Normalizes SSE events so tool metadata is available at the top level. */
|
||||||
|
export function normalizeSseEvent(event: SSEEvent): SSEEvent {
|
||||||
|
if (!event) return event
|
||||||
|
const data = getEventData(event)
|
||||||
|
if (!data) return event
|
||||||
|
const toolCallId =
|
||||||
|
event.toolCallId || (data.id as string | undefined) || (data.toolCallId as string | undefined)
|
||||||
|
const toolName =
|
||||||
|
event.toolName || (data.name as string | undefined) || (data.toolName as string | undefined)
|
||||||
|
const success = event.success ?? (data.success as boolean | undefined)
|
||||||
|
const result = event.result ?? data.result
|
||||||
|
const normalizedData = typeof event.data === 'string' ? data : event.data
|
||||||
|
return {
|
||||||
|
...event,
|
||||||
|
data: normalizedData,
|
||||||
|
toolCallId,
|
||||||
|
toolName,
|
||||||
|
success,
|
||||||
|
result,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function markToolCallSeen(toolCallId: string): void {
|
||||||
|
addToSet(seenToolCalls, toolCallId)
|
||||||
|
}
|
||||||
|
|
||||||
|
function wasToolCallSeen(toolCallId: string): boolean {
|
||||||
|
return seenToolCalls.has(toolCallId)
|
||||||
|
}
|
||||||
|
|
||||||
|
export function markToolResultSeen(toolCallId: string): void {
|
||||||
|
addToSet(seenToolResults, toolCallId)
|
||||||
|
}
|
||||||
|
|
||||||
|
export function wasToolResultSeen(toolCallId: string): boolean {
|
||||||
|
return seenToolResults.has(toolCallId)
|
||||||
|
}
|
||||||
|
|
||||||
|
export function shouldSkipToolCallEvent(event: SSEEvent): boolean {
|
||||||
|
if (event.type !== 'tool_call') return false
|
||||||
|
const toolCallId = getToolCallIdFromEvent(event)
|
||||||
|
if (!toolCallId) return false
|
||||||
|
const eventData = getEventData(event)
|
||||||
|
if (eventData?.partial === true) return false
|
||||||
|
if (wasToolResultSeen(toolCallId) || wasToolCallSeen(toolCallId)) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
markToolCallSeen(toolCallId)
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
export function shouldSkipToolResultEvent(event: SSEEvent): boolean {
|
||||||
|
if (event.type !== 'tool_result') return false
|
||||||
|
const toolCallId = getToolCallIdFromEvent(event)
|
||||||
|
if (!toolCallId) return false
|
||||||
|
if (wasToolResultSeen(toolCallId)) return true
|
||||||
|
markToolResultSeen(toolCallId)
|
||||||
|
return false
|
||||||
|
}
|
||||||
119
apps/sim/lib/copilot/orchestrator/stream-buffer.test.ts
Normal file
119
apps/sim/lib/copilot/orchestrator/stream-buffer.test.ts
Normal file
@@ -0,0 +1,119 @@
|
|||||||
|
/**
|
||||||
|
* @vitest-environment node
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { loggerMock } from '@sim/testing'
|
||||||
|
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||||
|
|
||||||
|
vi.mock('@sim/logger', () => loggerMock)
|
||||||
|
|
||||||
|
type StoredEntry = { score: number; value: string }
|
||||||
|
|
||||||
|
const createRedisStub = () => {
|
||||||
|
const events = new Map<string, StoredEntry[]>()
|
||||||
|
const counters = new Map<string, number>()
|
||||||
|
|
||||||
|
const readEntries = (key: string, min: number, max: number) => {
|
||||||
|
const list = events.get(key) || []
|
||||||
|
return list
|
||||||
|
.filter((entry) => entry.score >= min && entry.score <= max)
|
||||||
|
.sort((a, b) => a.score - b.score)
|
||||||
|
.map((entry) => entry.value)
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
del: vi.fn().mockResolvedValue(1),
|
||||||
|
hset: vi.fn().mockResolvedValue(1),
|
||||||
|
hgetall: vi.fn().mockResolvedValue({}),
|
||||||
|
expire: vi.fn().mockResolvedValue(1),
|
||||||
|
eval: vi
|
||||||
|
.fn()
|
||||||
|
.mockImplementation(
|
||||||
|
(
|
||||||
|
_lua: string,
|
||||||
|
_keysCount: number,
|
||||||
|
seqKey: string,
|
||||||
|
eventsKey: string,
|
||||||
|
_ttl: number,
|
||||||
|
_limit: number,
|
||||||
|
streamId: string,
|
||||||
|
eventJson: string
|
||||||
|
) => {
|
||||||
|
const current = counters.get(seqKey) || 0
|
||||||
|
const next = current + 1
|
||||||
|
counters.set(seqKey, next)
|
||||||
|
const entry = JSON.stringify({ eventId: next, streamId, event: JSON.parse(eventJson) })
|
||||||
|
const list = events.get(eventsKey) || []
|
||||||
|
list.push({ score: next, value: entry })
|
||||||
|
events.set(eventsKey, list)
|
||||||
|
return next
|
||||||
|
}
|
||||||
|
),
|
||||||
|
incrby: vi.fn().mockImplementation((key: string, amount: number) => {
|
||||||
|
const current = counters.get(key) || 0
|
||||||
|
const next = current + amount
|
||||||
|
counters.set(key, next)
|
||||||
|
return next
|
||||||
|
}),
|
||||||
|
zrangebyscore: vi.fn().mockImplementation((key: string, min: string, max: string) => {
|
||||||
|
const minVal = Number(min)
|
||||||
|
const maxVal = max === '+inf' ? Number.POSITIVE_INFINITY : Number(max)
|
||||||
|
return Promise.resolve(readEntries(key, minVal, maxVal))
|
||||||
|
}),
|
||||||
|
pipeline: vi.fn().mockImplementation(() => {
|
||||||
|
const api: Record<string, any> = {}
|
||||||
|
api.zadd = vi.fn().mockImplementation((key: string, ...args: Array<string | number>) => {
|
||||||
|
const list = events.get(key) || []
|
||||||
|
for (let i = 0; i < args.length; i += 2) {
|
||||||
|
list.push({ score: Number(args[i]), value: String(args[i + 1]) })
|
||||||
|
}
|
||||||
|
events.set(key, list)
|
||||||
|
return api
|
||||||
|
})
|
||||||
|
api.expire = vi.fn().mockReturnValue(api)
|
||||||
|
api.zremrangebyrank = vi.fn().mockReturnValue(api)
|
||||||
|
api.exec = vi.fn().mockResolvedValue([])
|
||||||
|
return api
|
||||||
|
}),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let mockRedis: ReturnType<typeof createRedisStub>
|
||||||
|
|
||||||
|
vi.mock('@/lib/core/config/redis', () => ({
|
||||||
|
getRedisClient: () => mockRedis,
|
||||||
|
}))
|
||||||
|
|
||||||
|
import {
|
||||||
|
appendStreamEvent,
|
||||||
|
createStreamEventWriter,
|
||||||
|
readStreamEvents,
|
||||||
|
} from '@/lib/copilot/orchestrator/stream-buffer'
|
||||||
|
|
||||||
|
describe('stream-buffer', () => {
|
||||||
|
beforeEach(() => {
|
||||||
|
mockRedis = createRedisStub()
|
||||||
|
vi.clearAllMocks()
|
||||||
|
})
|
||||||
|
|
||||||
|
it.concurrent('replays events after a given event id', async () => {
|
||||||
|
await appendStreamEvent('stream-1', { type: 'content', data: 'hello' })
|
||||||
|
await appendStreamEvent('stream-1', { type: 'content', data: 'world' })
|
||||||
|
|
||||||
|
const allEvents = await readStreamEvents('stream-1', 0)
|
||||||
|
expect(allEvents.map((entry) => entry.event.data)).toEqual(['hello', 'world'])
|
||||||
|
|
||||||
|
const replayed = await readStreamEvents('stream-1', 1)
|
||||||
|
expect(replayed.map((entry) => entry.event.data)).toEqual(['world'])
|
||||||
|
})
|
||||||
|
|
||||||
|
it.concurrent('flushes buffered events for resume', async () => {
|
||||||
|
const writer = createStreamEventWriter('stream-2')
|
||||||
|
await writer.write({ type: 'content', data: 'a' })
|
||||||
|
await writer.write({ type: 'content', data: 'b' })
|
||||||
|
await writer.flush()
|
||||||
|
|
||||||
|
const events = await readStreamEvents('stream-2', 0)
|
||||||
|
expect(events.map((entry) => entry.event.data)).toEqual(['a', 'b'])
|
||||||
|
})
|
||||||
|
})
|
||||||
309
apps/sim/lib/copilot/orchestrator/stream-buffer.ts
Normal file
309
apps/sim/lib/copilot/orchestrator/stream-buffer.ts
Normal file
@@ -0,0 +1,309 @@
|
|||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { REDIS_COPILOT_STREAM_PREFIX } from '@/lib/copilot/constants'
|
||||||
|
import { env } from '@/lib/core/config/env'
|
||||||
|
import { getRedisClient } from '@/lib/core/config/redis'
|
||||||
|
|
||||||
|
const logger = createLogger('CopilotStreamBuffer')
|
||||||
|
|
||||||
|
const STREAM_DEFAULTS = {
|
||||||
|
ttlSeconds: 60 * 60,
|
||||||
|
eventLimit: 5000,
|
||||||
|
reserveBatch: 200,
|
||||||
|
flushIntervalMs: 15,
|
||||||
|
flushMaxBatch: 200,
|
||||||
|
}
|
||||||
|
|
||||||
|
export type StreamBufferConfig = {
|
||||||
|
ttlSeconds: number
|
||||||
|
eventLimit: number
|
||||||
|
reserveBatch: number
|
||||||
|
flushIntervalMs: number
|
||||||
|
flushMaxBatch: number
|
||||||
|
}
|
||||||
|
|
||||||
|
const parseNumber = (value: number | string | undefined, fallback: number): number => {
|
||||||
|
if (typeof value === 'number' && Number.isFinite(value)) return value
|
||||||
|
const parsed = Number(value)
|
||||||
|
return Number.isFinite(parsed) ? parsed : fallback
|
||||||
|
}
|
||||||
|
|
||||||
|
export function getStreamBufferConfig(): StreamBufferConfig {
|
||||||
|
return {
|
||||||
|
ttlSeconds: parseNumber(env.COPILOT_STREAM_TTL_SECONDS, STREAM_DEFAULTS.ttlSeconds),
|
||||||
|
eventLimit: parseNumber(env.COPILOT_STREAM_EVENT_LIMIT, STREAM_DEFAULTS.eventLimit),
|
||||||
|
reserveBatch: parseNumber(env.COPILOT_STREAM_RESERVE_BATCH, STREAM_DEFAULTS.reserveBatch),
|
||||||
|
flushIntervalMs: parseNumber(
|
||||||
|
env.COPILOT_STREAM_FLUSH_INTERVAL_MS,
|
||||||
|
STREAM_DEFAULTS.flushIntervalMs
|
||||||
|
),
|
||||||
|
flushMaxBatch: parseNumber(env.COPILOT_STREAM_FLUSH_MAX_BATCH, STREAM_DEFAULTS.flushMaxBatch),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const APPEND_STREAM_EVENT_LUA = `
|
||||||
|
local seqKey = KEYS[1]
|
||||||
|
local eventsKey = KEYS[2]
|
||||||
|
local ttl = tonumber(ARGV[1])
|
||||||
|
local limit = tonumber(ARGV[2])
|
||||||
|
local streamId = ARGV[3]
|
||||||
|
local eventJson = ARGV[4]
|
||||||
|
|
||||||
|
local id = redis.call('INCR', seqKey)
|
||||||
|
local entry = '{"eventId":' .. id .. ',"streamId":' .. cjson.encode(streamId) .. ',"event":' .. eventJson .. '}'
|
||||||
|
redis.call('ZADD', eventsKey, id, entry)
|
||||||
|
redis.call('EXPIRE', eventsKey, ttl)
|
||||||
|
redis.call('EXPIRE', seqKey, ttl)
|
||||||
|
if limit > 0 then
|
||||||
|
redis.call('ZREMRANGEBYRANK', eventsKey, 0, -limit-1)
|
||||||
|
end
|
||||||
|
return id
|
||||||
|
`
|
||||||
|
|
||||||
|
function getStreamKeyPrefix(streamId: string) {
|
||||||
|
return `${REDIS_COPILOT_STREAM_PREFIX}${streamId}`
|
||||||
|
}
|
||||||
|
|
||||||
|
function getEventsKey(streamId: string) {
|
||||||
|
return `${getStreamKeyPrefix(streamId)}:events`
|
||||||
|
}
|
||||||
|
|
||||||
|
function getSeqKey(streamId: string) {
|
||||||
|
return `${getStreamKeyPrefix(streamId)}:seq`
|
||||||
|
}
|
||||||
|
|
||||||
|
function getMetaKey(streamId: string) {
|
||||||
|
return `${getStreamKeyPrefix(streamId)}:meta`
|
||||||
|
}
|
||||||
|
|
||||||
|
export type StreamStatus = 'active' | 'complete' | 'error'
|
||||||
|
|
||||||
|
export type StreamMeta = {
|
||||||
|
status: StreamStatus
|
||||||
|
userId?: string
|
||||||
|
updatedAt?: string
|
||||||
|
error?: string
|
||||||
|
}
|
||||||
|
|
||||||
|
export type StreamEventEntry = {
|
||||||
|
eventId: number
|
||||||
|
streamId: string
|
||||||
|
event: Record<string, unknown>
|
||||||
|
}
|
||||||
|
|
||||||
|
export type StreamEventWriter = {
|
||||||
|
write: (event: Record<string, unknown>) => Promise<StreamEventEntry>
|
||||||
|
flush: () => Promise<void>
|
||||||
|
close: () => Promise<void>
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function resetStreamBuffer(streamId: string): Promise<void> {
|
||||||
|
const redis = getRedisClient()
|
||||||
|
if (!redis) return
|
||||||
|
try {
|
||||||
|
await redis.del(getEventsKey(streamId), getSeqKey(streamId), getMetaKey(streamId))
|
||||||
|
} catch (error) {
|
||||||
|
logger.warn('Failed to reset stream buffer', {
|
||||||
|
streamId,
|
||||||
|
error: error instanceof Error ? error.message : String(error),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function setStreamMeta(streamId: string, meta: StreamMeta): Promise<void> {
|
||||||
|
const redis = getRedisClient()
|
||||||
|
if (!redis) return
|
||||||
|
try {
|
||||||
|
const config = getStreamBufferConfig()
|
||||||
|
const payload: Record<string, string> = {
|
||||||
|
status: meta.status,
|
||||||
|
updatedAt: meta.updatedAt || new Date().toISOString(),
|
||||||
|
}
|
||||||
|
if (meta.userId) payload.userId = meta.userId
|
||||||
|
if (meta.error) payload.error = meta.error
|
||||||
|
await redis.hset(getMetaKey(streamId), payload)
|
||||||
|
await redis.expire(getMetaKey(streamId), config.ttlSeconds)
|
||||||
|
} catch (error) {
|
||||||
|
logger.warn('Failed to update stream meta', {
|
||||||
|
streamId,
|
||||||
|
error: error instanceof Error ? error.message : String(error),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function getStreamMeta(streamId: string): Promise<StreamMeta | null> {
|
||||||
|
const redis = getRedisClient()
|
||||||
|
if (!redis) return null
|
||||||
|
try {
|
||||||
|
const meta = await redis.hgetall(getMetaKey(streamId))
|
||||||
|
if (!meta || Object.keys(meta).length === 0) return null
|
||||||
|
return meta as StreamMeta
|
||||||
|
} catch (error) {
|
||||||
|
logger.warn('Failed to read stream meta', {
|
||||||
|
streamId,
|
||||||
|
error: error instanceof Error ? error.message : String(error),
|
||||||
|
})
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function appendStreamEvent(
|
||||||
|
streamId: string,
|
||||||
|
event: Record<string, unknown>
|
||||||
|
): Promise<StreamEventEntry> {
|
||||||
|
const redis = getRedisClient()
|
||||||
|
if (!redis) {
|
||||||
|
return { eventId: 0, streamId, event }
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const config = getStreamBufferConfig()
|
||||||
|
const eventJson = JSON.stringify(event)
|
||||||
|
const nextId = await redis.eval(
|
||||||
|
APPEND_STREAM_EVENT_LUA,
|
||||||
|
2,
|
||||||
|
getSeqKey(streamId),
|
||||||
|
getEventsKey(streamId),
|
||||||
|
config.ttlSeconds,
|
||||||
|
config.eventLimit,
|
||||||
|
streamId,
|
||||||
|
eventJson
|
||||||
|
)
|
||||||
|
const eventId = typeof nextId === 'number' ? nextId : Number(nextId)
|
||||||
|
return { eventId, streamId, event }
|
||||||
|
} catch (error) {
|
||||||
|
logger.warn('Failed to append stream event', {
|
||||||
|
streamId,
|
||||||
|
error: error instanceof Error ? error.message : String(error),
|
||||||
|
})
|
||||||
|
return { eventId: 0, streamId, event }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function createStreamEventWriter(streamId: string): StreamEventWriter {
|
||||||
|
const redis = getRedisClient()
|
||||||
|
if (!redis) {
|
||||||
|
return {
|
||||||
|
write: async (event) => ({ eventId: 0, streamId, event }),
|
||||||
|
flush: async () => {},
|
||||||
|
close: async () => {},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const config = getStreamBufferConfig()
|
||||||
|
let pending: StreamEventEntry[] = []
|
||||||
|
let nextEventId = 0
|
||||||
|
let maxReservedId = 0
|
||||||
|
let flushTimer: ReturnType<typeof setTimeout> | null = null
|
||||||
|
const scheduleFlush = () => {
|
||||||
|
if (flushTimer) return
|
||||||
|
flushTimer = setTimeout(() => {
|
||||||
|
flushTimer = null
|
||||||
|
void flush()
|
||||||
|
}, config.flushIntervalMs)
|
||||||
|
}
|
||||||
|
|
||||||
|
const reserveIds = async (minCount: number) => {
|
||||||
|
const reserveCount = Math.max(config.reserveBatch, minCount)
|
||||||
|
const newMax = await redis.incrby(getSeqKey(streamId), reserveCount)
|
||||||
|
const startId = newMax - reserveCount + 1
|
||||||
|
if (nextEventId === 0 || nextEventId > maxReservedId) {
|
||||||
|
nextEventId = startId
|
||||||
|
maxReservedId = newMax
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let flushPromise: Promise<void> | null = null
|
||||||
|
let closed = false
|
||||||
|
|
||||||
|
const doFlush = async () => {
|
||||||
|
if (pending.length === 0) return
|
||||||
|
const batch = pending
|
||||||
|
pending = []
|
||||||
|
try {
|
||||||
|
const key = getEventsKey(streamId)
|
||||||
|
const zaddArgs: (string | number)[] = []
|
||||||
|
for (const entry of batch) {
|
||||||
|
zaddArgs.push(entry.eventId, JSON.stringify(entry))
|
||||||
|
}
|
||||||
|
const pipeline = redis.pipeline()
|
||||||
|
pipeline.zadd(key, ...(zaddArgs as [number, string]))
|
||||||
|
pipeline.expire(key, config.ttlSeconds)
|
||||||
|
pipeline.expire(getSeqKey(streamId), config.ttlSeconds)
|
||||||
|
pipeline.zremrangebyrank(key, 0, -config.eventLimit - 1)
|
||||||
|
await pipeline.exec()
|
||||||
|
} catch (error) {
|
||||||
|
logger.warn('Failed to flush stream events', {
|
||||||
|
streamId,
|
||||||
|
error: error instanceof Error ? error.message : String(error),
|
||||||
|
})
|
||||||
|
pending = batch.concat(pending)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const flush = async () => {
|
||||||
|
if (flushPromise) {
|
||||||
|
await flushPromise
|
||||||
|
return
|
||||||
|
}
|
||||||
|
flushPromise = doFlush()
|
||||||
|
try {
|
||||||
|
await flushPromise
|
||||||
|
} finally {
|
||||||
|
flushPromise = null
|
||||||
|
if (pending.length > 0) scheduleFlush()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const write = async (event: Record<string, unknown>) => {
|
||||||
|
if (closed) return { eventId: 0, streamId, event }
|
||||||
|
if (nextEventId === 0 || nextEventId > maxReservedId) {
|
||||||
|
await reserveIds(1)
|
||||||
|
}
|
||||||
|
const eventId = nextEventId++
|
||||||
|
const entry: StreamEventEntry = { eventId, streamId, event }
|
||||||
|
pending.push(entry)
|
||||||
|
if (pending.length >= config.flushMaxBatch) {
|
||||||
|
await flush()
|
||||||
|
} else {
|
||||||
|
scheduleFlush()
|
||||||
|
}
|
||||||
|
return entry
|
||||||
|
}
|
||||||
|
|
||||||
|
const close = async () => {
|
||||||
|
closed = true
|
||||||
|
if (flushTimer) {
|
||||||
|
clearTimeout(flushTimer)
|
||||||
|
flushTimer = null
|
||||||
|
}
|
||||||
|
await flush()
|
||||||
|
}
|
||||||
|
|
||||||
|
return { write, flush, close }
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function readStreamEvents(
|
||||||
|
streamId: string,
|
||||||
|
afterEventId: number
|
||||||
|
): Promise<StreamEventEntry[]> {
|
||||||
|
const redis = getRedisClient()
|
||||||
|
if (!redis) return []
|
||||||
|
try {
|
||||||
|
const raw = await redis.zrangebyscore(getEventsKey(streamId), afterEventId + 1, '+inf')
|
||||||
|
return raw
|
||||||
|
.map((entry) => {
|
||||||
|
try {
|
||||||
|
return JSON.parse(entry) as StreamEventEntry
|
||||||
|
} catch {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.filter((entry): entry is StreamEventEntry => Boolean(entry))
|
||||||
|
} catch (error) {
|
||||||
|
logger.warn('Failed to read stream events', {
|
||||||
|
streamId,
|
||||||
|
error: error instanceof Error ? error.message : String(error),
|
||||||
|
})
|
||||||
|
return []
|
||||||
|
}
|
||||||
|
}
|
||||||
182
apps/sim/lib/copilot/orchestrator/stream-core.ts
Normal file
182
apps/sim/lib/copilot/orchestrator/stream-core.ts
Normal file
@@ -0,0 +1,182 @@
|
|||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { ORCHESTRATION_TIMEOUT_MS } from '@/lib/copilot/constants'
|
||||||
|
import {
|
||||||
|
handleSubagentRouting,
|
||||||
|
sseHandlers,
|
||||||
|
subAgentHandlers,
|
||||||
|
} from '@/lib/copilot/orchestrator/sse-handlers'
|
||||||
|
import { parseSSEStream } from '@/lib/copilot/orchestrator/sse-parser'
|
||||||
|
import {
|
||||||
|
normalizeSseEvent,
|
||||||
|
shouldSkipToolCallEvent,
|
||||||
|
shouldSkipToolResultEvent,
|
||||||
|
} from '@/lib/copilot/orchestrator/sse-utils'
|
||||||
|
import type {
|
||||||
|
ExecutionContext,
|
||||||
|
OrchestratorOptions,
|
||||||
|
SSEEvent,
|
||||||
|
StreamingContext,
|
||||||
|
ToolCallSummary,
|
||||||
|
} from '@/lib/copilot/orchestrator/types'
|
||||||
|
|
||||||
|
const logger = createLogger('CopilotStreamCore')
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Options for the shared stream processing loop.
|
||||||
|
*/
|
||||||
|
export interface StreamLoopOptions extends OrchestratorOptions {
|
||||||
|
/**
|
||||||
|
* Called for each normalized event BEFORE standard handler dispatch.
|
||||||
|
* Return true to skip the default handler for this event.
|
||||||
|
*/
|
||||||
|
onBeforeDispatch?: (event: SSEEvent, context: StreamingContext) => boolean | undefined
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a fresh StreamingContext.
|
||||||
|
*/
|
||||||
|
export function createStreamingContext(overrides?: Partial<StreamingContext>): StreamingContext {
|
||||||
|
return {
|
||||||
|
chatId: undefined,
|
||||||
|
conversationId: undefined,
|
||||||
|
messageId: crypto.randomUUID(),
|
||||||
|
accumulatedContent: '',
|
||||||
|
contentBlocks: [],
|
||||||
|
toolCalls: new Map(),
|
||||||
|
currentThinkingBlock: null,
|
||||||
|
isInThinkingBlock: false,
|
||||||
|
subAgentParentToolCallId: undefined,
|
||||||
|
subAgentContent: {},
|
||||||
|
subAgentToolCalls: {},
|
||||||
|
pendingContent: '',
|
||||||
|
streamComplete: false,
|
||||||
|
wasAborted: false,
|
||||||
|
errors: [],
|
||||||
|
...overrides,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Run the SSE stream processing loop.
|
||||||
|
*
|
||||||
|
* Handles: fetch -> parse -> normalize -> dedupe -> subagent routing -> handler dispatch.
|
||||||
|
* Callers provide the fetch URL/options and can intercept events via onBeforeDispatch.
|
||||||
|
*/
|
||||||
|
export async function runStreamLoop(
|
||||||
|
fetchUrl: string,
|
||||||
|
fetchOptions: RequestInit,
|
||||||
|
context: StreamingContext,
|
||||||
|
execContext: ExecutionContext,
|
||||||
|
options: StreamLoopOptions
|
||||||
|
): Promise<void> {
|
||||||
|
const { timeout = ORCHESTRATION_TIMEOUT_MS, abortSignal } = options
|
||||||
|
|
||||||
|
const response = await fetch(fetchUrl, {
|
||||||
|
...fetchOptions,
|
||||||
|
signal: abortSignal,
|
||||||
|
})
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
const errorText = await response.text().catch(() => '')
|
||||||
|
throw new Error(
|
||||||
|
`Copilot backend error (${response.status}): ${errorText || response.statusText}`
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!response.body) {
|
||||||
|
throw new Error('Copilot backend response missing body')
|
||||||
|
}
|
||||||
|
|
||||||
|
const reader = response.body.getReader()
|
||||||
|
const decoder = new TextDecoder()
|
||||||
|
|
||||||
|
const timeoutId = setTimeout(() => {
|
||||||
|
context.errors.push('Request timed out')
|
||||||
|
context.streamComplete = true
|
||||||
|
reader.cancel().catch(() => {})
|
||||||
|
}, timeout)
|
||||||
|
|
||||||
|
try {
|
||||||
|
for await (const event of parseSSEStream(reader, decoder, abortSignal)) {
|
||||||
|
if (abortSignal?.aborted) {
|
||||||
|
context.wasAborted = true
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
const normalizedEvent = normalizeSseEvent(event)
|
||||||
|
|
||||||
|
// Skip duplicate tool events.
|
||||||
|
const shouldSkipToolCall = shouldSkipToolCallEvent(normalizedEvent)
|
||||||
|
const shouldSkipToolResult = shouldSkipToolResultEvent(normalizedEvent)
|
||||||
|
|
||||||
|
if (!shouldSkipToolCall && !shouldSkipToolResult) {
|
||||||
|
try {
|
||||||
|
await options.onEvent?.(normalizedEvent)
|
||||||
|
} catch (error) {
|
||||||
|
logger.warn('Failed to forward SSE event', {
|
||||||
|
type: normalizedEvent.type,
|
||||||
|
error: error instanceof Error ? error.message : String(error),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Let the caller intercept before standard dispatch.
|
||||||
|
if (options.onBeforeDispatch?.(normalizedEvent, context)) {
|
||||||
|
if (context.streamComplete) break
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Standard subagent start/end handling.
|
||||||
|
if (normalizedEvent.type === 'subagent_start') {
|
||||||
|
const eventData = normalizedEvent.data as Record<string, unknown> | undefined
|
||||||
|
const toolCallId = eventData?.tool_call_id as string | undefined
|
||||||
|
if (toolCallId) {
|
||||||
|
context.subAgentParentToolCallId = toolCallId
|
||||||
|
context.subAgentContent[toolCallId] = ''
|
||||||
|
context.subAgentToolCalls[toolCallId] = []
|
||||||
|
}
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if (normalizedEvent.type === 'subagent_end') {
|
||||||
|
context.subAgentParentToolCallId = undefined
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Subagent event routing.
|
||||||
|
if (handleSubagentRouting(normalizedEvent, context)) {
|
||||||
|
const handler = subAgentHandlers[normalizedEvent.type]
|
||||||
|
if (handler) {
|
||||||
|
await handler(normalizedEvent, context, execContext, options)
|
||||||
|
}
|
||||||
|
if (context.streamComplete) break
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Main event handler dispatch.
|
||||||
|
const handler = sseHandlers[normalizedEvent.type]
|
||||||
|
if (handler) {
|
||||||
|
await handler(normalizedEvent, context, execContext, options)
|
||||||
|
}
|
||||||
|
if (context.streamComplete) break
|
||||||
|
}
|
||||||
|
} finally {
|
||||||
|
clearTimeout(timeoutId)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Build a ToolCallSummary array from the streaming context.
|
||||||
|
*/
|
||||||
|
export function buildToolCallSummaries(context: StreamingContext): ToolCallSummary[] {
|
||||||
|
return Array.from(context.toolCalls.values()).map((toolCall) => ({
|
||||||
|
id: toolCall.id,
|
||||||
|
name: toolCall.name,
|
||||||
|
status: toolCall.status,
|
||||||
|
params: toolCall.params,
|
||||||
|
result: toolCall.result?.output,
|
||||||
|
error: toolCall.error,
|
||||||
|
durationMs:
|
||||||
|
toolCall.endTime && toolCall.startTime ? toolCall.endTime - toolCall.startTime : undefined,
|
||||||
|
}))
|
||||||
|
}
|
||||||
137
apps/sim/lib/copilot/orchestrator/subagent.ts
Normal file
137
apps/sim/lib/copilot/orchestrator/subagent.ts
Normal file
@@ -0,0 +1,137 @@
|
|||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { SIM_AGENT_API_URL } from '@/lib/copilot/constants'
|
||||||
|
import { prepareExecutionContext } from '@/lib/copilot/orchestrator/tool-executor'
|
||||||
|
import type {
|
||||||
|
ExecutionContext,
|
||||||
|
OrchestratorOptions,
|
||||||
|
SSEEvent,
|
||||||
|
StreamingContext,
|
||||||
|
ToolCallSummary,
|
||||||
|
} from '@/lib/copilot/orchestrator/types'
|
||||||
|
import { env } from '@/lib/core/config/env'
|
||||||
|
import { getEffectiveDecryptedEnv } from '@/lib/environment/utils'
|
||||||
|
import { buildToolCallSummaries, createStreamingContext, runStreamLoop } from './stream-core'
|
||||||
|
|
||||||
|
const logger = createLogger('CopilotSubagentOrchestrator')
|
||||||
|
|
||||||
|
export interface SubagentOrchestratorOptions extends Omit<OrchestratorOptions, 'onComplete'> {
|
||||||
|
userId: string
|
||||||
|
workflowId?: string
|
||||||
|
workspaceId?: string
|
||||||
|
onComplete?: (result: SubagentOrchestratorResult) => void | Promise<void>
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface SubagentOrchestratorResult {
|
||||||
|
success: boolean
|
||||||
|
content: string
|
||||||
|
toolCalls: ToolCallSummary[]
|
||||||
|
structuredResult?: {
|
||||||
|
type?: string
|
||||||
|
summary?: string
|
||||||
|
data?: unknown
|
||||||
|
success?: boolean
|
||||||
|
}
|
||||||
|
error?: string
|
||||||
|
errors?: string[]
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function orchestrateSubagentStream(
|
||||||
|
agentId: string,
|
||||||
|
requestPayload: Record<string, unknown>,
|
||||||
|
options: SubagentOrchestratorOptions
|
||||||
|
): Promise<SubagentOrchestratorResult> {
|
||||||
|
const { userId, workflowId, workspaceId } = options
|
||||||
|
const execContext = await buildExecutionContext(userId, workflowId, workspaceId)
|
||||||
|
|
||||||
|
const msgId = requestPayload?.messageId
|
||||||
|
const context = createStreamingContext({
|
||||||
|
messageId: typeof msgId === 'string' ? msgId : crypto.randomUUID(),
|
||||||
|
})
|
||||||
|
|
||||||
|
let structuredResult: SubagentOrchestratorResult['structuredResult']
|
||||||
|
|
||||||
|
try {
|
||||||
|
await runStreamLoop(
|
||||||
|
`${SIM_AGENT_API_URL}/api/subagent/${agentId}`,
|
||||||
|
{
|
||||||
|
method: 'POST',
|
||||||
|
headers: {
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
...(env.COPILOT_API_KEY ? { 'x-api-key': env.COPILOT_API_KEY } : {}),
|
||||||
|
},
|
||||||
|
body: JSON.stringify({ ...requestPayload, userId, stream: true }),
|
||||||
|
},
|
||||||
|
context,
|
||||||
|
execContext,
|
||||||
|
{
|
||||||
|
...options,
|
||||||
|
onBeforeDispatch: (event: SSEEvent, ctx: StreamingContext) => {
|
||||||
|
// Handle structured_result / subagent_result - subagent-specific.
|
||||||
|
if (event.type === 'structured_result' || event.type === 'subagent_result') {
|
||||||
|
structuredResult = normalizeStructuredResult(event.data)
|
||||||
|
ctx.streamComplete = true
|
||||||
|
return true // skip default dispatch
|
||||||
|
}
|
||||||
|
|
||||||
|
// For direct subagent calls, events may have the subagent field set
|
||||||
|
// but no subagent_start because this IS the top-level agent.
|
||||||
|
// Skip subagent routing for events where the subagent field matches
|
||||||
|
// the current agentId - these are top-level events.
|
||||||
|
if (event.subagent === agentId && !ctx.subAgentParentToolCallId) {
|
||||||
|
return false // let default dispatch handle it
|
||||||
|
}
|
||||||
|
|
||||||
|
return false // let default dispatch handle it
|
||||||
|
},
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
const result: SubagentOrchestratorResult = {
|
||||||
|
success: context.errors.length === 0 && !context.wasAborted,
|
||||||
|
content: context.accumulatedContent,
|
||||||
|
toolCalls: buildToolCallSummaries(context),
|
||||||
|
structuredResult,
|
||||||
|
errors: context.errors.length ? context.errors : undefined,
|
||||||
|
}
|
||||||
|
await options.onComplete?.(result)
|
||||||
|
return result
|
||||||
|
} catch (error) {
|
||||||
|
const err = error instanceof Error ? error : new Error('Subagent orchestration failed')
|
||||||
|
logger.error('Subagent orchestration failed', { error: err.message, agentId })
|
||||||
|
await options.onError?.(err)
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
content: context.accumulatedContent,
|
||||||
|
toolCalls: [],
|
||||||
|
error: err.message,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function normalizeStructuredResult(data: unknown): SubagentOrchestratorResult['structuredResult'] {
|
||||||
|
if (!data || typeof data !== 'object') return undefined
|
||||||
|
const d = data as Record<string, unknown>
|
||||||
|
return {
|
||||||
|
type: (d.result_type || d.type) as string | undefined,
|
||||||
|
summary: d.summary as string | undefined,
|
||||||
|
data: d.data ?? d,
|
||||||
|
success: d.success as boolean | undefined,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async function buildExecutionContext(
|
||||||
|
userId: string,
|
||||||
|
workflowId?: string,
|
||||||
|
workspaceId?: string
|
||||||
|
): Promise<ExecutionContext> {
|
||||||
|
if (workflowId) {
|
||||||
|
return prepareExecutionContext(userId, workflowId)
|
||||||
|
}
|
||||||
|
const decryptedEnvVars = await getEffectiveDecryptedEnv(userId, workspaceId)
|
||||||
|
return {
|
||||||
|
userId,
|
||||||
|
workflowId: workflowId || '',
|
||||||
|
workspaceId,
|
||||||
|
decryptedEnvVars,
|
||||||
|
}
|
||||||
|
}
|
||||||
129
apps/sim/lib/copilot/orchestrator/tool-executor/access.ts
Normal file
129
apps/sim/lib/copilot/orchestrator/tool-executor/access.ts
Normal file
@@ -0,0 +1,129 @@
|
|||||||
|
import { db } from '@sim/db'
|
||||||
|
import { permissions, workflow, workspace } from '@sim/db/schema'
|
||||||
|
import { and, asc, desc, eq, inArray, or } from 'drizzle-orm'
|
||||||
|
|
||||||
|
type WorkflowRecord = typeof workflow.$inferSelect
|
||||||
|
|
||||||
|
export async function ensureWorkflowAccess(
|
||||||
|
workflowId: string,
|
||||||
|
userId: string
|
||||||
|
): Promise<{
|
||||||
|
workflow: WorkflowRecord
|
||||||
|
workspaceId?: string | null
|
||||||
|
}> {
|
||||||
|
const [workflowRecord] = await db
|
||||||
|
.select()
|
||||||
|
.from(workflow)
|
||||||
|
.where(eq(workflow.id, workflowId))
|
||||||
|
.limit(1)
|
||||||
|
if (!workflowRecord) {
|
||||||
|
throw new Error(`Workflow ${workflowId} not found`)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (workflowRecord.userId === userId) {
|
||||||
|
return { workflow: workflowRecord, workspaceId: workflowRecord.workspaceId }
|
||||||
|
}
|
||||||
|
|
||||||
|
if (workflowRecord.workspaceId) {
|
||||||
|
const [permissionRow] = await db
|
||||||
|
.select({ permissionType: permissions.permissionType })
|
||||||
|
.from(permissions)
|
||||||
|
.where(
|
||||||
|
and(
|
||||||
|
eq(permissions.entityType, 'workspace'),
|
||||||
|
eq(permissions.entityId, workflowRecord.workspaceId),
|
||||||
|
eq(permissions.userId, userId)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.limit(1)
|
||||||
|
if (permissionRow) {
|
||||||
|
return { workflow: workflowRecord, workspaceId: workflowRecord.workspaceId }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
throw new Error('Unauthorized workflow access')
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function getDefaultWorkspaceId(userId: string): Promise<string> {
|
||||||
|
const workspaces = await db
|
||||||
|
.select({ workspaceId: workspace.id })
|
||||||
|
.from(permissions)
|
||||||
|
.innerJoin(workspace, eq(permissions.entityId, workspace.id))
|
||||||
|
.where(and(eq(permissions.userId, userId), eq(permissions.entityType, 'workspace')))
|
||||||
|
.orderBy(desc(workspace.createdAt))
|
||||||
|
.limit(1)
|
||||||
|
|
||||||
|
const workspaceId = workspaces[0]?.workspaceId
|
||||||
|
if (!workspaceId) {
|
||||||
|
throw new Error('No workspace found for user')
|
||||||
|
}
|
||||||
|
|
||||||
|
return workspaceId
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function ensureWorkspaceAccess(
|
||||||
|
workspaceId: string,
|
||||||
|
userId: string,
|
||||||
|
requireWrite: boolean
|
||||||
|
): Promise<void> {
|
||||||
|
const [row] = await db
|
||||||
|
.select({
|
||||||
|
permissionType: permissions.permissionType,
|
||||||
|
ownerId: workspace.ownerId,
|
||||||
|
})
|
||||||
|
.from(permissions)
|
||||||
|
.innerJoin(workspace, eq(permissions.entityId, workspace.id))
|
||||||
|
.where(
|
||||||
|
and(
|
||||||
|
eq(permissions.entityType, 'workspace'),
|
||||||
|
eq(permissions.entityId, workspaceId),
|
||||||
|
eq(permissions.userId, userId)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.limit(1)
|
||||||
|
|
||||||
|
if (!row) {
|
||||||
|
throw new Error(`Workspace ${workspaceId} not found`)
|
||||||
|
}
|
||||||
|
|
||||||
|
const isOwner = row.ownerId === userId
|
||||||
|
const permissionType = row.permissionType
|
||||||
|
const canWrite = isOwner || permissionType === 'admin' || permissionType === 'write'
|
||||||
|
|
||||||
|
if (requireWrite && !canWrite) {
|
||||||
|
throw new Error('Write or admin access required for this workspace')
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!requireWrite && !canWrite && permissionType !== 'read') {
|
||||||
|
throw new Error('Access denied to workspace')
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function getAccessibleWorkflowsForUser(
|
||||||
|
userId: string,
|
||||||
|
options?: { workspaceId?: string; folderId?: string }
|
||||||
|
) {
|
||||||
|
const workspaceIds = await db
|
||||||
|
.select({ entityId: permissions.entityId })
|
||||||
|
.from(permissions)
|
||||||
|
.where(and(eq(permissions.userId, userId), eq(permissions.entityType, 'workspace')))
|
||||||
|
|
||||||
|
const workspaceIdList = workspaceIds.map((row) => row.entityId)
|
||||||
|
|
||||||
|
const workflowConditions = [eq(workflow.userId, userId)]
|
||||||
|
if (workspaceIdList.length > 0) {
|
||||||
|
workflowConditions.push(inArray(workflow.workspaceId, workspaceIdList))
|
||||||
|
}
|
||||||
|
if (options?.workspaceId) {
|
||||||
|
workflowConditions.push(eq(workflow.workspaceId, options.workspaceId))
|
||||||
|
}
|
||||||
|
if (options?.folderId) {
|
||||||
|
workflowConditions.push(eq(workflow.folderId, options.folderId))
|
||||||
|
}
|
||||||
|
|
||||||
|
return db
|
||||||
|
.select()
|
||||||
|
.from(workflow)
|
||||||
|
.where(or(...workflowConditions))
|
||||||
|
.orderBy(asc(workflow.sortOrder), asc(workflow.createdAt), asc(workflow.id))
|
||||||
|
}
|
||||||
@@ -0,0 +1,288 @@
|
|||||||
|
import crypto from 'crypto'
|
||||||
|
import { db } from '@sim/db'
|
||||||
|
import { chat, workflowMcpTool } from '@sim/db/schema'
|
||||||
|
import { and, eq } from 'drizzle-orm'
|
||||||
|
import type { ExecutionContext, ToolCallResult } from '@/lib/copilot/orchestrator/types'
|
||||||
|
import { sanitizeToolName } from '@/lib/mcp/workflow-tool-schema'
|
||||||
|
import { deployWorkflow, undeployWorkflow } from '@/lib/workflows/persistence/utils'
|
||||||
|
import { checkChatAccess, checkWorkflowAccessForChatCreation } from '@/app/api/chat/utils'
|
||||||
|
import { ensureWorkflowAccess } from '../access'
|
||||||
|
import type { DeployApiParams, DeployChatParams, DeployMcpParams } from '../param-types'
|
||||||
|
|
||||||
|
export async function executeDeployApi(
|
||||||
|
params: DeployApiParams,
|
||||||
|
context: ExecutionContext
|
||||||
|
): Promise<ToolCallResult> {
|
||||||
|
try {
|
||||||
|
const workflowId = params.workflowId || context.workflowId
|
||||||
|
if (!workflowId) {
|
||||||
|
return { success: false, error: 'workflowId is required' }
|
||||||
|
}
|
||||||
|
const action = params.action === 'undeploy' ? 'undeploy' : 'deploy'
|
||||||
|
const { workflow: workflowRecord } = await ensureWorkflowAccess(workflowId, context.userId)
|
||||||
|
|
||||||
|
if (action === 'undeploy') {
|
||||||
|
const result = await undeployWorkflow({ workflowId })
|
||||||
|
if (!result.success) {
|
||||||
|
return { success: false, error: result.error || 'Failed to undeploy workflow' }
|
||||||
|
}
|
||||||
|
return { success: true, output: { workflowId, isDeployed: false } }
|
||||||
|
}
|
||||||
|
|
||||||
|
const result = await deployWorkflow({
|
||||||
|
workflowId,
|
||||||
|
deployedBy: context.userId,
|
||||||
|
workflowName: workflowRecord.name || undefined,
|
||||||
|
})
|
||||||
|
if (!result.success) {
|
||||||
|
return { success: false, error: result.error || 'Failed to deploy workflow' }
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
success: true,
|
||||||
|
output: {
|
||||||
|
workflowId,
|
||||||
|
isDeployed: true,
|
||||||
|
deployedAt: result.deployedAt,
|
||||||
|
version: result.version,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
return { success: false, error: error instanceof Error ? error.message : String(error) }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function executeDeployChat(
|
||||||
|
params: DeployChatParams,
|
||||||
|
context: ExecutionContext
|
||||||
|
): Promise<ToolCallResult> {
|
||||||
|
try {
|
||||||
|
const workflowId = params.workflowId || context.workflowId
|
||||||
|
if (!workflowId) {
|
||||||
|
return { success: false, error: 'workflowId is required' }
|
||||||
|
}
|
||||||
|
|
||||||
|
const action = params.action === 'undeploy' ? 'undeploy' : 'deploy'
|
||||||
|
if (action === 'undeploy') {
|
||||||
|
const existing = await db.select().from(chat).where(eq(chat.workflowId, workflowId)).limit(1)
|
||||||
|
if (!existing.length) {
|
||||||
|
return { success: false, error: 'No active chat deployment found for this workflow' }
|
||||||
|
}
|
||||||
|
const { hasAccess } = await checkChatAccess(existing[0].id, context.userId)
|
||||||
|
if (!hasAccess) {
|
||||||
|
return { success: false, error: 'Unauthorized chat access' }
|
||||||
|
}
|
||||||
|
await db.delete(chat).where(eq(chat.id, existing[0].id))
|
||||||
|
return { success: true, output: { success: true, action: 'undeploy', isDeployed: false } }
|
||||||
|
}
|
||||||
|
|
||||||
|
const { hasAccess } = await checkWorkflowAccessForChatCreation(workflowId, context.userId)
|
||||||
|
if (!hasAccess) {
|
||||||
|
return { success: false, error: 'Workflow not found or access denied' }
|
||||||
|
}
|
||||||
|
|
||||||
|
const existing = await db.select().from(chat).where(eq(chat.workflowId, workflowId)).limit(1)
|
||||||
|
const existingDeployment = existing[0] || null
|
||||||
|
|
||||||
|
const identifier = String(params.identifier || existingDeployment?.identifier || '').trim()
|
||||||
|
const title = String(params.title || existingDeployment?.title || '').trim()
|
||||||
|
if (!identifier || !title) {
|
||||||
|
return { success: false, error: 'Chat identifier and title are required' }
|
||||||
|
}
|
||||||
|
|
||||||
|
const identifierPattern = /^[a-z0-9-]+$/
|
||||||
|
if (!identifierPattern.test(identifier)) {
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
error: 'Identifier can only contain lowercase letters, numbers, and hyphens',
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const existingIdentifier = await db
|
||||||
|
.select()
|
||||||
|
.from(chat)
|
||||||
|
.where(eq(chat.identifier, identifier))
|
||||||
|
.limit(1)
|
||||||
|
if (existingIdentifier.length > 0 && existingIdentifier[0].id !== existingDeployment?.id) {
|
||||||
|
return { success: false, error: 'Identifier already in use' }
|
||||||
|
}
|
||||||
|
|
||||||
|
const deployResult = await deployWorkflow({
|
||||||
|
workflowId,
|
||||||
|
deployedBy: context.userId,
|
||||||
|
})
|
||||||
|
if (!deployResult.success) {
|
||||||
|
return { success: false, error: deployResult.error || 'Failed to deploy workflow' }
|
||||||
|
}
|
||||||
|
|
||||||
|
const existingCustomizations =
|
||||||
|
(existingDeployment?.customizations as
|
||||||
|
| { primaryColor?: string; welcomeMessage?: string }
|
||||||
|
| undefined) || {}
|
||||||
|
|
||||||
|
const payload = {
|
||||||
|
workflowId,
|
||||||
|
identifier,
|
||||||
|
title,
|
||||||
|
description: String(params.description || existingDeployment?.description || ''),
|
||||||
|
customizations: {
|
||||||
|
primaryColor:
|
||||||
|
params.customizations?.primaryColor ||
|
||||||
|
existingCustomizations.primaryColor ||
|
||||||
|
'var(--brand-primary-hover-hex)',
|
||||||
|
welcomeMessage:
|
||||||
|
params.customizations?.welcomeMessage ||
|
||||||
|
existingCustomizations.welcomeMessage ||
|
||||||
|
'Hi there! How can I help you today?',
|
||||||
|
},
|
||||||
|
authType: params.authType || existingDeployment?.authType || 'public',
|
||||||
|
password: params.password,
|
||||||
|
allowedEmails: params.allowedEmails || existingDeployment?.allowedEmails || [],
|
||||||
|
outputConfigs: params.outputConfigs || existingDeployment?.outputConfigs || [],
|
||||||
|
}
|
||||||
|
|
||||||
|
if (existingDeployment) {
|
||||||
|
await db
|
||||||
|
.update(chat)
|
||||||
|
.set({
|
||||||
|
identifier: payload.identifier,
|
||||||
|
title: payload.title,
|
||||||
|
description: payload.description,
|
||||||
|
customizations: payload.customizations,
|
||||||
|
authType: payload.authType,
|
||||||
|
password: payload.password || existingDeployment.password,
|
||||||
|
allowedEmails:
|
||||||
|
payload.authType === 'email' || payload.authType === 'sso' ? payload.allowedEmails : [],
|
||||||
|
outputConfigs: payload.outputConfigs,
|
||||||
|
updatedAt: new Date(),
|
||||||
|
})
|
||||||
|
.where(eq(chat.id, existingDeployment.id))
|
||||||
|
} else {
|
||||||
|
await db.insert(chat).values({
|
||||||
|
id: crypto.randomUUID(),
|
||||||
|
workflowId,
|
||||||
|
userId: context.userId,
|
||||||
|
identifier: payload.identifier,
|
||||||
|
title: payload.title,
|
||||||
|
description: payload.description,
|
||||||
|
customizations: payload.customizations,
|
||||||
|
isActive: true,
|
||||||
|
authType: payload.authType,
|
||||||
|
password: payload.password || null,
|
||||||
|
allowedEmails:
|
||||||
|
payload.authType === 'email' || payload.authType === 'sso' ? payload.allowedEmails : [],
|
||||||
|
outputConfigs: payload.outputConfigs,
|
||||||
|
createdAt: new Date(),
|
||||||
|
updatedAt: new Date(),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
success: true,
|
||||||
|
output: { success: true, action: 'deploy', isDeployed: true, identifier },
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
return { success: false, error: error instanceof Error ? error.message : String(error) }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function executeDeployMcp(
|
||||||
|
params: DeployMcpParams,
|
||||||
|
context: ExecutionContext
|
||||||
|
): Promise<ToolCallResult> {
|
||||||
|
try {
|
||||||
|
const workflowId = params.workflowId || context.workflowId
|
||||||
|
if (!workflowId) {
|
||||||
|
return { success: false, error: 'workflowId is required' }
|
||||||
|
}
|
||||||
|
|
||||||
|
const { workflow: workflowRecord } = await ensureWorkflowAccess(workflowId, context.userId)
|
||||||
|
const workspaceId = workflowRecord.workspaceId
|
||||||
|
if (!workspaceId) {
|
||||||
|
return { success: false, error: 'workspaceId is required' }
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!workflowRecord.isDeployed) {
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
error: 'Workflow must be deployed before adding as an MCP tool. Use deploy_api first.',
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const serverId = params.serverId
|
||||||
|
if (!serverId) {
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
error: 'serverId is required. Use list_workspace_mcp_servers to get available servers.',
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const existingTool = await db
|
||||||
|
.select()
|
||||||
|
.from(workflowMcpTool)
|
||||||
|
.where(
|
||||||
|
and(eq(workflowMcpTool.serverId, serverId), eq(workflowMcpTool.workflowId, workflowId))
|
||||||
|
)
|
||||||
|
.limit(1)
|
||||||
|
|
||||||
|
const toolName = sanitizeToolName(
|
||||||
|
params.toolName || workflowRecord.name || `workflow_${workflowId}`
|
||||||
|
)
|
||||||
|
const toolDescription =
|
||||||
|
params.toolDescription ||
|
||||||
|
workflowRecord.description ||
|
||||||
|
`Execute ${workflowRecord.name} workflow`
|
||||||
|
const parameterSchema = params.parameterSchema || {}
|
||||||
|
|
||||||
|
if (existingTool.length > 0) {
|
||||||
|
const toolId = existingTool[0].id
|
||||||
|
await db
|
||||||
|
.update(workflowMcpTool)
|
||||||
|
.set({
|
||||||
|
toolName,
|
||||||
|
toolDescription,
|
||||||
|
parameterSchema,
|
||||||
|
updatedAt: new Date(),
|
||||||
|
})
|
||||||
|
.where(eq(workflowMcpTool.id, toolId))
|
||||||
|
return { success: true, output: { toolId, toolName, toolDescription, updated: true } }
|
||||||
|
}
|
||||||
|
|
||||||
|
const toolId = crypto.randomUUID()
|
||||||
|
await db.insert(workflowMcpTool).values({
|
||||||
|
id: toolId,
|
||||||
|
serverId,
|
||||||
|
workflowId,
|
||||||
|
toolName,
|
||||||
|
toolDescription,
|
||||||
|
parameterSchema,
|
||||||
|
createdAt: new Date(),
|
||||||
|
updatedAt: new Date(),
|
||||||
|
})
|
||||||
|
|
||||||
|
return { success: true, output: { toolId, toolName, toolDescription, updated: false } }
|
||||||
|
} catch (error) {
|
||||||
|
return { success: false, error: error instanceof Error ? error.message : String(error) }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function executeRedeploy(context: ExecutionContext): Promise<ToolCallResult> {
|
||||||
|
try {
|
||||||
|
const workflowId = context.workflowId
|
||||||
|
if (!workflowId) {
|
||||||
|
return { success: false, error: 'workflowId is required' }
|
||||||
|
}
|
||||||
|
await ensureWorkflowAccess(workflowId, context.userId)
|
||||||
|
|
||||||
|
const result = await deployWorkflow({ workflowId, deployedBy: context.userId })
|
||||||
|
if (!result.success) {
|
||||||
|
return { success: false, error: result.error || 'Failed to redeploy workflow' }
|
||||||
|
}
|
||||||
|
return {
|
||||||
|
success: true,
|
||||||
|
output: { workflowId, deployedAt: result.deployedAt || null, version: result.version },
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
return { success: false, error: error instanceof Error ? error.message : String(error) }
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,2 @@
|
|||||||
|
export * from './deploy'
|
||||||
|
export * from './manage'
|
||||||
@@ -0,0 +1,226 @@
|
|||||||
|
import crypto from 'crypto'
|
||||||
|
import { db } from '@sim/db'
|
||||||
|
import { chat, workflow, workflowMcpServer, workflowMcpTool } from '@sim/db/schema'
|
||||||
|
import { eq, inArray } from 'drizzle-orm'
|
||||||
|
import type { ExecutionContext, ToolCallResult } from '@/lib/copilot/orchestrator/types'
|
||||||
|
import { sanitizeToolName } from '@/lib/mcp/workflow-tool-schema'
|
||||||
|
import { hasValidStartBlock } from '@/lib/workflows/triggers/trigger-utils.server'
|
||||||
|
import { ensureWorkflowAccess } from '../access'
|
||||||
|
import type {
|
||||||
|
CheckDeploymentStatusParams,
|
||||||
|
CreateWorkspaceMcpServerParams,
|
||||||
|
ListWorkspaceMcpServersParams,
|
||||||
|
} from '../param-types'
|
||||||
|
|
||||||
|
export async function executeCheckDeploymentStatus(
|
||||||
|
params: CheckDeploymentStatusParams,
|
||||||
|
context: ExecutionContext
|
||||||
|
): Promise<ToolCallResult> {
|
||||||
|
try {
|
||||||
|
const workflowId = params.workflowId || context.workflowId
|
||||||
|
if (!workflowId) {
|
||||||
|
return { success: false, error: 'workflowId is required' }
|
||||||
|
}
|
||||||
|
const { workflow: workflowRecord } = await ensureWorkflowAccess(workflowId, context.userId)
|
||||||
|
const workspaceId = workflowRecord.workspaceId
|
||||||
|
|
||||||
|
const [apiDeploy, chatDeploy] = await Promise.all([
|
||||||
|
db.select().from(workflow).where(eq(workflow.id, workflowId)).limit(1),
|
||||||
|
db.select().from(chat).where(eq(chat.workflowId, workflowId)).limit(1),
|
||||||
|
])
|
||||||
|
|
||||||
|
const isApiDeployed = apiDeploy[0]?.isDeployed || false
|
||||||
|
const apiDetails = {
|
||||||
|
isDeployed: isApiDeployed,
|
||||||
|
deployedAt: apiDeploy[0]?.deployedAt || null,
|
||||||
|
endpoint: isApiDeployed ? `/api/workflows/${workflowId}/execute` : null,
|
||||||
|
apiKey: workflowRecord.workspaceId ? 'Workspace API keys' : 'Personal API keys',
|
||||||
|
needsRedeployment: false,
|
||||||
|
}
|
||||||
|
|
||||||
|
const isChatDeployed = !!chatDeploy[0]
|
||||||
|
const chatCustomizations =
|
||||||
|
(chatDeploy[0]?.customizations as
|
||||||
|
| { welcomeMessage?: string; primaryColor?: string }
|
||||||
|
| undefined) || {}
|
||||||
|
const chatDetails = {
|
||||||
|
isDeployed: isChatDeployed,
|
||||||
|
chatId: chatDeploy[0]?.id || null,
|
||||||
|
identifier: chatDeploy[0]?.identifier || null,
|
||||||
|
chatUrl: isChatDeployed ? `/chat/${chatDeploy[0]?.identifier}` : null,
|
||||||
|
title: chatDeploy[0]?.title || null,
|
||||||
|
description: chatDeploy[0]?.description || null,
|
||||||
|
authType: chatDeploy[0]?.authType || null,
|
||||||
|
allowedEmails: chatDeploy[0]?.allowedEmails || null,
|
||||||
|
outputConfigs: chatDeploy[0]?.outputConfigs || null,
|
||||||
|
welcomeMessage: chatCustomizations.welcomeMessage || null,
|
||||||
|
primaryColor: chatCustomizations.primaryColor || null,
|
||||||
|
hasPassword: Boolean(chatDeploy[0]?.password),
|
||||||
|
}
|
||||||
|
|
||||||
|
const mcpDetails: {
|
||||||
|
isDeployed: boolean
|
||||||
|
servers: Array<{
|
||||||
|
serverId: string
|
||||||
|
serverName: string
|
||||||
|
toolName: string
|
||||||
|
toolDescription: string | null
|
||||||
|
parameterSchema: unknown
|
||||||
|
toolId: string
|
||||||
|
}>
|
||||||
|
} = { isDeployed: false, servers: [] }
|
||||||
|
if (workspaceId) {
|
||||||
|
const servers = await db
|
||||||
|
.select({
|
||||||
|
serverId: workflowMcpServer.id,
|
||||||
|
serverName: workflowMcpServer.name,
|
||||||
|
toolName: workflowMcpTool.toolName,
|
||||||
|
toolDescription: workflowMcpTool.toolDescription,
|
||||||
|
parameterSchema: workflowMcpTool.parameterSchema,
|
||||||
|
toolId: workflowMcpTool.id,
|
||||||
|
})
|
||||||
|
.from(workflowMcpTool)
|
||||||
|
.innerJoin(workflowMcpServer, eq(workflowMcpTool.serverId, workflowMcpServer.id))
|
||||||
|
.where(eq(workflowMcpTool.workflowId, workflowId))
|
||||||
|
|
||||||
|
if (servers.length > 0) {
|
||||||
|
mcpDetails.isDeployed = true
|
||||||
|
mcpDetails.servers = servers
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const isDeployed = apiDetails.isDeployed || chatDetails.isDeployed || mcpDetails.isDeployed
|
||||||
|
return {
|
||||||
|
success: true,
|
||||||
|
output: { isDeployed, api: apiDetails, chat: chatDetails, mcp: mcpDetails },
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
return { success: false, error: error instanceof Error ? error.message : String(error) }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function executeListWorkspaceMcpServers(
|
||||||
|
params: ListWorkspaceMcpServersParams,
|
||||||
|
context: ExecutionContext
|
||||||
|
): Promise<ToolCallResult> {
|
||||||
|
try {
|
||||||
|
const workflowId = params.workflowId || context.workflowId
|
||||||
|
if (!workflowId) {
|
||||||
|
return { success: false, error: 'workflowId is required' }
|
||||||
|
}
|
||||||
|
const { workflow: workflowRecord } = await ensureWorkflowAccess(workflowId, context.userId)
|
||||||
|
const workspaceId = workflowRecord.workspaceId
|
||||||
|
if (!workspaceId) {
|
||||||
|
return { success: false, error: 'workspaceId is required' }
|
||||||
|
}
|
||||||
|
|
||||||
|
const servers = await db
|
||||||
|
.select({
|
||||||
|
id: workflowMcpServer.id,
|
||||||
|
name: workflowMcpServer.name,
|
||||||
|
description: workflowMcpServer.description,
|
||||||
|
})
|
||||||
|
.from(workflowMcpServer)
|
||||||
|
.where(eq(workflowMcpServer.workspaceId, workspaceId))
|
||||||
|
|
||||||
|
const serverIds = servers.map((server) => server.id)
|
||||||
|
const tools =
|
||||||
|
serverIds.length > 0
|
||||||
|
? await db
|
||||||
|
.select({
|
||||||
|
serverId: workflowMcpTool.serverId,
|
||||||
|
toolName: workflowMcpTool.toolName,
|
||||||
|
})
|
||||||
|
.from(workflowMcpTool)
|
||||||
|
.where(inArray(workflowMcpTool.serverId, serverIds))
|
||||||
|
: []
|
||||||
|
|
||||||
|
const toolNamesByServer: Record<string, string[]> = {}
|
||||||
|
for (const tool of tools) {
|
||||||
|
if (!toolNamesByServer[tool.serverId]) {
|
||||||
|
toolNamesByServer[tool.serverId] = []
|
||||||
|
}
|
||||||
|
toolNamesByServer[tool.serverId].push(tool.toolName)
|
||||||
|
}
|
||||||
|
|
||||||
|
const serversWithToolNames = servers.map((server) => ({
|
||||||
|
...server,
|
||||||
|
toolCount: toolNamesByServer[server.id]?.length || 0,
|
||||||
|
toolNames: toolNamesByServer[server.id] || [],
|
||||||
|
}))
|
||||||
|
|
||||||
|
return { success: true, output: { servers: serversWithToolNames, count: servers.length } }
|
||||||
|
} catch (error) {
|
||||||
|
return { success: false, error: error instanceof Error ? error.message : String(error) }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function executeCreateWorkspaceMcpServer(
|
||||||
|
params: CreateWorkspaceMcpServerParams,
|
||||||
|
context: ExecutionContext
|
||||||
|
): Promise<ToolCallResult> {
|
||||||
|
try {
|
||||||
|
const workflowId = params.workflowId || context.workflowId
|
||||||
|
if (!workflowId) {
|
||||||
|
return { success: false, error: 'workflowId is required' }
|
||||||
|
}
|
||||||
|
const { workflow: workflowRecord } = await ensureWorkflowAccess(workflowId, context.userId)
|
||||||
|
const workspaceId = workflowRecord.workspaceId
|
||||||
|
if (!workspaceId) {
|
||||||
|
return { success: false, error: 'workspaceId is required' }
|
||||||
|
}
|
||||||
|
|
||||||
|
const name = params.name?.trim()
|
||||||
|
if (!name) {
|
||||||
|
return { success: false, error: 'name is required' }
|
||||||
|
}
|
||||||
|
|
||||||
|
const serverId = crypto.randomUUID()
|
||||||
|
const [server] = await db
|
||||||
|
.insert(workflowMcpServer)
|
||||||
|
.values({
|
||||||
|
id: serverId,
|
||||||
|
workspaceId,
|
||||||
|
createdBy: context.userId,
|
||||||
|
name,
|
||||||
|
description: params.description?.trim() || null,
|
||||||
|
isPublic: params.isPublic ?? false,
|
||||||
|
createdAt: new Date(),
|
||||||
|
updatedAt: new Date(),
|
||||||
|
})
|
||||||
|
.returning()
|
||||||
|
|
||||||
|
const workflowIds: string[] = params.workflowIds || []
|
||||||
|
const addedTools: Array<{ workflowId: string; toolName: string }> = []
|
||||||
|
|
||||||
|
if (workflowIds.length > 0) {
|
||||||
|
const workflows = await db.select().from(workflow).where(inArray(workflow.id, workflowIds))
|
||||||
|
|
||||||
|
for (const wf of workflows) {
|
||||||
|
if (wf.workspaceId !== workspaceId || !wf.isDeployed) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
const hasStartBlock = await hasValidStartBlock(wf.id)
|
||||||
|
if (!hasStartBlock) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
const toolName = sanitizeToolName(wf.name || `workflow_${wf.id}`)
|
||||||
|
await db.insert(workflowMcpTool).values({
|
||||||
|
id: crypto.randomUUID(),
|
||||||
|
serverId,
|
||||||
|
workflowId: wf.id,
|
||||||
|
toolName,
|
||||||
|
toolDescription: wf.description || `Execute ${wf.name} workflow`,
|
||||||
|
parameterSchema: {},
|
||||||
|
createdAt: new Date(),
|
||||||
|
updatedAt: new Date(),
|
||||||
|
})
|
||||||
|
addedTools.push({ workflowId: wf.id, toolName })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return { success: true, output: { server, addedTools } }
|
||||||
|
} catch (error) {
|
||||||
|
return { success: false, error: error instanceof Error ? error.message : String(error) }
|
||||||
|
}
|
||||||
|
}
|
||||||
302
apps/sim/lib/copilot/orchestrator/tool-executor/index.ts
Normal file
302
apps/sim/lib/copilot/orchestrator/tool-executor/index.ts
Normal file
@@ -0,0 +1,302 @@
|
|||||||
|
import { db } from '@sim/db'
|
||||||
|
import { workflow } from '@sim/db/schema'
|
||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { eq } from 'drizzle-orm'
|
||||||
|
import { SIM_AGENT_API_URL } from '@/lib/copilot/constants'
|
||||||
|
import type {
|
||||||
|
ExecutionContext,
|
||||||
|
ToolCallResult,
|
||||||
|
ToolCallState,
|
||||||
|
} from '@/lib/copilot/orchestrator/types'
|
||||||
|
import { routeExecution } from '@/lib/copilot/tools/server/router'
|
||||||
|
import { env } from '@/lib/core/config/env'
|
||||||
|
import { getEffectiveDecryptedEnv } from '@/lib/environment/utils'
|
||||||
|
import { getTool, resolveToolId } from '@/tools/utils'
|
||||||
|
import {
|
||||||
|
executeCheckDeploymentStatus,
|
||||||
|
executeCreateWorkspaceMcpServer,
|
||||||
|
executeDeployApi,
|
||||||
|
executeDeployChat,
|
||||||
|
executeDeployMcp,
|
||||||
|
executeListWorkspaceMcpServers,
|
||||||
|
executeRedeploy,
|
||||||
|
} from './deployment-tools'
|
||||||
|
import { executeIntegrationToolDirect } from './integration-tools'
|
||||||
|
import type {
|
||||||
|
CheckDeploymentStatusParams,
|
||||||
|
CreateFolderParams,
|
||||||
|
CreateWorkflowParams,
|
||||||
|
CreateWorkspaceMcpServerParams,
|
||||||
|
DeployApiParams,
|
||||||
|
DeployChatParams,
|
||||||
|
DeployMcpParams,
|
||||||
|
GenerateApiKeyParams,
|
||||||
|
GetBlockOutputsParams,
|
||||||
|
GetBlockUpstreamReferencesParams,
|
||||||
|
GetDeployedWorkflowStateParams,
|
||||||
|
GetUserWorkflowParams,
|
||||||
|
GetWorkflowDataParams,
|
||||||
|
GetWorkflowFromNameParams,
|
||||||
|
ListFoldersParams,
|
||||||
|
ListUserWorkflowsParams,
|
||||||
|
ListWorkspaceMcpServersParams,
|
||||||
|
MoveFolderParams,
|
||||||
|
MoveWorkflowParams,
|
||||||
|
RenameWorkflowParams,
|
||||||
|
RunBlockParams,
|
||||||
|
RunFromBlockParams,
|
||||||
|
RunWorkflowParams,
|
||||||
|
RunWorkflowUntilBlockParams,
|
||||||
|
SetGlobalWorkflowVariablesParams,
|
||||||
|
} from './param-types'
|
||||||
|
import { PLATFORM_ACTIONS_CONTENT } from './platform-actions'
|
||||||
|
import {
|
||||||
|
executeCreateFolder,
|
||||||
|
executeCreateWorkflow,
|
||||||
|
executeGenerateApiKey,
|
||||||
|
executeGetBlockOutputs,
|
||||||
|
executeGetBlockUpstreamReferences,
|
||||||
|
executeGetDeployedWorkflowState,
|
||||||
|
executeGetUserWorkflow,
|
||||||
|
executeGetWorkflowData,
|
||||||
|
executeGetWorkflowFromName,
|
||||||
|
executeListFolders,
|
||||||
|
executeListUserWorkflows,
|
||||||
|
executeListUserWorkspaces,
|
||||||
|
executeMoveFolder,
|
||||||
|
executeMoveWorkflow,
|
||||||
|
executeRenameWorkflow,
|
||||||
|
executeRunBlock,
|
||||||
|
executeRunFromBlock,
|
||||||
|
executeRunWorkflow,
|
||||||
|
executeRunWorkflowUntilBlock,
|
||||||
|
executeSetGlobalWorkflowVariables,
|
||||||
|
} from './workflow-tools'
|
||||||
|
|
||||||
|
// Module-wide logger for all tool-executor code paths.
const logger = createLogger('CopilotToolExecutor')

/**
 * Copilot tool names that are executed through the server tool router
 * (routeExecution) rather than the workflow handler table or the
 * integration tool registry. Checked first in executeToolServerSide.
 */
const SERVER_TOOLS = new Set<string>([
  'get_blocks_and_tools',
  'get_blocks_metadata',
  'get_block_options',
  'get_block_config',
  'get_trigger_blocks',
  'edit_workflow',
  'get_workflow_console',
  'search_documentation',
  'search_online',
  'set_environment_variables',
  'get_credentials',
  'make_api_request',
  'knowledge_base',
])
|
||||||
|
|
||||||
|
/**
 * Dispatch table mapping copilot tool names to their Sim-side handlers.
 * Each handler narrows the loosely-typed params to the specific param
 * interface its implementation expects and delegates to it. Tools that
 * ignore their params use the `_p` convention; tools with no params at all
 * (get_platform_actions) resolve immediately with static content.
 */
const SIM_WORKFLOW_TOOL_HANDLERS: Record<
  string,
  (params: Record<string, unknown>, context: ExecutionContext) => Promise<ToolCallResult>
> = {
  get_user_workflow: (p, c) => executeGetUserWorkflow(p as GetUserWorkflowParams, c),
  get_workflow_from_name: (p, c) => executeGetWorkflowFromName(p as GetWorkflowFromNameParams, c),
  list_user_workflows: (p, c) => executeListUserWorkflows(p as ListUserWorkflowsParams, c),
  list_user_workspaces: (_p, c) => executeListUserWorkspaces(c),
  list_folders: (p, c) => executeListFolders(p as ListFoldersParams, c),
  create_workflow: (p, c) => executeCreateWorkflow(p as CreateWorkflowParams, c),
  create_folder: (p, c) => executeCreateFolder(p as CreateFolderParams, c),
  rename_workflow: (p, c) => executeRenameWorkflow(p as unknown as RenameWorkflowParams, c),
  move_workflow: (p, c) => executeMoveWorkflow(p as unknown as MoveWorkflowParams, c),
  move_folder: (p, c) => executeMoveFolder(p as unknown as MoveFolderParams, c),
  get_workflow_data: (p, c) => executeGetWorkflowData(p as GetWorkflowDataParams, c),
  get_block_outputs: (p, c) => executeGetBlockOutputs(p as GetBlockOutputsParams, c),
  get_block_upstream_references: (p, c) =>
    executeGetBlockUpstreamReferences(p as unknown as GetBlockUpstreamReferencesParams, c),
  run_workflow: (p, c) => executeRunWorkflow(p as RunWorkflowParams, c),
  run_workflow_until_block: (p, c) =>
    executeRunWorkflowUntilBlock(p as unknown as RunWorkflowUntilBlockParams, c),
  run_from_block: (p, c) => executeRunFromBlock(p as unknown as RunFromBlockParams, c),
  run_block: (p, c) => executeRunBlock(p as unknown as RunBlockParams, c),
  get_deployed_workflow_state: (p, c) =>
    executeGetDeployedWorkflowState(p as GetDeployedWorkflowStateParams, c),
  generate_api_key: (p, c) => executeGenerateApiKey(p as unknown as GenerateApiKeyParams, c),
  get_platform_actions: () =>
    Promise.resolve({
      success: true,
      output: { content: PLATFORM_ACTIONS_CONTENT },
    }),
  set_global_workflow_variables: (p, c) =>
    executeSetGlobalWorkflowVariables(p as SetGlobalWorkflowVariablesParams, c),
  deploy_api: (p, c) => executeDeployApi(p as DeployApiParams, c),
  deploy_chat: (p, c) => executeDeployChat(p as DeployChatParams, c),
  deploy_mcp: (p, c) => executeDeployMcp(p as DeployMcpParams, c),
  redeploy: (_p, c) => executeRedeploy(c),
  check_deployment_status: (p, c) =>
    executeCheckDeploymentStatus(p as CheckDeploymentStatusParams, c),
  list_workspace_mcp_servers: (p, c) =>
    executeListWorkspaceMcpServers(p as ListWorkspaceMcpServersParams, c),
  create_workspace_mcp_server: (p, c) =>
    executeCreateWorkspaceMcpServer(p as CreateWorkspaceMcpServerParams, c),
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check whether a tool can be executed on the Sim (TypeScript) side.
|
||||||
|
*
|
||||||
|
* Tools that are only available on the Go backend (e.g. search_patterns,
|
||||||
|
* search_errors, remember_debug) will return false. The subagent tool_call
|
||||||
|
* handler uses this to decide whether to execute a tool locally or let the
|
||||||
|
* Go backend's own tool_result SSE event handle it.
|
||||||
|
*/
|
||||||
|
export function isToolAvailableOnSimSide(toolName: string): boolean {
|
||||||
|
if (SERVER_TOOLS.has(toolName)) return true
|
||||||
|
if (toolName in SIM_WORKFLOW_TOOL_HANDLERS) return true
|
||||||
|
const resolvedToolName = resolveToolId(toolName)
|
||||||
|
return !!getTool(resolvedToolName)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Execute a tool server-side without calling internal routes.
|
||||||
|
*/
|
||||||
|
export async function executeToolServerSide(
|
||||||
|
toolCall: ToolCallState,
|
||||||
|
context: ExecutionContext
|
||||||
|
): Promise<ToolCallResult> {
|
||||||
|
const toolName = toolCall.name
|
||||||
|
const resolvedToolName = resolveToolId(toolName)
|
||||||
|
|
||||||
|
if (SERVER_TOOLS.has(toolName)) {
|
||||||
|
return executeServerToolDirect(toolName, toolCall.params || {}, context)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (toolName in SIM_WORKFLOW_TOOL_HANDLERS) {
|
||||||
|
return executeSimWorkflowTool(toolName, toolCall.params || {}, context)
|
||||||
|
}
|
||||||
|
|
||||||
|
const toolConfig = getTool(resolvedToolName)
|
||||||
|
if (!toolConfig) {
|
||||||
|
logger.warn('Tool not found in registry', { toolName, resolvedToolName })
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
error: `Tool not found: ${toolName}`,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return executeIntegrationToolDirect(toolCall, toolConfig, context)
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Execute a server tool directly via the server tool router.
|
||||||
|
*/
|
||||||
|
async function executeServerToolDirect(
|
||||||
|
toolName: string,
|
||||||
|
params: Record<string, unknown>,
|
||||||
|
context: ExecutionContext
|
||||||
|
): Promise<ToolCallResult> {
|
||||||
|
try {
|
||||||
|
// Inject workflowId from context if not provided in params
|
||||||
|
// This is needed for tools like set_environment_variables that require workflowId
|
||||||
|
const enrichedParams = { ...params }
|
||||||
|
if (!enrichedParams.workflowId && context.workflowId) {
|
||||||
|
enrichedParams.workflowId = context.workflowId
|
||||||
|
}
|
||||||
|
|
||||||
|
const result = await routeExecution(toolName, enrichedParams, { userId: context.userId })
|
||||||
|
return { success: true, output: result }
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('Server tool execution failed', {
|
||||||
|
toolName,
|
||||||
|
error: error instanceof Error ? error.message : String(error),
|
||||||
|
})
|
||||||
|
return {
|
||||||
|
success: false,
|
||||||
|
error: error instanceof Error ? error.message : 'Server tool execution failed',
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async function executeSimWorkflowTool(
|
||||||
|
toolName: string,
|
||||||
|
params: Record<string, unknown>,
|
||||||
|
context: ExecutionContext
|
||||||
|
): Promise<ToolCallResult> {
|
||||||
|
const handler = SIM_WORKFLOW_TOOL_HANDLERS[toolName]
|
||||||
|
if (!handler) return { success: false, error: `Unsupported workflow tool: ${toolName}` }
|
||||||
|
return handler(params, context)
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Timeout for the mark-complete POST to the copilot backend (30 s). */
|
||||||
|
const MARK_COMPLETE_TIMEOUT_MS = 30_000
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Notify the copilot backend that a tool has completed.
|
||||||
|
*/
|
||||||
|
export async function markToolComplete(
|
||||||
|
toolCallId: string,
|
||||||
|
toolName: string,
|
||||||
|
status: number,
|
||||||
|
message?: unknown,
|
||||||
|
data?: unknown
|
||||||
|
): Promise<boolean> {
|
||||||
|
try {
|
||||||
|
const controller = new AbortController()
|
||||||
|
const timeoutId = setTimeout(() => controller.abort(), MARK_COMPLETE_TIMEOUT_MS)
|
||||||
|
|
||||||
|
try {
|
||||||
|
const response = await fetch(`${SIM_AGENT_API_URL}/api/tools/mark-complete`, {
|
||||||
|
method: 'POST',
|
||||||
|
headers: {
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
...(env.COPILOT_API_KEY ? { 'x-api-key': env.COPILOT_API_KEY } : {}),
|
||||||
|
},
|
||||||
|
body: JSON.stringify({
|
||||||
|
id: toolCallId,
|
||||||
|
name: toolName,
|
||||||
|
status,
|
||||||
|
message,
|
||||||
|
data,
|
||||||
|
}),
|
||||||
|
signal: controller.signal,
|
||||||
|
})
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
logger.warn('Mark-complete call failed', { toolCallId, toolName, status: response.status })
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
return true
|
||||||
|
} finally {
|
||||||
|
clearTimeout(timeoutId)
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
const isTimeout =
|
||||||
|
error instanceof DOMException && error.name === 'AbortError'
|
||||||
|
logger.error('Mark-complete call failed', {
|
||||||
|
toolCallId,
|
||||||
|
toolName,
|
||||||
|
timedOut: isTimeout,
|
||||||
|
error: error instanceof Error ? error.message : String(error),
|
||||||
|
})
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Prepare execution context with cached environment values.
|
||||||
|
*/
|
||||||
|
export async function prepareExecutionContext(
|
||||||
|
userId: string,
|
||||||
|
workflowId: string
|
||||||
|
): Promise<ExecutionContext> {
|
||||||
|
const workflowResult = await db
|
||||||
|
.select({ workspaceId: workflow.workspaceId })
|
||||||
|
.from(workflow)
|
||||||
|
.where(eq(workflow.id, workflowId))
|
||||||
|
.limit(1)
|
||||||
|
const workspaceId = workflowResult[0]?.workspaceId ?? undefined
|
||||||
|
|
||||||
|
const decryptedEnvVars = await getEffectiveDecryptedEnv(userId, workspaceId)
|
||||||
|
|
||||||
|
return {
|
||||||
|
userId,
|
||||||
|
workflowId,
|
||||||
|
workspaceId,
|
||||||
|
decryptedEnvVars,
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,105 @@
|
|||||||
|
import { db } from '@sim/db'
|
||||||
|
import { account, workflow } from '@sim/db/schema'
|
||||||
|
import { and, eq } from 'drizzle-orm'
|
||||||
|
import type {
|
||||||
|
ExecutionContext,
|
||||||
|
ToolCallResult,
|
||||||
|
ToolCallState,
|
||||||
|
} from '@/lib/copilot/orchestrator/types'
|
||||||
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
|
import { getEffectiveDecryptedEnv } from '@/lib/environment/utils'
|
||||||
|
import { refreshTokenIfNeeded } from '@/app/api/auth/oauth/utils'
|
||||||
|
import { resolveEnvVarReferences } from '@/executor/utils/reference-validation'
|
||||||
|
import { executeTool } from '@/tools'
|
||||||
|
import { resolveToolId } from '@/tools/utils'
|
||||||
|
|
||||||
|
/**
 * Execute an integration tool directly through the tool registry.
 *
 * Resolves env-var references in the tool arguments, attaches OAuth or API-key
 * credentials required by the tool's config, then delegates to executeTool.
 *
 * @param toolCall - The tool call (name + raw params) to execute.
 * @param toolConfig - Minimal slice of the tool's registry config: OAuth
 *   requirements and whether an apiKey param is required.
 * @param context - Execution context (userId, workflowId, optional
 *   workspaceId and pre-decrypted env vars).
 * @returns ToolCallResult mirroring executeTool's success/output/error.
 */
export async function executeIntegrationToolDirect(
  toolCall: ToolCallState,
  toolConfig: {
    oauth?: { required?: boolean; provider?: string }
    params?: { apiKey?: { required?: boolean } }
  },
  context: ExecutionContext
): Promise<ToolCallResult> {
  const { userId, workflowId } = context
  const toolName = resolveToolId(toolCall.name)
  const toolArgs = toolCall.params || {}

  // Resolve the workspace lazily: only hit the DB when the context did not
  // already carry a workspaceId and we have a workflow to look it up from.
  let workspaceId = context.workspaceId
  if (!workspaceId && workflowId) {
    const workflowResult = await db
      .select({ workspaceId: workflow.workspaceId })
      .from(workflow)
      .where(eq(workflow.id, workflowId))
      .limit(1)
    workspaceId = workflowResult[0]?.workspaceId ?? undefined
  }

  // Prefer env vars cached on the context; otherwise decrypt them now.
  const decryptedEnvVars =
    context.decryptedEnvVars || (await getEffectiveDecryptedEnv(userId, workspaceId))

  // Deep resolution walks nested objects to replace {{ENV_VAR}} references.
  // Safe because tool arguments originate from the LLM (not direct user input)
  // and env vars belong to the user themselves.
  const executionParams = resolveEnvVarReferences(toolArgs, decryptedEnvVars, {
    deep: true,
  }) as Record<string, unknown>

  // OAuth-backed tools: look up the user's connected account for the provider,
  // refresh its token if needed, and pass the access token to the tool.
  // NOTE(review): when oauth.required is set but provider is missing, this
  // branch is skipped entirely — confirm that is the intended behavior.
  if (toolConfig.oauth?.required && toolConfig.oauth.provider) {
    const provider = toolConfig.oauth.provider
    const accounts = await db
      .select()
      .from(account)
      .where(and(eq(account.providerId, provider), eq(account.userId, userId)))
      .limit(1)

    if (!accounts.length) {
      return {
        success: false,
        error: `No ${provider} account connected. Please connect your account first.`,
      }
    }

    const acc = accounts[0]
    const requestId = generateRequestId()
    const { accessToken } = await refreshTokenIfNeeded(requestId, acc, acc.id)

    if (!accessToken) {
      return {
        success: false,
        error: `OAuth token not available for ${provider}. Please reconnect your account.`,
      }
    }

    executionParams.accessToken = accessToken
  }

  // API-key-backed tools: fail early with guidance when the key is absent
  // (env-var resolution above would already have substituted a referenced key).
  if (toolConfig.params?.apiKey?.required && !executionParams.apiKey) {
    return {
      success: false,
      error: `API key not provided for ${toolName}. Use {{YOUR_API_KEY_ENV_VAR}} to reference your environment variable.`,
    }
  }

  // Attach caller identity so the tool implementation can scope its work.
  executionParams._context = {
    workflowId,
    userId,
  }

  // function_execute expects extra sandbox inputs; supply env vars and empty
  // workflow-level data plus language/timeout defaults.
  if (toolName === 'function_execute') {
    executionParams.envVars = decryptedEnvVars
    executionParams.workflowVariables = {}
    executionParams.blockData = {}
    executionParams.blockNameMapping = {}
    executionParams.language = executionParams.language || 'javascript'
    executionParams.timeout = executionParams.timeout || 30000
  }

  const result = await executeTool(toolName, executionParams)

  return {
    success: result.success,
    output: result.output,
    error: result.error,
  }
}
|
||||||
187
apps/sim/lib/copilot/orchestrator/tool-executor/param-types.ts
Normal file
187
apps/sim/lib/copilot/orchestrator/tool-executor/param-types.ts
Normal file
@@ -0,0 +1,187 @@
|
|||||||
|
/**
 * Typed parameter interfaces for tool executor functions.
 * Replaces Record<string, any> with specific shapes based on actual property access.
 */

// === Workflow Query Params ===

/** Params for get_user_workflow. workflowId falls back to the execution context. */
export interface GetUserWorkflowParams {
  workflowId?: string
}

/** Params for get_workflow_from_name (snake_case key as sent by the tool caller). */
export interface GetWorkflowFromNameParams {
  workflow_name?: string
}

/** Params for list_user_workflows; both filters are optional. */
export interface ListUserWorkflowsParams {
  workspaceId?: string
  folderId?: string
}

/**
 * Params for get_workflow_data. Both data_type and dataType are accepted —
 * presumably to tolerate either casing from callers; verify before removing one.
 */
export interface GetWorkflowDataParams {
  workflowId?: string
  data_type?: string
  dataType?: string
}

/** Params for get_block_outputs. */
export interface GetBlockOutputsParams {
  workflowId?: string
  blockIds?: string[]
}

/** Params for get_block_upstream_references; blockIds is required here. */
export interface GetBlockUpstreamReferencesParams {
  workflowId?: string
  blockIds: string[]
}

/** Params for list_folders. */
export interface ListFoldersParams {
  workspaceId?: string
}

// === Workflow Mutation Params ===

/** Params for create_workflow. */
export interface CreateWorkflowParams {
  name?: string
  workspaceId?: string
  folderId?: string
  description?: string
}

/** Params for create_folder. */
export interface CreateFolderParams {
  name?: string
  workspaceId?: string
  parentId?: string
}

/** Params for run_workflow. workflow_input and input are alternative spellings of the payload. */
export interface RunWorkflowParams {
  workflowId?: string
  workflow_input?: unknown
  input?: unknown
  /** When true, runs the deployed version instead of the draft. Default: false (draft). */
  useDeployedState?: boolean
}

/** Params for run_workflow_until_block. */
export interface RunWorkflowUntilBlockParams {
  workflowId?: string
  workflow_input?: unknown
  input?: unknown
  /** The block ID to stop after. Execution halts once this block completes. */
  stopAfterBlockId: string
  /** When true, runs the deployed version instead of the draft. Default: false (draft). */
  useDeployedState?: boolean
}

/** Params for run_from_block. */
export interface RunFromBlockParams {
  workflowId?: string
  /** The block ID to start execution from. */
  startBlockId: string
  /** Optional execution ID to load the snapshot from. If omitted, uses the latest execution. */
  executionId?: string
  workflow_input?: unknown
  input?: unknown
  useDeployedState?: boolean
}

/** Params for run_block. */
export interface RunBlockParams {
  workflowId?: string
  /** The block ID to run. Only this block executes using cached upstream outputs. */
  blockId: string
  /** Optional execution ID to load the snapshot from. If omitted, uses the latest execution. */
  executionId?: string
  workflow_input?: unknown
  input?: unknown
  useDeployedState?: boolean
}

/** Params for get_deployed_workflow_state. */
export interface GetDeployedWorkflowStateParams {
  workflowId?: string
}

/** Params for generate_api_key; name is required. */
export interface GenerateApiKeyParams {
  name: string
  workspaceId?: string
}

/** A single add/edit/delete operation on a global workflow variable. */
export interface VariableOperation {
  name: string
  operation: 'add' | 'edit' | 'delete'
  value?: unknown
  type?: string
}

/** Params for set_global_workflow_variables. */
export interface SetGlobalWorkflowVariablesParams {
  workflowId?: string
  operations?: VariableOperation[]
}

// === Deployment Params ===

/** Params for deploy_api. */
export interface DeployApiParams {
  workflowId?: string
  action?: 'deploy' | 'undeploy'
}

/** Params for deploy_chat, including optional chat customization fields. */
export interface DeployChatParams {
  workflowId?: string
  action?: 'deploy' | 'undeploy' | 'update'
  identifier?: string
  title?: string
  description?: string
  customizations?: {
    primaryColor?: string
    secondaryColor?: string
    welcomeMessage?: string
    iconUrl?: string
  }
  authType?: 'none' | 'password' | 'public' | 'email' | 'sso'
  password?: string
  subdomain?: string
  allowedEmails?: string[]
  outputConfigs?: unknown[]
}

/** Params for deploy_mcp. */
export interface DeployMcpParams {
  workflowId?: string
  action?: 'deploy' | 'undeploy'
  toolName?: string
  toolDescription?: string
  serverId?: string
  parameterSchema?: Record<string, unknown>
}

/** Params for check_deployment_status. */
export interface CheckDeploymentStatusParams {
  workflowId?: string
}

/** Params for list_workspace_mcp_servers. */
export interface ListWorkspaceMcpServersParams {
  workspaceId?: string
  workflowId?: string
}

/** Params for create_workspace_mcp_server; workflowIds lists workflows to expose as tools. */
export interface CreateWorkspaceMcpServerParams {
  workflowId?: string
  name?: string
  description?: string
  toolName?: string
  toolDescription?: string
  serverName?: string
  isPublic?: boolean
  workflowIds?: string[]
}

// === Workflow Organization Params ===

/** Params for rename_workflow; both fields are required. */
export interface RenameWorkflowParams {
  workflowId: string
  name: string
}

/** Params for move_workflow; a null folderId moves the workflow to the root. */
export interface MoveWorkflowParams {
  workflowId: string
  folderId: string | null
}

/** Params for move_folder; a null parentId moves the folder to the root. */
export interface MoveFolderParams {
  folderId: string
  parentId: string | null
}
|
||||||
@@ -0,0 +1,117 @@
|
|||||||
|
/**
 * Static content for the get_platform_actions tool.
 * Contains the Sim platform quick reference and keyboard shortcuts.
 *
 * NOTE: this template literal is returned verbatim as tool output — the
 * markdown text below is runtime data, not a comment; edit with care.
 */
export const PLATFORM_ACTIONS_CONTENT = `# Sim Platform Quick Reference & Keyboard Shortcuts

## Keyboard Shortcuts
**Mod** = Cmd (macOS) / Ctrl (Windows/Linux). Shortcuts work when canvas is focused.

### Workflow Actions
| Shortcut | Action |
|----------|--------|
| Mod+Enter | Run workflow (or cancel if running) |
| Mod+Z | Undo |
| Mod+Shift+Z | Redo |
| Mod+C | Copy selected blocks |
| Mod+V | Paste blocks |
| Delete/Backspace | Delete selected blocks or edges |
| Shift+L | Auto-layout canvas |
| Mod+Shift+F | Fit to view |
| Mod+Shift+Enter | Accept Copilot changes |

### Panel Navigation
| Shortcut | Action |
|----------|--------|
| C | Focus Copilot tab |
| T | Focus Toolbar tab |
| E | Focus Editor tab |
| Mod+F | Focus Toolbar search |

### Global Navigation
| Shortcut | Action |
|----------|--------|
| Mod+K | Open search |
| Mod+Shift+A | Add new agent workflow |
| Mod+Y | Go to templates |
| Mod+L | Go to logs |

### Utility
| Shortcut | Action |
|----------|--------|
| Mod+D | Clear terminal console |
| Mod+E | Clear notifications |

### Mouse Controls
| Action | Control |
|--------|---------|
| Pan/move canvas | Left-drag on empty space, scroll, or trackpad |
| Select multiple blocks | Right-drag to draw selection box |
| Drag block | Left-drag on block header |
| Add to selection | Mod+Click on blocks |

## Quick Reference — Workspaces
| Action | How |
|--------|-----|
| Create workspace | Click workspace dropdown → New Workspace |
| Switch workspaces | Click workspace dropdown → Select workspace |
| Invite team members | Sidebar → Invite |
| Rename/Duplicate/Export/Delete workspace | Right-click workspace → action |

## Quick Reference — Workflows
| Action | How |
|--------|-----|
| Create workflow | Click + button in sidebar |
| Reorder/move workflows | Drag workflow up/down or onto a folder |
| Import workflow | Click import button in sidebar → Select file |
| Multi-select workflows | Mod+Click or Shift+Click workflows in sidebar |
| Open in new tab | Right-click workflow → Open in New Tab |
| Rename/Color/Duplicate/Export/Delete | Right-click workflow → action |

## Quick Reference — Blocks
| Action | How |
|--------|-----|
| Add a block | Drag from Toolbar panel, or right-click canvas → Add Block |
| Multi-select blocks | Mod+Click additional blocks, or shift-drag selection box |
| Copy/Paste blocks | Mod+C / Mod+V |
| Duplicate/Delete blocks | Right-click → action |
| Rename a block | Click block name in header |
| Enable/Disable block | Right-click → Enable/Disable |
| Lock/Unlock block | Hover block → Click lock icon (Admin only) |
| Toggle handle orientation | Right-click → Toggle Handles |
| Configure a block | Select block → use Editor panel on right |

## Quick Reference — Connections
| Action | How |
|--------|-----|
| Create connection | Drag from output handle to input handle |
| Delete connection | Click edge to select → Delete key |
| Use output in another block | Drag connection tag into input field |

## Quick Reference — Running & Testing
| Action | How |
|--------|-----|
| Run workflow | Click Run Workflow button or Mod+Enter |
| Stop workflow | Click Stop button or Mod+Enter while running |
| Test with chat | Use Chat panel on the right side |
| Run from block | Hover block → Click play button, or right-click → Run from block |
| Run until block | Right-click block → Run until block |
| View execution logs | Open terminal panel at bottom, or Mod+L |
| Filter/Search/Copy/Clear logs | Terminal panel controls |

## Quick Reference — Deployment
| Action | How |
|--------|-----|
| Deploy workflow | Click Deploy button in panel |
| Update deployment | Click Update when changes are detected |
| Revert deployment | Previous versions in Deploy tab → Promote to live |
| Copy API endpoint | Deploy tab → API → Copy API cURL |

## Quick Reference — Variables
| Action | How |
|--------|-----|
| Add/Edit/Delete workflow variable | Panel → Variables → Add Variable |
| Add environment variable | Settings → Environment Variables → Add |
| Reference workflow variable | Use <blockName.itemName> syntax |
| Reference environment variable | Use {{ENV_VAR}} syntax |
`
|
||||||
@@ -0,0 +1,2 @@
|
|||||||
|
export * from './mutations'
|
||||||
|
export * from './queries'
|
||||||
@@ -0,0 +1,624 @@
|
|||||||
|
import crypto from 'crypto'
|
||||||
|
import { db } from '@sim/db'
|
||||||
|
import { apiKey, workflow, workflowFolder } from '@sim/db/schema'
|
||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { and, eq, isNull, max } from 'drizzle-orm'
|
||||||
|
import { nanoid } from 'nanoid'
|
||||||
|
import { createApiKey } from '@/lib/api-key/auth'
|
||||||
|
import type { ExecutionContext, ToolCallResult } from '@/lib/copilot/orchestrator/types'
|
||||||
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
|
import { buildDefaultWorkflowArtifacts } from '@/lib/workflows/defaults'
|
||||||
|
import { executeWorkflow } from '@/lib/workflows/executor/execute-workflow'
|
||||||
|
import {
|
||||||
|
getExecutionState,
|
||||||
|
getLatestExecutionState,
|
||||||
|
} from '@/lib/workflows/executor/execution-state'
|
||||||
|
import { saveWorkflowToNormalizedTables } from '@/lib/workflows/persistence/utils'
|
||||||
|
import { ensureWorkflowAccess, ensureWorkspaceAccess, getDefaultWorkspaceId } from '../access'
|
||||||
|
import type {
|
||||||
|
CreateFolderParams,
|
||||||
|
CreateWorkflowParams,
|
||||||
|
GenerateApiKeyParams,
|
||||||
|
MoveFolderParams,
|
||||||
|
MoveWorkflowParams,
|
||||||
|
RenameWorkflowParams,
|
||||||
|
RunBlockParams,
|
||||||
|
RunFromBlockParams,
|
||||||
|
RunWorkflowParams,
|
||||||
|
RunWorkflowUntilBlockParams,
|
||||||
|
SetGlobalWorkflowVariablesParams,
|
||||||
|
VariableOperation,
|
||||||
|
} from '../param-types'
|
||||||
|
|
||||||
|
// Module-scoped logger shared by all workflow mutation executors below.
const logger = createLogger('WorkflowMutations')
|
||||||
|
|
||||||
|
/**
 * Creates a new workflow in the given (or user's default) workspace.
 *
 * Validates name/description, appends the workflow after the last sibling
 * in its folder's sort order, inserts the workflow row, then seeds the
 * normalized block/edge tables with the default starter state.
 *
 * @param params - name (required, <=200 chars); optional description
 *   (<=2000 chars), workspaceId, folderId.
 * @param context - Execution context carrying the acting userId.
 * @returns ToolCallResult with the new workflowId/workspaceId on success;
 *   errors are caught and returned as `{ success: false, error }`.
 */
export async function executeCreateWorkflow(
  params: CreateWorkflowParams,
  context: ExecutionContext
): Promise<ToolCallResult> {
  try {
    const name = typeof params?.name === 'string' ? params.name.trim() : ''
    if (!name) {
      return { success: false, error: 'name is required' }
    }
    if (name.length > 200) {
      return { success: false, error: 'Workflow name must be 200 characters or less' }
    }
    const description = typeof params?.description === 'string' ? params.description : null
    if (description && description.length > 2000) {
      return { success: false, error: 'Description must be 2000 characters or less' }
    }

    // Fall back to the user's default workspace when none is specified.
    const workspaceId = params?.workspaceId || (await getDefaultWorkspaceId(context.userId))
    const folderId = params?.folderId || null

    // Third argument presumably requests write-level access — confirm in ../access.
    await ensureWorkspaceAccess(workspaceId, context.userId, true)

    const workflowId = crypto.randomUUID()
    const now = new Date()

    // Place the new workflow after the highest sortOrder among siblings
    // (same folder, or the workspace root when folderId is null).
    const folderCondition = folderId ? eq(workflow.folderId, folderId) : isNull(workflow.folderId)
    const [maxResult] = await db
      .select({ maxOrder: max(workflow.sortOrder) })
      .from(workflow)
      .where(and(eq(workflow.workspaceId, workspaceId), folderCondition))
    const sortOrder = (maxResult?.maxOrder ?? 0) + 1

    await db.insert(workflow).values({
      id: workflowId,
      userId: context.userId,
      workspaceId,
      folderId,
      sortOrder,
      name,
      description,
      color: '#3972F6', // default workflow color
      lastSynced: now,
      createdAt: now,
      updatedAt: now,
      isDeployed: false,
      runCount: 0,
      variables: {},
    })

    // Seed the normalized tables with the default starter state so the
    // workflow opens with a usable canvas.
    const { workflowState } = buildDefaultWorkflowArtifacts()
    const saveResult = await saveWorkflowToNormalizedTables(workflowId, workflowState)
    if (!saveResult.success) {
      // NOTE(review): the workflow row is already inserted at this point;
      // a failure here leaves a workflow without normalized state — confirm
      // whether cleanup/rollback is handled elsewhere.
      throw new Error(saveResult.error || 'Failed to save workflow state')
    }

    return {
      success: true,
      output: {
        workflowId,
        workflowName: name,
        workspaceId,
        folderId,
      },
    }
  } catch (error) {
    return { success: false, error: error instanceof Error ? error.message : String(error) }
  }
}
|
||||||
|
|
||||||
|
export async function executeCreateFolder(
|
||||||
|
params: CreateFolderParams,
|
||||||
|
context: ExecutionContext
|
||||||
|
): Promise<ToolCallResult> {
|
||||||
|
try {
|
||||||
|
const name = typeof params?.name === 'string' ? params.name.trim() : ''
|
||||||
|
if (!name) {
|
||||||
|
return { success: false, error: 'name is required' }
|
||||||
|
}
|
||||||
|
if (name.length > 200) {
|
||||||
|
return { success: false, error: 'Folder name must be 200 characters or less' }
|
||||||
|
}
|
||||||
|
|
||||||
|
const workspaceId = params?.workspaceId || (await getDefaultWorkspaceId(context.userId))
|
||||||
|
const parentId = params?.parentId || null
|
||||||
|
|
||||||
|
await ensureWorkspaceAccess(workspaceId, context.userId, true)
|
||||||
|
|
||||||
|
const [maxResult] = await db
|
||||||
|
.select({ maxOrder: max(workflowFolder.sortOrder) })
|
||||||
|
.from(workflowFolder)
|
||||||
|
.where(
|
||||||
|
and(
|
||||||
|
eq(workflowFolder.workspaceId, workspaceId),
|
||||||
|
parentId ? eq(workflowFolder.parentId, parentId) : isNull(workflowFolder.parentId)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
const sortOrder = (maxResult?.maxOrder ?? 0) + 1
|
||||||
|
|
||||||
|
const folderId = crypto.randomUUID()
|
||||||
|
await db.insert(workflowFolder).values({
|
||||||
|
id: folderId,
|
||||||
|
userId: context.userId,
|
||||||
|
workspaceId,
|
||||||
|
parentId,
|
||||||
|
name,
|
||||||
|
sortOrder,
|
||||||
|
createdAt: new Date(),
|
||||||
|
updatedAt: new Date(),
|
||||||
|
})
|
||||||
|
|
||||||
|
return { success: true, output: { folderId, name, workspaceId, parentId } }
|
||||||
|
} catch (error) {
|
||||||
|
return { success: false, error: error instanceof Error ? error.message : String(error) }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Executes a workflow end-to-end on behalf of the acting user.
 *
 * Resolves the workflow id from params or the session context, verifies
 * access, then delegates to executeWorkflow. Runs the draft state unless
 * params.useDeployedState is set.
 *
 * @returns ToolCallResult whose output carries the executionId, per-block
 *   logs, and workflow output; `error` is populated on failure.
 */
export async function executeRunWorkflow(
  params: RunWorkflowParams,
  context: ExecutionContext
): Promise<ToolCallResult> {
  try {
    // Fall back to the workflow bound to the copilot session.
    const workflowId = params.workflowId || context.workflowId
    if (!workflowId) {
      return { success: false, error: 'workflowId is required' }
    }

    const { workflow: workflowRecord } = await ensureWorkflowAccess(workflowId, context.userId)

    // Draft state is the default; deployed state only on explicit request.
    const useDraftState = !params.useDeployedState

    const result = await executeWorkflow(
      {
        id: workflowRecord.id,
        userId: workflowRecord.userId,
        workspaceId: workflowRecord.workspaceId,
        variables: workflowRecord.variables || {},
      },
      generateRequestId(),
      // Input may arrive under either parameter name.
      params.workflow_input || params.input || undefined,
      context.userId,
      { enabled: true, useDraftState }
    )

    return {
      success: result.success,
      output: {
        executionId: result.metadata?.executionId,
        success: result.success,
        output: result.output,
        logs: result.logs,
      },
      error: result.success ? undefined : result.error || 'Workflow execution failed',
    }
  } catch (error) {
    return { success: false, error: error instanceof Error ? error.message : String(error) }
  }
}
|
||||||
|
|
||||||
|
export async function executeSetGlobalWorkflowVariables(
|
||||||
|
params: SetGlobalWorkflowVariablesParams,
|
||||||
|
context: ExecutionContext
|
||||||
|
): Promise<ToolCallResult> {
|
||||||
|
try {
|
||||||
|
const workflowId = params.workflowId || context.workflowId
|
||||||
|
if (!workflowId) {
|
||||||
|
return { success: false, error: 'workflowId is required' }
|
||||||
|
}
|
||||||
|
const operations: VariableOperation[] = Array.isArray(params.operations)
|
||||||
|
? params.operations
|
||||||
|
: []
|
||||||
|
const { workflow: workflowRecord } = await ensureWorkflowAccess(workflowId, context.userId)
|
||||||
|
|
||||||
|
interface WorkflowVariable {
|
||||||
|
id: string
|
||||||
|
workflowId?: string
|
||||||
|
name: string
|
||||||
|
type: string
|
||||||
|
value?: unknown
|
||||||
|
}
|
||||||
|
const currentVarsRecord = (workflowRecord.variables as Record<string, unknown>) || {}
|
||||||
|
const byName: Record<string, WorkflowVariable> = {}
|
||||||
|
Object.values(currentVarsRecord).forEach((v) => {
|
||||||
|
if (v && typeof v === 'object' && 'id' in v && 'name' in v) {
|
||||||
|
const variable = v as WorkflowVariable
|
||||||
|
byName[String(variable.name)] = variable
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
for (const op of operations) {
|
||||||
|
const key = String(op?.name || '')
|
||||||
|
if (!key) continue
|
||||||
|
const nextType = op?.type || byName[key]?.type || 'plain'
|
||||||
|
const coerceValue = (value: unknown, type: string): unknown => {
|
||||||
|
if (value === undefined) return value
|
||||||
|
if (type === 'number') {
|
||||||
|
const n = Number(value)
|
||||||
|
return Number.isNaN(n) ? value : n
|
||||||
|
}
|
||||||
|
if (type === 'boolean') {
|
||||||
|
const v = String(value).trim().toLowerCase()
|
||||||
|
if (v === 'true') return true
|
||||||
|
if (v === 'false') return false
|
||||||
|
return value
|
||||||
|
}
|
||||||
|
if (type === 'array' || type === 'object') {
|
||||||
|
try {
|
||||||
|
const parsed = JSON.parse(String(value))
|
||||||
|
if (type === 'array' && Array.isArray(parsed)) return parsed
|
||||||
|
if (type === 'object' && parsed && typeof parsed === 'object' && !Array.isArray(parsed))
|
||||||
|
return parsed
|
||||||
|
} catch (error) {
|
||||||
|
logger.warn('Failed to parse JSON value for variable coercion', {
|
||||||
|
error: error instanceof Error ? error.message : String(error),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
return value
|
||||||
|
}
|
||||||
|
return value
|
||||||
|
}
|
||||||
|
|
||||||
|
if (op.operation === 'delete') {
|
||||||
|
delete byName[key]
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
const typedValue = coerceValue(op.value, nextType)
|
||||||
|
if (op.operation === 'add') {
|
||||||
|
byName[key] = {
|
||||||
|
id: crypto.randomUUID(),
|
||||||
|
workflowId,
|
||||||
|
name: key,
|
||||||
|
type: nextType,
|
||||||
|
value: typedValue,
|
||||||
|
}
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if (op.operation === 'edit') {
|
||||||
|
if (!byName[key]) {
|
||||||
|
byName[key] = {
|
||||||
|
id: crypto.randomUUID(),
|
||||||
|
workflowId,
|
||||||
|
name: key,
|
||||||
|
type: nextType,
|
||||||
|
value: typedValue,
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
byName[key] = {
|
||||||
|
...byName[key],
|
||||||
|
type: nextType,
|
||||||
|
value: typedValue,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const nextVarsRecord = Object.fromEntries(Object.values(byName).map((v) => [String(v.id), v]))
|
||||||
|
|
||||||
|
await db
|
||||||
|
.update(workflow)
|
||||||
|
.set({ variables: nextVarsRecord, updatedAt: new Date() })
|
||||||
|
.where(eq(workflow.id, workflowId))
|
||||||
|
|
||||||
|
return { success: true, output: { updated: Object.values(byName).length } }
|
||||||
|
} catch (error) {
|
||||||
|
return { success: false, error: error instanceof Error ? error.message : String(error) }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function executeRenameWorkflow(
|
||||||
|
params: RenameWorkflowParams,
|
||||||
|
context: ExecutionContext
|
||||||
|
): Promise<ToolCallResult> {
|
||||||
|
try {
|
||||||
|
const workflowId = params.workflowId
|
||||||
|
if (!workflowId) {
|
||||||
|
return { success: false, error: 'workflowId is required' }
|
||||||
|
}
|
||||||
|
const name = typeof params.name === 'string' ? params.name.trim() : ''
|
||||||
|
if (!name) {
|
||||||
|
return { success: false, error: 'name is required' }
|
||||||
|
}
|
||||||
|
if (name.length > 200) {
|
||||||
|
return { success: false, error: 'Workflow name must be 200 characters or less' }
|
||||||
|
}
|
||||||
|
|
||||||
|
await ensureWorkflowAccess(workflowId, context.userId)
|
||||||
|
|
||||||
|
await db
|
||||||
|
.update(workflow)
|
||||||
|
.set({ name, updatedAt: new Date() })
|
||||||
|
.where(eq(workflow.id, workflowId))
|
||||||
|
|
||||||
|
return { success: true, output: { workflowId, name } }
|
||||||
|
} catch (error) {
|
||||||
|
return { success: false, error: error instanceof Error ? error.message : String(error) }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function executeMoveWorkflow(
|
||||||
|
params: MoveWorkflowParams,
|
||||||
|
context: ExecutionContext
|
||||||
|
): Promise<ToolCallResult> {
|
||||||
|
try {
|
||||||
|
const workflowId = params.workflowId
|
||||||
|
if (!workflowId) {
|
||||||
|
return { success: false, error: 'workflowId is required' }
|
||||||
|
}
|
||||||
|
|
||||||
|
await ensureWorkflowAccess(workflowId, context.userId)
|
||||||
|
|
||||||
|
const folderId = params.folderId || null
|
||||||
|
|
||||||
|
await db
|
||||||
|
.update(workflow)
|
||||||
|
.set({ folderId, updatedAt: new Date() })
|
||||||
|
.where(eq(workflow.id, workflowId))
|
||||||
|
|
||||||
|
return { success: true, output: { workflowId, folderId } }
|
||||||
|
} catch (error) {
|
||||||
|
return { success: false, error: error instanceof Error ? error.message : String(error) }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function executeMoveFolder(
|
||||||
|
params: MoveFolderParams,
|
||||||
|
context: ExecutionContext
|
||||||
|
): Promise<ToolCallResult> {
|
||||||
|
try {
|
||||||
|
const folderId = params.folderId
|
||||||
|
if (!folderId) {
|
||||||
|
return { success: false, error: 'folderId is required' }
|
||||||
|
}
|
||||||
|
|
||||||
|
const parentId = params.parentId || null
|
||||||
|
|
||||||
|
if (parentId === folderId) {
|
||||||
|
return { success: false, error: 'A folder cannot be moved into itself' }
|
||||||
|
}
|
||||||
|
|
||||||
|
await db
|
||||||
|
.update(workflowFolder)
|
||||||
|
.set({ parentId, updatedAt: new Date() })
|
||||||
|
.where(eq(workflowFolder.id, folderId))
|
||||||
|
|
||||||
|
return { success: true, output: { folderId, parentId } }
|
||||||
|
} catch (error) {
|
||||||
|
return { success: false, error: error instanceof Error ? error.message : String(error) }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Executes a workflow from the start but stops after the given block.
 *
 * Identical to executeRunWorkflow except that stopAfterBlockId is required
 * and forwarded to the executor, which halts once that block completes.
 *
 * @returns ToolCallResult echoing the stoppedAfterBlockId alongside the
 *   execution output and logs.
 */
export async function executeRunWorkflowUntilBlock(
  params: RunWorkflowUntilBlockParams,
  context: ExecutionContext
): Promise<ToolCallResult> {
  try {
    // Fall back to the workflow bound to the copilot session.
    const workflowId = params.workflowId || context.workflowId
    if (!workflowId) {
      return { success: false, error: 'workflowId is required' }
    }
    if (!params.stopAfterBlockId) {
      return { success: false, error: 'stopAfterBlockId is required' }
    }

    const { workflow: workflowRecord } = await ensureWorkflowAccess(workflowId, context.userId)

    // Draft state is the default; deployed state only on explicit request.
    const useDraftState = !params.useDeployedState

    const result = await executeWorkflow(
      {
        id: workflowRecord.id,
        userId: workflowRecord.userId,
        workspaceId: workflowRecord.workspaceId,
        variables: workflowRecord.variables || {},
      },
      generateRequestId(),
      // Input may arrive under either parameter name.
      params.workflow_input || params.input || undefined,
      context.userId,
      { enabled: true, useDraftState, stopAfterBlockId: params.stopAfterBlockId }
    )

    return {
      success: result.success,
      output: {
        executionId: result.metadata?.executionId,
        success: result.success,
        stoppedAfterBlockId: params.stopAfterBlockId,
        output: result.output,
        logs: result.logs,
      },
      error: result.success ? undefined : result.error || 'Workflow execution failed',
    }
  } catch (error) {
    return { success: false, error: error instanceof Error ? error.message : String(error) }
  }
}
|
||||||
|
|
||||||
|
/**
 * Creates a workspace-scoped API key and returns the plaintext key once.
 *
 * The key name must be unique among the workspace's keys (checked before
 * insert). Only the encrypted form is persisted; the plaintext is returned
 * in the output with a warning that it will not be shown again.
 *
 * NOTE(review): the uniqueness check and the insert are separate statements
 * (no transaction); a concurrent request with the same name could slip
 * through — confirm whether a DB unique constraint backs this.
 *
 * @param params - name (required, <=200 chars); optional workspaceId.
 * @param context - Execution context carrying the acting userId.
 */
export async function executeGenerateApiKey(
  params: GenerateApiKeyParams,
  context: ExecutionContext
): Promise<ToolCallResult> {
  try {
    const name = typeof params.name === 'string' ? params.name.trim() : ''
    if (!name) {
      return { success: false, error: 'name is required' }
    }
    if (name.length > 200) {
      return { success: false, error: 'API key name must be 200 characters or less' }
    }

    // Fall back to the user's default workspace; require write access.
    const workspaceId = params.workspaceId || (await getDefaultWorkspaceId(context.userId))
    await ensureWorkspaceAccess(workspaceId, context.userId, true)

    // Reject duplicate names among this workspace's keys.
    const existingKey = await db
      .select({ id: apiKey.id })
      .from(apiKey)
      .where(
        and(
          eq(apiKey.workspaceId, workspaceId),
          eq(apiKey.name, name),
          eq(apiKey.type, 'workspace')
        )
      )
      .limit(1)

    if (existingKey.length > 0) {
      return {
        success: false,
        error: `A workspace API key named "${name}" already exists. Choose a different name.`,
      }
    }

    // createApiKey returns both the plaintext and its encrypted form;
    // only the encrypted form is stored.
    const { key: plainKey, encryptedKey } = await createApiKey(true)
    if (!encryptedKey) {
      return { success: false, error: 'Failed to encrypt API key for storage' }
    }

    const [newKey] = await db
      .insert(apiKey)
      .values({
        id: nanoid(),
        workspaceId,
        userId: context.userId,
        createdBy: context.userId,
        name,
        key: encryptedKey,
        type: 'workspace',
        createdAt: new Date(),
        updatedAt: new Date(),
      })
      .returning({ id: apiKey.id, name: apiKey.name, createdAt: apiKey.createdAt })

    return {
      success: true,
      output: {
        id: newKey.id,
        name: newKey.name,
        key: plainKey,
        workspaceId,
        message:
          'API key created successfully. Copy this key now — it will not be shown again. Use this key in the x-api-key header when calling workflow API endpoints.',
      },
    }
  } catch (error) {
    return { success: false, error: error instanceof Error ? error.message : String(error) }
  }
}
|
||||||
|
|
||||||
|
/**
 * Re-runs a workflow starting from a given block, reusing block outputs
 * recorded in a previous execution snapshot.
 *
 * The snapshot comes from params.executionId when given, otherwise the
 * latest recorded execution for the workflow. Without any snapshot the
 * tool fails and instructs the caller to run the full workflow first.
 *
 * @returns ToolCallResult echoing the startBlockId alongside the
 *   execution output and logs.
 */
export async function executeRunFromBlock(
  params: RunFromBlockParams,
  context: ExecutionContext
): Promise<ToolCallResult> {
  try {
    // Fall back to the workflow bound to the copilot session.
    const workflowId = params.workflowId || context.workflowId
    if (!workflowId) {
      return { success: false, error: 'workflowId is required' }
    }
    if (!params.startBlockId) {
      return { success: false, error: 'startBlockId is required' }
    }

    // Prefer the explicitly requested execution's state; otherwise use the
    // most recent one for this workflow.
    const snapshot = params.executionId
      ? await getExecutionState(params.executionId)
      : await getLatestExecutionState(workflowId)

    if (!snapshot) {
      return {
        success: false,
        error: params.executionId
          ? `No execution state found for execution ${params.executionId}. Run the full workflow first.`
          : `No execution state found for workflow ${workflowId}. Run the full workflow first to create a snapshot.`,
      }
    }

    const { workflow: workflowRecord } = await ensureWorkflowAccess(workflowId, context.userId)
    // Draft state is the default; deployed state only on explicit request.
    const useDraftState = !params.useDeployedState

    const result = await executeWorkflow(
      {
        id: workflowRecord.id,
        userId: workflowRecord.userId,
        workspaceId: workflowRecord.workspaceId,
        variables: workflowRecord.variables || {},
      },
      generateRequestId(),
      // Input may arrive under either parameter name.
      params.workflow_input || params.input || undefined,
      context.userId,
      {
        enabled: true,
        useDraftState,
        runFromBlock: { startBlockId: params.startBlockId, sourceSnapshot: snapshot },
      }
    )

    return {
      success: result.success,
      output: {
        executionId: result.metadata?.executionId,
        success: result.success,
        startBlockId: params.startBlockId,
        output: result.output,
        logs: result.logs,
      },
      error: result.success ? undefined : result.error || 'Workflow execution failed',
    }
  } catch (error) {
    return { success: false, error: error instanceof Error ? error.message : String(error) }
  }
}
|
||||||
|
|
||||||
|
/**
 * Executes a single block in isolation: starts from the block using a
 * previous execution snapshot for upstream outputs, and stops right after
 * it (runFromBlock + stopAfterBlockId on the same block id).
 *
 * The snapshot comes from params.executionId when given, otherwise the
 * latest recorded execution for the workflow; without one the tool fails
 * and instructs the caller to run the full workflow first.
 */
export async function executeRunBlock(
  params: RunBlockParams,
  context: ExecutionContext
): Promise<ToolCallResult> {
  try {
    // Fall back to the workflow bound to the copilot session.
    const workflowId = params.workflowId || context.workflowId
    if (!workflowId) {
      return { success: false, error: 'workflowId is required' }
    }
    if (!params.blockId) {
      return { success: false, error: 'blockId is required' }
    }

    // Prefer the explicitly requested execution's state; otherwise use the
    // most recent one for this workflow.
    const snapshot = params.executionId
      ? await getExecutionState(params.executionId)
      : await getLatestExecutionState(workflowId)

    if (!snapshot) {
      return {
        success: false,
        error: params.executionId
          ? `No execution state found for execution ${params.executionId}. Run the full workflow first.`
          : `No execution state found for workflow ${workflowId}. Run the full workflow first to create a snapshot.`,
      }
    }

    const { workflow: workflowRecord } = await ensureWorkflowAccess(workflowId, context.userId)
    // Draft state is the default; deployed state only on explicit request.
    const useDraftState = !params.useDeployedState

    const result = await executeWorkflow(
      {
        id: workflowRecord.id,
        userId: workflowRecord.userId,
        workspaceId: workflowRecord.workspaceId,
        variables: workflowRecord.variables || {},
      },
      generateRequestId(),
      // Input may arrive under either parameter name.
      params.workflow_input || params.input || undefined,
      context.userId,
      {
        enabled: true,
        useDraftState,
        // Start at the block and stop right after it — single-block run.
        runFromBlock: { startBlockId: params.blockId, sourceSnapshot: snapshot },
        stopAfterBlockId: params.blockId,
      }
    )

    return {
      success: result.success,
      output: {
        executionId: result.metadata?.executionId,
        success: result.success,
        blockId: params.blockId,
        output: result.output,
        logs: result.logs,
      },
      error: result.success ? undefined : result.error || 'Workflow execution failed',
    }
  } catch (error) {
    return { success: false, error: error instanceof Error ? error.message : String(error) }
  }
}
|
||||||
@@ -0,0 +1,615 @@
|
|||||||
|
import { db } from '@sim/db'
|
||||||
|
import { customTools, permissions, workflow, workflowFolder, workspace } from '@sim/db/schema'
|
||||||
|
import { and, asc, desc, eq, isNull, or } from 'drizzle-orm'
|
||||||
|
import type { ExecutionContext, ToolCallResult } from '@/lib/copilot/orchestrator/types'
|
||||||
|
import {
|
||||||
|
formatNormalizedWorkflowForCopilot,
|
||||||
|
normalizeWorkflowName,
|
||||||
|
} from '@/lib/copilot/tools/shared/workflow-utils'
|
||||||
|
import { mcpService } from '@/lib/mcp/service'
|
||||||
|
import { listWorkspaceFiles } from '@/lib/uploads/contexts/workspace'
|
||||||
|
import { getBlockOutputPaths } from '@/lib/workflows/blocks/block-outputs'
|
||||||
|
import { BlockPathCalculator } from '@/lib/workflows/blocks/block-path-calculator'
|
||||||
|
import {
|
||||||
|
loadDeployedWorkflowState,
|
||||||
|
loadWorkflowFromNormalizedTables,
|
||||||
|
} from '@/lib/workflows/persistence/utils'
|
||||||
|
import { isInputDefinitionTrigger } from '@/lib/workflows/triggers/input-definition-triggers'
|
||||||
|
import { normalizeName } from '@/executor/constants'
|
||||||
|
import type { Loop, Parallel } from '@/stores/workflows/workflow/types'
|
||||||
|
import {
|
||||||
|
ensureWorkflowAccess,
|
||||||
|
ensureWorkspaceAccess,
|
||||||
|
getAccessibleWorkflowsForUser,
|
||||||
|
getDefaultWorkspaceId,
|
||||||
|
} from '../access'
|
||||||
|
import type {
|
||||||
|
GetBlockOutputsParams,
|
||||||
|
GetBlockUpstreamReferencesParams,
|
||||||
|
GetDeployedWorkflowStateParams,
|
||||||
|
GetUserWorkflowParams,
|
||||||
|
GetWorkflowDataParams,
|
||||||
|
GetWorkflowFromNameParams,
|
||||||
|
ListFoldersParams,
|
||||||
|
ListUserWorkflowsParams,
|
||||||
|
} from '../param-types'
|
||||||
|
|
||||||
|
export async function executeGetUserWorkflow(
|
||||||
|
params: GetUserWorkflowParams,
|
||||||
|
context: ExecutionContext
|
||||||
|
): Promise<ToolCallResult> {
|
||||||
|
try {
|
||||||
|
const workflowId = params.workflowId || context.workflowId
|
||||||
|
if (!workflowId) {
|
||||||
|
return { success: false, error: 'workflowId is required' }
|
||||||
|
}
|
||||||
|
|
||||||
|
const { workflow: workflowRecord, workspaceId } = await ensureWorkflowAccess(
|
||||||
|
workflowId,
|
||||||
|
context.userId
|
||||||
|
)
|
||||||
|
|
||||||
|
const normalized = await loadWorkflowFromNormalizedTables(workflowId)
|
||||||
|
const userWorkflow = formatNormalizedWorkflowForCopilot(normalized)
|
||||||
|
if (!userWorkflow) {
|
||||||
|
return { success: false, error: 'Workflow has no normalized data' }
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
success: true,
|
||||||
|
output: {
|
||||||
|
workflowId,
|
||||||
|
workflowName: workflowRecord.name || '',
|
||||||
|
workspaceId,
|
||||||
|
userWorkflow,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
return { success: false, error: error instanceof Error ? error.message : String(error) }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function executeGetWorkflowFromName(
|
||||||
|
params: GetWorkflowFromNameParams,
|
||||||
|
context: ExecutionContext
|
||||||
|
): Promise<ToolCallResult> {
|
||||||
|
try {
|
||||||
|
const workflowName = typeof params.workflow_name === 'string' ? params.workflow_name.trim() : ''
|
||||||
|
if (!workflowName) {
|
||||||
|
return { success: false, error: 'workflow_name is required' }
|
||||||
|
}
|
||||||
|
|
||||||
|
const workflows = await getAccessibleWorkflowsForUser(context.userId)
|
||||||
|
|
||||||
|
const targetName = normalizeWorkflowName(workflowName)
|
||||||
|
const match = workflows.find((w) => normalizeWorkflowName(w.name) === targetName)
|
||||||
|
if (!match) {
|
||||||
|
return { success: false, error: `Workflow not found: ${workflowName}` }
|
||||||
|
}
|
||||||
|
|
||||||
|
const normalized = await loadWorkflowFromNormalizedTables(match.id)
|
||||||
|
const userWorkflow = formatNormalizedWorkflowForCopilot(normalized)
|
||||||
|
if (!userWorkflow) {
|
||||||
|
return { success: false, error: 'Workflow has no normalized data' }
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
success: true,
|
||||||
|
output: {
|
||||||
|
workflowId: match.id,
|
||||||
|
workflowName: match.name || '',
|
||||||
|
workspaceId: match.workspaceId,
|
||||||
|
userWorkflow,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
return { success: false, error: error instanceof Error ? error.message : String(error) }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function executeListUserWorkflows(
|
||||||
|
params: ListUserWorkflowsParams,
|
||||||
|
context: ExecutionContext
|
||||||
|
): Promise<ToolCallResult> {
|
||||||
|
try {
|
||||||
|
const workspaceId = params?.workspaceId as string | undefined
|
||||||
|
const folderId = params?.folderId as string | undefined
|
||||||
|
|
||||||
|
const workflows = await getAccessibleWorkflowsForUser(context.userId, { workspaceId, folderId })
|
||||||
|
|
||||||
|
const workflowList = workflows.map((w) => ({
|
||||||
|
workflowId: w.id,
|
||||||
|
workflowName: w.name || '',
|
||||||
|
workspaceId: w.workspaceId,
|
||||||
|
folderId: w.folderId,
|
||||||
|
}))
|
||||||
|
|
||||||
|
return { success: true, output: { workflows: workflowList } }
|
||||||
|
} catch (error) {
|
||||||
|
return { success: false, error: error instanceof Error ? error.message : String(error) }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function executeListUserWorkspaces(
|
||||||
|
context: ExecutionContext
|
||||||
|
): Promise<ToolCallResult> {
|
||||||
|
try {
|
||||||
|
const workspaces = await db
|
||||||
|
.select({
|
||||||
|
workspaceId: workspace.id,
|
||||||
|
workspaceName: workspace.name,
|
||||||
|
ownerId: workspace.ownerId,
|
||||||
|
permissionType: permissions.permissionType,
|
||||||
|
})
|
||||||
|
.from(permissions)
|
||||||
|
.innerJoin(workspace, eq(permissions.entityId, workspace.id))
|
||||||
|
.where(and(eq(permissions.userId, context.userId), eq(permissions.entityType, 'workspace')))
|
||||||
|
.orderBy(desc(workspace.createdAt))
|
||||||
|
|
||||||
|
const output = workspaces.map((row) => ({
|
||||||
|
workspaceId: row.workspaceId,
|
||||||
|
workspaceName: row.workspaceName,
|
||||||
|
role: row.ownerId === context.userId ? 'owner' : row.permissionType,
|
||||||
|
}))
|
||||||
|
|
||||||
|
return { success: true, output: { workspaces: output } }
|
||||||
|
} catch (error) {
|
||||||
|
return { success: false, error: error instanceof Error ? error.message : String(error) }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function executeListFolders(
|
||||||
|
params: ListFoldersParams,
|
||||||
|
context: ExecutionContext
|
||||||
|
): Promise<ToolCallResult> {
|
||||||
|
try {
|
||||||
|
const workspaceId =
|
||||||
|
(params?.workspaceId as string | undefined) || (await getDefaultWorkspaceId(context.userId))
|
||||||
|
|
||||||
|
await ensureWorkspaceAccess(workspaceId, context.userId, false)
|
||||||
|
|
||||||
|
const folders = await db
|
||||||
|
.select({
|
||||||
|
folderId: workflowFolder.id,
|
||||||
|
folderName: workflowFolder.name,
|
||||||
|
parentId: workflowFolder.parentId,
|
||||||
|
sortOrder: workflowFolder.sortOrder,
|
||||||
|
})
|
||||||
|
.from(workflowFolder)
|
||||||
|
.where(eq(workflowFolder.workspaceId, workspaceId))
|
||||||
|
.orderBy(asc(workflowFolder.sortOrder), asc(workflowFolder.createdAt))
|
||||||
|
|
||||||
|
return {
|
||||||
|
success: true,
|
||||||
|
output: {
|
||||||
|
workspaceId,
|
||||||
|
folders,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
return { success: false, error: error instanceof Error ? error.message : String(error) }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function executeGetWorkflowData(
|
||||||
|
params: GetWorkflowDataParams,
|
||||||
|
context: ExecutionContext
|
||||||
|
): Promise<ToolCallResult> {
|
||||||
|
try {
|
||||||
|
const workflowId = params.workflowId || context.workflowId
|
||||||
|
const dataType = params.data_type || params.dataType || ''
|
||||||
|
if (!workflowId) {
|
||||||
|
return { success: false, error: 'workflowId is required' }
|
||||||
|
}
|
||||||
|
if (!dataType) {
|
||||||
|
return { success: false, error: 'data_type is required' }
|
||||||
|
}
|
||||||
|
|
||||||
|
const { workflow: workflowRecord, workspaceId } = await ensureWorkflowAccess(
|
||||||
|
workflowId,
|
||||||
|
context.userId
|
||||||
|
)
|
||||||
|
|
||||||
|
if (dataType === 'global_variables') {
|
||||||
|
const variablesRecord = (workflowRecord.variables as Record<string, unknown>) || {}
|
||||||
|
const variables = Object.values(variablesRecord).map((v) => {
|
||||||
|
const variable = v as Record<string, unknown> | null
|
||||||
|
return {
|
||||||
|
id: String(variable?.id || ''),
|
||||||
|
name: String(variable?.name || ''),
|
||||||
|
value: variable?.value,
|
||||||
|
}
|
||||||
|
})
|
||||||
|
return { success: true, output: { variables } }
|
||||||
|
}
|
||||||
|
|
||||||
|
if (dataType === 'custom_tools') {
|
||||||
|
if (!workspaceId) {
|
||||||
|
return { success: false, error: 'workspaceId is required' }
|
||||||
|
}
|
||||||
|
const conditions = [
|
||||||
|
eq(customTools.workspaceId, workspaceId),
|
||||||
|
and(eq(customTools.userId, context.userId), isNull(customTools.workspaceId)),
|
||||||
|
]
|
||||||
|
const toolsRows = await db
|
||||||
|
.select()
|
||||||
|
.from(customTools)
|
||||||
|
.where(or(...conditions))
|
||||||
|
.orderBy(desc(customTools.createdAt))
|
||||||
|
|
||||||
|
const customToolsData = toolsRows.map((tool) => {
|
||||||
|
const schema = tool.schema as Record<string, unknown> | null
|
||||||
|
const fn = (schema?.function ?? {}) as Record<string, unknown>
|
||||||
|
return {
|
||||||
|
id: String(tool.id || ''),
|
||||||
|
title: String(tool.title || ''),
|
||||||
|
functionName: String(fn.name || ''),
|
||||||
|
description: String(fn.description || ''),
|
||||||
|
parameters: fn.parameters,
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
return { success: true, output: { customTools: customToolsData } }
|
||||||
|
}
|
||||||
|
|
||||||
|
if (dataType === 'mcp_tools') {
|
||||||
|
if (!workspaceId) {
|
||||||
|
return { success: false, error: 'workspaceId is required' }
|
||||||
|
}
|
||||||
|
const tools = await mcpService.discoverTools(context.userId, workspaceId, false)
|
||||||
|
const mcpTools = tools.map((tool) => ({
|
||||||
|
name: String(tool.name || ''),
|
||||||
|
serverId: String(tool.serverId || ''),
|
||||||
|
serverName: String(tool.serverName || ''),
|
||||||
|
description: String(tool.description || ''),
|
||||||
|
inputSchema: tool.inputSchema,
|
||||||
|
}))
|
||||||
|
return { success: true, output: { mcpTools } }
|
||||||
|
}
|
||||||
|
|
||||||
|
if (dataType === 'files') {
|
||||||
|
if (!workspaceId) {
|
||||||
|
return { success: false, error: 'workspaceId is required' }
|
||||||
|
}
|
||||||
|
const files = await listWorkspaceFiles(workspaceId)
|
||||||
|
const fileResults = files.map((file) => ({
|
||||||
|
id: String(file.id || ''),
|
||||||
|
name: String(file.name || ''),
|
||||||
|
key: String(file.key || ''),
|
||||||
|
path: String(file.path || ''),
|
||||||
|
size: Number(file.size || 0),
|
||||||
|
type: String(file.type || ''),
|
||||||
|
uploadedAt: String(file.uploadedAt || ''),
|
||||||
|
}))
|
||||||
|
return { success: true, output: { files: fileResults } }
|
||||||
|
}
|
||||||
|
|
||||||
|
return { success: false, error: `Unknown data_type: ${dataType}` }
|
||||||
|
} catch (error) {
|
||||||
|
return { success: false, error: error instanceof Error ? error.message : String(error) }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function executeGetBlockOutputs(
|
||||||
|
params: GetBlockOutputsParams,
|
||||||
|
context: ExecutionContext
|
||||||
|
): Promise<ToolCallResult> {
|
||||||
|
try {
|
||||||
|
const workflowId = params.workflowId || context.workflowId
|
||||||
|
if (!workflowId) {
|
||||||
|
return { success: false, error: 'workflowId is required' }
|
||||||
|
}
|
||||||
|
await ensureWorkflowAccess(workflowId, context.userId)
|
||||||
|
|
||||||
|
const normalized = await loadWorkflowFromNormalizedTables(workflowId)
|
||||||
|
if (!normalized) {
|
||||||
|
return { success: false, error: 'Workflow has no normalized data' }
|
||||||
|
}
|
||||||
|
|
||||||
|
const blocks = normalized.blocks || {}
|
||||||
|
const loops = normalized.loops || {}
|
||||||
|
const parallels = normalized.parallels || {}
|
||||||
|
const blockIds =
|
||||||
|
Array.isArray(params.blockIds) && params.blockIds.length > 0
|
||||||
|
? params.blockIds
|
||||||
|
: Object.keys(blocks)
|
||||||
|
|
||||||
|
const results: Array<{
|
||||||
|
blockId: string
|
||||||
|
blockName: string
|
||||||
|
blockType: string
|
||||||
|
outputs: string[]
|
||||||
|
insideSubflowOutputs?: string[]
|
||||||
|
outsideSubflowOutputs?: string[]
|
||||||
|
triggerMode?: boolean
|
||||||
|
}> = []
|
||||||
|
|
||||||
|
for (const blockId of blockIds) {
|
||||||
|
const block = blocks[blockId]
|
||||||
|
if (!block?.type) continue
|
||||||
|
const blockName = block.name || block.type
|
||||||
|
|
||||||
|
if (block.type === 'loop' || block.type === 'parallel') {
|
||||||
|
const insidePaths = getSubflowInsidePaths(block.type, blockId, loops, parallels)
|
||||||
|
results.push({
|
||||||
|
blockId,
|
||||||
|
blockName,
|
||||||
|
blockType: block.type,
|
||||||
|
outputs: [],
|
||||||
|
insideSubflowOutputs: formatOutputsWithPrefix(insidePaths, blockName),
|
||||||
|
outsideSubflowOutputs: formatOutputsWithPrefix(['results'], blockName),
|
||||||
|
triggerMode: block.triggerMode,
|
||||||
|
})
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
const outputs = getBlockOutputPaths(block.type, block.subBlocks, block.triggerMode)
|
||||||
|
results.push({
|
||||||
|
blockId,
|
||||||
|
blockName,
|
||||||
|
blockType: block.type,
|
||||||
|
outputs: formatOutputsWithPrefix(outputs, blockName),
|
||||||
|
triggerMode: block.triggerMode,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
const variables = await getWorkflowVariablesForTool(workflowId)
|
||||||
|
|
||||||
|
const payload = { blocks: results, variables }
|
||||||
|
return { success: true, output: payload }
|
||||||
|
} catch (error) {
|
||||||
|
return { success: false, error: error instanceof Error ? error.message : String(error) }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function executeGetBlockUpstreamReferences(
|
||||||
|
params: GetBlockUpstreamReferencesParams,
|
||||||
|
context: ExecutionContext
|
||||||
|
): Promise<ToolCallResult> {
|
||||||
|
try {
|
||||||
|
const workflowId = params.workflowId || context.workflowId
|
||||||
|
if (!workflowId) {
|
||||||
|
return { success: false, error: 'workflowId is required' }
|
||||||
|
}
|
||||||
|
if (!Array.isArray(params.blockIds) || params.blockIds.length === 0) {
|
||||||
|
return { success: false, error: 'blockIds array is required' }
|
||||||
|
}
|
||||||
|
await ensureWorkflowAccess(workflowId, context.userId)
|
||||||
|
|
||||||
|
const normalized = await loadWorkflowFromNormalizedTables(workflowId)
|
||||||
|
if (!normalized) {
|
||||||
|
return { success: false, error: 'Workflow has no normalized data' }
|
||||||
|
}
|
||||||
|
|
||||||
|
const blocks = normalized.blocks || {}
|
||||||
|
const edges = normalized.edges || []
|
||||||
|
const loops = normalized.loops || {}
|
||||||
|
const parallels = normalized.parallels || {}
|
||||||
|
|
||||||
|
const graphEdges = edges.map((edge) => ({ source: edge.source, target: edge.target }))
|
||||||
|
const variableOutputs = await getWorkflowVariablesForTool(workflowId)
|
||||||
|
|
||||||
|
interface AccessibleBlockEntry {
|
||||||
|
blockId: string
|
||||||
|
blockName: string
|
||||||
|
blockType: string
|
||||||
|
outputs: string[]
|
||||||
|
triggerMode?: boolean
|
||||||
|
accessContext?: 'inside' | 'outside'
|
||||||
|
}
|
||||||
|
|
||||||
|
interface UpstreamReferenceResult {
|
||||||
|
blockId: string
|
||||||
|
blockName: string
|
||||||
|
blockType: string
|
||||||
|
accessibleBlocks: AccessibleBlockEntry[]
|
||||||
|
insideSubflows: Array<{ blockId: string; blockName: string; blockType: string }>
|
||||||
|
variables: Array<{ id: string; name: string; type: string; tag: string }>
|
||||||
|
}
|
||||||
|
|
||||||
|
const results: UpstreamReferenceResult[] = []
|
||||||
|
|
||||||
|
for (const blockId of params.blockIds) {
|
||||||
|
const targetBlock = blocks[blockId]
|
||||||
|
if (!targetBlock) continue
|
||||||
|
|
||||||
|
const insideSubflows: Array<{ blockId: string; blockName: string; blockType: string }> = []
|
||||||
|
const containingLoopIds = new Set<string>()
|
||||||
|
const containingParallelIds = new Set<string>()
|
||||||
|
|
||||||
|
Object.values(loops).forEach((loop) => {
|
||||||
|
if (loop?.nodes?.includes(blockId)) {
|
||||||
|
containingLoopIds.add(loop.id)
|
||||||
|
const loopBlock = blocks[loop.id]
|
||||||
|
if (loopBlock) {
|
||||||
|
insideSubflows.push({
|
||||||
|
blockId: loop.id,
|
||||||
|
blockName: loopBlock.name || loopBlock.type,
|
||||||
|
blockType: 'loop',
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
Object.values(parallels).forEach((parallel) => {
|
||||||
|
if (parallel?.nodes?.includes(blockId)) {
|
||||||
|
containingParallelIds.add(parallel.id)
|
||||||
|
const parallelBlock = blocks[parallel.id]
|
||||||
|
if (parallelBlock) {
|
||||||
|
insideSubflows.push({
|
||||||
|
blockId: parallel.id,
|
||||||
|
blockName: parallelBlock.name || parallelBlock.type,
|
||||||
|
blockType: 'parallel',
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
const ancestorIds = BlockPathCalculator.findAllPathNodes(graphEdges, blockId)
|
||||||
|
const accessibleIds = new Set<string>(ancestorIds)
|
||||||
|
accessibleIds.add(blockId)
|
||||||
|
|
||||||
|
const starterBlock = Object.values(blocks).find((b) => isInputDefinitionTrigger(b.type))
|
||||||
|
if (starterBlock && ancestorIds.includes(starterBlock.id)) {
|
||||||
|
accessibleIds.add(starterBlock.id)
|
||||||
|
}
|
||||||
|
|
||||||
|
containingLoopIds.forEach((loopId) => {
|
||||||
|
accessibleIds.add(loopId)
|
||||||
|
loops[loopId]?.nodes?.forEach((nodeId: string) => accessibleIds.add(nodeId))
|
||||||
|
})
|
||||||
|
|
||||||
|
containingParallelIds.forEach((parallelId) => {
|
||||||
|
accessibleIds.add(parallelId)
|
||||||
|
parallels[parallelId]?.nodes?.forEach((nodeId: string) => accessibleIds.add(nodeId))
|
||||||
|
})
|
||||||
|
|
||||||
|
const accessibleBlocks: AccessibleBlockEntry[] = []
|
||||||
|
|
||||||
|
for (const accessibleBlockId of accessibleIds) {
|
||||||
|
const block = blocks[accessibleBlockId]
|
||||||
|
if (!block?.type) continue
|
||||||
|
const canSelfReference = block.type === 'approval' || block.type === 'human_in_the_loop'
|
||||||
|
if (accessibleBlockId === blockId && !canSelfReference) continue
|
||||||
|
|
||||||
|
const blockName = block.name || block.type
|
||||||
|
let accessContext: 'inside' | 'outside' | undefined
|
||||||
|
let outputPaths: string[]
|
||||||
|
|
||||||
|
if (block.type === 'loop' || block.type === 'parallel') {
|
||||||
|
const isInside =
|
||||||
|
(block.type === 'loop' && containingLoopIds.has(accessibleBlockId)) ||
|
||||||
|
(block.type === 'parallel' && containingParallelIds.has(accessibleBlockId))
|
||||||
|
accessContext = isInside ? 'inside' : 'outside'
|
||||||
|
outputPaths = isInside
|
||||||
|
? getSubflowInsidePaths(block.type, accessibleBlockId, loops, parallels)
|
||||||
|
: ['results']
|
||||||
|
} else {
|
||||||
|
outputPaths = getBlockOutputPaths(block.type, block.subBlocks, block.triggerMode)
|
||||||
|
}
|
||||||
|
|
||||||
|
const formattedOutputs = formatOutputsWithPrefix(outputPaths, blockName)
|
||||||
|
const entry: AccessibleBlockEntry = {
|
||||||
|
blockId: accessibleBlockId,
|
||||||
|
blockName,
|
||||||
|
blockType: block.type,
|
||||||
|
outputs: formattedOutputs,
|
||||||
|
...(block.triggerMode ? { triggerMode: true } : {}),
|
||||||
|
...(accessContext ? { accessContext } : {}),
|
||||||
|
}
|
||||||
|
accessibleBlocks.push(entry)
|
||||||
|
}
|
||||||
|
|
||||||
|
results.push({
|
||||||
|
blockId,
|
||||||
|
blockName: targetBlock.name || targetBlock.type,
|
||||||
|
blockType: targetBlock.type,
|
||||||
|
accessibleBlocks,
|
||||||
|
insideSubflows,
|
||||||
|
variables: variableOutputs,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
const payload = { results }
|
||||||
|
return { success: true, output: payload }
|
||||||
|
} catch (error) {
|
||||||
|
return { success: false, error: error instanceof Error ? error.message : String(error) }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async function getWorkflowVariablesForTool(
|
||||||
|
workflowId: string
|
||||||
|
): Promise<Array<{ id: string; name: string; type: string; tag: string }>> {
|
||||||
|
const [workflowRecord] = await db
|
||||||
|
.select({ variables: workflow.variables })
|
||||||
|
.from(workflow)
|
||||||
|
.where(eq(workflow.id, workflowId))
|
||||||
|
.limit(1)
|
||||||
|
|
||||||
|
const variablesRecord = (workflowRecord?.variables as Record<string, unknown>) || {}
|
||||||
|
return Object.values(variablesRecord)
|
||||||
|
.filter((v): v is Record<string, unknown> => {
|
||||||
|
if (!v || typeof v !== 'object') return false
|
||||||
|
const variable = v as Record<string, unknown>
|
||||||
|
return !!variable.name && String(variable.name).trim() !== ''
|
||||||
|
})
|
||||||
|
.map((v) => ({
|
||||||
|
id: String(v.id || ''),
|
||||||
|
name: String(v.name || ''),
|
||||||
|
type: String(v.type || 'plain'),
|
||||||
|
tag: `variable.${normalizeName(String(v.name || ''))}`,
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
|
||||||
|
function getSubflowInsidePaths(
|
||||||
|
blockType: 'loop' | 'parallel',
|
||||||
|
blockId: string,
|
||||||
|
loops: Record<string, Loop>,
|
||||||
|
parallels: Record<string, Parallel>
|
||||||
|
): string[] {
|
||||||
|
const paths = ['index']
|
||||||
|
if (blockType === 'loop') {
|
||||||
|
const loopType = loops[blockId]?.loopType || 'for'
|
||||||
|
if (loopType === 'forEach') {
|
||||||
|
paths.push('currentItem', 'items')
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
const parallelType = parallels[blockId]?.parallelType || 'count'
|
||||||
|
if (parallelType === 'collection') {
|
||||||
|
paths.push('currentItem', 'items')
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return paths
|
||||||
|
}
|
||||||
|
|
||||||
|
function formatOutputsWithPrefix(paths: string[], blockName: string): string[] {
|
||||||
|
const normalizedName = normalizeName(blockName)
|
||||||
|
return paths.map((path) => `${normalizedName}.${path}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function executeGetDeployedWorkflowState(
|
||||||
|
params: GetDeployedWorkflowStateParams,
|
||||||
|
context: ExecutionContext
|
||||||
|
): Promise<ToolCallResult> {
|
||||||
|
try {
|
||||||
|
const workflowId = params.workflowId || context.workflowId
|
||||||
|
if (!workflowId) {
|
||||||
|
return { success: false, error: 'workflowId is required' }
|
||||||
|
}
|
||||||
|
|
||||||
|
const { workflow: workflowRecord } = await ensureWorkflowAccess(workflowId, context.userId)
|
||||||
|
|
||||||
|
try {
|
||||||
|
const deployedState = await loadDeployedWorkflowState(workflowId)
|
||||||
|
const formatted = formatNormalizedWorkflowForCopilot({
|
||||||
|
blocks: deployedState.blocks,
|
||||||
|
edges: deployedState.edges,
|
||||||
|
loops: deployedState.loops as Record<string, Loop>,
|
||||||
|
parallels: deployedState.parallels as Record<string, Parallel>,
|
||||||
|
})
|
||||||
|
|
||||||
|
return {
|
||||||
|
success: true,
|
||||||
|
output: {
|
||||||
|
workflowId,
|
||||||
|
workflowName: workflowRecord.name || '',
|
||||||
|
isDeployed: true,
|
||||||
|
deploymentVersionId: deployedState.deploymentVersionId,
|
||||||
|
deployedState: formatted,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
} catch {
|
||||||
|
return {
|
||||||
|
success: true,
|
||||||
|
output: {
|
||||||
|
workflowId,
|
||||||
|
workflowName: workflowRecord.name || '',
|
||||||
|
isDeployed: false,
|
||||||
|
message: 'Workflow has not been deployed yet.',
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
return { success: false, error: error instanceof Error ? error.message : String(error) }
|
||||||
|
}
|
||||||
|
}
|
||||||
150
apps/sim/lib/copilot/orchestrator/types.ts
Normal file
150
apps/sim/lib/copilot/orchestrator/types.ts
Normal file
@@ -0,0 +1,150 @@
|
|||||||
|
import type { CopilotProviderConfig } from '@/lib/copilot/types'
|
||||||
|
|
||||||
|
export type SSEEventType =
|
||||||
|
| 'chat_id'
|
||||||
|
| 'title_updated'
|
||||||
|
| 'content'
|
||||||
|
| 'reasoning'
|
||||||
|
| 'tool_call'
|
||||||
|
| 'tool_generating'
|
||||||
|
| 'tool_result'
|
||||||
|
| 'tool_error'
|
||||||
|
| 'subagent_start'
|
||||||
|
| 'subagent_end'
|
||||||
|
| 'structured_result'
|
||||||
|
| 'subagent_result'
|
||||||
|
| 'done'
|
||||||
|
| 'error'
|
||||||
|
| 'start'
|
||||||
|
|
||||||
|
export interface SSEEvent {
|
||||||
|
type: SSEEventType
|
||||||
|
data?: Record<string, unknown>
|
||||||
|
subagent?: string
|
||||||
|
toolCallId?: string
|
||||||
|
toolName?: string
|
||||||
|
success?: boolean
|
||||||
|
result?: unknown
|
||||||
|
/** Set on chat_id events */
|
||||||
|
chatId?: string
|
||||||
|
/** Set on title_updated events */
|
||||||
|
title?: string
|
||||||
|
/** Set on error events */
|
||||||
|
error?: string
|
||||||
|
/** Set on content/reasoning events */
|
||||||
|
content?: string
|
||||||
|
/** Set on reasoning events */
|
||||||
|
phase?: string
|
||||||
|
/** Set on tool_result events */
|
||||||
|
failedDependency?: boolean
|
||||||
|
}
|
||||||
|
|
||||||
|
export type ToolCallStatus = 'pending' | 'executing' | 'success' | 'error' | 'skipped' | 'rejected'
|
||||||
|
|
||||||
|
export interface ToolCallState {
|
||||||
|
id: string
|
||||||
|
name: string
|
||||||
|
status: ToolCallStatus
|
||||||
|
params?: Record<string, unknown>
|
||||||
|
result?: ToolCallResult
|
||||||
|
error?: string
|
||||||
|
startTime?: number
|
||||||
|
endTime?: number
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ToolCallResult<T = unknown> {
|
||||||
|
success: boolean
|
||||||
|
output?: T
|
||||||
|
error?: string
|
||||||
|
}
|
||||||
|
|
||||||
|
export type ContentBlockType = 'text' | 'thinking' | 'tool_call' | 'subagent_text'
|
||||||
|
|
||||||
|
export interface ContentBlock {
|
||||||
|
type: ContentBlockType
|
||||||
|
content?: string
|
||||||
|
toolCall?: ToolCallState
|
||||||
|
timestamp: number
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface StreamingContext {
|
||||||
|
chatId?: string
|
||||||
|
conversationId?: string
|
||||||
|
messageId: string
|
||||||
|
accumulatedContent: string
|
||||||
|
contentBlocks: ContentBlock[]
|
||||||
|
toolCalls: Map<string, ToolCallState>
|
||||||
|
currentThinkingBlock: ContentBlock | null
|
||||||
|
isInThinkingBlock: boolean
|
||||||
|
subAgentParentToolCallId?: string
|
||||||
|
subAgentContent: Record<string, string>
|
||||||
|
subAgentToolCalls: Record<string, ToolCallState[]>
|
||||||
|
pendingContent: string
|
||||||
|
streamComplete: boolean
|
||||||
|
wasAborted: boolean
|
||||||
|
errors: string[]
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface FileAttachment {
|
||||||
|
id: string
|
||||||
|
key: string
|
||||||
|
name: string
|
||||||
|
mimeType: string
|
||||||
|
size: number
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface OrchestratorRequest {
|
||||||
|
message: string
|
||||||
|
workflowId: string
|
||||||
|
userId: string
|
||||||
|
chatId?: string
|
||||||
|
mode?: 'agent' | 'ask' | 'plan'
|
||||||
|
model?: string
|
||||||
|
conversationId?: string
|
||||||
|
contexts?: Array<{ type: string; content: string }>
|
||||||
|
fileAttachments?: FileAttachment[]
|
||||||
|
commands?: string[]
|
||||||
|
provider?: CopilotProviderConfig
|
||||||
|
streamToolCalls?: boolean
|
||||||
|
version?: string
|
||||||
|
prefetch?: boolean
|
||||||
|
userName?: string
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface OrchestratorOptions {
|
||||||
|
autoExecuteTools?: boolean
|
||||||
|
timeout?: number
|
||||||
|
onEvent?: (event: SSEEvent) => void | Promise<void>
|
||||||
|
onComplete?: (result: OrchestratorResult) => void | Promise<void>
|
||||||
|
onError?: (error: Error) => void | Promise<void>
|
||||||
|
abortSignal?: AbortSignal
|
||||||
|
interactive?: boolean
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface OrchestratorResult {
|
||||||
|
success: boolean
|
||||||
|
content: string
|
||||||
|
contentBlocks: ContentBlock[]
|
||||||
|
toolCalls: ToolCallSummary[]
|
||||||
|
chatId?: string
|
||||||
|
conversationId?: string
|
||||||
|
error?: string
|
||||||
|
errors?: string[]
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ToolCallSummary {
|
||||||
|
id: string
|
||||||
|
name: string
|
||||||
|
status: ToolCallStatus
|
||||||
|
params?: Record<string, unknown>
|
||||||
|
result?: unknown
|
||||||
|
error?: string
|
||||||
|
durationMs?: number
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ExecutionContext {
|
||||||
|
userId: string
|
||||||
|
workflowId: string
|
||||||
|
workspaceId?: string
|
||||||
|
decryptedEnvVars?: Record<string, string>
|
||||||
|
}
|
||||||
@@ -44,29 +44,20 @@ export async function processContexts(
|
|||||||
ctx.kind
|
ctx.kind
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
if (ctx.kind === 'knowledge' && (ctx as any).knowledgeId) {
|
if (ctx.kind === 'knowledge' && ctx.knowledgeId) {
|
||||||
return await processKnowledgeFromDb(
|
return await processKnowledgeFromDb(ctx.knowledgeId, ctx.label ? `@${ctx.label}` : '@')
|
||||||
(ctx as any).knowledgeId,
|
|
||||||
ctx.label ? `@${ctx.label}` : '@'
|
|
||||||
)
|
|
||||||
}
|
}
|
||||||
if (ctx.kind === 'blocks' && (ctx as any).blockId) {
|
if (ctx.kind === 'blocks' && ctx.blockIds?.length > 0) {
|
||||||
return await processBlockMetadata((ctx as any).blockId, ctx.label ? `@${ctx.label}` : '@')
|
return await processBlockMetadata(ctx.blockIds[0], ctx.label ? `@${ctx.label}` : '@')
|
||||||
}
|
}
|
||||||
if (ctx.kind === 'templates' && (ctx as any).templateId) {
|
if (ctx.kind === 'templates' && ctx.templateId) {
|
||||||
return await processTemplateFromDb(
|
return await processTemplateFromDb(ctx.templateId, ctx.label ? `@${ctx.label}` : '@')
|
||||||
(ctx as any).templateId,
|
|
||||||
ctx.label ? `@${ctx.label}` : '@'
|
|
||||||
)
|
|
||||||
}
|
}
|
||||||
if (ctx.kind === 'logs' && (ctx as any).executionId) {
|
if (ctx.kind === 'logs' && ctx.executionId) {
|
||||||
return await processExecutionLogFromDb(
|
return await processExecutionLogFromDb(ctx.executionId, ctx.label ? `@${ctx.label}` : '@')
|
||||||
(ctx as any).executionId,
|
|
||||||
ctx.label ? `@${ctx.label}` : '@'
|
|
||||||
)
|
|
||||||
}
|
}
|
||||||
if (ctx.kind === 'workflow_block' && ctx.workflowId && (ctx as any).blockId) {
|
if (ctx.kind === 'workflow_block' && ctx.workflowId && ctx.blockId) {
|
||||||
return await processWorkflowBlockFromDb(ctx.workflowId, (ctx as any).blockId, ctx.label)
|
return await processWorkflowBlockFromDb(ctx.workflowId, ctx.blockId, ctx.label)
|
||||||
}
|
}
|
||||||
// Other kinds can be added here: workflow, blocks, logs, knowledge, templates, docs
|
// Other kinds can be added here: workflow, blocks, logs, knowledge, templates, docs
|
||||||
return null
|
return null
|
||||||
@@ -99,33 +90,24 @@ export async function processContextsServer(
|
|||||||
ctx.kind
|
ctx.kind
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
if (ctx.kind === 'knowledge' && (ctx as any).knowledgeId) {
|
if (ctx.kind === 'knowledge' && ctx.knowledgeId) {
|
||||||
return await processKnowledgeFromDb(
|
return await processKnowledgeFromDb(ctx.knowledgeId, ctx.label ? `@${ctx.label}` : '@')
|
||||||
(ctx as any).knowledgeId,
|
|
||||||
ctx.label ? `@${ctx.label}` : '@'
|
|
||||||
)
|
|
||||||
}
|
}
|
||||||
if (ctx.kind === 'blocks' && (ctx as any).blockId) {
|
if (ctx.kind === 'blocks' && ctx.blockIds?.length > 0) {
|
||||||
return await processBlockMetadata(
|
return await processBlockMetadata(
|
||||||
(ctx as any).blockId,
|
ctx.blockIds[0],
|
||||||
ctx.label ? `@${ctx.label}` : '@',
|
ctx.label ? `@${ctx.label}` : '@',
|
||||||
userId
|
userId
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
if (ctx.kind === 'templates' && (ctx as any).templateId) {
|
if (ctx.kind === 'templates' && ctx.templateId) {
|
||||||
return await processTemplateFromDb(
|
return await processTemplateFromDb(ctx.templateId, ctx.label ? `@${ctx.label}` : '@')
|
||||||
(ctx as any).templateId,
|
|
||||||
ctx.label ? `@${ctx.label}` : '@'
|
|
||||||
)
|
|
||||||
}
|
}
|
||||||
if (ctx.kind === 'logs' && (ctx as any).executionId) {
|
if (ctx.kind === 'logs' && ctx.executionId) {
|
||||||
return await processExecutionLogFromDb(
|
return await processExecutionLogFromDb(ctx.executionId, ctx.label ? `@${ctx.label}` : '@')
|
||||||
(ctx as any).executionId,
|
|
||||||
ctx.label ? `@${ctx.label}` : '@'
|
|
||||||
)
|
|
||||||
}
|
}
|
||||||
if (ctx.kind === 'workflow_block' && ctx.workflowId && (ctx as any).blockId) {
|
if (ctx.kind === 'workflow_block' && ctx.workflowId && ctx.blockId) {
|
||||||
return await processWorkflowBlockFromDb(ctx.workflowId, (ctx as any).blockId, ctx.label)
|
return await processWorkflowBlockFromDb(ctx.workflowId, ctx.blockId, ctx.label)
|
||||||
}
|
}
|
||||||
if (ctx.kind === 'docs') {
|
if (ctx.kind === 'docs') {
|
||||||
try {
|
try {
|
||||||
|
|||||||
193
apps/sim/lib/copilot/store-utils.ts
Normal file
193
apps/sim/lib/copilot/store-utils.ts
Normal file
@@ -0,0 +1,193 @@
|
|||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { Loader2 } from 'lucide-react'
|
||||||
|
import {
|
||||||
|
ClientToolCallState,
|
||||||
|
type ClientToolDisplay,
|
||||||
|
TOOL_DISPLAY_REGISTRY,
|
||||||
|
} from '@/lib/copilot/tools/client/tool-display-registry'
|
||||||
|
import type { CopilotStore } from '@/stores/panel/copilot/types'
|
||||||
|
|
||||||
|
const logger = createLogger('CopilotStoreUtils')
|
||||||
|
|
||||||
|
type StoreSet = (
|
||||||
|
partial: Partial<CopilotStore> | ((state: CopilotStore) => Partial<CopilotStore>)
|
||||||
|
) => void
|
||||||
|
|
||||||
|
/** Respond tools are internal to copilot subagents and should never be shown in the UI */
|
||||||
|
const HIDDEN_TOOL_SUFFIX = '_respond'
|
||||||
|
|
||||||
|
export function resolveToolDisplay(
|
||||||
|
toolName: string | undefined,
|
||||||
|
state: ClientToolCallState,
|
||||||
|
_toolCallId?: string,
|
||||||
|
params?: Record<string, any>
|
||||||
|
): ClientToolDisplay | undefined {
|
||||||
|
if (!toolName) return undefined
|
||||||
|
if (toolName.endsWith(HIDDEN_TOOL_SUFFIX)) return undefined
|
||||||
|
const entry = TOOL_DISPLAY_REGISTRY[toolName]
|
||||||
|
if (!entry) return humanizedFallback(toolName, state)
|
||||||
|
|
||||||
|
if (entry.uiConfig?.dynamicText && params) {
|
||||||
|
const dynamicText = entry.uiConfig.dynamicText(params, state)
|
||||||
|
const stateDisplay = entry.displayNames[state]
|
||||||
|
if (dynamicText && stateDisplay?.icon) {
|
||||||
|
return { text: dynamicText, icon: stateDisplay.icon }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const display = entry.displayNames[state]
|
||||||
|
if (display?.text || display?.icon) return display
|
||||||
|
|
||||||
|
const fallbackOrder = [
|
||||||
|
ClientToolCallState.generating,
|
||||||
|
ClientToolCallState.executing,
|
||||||
|
ClientToolCallState.success,
|
||||||
|
]
|
||||||
|
for (const fallbackState of fallbackOrder) {
|
||||||
|
const fallback = entry.displayNames[fallbackState]
|
||||||
|
if (fallback?.text || fallback?.icon) return fallback
|
||||||
|
}
|
||||||
|
|
||||||
|
return humanizedFallback(toolName, state)
|
||||||
|
}
|
||||||
|
|
||||||
|
export function humanizedFallback(
|
||||||
|
toolName: string,
|
||||||
|
state: ClientToolCallState
|
||||||
|
): ClientToolDisplay | undefined {
|
||||||
|
const formattedName = toolName.replace(/_/g, ' ').replace(/\b\w/g, (c) => c.toUpperCase())
|
||||||
|
const stateVerb =
|
||||||
|
state === ClientToolCallState.success
|
||||||
|
? 'Executed'
|
||||||
|
: state === ClientToolCallState.error
|
||||||
|
? 'Failed'
|
||||||
|
: state === ClientToolCallState.rejected || state === ClientToolCallState.aborted
|
||||||
|
? 'Skipped'
|
||||||
|
: 'Executing'
|
||||||
|
return { text: `${stateVerb} ${formattedName}`, icon: Loader2 }
|
||||||
|
}
|
||||||
|
|
||||||
|
export function isRejectedState(state: string): boolean {
|
||||||
|
return state === 'rejected'
|
||||||
|
}
|
||||||
|
|
||||||
|
export function isReviewState(state: string): boolean {
|
||||||
|
return state === 'review'
|
||||||
|
}
|
||||||
|
|
||||||
|
export function isBackgroundState(state: string): boolean {
|
||||||
|
return state === 'background'
|
||||||
|
}
|
||||||
|
|
||||||
|
export function isTerminalState(state: string): boolean {
|
||||||
|
return (
|
||||||
|
state === ClientToolCallState.success ||
|
||||||
|
state === ClientToolCallState.error ||
|
||||||
|
state === ClientToolCallState.rejected ||
|
||||||
|
state === ClientToolCallState.aborted ||
|
||||||
|
isReviewState(state) ||
|
||||||
|
isBackgroundState(state)
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
export function abortAllInProgressTools(set: StoreSet, get: () => CopilotStore) {
|
||||||
|
try {
|
||||||
|
const { toolCallsById, messages } = get()
|
||||||
|
const updatedMap = { ...toolCallsById }
|
||||||
|
const abortedIds = new Set<string>()
|
||||||
|
let hasUpdates = false
|
||||||
|
for (const [id, tc] of Object.entries(toolCallsById)) {
|
||||||
|
const st = tc.state
|
||||||
|
const isTerminal =
|
||||||
|
st === ClientToolCallState.success ||
|
||||||
|
st === ClientToolCallState.error ||
|
||||||
|
st === ClientToolCallState.rejected ||
|
||||||
|
st === ClientToolCallState.aborted
|
||||||
|
if (!isTerminal || isReviewState(st)) {
|
||||||
|
abortedIds.add(id)
|
||||||
|
updatedMap[id] = {
|
||||||
|
...tc,
|
||||||
|
state: ClientToolCallState.aborted,
|
||||||
|
subAgentStreaming: false,
|
||||||
|
display: resolveToolDisplay(tc.name, ClientToolCallState.aborted, id, tc.params),
|
||||||
|
}
|
||||||
|
hasUpdates = true
|
||||||
|
} else if (tc.subAgentStreaming) {
|
||||||
|
updatedMap[id] = {
|
||||||
|
...tc,
|
||||||
|
subAgentStreaming: false,
|
||||||
|
}
|
||||||
|
hasUpdates = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (abortedIds.size > 0 || hasUpdates) {
|
||||||
|
set({ toolCallsById: updatedMap })
|
||||||
|
set((s: CopilotStore) => {
|
||||||
|
const msgs = [...s.messages]
|
||||||
|
for (let mi = msgs.length - 1; mi >= 0; mi--) {
|
||||||
|
const m = msgs[mi]
|
||||||
|
if (m.role !== 'assistant' || !Array.isArray(m.contentBlocks)) continue
|
||||||
|
let changed = false
|
||||||
|
const blocks = m.contentBlocks.map((b: any) => {
|
||||||
|
if (b?.type === 'tool_call' && b.toolCall?.id && abortedIds.has(b.toolCall.id)) {
|
||||||
|
changed = true
|
||||||
|
const prev = b.toolCall
|
||||||
|
return {
|
||||||
|
...b,
|
||||||
|
toolCall: {
|
||||||
|
...prev,
|
||||||
|
state: ClientToolCallState.aborted,
|
||||||
|
display: resolveToolDisplay(
|
||||||
|
prev?.name,
|
||||||
|
ClientToolCallState.aborted,
|
||||||
|
prev?.id,
|
||||||
|
prev?.params
|
||||||
|
),
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return b
|
||||||
|
})
|
||||||
|
if (changed) {
|
||||||
|
msgs[mi] = { ...m, contentBlocks: blocks }
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return { messages: msgs }
|
||||||
|
})
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
logger.warn('Failed to abort in-progress tools', {
|
||||||
|
error: error instanceof Error ? error.message : String(error),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function cleanupActiveState(
|
||||||
|
set: (partial: Record<string, unknown>) => void,
|
||||||
|
get: () => Record<string, unknown>
|
||||||
|
): void {
|
||||||
|
abortAllInProgressTools(set as unknown as StoreSet, get as unknown as () => CopilotStore)
|
||||||
|
try {
|
||||||
|
const { useWorkflowDiffStore } = require('@/stores/workflow-diff/store') as {
|
||||||
|
useWorkflowDiffStore: {
|
||||||
|
getState: () => { clearDiff: (options?: { restoreBaseline?: boolean }) => void }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
useWorkflowDiffStore.getState().clearDiff({ restoreBaseline: false })
|
||||||
|
} catch (error) {
|
||||||
|
logger.warn('Failed to clear diff during cleanup', {
|
||||||
|
error: error instanceof Error ? error.message : String(error),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function stripTodoTags(text: string): string {
|
||||||
|
if (!text) return text
|
||||||
|
return text
|
||||||
|
.replace(/<marktodo>[\s\S]*?<\/marktodo>/g, '')
|
||||||
|
.replace(/<checkofftodo>[\s\S]*?<\/checkofftodo>/g, '')
|
||||||
|
.replace(/<design_workflow>[\s\S]*?<\/design_workflow>/g, '')
|
||||||
|
.replace(/[ \t]+\n/g, '\n')
|
||||||
|
.replace(/\n{2,}/g, '\n')
|
||||||
|
}
|
||||||
@@ -1,120 +0,0 @@
|
|||||||
/**
|
|
||||||
* Base class for subagent tools.
|
|
||||||
*
|
|
||||||
* Subagent tools spawn a server-side subagent that does the actual work.
|
|
||||||
* The tool auto-executes and the subagent's output is streamed back
|
|
||||||
* as nested content under the tool call.
|
|
||||||
*
|
|
||||||
* Examples: edit, plan, debug, evaluate, research, etc.
|
|
||||||
*/
|
|
||||||
import type { LucideIcon } from 'lucide-react'
|
|
||||||
import { BaseClientTool, type BaseClientToolMetadata, ClientToolCallState } from './base-tool'
|
|
||||||
import type { SubagentConfig, ToolUIConfig } from './ui-config'
|
|
||||||
import { registerToolUIConfig } from './ui-config'
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Configuration for creating a subagent tool
|
|
||||||
*/
|
|
||||||
export interface SubagentToolConfig {
|
|
||||||
/** Unique tool ID */
|
|
||||||
id: string
|
|
||||||
/** Display names per state */
|
|
||||||
displayNames: {
|
|
||||||
streaming: { text: string; icon: LucideIcon }
|
|
||||||
success: { text: string; icon: LucideIcon }
|
|
||||||
error: { text: string; icon: LucideIcon }
|
|
||||||
}
|
|
||||||
/** Subagent UI configuration */
|
|
||||||
subagent: SubagentConfig
|
|
||||||
/**
|
|
||||||
* Optional: Whether this is a "special" tool (gets gradient styling).
|
|
||||||
* Default: false
|
|
||||||
*/
|
|
||||||
isSpecial?: boolean
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Create metadata for a subagent tool from config
|
|
||||||
*/
|
|
||||||
function createSubagentMetadata(config: SubagentToolConfig): BaseClientToolMetadata {
|
|
||||||
const { displayNames, subagent, isSpecial } = config
|
|
||||||
const { streaming, success, error } = displayNames
|
|
||||||
|
|
||||||
const uiConfig: ToolUIConfig = {
|
|
||||||
isSpecial: isSpecial ?? false,
|
|
||||||
subagent,
|
|
||||||
}
|
|
||||||
|
|
||||||
return {
|
|
||||||
displayNames: {
|
|
||||||
[ClientToolCallState.generating]: streaming,
|
|
||||||
[ClientToolCallState.pending]: streaming,
|
|
||||||
[ClientToolCallState.executing]: streaming,
|
|
||||||
[ClientToolCallState.success]: success,
|
|
||||||
[ClientToolCallState.error]: error,
|
|
||||||
[ClientToolCallState.rejected]: {
|
|
||||||
text: `${config.id.charAt(0).toUpperCase() + config.id.slice(1)} skipped`,
|
|
||||||
icon: error.icon,
|
|
||||||
},
|
|
||||||
[ClientToolCallState.aborted]: {
|
|
||||||
text: `${config.id.charAt(0).toUpperCase() + config.id.slice(1)} aborted`,
|
|
||||||
icon: error.icon,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
uiConfig,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Base class for subagent tools.
|
|
||||||
* Extends BaseClientTool with subagent-specific behavior.
|
|
||||||
*/
|
|
||||||
export abstract class BaseSubagentTool extends BaseClientTool {
|
|
||||||
/**
|
|
||||||
* Subagent configuration.
|
|
||||||
* Override in subclasses to customize behavior.
|
|
||||||
*/
|
|
||||||
static readonly subagentConfig: SubagentToolConfig
|
|
||||||
|
|
||||||
constructor(toolCallId: string, config: SubagentToolConfig) {
|
|
||||||
super(toolCallId, config.id, createSubagentMetadata(config))
|
|
||||||
// Register UI config for this tool
|
|
||||||
registerToolUIConfig(config.id, this.metadata.uiConfig!)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Execute the subagent tool.
|
|
||||||
* Immediately transitions to executing state - the actual work
|
|
||||||
* is done server-side by the subagent.
|
|
||||||
*/
|
|
||||||
async execute(_args?: Record<string, any>): Promise<void> {
|
|
||||||
this.setState(ClientToolCallState.executing)
|
|
||||||
// The tool result will come from the server via tool_result event
|
|
||||||
// when the subagent completes its work
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Factory function to create a subagent tool class.
|
|
||||||
* Use this for simple subagent tools that don't need custom behavior.
|
|
||||||
*/
|
|
||||||
export function createSubagentToolClass(config: SubagentToolConfig) {
|
|
||||||
// Register UI config at class creation time
|
|
||||||
const uiConfig: ToolUIConfig = {
|
|
||||||
isSpecial: config.isSpecial ?? false,
|
|
||||||
subagent: config.subagent,
|
|
||||||
}
|
|
||||||
registerToolUIConfig(config.id, uiConfig)
|
|
||||||
|
|
||||||
return class extends BaseClientTool {
|
|
||||||
static readonly id = config.id
|
|
||||||
|
|
||||||
constructor(toolCallId: string) {
|
|
||||||
super(toolCallId, config.id, createSubagentMetadata(config))
|
|
||||||
}
|
|
||||||
|
|
||||||
async execute(_args?: Record<string, any>): Promise<void> {
|
|
||||||
this.setState(ClientToolCallState.executing)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,15 +1,5 @@
|
|||||||
// Lazy require in setState to avoid circular init issues
|
|
||||||
import { createLogger } from '@sim/logger'
|
|
||||||
import type { LucideIcon } from 'lucide-react'
|
import type { LucideIcon } from 'lucide-react'
|
||||||
import type { ToolUIConfig } from './ui-config'
|
|
||||||
|
|
||||||
const baseToolLogger = createLogger('BaseClientTool')
|
|
||||||
|
|
||||||
const DEFAULT_TOOL_TIMEOUT_MS = 5 * 60 * 1000
|
|
||||||
|
|
||||||
export const WORKFLOW_EXECUTION_TIMEOUT_MS = 10 * 60 * 1000
|
|
||||||
|
|
||||||
// Client tool call states used by the new runtime
|
|
||||||
export enum ClientToolCallState {
|
export enum ClientToolCallState {
|
||||||
generating = 'generating',
|
generating = 'generating',
|
||||||
pending = 'pending',
|
pending = 'pending',
|
||||||
@@ -22,252 +12,32 @@ export enum ClientToolCallState {
|
|||||||
background = 'background',
|
background = 'background',
|
||||||
}
|
}
|
||||||
|
|
||||||
// Display configuration for a given state
|
|
||||||
export interface ClientToolDisplay {
|
export interface ClientToolDisplay {
|
||||||
text: string
|
text: string
|
||||||
icon: LucideIcon
|
icon: LucideIcon
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
export interface BaseClientToolMetadata {
|
||||||
* Function to generate dynamic display text based on tool parameters and state
|
displayNames: Partial<Record<ClientToolCallState, ClientToolDisplay>>
|
||||||
* @param params - The tool call parameters
|
uiConfig?: Record<string, unknown>
|
||||||
* @param state - The current tool call state
|
getDynamicText?: (
|
||||||
* @returns The dynamic text to display, or undefined to use the default text
|
params: Record<string, unknown>,
|
||||||
*/
|
state: ClientToolCallState
|
||||||
|
) => string | undefined
|
||||||
|
}
|
||||||
|
|
||||||
export type DynamicTextFormatter = (
|
export type DynamicTextFormatter = (
|
||||||
params: Record<string, any>,
|
params: Record<string, unknown>,
|
||||||
state: ClientToolCallState
|
state: ClientToolCallState
|
||||||
) => string | undefined
|
) => string | undefined
|
||||||
|
|
||||||
export interface BaseClientToolMetadata {
|
export const WORKFLOW_EXECUTION_TIMEOUT_MS = 10 * 60 * 1000
|
||||||
displayNames: Partial<Record<ClientToolCallState, ClientToolDisplay>>
|
|
||||||
interrupt?: {
|
/** Event detail for OAuth connect events dispatched by the copilot. */
|
||||||
accept: ClientToolDisplay
|
export interface OAuthConnectEventDetail {
|
||||||
reject: ClientToolDisplay
|
providerName: string
|
||||||
}
|
serviceId: string
|
||||||
/**
|
providerId: string
|
||||||
* Optional function to generate dynamic display text based on parameters
|
requiredScopes: string[]
|
||||||
* If provided, this will override the default text in displayNames
|
newScopes?: string[]
|
||||||
*/
|
|
||||||
getDynamicText?: DynamicTextFormatter
|
|
||||||
/**
|
|
||||||
* UI configuration for how this tool renders in the tool-call component.
|
|
||||||
* This replaces hardcoded logic in tool-call.tsx with declarative config.
|
|
||||||
*/
|
|
||||||
uiConfig?: ToolUIConfig
|
|
||||||
}
|
|
||||||
|
|
||||||
export class BaseClientTool {
|
|
||||||
readonly toolCallId: string
|
|
||||||
readonly name: string
|
|
||||||
protected state: ClientToolCallState
|
|
||||||
protected metadata: BaseClientToolMetadata
|
|
||||||
protected isMarkedComplete = false
|
|
||||||
protected timeoutMs: number = DEFAULT_TOOL_TIMEOUT_MS
|
|
||||||
|
|
||||||
constructor(toolCallId: string, name: string, metadata: BaseClientToolMetadata) {
|
|
||||||
this.toolCallId = toolCallId
|
|
||||||
this.name = name
|
|
||||||
this.metadata = metadata
|
|
||||||
this.state = ClientToolCallState.generating
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Set a custom timeout for this tool (in milliseconds)
|
|
||||||
*/
|
|
||||||
setTimeoutMs(ms: number): void {
|
|
||||||
this.timeoutMs = ms
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Check if this tool has been marked complete
|
|
||||||
*/
|
|
||||||
hasBeenMarkedComplete(): boolean {
|
|
||||||
return this.isMarkedComplete
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Ensure the tool is marked complete. If not already marked, marks it with error.
|
|
||||||
* This should be called in finally blocks to prevent leaked tool calls.
|
|
||||||
*/
|
|
||||||
async ensureMarkedComplete(
|
|
||||||
fallbackMessage = 'Tool execution did not complete properly'
|
|
||||||
): Promise<void> {
|
|
||||||
if (!this.isMarkedComplete) {
|
|
||||||
baseToolLogger.warn('Tool was not marked complete, marking with error', {
|
|
||||||
toolCallId: this.toolCallId,
|
|
||||||
toolName: this.name,
|
|
||||||
state: this.state,
|
|
||||||
})
|
|
||||||
await this.markToolComplete(500, fallbackMessage)
|
|
||||||
this.setState(ClientToolCallState.error)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Execute with timeout protection. Wraps the execution in a timeout and ensures
|
|
||||||
* markToolComplete is always called.
|
|
||||||
*/
|
|
||||||
async executeWithTimeout(executeFn: () => Promise<void>, timeoutMs?: number): Promise<void> {
|
|
||||||
const timeout = timeoutMs ?? this.timeoutMs
|
|
||||||
let timeoutId: NodeJS.Timeout | null = null
|
|
||||||
|
|
||||||
try {
|
|
||||||
await Promise.race([
|
|
||||||
executeFn(),
|
|
||||||
new Promise<never>((_, reject) => {
|
|
||||||
timeoutId = setTimeout(() => {
|
|
||||||
reject(new Error(`Tool execution timed out after ${timeout / 1000} seconds`))
|
|
||||||
}, timeout)
|
|
||||||
}),
|
|
||||||
])
|
|
||||||
} catch (error) {
|
|
||||||
const message = error instanceof Error ? error.message : String(error)
|
|
||||||
baseToolLogger.error('Tool execution failed or timed out', {
|
|
||||||
toolCallId: this.toolCallId,
|
|
||||||
toolName: this.name,
|
|
||||||
error: message,
|
|
||||||
})
|
|
||||||
// Only mark complete if not already marked
|
|
||||||
if (!this.isMarkedComplete) {
|
|
||||||
await this.markToolComplete(500, message)
|
|
||||||
this.setState(ClientToolCallState.error)
|
|
||||||
}
|
|
||||||
} finally {
|
|
||||||
if (timeoutId) clearTimeout(timeoutId)
|
|
||||||
// Ensure tool is always marked complete
|
|
||||||
await this.ensureMarkedComplete()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Intentionally left empty - specific tools can override
|
|
||||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
|
||||||
async execute(_args?: Record<string, any>): Promise<void> {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Mark a tool as complete on the server (proxies to server-side route).
|
|
||||||
* Once called, the tool is considered complete and won't be marked again.
|
|
||||||
*/
|
|
||||||
async markToolComplete(status: number, message?: any, data?: any): Promise<boolean> {
|
|
||||||
// Prevent double-marking
|
|
||||||
if (this.isMarkedComplete) {
|
|
||||||
baseToolLogger.warn('markToolComplete called but tool already marked complete', {
|
|
||||||
toolCallId: this.toolCallId,
|
|
||||||
toolName: this.name,
|
|
||||||
existingState: this.state,
|
|
||||||
attemptedStatus: status,
|
|
||||||
})
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
|
|
||||||
this.isMarkedComplete = true
|
|
||||||
|
|
||||||
try {
|
|
||||||
baseToolLogger.info('markToolComplete called', {
|
|
||||||
toolCallId: this.toolCallId,
|
|
||||||
toolName: this.name,
|
|
||||||
state: this.state,
|
|
||||||
status,
|
|
||||||
hasMessage: message !== undefined,
|
|
||||||
hasData: data !== undefined,
|
|
||||||
})
|
|
||||||
} catch {}
|
|
||||||
|
|
||||||
try {
|
|
||||||
const res = await fetch('/api/copilot/tools/mark-complete', {
|
|
||||||
method: 'POST',
|
|
||||||
headers: { 'Content-Type': 'application/json' },
|
|
||||||
body: JSON.stringify({
|
|
||||||
id: this.toolCallId,
|
|
||||||
name: this.name,
|
|
||||||
status,
|
|
||||||
message,
|
|
||||||
data,
|
|
||||||
}),
|
|
||||||
})
|
|
||||||
|
|
||||||
if (!res.ok) {
|
|
||||||
// Try to surface server error
|
|
||||||
let errorText = `Failed to mark tool complete (status ${res.status})`
|
|
||||||
try {
|
|
||||||
const { error } = await res.json()
|
|
||||||
if (error) errorText = String(error)
|
|
||||||
} catch {}
|
|
||||||
throw new Error(errorText)
|
|
||||||
}
|
|
||||||
|
|
||||||
const json = (await res.json()) as { success?: boolean }
|
|
||||||
return json?.success === true
|
|
||||||
} catch (e) {
|
|
||||||
// Default failure path - but tool is still marked complete locally
|
|
||||||
baseToolLogger.error('Failed to mark tool complete on server', {
|
|
||||||
toolCallId: this.toolCallId,
|
|
||||||
error: e instanceof Error ? e.message : String(e),
|
|
||||||
})
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Accept (continue) for interrupt flows: move pending -> executing
|
|
||||||
async handleAccept(): Promise<void> {
|
|
||||||
this.setState(ClientToolCallState.executing)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Reject (skip) for interrupt flows: mark complete with a standard skip message
|
|
||||||
async handleReject(): Promise<void> {
|
|
||||||
await this.markToolComplete(200, 'Tool execution was skipped by the user')
|
|
||||||
this.setState(ClientToolCallState.rejected)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Return the display configuration for the current state
|
|
||||||
getDisplayState(): ClientToolDisplay | undefined {
|
|
||||||
return this.metadata.displayNames[this.state]
|
|
||||||
}
|
|
||||||
|
|
||||||
// Return interrupt display config (labels/icons) if defined
|
|
||||||
getInterruptDisplays(): BaseClientToolMetadata['interrupt'] | undefined {
|
|
||||||
return this.metadata.interrupt
|
|
||||||
}
|
|
||||||
|
|
||||||
// Transition to a new state (also sync to Copilot store)
|
|
||||||
setState(next: ClientToolCallState, options?: { result?: any }): void {
|
|
||||||
const prev = this.state
|
|
||||||
this.state = next
|
|
||||||
|
|
||||||
// Notify store via manager to avoid import cycles
|
|
||||||
try {
|
|
||||||
const { syncToolState } = require('@/lib/copilot/tools/client/manager')
|
|
||||||
syncToolState(this.toolCallId, next, options)
|
|
||||||
} catch {}
|
|
||||||
|
|
||||||
// Log transition after syncing
|
|
||||||
try {
|
|
||||||
baseToolLogger.info('setState transition', {
|
|
||||||
toolCallId: this.toolCallId,
|
|
||||||
toolName: this.name,
|
|
||||||
prev,
|
|
||||||
next,
|
|
||||||
hasResult: options?.result !== undefined,
|
|
||||||
})
|
|
||||||
} catch {}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Expose current state
|
|
||||||
getState(): ClientToolCallState {
|
|
||||||
return this.state
|
|
||||||
}
|
|
||||||
|
|
||||||
hasInterrupt(): boolean {
|
|
||||||
return !!this.metadata.interrupt
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get UI configuration for this tool.
|
|
||||||
* Used by tool-call component to determine rendering behavior.
|
|
||||||
*/
|
|
||||||
getUIConfig(): ToolUIConfig | undefined {
|
|
||||||
return this.metadata.uiConfig
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,100 +0,0 @@
|
|||||||
import { createLogger } from '@sim/logger'
|
|
||||||
import { FileCode, Loader2, MinusCircle, XCircle } from 'lucide-react'
|
|
||||||
import {
|
|
||||||
BaseClientTool,
|
|
||||||
type BaseClientToolMetadata,
|
|
||||||
ClientToolCallState,
|
|
||||||
} from '@/lib/copilot/tools/client/base-tool'
|
|
||||||
import {
|
|
||||||
ExecuteResponseSuccessSchema,
|
|
||||||
GetBlockConfigInput,
|
|
||||||
GetBlockConfigResult,
|
|
||||||
} from '@/lib/copilot/tools/shared/schemas'
|
|
||||||
import { getLatestBlock } from '@/blocks/registry'
|
|
||||||
|
|
||||||
interface GetBlockConfigArgs {
|
|
||||||
blockType: string
|
|
||||||
operation?: string
|
|
||||||
trigger?: boolean
|
|
||||||
}
|
|
||||||
|
|
||||||
export class GetBlockConfigClientTool extends BaseClientTool {
|
|
||||||
static readonly id = 'get_block_config'
|
|
||||||
|
|
||||||
constructor(toolCallId: string) {
|
|
||||||
super(toolCallId, GetBlockConfigClientTool.id, GetBlockConfigClientTool.metadata)
|
|
||||||
}
|
|
||||||
|
|
||||||
static readonly metadata: BaseClientToolMetadata = {
|
|
||||||
displayNames: {
|
|
||||||
[ClientToolCallState.generating]: { text: 'Getting block config', icon: Loader2 },
|
|
||||||
[ClientToolCallState.pending]: { text: 'Getting block config', icon: Loader2 },
|
|
||||||
[ClientToolCallState.executing]: { text: 'Getting block config', icon: Loader2 },
|
|
||||||
[ClientToolCallState.success]: { text: 'Retrieved block config', icon: FileCode },
|
|
||||||
[ClientToolCallState.error]: { text: 'Failed to get block config', icon: XCircle },
|
|
||||||
[ClientToolCallState.aborted]: { text: 'Aborted getting block config', icon: XCircle },
|
|
||||||
[ClientToolCallState.rejected]: {
|
|
||||||
text: 'Skipped getting block config',
|
|
||||||
icon: MinusCircle,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
getDynamicText: (params, state) => {
|
|
||||||
if (params?.blockType && typeof params.blockType === 'string') {
|
|
||||||
const blockConfig = getLatestBlock(params.blockType)
|
|
||||||
const blockName = (blockConfig?.name ?? params.blockType.replace(/_/g, ' ')).toLowerCase()
|
|
||||||
const opSuffix = params.operation ? ` (${params.operation})` : ''
|
|
||||||
|
|
||||||
switch (state) {
|
|
||||||
case ClientToolCallState.success:
|
|
||||||
return `Retrieved ${blockName}${opSuffix} config`
|
|
||||||
case ClientToolCallState.executing:
|
|
||||||
case ClientToolCallState.generating:
|
|
||||||
case ClientToolCallState.pending:
|
|
||||||
return `Retrieving ${blockName}${opSuffix} config`
|
|
||||||
case ClientToolCallState.error:
|
|
||||||
return `Failed to retrieve ${blockName}${opSuffix} config`
|
|
||||||
case ClientToolCallState.aborted:
|
|
||||||
return `Aborted retrieving ${blockName}${opSuffix} config`
|
|
||||||
case ClientToolCallState.rejected:
|
|
||||||
return `Skipped retrieving ${blockName}${opSuffix} config`
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return undefined
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
async execute(args?: GetBlockConfigArgs): Promise<void> {
|
|
||||||
const logger = createLogger('GetBlockConfigClientTool')
|
|
||||||
try {
|
|
||||||
this.setState(ClientToolCallState.executing)
|
|
||||||
|
|
||||||
const { blockType, operation, trigger } = GetBlockConfigInput.parse(args || {})
|
|
||||||
|
|
||||||
const res = await fetch('/api/copilot/execute-copilot-server-tool', {
|
|
||||||
method: 'POST',
|
|
||||||
headers: { 'Content-Type': 'application/json' },
|
|
||||||
body: JSON.stringify({
|
|
||||||
toolName: 'get_block_config',
|
|
||||||
payload: { blockType, operation, trigger },
|
|
||||||
}),
|
|
||||||
})
|
|
||||||
if (!res.ok) {
|
|
||||||
const errorText = await res.text().catch(() => '')
|
|
||||||
throw new Error(errorText || `Server error (${res.status})`)
|
|
||||||
}
|
|
||||||
const json = await res.json()
|
|
||||||
const parsed = ExecuteResponseSuccessSchema.parse(json)
|
|
||||||
const result = GetBlockConfigResult.parse(parsed.result)
|
|
||||||
|
|
||||||
const inputCount = Object.keys(result.inputs).length
|
|
||||||
const outputCount = Object.keys(result.outputs).length
|
|
||||||
await this.markToolComplete(200, { inputs: inputCount, outputs: outputCount }, result)
|
|
||||||
this.setState(ClientToolCallState.success)
|
|
||||||
} catch (error: any) {
|
|
||||||
const message = error instanceof Error ? error.message : String(error)
|
|
||||||
logger.error('Execute failed', { message })
|
|
||||||
await this.markToolComplete(500, message)
|
|
||||||
this.setState(ClientToolCallState.error)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,110 +0,0 @@
|
|||||||
import { createLogger } from '@sim/logger'
|
|
||||||
import { ListFilter, Loader2, MinusCircle, XCircle } from 'lucide-react'
|
|
||||||
import {
|
|
||||||
BaseClientTool,
|
|
||||||
type BaseClientToolMetadata,
|
|
||||||
ClientToolCallState,
|
|
||||||
} from '@/lib/copilot/tools/client/base-tool'
|
|
||||||
import {
|
|
||||||
ExecuteResponseSuccessSchema,
|
|
||||||
GetBlockOptionsInput,
|
|
||||||
GetBlockOptionsResult,
|
|
||||||
} from '@/lib/copilot/tools/shared/schemas'
|
|
||||||
import { getLatestBlock } from '@/blocks/registry'
|
|
||||||
|
|
||||||
interface GetBlockOptionsArgs {
|
|
||||||
blockId: string
|
|
||||||
}
|
|
||||||
|
|
||||||
export class GetBlockOptionsClientTool extends BaseClientTool {
|
|
||||||
static readonly id = 'get_block_options'
|
|
||||||
|
|
||||||
constructor(toolCallId: string) {
|
|
||||||
super(toolCallId, GetBlockOptionsClientTool.id, GetBlockOptionsClientTool.metadata)
|
|
||||||
}
|
|
||||||
|
|
||||||
static readonly metadata: BaseClientToolMetadata = {
|
|
||||||
displayNames: {
|
|
||||||
[ClientToolCallState.generating]: { text: 'Getting block operations', icon: Loader2 },
|
|
||||||
[ClientToolCallState.pending]: { text: 'Getting block operations', icon: Loader2 },
|
|
||||||
[ClientToolCallState.executing]: { text: 'Getting block operations', icon: Loader2 },
|
|
||||||
[ClientToolCallState.success]: { text: 'Retrieved block operations', icon: ListFilter },
|
|
||||||
[ClientToolCallState.error]: { text: 'Failed to get block operations', icon: XCircle },
|
|
||||||
[ClientToolCallState.aborted]: { text: 'Aborted getting block operations', icon: XCircle },
|
|
||||||
[ClientToolCallState.rejected]: {
|
|
||||||
text: 'Skipped getting block operations',
|
|
||||||
icon: MinusCircle,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
getDynamicText: (params, state) => {
|
|
||||||
const blockId =
|
|
||||||
(params as any)?.blockId ||
|
|
||||||
(params as any)?.blockType ||
|
|
||||||
(params as any)?.block_id ||
|
|
||||||
(params as any)?.block_type
|
|
||||||
if (typeof blockId === 'string') {
|
|
||||||
const blockConfig = getLatestBlock(blockId)
|
|
||||||
const blockName = (blockConfig?.name ?? blockId.replace(/_/g, ' ')).toLowerCase()
|
|
||||||
|
|
||||||
switch (state) {
|
|
||||||
case ClientToolCallState.success:
|
|
||||||
return `Retrieved ${blockName} operations`
|
|
||||||
case ClientToolCallState.executing:
|
|
||||||
case ClientToolCallState.generating:
|
|
||||||
case ClientToolCallState.pending:
|
|
||||||
return `Retrieving ${blockName} operations`
|
|
||||||
case ClientToolCallState.error:
|
|
||||||
return `Failed to retrieve ${blockName} operations`
|
|
||||||
case ClientToolCallState.aborted:
|
|
||||||
return `Aborted retrieving ${blockName} operations`
|
|
||||||
case ClientToolCallState.rejected:
|
|
||||||
return `Skipped retrieving ${blockName} operations`
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return undefined
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
async execute(args?: GetBlockOptionsArgs): Promise<void> {
|
|
||||||
const logger = createLogger('GetBlockOptionsClientTool')
|
|
||||||
try {
|
|
||||||
this.setState(ClientToolCallState.executing)
|
|
||||||
|
|
||||||
// Handle both camelCase and snake_case parameter names, plus blockType as an alias
|
|
||||||
const normalizedArgs = args
|
|
||||||
? {
|
|
||||||
blockId:
|
|
||||||
args.blockId ||
|
|
||||||
(args as any).block_id ||
|
|
||||||
(args as any).blockType ||
|
|
||||||
(args as any).block_type,
|
|
||||||
}
|
|
||||||
: {}
|
|
||||||
|
|
||||||
logger.info('execute called', { originalArgs: args, normalizedArgs })
|
|
||||||
|
|
||||||
const { blockId } = GetBlockOptionsInput.parse(normalizedArgs)
|
|
||||||
|
|
||||||
const res = await fetch('/api/copilot/execute-copilot-server-tool', {
|
|
||||||
method: 'POST',
|
|
||||||
headers: { 'Content-Type': 'application/json' },
|
|
||||||
body: JSON.stringify({ toolName: 'get_block_options', payload: { blockId } }),
|
|
||||||
})
|
|
||||||
if (!res.ok) {
|
|
||||||
const errorText = await res.text().catch(() => '')
|
|
||||||
throw new Error(errorText || `Server error (${res.status})`)
|
|
||||||
}
|
|
||||||
const json = await res.json()
|
|
||||||
const parsed = ExecuteResponseSuccessSchema.parse(json)
|
|
||||||
const result = GetBlockOptionsResult.parse(parsed.result)
|
|
||||||
|
|
||||||
await this.markToolComplete(200, { operations: result.operations.length }, result)
|
|
||||||
this.setState(ClientToolCallState.success)
|
|
||||||
} catch (error: any) {
|
|
||||||
const message = error instanceof Error ? error.message : String(error)
|
|
||||||
logger.error('Execute failed', { message })
|
|
||||||
await this.markToolComplete(500, message)
|
|
||||||
this.setState(ClientToolCallState.error)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,59 +0,0 @@
|
|||||||
import { createLogger } from '@sim/logger'
|
|
||||||
import { Blocks, Loader2, MinusCircle, XCircle } from 'lucide-react'
|
|
||||||
import {
|
|
||||||
BaseClientTool,
|
|
||||||
type BaseClientToolMetadata,
|
|
||||||
ClientToolCallState,
|
|
||||||
} from '@/lib/copilot/tools/client/base-tool'
|
|
||||||
import {
|
|
||||||
ExecuteResponseSuccessSchema,
|
|
||||||
GetBlocksAndToolsResult,
|
|
||||||
} from '@/lib/copilot/tools/shared/schemas'
|
|
||||||
|
|
||||||
export class GetBlocksAndToolsClientTool extends BaseClientTool {
|
|
||||||
static readonly id = 'get_blocks_and_tools'
|
|
||||||
|
|
||||||
constructor(toolCallId: string) {
|
|
||||||
super(toolCallId, GetBlocksAndToolsClientTool.id, GetBlocksAndToolsClientTool.metadata)
|
|
||||||
}
|
|
||||||
|
|
||||||
static readonly metadata: BaseClientToolMetadata = {
|
|
||||||
displayNames: {
|
|
||||||
[ClientToolCallState.generating]: { text: 'Exploring available options', icon: Loader2 },
|
|
||||||
[ClientToolCallState.pending]: { text: 'Exploring available options', icon: Loader2 },
|
|
||||||
[ClientToolCallState.executing]: { text: 'Exploring available options', icon: Loader2 },
|
|
||||||
[ClientToolCallState.success]: { text: 'Explored available options', icon: Blocks },
|
|
||||||
[ClientToolCallState.error]: { text: 'Failed to explore options', icon: XCircle },
|
|
||||||
[ClientToolCallState.aborted]: { text: 'Aborted exploring options', icon: MinusCircle },
|
|
||||||
[ClientToolCallState.rejected]: { text: 'Skipped exploring options', icon: MinusCircle },
|
|
||||||
},
|
|
||||||
interrupt: undefined,
|
|
||||||
}
|
|
||||||
|
|
||||||
async execute(): Promise<void> {
|
|
||||||
const logger = createLogger('GetBlocksAndToolsClientTool')
|
|
||||||
try {
|
|
||||||
this.setState(ClientToolCallState.executing)
|
|
||||||
|
|
||||||
const res = await fetch('/api/copilot/execute-copilot-server-tool', {
|
|
||||||
method: 'POST',
|
|
||||||
headers: { 'Content-Type': 'application/json' },
|
|
||||||
body: JSON.stringify({ toolName: 'get_blocks_and_tools', payload: {} }),
|
|
||||||
})
|
|
||||||
if (!res.ok) {
|
|
||||||
const errorText = await res.text().catch(() => '')
|
|
||||||
throw new Error(errorText || `Server error (${res.status})`)
|
|
||||||
}
|
|
||||||
const json = await res.json()
|
|
||||||
const parsed = ExecuteResponseSuccessSchema.parse(json)
|
|
||||||
const result = GetBlocksAndToolsResult.parse(parsed.result)
|
|
||||||
|
|
||||||
await this.markToolComplete(200, 'Successfully retrieved blocks and tools', result)
|
|
||||||
this.setState(ClientToolCallState.success)
|
|
||||||
} catch (error: any) {
|
|
||||||
const message = error instanceof Error ? error.message : String(error)
|
|
||||||
await this.markToolComplete(500, message)
|
|
||||||
this.setState(ClientToolCallState.error)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,95 +0,0 @@
|
|||||||
import { createLogger } from '@sim/logger'
|
|
||||||
import { ListFilter, Loader2, MinusCircle, XCircle } from 'lucide-react'
|
|
||||||
import {
|
|
||||||
BaseClientTool,
|
|
||||||
type BaseClientToolMetadata,
|
|
||||||
ClientToolCallState,
|
|
||||||
} from '@/lib/copilot/tools/client/base-tool'
|
|
||||||
import {
|
|
||||||
ExecuteResponseSuccessSchema,
|
|
||||||
GetBlocksMetadataInput,
|
|
||||||
GetBlocksMetadataResult,
|
|
||||||
} from '@/lib/copilot/tools/shared/schemas'
|
|
||||||
|
|
||||||
interface GetBlocksMetadataArgs {
|
|
||||||
blockIds: string[]
|
|
||||||
}
|
|
||||||
|
|
||||||
export class GetBlocksMetadataClientTool extends BaseClientTool {
|
|
||||||
static readonly id = 'get_blocks_metadata'
|
|
||||||
|
|
||||||
constructor(toolCallId: string) {
|
|
||||||
super(toolCallId, GetBlocksMetadataClientTool.id, GetBlocksMetadataClientTool.metadata)
|
|
||||||
}
|
|
||||||
|
|
||||||
static readonly metadata: BaseClientToolMetadata = {
|
|
||||||
displayNames: {
|
|
||||||
[ClientToolCallState.generating]: { text: 'Searching block choices', icon: Loader2 },
|
|
||||||
[ClientToolCallState.pending]: { text: 'Searching block choices', icon: Loader2 },
|
|
||||||
[ClientToolCallState.executing]: { text: 'Searching block choices', icon: Loader2 },
|
|
||||||
[ClientToolCallState.success]: { text: 'Searched block choices', icon: ListFilter },
|
|
||||||
[ClientToolCallState.error]: { text: 'Failed to search block choices', icon: XCircle },
|
|
||||||
[ClientToolCallState.aborted]: { text: 'Aborted searching block choices', icon: XCircle },
|
|
||||||
[ClientToolCallState.rejected]: {
|
|
||||||
text: 'Skipped searching block choices',
|
|
||||||
icon: MinusCircle,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
getDynamicText: (params, state) => {
|
|
||||||
if (params?.blockIds && Array.isArray(params.blockIds) && params.blockIds.length > 0) {
|
|
||||||
const blockList = params.blockIds
|
|
||||||
.slice(0, 3)
|
|
||||||
.map((blockId) => blockId.replace(/_/g, ' '))
|
|
||||||
.join(', ')
|
|
||||||
const more = params.blockIds.length > 3 ? '...' : ''
|
|
||||||
const blocks = `${blockList}${more}`
|
|
||||||
|
|
||||||
switch (state) {
|
|
||||||
case ClientToolCallState.success:
|
|
||||||
return `Searched ${blocks}`
|
|
||||||
case ClientToolCallState.executing:
|
|
||||||
case ClientToolCallState.generating:
|
|
||||||
case ClientToolCallState.pending:
|
|
||||||
return `Searching ${blocks}`
|
|
||||||
case ClientToolCallState.error:
|
|
||||||
return `Failed to search ${blocks}`
|
|
||||||
case ClientToolCallState.aborted:
|
|
||||||
return `Aborted searching ${blocks}`
|
|
||||||
case ClientToolCallState.rejected:
|
|
||||||
return `Skipped searching ${blocks}`
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return undefined
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
async execute(args?: GetBlocksMetadataArgs): Promise<void> {
|
|
||||||
const logger = createLogger('GetBlocksMetadataClientTool')
|
|
||||||
try {
|
|
||||||
this.setState(ClientToolCallState.executing)
|
|
||||||
|
|
||||||
const { blockIds } = GetBlocksMetadataInput.parse(args || {})
|
|
||||||
|
|
||||||
const res = await fetch('/api/copilot/execute-copilot-server-tool', {
|
|
||||||
method: 'POST',
|
|
||||||
headers: { 'Content-Type': 'application/json' },
|
|
||||||
body: JSON.stringify({ toolName: 'get_blocks_metadata', payload: { blockIds } }),
|
|
||||||
})
|
|
||||||
if (!res.ok) {
|
|
||||||
const errorText = await res.text().catch(() => '')
|
|
||||||
throw new Error(errorText || `Server error (${res.status})`)
|
|
||||||
}
|
|
||||||
const json = await res.json()
|
|
||||||
const parsed = ExecuteResponseSuccessSchema.parse(json)
|
|
||||||
const result = GetBlocksMetadataResult.parse(parsed.result)
|
|
||||||
|
|
||||||
await this.markToolComplete(200, { retrieved: Object.keys(result.metadata).length }, result)
|
|
||||||
this.setState(ClientToolCallState.success)
|
|
||||||
} catch (error: any) {
|
|
||||||
const message = error instanceof Error ? error.message : String(error)
|
|
||||||
logger.error('Execute failed', { message })
|
|
||||||
await this.markToolComplete(500, message)
|
|
||||||
this.setState(ClientToolCallState.error)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,64 +0,0 @@
|
|||||||
import { createLogger } from '@sim/logger'
|
|
||||||
import { ListFilter, Loader2, MinusCircle, XCircle } from 'lucide-react'
|
|
||||||
import {
|
|
||||||
BaseClientTool,
|
|
||||||
type BaseClientToolMetadata,
|
|
||||||
ClientToolCallState,
|
|
||||||
} from '@/lib/copilot/tools/client/base-tool'
|
|
||||||
import {
|
|
||||||
ExecuteResponseSuccessSchema,
|
|
||||||
GetTriggerBlocksResult,
|
|
||||||
} from '@/lib/copilot/tools/shared/schemas'
|
|
||||||
|
|
||||||
export class GetTriggerBlocksClientTool extends BaseClientTool {
|
|
||||||
static readonly id = 'get_trigger_blocks'
|
|
||||||
|
|
||||||
constructor(toolCallId: string) {
|
|
||||||
super(toolCallId, GetTriggerBlocksClientTool.id, GetTriggerBlocksClientTool.metadata)
|
|
||||||
}
|
|
||||||
|
|
||||||
static readonly metadata: BaseClientToolMetadata = {
|
|
||||||
displayNames: {
|
|
||||||
[ClientToolCallState.generating]: { text: 'Finding trigger blocks', icon: Loader2 },
|
|
||||||
[ClientToolCallState.pending]: { text: 'Finding trigger blocks', icon: Loader2 },
|
|
||||||
[ClientToolCallState.executing]: { text: 'Finding trigger blocks', icon: Loader2 },
|
|
||||||
[ClientToolCallState.success]: { text: 'Found trigger blocks', icon: ListFilter },
|
|
||||||
[ClientToolCallState.error]: { text: 'Failed to find trigger blocks', icon: XCircle },
|
|
||||||
[ClientToolCallState.aborted]: { text: 'Aborted finding trigger blocks', icon: MinusCircle },
|
|
||||||
[ClientToolCallState.rejected]: { text: 'Skipped finding trigger blocks', icon: MinusCircle },
|
|
||||||
},
|
|
||||||
interrupt: undefined,
|
|
||||||
}
|
|
||||||
|
|
||||||
async execute(): Promise<void> {
|
|
||||||
const logger = createLogger('GetTriggerBlocksClientTool')
|
|
||||||
try {
|
|
||||||
this.setState(ClientToolCallState.executing)
|
|
||||||
|
|
||||||
const res = await fetch('/api/copilot/execute-copilot-server-tool', {
|
|
||||||
method: 'POST',
|
|
||||||
headers: { 'Content-Type': 'application/json' },
|
|
||||||
body: JSON.stringify({ toolName: 'get_trigger_blocks', payload: {} }),
|
|
||||||
})
|
|
||||||
if (!res.ok) {
|
|
||||||
const errorText = await res.text().catch(() => '')
|
|
||||||
try {
|
|
||||||
const errorJson = JSON.parse(errorText)
|
|
||||||
throw new Error(errorJson.error || errorText || `Server error (${res.status})`)
|
|
||||||
} catch {
|
|
||||||
throw new Error(errorText || `Server error (${res.status})`)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
const json = await res.json()
|
|
||||||
const parsed = ExecuteResponseSuccessSchema.parse(json)
|
|
||||||
const result = GetTriggerBlocksResult.parse(parsed.result)
|
|
||||||
|
|
||||||
await this.markToolComplete(200, 'Successfully retrieved trigger blocks', result)
|
|
||||||
this.setState(ClientToolCallState.success)
|
|
||||||
} catch (error: any) {
|
|
||||||
const message = error instanceof Error ? error.message : String(error)
|
|
||||||
await this.markToolComplete(500, message)
|
|
||||||
this.setState(ClientToolCallState.error)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,52 +0,0 @@
|
|||||||
import { Loader2, MinusCircle, Search, XCircle } from 'lucide-react'
|
|
||||||
import {
|
|
||||||
BaseClientTool,
|
|
||||||
type BaseClientToolMetadata,
|
|
||||||
ClientToolCallState,
|
|
||||||
} from '@/lib/copilot/tools/client/base-tool'
|
|
||||||
|
|
||||||
export class GetExamplesRagClientTool extends BaseClientTool {
|
|
||||||
static readonly id = 'get_examples_rag'
|
|
||||||
|
|
||||||
constructor(toolCallId: string) {
|
|
||||||
super(toolCallId, GetExamplesRagClientTool.id, GetExamplesRagClientTool.metadata)
|
|
||||||
}
|
|
||||||
|
|
||||||
static readonly metadata: BaseClientToolMetadata = {
|
|
||||||
displayNames: {
|
|
||||||
[ClientToolCallState.generating]: { text: 'Fetching examples', icon: Loader2 },
|
|
||||||
[ClientToolCallState.pending]: { text: 'Fetching examples', icon: Loader2 },
|
|
||||||
[ClientToolCallState.executing]: { text: 'Fetching examples', icon: Loader2 },
|
|
||||||
[ClientToolCallState.success]: { text: 'Fetched examples', icon: Search },
|
|
||||||
[ClientToolCallState.error]: { text: 'Failed to fetch examples', icon: XCircle },
|
|
||||||
[ClientToolCallState.aborted]: { text: 'Aborted getting examples', icon: MinusCircle },
|
|
||||||
[ClientToolCallState.rejected]: { text: 'Skipped getting examples', icon: MinusCircle },
|
|
||||||
},
|
|
||||||
interrupt: undefined,
|
|
||||||
getDynamicText: (params, state) => {
|
|
||||||
if (params?.query && typeof params.query === 'string') {
|
|
||||||
const query = params.query
|
|
||||||
|
|
||||||
switch (state) {
|
|
||||||
case ClientToolCallState.success:
|
|
||||||
return `Found examples for ${query}`
|
|
||||||
case ClientToolCallState.executing:
|
|
||||||
case ClientToolCallState.generating:
|
|
||||||
case ClientToolCallState.pending:
|
|
||||||
return `Searching examples for ${query}`
|
|
||||||
case ClientToolCallState.error:
|
|
||||||
return `Failed to find examples for ${query}`
|
|
||||||
case ClientToolCallState.aborted:
|
|
||||||
return `Aborted searching examples for ${query}`
|
|
||||||
case ClientToolCallState.rejected:
|
|
||||||
return `Skipped searching examples for ${query}`
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return undefined
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
async execute(): Promise<void> {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,58 +0,0 @@
|
|||||||
import { Loader2, MinusCircle, XCircle, Zap } from 'lucide-react'
|
|
||||||
import {
|
|
||||||
BaseClientTool,
|
|
||||||
type BaseClientToolMetadata,
|
|
||||||
ClientToolCallState,
|
|
||||||
} from '@/lib/copilot/tools/client/base-tool'
|
|
||||||
|
|
||||||
export class GetOperationsExamplesClientTool extends BaseClientTool {
|
|
||||||
static readonly id = 'get_operations_examples'
|
|
||||||
|
|
||||||
constructor(toolCallId: string) {
|
|
||||||
super(toolCallId, GetOperationsExamplesClientTool.id, GetOperationsExamplesClientTool.metadata)
|
|
||||||
}
|
|
||||||
|
|
||||||
static readonly metadata: BaseClientToolMetadata = {
|
|
||||||
displayNames: {
|
|
||||||
[ClientToolCallState.generating]: { text: 'Designing workflow component', icon: Loader2 },
|
|
||||||
[ClientToolCallState.pending]: { text: 'Designing workflow component', icon: Loader2 },
|
|
||||||
[ClientToolCallState.executing]: { text: 'Designing workflow component', icon: Loader2 },
|
|
||||||
[ClientToolCallState.success]: { text: 'Designed workflow component', icon: Zap },
|
|
||||||
[ClientToolCallState.error]: { text: 'Failed to design workflow component', icon: XCircle },
|
|
||||||
[ClientToolCallState.aborted]: {
|
|
||||||
text: 'Aborted designing workflow component',
|
|
||||||
icon: MinusCircle,
|
|
||||||
},
|
|
||||||
[ClientToolCallState.rejected]: {
|
|
||||||
text: 'Skipped designing workflow component',
|
|
||||||
icon: MinusCircle,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
interrupt: undefined,
|
|
||||||
getDynamicText: (params, state) => {
|
|
||||||
if (params?.query && typeof params.query === 'string') {
|
|
||||||
const query = params.query
|
|
||||||
|
|
||||||
switch (state) {
|
|
||||||
case ClientToolCallState.success:
|
|
||||||
return `Designed ${query}`
|
|
||||||
case ClientToolCallState.executing:
|
|
||||||
case ClientToolCallState.generating:
|
|
||||||
case ClientToolCallState.pending:
|
|
||||||
return `Designing ${query}`
|
|
||||||
case ClientToolCallState.error:
|
|
||||||
return `Failed to design ${query}`
|
|
||||||
case ClientToolCallState.aborted:
|
|
||||||
return `Aborted designing ${query}`
|
|
||||||
case ClientToolCallState.rejected:
|
|
||||||
return `Skipped designing ${query}`
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return undefined
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
async execute(): Promise<void> {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,31 +0,0 @@
|
|||||||
import { Loader2, MinusCircle, XCircle, Zap } from 'lucide-react'
|
|
||||||
import {
|
|
||||||
BaseClientTool,
|
|
||||||
type BaseClientToolMetadata,
|
|
||||||
ClientToolCallState,
|
|
||||||
} from '@/lib/copilot/tools/client/base-tool'
|
|
||||||
|
|
||||||
export class GetTriggerExamplesClientTool extends BaseClientTool {
|
|
||||||
static readonly id = 'get_trigger_examples'
|
|
||||||
|
|
||||||
constructor(toolCallId: string) {
|
|
||||||
super(toolCallId, GetTriggerExamplesClientTool.id, GetTriggerExamplesClientTool.metadata)
|
|
||||||
}
|
|
||||||
|
|
||||||
static readonly metadata: BaseClientToolMetadata = {
|
|
||||||
displayNames: {
|
|
||||||
[ClientToolCallState.generating]: { text: 'Selecting a trigger', icon: Loader2 },
|
|
||||||
[ClientToolCallState.pending]: { text: 'Selecting a trigger', icon: Loader2 },
|
|
||||||
[ClientToolCallState.executing]: { text: 'Selecting a trigger', icon: Loader2 },
|
|
||||||
[ClientToolCallState.success]: { text: 'Selected a trigger', icon: Zap },
|
|
||||||
[ClientToolCallState.error]: { text: 'Failed to select a trigger', icon: XCircle },
|
|
||||||
[ClientToolCallState.aborted]: { text: 'Aborted selecting a trigger', icon: MinusCircle },
|
|
||||||
[ClientToolCallState.rejected]: { text: 'Skipped selecting a trigger', icon: MinusCircle },
|
|
||||||
},
|
|
||||||
interrupt: undefined,
|
|
||||||
}
|
|
||||||
|
|
||||||
async execute(): Promise<void> {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,37 +0,0 @@
|
|||||||
import { Loader2, MinusCircle, PencilLine, XCircle } from 'lucide-react'
|
|
||||||
import {
|
|
||||||
BaseClientTool,
|
|
||||||
type BaseClientToolMetadata,
|
|
||||||
ClientToolCallState,
|
|
||||||
} from '@/lib/copilot/tools/client/base-tool'
|
|
||||||
|
|
||||||
export class SummarizeClientTool extends BaseClientTool {
|
|
||||||
static readonly id = 'summarize_conversation'
|
|
||||||
|
|
||||||
constructor(toolCallId: string) {
|
|
||||||
super(toolCallId, SummarizeClientTool.id, SummarizeClientTool.metadata)
|
|
||||||
}
|
|
||||||
|
|
||||||
static readonly metadata: BaseClientToolMetadata = {
|
|
||||||
displayNames: {
|
|
||||||
[ClientToolCallState.generating]: { text: 'Summarizing conversation', icon: Loader2 },
|
|
||||||
[ClientToolCallState.pending]: { text: 'Summarizing conversation', icon: Loader2 },
|
|
||||||
[ClientToolCallState.executing]: { text: 'Summarizing conversation', icon: Loader2 },
|
|
||||||
[ClientToolCallState.success]: { text: 'Summarized conversation', icon: PencilLine },
|
|
||||||
[ClientToolCallState.error]: { text: 'Failed to summarize conversation', icon: XCircle },
|
|
||||||
[ClientToolCallState.aborted]: {
|
|
||||||
text: 'Aborted summarizing conversation',
|
|
||||||
icon: MinusCircle,
|
|
||||||
},
|
|
||||||
[ClientToolCallState.rejected]: {
|
|
||||||
text: 'Skipped summarizing conversation',
|
|
||||||
icon: MinusCircle,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
interrupt: undefined,
|
|
||||||
}
|
|
||||||
|
|
||||||
async execute(): Promise<void> {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,36 +0,0 @@
|
|||||||
/**
|
|
||||||
* Initialize all tool UI configurations.
|
|
||||||
*
|
|
||||||
* This module imports all client tools to trigger their UI config registration.
|
|
||||||
* Import this module early in the app to ensure all tool configs are available.
|
|
||||||
*/
|
|
||||||
|
|
||||||
// Other tools (subagents)
|
|
||||||
import './other/auth'
|
|
||||||
import './other/custom-tool'
|
|
||||||
import './other/debug'
|
|
||||||
import './other/deploy'
|
|
||||||
import './other/edit'
|
|
||||||
import './other/evaluate'
|
|
||||||
import './other/info'
|
|
||||||
import './other/knowledge'
|
|
||||||
import './other/make-api-request'
|
|
||||||
import './other/plan'
|
|
||||||
import './other/research'
|
|
||||||
import './other/sleep'
|
|
||||||
import './other/superagent'
|
|
||||||
import './other/test'
|
|
||||||
import './other/tour'
|
|
||||||
import './other/workflow'
|
|
||||||
|
|
||||||
// Workflow tools
|
|
||||||
import './workflow/deploy-api'
|
|
||||||
import './workflow/deploy-chat'
|
|
||||||
import './workflow/deploy-mcp'
|
|
||||||
import './workflow/edit-workflow'
|
|
||||||
import './workflow/redeploy'
|
|
||||||
import './workflow/run-workflow'
|
|
||||||
import './workflow/set-global-workflow-variables'
|
|
||||||
|
|
||||||
// User tools
|
|
||||||
import './user/set-environment-variables'
|
|
||||||
@@ -1,143 +0,0 @@
|
|||||||
import { createLogger } from '@sim/logger'
|
|
||||||
import { Database, Loader2, MinusCircle, PlusCircle, XCircle } from 'lucide-react'
|
|
||||||
import {
|
|
||||||
BaseClientTool,
|
|
||||||
type BaseClientToolMetadata,
|
|
||||||
ClientToolCallState,
|
|
||||||
} from '@/lib/copilot/tools/client/base-tool'
|
|
||||||
import {
|
|
||||||
ExecuteResponseSuccessSchema,
|
|
||||||
type KnowledgeBaseArgs,
|
|
||||||
} from '@/lib/copilot/tools/shared/schemas'
|
|
||||||
import { useCopilotStore } from '@/stores/panel/copilot/store'
|
|
||||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Client tool for knowledge base operations
|
|
||||||
*/
|
|
||||||
export class KnowledgeBaseClientTool extends BaseClientTool {
|
|
||||||
static readonly id = 'knowledge_base'
|
|
||||||
|
|
||||||
constructor(toolCallId: string) {
|
|
||||||
super(toolCallId, KnowledgeBaseClientTool.id, KnowledgeBaseClientTool.metadata)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Only show interrupt for create operation
|
|
||||||
*/
|
|
||||||
getInterruptDisplays(): BaseClientToolMetadata['interrupt'] | undefined {
|
|
||||||
const toolCallsById = useCopilotStore.getState().toolCallsById
|
|
||||||
const toolCall = toolCallsById[this.toolCallId]
|
|
||||||
const params = toolCall?.params as KnowledgeBaseArgs | undefined
|
|
||||||
|
|
||||||
// Only require confirmation for create operation
|
|
||||||
if (params?.operation === 'create') {
|
|
||||||
const name = params?.args?.name || 'new knowledge base'
|
|
||||||
return {
|
|
||||||
accept: { text: `Create "${name}"`, icon: PlusCircle },
|
|
||||||
reject: { text: 'Skip', icon: XCircle },
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// No interrupt for list, get, query - auto-execute
|
|
||||||
return undefined
|
|
||||||
}
|
|
||||||
|
|
||||||
static readonly metadata: BaseClientToolMetadata = {
|
|
||||||
displayNames: {
|
|
||||||
[ClientToolCallState.generating]: { text: 'Accessing knowledge base', icon: Loader2 },
|
|
||||||
[ClientToolCallState.pending]: { text: 'Accessing knowledge base', icon: Loader2 },
|
|
||||||
[ClientToolCallState.executing]: { text: 'Accessing knowledge base', icon: Loader2 },
|
|
||||||
[ClientToolCallState.success]: { text: 'Accessed knowledge base', icon: Database },
|
|
||||||
[ClientToolCallState.error]: { text: 'Failed to access knowledge base', icon: XCircle },
|
|
||||||
[ClientToolCallState.aborted]: { text: 'Aborted knowledge base access', icon: MinusCircle },
|
|
||||||
[ClientToolCallState.rejected]: { text: 'Skipped knowledge base access', icon: MinusCircle },
|
|
||||||
},
|
|
||||||
getDynamicText: (params: Record<string, any>, state: ClientToolCallState) => {
|
|
||||||
const operation = params?.operation as string | undefined
|
|
||||||
const name = params?.args?.name as string | undefined
|
|
||||||
|
|
||||||
const opVerbs: Record<string, { active: string; past: string; pending?: string }> = {
|
|
||||||
create: {
|
|
||||||
active: 'Creating knowledge base',
|
|
||||||
past: 'Created knowledge base',
|
|
||||||
pending: name ? `Create knowledge base "${name}"?` : 'Create knowledge base?',
|
|
||||||
},
|
|
||||||
list: { active: 'Listing knowledge bases', past: 'Listed knowledge bases' },
|
|
||||||
get: { active: 'Getting knowledge base', past: 'Retrieved knowledge base' },
|
|
||||||
query: { active: 'Querying knowledge base', past: 'Queried knowledge base' },
|
|
||||||
}
|
|
||||||
const defaultVerb: { active: string; past: string; pending?: string } = {
|
|
||||||
active: 'Accessing knowledge base',
|
|
||||||
past: 'Accessed knowledge base',
|
|
||||||
}
|
|
||||||
const verb = operation ? opVerbs[operation] || defaultVerb : defaultVerb
|
|
||||||
|
|
||||||
if (state === ClientToolCallState.success) {
|
|
||||||
return verb.past
|
|
||||||
}
|
|
||||||
if (state === ClientToolCallState.pending && verb.pending) {
|
|
||||||
return verb.pending
|
|
||||||
}
|
|
||||||
if (
|
|
||||||
state === ClientToolCallState.generating ||
|
|
||||||
state === ClientToolCallState.pending ||
|
|
||||||
state === ClientToolCallState.executing
|
|
||||||
) {
|
|
||||||
return verb.active
|
|
||||||
}
|
|
||||||
return undefined
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
async handleReject(): Promise<void> {
|
|
||||||
await super.handleReject()
|
|
||||||
this.setState(ClientToolCallState.rejected)
|
|
||||||
}
|
|
||||||
|
|
||||||
async handleAccept(args?: KnowledgeBaseArgs): Promise<void> {
|
|
||||||
await this.execute(args)
|
|
||||||
}
|
|
||||||
|
|
||||||
async execute(args?: KnowledgeBaseArgs): Promise<void> {
|
|
||||||
const logger = createLogger('KnowledgeBaseClientTool')
|
|
||||||
try {
|
|
||||||
this.setState(ClientToolCallState.executing)
|
|
||||||
|
|
||||||
// Get the workspace ID from the workflow registry hydration state
|
|
||||||
const { hydration } = useWorkflowRegistry.getState()
|
|
||||||
const workspaceId = hydration.workspaceId
|
|
||||||
|
|
||||||
// Build payload with workspace ID included in args
|
|
||||||
const payload: KnowledgeBaseArgs = {
|
|
||||||
...(args || { operation: 'list' }),
|
|
||||||
args: {
|
|
||||||
...(args?.args || {}),
|
|
||||||
workspaceId: workspaceId || undefined,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
const res = await fetch('/api/copilot/execute-copilot-server-tool', {
|
|
||||||
method: 'POST',
|
|
||||||
headers: { 'Content-Type': 'application/json' },
|
|
||||||
body: JSON.stringify({ toolName: 'knowledge_base', payload }),
|
|
||||||
})
|
|
||||||
|
|
||||||
if (!res.ok) {
|
|
||||||
const txt = await res.text().catch(() => '')
|
|
||||||
throw new Error(txt || `Server error (${res.status})`)
|
|
||||||
}
|
|
||||||
|
|
||||||
const json = await res.json()
|
|
||||||
const parsed = ExecuteResponseSuccessSchema.parse(json)
|
|
||||||
|
|
||||||
this.setState(ClientToolCallState.success)
|
|
||||||
await this.markToolComplete(200, 'Knowledge base operation completed', parsed.result)
|
|
||||||
this.setState(ClientToolCallState.success)
|
|
||||||
} catch (e: any) {
|
|
||||||
logger.error('execute failed', { message: e?.message })
|
|
||||||
this.setState(ClientToolCallState.error)
|
|
||||||
await this.markToolComplete(500, e?.message || 'Failed to access knowledge base')
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,24 +0,0 @@
|
|||||||
const instances: Record<string, any> = {}
|
|
||||||
|
|
||||||
let syncStateFn: ((toolCallId: string, nextState: any, options?: { result?: any }) => void) | null =
|
|
||||||
null
|
|
||||||
|
|
||||||
export function registerClientTool(toolCallId: string, instance: any) {
|
|
||||||
instances[toolCallId] = instance
|
|
||||||
}
|
|
||||||
|
|
||||||
export function getClientTool(toolCallId: string): any | undefined {
|
|
||||||
return instances[toolCallId]
|
|
||||||
}
|
|
||||||
|
|
||||||
export function registerToolStateSync(
|
|
||||||
fn: (toolCallId: string, nextState: any, options?: { result?: any }) => void
|
|
||||||
) {
|
|
||||||
syncStateFn = fn
|
|
||||||
}
|
|
||||||
|
|
||||||
export function syncToolState(toolCallId: string, nextState: any, options?: { result?: any }) {
|
|
||||||
try {
|
|
||||||
syncStateFn?.(toolCallId, nextState, options)
|
|
||||||
} catch {}
|
|
||||||
}
|
|
||||||
@@ -1,241 +0,0 @@
|
|||||||
import { createLogger } from '@sim/logger'
|
|
||||||
import { Loader2, Navigation, X, XCircle } from 'lucide-react'
|
|
||||||
import {
|
|
||||||
BaseClientTool,
|
|
||||||
type BaseClientToolMetadata,
|
|
||||||
ClientToolCallState,
|
|
||||||
} from '@/lib/copilot/tools/client/base-tool'
|
|
||||||
import { useCopilotStore } from '@/stores/panel/copilot/store'
|
|
||||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
|
||||||
|
|
||||||
type NavigationDestination = 'workflow' | 'logs' | 'templates' | 'vector_db' | 'settings'
|
|
||||||
|
|
||||||
interface NavigateUIArgs {
|
|
||||||
destination: NavigationDestination
|
|
||||||
workflowName?: string
|
|
||||||
}
|
|
||||||
|
|
||||||
export class NavigateUIClientTool extends BaseClientTool {
|
|
||||||
static readonly id = 'navigate_ui'
|
|
||||||
|
|
||||||
constructor(toolCallId: string) {
|
|
||||||
super(toolCallId, NavigateUIClientTool.id, NavigateUIClientTool.metadata)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Override to provide dynamic button text based on destination
|
|
||||||
*/
|
|
||||||
getInterruptDisplays(): BaseClientToolMetadata['interrupt'] | undefined {
|
|
||||||
const toolCallsById = useCopilotStore.getState().toolCallsById
|
|
||||||
const toolCall = toolCallsById[this.toolCallId]
|
|
||||||
const params = toolCall?.params as NavigateUIArgs | undefined
|
|
||||||
|
|
||||||
const destination = params?.destination
|
|
||||||
const workflowName = params?.workflowName
|
|
||||||
|
|
||||||
let buttonText = 'Navigate'
|
|
||||||
|
|
||||||
if (destination === 'workflow' && workflowName) {
|
|
||||||
buttonText = 'Open workflow'
|
|
||||||
} else if (destination === 'logs') {
|
|
||||||
buttonText = 'Open logs'
|
|
||||||
} else if (destination === 'templates') {
|
|
||||||
buttonText = 'Open templates'
|
|
||||||
} else if (destination === 'vector_db') {
|
|
||||||
buttonText = 'Open vector DB'
|
|
||||||
} else if (destination === 'settings') {
|
|
||||||
buttonText = 'Open settings'
|
|
||||||
}
|
|
||||||
|
|
||||||
return {
|
|
||||||
accept: { text: buttonText, icon: Navigation },
|
|
||||||
reject: { text: 'Skip', icon: XCircle },
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
static readonly metadata: BaseClientToolMetadata = {
|
|
||||||
displayNames: {
|
|
||||||
[ClientToolCallState.generating]: {
|
|
||||||
text: 'Preparing to open',
|
|
||||||
icon: Loader2,
|
|
||||||
},
|
|
||||||
[ClientToolCallState.pending]: { text: 'Open?', icon: Navigation },
|
|
||||||
[ClientToolCallState.executing]: { text: 'Opening', icon: Loader2 },
|
|
||||||
[ClientToolCallState.success]: { text: 'Opened', icon: Navigation },
|
|
||||||
[ClientToolCallState.error]: { text: 'Failed to open', icon: X },
|
|
||||||
[ClientToolCallState.aborted]: {
|
|
||||||
text: 'Aborted opening',
|
|
||||||
icon: XCircle,
|
|
||||||
},
|
|
||||||
[ClientToolCallState.rejected]: {
|
|
||||||
text: 'Skipped opening',
|
|
||||||
icon: XCircle,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
interrupt: {
|
|
||||||
accept: { text: 'Open', icon: Navigation },
|
|
||||||
reject: { text: 'Skip', icon: XCircle },
|
|
||||||
},
|
|
||||||
getDynamicText: (params, state) => {
|
|
||||||
const destination = params?.destination as NavigationDestination | undefined
|
|
||||||
const workflowName = params?.workflowName
|
|
||||||
|
|
||||||
const action = 'open'
|
|
||||||
const actionCapitalized = 'Open'
|
|
||||||
const actionPast = 'opened'
|
|
||||||
const actionIng = 'opening'
|
|
||||||
let target = ''
|
|
||||||
|
|
||||||
if (destination === 'workflow' && workflowName) {
|
|
||||||
target = ` workflow "${workflowName}"`
|
|
||||||
} else if (destination === 'workflow') {
|
|
||||||
target = ' workflows'
|
|
||||||
} else if (destination === 'logs') {
|
|
||||||
target = ' logs'
|
|
||||||
} else if (destination === 'templates') {
|
|
||||||
target = ' templates'
|
|
||||||
} else if (destination === 'vector_db') {
|
|
||||||
target = ' vector database'
|
|
||||||
} else if (destination === 'settings') {
|
|
||||||
target = ' settings'
|
|
||||||
}
|
|
||||||
|
|
||||||
const fullAction = `${action}${target}`
|
|
||||||
const fullActionCapitalized = `${actionCapitalized}${target}`
|
|
||||||
const fullActionPast = `${actionPast}${target}`
|
|
||||||
const fullActionIng = `${actionIng}${target}`
|
|
||||||
|
|
||||||
switch (state) {
|
|
||||||
case ClientToolCallState.success:
|
|
||||||
return fullActionPast.charAt(0).toUpperCase() + fullActionPast.slice(1)
|
|
||||||
case ClientToolCallState.executing:
|
|
||||||
return fullActionIng.charAt(0).toUpperCase() + fullActionIng.slice(1)
|
|
||||||
case ClientToolCallState.generating:
|
|
||||||
return `Preparing to ${fullAction}`
|
|
||||||
case ClientToolCallState.pending:
|
|
||||||
return `${fullActionCapitalized}?`
|
|
||||||
case ClientToolCallState.error:
|
|
||||||
return `Failed to ${fullAction}`
|
|
||||||
case ClientToolCallState.aborted:
|
|
||||||
return `Aborted ${fullAction}`
|
|
||||||
case ClientToolCallState.rejected:
|
|
||||||
return `Skipped ${fullAction}`
|
|
||||||
}
|
|
||||||
return undefined
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
async handleReject(): Promise<void> {
|
|
||||||
await super.handleReject()
|
|
||||||
this.setState(ClientToolCallState.rejected)
|
|
||||||
}
|
|
||||||
|
|
||||||
async handleAccept(args?: NavigateUIArgs): Promise<void> {
|
|
||||||
const logger = createLogger('NavigateUIClientTool')
|
|
||||||
try {
|
|
||||||
this.setState(ClientToolCallState.executing)
|
|
||||||
|
|
||||||
// Get params from copilot store if not provided directly
|
|
||||||
let destination = args?.destination
|
|
||||||
let workflowName = args?.workflowName
|
|
||||||
|
|
||||||
if (!destination) {
|
|
||||||
const toolCallsById = useCopilotStore.getState().toolCallsById
|
|
||||||
const toolCall = toolCallsById[this.toolCallId]
|
|
||||||
const params = toolCall?.params as NavigateUIArgs | undefined
|
|
||||||
destination = params?.destination
|
|
||||||
workflowName = params?.workflowName
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!destination) {
|
|
||||||
throw new Error('No destination provided')
|
|
||||||
}
|
|
||||||
|
|
||||||
let navigationUrl = ''
|
|
||||||
let successMessage = ''
|
|
||||||
|
|
||||||
// Get current workspace ID from URL
|
|
||||||
const workspaceId = window.location.pathname.split('/')[2]
|
|
||||||
|
|
||||||
switch (destination) {
|
|
||||||
case 'workflow':
|
|
||||||
if (workflowName) {
|
|
||||||
// Find workflow by name
|
|
||||||
const { workflows } = useWorkflowRegistry.getState()
|
|
||||||
const workflow = Object.values(workflows).find(
|
|
||||||
(w) => w.name.toLowerCase() === workflowName.toLowerCase()
|
|
||||||
)
|
|
||||||
|
|
||||||
if (!workflow) {
|
|
||||||
throw new Error(`Workflow "${workflowName}" not found`)
|
|
||||||
}
|
|
||||||
|
|
||||||
navigationUrl = `/workspace/${workspaceId}/w/${workflow.id}`
|
|
||||||
successMessage = `Navigated to workflow "${workflowName}"`
|
|
||||||
} else {
|
|
||||||
navigationUrl = `/workspace/${workspaceId}/w`
|
|
||||||
successMessage = 'Navigated to workflows'
|
|
||||||
}
|
|
||||||
break
|
|
||||||
|
|
||||||
case 'logs':
|
|
||||||
navigationUrl = `/workspace/${workspaceId}/logs`
|
|
||||||
successMessage = 'Navigated to logs'
|
|
||||||
break
|
|
||||||
|
|
||||||
case 'templates':
|
|
||||||
navigationUrl = `/workspace/${workspaceId}/templates`
|
|
||||||
successMessage = 'Navigated to templates'
|
|
||||||
break
|
|
||||||
|
|
||||||
case 'vector_db':
|
|
||||||
navigationUrl = `/workspace/${workspaceId}/vector-db`
|
|
||||||
successMessage = 'Navigated to vector database'
|
|
||||||
break
|
|
||||||
|
|
||||||
case 'settings':
|
|
||||||
window.dispatchEvent(new CustomEvent('open-settings', { detail: { tab: 'general' } }))
|
|
||||||
successMessage = 'Opened settings'
|
|
||||||
break
|
|
||||||
|
|
||||||
default:
|
|
||||||
throw new Error(`Unknown destination: ${destination}`)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Navigate if URL was set
|
|
||||||
if (navigationUrl) {
|
|
||||||
window.location.href = navigationUrl
|
|
||||||
}
|
|
||||||
|
|
||||||
this.setState(ClientToolCallState.success)
|
|
||||||
await this.markToolComplete(200, successMessage, {
|
|
||||||
destination,
|
|
||||||
workflowName,
|
|
||||||
navigated: true,
|
|
||||||
})
|
|
||||||
} catch (e: any) {
|
|
||||||
logger.error('Navigation failed', { message: e?.message })
|
|
||||||
this.setState(ClientToolCallState.error)
|
|
||||||
|
|
||||||
// Get destination info for better error message
|
|
||||||
const toolCallsById = useCopilotStore.getState().toolCallsById
|
|
||||||
const toolCall = toolCallsById[this.toolCallId]
|
|
||||||
const params = toolCall?.params as NavigateUIArgs | undefined
|
|
||||||
const dest = params?.destination
|
|
||||||
const wfName = params?.workflowName
|
|
||||||
|
|
||||||
let errorMessage = e?.message || 'Failed to navigate'
|
|
||||||
if (dest === 'workflow' && wfName) {
|
|
||||||
errorMessage = `Failed to navigate to workflow "${wfName}": ${e?.message || 'Unknown error'}`
|
|
||||||
} else if (dest) {
|
|
||||||
errorMessage = `Failed to navigate to ${dest}: ${e?.message || 'Unknown error'}`
|
|
||||||
}
|
|
||||||
|
|
||||||
await this.markToolComplete(500, errorMessage)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
 * Auto-execute entry point: navigation needs no user confirmation, so this
 * simply runs the accept flow with whatever args the runtime provided.
 */
async execute(args?: NavigateUIArgs): Promise<void> {
await this.handleAccept(args)
}
|
|
||||||
}
|
|
||||||
@@ -1,56 +0,0 @@
|
|||||||
import { KeyRound, Loader2, XCircle } from 'lucide-react'
|
|
||||||
import {
|
|
||||||
BaseClientTool,
|
|
||||||
type BaseClientToolMetadata,
|
|
||||||
ClientToolCallState,
|
|
||||||
} from '@/lib/copilot/tools/client/base-tool'
|
|
||||||
import { registerToolUIConfig } from '@/lib/copilot/tools/client/ui-config'
|
|
||||||
|
|
||||||
interface AuthArgs {
|
|
||||||
instruction: string
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Auth tool that spawns a subagent to handle authentication setup.
|
|
||||||
* This tool auto-executes and the actual work is done by the auth subagent.
|
|
||||||
* The subagent's output is streamed as nested content under this tool call.
|
|
||||||
*/
|
|
||||||
export class AuthClientTool extends BaseClientTool {
|
|
||||||
static readonly id = 'auth'
|
|
||||||
|
|
||||||
constructor(toolCallId: string) {
|
|
||||||
super(toolCallId, AuthClientTool.id, AuthClientTool.metadata)
|
|
||||||
}
|
|
||||||
|
|
||||||
static readonly metadata: BaseClientToolMetadata = {
|
|
||||||
displayNames: {
|
|
||||||
[ClientToolCallState.generating]: { text: 'Authenticating', icon: Loader2 },
|
|
||||||
[ClientToolCallState.pending]: { text: 'Authenticating', icon: Loader2 },
|
|
||||||
[ClientToolCallState.executing]: { text: 'Authenticating', icon: Loader2 },
|
|
||||||
[ClientToolCallState.success]: { text: 'Authenticated', icon: KeyRound },
|
|
||||||
[ClientToolCallState.error]: { text: 'Failed to authenticate', icon: XCircle },
|
|
||||||
[ClientToolCallState.rejected]: { text: 'Skipped auth', icon: XCircle },
|
|
||||||
[ClientToolCallState.aborted]: { text: 'Aborted auth', icon: XCircle },
|
|
||||||
},
|
|
||||||
uiConfig: {
|
|
||||||
subagent: {
|
|
||||||
streamingLabel: 'Authenticating',
|
|
||||||
completedLabel: 'Authenticated',
|
|
||||||
shouldCollapse: true,
|
|
||||||
outputArtifacts: [],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Execute the auth tool.
|
|
||||||
* This just marks the tool as executing - the actual auth work is done server-side
|
|
||||||
* by the auth subagent, and its output is streamed as subagent events.
|
|
||||||
*/
|
|
||||||
async execute(_args?: AuthArgs): Promise<void> {
|
|
||||||
this.setState(ClientToolCallState.executing)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Register UI config at module load
|
|
||||||
registerToolUIConfig(AuthClientTool.id, AuthClientTool.metadata.uiConfig!)
|
|
||||||
@@ -1,61 +0,0 @@
|
|||||||
import { createLogger } from '@sim/logger'
|
|
||||||
import { Check, Loader2, XCircle } from 'lucide-react'
|
|
||||||
import {
|
|
||||||
BaseClientTool,
|
|
||||||
type BaseClientToolMetadata,
|
|
||||||
ClientToolCallState,
|
|
||||||
} from '@/lib/copilot/tools/client/base-tool'
|
|
||||||
|
|
||||||
interface CheckoffTodoArgs {
|
|
||||||
id?: string
|
|
||||||
todoId?: string
|
|
||||||
}
|
|
||||||
|
|
||||||
export class CheckoffTodoClientTool extends BaseClientTool {
|
|
||||||
static readonly id = 'checkoff_todo'
|
|
||||||
|
|
||||||
constructor(toolCallId: string) {
|
|
||||||
super(toolCallId, CheckoffTodoClientTool.id, CheckoffTodoClientTool.metadata)
|
|
||||||
}
|
|
||||||
|
|
||||||
static readonly metadata: BaseClientToolMetadata = {
|
|
||||||
displayNames: {
|
|
||||||
[ClientToolCallState.generating]: { text: 'Marking todo', icon: Loader2 },
|
|
||||||
[ClientToolCallState.executing]: { text: 'Marking todo', icon: Loader2 },
|
|
||||||
[ClientToolCallState.success]: { text: 'Marked todo complete', icon: Check },
|
|
||||||
[ClientToolCallState.error]: { text: 'Failed to mark todo', icon: XCircle },
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
async execute(args?: CheckoffTodoArgs): Promise<void> {
|
|
||||||
const logger = createLogger('CheckoffTodoClientTool')
|
|
||||||
try {
|
|
||||||
this.setState(ClientToolCallState.executing)
|
|
||||||
|
|
||||||
const todoId = args?.id || args?.todoId
|
|
||||||
if (!todoId) {
|
|
||||||
this.setState(ClientToolCallState.error)
|
|
||||||
await this.markToolComplete(400, 'Missing todo id')
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
|
||||||
const { useCopilotStore } = await import('@/stores/panel/copilot/store')
|
|
||||||
const store = useCopilotStore.getState()
|
|
||||||
if (store.updatePlanTodoStatus) {
|
|
||||||
store.updatePlanTodoStatus(todoId, 'completed')
|
|
||||||
}
|
|
||||||
} catch (e) {
|
|
||||||
logger.warn('Failed to update todo status in store', { message: (e as any)?.message })
|
|
||||||
}
|
|
||||||
|
|
||||||
this.setState(ClientToolCallState.success)
|
|
||||||
await this.markToolComplete(200, 'Todo checked off', { todoId })
|
|
||||||
this.setState(ClientToolCallState.success)
|
|
||||||
} catch (e: any) {
|
|
||||||
logger.error('execute failed', { message: e?.message })
|
|
||||||
this.setState(ClientToolCallState.error)
|
|
||||||
await this.markToolComplete(500, e?.message || 'Failed to check off todo')
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,52 +0,0 @@
|
|||||||
import { Globe, Loader2, MinusCircle, XCircle } from 'lucide-react'
|
|
||||||
import {
|
|
||||||
BaseClientTool,
|
|
||||||
type BaseClientToolMetadata,
|
|
||||||
ClientToolCallState,
|
|
||||||
} from '@/lib/copilot/tools/client/base-tool'
|
|
||||||
|
|
||||||
export class CrawlWebsiteClientTool extends BaseClientTool {
|
|
||||||
static readonly id = 'crawl_website'
|
|
||||||
|
|
||||||
constructor(toolCallId: string) {
|
|
||||||
super(toolCallId, CrawlWebsiteClientTool.id, CrawlWebsiteClientTool.metadata)
|
|
||||||
}
|
|
||||||
|
|
||||||
static readonly metadata: BaseClientToolMetadata = {
|
|
||||||
displayNames: {
|
|
||||||
[ClientToolCallState.generating]: { text: 'Crawling website', icon: Loader2 },
|
|
||||||
[ClientToolCallState.pending]: { text: 'Crawling website', icon: Loader2 },
|
|
||||||
[ClientToolCallState.executing]: { text: 'Crawling website', icon: Loader2 },
|
|
||||||
[ClientToolCallState.success]: { text: 'Crawled website', icon: Globe },
|
|
||||||
[ClientToolCallState.error]: { text: 'Failed to crawl website', icon: XCircle },
|
|
||||||
[ClientToolCallState.aborted]: { text: 'Aborted crawling website', icon: MinusCircle },
|
|
||||||
[ClientToolCallState.rejected]: { text: 'Skipped crawling website', icon: MinusCircle },
|
|
||||||
},
|
|
||||||
interrupt: undefined,
|
|
||||||
getDynamicText: (params, state) => {
|
|
||||||
if (params?.url && typeof params.url === 'string') {
|
|
||||||
const url = params.url
|
|
||||||
|
|
||||||
switch (state) {
|
|
||||||
case ClientToolCallState.success:
|
|
||||||
return `Crawled ${url}`
|
|
||||||
case ClientToolCallState.executing:
|
|
||||||
case ClientToolCallState.generating:
|
|
||||||
case ClientToolCallState.pending:
|
|
||||||
return `Crawling ${url}`
|
|
||||||
case ClientToolCallState.error:
|
|
||||||
return `Failed to crawl ${url}`
|
|
||||||
case ClientToolCallState.aborted:
|
|
||||||
return `Aborted crawling ${url}`
|
|
||||||
case ClientToolCallState.rejected:
|
|
||||||
return `Skipped crawling ${url}`
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return undefined
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
async execute(): Promise<void> {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,56 +0,0 @@
|
|||||||
import { Loader2, Wrench, XCircle } from 'lucide-react'
|
|
||||||
import {
|
|
||||||
BaseClientTool,
|
|
||||||
type BaseClientToolMetadata,
|
|
||||||
ClientToolCallState,
|
|
||||||
} from '@/lib/copilot/tools/client/base-tool'
|
|
||||||
import { registerToolUIConfig } from '@/lib/copilot/tools/client/ui-config'
|
|
||||||
|
|
||||||
interface CustomToolArgs {
|
|
||||||
instruction: string
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Custom tool that spawns a subagent to manage custom tools.
|
|
||||||
* This tool auto-executes and the actual work is done by the custom_tool subagent.
|
|
||||||
* The subagent's output is streamed as nested content under this tool call.
|
|
||||||
*/
|
|
||||||
export class CustomToolClientTool extends BaseClientTool {
|
|
||||||
static readonly id = 'custom_tool'
|
|
||||||
|
|
||||||
constructor(toolCallId: string) {
|
|
||||||
super(toolCallId, CustomToolClientTool.id, CustomToolClientTool.metadata)
|
|
||||||
}
|
|
||||||
|
|
||||||
static readonly metadata: BaseClientToolMetadata = {
|
|
||||||
displayNames: {
|
|
||||||
[ClientToolCallState.generating]: { text: 'Managing custom tool', icon: Loader2 },
|
|
||||||
[ClientToolCallState.pending]: { text: 'Managing custom tool', icon: Loader2 },
|
|
||||||
[ClientToolCallState.executing]: { text: 'Managing custom tool', icon: Loader2 },
|
|
||||||
[ClientToolCallState.success]: { text: 'Managed custom tool', icon: Wrench },
|
|
||||||
[ClientToolCallState.error]: { text: 'Failed custom tool', icon: XCircle },
|
|
||||||
[ClientToolCallState.rejected]: { text: 'Skipped custom tool', icon: XCircle },
|
|
||||||
[ClientToolCallState.aborted]: { text: 'Aborted custom tool', icon: XCircle },
|
|
||||||
},
|
|
||||||
uiConfig: {
|
|
||||||
subagent: {
|
|
||||||
streamingLabel: 'Managing custom tool',
|
|
||||||
completedLabel: 'Custom tool managed',
|
|
||||||
shouldCollapse: true,
|
|
||||||
outputArtifacts: [],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Execute the custom_tool tool.
|
|
||||||
* This just marks the tool as executing - the actual custom tool work is done server-side
|
|
||||||
* by the custom_tool subagent, and its output is streamed as subagent events.
|
|
||||||
*/
|
|
||||||
async execute(_args?: CustomToolArgs): Promise<void> {
|
|
||||||
this.setState(ClientToolCallState.executing)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Register UI config at module load
|
|
||||||
registerToolUIConfig(CustomToolClientTool.id, CustomToolClientTool.metadata.uiConfig!)
|
|
||||||
@@ -1,60 +0,0 @@
|
|||||||
import { Bug, Loader2, XCircle } from 'lucide-react'
|
|
||||||
import {
|
|
||||||
BaseClientTool,
|
|
||||||
type BaseClientToolMetadata,
|
|
||||||
ClientToolCallState,
|
|
||||||
} from '@/lib/copilot/tools/client/base-tool'
|
|
||||||
import { registerToolUIConfig } from '@/lib/copilot/tools/client/ui-config'
|
|
||||||
|
|
||||||
interface DebugArgs {
|
|
||||||
error_description: string
|
|
||||||
context?: string
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Debug tool that spawns a subagent to diagnose workflow issues.
|
|
||||||
* This tool auto-executes and the actual work is done by the debug subagent.
|
|
||||||
* The subagent's output is streamed as nested content under this tool call.
|
|
||||||
*/
|
|
||||||
export class DebugClientTool extends BaseClientTool {
|
|
||||||
static readonly id = 'debug'
|
|
||||||
|
|
||||||
constructor(toolCallId: string) {
|
|
||||||
super(toolCallId, DebugClientTool.id, DebugClientTool.metadata)
|
|
||||||
}
|
|
||||||
|
|
||||||
static readonly metadata: BaseClientToolMetadata = {
|
|
||||||
displayNames: {
|
|
||||||
[ClientToolCallState.generating]: { text: 'Debugging', icon: Loader2 },
|
|
||||||
[ClientToolCallState.pending]: { text: 'Debugging', icon: Loader2 },
|
|
||||||
[ClientToolCallState.executing]: { text: 'Debugging', icon: Loader2 },
|
|
||||||
[ClientToolCallState.success]: { text: 'Debugged', icon: Bug },
|
|
||||||
[ClientToolCallState.error]: { text: 'Failed to debug', icon: XCircle },
|
|
||||||
[ClientToolCallState.rejected]: { text: 'Skipped debug', icon: XCircle },
|
|
||||||
[ClientToolCallState.aborted]: { text: 'Aborted debug', icon: XCircle },
|
|
||||||
},
|
|
||||||
uiConfig: {
|
|
||||||
subagent: {
|
|
||||||
streamingLabel: 'Debugging',
|
|
||||||
completedLabel: 'Debugged',
|
|
||||||
shouldCollapse: true,
|
|
||||||
outputArtifacts: [],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Execute the debug tool.
|
|
||||||
* This just marks the tool as executing - the actual debug work is done server-side
|
|
||||||
* by the debug subagent, and its output is streamed as subagent events.
|
|
||||||
*/
|
|
||||||
async execute(_args?: DebugArgs): Promise<void> {
|
|
||||||
// Immediately transition to executing state - no user confirmation needed
|
|
||||||
this.setState(ClientToolCallState.executing)
|
|
||||||
// The tool result will come from the server via tool_result event
|
|
||||||
// when the debug subagent completes its work
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Register UI config at module load
|
|
||||||
registerToolUIConfig(DebugClientTool.id, DebugClientTool.metadata.uiConfig!)
|
|
||||||
@@ -1,56 +0,0 @@
|
|||||||
import { Loader2, Rocket, XCircle } from 'lucide-react'
|
|
||||||
import {
|
|
||||||
BaseClientTool,
|
|
||||||
type BaseClientToolMetadata,
|
|
||||||
ClientToolCallState,
|
|
||||||
} from '@/lib/copilot/tools/client/base-tool'
|
|
||||||
import { registerToolUIConfig } from '@/lib/copilot/tools/client/ui-config'
|
|
||||||
|
|
||||||
interface DeployArgs {
|
|
||||||
instruction: string
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Deploy tool that spawns a subagent to handle deployment.
|
|
||||||
* This tool auto-executes and the actual work is done by the deploy subagent.
|
|
||||||
* The subagent's output is streamed as nested content under this tool call.
|
|
||||||
*/
|
|
||||||
export class DeployClientTool extends BaseClientTool {
|
|
||||||
static readonly id = 'deploy'
|
|
||||||
|
|
||||||
constructor(toolCallId: string) {
|
|
||||||
super(toolCallId, DeployClientTool.id, DeployClientTool.metadata)
|
|
||||||
}
|
|
||||||
|
|
||||||
static readonly metadata: BaseClientToolMetadata = {
|
|
||||||
displayNames: {
|
|
||||||
[ClientToolCallState.generating]: { text: 'Deploying', icon: Loader2 },
|
|
||||||
[ClientToolCallState.pending]: { text: 'Deploying', icon: Loader2 },
|
|
||||||
[ClientToolCallState.executing]: { text: 'Deploying', icon: Loader2 },
|
|
||||||
[ClientToolCallState.success]: { text: 'Deployed', icon: Rocket },
|
|
||||||
[ClientToolCallState.error]: { text: 'Failed to deploy', icon: XCircle },
|
|
||||||
[ClientToolCallState.rejected]: { text: 'Skipped deploy', icon: XCircle },
|
|
||||||
[ClientToolCallState.aborted]: { text: 'Aborted deploy', icon: XCircle },
|
|
||||||
},
|
|
||||||
uiConfig: {
|
|
||||||
subagent: {
|
|
||||||
streamingLabel: 'Deploying',
|
|
||||||
completedLabel: 'Deployed',
|
|
||||||
shouldCollapse: true,
|
|
||||||
outputArtifacts: [],
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Execute the deploy tool.
|
|
||||||
* This just marks the tool as executing - the actual deploy work is done server-side
|
|
||||||
* by the deploy subagent, and its output is streamed as subagent events.
|
|
||||||
*/
|
|
||||||
async execute(_args?: DeployArgs): Promise<void> {
|
|
||||||
this.setState(ClientToolCallState.executing)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Register UI config at module load
|
|
||||||
registerToolUIConfig(DeployClientTool.id, DeployClientTool.metadata.uiConfig!)
|
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user