Mirror of https://github.com/simstudioai/sim.git (synced 2026-02-12 15:34:58 -05:00)
Compare commits: feat/smart...v0.5.86 (53 commits)

Commit SHA1s:

- 50585273ce
- 654cb2b407
- 6c66521d64
- 479cd347ad
- a3a99eda19
- 1a66d48add
- 46822e91f3
- 2bb68335ee
- 8528fbe2d2
- 31fdd2be13
- 028bc652c2
- c6bf5cd58c
- 11dc18a80d
- ab4e9dc72f
- 1c58c35bd8
- d63a5cb504
- 8bd5d41723
- c12931bc50
- e9c4251c1c
- cc2be33d6b
- 45371e521e
- 0ce0f98aa5
- dff1c9d083
- b09f683072
- a8bb0db660
- af82820a28
- 4372841797
- 5e8c843241
- 7bf3d73ee6
- 7ffc11a738
- be578e2ed7
- f415e5edc4
- 13a6e6c3fa
- f5ab7f21ae
- bfb6fffe38
- 4fbec0a43f
- 585f5e365b
- 3792bdd252
- eb5d1f3e5b
- 54ab82c8dd
- f895bf469b
- dd3209af06
- b6ba3b50a7
- b304233062
- 57e4b49bd6
- e12dd204ed
- 3d9d9cbc54
- 0f4ec962ad
- 4827866f9a
- 3e697d9ed9
- 4431a1a484
- 4d1a9a3f22
- eb07a080fb
@@ -41,6 +41,9 @@ Diese Tastenkombinationen wechseln zwischen den Panel-Tabs auf der rechten Seite
 
 | Tastenkombination | Aktion |
 |----------|--------|
+| `C` | Copilot-Tab fokussieren |
+| `T` | Toolbar-Tab fokussieren |
+| `E` | Editor-Tab fokussieren |
 | `Mod` + `F` | Toolbar-Suche fokussieren |
 
 ## Globale Navigation
@@ -43,6 +43,9 @@ These shortcuts switch between panel tabs on the right side of the canvas.
 
 | Shortcut | Action |
 |----------|--------|
+| `C` | Focus Copilot tab |
+| `T` | Focus Toolbar tab |
+| `E` | Focus Editor tab |
 | `Mod` + `F` | Focus Toolbar search |
 
 ## Global Navigation
@@ -399,28 +399,6 @@ Create a new custom property (metadata) on a Confluence page.
 | ↳ `authorId` | string | Account ID of the version author |
 | ↳ `createdAt` | string | ISO 8601 timestamp of version creation |
 
-### `confluence_delete_page_property`
-
-Delete a content property from a Confluence page by its property ID.
-
-#### Input
-
-| Parameter | Type | Required | Description |
-| --------- | ---- | -------- | ----------- |
-| `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) |
-| `pageId` | string | Yes | The ID of the page containing the property |
-| `propertyId` | string | Yes | The ID of the property to delete |
-| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |
-
-#### Output
-
-| Parameter | Type | Description |
-| --------- | ---- | ----------- |
-| `ts` | string | ISO 8601 timestamp of the operation |
-| `pageId` | string | ID of the page |
-| `propertyId` | string | ID of the deleted property |
-| `deleted` | boolean | Deletion status |
-
 ### `confluence_search`
 
 Search for content across Confluence pages, blog posts, and other content.
@@ -894,90 +872,6 @@ Add a label to a Confluence page for organization and categorization.
 | `labelName` | string | Name of the added label |
 | `labelId` | string | ID of the added label |
 
-### `confluence_delete_label`
-
-Remove a label from a Confluence page.
-
-#### Input
-
-| Parameter | Type | Required | Description |
-| --------- | ---- | -------- | ----------- |
-| `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) |
-| `pageId` | string | Yes | Confluence page ID to remove the label from |
-| `labelName` | string | Yes | Name of the label to remove |
-| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |
-
-#### Output
-
-| Parameter | Type | Description |
-| --------- | ---- | ----------- |
-| `ts` | string | ISO 8601 timestamp of the operation |
-| `pageId` | string | Page ID the label was removed from |
-| `labelName` | string | Name of the removed label |
-| `deleted` | boolean | Deletion status |
-
-### `confluence_get_pages_by_label`
-
-Retrieve all pages that have a specific label applied.
-
-#### Input
-
-| Parameter | Type | Required | Description |
-| --------- | ---- | -------- | ----------- |
-| `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) |
-| `labelId` | string | Yes | The ID of the label to get pages for |
-| `limit` | number | No | Maximum number of pages to return \(default: 50, max: 250\) |
-| `cursor` | string | No | Pagination cursor from previous response |
-| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |
-
-#### Output
-
-| Parameter | Type | Description |
-| --------- | ---- | ----------- |
-| `ts` | string | ISO 8601 timestamp of the operation |
-| `labelId` | string | ID of the label |
-| `pages` | array | Array of pages with this label |
-| ↳ `id` | string | Unique page identifier |
-| ↳ `title` | string | Page title |
-| ↳ `status` | string | Page status \(e.g., current, archived, trashed, draft\) |
-| ↳ `spaceId` | string | ID of the space containing the page |
-| ↳ `parentId` | string | ID of the parent page \(null if top-level\) |
-| ↳ `authorId` | string | Account ID of the page author |
-| ↳ `createdAt` | string | ISO 8601 timestamp when the page was created |
-| ↳ `version` | object | Page version information |
-| ↳ `number` | number | Version number |
-| ↳ `message` | string | Version message |
-| ↳ `minorEdit` | boolean | Whether this is a minor edit |
-| ↳ `authorId` | string | Account ID of the version author |
-| ↳ `createdAt` | string | ISO 8601 timestamp of version creation |
-| `nextCursor` | string | Cursor for fetching the next page of results |
-
-### `confluence_list_space_labels`
-
-List all labels associated with a Confluence space.
-
-#### Input
-
-| Parameter | Type | Required | Description |
-| --------- | ---- | -------- | ----------- |
-| `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) |
-| `spaceId` | string | Yes | The ID of the Confluence space to list labels from |
-| `limit` | number | No | Maximum number of labels to return \(default: 25, max: 250\) |
-| `cursor` | string | No | Pagination cursor from previous response |
-| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |
-
-#### Output
-
-| Parameter | Type | Description |
-| --------- | ---- | ----------- |
-| `ts` | string | ISO 8601 timestamp of the operation |
-| `spaceId` | string | ID of the space |
-| `labels` | array | Array of labels on the space |
-| ↳ `id` | string | Unique label identifier |
-| ↳ `name` | string | Label name |
-| ↳ `prefix` | string | Label prefix/type \(e.g., global, my, team\) |
-| `nextCursor` | string | Cursor for fetching the next page of results |
-
 ### `confluence_get_space`
 
 Get details about a specific Confluence space.
@@ -42,6 +42,9 @@ Estos atajos cambian entre las pestañas del panel en el lado derecho del lienzo
 
 | Atajo | Acción |
 |----------|--------|
+| `C` | Enfocar pestaña Copilot |
+| `T` | Enfocar pestaña Barra de herramientas |
+| `E` | Enfocar pestaña Editor |
 | `Mod` + `F` | Enfocar búsqueda de Barra de herramientas |
 
 ## Navegación global
@@ -42,6 +42,9 @@ Ces raccourcis permettent de basculer entre les onglets du panneau sur le côté
 
 | Raccourci | Action |
 |----------|--------|
+| `C` | Activer l'onglet Copilot |
+| `T` | Activer l'onglet Barre d'outils |
+| `E` | Activer l'onglet Éditeur |
 | `Mod` + `F` | Activer la recherche dans la barre d'outils |
 
 ## Navigation globale
@@ -41,6 +41,9 @@ import { Callout } from 'fumadocs-ui/components/callout'
 
 | ショートカット | 操作 |
 |----------|--------|
+| `C` | Copilotタブにフォーカス |
+| `T` | Toolbarタブにフォーカス |
+| `E` | Editorタブにフォーカス |
 | `Mod` + `F` | Toolbar検索にフォーカス |
 
 ## グローバルナビゲーション
@@ -41,6 +41,9 @@ import { Callout } from 'fumadocs-ui/components/callout'
 
 | 快捷键 | 操作 |
 |----------|--------|
+| `C` | 聚焦 Copilot 标签页 |
+| `T` | 聚焦 Toolbar 标签页 |
+| `E` | 聚焦 Editor 标签页 |
 | `Mod` + `F` | 聚焦 Toolbar 搜索 |
 
 ## 全局导航
@@ -1,81 +1,145 @@
-import { createLogger } from '@sim/logger'
-import { type NextRequest, NextResponse } from 'next/server'
-import { SIM_AGENT_API_URL } from '@/lib/copilot/constants'
-import { authenticateCopilotRequestSessionOnly } from '@/lib/copilot/request-helpers'
-import { env } from '@/lib/core/config/env'
-
-const logger = createLogger('CopilotAutoAllowedToolsAPI')
-
-function copilotHeaders(): HeadersInit {
-  const headers: Record<string, string> = {
-    'Content-Type': 'application/json',
-  }
-  if (env.COPILOT_API_KEY) {
-    headers['x-api-key'] = env.COPILOT_API_KEY
-  }
-  return headers
-}
-
-export async function DELETE(request: NextRequest) {
-  const { userId, isAuthenticated } = await authenticateCopilotRequestSessionOnly()
-  if (!isAuthenticated || !userId) {
-    return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
-  }
-
-  const toolIdFromQuery = new URL(request.url).searchParams.get('toolId') || undefined
-  const toolIdFromBody = await request
-    .json()
-    .then((body) => (typeof body?.toolId === 'string' ? body.toolId : undefined))
-    .catch(() => undefined)
-  const toolId = toolIdFromBody || toolIdFromQuery
-  if (!toolId) {
-    return NextResponse.json({ error: 'toolId is required' }, { status: 400 })
-  }
-
-  try {
-    const res = await fetch(`${SIM_AGENT_API_URL}/api/tool-preferences/auto-allowed`, {
-      method: 'DELETE',
-      headers: copilotHeaders(),
-      body: JSON.stringify({
-        userId,
-        toolId,
-      }),
-    })
-
-    const payload = await res.json().catch(() => ({}))
-    if (!res.ok) {
-      logger.warn('Failed to remove auto-allowed tool via copilot backend', {
-        status: res.status,
-        userId,
-        toolId,
-      })
-      return NextResponse.json(
-        {
-          success: false,
-          error: payload?.error || 'Failed to remove auto-allowed tool',
-          autoAllowedTools: [],
-        },
-        { status: res.status }
-      )
-    }
-
-    return NextResponse.json({
-      success: true,
-      autoAllowedTools: Array.isArray(payload?.autoAllowedTools) ? payload.autoAllowedTools : [],
-    })
-  } catch (error) {
-    logger.error('Error removing auto-allowed tool', {
-      userId,
-      toolId,
-      error: error instanceof Error ? error.message : String(error),
-    })
-    return NextResponse.json(
-      {
-        success: false,
-        error: 'Failed to remove auto-allowed tool',
-        autoAllowedTools: [],
-      },
-      { status: 500 }
-    )
-  }
-}
+import { db } from '@sim/db'
+import { settings } from '@sim/db/schema'
+import { createLogger } from '@sim/logger'
+import { eq } from 'drizzle-orm'
+import { type NextRequest, NextResponse } from 'next/server'
+import { getSession } from '@/lib/auth'
+
+const logger = createLogger('CopilotAutoAllowedToolsAPI')
+
+/**
+ * GET - Fetch user's auto-allowed integration tools
+ */
+export async function GET() {
+  try {
+    const session = await getSession()
+
+    if (!session?.user?.id) {
+      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
+    }
+
+    const userId = session.user.id
+    const [userSettings] = await db
+      .select()
+      .from(settings)
+      .where(eq(settings.userId, userId))
+      .limit(1)
+
+    if (userSettings) {
+      const autoAllowedTools = (userSettings.copilotAutoAllowedTools as string[]) || []
+      return NextResponse.json({ autoAllowedTools })
+    }
+
+    await db.insert(settings).values({
+      id: userId,
+      userId,
+      copilotAutoAllowedTools: [],
+    })
+
+    return NextResponse.json({ autoAllowedTools: [] })
+  } catch (error) {
+    logger.error('Failed to fetch auto-allowed tools', { error })
+    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
+  }
+}
+
+/**
+ * POST - Add a tool to the auto-allowed list
+ */
+export async function POST(request: NextRequest) {
+  try {
+    const session = await getSession()
+
+    if (!session?.user?.id) {
+      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
+    }
+
+    const userId = session.user.id
+    const body = await request.json()
+
+    if (!body.toolId || typeof body.toolId !== 'string') {
+      return NextResponse.json({ error: 'toolId must be a string' }, { status: 400 })
+    }
+
+    const toolId = body.toolId
+
+    const [existing] = await db.select().from(settings).where(eq(settings.userId, userId)).limit(1)
+
+    if (existing) {
+      const currentTools = (existing.copilotAutoAllowedTools as string[]) || []
+
+      if (!currentTools.includes(toolId)) {
+        const updatedTools = [...currentTools, toolId]
+        await db
+          .update(settings)
+          .set({
+            copilotAutoAllowedTools: updatedTools,
+            updatedAt: new Date(),
+          })
+          .where(eq(settings.userId, userId))
+
+        logger.info('Added tool to auto-allowed list', { userId, toolId })
+        return NextResponse.json({ success: true, autoAllowedTools: updatedTools })
+      }
+
+      return NextResponse.json({ success: true, autoAllowedTools: currentTools })
+    }
+
+    await db.insert(settings).values({
+      id: userId,
+      userId,
+      copilotAutoAllowedTools: [toolId],
+    })
+
+    logger.info('Created settings and added tool to auto-allowed list', { userId, toolId })
+    return NextResponse.json({ success: true, autoAllowedTools: [toolId] })
+  } catch (error) {
+    logger.error('Failed to add auto-allowed tool', { error })
+    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
+  }
+}
+
+/**
+ * DELETE - Remove a tool from the auto-allowed list
+ */
+export async function DELETE(request: NextRequest) {
+  try {
+    const session = await getSession()
+
+    if (!session?.user?.id) {
+      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
+    }
+
+    const userId = session.user.id
+    const { searchParams } = new URL(request.url)
+    const toolId = searchParams.get('toolId')
+
+    if (!toolId) {
+      return NextResponse.json({ error: 'toolId query parameter is required' }, { status: 400 })
+    }
+
+    const [existing] = await db.select().from(settings).where(eq(settings.userId, userId)).limit(1)
+
+    if (existing) {
+      const currentTools = (existing.copilotAutoAllowedTools as string[]) || []
+      const updatedTools = currentTools.filter((t) => t !== toolId)
+
+      await db
+        .update(settings)
+        .set({
+          copilotAutoAllowedTools: updatedTools,
+          updatedAt: new Date(),
+        })
+        .where(eq(settings.userId, userId))
+
+      logger.info('Removed tool from auto-allowed list', { userId, toolId })
+      return NextResponse.json({ success: true, autoAllowedTools: updatedTools })
+    }
+
+    return NextResponse.json({ success: true, autoAllowedTools: [] })
+  } catch (error) {
+    logger.error('Failed to remove auto-allowed tool', { error })
+    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
+  }
+}
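A note on the rewritten handlers above: the POST and DELETE routes now reduce to a read-modify-write on the `settings.copilotAutoAllowedTools` array instead of proxying to the copilot backend. A minimal sketch of that toggle logic follows, assuming the column deserializes to a plain `string[]`; the helper names are illustrative and not part of the repository.

```ts
// Illustrative helpers only, not part of the sim codebase.
// They mirror the add/remove behaviour of the POST and DELETE handlers above,
// assuming copilotAutoAllowedTools deserializes to a plain string[].
export function addAutoAllowedTool(current: string[] | null, toolId: string): string[] {
  const tools = current ?? []
  // POST handler: append only if the tool is not already present (no duplicates).
  return tools.includes(toolId) ? tools : [...tools, toolId]
}

export function removeAutoAllowedTool(current: string[] | null, toolId: string): string[] {
  // DELETE handler: drop every occurrence of the tool id.
  return (current ?? []).filter((t) => t !== toolId)
}
```

The POST handler's dedupe check (`currentTools.includes(toolId)`) and the DELETE handler's `filter` correspond directly to these two helpers.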
@@ -28,24 +28,13 @@ import { resolveWorkflowIdForUser } from '@/lib/workflows/utils'
 const logger = createLogger('CopilotChatAPI')
 
-function truncateForLog(value: string, maxLength = 120): string {
-  if (!value || maxLength <= 0) return ''
-  return value.length <= maxLength ? value : `${value.slice(0, maxLength)}...`
-}
-
 async function requestChatTitleFromCopilot(params: {
   message: string
   model: string
   provider?: string
 }): Promise<string | null> {
   const { message, model, provider } = params
-  if (!message || !model) {
-    logger.warn('Skipping chat title request because message/model is missing', {
-      hasMessage: !!message,
-      hasModel: !!model,
-    })
-    return null
-  }
+  if (!message || !model) return null
 
   const headers: Record<string, string> = {
     'Content-Type': 'application/json',
@@ -55,13 +44,6 @@ async function requestChatTitleFromCopilot(params: {
   }
 
   try {
-    logger.info('Requesting chat title from copilot backend', {
-      model,
-      provider: provider || null,
-      messageLength: message.length,
-      messagePreview: truncateForLog(message),
-    })
-
     const response = await fetch(`${SIM_AGENT_API_URL}/api/generate-chat-title`, {
       method: 'POST',
       headers,
@@ -81,32 +63,10 @@ async function requestChatTitleFromCopilot(params: {
       return null
     }
 
-    const rawTitle = typeof payload?.title === 'string' ? payload.title : ''
-    const title = rawTitle.trim()
-    logger.info('Received chat title response from copilot backend', {
-      status: response.status,
-      hasRawTitle: !!rawTitle,
-      rawTitle,
-      normalizedTitle: title,
-      messagePreview: truncateForLog(message),
-    })
-
-    if (!title) {
-      logger.warn('Copilot backend returned empty chat title', {
-        payload,
-        model,
-        provider: provider || null,
-      })
-    }
-
+    const title = typeof payload?.title === 'string' ? payload.title.trim() : ''
     return title || null
   } catch (error) {
-    logger.error('Error generating chat title:', {
-      error,
-      model,
-      provider: provider || null,
-      messagePreview: truncateForLog(message),
-    })
+    logger.error('Error generating chat title:', error)
     return null
   }
 }
@@ -153,7 +113,6 @@ const ChatMessageSchema = z.object({
   workflowId: z.string().optional(),
   knowledgeId: z.string().optional(),
   blockId: z.string().optional(),
-  blockIds: z.array(z.string()).optional(),
   templateId: z.string().optional(),
   executionId: z.string().optional(),
   // For workflow_block, provide both workflowId and blockId
@@ -200,20 +159,6 @@ export async function POST(req: NextRequest) {
       commands,
     } = ChatMessageSchema.parse(body)
 
-    const normalizedContexts = Array.isArray(contexts)
-      ? contexts.map((ctx) => {
-          if (ctx.kind !== 'blocks') return ctx
-          if (Array.isArray(ctx.blockIds) && ctx.blockIds.length > 0) return ctx
-          if (ctx.blockId) {
-            return {
-              ...ctx,
-              blockIds: [ctx.blockId],
-            }
-          }
-          return ctx
-        })
-      : contexts
-
     // Resolve workflowId - if not provided, use first workflow or find by name
     const resolved = await resolveWorkflowIdForUser(
       authenticatedUserId,
@@ -231,10 +176,10 @@ export async function POST(req: NextRequest) {
     const userMessageIdToUse = userMessageId || crypto.randomUUID()
     try {
       logger.info(`[${tracker.requestId}] Received chat POST`, {
-        hasContexts: Array.isArray(normalizedContexts),
-        contextsCount: Array.isArray(normalizedContexts) ? normalizedContexts.length : 0,
-        contextsPreview: Array.isArray(normalizedContexts)
-          ? normalizedContexts.map((c: any) => ({
+        hasContexts: Array.isArray(contexts),
+        contextsCount: Array.isArray(contexts) ? contexts.length : 0,
+        contextsPreview: Array.isArray(contexts)
+          ? contexts.map((c: any) => ({
               kind: c?.kind,
               chatId: c?.chatId,
               workflowId: c?.workflowId,
@@ -246,25 +191,17 @@ export async function POST(req: NextRequest) {
     } catch {}
     // Preprocess contexts server-side
     let agentContexts: Array<{ type: string; content: string }> = []
-    if (Array.isArray(normalizedContexts) && normalizedContexts.length > 0) {
+    if (Array.isArray(contexts) && contexts.length > 0) {
       try {
         const { processContextsServer } = await import('@/lib/copilot/process-contents')
-        const processed = await processContextsServer(
-          normalizedContexts as any,
-          authenticatedUserId,
-          message
-        )
+        const processed = await processContextsServer(contexts as any, authenticatedUserId, message)
         agentContexts = processed
         logger.info(`[${tracker.requestId}] Contexts processed for request`, {
           processedCount: agentContexts.length,
           kinds: agentContexts.map((c) => c.type),
           lengthPreview: agentContexts.map((c) => c.content?.length ?? 0),
         })
-        if (
-          Array.isArray(normalizedContexts) &&
-          normalizedContexts.length > 0 &&
-          agentContexts.length === 0
-        ) {
+        if (Array.isArray(contexts) && contexts.length > 0 && agentContexts.length === 0) {
           logger.warn(
             `[${tracker.requestId}] Contexts provided but none processed. Check executionId for logs contexts.`
           )
@@ -278,7 +215,6 @@ export async function POST(req: NextRequest) {
     let currentChat: any = null
     let conversationHistory: any[] = []
     let actualChatId = chatId
-    let chatWasCreatedForRequest = false
    const selectedModel = model || 'claude-opus-4-6'
 
     if (chatId || createNewChat) {
@@ -290,7 +226,6 @@ export async function POST(req: NextRequest) {
       })
       currentChat = chatResult.chat
       actualChatId = chatResult.chatId || chatId
-      chatWasCreatedForRequest = chatResult.isNew
       const history = buildConversationHistory(
         chatResult.conversationHistory,
         (chatResult.chat?.conversationId as string | undefined) || conversationId
@@ -298,18 +233,6 @@ export async function POST(req: NextRequest) {
       conversationHistory = history.history
     }
 
-    const shouldGenerateTitleForRequest =
-      !!actualChatId &&
-      chatWasCreatedForRequest &&
-      !currentChat?.title &&
-      conversationHistory.length === 0
-
-    const titleGenerationParams = {
-      message,
-      model: selectedModel,
-      provider,
-    }
-
     const effectiveMode = mode === 'agent' ? 'build' : mode
     const effectiveConversationId =
       (currentChat?.conversationId as string | undefined) || conversationId
@@ -323,13 +246,11 @@ export async function POST(req: NextRequest) {
         mode,
         model: selectedModel,
         provider,
-        conversationId: effectiveConversationId,
         conversationHistory,
         contexts: agentContexts,
         fileAttachments,
         commands,
         chatId: actualChatId,
-        prefetch,
         implicitFeedback,
       },
       {
@@ -402,22 +323,10 @@ export async function POST(req: NextRequest) {
             await pushEvent({ type: 'chat_id', chatId: actualChatId })
           }
 
-          if (shouldGenerateTitleForRequest) {
-            logger.info(`[${tracker.requestId}] Starting title generation for streaming response`, {
-              chatId: actualChatId,
-              model: titleGenerationParams.model,
-              provider: provider || null,
-              messageLength: message.length,
-              messagePreview: truncateForLog(message),
-              chatWasCreatedForRequest,
-            })
-            requestChatTitleFromCopilot(titleGenerationParams)
+          if (actualChatId && !currentChat?.title && conversationHistory.length === 0) {
+            requestChatTitleFromCopilot({ message, model: selectedModel, provider })
               .then(async (title) => {
                 if (title) {
-                  logger.info(`[${tracker.requestId}] Generated title for streaming response`, {
-                    chatId: actualChatId,
-                    title,
-                  })
                   await db
                     .update(copilotChats)
                     .set({
@@ -425,30 +334,12 @@ export async function POST(req: NextRequest) {
                       updatedAt: new Date(),
                     })
                     .where(eq(copilotChats.id, actualChatId!))
-                  await pushEvent({ type: 'title_updated', title, chatId: actualChatId })
-                  logger.info(`[${tracker.requestId}] Emitted title_updated SSE event`, {
-                    chatId: actualChatId,
-                    title,
-                  })
-                } else {
-                  logger.warn(`[${tracker.requestId}] No title returned for streaming response`, {
-                    chatId: actualChatId,
-                    model: selectedModel,
-                  })
+                  await pushEvent({ type: 'title_updated', title })
                 }
               })
               .catch((error) => {
                 logger.error(`[${tracker.requestId}] Title generation failed:`, error)
               })
-          } else if (actualChatId && !chatWasCreatedForRequest) {
-            logger.info(
-              `[${tracker.requestId}] Skipping title generation because chat already exists`,
-              {
-                chatId: actualChatId,
-                model: titleGenerationParams.model,
-                provider: provider || null,
-              }
-            )
           }
 
           try {
@@ -541,15 +432,10 @@ export async function POST(req: NextRequest) {
         content: message,
         timestamp: new Date().toISOString(),
         ...(fileAttachments && fileAttachments.length > 0 && { fileAttachments }),
-        ...(Array.isArray(normalizedContexts) &&
-          normalizedContexts.length > 0 && {
-            contexts: normalizedContexts,
-          }),
-        ...(Array.isArray(normalizedContexts) &&
-          normalizedContexts.length > 0 && {
-            contentBlocks: [
-              { type: 'contexts', contexts: normalizedContexts as any, timestamp: Date.now() },
-            ],
+        ...(Array.isArray(contexts) && contexts.length > 0 && { contexts }),
+        ...(Array.isArray(contexts) &&
+          contexts.length > 0 && {
+            contentBlocks: [{ type: 'contexts', contexts: contexts as any, timestamp: Date.now() }],
           }),
       }
 
@@ -563,9 +449,9 @@ export async function POST(req: NextRequest) {
       const updatedMessages = [...conversationHistory, userMessage, assistantMessage]
 
       // Start title generation in parallel if this is first message (non-streaming)
-      if (shouldGenerateTitleForRequest) {
+      if (actualChatId && !currentChat.title && conversationHistory.length === 0) {
         logger.info(`[${tracker.requestId}] Starting title generation for non-streaming response`)
-        requestChatTitleFromCopilot(titleGenerationParams)
+        requestChatTitleFromCopilot({ message, model: selectedModel, provider })
           .then(async (title) => {
             if (title) {
               await db
@@ -576,22 +462,11 @@ export async function POST(req: NextRequest) {
               })
               .where(eq(copilotChats.id, actualChatId!))
              logger.info(`[${tracker.requestId}] Generated and saved title: ${title}`)
-            } else {
-              logger.warn(`[${tracker.requestId}] No title returned for non-streaming response`, {
-                chatId: actualChatId,
-                model: selectedModel,
-              })
             }
           })
           .catch((error) => {
             logger.error(`[${tracker.requestId}] Title generation failed:`, error)
           })
-      } else if (actualChatId && !chatWasCreatedForRequest) {
-        logger.info(`[${tracker.requestId}] Skipping title generation because chat already exists`, {
-          chatId: actualChatId,
-          model: titleGenerationParams.model,
-          provider: provider || null,
-        })
       }
 
       // Update chat in database immediately (without blocking for title)
@@ -1,11 +1,7 @@
 import { createLogger } from '@sim/logger'
 import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
-import {
-  REDIS_TOOL_CALL_PREFIX,
-  REDIS_TOOL_CALL_TTL_SECONDS,
-  SIM_AGENT_API_URL,
-} from '@/lib/copilot/constants'
+import { REDIS_TOOL_CALL_PREFIX, REDIS_TOOL_CALL_TTL_SECONDS } from '@/lib/copilot/constants'
 import {
   authenticateCopilotRequestSessionOnly,
   createBadRequestResponse,
@@ -14,7 +10,6 @@ import {
   createUnauthorizedResponse,
   type NotificationStatus,
 } from '@/lib/copilot/request-helpers'
-import { env } from '@/lib/core/config/env'
 import { getRedisClient } from '@/lib/core/config/redis'
 
 const logger = createLogger('CopilotConfirmAPI')
@@ -26,8 +21,6 @@ const ConfirmationSchema = z.object({
     errorMap: () => ({ message: 'Invalid notification status' }),
   }),
   message: z.string().optional(), // Optional message for background moves or additional context
-  toolName: z.string().optional(),
-  remember: z.boolean().optional(),
 })
 
 /**
@@ -64,44 +57,6 @@ async function updateToolCallStatus(
   }
 }
 
-async function saveAutoAllowedToolPreference(userId: string, toolName: string): Promise<boolean> {
-  const headers: Record<string, string> = {
-    'Content-Type': 'application/json',
-  }
-  if (env.COPILOT_API_KEY) {
-    headers['x-api-key'] = env.COPILOT_API_KEY
-  }
-
-  try {
-    const response = await fetch(`${SIM_AGENT_API_URL}/api/tool-preferences/auto-allowed`, {
-      method: 'POST',
-      headers,
-      body: JSON.stringify({
-        userId,
-        toolId: toolName,
-      }),
-    })
-
-    if (!response.ok) {
-      logger.warn('Failed to persist auto-allowed tool preference', {
-        userId,
-        toolName,
-        status: response.status,
-      })
-      return false
-    }
-
-    return true
-  } catch (error) {
-    logger.error('Error persisting auto-allowed tool preference', {
-      userId,
-      toolName,
-      error: error instanceof Error ? error.message : String(error),
-    })
-    return false
-  }
-}
-
 /**
  * POST /api/copilot/confirm
  * Update tool call status (Accept/Reject)
@@ -119,7 +74,7 @@ export async function POST(req: NextRequest) {
     }
 
     const body = await req.json()
-    const { toolCallId, status, message, toolName, remember } = ConfirmationSchema.parse(body)
+    const { toolCallId, status, message } = ConfirmationSchema.parse(body)
 
     // Update the tool call status in Redis
     const updated = await updateToolCallStatus(toolCallId, status, message)
@@ -135,22 +90,14 @@ export async function POST(req: NextRequest) {
       return createBadRequestResponse('Failed to update tool call status or tool call not found')
     }
 
-    let rememberSaved = false
-    if (status === 'accepted' && remember === true && toolName && authenticatedUserId) {
-      rememberSaved = await saveAutoAllowedToolPreference(authenticatedUserId, toolName)
-    }
+    const duration = tracker.getDuration()
 
-    const response: Record<string, unknown> = {
+    return NextResponse.json({
       success: true,
       message: message || `Tool call ${toolCallId} has been ${status.toLowerCase()}`,
       toolCallId,
       status,
-    }
-    if (remember === true) {
-      response.rememberSaved = rememberSaved
-    }
-
-    return NextResponse.json(response)
+    })
   } catch (error) {
     const duration = tracker.getDuration()
 
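After this change, `ConfirmationSchema` accepts only `toolCallId`, `status`, and an optional `message`; the `toolName`/`remember` fields and the `rememberSaved` response flag are gone. Below is a hedged sketch of a client call to the endpoint. The path `/api/copilot/confirm` comes from the handler's doc comment; `'accepted'` is assumed to be one of the valid `NotificationStatus` values, as it was in the removed code.

```ts
// Sketch only: field names follow ConfirmationSchema in the diff above;
// the exact NotificationStatus values are defined in '@/lib/copilot/request-helpers'.
async function confirmToolCall(toolCallId: string) {
  const res = await fetch('/api/copilot/confirm', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ toolCallId, status: 'accepted' }),
  })
  if (!res.ok) throw new Error(`Confirm failed: ${res.status}`)
  // Expected shape per the handler: { success, message, toolCallId, status }
  return res.json()
}
```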
@@ -191,84 +191,3 @@ export async function GET(request: NextRequest) {
     )
   }
 }
-
-// Delete a label from a page
-export async function DELETE(request: NextRequest) {
-  try {
-    const auth = await checkSessionOrInternalAuth(request)
-    if (!auth.success || !auth.userId) {
-      return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
-    }
-
-    const {
-      domain,
-      accessToken,
-      cloudId: providedCloudId,
-      pageId,
-      labelName,
-    } = await request.json()
-
-    if (!domain) {
-      return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
-    }
-
-    if (!accessToken) {
-      return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
-    }
-
-    if (!pageId) {
-      return NextResponse.json({ error: 'Page ID is required' }, { status: 400 })
-    }
-
-    if (!labelName) {
-      return NextResponse.json({ error: 'Label name is required' }, { status: 400 })
-    }
-
-    const pageIdValidation = validateAlphanumericId(pageId, 'pageId', 255)
-    if (!pageIdValidation.isValid) {
-      return NextResponse.json({ error: pageIdValidation.error }, { status: 400 })
-    }
-
-    const cloudId = providedCloudId || (await getConfluenceCloudId(domain, accessToken))
-
-    const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
-    if (!cloudIdValidation.isValid) {
-      return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
-    }
-
-    const encodedLabel = encodeURIComponent(labelName.trim())
-    const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/rest/api/content/${pageId}/label?name=${encodedLabel}`
-
-    const response = await fetch(url, {
-      method: 'DELETE',
-      headers: {
-        Accept: 'application/json',
-        Authorization: `Bearer ${accessToken}`,
-      },
-    })
-
-    if (!response.ok) {
-      const errorData = await response.json().catch(() => null)
-      logger.error('Confluence API error response:', {
-        status: response.status,
-        statusText: response.statusText,
-        error: JSON.stringify(errorData, null, 2),
-      })
-      const errorMessage =
-        errorData?.message || `Failed to delete Confluence label (${response.status})`
-      return NextResponse.json({ error: errorMessage }, { status: response.status })
-    }
-
-    return NextResponse.json({
-      pageId,
-      labelName,
-      deleted: true,
-    })
-  } catch (error) {
-    logger.error('Error deleting Confluence label:', error)
-    return NextResponse.json(
-      { error: (error as Error).message || 'Internal server error' },
-      { status: 500 }
-    )
-  }
-}
@@ -1,103 +0,0 @@
-import { createLogger } from '@sim/logger'
-import { type NextRequest, NextResponse } from 'next/server'
-import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
-import { validateAlphanumericId, validateJiraCloudId } from '@/lib/core/security/input-validation'
-import { getConfluenceCloudId } from '@/tools/confluence/utils'
-
-const logger = createLogger('ConfluencePagesByLabelAPI')
-
-export const dynamic = 'force-dynamic'
-
-export async function GET(request: NextRequest) {
-  try {
-    const auth = await checkSessionOrInternalAuth(request)
-    if (!auth.success || !auth.userId) {
-      return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
-    }
-
-    const { searchParams } = new URL(request.url)
-    const domain = searchParams.get('domain')
-    const accessToken = searchParams.get('accessToken')
-    const labelId = searchParams.get('labelId')
-    const providedCloudId = searchParams.get('cloudId')
-    const limit = searchParams.get('limit') || '50'
-    const cursor = searchParams.get('cursor')
-
-    if (!domain) {
-      return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
-    }
-
-    if (!accessToken) {
-      return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
-    }
-
-    if (!labelId) {
-      return NextResponse.json({ error: 'Label ID is required' }, { status: 400 })
-    }
-
-    const labelIdValidation = validateAlphanumericId(labelId, 'labelId', 255)
-    if (!labelIdValidation.isValid) {
-      return NextResponse.json({ error: labelIdValidation.error }, { status: 400 })
-    }
-
-    const cloudId = providedCloudId || (await getConfluenceCloudId(domain, accessToken))
-
-    const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
-    if (!cloudIdValidation.isValid) {
-      return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
-    }
-
-    const queryParams = new URLSearchParams()
-    queryParams.append('limit', String(Math.min(Number(limit), 250)))
-    if (cursor) {
-      queryParams.append('cursor', cursor)
-    }
-    const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/labels/${labelId}/pages?${queryParams.toString()}`
-
-    const response = await fetch(url, {
-      method: 'GET',
-      headers: {
-        Accept: 'application/json',
-        Authorization: `Bearer ${accessToken}`,
-      },
-    })
-
-    if (!response.ok) {
-      const errorData = await response.json().catch(() => null)
-      logger.error('Confluence API error response:', {
-        status: response.status,
-        statusText: response.statusText,
-        error: JSON.stringify(errorData, null, 2),
-      })
-      const errorMessage = errorData?.message || `Failed to get pages by label (${response.status})`
-      return NextResponse.json({ error: errorMessage }, { status: response.status })
-    }
-
-    const data = await response.json()
-
-    const pages = (data.results || []).map((page: any) => ({
-      id: page.id,
-      title: page.title,
-      status: page.status ?? null,
-      spaceId: page.spaceId ?? null,
-      parentId: page.parentId ?? null,
-      authorId: page.authorId ?? null,
-      createdAt: page.createdAt ?? null,
-      version: page.version ?? null,
-    }))
-
-    return NextResponse.json({
-      pages,
-      labelId,
-      nextCursor: data._links?.next
-        ? new URL(data._links.next, 'https://placeholder').searchParams.get('cursor')
-        : null,
-    })
-  } catch (error) {
-    logger.error('Error getting pages by label:', error)
-    return NextResponse.json(
-      { error: (error as Error).message || 'Internal server error' },
-      { status: 500 }
-    )
-  }
-}
@@ -1,98 +0,0 @@
-import { createLogger } from '@sim/logger'
-import { type NextRequest, NextResponse } from 'next/server'
-import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
-import { validateAlphanumericId, validateJiraCloudId } from '@/lib/core/security/input-validation'
-import { getConfluenceCloudId } from '@/tools/confluence/utils'
-
-const logger = createLogger('ConfluenceSpaceLabelsAPI')
-
-export const dynamic = 'force-dynamic'
-
-export async function GET(request: NextRequest) {
-  try {
-    const auth = await checkSessionOrInternalAuth(request)
-    if (!auth.success || !auth.userId) {
-      return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
-    }
-
-    const { searchParams } = new URL(request.url)
-    const domain = searchParams.get('domain')
-    const accessToken = searchParams.get('accessToken')
-    const spaceId = searchParams.get('spaceId')
-    const providedCloudId = searchParams.get('cloudId')
-    const limit = searchParams.get('limit') || '25'
-    const cursor = searchParams.get('cursor')
-
-    if (!domain) {
-      return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
-    }
-
-    if (!accessToken) {
-      return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
-    }
-
-    if (!spaceId) {
-      return NextResponse.json({ error: 'Space ID is required' }, { status: 400 })
-    }
-
-    const spaceIdValidation = validateAlphanumericId(spaceId, 'spaceId', 255)
-    if (!spaceIdValidation.isValid) {
-      return NextResponse.json({ error: spaceIdValidation.error }, { status: 400 })
-    }
-
-    const cloudId = providedCloudId || (await getConfluenceCloudId(domain, accessToken))
-
-    const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
-    if (!cloudIdValidation.isValid) {
-      return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
-    }
-
-    const queryParams = new URLSearchParams()
-    queryParams.append('limit', String(Math.min(Number(limit), 250)))
-    if (cursor) {
-      queryParams.append('cursor', cursor)
-    }
-    const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/spaces/${spaceId}/labels?${queryParams.toString()}`
-
-    const response = await fetch(url, {
-      method: 'GET',
-      headers: {
-        Accept: 'application/json',
-        Authorization: `Bearer ${accessToken}`,
-      },
-    })
-
-    if (!response.ok) {
-      const errorData = await response.json().catch(() => null)
-      logger.error('Confluence API error response:', {
-        status: response.status,
-        statusText: response.statusText,
-        error: JSON.stringify(errorData, null, 2),
-      })
-      const errorMessage = errorData?.message || `Failed to list space labels (${response.status})`
-      return NextResponse.json({ error: errorMessage }, { status: response.status })
-    }
-
-    const data = await response.json()
-
-    const labels = (data.results || []).map((label: any) => ({
-      id: label.id,
-      name: label.name,
-      prefix: label.prefix || 'global',
-    }))
-
-    return NextResponse.json({
-      labels,
-      spaceId,
-      nextCursor: data._links?.next
-        ? new URL(data._links.next, 'https://placeholder').searchParams.get('cursor')
-        : null,
-    })
-  } catch (error) {
-    logger.error('Error listing space labels:', error)
-    return NextResponse.json(
-      { error: (error as Error).message || 'Internal server error' },
-      { status: 500 }
-    )
-  }
-}
@@ -38,7 +38,6 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
     return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
   }
 
-  const isInternalCall = auth.authType === 'internal_jwt'
   const userId = auth.userId || null
 
   let workflowData = await getWorkflowById(workflowId)
@@ -48,32 +47,29 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
     return NextResponse.json({ error: 'Workflow not found' }, { status: 404 })
   }
 
-  if (isInternalCall && !userId) {
-    // Internal system calls (e.g. workflow-in-workflow executor) may not carry a userId.
-    // These are already authenticated via internal JWT; allow read access.
-    logger.info(`[${requestId}] Internal API call for workflow ${workflowId}`)
-  } else if (!userId) {
+  // Check if user has access to this workflow
+  if (!userId) {
     logger.warn(`[${requestId}] Unauthorized access attempt for workflow ${workflowId}`)
     return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
-  } else {
-    const authorization = await authorizeWorkflowByWorkspacePermission({
-      workflowId,
-      userId,
-      action: 'read',
-    })
-    if (!authorization.workflow) {
-      logger.warn(`[${requestId}] Workflow ${workflowId} not found`)
-      return NextResponse.json({ error: 'Workflow not found' }, { status: 404 })
-    }
-
-    workflowData = authorization.workflow
-    if (!authorization.allowed) {
-      logger.warn(`[${requestId}] User ${userId} denied access to workflow ${workflowId}`)
-      return NextResponse.json(
-        { error: authorization.message || 'Access denied' },
-        { status: authorization.status }
-      )
-    }
   }
 
+  const authorization = await authorizeWorkflowByWorkspacePermission({
+    workflowId,
+    userId,
+    action: 'read',
+  })
+  if (!authorization.workflow) {
+    logger.warn(`[${requestId}] Workflow ${workflowId} not found`)
+    return NextResponse.json({ error: 'Workflow not found' }, { status: 404 })
+  }
+
+  workflowData = authorization.workflow
+  if (!authorization.allowed) {
+    logger.warn(`[${requestId}] User ${userId} denied access to workflow ${workflowId}`)
+    return NextResponse.json(
+      { error: authorization.message || 'Access denied' },
+      { status: authorization.status }
+    )
+  }
 
   logger.debug(`[${requestId}] Attempting to load workflow ${workflowId} from normalized tables`)
@@ -13,6 +13,9 @@ export type CommandId =
   | 'goto-logs'
   | 'open-search'
   | 'run-workflow'
+  | 'focus-copilot-tab'
+  | 'focus-toolbar-tab'
+  | 'focus-editor-tab'
   | 'clear-terminal-console'
   | 'focus-toolbar-search'
   | 'clear-notifications'
@@ -72,6 +75,21 @@ export const COMMAND_DEFINITIONS: Record<CommandId, CommandDefinition> = {
     shortcut: 'Mod+Enter',
     allowInEditable: false,
   },
+  'focus-copilot-tab': {
+    id: 'focus-copilot-tab',
+    shortcut: 'C',
+    allowInEditable: false,
+  },
+  'focus-toolbar-tab': {
+    id: 'focus-toolbar-tab',
+    shortcut: 'T',
+    allowInEditable: false,
+  },
+  'focus-editor-tab': {
+    id: 'focus-editor-tab',
+    shortcut: 'E',
+    allowInEditable: false,
+  },
   'clear-terminal-console': {
     id: 'clear-terminal-console',
     shortcut: 'Mod+D',
@@ -14,14 +14,6 @@ const logger = createLogger('DiffControls')
 const NOTIFICATION_WIDTH = 240
 const NOTIFICATION_GAP = 16

-function isWorkflowEditToolCall(name?: string, params?: Record<string, unknown>): boolean {
-if (name !== 'workflow_change') return false
-
-const mode = typeof params?.mode === 'string' ? params.mode.toLowerCase() : ''
-if (mode === 'apply') return true
-return typeof params?.proposalId === 'string' && params.proposalId.length > 0
-}
-
 export const DiffControls = memo(function DiffControls() {
 const isTerminalResizing = useTerminalStore((state) => state.isResizing)
 const isPanelResizing = usePanelStore((state) => state.isResizing)
@@ -72,7 +64,7 @@ export const DiffControls = memo(function DiffControls() {
 const b = blocks[bi]
 if (b?.type === 'tool_call') {
 const tn = b.toolCall?.name
-if (isWorkflowEditToolCall(tn, b.toolCall?.params)) {
+if (tn === 'edit_workflow') {
 id = b.toolCall?.id
 break outer
 }
@@ -80,9 +72,7 @@ export const DiffControls = memo(function DiffControls() {
 }
 }
 if (!id) {
-const candidates = Object.values(toolCallsById).filter((t) =>
-isWorkflowEditToolCall(t.name, t.params)
-)
+const candidates = Object.values(toolCallsById).filter((t) => t.name === 'edit_workflow')
 id = candidates.length ? candidates[candidates.length - 1].id : undefined
 }
 if (id) updatePreviewToolCallState('accepted', id)
@@ -112,7 +102,7 @@ export const DiffControls = memo(function DiffControls() {
 const b = blocks[bi]
 if (b?.type === 'tool_call') {
 const tn = b.toolCall?.name
-if (isWorkflowEditToolCall(tn, b.toolCall?.params)) {
+if (tn === 'edit_workflow') {
 id = b.toolCall?.id
 break outer
 }
@@ -120,9 +110,7 @@ export const DiffControls = memo(function DiffControls() {
 }
 }
 if (!id) {
-const candidates = Object.values(toolCallsById).filter((t) =>
-isWorkflowEditToolCall(t.name, t.params)
-)
+const candidates = Object.values(toolCallsById).filter((t) => t.name === 'edit_workflow')
 id = candidates.length ? candidates[candidates.length - 1].id : undefined
 }
 if (id) updatePreviewToolCallState('rejected', id)
@@ -47,27 +47,6 @@ interface ParsedTags {
 cleanContent: string
 }

-function getToolCallParams(toolCall?: CopilotToolCall): Record<string, unknown> {
-const candidate = ((toolCall as any)?.parameters ||
-(toolCall as any)?.input ||
-(toolCall as any)?.params ||
-{}) as Record<string, unknown>
-return candidate && typeof candidate === 'object' ? candidate : {}
-}
-
-function isWorkflowChangeApplyMode(toolCall?: CopilotToolCall): boolean {
-if (!toolCall || toolCall.name !== 'workflow_change') return false
-const params = getToolCallParams(toolCall)
-const mode = typeof params.mode === 'string' ? params.mode.toLowerCase() : ''
-if (mode === 'apply') return true
-return typeof params.proposalId === 'string' && params.proposalId.length > 0
-}
-
-function isWorkflowEditSummaryTool(toolCall?: CopilotToolCall): boolean {
-if (!toolCall) return false
-return isWorkflowChangeApplyMode(toolCall)
-}
-
 /**
 * Extracts plan steps from plan_respond tool calls in subagent blocks.
 * @param blocks - The subagent content blocks to search
@@ -892,10 +871,7 @@ const SubagentContentRenderer = memo(function SubagentContentRenderer({
 )
 }
 if (segment.type === 'tool' && segment.block.toolCall) {
-if (
-(toolCall.name === 'edit' || toolCall.name === 'build') &&
-isWorkflowEditSummaryTool(segment.block.toolCall)
-) {
+if (toolCall.name === 'edit' && segment.block.toolCall.name === 'edit_workflow') {
 return (
 <div key={`tool-${segment.block.toolCall.id || index}`}>
 <WorkflowEditSummary toolCall={segment.block.toolCall} />
@@ -992,11 +968,12 @@ const WorkflowEditSummary = memo(function WorkflowEditSummary({
 }
 }, [blocks])

-if (!isWorkflowEditSummaryTool(toolCall)) {
+if (toolCall.name !== 'edit_workflow') {
 return null
 }

-const params = getToolCallParams(toolCall)
+const params =
+(toolCall as any).parameters || (toolCall as any).input || (toolCall as any).params || {}
 let operations = Array.isArray(params.operations) ? params.operations : []

 if (operations.length === 0 && Array.isArray((toolCall as any).operations)) {
@@ -1242,6 +1219,11 @@ const WorkflowEditSummary = memo(function WorkflowEditSummary({
 )
 })

+/** Checks if a tool is server-side executed (not a client tool) */
+function isIntegrationTool(toolName: string): boolean {
+return !TOOL_DISPLAY_REGISTRY[toolName]
+}
+
 function shouldShowRunSkipButtons(toolCall: CopilotToolCall): boolean {
 if (!toolCall.name || toolCall.name === 'unknown_tool') {
 return false
@@ -1251,96 +1233,59 @@ function shouldShowRunSkipButtons(toolCall: CopilotToolCall): boolean {
 return false
 }

-if (toolCall.ui?.showInterrupt !== true) {
+// Never show buttons for tools the user has marked as always-allowed
+if (useCopilotStore.getState().isToolAutoAllowed(toolCall.name)) {
 return false
 }

-return true
+const hasInterrupt = !!TOOL_DISPLAY_REGISTRY[toolCall.name]?.uiConfig?.interrupt
+if (hasInterrupt) {
+return true
+}
+
+// Integration tools (user-installed) always require approval
+if (isIntegrationTool(toolCall.name)) {
+return true
+}
+
+return false
 }

 const toolCallLogger = createLogger('CopilotToolCall')

 async function sendToolDecision(
 toolCallId: string,
-status: 'accepted' | 'rejected' | 'background',
-options?: {
-toolName?: string
-remember?: boolean
-}
+status: 'accepted' | 'rejected' | 'background'
 ) {
 try {
 await fetch('/api/copilot/confirm', {
 method: 'POST',
 headers: { 'Content-Type': 'application/json' },
-body: JSON.stringify({
-toolCallId,
-status,
-...(options?.toolName ? { toolName: options.toolName } : {}),
-...(options?.remember ? { remember: true } : {}),
-}),
+body: JSON.stringify({ toolCallId, status }),
 })
 } catch (error) {
 toolCallLogger.warn('Failed to send tool decision', {
 toolCallId,
 status,
-remember: options?.remember === true,
-toolName: options?.toolName,
 error: error instanceof Error ? error.message : String(error),
 })
 }
 }

-async function removeAutoAllowedToolPreference(toolName: string): Promise<boolean> {
-try {
-const response = await fetch(`/api/copilot/auto-allowed-tools?toolId=${encodeURIComponent(toolName)}`, {
-method: 'DELETE',
-})
-return response.ok
-} catch (error) {
-toolCallLogger.warn('Failed to remove auto-allowed tool preference', {
-toolName,
-error: error instanceof Error ? error.message : String(error),
-})
-return false
-}
-}
-
-type ToolUiAction = NonNullable<NonNullable<CopilotToolCall['ui']>['actions']>[number]
-
-function actionDecision(action: ToolUiAction): 'accepted' | 'rejected' | 'background' {
-const id = action.id.toLowerCase()
-if (id.includes('background')) return 'background'
-if (action.kind === 'reject') return 'rejected'
-return 'accepted'
-}
-
-function isClientRunCapability(toolCall: CopilotToolCall): boolean {
-if (toolCall.execution?.target === 'sim_client_capability') {
-return toolCall.execution.capabilityId === 'workflow.run' || !toolCall.execution.capabilityId
-}
-return CLIENT_EXECUTABLE_RUN_TOOLS.has(toolCall.name)
-}
-
 async function handleRun(
 toolCall: CopilotToolCall,
 setToolCallState: any,
 onStateChange?: any,
-editedParams?: any,
-options?: {
-remember?: boolean
-}
+editedParams?: any
 ) {
 setToolCallState(toolCall, 'executing', editedParams ? { params: editedParams } : undefined)
 onStateChange?.('executing')
-await sendToolDecision(toolCall.id, 'accepted', {
-toolName: toolCall.name,
-remember: options?.remember === true,
-})
+await sendToolDecision(toolCall.id, 'accepted')

 // Client-executable run tools: execute on the client for real-time feedback
 // (block pulsing, console logs, stop button). The server defers execution
 // for these tools; the client reports back via mark-complete.
-if (isClientRunCapability(toolCall)) {
+if (CLIENT_EXECUTABLE_RUN_TOOLS.has(toolCall.name)) {
 const params = editedParams || toolCall.params || {}
 executeRunToolOnClient(toolCall.id, toolCall.name, params)
 }
@@ -1353,9 +1298,6 @@ async function handleSkip(toolCall: CopilotToolCall, setToolCallState: any, onSt
 }

 function getDisplayName(toolCall: CopilotToolCall): string {
-if (toolCall.ui?.phaseLabel) return toolCall.ui.phaseLabel
-if (toolCall.ui?.title) return `${getStateVerb(toolCall.state)} ${toolCall.ui.title}`
-
 const fromStore = (toolCall as any).display?.text
 if (fromStore) return fromStore
 const registryEntry = TOOL_DISPLAY_REGISTRY[toolCall.name]
@@ -1400,37 +1342,53 @@ function RunSkipButtons({
 toolCall,
 onStateChange,
 editedParams,
-actions,
 }: {
 toolCall: CopilotToolCall
 onStateChange?: (state: any) => void
 editedParams?: any
-actions: ToolUiAction[]
 }) {
 const [isProcessing, setIsProcessing] = useState(false)
 const [buttonsHidden, setButtonsHidden] = useState(false)
 const actionInProgressRef = useRef(false)
-const { setToolCallState } = useCopilotStore()
+const { setToolCallState, addAutoAllowedTool } = useCopilotStore()

-const onAction = async (action: ToolUiAction) => {
+const onRun = async () => {
 // Prevent race condition - check ref synchronously
 if (actionInProgressRef.current) return
 actionInProgressRef.current = true
 setIsProcessing(true)
 setButtonsHidden(true)
 try {
-const decision = actionDecision(action)
-if (decision === 'accepted') {
-await handleRun(toolCall, setToolCallState, onStateChange, editedParams, {
-remember: action.remember === true,
-})
-} else if (decision === 'rejected') {
-await handleSkip(toolCall, setToolCallState, onStateChange)
-} else {
-setToolCallState(toolCall, ClientToolCallState.background)
-onStateChange?.('background')
-await sendToolDecision(toolCall.id, 'background')
-}
+await handleRun(toolCall, setToolCallState, onStateChange, editedParams)
+} finally {
+setIsProcessing(false)
+actionInProgressRef.current = false
+}
+}
+
+const onAlwaysAllow = async () => {
+// Prevent race condition - check ref synchronously
+if (actionInProgressRef.current) return
+actionInProgressRef.current = true
+setIsProcessing(true)
+setButtonsHidden(true)
+try {
+await addAutoAllowedTool(toolCall.name)
+await handleRun(toolCall, setToolCallState, onStateChange, editedParams)
+} finally {
+setIsProcessing(false)
+actionInProgressRef.current = false
+}
+}
+
+const onSkip = async () => {
+// Prevent race condition - check ref synchronously
+if (actionInProgressRef.current) return
+actionInProgressRef.current = true
+setIsProcessing(true)
+setButtonsHidden(true)
+try {
+await handleSkip(toolCall, setToolCallState, onStateChange)
 } finally {
 setIsProcessing(false)
 actionInProgressRef.current = false
@@ -1439,22 +1397,23 @@ function RunSkipButtons({

 if (buttonsHidden) return null

+// Show "Always Allow" for all tools that require confirmation
+const showAlwaysAllow = true
+
+// Standardized buttons for all interrupt tools: Allow, Always Allow, Skip
 return (
 <div className='mt-[10px] flex gap-[6px]'>
-{actions.map((action, index) => {
-const variant =
-action.kind === 'reject' ? 'default' : action.remember ? 'default' : 'tertiary'
-return (
-<Button
-key={action.id}
-onClick={() => onAction(action)}
-disabled={isProcessing}
-variant={variant}
->
-{isProcessing && index === 0 ? 'Working...' : action.label}
-</Button>
-)
-})}
+<Button onClick={onRun} disabled={isProcessing} variant='tertiary'>
+{isProcessing ? 'Allowing...' : 'Allow'}
+</Button>
+{showAlwaysAllow && (
+<Button onClick={onAlwaysAllow} disabled={isProcessing} variant='default'>
+{isProcessing ? 'Allowing...' : 'Always Allow'}
+</Button>
+)}
+<Button onClick={onSkip} disabled={isProcessing} variant='default'>
+Skip
+</Button>
 </div>
 )
 }
@@ -1471,16 +1430,10 @@ export function ToolCall({
 const liveToolCall = useCopilotStore((s) =>
 effectiveId ? s.toolCallsById[effectiveId] : undefined
 )
-const rawToolCall = liveToolCall || toolCallProp
-const hasRealToolCall = !!rawToolCall
-const toolCall: CopilotToolCall =
-rawToolCall ||
-({
-id: effectiveId || '',
-name: '',
-state: ClientToolCallState.generating,
-params: {},
-} as CopilotToolCall)
+const toolCall = liveToolCall || toolCallProp
+
+// Guard: nothing to render without a toolCall
+if (!toolCall) return null

 const isExpandablePending =
 toolCall?.state === 'pending' &&
@@ -1488,15 +1441,17 @@ export function ToolCall({

 const [expanded, setExpanded] = useState(isExpandablePending)
 const [showRemoveAutoAllow, setShowRemoveAutoAllow] = useState(false)
-const [autoAllowRemovedForCall, setAutoAllowRemovedForCall] = useState(false)

 // State for editable parameters
 const params = (toolCall as any).parameters || (toolCall as any).input || toolCall.params || {}
 const [editedParams, setEditedParams] = useState(params)
 const paramsRef = useRef(params)

-const { setToolCallState } = useCopilotStore()
-const isAutoAllowed = toolCall.ui?.autoAllowed === true && !autoAllowRemovedForCall
+// Check if this integration tool is auto-allowed
+const { removeAutoAllowedTool, setToolCallState } = useCopilotStore()
+const isAutoAllowed = useCopilotStore(
+(s) => isIntegrationTool(toolCall.name) && s.isToolAutoAllowed(toolCall.name)
+)

 // Update edited params when toolCall params change (deep comparison to avoid resetting user edits on ref change)
 useEffect(() => {
@@ -1506,14 +1461,6 @@ export function ToolCall({
 }
 }, [params])

-useEffect(() => {
-setAutoAllowRemovedForCall(false)
-setShowRemoveAutoAllow(false)
-}, [toolCall.id])
-
-// Guard: nothing to render without a toolCall
-if (!hasRealToolCall) return null
-
 // Skip rendering some internal tools
 if (
 toolCall.name === 'checkoff_todo' ||
@@ -1525,9 +1472,7 @@ export function ToolCall({
 return null

 // Special rendering for subagent tools - show as thinking text with tool calls at top level
-const isSubagentTool =
-toolCall.execution?.target === 'go_subagent' ||
-TOOL_DISPLAY_REGISTRY[toolCall.name]?.uiConfig?.subagent === true
+const isSubagentTool = TOOL_DISPLAY_REGISTRY[toolCall.name]?.uiConfig?.subagent === true

 // For ALL subagent tools, don't show anything until we have blocks with content
 if (isSubagentTool) {
@@ -1554,6 +1499,28 @@ export function ToolCall({
 )
 }

+// Get current mode from store to determine if we should render integration tools
+const mode = useCopilotStore.getState().mode
+
+// Check if this is a completed/historical tool call (not pending/executing)
+// Use string comparison to handle both enum values and string values from DB
+const stateStr = String(toolCall.state)
+const isCompletedToolCall =
+stateStr === 'success' ||
+stateStr === 'error' ||
+stateStr === 'rejected' ||
+stateStr === 'aborted'
+
+// Allow rendering if:
+// 1. Tool is in TOOL_DISPLAY_REGISTRY (client tools), OR
+// 2. We're in build mode (integration tools are executed server-side), OR
+// 3. Tool call is already completed (historical - should always render)
+const isClientTool = !!TOOL_DISPLAY_REGISTRY[toolCall.name]
+const isIntegrationToolInBuildMode = mode === 'build' && !isClientTool
+
+if (!isClientTool && !isIntegrationToolInBuildMode && !isCompletedToolCall) {
+return null
+}
 const toolUIConfig = TOOL_DISPLAY_REGISTRY[toolCall.name]?.uiConfig
 // Check if tool has params table config (meaning it's expandable)
 const hasParamsTable = !!toolUIConfig?.paramsTable
@@ -1563,14 +1530,6 @@ export function ToolCall({
 toolCall.name === 'make_api_request' ||
 toolCall.name === 'set_global_workflow_variables'

-const interruptActions =
-(toolCall.ui?.actions && toolCall.ui.actions.length > 0
-? toolCall.ui.actions
-: [
-{ id: 'allow_once', label: 'Allow', kind: 'accept' as const },
-{ id: 'allow_always', label: 'Always Allow', kind: 'accept' as const, remember: true },
-{ id: 'reject', label: 'Skip', kind: 'reject' as const },
-]) as ToolUiAction[]
 const showButtons = isCurrentMessage && shouldShowRunSkipButtons(toolCall)

 // Check UI config for secondary action - only show for current message tool calls
@@ -2028,12 +1987,9 @@ export function ToolCall({
 <div className='mt-[10px]'>
 <Button
 onClick={async () => {
-const removed = await removeAutoAllowedToolPreference(toolCall.name)
-if (removed) {
-setAutoAllowRemovedForCall(true)
-setShowRemoveAutoAllow(false)
-forceUpdate({})
-}
+await removeAutoAllowedTool(toolCall.name)
+setShowRemoveAutoAllow(false)
+forceUpdate({})
 }}
 variant='default'
 className='text-xs'
@@ -2047,7 +2003,6 @@ export function ToolCall({
 toolCall={toolCall}
 onStateChange={handleStateChange}
 editedParams={editedParams}
-actions={interruptActions}
 />
 )}
 {/* Render subagent content as thinking text */}
@@ -2093,12 +2048,9 @@ export function ToolCall({
 <div className='mt-[10px]'>
 <Button
 onClick={async () => {
-const removed = await removeAutoAllowedToolPreference(toolCall.name)
-if (removed) {
-setAutoAllowRemovedForCall(true)
-setShowRemoveAutoAllow(false)
-forceUpdate({})
-}
+await removeAutoAllowedTool(toolCall.name)
+setShowRemoveAutoAllow(false)
+forceUpdate({})
 }}
 variant='default'
 className='text-xs'
@@ -2112,7 +2064,6 @@ export function ToolCall({
 toolCall={toolCall}
 onStateChange={handleStateChange}
 editedParams={editedParams}
-actions={interruptActions}
 />
 )}
 {/* Render subagent content as thinking text */}
@@ -2136,7 +2087,7 @@ export function ToolCall({
 }
 }

-const isEditWorkflow = isWorkflowEditSummaryTool(toolCall)
+const isEditWorkflow = toolCall.name === 'edit_workflow'
 const shouldShowDetails = isRunWorkflow || (isExpandableTool && expanded)
 const hasOperations = Array.isArray(params.operations) && params.operations.length > 0
 const hideTextForEditWorkflow = isEditWorkflow && hasOperations
@@ -2158,12 +2109,9 @@ export function ToolCall({
 <div className='mt-[10px]'>
 <Button
 onClick={async () => {
-const removed = await removeAutoAllowedToolPreference(toolCall.name)
-if (removed) {
-setAutoAllowRemovedForCall(true)
-setShowRemoveAutoAllow(false)
-forceUpdate({})
-}
+await removeAutoAllowedTool(toolCall.name)
+setShowRemoveAutoAllow(false)
+forceUpdate({})
 }}
 variant='default'
 className='text-xs'
@@ -2177,7 +2125,6 @@ export function ToolCall({
 toolCall={toolCall}
 onStateChange={handleStateChange}
 editedParams={editedParams}
-actions={interruptActions}
 />
 ) : showMoveToBackground ? (
 <div className='mt-[10px]'>
@@ -2208,7 +2155,7 @@ export function ToolCall({
 </Button>
 </div>
 ) : null}
-{/* Workflow edit summary - shows block changes after workflow_change(apply) */}
+{/* Workflow edit summary - shows block changes after edit_workflow completes */}
 <WorkflowEditSummary toolCall={toolCall} />

 {/* Render subagent content as thinking text */}
@@ -113,6 +113,7 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
 clearPlanArtifact,
 savePlanArtifact,
 loadAvailableModels,
+loadAutoAllowedTools,
 resumeActiveStream,
 } = useCopilotStore()

@@ -124,6 +125,8 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
 setCopilotWorkflowId,
 loadChats,
 loadAvailableModels,
+loadAutoAllowedTools,
+currentChat,
 isSendingMessage,
 resumeActiveStream,
 })
@@ -151,8 +154,6 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
 planTodos,
 })

-const renderedChatTitle = currentChat?.title || 'New Chat'
-
 /** Gets markdown content for design document section (available in all modes once created) */
 const designDocumentContent = useMemo(() => {
 if (streamingPlanContent) {
@@ -165,14 +166,6 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
 return ''
 }, [streamingPlanContent])

-useEffect(() => {
-logger.info('[TitleRender] Copilot header title changed', {
-currentChatId: currentChat?.id || null,
-currentChatTitle: currentChat?.title || null,
-renderedTitle: renderedChatTitle,
-})
-}, [currentChat?.id, currentChat?.title, renderedChatTitle])
-
 /** Focuses the copilot input */
 const focusInput = useCallback(() => {
 userInputRef.current?.focus()
@@ -355,7 +348,7 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
 {/* Header */}
 <div className='mx-[-1px] flex flex-shrink-0 items-center justify-between gap-[8px] rounded-[4px] border border-[var(--border)] bg-[var(--surface-4)] px-[12px] py-[6px]'>
 <h2 className='min-w-0 flex-1 truncate font-medium text-[14px] text-[var(--text-primary)]'>
-{renderedChatTitle}
+{currentChat?.title || 'New Chat'}
 </h2>
 <div className='flex items-center gap-[8px]'>
 <Button variant='ghost' className='p-0' onClick={handleStartNewChat}>
@@ -12,6 +12,8 @@ interface UseCopilotInitializationProps {
 setCopilotWorkflowId: (workflowId: string | null) => Promise<void>
 loadChats: (forceRefresh?: boolean) => Promise<void>
 loadAvailableModels: () => Promise<void>
+loadAutoAllowedTools: () => Promise<void>
+currentChat: any
 isSendingMessage: boolean
 resumeActiveStream: () => Promise<boolean>
 }
@@ -30,6 +32,8 @@ export function useCopilotInitialization(props: UseCopilotInitializationProps) {
 setCopilotWorkflowId,
 loadChats,
 loadAvailableModels,
+loadAutoAllowedTools,
+currentChat,
 isSendingMessage,
 resumeActiveStream,
 } = props
@@ -116,6 +120,17 @@ export function useCopilotInitialization(props: UseCopilotInitializationProps) {
 })
 }, [isSendingMessage, resumeActiveStream])

+/** Load auto-allowed tools once on mount - runs immediately, independent of workflow */
+const hasLoadedAutoAllowedToolsRef = useRef(false)
+useEffect(() => {
+if (!hasLoadedAutoAllowedToolsRef.current) {
+hasLoadedAutoAllowedToolsRef.current = true
+loadAutoAllowedTools().catch((err) => {
+logger.warn('[Copilot] Failed to load auto-allowed tools', err)
+})
+}
+}, [loadAutoAllowedTools])
+
 /** Load available models once on mount */
 const hasLoadedModelsRef = useRef(false)
 useEffect(() => {
@@ -340,7 +340,13 @@ export const Panel = memo(function Panel() {
 * Register global keyboard shortcuts using the central commands registry.
 *
 * - Mod+Enter: Run / cancel workflow (matches the Run button behavior)
+* - C: Focus Copilot tab
+* - T: Focus Toolbar tab
+* - E: Focus Editor tab
 * - Mod+F: Focus Toolbar tab and search input
+*
+* The tab-switching commands are disabled inside editable elements so typing
+* in inputs or textareas is not interrupted.
 */
 useRegisterGlobalCommands(() =>
 createCommands([
@@ -357,6 +363,33 @@ export const Panel = memo(function Panel() {
 allowInEditable: false,
 },
 },
+{
+id: 'focus-copilot-tab',
+handler: () => {
+setActiveTab('copilot')
+},
+overrides: {
+allowInEditable: false,
+},
+},
+{
+id: 'focus-toolbar-tab',
+handler: () => {
+setActiveTab('toolbar')
+},
+overrides: {
+allowInEditable: false,
+},
+},
+{
+id: 'focus-editor-tab',
+handler: () => {
+setActiveTab('editor')
+},
+overrides: {
+allowInEditable: false,
+},
+},
 {
 id: 'focus-toolbar-search',
 handler: () => {
@@ -589,7 +589,6 @@ export async function executeScheduleJob(payload: ScheduleExecutionPayload) {

 export const scheduleExecution = task({
 id: 'schedule-execution',
-machine: 'medium-1x',
 retry: {
 maxAttempts: 1,
 },
@@ -669,7 +669,6 @@ async function executeWebhookJobInternal(

 export const webhookExecution = task({
 id: 'webhook-execution',
-machine: 'medium-1x',
 retry: {
 maxAttempts: 1,
 },
@@ -197,6 +197,5 @@ export async function executeWorkflowJob(payload: WorkflowExecutionPayload) {

 export const workflowExecutionTask = task({
 id: 'workflow-execution',
-machine: 'medium-1x',
 run: executeWorkflowJob,
 })
@@ -10,11 +10,9 @@ import {
 getReasoningEffortValuesForModel,
 getThinkingLevelsForModel,
 getVerbosityValuesForModel,
-MODELS_WITH_DEEP_RESEARCH,
 MODELS_WITH_REASONING_EFFORT,
 MODELS_WITH_THINKING,
 MODELS_WITH_VERBOSITY,
-MODELS_WITHOUT_MEMORY,
 providers,
 supportsTemperature,
 } from '@/providers/utils'
@@ -414,22 +412,12 @@ Return ONLY the JSON array.`,
 title: 'Tools',
 type: 'tool-input',
 defaultValue: [],
-condition: {
-field: 'model',
-value: MODELS_WITH_DEEP_RESEARCH,
-not: true,
-},
 },
 {
 id: 'skills',
 title: 'Skills',
 type: 'skill-input',
 defaultValue: [],
-condition: {
-field: 'model',
-value: MODELS_WITH_DEEP_RESEARCH,
-not: true,
-},
 },
 {
 id: 'memoryType',
@@ -443,11 +431,6 @@ Return ONLY the JSON array.`,
 { label: 'Sliding window (tokens)', id: 'sliding_window_tokens' },
 ],
 defaultValue: 'none',
-condition: {
-field: 'model',
-value: MODELS_WITHOUT_MEMORY,
-not: true,
-},
 },
 {
 id: 'conversationId',
@@ -461,7 +444,6 @@ Return ONLY the JSON array.`,
 condition: {
 field: 'memoryType',
 value: ['conversation', 'sliding_window', 'sliding_window_tokens'],
-and: { field: 'model', value: MODELS_WITHOUT_MEMORY, not: true },
 },
 },
 {
@@ -472,7 +454,6 @@ Return ONLY the JSON array.`,
 condition: {
 field: 'memoryType',
 value: ['sliding_window'],
-and: { field: 'model', value: MODELS_WITHOUT_MEMORY, not: true },
 },
 },
 {
@@ -483,7 +464,6 @@ Return ONLY the JSON array.`,
 condition: {
 field: 'memoryType',
 value: ['sliding_window_tokens'],
-and: { field: 'model', value: MODELS_WITHOUT_MEMORY, not: true },
 },
 },
 {
@@ -497,13 +477,9 @@ Return ONLY the JSON array.`,
 condition: () => ({
 field: 'model',
 value: (() => {
-const deepResearch = new Set(MODELS_WITH_DEEP_RESEARCH.map((m) => m.toLowerCase()))
 const allModels = Object.keys(getBaseModelProviders())
 return allModels.filter(
-(model) =>
-supportsTemperature(model) &&
-getMaxTemperature(model) === 1 &&
-!deepResearch.has(model.toLowerCase())
+(model) => supportsTemperature(model) && getMaxTemperature(model) === 1
 )
 })(),
 }),
@@ -519,13 +495,9 @@ Return ONLY the JSON array.`,
 condition: () => ({
 field: 'model',
 value: (() => {
-const deepResearch = new Set(MODELS_WITH_DEEP_RESEARCH.map((m) => m.toLowerCase()))
 const allModels = Object.keys(getBaseModelProviders())
 return allModels.filter(
-(model) =>
-supportsTemperature(model) &&
-getMaxTemperature(model) === 2 &&
-!deepResearch.has(model.toLowerCase())
+(model) => supportsTemperature(model) && getMaxTemperature(model) === 2
 )
 })(),
 }),
@@ -536,11 +508,6 @@ Return ONLY the JSON array.`,
 type: 'short-input',
 placeholder: 'Enter max tokens (e.g., 4096)...',
 mode: 'advanced',
-condition: {
-field: 'model',
-value: MODELS_WITH_DEEP_RESEARCH,
-not: true,
-},
 },
 {
 id: 'responseFormat',
@@ -548,11 +515,6 @@ Return ONLY the JSON array.`,
 type: 'code',
 placeholder: 'Enter JSON schema...',
 language: 'json',
-condition: {
-field: 'model',
-value: MODELS_WITH_DEEP_RESEARCH,
-not: true,
-},
 wandConfig: {
 enabled: true,
 maintainHistory: true,
@@ -645,16 +607,6 @@ Example 3 (Array Input):
 generationType: 'json-schema',
 },
 },
-{
-id: 'previousInteractionId',
-title: 'Previous Interaction ID',
-type: 'short-input',
-placeholder: 'e.g., {{agent_1.interactionId}}',
-condition: {
-field: 'model',
-value: MODELS_WITH_DEEP_RESEARCH,
-},
-},
 ],
 tools: {
 access: [
@@ -818,13 +770,5 @@ Example 3 (Array Input):
 description: 'Provider timing information',
 },
 cost: { type: 'json', description: 'Cost of the API call' },
-interactionId: {
-type: 'string',
-description: 'Interaction ID for multi-turn deep research follow-ups',
-condition: {
-field: 'model',
-value: MODELS_WITH_DEEP_RESEARCH,
-},
-},
 },
 }
@@ -394,7 +394,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
|||||||
// Page Property Operations
|
// Page Property Operations
|
||||||
{ label: 'List Page Properties', id: 'list_page_properties' },
|
{ label: 'List Page Properties', id: 'list_page_properties' },
|
||||||
{ label: 'Create Page Property', id: 'create_page_property' },
|
{ label: 'Create Page Property', id: 'create_page_property' },
|
||||||
{ label: 'Delete Page Property', id: 'delete_page_property' },
|
|
||||||
// Search Operations
|
// Search Operations
|
||||||
{ label: 'Search Content', id: 'search' },
|
{ label: 'Search Content', id: 'search' },
|
||||||
{ label: 'Search in Space', id: 'search_in_space' },
|
{ label: 'Search in Space', id: 'search_in_space' },
|
||||||
@@ -415,9 +414,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
|||||||
// Label Operations
|
// Label Operations
|
||||||
{ label: 'List Labels', id: 'list_labels' },
|
{ label: 'List Labels', id: 'list_labels' },
|
||||||
{ label: 'Add Label', id: 'add_label' },
|
{ label: 'Add Label', id: 'add_label' },
|
||||||
{ label: 'Delete Label', id: 'delete_label' },
|
|
||||||
{ label: 'Get Pages by Label', id: 'get_pages_by_label' },
|
|
||||||
{ label: 'List Space Labels', id: 'list_space_labels' },
|
|
||||||
// Space Operations
|
// Space Operations
|
||||||
{ label: 'Get Space', id: 'get_space' },
|
{ label: 'Get Space', id: 'get_space' },
|
||||||
{ label: 'List Spaces', id: 'list_spaces' },
|
{ label: 'List Spaces', id: 'list_spaces' },
|
||||||
@@ -489,8 +485,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
|||||||
'search_in_space',
|
'search_in_space',
|
||||||
'get_space',
|
'get_space',
|
||||||
'list_spaces',
|
'list_spaces',
|
||||||
'get_pages_by_label',
|
|
||||||
'list_space_labels',
|
|
||||||
],
|
],
|
||||||
not: true,
|
not: true,
|
||||||
},
|
},
|
||||||
@@ -506,8 +500,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
|||||||
'list_labels',
|
'list_labels',
|
||||||
'upload_attachment',
|
'upload_attachment',
|
||||||
'add_label',
|
'add_label',
|
||||||
'delete_label',
|
|
||||||
'delete_page_property',
|
|
||||||
'get_page_children',
|
'get_page_children',
|
||||||
'get_page_ancestors',
|
'get_page_ancestors',
|
||||||
'list_page_versions',
|
'list_page_versions',
|
||||||
@@ -535,8 +527,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
|||||||
'search_in_space',
|
'search_in_space',
|
||||||
'get_space',
|
'get_space',
|
||||||
'list_spaces',
|
'list_spaces',
|
||||||
'get_pages_by_label',
|
|
||||||
'list_space_labels',
|
|
||||||
],
|
],
|
||||||
not: true,
|
not: true,
|
||||||
},
|
},
|
||||||
@@ -552,8 +542,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
|||||||
'list_labels',
|
'list_labels',
|
||||||
'upload_attachment',
|
'upload_attachment',
|
||||||
'add_label',
|
'add_label',
|
||||||
'delete_label',
|
|
||||||
'delete_page_property',
|
|
||||||
'get_page_children',
|
'get_page_children',
|
||||||
'get_page_ancestors',
|
'get_page_ancestors',
|
||||||
'list_page_versions',
|
'list_page_versions',
|
||||||
@@ -578,7 +566,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
|||||||
'search_in_space',
|
'search_in_space',
|
||||||
'create_blogpost',
|
'create_blogpost',
|
||||||
'list_blogposts_in_space',
|
'list_blogposts_in_space',
|
||||||
'list_space_labels',
|
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
@@ -614,14 +601,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
|||||||
required: true,
|
required: true,
|
||||||
condition: { field: 'operation', value: 'create_page_property' },
|
condition: { field: 'operation', value: 'create_page_property' },
|
||||||
},
|
},
|
||||||
{
|
|
||||||
id: 'propertyId',
|
|
||||||
title: 'Property ID',
|
|
||||||
type: 'short-input',
|
|
||||||
placeholder: 'Enter property ID to delete',
|
|
||||||
required: true,
|
|
||||||
condition: { field: 'operation', value: 'delete_page_property' },
|
|
||||||
},
|
|
||||||
{
|
{
|
||||||
id: 'title',
|
id: 'title',
|
||||||
title: 'Title',
|
title: 'Title',
|
||||||
@@ -715,7 +694,7 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
|||||||
type: 'short-input',
|
type: 'short-input',
|
||||||
placeholder: 'Enter label name',
|
placeholder: 'Enter label name',
|
||||||
required: true,
|
required: true,
|
||||||
condition: { field: 'operation', value: ['add_label', 'delete_label'] },
|
condition: { field: 'operation', value: 'add_label' },
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
id: 'labelPrefix',
|
id: 'labelPrefix',
|
||||||
@@ -730,14 +709,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
|||||||
value: () => 'global',
|
value: () => 'global',
|
||||||
condition: { field: 'operation', value: 'add_label' },
|
condition: { field: 'operation', value: 'add_label' },
|
||||||
},
|
},
|
||||||
{
|
|
||||||
id: 'labelId',
|
|
||||||
title: 'Label ID',
|
|
||||||
type: 'short-input',
|
|
||||||
placeholder: 'Enter label ID',
|
|
||||||
required: true,
|
|
||||||
condition: { field: 'operation', value: 'get_pages_by_label' },
|
|
||||||
},
|
|
||||||
{
|
{
|
||||||
id: 'blogPostStatus',
|
id: 'blogPostStatus',
|
||||||
title: 'Status',
|
title: 'Status',
|
||||||
@@ -788,8 +759,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
|||||||
'list_page_versions',
|
'list_page_versions',
|
||||||
'list_page_properties',
|
'list_page_properties',
|
||||||
'list_labels',
|
'list_labels',
|
||||||
'get_pages_by_label',
|
|
||||||
'list_space_labels',
|
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
@@ -811,8 +780,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
|||||||
'list_page_versions',
|
'list_page_versions',
|
||||||
'list_page_properties',
|
'list_page_properties',
|
||||||
'list_labels',
|
'list_labels',
|
||||||
'get_pages_by_label',
|
|
||||||
'list_space_labels',
|
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
@@ -833,7 +800,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
|||||||
// Property Tools
|
// Property Tools
|
||||||
'confluence_list_page_properties',
|
'confluence_list_page_properties',
|
||||||
'confluence_create_page_property',
|
'confluence_create_page_property',
|
||||||
'confluence_delete_page_property',
|
|
||||||
// Search Tools
|
// Search Tools
|
||||||
'confluence_search',
|
'confluence_search',
|
||||||
'confluence_search_in_space',
|
'confluence_search_in_space',
|
||||||
@@ -854,9 +820,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
|||||||
// Label Tools
|
// Label Tools
|
||||||
'confluence_list_labels',
|
'confluence_list_labels',
|
||||||
'confluence_add_label',
|
'confluence_add_label',
|
||||||
'confluence_delete_label',
|
|
||||||
'confluence_get_pages_by_label',
|
|
||||||
'confluence_list_space_labels',
|
|
||||||
// Space Tools
|
// Space Tools
|
||||||
'confluence_get_space',
|
'confluence_get_space',
|
||||||
'confluence_list_spaces',
|
'confluence_list_spaces',
|
||||||
@@ -889,8 +852,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
|||||||
return 'confluence_list_page_properties'
|
return 'confluence_list_page_properties'
|
||||||
case 'create_page_property':
|
case 'create_page_property':
|
||||||
return 'confluence_create_page_property'
|
return 'confluence_create_page_property'
|
||||||
case 'delete_page_property':
|
|
||||||
return 'confluence_delete_page_property'
|
|
||||||
// Search Operations
|
// Search Operations
|
||||||
case 'search':
|
case 'search':
|
||||||
return 'confluence_search'
|
return 'confluence_search'
|
||||||
@@ -926,12 +887,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
|||||||
return 'confluence_list_labels'
|
return 'confluence_list_labels'
|
||||||
case 'add_label':
|
case 'add_label':
|
||||||
return 'confluence_add_label'
|
return 'confluence_add_label'
|
||||||
case 'delete_label':
|
|
||||||
return 'confluence_delete_label'
|
|
||||||
case 'get_pages_by_label':
|
|
||||||
return 'confluence_get_pages_by_label'
|
|
||||||
case 'list_space_labels':
|
|
||||||
return 'confluence_list_space_labels'
|
|
||||||
// Space Operations
|
// Space Operations
|
||||||
case 'get_space':
|
case 'get_space':
|
||||||
return 'confluence_get_space'
|
return 'confluence_get_space'
|
||||||
@@ -953,9 +908,7 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
|
|||||||
versionNumber,
|
versionNumber,
|
||||||
propertyKey,
|
propertyKey,
|
||||||
propertyValue,
|
propertyValue,
|
||||||
propertyId,
|
|
||||||
labelPrefix,
|
labelPrefix,
|
||||||
labelId,
|
|
||||||
blogPostStatus,
|
blogPostStatus,
|
||||||
purge,
|
purge,
|
||||||
bodyFormat,
|
bodyFormat,
|
||||||
@@ -1006,9 +959,7 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
       }
     }

-    // Operations that support generic cursor pagination.
-    // get_pages_by_label and list_space_labels have dedicated handlers
-    // below that pass cursor along with their required params (labelId, spaceId).
+    // Operations that support cursor pagination
     const supportsCursor = [
       'list_attachments',
       'list_spaces',
@@ -1045,35 +996,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
       }
     }

-    if (operation === 'delete_page_property') {
-      return {
-        credential,
-        pageId: effectivePageId,
-        operation,
-        propertyId,
-        ...rest,
-      }
-    }
-
-    if (operation === 'get_pages_by_label') {
-      return {
-        credential,
-        operation,
-        labelId,
-        cursor: cursor || undefined,
-        ...rest,
-      }
-    }
-
-    if (operation === 'list_space_labels') {
-      return {
-        credential,
-        operation,
-        cursor: cursor || undefined,
-        ...rest,
-      }
-    }
-
     if (operation === 'upload_attachment') {
       const normalizedFile = normalizeFileInput(attachmentFile, { single: true })
       if (!normalizedFile) {
@@ -1122,9 +1044,7 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
     attachmentFileName: { type: 'string', description: 'Custom file name for attachment' },
     attachmentComment: { type: 'string', description: 'Comment for the attachment' },
     labelName: { type: 'string', description: 'Label name' },
-    labelId: { type: 'string', description: 'Label identifier' },
     labelPrefix: { type: 'string', description: 'Label prefix (global, my, team, system)' },
-    propertyId: { type: 'string', description: 'Property identifier' },
     blogPostStatus: { type: 'string', description: 'Blog post status (current or draft)' },
     purge: { type: 'boolean', description: 'Permanently delete instead of moving to trash' },
     bodyFormat: { type: 'string', description: 'Body format for comments' },
@@ -1160,7 +1080,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
     // Label Results
     labels: { type: 'array', description: 'List of labels' },
    labelName: { type: 'string', description: 'Label name' },
-    labelId: { type: 'string', description: 'Label identifier' },
     // Space Results
     spaces: { type: 'array', description: 'List of spaces' },
     spaceId: { type: 'string', description: 'Space identifier' },
@@ -2,8 +2,8 @@
 slug: enterprise
 title: 'Build with Sim for Enterprise'
 description: 'Access control, BYOK, self-hosted deployments, on-prem Copilot, SSO & SAML, whitelabeling, Admin API, and flexible data retention—enterprise features for teams with strict security and compliance requirements.'
-date: 2026-02-11
-updated: 2026-02-11
+date: 2026-01-23
+updated: 2026-01-23
 authors:
   - vik
 readingTime: 10
@@ -13,8 +13,8 @@ ogAlt: 'Sim Enterprise features overview'
 about: ['Enterprise Software', 'Security', 'Compliance', 'Self-Hosting']
 timeRequired: PT10M
 canonical: https://sim.ai/studio/enterprise
-featured: true
-draft: false
+featured: false
+draft: true
 ---

 We've been working with security teams at larger organizations to bring Sim into environments with strict compliance and data handling requirements. This post covers the enterprise capabilities we've built: granular access control, bring-your-own-keys, self-hosted deployments, on-prem Copilot, SSO & SAML, whitelabeling, compliance, and programmatic management via the Admin API.
@@ -999,7 +999,6 @@ export class AgentBlockHandler implements BlockHandler {
       reasoningEffort: inputs.reasoningEffort,
       verbosity: inputs.verbosity,
       thinkingLevel: inputs.thinkingLevel,
-      previousInteractionId: inputs.previousInteractionId,
     }
   }

@@ -1070,7 +1069,6 @@ export class AgentBlockHandler implements BlockHandler {
       reasoningEffort: providerRequest.reasoningEffort,
       verbosity: providerRequest.verbosity,
       thinkingLevel: providerRequest.thinkingLevel,
-      previousInteractionId: providerRequest.previousInteractionId,
     })

     return this.processProviderResponse(response, block, responseFormat)
@@ -1271,7 +1269,6 @@ export class AgentBlockHandler implements BlockHandler {
       content: result.content,
       model: result.model,
       ...this.createResponseMetadata(result),
-      ...(result.interactionId && { interactionId: result.interactionId }),
     }
   }

@@ -20,8 +20,6 @@ export interface AgentInputs {
   conversationId?: string // Required for all non-none memory types
   slidingWindowSize?: string // For message-based sliding window
   slidingWindowTokens?: string // For token-based sliding window
-  // Deep research multi-turn
-  previousInteractionId?: string // Interactions API previous interaction reference
   // LLM parameters
   temperature?: string
   maxTokens?: string
@@ -20,8 +20,6 @@ export interface BuildPayloadParams {
   fileAttachments?: Array<{ id: string; key: string; size: number; [key: string]: unknown }>
   commands?: string[]
   chatId?: string
-  conversationId?: string
-  prefetch?: boolean
   implicitFeedback?: string
 }

@@ -66,10 +64,6 @@ export async function buildCopilotRequestPayload(
     fileAttachments,
     commands,
     chatId,
-    conversationId,
-    prefetch,
-    conversationHistory,
-    implicitFeedback,
   } = params

   const selectedModel = options.selectedModel
@@ -160,12 +154,6 @@ export async function buildCopilotRequestPayload(
     version: SIM_AGENT_VERSION,
     ...(contexts && contexts.length > 0 ? { context: contexts } : {}),
     ...(chatId ? { chatId } : {}),
-    ...(conversationId ? { conversationId } : {}),
-    ...(Array.isArray(conversationHistory) && conversationHistory.length > 0
-      ? { conversationHistory }
-      : {}),
-    ...(typeof prefetch === 'boolean' ? { prefetch } : {}),
-    ...(implicitFeedback ? { implicitFeedback } : {}),
     ...(processedFileContents.length > 0 ? { fileAttachments: processedFileContents } : {}),
     ...(integrationTools.length > 0 ? { integrationTools } : {}),
     ...(credentials ? { credentials } : {}),
@@ -1,21 +1,22 @@
 import { createLogger } from '@sim/logger'
-import { STREAM_STORAGE_KEY } from '@/lib/copilot/constants'
+import { COPILOT_CONFIRM_API_PATH, STREAM_STORAGE_KEY } from '@/lib/copilot/constants'
 import { asRecord } from '@/lib/copilot/orchestrator/sse-utils'
 import type { SSEEvent } from '@/lib/copilot/orchestrator/types'
-import { isBackgroundState, isRejectedState, isReviewState } from '@/lib/copilot/store-utils'
+import {
+  isBackgroundState,
+  isRejectedState,
+  isReviewState,
+  resolveToolDisplay,
+} from '@/lib/copilot/store-utils'
 import { ClientToolCallState } from '@/lib/copilot/tools/client/tool-display-registry'
 import type { CopilotStore, CopilotStreamInfo, CopilotToolCall } from '@/stores/panel/copilot/types'
+import { useVariablesStore } from '@/stores/panel/variables/store'
+import { useEnvironmentStore } from '@/stores/settings/environment/store'
+import { useWorkflowDiffStore } from '@/stores/workflow-diff/store'
+import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
+import type { WorkflowState } from '@/stores/workflows/workflow/types'
 import { appendTextBlock, beginThinkingBlock, finalizeThinkingBlock } from './content-blocks'
 import { CLIENT_EXECUTABLE_RUN_TOOLS, executeRunToolOnClient } from './run-tool-execution'
-import {
-  extractOperationListFromResultPayload,
-  extractToolExecutionMetadata,
-  extractToolUiMetadata,
-  isWorkflowChangeApplyCall,
-  mapServerStateToClientState,
-  resolveDisplayFromServerUi,
-} from './tool-call-helpers'
-import { applyToolEffects } from './tool-effects'
 import type { ClientContentBlock, ClientStreamingContext } from './types'

 const logger = createLogger('CopilotClientSseHandlers')
@@ -25,11 +26,21 @@ const MAX_BATCH_INTERVAL = 50
 const MIN_BATCH_INTERVAL = 16
 const MAX_QUEUE_SIZE = 5

-function isClientRunCapability(toolCall: CopilotToolCall): boolean {
-  if (toolCall.execution?.target === 'sim_client_capability') {
-    return toolCall.execution.capabilityId === 'workflow.run' || !toolCall.execution.capabilityId
-  }
-  return CLIENT_EXECUTABLE_RUN_TOOLS.has(toolCall.name)
+/**
+ * Send an auto-accept confirmation to the server for auto-allowed tools.
+ * The server-side orchestrator polls Redis for this decision.
+ */
+export function sendAutoAcceptConfirmation(toolCallId: string): void {
+  fetch(COPILOT_CONFIRM_API_PATH, {
+    method: 'POST',
+    headers: { 'Content-Type': 'application/json' },
+    body: JSON.stringify({ toolCallId, status: 'accepted' }),
+  }).catch((error) => {
+    logger.warn('Failed to send auto-accept confirmation', {
+      toolCallId,
+      error: error instanceof Error ? error.message : String(error),
+    })
+  })
 }

 function writeActiveStreamToStorage(info: CopilotStreamInfo | null): void {
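For orientation, here is a minimal sketch (editorial, not part of this changeset) of how a caller might gate the confirmation on the store's auto-allow check. The names `isToolAutoAllowed` and `sendAutoAcceptConfirmation` appear in this diff; the wrapper function and its signature are assumptions.

```ts
// Illustrative sketch only: the wrapper name and signature are assumed.
function confirmIfAutoAllowed(
  isToolAutoAllowed: (toolName: string) => boolean,
  toolCallId: string,
  toolName: string
): void {
  // Mirrors the pattern used later in this diff: auto-allowed tools are
  // confirmed immediately instead of waiting for the user to click "Allow".
  if (isToolAutoAllowed(toolName)) {
    sendAutoAcceptConfirmation(toolCallId)
  }
}
```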
@@ -219,86 +230,28 @@ export const sseHandlers: Record<string, SSEHandler> = {
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
title_updated: (_data, _context, get, set) => {
|
title_updated: (_data, _context, get, set) => {
|
||||||
const title = typeof _data.title === 'string' ? _data.title.trim() : ''
|
const title = _data.title
|
||||||
const eventChatId = typeof _data.chatId === 'string' ? _data.chatId : undefined
|
if (!title) return
|
||||||
const { currentChat, chats } = get()
|
const { currentChat, chats } = get()
|
||||||
|
if (currentChat) {
|
||||||
logger.info('[Title] Received title_updated SSE event', {
|
set({
|
||||||
eventTitle: title,
|
currentChat: { ...currentChat, title },
|
||||||
eventChatId: eventChatId || null,
|
chats: chats.map((c) => (c.id === currentChat.id ? { ...c, title } : c)),
|
||||||
currentChatId: currentChat?.id || null,
|
|
||||||
currentChatTitle: currentChat?.title || null,
|
|
||||||
chatCount: chats.length,
|
|
||||||
})
|
|
||||||
|
|
||||||
if (!title) {
|
|
||||||
logger.warn('[Title] Ignoring title_updated event with empty title', {
|
|
||||||
payload: _data,
|
|
||||||
})
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!currentChat) {
|
|
||||||
logger.warn('[Title] Received title_updated event without an active currentChat', {
|
|
||||||
eventChatId: eventChatId || null,
|
|
||||||
title,
|
|
||||||
})
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
const targetChatId = eventChatId || currentChat.id
|
|
||||||
if (eventChatId && eventChatId !== currentChat.id) {
|
|
||||||
logger.warn('[Title] title_updated event chatId does not match currentChat', {
|
|
||||||
eventChatId,
|
|
||||||
currentChatId: currentChat.id,
|
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
set({
|
|
||||||
currentChat:
|
|
||||||
currentChat.id === targetChatId
|
|
||||||
? {
|
|
||||||
...currentChat,
|
|
||||||
title,
|
|
||||||
}
|
|
||||||
: currentChat,
|
|
||||||
chats: chats.map((c) => (c.id === targetChatId ? { ...c, title } : c)),
|
|
||||||
})
|
|
||||||
|
|
||||||
const updatedState = get()
|
|
||||||
logger.info('[Title] Applied title_updated event to copilot store', {
|
|
||||||
targetChatId,
|
|
||||||
renderedCurrentChatId: updatedState.currentChat?.id || null,
|
|
||||||
renderedCurrentChatTitle: updatedState.currentChat?.title || null,
|
|
||||||
chatListTitle: updatedState.chats.find((c) => c.id === targetChatId)?.title || null,
|
|
||||||
})
|
|
||||||
},
|
},
|
||||||
tool_result: (data, context, get, set) => {
|
tool_result: (data, context, get, set) => {
|
||||||
try {
|
try {
|
||||||
const eventData = asRecord(data?.data)
|
const eventData = asRecord(data?.data)
|
||||||
const toolCallId: string | undefined =
|
const toolCallId: string | undefined =
|
||||||
data?.toolCallId ||
|
data?.toolCallId || (eventData.id as string | undefined)
|
||||||
(eventData.id as string | undefined) ||
|
|
||||||
(eventData.callId as string | undefined)
|
|
||||||
const success: boolean | undefined = data?.success
|
const success: boolean | undefined = data?.success
|
||||||
const failedDependency: boolean = data?.failedDependency === true
|
const failedDependency: boolean = data?.failedDependency === true
|
||||||
const resultObj = asRecord(data?.result)
|
const resultObj = asRecord(data?.result)
|
||||||
const skipped: boolean = resultObj.skipped === true
|
const skipped: boolean = resultObj.skipped === true
|
||||||
if (!toolCallId) return
|
if (!toolCallId) return
|
||||||
const uiMetadata = extractToolUiMetadata(eventData)
|
|
||||||
const executionMetadata = extractToolExecutionMetadata(eventData)
|
|
||||||
const serverState = (eventData.state as string | undefined) || undefined
|
|
||||||
const targetState = serverState
|
|
||||||
? mapServerStateToClientState(serverState)
|
|
||||||
: success
|
|
||||||
? ClientToolCallState.success
|
|
||||||
: failedDependency || skipped
|
|
||||||
? ClientToolCallState.rejected
|
|
||||||
: ClientToolCallState.error
|
|
||||||
const resultPayload = asRecord(data?.result || eventData.result || eventData.data || data?.data)
|
|
||||||
const { toolCallsById } = get()
|
const { toolCallsById } = get()
|
||||||
const current = toolCallsById[toolCallId]
|
const current = toolCallsById[toolCallId]
|
||||||
let paramsForCurrentToolCall: Record<string, unknown> | undefined = current?.params
|
|
||||||
if (current) {
|
if (current) {
|
||||||
if (
|
if (
|
||||||
isRejectedState(current.state) ||
|
isRejectedState(current.state) ||
|
||||||
@@ -307,33 +260,16 @@ export const sseHandlers: Record<string, SSEHandler> = {
         ) {
           return
         }
-        if (
-          targetState === ClientToolCallState.success &&
-          isWorkflowChangeApplyCall(current.name, paramsForCurrentToolCall)
-        ) {
-          const operations = extractOperationListFromResultPayload(resultPayload || {})
-          if (operations && operations.length > 0) {
-            paramsForCurrentToolCall = {
-              ...(current.params || {}),
-              operations,
-            }
-          }
-        }
-
+        const targetState = success
+          ? ClientToolCallState.success
+          : failedDependency || skipped
+            ? ClientToolCallState.rejected
+            : ClientToolCallState.error
         const updatedMap = { ...toolCallsById }
         updatedMap[toolCallId] = {
           ...current,
-          ui: uiMetadata || current.ui,
-          execution: executionMetadata || current.execution,
-          params: paramsForCurrentToolCall,
           state: targetState,
-          display: resolveDisplayFromServerUi(
-            current.name,
-            targetState,
-            current.id,
-            paramsForCurrentToolCall,
-            uiMetadata || current.ui
-          ),
+          display: resolveToolDisplay(current.name, targetState, current.id, current.params),
         }
         set({ toolCallsById: updatedMap })

@@ -376,11 +312,138 @@ export const sseHandlers: Record<string, SSEHandler> = {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
applyToolEffects({
|
if (current.name === 'edit_workflow') {
|
||||||
effectsRaw: eventData.effects,
|
try {
|
||||||
toolCall: updatedMap[toolCallId],
|
const resultPayload = asRecord(
|
||||||
resultPayload,
|
data?.result || eventData.result || eventData.data || data?.data
|
||||||
})
|
)
|
||||||
|
const workflowState = asRecord(resultPayload?.workflowState)
|
||||||
|
const hasWorkflowState = !!resultPayload?.workflowState
|
||||||
|
logger.info('[SSE] edit_workflow result received', {
|
||||||
|
hasWorkflowState,
|
||||||
|
blockCount: hasWorkflowState ? Object.keys(workflowState.blocks ?? {}).length : 0,
|
||||||
|
edgeCount: Array.isArray(workflowState.edges) ? workflowState.edges.length : 0,
|
||||||
|
})
|
||||||
|
if (hasWorkflowState) {
|
||||||
|
const diffStore = useWorkflowDiffStore.getState()
|
||||||
|
diffStore
|
||||||
|
.setProposedChanges(resultPayload.workflowState as WorkflowState)
|
||||||
|
.catch((err) => {
|
||||||
|
logger.error('[SSE] Failed to apply edit_workflow diff', {
|
||||||
|
error: err instanceof Error ? err.message : String(err),
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
||||||
|
} catch (err) {
|
||||||
|
logger.error('[SSE] edit_workflow result handling failed', {
|
||||||
|
error: err instanceof Error ? err.message : String(err),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Deploy tools: update deployment status in workflow registry
|
||||||
|
if (
|
||||||
|
targetState === ClientToolCallState.success &&
|
||||||
|
(current.name === 'deploy_api' ||
|
||||||
|
current.name === 'deploy_chat' ||
|
||||||
|
current.name === 'deploy_mcp' ||
|
||||||
|
current.name === 'redeploy')
|
||||||
|
) {
|
||||||
|
try {
|
||||||
|
const resultPayload = asRecord(
|
||||||
|
data?.result || eventData.result || eventData.data || data?.data
|
||||||
|
)
|
||||||
|
const input = asRecord(current.params)
|
||||||
|
const workflowId =
|
||||||
|
(resultPayload?.workflowId as string) ||
|
||||||
|
(input?.workflowId as string) ||
|
||||||
|
useWorkflowRegistry.getState().activeWorkflowId
|
||||||
|
const isDeployed = resultPayload?.isDeployed !== false
|
||||||
|
if (workflowId) {
|
||||||
|
useWorkflowRegistry
|
||||||
|
.getState()
|
||||||
|
.setDeploymentStatus(workflowId, isDeployed, isDeployed ? new Date() : undefined)
|
||||||
|
logger.info('[SSE] Updated deployment status from tool result', {
|
||||||
|
toolName: current.name,
|
||||||
|
workflowId,
|
||||||
|
isDeployed,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
} catch (err) {
|
||||||
|
logger.warn('[SSE] Failed to hydrate deployment status', {
|
||||||
|
error: err instanceof Error ? err.message : String(err),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Environment variables: reload store after successful set
|
||||||
|
if (
|
||||||
|
targetState === ClientToolCallState.success &&
|
||||||
|
current.name === 'set_environment_variables'
|
||||||
|
) {
|
||||||
|
try {
|
||||||
|
useEnvironmentStore.getState().loadEnvironmentVariables()
|
||||||
|
logger.info('[SSE] Triggered environment variables reload')
|
||||||
|
} catch (err) {
|
||||||
|
logger.warn('[SSE] Failed to reload environment variables', {
|
||||||
|
error: err instanceof Error ? err.message : String(err),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Workflow variables: reload store after successful set
|
||||||
|
if (
|
||||||
|
targetState === ClientToolCallState.success &&
|
||||||
|
current.name === 'set_global_workflow_variables'
|
||||||
|
) {
|
||||||
|
try {
|
||||||
|
const input = asRecord(current.params)
|
||||||
|
const workflowId =
|
||||||
|
(input?.workflowId as string) || useWorkflowRegistry.getState().activeWorkflowId
|
||||||
|
if (workflowId) {
|
||||||
|
useVariablesStore.getState().loadForWorkflow(workflowId)
|
||||||
|
logger.info('[SSE] Triggered workflow variables reload', { workflowId })
|
||||||
|
}
|
||||||
|
} catch (err) {
|
||||||
|
logger.warn('[SSE] Failed to reload workflow variables', {
|
||||||
|
error: err instanceof Error ? err.message : String(err),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Generate API key: update deployment status with the new key
|
||||||
|
if (targetState === ClientToolCallState.success && current.name === 'generate_api_key') {
|
||||||
|
try {
|
||||||
|
const resultPayload = asRecord(
|
||||||
|
data?.result || eventData.result || eventData.data || data?.data
|
||||||
|
)
|
||||||
|
const input = asRecord(current.params)
|
||||||
|
const workflowId =
|
||||||
|
(input?.workflowId as string) || useWorkflowRegistry.getState().activeWorkflowId
|
||||||
|
const apiKey = (resultPayload?.apiKey || resultPayload?.key) as string | undefined
|
||||||
|
if (workflowId) {
|
||||||
|
const existingStatus = useWorkflowRegistry
|
||||||
|
.getState()
|
||||||
|
.getWorkflowDeploymentStatus(workflowId)
|
||||||
|
useWorkflowRegistry
|
||||||
|
.getState()
|
||||||
|
.setDeploymentStatus(
|
||||||
|
workflowId,
|
||||||
|
existingStatus?.isDeployed ?? false,
|
||||||
|
existingStatus?.deployedAt,
|
||||||
|
apiKey
|
||||||
|
)
|
||||||
|
logger.info('[SSE] Updated deployment status with API key', {
|
||||||
|
workflowId,
|
||||||
|
hasKey: !!apiKey,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
} catch (err) {
|
||||||
|
logger.warn('[SSE] Failed to hydrate API key status', {
|
||||||
|
error: err instanceof Error ? err.message : String(err),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
for (let i = 0; i < context.contentBlocks.length; i++) {
|
for (let i = 0; i < context.contentBlocks.length; i++) {
|
||||||
@@ -397,24 +460,16 @@ export const sseHandlers: Record<string, SSEHandler> = {
             : failedDependency || skipped
               ? ClientToolCallState.rejected
               : ClientToolCallState.error
-        const paramsForBlock =
-          b.toolCall?.id === toolCallId
-            ? paramsForCurrentToolCall || b.toolCall?.params
-            : b.toolCall?.params
         context.contentBlocks[i] = {
           ...b,
           toolCall: {
             ...b.toolCall,
-            params: paramsForBlock,
-            ui: uiMetadata || b.toolCall?.ui,
-            execution: executionMetadata || b.toolCall?.execution,
             state: targetState,
-            display: resolveDisplayFromServerUi(
+            display: resolveToolDisplay(
               b.toolCall?.name,
               targetState,
               toolCallId,
-              paramsForBlock,
-              uiMetadata || b.toolCall?.ui
+              b.toolCall?.params
             ),
           },
         }
@@ -432,9 +487,7 @@ export const sseHandlers: Record<string, SSEHandler> = {
     try {
       const errorData = asRecord(data?.data)
       const toolCallId: string | undefined =
-        data?.toolCallId ||
-        (errorData.id as string | undefined) ||
-        (errorData.callId as string | undefined)
+        data?.toolCallId || (errorData.id as string | undefined)
       const failedDependency: boolean = data?.failedDependency === true
       if (!toolCallId) return
       const { toolCallsById } = get()
@@ -447,26 +500,14 @@ export const sseHandlers: Record<string, SSEHandler> = {
       ) {
         return
       }
-      const targetState = errorData.state
-        ? mapServerStateToClientState(errorData.state)
-        : failedDependency
-          ? ClientToolCallState.rejected
-          : ClientToolCallState.error
-      const uiMetadata = extractToolUiMetadata(errorData)
-      const executionMetadata = extractToolExecutionMetadata(errorData)
+      const targetState = failedDependency
+        ? ClientToolCallState.rejected
+        : ClientToolCallState.error
       const updatedMap = { ...toolCallsById }
       updatedMap[toolCallId] = {
         ...current,
-        ui: uiMetadata || current.ui,
-        execution: executionMetadata || current.execution,
         state: targetState,
-        display: resolveDisplayFromServerUi(
-          current.name,
-          targetState,
-          current.id,
-          current.params,
-          uiMetadata || current.ui
-        ),
+        display: resolveToolDisplay(current.name, targetState, current.id, current.params),
       }
       set({ toolCallsById: updatedMap })
     }
@@ -479,26 +520,19 @@ export const sseHandlers: Record<string, SSEHandler> = {
           isBackgroundState(b.toolCall?.state)
         )
           break
-        const targetState = errorData.state
-          ? mapServerStateToClientState(errorData.state)
-          : failedDependency
-            ? ClientToolCallState.rejected
-            : ClientToolCallState.error
-        const uiMetadata = extractToolUiMetadata(errorData)
-        const executionMetadata = extractToolExecutionMetadata(errorData)
+        const targetState = failedDependency
+          ? ClientToolCallState.rejected
+          : ClientToolCallState.error
         context.contentBlocks[i] = {
           ...b,
           toolCall: {
             ...b.toolCall,
-            ui: uiMetadata || b.toolCall?.ui,
-            execution: executionMetadata || b.toolCall?.execution,
             state: targetState,
-            display: resolveDisplayFromServerUi(
+            display: resolveToolDisplay(
               b.toolCall?.name,
               targetState,
               toolCallId,
-              b.toolCall?.params,
-              uiMetadata || b.toolCall?.ui
+              b.toolCall?.params
             ),
           },
         }
@@ -513,28 +547,20 @@ export const sseHandlers: Record<string, SSEHandler> = {
     }
   },
   tool_generating: (data, context, get, set) => {
-    const eventData = asRecord(data?.data)
-    const toolCallId =
-      data?.toolCallId ||
-      (eventData.id as string | undefined) ||
-      (eventData.callId as string | undefined)
-    const toolName =
-      data?.toolName ||
-      (eventData.name as string | undefined) ||
-      (eventData.toolName as string | undefined)
+    const { toolCallId, toolName } = data
     if (!toolCallId || !toolName) return
     const { toolCallsById } = get()

     if (!toolCallsById[toolCallId]) {
-      const initialState = ClientToolCallState.generating
-      const uiMetadata = extractToolUiMetadata(eventData)
+      const isAutoAllowed = get().isToolAutoAllowed(toolName)
+      const initialState = isAutoAllowed
+        ? ClientToolCallState.executing
+        : ClientToolCallState.pending
       const tc: CopilotToolCall = {
         id: toolCallId,
         name: toolName,
         state: initialState,
-        ui: uiMetadata,
-        execution: extractToolExecutionMetadata(eventData),
-        display: resolveDisplayFromServerUi(toolName, initialState, toolCallId, undefined, uiMetadata),
+        display: resolveToolDisplay(toolName, initialState, toolCallId),
       }
       const updated = { ...toolCallsById, [toolCallId]: tc }
       set({ toolCallsById: updated })
@@ -546,27 +572,17 @@ export const sseHandlers: Record<string, SSEHandler> = {
   },
   tool_call: (data, context, get, set) => {
     const toolData = asRecord(data?.data)
-    const id: string | undefined =
-      (toolData.id as string | undefined) ||
-      (toolData.callId as string | undefined) ||
-      data?.toolCallId
-    const name: string | undefined =
-      (toolData.name as string | undefined) ||
-      (toolData.toolName as string | undefined) ||
-      data?.toolName
+    const id: string | undefined = (toolData.id as string | undefined) || data?.toolCallId
+    const name: string | undefined = (toolData.name as string | undefined) || data?.toolName
     if (!id) return
     const args = toolData.arguments as Record<string, unknown> | undefined
     const isPartial = toolData.partial === true
-    const uiMetadata = extractToolUiMetadata(toolData)
-    const executionMetadata = extractToolExecutionMetadata(toolData)
-    const serverState = toolData.state
     const { toolCallsById } = get()

     const existing = toolCallsById[id]
     const toolName = name || existing?.name || 'unknown_tool'
-    let initialState = serverState
-      ? mapServerStateToClientState(serverState)
-      : ClientToolCallState.pending
+    const isAutoAllowed = get().isToolAutoAllowed(toolName)
+    let initialState = isAutoAllowed ? ClientToolCallState.executing : ClientToolCallState.pending

     // Avoid flickering back to pending on partial/duplicate events once a tool is executing.
     if (
@@ -581,25 +597,15 @@ export const sseHandlers: Record<string, SSEHandler> = {
           ...existing,
           name: toolName,
           state: initialState,
-          ui: uiMetadata || existing.ui,
-          execution: executionMetadata || existing.execution,
           ...(args ? { params: args } : {}),
-          display: resolveDisplayFromServerUi(
-            toolName,
-            initialState,
-            id,
-            args || existing.params,
-            uiMetadata || existing.ui
-          ),
+          display: resolveToolDisplay(toolName, initialState, id, args || existing.params),
         }
       : {
           id,
           name: toolName,
           state: initialState,
-          ui: uiMetadata,
-          execution: executionMetadata,
          ...(args ? { params: args } : {}),
-          display: resolveDisplayFromServerUi(toolName, initialState, id, args, uiMetadata),
+          display: resolveToolDisplay(toolName, initialState, id, args),
         }
     const updated = { ...toolCallsById, [id]: next }
     set({ toolCallsById: updated })
@@ -612,12 +618,20 @@ export const sseHandlers: Record<string, SSEHandler> = {
       return
     }

-    const shouldInterrupt = next.ui?.showInterrupt === true
+    // Auto-allowed tools: send confirmation to the server so it can proceed
+    // without waiting for the user to click "Allow".
+    if (isAutoAllowed) {
+      sendAutoAcceptConfirmation(id)
+    }

-    // Client-run capability: execution is delegated to the browser.
-    // We run immediately only when no interrupt is required.
-    if (isClientRunCapability(next) && !shouldInterrupt) {
-      executeRunToolOnClient(id, toolName, args || next.params || {})
+    // Client-executable run tools: execute on the client for real-time feedback
+    // (block pulsing, console logs, stop button). The server defers execution
+    // for these tools in interactive mode; the client reports back via mark-complete.
+    if (
+      CLIENT_EXECUTABLE_RUN_TOOLS.has(toolName) &&
+      initialState === ClientToolCallState.executing
+    ) {
+      executeRunToolOnClient(id, toolName, args || existing?.params || {})
     }

     // OAuth: dispatch event to open the OAuth connect modal
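The handlers above assume the copilot store exposes an `isToolAutoAllowed(toolName)` check. A minimal sketch of such a check, backed by a plain set of tool names, is shown here; the real store's storage and hydration (likely via the auto-allowed-tools API added elsewhere in this changeset) are assumptions and not shown in the diff.

```ts
// Sketch of an auto-allow lookup; how the real store persists this list is not part of the diff.
const autoAllowedToolNames = new Set<string>()

function isToolAutoAllowed(toolName: string): boolean {
  return autoAllowedToolNames.has(toolName)
}

function rememberAutoAllowedTool(toolName: string): void {
  // Called when the user chooses to always allow a tool.
  autoAllowedToolNames.add(toolName)
}
```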
@@ -15,7 +15,10 @@ const logger = createLogger('CopilotRunToolExecution')
  * (block pulsing, logs, stop button, etc.).
  */
 export const CLIENT_EXECUTABLE_RUN_TOOLS = new Set([
-  'workflow_run',
+  'run_workflow',
+  'run_workflow_until_block',
+  'run_from_block',
+  'run_block',
 ])

 /**
@@ -71,44 +74,21 @@ async function doExecuteRunTool(
     | Record<string, unknown>
     | undefined

-  const runMode =
-    toolName === 'workflow_run' ? ((params.mode as string | undefined) || 'full').toLowerCase() : undefined
-
-  if (
-    toolName === 'workflow_run' &&
-    runMode !== 'full' &&
-    runMode !== 'until_block' &&
-    runMode !== 'from_block' &&
-    runMode !== 'block'
-  ) {
-    const error = `Unsupported workflow_run mode: ${String(params.mode)}`
-    logger.warn('[RunTool] Execution prevented: unsupported workflow_run mode', {
-      toolCallId,
-      mode: params.mode,
-    })
-    setToolState(toolCallId, ClientToolCallState.error)
-    await reportCompletion(toolCallId, false, error)
-    return
-  }
-
   const stopAfterBlockId = (() => {
-    if (toolName === 'workflow_run' && runMode === 'until_block') {
+    if (toolName === 'run_workflow_until_block')
       return params.stopAfterBlockId as string | undefined
-    }
-    if (toolName === 'workflow_run' && runMode === 'block') {
-      return params.blockId as string | undefined
-    }
+    if (toolName === 'run_block') return params.blockId as string | undefined
     return undefined
   })()

   const runFromBlock = (() => {
-    if (toolName === 'workflow_run' && runMode === 'from_block' && params.startBlockId) {
+    if (toolName === 'run_from_block' && params.startBlockId) {
       return {
         startBlockId: params.startBlockId as string,
         executionId: (params.executionId as string | undefined) || 'latest',
       }
     }
-    if (toolName === 'workflow_run' && runMode === 'block' && params.blockId) {
+    if (toolName === 'run_block' && params.blockId) {
       return {
         startBlockId: params.blockId as string,
         executionId: (params.executionId as string | undefined) || 'latest',
@@ -6,23 +6,16 @@
   shouldSkipToolResultEvent,
 } from '@/lib/copilot/orchestrator/sse-utils'
 import type { SSEEvent } from '@/lib/copilot/orchestrator/types'
+import { resolveToolDisplay } from '@/lib/copilot/store-utils'
 import { ClientToolCallState } from '@/lib/copilot/tools/client/tool-display-registry'
 import type { CopilotStore, CopilotToolCall } from '@/stores/panel/copilot/types'
 import {
   type SSEHandler,
+  sendAutoAcceptConfirmation,
   sseHandlers,
   updateStreamingMessage,
 } from './handlers'
 import { CLIENT_EXECUTABLE_RUN_TOOLS, executeRunToolOnClient } from './run-tool-execution'
-import {
-  extractOperationListFromResultPayload,
-  extractToolExecutionMetadata,
-  extractToolUiMetadata,
-  isWorkflowChangeApplyCall,
-  mapServerStateToClientState,
-  resolveDisplayFromServerUi,
-} from './tool-call-helpers'
-import { applyToolEffects } from './tool-effects'
 import type { ClientStreamingContext } from './types'

 const logger = createLogger('CopilotClientSubagentHandlers')
@@ -31,13 +24,6 @@ type StoreSet = (
   partial: Partial<CopilotStore> | ((state: CopilotStore) => Partial<CopilotStore>)
 ) => void

-function isClientRunCapability(toolCall: CopilotToolCall): boolean {
-  if (toolCall.execution?.target === 'sim_client_capability') {
-    return toolCall.execution.capabilityId === 'workflow.run' || !toolCall.execution.capabilityId
-  }
-  return CLIENT_EXECUTABLE_RUN_TOOLS.has(toolCall.name)
-}
-
 export function appendSubAgentContent(
   context: ClientStreamingContext,
   parentToolCallId: string,
@@ -178,8 +164,6 @@ export const subAgentSSEHandlers: Record<string, SSEHandler> = {
     const name: string | undefined = (toolData.name as string | undefined) || data?.toolName
     if (!id || !name) return
     const isPartial = toolData.partial === true
-    const uiMetadata = extractToolUiMetadata(toolData)
-    const executionMetadata = extractToolExecutionMetadata(toolData)

     let args: Record<string, unknown> | undefined = (toolData.arguments || toolData.input) as
       | Record<string, unknown>
@@ -215,10 +199,9 @@ export const subAgentSSEHandlers: Record<string, SSEHandler> = {
     const existingToolCall =
       existingIndex >= 0 ? context.subAgentToolCalls[parentToolCallId][existingIndex] : undefined

-    const serverState = toolData.state
-    let initialState = serverState
-      ? mapServerStateToClientState(serverState)
-      : ClientToolCallState.pending
+    // Auto-allowed tools skip pending state to avoid flashing interrupt buttons
+    const isAutoAllowed = get().isToolAutoAllowed(name)
+    let initialState = isAutoAllowed ? ClientToolCallState.executing : ClientToolCallState.pending

     // Avoid flickering back to pending on partial/duplicate events once a tool is executing.
     if (
@@ -232,10 +215,8 @@ export const subAgentSSEHandlers: Record<string, SSEHandler> = {
       id,
       name,
       state: initialState,
-      ui: uiMetadata,
-      execution: executionMetadata,
       ...(args ? { params: args } : {}),
-      display: resolveDisplayFromServerUi(name, initialState, id, args, uiMetadata),
+      display: resolveToolDisplay(name, initialState, id, args),
     }

     if (existingIndex >= 0) {
@@ -260,11 +241,16 @@ export const subAgentSSEHandlers: Record<string, SSEHandler> = {
       return
     }

-    const shouldInterrupt = subAgentToolCall.ui?.showInterrupt === true
+    // Auto-allowed tools: send confirmation to the server so it can proceed
+    // without waiting for the user to click "Allow".
+    if (isAutoAllowed) {
+      sendAutoAcceptConfirmation(id)
+    }

-    // Client-run capability: execution is delegated to the browser.
-    // Execute immediately only for non-interrupting calls.
-    if (isClientRunCapability(subAgentToolCall) && !shouldInterrupt) {
+    // Client-executable run tools: if auto-allowed, execute immediately for
+    // real-time feedback. For non-auto-allowed, the user must click "Allow"
+    // first — handleRun in tool-call.tsx triggers executeRunToolOnClient.
+    if (CLIENT_EXECUTABLE_RUN_TOOLS.has(name) && isAutoAllowed) {
       executeRunToolOnClient(id, name, args || {})
     }
   },
@@ -289,51 +275,17 @@ export const subAgentSSEHandlers: Record<string, SSEHandler> = {
|
|||||||
if (!context.subAgentToolCalls[parentToolCallId]) return
|
if (!context.subAgentToolCalls[parentToolCallId]) return
|
||||||
if (!context.subAgentBlocks[parentToolCallId]) return
|
if (!context.subAgentBlocks[parentToolCallId]) return
|
||||||
|
|
||||||
const serverState = resultData.state
|
const targetState = success ? ClientToolCallState.success : ClientToolCallState.error
|
||||||
const targetState = serverState
|
|
||||||
? mapServerStateToClientState(serverState)
|
|
||||||
: success
|
|
||||||
? ClientToolCallState.success
|
|
||||||
: ClientToolCallState.error
|
|
||||||
const uiMetadata = extractToolUiMetadata(resultData)
|
|
||||||
const executionMetadata = extractToolExecutionMetadata(resultData)
|
|
||||||
const existingIndex = context.subAgentToolCalls[parentToolCallId].findIndex(
|
const existingIndex = context.subAgentToolCalls[parentToolCallId].findIndex(
|
||||||
(tc: CopilotToolCall) => tc.id === toolCallId
|
(tc: CopilotToolCall) => tc.id === toolCallId
|
||||||
)
|
)
|
||||||
|
|
||||||
if (existingIndex >= 0) {
|
if (existingIndex >= 0) {
|
||||||
const existing = context.subAgentToolCalls[parentToolCallId][existingIndex]
|
const existing = context.subAgentToolCalls[parentToolCallId][existingIndex]
|
||||||
let nextParams = existing.params
|
|
||||||
const resultPayload = asRecord(
|
|
||||||
data?.result || resultData.result || resultData.data || data?.data
|
|
||||||
)
|
|
||||||
if (
|
|
||||||
targetState === ClientToolCallState.success &&
|
|
||||||
isWorkflowChangeApplyCall(existing.name, existing.params as Record<string, unknown>) &&
|
|
||||||
resultPayload
|
|
||||||
) {
|
|
||||||
const operations = extractOperationListFromResultPayload(resultPayload)
|
|
||||||
if (operations && operations.length > 0) {
|
|
||||||
nextParams = {
|
|
||||||
...(existing.params || {}),
|
|
||||||
operations,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const updatedSubAgentToolCall = {
|
const updatedSubAgentToolCall = {
|
||||||
...existing,
|
...existing,
|
||||||
params: nextParams,
|
|
||||||
ui: uiMetadata || existing.ui,
|
|
||||||
execution: executionMetadata || existing.execution,
|
|
||||||
state: targetState,
|
state: targetState,
|
||||||
display: resolveDisplayFromServerUi(
|
display: resolveToolDisplay(existing.name, targetState, toolCallId, existing.params),
|
||||||
existing.name,
|
|
||||||
targetState,
|
|
||||||
toolCallId,
|
|
||||||
nextParams,
|
|
||||||
uiMetadata || existing.ui
|
|
||||||
),
|
|
||||||
}
|
}
|
||||||
context.subAgentToolCalls[parentToolCallId][existingIndex] = updatedSubAgentToolCall
|
context.subAgentToolCalls[parentToolCallId][existingIndex] = updatedSubAgentToolCall
|
||||||
|
|
||||||
@@ -357,12 +309,6 @@ export const subAgentSSEHandlers: Record<string, SSEHandler> = {
           state: targetState,
         })
       }
-
-      applyToolEffects({
-        effectsRaw: resultData.effects,
-        toolCall: updatedSubAgentToolCall,
-        resultPayload,
-      })
     }

     updateToolCallWithSubAgentData(context, get, set, parentToolCallId)
@@ -1,134 +0,0 @@
|
|||||||
import { asRecord } from '@/lib/copilot/orchestrator/sse-utils'
|
|
||||||
import { humanizedFallback, resolveToolDisplay } from '@/lib/copilot/store-utils'
|
|
||||||
import { ClientToolCallState } from '@/lib/copilot/tools/client/tool-display-registry'
|
|
||||||
import type { CopilotToolCall } from '@/stores/panel/copilot/types'
|
|
||||||
|
|
||||||
export function mapServerStateToClientState(state: unknown): ClientToolCallState {
|
|
||||||
switch (String(state || '')) {
|
|
||||||
case 'generating':
|
|
||||||
return ClientToolCallState.generating
|
|
||||||
case 'pending':
|
|
||||||
case 'awaiting_approval':
|
|
||||||
return ClientToolCallState.pending
|
|
||||||
case 'executing':
|
|
||||||
return ClientToolCallState.executing
|
|
||||||
case 'success':
|
|
||||||
return ClientToolCallState.success
|
|
||||||
case 'rejected':
|
|
||||||
case 'skipped':
|
|
||||||
return ClientToolCallState.rejected
|
|
||||||
case 'aborted':
|
|
||||||
return ClientToolCallState.aborted
|
|
||||||
case 'error':
|
|
||||||
case 'failed':
|
|
||||||
return ClientToolCallState.error
|
|
||||||
default:
|
|
||||||
return ClientToolCallState.pending
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export function extractToolUiMetadata(
|
|
||||||
data: Record<string, unknown>
|
|
||||||
): CopilotToolCall['ui'] | undefined {
|
|
||||||
const ui = asRecord(data.ui)
|
|
||||||
if (!ui || Object.keys(ui).length === 0) return undefined
|
|
||||||
const autoAllowedFromUi = ui.autoAllowed === true
|
|
||||||
const autoAllowedFromData = data.autoAllowed === true
|
|
||||||
return {
|
|
||||||
title: typeof ui.title === 'string' ? ui.title : undefined,
|
|
||||||
phaseLabel: typeof ui.phaseLabel === 'string' ? ui.phaseLabel : undefined,
|
|
||||||
icon: typeof ui.icon === 'string' ? ui.icon : undefined,
|
|
||||||
showInterrupt: ui.showInterrupt === true,
|
|
||||||
showRemember: ui.showRemember === true,
|
|
||||||
autoAllowed: autoAllowedFromUi || autoAllowedFromData,
|
|
||||||
actions: Array.isArray(ui.actions)
|
|
||||||
? ui.actions
|
|
||||||
.map((action) => {
|
|
||||||
const a = asRecord(action)
|
|
||||||
const id = typeof a.id === 'string' ? a.id : undefined
|
|
||||||
const label = typeof a.label === 'string' ? a.label : undefined
|
|
||||||
const kind: 'accept' | 'reject' = a.kind === 'reject' ? 'reject' : 'accept'
|
|
||||||
if (!id || !label) return null
|
|
||||||
return {
|
|
||||||
id,
|
|
||||||
label,
|
|
||||||
kind,
|
|
||||||
remember: a.remember === true,
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.filter((a): a is NonNullable<typeof a> => !!a)
|
|
||||||
: undefined,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export function extractToolExecutionMetadata(
|
|
||||||
data: Record<string, unknown>
|
|
||||||
): CopilotToolCall['execution'] | undefined {
|
|
||||||
const execution = asRecord(data.execution)
|
|
||||||
if (!execution || Object.keys(execution).length === 0) return undefined
|
|
||||||
return {
|
|
||||||
target: typeof execution.target === 'string' ? execution.target : undefined,
|
|
||||||
capabilityId: typeof execution.capabilityId === 'string' ? execution.capabilityId : undefined,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function displayVerb(state: ClientToolCallState): string {
|
|
||||||
switch (state) {
|
|
||||||
case ClientToolCallState.success:
|
|
||||||
return 'Completed'
|
|
||||||
case ClientToolCallState.error:
|
|
||||||
return 'Failed'
|
|
||||||
case ClientToolCallState.rejected:
|
|
||||||
return 'Skipped'
|
|
||||||
case ClientToolCallState.aborted:
|
|
||||||
return 'Aborted'
|
|
||||||
case ClientToolCallState.generating:
|
|
||||||
return 'Preparing'
|
|
||||||
case ClientToolCallState.pending:
|
|
||||||
return 'Waiting'
|
|
||||||
default:
|
|
||||||
return 'Running'
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export function resolveDisplayFromServerUi(
|
|
||||||
toolName: string,
|
|
||||||
state: ClientToolCallState,
|
|
||||||
toolCallId: string,
|
|
||||||
params: Record<string, unknown> | undefined,
|
|
||||||
ui?: CopilotToolCall['ui']
|
|
||||||
) {
|
|
||||||
const fallback =
|
|
||||||
resolveToolDisplay(toolName, state, toolCallId, params) ||
|
|
||||||
humanizedFallback(toolName, state)
|
|
||||||
if (!fallback) return undefined
|
|
||||||
if (ui?.phaseLabel) {
|
|
||||||
return { text: ui.phaseLabel, icon: fallback.icon }
|
|
||||||
}
|
|
||||||
if (ui?.title) {
|
|
||||||
return { text: `${displayVerb(state)} ${ui.title}`, icon: fallback.icon }
|
|
||||||
}
|
|
||||||
return fallback
|
|
||||||
}
|
|
||||||
|
|
||||||
export function isWorkflowChangeApplyCall(
|
|
||||||
toolName?: string,
|
|
||||||
params?: Record<string, unknown>
|
|
||||||
): boolean {
|
|
||||||
if (toolName !== 'workflow_change') return false
|
|
||||||
const mode = typeof params?.mode === 'string' ? params.mode.toLowerCase() : ''
|
|
||||||
if (mode === 'apply') return true
|
|
||||||
return typeof params?.proposalId === 'string' && params.proposalId.length > 0
|
|
||||||
}
|
|
||||||
|
|
||||||
export function extractOperationListFromResultPayload(
|
|
||||||
resultPayload: Record<string, unknown>
|
|
||||||
): Array<Record<string, unknown>> | undefined {
|
|
||||||
const operations = resultPayload.operations
|
|
||||||
if (Array.isArray(operations)) return operations as Array<Record<string, unknown>>
|
|
||||||
|
|
||||||
const compiled = resultPayload.compiledOperations
|
|
||||||
if (Array.isArray(compiled)) return compiled as Array<Record<string, unknown>>
|
|
||||||
|
|
||||||
return undefined
|
|
||||||
}
|
|
||||||
@@ -1,175 +0,0 @@
|
|||||||
import { createLogger } from '@sim/logger'
|
|
||||||
import { asRecord } from '@/lib/copilot/orchestrator/sse-utils'
|
|
||||||
import type { CopilotToolCall } from '@/stores/panel/copilot/types'
|
|
||||||
import { useVariablesStore } from '@/stores/panel/variables/store'
|
|
||||||
import { useEnvironmentStore } from '@/stores/settings/environment/store'
|
|
||||||
import { useWorkflowDiffStore } from '@/stores/workflow-diff/store'
|
|
||||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
|
||||||
import type { WorkflowState } from '@/stores/workflows/workflow/types'
|
|
||||||
|
|
||||||
const logger = createLogger('CopilotToolEffects')
|
|
||||||
|
|
||||||
type ParsedToolEffect = {
|
|
||||||
kind: string
|
|
||||||
payload: Record<string, unknown>
|
|
||||||
}
|
|
||||||
|
|
||||||
function parseToolEffects(raw: unknown): ParsedToolEffect[] {
|
|
||||||
if (!Array.isArray(raw)) return []
|
|
||||||
const effects: ParsedToolEffect[] = []
|
|
||||||
for (const item of raw) {
|
|
||||||
const effect = asRecord(item)
|
|
||||||
const kind = typeof effect.kind === 'string' ? effect.kind : ''
|
|
||||||
if (!kind) continue
|
|
||||||
effects.push({
|
|
||||||
kind,
|
|
||||||
payload: asRecord(effect.payload) || {},
|
|
||||||
})
|
|
||||||
}
|
|
||||||
return effects
|
|
||||||
}
|
|
function resolveWorkflowId(
  payload: Record<string, unknown>,
  toolCall?: CopilotToolCall
): string | undefined {
  const payloadWorkflowId = typeof payload.workflowId === 'string' ? payload.workflowId : undefined
  if (payloadWorkflowId) return payloadWorkflowId

  const params = asRecord(toolCall?.params)
  const paramWorkflowId = typeof params?.workflowId === 'string' ? params.workflowId : undefined
  if (paramWorkflowId) return paramWorkflowId

  return useWorkflowRegistry.getState().activeWorkflowId || undefined
}

function resolveWorkflowState(
  payload: Record<string, unknown>,
  resultPayload?: Record<string, unknown>
): WorkflowState | null {
  const payloadState = asRecord(payload.workflowState)
  if (payloadState) return payloadState as unknown as WorkflowState

  if (resultPayload) {
    const directState = asRecord(resultPayload.workflowState)
    if (directState) return directState as unknown as WorkflowState
    const editResult = asRecord(resultPayload.editResult)
    const nestedState = asRecord(editResult?.workflowState)
    if (nestedState) return nestedState as unknown as WorkflowState
  }

  return null
}

function applyDeploymentSyncEffect(payload: Record<string, unknown>, toolCall?: CopilotToolCall): void {
  const workflowId = resolveWorkflowId(payload, toolCall)
  if (!workflowId) return

  const registry = useWorkflowRegistry.getState()
  const existingStatus = registry.getWorkflowDeploymentStatus(workflowId)

  const isDeployed =
    typeof payload.isDeployed === 'boolean'
      ? payload.isDeployed
      : (existingStatus?.isDeployed ?? true)

  const deployedAt = (() => {
    if (typeof payload.deployedAt === 'string' && payload.deployedAt) {
      const parsed = new Date(payload.deployedAt)
      if (!Number.isNaN(parsed.getTime())) return parsed
    }
    return existingStatus?.deployedAt
  })()

  const apiKey =
    typeof payload.apiKey === 'string' && payload.apiKey.length > 0
      ? payload.apiKey
      : existingStatus?.apiKey

  registry.setDeploymentStatus(workflowId, isDeployed, deployedAt, apiKey)
}

function applyApiKeySyncEffect(payload: Record<string, unknown>, toolCall?: CopilotToolCall): void {
  const workflowId = resolveWorkflowId(payload, toolCall)
  if (!workflowId) return

  const apiKey = typeof payload.apiKey === 'string' ? payload.apiKey : undefined
  const registry = useWorkflowRegistry.getState()
  const existingStatus = registry.getWorkflowDeploymentStatus(workflowId)
  registry.setDeploymentStatus(
    workflowId,
    existingStatus?.isDeployed ?? false,
    existingStatus?.deployedAt,
    apiKey || existingStatus?.apiKey
  )
}

function applyWorkflowVariablesReload(
  payload: Record<string, unknown>,
  toolCall?: CopilotToolCall
): void {
  const workflowId = resolveWorkflowId(payload, toolCall)
  if (!workflowId) return
  useVariablesStore.getState().loadForWorkflow(workflowId)
}

export function applyToolEffects(params: {
  effectsRaw: unknown
  toolCall?: CopilotToolCall
  resultPayload?: Record<string, unknown>
}): void {
  const effects = parseToolEffects(params.effectsRaw)
  if (effects.length === 0) {
    if (params.toolCall?.name === 'workflow_change' && params.resultPayload) {
      const workflowState = resolveWorkflowState({}, params.resultPayload)
      if (!workflowState) return
      useWorkflowDiffStore
        .getState()
        .setProposedChanges(workflowState)
        .catch((error) => {
          logger.error('Failed to apply fallback workflow diff from result payload', {
            error: error instanceof Error ? error.message : String(error),
          })
        })
    }
    return
  }

  for (const effect of effects) {
    switch (effect.kind) {
      case 'workflow.diff.proposed': {
        const workflowState = resolveWorkflowState(effect.payload, params.resultPayload)
        if (!workflowState) break
        useWorkflowDiffStore
          .getState()
          .setProposedChanges(workflowState)
          .catch((error) => {
            logger.error('Failed to apply workflow diff effect', {
              error: error instanceof Error ? error.message : String(error),
            })
          })
        break
      }

      case 'workflow.deployment.sync':
        applyDeploymentSyncEffect(effect.payload, params.toolCall)
        break

      case 'workflow.api_key.sync':
        applyApiKeySyncEffect(effect.payload, params.toolCall)
        break

      case 'environment.variables.reload':
        useEnvironmentStore.getState().loadEnvironmentVariables()
        break

      case 'workflow.variables.reload':
        applyWorkflowVariablesReload(effect.payload, params.toolCall)
        break

      default:
        logger.debug('Ignoring unknown tool effect', { kind: effect.kind })
        break
    }
  }
}
@@ -101,6 +101,9 @@ export const COPILOT_CHECKPOINTS_API_PATH = '/api/copilot/checkpoints'
 /** POST — revert to a checkpoint. */
 export const COPILOT_CHECKPOINTS_REVERT_API_PATH = '/api/copilot/checkpoints/revert'
 
+/** GET/POST/DELETE — manage auto-allowed tools. */
+export const COPILOT_AUTO_ALLOWED_TOOLS_API_PATH = '/api/copilot/auto-allowed-tools'
+
 /** GET — fetch dynamically available copilot models. */
 export const COPILOT_MODELS_API_PATH = '/api/copilot/models'
 
67  apps/sim/lib/copilot/orchestrator/config.ts  Normal file
@@ -0,0 +1,67 @@
export const INTERRUPT_TOOL_NAMES = [
  'set_global_workflow_variables',
  'run_workflow',
  'run_workflow_until_block',
  'run_from_block',
  'run_block',
  'manage_mcp_tool',
  'manage_custom_tool',
  'deploy_mcp',
  'deploy_chat',
  'deploy_api',
  'create_workspace_mcp_server',
  'set_environment_variables',
  'make_api_request',
  'oauth_request_access',
  'navigate_ui',
  'knowledge_base',
  'generate_api_key',
] as const

export const INTERRUPT_TOOL_SET = new Set<string>(INTERRUPT_TOOL_NAMES)

export const SUBAGENT_TOOL_NAMES = [
  'debug',
  'edit',
  'build',
  'plan',
  'test',
  'deploy',
  'auth',
  'research',
  'knowledge',
  'custom_tool',
  'tour',
  'info',
  'workflow',
  'evaluate',
  'superagent',
  'discovery',
] as const

export const SUBAGENT_TOOL_SET = new Set<string>(SUBAGENT_TOOL_NAMES)

/**
 * Respond tools are internal to the copilot's subagent system.
 * They're used by subagents to signal completion and should NOT be executed by the sim side.
 * The copilot backend handles these internally.
 */
export const RESPOND_TOOL_NAMES = [
  'plan_respond',
  'edit_respond',
  'build_respond',
  'debug_respond',
  'info_respond',
  'research_respond',
  'deploy_respond',
  'superagent_respond',
  'discovery_respond',
  'tour_respond',
  'auth_respond',
  'workflow_respond',
  'knowledge_respond',
  'custom_tool_respond',
  'test_respond',
] as const

export const RESPOND_TOOL_SET = new Set<string>(RESPOND_TOOL_NAMES)
@@ -1,12 +1,17 @@
 import { createLogger } from '@sim/logger'
 import { STREAM_TIMEOUT_MS } from '@/lib/copilot/constants'
+import { RESPOND_TOOL_SET, SUBAGENT_TOOL_SET } from '@/lib/copilot/orchestrator/config'
 import {
   asRecord,
   getEventData,
   markToolResultSeen,
   wasToolResultSeen,
 } from '@/lib/copilot/orchestrator/sse-utils'
-import { markToolComplete } from '@/lib/copilot/orchestrator/tool-executor'
+import {
+  isIntegrationTool,
+  isToolAvailableOnSimSide,
+  markToolComplete,
+} from '@/lib/copilot/orchestrator/tool-executor'
 import type {
   ContentBlock,
   ExecutionContext,
@@ -17,6 +22,7 @@ import type {
 } from '@/lib/copilot/orchestrator/types'
 import {
   executeToolAndReport,
+  isInterruptToolName,
   waitForToolCompletion,
   waitForToolDecision,
 } from './tool-execution'
@@ -29,208 +35,12 @@ const logger = createLogger('CopilotSseHandlers')
  * execution to the browser client instead of running executeWorkflow directly.
  */
 const CLIENT_EXECUTABLE_RUN_TOOLS = new Set([
-  'workflow_run',
+  'run_workflow',
+  'run_workflow_until_block',
+  'run_from_block',
+  'run_block',
 ])
 
-function mapServerStateToToolStatus(state: unknown): ToolCallState['status'] {
-  switch (String(state || '')) {
-    case 'generating':
-    case 'pending':
-    case 'awaiting_approval':
-      return 'pending'
-    case 'executing':
-      return 'executing'
-    case 'success':
-      return 'success'
-    case 'rejected':
-    case 'skipped':
-      return 'rejected'
-    case 'aborted':
-      return 'skipped'
-    case 'error':
-    case 'failed':
-      return 'error'
-    default:
-      return 'pending'
-  }
-}
-
-function getExecutionTarget(
-  toolData: Record<string, unknown>,
-  toolName: string
-): { target: string; capabilityId?: string } {
-  const execution = asRecord(toolData.execution)
-  if (typeof execution.target === 'string' && execution.target.length > 0) {
-    return {
-      target: execution.target,
-      capabilityId:
-        typeof execution.capabilityId === 'string' ? execution.capabilityId : undefined,
-    }
-  }
-
-  // Fallback only when metadata is missing.
-  if (CLIENT_EXECUTABLE_RUN_TOOLS.has(toolName)) {
-    return { target: 'sim_client_capability', capabilityId: 'workflow.run' }
-  }
-  return { target: 'sim_server' }
-}
-
-function needsApproval(toolData: Record<string, unknown>): boolean {
-  const ui = asRecord(toolData.ui)
-  return ui.showInterrupt === true
-}
-
-async function waitForClientCapabilityAndReport(
-  toolCall: ToolCallState,
-  options: OrchestratorOptions,
-  logScope: string
-): Promise<void> {
-  toolCall.status = 'executing'
-  const completion = await waitForToolCompletion(
-    toolCall.id,
-    options.timeout || STREAM_TIMEOUT_MS,
-    options.abortSignal
-  )
-
-  if (completion?.status === 'background') {
-    toolCall.status = 'skipped'
-    toolCall.endTime = Date.now()
-    markToolComplete(
-      toolCall.id,
-      toolCall.name,
-      202,
-      completion.message || 'Tool execution moved to background',
-      { background: true }
-    ).catch((err) => {
-      logger.error(`markToolComplete fire-and-forget failed (${logScope} background)`, {
-        toolCallId: toolCall.id,
-        error: err instanceof Error ? err.message : String(err),
-      })
-    })
-    markToolResultSeen(toolCall.id)
-    return
-  }
-
-  if (completion?.status === 'rejected') {
-    toolCall.status = 'rejected'
-    toolCall.endTime = Date.now()
-    markToolComplete(toolCall.id, toolCall.name, 400, completion.message || 'Tool execution rejected')
-      .catch((err) => {
-        logger.error(`markToolComplete fire-and-forget failed (${logScope} rejected)`, {
-          toolCallId: toolCall.id,
-          error: err instanceof Error ? err.message : String(err),
-        })
-      })
-    markToolResultSeen(toolCall.id)
-    return
-  }
-
-  const success = completion?.status === 'success'
-  toolCall.status = success ? 'success' : 'error'
-  toolCall.endTime = Date.now()
-  const msg = completion?.message || (success ? 'Tool completed' : 'Tool failed or timed out')
-  markToolComplete(toolCall.id, toolCall.name, success ? 200 : 500, msg).catch((err) => {
-    logger.error(`markToolComplete fire-and-forget failed (${logScope})`, {
-      toolCallId: toolCall.id,
-      toolName: toolCall.name,
-      error: err instanceof Error ? err.message : String(err),
-    })
-  })
-  markToolResultSeen(toolCall.id)
-}
-
-function markToolCallAndNotify(
-  toolCall: ToolCallState,
-  statusCode: number,
-  message: string,
-  data: Record<string, unknown> | undefined,
-  logScope: string
-): void {
-  markToolComplete(toolCall.id, toolCall.name, statusCode, message, data).catch((err) => {
-    logger.error(`markToolComplete fire-and-forget failed (${logScope})`, {
-      toolCallId: toolCall.id,
-      error: err instanceof Error ? err.message : String(err),
-    })
-  })
-  markToolResultSeen(toolCall.id)
-}
-
-async function executeToolCallWithPolicy(
-  toolCall: ToolCallState,
-  toolName: string,
-  toolData: Record<string, unknown>,
-  context: StreamingContext,
-  execContext: ExecutionContext,
-  options: OrchestratorOptions,
-  logScope: string
-): Promise<void> {
-  const execution = getExecutionTarget(toolData, toolName)
-  const isInteractive = options.interactive === true
-  const requiresApproval = isInteractive && needsApproval(toolData)
-
-  if (toolData.state) {
-    toolCall.status = mapServerStateToToolStatus(toolData.state)
-  }
-
-  if (requiresApproval) {
-    const decision = await waitForToolDecision(
-      toolCall.id,
-      options.timeout || STREAM_TIMEOUT_MS,
-      options.abortSignal
-    )
-
-    if (decision?.status === 'accepted' || decision?.status === 'success') {
-      // Continue below into normal execution path.
-    } else if (decision?.status === 'rejected' || decision?.status === 'error') {
-      toolCall.status = 'rejected'
-      toolCall.endTime = Date.now()
-      markToolCallAndNotify(
-        toolCall,
-        400,
-        decision.message || 'Tool execution rejected',
-        { skipped: true, reason: 'user_rejected' },
-        `${logScope} rejected`
-      )
-      return
-    } else if (decision?.status === 'background') {
-      toolCall.status = 'skipped'
-      toolCall.endTime = Date.now()
-      markToolCallAndNotify(
-        toolCall,
-        202,
-        decision.message || 'Tool execution moved to background',
-        { background: true },
-        `${logScope} background`
-      )
-      return
-    } else {
-      // Decision was null (timeout/abort).
-      toolCall.status = 'rejected'
-      toolCall.endTime = Date.now()
-      markToolCallAndNotify(
-        toolCall,
-        408,
-        'Tool approval timed out',
-        { skipped: true, reason: 'timeout' },
-        `${logScope} timeout`
-      )
-      return
-    }
-  }
-
-  if (execution.target === 'sim_client_capability' && isInteractive) {
-    await waitForClientCapabilityAndReport(toolCall, options, logScope)
-    return
-  }
-
-  if (
-    (execution.target === 'sim_server' || execution.target === 'sim_client_capability') &&
-    options.autoExecuteTools !== false
-  ) {
-    await executeToolAndReport(toolCall.id, context, execContext, options)
-  }
-}
-
 // Normalization + dedupe helpers live in sse-utils to keep server/client in sync.
 
 function inferToolSuccess(data: Record<string, unknown> | undefined): {
@@ -275,11 +85,7 @@ export const sseHandlers: Record<string, SSEHandler> = {
 
     const { success, hasResultData, hasError } = inferToolSuccess(data)
 
-    current.status = data?.state
-      ? mapServerStateToToolStatus(data.state)
-      : success
-        ? 'success'
-        : 'error'
+    current.status = success ? 'success' : 'error'
     current.endTime = Date.now()
     if (hasResultData) {
       current.result = {
@@ -298,7 +104,7 @@ export const sseHandlers: Record<string, SSEHandler> = {
     if (!toolCallId) return
     const current = context.toolCalls.get(toolCallId)
     if (!current) return
-    current.status = data?.state ? mapServerStateToToolStatus(data.state) : 'error'
+    current.status = 'error'
     current.error = (data?.error as string | undefined) || 'Tool execution failed'
     current.endTime = Date.now()
   },
@@ -315,7 +121,7 @@ export const sseHandlers: Record<string, SSEHandler> = {
       context.toolCalls.set(toolCallId, {
         id: toolCallId,
         name: toolName,
-        status: data?.state ? mapServerStateToToolStatus(data.state) : 'pending',
+        status: 'pending',
         startTime: Date.now(),
       })
     }
@@ -350,7 +156,7 @@ export const sseHandlers: Record<string, SSEHandler> = {
     context.toolCalls.set(toolCallId, {
       id: toolCallId,
       name: toolName,
-      status: toolData.state ? mapServerStateToToolStatus(toolData.state) : 'pending',
+      status: 'pending',
       params: args,
       startTime: Date.now(),
     })
@@ -364,15 +170,147 @@ export const sseHandlers: Record<string, SSEHandler> = {
     const toolCall = context.toolCalls.get(toolCallId)
     if (!toolCall) return
 
-    await executeToolCallWithPolicy(
-      toolCall,
-      toolName,
-      toolData,
-      context,
-      execContext,
-      options,
-      'run tool'
-    )
+    // Subagent tools are executed by the copilot backend, not sim side.
+    if (SUBAGENT_TOOL_SET.has(toolName)) {
+      return
+    }
+
+    // Respond tools are internal to copilot's subagent system - skip execution.
+    // The copilot backend handles these internally to signal subagent completion.
+    if (RESPOND_TOOL_SET.has(toolName)) {
+      toolCall.status = 'success'
+      toolCall.endTime = Date.now()
+      toolCall.result = {
+        success: true,
+        output: 'Internal respond tool - handled by copilot backend',
+      }
+      return
+    }
+
+    const isInterruptTool = isInterruptToolName(toolName)
+    const isInteractive = options.interactive === true
+    // Integration tools (user-installed) also require approval in interactive mode
+    const needsApproval = isInterruptTool || isIntegrationTool(toolName)
+
+    if (needsApproval && isInteractive) {
+      const decision = await waitForToolDecision(
+        toolCallId,
+        options.timeout || STREAM_TIMEOUT_MS,
+        options.abortSignal
+      )
+      if (decision?.status === 'accepted' || decision?.status === 'success') {
+        // Client-executable run tools: defer execution to the browser client.
+        // The client calls executeWorkflowWithFullLogging for real-time feedback
+        // (block pulsing, logs, stop button) and reports completion via
+        // /api/copilot/confirm with status success/error. We poll Redis for
+        // that completion signal, then fire-and-forget markToolComplete to Go.
+        if (CLIENT_EXECUTABLE_RUN_TOOLS.has(toolName)) {
+          toolCall.status = 'executing'
+          const completion = await waitForToolCompletion(
+            toolCallId,
+            options.timeout || STREAM_TIMEOUT_MS,
+            options.abortSignal
+          )
+          if (completion?.status === 'background') {
+            toolCall.status = 'skipped'
+            toolCall.endTime = Date.now()
+            markToolComplete(
+              toolCall.id,
+              toolCall.name,
+              202,
+              completion.message || 'Tool execution moved to background',
+              { background: true }
+            ).catch((err) => {
+              logger.error('markToolComplete fire-and-forget failed (run tool background)', {
+                toolCallId: toolCall.id,
+                error: err instanceof Error ? err.message : String(err),
+              })
+            })
+            markToolResultSeen(toolCallId)
+            return
+          }
+          const success = completion?.status === 'success'
+          toolCall.status = success ? 'success' : 'error'
+          toolCall.endTime = Date.now()
+          const msg =
+            completion?.message || (success ? 'Tool completed' : 'Tool failed or timed out')
+          // Fire-and-forget: tell Go backend the tool is done
+          // (must NOT await — see deadlock note in executeToolAndReport)
+          markToolComplete(toolCall.id, toolCall.name, success ? 200 : 500, msg).catch((err) => {
+            logger.error('markToolComplete fire-and-forget failed (run tool)', {
+              toolCallId: toolCall.id,
+              toolName: toolCall.name,
+              error: err instanceof Error ? err.message : String(err),
+            })
+          })
+          markToolResultSeen(toolCallId)
+          return
+        }
+        await executeToolAndReport(toolCallId, context, execContext, options)
+        return
+      }
+
+      if (decision?.status === 'rejected' || decision?.status === 'error') {
+        toolCall.status = 'rejected'
+        toolCall.endTime = Date.now()
+        // Fire-and-forget: must NOT await — see deadlock note in executeToolAndReport
+        markToolComplete(
+          toolCall.id,
+          toolCall.name,
+          400,
+          decision.message || 'Tool execution rejected',
+          { skipped: true, reason: 'user_rejected' }
+        ).catch((err) => {
+          logger.error('markToolComplete fire-and-forget failed (rejected)', {
+            toolCallId: toolCall.id,
+            error: err instanceof Error ? err.message : String(err),
+          })
+        })
+        markToolResultSeen(toolCall.id)
+        return
+      }
+
+      if (decision?.status === 'background') {
+        toolCall.status = 'skipped'
+        toolCall.endTime = Date.now()
+        // Fire-and-forget: must NOT await — see deadlock note in executeToolAndReport
+        markToolComplete(
+          toolCall.id,
+          toolCall.name,
+          202,
+          decision.message || 'Tool execution moved to background',
+          { background: true }
+        ).catch((err) => {
+          logger.error('markToolComplete fire-and-forget failed (background)', {
+            toolCallId: toolCall.id,
+            error: err instanceof Error ? err.message : String(err),
+          })
+        })
+        markToolResultSeen(toolCall.id)
+        return
+      }
+
+      // Decision was null — timed out or aborted.
+      // Do NOT fall through to auto-execute. Mark the tool as timed out
+      // and notify Go so it can unblock waitForExternalTool.
+      toolCall.status = 'rejected'
+      toolCall.endTime = Date.now()
+      markToolComplete(toolCall.id, toolCall.name, 408, 'Tool approval timed out', {
+        skipped: true,
+        reason: 'timeout',
+      }).catch((err) => {
+        logger.error('markToolComplete fire-and-forget failed (timeout)', {
+          toolCallId: toolCall.id,
+          error: err instanceof Error ? err.message : String(err),
+        })
+      })
+      markToolResultSeen(toolCall.id)
+      return
+    }
+
+    if (options.autoExecuteTools !== false) {
+      await executeToolAndReport(toolCallId, context, execContext, options)
+    }
   },
   reasoning: (event, context) => {
     const d = asRecord(event.data)
@@ -472,7 +410,7 @@ export const subAgentHandlers: Record<string, SSEHandler> = {
     const toolCall: ToolCallState = {
       id: toolCallId,
       name: toolName,
-      status: toolData.state ? mapServerStateToToolStatus(toolData.state) : 'pending',
+      status: 'pending',
       params: args,
       startTime: Date.now(),
     }
@@ -490,15 +428,157 @@ export const subAgentHandlers: Record<string, SSEHandler> = {
 
     if (isPartial) return
 
-    await executeToolCallWithPolicy(
-      toolCall,
-      toolName,
-      toolData,
-      context,
-      execContext,
-      options,
-      'subagent run tool'
-    )
+    // Respond tools are internal to copilot's subagent system - skip execution.
+    if (RESPOND_TOOL_SET.has(toolName)) {
+      toolCall.status = 'success'
+      toolCall.endTime = Date.now()
+      toolCall.result = {
+        success: true,
+        output: 'Internal respond tool - handled by copilot backend',
+      }
+      return
+    }
+
+    // Tools that only exist on the Go backend (e.g. search_patterns,
+    // search_errors, remember_debug) should NOT be re-executed on the Sim side.
+    // The Go backend already executed them and will send its own tool_result
+    // SSE event with the real outcome. Trying to execute them here would fail
+    // with "Tool not found" and incorrectly mark the tool as failed.
+    if (!isToolAvailableOnSimSide(toolName)) {
+      return
+    }
+
+    // Interrupt tools and integration tools (user-installed) require approval
+    // in interactive mode, same as top-level handler.
+    const needsSubagentApproval = isInterruptToolName(toolName) || isIntegrationTool(toolName)
+    if (options.interactive === true && needsSubagentApproval) {
+      const decision = await waitForToolDecision(
+        toolCallId,
+        options.timeout || STREAM_TIMEOUT_MS,
+        options.abortSignal
+      )
+      if (decision?.status === 'accepted' || decision?.status === 'success') {
+        await executeToolAndReport(toolCallId, context, execContext, options)
+        return
+      }
+      if (decision?.status === 'rejected' || decision?.status === 'error') {
+        toolCall.status = 'rejected'
+        toolCall.endTime = Date.now()
+        // Fire-and-forget: must NOT await — see deadlock note in executeToolAndReport
+        markToolComplete(
+          toolCall.id,
+          toolCall.name,
+          400,
+          decision.message || 'Tool execution rejected',
+          { skipped: true, reason: 'user_rejected' }
+        ).catch((err) => {
+          logger.error('markToolComplete fire-and-forget failed (subagent rejected)', {
+            toolCallId: toolCall.id,
+            error: err instanceof Error ? err.message : String(err),
+          })
+        })
+        markToolResultSeen(toolCall.id)
+        return
+      }
+      if (decision?.status === 'background') {
+        toolCall.status = 'skipped'
+        toolCall.endTime = Date.now()
+        // Fire-and-forget: must NOT await — see deadlock note in executeToolAndReport
+        markToolComplete(
+          toolCall.id,
+          toolCall.name,
+          202,
+          decision.message || 'Tool execution moved to background',
+          { background: true }
+        ).catch((err) => {
+          logger.error('markToolComplete fire-and-forget failed (subagent background)', {
+            toolCallId: toolCall.id,
+            error: err instanceof Error ? err.message : String(err),
+          })
+        })
+        markToolResultSeen(toolCall.id)
+        return
+      }
+
+      // Decision was null — timed out or aborted.
+      // Do NOT fall through to auto-execute.
+      toolCall.status = 'rejected'
+      toolCall.endTime = Date.now()
+      markToolComplete(toolCall.id, toolCall.name, 408, 'Tool approval timed out', {
+        skipped: true,
+        reason: 'timeout',
+      }).catch((err) => {
+        logger.error('markToolComplete fire-and-forget failed (subagent timeout)', {
+          toolCallId: toolCall.id,
+          error: err instanceof Error ? err.message : String(err),
+        })
+      })
+      markToolResultSeen(toolCall.id)
+      return
+    }
+
+    // Client-executable run tools in interactive mode: defer to client.
+    // Same pattern as main handler: wait for client completion, then tell Go.
+    if (options.interactive === true && CLIENT_EXECUTABLE_RUN_TOOLS.has(toolName)) {
+      toolCall.status = 'executing'
+      const completion = await waitForToolCompletion(
+        toolCallId,
+        options.timeout || STREAM_TIMEOUT_MS,
+        options.abortSignal
+      )
+      if (completion?.status === 'rejected') {
+        toolCall.status = 'rejected'
+        toolCall.endTime = Date.now()
+        markToolComplete(
+          toolCall.id,
+          toolCall.name,
+          400,
+          completion.message || 'Tool execution rejected'
+        ).catch((err) => {
+          logger.error('markToolComplete fire-and-forget failed (subagent run tool rejected)', {
+            toolCallId: toolCall.id,
+            error: err instanceof Error ? err.message : String(err),
+          })
+        })
+        markToolResultSeen(toolCallId)
+        return
+      }
+      if (completion?.status === 'background') {
+        toolCall.status = 'skipped'
+        toolCall.endTime = Date.now()
+        markToolComplete(
+          toolCall.id,
+          toolCall.name,
+          202,
+          completion.message || 'Tool execution moved to background',
+          { background: true }
+        ).catch((err) => {
+          logger.error('markToolComplete fire-and-forget failed (subagent run tool background)', {
+            toolCallId: toolCall.id,
+            error: err instanceof Error ? err.message : String(err),
+          })
+        })
+        markToolResultSeen(toolCallId)
+        return
+      }
+      const success = completion?.status === 'success'
+      toolCall.status = success ? 'success' : 'error'
+      toolCall.endTime = Date.now()
+      const msg = completion?.message || (success ? 'Tool completed' : 'Tool failed or timed out')
+      markToolComplete(toolCall.id, toolCall.name, success ? 200 : 500, msg).catch((err) => {
+        logger.error('markToolComplete fire-and-forget failed (subagent run tool)', {
+          toolCallId: toolCall.id,
+          toolName: toolCall.name,
+          error: err instanceof Error ? err.message : String(err),
+        })
+      })
+      markToolResultSeen(toolCallId)
+      return
+    }
+
+    if (options.autoExecuteTools !== false) {
+      await executeToolAndReport(toolCallId, context, execContext, options)
+    }
   },
   tool_result: (event, context) => {
     const parentToolCallId = context.subAgentParentToolCallId
@@ -516,7 +596,7 @@ export const subAgentHandlers: Record<string, SSEHandler> = {
 
     const { success, hasResultData, hasError } = inferToolSuccess(data)
 
-    const status = data?.state ? mapServerStateToToolStatus(data.state) : success ? 'success' : 'error'
+    const status = success ? 'success' : 'error'
     const endTime = Date.now()
     const result = hasResultData ? { success, output: data?.result || data?.data } : undefined
 
@@ -4,6 +4,7 @@ import {
   TOOL_DECISION_MAX_POLL_MS,
   TOOL_DECISION_POLL_BACKOFF,
 } from '@/lib/copilot/constants'
+import { INTERRUPT_TOOL_SET } from '@/lib/copilot/orchestrator/config'
 import { getToolConfirmation } from '@/lib/copilot/orchestrator/persistence'
 import {
   asRecord,
@@ -20,6 +21,10 @@ import type {
 
 const logger = createLogger('CopilotSseToolExecution')
 
+export function isInterruptToolName(toolName: string): boolean {
+  return INTERRUPT_TOOL_SET.has(toolName)
+}
+
 export async function executeToolAndReport(
   toolCallId: string,
   context: StreamingContext,
@@ -29,11 +34,9 @@ export async function executeToolAndReport(
   const toolCall = context.toolCalls.get(toolCallId)
   if (!toolCall) return
 
-  const lockable = toolCall as typeof toolCall & { __simExecuting?: boolean }
-  if (lockable.__simExecuting) return
+  if (toolCall.status === 'executing') return
   if (wasToolResultSeen(toolCall.id)) return
 
-  lockable.__simExecuting = true
   toolCall.status = 'executing'
   try {
     const result = await executeToolServerSide(toolCall, execContext)
@@ -119,8 +122,6 @@ export async function executeToolAndReport(
|
|||||||
},
|
},
|
||||||
}
|
}
|
||||||
await options?.onEvent?.(errorEvent)
|
await options?.onEvent?.(errorEvent)
|
||||||
} finally {
|
|
||||||
delete lockable.__simExecuting
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -14,7 +14,7 @@ describe('sse-utils', () => {
|
|||||||
type: 'tool_result',
|
type: 'tool_result',
|
||||||
data: JSON.stringify({
|
data: JSON.stringify({
|
||||||
id: 'tool_1',
|
id: 'tool_1',
|
||||||
name: 'workflow_change',
|
name: 'edit_workflow',
|
||||||
success: true,
|
success: true,
|
||||||
result: { ok: true },
|
result: { ok: true },
|
||||||
}),
|
}),
|
||||||
@@ -23,7 +23,7 @@ describe('sse-utils', () => {
|
|||||||
const normalized = normalizeSseEvent(event as any)
|
const normalized = normalizeSseEvent(event as any)
|
||||||
|
|
||||||
expect(normalized.toolCallId).toBe('tool_1')
|
expect(normalized.toolCallId).toBe('tool_1')
|
||||||
expect(normalized.toolName).toBe('workflow_change')
|
expect(normalized.toolName).toBe('edit_workflow')
|
||||||
expect(normalized.success).toBe(true)
|
expect(normalized.success).toBe(true)
|
||||||
expect(normalized.result).toEqual({ ok: true })
|
expect(normalized.result).toEqual({ ok: true })
|
||||||
})
|
})
|
||||||
|
|||||||
@@ -220,8 +220,7 @@ export async function executeDeployMcp(
|
|||||||
if (!workflowRecord.isDeployed) {
|
if (!workflowRecord.isDeployed) {
|
||||||
return {
|
return {
|
||||||
success: false,
|
success: false,
|
||||||
error:
|
error: 'Workflow must be deployed before adding as an MCP tool. Use deploy_api first.',
|
||||||
'Workflow must be deployed before adding as an MCP tool. Use workflow_deploy(mode: "api") first.',
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
import { db } from '@sim/db'
|
import { db } from '@sim/db'
|
||||||
import { customTools, workflow } from '@sim/db/schema'
|
import { workflow } from '@sim/db/schema'
|
||||||
import { createLogger } from '@sim/logger'
|
import { createLogger } from '@sim/logger'
|
||||||
import { and, desc, eq, isNull, or } from 'drizzle-orm'
|
import { eq } from 'drizzle-orm'
|
||||||
import { SIM_AGENT_API_URL } from '@/lib/copilot/constants'
|
import { SIM_AGENT_API_URL } from '@/lib/copilot/constants'
|
||||||
import type {
|
import type {
|
||||||
ExecutionContext,
|
ExecutionContext,
|
||||||
@@ -12,7 +12,6 @@ import { routeExecution } from '@/lib/copilot/tools/server/router'
|
|||||||
import { env } from '@/lib/core/config/env'
|
import { env } from '@/lib/core/config/env'
|
||||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||||
import { getEffectiveDecryptedEnv } from '@/lib/environment/utils'
|
import { getEffectiveDecryptedEnv } from '@/lib/environment/utils'
|
||||||
import { upsertCustomTools } from '@/lib/workflows/custom-tools/operations'
|
|
||||||
import { getTool, resolveToolId } from '@/tools/utils'
|
import { getTool, resolveToolId } from '@/tools/utils'
|
||||||
import {
|
import {
|
||||||
executeCheckDeploymentStatus,
|
executeCheckDeploymentStatus,
|
||||||
@@ -50,8 +49,6 @@ import type {
|
|||||||
RunWorkflowParams,
|
RunWorkflowParams,
|
||||||
RunWorkflowUntilBlockParams,
|
RunWorkflowUntilBlockParams,
|
||||||
SetGlobalWorkflowVariablesParams,
|
SetGlobalWorkflowVariablesParams,
|
||||||
WorkflowDeployParams,
|
|
||||||
WorkflowRunParams,
|
|
||||||
} from './param-types'
|
} from './param-types'
|
||||||
import { PLATFORM_ACTIONS_CONTENT } from './platform-actions'
|
import { PLATFORM_ACTIONS_CONTENT } from './platform-actions'
|
||||||
import {
|
import {
|
||||||
@@ -79,332 +76,13 @@ import {
|
|||||||
|
|
||||||
const logger = createLogger('CopilotToolExecutor')
|
const logger = createLogger('CopilotToolExecutor')
|
||||||
|
|
||||||
type ManageCustomToolOperation = 'add' | 'edit' | 'delete' | 'list'
|
|
||||||
|
|
||||||
interface ManageCustomToolSchema {
|
|
||||||
type: 'function'
|
|
||||||
function: {
|
|
||||||
name: string
|
|
||||||
description?: string
|
|
||||||
parameters: Record<string, unknown>
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
interface ManageCustomToolParams {
|
|
||||||
operation?: string
|
|
||||||
toolId?: string
|
|
||||||
schema?: ManageCustomToolSchema
|
|
||||||
code?: string
|
|
||||||
title?: string
|
|
||||||
workspaceId?: string
|
|
||||||
}
|
|
||||||
|
|
||||||
async function executeManageCustomTool(
|
|
||||||
rawParams: Record<string, unknown>,
|
|
||||||
context: ExecutionContext
|
|
||||||
): Promise<ToolCallResult> {
|
|
||||||
const params = rawParams as ManageCustomToolParams
|
|
||||||
const operation = String(params.operation || '').toLowerCase() as ManageCustomToolOperation
|
|
||||||
const workspaceId = params.workspaceId || context.workspaceId
|
|
||||||
|
|
||||||
if (!operation) {
|
|
||||||
return { success: false, error: "Missing required 'operation' argument" }
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
|
||||||
if (operation === 'list') {
|
|
||||||
const toolsForUser = workspaceId
|
|
||||||
? await db
|
|
||||||
.select()
|
|
||||||
.from(customTools)
|
|
||||||
.where(
|
|
||||||
or(
|
|
||||||
eq(customTools.workspaceId, workspaceId),
|
|
||||||
and(isNull(customTools.workspaceId), eq(customTools.userId, context.userId))
|
|
||||||
)
|
|
||||||
)
|
|
||||||
.orderBy(desc(customTools.createdAt))
|
|
||||||
: await db
|
|
||||||
.select()
|
|
||||||
.from(customTools)
|
|
||||||
.where(and(isNull(customTools.workspaceId), eq(customTools.userId, context.userId)))
|
|
||||||
.orderBy(desc(customTools.createdAt))
|
|
||||||
|
|
||||||
return {
|
|
||||||
success: true,
|
|
||||||
output: {
|
|
||||||
success: true,
|
|
||||||
operation,
|
|
||||||
tools: toolsForUser,
|
|
||||||
count: toolsForUser.length,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (operation === 'add') {
|
|
||||||
if (!workspaceId) {
|
|
||||||
return {
|
|
||||||
success: false,
|
|
||||||
error: "workspaceId is required for operation 'add'",
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (!params.schema || !params.code) {
|
|
||||||
return {
|
|
||||||
success: false,
|
|
||||||
error: "Both 'schema' and 'code' are required for operation 'add'",
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const title = params.title || params.schema.function?.name
|
|
||||||
if (!title) {
|
|
||||||
return { success: false, error: "Missing tool title or schema.function.name for 'add'" }
|
|
||||||
}
|
|
||||||
|
|
||||||
const resultTools = await upsertCustomTools({
|
|
||||||
tools: [
|
|
||||||
{
|
|
||||||
title,
|
|
||||||
schema: params.schema,
|
|
||||||
code: params.code,
|
|
||||||
},
|
|
||||||
],
|
|
||||||
workspaceId,
|
|
||||||
userId: context.userId,
|
|
||||||
})
|
|
||||||
const created = resultTools.find((tool) => tool.title === title)
|
|
||||||
|
|
||||||
return {
|
|
||||||
success: true,
|
|
||||||
output: {
|
|
||||||
success: true,
|
|
||||||
operation,
|
|
||||||
toolId: created?.id,
|
|
||||||
title,
|
|
||||||
message: `Created custom tool "${title}"`,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (operation === 'edit') {
|
|
||||||
if (!workspaceId) {
|
|
||||||
return {
|
|
||||||
success: false,
|
|
||||||
error: "workspaceId is required for operation 'edit'",
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (!params.toolId) {
|
|
||||||
return { success: false, error: "'toolId' is required for operation 'edit'" }
|
|
||||||
}
|
|
||||||
if (!params.schema && !params.code) {
|
|
||||||
return {
|
|
||||||
success: false,
|
|
||||||
error: "At least one of 'schema' or 'code' is required for operation 'edit'",
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const workspaceTool = await db
|
|
||||||
.select()
|
|
||||||
.from(customTools)
|
|
||||||
.where(and(eq(customTools.id, params.toolId), eq(customTools.workspaceId, workspaceId)))
|
|
||||||
.limit(1)
|
|
||||||
|
|
||||||
const legacyTool =
|
|
||||||
workspaceTool.length === 0
|
|
||||||
? await db
|
|
||||||
.select()
|
|
||||||
.from(customTools)
|
|
||||||
.where(
|
|
||||||
and(
|
|
||||||
eq(customTools.id, params.toolId),
|
|
||||||
isNull(customTools.workspaceId),
|
|
||||||
eq(customTools.userId, context.userId)
|
|
||||||
)
|
|
||||||
)
|
|
||||||
.limit(1)
|
|
||||||
: []
|
|
||||||
|
|
||||||
const existing = workspaceTool[0] || legacyTool[0]
|
|
||||||
if (!existing) {
|
|
||||||
return { success: false, error: `Custom tool not found: ${params.toolId}` }
|
|
||||||
}
|
|
||||||
|
|
||||||
const mergedSchema = params.schema || (existing.schema as ManageCustomToolSchema)
|
|
||||||
const mergedCode = params.code || existing.code
|
|
||||||
const title = params.title || mergedSchema.function?.name || existing.title
|
|
||||||
|
|
||||||
await upsertCustomTools({
|
|
||||||
tools: [
|
|
||||||
{
|
|
||||||
id: params.toolId,
|
|
||||||
title,
|
|
||||||
schema: mergedSchema,
|
|
||||||
code: mergedCode,
|
|
||||||
},
|
|
||||||
],
|
|
||||||
workspaceId,
|
|
||||||
userId: context.userId,
|
|
||||||
})
|
|
||||||
|
|
||||||
return {
|
|
||||||
success: true,
|
|
||||||
output: {
|
|
||||||
success: true,
|
|
||||||
operation,
|
|
||||||
toolId: params.toolId,
|
|
||||||
title,
|
|
||||||
message: `Updated custom tool "${title}"`,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (operation === 'delete') {
|
|
||||||
if (!params.toolId) {
|
|
||||||
return { success: false, error: "'toolId' is required for operation 'delete'" }
|
|
||||||
}
|
|
||||||
|
|
||||||
const workspaceDelete =
|
|
||||||
workspaceId != null
|
|
||||||
? await db
|
|
||||||
.delete(customTools)
|
|
||||||
.where(
|
|
||||||
and(eq(customTools.id, params.toolId), eq(customTools.workspaceId, workspaceId))
|
|
||||||
)
|
|
||||||
.returning({ id: customTools.id })
|
|
||||||
: []
|
|
||||||
|
|
||||||
const legacyDelete =
|
|
||||||
workspaceDelete.length === 0
|
|
||||||
? await db
|
|
||||||
.delete(customTools)
|
|
||||||
.where(
|
|
||||||
and(
|
|
||||||
eq(customTools.id, params.toolId),
|
|
||||||
isNull(customTools.workspaceId),
|
|
||||||
eq(customTools.userId, context.userId)
|
|
||||||
)
|
|
||||||
)
|
|
||||||
.returning({ id: customTools.id })
|
|
||||||
: []
|
|
||||||
|
|
||||||
const deleted = workspaceDelete[0] || legacyDelete[0]
|
|
||||||
if (!deleted) {
|
|
||||||
return { success: false, error: `Custom tool not found: ${params.toolId}` }
|
|
||||||
}
|
|
||||||
|
|
||||||
return {
|
|
||||||
success: true,
|
|
||||||
output: {
|
|
||||||
success: true,
|
|
||||||
operation,
|
|
||||||
toolId: params.toolId,
|
|
||||||
message: 'Deleted custom tool',
|
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return {
|
|
||||||
success: false,
|
|
||||||
error: `Unsupported operation for manage_custom_tool: ${operation}`,
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
logger.error('manage_custom_tool execution failed', {
|
|
||||||
operation,
|
|
||||||
workspaceId,
|
|
||||||
userId: context.userId,
|
|
||||||
error: error instanceof Error ? error.message : String(error),
|
|
||||||
})
|
|
||||||
return {
|
|
||||||
success: false,
|
|
||||||
error: error instanceof Error ? error.message : 'Failed to manage custom tool',
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async function executeWorkflowRunUnified(
|
|
||||||
rawParams: Record<string, unknown>,
|
|
||||||
context: ExecutionContext
|
|
||||||
): Promise<ToolCallResult> {
|
|
||||||
const params = rawParams as WorkflowRunParams
|
|
||||||
const mode = params.mode || 'full'
|
|
||||||
|
|
||||||
switch (mode) {
|
|
||||||
case 'full':
|
|
||||||
return executeRunWorkflow(params as RunWorkflowParams, context)
|
|
||||||
case 'until_block':
|
|
||||||
if (!params.stopAfterBlockId) {
|
|
||||||
return { success: false, error: 'stopAfterBlockId is required for mode=until_block' }
|
|
||||||
}
|
|
||||||
return executeRunWorkflowUntilBlock(params as RunWorkflowUntilBlockParams, context)
|
|
||||||
case 'from_block':
|
|
||||||
if (!params.startBlockId) {
|
|
||||||
return { success: false, error: 'startBlockId is required for mode=from_block' }
|
|
||||||
}
|
|
||||||
return executeRunFromBlock(params as RunFromBlockParams, context)
|
|
||||||
case 'block':
|
|
||||||
if (!params.blockId) {
|
|
||||||
return { success: false, error: 'blockId is required for mode=block' }
|
|
||||||
}
|
|
||||||
return executeRunBlock(params as RunBlockParams, context)
|
|
||||||
default:
|
|
||||||
return {
|
|
||||||
success: false,
|
|
||||||
error: `Unsupported workflow_run mode: ${String(mode)}`,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async function executeWorkflowDeployUnified(
|
|
||||||
rawParams: Record<string, unknown>,
|
|
||||||
context: ExecutionContext
|
|
||||||
): Promise<ToolCallResult> {
|
|
||||||
const params = rawParams as unknown as WorkflowDeployParams
|
|
||||||
const mode = params.mode
|
|
||||||
|
|
||||||
if (!mode) {
|
|
||||||
return { success: false, error: 'mode is required for workflow_deploy' }
|
|
||||||
}
|
|
||||||
|
|
||||||
const scopedContext =
|
|
||||||
params.workflowId && params.workflowId !== context.workflowId
|
|
||||||
? { ...context, workflowId: params.workflowId }
|
|
||||||
: context
|
|
||||||
|
|
||||||
switch (mode) {
|
|
||||||
case 'status':
|
|
||||||
return executeCheckDeploymentStatus(params as CheckDeploymentStatusParams, scopedContext)
|
|
||||||
case 'redeploy':
|
|
||||||
return executeRedeploy(scopedContext)
|
|
||||||
case 'api':
|
|
||||||
return executeDeployApi(params as DeployApiParams, scopedContext)
|
|
||||||
case 'chat':
|
|
||||||
return executeDeployChat(params as DeployChatParams, scopedContext)
|
|
||||||
case 'mcp':
|
|
||||||
return executeDeployMcp(params as DeployMcpParams, scopedContext)
|
|
||||||
case 'list_mcp_servers':
|
|
||||||
return executeListWorkspaceMcpServers(params as ListWorkspaceMcpServersParams, scopedContext)
|
|
||||||
case 'create_mcp_server':
|
|
||||||
return executeCreateWorkspaceMcpServer(
|
|
||||||
params as CreateWorkspaceMcpServerParams,
|
|
||||||
scopedContext
|
|
||||||
)
|
|
||||||
default:
|
|
||||||
return {
|
|
||||||
success: false,
|
|
||||||
error: `Unsupported workflow_deploy mode: ${String(mode)}`,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const SERVER_TOOLS = new Set<string>([
|
const SERVER_TOOLS = new Set<string>([
|
||||||
'get_blocks_and_tools',
|
'get_blocks_and_tools',
|
||||||
'get_blocks_metadata',
|
'get_blocks_metadata',
|
||||||
'get_block_options',
|
'get_block_options',
|
||||||
'get_block_config',
|
'get_block_config',
|
||||||
'get_trigger_blocks',
|
'get_trigger_blocks',
|
||||||
'workflow_context_get',
|
'edit_workflow',
|
||||||
'workflow_context_expand',
|
|
||||||
'workflow_change',
|
|
||||||
'workflow_verify',
|
|
||||||
'get_workflow_console',
|
'get_workflow_console',
|
||||||
'search_documentation',
|
'search_documentation',
|
||||||
'search_online',
|
'search_online',
|
||||||
@@ -432,7 +110,11 @@ const SIM_WORKFLOW_TOOL_HANDLERS: Record<
|
|||||||
get_block_outputs: (p, c) => executeGetBlockOutputs(p as GetBlockOutputsParams, c),
|
get_block_outputs: (p, c) => executeGetBlockOutputs(p as GetBlockOutputsParams, c),
|
||||||
get_block_upstream_references: (p, c) =>
|
get_block_upstream_references: (p, c) =>
|
||||||
executeGetBlockUpstreamReferences(p as unknown as GetBlockUpstreamReferencesParams, c),
|
executeGetBlockUpstreamReferences(p as unknown as GetBlockUpstreamReferencesParams, c),
|
||||||
workflow_run: (p, c) => executeWorkflowRunUnified(p, c),
|
run_workflow: (p, c) => executeRunWorkflow(p as RunWorkflowParams, c),
|
||||||
|
run_workflow_until_block: (p, c) =>
|
||||||
|
executeRunWorkflowUntilBlock(p as unknown as RunWorkflowUntilBlockParams, c),
|
||||||
|
run_from_block: (p, c) => executeRunFromBlock(p as unknown as RunFromBlockParams, c),
|
||||||
|
run_block: (p, c) => executeRunBlock(p as unknown as RunBlockParams, c),
|
||||||
get_deployed_workflow_state: (p, c) =>
|
get_deployed_workflow_state: (p, c) =>
|
||||||
executeGetDeployedWorkflowState(p as GetDeployedWorkflowStateParams, c),
|
executeGetDeployedWorkflowState(p as GetDeployedWorkflowStateParams, c),
|
||||||
generate_api_key: (p, c) => executeGenerateApiKey(p as unknown as GenerateApiKeyParams, c),
|
generate_api_key: (p, c) => executeGenerateApiKey(p as unknown as GenerateApiKeyParams, c),
|
||||||
@@ -443,7 +125,10 @@ const SIM_WORKFLOW_TOOL_HANDLERS: Record<
|
|||||||
}),
|
}),
|
||||||
set_global_workflow_variables: (p, c) =>
|
set_global_workflow_variables: (p, c) =>
|
||||||
executeSetGlobalWorkflowVariables(p as SetGlobalWorkflowVariablesParams, c),
|
executeSetGlobalWorkflowVariables(p as SetGlobalWorkflowVariablesParams, c),
|
||||||
workflow_deploy: (p, c) => executeWorkflowDeployUnified(p, c),
|
deploy_api: (p, c) => executeDeployApi(p as DeployApiParams, c),
|
||||||
|
deploy_chat: (p, c) => executeDeployChat(p as DeployChatParams, c),
|
||||||
|
deploy_mcp: (p, c) => executeDeployMcp(p as DeployMcpParams, c),
|
||||||
|
redeploy: (_p, c) => executeRedeploy(c),
|
||||||
check_deployment_status: (p, c) =>
|
check_deployment_status: (p, c) =>
|
||||||
executeCheckDeploymentStatus(p as CheckDeploymentStatusParams, c),
|
executeCheckDeploymentStatus(p as CheckDeploymentStatusParams, c),
|
||||||
list_workspace_mcp_servers: (p, c) =>
|
list_workspace_mcp_servers: (p, c) =>
|
||||||
@@ -476,19 +161,6 @@ const SIM_WORKFLOW_TOOL_HANDLERS: Record<
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
oauth_request_access: async (p, _c) => {
|
|
||||||
const providerName = (p.providerName || p.provider_name || 'the provider') as string
|
|
||||||
return {
|
|
||||||
success: true,
|
|
||||||
output: {
|
|
||||||
success: true,
|
|
||||||
status: 'requested',
|
|
||||||
providerName,
|
|
||||||
message: `Requested ${providerName} OAuth connection. The user should complete the OAuth modal in the UI, then retry credential-dependent actions.`,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
},
|
|
||||||
manage_custom_tool: (p, c) => executeManageCustomTool(p, c),
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
|||||||
@@ -93,18 +93,6 @@ export interface RunBlockParams {
|
|||||||
useDeployedState?: boolean
|
useDeployedState?: boolean
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface WorkflowRunParams {
|
|
||||||
mode?: 'full' | 'until_block' | 'from_block' | 'block'
|
|
||||||
workflowId?: string
|
|
||||||
workflow_input?: unknown
|
|
||||||
input?: unknown
|
|
||||||
useDeployedState?: boolean
|
|
||||||
stopAfterBlockId?: string
|
|
||||||
startBlockId?: string
|
|
||||||
blockId?: string
|
|
||||||
executionId?: string
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface GetDeployedWorkflowStateParams {
|
export interface GetDeployedWorkflowStateParams {
|
||||||
workflowId?: string
|
workflowId?: string
|
||||||
}
|
}
|
||||||
@@ -181,39 +169,6 @@ export interface CreateWorkspaceMcpServerParams {
|
|||||||
workflowIds?: string[]
|
workflowIds?: string[]
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface WorkflowDeployParams {
|
|
||||||
mode:
|
|
||||||
| 'status'
|
|
||||||
| 'redeploy'
|
|
||||||
| 'api'
|
|
||||||
| 'chat'
|
|
||||||
| 'mcp'
|
|
||||||
| 'list_mcp_servers'
|
|
||||||
| 'create_mcp_server'
|
|
||||||
workflowId?: string
|
|
||||||
action?: 'deploy' | 'undeploy'
|
|
||||||
identifier?: string
|
|
||||||
title?: string
|
|
||||||
description?: string
|
|
||||||
customizations?: {
|
|
||||||
primaryColor?: string
|
|
||||||
secondaryColor?: string
|
|
||||||
welcomeMessage?: string
|
|
||||||
iconUrl?: string
|
|
||||||
}
|
|
||||||
authType?: 'none' | 'password' | 'public' | 'email' | 'sso'
|
|
||||||
password?: string
|
|
||||||
allowedEmails?: string[]
|
|
||||||
outputConfigs?: unknown[]
|
|
||||||
serverId?: string
|
|
||||||
toolName?: string
|
|
||||||
toolDescription?: string
|
|
||||||
parameterSchema?: Record<string, unknown>
|
|
||||||
name?: string
|
|
||||||
isPublic?: boolean
|
|
||||||
workflowIds?: string[]
|
|
||||||
}
|
|
||||||
|
|
||||||
// === Workflow Organization Params ===
|
// === Workflow Organization Params ===
|
||||||
|
|
||||||
export interface RenameWorkflowParams {
|
export interface RenameWorkflowParams {
|
||||||
|
|||||||
@@ -592,40 +592,16 @@ const META_edit: ToolMetadata = {
  },
}

- const META_workflow_change: ToolMetadata = {
+ const META_edit_workflow: ToolMetadata = {
  displayNames: {
-   [ClientToolCallState.generating]: { text: 'Planning workflow changes', icon: Loader2 },
+   [ClientToolCallState.generating]: { text: 'Editing your workflow', icon: Loader2 },
-   [ClientToolCallState.executing]: { text: 'Applying workflow changes', icon: Loader2 },
+   [ClientToolCallState.executing]: { text: 'Editing your workflow', icon: Loader2 },
-   [ClientToolCallState.success]: { text: 'Updated your workflow', icon: Grid2x2Check },
+   [ClientToolCallState.success]: { text: 'Edited your workflow', icon: Grid2x2Check },
-   [ClientToolCallState.error]: { text: 'Failed to update your workflow', icon: XCircle },
+   [ClientToolCallState.error]: { text: 'Failed to edit your workflow', icon: XCircle },
    [ClientToolCallState.review]: { text: 'Review your workflow changes', icon: Grid2x2 },
    [ClientToolCallState.rejected]: { text: 'Rejected workflow changes', icon: Grid2x2X },
-   [ClientToolCallState.aborted]: { text: 'Aborted workflow changes', icon: MinusCircle },
+   [ClientToolCallState.aborted]: { text: 'Aborted editing your workflow', icon: MinusCircle },
-   [ClientToolCallState.pending]: { text: 'Planning workflow changes', icon: Loader2 },
+   [ClientToolCallState.pending]: { text: 'Editing your workflow', icon: Loader2 },
- },
- getDynamicText: (params, state) => {
-   const mode = typeof params?.mode === 'string' ? params.mode.toLowerCase() : ''
-   if (mode === 'dry_run') {
-     switch (state) {
-       case ClientToolCallState.success:
-         return 'Planned workflow changes'
-       case ClientToolCallState.executing:
-       case ClientToolCallState.generating:
-       case ClientToolCallState.pending:
-         return 'Planning workflow changes'
-     }
-   }
-   if (mode === 'apply' || typeof params?.proposalId === 'string') {
-     switch (state) {
-       case ClientToolCallState.success:
-         return 'Applied workflow changes'
-       case ClientToolCallState.executing:
-       case ClientToolCallState.generating:
-       case ClientToolCallState.pending:
-         return 'Applying workflow changes'
-     }
-   }
-   return undefined
  },
  uiConfig: {
    isSpecial: true,
@@ -633,42 +609,6 @@ const META_workflow_change: ToolMetadata = {
  },
}

- const META_workflow_context_get: ToolMetadata = {
-   displayNames: {
-     [ClientToolCallState.generating]: { text: 'Gathering workflow context', icon: Loader2 },
-     [ClientToolCallState.pending]: { text: 'Gathering workflow context', icon: Loader2 },
-     [ClientToolCallState.executing]: { text: 'Gathering workflow context', icon: Loader2 },
-     [ClientToolCallState.success]: { text: 'Gathered workflow context', icon: FileText },
-     [ClientToolCallState.error]: { text: 'Failed to gather workflow context', icon: XCircle },
-     [ClientToolCallState.rejected]: { text: 'Skipped workflow context', icon: MinusCircle },
-     [ClientToolCallState.aborted]: { text: 'Aborted workflow context', icon: MinusCircle },
-   },
- }

- const META_workflow_context_expand: ToolMetadata = {
-   displayNames: {
-     [ClientToolCallState.generating]: { text: 'Expanding workflow schemas', icon: Loader2 },
-     [ClientToolCallState.pending]: { text: 'Expanding workflow schemas', icon: Loader2 },
-     [ClientToolCallState.executing]: { text: 'Expanding workflow schemas', icon: Loader2 },
-     [ClientToolCallState.success]: { text: 'Expanded workflow schemas', icon: FileText },
-     [ClientToolCallState.error]: { text: 'Failed to expand workflow schemas', icon: XCircle },
-     [ClientToolCallState.rejected]: { text: 'Skipped schema expansion', icon: MinusCircle },
-     [ClientToolCallState.aborted]: { text: 'Aborted schema expansion', icon: MinusCircle },
-   },
- }

- const META_workflow_verify: ToolMetadata = {
-   displayNames: {
-     [ClientToolCallState.generating]: { text: 'Verifying workflow', icon: Loader2 },
-     [ClientToolCallState.pending]: { text: 'Verifying workflow', icon: Loader2 },
-     [ClientToolCallState.executing]: { text: 'Verifying workflow', icon: Loader2 },
-     [ClientToolCallState.success]: { text: 'Verified workflow', icon: CheckCircle2 },
-     [ClientToolCallState.error]: { text: 'Workflow verification failed', icon: XCircle },
-     [ClientToolCallState.rejected]: { text: 'Skipped workflow verification', icon: MinusCircle },
-     [ClientToolCallState.aborted]: { text: 'Aborted workflow verification', icon: MinusCircle },
-   },
- }

const META_evaluate: ToolMetadata = {
  displayNames: {
    [ClientToolCallState.generating]: { text: 'Evaluating', icon: Loader2 },
@@ -2601,12 +2541,7 @@ const TOOL_METADATA_BY_ID: Record<string, ToolMetadata> = {
  deploy_chat: META_deploy_chat,
  deploy_mcp: META_deploy_mcp,
  edit: META_edit,
- workflow_context_get: META_workflow_context_get,
+ edit_workflow: META_edit_workflow,
- workflow_context_expand: META_workflow_context_expand,
- workflow_change: META_workflow_change,
- workflow_verify: META_workflow_verify,
- workflow_run: META_run_workflow,
- workflow_deploy: META_deploy_api,
  evaluate: META_evaluate,
  get_block_config: META_get_block_config,
  get_block_options: META_get_block_options,
@@ -191,10 +191,10 @@ export const DIRECT_TOOL_DEFS: DirectToolDef[] = [
    },
  },
  {
-   name: 'workflow_run',
+   name: 'run_workflow',
-   toolId: 'workflow_run',
+   toolId: 'run_workflow',
    description:
-     'Run a workflow using one unified interface. Supports full runs and partial execution modes.',
+     'Run a workflow and return its output. Works on both draft and deployed states. By default runs the draft (live) state.',
    inputSchema: {
      type: 'object',
      properties: {
@@ -202,40 +202,114 @@ export const DIRECT_TOOL_DEFS: DirectToolDef[] = [
        type: 'string',
        description: 'REQUIRED. The workflow ID to run.',
      },
-     mode: {
-       type: 'string',
-       description: 'Execution mode: full, until_block, from_block, or block. Default: full.',
-       enum: ['full', 'until_block', 'from_block', 'block'],
-     },
      workflow_input: {
        type: 'object',
        description:
-         'JSON object with input values. Keys should match workflow start block input names.',
+         'JSON object with input values. Keys should match the workflow start block input field names.',
-     },
-     stopAfterBlockId: {
-       type: 'string',
-       description: 'Required when mode is until_block.',
-     },
-     startBlockId: {
-       type: 'string',
-       description: 'Required when mode is from_block.',
-     },
-     blockId: {
-       type: 'string',
-       description: 'Required when mode is block.',
-     },
-     executionId: {
-       type: 'string',
-       description: 'Optional execution snapshot ID for from_block or block modes.',
      },
      useDeployedState: {
        type: 'boolean',
-       description: 'When true, runs deployed state instead of draft. Default: false.',
+       description: 'When true, runs the deployed version instead of the draft. Default: false.',
      },
    },
    required: ['workflowId'],
  },
},
+ {
+   name: 'run_workflow_until_block',
+   toolId: 'run_workflow_until_block',
+   description:
+     'Run a workflow and stop after a specific block completes. Useful for testing partial execution or debugging specific blocks.',
+   inputSchema: {
+     type: 'object',
+     properties: {
+       workflowId: {
+         type: 'string',
+         description: 'REQUIRED. The workflow ID to run.',
+       },
+       stopAfterBlockId: {
+         type: 'string',
+         description:
+           'REQUIRED. The block ID to stop after. Execution halts once this block completes.',
+       },
+       workflow_input: {
+         type: 'object',
+         description: 'JSON object with input values for the workflow.',
+       },
+       useDeployedState: {
+         type: 'boolean',
+         description: 'When true, runs the deployed version instead of the draft. Default: false.',
+       },
+     },
+     required: ['workflowId', 'stopAfterBlockId'],
+   },
+ },
+ {
+   name: 'run_from_block',
+   toolId: 'run_from_block',
+   description:
+     'Run a workflow starting from a specific block, using cached outputs from a prior execution for upstream blocks. The workflow must have been run at least once first.',
+   inputSchema: {
+     type: 'object',
+     properties: {
+       workflowId: {
+         type: 'string',
+         description: 'REQUIRED. The workflow ID to run.',
+       },
+       startBlockId: {
+         type: 'string',
+         description: 'REQUIRED. The block ID to start execution from.',
+       },
+       executionId: {
+         type: 'string',
+         description:
+           'Optional. Specific execution ID to load the snapshot from. Uses latest if omitted.',
+       },
+       workflow_input: {
+         type: 'object',
+         description: 'Optional input values for the workflow.',
+       },
+       useDeployedState: {
+         type: 'boolean',
+         description: 'When true, runs the deployed version instead of the draft. Default: false.',
+       },
+     },
+     required: ['workflowId', 'startBlockId'],
+   },
+ },
+ {
+   name: 'run_block',
+   toolId: 'run_block',
+   description:
+     'Run a single block in isolation using cached outputs from a prior execution. Only the specified block executes — nothing upstream or downstream. The workflow must have been run at least once first.',
+   inputSchema: {
+     type: 'object',
+     properties: {
+       workflowId: {
+         type: 'string',
+         description: 'REQUIRED. The workflow ID.',
+       },
+       blockId: {
+         type: 'string',
+         description: 'REQUIRED. The block ID to run in isolation.',
+       },
+       executionId: {
+         type: 'string',
+         description:
+           'Optional. Specific execution ID to load the snapshot from. Uses latest if omitted.',
+       },
+       workflow_input: {
+         type: 'object',
+         description: 'Optional input values for the workflow.',
+       },
+       useDeployedState: {
+         type: 'boolean',
+         description: 'When true, runs the deployed version instead of the draft. Default: false.',
+       },
+     },
+     required: ['workflowId', 'blockId'],
+   },
+ },
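A minimal sketch of argument objects that would satisfy the input schemas added above; the workflow, block, and execution IDs are hypothetical placeholders, not values from this change:

```ts
// Hypothetical payloads for the new partial-execution tools; all IDs are placeholders.
const untilBlockArgs = {
  workflowId: 'wf_123', // the workflow to run
  stopAfterBlockId: 'block_agent', // execution halts once this block completes
  workflow_input: { topic: 'quarterly report' },
  useDeployedState: false, // run the draft (live) state
}

const runBlockArgs = {
  workflowId: 'wf_123',
  blockId: 'block_agent', // only this block runs, using cached upstream outputs
  executionId: 'exec_456', // optional; the latest snapshot is used when omitted
}
```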
  {
    name: 'get_deployed_workflow_state',
    toolId: 'get_deployed_workflow_state',
@@ -457,10 +531,10 @@ ALSO CAN:
    description: `Run a workflow and verify its outputs. Works on both deployed and undeployed (draft) workflows. Use after building to verify correctness.

Supports full and partial execution:
- - Full run with test inputs using workflow_run mode "full"
+ - Full run with test inputs
- - Stop after a specific block using workflow_run mode "until_block"
+ - Stop after a specific block (run_workflow_until_block)
- - Run a single block in isolation using workflow_run mode "block"
+ - Run a single block in isolation (run_block)
- - Resume from a specific block using workflow_run mode "from_block"`,
+ - Resume from a specific block (run_from_block)`,
    inputSchema: {
      type: 'object',
      properties: {
@@ -109,7 +109,7 @@ function resolveSubBlockOptions(sb: SubBlockConfig): string[] | undefined {
    return undefined
  }

- // Return canonical option IDs/values expected by workflow_change compilation and apply
+ // Return the actual option ID/value that edit_workflow expects, not the display label
  return rawOptions
    .map((opt: any) => {
      if (!opt) return undefined
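A minimal sketch of the distinction the comment change above describes; the `{ id, label }` option shape mirrors the option handling elsewhere in this compare, and the concrete values are made up:

```ts
// edit_workflow expects the canonical option ID, not the label shown in the UI.
type Option = { id: string; label: string }

const rawOptions: Option[] = [{ id: 'gpt-4o', label: 'GPT-4o' }]

const canonicalValues = rawOptions.map((opt) => opt.id) // ['gpt-4o']
```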
@@ -11,13 +11,8 @@ import { makeApiRequestServerTool } from '@/lib/copilot/tools/server/other/make-
import { searchOnlineServerTool } from '@/lib/copilot/tools/server/other/search-online'
import { getCredentialsServerTool } from '@/lib/copilot/tools/server/user/get-credentials'
import { setEnvironmentVariablesServerTool } from '@/lib/copilot/tools/server/user/set-environment-variables'
+ import { editWorkflowServerTool } from '@/lib/copilot/tools/server/workflow/edit-workflow'
import { getWorkflowConsoleServerTool } from '@/lib/copilot/tools/server/workflow/get-workflow-console'
- import { workflowChangeServerTool } from '@/lib/copilot/tools/server/workflow/workflow-change'
- import {
-   workflowContextExpandServerTool,
-   workflowContextGetServerTool,
- } from '@/lib/copilot/tools/server/workflow/workflow-context'
- import { workflowVerifyServerTool } from '@/lib/copilot/tools/server/workflow/workflow-verify'
import { ExecuteResponseSuccessSchema } from '@/lib/copilot/tools/shared/schemas'

export { ExecuteResponseSuccessSchema }
@@ -32,6 +27,7 @@ const serverToolRegistry: Record<string, BaseServerTool> = {
  [getBlockOptionsServerTool.name]: getBlockOptionsServerTool,
  [getBlockConfigServerTool.name]: getBlockConfigServerTool,
  [getTriggerBlocksServerTool.name]: getTriggerBlocksServerTool,
+ [editWorkflowServerTool.name]: editWorkflowServerTool,
  [getWorkflowConsoleServerTool.name]: getWorkflowConsoleServerTool,
  [searchDocumentationServerTool.name]: searchDocumentationServerTool,
  [searchOnlineServerTool.name]: searchOnlineServerTool,
@@ -39,10 +35,6 @@ const serverToolRegistry: Record<string, BaseServerTool> = {
  [getCredentialsServerTool.name]: getCredentialsServerTool,
  [makeApiRequestServerTool.name]: makeApiRequestServerTool,
  [knowledgeBaseServerTool.name]: knowledgeBaseServerTool,
- [workflowContextGetServerTool.name]: workflowContextGetServerTool,
- [workflowContextExpandServerTool.name]: workflowContextExpandServerTool,
- [workflowChangeServerTool.name]: workflowChangeServerTool,
- [workflowVerifyServerTool.name]: workflowVerifyServerTool,
}

/**
@@ -1,185 +0,0 @@
|
|||||||
import crypto from 'crypto'
|
|
||||||
import { createLogger } from '@sim/logger'
|
|
||||||
import { getRedisClient } from '@/lib/core/config/redis'
|
|
||||||
|
|
||||||
type StoreEntry<T> = {
|
|
||||||
value: T
|
|
||||||
expiresAt: number
|
|
||||||
}
|
|
||||||
|
|
||||||
const DEFAULT_TTL_MS = 30 * 60 * 1000
|
|
||||||
const MAX_ENTRIES = 500
|
|
||||||
const DEFAULT_TTL_SECONDS = Math.floor(DEFAULT_TTL_MS / 1000)
|
|
||||||
const CONTEXT_PREFIX = 'copilot:workflow_change:context'
|
|
||||||
const PROPOSAL_PREFIX = 'copilot:workflow_change:proposal'
|
|
||||||
|
|
||||||
const logger = createLogger('WorkflowChangeStore')
|
|
||||||
|
|
||||||
class TTLStore<T> {
|
|
||||||
private readonly data = new Map<string, StoreEntry<T>>()
|
|
||||||
|
|
||||||
constructor(private readonly ttlMs = DEFAULT_TTL_MS) {}
|
|
||||||
|
|
||||||
set(value: T): string {
|
|
||||||
this.gc()
|
|
||||||
if (this.data.size >= MAX_ENTRIES) {
|
|
||||||
const firstKey = this.data.keys().next().value as string | undefined
|
|
||||||
if (firstKey) {
|
|
||||||
this.data.delete(firstKey)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
const id = crypto.randomUUID()
|
|
||||||
this.data.set(id, {
|
|
||||||
value,
|
|
||||||
expiresAt: Date.now() + this.ttlMs,
|
|
||||||
})
|
|
||||||
return id
|
|
||||||
}
|
|
||||||
|
|
||||||
get(id: string): T | null {
|
|
||||||
const entry = this.data.get(id)
|
|
||||||
if (!entry) return null
|
|
||||||
if (entry.expiresAt <= Date.now()) {
|
|
||||||
this.data.delete(id)
|
|
||||||
return null
|
|
||||||
}
|
|
||||||
return entry.value
|
|
||||||
}
|
|
||||||
|
|
||||||
private gc(): void {
|
|
||||||
const now = Date.now()
|
|
||||||
for (const [key, entry] of this.data.entries()) {
|
|
||||||
if (entry.expiresAt <= now) {
|
|
||||||
this.data.delete(key)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export type WorkflowContextPack = {
|
|
||||||
workflowId: string
|
|
||||||
snapshotHash: string
|
|
||||||
workflowState: {
|
|
||||||
blocks: Record<string, any>
|
|
||||||
edges: Array<Record<string, any>>
|
|
||||||
loops: Record<string, any>
|
|
||||||
parallels: Record<string, any>
|
|
||||||
}
|
|
||||||
schemasByType: Record<string, any>
|
|
||||||
schemaRefsByType: Record<string, string>
|
|
||||||
summary: Record<string, any>
|
|
||||||
}
|
|
||||||
|
|
||||||
export type WorkflowChangeProposal = {
|
|
||||||
workflowId: string
|
|
||||||
baseSnapshotHash: string
|
|
||||||
compiledOperations: Array<Record<string, any>>
|
|
||||||
diffSummary: Record<string, any>
|
|
||||||
warnings: string[]
|
|
||||||
diagnostics: string[]
|
|
||||||
touchedBlocks: string[]
|
|
||||||
acceptanceAssertions: string[]
|
|
||||||
postApply?: {
|
|
||||||
verify?: boolean
|
|
||||||
run?: Record<string, any>
|
|
||||||
evaluator?: Record<string, any>
|
|
||||||
}
|
|
||||||
handoff?: {
|
|
||||||
objective?: string
|
|
||||||
constraints?: string[]
|
|
||||||
resolvedIds?: Record<string, string>
|
|
||||||
assumptions?: string[]
|
|
||||||
unresolvedRisks?: string[]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const contextPackStore = new TTLStore<WorkflowContextPack>()
|
|
||||||
const proposalStore = new TTLStore<WorkflowChangeProposal>()
|
|
||||||
|
|
||||||
function getContextRedisKey(id: string): string {
|
|
||||||
return `${CONTEXT_PREFIX}:${id}`
|
|
||||||
}
|
|
||||||
|
|
||||||
function getProposalRedisKey(id: string): string {
|
|
||||||
return `${PROPOSAL_PREFIX}:${id}`
|
|
||||||
}
|
|
||||||
|
|
||||||
async function writeRedisJson(key: string, value: unknown): Promise<void> {
|
|
||||||
const redis = getRedisClient()!
|
|
||||||
await redis.set(key, JSON.stringify(value), 'EX', DEFAULT_TTL_SECONDS)
|
|
||||||
}
|
|
||||||
|
|
||||||
async function readRedisJson<T>(key: string): Promise<T | null> {
|
|
||||||
const redis = getRedisClient()!
|
|
||||||
|
|
||||||
const raw = await redis.get(key)
|
|
||||||
if (!raw) {
|
|
||||||
return null
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
|
||||||
return JSON.parse(raw) as T
|
|
||||||
} catch (error) {
|
|
||||||
logger.warn('Failed parsing workflow change store JSON payload', { key, error })
|
|
||||||
await redis.del(key).catch(() => {})
|
|
||||||
return null
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function saveContextPack(pack: WorkflowContextPack): Promise<string> {
|
|
||||||
if (!getRedisClient()) {
|
|
||||||
return contextPackStore.set(pack)
|
|
||||||
}
|
|
||||||
const id = crypto.randomUUID()
|
|
||||||
try {
|
|
||||||
await writeRedisJson(getContextRedisKey(id), pack)
|
|
||||||
return id
|
|
||||||
} catch (error) {
|
|
||||||
logger.warn('Redis write failed for workflow context pack, using memory fallback', { error })
|
|
||||||
return contextPackStore.set(pack)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function getContextPack(id: string): Promise<WorkflowContextPack | null> {
|
|
||||||
if (!getRedisClient()) {
|
|
||||||
return contextPackStore.get(id)
|
|
||||||
}
|
|
||||||
try {
|
|
||||||
const redisPayload = await readRedisJson<WorkflowContextPack>(getContextRedisKey(id))
|
|
||||||
if (redisPayload) {
|
|
||||||
return redisPayload
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
logger.warn('Redis read failed for workflow context pack, using memory fallback', { error })
|
|
||||||
}
|
|
||||||
return contextPackStore.get(id)
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function saveProposal(proposal: WorkflowChangeProposal): Promise<string> {
|
|
||||||
if (!getRedisClient()) {
|
|
||||||
return proposalStore.set(proposal)
|
|
||||||
}
|
|
||||||
const id = crypto.randomUUID()
|
|
||||||
try {
|
|
||||||
await writeRedisJson(getProposalRedisKey(id), proposal)
|
|
||||||
return id
|
|
||||||
} catch (error) {
|
|
||||||
logger.warn('Redis write failed for workflow proposal, using memory fallback', { error })
|
|
||||||
return proposalStore.set(proposal)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function getProposal(id: string): Promise<WorkflowChangeProposal | null> {
|
|
||||||
if (!getRedisClient()) {
|
|
||||||
return proposalStore.get(id)
|
|
||||||
}
|
|
||||||
try {
|
|
||||||
const redisPayload = await readRedisJson<WorkflowChangeProposal>(getProposalRedisKey(id))
|
|
||||||
if (redisPayload) {
|
|
||||||
return redisPayload
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
logger.warn('Redis read failed for workflow proposal, using memory fallback', { error })
|
|
||||||
}
|
|
||||||
return proposalStore.get(id)
|
|
||||||
}
|
|
||||||
@@ -68,8 +68,8 @@ async function getCurrentWorkflowStateFromDb(
  return { workflowState, subBlockValues }
}

- export const applyWorkflowOperationsServerTool: BaseServerTool<EditWorkflowParams, unknown> = {
+ export const editWorkflowServerTool: BaseServerTool<EditWorkflowParams, unknown> = {
-   name: '__internal_apply_workflow_operations',
+   name: 'edit_workflow',
  async execute(params: EditWorkflowParams, context?: { userId: string }): Promise<unknown> {
    const logger = createLogger('EditWorkflowServerTool')
    const { operations, workflowId, currentUserWorkflow } = params
@@ -90,7 +90,7 @@ export const applyWorkflowOperationsServerTool: BaseServerTool<EditWorkflowParam
      throw new Error(authorization.message || 'Unauthorized workflow access')
    }

-   logger.info('Executing internal workflow operation apply', {
+   logger.info('Executing edit_workflow', {
      operationCount: operations.length,
      workflowId,
      hasCurrentUserWorkflow: !!currentUserWorkflow,
@@ -210,7 +210,7 @@ export const applyWorkflowOperationsServerTool: BaseServerTool<EditWorkflowParam
      logger.warn('No userId in context - skipping custom tools persistence', { workflowId })
    }

-   logger.info('Internal workflow operation apply succeeded', {
+   logger.info('edit_workflow successfully applied operations', {
      operationCount: operations.length,
      blocksCount: Object.keys(modifiedWorkflowState.blocks).length,
      edgesCount: modifiedWorkflowState.edges.length,
File diff suppressed because it is too large
@@ -1,166 +0,0 @@
|
|||||||
import { createLogger } from '@sim/logger'
|
|
||||||
import { z } from 'zod'
|
|
||||||
import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'
|
|
||||||
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
|
|
||||||
import { getContextPack, saveContextPack } from './change-store'
|
|
||||||
import {
|
|
||||||
buildSchemasByType,
|
|
||||||
getAllKnownBlockTypes,
|
|
||||||
hashWorkflowState,
|
|
||||||
loadWorkflowStateFromDb,
|
|
||||||
summarizeWorkflowState,
|
|
||||||
} from './workflow-state'
|
|
||||||
|
|
||||||
const logger = createLogger('WorkflowContextServerTool')
|
|
||||||
|
|
||||||
const WorkflowContextGetInputSchema = z.object({
|
|
||||||
workflowId: z.string(),
|
|
||||||
objective: z.string().optional(),
|
|
||||||
includeBlockTypes: z.array(z.string()).optional(),
|
|
||||||
includeAllSchemas: z.boolean().optional(),
|
|
||||||
schemaMode: z.enum(['minimal', 'workflow', 'all']).optional(),
|
|
||||||
})
|
|
||||||
|
|
||||||
type WorkflowContextGetParams = z.infer<typeof WorkflowContextGetInputSchema>
|
|
||||||
|
|
||||||
const WorkflowContextExpandInputSchema = z.object({
|
|
||||||
contextPackId: z.string(),
|
|
||||||
blockTypes: z.array(z.string()).optional(),
|
|
||||||
schemaRefs: z.array(z.string()).optional(),
|
|
||||||
})
|
|
||||||
|
|
||||||
type WorkflowContextExpandParams = z.infer<typeof WorkflowContextExpandInputSchema>
|
|
||||||
|
|
||||||
function parseSchemaRefToBlockType(schemaRef: string): string | null {
|
|
||||||
if (!schemaRef) return null
|
|
||||||
const [blockType] = schemaRef.split('@')
|
|
||||||
return blockType || null
|
|
||||||
}
|
|
||||||
|
|
||||||
function buildAvailableBlockCatalog(
|
|
||||||
schemaRefsByType: Record<string, string>
|
|
||||||
): Array<Record<string, any>> {
|
|
||||||
return Object.entries(schemaRefsByType)
|
|
||||||
.sort((a, b) => a[0].localeCompare(b[0]))
|
|
||||||
.map(([blockType, schemaRef]) => ({
|
|
||||||
blockType,
|
|
||||||
schemaRef,
|
|
||||||
}))
|
|
||||||
}
|
|
||||||
|
|
||||||
export const workflowContextGetServerTool: BaseServerTool<WorkflowContextGetParams, any> = {
|
|
||||||
name: 'workflow_context_get',
|
|
||||||
inputSchema: WorkflowContextGetInputSchema,
|
|
||||||
async execute(params: WorkflowContextGetParams, context?: { userId: string }): Promise<any> {
|
|
||||||
if (!context?.userId) {
|
|
||||||
throw new Error('Unauthorized workflow access')
|
|
||||||
}
|
|
||||||
|
|
||||||
const authorization = await authorizeWorkflowByWorkspacePermission({
|
|
||||||
workflowId: params.workflowId,
|
|
||||||
userId: context.userId,
|
|
||||||
action: 'read',
|
|
||||||
})
|
|
||||||
if (!authorization.allowed) {
|
|
||||||
throw new Error(authorization.message || 'Unauthorized workflow access')
|
|
||||||
}
|
|
||||||
|
|
||||||
const { workflowState } = await loadWorkflowStateFromDb(params.workflowId)
|
|
||||||
const snapshotHash = hashWorkflowState(workflowState as unknown as Record<string, unknown>)
|
|
||||||
|
|
||||||
const blockTypesInWorkflow = Object.values(workflowState.blocks || {}).map((block: any) =>
|
|
||||||
String(block?.type || '')
|
|
||||||
)
|
|
||||||
const requestedTypes = params.includeBlockTypes || []
|
|
||||||
const schemaMode =
|
|
||||||
params.includeAllSchemas === true ? 'all' : (params.schemaMode || 'minimal')
|
|
||||||
const candidateTypes =
|
|
||||||
schemaMode === 'all'
|
|
||||||
? getAllKnownBlockTypes()
|
|
||||||
: schemaMode === 'workflow'
|
|
||||||
? [...blockTypesInWorkflow, ...requestedTypes]
|
|
||||||
: [...requestedTypes]
|
|
||||||
const { schemasByType, schemaRefsByType } = buildSchemasByType(candidateTypes)
|
|
||||||
const suggestedSchemaTypes = [...new Set(blockTypesInWorkflow.filter(Boolean))]
|
|
||||||
|
|
||||||
const summary = summarizeWorkflowState(workflowState)
|
|
||||||
const packId = await saveContextPack({
|
|
||||||
workflowId: params.workflowId,
|
|
||||||
snapshotHash,
|
|
||||||
workflowState,
|
|
||||||
schemasByType,
|
|
||||||
schemaRefsByType,
|
|
||||||
summary: {
|
|
||||||
...summary,
|
|
||||||
objective: params.objective || null,
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
logger.info('Generated workflow context pack', {
|
|
||||||
workflowId: params.workflowId,
|
|
||||||
contextPackId: packId,
|
|
||||||
schemaCount: Object.keys(schemaRefsByType).length,
|
|
||||||
})
|
|
||||||
|
|
||||||
return {
|
|
||||||
success: true,
|
|
||||||
contextPackId: packId,
|
|
||||||
workflowId: params.workflowId,
|
|
||||||
snapshotHash,
|
|
||||||
schemaMode,
|
|
||||||
summary: {
|
|
||||||
...summary,
|
|
||||||
objective: params.objective || null,
|
|
||||||
},
|
|
||||||
schemaRefsByType,
|
|
||||||
availableBlockCatalog: buildAvailableBlockCatalog(schemaRefsByType),
|
|
||||||
suggestedSchemaTypes,
|
|
||||||
inScopeSchemas: schemasByType,
|
|
||||||
}
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
export const workflowContextExpandServerTool: BaseServerTool<WorkflowContextExpandParams, any> = {
|
|
||||||
name: 'workflow_context_expand',
|
|
||||||
inputSchema: WorkflowContextExpandInputSchema,
|
|
||||||
async execute(params: WorkflowContextExpandParams, context?: { userId: string }): Promise<any> {
|
|
||||||
if (!context?.userId) {
|
|
||||||
throw new Error('Unauthorized workflow access')
|
|
||||||
}
|
|
||||||
|
|
||||||
const contextPack = await getContextPack(params.contextPackId)
|
|
||||||
if (!contextPack) {
|
|
||||||
throw new Error(`Context pack not found or expired: ${params.contextPackId}`)
|
|
||||||
}
|
|
||||||
|
|
||||||
const authorization = await authorizeWorkflowByWorkspacePermission({
|
|
||||||
workflowId: contextPack.workflowId,
|
|
||||||
userId: context.userId,
|
|
||||||
action: 'read',
|
|
||||||
})
|
|
||||||
if (!authorization.allowed) {
|
|
||||||
throw new Error(authorization.message || 'Unauthorized workflow access')
|
|
||||||
}
|
|
||||||
|
|
||||||
const requestedBlockTypes = new Set<string>()
|
|
||||||
for (const blockType of params.blockTypes || []) {
|
|
||||||
if (blockType) requestedBlockTypes.add(blockType)
|
|
||||||
}
|
|
||||||
for (const schemaRef of params.schemaRefs || []) {
|
|
||||||
const blockType = parseSchemaRefToBlockType(schemaRef)
|
|
||||||
if (blockType) requestedBlockTypes.add(blockType)
|
|
||||||
}
|
|
||||||
|
|
||||||
const typesToExpand = [...requestedBlockTypes]
|
|
||||||
const { schemasByType, schemaRefsByType } = buildSchemasByType(typesToExpand)
|
|
||||||
|
|
||||||
return {
|
|
||||||
success: true,
|
|
||||||
contextPackId: params.contextPackId,
|
|
||||||
workflowId: contextPack.workflowId,
|
|
||||||
snapshotHash: contextPack.snapshotHash,
|
|
||||||
schemasByType,
|
|
||||||
schemaRefsByType,
|
|
||||||
}
|
|
||||||
},
|
|
||||||
}
|
|
||||||
@@ -1,226 +0,0 @@
|
|||||||
import crypto from 'crypto'
|
|
||||||
import { db } from '@sim/db'
|
|
||||||
import { workflow as workflowTable } from '@sim/db/schema'
|
|
||||||
import { createLogger } from '@sim/logger'
|
|
||||||
import { eq } from 'drizzle-orm'
|
|
||||||
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'
|
|
||||||
import { getAllBlockTypes, getBlock } from '@/blocks/registry'
|
|
||||||
import type { SubBlockConfig } from '@/blocks/types'
|
|
||||||
|
|
||||||
const logger = createLogger('WorkflowContextState')
|
|
||||||
|
|
||||||
function stableSortValue(value: any): any {
|
|
||||||
if (Array.isArray(value)) {
|
|
||||||
return value.map(stableSortValue)
|
|
||||||
}
|
|
||||||
if (value && typeof value === 'object') {
|
|
||||||
const sorted: Record<string, any> = {}
|
|
||||||
for (const key of Object.keys(value).sort()) {
|
|
||||||
sorted[key] = stableSortValue(value[key])
|
|
||||||
}
|
|
||||||
return sorted
|
|
||||||
}
|
|
||||||
return value
|
|
||||||
}
|
|
||||||
|
|
||||||
export function hashWorkflowState(state: Record<string, unknown>): string {
|
|
||||||
const stable = stableSortValue(state)
|
|
||||||
const payload = JSON.stringify(stable)
|
|
||||||
return `sha256:${crypto.createHash('sha256').update(payload).digest('hex')}`
|
|
||||||
}
|
|
||||||
|
|
||||||
function normalizeOptions(options: unknown): string[] | null {
|
|
||||||
if (!Array.isArray(options)) return null
|
|
||||||
const normalized = options
|
|
||||||
.map((option) => {
|
|
||||||
if (option == null) return null
|
|
||||||
if (typeof option === 'object') {
|
|
||||||
const optionRecord = option as Record<string, unknown>
|
|
||||||
const id = optionRecord.id
|
|
||||||
if (typeof id === 'string') return id
|
|
||||||
const label = optionRecord.label
|
|
||||||
if (typeof label === 'string') return label
|
|
||||||
return null
|
|
||||||
}
|
|
||||||
return String(option)
|
|
||||||
})
|
|
||||||
.filter((value): value is string => Boolean(value))
|
|
||||||
return normalized.length > 0 ? normalized : null
|
|
||||||
}
|
|
||||||
|
|
||||||
function serializeRequired(required: SubBlockConfig['required']): boolean | Record<string, any> {
|
|
||||||
if (typeof required === 'boolean') return required
|
|
||||||
if (!required) return false
|
|
||||||
if (typeof required === 'object') {
|
|
||||||
const out: Record<string, any> = {}
|
|
||||||
const record = required as Record<string, unknown>
|
|
||||||
for (const key of ['field', 'operator', 'value']) {
|
|
||||||
if (record[key] !== undefined) {
|
|
||||||
out[key] = record[key]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return out
|
|
||||||
}
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
function serializeSubBlock(subBlock: SubBlockConfig): Record<string, unknown> {
|
|
||||||
const staticOptions =
|
|
||||||
typeof subBlock.options === 'function' ? null : normalizeOptions(subBlock.options)
|
|
||||||
return {
|
|
||||||
id: subBlock.id,
|
|
||||||
type: subBlock.type,
|
|
||||||
title: subBlock.title,
|
|
||||||
description: subBlock.description || null,
|
|
||||||
mode: subBlock.mode || null,
|
|
||||||
placeholder: subBlock.placeholder || null,
|
|
||||||
hidden: Boolean(subBlock.hidden),
|
|
||||||
multiSelect: Boolean(subBlock.multiSelect),
|
|
||||||
required: serializeRequired(subBlock.required),
|
|
||||||
hasDynamicOptions: typeof subBlock.options === 'function',
|
|
||||||
options: staticOptions,
|
|
||||||
defaultValue: subBlock.defaultValue ?? null,
|
|
||||||
min: subBlock.min ?? null,
|
|
||||||
max: subBlock.max ?? null,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function serializeBlockSchema(blockType: string): Record<string, unknown> | null {
|
|
||||||
const blockConfig = getBlock(blockType)
|
|
||||||
if (!blockConfig) return null
|
|
||||||
|
|
||||||
const subBlocks = Array.isArray(blockConfig.subBlocks)
|
|
||||||
? blockConfig.subBlocks.map(serializeSubBlock)
|
|
||||||
: []
|
|
||||||
const outputs = blockConfig.outputs || {}
|
|
||||||
const outputKeys = Object.keys(outputs)
|
|
||||||
|
|
||||||
return {
|
|
||||||
blockType,
|
|
||||||
blockName: blockConfig.name || blockType,
|
|
||||||
category: blockConfig.category,
|
|
||||||
triggerAllowed: Boolean(blockConfig.triggerAllowed || blockConfig.triggers?.enabled),
|
|
||||||
hasTriggersConfig: Boolean(blockConfig.triggers?.enabled),
|
|
||||||
subBlocks,
|
|
||||||
outputKeys,
|
|
||||||
longDescription: blockConfig.longDescription || null,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export function buildSchemasByType(blockTypes: string[]): {
|
|
||||||
schemasByType: Record<string, any>
|
|
||||||
schemaRefsByType: Record<string, string>
|
|
||||||
} {
|
|
||||||
const schemasByType: Record<string, any> = {}
|
|
||||||
const schemaRefsByType: Record<string, string> = {}
|
|
||||||
|
|
||||||
const uniqueTypes = [...new Set(blockTypes.filter(Boolean))]
|
|
||||||
for (const blockType of uniqueTypes) {
|
|
||||||
const schema = serializeBlockSchema(blockType)
|
|
||||||
if (!schema) continue
|
|
||||||
const stableSchema = stableSortValue(schema)
|
|
||||||
const schemaHash = crypto
|
|
||||||
.createHash('sha256')
|
|
||||||
.update(JSON.stringify(stableSchema))
|
|
||||||
.digest('hex')
|
|
||||||
schemasByType[blockType] = stableSchema
|
|
||||||
schemaRefsByType[blockType] = `${blockType}@sha256:${schemaHash}`
|
|
||||||
}
|
|
||||||
|
|
||||||
return { schemasByType, schemaRefsByType }
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function loadWorkflowStateFromDb(workflowId: string): Promise<{
|
|
||||||
workflowState: {
|
|
||||||
blocks: Record<string, any>
|
|
||||||
edges: Array<Record<string, any>>
|
|
||||||
loops: Record<string, any>
|
|
||||||
parallels: Record<string, any>
|
|
||||||
}
|
|
||||||
workspaceId?: string
|
|
||||||
}> {
|
|
||||||
const [workflowRecord] = await db
|
|
||||||
.select({ workspaceId: workflowTable.workspaceId })
|
|
||||||
.from(workflowTable)
|
|
||||||
.where(eq(workflowTable.id, workflowId))
|
|
||||||
.limit(1)
|
|
||||||
if (!workflowRecord) {
|
|
||||||
throw new Error(`Workflow ${workflowId} not found`)
|
|
||||||
}
|
|
||||||
|
|
||||||
const normalized = await loadWorkflowFromNormalizedTables(workflowId)
|
|
||||||
if (!normalized) {
|
|
||||||
throw new Error(`Workflow ${workflowId} has no normalized data`)
|
|
||||||
}
|
|
||||||
|
|
||||||
const blocks = { ...normalized.blocks }
|
|
||||||
const invalidBlockIds: string[] = []
|
|
||||||
for (const [blockId, block] of Object.entries(blocks)) {
|
|
||||||
if (!(block as { type?: unknown })?.type) {
|
|
||||||
invalidBlockIds.push(blockId)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
for (const blockId of invalidBlockIds) {
|
|
||||||
delete blocks[blockId]
|
|
||||||
}
|
|
||||||
|
|
||||||
const invalidSet = new Set(invalidBlockIds)
|
|
||||||
const edges = (normalized.edges || []).filter(
|
|
||||||
(edge: any) => !invalidSet.has(edge.source) && !invalidSet.has(edge.target)
|
|
||||||
)
|
|
||||||
|
|
||||||
if (invalidBlockIds.length > 0) {
|
|
||||||
logger.warn('Dropped blocks without type while loading workflow state', {
|
|
||||||
workflowId,
|
|
||||||
dropped: invalidBlockIds,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
return {
|
|
||||||
workflowState: {
|
|
||||||
blocks,
|
|
||||||
edges,
|
|
||||||
loops: normalized.loops || {},
|
|
||||||
parallels: normalized.parallels || {},
|
|
||||||
},
|
|
||||||
workspaceId: workflowRecord.workspaceId || undefined,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export function summarizeWorkflowState(workflowState: {
|
|
||||||
blocks: Record<string, any>
|
|
||||||
edges: Array<Record<string, any>>
|
|
||||||
loops: Record<string, any>
|
|
||||||
parallels: Record<string, any>
|
|
||||||
}): Record<string, unknown> {
|
|
||||||
const blocks = workflowState.blocks || {}
|
|
||||||
const edges = workflowState.edges || []
|
|
||||||
const blockTypes: Record<string, number> = {}
|
|
||||||
const triggerBlocks: Array<{ id: string; name: string; type: string }> = []
|
|
||||||
|
|
||||||
for (const [blockId, block] of Object.entries(blocks)) {
|
|
||||||
const blockType = String((block as Record<string, unknown>).type || 'unknown')
|
|
||||||
blockTypes[blockType] = (blockTypes[blockType] || 0) + 1
|
|
||||||
if ((block as Record<string, unknown>).triggerMode === true) {
|
|
||||||
triggerBlocks.push({
|
|
||||||
id: blockId,
|
|
||||||
name: String((block as Record<string, unknown>).name || blockType),
|
|
||||||
type: blockType,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return {
|
|
||||||
blockCount: Object.keys(blocks).length,
|
|
||||||
edgeCount: edges.length,
|
|
||||||
loopCount: Object.keys(workflowState.loops || {}).length,
|
|
||||||
parallelCount: Object.keys(workflowState.parallels || {}).length,
|
|
||||||
blockTypes,
|
|
||||||
triggerBlocks,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export function getAllKnownBlockTypes(): string[] {
|
|
||||||
return getAllBlockTypes()
|
|
||||||
}
|
|
||||||
@@ -1,194 +0,0 @@
|
|||||||
import { createLogger } from '@sim/logger'
|
|
||||||
import { z } from 'zod'
|
|
||||||
import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'
|
|
||||||
import { validateWorkflowState } from '@/lib/workflows/sanitization/validation'
|
|
||||||
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
|
|
||||||
import { hashWorkflowState, loadWorkflowStateFromDb } from './workflow-state'
|
|
||||||
|
|
||||||
const logger = createLogger('WorkflowVerifyServerTool')
|
|
||||||
|
|
||||||
const AcceptanceItemSchema = z.union([
|
|
||||||
z.string(),
|
|
||||||
z.object({
|
|
||||||
kind: z.string().optional(),
|
|
||||||
assert: z.string(),
|
|
||||||
}),
|
|
||||||
])
|
|
||||||
|
|
||||||
const WorkflowVerifyInputSchema = z
|
|
||||||
.object({
|
|
||||||
workflowId: z.string(),
|
|
||||||
acceptance: z.array(AcceptanceItemSchema).optional(),
|
|
||||||
baseSnapshotHash: z.string().optional(),
|
|
||||||
})
|
|
||||||
.strict()
|
|
||||||
|
|
||||||
type WorkflowVerifyParams = z.infer<typeof WorkflowVerifyInputSchema>
|
|
||||||
|
|
||||||
function normalizeName(value: string): string {
|
|
||||||
return value.trim().toLowerCase()
|
|
||||||
}
|
|
||||||
|
|
||||||
function resolveBlockToken(
|
|
||||||
workflowState: { blocks: Record<string, any> },
|
|
||||||
token: string
|
|
||||||
): string | null {
|
|
||||||
if (!token) return null
|
|
||||||
if (workflowState.blocks[token]) return token
|
|
||||||
const normalized = normalizeName(token)
|
|
||||||
for (const [blockId, block] of Object.entries(workflowState.blocks || {})) {
|
|
||||||
const blockName = normalizeName(String((block as Record<string, unknown>).name || ''))
|
|
||||||
if (blockName === normalized) return blockId
|
|
||||||
}
|
|
||||||
return null
|
|
||||||
}
|
|
||||||
|
|
||||||
function hasPath(
|
|
||||||
workflowState: { edges: Array<Record<string, any>> },
|
|
||||||
blockPath: string[]
|
|
||||||
): boolean {
|
|
||||||
if (blockPath.length < 2) return true
|
|
||||||
const adjacency = new Map<string, string[]>()
|
|
||||||
for (const edge of workflowState.edges || []) {
|
|
||||||
const source = String(edge.source || '')
|
|
||||||
const target = String(edge.target || '')
|
|
||||||
if (!source || !target) continue
|
|
||||||
const existing = adjacency.get(source) || []
|
|
||||||
existing.push(target)
|
|
||||||
adjacency.set(source, existing)
|
|
||||||
}
|
|
||||||
|
|
||||||
for (let i = 0; i < blockPath.length - 1; i++) {
|
|
||||||
const from = blockPath[i]
|
|
||||||
const to = blockPath[i + 1]
|
|
||||||
const next = adjacency.get(from) || []
|
|
||||||
if (!next.includes(to)) return false
|
|
||||||
}
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
|
|
||||||
function evaluateAssertions(params: {
|
|
||||||
workflowState: {
|
|
||||||
blocks: Record<string, any>
|
|
||||||
edges: Array<Record<string, any>>
|
|
||||||
}
|
|
||||||
assertions: string[]
|
|
||||||
}): { failures: string[]; checks: Array<Record<string, any>> } {
|
|
||||||
const failures: string[] = []
|
|
||||||
const checks: Array<Record<string, any>> = []
|
|
||||||
|
|
||||||
for (const assertion of params.assertions) {
|
|
||||||
if (assertion.startsWith('block_exists:')) {
|
|
||||||
const token = assertion.slice('block_exists:'.length).trim()
|
|
||||||
const blockId = resolveBlockToken(params.workflowState, token)
|
|
||||||
const passed = Boolean(blockId)
|
|
||||||
checks.push({ assert: assertion, passed, resolvedBlockId: blockId || null })
|
|
||||||
if (!passed) failures.push(`Assertion failed: ${assertion}`)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
if (assertion.startsWith('trigger_exists:')) {
|
|
||||||
const triggerType = normalizeName(assertion.slice('trigger_exists:'.length))
|
|
||||||
const triggerBlock = Object.values(params.workflowState.blocks || {}).find((block: any) => {
|
|
||||||
if (block?.triggerMode !== true) return false
|
|
||||||
return normalizeName(String(block?.type || '')) === triggerType
|
|
||||||
})
|
|
||||||
const passed = Boolean(triggerBlock)
|
|
||||||
checks.push({ assert: assertion, passed })
|
|
||||||
if (!passed) failures.push(`Assertion failed: ${assertion}`)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
if (assertion.startsWith('path_exists:')) {
|
|
||||||
const rawPath = assertion.slice('path_exists:'.length).trim()
|
|
||||||
const tokens = rawPath
|
|
||||||
.split('->')
|
|
||||||
.map((token) => token.trim())
|
|
||||||
.filter(Boolean)
|
|
||||||
const resolvedPath = tokens
|
|
||||||
.map((token) => resolveBlockToken(params.workflowState, token))
|
|
||||||
.filter((value): value is string => Boolean(value))
|
|
||||||
|
|
||||||
const resolvedAll = resolvedPath.length === tokens.length
|
|
||||||
const passed = resolvedAll && hasPath(params.workflowState, resolvedPath)
|
|
||||||
checks.push({
|
|
||||||
assert: assertion,
|
|
||||||
passed,
|
|
||||||
resolvedPath,
|
|
||||||
})
|
|
||||||
if (!passed) failures.push(`Assertion failed: ${assertion}`)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
// Unknown assertion format - mark as warning failure for explicit visibility.
|
|
||||||
checks.push({ assert: assertion, passed: false, reason: 'unknown_assertion_type' })
|
|
||||||
failures.push(`Unknown assertion format: ${assertion}`)
|
|
||||||
}
|
|
||||||
|
|
||||||
return { failures, checks }
|
|
||||||
}
|
|
||||||
|
|
||||||
export const workflowVerifyServerTool: BaseServerTool<WorkflowVerifyParams, any> = {
|
|
||||||
name: 'workflow_verify',
|
|
||||||
inputSchema: WorkflowVerifyInputSchema,
|
|
||||||
async execute(params: WorkflowVerifyParams, context?: { userId: string }): Promise<any> {
|
|
||||||
if (!context?.userId) {
|
|
||||||
throw new Error('Unauthorized workflow access')
|
|
||||||
}
|
|
||||||
|
|
||||||
const authorization = await authorizeWorkflowByWorkspacePermission({
|
|
||||||
workflowId: params.workflowId,
|
|
||||||
userId: context.userId,
|
|
||||||
action: 'read',
|
|
||||||
})
|
|
||||||
if (!authorization.allowed) {
|
|
||||||
throw new Error(authorization.message || 'Unauthorized workflow access')
|
|
||||||
}
|
|
||||||
|
|
||||||
const { workflowState } = await loadWorkflowStateFromDb(params.workflowId)
|
|
||||||
const snapshotHash = hashWorkflowState(workflowState as unknown as Record<string, unknown>)
|
|
||||||
if (params.baseSnapshotHash && params.baseSnapshotHash !== snapshotHash) {
|
|
||||||
return {
|
|
||||||
success: false,
|
|
||||||
verified: false,
|
|
||||||
reason: 'snapshot_mismatch',
|
|
||||||
expected: params.baseSnapshotHash,
|
|
||||||
current: snapshotHash,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const validation = validateWorkflowState(workflowState as any, { sanitize: false })
|
|
||||||
|
|
||||||
const assertions = (params.acceptance || []).map((item) =>
|
|
||||||
typeof item === 'string' ? item : item.assert
|
|
||||||
)
|
|
||||||
const assertionResults = evaluateAssertions({
|
|
||||||
workflowState,
|
|
||||||
assertions,
|
|
||||||
})
|
|
||||||
|
|
||||||
const verified =
|
|
||||||
validation.valid && assertionResults.failures.length === 0 && validation.errors.length === 0
|
|
||||||
|
|
||||||
logger.info('Workflow verification complete', {
|
|
||||||
workflowId: params.workflowId,
|
|
||||||
verified,
|
|
||||||
errorCount: validation.errors.length,
|
|
||||||
warningCount: validation.warnings.length,
|
|
||||||
assertionFailures: assertionResults.failures.length,
|
|
||||||
})
|
|
||||||
|
|
||||||
return {
|
|
||||||
success: true,
|
|
||||||
verified,
|
|
||||||
snapshotHash,
|
|
||||||
validation: {
|
|
||||||
valid: validation.valid,
|
|
||||||
errors: validation.errors,
|
|
||||||
warnings: validation.warnings,
|
|
||||||
},
|
|
||||||
assertions: assertionResults.checks,
|
|
||||||
failures: assertionResults.failures,
|
|
||||||
}
|
|
||||||
},
|
|
||||||
}
|
|
||||||
@@ -5,7 +5,6 @@ import {
  type GenerateContentConfig,
  type GenerateContentResponse,
  type GoogleGenAI,
- type Interactions,
  type Part,
  type Schema,
  type ThinkingConfig,
@@ -28,7 +27,6 @@ import {
import type { FunctionCallResponse, ProviderRequest, ProviderResponse } from '@/providers/types'
import {
  calculateCost,
- isDeepResearchModel,
  prepareToolExecution,
  prepareToolsWithUsageControl,
} from '@/providers/utils'
@@ -383,468 +381,6 @@ export interface GeminiExecutionConfig {
|
|||||||
providerType: GeminiProviderType
|
providerType: GeminiProviderType
|
||||||
}
|
}
|
||||||
|
|
||||||
const DEEP_RESEARCH_POLL_INTERVAL_MS = 10_000
|
|
||||||
const DEEP_RESEARCH_MAX_DURATION_MS = 60 * 60 * 1000
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Sleeps for the specified number of milliseconds
|
|
||||||
*/
|
|
||||||
function sleep(ms: number): Promise<void> {
|
|
||||||
return new Promise((resolve) => setTimeout(resolve, ms))
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Collapses a ProviderRequest into a single input string and optional system instruction
|
|
||||||
* for the Interactions API, which takes a flat input rather than a messages array.
|
|
||||||
*
|
|
||||||
* Deep research is single-turn only — it takes one research query and returns a report.
|
|
||||||
* Memory/conversation history is hidden in the UI for deep research models, so only
|
|
||||||
* the last user message is used as input. System messages are passed via system_instruction.
|
|
||||||
*/
|
|
||||||
function collapseMessagesToInput(request: ProviderRequest): {
|
|
||||||
input: string
|
|
||||||
systemInstruction: string | undefined
|
|
||||||
} {
|
|
||||||
const systemParts: string[] = []
|
|
||||||
const userParts: string[] = []
|
|
||||||
|
|
||||||
if (request.systemPrompt) {
|
|
||||||
systemParts.push(request.systemPrompt)
|
|
||||||
}
|
|
||||||
|
|
||||||
if (request.messages) {
|
|
||||||
for (const msg of request.messages) {
|
|
||||||
if (msg.role === 'system' && msg.content) {
|
|
||||||
systemParts.push(msg.content)
|
|
||||||
} else if (msg.role === 'user' && msg.content) {
|
|
||||||
userParts.push(msg.content)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return {
|
|
||||||
input:
|
|
||||||
userParts.length > 0
|
|
||||||
? userParts[userParts.length - 1]
|
|
||||||
: 'Please conduct research on the provided topic.',
|
|
||||||
systemInstruction: systemParts.length > 0 ? systemParts.join('\n\n') : undefined,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Extracts text content from a completed interaction's outputs array.
|
|
||||||
* The outputs array can contain text, thought, google_search_result, and other types.
|
|
||||||
* We concatenate all text outputs to get the full research report.
|
|
||||||
*/
|
|
||||||
function extractTextFromInteractionOutputs(outputs: Interactions.Interaction['outputs']): string {
|
|
||||||
if (!outputs || outputs.length === 0) return ''
|
|
||||||
|
|
||||||
const textParts: string[] = []
|
|
||||||
for (const output of outputs) {
|
|
||||||
if (output.type === 'text') {
|
|
||||||
const text = (output as Interactions.TextContent).text
|
|
||||||
if (text) textParts.push(text)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return textParts.join('\n\n')
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Extracts token usage from an Interaction's Usage object.
|
|
||||||
* The Interactions API provides total_input_tokens, total_output_tokens, total_tokens,
|
|
||||||
* and total_reasoning_tokens (for thinking models).
|
|
||||||
*
|
|
||||||
* Also handles the raw API field name total_thought_tokens which the SDK may
|
|
||||||
* map to total_reasoning_tokens.
|
|
||||||
*/
|
|
||||||
function extractInteractionUsage(usage: Interactions.Usage | undefined): {
|
|
||||||
inputTokens: number
|
|
||||||
outputTokens: number
|
|
||||||
reasoningTokens: number
|
|
||||||
totalTokens: number
|
|
||||||
} {
|
|
||||||
if (!usage) {
|
|
||||||
return { inputTokens: 0, outputTokens: 0, reasoningTokens: 0, totalTokens: 0 }
|
|
||||||
}
|
|
||||||
|
|
||||||
const usageLogger = createLogger('DeepResearchUsage')
|
|
||||||
usageLogger.info('Raw interaction usage', { usage: JSON.stringify(usage) })
|
|
||||||
|
|
||||||
const inputTokens = usage.total_input_tokens ?? 0
|
|
||||||
const outputTokens = usage.total_output_tokens ?? 0
|
|
||||||
const reasoningTokens =
|
|
||||||
usage.total_reasoning_tokens ??
|
|
||||||
((usage as Record<string, unknown>).total_thought_tokens as number) ??
|
|
||||||
0
|
|
||||||
const totalTokens = usage.total_tokens ?? inputTokens + outputTokens
|
|
||||||
|
|
||||||
return { inputTokens, outputTokens, reasoningTokens, totalTokens }
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Builds a standard ProviderResponse from a completed deep research interaction.
|
|
||||||
*/
|
|
||||||
function buildDeepResearchResponse(
|
|
||||||
content: string,
|
|
||||||
model: string,
|
|
||||||
usage: {
|
|
||||||
inputTokens: number
|
|
||||||
outputTokens: number
|
|
||||||
reasoningTokens: number
|
|
||||||
totalTokens: number
|
|
||||||
},
|
|
||||||
providerStartTime: number,
|
|
||||||
providerStartTimeISO: string,
|
|
||||||
interactionId?: string
|
|
||||||
): ProviderResponse {
|
|
||||||
const providerEndTime = Date.now()
|
|
||||||
const duration = providerEndTime - providerStartTime
|
|
||||||
|
|
||||||
return {
|
|
||||||
content,
|
|
||||||
model,
|
|
||||||
tokens: {
|
|
||||||
input: usage.inputTokens,
|
|
||||||
output: usage.outputTokens,
|
|
||||||
total: usage.totalTokens,
|
|
||||||
},
|
|
||||||
timing: {
|
|
||||||
startTime: providerStartTimeISO,
|
|
||||||
endTime: new Date(providerEndTime).toISOString(),
|
|
||||||
duration,
|
|
||||||
modelTime: duration,
|
|
||||||
toolsTime: 0,
|
|
||||||
firstResponseTime: duration,
|
|
||||||
iterations: 1,
|
|
||||||
timeSegments: [
|
|
||||||
{
|
|
||||||
type: 'model',
|
|
||||||
name: 'Deep research',
|
|
||||||
startTime: providerStartTime,
|
|
||||||
endTime: providerEndTime,
|
|
||||||
duration,
|
|
||||||
},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
cost: calculateCost(model, usage.inputTokens, usage.outputTokens),
|
|
||||||
interactionId,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Creates a ReadableStream from a deep research streaming interaction.
|
|
||||||
*
|
|
||||||
* Deep research streaming returns InteractionSSEEvent chunks including:
|
|
||||||
* - interaction.start: initial interaction with ID
|
|
||||||
* - content.delta: incremental text and thought_summary updates
|
|
||||||
* - content.start / content.stop: output boundaries
|
|
||||||
* - interaction.complete: final event (outputs is undefined in streaming; must reconstruct)
|
|
||||||
* - error: error events
|
|
||||||
*
|
|
||||||
* We stream text deltas to the client and track usage from the interaction.complete event.
|
|
||||||
*/
|
|
||||||
function createDeepResearchStream(
|
|
||||||
stream: AsyncIterable<Interactions.InteractionSSEEvent>,
|
|
||||||
onComplete?: (
|
|
||||||
content: string,
|
|
||||||
usage: {
|
|
||||||
inputTokens: number
|
|
||||||
outputTokens: number
|
|
||||||
reasoningTokens: number
|
|
||||||
totalTokens: number
|
|
||||||
},
|
|
||||||
interactionId?: string
|
|
||||||
) => void
|
|
||||||
): ReadableStream<Uint8Array> {
|
|
||||||
const streamLogger = createLogger('DeepResearchStream')
|
|
||||||
let fullContent = ''
|
|
||||||
let completionUsage = { inputTokens: 0, outputTokens: 0, reasoningTokens: 0, totalTokens: 0 }
|
|
||||||
let completedInteractionId: string | undefined
|
|
||||||
|
|
||||||
return new ReadableStream({
|
|
||||||
async start(controller) {
|
|
||||||
try {
|
|
||||||
for await (const event of stream) {
|
|
||||||
if (event.event_type === 'content.delta') {
|
|
||||||
const delta = (event as Interactions.ContentDelta).delta
|
|
||||||
if (delta?.type === 'text' && 'text' in delta && delta.text) {
|
|
||||||
fullContent += delta.text
|
|
||||||
controller.enqueue(new TextEncoder().encode(delta.text))
|
|
||||||
}
|
|
||||||
} else if (event.event_type === 'interaction.complete') {
|
|
||||||
const interaction = (event as Interactions.InteractionEvent).interaction
|
|
||||||
if (interaction?.usage) {
|
|
||||||
completionUsage = extractInteractionUsage(interaction.usage)
|
|
||||||
}
|
|
||||||
completedInteractionId = interaction?.id
|
|
||||||
} else if (event.event_type === 'interaction.start') {
|
|
||||||
const interaction = (event as Interactions.InteractionEvent).interaction
|
|
||||||
if (interaction?.id) {
|
|
||||||
completedInteractionId = interaction.id
|
|
||||||
}
|
|
||||||
} else if (event.event_type === 'error') {
|
|
||||||
const errorEvent = event as { error?: { code?: string; message?: string } }
|
|
||||||
const message = errorEvent.error?.message ?? 'Unknown deep research stream error'
|
|
||||||
streamLogger.error('Deep research stream error', {
|
|
||||||
code: errorEvent.error?.code,
|
|
||||||
message,
|
|
||||||
})
|
|
||||||
controller.error(new Error(message))
|
|
||||||
return
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
onComplete?.(fullContent, completionUsage, completedInteractionId)
|
|
||||||
controller.close()
|
|
||||||
} catch (error) {
|
|
||||||
streamLogger.error('Error reading deep research stream', {
|
|
||||||
error: error instanceof Error ? error.message : String(error),
|
|
||||||
})
|
|
||||||
controller.error(error)
|
|
||||||
}
|
|
||||||
},
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Executes a deep research request using the Interactions API.
|
|
||||||
*
|
|
||||||
* Deep research uses the Interactions API ({@link https://ai.google.dev/api/interactions-api}),
|
|
||||||
* a completely different surface from generateContent. It creates a background interaction
|
|
||||||
* that performs comprehensive research (up to 60 minutes).
|
|
||||||
*
|
|
||||||
* Supports both streaming and non-streaming modes:
|
|
||||||
* - Streaming: returns a StreamingExecution with a ReadableStream of text deltas
|
|
||||||
* - Non-streaming: polls until completion and returns a ProviderResponse
|
|
||||||
*
|
|
||||||
* Deep research does NOT support custom function calling tools, MCP servers,
|
|
||||||
* or structured output (response_format). These are gracefully ignored.
|
|
||||||
*/
|
|
||||||
export async function executeDeepResearchRequest(
|
|
||||||
config: GeminiExecutionConfig
|
|
||||||
): Promise<ProviderResponse | StreamingExecution> {
|
|
||||||
const { ai, model, request, providerType } = config
|
|
||||||
const logger = createLogger(providerType === 'google' ? 'GoogleProvider' : 'VertexProvider')
|
|
||||||
|
|
||||||
logger.info('Preparing deep research request', {
|
|
||||||
model,
|
|
||||||
hasSystemPrompt: !!request.systemPrompt,
|
|
||||||
hasMessages: !!request.messages?.length,
|
|
||||||
streaming: !!request.stream,
|
|
||||||
hasPreviousInteractionId: !!request.previousInteractionId,
|
|
||||||
})
|
|
||||||
|
|
||||||
if (request.tools?.length) {
|
|
||||||
logger.warn('Deep research does not support custom tools — ignoring tools parameter')
|
|
||||||
}
|
|
||||||
if (request.responseFormat) {
|
|
||||||
logger.warn(
|
|
||||||
'Deep research does not support structured output — ignoring responseFormat parameter'
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
const providerStartTime = Date.now()
|
|
||||||
const providerStartTimeISO = new Date(providerStartTime).toISOString()
|
|
||||||
|
|
||||||
try {
|
|
||||||
const { input, systemInstruction } = collapseMessagesToInput(request)
|
|
||||||
|
|
||||||
// Deep research requires background=true and store=true (store defaults to true,
|
|
||||||
// but we set it explicitly per API requirements)
|
|
||||||
const baseParams = {
|
|
||||||
agent: model as Interactions.CreateAgentInteractionParamsNonStreaming['agent'],
|
|
||||||
input,
|
|
||||||
background: true,
|
|
||||||
store: true,
|
|
||||||
...(systemInstruction && { system_instruction: systemInstruction }),
|
|
||||||
...(request.previousInteractionId && {
|
|
||||||
previous_interaction_id: request.previousInteractionId,
|
|
||||||
}),
|
|
||||||
agent_config: {
|
|
||||||
type: 'deep-research' as const,
|
|
||||||
thinking_summaries: 'auto' as const,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
logger.info('Creating deep research interaction', {
|
|
||||||
inputLength: input.length,
|
|
||||||
hasSystemInstruction: !!systemInstruction,
|
|
||||||
streaming: !!request.stream,
|
|
||||||
})
|
|
||||||
|
|
||||||
// Streaming mode: create a streaming interaction and return a StreamingExecution
|
|
||||||
if (request.stream) {
|
|
||||||
const streamParams: Interactions.CreateAgentInteractionParamsStreaming = {
|
|
||||||
...baseParams,
|
|
||||||
stream: true,
|
|
||||||
}
|
|
||||||
|
|
||||||
const streamResponse = await ai.interactions.create(streamParams)
|
|
||||||
const firstResponseTime = Date.now() - providerStartTime
|
|
||||||
|
|
||||||
const streamingResult: StreamingExecution = {
|
|
||||||
stream: undefined as unknown as ReadableStream<Uint8Array>,
|
|
||||||
execution: {
|
|
||||||
success: true,
|
|
||||||
output: {
|
|
||||||
content: '',
|
|
||||||
model,
|
|
||||||
tokens: { input: 0, output: 0, total: 0 },
|
|
||||||
providerTiming: {
|
|
||||||
startTime: providerStartTimeISO,
|
|
||||||
endTime: new Date().toISOString(),
|
|
||||||
duration: Date.now() - providerStartTime,
|
|
||||||
modelTime: firstResponseTime,
|
|
||||||
toolsTime: 0,
|
|
||||||
firstResponseTime,
|
|
||||||
iterations: 1,
|
|
||||||
timeSegments: [
|
|
||||||
{
|
|
||||||
type: 'model',
|
|
||||||
name: 'Deep research (streaming)',
|
|
||||||
startTime: providerStartTime,
|
|
||||||
endTime: providerStartTime + firstResponseTime,
|
|
||||||
duration: firstResponseTime,
|
|
||||||
},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
cost: {
|
|
||||||
input: 0,
|
|
||||||
output: 0,
|
|
||||||
total: 0,
|
|
||||||
pricing: { input: 0, output: 0, updatedAt: new Date().toISOString() },
|
|
||||||
},
|
|
||||||
},
|
|
||||||
logs: [],
|
|
||||||
metadata: {
|
|
||||||
startTime: providerStartTimeISO,
|
|
||||||
endTime: new Date().toISOString(),
|
|
||||||
duration: Date.now() - providerStartTime,
|
|
||||||
},
|
|
||||||
isStreaming: true,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
streamingResult.stream = createDeepResearchStream(
|
|
||||||
streamResponse,
|
|
||||||
(content, usage, streamInteractionId) => {
|
|
||||||
streamingResult.execution.output.content = content
|
|
||||||
streamingResult.execution.output.tokens = {
|
|
||||||
input: usage.inputTokens,
|
|
||||||
output: usage.outputTokens,
|
|
||||||
total: usage.totalTokens,
|
|
||||||
}
|
|
||||||
streamingResult.execution.output.interactionId = streamInteractionId
|
|
||||||
|
|
||||||
const cost = calculateCost(model, usage.inputTokens, usage.outputTokens)
|
|
||||||
streamingResult.execution.output.cost = cost
|
|
||||||
|
|
||||||
const streamEndTime = Date.now()
|
|
||||||
if (streamingResult.execution.output.providerTiming) {
|
|
||||||
streamingResult.execution.output.providerTiming.endTime = new Date(
|
|
||||||
streamEndTime
|
|
||||||
).toISOString()
|
|
||||||
streamingResult.execution.output.providerTiming.duration =
|
|
||||||
streamEndTime - providerStartTime
|
|
||||||
const segments = streamingResult.execution.output.providerTiming.timeSegments
|
|
||||||
if (segments?.[0]) {
|
|
||||||
segments[0].endTime = streamEndTime
|
|
||||||
segments[0].duration = streamEndTime - providerStartTime
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
return streamingResult
|
|
||||||
}
|
|
||||||
|
|
||||||
// Non-streaming mode: create and poll
|
|
||||||
const createParams: Interactions.CreateAgentInteractionParamsNonStreaming = {
|
|
||||||
...baseParams,
|
|
||||||
stream: false,
|
|
||||||
}
|
|
||||||
|
|
||||||
const interaction = await ai.interactions.create(createParams)
|
|
||||||
const interactionId = interaction.id
|
|
||||||
|
|
||||||
logger.info('Deep research interaction created', { interactionId, status: interaction.status })
|
|
||||||
|
|
||||||
// Poll until a terminal status
|
|
||||||
const pollStartTime = Date.now()
|
|
||||||
let result: Interactions.Interaction = interaction
|
|
||||||
|
|
||||||
while (Date.now() - pollStartTime < DEEP_RESEARCH_MAX_DURATION_MS) {
|
|
||||||
if (result.status === 'completed') {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
|
|
||||||
if (result.status === 'failed') {
|
|
||||||
throw new Error(`Deep research interaction failed: ${interactionId}`)
|
|
||||||
}
|
|
||||||
|
|
||||||
if (result.status === 'cancelled') {
|
|
||||||
throw new Error(`Deep research interaction was cancelled: ${interactionId}`)
|
|
||||||
}
|
|
||||||
|
|
||||||
logger.info('Deep research in progress, polling...', {
|
|
||||||
interactionId,
|
|
||||||
status: result.status,
|
|
||||||
elapsedMs: Date.now() - pollStartTime,
|
|
||||||
})
|
|
||||||
|
|
||||||
await sleep(DEEP_RESEARCH_POLL_INTERVAL_MS)
|
|
||||||
result = await ai.interactions.get(interactionId)
|
|
||||||
}
|
|
||||||
|
|
||||||
if (result.status !== 'completed') {
|
|
||||||
throw new Error(
|
|
||||||
`Deep research timed out after ${DEEP_RESEARCH_MAX_DURATION_MS / 1000}s (status: ${result.status})`
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
const content = extractTextFromInteractionOutputs(result.outputs)
|
|
||||||
const usage = extractInteractionUsage(result.usage)
|
|
||||||
|
|
||||||
logger.info('Deep research completed', {
|
|
||||||
interactionId,
|
|
||||||
contentLength: content.length,
|
|
||||||
inputTokens: usage.inputTokens,
|
|
||||||
outputTokens: usage.outputTokens,
|
|
||||||
reasoningTokens: usage.reasoningTokens,
|
|
||||||
totalTokens: usage.totalTokens,
|
|
||||||
durationMs: Date.now() - providerStartTime,
|
|
||||||
})
|
|
||||||
|
|
||||||
return buildDeepResearchResponse(
|
|
||||||
content,
|
|
||||||
model,
|
|
||||||
usage,
|
|
||||||
providerStartTime,
|
|
||||||
providerStartTimeISO,
|
|
||||||
interactionId
|
|
||||||
)
|
|
||||||
} catch (error) {
|
|
||||||
const providerEndTime = Date.now()
|
|
||||||
const duration = providerEndTime - providerStartTime
|
|
||||||
|
|
||||||
logger.error('Error in deep research request:', {
|
|
||||||
error: error instanceof Error ? error.message : String(error),
|
|
||||||
stack: error instanceof Error ? error.stack : undefined,
|
|
||||||
})
|
|
||||||
|
|
||||||
const enhancedError = error instanceof Error ? error : new Error(String(error))
|
|
||||||
Object.assign(enhancedError, {
|
|
||||||
timing: {
|
|
||||||
startTime: providerStartTimeISO,
|
|
||||||
endTime: new Date(providerEndTime).toISOString(),
|
|
||||||
duration,
|
|
||||||
},
|
|
||||||
})
|
|
||||||
throw enhancedError
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Executes a request using the Gemini API
|
* Executes a request using the Gemini API
|
||||||
*
|
*
|
||||||
@@ -855,12 +391,6 @@ export async function executeGeminiRequest(
|
|||||||
config: GeminiExecutionConfig
|
config: GeminiExecutionConfig
|
||||||
): Promise<ProviderResponse | StreamingExecution> {
|
): Promise<ProviderResponse | StreamingExecution> {
|
||||||
const { ai, model, request, providerType } = config
|
const { ai, model, request, providerType } = config
|
||||||
|
|
||||||
// Route deep research models to the interactions API
|
|
||||||
if (isDeepResearchModel(model)) {
|
|
||||||
return executeDeepResearchRequest(config)
|
|
||||||
}
|
|
||||||
|
|
||||||
const logger = createLogger(providerType === 'google' ? 'GoogleProvider' : 'VertexProvider')
|
const logger = createLogger(providerType === 'google' ? 'GoogleProvider' : 'VertexProvider')
|
||||||
|
|
||||||
logger.info(`Preparing ${providerType} Gemini request`, {
|
logger.info(`Preparing ${providerType} Gemini request`, {
|
||||||
|
|||||||
@@ -46,9 +46,6 @@ export interface ModelCapabilities {
|
|||||||
levels: string[]
|
levels: string[]
|
||||||
default?: string
|
default?: string
|
||||||
}
|
}
|
||||||
deepResearch?: boolean
|
|
||||||
/** Whether this model supports conversation memory. Defaults to true if omitted. */
|
|
||||||
memory?: boolean
|
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface ModelDefinition {
|
export interface ModelDefinition {
|
||||||
@@ -828,7 +825,7 @@ export const PROVIDER_DEFINITIONS: Record<string, ProviderDefinition> = {
|
|||||||
name: 'Google',
|
name: 'Google',
|
||||||
description: "Google's Gemini models",
|
description: "Google's Gemini models",
|
||||||
defaultModel: 'gemini-2.5-pro',
|
defaultModel: 'gemini-2.5-pro',
|
||||||
modelPatterns: [/^gemini/, /^deep-research/],
|
modelPatterns: [/^gemini/],
|
||||||
capabilities: {
|
capabilities: {
|
||||||
toolUsageControl: true,
|
toolUsageControl: true,
|
||||||
},
|
},
|
||||||
@@ -931,19 +928,6 @@ export const PROVIDER_DEFINITIONS: Record<string, ProviderDefinition> = {
|
|||||||
},
|
},
|
||||||
contextWindow: 1000000,
|
contextWindow: 1000000,
|
||||||
},
|
},
|
||||||
{
|
|
||||||
id: 'deep-research-pro-preview-12-2025',
|
|
||||||
pricing: {
|
|
||||||
input: 2.0,
|
|
||||||
output: 2.0,
|
|
||||||
updatedAt: '2026-02-10',
|
|
||||||
},
|
|
||||||
capabilities: {
|
|
||||||
deepResearch: true,
|
|
||||||
memory: false,
|
|
||||||
},
|
|
||||||
contextWindow: 1000000,
|
|
||||||
},
|
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
vertex: {
|
vertex: {
|
||||||
@@ -1054,19 +1038,6 @@ export const PROVIDER_DEFINITIONS: Record<string, ProviderDefinition> = {
|
|||||||
},
|
},
|
||||||
contextWindow: 1000000,
|
contextWindow: 1000000,
|
||||||
},
|
},
|
||||||
{
|
|
||||||
id: 'vertex/deep-research-pro-preview-12-2025',
|
|
||||||
pricing: {
|
|
||||||
input: 2.0,
|
|
||||||
output: 2.0,
|
|
||||||
updatedAt: '2026-02-10',
|
|
||||||
},
|
|
||||||
capabilities: {
|
|
||||||
deepResearch: true,
|
|
||||||
memory: false,
|
|
||||||
},
|
|
||||||
contextWindow: 1000000,
|
|
||||||
},
|
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
deepseek: {
|
deepseek: {
|
||||||
@@ -2509,37 +2480,6 @@ export function getThinkingLevelsForModel(modelId: string): string[] | null {
|
|||||||
return capability?.levels ?? null
|
return capability?.levels ?? null
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Get all models that support deep research capability
|
|
||||||
*/
|
|
||||||
export function getModelsWithDeepResearch(): string[] {
|
|
||||||
const models: string[] = []
|
|
||||||
for (const provider of Object.values(PROVIDER_DEFINITIONS)) {
|
|
||||||
for (const model of provider.models) {
|
|
||||||
if (model.capabilities.deepResearch) {
|
|
||||||
models.push(model.id)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return models
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get all models that explicitly disable memory support (memory: false).
|
|
||||||
* Models without this capability default to supporting memory.
|
|
||||||
*/
|
|
||||||
export function getModelsWithoutMemory(): string[] {
|
|
||||||
const models: string[] = []
|
|
||||||
for (const provider of Object.values(PROVIDER_DEFINITIONS)) {
|
|
||||||
for (const model of provider.models) {
|
|
||||||
if (model.capabilities.memory === false) {
|
|
||||||
models.push(model.id)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return models
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Get the max output tokens for a specific model.
|
* Get the max output tokens for a specific model.
|
||||||
*
|
*
|
||||||
|
|||||||
@@ -95,8 +95,6 @@ export interface ProviderResponse {
|
|||||||
total: number
|
total: number
|
||||||
pricing: ModelPricing
|
pricing: ModelPricing
|
||||||
}
|
}
|
||||||
/** Interaction ID returned by the Interactions API (used for multi-turn deep research) */
|
|
||||||
interactionId?: string
|
|
||||||
}
|
}
|
||||||
|
|
||||||
export type ToolUsageControl = 'auto' | 'force' | 'none'
|
export type ToolUsageControl = 'auto' | 'force' | 'none'
|
||||||
@@ -171,8 +169,6 @@ export interface ProviderRequest {
|
|||||||
verbosity?: string
|
verbosity?: string
|
||||||
thinkingLevel?: string
|
thinkingLevel?: string
|
||||||
isDeployedContext?: boolean
|
isDeployedContext?: boolean
|
||||||
/** Previous interaction ID for multi-turn Interactions API requests (deep research follow-ups) */
|
|
||||||
previousInteractionId?: string
|
|
||||||
}
|
}
|
||||||
|
|
||||||
export const providers: Record<string, ProviderConfig> = {}
|
export const providers: Record<string, ProviderConfig> = {}
|
||||||
|
|||||||
@@ -12,8 +12,6 @@ import {
|
|||||||
getMaxOutputTokensForModel as getMaxOutputTokensForModelFromDefinitions,
|
getMaxOutputTokensForModel as getMaxOutputTokensForModelFromDefinitions,
|
||||||
getMaxTemperature as getMaxTempFromDefinitions,
|
getMaxTemperature as getMaxTempFromDefinitions,
|
||||||
getModelPricing as getModelPricingFromDefinitions,
|
getModelPricing as getModelPricingFromDefinitions,
|
||||||
getModelsWithDeepResearch,
|
|
||||||
getModelsWithoutMemory,
|
|
||||||
getModelsWithReasoningEffort,
|
getModelsWithReasoningEffort,
|
||||||
getModelsWithTemperatureSupport,
|
getModelsWithTemperatureSupport,
|
||||||
getModelsWithTempRange01,
|
getModelsWithTempRange01,
|
||||||
@@ -955,8 +953,6 @@ export const MODELS_WITH_TEMPERATURE_SUPPORT = getModelsWithTemperatureSupport()
|
|||||||
export const MODELS_WITH_REASONING_EFFORT = getModelsWithReasoningEffort()
|
export const MODELS_WITH_REASONING_EFFORT = getModelsWithReasoningEffort()
|
||||||
export const MODELS_WITH_VERBOSITY = getModelsWithVerbosity()
|
export const MODELS_WITH_VERBOSITY = getModelsWithVerbosity()
|
||||||
export const MODELS_WITH_THINKING = getModelsWithThinking()
|
export const MODELS_WITH_THINKING = getModelsWithThinking()
|
||||||
export const MODELS_WITH_DEEP_RESEARCH = getModelsWithDeepResearch()
|
|
||||||
export const MODELS_WITHOUT_MEMORY = getModelsWithoutMemory()
|
|
||||||
export const PROVIDERS_WITH_TOOL_USAGE_CONTROL = getProvidersWithToolUsageControl()
|
export const PROVIDERS_WITH_TOOL_USAGE_CONTROL = getProvidersWithToolUsageControl()
|
||||||
|
|
||||||
export function supportsTemperature(model: string): boolean {
|
export function supportsTemperature(model: string): boolean {
|
||||||
@@ -975,10 +971,6 @@ export function supportsThinking(model: string): boolean {
|
|||||||
return MODELS_WITH_THINKING.includes(model.toLowerCase())
|
return MODELS_WITH_THINKING.includes(model.toLowerCase())
|
||||||
}
|
}
|
||||||
|
|
||||||
export function isDeepResearchModel(model: string): boolean {
|
|
||||||
return MODELS_WITH_DEEP_RESEARCH.includes(model.toLowerCase())
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Get the maximum temperature value for a model
|
* Get the maximum temperature value for a model
|
||||||
* @returns Maximum temperature value (1 or 2) or undefined if temperature not supported
|
* @returns Maximum temperature value (1 or 2) or undefined if temperature not supported
|
||||||
|
|||||||
Binary file not shown.
|
Before Width: | Height: | Size: 78 KiB After Width: | Height: | Size: 45 KiB |
Binary file not shown.
|
Before Width: | Height: | Size: 58 KiB After Width: | Height: | Size: 58 KiB |
@@ -18,6 +18,7 @@ import {
|
|||||||
import { flushStreamingUpdates, stopStreamingUpdates } from '@/lib/copilot/client-sse/handlers'
|
import { flushStreamingUpdates, stopStreamingUpdates } from '@/lib/copilot/client-sse/handlers'
|
||||||
import type { ClientContentBlock, ClientStreamingContext } from '@/lib/copilot/client-sse/types'
|
import type { ClientContentBlock, ClientStreamingContext } from '@/lib/copilot/client-sse/types'
|
||||||
import {
|
import {
|
||||||
|
COPILOT_AUTO_ALLOWED_TOOLS_API_PATH,
|
||||||
COPILOT_CHAT_API_PATH,
|
COPILOT_CHAT_API_PATH,
|
||||||
COPILOT_CHAT_STREAM_API_PATH,
|
COPILOT_CHAT_STREAM_API_PATH,
|
||||||
COPILOT_CHECKPOINTS_API_PATH,
|
COPILOT_CHECKPOINTS_API_PATH,
|
||||||
@@ -83,14 +84,6 @@ function isPageUnloading(): boolean {
|
|||||||
return _isPageUnloading
|
return _isPageUnloading
|
||||||
}
|
}
|
||||||
|
|
||||||
function isWorkflowEditToolCall(name?: string, params?: Record<string, unknown>): boolean {
|
|
||||||
if (name !== 'workflow_change') return false
|
|
||||||
|
|
||||||
const mode = typeof params?.mode === 'string' ? params.mode.toLowerCase() : ''
|
|
||||||
if (mode === 'apply') return true
|
|
||||||
return typeof params?.proposalId === 'string' && params.proposalId.length > 0
|
|
||||||
}
|
|
||||||
|
|
||||||
function readActiveStreamFromStorage(): CopilotStreamInfo | null {
|
function readActiveStreamFromStorage(): CopilotStreamInfo | null {
|
||||||
if (typeof window === 'undefined') return null
|
if (typeof window === 'undefined') return null
|
||||||
try {
|
try {
|
||||||
@@ -147,6 +140,41 @@ function updateActiveStreamEventId(
|
|||||||
writeActiveStreamToStorage(next)
|
writeActiveStreamToStorage(next)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const AUTO_ALLOWED_TOOLS_STORAGE_KEY = 'copilot_auto_allowed_tools'
|
||||||
|
|
||||||
|
function readAutoAllowedToolsFromStorage(): string[] | null {
|
||||||
|
if (typeof window === 'undefined') return null
|
||||||
|
try {
|
||||||
|
const raw = window.localStorage.getItem(AUTO_ALLOWED_TOOLS_STORAGE_KEY)
|
||||||
|
if (!raw) return null
|
||||||
|
const parsed = JSON.parse(raw)
|
||||||
|
if (!Array.isArray(parsed)) return null
|
||||||
|
return parsed.filter((item): item is string => typeof item === 'string')
|
||||||
|
} catch (error) {
|
||||||
|
logger.warn('[AutoAllowedTools] Failed to read local cache', {
|
||||||
|
error: error instanceof Error ? error.message : String(error),
|
||||||
|
})
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function writeAutoAllowedToolsToStorage(tools: string[]): void {
|
||||||
|
if (typeof window === 'undefined') return
|
||||||
|
try {
|
||||||
|
window.localStorage.setItem(AUTO_ALLOWED_TOOLS_STORAGE_KEY, JSON.stringify(tools))
|
||||||
|
} catch (error) {
|
||||||
|
logger.warn('[AutoAllowedTools] Failed to write local cache', {
|
||||||
|
error: error instanceof Error ? error.message : String(error),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function isToolAutoAllowedByList(toolId: string, autoAllowedTools: string[]): boolean {
|
||||||
|
if (!toolId) return false
|
||||||
|
const normalizedTarget = toolId.trim()
|
||||||
|
return autoAllowedTools.some((allowed) => allowed?.trim() === normalizedTarget)
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Clear any lingering diff preview from a previous session.
|
* Clear any lingering diff preview from a previous session.
|
||||||
* Called lazily when the store is first activated (setWorkflowId).
|
* Called lazily when the store is first activated (setWorkflowId).
|
||||||
@@ -282,50 +310,6 @@ function parseModelKey(compositeKey: string): { provider: string; modelId: strin
|
|||||||
return { provider: compositeKey.slice(0, slashIdx), modelId: compositeKey.slice(slashIdx + 1) }
|
return { provider: compositeKey.slice(0, slashIdx), modelId: compositeKey.slice(slashIdx + 1) }
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Convert legacy/variant Claude IDs into the canonical ID shape used by the model catalog.
|
|
||||||
*
|
|
||||||
* Examples:
|
|
||||||
* - claude-4.5-opus -> claude-opus-4-5
|
|
||||||
* - claude-opus-4.6 -> claude-opus-4-6
|
|
||||||
* - anthropic.claude-opus-4-5-20251101-v1:0 -> claude-opus-4-5 (match key only)
|
|
||||||
*/
|
|
||||||
function canonicalizeModelMatchKey(modelId: string): string {
|
|
||||||
if (!modelId) return modelId
|
|
||||||
const normalized = modelId.trim().toLowerCase()
|
|
||||||
|
|
||||||
const toCanonicalClaude = (tier: string, version: string): string => {
|
|
||||||
const normalizedVersion = version.replace(/\./g, '-')
|
|
||||||
return `claude-${tier}-${normalizedVersion}`
|
|
||||||
}
|
|
||||||
|
|
||||||
const tierFirstExact = normalized.match(/^claude-(opus|sonnet|haiku)-(\d+(?:[.-]\d+)?)$/)
|
|
||||||
if (tierFirstExact) {
|
|
||||||
const [, tier, version] = tierFirstExact
|
|
||||||
return toCanonicalClaude(tier, version)
|
|
||||||
}
|
|
||||||
|
|
||||||
const versionFirstExact = normalized.match(/^claude-(\d+(?:[.-]\d+)?)-(opus|sonnet|haiku)$/)
|
|
||||||
if (versionFirstExact) {
|
|
||||||
const [, version, tier] = versionFirstExact
|
|
||||||
return toCanonicalClaude(tier, version)
|
|
||||||
}
|
|
||||||
|
|
||||||
const tierFirstEmbedded = normalized.match(/claude-(opus|sonnet|haiku)-(\d+(?:[.-]\d+)?)/)
|
|
||||||
if (tierFirstEmbedded) {
|
|
||||||
const [, tier, version] = tierFirstEmbedded
|
|
||||||
return toCanonicalClaude(tier, version)
|
|
||||||
}
|
|
||||||
|
|
||||||
const versionFirstEmbedded = normalized.match(/claude-(\d+(?:[.-]\d+)?)-(opus|sonnet|haiku)/)
|
|
||||||
if (versionFirstEmbedded) {
|
|
||||||
const [, version, tier] = versionFirstEmbedded
|
|
||||||
return toCanonicalClaude(tier, version)
|
|
||||||
}
|
|
||||||
|
|
||||||
return normalized
|
|
||||||
}
|
|
||||||
|
|
||||||
const MODEL_PROVIDER_PRIORITY = [
|
const MODEL_PROVIDER_PRIORITY = [
|
||||||
'anthropic',
|
'anthropic',
|
||||||
'bedrock',
|
'bedrock',
|
||||||
@@ -366,23 +350,12 @@ function normalizeSelectedModelKey(selectedModel: string, models: AvailableModel
|
|||||||
|
|
||||||
const { provider, modelId } = parseModelKey(selectedModel)
|
const { provider, modelId } = parseModelKey(selectedModel)
|
||||||
const targetModelId = modelId || selectedModel
|
const targetModelId = modelId || selectedModel
|
||||||
const targetMatchKey = canonicalizeModelMatchKey(targetModelId)
|
|
||||||
|
|
||||||
const matches = models.filter((m) => {
|
const matches = models.filter((m) => m.id.endsWith(`/${targetModelId}`))
|
||||||
const candidateModelId = parseModelKey(m.id).modelId || m.id
|
|
||||||
const candidateMatchKey = canonicalizeModelMatchKey(candidateModelId)
|
|
||||||
return (
|
|
||||||
candidateModelId === targetModelId ||
|
|
||||||
m.id.endsWith(`/${targetModelId}`) ||
|
|
||||||
candidateMatchKey === targetMatchKey
|
|
||||||
)
|
|
||||||
})
|
|
||||||
if (matches.length === 0) return selectedModel
|
if (matches.length === 0) return selectedModel
|
||||||
|
|
||||||
if (provider) {
|
if (provider) {
|
||||||
const sameProvider = matches.find(
|
const sameProvider = matches.find((m) => m.provider === provider)
|
||||||
(m) => m.provider === provider || m.id.startsWith(`${provider}/`)
|
|
||||||
)
|
|
||||||
if (sameProvider) return sameProvider.id
|
if (sameProvider) return sameProvider.id
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -452,6 +425,11 @@ function prepareSendContext(
|
|||||||
.catch((err) => {
|
.catch((err) => {
|
||||||
logger.warn('[Copilot] Failed to load sensitive credential IDs', err)
|
logger.warn('[Copilot] Failed to load sensitive credential IDs', err)
|
||||||
})
|
})
|
||||||
|
get()
|
||||||
|
.loadAutoAllowedTools()
|
||||||
|
.catch((err) => {
|
||||||
|
logger.warn('[Copilot] Failed to load auto-allowed tools', err)
|
||||||
|
})
|
||||||
|
|
||||||
let newMessages: CopilotMessage[]
|
let newMessages: CopilotMessage[]
|
||||||
if (revertState) {
|
if (revertState) {
|
||||||
@@ -1004,6 +982,8 @@ async function resumeFromLiveStream(
|
|||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const cachedAutoAllowedTools = readAutoAllowedToolsFromStorage()
|
||||||
|
|
||||||
// Initial state (subset required for UI/streaming)
|
// Initial state (subset required for UI/streaming)
|
||||||
const initialState = {
|
const initialState = {
|
||||||
mode: 'build' as const,
|
mode: 'build' as const,
|
||||||
@@ -1038,6 +1018,8 @@ const initialState = {
|
|||||||
streamingPlanContent: '',
|
streamingPlanContent: '',
|
||||||
toolCallsById: {} as Record<string, CopilotToolCall>,
|
toolCallsById: {} as Record<string, CopilotToolCall>,
|
||||||
suppressAutoSelect: false,
|
suppressAutoSelect: false,
|
||||||
|
autoAllowedTools: cachedAutoAllowedTools ?? ([] as string[]),
|
||||||
|
autoAllowedToolsLoaded: cachedAutoAllowedTools !== null,
|
||||||
activeStream: null as CopilotStreamInfo | null,
|
activeStream: null as CopilotStreamInfo | null,
|
||||||
messageQueue: [] as import('./types').QueuedMessage[],
|
messageQueue: [] as import('./types').QueuedMessage[],
|
||||||
suppressAbortContinueOption: false,
|
suppressAbortContinueOption: false,
|
||||||
@@ -1076,6 +1058,8 @@ export const useCopilotStore = create<CopilotStore>()(
|
|||||||
agentPrefetch: get().agentPrefetch,
|
agentPrefetch: get().agentPrefetch,
|
||||||
availableModels: get().availableModels,
|
availableModels: get().availableModels,
|
||||||
isLoadingModels: get().isLoadingModels,
|
isLoadingModels: get().isLoadingModels,
|
||||||
|
autoAllowedTools: get().autoAllowedTools,
|
||||||
|
autoAllowedToolsLoaded: get().autoAllowedToolsLoaded,
|
||||||
})
|
})
|
||||||
},
|
},
|
||||||
|
|
||||||
@@ -1109,12 +1093,11 @@ export const useCopilotStore = create<CopilotStore>()(
|
|||||||
const chatConfig = chat.config ?? {}
|
const chatConfig = chat.config ?? {}
|
||||||
const chatMode = chatConfig.mode || get().mode
|
const chatMode = chatConfig.mode || get().mode
|
||||||
const chatModel = chatConfig.model || get().selectedModel
|
const chatModel = chatConfig.model || get().selectedModel
|
||||||
const normalizedChatModel = normalizeSelectedModelKey(chatModel, get().availableModels)
|
|
||||||
|
|
||||||
logger.debug('[Chat] Restoring chat config', {
|
logger.debug('[Chat] Restoring chat config', {
|
||||||
chatId: chat.id,
|
chatId: chat.id,
|
||||||
mode: chatMode,
|
mode: chatMode,
|
||||||
model: normalizedChatModel,
|
model: chatModel,
|
||||||
hasPlanArtifact: !!planArtifact,
|
hasPlanArtifact: !!planArtifact,
|
||||||
})
|
})
|
||||||
|
|
||||||
@@ -1136,7 +1119,7 @@ export const useCopilotStore = create<CopilotStore>()(
|
|||||||
showPlanTodos: false,
|
showPlanTodos: false,
|
||||||
streamingPlanContent: planArtifact,
|
streamingPlanContent: planArtifact,
|
||||||
mode: chatMode,
|
mode: chatMode,
|
||||||
selectedModel: normalizedChatModel as CopilotStore['selectedModel'],
|
selectedModel: chatModel as CopilotStore['selectedModel'],
|
||||||
suppressAutoSelect: false,
|
suppressAutoSelect: false,
|
||||||
})
|
})
|
||||||
|
|
||||||
@@ -1309,10 +1292,6 @@ export const useCopilotStore = create<CopilotStore>()(
|
|||||||
const refreshedConfig = updatedCurrentChat.config ?? {}
|
const refreshedConfig = updatedCurrentChat.config ?? {}
|
||||||
const refreshedMode = refreshedConfig.mode || get().mode
|
const refreshedMode = refreshedConfig.mode || get().mode
|
||||||
const refreshedModel = refreshedConfig.model || get().selectedModel
|
const refreshedModel = refreshedConfig.model || get().selectedModel
|
||||||
const normalizedRefreshedModel = normalizeSelectedModelKey(
|
|
||||||
refreshedModel,
|
|
||||||
get().availableModels
|
|
||||||
)
|
|
||||||
const toolCallsById = buildToolCallsById(normalizedMessages)
|
const toolCallsById = buildToolCallsById(normalizedMessages)
|
||||||
|
|
||||||
set({
|
set({
|
||||||
@@ -1321,7 +1300,7 @@ export const useCopilotStore = create<CopilotStore>()(
|
|||||||
toolCallsById,
|
toolCallsById,
|
||||||
streamingPlanContent: refreshedPlanArtifact,
|
streamingPlanContent: refreshedPlanArtifact,
|
||||||
mode: refreshedMode,
|
mode: refreshedMode,
|
||||||
selectedModel: normalizedRefreshedModel as CopilotStore['selectedModel'],
|
selectedModel: refreshedModel as CopilotStore['selectedModel'],
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
try {
|
try {
|
||||||
@@ -1341,15 +1320,11 @@ export const useCopilotStore = create<CopilotStore>()(
|
|||||||
const chatConfig = mostRecentChat.config ?? {}
|
const chatConfig = mostRecentChat.config ?? {}
|
||||||
const chatMode = chatConfig.mode || get().mode
|
const chatMode = chatConfig.mode || get().mode
|
||||||
const chatModel = chatConfig.model || get().selectedModel
|
const chatModel = chatConfig.model || get().selectedModel
|
||||||
const normalizedChatModel = normalizeSelectedModelKey(
|
|
||||||
chatModel,
|
|
||||||
get().availableModels
|
|
||||||
)
|
|
||||||
|
|
||||||
logger.info('[Chat] Auto-selecting most recent chat with config', {
|
logger.info('[Chat] Auto-selecting most recent chat with config', {
|
||||||
chatId: mostRecentChat.id,
|
chatId: mostRecentChat.id,
|
||||||
mode: chatMode,
|
mode: chatMode,
|
||||||
model: normalizedChatModel,
|
model: chatModel,
|
||||||
hasPlanArtifact: !!planArtifact,
|
hasPlanArtifact: !!planArtifact,
|
||||||
})
|
})
|
||||||
|
|
||||||
@@ -1361,7 +1336,7 @@ export const useCopilotStore = create<CopilotStore>()(
|
|||||||
toolCallsById,
|
toolCallsById,
|
||||||
streamingPlanContent: planArtifact,
|
streamingPlanContent: planArtifact,
|
||||||
mode: chatMode,
|
mode: chatMode,
|
||||||
selectedModel: normalizedChatModel as CopilotStore['selectedModel'],
|
selectedModel: chatModel as CopilotStore['selectedModel'],
|
||||||
})
|
})
|
||||||
try {
|
try {
|
||||||
await get().loadMessageCheckpoints(mostRecentChat.id)
|
await get().loadMessageCheckpoints(mostRecentChat.id)
|
||||||
@@ -1390,6 +1365,16 @@ export const useCopilotStore = create<CopilotStore>()(
|
|||||||
|
|
||||||
// Send a message (streaming only)
|
// Send a message (streaming only)
|
||||||
sendMessage: async (message: string, options = {}) => {
|
sendMessage: async (message: string, options = {}) => {
|
||||||
|
if (!get().autoAllowedToolsLoaded) {
|
||||||
|
try {
|
||||||
|
await get().loadAutoAllowedTools()
|
||||||
|
} catch (error) {
|
||||||
|
logger.warn('[Copilot] Failed to preload auto-allowed tools before send', {
|
||||||
|
error: error instanceof Error ? error.message : String(error),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
const prepared = prepareSendContext(get, set, message, options as SendMessageOptionsInput)
|
const prepared = prepareSendContext(get, set, message, options as SendMessageOptionsInput)
|
||||||
if (!prepared) return
|
if (!prepared) return
|
||||||
|
|
||||||
@@ -1656,7 +1641,7 @@ export const useCopilotStore = create<CopilotStore>()(
|
|||||||
const b = blocks[bi]
|
const b = blocks[bi]
|
||||||
if (b?.type === 'tool_call') {
|
if (b?.type === 'tool_call') {
|
||||||
const tn = b.toolCall?.name
|
const tn = b.toolCall?.name
|
||||||
if (isWorkflowEditToolCall(tn, b.toolCall?.params)) {
|
if (tn === 'edit_workflow') {
|
||||||
id = b.toolCall?.id
|
id = b.toolCall?.id
|
||||||
break outer
|
break outer
|
||||||
}
|
}
|
||||||
@@ -1665,9 +1650,7 @@ export const useCopilotStore = create<CopilotStore>()(
|
|||||||
}
|
}
|
||||||
// Fallback to map if not found in messages
|
// Fallback to map if not found in messages
|
||||||
if (!id) {
|
if (!id) {
|
||||||
const candidates = Object.values(toolCallsById).filter((t) =>
|
const candidates = Object.values(toolCallsById).filter((t) => t.name === 'edit_workflow')
|
||||||
isWorkflowEditToolCall(t.name, t.params)
|
|
||||||
)
|
|
||||||
id = candidates.length ? candidates[candidates.length - 1].id : undefined
|
id = candidates.length ? candidates[candidates.length - 1].id : undefined
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -2285,8 +2268,7 @@ export const useCopilotStore = create<CopilotStore>()(
|
|||||||
},
|
},
|
||||||
|
|
||||||
setSelectedModel: async (model) => {
|
setSelectedModel: async (model) => {
|
||||||
const normalizedModel = normalizeSelectedModelKey(model, get().availableModels)
|
set({ selectedModel: model })
|
||||||
set({ selectedModel: normalizedModel as CopilotStore['selectedModel'] })
|
|
||||||
},
|
},
|
||||||
setAgentPrefetch: (prefetch) => set({ agentPrefetch: prefetch }),
|
setAgentPrefetch: (prefetch) => set({ agentPrefetch: prefetch }),
|
||||||
loadAvailableModels: async () => {
|
loadAvailableModels: async () => {
|
||||||
@@ -2360,6 +2342,74 @@ export const useCopilotStore = create<CopilotStore>()(
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
|
||||||
|
loadAutoAllowedTools: async () => {
|
||||||
|
try {
|
||||||
|
logger.debug('[AutoAllowedTools] Loading from API...')
|
||||||
|
const res = await fetch(COPILOT_AUTO_ALLOWED_TOOLS_API_PATH)
|
||||||
|
logger.debug('[AutoAllowedTools] Load response', { status: res.status, ok: res.ok })
|
||||||
|
if (res.ok) {
|
||||||
|
const data = await res.json()
|
||||||
|
const tools = data.autoAllowedTools ?? []
|
||||||
|
set({ autoAllowedTools: tools, autoAllowedToolsLoaded: true })
|
||||||
|
writeAutoAllowedToolsToStorage(tools)
|
||||||
|
logger.debug('[AutoAllowedTools] Loaded successfully', { count: tools.length, tools })
|
||||||
|
} else {
|
||||||
|
set({ autoAllowedToolsLoaded: true })
|
||||||
|
logger.warn('[AutoAllowedTools] Load failed with status', { status: res.status })
|
||||||
|
}
|
||||||
|
} catch (err) {
|
||||||
|
set({ autoAllowedToolsLoaded: true })
|
||||||
|
logger.error('[AutoAllowedTools] Failed to load', { error: err })
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
addAutoAllowedTool: async (toolId: string) => {
|
||||||
|
try {
|
||||||
|
logger.debug('[AutoAllowedTools] Adding tool...', { toolId })
|
||||||
|
const res = await fetch(COPILOT_AUTO_ALLOWED_TOOLS_API_PATH, {
|
||||||
|
method: 'POST',
|
||||||
|
headers: { 'Content-Type': 'application/json' },
|
||||||
|
body: JSON.stringify({ toolId }),
|
||||||
|
})
|
||||||
|
logger.debug('[AutoAllowedTools] API response', { toolId, status: res.status, ok: res.ok })
|
||||||
|
if (res.ok) {
|
||||||
|
const data = await res.json()
|
||||||
|
logger.debug('[AutoAllowedTools] API returned', { toolId, tools: data.autoAllowedTools })
|
||||||
|
const tools = data.autoAllowedTools ?? []
|
||||||
|
set({ autoAllowedTools: tools, autoAllowedToolsLoaded: true })
|
||||||
|
writeAutoAllowedToolsToStorage(tools)
|
||||||
|
logger.debug('[AutoAllowedTools] Added tool to store', { toolId })
|
||||||
|
}
|
||||||
|
} catch (err) {
|
||||||
|
logger.error('[AutoAllowedTools] Failed to add tool', { toolId, error: err })
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
removeAutoAllowedTool: async (toolId: string) => {
|
||||||
|
try {
|
||||||
|
const res = await fetch(
|
||||||
|
`${COPILOT_AUTO_ALLOWED_TOOLS_API_PATH}?toolId=${encodeURIComponent(toolId)}`,
|
||||||
|
{
|
||||||
|
method: 'DELETE',
|
||||||
|
}
|
||||||
|
)
|
||||||
|
if (res.ok) {
|
||||||
|
const data = await res.json()
|
||||||
|
const tools = data.autoAllowedTools ?? []
|
||||||
|
set({ autoAllowedTools: tools, autoAllowedToolsLoaded: true })
|
||||||
|
writeAutoAllowedToolsToStorage(tools)
|
||||||
|
logger.debug('[AutoAllowedTools] Removed tool', { toolId })
|
||||||
|
}
|
||||||
|
} catch (err) {
|
||||||
|
logger.error('[AutoAllowedTools] Failed to remove tool', { toolId, error: err })
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
isToolAutoAllowed: (toolId: string) => {
|
||||||
|
const { autoAllowedTools } = get()
|
||||||
|
return isToolAutoAllowedByList(toolId, autoAllowedTools)
|
||||||
|
},
|
||||||
|
|
||||||
// Credential masking
|
// Credential masking
|
||||||
loadSensitiveCredentialIds: async () => {
|
loadSensitiveCredentialIds: async () => {
|
||||||
try {
|
try {
|
||||||
|
|||||||
@@ -26,26 +26,6 @@ export interface CopilotToolCall {
|
|||||||
params?: Record<string, unknown>
|
params?: Record<string, unknown>
|
||||||
input?: Record<string, unknown>
|
input?: Record<string, unknown>
|
||||||
display?: ClientToolDisplay
|
display?: ClientToolDisplay
|
||||||
/** Server-provided UI contract for this tool call phase */
|
|
||||||
ui?: {
|
|
||||||
title?: string
|
|
||||||
phaseLabel?: string
|
|
||||||
icon?: string
|
|
||||||
showInterrupt?: boolean
|
|
||||||
showRemember?: boolean
|
|
||||||
autoAllowed?: boolean
|
|
||||||
actions?: Array<{
|
|
||||||
id: string
|
|
||||||
label: string
|
|
||||||
kind: 'accept' | 'reject'
|
|
||||||
remember?: boolean
|
|
||||||
}>
|
|
||||||
}
|
|
||||||
/** Server-provided execution routing contract */
|
|
||||||
execution?: {
|
|
||||||
target?: 'go' | 'go_subagent' | 'sim_server' | 'sim_client_capability' | string
|
|
||||||
capabilityId?: string
|
|
||||||
}
|
|
||||||
/** Content streamed from a subagent (e.g., debug agent) */
|
/** Content streamed from a subagent (e.g., debug agent) */
|
||||||
subAgentContent?: string
|
subAgentContent?: string
|
||||||
/** Tool calls made by the subagent */
|
/** Tool calls made by the subagent */
|
||||||
@@ -187,6 +167,10 @@ export interface CopilotState {
|
|||||||
|
|
||||||
// Per-message metadata captured at send-time for reliable stats
|
// Per-message metadata captured at send-time for reliable stats
|
||||||
|
|
||||||
|
// Auto-allowed integration tools (tools that can run without confirmation)
|
||||||
|
autoAllowedTools: string[]
|
||||||
|
autoAllowedToolsLoaded: boolean
|
||||||
|
|
||||||
// Active stream metadata for reconnect/replay
|
// Active stream metadata for reconnect/replay
|
||||||
activeStream: CopilotStreamInfo | null
|
activeStream: CopilotStreamInfo | null
|
||||||
|
|
||||||
@@ -263,6 +247,11 @@ export interface CopilotActions {
|
|||||||
abortSignal?: AbortSignal
|
abortSignal?: AbortSignal
|
||||||
) => Promise<void>
|
) => Promise<void>
|
||||||
handleNewChatCreation: (newChatId: string) => Promise<void>
|
handleNewChatCreation: (newChatId: string) => Promise<void>
|
||||||
|
loadAutoAllowedTools: () => Promise<void>
|
||||||
|
addAutoAllowedTool: (toolId: string) => Promise<void>
|
||||||
|
removeAutoAllowedTool: (toolId: string) => Promise<void>
|
||||||
|
isToolAutoAllowed: (toolId: string) => boolean
|
||||||
|
|
||||||
// Credential masking
|
// Credential masking
|
||||||
loadSensitiveCredentialIds: () => Promise<void>
|
loadSensitiveCredentialIds: () => Promise<void>
|
||||||
maskCredentialValue: (value: string) => string
|
maskCredentialValue: (value: string) => string
|
||||||
|
|||||||
@@ -15,7 +15,7 @@ import {
|
|||||||
captureBaselineSnapshot,
|
captureBaselineSnapshot,
|
||||||
cloneWorkflowState,
|
cloneWorkflowState,
|
||||||
createBatchedUpdater,
|
createBatchedUpdater,
|
||||||
findLatestWorkflowEditToolCallId,
|
findLatestEditWorkflowToolCallId,
|
||||||
getLatestUserMessageId,
|
getLatestUserMessageId,
|
||||||
persistWorkflowStateToServer,
|
persistWorkflowStateToServer,
|
||||||
} from './utils'
|
} from './utils'
|
||||||
@@ -334,7 +334,7 @@ export const useWorkflowDiffStore = create<WorkflowDiffState & WorkflowDiffActio
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
findLatestWorkflowEditToolCallId().then((toolCallId) => {
|
findLatestEditWorkflowToolCallId().then((toolCallId) => {
|
||||||
if (toolCallId) {
|
if (toolCallId) {
|
||||||
import('@/stores/panel/copilot/store')
|
import('@/stores/panel/copilot/store')
|
||||||
.then(({ useCopilotStore }) => {
|
.then(({ useCopilotStore }) => {
|
||||||
@@ -439,7 +439,7 @@ export const useWorkflowDiffStore = create<WorkflowDiffState & WorkflowDiffActio
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
findLatestWorkflowEditToolCallId().then((toolCallId) => {
|
findLatestEditWorkflowToolCallId().then((toolCallId) => {
|
||||||
if (toolCallId) {
|
if (toolCallId) {
|
||||||
import('@/stores/panel/copilot/store')
|
import('@/stores/panel/copilot/store')
|
||||||
.then(({ useCopilotStore }) => {
|
.then(({ useCopilotStore }) => {
|
||||||
|
|||||||
@@ -126,20 +126,6 @@ export async function getLatestUserMessageId(): Promise<string | null> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
export async function findLatestEditWorkflowToolCallId(): Promise<string | undefined> {
|
export async function findLatestEditWorkflowToolCallId(): Promise<string | undefined> {
|
||||||
return findLatestWorkflowEditToolCallId()
|
|
||||||
}
|
|
||||||
|
|
||||||
function isWorkflowEditToolCall(name?: string, params?: Record<string, unknown>): boolean {
|
|
||||||
if (name !== 'workflow_change') return false
|
|
||||||
|
|
||||||
const mode = typeof params?.mode === 'string' ? params.mode.toLowerCase() : ''
|
|
||||||
if (mode === 'apply') return true
|
|
||||||
|
|
||||||
// Be permissive for incomplete events: apply calls always include proposalId.
|
|
||||||
return typeof params?.proposalId === 'string' && params.proposalId.length > 0
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function findLatestWorkflowEditToolCallId(): Promise<string | undefined> {
|
|
||||||
try {
|
try {
|
||||||
const { useCopilotStore } = await import('@/stores/panel/copilot/store')
|
const { useCopilotStore } = await import('@/stores/panel/copilot/store')
|
||||||
const { messages, toolCallsById } = useCopilotStore.getState()
|
const { messages, toolCallsById } = useCopilotStore.getState()
|
||||||
@@ -148,22 +134,17 @@ export async function findLatestWorkflowEditToolCallId(): Promise<string | undef
|
|||||||
const message = messages[mi]
|
const message = messages[mi]
|
||||||
if (message.role !== 'assistant' || !message.contentBlocks) continue
|
if (message.role !== 'assistant' || !message.contentBlocks) continue
|
||||||
for (const block of message.contentBlocks) {
|
for (const block of message.contentBlocks) {
|
||||||
if (
|
if (block?.type === 'tool_call' && block.toolCall?.name === 'edit_workflow') {
|
||||||
block?.type === 'tool_call' &&
|
|
||||||
isWorkflowEditToolCall(block.toolCall?.name, block.toolCall?.params)
|
|
||||||
) {
|
|
||||||
return block.toolCall?.id
|
return block.toolCall?.id
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
const fallback = Object.values(toolCallsById).filter((call) =>
|
const fallback = Object.values(toolCallsById).filter((call) => call.name === 'edit_workflow')
|
||||||
isWorkflowEditToolCall(call.name, call.params)
|
|
||||||
)
|
|
||||||
|
|
||||||
return fallback.length ? fallback[fallback.length - 1].id : undefined
|
return fallback.length ? fallback[fallback.length - 1].id : undefined
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.warn('Failed to resolve workflow edit tool call id', { error })
|
logger.warn('Failed to resolve edit_workflow tool call id', { error })
|
||||||
return undefined
|
return undefined
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,114 +0,0 @@
|
|||||||
import { TIMESTAMP_OUTPUT } from '@/tools/confluence/types'
|
|
||||||
import type { ToolConfig } from '@/tools/types'
|
|
||||||
|
|
||||||
export interface ConfluenceDeleteLabelParams {
|
|
||||||
accessToken: string
|
|
||||||
domain: string
|
|
||||||
pageId: string
|
|
||||||
labelName: string
|
|
||||||
cloudId?: string
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface ConfluenceDeleteLabelResponse {
|
|
||||||
success: boolean
|
|
||||||
output: {
|
|
||||||
ts: string
|
|
||||||
pageId: string
|
|
||||||
labelName: string
|
|
||||||
deleted: boolean
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export const confluenceDeleteLabelTool: ToolConfig<
|
|
||||||
ConfluenceDeleteLabelParams,
|
|
||||||
ConfluenceDeleteLabelResponse
|
|
||||||
> = {
|
|
||||||
id: 'confluence_delete_label',
|
|
||||||
name: 'Confluence Delete Label',
|
|
||||||
description: 'Remove a label from a Confluence page.',
|
|
||||||
version: '1.0.0',
|
|
||||||
|
|
||||||
oauth: {
|
|
||||||
required: true,
|
|
||||||
provider: 'confluence',
|
|
||||||
},
|
|
||||||
|
|
||||||
params: {
|
|
||||||
accessToken: {
|
|
||||||
type: 'string',
|
|
||||||
required: true,
|
|
||||||
visibility: 'hidden',
|
|
||||||
description: 'OAuth access token for Confluence',
|
|
||||||
},
|
|
||||||
domain: {
|
|
||||||
type: 'string',
|
|
||||||
required: true,
|
|
||||||
visibility: 'user-only',
|
|
||||||
description: 'Your Confluence domain (e.g., yourcompany.atlassian.net)',
|
|
||||||
},
|
|
||||||
pageId: {
|
|
||||||
type: 'string',
|
|
||||||
required: true,
|
|
||||||
visibility: 'user-or-llm',
|
|
||||||
description: 'Confluence page ID to remove the label from',
|
|
||||||
},
|
|
||||||
labelName: {
|
|
||||||
type: 'string',
|
|
||||||
required: true,
|
|
||||||
visibility: 'user-or-llm',
|
|
||||||
description: 'Name of the label to remove',
|
|
||||||
},
|
|
||||||
cloudId: {
|
|
||||||
type: 'string',
|
|
||||||
required: false,
|
|
||||||
visibility: 'user-only',
|
|
||||||
description:
|
|
||||||
'Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain.',
|
|
||||||
},
|
|
||||||
},
|
|
||||||
|
|
||||||
request: {
|
|
||||||
url: () => '/api/tools/confluence/labels',
|
|
||||||
method: 'DELETE',
|
|
||||||
headers: (params: ConfluenceDeleteLabelParams) => ({
|
|
||||||
Accept: 'application/json',
|
|
||||||
Authorization: `Bearer ${params.accessToken}`,
|
|
||||||
}),
|
|
||||||
body: (params: ConfluenceDeleteLabelParams) => ({
|
|
||||||
domain: params.domain,
|
|
||||||
accessToken: params.accessToken,
|
|
||||||
pageId: params.pageId?.trim(),
|
|
||||||
labelName: params.labelName?.trim(),
|
|
||||||
cloudId: params.cloudId,
|
|
||||||
}),
|
|
||||||
},
|
|
||||||
|
|
||||||
transformResponse: async (response: Response) => {
|
|
||||||
const data = await response.json()
|
|
||||||
return {
|
|
||||||
success: true,
|
|
||||||
output: {
|
|
||||||
ts: new Date().toISOString(),
|
|
||||||
pageId: data.pageId ?? '',
|
|
||||||
labelName: data.labelName ?? '',
|
|
||||||
deleted: true,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
},
|
|
||||||
|
|
||||||
outputs: {
|
|
||||||
ts: TIMESTAMP_OUTPUT,
|
|
||||||
pageId: {
|
|
||||||
type: 'string',
|
|
||||||
description: 'Page ID the label was removed from',
|
|
||||||
},
|
|
||||||
labelName: {
|
|
||||||
type: 'string',
|
|
||||||
description: 'Name of the removed label',
|
|
||||||
},
|
|
||||||
deleted: {
|
|
||||||
type: 'boolean',
|
|
||||||
description: 'Deletion status',
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
@@ -1,105 +0,0 @@
|
|||||||
import { TIMESTAMP_OUTPUT } from '@/tools/confluence/types'
|
|
||||||
import type { ToolConfig } from '@/tools/types'
|
|
||||||
|
|
||||||
export interface ConfluenceDeletePagePropertyParams {
|
|
||||||
accessToken: string
|
|
||||||
domain: string
|
|
||||||
pageId: string
|
|
||||||
propertyId: string
|
|
||||||
cloudId?: string
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface ConfluenceDeletePagePropertyResponse {
|
|
||||||
success: boolean
|
|
||||||
output: {
|
|
||||||
ts: string
|
|
||||||
pageId: string
|
|
||||||
propertyId: string
|
|
||||||
deleted: boolean
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export const confluenceDeletePagePropertyTool: ToolConfig<
|
|
||||||
ConfluenceDeletePagePropertyParams,
|
|
||||||
ConfluenceDeletePagePropertyResponse
|
|
||||||
> = {
|
|
||||||
id: 'confluence_delete_page_property',
|
|
||||||
name: 'Confluence Delete Page Property',
|
|
||||||
description: 'Delete a content property from a Confluence page by its property ID.',
|
|
||||||
version: '1.0.0',
|
|
||||||
|
|
||||||
oauth: {
|
|
||||||
required: true,
|
|
||||||
provider: 'confluence',
|
|
||||||
},
|
|
||||||
|
|
||||||
params: {
|
|
||||||
accessToken: {
|
|
||||||
type: 'string',
|
|
||||||
required: true,
|
|
||||||
visibility: 'hidden',
|
|
||||||
description: 'OAuth access token for Confluence',
|
|
||||||
},
|
|
||||||
domain: {
|
|
||||||
type: 'string',
|
|
||||||
required: true,
|
|
||||||
visibility: 'user-only',
|
|
||||||
description: 'Your Confluence domain (e.g., yourcompany.atlassian.net)',
|
|
||||||
},
|
|
||||||
pageId: {
|
|
||||||
type: 'string',
|
|
||||||
required: true,
|
|
||||||
visibility: 'user-or-llm',
|
|
||||||
description: 'The ID of the page containing the property',
|
|
||||||
},
|
|
||||||
propertyId: {
|
|
||||||
type: 'string',
|
|
||||||
required: true,
|
|
||||||
visibility: 'user-or-llm',
|
|
||||||
description: 'The ID of the property to delete',
|
|
||||||
},
|
|
||||||
cloudId: {
|
|
||||||
type: 'string',
|
|
||||||
required: false,
|
|
||||||
visibility: 'user-only',
|
|
||||||
description:
|
|
||||||
'Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain.',
|
|
||||||
},
|
|
||||||
},
|
|
||||||
|
|
||||||
request: {
|
|
||||||
url: () => '/api/tools/confluence/page-properties',
|
|
||||||
method: 'DELETE',
|
|
||||||
headers: (params: ConfluenceDeletePagePropertyParams) => ({
|
|
||||||
Accept: 'application/json',
|
|
||||||
Authorization: `Bearer ${params.accessToken}`,
|
|
||||||
}),
|
|
||||||
body: (params: ConfluenceDeletePagePropertyParams) => ({
|
|
||||||
domain: params.domain,
|
|
||||||
accessToken: params.accessToken,
|
|
||||||
pageId: params.pageId?.trim(),
|
|
||||||
propertyId: params.propertyId?.trim(),
|
|
||||||
cloudId: params.cloudId,
|
|
||||||
}),
|
|
||||||
},
|
|
||||||
|
|
||||||
transformResponse: async (response: Response) => {
|
|
||||||
const data = await response.json()
|
|
||||||
return {
|
|
||||||
success: true,
|
|
||||||
output: {
|
|
||||||
ts: new Date().toISOString(),
|
|
||||||
pageId: data.pageId ?? '',
|
|
||||||
propertyId: data.propertyId ?? '',
|
|
||||||
deleted: true,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
},
|
|
||||||
|
|
||||||
outputs: {
|
|
||||||
ts: TIMESTAMP_OUTPUT,
|
|
||||||
pageId: { type: 'string', description: 'ID of the page' },
|
|
||||||
propertyId: { type: 'string', description: 'ID of the deleted property' },
|
|
||||||
deleted: { type: 'boolean', description: 'Deletion status' },
|
|
||||||
},
|
|
||||||
}
|
|
||||||
@@ -1,143 +0,0 @@
|
|||||||
import { PAGE_ITEM_PROPERTIES, TIMESTAMP_OUTPUT } from '@/tools/confluence/types'
|
|
||||||
import type { ToolConfig } from '@/tools/types'
|
|
||||||
|
|
||||||
export interface ConfluenceGetPagesByLabelParams {
|
|
||||||
accessToken: string
|
|
||||||
domain: string
|
|
||||||
labelId: string
|
|
||||||
limit?: number
|
|
||||||
cursor?: string
|
|
||||||
cloudId?: string
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface ConfluenceGetPagesByLabelResponse {
|
|
||||||
success: boolean
|
|
||||||
output: {
|
|
||||||
ts: string
|
|
||||||
labelId: string
|
|
||||||
pages: Array<{
|
|
||||||
id: string
|
|
||||||
title: string
|
|
||||||
status: string | null
|
|
||||||
spaceId: string | null
|
|
||||||
parentId: string | null
|
|
||||||
authorId: string | null
|
|
||||||
createdAt: string | null
|
|
||||||
      version: {
        number: number
        message?: string
        createdAt?: string
      } | null
    }>
    nextCursor: string | null
  }
}

export const confluenceGetPagesByLabelTool: ToolConfig<
  ConfluenceGetPagesByLabelParams,
  ConfluenceGetPagesByLabelResponse
> = {
  id: 'confluence_get_pages_by_label',
  name: 'Confluence Get Pages by Label',
  description: 'Retrieve all pages that have a specific label applied.',
  version: '1.0.0',

  oauth: {
    required: true,
    provider: 'confluence',
  },

  params: {
    accessToken: {
      type: 'string',
      required: true,
      visibility: 'hidden',
      description: 'OAuth access token for Confluence',
    },
    domain: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'Your Confluence domain (e.g., yourcompany.atlassian.net)',
    },
    labelId: {
      type: 'string',
      required: true,
      visibility: 'user-or-llm',
      description: 'The ID of the label to get pages for',
    },
    limit: {
      type: 'number',
      required: false,
      visibility: 'user-or-llm',
      description: 'Maximum number of pages to return (default: 50, max: 250)',
    },
    cursor: {
      type: 'string',
      required: false,
      visibility: 'user-or-llm',
      description: 'Pagination cursor from previous response',
    },
    cloudId: {
      type: 'string',
      required: false,
      visibility: 'user-only',
      description:
        'Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain.',
    },
  },

  request: {
    url: (params: ConfluenceGetPagesByLabelParams) => {
      const query = new URLSearchParams({
        domain: params.domain,
        accessToken: params.accessToken,
        labelId: params.labelId,
        limit: String(params.limit || 50),
      })
      if (params.cursor) {
        query.set('cursor', params.cursor)
      }
      if (params.cloudId) {
        query.set('cloudId', params.cloudId)
      }
      return `/api/tools/confluence/pages-by-label?${query.toString()}`
    },
    method: 'GET',
    headers: (params: ConfluenceGetPagesByLabelParams) => ({
      Accept: 'application/json',
      Authorization: `Bearer ${params.accessToken}`,
    }),
  },

  transformResponse: async (response: Response) => {
    const data = await response.json()
    return {
      success: true,
      output: {
        ts: new Date().toISOString(),
        labelId: data.labelId ?? '',
        pages: data.pages ?? [],
        nextCursor: data.nextCursor ?? null,
      },
    }
  },

  outputs: {
    ts: TIMESTAMP_OUTPUT,
    labelId: { type: 'string', description: 'ID of the label' },
    pages: {
      type: 'array',
      description: 'Array of pages with this label',
      items: {
        type: 'object',
        properties: PAGE_ITEM_PROPERTIES,
      },
    },
    nextCursor: {
      type: 'string',
      description: 'Cursor for fetching the next page of results',
      optional: true,
    },
  },
}
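For orientation, here is a minimal usage sketch (not part of the diff): the parameter values are made up, the URL in the comment shows roughly what the url() builder above would produce, and the result object only illustrates the transformed output shape.

// Hypothetical example values, not captured from the repository.
const exampleParams: ConfluenceGetPagesByLabelParams = {
  accessToken: '<oauth-token>', // placeholder, normally injected by the OAuth flow
  domain: 'yourcompany.atlassian.net',
  labelId: '12345',
  limit: 2,
}
// url(exampleParams) would yield something like:
// /api/tools/confluence/pages-by-label?domain=yourcompany.atlassian.net&accessToken=...&labelId=12345&limit=2

// Shape of a transformed result (values are invented for illustration):
const exampleResult: ConfluenceGetPagesByLabelResponse = {
  success: true,
  output: {
    ts: '2025-01-01T00:00:00.000Z',
    labelId: '12345',
    pages: [],
    nextCursor: null,
  },
}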
@@ -5,14 +5,11 @@ import { confluenceCreatePageTool } from '@/tools/confluence/create_page'
 import { confluenceCreatePagePropertyTool } from '@/tools/confluence/create_page_property'
 import { confluenceDeleteAttachmentTool } from '@/tools/confluence/delete_attachment'
 import { confluenceDeleteCommentTool } from '@/tools/confluence/delete_comment'
-import { confluenceDeleteLabelTool } from '@/tools/confluence/delete_label'
 import { confluenceDeletePageTool } from '@/tools/confluence/delete_page'
-import { confluenceDeletePagePropertyTool } from '@/tools/confluence/delete_page_property'
 import { confluenceGetBlogPostTool } from '@/tools/confluence/get_blogpost'
 import { confluenceGetPageAncestorsTool } from '@/tools/confluence/get_page_ancestors'
 import { confluenceGetPageChildrenTool } from '@/tools/confluence/get_page_children'
 import { confluenceGetPageVersionTool } from '@/tools/confluence/get_page_version'
-import { confluenceGetPagesByLabelTool } from '@/tools/confluence/get_pages_by_label'
 import { confluenceGetSpaceTool } from '@/tools/confluence/get_space'
 import { confluenceListAttachmentsTool } from '@/tools/confluence/list_attachments'
 import { confluenceListBlogPostsTool } from '@/tools/confluence/list_blogposts'
@@ -22,7 +19,6 @@ import { confluenceListLabelsTool } from '@/tools/confluence/list_labels'
 import { confluenceListPagePropertiesTool } from '@/tools/confluence/list_page_properties'
 import { confluenceListPageVersionsTool } from '@/tools/confluence/list_page_versions'
 import { confluenceListPagesInSpaceTool } from '@/tools/confluence/list_pages_in_space'
-import { confluenceListSpaceLabelsTool } from '@/tools/confluence/list_space_labels'
 import { confluenceListSpacesTool } from '@/tools/confluence/list_spaces'
 import { confluenceRetrieveTool } from '@/tools/confluence/retrieve'
 import { confluenceSearchTool } from '@/tools/confluence/search'
@@ -82,7 +78,6 @@ export {
   // Page Properties Tools
   confluenceListPagePropertiesTool,
   confluenceCreatePagePropertyTool,
-  confluenceDeletePagePropertyTool,
   // Blog Post Tools
   confluenceListBlogPostsTool,
   confluenceGetBlogPostTool,
@@ -103,9 +98,6 @@ export {
   // Label Tools
   confluenceListLabelsTool,
   confluenceAddLabelTool,
-  confluenceDeleteLabelTool,
-  confluenceGetPagesByLabelTool,
-  confluenceListSpaceLabelsTool,
   // Space Tools
   confluenceGetSpaceTool,
   confluenceListSpacesTool,
@@ -1,134 +0,0 @@
import { LABEL_ITEM_PROPERTIES, TIMESTAMP_OUTPUT } from '@/tools/confluence/types'
import type { ToolConfig } from '@/tools/types'

export interface ConfluenceListSpaceLabelsParams {
  accessToken: string
  domain: string
  spaceId: string
  limit?: number
  cursor?: string
  cloudId?: string
}

export interface ConfluenceListSpaceLabelsResponse {
  success: boolean
  output: {
    ts: string
    spaceId: string
    labels: Array<{
      id: string
      name: string
      prefix: string
    }>
    nextCursor: string | null
  }
}

export const confluenceListSpaceLabelsTool: ToolConfig<
  ConfluenceListSpaceLabelsParams,
  ConfluenceListSpaceLabelsResponse
> = {
  id: 'confluence_list_space_labels',
  name: 'Confluence List Space Labels',
  description: 'List all labels associated with a Confluence space.',
  version: '1.0.0',

  oauth: {
    required: true,
    provider: 'confluence',
  },

  params: {
    accessToken: {
      type: 'string',
      required: true,
      visibility: 'hidden',
      description: 'OAuth access token for Confluence',
    },
    domain: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'Your Confluence domain (e.g., yourcompany.atlassian.net)',
    },
    spaceId: {
      type: 'string',
      required: true,
      visibility: 'user-or-llm',
      description: 'The ID of the Confluence space to list labels from',
    },
    limit: {
      type: 'number',
      required: false,
      visibility: 'user-or-llm',
      description: 'Maximum number of labels to return (default: 25, max: 250)',
    },
    cursor: {
      type: 'string',
      required: false,
      visibility: 'user-or-llm',
      description: 'Pagination cursor from previous response',
    },
    cloudId: {
      type: 'string',
      required: false,
      visibility: 'user-only',
      description:
        'Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain.',
    },
  },

  request: {
    url: (params: ConfluenceListSpaceLabelsParams) => {
      const query = new URLSearchParams({
        domain: params.domain,
        accessToken: params.accessToken,
        spaceId: params.spaceId,
        limit: String(params.limit || 25),
      })
      if (params.cursor) {
        query.set('cursor', params.cursor)
      }
      if (params.cloudId) {
        query.set('cloudId', params.cloudId)
      }
      return `/api/tools/confluence/space-labels?${query.toString()}`
    },
    method: 'GET',
    headers: (params: ConfluenceListSpaceLabelsParams) => ({
      Accept: 'application/json',
      Authorization: `Bearer ${params.accessToken}`,
    }),
  },

  transformResponse: async (response: Response) => {
    const data = await response.json()
    return {
      success: true,
      output: {
        ts: new Date().toISOString(),
        spaceId: data.spaceId ?? '',
        labels: data.labels ?? [],
        nextCursor: data.nextCursor ?? null,
      },
    }
  },

  outputs: {
    ts: TIMESTAMP_OUTPUT,
    spaceId: { type: 'string', description: 'ID of the space' },
    labels: {
      type: 'array',
      description: 'Array of labels on the space',
      items: {
        type: 'object',
        properties: LABEL_ITEM_PROPERTIES,
      },
    },
    nextCursor: {
      type: 'string',
      description: 'Cursor for fetching the next page of results',
      optional: true,
    },
  },
}
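As a rough illustration of the cursor contract this tool shares with get_pages_by_label, a caller might page through all labels like this; collectAllSpaceLabels and the execute callback are hypothetical, standing in for however the tool is actually invoked.

// Hypothetical pagination helper (not part of the repository).
async function collectAllSpaceLabels(
  execute: (params: ConfluenceListSpaceLabelsParams) => Promise<ConfluenceListSpaceLabelsResponse>,
  base: Omit<ConfluenceListSpaceLabelsParams, 'cursor'>
) {
  const labels: ConfluenceListSpaceLabelsResponse['output']['labels'] = []
  let cursor: string | undefined
  do {
    // Forward the cursor returned by the previous page, if any.
    const page = await execute({ ...base, cursor })
    labels.push(...page.output.labels)
    cursor = page.output.nextCursor ?? undefined
  } while (cursor)
  return labels
}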
@@ -118,13 +118,10 @@ import {
   confluenceCreatePageTool,
   confluenceDeleteAttachmentTool,
   confluenceDeleteCommentTool,
-  confluenceDeleteLabelTool,
-  confluenceDeletePagePropertyTool,
   confluenceDeletePageTool,
   confluenceGetBlogPostTool,
   confluenceGetPageAncestorsTool,
   confluenceGetPageChildrenTool,
-  confluenceGetPagesByLabelTool,
   confluenceGetPageVersionTool,
   confluenceGetSpaceTool,
   confluenceListAttachmentsTool,
@@ -135,7 +132,6 @@ import {
   confluenceListPagePropertiesTool,
   confluenceListPagesInSpaceTool,
   confluenceListPageVersionsTool,
-  confluenceListSpaceLabelsTool,
   confluenceListSpacesTool,
   confluenceRetrieveTool,
   confluenceSearchInSpaceTool,
@@ -2671,10 +2667,6 @@ export const tools: Record<string, ToolConfig> = {
   confluence_delete_attachment: confluenceDeleteAttachmentTool,
   confluence_list_labels: confluenceListLabelsTool,
   confluence_add_label: confluenceAddLabelTool,
-  confluence_get_pages_by_label: confluenceGetPagesByLabelTool,
-  confluence_list_space_labels: confluenceListSpaceLabelsTool,
-  confluence_delete_label: confluenceDeleteLabelTool,
-  confluence_delete_page_property: confluenceDeletePagePropertyTool,
   confluence_get_space: confluenceGetSpaceTool,
   confluence_list_spaces: confluenceListSpacesTool,
   cursor_list_agents: cursorListAgentsTool,
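Finally, a minimal sketch of how an entry in this registry might be resolved by its id; getTool and its error message are illustrative helpers, not part of the repository.

// Hypothetical lookup against the tools registry shown in the hunk above.
function getTool(id: string): ToolConfig {
  const tool = tools[id]
  if (!tool) {
    throw new Error(`Unknown tool id: ${id}`)
  }
  return tool
}

// e.g. getTool('confluence_add_label') returns confluenceAddLabelTool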