Compare commits

..

1 Commit

Author SHA1 Message Date
waleed
031866e07c fix(copilot): persist thinking blocks on page refresh via sendBeacon
- Use navigator.sendBeacon in beforeunload handler to reliably persist
  in-progress messages (including thinking blocks) during page teardown
- Flush batched streaming updates before beacon persistence
- Fall back to sendBeacon in abortMessage when page is unloading
- Fix double-digit ordered list clipping in thinking block (pl-6 → pl-8)
2026-02-10 22:46:39 -08:00
39 changed files with 198 additions and 2023 deletions
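The changes below lean on `navigator.sendBeacon`, which queues a small POST that the browser delivers even while the page is being torn down, whereas an ordinary `fetch` fired from `beforeunload` may be cancelled. A minimal sketch of that pattern, with handler, endpoint, and payload names invented for illustration (the real wiring lives in the copilot persistence module further down in this diff):

```ts
// Sketch only: names are placeholders, not the repository's actual handler.
function registerUnloadPersistence(endpoint: string, getPayload: () => object | null) {
  window.addEventListener('beforeunload', () => {
    const payload = getPayload()
    if (!payload) return
    const body = JSON.stringify(payload)
    // sendBeacon returns false if the browser refuses to queue the request (e.g. payload too large).
    const queued = navigator.sendBeacon(endpoint, new Blob([body], { type: 'application/json' }))
    if (!queued) {
      // Best-effort fallback; keepalive fetch bodies are size-limited (~64KB in most browsers).
      void fetch(endpoint, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body,
        keepalive: true,
      }).catch(() => {})
    }
  })
}
```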

View File

@@ -41,6 +41,9 @@ Diese Tastenkombinationen wechseln zwischen den Panel-Tabs auf der rechten Seite
| Tastenkombination | Aktion |
|----------|--------|
| `C` | Copilot-Tab fokussieren |
| `T` | Toolbar-Tab fokussieren |
| `E` | Editor-Tab fokussieren |
| `Mod` + `F` | Toolbar-Suche fokussieren |
## Globale Navigation

View File

@@ -43,6 +43,9 @@ These shortcuts switch between panel tabs on the right side of the canvas.
| Shortcut | Action |
|----------|--------|
| `C` | Focus Copilot tab |
| `T` | Focus Toolbar tab |
| `E` | Focus Editor tab |
| `Mod` + `F` | Focus Toolbar search |
## Global Navigation

View File

@@ -399,28 +399,6 @@ Create a new custom property (metadata) on a Confluence page.
| ↳ `authorId` | string | Account ID of the version author |
| ↳ `createdAt` | string | ISO 8601 timestamp of version creation |
### `confluence_delete_page_property`
Delete a content property from a Confluence page by its property ID.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) |
| `pageId` | string | Yes | The ID of the page containing the property |
| `propertyId` | string | Yes | The ID of the property to delete |
| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `ts` | string | ISO 8601 timestamp of the operation |
| `pageId` | string | ID of the page |
| `propertyId` | string | ID of the deleted property |
| `deleted` | boolean | Deletion status |
### `confluence_search`
Search for content across Confluence pages, blog posts, and other content.
@@ -894,90 +872,6 @@ Add a label to a Confluence page for organization and categorization.
| `labelName` | string | Name of the added label |
| `labelId` | string | ID of the added label |
### `confluence_delete_label`
Remove a label from a Confluence page.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) |
| `pageId` | string | Yes | Confluence page ID to remove the label from |
| `labelName` | string | Yes | Name of the label to remove |
| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `ts` | string | ISO 8601 timestamp of the operation |
| `pageId` | string | Page ID the label was removed from |
| `labelName` | string | Name of the removed label |
| `deleted` | boolean | Deletion status |
### `confluence_get_pages_by_label`
Retrieve all pages that have a specific label applied.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) |
| `labelId` | string | Yes | The ID of the label to get pages for |
| `limit` | number | No | Maximum number of pages to return \(default: 50, max: 250\) |
| `cursor` | string | No | Pagination cursor from previous response |
| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `ts` | string | ISO 8601 timestamp of the operation |
| `labelId` | string | ID of the label |
| `pages` | array | Array of pages with this label |
| ↳ `id` | string | Unique page identifier |
| ↳ `title` | string | Page title |
| ↳ `status` | string | Page status \(e.g., current, archived, trashed, draft\) |
| ↳ `spaceId` | string | ID of the space containing the page |
| ↳ `parentId` | string | ID of the parent page \(null if top-level\) |
| ↳ `authorId` | string | Account ID of the page author |
| ↳ `createdAt` | string | ISO 8601 timestamp when the page was created |
| ↳ `version` | object | Page version information |
| ↳ `number` | number | Version number |
| ↳ `message` | string | Version message |
| ↳ `minorEdit` | boolean | Whether this is a minor edit |
| ↳ `authorId` | string | Account ID of the version author |
| ↳ `createdAt` | string | ISO 8601 timestamp of version creation |
| `nextCursor` | string | Cursor for fetching the next page of results |
### `confluence_list_space_labels`
List all labels associated with a Confluence space.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `domain` | string | Yes | Your Confluence domain \(e.g., yourcompany.atlassian.net\) |
| `spaceId` | string | Yes | The ID of the Confluence space to list labels from |
| `limit` | number | No | Maximum number of labels to return \(default: 25, max: 250\) |
| `cursor` | string | No | Pagination cursor from previous response |
| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `ts` | string | ISO 8601 timestamp of the operation |
| `spaceId` | string | ID of the space |
| `labels` | array | Array of labels on the space |
| ↳ `id` | string | Unique label identifier |
| ↳ `name` | string | Label name |
| ↳ `prefix` | string | Label prefix/type \(e.g., global, my, team\) |
| `nextCursor` | string | Cursor for fetching the next page of results |
### `confluence_get_space`
Get details about a specific Confluence space.

View File

@@ -42,6 +42,9 @@ Estos atajos cambian entre las pestañas del panel en el lado derecho del lienzo
| Atajo | Acción |
|----------|--------|
| `C` | Enfocar pestaña Copilot |
| `T` | Enfocar pestaña Barra de herramientas |
| `E` | Enfocar pestaña Editor |
| `Mod` + `F` | Enfocar búsqueda de Barra de herramientas |
## Navegación global

View File

@@ -42,6 +42,9 @@ Ces raccourcis permettent de basculer entre les onglets du panneau sur le côté
| Raccourci | Action |
|----------|--------|
| `C` | Activer l'onglet Copilot |
| `T` | Activer l'onglet Barre d'outils |
| `E` | Activer l'onglet Éditeur |
| `Mod` + `F` | Activer la recherche dans la barre d'outils |
## Navigation globale

View File

@@ -41,6 +41,9 @@ import { Callout } from 'fumadocs-ui/components/callout'
| ショートカット | 操作 |
|----------|--------|
| `C` | Copilotタブにフォーカス |
| `T` | Toolbarタブにフォーカス |
| `E` | Editorタブにフォーカス |
| `Mod` + `F` | Toolbar検索にフォーカス |
## グローバルナビゲーション

View File

@@ -41,6 +41,9 @@ import { Callout } from 'fumadocs-ui/components/callout'
| 快捷键 | 操作 |
|----------|--------|
| `C` | 聚焦 Copilot 标签页 |
| `T` | 聚焦 Toolbar 标签页 |
| `E` | 聚焦 Editor 标签页 |
| `Mod` + `F` | 聚焦 Toolbar 搜索 |
## 全局导航

View File

@@ -113,7 +113,6 @@ const ChatMessageSchema = z.object({
workflowId: z.string().optional(),
knowledgeId: z.string().optional(),
blockId: z.string().optional(),
blockIds: z.array(z.string()).optional(),
templateId: z.string().optional(),
executionId: z.string().optional(),
// For workflow_block, provide both workflowId and blockId
@@ -160,20 +159,6 @@ export async function POST(req: NextRequest) {
commands,
} = ChatMessageSchema.parse(body)
const normalizedContexts = Array.isArray(contexts)
? contexts.map((ctx) => {
if (ctx.kind !== 'blocks') return ctx
if (Array.isArray(ctx.blockIds) && ctx.blockIds.length > 0) return ctx
if (ctx.blockId) {
return {
...ctx,
blockIds: [ctx.blockId],
}
}
return ctx
})
: contexts
// Resolve workflowId - if not provided, use first workflow or find by name
const resolved = await resolveWorkflowIdForUser(
authenticatedUserId,
@@ -191,10 +176,10 @@ export async function POST(req: NextRequest) {
const userMessageIdToUse = userMessageId || crypto.randomUUID()
try {
logger.info(`[${tracker.requestId}] Received chat POST`, {
hasContexts: Array.isArray(normalizedContexts),
contextsCount: Array.isArray(normalizedContexts) ? normalizedContexts.length : 0,
contextsPreview: Array.isArray(normalizedContexts)
? normalizedContexts.map((c: any) => ({
hasContexts: Array.isArray(contexts),
contextsCount: Array.isArray(contexts) ? contexts.length : 0,
contextsPreview: Array.isArray(contexts)
? contexts.map((c: any) => ({
kind: c?.kind,
chatId: c?.chatId,
workflowId: c?.workflowId,
@@ -206,25 +191,17 @@ export async function POST(req: NextRequest) {
} catch {}
// Preprocess contexts server-side
let agentContexts: Array<{ type: string; content: string }> = []
if (Array.isArray(normalizedContexts) && normalizedContexts.length > 0) {
if (Array.isArray(contexts) && contexts.length > 0) {
try {
const { processContextsServer } = await import('@/lib/copilot/process-contents')
const processed = await processContextsServer(
normalizedContexts as any,
authenticatedUserId,
message
)
const processed = await processContextsServer(contexts as any, authenticatedUserId, message)
agentContexts = processed
logger.info(`[${tracker.requestId}] Contexts processed for request`, {
processedCount: agentContexts.length,
kinds: agentContexts.map((c) => c.type),
lengthPreview: agentContexts.map((c) => c.content?.length ?? 0),
})
if (
Array.isArray(normalizedContexts) &&
normalizedContexts.length > 0 &&
agentContexts.length === 0
) {
if (Array.isArray(contexts) && contexts.length > 0 && agentContexts.length === 0) {
logger.warn(
`[${tracker.requestId}] Contexts provided but none processed. Check executionId for logs contexts.`
)
@@ -269,13 +246,11 @@ export async function POST(req: NextRequest) {
mode,
model: selectedModel,
provider,
conversationId: effectiveConversationId,
conversationHistory,
contexts: agentContexts,
fileAttachments,
commands,
chatId: actualChatId,
prefetch,
implicitFeedback,
},
{
@@ -457,15 +432,10 @@ export async function POST(req: NextRequest) {
content: message,
timestamp: new Date().toISOString(),
...(fileAttachments && fileAttachments.length > 0 && { fileAttachments }),
...(Array.isArray(normalizedContexts) &&
normalizedContexts.length > 0 && {
contexts: normalizedContexts,
}),
...(Array.isArray(normalizedContexts) &&
normalizedContexts.length > 0 && {
contentBlocks: [
{ type: 'contexts', contexts: normalizedContexts as any, timestamp: Date.now() },
],
}),
...(Array.isArray(contexts) && contexts.length > 0 && { contexts }),
...(Array.isArray(contexts) &&
contexts.length > 0 && {
contentBlocks: [{ type: 'contexts', contexts: contexts as any, timestamp: Date.now() }],
}),
}

View File

@@ -191,84 +191,3 @@ export async function GET(request: NextRequest) {
)
}
}
// Delete a label from a page
export async function DELETE(request: NextRequest) {
try {
const auth = await checkSessionOrInternalAuth(request)
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}
const {
domain,
accessToken,
cloudId: providedCloudId,
pageId,
labelName,
} = await request.json()
if (!domain) {
return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
}
if (!accessToken) {
return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
}
if (!pageId) {
return NextResponse.json({ error: 'Page ID is required' }, { status: 400 })
}
if (!labelName) {
return NextResponse.json({ error: 'Label name is required' }, { status: 400 })
}
const pageIdValidation = validateAlphanumericId(pageId, 'pageId', 255)
if (!pageIdValidation.isValid) {
return NextResponse.json({ error: pageIdValidation.error }, { status: 400 })
}
const cloudId = providedCloudId || (await getConfluenceCloudId(domain, accessToken))
const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
if (!cloudIdValidation.isValid) {
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
}
const encodedLabel = encodeURIComponent(labelName.trim())
const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/rest/api/content/${pageId}/label?name=${encodedLabel}`
const response = await fetch(url, {
method: 'DELETE',
headers: {
Accept: 'application/json',
Authorization: `Bearer ${accessToken}`,
},
})
if (!response.ok) {
const errorData = await response.json().catch(() => null)
logger.error('Confluence API error response:', {
status: response.status,
statusText: response.statusText,
error: JSON.stringify(errorData, null, 2),
})
const errorMessage =
errorData?.message || `Failed to delete Confluence label (${response.status})`
return NextResponse.json({ error: errorMessage }, { status: response.status })
}
return NextResponse.json({
pageId,
labelName,
deleted: true,
})
} catch (error) {
logger.error('Error deleting Confluence label:', error)
return NextResponse.json(
{ error: (error as Error).message || 'Internal server error' },
{ status: 500 }
)
}
}

View File

@@ -1,103 +0,0 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { validateAlphanumericId, validateJiraCloudId } from '@/lib/core/security/input-validation'
import { getConfluenceCloudId } from '@/tools/confluence/utils'
const logger = createLogger('ConfluencePagesByLabelAPI')
export const dynamic = 'force-dynamic'
export async function GET(request: NextRequest) {
try {
const auth = await checkSessionOrInternalAuth(request)
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}
const { searchParams } = new URL(request.url)
const domain = searchParams.get('domain')
const accessToken = searchParams.get('accessToken')
const labelId = searchParams.get('labelId')
const providedCloudId = searchParams.get('cloudId')
const limit = searchParams.get('limit') || '50'
const cursor = searchParams.get('cursor')
if (!domain) {
return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
}
if (!accessToken) {
return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
}
if (!labelId) {
return NextResponse.json({ error: 'Label ID is required' }, { status: 400 })
}
const labelIdValidation = validateAlphanumericId(labelId, 'labelId', 255)
if (!labelIdValidation.isValid) {
return NextResponse.json({ error: labelIdValidation.error }, { status: 400 })
}
const cloudId = providedCloudId || (await getConfluenceCloudId(domain, accessToken))
const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
if (!cloudIdValidation.isValid) {
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
}
const queryParams = new URLSearchParams()
queryParams.append('limit', String(Math.min(Number(limit), 250)))
if (cursor) {
queryParams.append('cursor', cursor)
}
const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/labels/${labelId}/pages?${queryParams.toString()}`
const response = await fetch(url, {
method: 'GET',
headers: {
Accept: 'application/json',
Authorization: `Bearer ${accessToken}`,
},
})
if (!response.ok) {
const errorData = await response.json().catch(() => null)
logger.error('Confluence API error response:', {
status: response.status,
statusText: response.statusText,
error: JSON.stringify(errorData, null, 2),
})
const errorMessage = errorData?.message || `Failed to get pages by label (${response.status})`
return NextResponse.json({ error: errorMessage }, { status: response.status })
}
const data = await response.json()
const pages = (data.results || []).map((page: any) => ({
id: page.id,
title: page.title,
status: page.status ?? null,
spaceId: page.spaceId ?? null,
parentId: page.parentId ?? null,
authorId: page.authorId ?? null,
createdAt: page.createdAt ?? null,
version: page.version ?? null,
}))
return NextResponse.json({
pages,
labelId,
nextCursor: data._links?.next
? new URL(data._links.next, 'https://placeholder').searchParams.get('cursor')
: null,
})
} catch (error) {
logger.error('Error getting pages by label:', error)
return NextResponse.json(
{ error: (error as Error).message || 'Internal server error' },
{ status: 500 }
)
}
}

View File

@@ -1,98 +0,0 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { validateAlphanumericId, validateJiraCloudId } from '@/lib/core/security/input-validation'
import { getConfluenceCloudId } from '@/tools/confluence/utils'
const logger = createLogger('ConfluenceSpaceLabelsAPI')
export const dynamic = 'force-dynamic'
export async function GET(request: NextRequest) {
try {
const auth = await checkSessionOrInternalAuth(request)
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}
const { searchParams } = new URL(request.url)
const domain = searchParams.get('domain')
const accessToken = searchParams.get('accessToken')
const spaceId = searchParams.get('spaceId')
const providedCloudId = searchParams.get('cloudId')
const limit = searchParams.get('limit') || '25'
const cursor = searchParams.get('cursor')
if (!domain) {
return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
}
if (!accessToken) {
return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
}
if (!spaceId) {
return NextResponse.json({ error: 'Space ID is required' }, { status: 400 })
}
const spaceIdValidation = validateAlphanumericId(spaceId, 'spaceId', 255)
if (!spaceIdValidation.isValid) {
return NextResponse.json({ error: spaceIdValidation.error }, { status: 400 })
}
const cloudId = providedCloudId || (await getConfluenceCloudId(domain, accessToken))
const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
if (!cloudIdValidation.isValid) {
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
}
const queryParams = new URLSearchParams()
queryParams.append('limit', String(Math.min(Number(limit), 250)))
if (cursor) {
queryParams.append('cursor', cursor)
}
const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/spaces/${spaceId}/labels?${queryParams.toString()}`
const response = await fetch(url, {
method: 'GET',
headers: {
Accept: 'application/json',
Authorization: `Bearer ${accessToken}`,
},
})
if (!response.ok) {
const errorData = await response.json().catch(() => null)
logger.error('Confluence API error response:', {
status: response.status,
statusText: response.statusText,
error: JSON.stringify(errorData, null, 2),
})
const errorMessage = errorData?.message || `Failed to list space labels (${response.status})`
return NextResponse.json({ error: errorMessage }, { status: response.status })
}
const data = await response.json()
const labels = (data.results || []).map((label: any) => ({
id: label.id,
name: label.name,
prefix: label.prefix || 'global',
}))
return NextResponse.json({
labels,
spaceId,
nextCursor: data._links?.next
? new URL(data._links.next, 'https://placeholder').searchParams.get('cursor')
: null,
})
} catch (error) {
logger.error('Error listing space labels:', error)
return NextResponse.json(
{ error: (error as Error).message || 'Internal server error' },
{ status: 500 }
)
}
}

View File

@@ -38,7 +38,6 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const isInternalCall = auth.authType === 'internal_jwt'
const userId = auth.userId || null
let workflowData = await getWorkflowById(workflowId)
@@ -48,14 +47,12 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
return NextResponse.json({ error: 'Workflow not found' }, { status: 404 })
}
if (isInternalCall && !userId) {
// Internal system calls (e.g. workflow-in-workflow executor) may not carry a userId.
// These are already authenticated via internal JWT; allow read access.
logger.info(`[${requestId}] Internal API call for workflow ${workflowId}`)
} else if (!userId) {
// Check if user has access to this workflow
if (!userId) {
logger.warn(`[${requestId}] Unauthorized access attempt for workflow ${workflowId}`)
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
} else {
}
const authorization = await authorizeWorkflowByWorkspacePermission({
workflowId,
userId,
@@ -74,7 +71,6 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
{ status: authorization.status }
)
}
}
logger.debug(`[${requestId}] Attempting to load workflow ${workflowId} from normalized tables`)
const normalizedData = await loadWorkflowFromNormalizedTables(workflowId)

View File

@@ -13,6 +13,9 @@ export type CommandId =
| 'goto-logs'
| 'open-search'
| 'run-workflow'
| 'focus-copilot-tab'
| 'focus-toolbar-tab'
| 'focus-editor-tab'
| 'clear-terminal-console'
| 'focus-toolbar-search'
| 'clear-notifications'
@@ -72,6 +75,21 @@ export const COMMAND_DEFINITIONS: Record<CommandId, CommandDefinition> = {
shortcut: 'Mod+Enter',
allowInEditable: false,
},
'focus-copilot-tab': {
id: 'focus-copilot-tab',
shortcut: 'C',
allowInEditable: false,
},
'focus-toolbar-tab': {
id: 'focus-toolbar-tab',
shortcut: 'T',
allowInEditable: false,
},
'focus-editor-tab': {
id: 'focus-editor-tab',
shortcut: 'E',
allowInEditable: false,
},
'clear-terminal-console': {
id: 'clear-terminal-console',
shortcut: 'Mod+D',

View File

@@ -108,7 +108,7 @@ const SmoothThinkingText = memo(
return (
<div
ref={textRef}
className='[&_*]:!text-[var(--text-muted)] [&_*]:!text-[12px] [&_*]:!leading-[1.4] [&_p]:!m-0 [&_p]:!mb-1 [&_h1]:!text-[12px] [&_h1]:!font-semibold [&_h1]:!m-0 [&_h1]:!mb-1 [&_h2]:!text-[12px] [&_h2]:!font-semibold [&_h2]:!m-0 [&_h2]:!mb-1 [&_h3]:!text-[12px] [&_h3]:!font-semibold [&_h3]:!m-0 [&_h3]:!mb-1 [&_code]:!text-[11px] [&_ul]:!pl-5 [&_ul]:!my-1 [&_ol]:!pl-6 [&_ol]:!my-1 [&_li]:!my-0.5 [&_li]:!py-0 font-season text-[12px] text-[var(--text-muted)]'
className='[&_*]:!text-[var(--text-muted)] [&_*]:!text-[12px] [&_*]:!leading-[1.4] [&_p]:!m-0 [&_p]:!mb-1 [&_h1]:!text-[12px] [&_h1]:!font-semibold [&_h1]:!m-0 [&_h1]:!mb-1 [&_h2]:!text-[12px] [&_h2]:!font-semibold [&_h2]:!m-0 [&_h2]:!mb-1 [&_h3]:!text-[12px] [&_h3]:!font-semibold [&_h3]:!m-0 [&_h3]:!mb-1 [&_code]:!text-[11px] [&_ul]:!pl-5 [&_ul]:!my-1 [&_ol]:!pl-8 [&_ol]:!my-1 [&_li]:!my-0.5 [&_li]:!py-0 font-season text-[12px] text-[var(--text-muted)]'
>
<CopilotMarkdownRenderer content={displayedContent} />
</div>
@@ -355,7 +355,7 @@ export function ThinkingBlock({
isExpanded ? 'mt-1.5 max-h-[150px] opacity-100' : 'max-h-0 opacity-0'
)}
>
<div className='[&_*]:!text-[var(--text-muted)] [&_*]:!text-[12px] [&_*]:!leading-[1.4] [&_p]:!m-0 [&_p]:!mb-1 [&_h1]:!text-[12px] [&_h1]:!font-semibold [&_h1]:!m-0 [&_h1]:!mb-1 [&_h2]:!text-[12px] [&_h2]:!font-semibold [&_h2]:!m-0 [&_h2]:!mb-1 [&_h3]:!text-[12px] [&_h3]:!font-semibold [&_h3]:!m-0 [&_h3]:!mb-1 [&_code]:!text-[11px] [&_ul]:!pl-5 [&_ul]:!my-1 [&_ol]:!pl-6 [&_ol]:!my-1 [&_li]:!my-0.5 [&_li]:!py-0 font-season text-[12px] text-[var(--text-muted)]'>
<div className='[&_*]:!text-[var(--text-muted)] [&_*]:!text-[12px] [&_*]:!leading-[1.4] [&_p]:!m-0 [&_p]:!mb-1 [&_h1]:!text-[12px] [&_h1]:!font-semibold [&_h1]:!m-0 [&_h1]:!mb-1 [&_h2]:!text-[12px] [&_h2]:!font-semibold [&_h2]:!m-0 [&_h2]:!mb-1 [&_h3]:!text-[12px] [&_h3]:!font-semibold [&_h3]:!m-0 [&_h3]:!mb-1 [&_code]:!text-[11px] [&_ul]:!pl-5 [&_ul]:!my-1 [&_ol]:!pl-8 [&_ol]:!my-1 [&_li]:!my-0.5 [&_li]:!py-0 font-season text-[12px] text-[var(--text-muted)]'>
<CopilotMarkdownRenderer content={cleanContent} />
</div>
</div>

View File

@@ -340,7 +340,13 @@ export const Panel = memo(function Panel() {
* Register global keyboard shortcuts using the central commands registry.
*
* - Mod+Enter: Run / cancel workflow (matches the Run button behavior)
* - C: Focus Copilot tab
* - T: Focus Toolbar tab
* - E: Focus Editor tab
* - Mod+F: Focus Toolbar tab and search input
*
* The tab-switching commands are disabled inside editable elements so typing
* in inputs or textareas is not interrupted.
*/
useRegisterGlobalCommands(() =>
createCommands([
@@ -357,6 +363,33 @@
allowInEditable: false,
},
},
{
id: 'focus-copilot-tab',
handler: () => {
setActiveTab('copilot')
},
overrides: {
allowInEditable: false,
},
},
{
id: 'focus-toolbar-tab',
handler: () => {
setActiveTab('toolbar')
},
overrides: {
allowInEditable: false,
},
},
{
id: 'focus-editor-tab',
handler: () => {
setActiveTab('editor')
},
overrides: {
allowInEditable: false,
},
},
{
id: 'focus-toolbar-search',
handler: () => {

View File

@@ -589,7 +589,6 @@ export async function executeScheduleJob(payload: ScheduleExecutionPayload) {
export const scheduleExecution = task({
id: 'schedule-execution',
machine: 'medium-1x',
retry: {
maxAttempts: 1,
},

View File

@@ -669,7 +669,6 @@ async function executeWebhookJobInternal(
export const webhookExecution = task({
id: 'webhook-execution',
machine: 'medium-1x',
retry: {
maxAttempts: 1,
},

View File

@@ -197,6 +197,5 @@ export async function executeWorkflowJob(payload: WorkflowExecutionPayload) {
export const workflowExecutionTask = task({
id: 'workflow-execution',
machine: 'medium-1x',
run: executeWorkflowJob,
})

View File

@@ -10,11 +10,9 @@ import {
getReasoningEffortValuesForModel,
getThinkingLevelsForModel,
getVerbosityValuesForModel,
MODELS_WITH_DEEP_RESEARCH,
MODELS_WITH_REASONING_EFFORT,
MODELS_WITH_THINKING,
MODELS_WITH_VERBOSITY,
MODELS_WITHOUT_MEMORY,
providers,
supportsTemperature,
} from '@/providers/utils'
@@ -414,22 +412,12 @@ Return ONLY the JSON array.`,
title: 'Tools', title: 'Tools',
type: 'tool-input', type: 'tool-input',
defaultValue: [], defaultValue: [],
condition: {
field: 'model',
value: MODELS_WITH_DEEP_RESEARCH,
not: true,
},
}, },
{ {
id: 'skills', id: 'skills',
title: 'Skills', title: 'Skills',
type: 'skill-input', type: 'skill-input',
defaultValue: [], defaultValue: [],
condition: {
field: 'model',
value: MODELS_WITH_DEEP_RESEARCH,
not: true,
},
}, },
{ {
id: 'memoryType', id: 'memoryType',
@@ -443,11 +431,6 @@ Return ONLY the JSON array.`,
{ label: 'Sliding window (tokens)', id: 'sliding_window_tokens' }, { label: 'Sliding window (tokens)', id: 'sliding_window_tokens' },
], ],
defaultValue: 'none', defaultValue: 'none',
condition: {
field: 'model',
value: MODELS_WITHOUT_MEMORY,
not: true,
},
}, },
{ {
id: 'conversationId', id: 'conversationId',
@@ -461,7 +444,6 @@ Return ONLY the JSON array.`,
condition: { condition: {
field: 'memoryType', field: 'memoryType',
value: ['conversation', 'sliding_window', 'sliding_window_tokens'], value: ['conversation', 'sliding_window', 'sliding_window_tokens'],
and: { field: 'model', value: MODELS_WITHOUT_MEMORY, not: true },
}, },
}, },
{ {
@@ -472,7 +454,6 @@ Return ONLY the JSON array.`,
condition: { condition: {
field: 'memoryType', field: 'memoryType',
value: ['sliding_window'], value: ['sliding_window'],
and: { field: 'model', value: MODELS_WITHOUT_MEMORY, not: true },
}, },
}, },
{ {
@@ -483,7 +464,6 @@ Return ONLY the JSON array.`,
condition: { condition: {
field: 'memoryType', field: 'memoryType',
value: ['sliding_window_tokens'], value: ['sliding_window_tokens'],
and: { field: 'model', value: MODELS_WITHOUT_MEMORY, not: true },
}, },
}, },
{ {
@@ -497,13 +477,9 @@ Return ONLY the JSON array.`,
condition: () => ({ condition: () => ({
field: 'model', field: 'model',
value: (() => { value: (() => {
const deepResearch = new Set(MODELS_WITH_DEEP_RESEARCH.map((m) => m.toLowerCase()))
const allModels = Object.keys(getBaseModelProviders()) const allModels = Object.keys(getBaseModelProviders())
return allModels.filter( return allModels.filter(
(model) => (model) => supportsTemperature(model) && getMaxTemperature(model) === 1
supportsTemperature(model) &&
getMaxTemperature(model) === 1 &&
!deepResearch.has(model.toLowerCase())
) )
})(), })(),
}), }),
@@ -519,13 +495,9 @@ Return ONLY the JSON array.`,
condition: () => ({ condition: () => ({
field: 'model', field: 'model',
value: (() => { value: (() => {
const deepResearch = new Set(MODELS_WITH_DEEP_RESEARCH.map((m) => m.toLowerCase()))
const allModels = Object.keys(getBaseModelProviders()) const allModels = Object.keys(getBaseModelProviders())
return allModels.filter( return allModels.filter(
(model) => (model) => supportsTemperature(model) && getMaxTemperature(model) === 2
supportsTemperature(model) &&
getMaxTemperature(model) === 2 &&
!deepResearch.has(model.toLowerCase())
) )
})(), })(),
}), }),
@@ -536,11 +508,6 @@ Return ONLY the JSON array.`,
type: 'short-input', type: 'short-input',
placeholder: 'Enter max tokens (e.g., 4096)...', placeholder: 'Enter max tokens (e.g., 4096)...',
mode: 'advanced', mode: 'advanced',
condition: {
field: 'model',
value: MODELS_WITH_DEEP_RESEARCH,
not: true,
},
}, },
{ {
id: 'responseFormat', id: 'responseFormat',
@@ -548,11 +515,6 @@ Return ONLY the JSON array.`,
type: 'code', type: 'code',
placeholder: 'Enter JSON schema...', placeholder: 'Enter JSON schema...',
language: 'json', language: 'json',
condition: {
field: 'model',
value: MODELS_WITH_DEEP_RESEARCH,
not: true,
},
wandConfig: { wandConfig: {
enabled: true, enabled: true,
maintainHistory: true, maintainHistory: true,
@@ -645,16 +607,6 @@ Example 3 (Array Input):
generationType: 'json-schema', generationType: 'json-schema',
}, },
}, },
{
id: 'previousInteractionId',
title: 'Previous Interaction ID',
type: 'short-input',
placeholder: 'e.g., {{agent_1.interactionId}}',
condition: {
field: 'model',
value: MODELS_WITH_DEEP_RESEARCH,
},
},
], ],
tools: { tools: {
access: [ access: [
@@ -818,13 +770,5 @@ Example 3 (Array Input):
description: 'Provider timing information', description: 'Provider timing information',
}, },
cost: { type: 'json', description: 'Cost of the API call' }, cost: { type: 'json', description: 'Cost of the API call' },
interactionId: {
type: 'string',
description: 'Interaction ID for multi-turn deep research follow-ups',
condition: {
field: 'model',
value: MODELS_WITH_DEEP_RESEARCH,
},
},
}, },
} }

View File

@@ -394,7 +394,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
// Page Property Operations // Page Property Operations
{ label: 'List Page Properties', id: 'list_page_properties' }, { label: 'List Page Properties', id: 'list_page_properties' },
{ label: 'Create Page Property', id: 'create_page_property' }, { label: 'Create Page Property', id: 'create_page_property' },
{ label: 'Delete Page Property', id: 'delete_page_property' },
// Search Operations // Search Operations
{ label: 'Search Content', id: 'search' }, { label: 'Search Content', id: 'search' },
{ label: 'Search in Space', id: 'search_in_space' }, { label: 'Search in Space', id: 'search_in_space' },
@@ -415,9 +414,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
// Label Operations // Label Operations
{ label: 'List Labels', id: 'list_labels' }, { label: 'List Labels', id: 'list_labels' },
{ label: 'Add Label', id: 'add_label' }, { label: 'Add Label', id: 'add_label' },
{ label: 'Delete Label', id: 'delete_label' },
{ label: 'Get Pages by Label', id: 'get_pages_by_label' },
{ label: 'List Space Labels', id: 'list_space_labels' },
// Space Operations // Space Operations
{ label: 'Get Space', id: 'get_space' }, { label: 'Get Space', id: 'get_space' },
{ label: 'List Spaces', id: 'list_spaces' }, { label: 'List Spaces', id: 'list_spaces' },
@@ -489,8 +485,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
'search_in_space', 'search_in_space',
'get_space', 'get_space',
'list_spaces', 'list_spaces',
'get_pages_by_label',
'list_space_labels',
], ],
not: true, not: true,
}, },
@@ -506,8 +500,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
'list_labels', 'list_labels',
'upload_attachment', 'upload_attachment',
'add_label', 'add_label',
'delete_label',
'delete_page_property',
'get_page_children', 'get_page_children',
'get_page_ancestors', 'get_page_ancestors',
'list_page_versions', 'list_page_versions',
@@ -535,8 +527,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
'search_in_space', 'search_in_space',
'get_space', 'get_space',
'list_spaces', 'list_spaces',
'get_pages_by_label',
'list_space_labels',
], ],
not: true, not: true,
}, },
@@ -552,8 +542,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
'list_labels', 'list_labels',
'upload_attachment', 'upload_attachment',
'add_label', 'add_label',
'delete_label',
'delete_page_property',
'get_page_children', 'get_page_children',
'get_page_ancestors', 'get_page_ancestors',
'list_page_versions', 'list_page_versions',
@@ -578,7 +566,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
'search_in_space', 'search_in_space',
'create_blogpost', 'create_blogpost',
'list_blogposts_in_space', 'list_blogposts_in_space',
'list_space_labels',
], ],
}, },
}, },
@@ -614,14 +601,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
required: true, required: true,
condition: { field: 'operation', value: 'create_page_property' }, condition: { field: 'operation', value: 'create_page_property' },
}, },
{
id: 'propertyId',
title: 'Property ID',
type: 'short-input',
placeholder: 'Enter property ID to delete',
required: true,
condition: { field: 'operation', value: 'delete_page_property' },
},
{ {
id: 'title', id: 'title',
title: 'Title', title: 'Title',
@@ -715,7 +694,7 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
type: 'short-input', type: 'short-input',
placeholder: 'Enter label name', placeholder: 'Enter label name',
required: true, required: true,
condition: { field: 'operation', value: ['add_label', 'delete_label'] }, condition: { field: 'operation', value: 'add_label' },
}, },
{ {
id: 'labelPrefix', id: 'labelPrefix',
@@ -730,14 +709,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
value: () => 'global', value: () => 'global',
condition: { field: 'operation', value: 'add_label' }, condition: { field: 'operation', value: 'add_label' },
}, },
{
id: 'labelId',
title: 'Label ID',
type: 'short-input',
placeholder: 'Enter label ID',
required: true,
condition: { field: 'operation', value: 'get_pages_by_label' },
},
{ {
id: 'blogPostStatus', id: 'blogPostStatus',
title: 'Status', title: 'Status',
@@ -788,8 +759,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
'list_page_versions', 'list_page_versions',
'list_page_properties', 'list_page_properties',
'list_labels', 'list_labels',
'get_pages_by_label',
'list_space_labels',
], ],
}, },
}, },
@@ -811,8 +780,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
'list_page_versions', 'list_page_versions',
'list_page_properties', 'list_page_properties',
'list_labels', 'list_labels',
'get_pages_by_label',
'list_space_labels',
], ],
}, },
}, },
@@ -833,7 +800,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
// Property Tools // Property Tools
'confluence_list_page_properties', 'confluence_list_page_properties',
'confluence_create_page_property', 'confluence_create_page_property',
'confluence_delete_page_property',
// Search Tools // Search Tools
'confluence_search', 'confluence_search',
'confluence_search_in_space', 'confluence_search_in_space',
@@ -854,9 +820,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
// Label Tools // Label Tools
'confluence_list_labels', 'confluence_list_labels',
'confluence_add_label', 'confluence_add_label',
'confluence_delete_label',
'confluence_get_pages_by_label',
'confluence_list_space_labels',
// Space Tools // Space Tools
'confluence_get_space', 'confluence_get_space',
'confluence_list_spaces', 'confluence_list_spaces',
@@ -889,8 +852,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
return 'confluence_list_page_properties' return 'confluence_list_page_properties'
case 'create_page_property': case 'create_page_property':
return 'confluence_create_page_property' return 'confluence_create_page_property'
case 'delete_page_property':
return 'confluence_delete_page_property'
// Search Operations // Search Operations
case 'search': case 'search':
return 'confluence_search' return 'confluence_search'
@@ -926,12 +887,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
return 'confluence_list_labels' return 'confluence_list_labels'
case 'add_label': case 'add_label':
return 'confluence_add_label' return 'confluence_add_label'
case 'delete_label':
return 'confluence_delete_label'
case 'get_pages_by_label':
return 'confluence_get_pages_by_label'
case 'list_space_labels':
return 'confluence_list_space_labels'
// Space Operations // Space Operations
case 'get_space': case 'get_space':
return 'confluence_get_space' return 'confluence_get_space'
@@ -953,9 +908,7 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
versionNumber, versionNumber,
propertyKey, propertyKey,
propertyValue, propertyValue,
propertyId,
labelPrefix, labelPrefix,
labelId,
blogPostStatus, blogPostStatus,
purge, purge,
bodyFormat, bodyFormat,
@@ -1006,9 +959,7 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
} }
} }
// Operations that support generic cursor pagination. // Operations that support cursor pagination
// get_pages_by_label and list_space_labels have dedicated handlers
// below that pass cursor along with their required params (labelId, spaceId).
const supportsCursor = [ const supportsCursor = [
'list_attachments', 'list_attachments',
'list_spaces', 'list_spaces',
@@ -1045,35 +996,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
} }
} }
if (operation === 'delete_page_property') {
return {
credential,
pageId: effectivePageId,
operation,
propertyId,
...rest,
}
}
if (operation === 'get_pages_by_label') {
return {
credential,
operation,
labelId,
cursor: cursor || undefined,
...rest,
}
}
if (operation === 'list_space_labels') {
return {
credential,
operation,
cursor: cursor || undefined,
...rest,
}
}
if (operation === 'upload_attachment') { if (operation === 'upload_attachment') {
const normalizedFile = normalizeFileInput(attachmentFile, { single: true }) const normalizedFile = normalizeFileInput(attachmentFile, { single: true })
if (!normalizedFile) { if (!normalizedFile) {
@@ -1122,9 +1044,7 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
attachmentFileName: { type: 'string', description: 'Custom file name for attachment' }, attachmentFileName: { type: 'string', description: 'Custom file name for attachment' },
attachmentComment: { type: 'string', description: 'Comment for the attachment' }, attachmentComment: { type: 'string', description: 'Comment for the attachment' },
labelName: { type: 'string', description: 'Label name' }, labelName: { type: 'string', description: 'Label name' },
labelId: { type: 'string', description: 'Label identifier' },
labelPrefix: { type: 'string', description: 'Label prefix (global, my, team, system)' }, labelPrefix: { type: 'string', description: 'Label prefix (global, my, team, system)' },
propertyId: { type: 'string', description: 'Property identifier' },
blogPostStatus: { type: 'string', description: 'Blog post status (current or draft)' }, blogPostStatus: { type: 'string', description: 'Blog post status (current or draft)' },
purge: { type: 'boolean', description: 'Permanently delete instead of moving to trash' }, purge: { type: 'boolean', description: 'Permanently delete instead of moving to trash' },
bodyFormat: { type: 'string', description: 'Body format for comments' }, bodyFormat: { type: 'string', description: 'Body format for comments' },
@@ -1160,7 +1080,6 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
// Label Results // Label Results
labels: { type: 'array', description: 'List of labels' }, labels: { type: 'array', description: 'List of labels' },
labelName: { type: 'string', description: 'Label name' }, labelName: { type: 'string', description: 'Label name' },
labelId: { type: 'string', description: 'Label identifier' },
// Space Results // Space Results
spaces: { type: 'array', description: 'List of spaces' }, spaces: { type: 'array', description: 'List of spaces' },
spaceId: { type: 'string', description: 'Space identifier' }, spaceId: { type: 'string', description: 'Space identifier' },

View File

@@ -2,8 +2,8 @@
slug: enterprise
title: 'Build with Sim for Enterprise'
description: 'Access control, BYOK, self-hosted deployments, on-prem Copilot, SSO & SAML, whitelabeling, Admin API, and flexible data retention—enterprise features for teams with strict security and compliance requirements.'
date: 2026-02-11
date: 2026-01-23
updated: 2026-02-11
updated: 2026-01-23
authors:
- vik
readingTime: 10
@@ -13,8 +13,8 @@ ogAlt: 'Sim Enterprise features overview'
about: ['Enterprise Software', 'Security', 'Compliance', 'Self-Hosting']
timeRequired: PT10M
canonical: https://sim.ai/studio/enterprise
featured: true
featured: false
draft: false
draft: true
---
We've been working with security teams at larger organizations to bring Sim into environments with strict compliance and data handling requirements. This post covers the enterprise capabilities we've built: granular access control, bring-your-own-keys, self-hosted deployments, on-prem Copilot, SSO & SAML, whitelabeling, compliance, and programmatic management via the Admin API.

View File

@@ -999,7 +999,6 @@ export class AgentBlockHandler implements BlockHandler {
reasoningEffort: inputs.reasoningEffort,
verbosity: inputs.verbosity,
thinkingLevel: inputs.thinkingLevel,
previousInteractionId: inputs.previousInteractionId,
}
}
@@ -1070,7 +1069,6 @@ export class AgentBlockHandler implements BlockHandler {
reasoningEffort: providerRequest.reasoningEffort,
verbosity: providerRequest.verbosity,
thinkingLevel: providerRequest.thinkingLevel,
previousInteractionId: providerRequest.previousInteractionId,
})
return this.processProviderResponse(response, block, responseFormat)
@@ -1271,7 +1269,6 @@ export class AgentBlockHandler implements BlockHandler {
content: result.content,
model: result.model,
...this.createResponseMetadata(result),
...(result.interactionId && { interactionId: result.interactionId }),
}
}

View File

@@ -20,8 +20,6 @@ export interface AgentInputs {
conversationId?: string // Required for all non-none memory types
slidingWindowSize?: string // For message-based sliding window
slidingWindowTokens?: string // For token-based sliding window
// Deep research multi-turn
previousInteractionId?: string // Interactions API previous interaction reference
// LLM parameters
temperature?: string
maxTokens?: string

View File

@@ -20,8 +20,6 @@ export interface BuildPayloadParams {
fileAttachments?: Array<{ id: string; key: string; size: number; [key: string]: unknown }>
commands?: string[]
chatId?: string
conversationId?: string
prefetch?: boolean
implicitFeedback?: string
}
@@ -66,10 +64,6 @@ export async function buildCopilotRequestPayload(
fileAttachments,
commands,
chatId,
conversationId,
prefetch,
conversationHistory,
implicitFeedback,
} = params
const selectedModel = options.selectedModel
@@ -160,12 +154,6 @@ export async function buildCopilotRequestPayload(
version: SIM_AGENT_VERSION,
...(contexts && contexts.length > 0 ? { context: contexts } : {}),
...(chatId ? { chatId } : {}),
...(conversationId ? { conversationId } : {}),
...(Array.isArray(conversationHistory) && conversationHistory.length > 0
? { conversationHistory }
: {}),
...(typeof prefetch === 'boolean' ? { prefetch } : {}),
...(implicitFeedback ? { implicitFeedback } : {}),
...(processedFileContents.length > 0 ? { fileAttachments: processedFileContents } : {}),
...(integrationTools.length > 0 ? { integrationTools } : {}),
...(credentials ? { credentials } : {}),

View File

@@ -5,7 +5,7 @@ import { serializeMessagesForDB } from './serialization'
const logger = createLogger('CopilotMessagePersistence')
export async function persistMessages(params: {
interface PersistParams {
chatId: string
messages: CopilotMessage[]
sensitiveCredentialIds?: Set<string>
@@ -13,24 +13,29 @@
mode?: string
model?: string
conversationId?: string
}): Promise<boolean> {
}
try {
/** Builds the JSON body used by both fetch and sendBeacon persistence paths. */
function buildPersistBody(params: PersistParams): string {
const dbMessages = serializeMessagesForDB(
params.messages,
params.sensitiveCredentialIds ?? new Set<string>()
)
const response = await fetch(COPILOT_UPDATE_MESSAGES_API_PATH, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
return JSON.stringify({
chatId: params.chatId,
messages: dbMessages,
...(params.planArtifact !== undefined ? { planArtifact: params.planArtifact } : {}),
...(params.mode || params.model
? { config: { mode: params.mode, model: params.model } }
: {}),
...(params.mode || params.model ? { config: { mode: params.mode, model: params.model } } : {}),
...(params.conversationId ? { conversationId: params.conversationId } : {}),
}),
})
}
export async function persistMessages(params: PersistParams): Promise<boolean> {
try {
const response = await fetch(COPILOT_UPDATE_MESSAGES_API_PATH, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: buildPersistBody(params),
})
return response.ok
} catch (error) {
@@ -41,3 +46,27 @@
return false
}
}
/**
* Persists messages using navigator.sendBeacon, which is reliable during page unload.
* Unlike fetch, sendBeacon is guaranteed to be queued even when the page is being torn down.
*/
export function persistMessagesBeacon(params: PersistParams): boolean {
try {
const body = buildPersistBody(params)
const blob = new Blob([body], { type: 'application/json' })
const sent = navigator.sendBeacon(COPILOT_UPDATE_MESSAGES_API_PATH, blob)
if (!sent) {
logger.warn('sendBeacon returned false — browser may have rejected the request', {
chatId: params.chatId,
})
}
return sent
} catch (error) {
logger.warn('Failed to persist messages via sendBeacon', {
chatId: params.chatId,
error: error instanceof Error ? error.message : String(error),
})
return false
}
}
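The `persistMessagesBeacon` export above is the piece the commit message refers to: on `beforeunload` the copilot store can flush any batched streaming updates (so in-progress thinking blocks are included) and then persist via `sendBeacon` instead of `fetch`. The call site is not part of this diff; a hypothetical wiring might look like the following, where `flushStreamingBatch` and `getCopilotState` are assumed names:

```ts
// Hypothetical call site (not shown in this diff); helper names are assumptions.
window.addEventListener('beforeunload', () => {
  flushStreamingBatch() // apply any buffered streaming deltas before serializing
  const { chatId, messages, mode, model } = getCopilotState()
  if (!chatId || messages.length === 0) return
  persistMessagesBeacon({ chatId, messages, mode, model })
})
```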

View File

@@ -1,7 +1,7 @@
import { db } from '@sim/db'
import { customTools, workflow } from '@sim/db/schema'
import { workflow } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, desc, eq, isNull, or } from 'drizzle-orm'
import { eq } from 'drizzle-orm'
import { SIM_AGENT_API_URL } from '@/lib/copilot/constants'
import type {
ExecutionContext,
@@ -12,7 +12,6 @@ import { routeExecution } from '@/lib/copilot/tools/server/router'
import { env } from '@/lib/core/config/env'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { getEffectiveDecryptedEnv } from '@/lib/environment/utils'
import { upsertCustomTools } from '@/lib/workflows/custom-tools/operations'
import { getTool, resolveToolId } from '@/tools/utils'
import {
executeCheckDeploymentStatus,
@@ -77,247 +76,6 @@
const logger = createLogger('CopilotToolExecutor')
type ManageCustomToolOperation = 'add' | 'edit' | 'delete' | 'list'
interface ManageCustomToolSchema {
type: 'function'
function: {
name: string
description?: string
parameters: Record<string, unknown>
}
}
interface ManageCustomToolParams {
operation?: string
toolId?: string
schema?: ManageCustomToolSchema
code?: string
title?: string
workspaceId?: string
}
async function executeManageCustomTool(
rawParams: Record<string, unknown>,
context: ExecutionContext
): Promise<ToolCallResult> {
const params = rawParams as ManageCustomToolParams
const operation = String(params.operation || '').toLowerCase() as ManageCustomToolOperation
const workspaceId = params.workspaceId || context.workspaceId
if (!operation) {
return { success: false, error: "Missing required 'operation' argument" }
}
try {
if (operation === 'list') {
const toolsForUser = workspaceId
? await db
.select()
.from(customTools)
.where(
or(
eq(customTools.workspaceId, workspaceId),
and(isNull(customTools.workspaceId), eq(customTools.userId, context.userId))
)
)
.orderBy(desc(customTools.createdAt))
: await db
.select()
.from(customTools)
.where(and(isNull(customTools.workspaceId), eq(customTools.userId, context.userId)))
.orderBy(desc(customTools.createdAt))
return {
success: true,
output: {
success: true,
operation,
tools: toolsForUser,
count: toolsForUser.length,
},
}
}
if (operation === 'add') {
if (!workspaceId) {
return {
success: false,
error: "workspaceId is required for operation 'add'",
}
}
if (!params.schema || !params.code) {
return {
success: false,
error: "Both 'schema' and 'code' are required for operation 'add'",
}
}
const title = params.title || params.schema.function?.name
if (!title) {
return { success: false, error: "Missing tool title or schema.function.name for 'add'" }
}
const resultTools = await upsertCustomTools({
tools: [
{
title,
schema: params.schema,
code: params.code,
},
],
workspaceId,
userId: context.userId,
})
const created = resultTools.find((tool) => tool.title === title)
return {
success: true,
output: {
success: true,
operation,
toolId: created?.id,
title,
message: `Created custom tool "${title}"`,
},
}
}
if (operation === 'edit') {
if (!workspaceId) {
return {
success: false,
error: "workspaceId is required for operation 'edit'",
}
}
if (!params.toolId) {
return { success: false, error: "'toolId' is required for operation 'edit'" }
}
if (!params.schema && !params.code) {
return {
success: false,
error: "At least one of 'schema' or 'code' is required for operation 'edit'",
}
}
const workspaceTool = await db
.select()
.from(customTools)
.where(and(eq(customTools.id, params.toolId), eq(customTools.workspaceId, workspaceId)))
.limit(1)
const legacyTool =
workspaceTool.length === 0
? await db
.select()
.from(customTools)
.where(
and(
eq(customTools.id, params.toolId),
isNull(customTools.workspaceId),
eq(customTools.userId, context.userId)
)
)
.limit(1)
: []
const existing = workspaceTool[0] || legacyTool[0]
if (!existing) {
return { success: false, error: `Custom tool not found: ${params.toolId}` }
}
const mergedSchema = params.schema || (existing.schema as ManageCustomToolSchema)
const mergedCode = params.code || existing.code
const title = params.title || mergedSchema.function?.name || existing.title
await upsertCustomTools({
tools: [
{
id: params.toolId,
title,
schema: mergedSchema,
code: mergedCode,
},
],
workspaceId,
userId: context.userId,
})
return {
success: true,
output: {
success: true,
operation,
toolId: params.toolId,
title,
message: `Updated custom tool "${title}"`,
},
}
}
if (operation === 'delete') {
if (!params.toolId) {
return { success: false, error: "'toolId' is required for operation 'delete'" }
}
const workspaceDelete =
workspaceId != null
? await db
.delete(customTools)
.where(
and(eq(customTools.id, params.toolId), eq(customTools.workspaceId, workspaceId))
)
.returning({ id: customTools.id })
: []
const legacyDelete =
workspaceDelete.length === 0
? await db
.delete(customTools)
.where(
and(
eq(customTools.id, params.toolId),
isNull(customTools.workspaceId),
eq(customTools.userId, context.userId)
)
)
.returning({ id: customTools.id })
: []
const deleted = workspaceDelete[0] || legacyDelete[0]
if (!deleted) {
return { success: false, error: `Custom tool not found: ${params.toolId}` }
}
return {
success: true,
output: {
success: true,
operation,
toolId: params.toolId,
message: 'Deleted custom tool',
},
}
}
return {
success: false,
error: `Unsupported operation for manage_custom_tool: ${operation}`,
}
} catch (error) {
logger.error('manage_custom_tool execution failed', {
operation,
workspaceId,
userId: context.userId,
error: error instanceof Error ? error.message : String(error),
})
return {
success: false,
error: error instanceof Error ? error.message : 'Failed to manage custom tool',
}
}
}
const SERVER_TOOLS = new Set<string>([
'get_blocks_and_tools',
'get_blocks_metadata',
@@ -403,19 +161,6 @@ const SIM_WORKFLOW_TOOL_HANDLERS: Record<
}
}
},
oauth_request_access: async (p, _c) => {
const providerName = (p.providerName || p.provider_name || 'the provider') as string
return {
success: true,
output: {
success: true,
status: 'requested',
providerName,
message: `Requested ${providerName} OAuth connection. The user should complete the OAuth modal in the UI, then retry credential-dependent actions.`,
},
}
},
manage_custom_tool: (p, c) => executeManageCustomTool(p, c),
}
/**

View File

@@ -5,7 +5,6 @@ import {
type GenerateContentConfig,
type GenerateContentResponse,
type GoogleGenAI,
type Interactions,
type Part,
type Schema,
type ThinkingConfig,
@@ -28,7 +27,6 @@ import {
import type { FunctionCallResponse, ProviderRequest, ProviderResponse } from '@/providers/types'
import {
calculateCost,
isDeepResearchModel,
prepareToolExecution,
prepareToolsWithUsageControl,
} from '@/providers/utils'
@@ -383,468 +381,6 @@ export interface GeminiExecutionConfig {
providerType: GeminiProviderType
}
const DEEP_RESEARCH_POLL_INTERVAL_MS = 10_000
const DEEP_RESEARCH_MAX_DURATION_MS = 60 * 60 * 1000
/**
* Sleeps for the specified number of milliseconds
*/
function sleep(ms: number): Promise<void> {
return new Promise((resolve) => setTimeout(resolve, ms))
}
/**
* Collapses a ProviderRequest into a single input string and optional system instruction
* for the Interactions API, which takes a flat input rather than a messages array.
*
* Deep research is single-turn only — it takes one research query and returns a report.
* Memory/conversation history is hidden in the UI for deep research models, so only
* the last user message is used as input. System messages are passed via system_instruction.
*/
function collapseMessagesToInput(request: ProviderRequest): {
input: string
systemInstruction: string | undefined
} {
const systemParts: string[] = []
const userParts: string[] = []
if (request.systemPrompt) {
systemParts.push(request.systemPrompt)
}
if (request.messages) {
for (const msg of request.messages) {
if (msg.role === 'system' && msg.content) {
systemParts.push(msg.content)
} else if (msg.role === 'user' && msg.content) {
userParts.push(msg.content)
}
}
}
return {
input:
userParts.length > 0
? userParts[userParts.length - 1]
: 'Please conduct research on the provided topic.',
systemInstruction: systemParts.length > 0 ? systemParts.join('\n\n') : undefined,
}
}
/**
* Extracts text content from a completed interaction's outputs array.
* The outputs array can contain text, thought, google_search_result, and other types.
* We concatenate all text outputs to get the full research report.
*/
function extractTextFromInteractionOutputs(outputs: Interactions.Interaction['outputs']): string {
if (!outputs || outputs.length === 0) return ''
const textParts: string[] = []
for (const output of outputs) {
if (output.type === 'text') {
const text = (output as Interactions.TextContent).text
if (text) textParts.push(text)
}
}
return textParts.join('\n\n')
}
/**
* Extracts token usage from an Interaction's Usage object.
* The Interactions API provides total_input_tokens, total_output_tokens, total_tokens,
* and total_reasoning_tokens (for thinking models).
*
* Also handles the raw API field name total_thought_tokens which the SDK may
* map to total_reasoning_tokens.
*/
function extractInteractionUsage(usage: Interactions.Usage | undefined): {
inputTokens: number
outputTokens: number
reasoningTokens: number
totalTokens: number
} {
if (!usage) {
return { inputTokens: 0, outputTokens: 0, reasoningTokens: 0, totalTokens: 0 }
}
const usageLogger = createLogger('DeepResearchUsage')
usageLogger.info('Raw interaction usage', { usage: JSON.stringify(usage) })
const inputTokens = usage.total_input_tokens ?? 0
const outputTokens = usage.total_output_tokens ?? 0
const reasoningTokens =
usage.total_reasoning_tokens ??
((usage as Record<string, unknown>).total_thought_tokens as number) ??
0
const totalTokens = usage.total_tokens ?? inputTokens + outputTokens
return { inputTokens, outputTokens, reasoningTokens, totalTokens }
}
/**
* Builds a standard ProviderResponse from a completed deep research interaction.
*/
function buildDeepResearchResponse(
content: string,
model: string,
usage: {
inputTokens: number
outputTokens: number
reasoningTokens: number
totalTokens: number
},
providerStartTime: number,
providerStartTimeISO: string,
interactionId?: string
): ProviderResponse {
const providerEndTime = Date.now()
const duration = providerEndTime - providerStartTime
return {
content,
model,
tokens: {
input: usage.inputTokens,
output: usage.outputTokens,
total: usage.totalTokens,
},
timing: {
startTime: providerStartTimeISO,
endTime: new Date(providerEndTime).toISOString(),
duration,
modelTime: duration,
toolsTime: 0,
firstResponseTime: duration,
iterations: 1,
timeSegments: [
{
type: 'model',
name: 'Deep research',
startTime: providerStartTime,
endTime: providerEndTime,
duration,
},
],
},
cost: calculateCost(model, usage.inputTokens, usage.outputTokens),
interactionId,
}
}
/**
* Creates a ReadableStream from a deep research streaming interaction.
*
* Deep research streaming returns InteractionSSEEvent chunks including:
* - interaction.start: initial interaction with ID
* - content.delta: incremental text and thought_summary updates
* - content.start / content.stop: output boundaries
* - interaction.complete: final event (outputs is undefined in streaming; must reconstruct)
* - error: error events
*
* We stream text deltas to the client and track usage from the interaction.complete event.
*/
function createDeepResearchStream(
stream: AsyncIterable<Interactions.InteractionSSEEvent>,
onComplete?: (
content: string,
usage: {
inputTokens: number
outputTokens: number
reasoningTokens: number
totalTokens: number
},
interactionId?: string
) => void
): ReadableStream<Uint8Array> {
const streamLogger = createLogger('DeepResearchStream')
let fullContent = ''
let completionUsage = { inputTokens: 0, outputTokens: 0, reasoningTokens: 0, totalTokens: 0 }
let completedInteractionId: string | undefined
return new ReadableStream({
async start(controller) {
try {
for await (const event of stream) {
if (event.event_type === 'content.delta') {
const delta = (event as Interactions.ContentDelta).delta
if (delta?.type === 'text' && 'text' in delta && delta.text) {
fullContent += delta.text
controller.enqueue(new TextEncoder().encode(delta.text))
}
} else if (event.event_type === 'interaction.complete') {
const interaction = (event as Interactions.InteractionEvent).interaction
if (interaction?.usage) {
completionUsage = extractInteractionUsage(interaction.usage)
}
completedInteractionId = interaction?.id
} else if (event.event_type === 'interaction.start') {
const interaction = (event as Interactions.InteractionEvent).interaction
if (interaction?.id) {
completedInteractionId = interaction.id
}
} else if (event.event_type === 'error') {
const errorEvent = event as { error?: { code?: string; message?: string } }
const message = errorEvent.error?.message ?? 'Unknown deep research stream error'
streamLogger.error('Deep research stream error', {
code: errorEvent.error?.code,
message,
})
controller.error(new Error(message))
return
}
}
onComplete?.(fullContent, completionUsage, completedInteractionId)
controller.close()
} catch (error) {
streamLogger.error('Error reading deep research stream', {
error: error instanceof Error ? error.message : String(error),
})
controller.error(error)
}
},
})
}
/**
* Executes a deep research request using the Interactions API.
*
* Deep research uses the Interactions API ({@link https://ai.google.dev/api/interactions-api}),
* a completely different surface from generateContent. It creates a background interaction
* that performs comprehensive research (up to 60 minutes).
*
* Supports both streaming and non-streaming modes:
* - Streaming: returns a StreamingExecution with a ReadableStream of text deltas
* - Non-streaming: polls until completion and returns a ProviderResponse
*
* Deep research does NOT support custom function calling tools, MCP servers,
* or structured output (response_format). These are gracefully ignored.
*/
export async function executeDeepResearchRequest(
config: GeminiExecutionConfig
): Promise<ProviderResponse | StreamingExecution> {
const { ai, model, request, providerType } = config
const logger = createLogger(providerType === 'google' ? 'GoogleProvider' : 'VertexProvider')
logger.info('Preparing deep research request', {
model,
hasSystemPrompt: !!request.systemPrompt,
hasMessages: !!request.messages?.length,
streaming: !!request.stream,
hasPreviousInteractionId: !!request.previousInteractionId,
})
if (request.tools?.length) {
logger.warn('Deep research does not support custom tools — ignoring tools parameter')
}
if (request.responseFormat) {
logger.warn(
'Deep research does not support structured output — ignoring responseFormat parameter'
)
}
const providerStartTime = Date.now()
const providerStartTimeISO = new Date(providerStartTime).toISOString()
try {
const { input, systemInstruction } = collapseMessagesToInput(request)
// Deep research requires background=true and store=true (store defaults to true,
// but we set it explicitly per API requirements)
const baseParams = {
agent: model as Interactions.CreateAgentInteractionParamsNonStreaming['agent'],
input,
background: true,
store: true,
...(systemInstruction && { system_instruction: systemInstruction }),
...(request.previousInteractionId && {
previous_interaction_id: request.previousInteractionId,
}),
agent_config: {
type: 'deep-research' as const,
thinking_summaries: 'auto' as const,
},
}
logger.info('Creating deep research interaction', {
inputLength: input.length,
hasSystemInstruction: !!systemInstruction,
streaming: !!request.stream,
})
// Streaming mode: create a streaming interaction and return a StreamingExecution
if (request.stream) {
const streamParams: Interactions.CreateAgentInteractionParamsStreaming = {
...baseParams,
stream: true,
}
const streamResponse = await ai.interactions.create(streamParams)
const firstResponseTime = Date.now() - providerStartTime
const streamingResult: StreamingExecution = {
stream: undefined as unknown as ReadableStream<Uint8Array>,
execution: {
success: true,
output: {
content: '',
model,
tokens: { input: 0, output: 0, total: 0 },
providerTiming: {
startTime: providerStartTimeISO,
endTime: new Date().toISOString(),
duration: Date.now() - providerStartTime,
modelTime: firstResponseTime,
toolsTime: 0,
firstResponseTime,
iterations: 1,
timeSegments: [
{
type: 'model',
name: 'Deep research (streaming)',
startTime: providerStartTime,
endTime: providerStartTime + firstResponseTime,
duration: firstResponseTime,
},
],
},
cost: {
input: 0,
output: 0,
total: 0,
pricing: { input: 0, output: 0, updatedAt: new Date().toISOString() },
},
},
logs: [],
metadata: {
startTime: providerStartTimeISO,
endTime: new Date().toISOString(),
duration: Date.now() - providerStartTime,
},
isStreaming: true,
},
}
streamingResult.stream = createDeepResearchStream(
streamResponse,
(content, usage, streamInteractionId) => {
streamingResult.execution.output.content = content
streamingResult.execution.output.tokens = {
input: usage.inputTokens,
output: usage.outputTokens,
total: usage.totalTokens,
}
streamingResult.execution.output.interactionId = streamInteractionId
const cost = calculateCost(model, usage.inputTokens, usage.outputTokens)
streamingResult.execution.output.cost = cost
const streamEndTime = Date.now()
if (streamingResult.execution.output.providerTiming) {
streamingResult.execution.output.providerTiming.endTime = new Date(
streamEndTime
).toISOString()
streamingResult.execution.output.providerTiming.duration =
streamEndTime - providerStartTime
const segments = streamingResult.execution.output.providerTiming.timeSegments
if (segments?.[0]) {
segments[0].endTime = streamEndTime
segments[0].duration = streamEndTime - providerStartTime
}
}
}
)
return streamingResult
}
// Non-streaming mode: create and poll
const createParams: Interactions.CreateAgentInteractionParamsNonStreaming = {
...baseParams,
stream: false,
}
const interaction = await ai.interactions.create(createParams)
const interactionId = interaction.id
logger.info('Deep research interaction created', { interactionId, status: interaction.status })
// Poll until a terminal status
const pollStartTime = Date.now()
let result: Interactions.Interaction = interaction
while (Date.now() - pollStartTime < DEEP_RESEARCH_MAX_DURATION_MS) {
if (result.status === 'completed') {
break
}
if (result.status === 'failed') {
throw new Error(`Deep research interaction failed: ${interactionId}`)
}
if (result.status === 'cancelled') {
throw new Error(`Deep research interaction was cancelled: ${interactionId}`)
}
logger.info('Deep research in progress, polling...', {
interactionId,
status: result.status,
elapsedMs: Date.now() - pollStartTime,
})
await sleep(DEEP_RESEARCH_POLL_INTERVAL_MS)
result = await ai.interactions.get(interactionId)
}
if (result.status !== 'completed') {
throw new Error(
`Deep research timed out after ${DEEP_RESEARCH_MAX_DURATION_MS / 1000}s (status: ${result.status})`
)
}
const content = extractTextFromInteractionOutputs(result.outputs)
const usage = extractInteractionUsage(result.usage)
logger.info('Deep research completed', {
interactionId,
contentLength: content.length,
inputTokens: usage.inputTokens,
outputTokens: usage.outputTokens,
reasoningTokens: usage.reasoningTokens,
totalTokens: usage.totalTokens,
durationMs: Date.now() - providerStartTime,
})
return buildDeepResearchResponse(
content,
model,
usage,
providerStartTime,
providerStartTimeISO,
interactionId
)
} catch (error) {
const providerEndTime = Date.now()
const duration = providerEndTime - providerStartTime
logger.error('Error in deep research request:', {
error: error instanceof Error ? error.message : String(error),
stack: error instanceof Error ? error.stack : undefined,
})
const enhancedError = error instanceof Error ? error : new Error(String(error))
Object.assign(enhancedError, {
timing: {
startTime: providerStartTimeISO,
endTime: new Date(providerEndTime).toISOString(),
duration,
},
})
throw enhancedError
}
}
/**
* Executes a request using the Gemini API
*
@@ -855,12 +391,6 @@ export async function executeGeminiRequest(
config: GeminiExecutionConfig
): Promise<ProviderResponse | StreamingExecution> {
const { ai, model, request, providerType } = config
// Route deep research models to the interactions API
if (isDeepResearchModel(model)) {
return executeDeepResearchRequest(config)
}
const logger = createLogger(providerType === 'google' ? 'GoogleProvider' : 'VertexProvider')
logger.info(`Preparing ${providerType} Gemini request`, {

View File

@@ -46,9 +46,6 @@ export interface ModelCapabilities {
levels: string[]
default?: string
}
deepResearch?: boolean
/** Whether this model supports conversation memory. Defaults to true if omitted. */
memory?: boolean
} }
export interface ModelDefinition {
@@ -828,7 +825,7 @@ export const PROVIDER_DEFINITIONS: Record<string, ProviderDefinition> = {
name: 'Google',
description: "Google's Gemini models",
defaultModel: 'gemini-2.5-pro',
modelPatterns: [/^gemini/, /^deep-research/],
modelPatterns: [/^gemini/],
capabilities: {
toolUsageControl: true,
},
@@ -931,19 +928,6 @@ export const PROVIDER_DEFINITIONS: Record<string, ProviderDefinition> = {
},
contextWindow: 1000000,
},
{
id: 'deep-research-pro-preview-12-2025',
pricing: {
input: 2.0,
output: 2.0,
updatedAt: '2026-02-10',
},
capabilities: {
deepResearch: true,
memory: false,
},
contextWindow: 1000000,
},
],
},
vertex: {
@@ -1054,19 +1038,6 @@ export const PROVIDER_DEFINITIONS: Record<string, ProviderDefinition> = {
},
contextWindow: 1000000,
},
{
id: 'vertex/deep-research-pro-preview-12-2025',
pricing: {
input: 2.0,
output: 2.0,
updatedAt: '2026-02-10',
},
capabilities: {
deepResearch: true,
memory: false,
},
contextWindow: 1000000,
},
],
},
deepseek: {
@@ -2509,37 +2480,6 @@ export function getThinkingLevelsForModel(modelId: string): string[] | null {
return capability?.levels ?? null
}
/**
* Get all models that support deep research capability
*/
export function getModelsWithDeepResearch(): string[] {
const models: string[] = []
for (const provider of Object.values(PROVIDER_DEFINITIONS)) {
for (const model of provider.models) {
if (model.capabilities.deepResearch) {
models.push(model.id)
}
}
}
return models
}
/**
* Get all models that explicitly disable memory support (memory: false).
* Models without this capability default to supporting memory.
*/
export function getModelsWithoutMemory(): string[] {
const models: string[] = []
for (const provider of Object.values(PROVIDER_DEFINITIONS)) {
for (const model of provider.models) {
if (model.capabilities.memory === false) {
models.push(model.id)
}
}
}
return models
}
/**
* Get the max output tokens for a specific model.
*

View File

@@ -95,8 +95,6 @@ export interface ProviderResponse {
total: number
pricing: ModelPricing
}
/** Interaction ID returned by the Interactions API (used for multi-turn deep research) */
interactionId?: string
} }
export type ToolUsageControl = 'auto' | 'force' | 'none'
@@ -171,8 +169,6 @@ export interface ProviderRequest {
verbosity?: string
thinkingLevel?: string
isDeployedContext?: boolean
/** Previous interaction ID for multi-turn Interactions API requests (deep research follow-ups) */
previousInteractionId?: string
} }
export const providers: Record<string, ProviderConfig> = {}

View File

@@ -12,8 +12,6 @@ import {
getMaxOutputTokensForModel as getMaxOutputTokensForModelFromDefinitions,
getMaxTemperature as getMaxTempFromDefinitions,
getModelPricing as getModelPricingFromDefinitions,
getModelsWithDeepResearch,
getModelsWithoutMemory,
getModelsWithReasoningEffort,
getModelsWithTemperatureSupport,
getModelsWithTempRange01,
@@ -955,8 +953,6 @@ export const MODELS_WITH_TEMPERATURE_SUPPORT = getModelsWithTemperatureSupport()
export const MODELS_WITH_REASONING_EFFORT = getModelsWithReasoningEffort()
export const MODELS_WITH_VERBOSITY = getModelsWithVerbosity()
export const MODELS_WITH_THINKING = getModelsWithThinking()
export const MODELS_WITH_DEEP_RESEARCH = getModelsWithDeepResearch()
export const MODELS_WITHOUT_MEMORY = getModelsWithoutMemory()
export const PROVIDERS_WITH_TOOL_USAGE_CONTROL = getProvidersWithToolUsageControl()
export function supportsTemperature(model: string): boolean {
@@ -975,10 +971,6 @@ export function supportsThinking(model: string): boolean {
return MODELS_WITH_THINKING.includes(model.toLowerCase())
}
export function isDeepResearchModel(model: string): boolean {
return MODELS_WITH_DEEP_RESEARCH.includes(model.toLowerCase())
}
/**
* Get the maximum temperature value for a model
* @returns Maximum temperature value (1 or 2) or undefined if temperature not supported

Binary file not shown (image changed: 78 KiB before, 45 KiB after).

Binary file not shown (image changed: 58 KiB before, 58 KiB after).

View File

@@ -39,6 +39,7 @@ import {
buildToolCallsById,
normalizeMessagesForUI,
persistMessages,
persistMessagesBeacon,
saveMessageCheckpoint,
} from '@/lib/copilot/messages'
import type { CopilotTransportMode } from '@/lib/copilot/models'
@@ -78,6 +79,28 @@ let _isPageUnloading = false
if (typeof window !== 'undefined') {
window.addEventListener('beforeunload', () => {
_isPageUnloading = true
// Emergency persistence: flush any pending streaming updates to the store and
// persist via sendBeacon (which is guaranteed to be queued during page teardown).
// Without this, thinking blocks and in-progress content are lost on refresh.
try {
const state = useCopilotStore.getState()
if (state.isSendingMessage && state.currentChat) {
// Flush batched streaming updates into the store messages
flushStreamingUpdates(useCopilotStore.setState.bind(useCopilotStore))
const flushedState = useCopilotStore.getState()
persistMessagesBeacon({
chatId: flushedState.currentChat!.id,
messages: flushedState.messages,
sensitiveCredentialIds: flushedState.sensitiveCredentialIds,
planArtifact: flushedState.streamingPlanContent || null,
mode: flushedState.mode,
model: flushedState.selectedModel,
})
}
} catch {
// Best-effort — don't let errors prevent page unload
}
})
}
function isPageUnloading(): boolean {
@@ -310,50 +333,6 @@ function parseModelKey(compositeKey: string): { provider: string; modelId: strin
return { provider: compositeKey.slice(0, slashIdx), modelId: compositeKey.slice(slashIdx + 1) }
}
/**
* Convert legacy/variant Claude IDs into the canonical ID shape used by the model catalog.
*
* Examples:
* - claude-4.5-opus -> claude-opus-4-5
* - claude-opus-4.6 -> claude-opus-4-6
* - anthropic.claude-opus-4-5-20251101-v1:0 -> claude-opus-4-5 (match key only)
*/
function canonicalizeModelMatchKey(modelId: string): string {
if (!modelId) return modelId
const normalized = modelId.trim().toLowerCase()
const toCanonicalClaude = (tier: string, version: string): string => {
const normalizedVersion = version.replace(/\./g, '-')
return `claude-${tier}-${normalizedVersion}`
}
const tierFirstExact = normalized.match(/^claude-(opus|sonnet|haiku)-(\d+(?:[.-]\d+)?)$/)
if (tierFirstExact) {
const [, tier, version] = tierFirstExact
return toCanonicalClaude(tier, version)
}
const versionFirstExact = normalized.match(/^claude-(\d+(?:[.-]\d+)?)-(opus|sonnet|haiku)$/)
if (versionFirstExact) {
const [, version, tier] = versionFirstExact
return toCanonicalClaude(tier, version)
}
const tierFirstEmbedded = normalized.match(/claude-(opus|sonnet|haiku)-(\d+(?:[.-]\d+)?)/)
if (tierFirstEmbedded) {
const [, tier, version] = tierFirstEmbedded
return toCanonicalClaude(tier, version)
}
const versionFirstEmbedded = normalized.match(/claude-(\d+(?:[.-]\d+)?)-(opus|sonnet|haiku)/)
if (versionFirstEmbedded) {
const [, version, tier] = versionFirstEmbedded
return toCanonicalClaude(tier, version)
}
return normalized
}
const MODEL_PROVIDER_PRIORITY = [
'anthropic',
'bedrock',
@@ -394,23 +373,12 @@ function normalizeSelectedModelKey(selectedModel: string, models: AvailableModel
const { provider, modelId } = parseModelKey(selectedModel)
const targetModelId = modelId || selectedModel
const targetMatchKey = canonicalizeModelMatchKey(targetModelId)
const matches = models.filter((m) => {
const candidateModelId = parseModelKey(m.id).modelId || m.id
const candidateMatchKey = canonicalizeModelMatchKey(candidateModelId)
return (
candidateModelId === targetModelId ||
m.id.endsWith(`/${targetModelId}`) ||
candidateMatchKey === targetMatchKey
)
})
const matches = models.filter((m) => m.id.endsWith(`/${targetModelId}`))
if (matches.length === 0) return selectedModel
if (provider) {
const sameProvider = matches.find(
(m) => m.provider === provider || m.id.startsWith(`${provider}/`)
)
const sameProvider = matches.find((m) => m.provider === provider)
if (sameProvider) return sameProvider.id
}
@@ -1148,12 +1116,11 @@ export const useCopilotStore = create<CopilotStore>()(
const chatConfig = chat.config ?? {}
const chatMode = chatConfig.mode || get().mode
const chatModel = chatConfig.model || get().selectedModel
const normalizedChatModel = normalizeSelectedModelKey(chatModel, get().availableModels)
logger.debug('[Chat] Restoring chat config', {
chatId: chat.id,
mode: chatMode,
model: normalizedChatModel,
model: chatModel,
hasPlanArtifact: !!planArtifact,
})
@@ -1175,7 +1142,7 @@ export const useCopilotStore = create<CopilotStore>()(
showPlanTodos: false,
streamingPlanContent: planArtifact,
mode: chatMode,
selectedModel: normalizedChatModel as CopilotStore['selectedModel'],
selectedModel: chatModel as CopilotStore['selectedModel'],
suppressAutoSelect: false,
})
@@ -1348,10 +1315,6 @@ export const useCopilotStore = create<CopilotStore>()(
const refreshedConfig = updatedCurrentChat.config ?? {}
const refreshedMode = refreshedConfig.mode || get().mode
const refreshedModel = refreshedConfig.model || get().selectedModel
const normalizedRefreshedModel = normalizeSelectedModelKey(
refreshedModel,
get().availableModels
)
const toolCallsById = buildToolCallsById(normalizedMessages)
set({
@@ -1360,7 +1323,7 @@ export const useCopilotStore = create<CopilotStore>()(
toolCallsById,
streamingPlanContent: refreshedPlanArtifact,
mode: refreshedMode,
selectedModel: normalizedRefreshedModel as CopilotStore['selectedModel'],
selectedModel: refreshedModel as CopilotStore['selectedModel'],
})
}
try {
@@ -1380,15 +1343,11 @@ export const useCopilotStore = create<CopilotStore>()(
const chatConfig = mostRecentChat.config ?? {}
const chatMode = chatConfig.mode || get().mode
const chatModel = chatConfig.model || get().selectedModel
const normalizedChatModel = normalizeSelectedModelKey(
chatModel,
get().availableModels
)
logger.info('[Chat] Auto-selecting most recent chat with config', {
chatId: mostRecentChat.id,
mode: chatMode,
model: normalizedChatModel,
model: chatModel,
hasPlanArtifact: !!planArtifact,
})
@@ -1400,7 +1359,7 @@ export const useCopilotStore = create<CopilotStore>()(
toolCallsById,
streamingPlanContent: planArtifact,
mode: chatMode,
selectedModel: normalizedChatModel as CopilotStore['selectedModel'],
selectedModel: chatModel as CopilotStore['selectedModel'],
})
try {
await get().loadMessageCheckpoints(mostRecentChat.id)
@@ -1525,19 +1484,26 @@ export const useCopilotStore = create<CopilotStore>()(
// Immediately put all in-progress tools into aborted state
abortAllInProgressTools(set, get)
// Persist whatever contentBlocks/text we have to keep ordering for reloads.
// During page unload, use sendBeacon which is guaranteed to be queued even
// as the page tears down. Regular async fetch won't complete in time.
const { currentChat, streamingPlanContent, mode, selectedModel } = get()
if (currentChat) {
try {
const currentMessages = get().messages
void persistMessages({
const persistParams = {
chatId: currentChat.id,
messages: currentMessages,
sensitiveCredentialIds: get().sensitiveCredentialIds,
planArtifact: streamingPlanContent || null,
mode,
model: selectedModel,
})
}
if (isPageUnloading()) {
persistMessagesBeacon(persistParams)
} else {
void persistMessages(persistParams)
}
} catch (error) {
logger.warn('[Copilot] Failed to queue abort snapshot persistence', {
error: error instanceof Error ? error.message : String(error),
@@ -2332,8 +2298,7 @@ export const useCopilotStore = create<CopilotStore>()(
},
setSelectedModel: async (model) => {
const normalizedModel = normalizeSelectedModelKey(model, get().availableModels)
set({ selectedModel: normalizedModel as CopilotStore['selectedModel'] })
set({ selectedModel: model })
},
setAgentPrefetch: (prefetch) => set({ agentPrefetch: prefetch }),
loadAvailableModels: async () => {

View File

@@ -1,114 +0,0 @@
import { TIMESTAMP_OUTPUT } from '@/tools/confluence/types'
import type { ToolConfig } from '@/tools/types'
export interface ConfluenceDeleteLabelParams {
accessToken: string
domain: string
pageId: string
labelName: string
cloudId?: string
}
export interface ConfluenceDeleteLabelResponse {
success: boolean
output: {
ts: string
pageId: string
labelName: string
deleted: boolean
}
}
export const confluenceDeleteLabelTool: ToolConfig<
ConfluenceDeleteLabelParams,
ConfluenceDeleteLabelResponse
> = {
id: 'confluence_delete_label',
name: 'Confluence Delete Label',
description: 'Remove a label from a Confluence page.',
version: '1.0.0',
oauth: {
required: true,
provider: 'confluence',
},
params: {
accessToken: {
type: 'string',
required: true,
visibility: 'hidden',
description: 'OAuth access token for Confluence',
},
domain: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'Your Confluence domain (e.g., yourcompany.atlassian.net)',
},
pageId: {
type: 'string',
required: true,
visibility: 'user-or-llm',
description: 'Confluence page ID to remove the label from',
},
labelName: {
type: 'string',
required: true,
visibility: 'user-or-llm',
description: 'Name of the label to remove',
},
cloudId: {
type: 'string',
required: false,
visibility: 'user-only',
description:
'Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain.',
},
},
request: {
url: () => '/api/tools/confluence/labels',
method: 'DELETE',
headers: (params: ConfluenceDeleteLabelParams) => ({
Accept: 'application/json',
Authorization: `Bearer ${params.accessToken}`,
}),
body: (params: ConfluenceDeleteLabelParams) => ({
domain: params.domain,
accessToken: params.accessToken,
pageId: params.pageId?.trim(),
labelName: params.labelName?.trim(),
cloudId: params.cloudId,
}),
},
transformResponse: async (response: Response) => {
const data = await response.json()
return {
success: true,
output: {
ts: new Date().toISOString(),
pageId: data.pageId ?? '',
labelName: data.labelName ?? '',
deleted: true,
},
}
},
outputs: {
ts: TIMESTAMP_OUTPUT,
pageId: {
type: 'string',
description: 'Page ID the label was removed from',
},
labelName: {
type: 'string',
description: 'Name of the removed label',
},
deleted: {
type: 'boolean',
description: 'Deletion status',
},
},
}

View File

@@ -1,105 +0,0 @@
import { TIMESTAMP_OUTPUT } from '@/tools/confluence/types'
import type { ToolConfig } from '@/tools/types'
export interface ConfluenceDeletePagePropertyParams {
accessToken: string
domain: string
pageId: string
propertyId: string
cloudId?: string
}
export interface ConfluenceDeletePagePropertyResponse {
success: boolean
output: {
ts: string
pageId: string
propertyId: string
deleted: boolean
}
}
export const confluenceDeletePagePropertyTool: ToolConfig<
ConfluenceDeletePagePropertyParams,
ConfluenceDeletePagePropertyResponse
> = {
id: 'confluence_delete_page_property',
name: 'Confluence Delete Page Property',
description: 'Delete a content property from a Confluence page by its property ID.',
version: '1.0.0',
oauth: {
required: true,
provider: 'confluence',
},
params: {
accessToken: {
type: 'string',
required: true,
visibility: 'hidden',
description: 'OAuth access token for Confluence',
},
domain: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'Your Confluence domain (e.g., yourcompany.atlassian.net)',
},
pageId: {
type: 'string',
required: true,
visibility: 'user-or-llm',
description: 'The ID of the page containing the property',
},
propertyId: {
type: 'string',
required: true,
visibility: 'user-or-llm',
description: 'The ID of the property to delete',
},
cloudId: {
type: 'string',
required: false,
visibility: 'user-only',
description:
'Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain.',
},
},
request: {
url: () => '/api/tools/confluence/page-properties',
method: 'DELETE',
headers: (params: ConfluenceDeletePagePropertyParams) => ({
Accept: 'application/json',
Authorization: `Bearer ${params.accessToken}`,
}),
body: (params: ConfluenceDeletePagePropertyParams) => ({
domain: params.domain,
accessToken: params.accessToken,
pageId: params.pageId?.trim(),
propertyId: params.propertyId?.trim(),
cloudId: params.cloudId,
}),
},
transformResponse: async (response: Response) => {
const data = await response.json()
return {
success: true,
output: {
ts: new Date().toISOString(),
pageId: data.pageId ?? '',
propertyId: data.propertyId ?? '',
deleted: true,
},
}
},
outputs: {
ts: TIMESTAMP_OUTPUT,
pageId: { type: 'string', description: 'ID of the page' },
propertyId: { type: 'string', description: 'ID of the deleted property' },
deleted: { type: 'boolean', description: 'Deletion status' },
},
}

View File

@@ -1,143 +0,0 @@
import { PAGE_ITEM_PROPERTIES, TIMESTAMP_OUTPUT } from '@/tools/confluence/types'
import type { ToolConfig } from '@/tools/types'
export interface ConfluenceGetPagesByLabelParams {
accessToken: string
domain: string
labelId: string
limit?: number
cursor?: string
cloudId?: string
}
export interface ConfluenceGetPagesByLabelResponse {
success: boolean
output: {
ts: string
labelId: string
pages: Array<{
id: string
title: string
status: string | null
spaceId: string | null
parentId: string | null
authorId: string | null
createdAt: string | null
version: {
number: number
message?: string
createdAt?: string
} | null
}>
nextCursor: string | null
}
}
export const confluenceGetPagesByLabelTool: ToolConfig<
ConfluenceGetPagesByLabelParams,
ConfluenceGetPagesByLabelResponse
> = {
id: 'confluence_get_pages_by_label',
name: 'Confluence Get Pages by Label',
description: 'Retrieve all pages that have a specific label applied.',
version: '1.0.0',
oauth: {
required: true,
provider: 'confluence',
},
params: {
accessToken: {
type: 'string',
required: true,
visibility: 'hidden',
description: 'OAuth access token for Confluence',
},
domain: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'Your Confluence domain (e.g., yourcompany.atlassian.net)',
},
labelId: {
type: 'string',
required: true,
visibility: 'user-or-llm',
description: 'The ID of the label to get pages for',
},
limit: {
type: 'number',
required: false,
visibility: 'user-or-llm',
description: 'Maximum number of pages to return (default: 50, max: 250)',
},
cursor: {
type: 'string',
required: false,
visibility: 'user-or-llm',
description: 'Pagination cursor from previous response',
},
cloudId: {
type: 'string',
required: false,
visibility: 'user-only',
description:
'Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain.',
},
},
request: {
url: (params: ConfluenceGetPagesByLabelParams) => {
const query = new URLSearchParams({
domain: params.domain,
accessToken: params.accessToken,
labelId: params.labelId,
limit: String(params.limit || 50),
})
if (params.cursor) {
query.set('cursor', params.cursor)
}
if (params.cloudId) {
query.set('cloudId', params.cloudId)
}
return `/api/tools/confluence/pages-by-label?${query.toString()}`
},
method: 'GET',
headers: (params: ConfluenceGetPagesByLabelParams) => ({
Accept: 'application/json',
Authorization: `Bearer ${params.accessToken}`,
}),
},
transformResponse: async (response: Response) => {
const data = await response.json()
return {
success: true,
output: {
ts: new Date().toISOString(),
labelId: data.labelId ?? '',
pages: data.pages ?? [],
nextCursor: data.nextCursor ?? null,
},
}
},
outputs: {
ts: TIMESTAMP_OUTPUT,
labelId: { type: 'string', description: 'ID of the label' },
pages: {
type: 'array',
description: 'Array of pages with this label',
items: {
type: 'object',
properties: PAGE_ITEM_PROPERTIES,
},
},
nextCursor: {
type: 'string',
description: 'Cursor for fetching the next page of results',
optional: true,
},
},
}

View File

@@ -5,14 +5,11 @@ import { confluenceCreatePageTool } from '@/tools/confluence/create_page'
import { confluenceCreatePagePropertyTool } from '@/tools/confluence/create_page_property'
import { confluenceDeleteAttachmentTool } from '@/tools/confluence/delete_attachment'
import { confluenceDeleteCommentTool } from '@/tools/confluence/delete_comment'
import { confluenceDeleteLabelTool } from '@/tools/confluence/delete_label'
import { confluenceDeletePageTool } from '@/tools/confluence/delete_page'
import { confluenceDeletePagePropertyTool } from '@/tools/confluence/delete_page_property'
import { confluenceGetBlogPostTool } from '@/tools/confluence/get_blogpost'
import { confluenceGetPageAncestorsTool } from '@/tools/confluence/get_page_ancestors'
import { confluenceGetPageChildrenTool } from '@/tools/confluence/get_page_children'
import { confluenceGetPageVersionTool } from '@/tools/confluence/get_page_version'
import { confluenceGetPagesByLabelTool } from '@/tools/confluence/get_pages_by_label'
import { confluenceGetSpaceTool } from '@/tools/confluence/get_space'
import { confluenceListAttachmentsTool } from '@/tools/confluence/list_attachments'
import { confluenceListBlogPostsTool } from '@/tools/confluence/list_blogposts'
@@ -22,7 +19,6 @@ import { confluenceListLabelsTool } from '@/tools/confluence/list_labels'
import { confluenceListPagePropertiesTool } from '@/tools/confluence/list_page_properties'
import { confluenceListPageVersionsTool } from '@/tools/confluence/list_page_versions'
import { confluenceListPagesInSpaceTool } from '@/tools/confluence/list_pages_in_space'
import { confluenceListSpaceLabelsTool } from '@/tools/confluence/list_space_labels'
import { confluenceListSpacesTool } from '@/tools/confluence/list_spaces'
import { confluenceRetrieveTool } from '@/tools/confluence/retrieve'
import { confluenceSearchTool } from '@/tools/confluence/search'
@@ -82,7 +78,6 @@ export {
// Page Properties Tools
confluenceListPagePropertiesTool,
confluenceCreatePagePropertyTool,
confluenceDeletePagePropertyTool,
// Blog Post Tools
confluenceListBlogPostsTool,
confluenceGetBlogPostTool,
@@ -103,9 +98,6 @@ export {
// Label Tools
confluenceListLabelsTool,
confluenceAddLabelTool,
confluenceDeleteLabelTool,
confluenceGetPagesByLabelTool,
confluenceListSpaceLabelsTool,
// Space Tools
confluenceGetSpaceTool,
confluenceListSpacesTool,

View File

@@ -1,134 +0,0 @@
import { LABEL_ITEM_PROPERTIES, TIMESTAMP_OUTPUT } from '@/tools/confluence/types'
import type { ToolConfig } from '@/tools/types'
export interface ConfluenceListSpaceLabelsParams {
accessToken: string
domain: string
spaceId: string
limit?: number
cursor?: string
cloudId?: string
}
export interface ConfluenceListSpaceLabelsResponse {
success: boolean
output: {
ts: string
spaceId: string
labels: Array<{
id: string
name: string
prefix: string
}>
nextCursor: string | null
}
}
export const confluenceListSpaceLabelsTool: ToolConfig<
ConfluenceListSpaceLabelsParams,
ConfluenceListSpaceLabelsResponse
> = {
id: 'confluence_list_space_labels',
name: 'Confluence List Space Labels',
description: 'List all labels associated with a Confluence space.',
version: '1.0.0',
oauth: {
required: true,
provider: 'confluence',
},
params: {
accessToken: {
type: 'string',
required: true,
visibility: 'hidden',
description: 'OAuth access token for Confluence',
},
domain: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'Your Confluence domain (e.g., yourcompany.atlassian.net)',
},
spaceId: {
type: 'string',
required: true,
visibility: 'user-or-llm',
description: 'The ID of the Confluence space to list labels from',
},
limit: {
type: 'number',
required: false,
visibility: 'user-or-llm',
description: 'Maximum number of labels to return (default: 25, max: 250)',
},
cursor: {
type: 'string',
required: false,
visibility: 'user-or-llm',
description: 'Pagination cursor from previous response',
},
cloudId: {
type: 'string',
required: false,
visibility: 'user-only',
description:
'Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain.',
},
},
request: {
url: (params: ConfluenceListSpaceLabelsParams) => {
const query = new URLSearchParams({
domain: params.domain,
accessToken: params.accessToken,
spaceId: params.spaceId,
limit: String(params.limit || 25),
})
if (params.cursor) {
query.set('cursor', params.cursor)
}
if (params.cloudId) {
query.set('cloudId', params.cloudId)
}
return `/api/tools/confluence/space-labels?${query.toString()}`
},
method: 'GET',
headers: (params: ConfluenceListSpaceLabelsParams) => ({
Accept: 'application/json',
Authorization: `Bearer ${params.accessToken}`,
}),
},
transformResponse: async (response: Response) => {
const data = await response.json()
return {
success: true,
output: {
ts: new Date().toISOString(),
spaceId: data.spaceId ?? '',
labels: data.labels ?? [],
nextCursor: data.nextCursor ?? null,
},
}
},
outputs: {
ts: TIMESTAMP_OUTPUT,
spaceId: { type: 'string', description: 'ID of the space' },
labels: {
type: 'array',
description: 'Array of labels on the space',
items: {
type: 'object',
properties: LABEL_ITEM_PROPERTIES,
},
},
nextCursor: {
type: 'string',
description: 'Cursor for fetching the next page of results',
optional: true,
},
},
}

View File

@@ -118,13 +118,10 @@ import {
confluenceCreatePageTool,
confluenceDeleteAttachmentTool,
confluenceDeleteCommentTool,
confluenceDeleteLabelTool,
confluenceDeletePagePropertyTool,
confluenceDeletePageTool,
confluenceGetBlogPostTool,
confluenceGetPageAncestorsTool,
confluenceGetPageChildrenTool,
confluenceGetPagesByLabelTool,
confluenceGetPageVersionTool,
confluenceGetSpaceTool,
confluenceListAttachmentsTool,
@@ -135,7 +132,6 @@ import {
confluenceListPagePropertiesTool,
confluenceListPagesInSpaceTool,
confluenceListPageVersionsTool,
confluenceListSpaceLabelsTool,
confluenceListSpacesTool,
confluenceRetrieveTool,
confluenceSearchInSpaceTool,
@@ -2671,10 +2667,6 @@ export const tools: Record<string, ToolConfig> = {
confluence_delete_attachment: confluenceDeleteAttachmentTool,
confluence_list_labels: confluenceListLabelsTool,
confluence_add_label: confluenceAddLabelTool,
confluence_get_pages_by_label: confluenceGetPagesByLabelTool,
confluence_list_space_labels: confluenceListSpaceLabelsTool,
confluence_delete_label: confluenceDeleteLabelTool,
confluence_delete_page_property: confluenceDeletePagePropertyTool,
confluence_get_space: confluenceGetSpaceTool,
confluence_list_spaces: confluenceListSpacesTool,
cursor_list_agents: cursorListAgentsTool,