Mirror of https://github.com/simstudioai/sim.git (synced 2026-02-12 15:34:58 -05:00)

Compare commits: feat/strea ... staging

11 Commits
| SHA1 |
|------|
| 2944579d21 |
| 81dfeb0bb0 |
| 01577a18b4 |
| 52aff4d60b |
| 3a3bddd6f8 |
| 639d50d6b9 |
| cec74e09c2 |
| d5a756c9f2 |
| f3e994baf0 |
| 2f492cacc1 |
| 5792e7e5f9 |
@@ -41,9 +41,6 @@ Diese Tastenkombinationen wechseln zwischen den Panel-Tabs auf der rechten Seite

| Tastenkombination | Aktion |
|----------|--------|
| `C` | Copilot-Tab fokussieren |
| `T` | Toolbar-Tab fokussieren |
| `E` | Editor-Tab fokussieren |
| `Mod` + `F` | Toolbar-Suche fokussieren |

## Globale Navigation
@@ -43,9 +43,6 @@ These shortcuts switch between panel tabs on the right side of the canvas.

| Shortcut | Action |
|----------|--------|
| `C` | Focus Copilot tab |
| `T` | Focus Toolbar tab |
| `E` | Focus Editor tab |
| `Mod` + `F` | Focus Toolbar search |

## Global Navigation
@@ -399,6 +399,28 @@ Create a new custom property (metadata) on a Confluence page.

| ↳ `authorId` | string | Account ID of the version author |
| ↳ `createdAt` | string | ISO 8601 timestamp of version creation |

### `confluence_delete_page_property`

Delete a content property from a Confluence page by its property ID.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `domain` | string | Yes | Your Confluence domain (e.g., yourcompany.atlassian.net) |
| `pageId` | string | Yes | The ID of the page containing the property |
| `propertyId` | string | Yes | The ID of the property to delete |
| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `ts` | string | ISO 8601 timestamp of the operation |
| `pageId` | string | ID of the page |
| `propertyId` | string | ID of the deleted property |
| `deleted` | boolean | Deletion status |

### `confluence_search`

Search for content across Confluence pages, blog posts, and other content.
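For orientation, here is a minimal sketch of the REST call this tool likely wraps, assuming the Confluence Cloud v2 endpoint `DELETE /pages/{page-id}/properties/{property-id}`; the identifiers and token are placeholders, not values from this diff:

```ts
// Hypothetical helper; sketches only the underlying Confluence v2 call.
async function deletePageProperty(
  cloudId: string,
  pageId: string,
  propertyId: string,
  accessToken: string
): Promise<boolean> {
  const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/pages/${pageId}/properties/${propertyId}`
  const response = await fetch(url, {
    method: 'DELETE',
    headers: {
      Accept: 'application/json',
      Authorization: `Bearer ${accessToken}`,
    },
  })
  // Confluence replies 204 No Content when the property was deleted
  return response.status === 204
}
```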
@@ -872,6 +894,90 @@ Add a label to a Confluence page for organization and categorization.

| `labelName` | string | Name of the added label |
| `labelId` | string | ID of the added label |

### `confluence_delete_label`

Remove a label from a Confluence page.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `domain` | string | Yes | Your Confluence domain (e.g., yourcompany.atlassian.net) |
| `pageId` | string | Yes | Confluence page ID to remove the label from |
| `labelName` | string | Yes | Name of the label to remove |
| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `ts` | string | ISO 8601 timestamp of the operation |
| `pageId` | string | Page ID the label was removed from |
| `labelName` | string | Name of the removed label |
| `deleted` | boolean | Deletion status |

### `confluence_get_pages_by_label`

Retrieve all pages that have a specific label applied.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `domain` | string | Yes | Your Confluence domain (e.g., yourcompany.atlassian.net) |
| `labelId` | string | Yes | The ID of the label to get pages for |
| `limit` | number | No | Maximum number of pages to return (default: 50, max: 250) |
| `cursor` | string | No | Pagination cursor from previous response |
| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `ts` | string | ISO 8601 timestamp of the operation |
| `labelId` | string | ID of the label |
| `pages` | array | Array of pages with this label |
| ↳ `id` | string | Unique page identifier |
| ↳ `title` | string | Page title |
| ↳ `status` | string | Page status (e.g., current, archived, trashed, draft) |
| ↳ `spaceId` | string | ID of the space containing the page |
| ↳ `parentId` | string | ID of the parent page (null if top-level) |
| ↳ `authorId` | string | Account ID of the page author |
| ↳ `createdAt` | string | ISO 8601 timestamp when the page was created |
| ↳ `version` | object | Page version information |
| ↳ `number` | number | Version number |
| ↳ `message` | string | Version message |
| ↳ `minorEdit` | boolean | Whether this is a minor edit |
| ↳ `authorId` | string | Account ID of the version author |
| ↳ `createdAt` | string | ISO 8601 timestamp of version creation |
| `nextCursor` | string | Cursor for fetching the next page of results |

### `confluence_list_space_labels`

List all labels associated with a Confluence space.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `domain` | string | Yes | Your Confluence domain (e.g., yourcompany.atlassian.net) |
| `spaceId` | string | Yes | The ID of the Confluence space to list labels from |
| `limit` | number | No | Maximum number of labels to return (default: 25, max: 250) |
| `cursor` | string | No | Pagination cursor from previous response |
| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |

#### Output

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `ts` | string | ISO 8601 timestamp of the operation |
| `spaceId` | string | ID of the space |
| `labels` | array | Array of labels on the space |
| ↳ `id` | string | Unique label identifier |
| ↳ `name` | string | Label name |
| ↳ `prefix` | string | Label prefix/type (e.g., global, my, team) |
| `nextCursor` | string | Cursor for fetching the next page of results |

### `confluence_get_space`

Get details about a specific Confluence space.
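Both list tools page with `nextCursor`, so a caller drains results with a cursor loop. A minimal sketch, where `getPagesByLabel` is a hypothetical client wrapper around `confluence_get_pages_by_label`:

```ts
// Hypothetical wrapper around the confluence_get_pages_by_label tool.
declare function getPagesByLabel(args: {
  labelId: string
  limit?: number
  cursor?: string
}): Promise<{ pages: Array<{ id: string; title: string }>; nextCursor: string | null }>

// Follow nextCursor until the API stops returning one.
async function getAllPagesForLabel(labelId: string) {
  const all: Array<{ id: string; title: string }> = []
  let cursor: string | undefined
  do {
    const res = await getPagesByLabel({ labelId, limit: 250, cursor })
    all.push(...res.pages)
    cursor = res.nextCursor ?? undefined
  } while (cursor)
  return all
}
```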
@@ -42,9 +42,6 @@ Estos atajos cambian entre las pestañas del panel en el lado derecho del lienzo

| Atajo | Acción |
|----------|--------|
| `C` | Enfocar pestaña Copilot |
| `T` | Enfocar pestaña Barra de herramientas |
| `E` | Enfocar pestaña Editor |
| `Mod` + `F` | Enfocar búsqueda de Barra de herramientas |

## Navegación global
@@ -42,9 +42,6 @@ Ces raccourcis permettent de basculer entre les onglets du panneau sur le côté

| Raccourci | Action |
|----------|--------|
| `C` | Activer l'onglet Copilot |
| `T` | Activer l'onglet Barre d'outils |
| `E` | Activer l'onglet Éditeur |
| `Mod` + `F` | Activer la recherche dans la barre d'outils |

## Navigation globale
@@ -41,9 +41,6 @@ import { Callout } from 'fumadocs-ui/components/callout'

| ショートカット | 操作 |
|----------|--------|
| `C` | Copilotタブにフォーカス |
| `T` | Toolbarタブにフォーカス |
| `E` | Editorタブにフォーカス |
| `Mod` + `F` | Toolbar検索にフォーカス |

## グローバルナビゲーション
@@ -41,9 +41,6 @@ import { Callout } from 'fumadocs-ui/components/callout'

| 快捷键 | 操作 |
|----------|--------|
| `C` | 聚焦 Copilot 标签页 |
| `T` | 聚焦 Toolbar 标签页 |
| `E` | 聚焦 Editor 标签页 |
| `Mod` + `F` | 聚焦 Toolbar 搜索 |

## 全局导航
@@ -113,6 +113,7 @@ const ChatMessageSchema = z.object({
  workflowId: z.string().optional(),
  knowledgeId: z.string().optional(),
  blockId: z.string().optional(),
  blockIds: z.array(z.string()).optional(),
  templateId: z.string().optional(),
  executionId: z.string().optional(),
  // For workflow_block, provide both workflowId and blockId
@@ -159,6 +160,20 @@ export async function POST(req: NextRequest) {
    commands,
  } = ChatMessageSchema.parse(body)

  const normalizedContexts = Array.isArray(contexts)
    ? contexts.map((ctx) => {
        if (ctx.kind !== 'blocks') return ctx
        if (Array.isArray(ctx.blockIds) && ctx.blockIds.length > 0) return ctx
        if (ctx.blockId) {
          return {
            ...ctx,
            blockIds: [ctx.blockId],
          }
        }
        return ctx
      })
    : contexts

  // Resolve workflowId - if not provided, use first workflow or find by name
  const resolved = await resolveWorkflowIdForUser(
    authenticatedUserId,
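The normalization above keeps older clients working: a lone legacy `blockId` is lifted into the newer `blockIds` array, and everything else passes through untouched. A self-contained sketch of that rule (types simplified for illustration):

```ts
// Simplified context shape for illustration only.
type ChatContext = { kind: string; blockId?: string; blockIds?: string[] }

function normalizeContext(ctx: ChatContext): ChatContext {
  if (ctx.kind !== 'blocks') return ctx // only block contexts are touched
  if (Array.isArray(ctx.blockIds) && ctx.blockIds.length > 0) return ctx // already new-style
  if (ctx.blockId) return { ...ctx, blockIds: [ctx.blockId] } // lift legacy field
  return ctx
}

// normalizeContext({ kind: 'blocks', blockId: 'b1' })
//   => { kind: 'blocks', blockId: 'b1', blockIds: ['b1'] }
```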
@@ -176,10 +191,10 @@ export async function POST(req: NextRequest) {
  const userMessageIdToUse = userMessageId || crypto.randomUUID()
  try {
    logger.info(`[${tracker.requestId}] Received chat POST`, {
      hasContexts: Array.isArray(contexts),
      contextsCount: Array.isArray(contexts) ? contexts.length : 0,
      contextsPreview: Array.isArray(contexts)
        ? contexts.map((c: any) => ({
      hasContexts: Array.isArray(normalizedContexts),
      contextsCount: Array.isArray(normalizedContexts) ? normalizedContexts.length : 0,
      contextsPreview: Array.isArray(normalizedContexts)
        ? normalizedContexts.map((c: any) => ({
            kind: c?.kind,
            chatId: c?.chatId,
            workflowId: c?.workflowId,
@@ -191,17 +206,25 @@ export async function POST(req: NextRequest) {
    } catch {}
    // Preprocess contexts server-side
    let agentContexts: Array<{ type: string; content: string }> = []
    if (Array.isArray(contexts) && contexts.length > 0) {
    if (Array.isArray(normalizedContexts) && normalizedContexts.length > 0) {
      try {
        const { processContextsServer } = await import('@/lib/copilot/process-contents')
        const processed = await processContextsServer(contexts as any, authenticatedUserId, message)
        const processed = await processContextsServer(
          normalizedContexts as any,
          authenticatedUserId,
          message
        )
        agentContexts = processed
        logger.info(`[${tracker.requestId}] Contexts processed for request`, {
          processedCount: agentContexts.length,
          kinds: agentContexts.map((c) => c.type),
          lengthPreview: agentContexts.map((c) => c.content?.length ?? 0),
        })
        if (Array.isArray(contexts) && contexts.length > 0 && agentContexts.length === 0) {
        if (
          Array.isArray(normalizedContexts) &&
          normalizedContexts.length > 0 &&
          agentContexts.length === 0
        ) {
          logger.warn(
            `[${tracker.requestId}] Contexts provided but none processed. Check executionId for logs contexts.`
          )
@@ -246,11 +269,13 @@ export async function POST(req: NextRequest) {
        mode,
        model: selectedModel,
        provider,
        conversationId: effectiveConversationId,
        conversationHistory,
        contexts: agentContexts,
        fileAttachments,
        commands,
        chatId: actualChatId,
        prefetch,
        implicitFeedback,
      },
      {
@@ -432,10 +457,15 @@ export async function POST(req: NextRequest) {
      content: message,
      timestamp: new Date().toISOString(),
      ...(fileAttachments && fileAttachments.length > 0 && { fileAttachments }),
      ...(Array.isArray(contexts) && contexts.length > 0 && { contexts }),
      ...(Array.isArray(contexts) &&
        contexts.length > 0 && {
          contentBlocks: [{ type: 'contexts', contexts: contexts as any, timestamp: Date.now() }],
      ...(Array.isArray(normalizedContexts) &&
        normalizedContexts.length > 0 && {
          contexts: normalizedContexts,
        }),
      ...(Array.isArray(normalizedContexts) &&
        normalizedContexts.length > 0 && {
          contentBlocks: [
            { type: 'contexts', contexts: normalizedContexts as any, timestamp: Date.now() },
          ],
        }),
    }
@@ -191,3 +191,84 @@ export async function GET(request: NextRequest) {
    )
  }
}

// Delete a label from a page
export async function DELETE(request: NextRequest) {
  try {
    const auth = await checkSessionOrInternalAuth(request)
    if (!auth.success || !auth.userId) {
      return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
    }

    const {
      domain,
      accessToken,
      cloudId: providedCloudId,
      pageId,
      labelName,
    } = await request.json()

    if (!domain) {
      return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
    }

    if (!accessToken) {
      return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
    }

    if (!pageId) {
      return NextResponse.json({ error: 'Page ID is required' }, { status: 400 })
    }

    if (!labelName) {
      return NextResponse.json({ error: 'Label name is required' }, { status: 400 })
    }

    const pageIdValidation = validateAlphanumericId(pageId, 'pageId', 255)
    if (!pageIdValidation.isValid) {
      return NextResponse.json({ error: pageIdValidation.error }, { status: 400 })
    }

    const cloudId = providedCloudId || (await getConfluenceCloudId(domain, accessToken))

    const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
    if (!cloudIdValidation.isValid) {
      return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
    }

    const encodedLabel = encodeURIComponent(labelName.trim())
    const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/rest/api/content/${pageId}/label?name=${encodedLabel}`

    const response = await fetch(url, {
      method: 'DELETE',
      headers: {
        Accept: 'application/json',
        Authorization: `Bearer ${accessToken}`,
      },
    })

    if (!response.ok) {
      const errorData = await response.json().catch(() => null)
      logger.error('Confluence API error response:', {
        status: response.status,
        statusText: response.statusText,
        error: JSON.stringify(errorData, null, 2),
      })
      const errorMessage =
        errorData?.message || `Failed to delete Confluence label (${response.status})`
      return NextResponse.json({ error: errorMessage }, { status: response.status })
    }

    return NextResponse.json({
      pageId,
      labelName,
      deleted: true,
    })
  } catch (error) {
    logger.error('Error deleting Confluence label:', error)
    return NextResponse.json(
      { error: (error as Error).message || 'Internal server error' },
      { status: 500 }
    )
  }
}
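The diff does not show the file name for this hunk, so the route path below is an assumption (it reads like the existing label route under `/api/tools/confluence/label`). A client call would look roughly like this; all values are placeholders:

```ts
// Hypothetical call against the new DELETE handler; the handler reads a JSON
// body rather than query parameters.
async function removeLabelFromPage() {
  const res = await fetch('/api/tools/confluence/label', {
    method: 'DELETE',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      domain: 'yourcompany.atlassian.net',
      accessToken: '<token>',
      pageId: '12345',
      labelName: 'obsolete',
    }),
  })
  // On success the handler returns { pageId, labelName, deleted: true }
  return res.json()
}
```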
apps/sim/app/api/tools/confluence/pages-by-label/route.ts (new file, 103 lines)
@@ -0,0 +1,103 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { validateAlphanumericId, validateJiraCloudId } from '@/lib/core/security/input-validation'
import { getConfluenceCloudId } from '@/tools/confluence/utils'

const logger = createLogger('ConfluencePagesByLabelAPI')

export const dynamic = 'force-dynamic'

export async function GET(request: NextRequest) {
  try {
    const auth = await checkSessionOrInternalAuth(request)
    if (!auth.success || !auth.userId) {
      return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
    }

    const { searchParams } = new URL(request.url)
    const domain = searchParams.get('domain')
    const accessToken = searchParams.get('accessToken')
    const labelId = searchParams.get('labelId')
    const providedCloudId = searchParams.get('cloudId')
    const limit = searchParams.get('limit') || '50'
    const cursor = searchParams.get('cursor')

    if (!domain) {
      return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
    }

    if (!accessToken) {
      return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
    }

    if (!labelId) {
      return NextResponse.json({ error: 'Label ID is required' }, { status: 400 })
    }

    const labelIdValidation = validateAlphanumericId(labelId, 'labelId', 255)
    if (!labelIdValidation.isValid) {
      return NextResponse.json({ error: labelIdValidation.error }, { status: 400 })
    }

    const cloudId = providedCloudId || (await getConfluenceCloudId(domain, accessToken))

    const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
    if (!cloudIdValidation.isValid) {
      return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
    }

    const queryParams = new URLSearchParams()
    queryParams.append('limit', String(Math.min(Number(limit), 250)))
    if (cursor) {
      queryParams.append('cursor', cursor)
    }
    const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/labels/${labelId}/pages?${queryParams.toString()}`

    const response = await fetch(url, {
      method: 'GET',
      headers: {
        Accept: 'application/json',
        Authorization: `Bearer ${accessToken}`,
      },
    })

    if (!response.ok) {
      const errorData = await response.json().catch(() => null)
      logger.error('Confluence API error response:', {
        status: response.status,
        statusText: response.statusText,
        error: JSON.stringify(errorData, null, 2),
      })
      const errorMessage = errorData?.message || `Failed to get pages by label (${response.status})`
      return NextResponse.json({ error: errorMessage }, { status: response.status })
    }

    const data = await response.json()

    const pages = (data.results || []).map((page: any) => ({
      id: page.id,
      title: page.title,
      status: page.status ?? null,
      spaceId: page.spaceId ?? null,
      parentId: page.parentId ?? null,
      authorId: page.authorId ?? null,
      createdAt: page.createdAt ?? null,
      version: page.version ?? null,
    }))

    return NextResponse.json({
      pages,
      labelId,
      nextCursor: data._links?.next
        ? new URL(data._links.next, 'https://placeholder').searchParams.get('cursor')
        : null,
    })
  } catch (error) {
    logger.error('Error getting pages by label:', error)
    return NextResponse.json(
      { error: (error as Error).message || 'Internal server error' },
      { status: 500 }
    )
  }
}
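A note on the `nextCursor` extraction above: Confluence returns `_links.next` as a relative path, which the `URL` constructor cannot parse on its own, so any absolute base makes the query string accessible; only the query string is read, so the base itself never matters. A minimal demonstration:

```ts
// `next` mimics a relative _links.next value from Confluence v2.
const next = '/wiki/api/v2/labels/42/pages?cursor=abc123&limit=50'
const cursor = new URL(next, 'https://placeholder').searchParams.get('cursor')
// cursor === 'abc123'
```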
apps/sim/app/api/tools/confluence/space-labels/route.ts (new file, 98 lines)
@@ -0,0 +1,98 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { validateAlphanumericId, validateJiraCloudId } from '@/lib/core/security/input-validation'
import { getConfluenceCloudId } from '@/tools/confluence/utils'

const logger = createLogger('ConfluenceSpaceLabelsAPI')

export const dynamic = 'force-dynamic'

export async function GET(request: NextRequest) {
  try {
    const auth = await checkSessionOrInternalAuth(request)
    if (!auth.success || !auth.userId) {
      return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
    }

    const { searchParams } = new URL(request.url)
    const domain = searchParams.get('domain')
    const accessToken = searchParams.get('accessToken')
    const spaceId = searchParams.get('spaceId')
    const providedCloudId = searchParams.get('cloudId')
    const limit = searchParams.get('limit') || '25'
    const cursor = searchParams.get('cursor')

    if (!domain) {
      return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
    }

    if (!accessToken) {
      return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
    }

    if (!spaceId) {
      return NextResponse.json({ error: 'Space ID is required' }, { status: 400 })
    }

    const spaceIdValidation = validateAlphanumericId(spaceId, 'spaceId', 255)
    if (!spaceIdValidation.isValid) {
      return NextResponse.json({ error: spaceIdValidation.error }, { status: 400 })
    }

    const cloudId = providedCloudId || (await getConfluenceCloudId(domain, accessToken))

    const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
    if (!cloudIdValidation.isValid) {
      return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
    }

    const queryParams = new URLSearchParams()
    queryParams.append('limit', String(Math.min(Number(limit), 250)))
    if (cursor) {
      queryParams.append('cursor', cursor)
    }
    const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/spaces/${spaceId}/labels?${queryParams.toString()}`

    const response = await fetch(url, {
      method: 'GET',
      headers: {
        Accept: 'application/json',
        Authorization: `Bearer ${accessToken}`,
      },
    })

    if (!response.ok) {
      const errorData = await response.json().catch(() => null)
      logger.error('Confluence API error response:', {
        status: response.status,
        statusText: response.statusText,
        error: JSON.stringify(errorData, null, 2),
      })
      const errorMessage = errorData?.message || `Failed to list space labels (${response.status})`
      return NextResponse.json({ error: errorMessage }, { status: response.status })
    }

    const data = await response.json()

    const labels = (data.results || []).map((label: any) => ({
      id: label.id,
      name: label.name,
      prefix: label.prefix || 'global',
    }))

    return NextResponse.json({
      labels,
      spaceId,
      nextCursor: data._links?.next
        ? new URL(data._links.next, 'https://placeholder').searchParams.get('cursor')
        : null,
    })
  } catch (error) {
    logger.error('Error listing space labels:', error)
    return NextResponse.json(
      { error: (error as Error).message || 'Internal server error' },
      { status: 500 }
    )
  }
}
@@ -29,7 +29,7 @@ const patchBodySchema = z
    description: z
      .string()
      .trim()
      .max(500, 'Description must be 500 characters or less')
      .max(2000, 'Description must be 2000 characters or less')
      .nullable()
      .optional(),
    isActive: z.literal(true).optional(), // Set to true to activate this version
@@ -12,7 +12,7 @@ import {
import { generateRequestId } from '@/lib/core/utils/request'
import { SSE_HEADERS } from '@/lib/core/utils/sse'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { markExecutionCancelled } from '@/lib/execution/cancellation'
import { createExecutionEventWriter, setExecutionMeta } from '@/lib/execution/event-buffer'
import { processInputFileFields } from '@/lib/execution/files'
import { preprocessExecution } from '@/lib/execution/preprocessing'
import { LoggingSession } from '@/lib/logs/execution/logging-session'
@@ -700,15 +700,27 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
  const timeoutController = createTimeoutAbortController(preprocessResult.executionTimeout?.sync)
  let isStreamClosed = false

  const eventWriter = createExecutionEventWriter(executionId)
  setExecutionMeta(executionId, {
    status: 'active',
    userId: actorUserId,
    workflowId,
  }).catch(() => {})

  const stream = new ReadableStream<Uint8Array>({
    async start(controller) {
      const sendEvent = (event: ExecutionEvent) => {
        if (isStreamClosed) return
      let finalMetaStatus: 'complete' | 'error' | 'cancelled' | null = null

        try {
          controller.enqueue(encodeSSEEvent(event))
        } catch {
          isStreamClosed = true
      const sendEvent = (event: ExecutionEvent) => {
        if (!isStreamClosed) {
          try {
            controller.enqueue(encodeSSEEvent(event))
          } catch {
            isStreamClosed = true
          }
        }
        if (event.type !== 'stream:chunk' && event.type !== 'stream:done') {
          eventWriter.write(event).catch(() => {})
        }
      }
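The refactored `sendEvent` tees events: best-effort delivery to the live SSE controller, plus a durable write of non-chunk events to the execution event buffer for later replay. A distilled sketch of that pattern, where `EventWriter` and `encode` stand in for the real event-buffer and SSE-encoding helpers:

```ts
// Stand-in for the real event-buffer writer.
interface EventWriter {
  write(event: unknown): Promise<void>
}

function makeTeeSender(
  controller: ReadableStreamDefaultController<Uint8Array>,
  writer: EventWriter,
  encode: (event: unknown) => Uint8Array
) {
  let closed = false
  return (event: { type: string }) => {
    if (!closed) {
      try {
        controller.enqueue(encode(event)) // best-effort live delivery
      } catch {
        closed = true // client went away; keep buffering for reconnects
      }
    }
    // High-volume token chunks are not buffered; everything else is replayable.
    if (event.type !== 'stream:chunk' && event.type !== 'stream:done') {
      writer.write(event).catch(() => {}) // buffer failures must not kill the stream
    }
  }
}
```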
@@ -829,14 +841,12 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:

      const reader = streamingExec.stream.getReader()
      const decoder = new TextDecoder()
      let chunkCount = 0

      try {
        while (true) {
          const { done, value } = await reader.read()
          if (done) break

          chunkCount++
          const chunk = decoder.decode(value, { stream: true })
          sendEvent({
            type: 'stream:chunk',
@@ -951,6 +961,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
              duration: result.metadata?.duration || 0,
            },
          })
          finalMetaStatus = 'error'
        } else {
          logger.info(`[${requestId}] Workflow execution was cancelled`)

@@ -963,6 +974,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
              duration: result.metadata?.duration || 0,
            },
          })
          finalMetaStatus = 'cancelled'
        }
        return
      }
@@ -986,6 +998,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
            endTime: result.metadata?.endTime || new Date().toISOString(),
          },
        })
        finalMetaStatus = 'complete'
      } catch (error: unknown) {
        const isTimeout = isTimeoutError(error) || timeoutController.isTimedOut()
        const errorMessage = isTimeout
@@ -1017,7 +1030,18 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
            duration: executionResult?.metadata?.duration || 0,
          },
        })
        finalMetaStatus = 'error'
      } finally {
        try {
          await eventWriter.close()
        } catch (closeError) {
          logger.warn(`[${requestId}] Failed to close event writer`, {
            error: closeError instanceof Error ? closeError.message : String(closeError),
          })
        }
        if (finalMetaStatus) {
          setExecutionMeta(executionId, { status: finalMetaStatus }).catch(() => {})
        }
        timeoutController.cleanup()
        if (executionId) {
          await cleanupExecutionBase64Cache(executionId)
@@ -1032,10 +1056,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
    },
    cancel() {
      isStreamClosed = true
      timeoutController.cleanup()
      logger.info(`[${requestId}] Client aborted SSE stream, signalling cancellation`)
      timeoutController.abort()
      markExecutionCancelled(executionId).catch(() => {})
      logger.info(`[${requestId}] Client disconnected from SSE stream`)
    },
  })
@@ -0,0 +1,170 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { SSE_HEADERS } from '@/lib/core/utils/sse'
import {
  type ExecutionStreamStatus,
  getExecutionMeta,
  readExecutionEvents,
} from '@/lib/execution/event-buffer'
import { formatSSEEvent } from '@/lib/workflows/executor/execution-events'
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'

const logger = createLogger('ExecutionStreamReconnectAPI')

const POLL_INTERVAL_MS = 500
const MAX_POLL_DURATION_MS = 10 * 60 * 1000 // 10 minutes

function isTerminalStatus(status: ExecutionStreamStatus): boolean {
  return status === 'complete' || status === 'error' || status === 'cancelled'
}

export const runtime = 'nodejs'
export const dynamic = 'force-dynamic'

export async function GET(
  req: NextRequest,
  { params }: { params: Promise<{ id: string; executionId: string }> }
) {
  const { id: workflowId, executionId } = await params

  try {
    const auth = await checkHybridAuth(req, { requireWorkflowId: false })
    if (!auth.success || !auth.userId) {
      return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
    }

    const workflowAuthorization = await authorizeWorkflowByWorkspacePermission({
      workflowId,
      userId: auth.userId,
      action: 'read',
    })
    if (!workflowAuthorization.allowed) {
      return NextResponse.json(
        { error: workflowAuthorization.message || 'Access denied' },
        { status: workflowAuthorization.status }
      )
    }

    const meta = await getExecutionMeta(executionId)
    if (!meta) {
      return NextResponse.json({ error: 'Execution buffer not found or expired' }, { status: 404 })
    }

    if (meta.workflowId && meta.workflowId !== workflowId) {
      return NextResponse.json(
        { error: 'Execution does not belong to this workflow' },
        { status: 403 }
      )
    }

    const fromParam = req.nextUrl.searchParams.get('from')
    const parsed = fromParam ? Number.parseInt(fromParam, 10) : 0
    const fromEventId = Number.isFinite(parsed) && parsed >= 0 ? parsed : 0

    logger.info('Reconnection stream requested', {
      workflowId,
      executionId,
      fromEventId,
      metaStatus: meta.status,
    })

    const encoder = new TextEncoder()

    let closed = false

    const stream = new ReadableStream<Uint8Array>({
      async start(controller) {
        let lastEventId = fromEventId
        const pollDeadline = Date.now() + MAX_POLL_DURATION_MS

        const enqueue = (text: string) => {
          if (closed) return
          try {
            controller.enqueue(encoder.encode(text))
          } catch {
            closed = true
          }
        }

        try {
          const events = await readExecutionEvents(executionId, lastEventId)
          for (const entry of events) {
            if (closed) return
            enqueue(formatSSEEvent(entry.event))
            lastEventId = entry.eventId
          }

          const currentMeta = await getExecutionMeta(executionId)
          if (!currentMeta || isTerminalStatus(currentMeta.status)) {
            enqueue('data: [DONE]\n\n')
            if (!closed) controller.close()
            return
          }

          while (!closed && Date.now() < pollDeadline) {
            await new Promise((resolve) => setTimeout(resolve, POLL_INTERVAL_MS))
            if (closed) return

            const newEvents = await readExecutionEvents(executionId, lastEventId)
            for (const entry of newEvents) {
              if (closed) return
              enqueue(formatSSEEvent(entry.event))
              lastEventId = entry.eventId
            }

            const polledMeta = await getExecutionMeta(executionId)
            if (!polledMeta || isTerminalStatus(polledMeta.status)) {
              const finalEvents = await readExecutionEvents(executionId, lastEventId)
              for (const entry of finalEvents) {
                if (closed) return
                enqueue(formatSSEEvent(entry.event))
                lastEventId = entry.eventId
              }
              enqueue('data: [DONE]\n\n')
              if (!closed) controller.close()
              return
            }
          }

          if (!closed) {
            logger.warn('Reconnection stream poll deadline reached', { executionId })
            enqueue('data: [DONE]\n\n')
            controller.close()
          }
        } catch (error) {
          logger.error('Error in reconnection stream', {
            executionId,
            error: error instanceof Error ? error.message : String(error),
          })
          if (!closed) {
            try {
              controller.close()
            } catch {}
          }
        }
      },
      cancel() {
        closed = true
        logger.info('Client disconnected from reconnection stream', { executionId })
      },
    })

    return new NextResponse(stream, {
      headers: {
        ...SSE_HEADERS,
        'X-Execution-Id': executionId,
      },
    })
  } catch (error: any) {
    logger.error('Failed to start reconnection stream', {
      workflowId,
      executionId,
      error: error.message,
    })
    return NextResponse.json(
      { error: error.message || 'Failed to start reconnection stream' },
      { status: 500 }
    )
  }
}
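A client resumes a dropped execution stream by calling this route with the last event id it saw; the handler replays buffered events from that id onward and closes with a `[DONE]` sentinel. A minimal consumer sketch; the exact route segment (`/stream` here) is an assumption, since the diff shows only the handler's params (`id`, `executionId`), and the SSE parsing is deliberately minimal:

```ts
async function resumeExecutionStream(
  workflowId: string,
  executionId: string,
  lastEventId: number,
  onEvent: (raw: string) => void
) {
  const res = await fetch(
    `/api/workflows/${workflowId}/executions/${executionId}/stream?from=${lastEventId}`
  )
  if (!res.ok || !res.body) throw new Error(`reconnect failed: ${res.status}`)
  const reader = res.body.getReader()
  const decoder = new TextDecoder()
  let buffer = ''
  while (true) {
    const { done, value } = await reader.read()
    if (done) break
    buffer += decoder.decode(value, { stream: true })
    // SSE frames are separated by a blank line.
    let idx: number
    while ((idx = buffer.indexOf('\n\n')) !== -1) {
      const frame = buffer.slice(0, idx)
      buffer = buffer.slice(idx + 2)
      if (frame === 'data: [DONE]') return // server finished replay
      if (frame.startsWith('data: ')) onEvent(frame.slice(6))
    }
  }
}
```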
@@ -38,6 +38,7 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
    return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
  }

  const isInternalCall = auth.authType === 'internal_jwt'
  const userId = auth.userId || null

  let workflowData = await getWorkflowById(workflowId)
@@ -47,29 +48,32 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
    return NextResponse.json({ error: 'Workflow not found' }, { status: 404 })
  }

  // Check if user has access to this workflow
  if (!userId) {
  if (isInternalCall && !userId) {
    // Internal system calls (e.g. workflow-in-workflow executor) may not carry a userId.
    // These are already authenticated via internal JWT; allow read access.
    logger.info(`[${requestId}] Internal API call for workflow ${workflowId}`)
  } else if (!userId) {
    logger.warn(`[${requestId}] Unauthorized access attempt for workflow ${workflowId}`)
    return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
  }
  } else {
    const authorization = await authorizeWorkflowByWorkspacePermission({
      workflowId,
      userId,
      action: 'read',
    })
    if (!authorization.workflow) {
      logger.warn(`[${requestId}] Workflow ${workflowId} not found`)
      return NextResponse.json({ error: 'Workflow not found' }, { status: 404 })
    }

    const authorization = await authorizeWorkflowByWorkspacePermission({
      workflowId,
      userId,
      action: 'read',
    })
    if (!authorization.workflow) {
      logger.warn(`[${requestId}] Workflow ${workflowId} not found`)
      return NextResponse.json({ error: 'Workflow not found' }, { status: 404 })
    }

    workflowData = authorization.workflow
    if (!authorization.allowed) {
      logger.warn(`[${requestId}] User ${userId} denied access to workflow ${workflowId}`)
      return NextResponse.json(
        { error: authorization.message || 'Access denied' },
        { status: authorization.status }
      )
    workflowData = authorization.workflow
    if (!authorization.allowed) {
      logger.warn(`[${requestId}] User ${userId} denied access to workflow ${workflowId}`)
      return NextResponse.json(
        { error: authorization.message || 'Access denied' },
        { status: authorization.status }
      )
    }
  }

  logger.debug(`[${requestId}] Attempting to load workflow ${workflowId} from normalized tables`)
@@ -13,9 +13,6 @@ export type CommandId =
  | 'goto-logs'
  | 'open-search'
  | 'run-workflow'
  | 'focus-copilot-tab'
  | 'focus-toolbar-tab'
  | 'focus-editor-tab'
  | 'clear-terminal-console'
  | 'focus-toolbar-search'
  | 'clear-notifications'
@@ -75,21 +72,6 @@ export const COMMAND_DEFINITIONS: Record<CommandId, CommandDefinition> = {
    shortcut: 'Mod+Enter',
    allowInEditable: false,
  },
  'focus-copilot-tab': {
    id: 'focus-copilot-tab',
    shortcut: 'C',
    allowInEditable: false,
  },
  'focus-toolbar-tab': {
    id: 'focus-toolbar-tab',
    shortcut: 'T',
    allowInEditable: false,
  },
  'focus-editor-tab': {
    id: 'focus-editor-tab',
    shortcut: 'E',
    allowInEditable: false,
  },
  'clear-terminal-console': {
    id: 'clear-terminal-console',
    shortcut: 'Mod+D',
@@ -108,7 +108,7 @@ const SmoothThinkingText = memo(
    return (
      <div
        ref={textRef}
        className='[&_*]:!text-[var(--text-muted)] [&_*]:!text-[12px] [&_*]:!leading-[1.4] [&_p]:!m-0 [&_p]:!mb-1 [&_h1]:!text-[12px] [&_h1]:!font-semibold [&_h1]:!m-0 [&_h1]:!mb-1 [&_h2]:!text-[12px] [&_h2]:!font-semibold [&_h2]:!m-0 [&_h2]:!mb-1 [&_h3]:!text-[12px] [&_h3]:!font-semibold [&_h3]:!m-0 [&_h3]:!mb-1 [&_code]:!text-[11px] [&_ul]:!pl-5 [&_ul]:!my-1 [&_ol]:!pl-8 [&_ol]:!my-1 [&_li]:!my-0.5 [&_li]:!py-0 font-season text-[12px] text-[var(--text-muted)]'
        className='[&_*]:!text-[var(--text-muted)] [&_*]:!text-[12px] [&_*]:!leading-[1.4] [&_p]:!m-0 [&_p]:!mb-1 [&_h1]:!text-[12px] [&_h1]:!font-semibold [&_h1]:!m-0 [&_h1]:!mb-1 [&_h2]:!text-[12px] [&_h2]:!font-semibold [&_h2]:!m-0 [&_h2]:!mb-1 [&_h3]:!text-[12px] [&_h3]:!font-semibold [&_h3]:!m-0 [&_h3]:!mb-1 [&_code]:!text-[11px] [&_ul]:!pl-5 [&_ul]:!my-1 [&_ol]:!pl-6 [&_ol]:!my-1 [&_li]:!my-0.5 [&_li]:!py-0 font-season text-[12px] text-[var(--text-muted)]'
      >
        <CopilotMarkdownRenderer content={displayedContent} />
      </div>
@@ -355,7 +355,7 @@ export function ThinkingBlock({
          isExpanded ? 'mt-1.5 max-h-[150px] opacity-100' : 'max-h-0 opacity-0'
        )}
      >
        <div className='[&_*]:!text-[var(--text-muted)] [&_*]:!text-[12px] [&_*]:!leading-[1.4] [&_p]:!m-0 [&_p]:!mb-1 [&_h1]:!text-[12px] [&_h1]:!font-semibold [&_h1]:!m-0 [&_h1]:!mb-1 [&_h2]:!text-[12px] [&_h2]:!font-semibold [&_h2]:!m-0 [&_h2]:!mb-1 [&_h3]:!text-[12px] [&_h3]:!font-semibold [&_h3]:!m-0 [&_h3]:!mb-1 [&_code]:!text-[11px] [&_ul]:!pl-5 [&_ul]:!my-1 [&_ol]:!pl-8 [&_ol]:!my-1 [&_li]:!my-0.5 [&_li]:!py-0 font-season text-[12px] text-[var(--text-muted)]'>
        <div className='[&_*]:!text-[var(--text-muted)] [&_*]:!text-[12px] [&_*]:!leading-[1.4] [&_p]:!m-0 [&_p]:!mb-1 [&_h1]:!text-[12px] [&_h1]:!font-semibold [&_h1]:!m-0 [&_h1]:!mb-1 [&_h2]:!text-[12px] [&_h2]:!font-semibold [&_h2]:!m-0 [&_h2]:!mb-1 [&_h3]:!text-[12px] [&_h3]:!font-semibold [&_h3]:!m-0 [&_h3]:!mb-1 [&_code]:!text-[11px] [&_ul]:!pl-5 [&_ul]:!my-1 [&_ol]:!pl-6 [&_ol]:!my-1 [&_li]:!my-0.5 [&_li]:!py-0 font-season text-[12px] text-[var(--text-muted)]'>
          <CopilotMarkdownRenderer content={cleanContent} />
        </div>
      </div>
@@ -113,7 +113,7 @@ export function VersionDescriptionModal({
            className='min-h-[120px] resize-none'
            value={description}
            onChange={(e) => setDescription(e.target.value)}
            maxLength={500}
            maxLength={2000}
            disabled={isGenerating}
          />
          <div className='flex items-center justify-between'>
@@ -123,7 +123,7 @@ export function VersionDescriptionModal({
            </p>
          )}
          {!updateMutation.error && !generateMutation.error && <div />}
          <p className='text-[11px] text-[var(--text-tertiary)]'>{description.length}/500</p>
          <p className='text-[11px] text-[var(--text-tertiary)]'>{description.length}/2000</p>
        </div>
      </ModalBody>
      <ModalFooter>
@@ -57,6 +57,21 @@ export function useChangeDetection({
      }
    }

    if (block.triggerMode) {
      const triggerConfigValue = blockSubValues?.triggerConfig
      if (
        triggerConfigValue &&
        typeof triggerConfigValue === 'object' &&
        !subBlocks.triggerConfig
      ) {
        subBlocks.triggerConfig = {
          id: 'triggerConfig',
          type: 'short-input',
          value: triggerConfigValue,
        }
      }
    }

    blocksWithSubBlocks[blockId] = {
      ...block,
      subBlocks,
@@ -340,13 +340,7 @@ export const Panel = memo(function Panel() {
   * Register global keyboard shortcuts using the central commands registry.
   *
   * - Mod+Enter: Run / cancel workflow (matches the Run button behavior)
   * - C: Focus Copilot tab
   * - T: Focus Toolbar tab
   * - E: Focus Editor tab
   * - Mod+F: Focus Toolbar tab and search input
   *
   * The tab-switching commands are disabled inside editable elements so typing
   * in inputs or textareas is not interrupted.
   */
  useRegisterGlobalCommands(() =>
    createCommands([
@@ -363,33 +357,6 @@ export const Panel = memo(function Panel() {
          allowInEditable: false,
        },
      },
      {
        id: 'focus-copilot-tab',
        handler: () => {
          setActiveTab('copilot')
        },
        overrides: {
          allowInEditable: false,
        },
      },
      {
        id: 'focus-toolbar-tab',
        handler: () => {
          setActiveTab('toolbar')
        },
        overrides: {
          allowInEditable: false,
        },
      },
      {
        id: 'focus-editor-tab',
        handler: () => {
          setActiveTab('editor')
        },
        overrides: {
          allowInEditable: false,
        },
      },
      {
        id: 'focus-toolbar-search',
        handler: () => {
@@ -1,4 +1,4 @@
import { useCallback, useRef, useState } from 'react'
import { useCallback, useEffect, useRef, useState } from 'react'
import { createLogger } from '@sim/logger'
import { useQueryClient } from '@tanstack/react-query'
import { v4 as uuidv4 } from 'uuid'
@@ -46,7 +46,13 @@ import { useWorkflowStore } from '@/stores/workflows/workflow/store'

const logger = createLogger('useWorkflowExecution')

// Debug state validation result
/**
 * Module-level Set tracking which workflows have an active reconnection effect.
 * Prevents multiple hook instances (from different components) from starting
 * concurrent reconnection streams for the same workflow during the same mount cycle.
 */
const activeReconnections = new Set<string>()

interface DebugValidationResult {
  isValid: boolean
  error?: string
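A distilled sketch of the guard pattern this Set enables (simplified; the real hook wires this into a `useEffect`):

```ts
// First caller to claim a workflow id runs the reconnection; later hook
// instances and re-renders bail out. The claim is released when done.
const activeReconnections = new Set<string>()

async function reconnectOnce(workflowId: string, run: () => Promise<void>) {
  if (activeReconnections.has(workflowId)) return // another instance owns it
  activeReconnections.add(workflowId)
  try {
    await run()
  } finally {
    activeReconnections.delete(workflowId)
  }
}
```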
@@ -54,7 +60,7 @@

interface BlockEventHandlerConfig {
  workflowId?: string
  executionId?: string
  executionIdRef: { current: string }
  workflowEdges: Array<{ id: string; target: string; sourceHandle?: string | null }>
  activeBlocksSet: Set<string>
  accumulatedBlockLogs: BlockLog[]
@@ -108,12 +114,15 @@ export function useWorkflowExecution() {
  const queryClient = useQueryClient()
  const currentWorkflow = useCurrentWorkflow()
  const { activeWorkflowId, workflows } = useWorkflowRegistry()
  const { toggleConsole, addConsole, updateConsole, cancelRunningEntries } =
  const { toggleConsole, addConsole, updateConsole, cancelRunningEntries, clearExecutionEntries } =
    useTerminalConsoleStore()
  const hasHydrated = useTerminalConsoleStore((s) => s._hasHydrated)
  const { getAllVariables } = useEnvironmentStore()
  const { getVariablesByWorkflowId, variables } = useVariablesStore()
  const { isExecuting, isDebugging, pendingBlocks, executor, debugContext } =
    useCurrentWorkflowExecution()
  const setCurrentExecutionId = useExecutionStore((s) => s.setCurrentExecutionId)
  const getCurrentExecutionId = useExecutionStore((s) => s.getCurrentExecutionId)
  const setIsExecuting = useExecutionStore((s) => s.setIsExecuting)
  const setIsDebugging = useExecutionStore((s) => s.setIsDebugging)
  const setPendingBlocks = useExecutionStore((s) => s.setPendingBlocks)
@@ -297,7 +306,7 @@
    (config: BlockEventHandlerConfig) => {
      const {
        workflowId,
        executionId,
        executionIdRef,
        workflowEdges,
        activeBlocksSet,
        accumulatedBlockLogs,
@@ -308,6 +317,14 @@
        onBlockCompleteCallback,
      } = config

      /** Returns true if this execution was cancelled or superseded by another run. */
      const isStaleExecution = () =>
        !!(
          workflowId &&
          executionIdRef.current &&
          useExecutionStore.getState().getCurrentExecutionId(workflowId) !== executionIdRef.current
        )

      const updateActiveBlocks = (blockId: string, isActive: boolean) => {
        if (!workflowId) return
        if (isActive) {
@@ -360,7 +377,7 @@
          endedAt: data.endedAt,
          workflowId,
          blockId: data.blockId,
          executionId,
          executionId: executionIdRef.current,
          blockName: data.blockName || 'Unknown Block',
          blockType: data.blockType || 'unknown',
          iterationCurrent: data.iterationCurrent,
@@ -383,7 +400,7 @@
          endedAt: data.endedAt,
          workflowId,
          blockId: data.blockId,
          executionId,
          executionId: executionIdRef.current,
          blockName: data.blockName || 'Unknown Block',
          blockType: data.blockType || 'unknown',
          iterationCurrent: data.iterationCurrent,
@@ -410,7 +427,7 @@
            iterationType: data.iterationType,
            iterationContainerId: data.iterationContainerId,
          },
          executionId
          executionIdRef.current
        )
      }

@@ -432,11 +449,12 @@
            iterationType: data.iterationType,
            iterationContainerId: data.iterationContainerId,
          },
          executionId
          executionIdRef.current
        )
      }

      const onBlockStarted = (data: BlockStartedData) => {
        if (isStaleExecution()) return
        updateActiveBlocks(data.blockId, true)
        markIncomingEdges(data.blockId)

@@ -453,7 +471,7 @@
          endedAt: undefined,
          workflowId,
          blockId: data.blockId,
          executionId,
          executionId: executionIdRef.current,
          blockName: data.blockName || 'Unknown Block',
          blockType: data.blockType || 'unknown',
          isRunning: true,
@@ -465,6 +483,7 @@
      }

      const onBlockCompleted = (data: BlockCompletedData) => {
        if (isStaleExecution()) return
        updateActiveBlocks(data.blockId, false)
        if (workflowId) setBlockRunStatus(workflowId, data.blockId, 'success')

@@ -495,6 +514,7 @@
      }

      const onBlockError = (data: BlockErrorData) => {
        if (isStaleExecution()) return
        updateActiveBlocks(data.blockId, false)
        if (workflowId) setBlockRunStatus(workflowId, data.blockId, 'error')
@@ -902,10 +922,6 @@

      // Update block logs with actual stream completion times
      if (result.logs && streamCompletionTimes.size > 0) {
        const streamCompletionEndTime = new Date(
          Math.max(...Array.from(streamCompletionTimes.values()))
        ).toISOString()

        result.logs.forEach((log: BlockLog) => {
          if (streamCompletionTimes.has(log.blockId)) {
            const completionTime = streamCompletionTimes.get(log.blockId)!
@@ -987,7 +1003,6 @@
        return { success: true, stream }
      }

      // For manual (non-chat) execution
      const manualExecutionId = uuidv4()
      try {
        const result = await executeWorkflow(
@@ -1002,29 +1017,10 @@
          if (result.metadata.pendingBlocks) {
            setPendingBlocks(activeWorkflowId, result.metadata.pendingBlocks)
          }
        } else if (result && 'success' in result) {
          setExecutionResult(result)
          // Reset execution state after successful non-debug execution
          setIsExecuting(activeWorkflowId, false)
          setIsDebugging(activeWorkflowId, false)
          setActiveBlocks(activeWorkflowId, new Set())

          if (isChatExecution) {
            if (!result.metadata) {
              result.metadata = { duration: 0, startTime: new Date().toISOString() }
            }
            ;(result.metadata as any).source = 'chat'
          }

          // Invalidate subscription queries to update usage
          setTimeout(() => {
            queryClient.invalidateQueries({ queryKey: subscriptionKeys.all })
          }, 1000)
        }
        return result
      } catch (error: any) {
        const errorResult = handleExecutionError(error, { executionId: manualExecutionId })
        // Note: Error logs are already persisted server-side via execution-core.ts
        return errorResult
      }
    },
@@ -1275,7 +1271,7 @@
      if (activeWorkflowId) {
        logger.info('Using server-side executor')

        const executionId = uuidv4()
        const executionIdRef = { current: '' }

        let executionResult: ExecutionResult = {
          success: false,
@@ -1293,7 +1289,7 @@
        try {
          const blockHandlers = buildBlockEventHandlers({
            workflowId: activeWorkflowId,
            executionId,
            executionIdRef,
            workflowEdges,
            activeBlocksSet,
            accumulatedBlockLogs,
@@ -1326,6 +1322,10 @@
              loops: clientWorkflowState.loops,
              parallels: clientWorkflowState.parallels,
            },
            onExecutionId: (id) => {
              executionIdRef.current = id
              setCurrentExecutionId(activeWorkflowId, id)
            },
            callbacks: {
              onExecutionStarted: (data) => {
                logger.info('Server execution started:', data)
@@ -1368,6 +1368,18 @@
              },

              onExecutionCompleted: (data) => {
                if (
                  activeWorkflowId &&
                  executionIdRef.current &&
                  useExecutionStore.getState().getCurrentExecutionId(activeWorkflowId) !==
                    executionIdRef.current
                )
                  return

                if (activeWorkflowId) {
                  setCurrentExecutionId(activeWorkflowId, null)
                }

                executionResult = {
                  success: data.success,
                  output: data.output,
@@ -1425,9 +1437,33 @@
                  })
                }
              }

              const workflowExecState = activeWorkflowId
                ? useExecutionStore.getState().getWorkflowExecution(activeWorkflowId)
                : null
              if (activeWorkflowId && !workflowExecState?.isDebugging) {
                setExecutionResult(executionResult)
                setIsExecuting(activeWorkflowId, false)
                setActiveBlocks(activeWorkflowId, new Set())
                setTimeout(() => {
                  queryClient.invalidateQueries({ queryKey: subscriptionKeys.all })
                }, 1000)
              }
            },

            onExecutionError: (data) => {
              if (
                activeWorkflowId &&
                executionIdRef.current &&
                useExecutionStore.getState().getCurrentExecutionId(activeWorkflowId) !==
                  executionIdRef.current
              )
                return

              if (activeWorkflowId) {
                setCurrentExecutionId(activeWorkflowId, null)
              }

              executionResult = {
                success: false,
                output: {},
@@ -1441,43 +1477,53 @@
              const isPreExecutionError = accumulatedBlockLogs.length === 0
              handleExecutionErrorConsole({
                workflowId: activeWorkflowId,
                executionId,
                executionId: executionIdRef.current,
                error: data.error,
                durationMs: data.duration,
                blockLogs: accumulatedBlockLogs,
                isPreExecutionError,
              })

              if (activeWorkflowId) {
                setIsExecuting(activeWorkflowId, false)
                setIsDebugging(activeWorkflowId, false)
                setActiveBlocks(activeWorkflowId, new Set())
              }
            },

            onExecutionCancelled: (data) => {
              if (
                activeWorkflowId &&
                executionIdRef.current &&
                useExecutionStore.getState().getCurrentExecutionId(activeWorkflowId) !==
                  executionIdRef.current
              )
                return

              if (activeWorkflowId) {
                setCurrentExecutionId(activeWorkflowId, null)
              }

              handleExecutionCancelledConsole({
                workflowId: activeWorkflowId,
                executionId,
                executionId: executionIdRef.current,
                durationMs: data?.duration,
              })

              if (activeWorkflowId) {
                setIsExecuting(activeWorkflowId, false)
                setIsDebugging(activeWorkflowId, false)
                setActiveBlocks(activeWorkflowId, new Set())
              }
            },
          },
        })

        return executionResult
      } catch (error: any) {
        // Don't log abort errors - they're intentional user actions
        if (error.name === 'AbortError' || error.message?.includes('aborted')) {
          logger.info('Execution aborted by user')

          // Reset execution state
          if (activeWorkflowId) {
            setIsExecuting(activeWorkflowId, false)
            setActiveBlocks(activeWorkflowId, new Set())
          }

          // Return gracefully without error
          return {
            success: false,
            output: {},
            metadata: { duration: 0 },
            logs: [],
          }
          return executionResult
        }

        logger.error('Server-side execution failed:', error)
@@ -1485,7 +1531,6 @@
      }
    }

    // Fallback: should never reach here
    throw new Error('Server-side execution is required')
  }
@@ -1717,25 +1762,28 @@ export function useWorkflowExecution() {
 * Handles cancelling the current workflow execution
 */
const handleCancelExecution = useCallback(() => {
  if (!activeWorkflowId) return
  logger.info('Workflow execution cancellation requested')

  // Cancel the execution stream for this workflow (server-side)
  executionStream.cancel(activeWorkflowId)
  const storedExecutionId = getCurrentExecutionId(activeWorkflowId)

  // Mark current chat execution as superseded so its cleanup won't affect new executions
  currentChatExecutionIdRef.current = null

  // Mark all running entries as canceled in the terminal
  cancelRunningEntries(activeWorkflowId)

  // Reset execution state - this triggers chat stream cleanup via useEffect in chat.tsx
  setIsExecuting(activeWorkflowId, false)
  setIsDebugging(activeWorkflowId, false)
  setActiveBlocks(activeWorkflowId, new Set())
  if (storedExecutionId) {
    setCurrentExecutionId(activeWorkflowId, null)
    fetch(`/api/workflows/${activeWorkflowId}/executions/${storedExecutionId}/cancel`, {
      method: 'POST',
    }).catch(() => {})
    handleExecutionCancelledConsole({
      workflowId: activeWorkflowId,
      executionId: storedExecutionId,
    })
  }

  // If in debug mode, also reset debug state
  if (isDebugging) {
    resetDebugState()
  }
@@ -1747,7 +1795,9 @@ export function useWorkflowExecution() {
  setIsDebugging,
  setActiveBlocks,
  activeWorkflowId,
  cancelRunningEntries,
  getCurrentExecutionId,
  setCurrentExecutionId,
  handleExecutionCancelledConsole,
])
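
Cancellation is two-sided: the client aborts its SSE fetch while the server is asked to stop the run. A minimal sketch of that sequence, using the `/cancel` route shape from the hook above (the standalone wrapper name is illustrative, and `executionStream` is assumed to come from the `useExecutionStream()` hook shown later in this compare):

```typescript
// Hypothetical helper mirroring handleCancelExecution's two steps.
async function cancelRun(workflowId: string, executionId: string) {
  // 1) Abort the client-side SSE stream so the UI stops receiving events.
  executionStream.cancel(workflowId)
  // 2) Best-effort server cancel; failures are swallowed, matching the hook.
  await fetch(`/api/workflows/${workflowId}/executions/${executionId}/cancel`, {
    method: 'POST',
  }).catch(() => {})
}
```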
/**
@@ -1847,7 +1897,7 @@ export function useWorkflowExecution() {
    }

    setIsExecuting(workflowId, true)
    const executionIdRef = { current: '' }
    const accumulatedBlockLogs: BlockLog[] = []
    const accumulatedBlockStates = new Map<string, BlockState>()
    const executedBlockIds = new Set<string>()
@@ -1856,7 +1906,7 @@ export function useWorkflowExecution() {
    try {
      const blockHandlers = buildBlockEventHandlers({
        workflowId,
        executionIdRef,
        workflowEdges,
        activeBlocksSet,
        accumulatedBlockLogs,
@@ -1871,6 +1921,10 @@ export function useWorkflowExecution() {
        startBlockId: blockId,
        sourceSnapshot: effectiveSnapshot,
        input: workflowInput,
        onExecutionId: (id) => {
          executionIdRef.current = id
          setCurrentExecutionId(workflowId, id)
        },
        callbacks: {
          onBlockStarted: blockHandlers.onBlockStarted,
          onBlockCompleted: blockHandlers.onBlockCompleted,
@@ -1878,7 +1932,6 @@ export function useWorkflowExecution() {

          onExecutionCompleted: (data) => {
            if (data.success) {
              // Add the start block (trigger) to executed blocks
              executedBlockIds.add(blockId)

              const mergedBlockStates: Record<string, BlockState> = {
@@ -1902,6 +1955,10 @@ export function useWorkflowExecution() {
              }
              setLastExecutionSnapshot(workflowId, updatedSnapshot)
            }

            setCurrentExecutionId(workflowId, null)
            setIsExecuting(workflowId, false)
            setActiveBlocks(workflowId, new Set())
          },

          onExecutionError: (data) => {
@@ -1921,19 +1978,27 @@ export function useWorkflowExecution() {

            handleExecutionErrorConsole({
              workflowId,
              executionId: executionIdRef.current,
              error: data.error,
              durationMs: data.duration,
              blockLogs: accumulatedBlockLogs,
            })

            setCurrentExecutionId(workflowId, null)
            setIsExecuting(workflowId, false)
            setActiveBlocks(workflowId, new Set())
          },

          onExecutionCancelled: (data) => {
            handleExecutionCancelledConsole({
              workflowId,
              executionId: executionIdRef.current,
              durationMs: data?.duration,
            })

            setCurrentExecutionId(workflowId, null)
            setIsExecuting(workflowId, false)
            setActiveBlocks(workflowId, new Set())
          },
        },
      })
@@ -1942,14 +2007,20 @@ export function useWorkflowExecution() {
        logger.error('Run-from-block failed:', error)
      }
    } finally {
      const currentId = getCurrentExecutionId(workflowId)
      if (currentId === null || currentId === executionIdRef.current) {
        setCurrentExecutionId(workflowId, null)
        setIsExecuting(workflowId, false)
        setActiveBlocks(workflowId, new Set())
      }
    }
  },
  [
    getLastExecutionSnapshot,
    setLastExecutionSnapshot,
    clearLastExecutionSnapshot,
    getCurrentExecutionId,
    setCurrentExecutionId,
    setIsExecuting,
    setActiveBlocks,
    setBlockRunStatus,
@@ -1979,29 +2050,213 @@ export function useWorkflowExecution() {

      const executionId = uuidv4()
      try {
        await executeWorkflow(undefined, undefined, executionId, undefined, 'manual', blockId)
      } catch (error) {
        const errorResult = handleExecutionError(error, { executionId })
        return errorResult
      } finally {
        setCurrentExecutionId(workflowId, null)
        setIsExecuting(workflowId, false)
        setIsDebugging(workflowId, false)
        setActiveBlocks(workflowId, new Set())
      }
    },
    [
      activeWorkflowId,
      setCurrentExecutionId,
      setExecutionResult,
      setIsExecuting,
      setIsDebugging,
      setActiveBlocks,
    ]
  )

  useEffect(() => {
    if (!activeWorkflowId || !hasHydrated) return

    const entries = useTerminalConsoleStore.getState().entries
    const runningEntries = entries.filter(
      (e) => e.isRunning && e.workflowId === activeWorkflowId && e.executionId
    )
    if (runningEntries.length === 0) return

    if (activeReconnections.has(activeWorkflowId)) return
    activeReconnections.add(activeWorkflowId)

    executionStream.cancel(activeWorkflowId)

    const sorted = [...runningEntries].sort((a, b) => {
      const aTime = a.startedAt ? new Date(a.startedAt).getTime() : 0
      const bTime = b.startedAt ? new Date(b.startedAt).getTime() : 0
      return bTime - aTime
    })
    const executionId = sorted[0].executionId!

    const otherExecutionIds = new Set(
      sorted.filter((e) => e.executionId !== executionId).map((e) => e.executionId!)
    )
    if (otherExecutionIds.size > 0) {
      cancelRunningEntries(activeWorkflowId)
    }

    setCurrentExecutionId(activeWorkflowId, executionId)
    setIsExecuting(activeWorkflowId, true)

    const workflowEdges = useWorkflowStore.getState().edges
    const activeBlocksSet = new Set<string>()
    const accumulatedBlockLogs: BlockLog[] = []
    const accumulatedBlockStates = new Map<string, BlockState>()
    const executedBlockIds = new Set<string>()

    const executionIdRef = { current: executionId }

    const handlers = buildBlockEventHandlers({
      workflowId: activeWorkflowId,
      executionIdRef,
      workflowEdges,
      activeBlocksSet,
      accumulatedBlockLogs,
      accumulatedBlockStates,
      executedBlockIds,
      consoleMode: 'update',
      includeStartConsoleEntry: true,
    })

    const originalEntries = entries
      .filter((e) => e.executionId === executionId)
      .map((e) => ({ ...e }))

    let cleared = false
    let reconnectionComplete = false
    let cleanupRan = false
    const clearOnce = () => {
      if (!cleared) {
        cleared = true
        clearExecutionEntries(executionId)
      }
    }

    const reconnectWorkflowId = activeWorkflowId

    executionStream
      .reconnect({
        workflowId: reconnectWorkflowId,
        executionId,
        callbacks: {
          onBlockStarted: (data) => {
            clearOnce()
            handlers.onBlockStarted(data)
          },
          onBlockCompleted: (data) => {
            clearOnce()
            handlers.onBlockCompleted(data)
          },
          onBlockError: (data) => {
            clearOnce()
            handlers.onBlockError(data)
          },
          onExecutionCompleted: () => {
            const currentId = useExecutionStore
              .getState()
              .getCurrentExecutionId(reconnectWorkflowId)
            if (currentId !== executionId) {
              reconnectionComplete = true
              activeReconnections.delete(reconnectWorkflowId)
              return
            }
            clearOnce()
            reconnectionComplete = true
            activeReconnections.delete(reconnectWorkflowId)
            setCurrentExecutionId(reconnectWorkflowId, null)
            setIsExecuting(reconnectWorkflowId, false)
            setActiveBlocks(reconnectWorkflowId, new Set())
          },
          onExecutionError: (data) => {
            const currentId = useExecutionStore
              .getState()
              .getCurrentExecutionId(reconnectWorkflowId)
            if (currentId !== executionId) {
              reconnectionComplete = true
              activeReconnections.delete(reconnectWorkflowId)
              return
            }
            clearOnce()
            reconnectionComplete = true
            activeReconnections.delete(reconnectWorkflowId)
            setCurrentExecutionId(reconnectWorkflowId, null)
            setIsExecuting(reconnectWorkflowId, false)
            setActiveBlocks(reconnectWorkflowId, new Set())
            handleExecutionErrorConsole({
              workflowId: reconnectWorkflowId,
              executionId,
              error: data.error,
              blockLogs: accumulatedBlockLogs,
            })
          },
          onExecutionCancelled: () => {
            const currentId = useExecutionStore
              .getState()
              .getCurrentExecutionId(reconnectWorkflowId)
            if (currentId !== executionId) {
              reconnectionComplete = true
              activeReconnections.delete(reconnectWorkflowId)
              return
            }
            clearOnce()
            reconnectionComplete = true
            activeReconnections.delete(reconnectWorkflowId)
            setCurrentExecutionId(reconnectWorkflowId, null)
            setIsExecuting(reconnectWorkflowId, false)
            setActiveBlocks(reconnectWorkflowId, new Set())
            handleExecutionCancelledConsole({
              workflowId: reconnectWorkflowId,
              executionId,
            })
          },
        },
      })
      .catch((error) => {
        logger.warn('Execution reconnection failed', { executionId, error })
      })
      .finally(() => {
        if (reconnectionComplete || cleanupRan) return
        const currentId = useExecutionStore.getState().getCurrentExecutionId(reconnectWorkflowId)
        if (currentId !== executionId) return
        reconnectionComplete = true
        activeReconnections.delete(reconnectWorkflowId)
        clearExecutionEntries(executionId)
        for (const entry of originalEntries) {
          addConsole({
            workflowId: entry.workflowId,
            blockId: entry.blockId,
            blockName: entry.blockName,
            blockType: entry.blockType,
            executionId: entry.executionId,
            executionOrder: entry.executionOrder,
            isRunning: false,
            warning: 'Execution result unavailable — check the logs page',
          })
        }
        setCurrentExecutionId(reconnectWorkflowId, null)
        setIsExecuting(reconnectWorkflowId, false)
        setActiveBlocks(reconnectWorkflowId, new Set())
      })

    return () => {
      cleanupRan = true
      executionStream.cancel(reconnectWorkflowId)
      activeReconnections.delete(reconnectWorkflowId)

      if (cleared && !reconnectionComplete) {
        clearExecutionEntries(executionId)
        for (const entry of originalEntries) {
          addConsole(entry)
        }
      }
    }
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [activeWorkflowId, hasHydrated])

  return {
    isExecuting,
    isDebugging,

@@ -589,6 +589,7 @@ export async function executeScheduleJob(payload: ScheduleExecutionPayload) {

export const scheduleExecution = task({
  id: 'schedule-execution',
  machine: 'medium-1x',
  retry: {
    maxAttempts: 1,
  },

@@ -669,6 +669,7 @@ async function executeWebhookJobInternal(

export const webhookExecution = task({
  id: 'webhook-execution',
  machine: 'medium-1x',
  retry: {
    maxAttempts: 1,
  },

@@ -197,5 +197,6 @@ export async function executeWorkflowJob(payload: WorkflowExecutionPayload) {

export const workflowExecutionTask = task({
  id: 'workflow-execution',
  machine: 'medium-1x',
  run: executeWorkflowJob,
})
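
For context, these `task({ ... })` configs are Trigger.dev task definitions; `machine: 'medium-1x'` pins the worker size and `maxAttempts: 1` disables retries. A hedged sketch of enqueueing one of them, assuming Trigger.dev v3's `tasks.trigger` helper and a simplified payload:

```typescript
import { tasks } from '@trigger.dev/sdk/v3'
import type { workflowExecutionTask } from './workflow-execution'

// The string id must match the `id` given to task({ ... }) above. The payload
// fields here are illustrative; the real shape is WorkflowExecutionPayload.
export async function enqueueWorkflowRun(payload: { workflowId: string; userId: string }) {
  return tasks.trigger<typeof workflowExecutionTask>('workflow-execution', payload)
}
```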
@@ -10,9 +10,11 @@ import {
  getReasoningEffortValuesForModel,
  getThinkingLevelsForModel,
  getVerbosityValuesForModel,
  MODELS_WITH_DEEP_RESEARCH,
  MODELS_WITH_REASONING_EFFORT,
  MODELS_WITH_THINKING,
  MODELS_WITH_VERBOSITY,
  MODELS_WITHOUT_MEMORY,
  providers,
  supportsTemperature,
} from '@/providers/utils'
@@ -412,12 +414,22 @@ Return ONLY the JSON array.`,
      title: 'Tools',
      type: 'tool-input',
      defaultValue: [],
      condition: {
        field: 'model',
        value: MODELS_WITH_DEEP_RESEARCH,
        not: true,
      },
    },
    {
      id: 'skills',
      title: 'Skills',
      type: 'skill-input',
      defaultValue: [],
      condition: {
        field: 'model',
        value: MODELS_WITH_DEEP_RESEARCH,
        not: true,
      },
    },
    {
      id: 'memoryType',
@@ -431,6 +443,11 @@ Return ONLY the JSON array.`,
        { label: 'Sliding window (tokens)', id: 'sliding_window_tokens' },
      ],
      defaultValue: 'none',
      condition: {
        field: 'model',
        value: MODELS_WITHOUT_MEMORY,
        not: true,
      },
    },
    {
      id: 'conversationId',
@@ -444,6 +461,7 @@ Return ONLY the JSON array.`,
      condition: {
        field: 'memoryType',
        value: ['conversation', 'sliding_window', 'sliding_window_tokens'],
        and: { field: 'model', value: MODELS_WITHOUT_MEMORY, not: true },
      },
    },
    {
@@ -454,6 +472,7 @@ Return ONLY the JSON array.`,
      condition: {
        field: 'memoryType',
        value: ['sliding_window'],
        and: { field: 'model', value: MODELS_WITHOUT_MEMORY, not: true },
      },
    },
    {
@@ -464,6 +483,7 @@ Return ONLY the JSON array.`,
      condition: {
        field: 'memoryType',
        value: ['sliding_window_tokens'],
        and: { field: 'model', value: MODELS_WITHOUT_MEMORY, not: true },
      },
    },
    {
@@ -477,9 +497,13 @@ Return ONLY the JSON array.`,
      condition: () => ({
        field: 'model',
        value: (() => {
          const deepResearch = new Set(MODELS_WITH_DEEP_RESEARCH.map((m) => m.toLowerCase()))
          const allModels = Object.keys(getBaseModelProviders())
          return allModels.filter(
            (model) =>
              supportsTemperature(model) &&
              getMaxTemperature(model) === 1 &&
              !deepResearch.has(model.toLowerCase())
          )
        })(),
      }),
@@ -495,9 +519,13 @@ Return ONLY the JSON array.`,
      condition: () => ({
        field: 'model',
        value: (() => {
          const deepResearch = new Set(MODELS_WITH_DEEP_RESEARCH.map((m) => m.toLowerCase()))
          const allModels = Object.keys(getBaseModelProviders())
          return allModels.filter(
            (model) =>
              supportsTemperature(model) &&
              getMaxTemperature(model) === 2 &&
              !deepResearch.has(model.toLowerCase())
          )
        })(),
      }),
@@ -508,6 +536,11 @@ Return ONLY the JSON array.`,
      type: 'short-input',
      placeholder: 'Enter max tokens (e.g., 4096)...',
      mode: 'advanced',
      condition: {
        field: 'model',
        value: MODELS_WITH_DEEP_RESEARCH,
        not: true,
      },
    },
    {
      id: 'responseFormat',
@@ -515,6 +548,11 @@ Return ONLY the JSON array.`,
      type: 'code',
      placeholder: 'Enter JSON schema...',
      language: 'json',
      condition: {
        field: 'model',
        value: MODELS_WITH_DEEP_RESEARCH,
        not: true,
      },
      wandConfig: {
        enabled: true,
        maintainHistory: true,
@@ -607,6 +645,16 @@ Example 3 (Array Input):
        generationType: 'json-schema',
      },
    },
    {
      id: 'previousInteractionId',
      title: 'Previous Interaction ID',
      type: 'short-input',
      placeholder: 'e.g., {{agent_1.interactionId}}',
      condition: {
        field: 'model',
        value: MODELS_WITH_DEEP_RESEARCH,
      },
    },
  ],
  tools: {
    access: [
@@ -770,5 +818,13 @@ Example 3 (Array Input):
      description: 'Provider timing information',
    },
    cost: { type: 'json', description: 'Cost of the API call' },
    interactionId: {
      type: 'string',
      description: 'Interaction ID for multi-turn deep research follow-ups',
      condition: {
        field: 'model',
        value: MODELS_WITH_DEEP_RESEARCH,
      },
    },
  },
}
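
The `condition` objects above gate subblock visibility by the currently selected model. The real evaluator lives in the block rendering layer; a minimal illustrative sketch covering only the `field`/`value`/`not` form (not the nested `and`):

```typescript
// Illustrative evaluator for the subblock `condition` shape used above.
interface SubblockCondition {
  field: string
  value: string | string[]
  not?: boolean
}

function isConditionMet(condition: SubblockCondition, values: Record<string, string>): boolean {
  const current = values[condition.field] ?? ''
  const candidates = Array.isArray(condition.value) ? condition.value : [condition.value]
  const matches = candidates.includes(current)
  // `not: true` inverts the match, e.g. hiding Tools for deep-research models.
  return condition.not ? !matches : matches
}
```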
@@ -394,6 +394,7 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
      // Page Property Operations
      { label: 'List Page Properties', id: 'list_page_properties' },
      { label: 'Create Page Property', id: 'create_page_property' },
      { label: 'Delete Page Property', id: 'delete_page_property' },
      // Search Operations
      { label: 'Search Content', id: 'search' },
      { label: 'Search in Space', id: 'search_in_space' },
@@ -414,6 +415,9 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
      // Label Operations
      { label: 'List Labels', id: 'list_labels' },
      { label: 'Add Label', id: 'add_label' },
      { label: 'Delete Label', id: 'delete_label' },
      { label: 'Get Pages by Label', id: 'get_pages_by_label' },
      { label: 'List Space Labels', id: 'list_space_labels' },
      // Space Operations
      { label: 'Get Space', id: 'get_space' },
      { label: 'List Spaces', id: 'list_spaces' },
@@ -485,6 +489,8 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
        'search_in_space',
        'get_space',
        'list_spaces',
        'get_pages_by_label',
        'list_space_labels',
      ],
      not: true,
    },
@@ -500,6 +506,8 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
        'list_labels',
        'upload_attachment',
        'add_label',
        'delete_label',
        'delete_page_property',
        'get_page_children',
        'get_page_ancestors',
        'list_page_versions',
@@ -527,6 +535,8 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
        'search_in_space',
        'get_space',
        'list_spaces',
        'get_pages_by_label',
        'list_space_labels',
      ],
      not: true,
    },
@@ -542,6 +552,8 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
        'list_labels',
        'upload_attachment',
        'add_label',
        'delete_label',
        'delete_page_property',
        'get_page_children',
        'get_page_ancestors',
        'list_page_versions',
@@ -566,6 +578,7 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
        'search_in_space',
        'create_blogpost',
        'list_blogposts_in_space',
        'list_space_labels',
      ],
    },
  },
@@ -601,6 +614,14 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
      required: true,
      condition: { field: 'operation', value: 'create_page_property' },
    },
    {
      id: 'propertyId',
      title: 'Property ID',
      type: 'short-input',
      placeholder: 'Enter property ID to delete',
      required: true,
      condition: { field: 'operation', value: 'delete_page_property' },
    },
    {
      id: 'title',
      title: 'Title',
@@ -694,7 +715,7 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
      type: 'short-input',
      placeholder: 'Enter label name',
      required: true,
      condition: { field: 'operation', value: ['add_label', 'delete_label'] },
    },
    {
      id: 'labelPrefix',
@@ -709,6 +730,14 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
      value: () => 'global',
      condition: { field: 'operation', value: 'add_label' },
    },
    {
      id: 'labelId',
      title: 'Label ID',
      type: 'short-input',
      placeholder: 'Enter label ID',
      required: true,
      condition: { field: 'operation', value: 'get_pages_by_label' },
    },
    {
      id: 'blogPostStatus',
      title: 'Status',
@@ -759,6 +788,8 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
        'list_page_versions',
        'list_page_properties',
        'list_labels',
        'get_pages_by_label',
        'list_space_labels',
      ],
    },
  },
@@ -780,6 +811,8 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
        'list_page_versions',
        'list_page_properties',
        'list_labels',
        'get_pages_by_label',
        'list_space_labels',
      ],
    },
  },
@@ -800,6 +833,7 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
      // Property Tools
      'confluence_list_page_properties',
      'confluence_create_page_property',
      'confluence_delete_page_property',
      // Search Tools
      'confluence_search',
      'confluence_search_in_space',
@@ -820,6 +854,9 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
      // Label Tools
      'confluence_list_labels',
      'confluence_add_label',
      'confluence_delete_label',
      'confluence_get_pages_by_label',
      'confluence_list_space_labels',
      // Space Tools
      'confluence_get_space',
      'confluence_list_spaces',
@@ -852,6 +889,8 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
          return 'confluence_list_page_properties'
        case 'create_page_property':
          return 'confluence_create_page_property'
        case 'delete_page_property':
          return 'confluence_delete_page_property'
        // Search Operations
        case 'search':
          return 'confluence_search'
@@ -887,6 +926,12 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
          return 'confluence_list_labels'
        case 'add_label':
          return 'confluence_add_label'
        case 'delete_label':
          return 'confluence_delete_label'
        case 'get_pages_by_label':
          return 'confluence_get_pages_by_label'
        case 'list_space_labels':
          return 'confluence_list_space_labels'
        // Space Operations
        case 'get_space':
          return 'confluence_get_space'
@@ -908,7 +953,9 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
        versionNumber,
        propertyKey,
        propertyValue,
        propertyId,
        labelPrefix,
        labelId,
        blogPostStatus,
        purge,
        bodyFormat,
@@ -959,7 +1006,9 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
        }
      }

      // Operations that support generic cursor pagination.
      // get_pages_by_label and list_space_labels have dedicated handlers
      // below that pass cursor along with their required params (labelId, spaceId).
      const supportsCursor = [
        'list_attachments',
        'list_spaces',
@@ -996,6 +1045,35 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
        }
      }

      if (operation === 'delete_page_property') {
        return {
          credential,
          pageId: effectivePageId,
          operation,
          propertyId,
          ...rest,
        }
      }

      if (operation === 'get_pages_by_label') {
        return {
          credential,
          operation,
          labelId,
          cursor: cursor || undefined,
          ...rest,
        }
      }

      if (operation === 'list_space_labels') {
        return {
          credential,
          operation,
          cursor: cursor || undefined,
          ...rest,
        }
      }

      if (operation === 'upload_attachment') {
        const normalizedFile = normalizeFileInput(attachmentFile, { single: true })
        if (!normalizedFile) {
@@ -1044,7 +1122,9 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
    attachmentFileName: { type: 'string', description: 'Custom file name for attachment' },
    attachmentComment: { type: 'string', description: 'Comment for the attachment' },
    labelName: { type: 'string', description: 'Label name' },
    labelId: { type: 'string', description: 'Label identifier' },
    labelPrefix: { type: 'string', description: 'Label prefix (global, my, team, system)' },
    propertyId: { type: 'string', description: 'Property identifier' },
    blogPostStatus: { type: 'string', description: 'Blog post status (current or draft)' },
    purge: { type: 'boolean', description: 'Permanently delete instead of moving to trash' },
    bodyFormat: { type: 'string', description: 'Body format for comments' },
@@ -1080,6 +1160,7 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
    // Label Results
    labels: { type: 'array', description: 'List of labels' },
    labelName: { type: 'string', description: 'Label name' },
    labelId: { type: 'string', description: 'Label identifier' },
    // Space Results
    spaces: { type: 'array', description: 'List of spaces' },
    spaceId: { type: 'string', description: 'Space identifier' },

@@ -58,6 +58,16 @@ export const S3Block: BlockConfig<S3Response> = {
      },
      required: true,
    },
    {
      id: 'getObjectRegion',
      title: 'AWS Region',
      type: 'short-input',
      placeholder: 'Used when S3 URL does not include region',
      condition: {
        field: 'operation',
        value: ['get_object'],
      },
    },
    {
      id: 'bucketName',
      title: 'Bucket Name',
@@ -291,34 +301,11 @@ export const S3Block: BlockConfig<S3Response> = {
        if (!params.s3Uri) {
          throw new Error('S3 Object URL is required')
        }

        return {
          accessKeyId: params.accessKeyId,
          secretAccessKey: params.secretAccessKey,
          region: params.getObjectRegion || params.region,
          s3Uri: params.s3Uri,
        }
      }

@@ -401,6 +388,7 @@ export const S3Block: BlockConfig<S3Response> = {
    acl: { type: 'string', description: 'Access control list' },
    // Download inputs
    s3Uri: { type: 'string', description: 'S3 object URL' },
    getObjectRegion: { type: 'string', description: 'Optional AWS region override for downloads' },
    // List inputs
    prefix: { type: 'string', description: 'Prefix filter' },
    maxKeys: { type: 'number', description: 'Maximum results' },

@@ -2,8 +2,8 @@
slug: enterprise
title: 'Build with Sim for Enterprise'
description: 'Access control, BYOK, self-hosted deployments, on-prem Copilot, SSO & SAML, whitelabeling, Admin API, and flexible data retention—enterprise features for teams with strict security and compliance requirements.'
date: 2026-02-11
updated: 2026-02-11
authors:
  - vik
readingTime: 10
@@ -13,8 +13,8 @@ ogAlt: 'Sim Enterprise features overview'
about: ['Enterprise Software', 'Security', 'Compliance', 'Self-Hosting']
timeRequired: PT10M
canonical: https://sim.ai/studio/enterprise
featured: true
draft: false
---

We've been working with security teams at larger organizations to bring Sim into environments with strict compliance and data handling requirements. This post covers the enterprise capabilities we've built: granular access control, bring-your-own-keys, self-hosted deployments, on-prem Copilot, SSO & SAML, whitelabeling, compliance, and programmatic management via the Admin API.

@@ -999,6 +999,7 @@ export class AgentBlockHandler implements BlockHandler {
      reasoningEffort: inputs.reasoningEffort,
      verbosity: inputs.verbosity,
      thinkingLevel: inputs.thinkingLevel,
      previousInteractionId: inputs.previousInteractionId,
    }
  }

@@ -1069,6 +1070,7 @@ export class AgentBlockHandler implements BlockHandler {
      reasoningEffort: providerRequest.reasoningEffort,
      verbosity: providerRequest.verbosity,
      thinkingLevel: providerRequest.thinkingLevel,
      previousInteractionId: providerRequest.previousInteractionId,
    })

    return this.processProviderResponse(response, block, responseFormat)
@@ -1269,6 +1271,7 @@ export class AgentBlockHandler implements BlockHandler {
      content: result.content,
      model: result.model,
      ...this.createResponseMetadata(result),
      ...(result.interactionId && { interactionId: result.interactionId }),
    }
  }


@@ -20,6 +20,8 @@ export interface AgentInputs {
  conversationId?: string // Required for all non-none memory types
  slidingWindowSize?: string // For message-based sliding window
  slidingWindowTokens?: string // For token-based sliding window
  // Deep research multi-turn
  previousInteractionId?: string // Interactions API previous interaction reference
  // LLM parameters
  temperature?: string
  maxTokens?: string
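
To make the multi-turn flow concrete: the first deep-research run surfaces `interactionId` in its outputs, and a follow-up block feeds it back as `previousInteractionId`. A hypothetical two-turn chain; the `runAgent` helper and model id are illustrative, and only the input/output field names come from the code above:

```typescript
// Hypothetical stand-in for running an agent block; not repo code.
declare function runAgent(inputs: {
  model: string
  userPrompt: string
  previousInteractionId?: string
}): Promise<{ content: string; interactionId?: string }>

const first = await runAgent({
  model: 'o3-deep-research', // illustrative deep-research model id
  userPrompt: 'Survey recent RAG papers',
})
const followUp = await runAgent({
  model: 'o3-deep-research',
  userPrompt: 'Expand on the evaluation section',
  previousInteractionId: first.interactionId, // {{agent_1.interactionId}} in the UI
})
```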

@@ -423,7 +423,7 @@ interface GenerateVersionDescriptionVariables {

const VERSION_DESCRIPTION_SYSTEM_PROMPT = `You are writing deployment version descriptions for a workflow automation platform.

Write a brief, factual description (1-3 sentences, under 2000 characters) that states what changed between versions.

Guidelines:
- Use the specific values provided (credential names, channel names, model names)

@@ -1,4 +1,4 @@
import { useCallback } from 'react'
import { createLogger } from '@sim/logger'
import type {
  BlockCompletedData,
@@ -16,6 +16,18 @@ import type { SerializableExecutionState } from '@/executor/execution/types'

const logger = createLogger('useExecutionStream')

/**
 * Detects errors caused by the browser killing a fetch (page refresh, navigation, tab close).
 * These should be treated as clean disconnects, not execution errors.
 */
function isClientDisconnectError(error: any): boolean {
  if (error.name === 'AbortError') return true
  const msg = (error.message ?? '').toLowerCase()
  return (
    msg.includes('network error') || msg.includes('failed to fetch') || msg.includes('load failed')
  )
}

/**
 * Processes SSE events from a response body and invokes appropriate callbacks.
 */
@@ -121,6 +133,7 @@ export interface ExecuteStreamOptions {
    parallels?: Record<string, any>
  }
  stopAfterBlockId?: string
  onExecutionId?: (executionId: string) => void
  callbacks?: ExecutionStreamCallbacks
}

@@ -129,30 +142,40 @@ export interface ExecuteFromBlockOptions {
  startBlockId: string
  sourceSnapshot: SerializableExecutionState
  input?: any
  onExecutionId?: (executionId: string) => void
  callbacks?: ExecutionStreamCallbacks
}

export interface ReconnectStreamOptions {
  workflowId: string
  executionId: string
  fromEventId?: number
  callbacks?: ExecutionStreamCallbacks
}

/**
 * Module-level map shared across all hook instances.
 * Ensures ANY instance can cancel streams started by ANY other instance,
 * which is critical for SPA navigation where the original hook instance unmounts
 * but the SSE stream must be cancellable from the new instance.
 */
const sharedAbortControllers = new Map<string, AbortController>()

/**
 * Hook for executing workflows via server-side SSE streaming.
 * Supports concurrent executions via per-workflow AbortController maps.
 */
export function useExecutionStream() {
  const execute = useCallback(async (options: ExecuteStreamOptions) => {
    const { workflowId, callbacks = {}, onExecutionId, ...payload } = options

    const existing = sharedAbortControllers.get(workflowId)
    if (existing) {
      existing.abort()
    }

    const abortController = new AbortController()
    sharedAbortControllers.set(workflowId, abortController)

    try {
      const response = await fetch(`/api/workflows/${workflowId}/execute`, {
@@ -177,42 +200,48 @@ export function useExecutionStream() {
        throw new Error('No response body')
      }

      const serverExecutionId = response.headers.get('X-Execution-Id')
      if (serverExecutionId) {
        onExecutionId?.(serverExecutionId)
      }

      const reader = response.body.getReader()
      await processSSEStream(reader, callbacks, 'Execution')
    } catch (error: any) {
      if (isClientDisconnectError(error)) {
        logger.info('Execution stream disconnected (page unload or abort)')
        return
      }
      logger.error('Execution stream error:', error)
      callbacks.onExecutionError?.({
        error: error.message || 'Unknown error',
        duration: 0,
      })
      throw error
    } finally {
      if (sharedAbortControllers.get(workflowId) === abortController) {
        sharedAbortControllers.delete(workflowId)
      }
    }
  }, [])

  const executeFromBlock = useCallback(async (options: ExecuteFromBlockOptions) => {
    const {
      workflowId,
      startBlockId,
      sourceSnapshot,
      input,
      onExecutionId,
      callbacks = {},
    } = options

    const existing = sharedAbortControllers.get(workflowId)
    if (existing) {
      existing.abort()
    }

    const abortController = new AbortController()
    sharedAbortControllers.set(workflowId, abortController)

    try {
      const response = await fetch(`/api/workflows/${workflowId}/execute`, {
@@ -246,64 +275,80 @@ export function useExecutionStream() {
        throw new Error('No response body')
      }

      const serverExecutionId = response.headers.get('X-Execution-Id')
      if (serverExecutionId) {
        onExecutionId?.(serverExecutionId)
      }

      const reader = response.body.getReader()
      await processSSEStream(reader, callbacks, 'Run-from-block')
    } catch (error: any) {
      if (isClientDisconnectError(error)) {
        logger.info('Run-from-block stream disconnected (page unload or abort)')
        return
      }
      logger.error('Run-from-block execution error:', error)
      callbacks.onExecutionError?.({
        error: error.message || 'Unknown error',
        duration: 0,
      })
      throw error
    } finally {
      if (sharedAbortControllers.get(workflowId) === abortController) {
        sharedAbortControllers.delete(workflowId)
      }
    }
  }, [])

  const reconnect = useCallback(async (options: ReconnectStreamOptions) => {
    const { workflowId, executionId, fromEventId = 0, callbacks = {} } = options

    const existing = sharedAbortControllers.get(workflowId)
    if (existing) {
      existing.abort()
    }

    const abortController = new AbortController()
    sharedAbortControllers.set(workflowId, abortController)
    try {
      const response = await fetch(
        `/api/workflows/${workflowId}/executions/${executionId}/stream?from=${fromEventId}`,
        { signal: abortController.signal }
      )
      if (!response.ok) throw new Error(`Reconnect failed (${response.status})`)
      if (!response.body) throw new Error('No response body')

      await processSSEStream(response.body.getReader(), callbacks, 'Reconnect')
    } catch (error: any) {
      if (isClientDisconnectError(error)) return
      logger.error('Reconnection stream error:', error)
      throw error
    } finally {
      if (sharedAbortControllers.get(workflowId) === abortController) {
        sharedAbortControllers.delete(workflowId)
      }
    }
  }, [])

  const cancel = useCallback((workflowId?: string) => {
    if (workflowId) {
      const controller = sharedAbortControllers.get(workflowId)
      if (controller) {
        controller.abort()
        sharedAbortControllers.delete(workflowId)
      }
    } else {
      for (const [, controller] of sharedAbortControllers) {
        controller.abort()
      }
      sharedAbortControllers.clear()
    }
  }, [])

  return {
    execute,
    executeFromBlock,
    reconnect,
    cancel,
  }
}
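
A hedged sketch of why the controller map is module-level rather than a `useRef`: two components can mount independent `useExecutionStream()` instances, and the second can still abort a stream the first one started, even after an SPA navigation unmounts the original. Component names are illustrative, and the `execute` call omits any payload fields beyond `workflowId`:

```tsx
function RunButton({ workflowId }: { workflowId: string }) {
  const { execute } = useExecutionStream()
  // Errors are swallowed here for brevity; execute() rethrows stream errors.
  return <button onClick={() => execute({ workflowId }).catch(() => {})}>Run</button>
}

function StopButton({ workflowId }: { workflowId: string }) {
  // A different hook instance: cancel() still finds the AbortController
  // in the shared module-level map and aborts RunButton's stream.
  const { cancel } = useExecutionStream()
  return <button onClick={() => cancel(workflowId)}>Stop</button>
}
```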
@@ -20,6 +20,8 @@ export interface BuildPayloadParams {
  fileAttachments?: Array<{ id: string; key: string; size: number; [key: string]: unknown }>
  commands?: string[]
  chatId?: string
  conversationId?: string
  prefetch?: boolean
  implicitFeedback?: string
}

@@ -64,6 +66,10 @@ export async function buildCopilotRequestPayload(
    fileAttachments,
    commands,
    chatId,
    conversationId,
    prefetch,
    conversationHistory,
    implicitFeedback,
  } = params

  const selectedModel = options.selectedModel
@@ -154,6 +160,12 @@ export async function buildCopilotRequestPayload(
    version: SIM_AGENT_VERSION,
    ...(contexts && contexts.length > 0 ? { context: contexts } : {}),
    ...(chatId ? { chatId } : {}),
    ...(conversationId ? { conversationId } : {}),
    ...(Array.isArray(conversationHistory) && conversationHistory.length > 0
      ? { conversationHistory }
      : {}),
    ...(typeof prefetch === 'boolean' ? { prefetch } : {}),
    ...(implicitFeedback ? { implicitFeedback } : {}),
    ...(processedFileContents.length > 0 ? { fileAttachments: processedFileContents } : {}),
    ...(integrationTools.length > 0 ? { integrationTools } : {}),
    ...(credentials ? { credentials } : {}),

@@ -5,7 +5,7 @@ import { serializeMessagesForDB } from './serialization'

const logger = createLogger('CopilotMessagePersistence')

export async function persistMessages(params: {
  chatId: string
  messages: CopilotMessage[]
  sensitiveCredentialIds?: Set<string>
@@ -13,29 +13,24 @@ interface PersistParams {
  mode?: string
  model?: string
  conversationId?: string
}): Promise<boolean> {
  try {
    const dbMessages = serializeMessagesForDB(
      params.messages,
      params.sensitiveCredentialIds ?? new Set<string>()
    )
    const response = await fetch(COPILOT_UPDATE_MESSAGES_API_PATH, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({
        chatId: params.chatId,
        messages: dbMessages,
        ...(params.planArtifact !== undefined ? { planArtifact: params.planArtifact } : {}),
        ...(params.mode || params.model
          ? { config: { mode: params.mode, model: params.model } }
          : {}),
        ...(params.conversationId ? { conversationId: params.conversationId } : {}),
      }),
    })
    return response.ok
  } catch (error) {
@@ -46,27 +41,3 @@ export async function persistMessages(params: PersistParams): Promise<boolean> {
    return false
  }
}
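
Usage is a single awaited call; the function returns `false` rather than throwing on failure. A hedged example, where the ids, message array, and model string are placeholders and `logger` is the module-level logger above:

```typescript
const ok = await persistMessages({
  chatId: 'chat_123',        // placeholder chat id
  messages: currentMessages, // CopilotMessage[] from the chat store (assumed in scope)
  mode: 'agent',             // optional config, only persisted when present
  model: 'claude-sonnet-4',  // placeholder model id
})
if (!ok) logger.warn('Copilot message persistence failed')
```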
@@ -1,7 +1,7 @@
import { db } from '@sim/db'
import { customTools, workflow } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, desc, eq, isNull, or } from 'drizzle-orm'
import { SIM_AGENT_API_URL } from '@/lib/copilot/constants'
import type {
  ExecutionContext,
@@ -12,6 +12,7 @@ import { routeExecution } from '@/lib/copilot/tools/server/router'
import { env } from '@/lib/core/config/env'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { getEffectiveDecryptedEnv } from '@/lib/environment/utils'
import { upsertCustomTools } from '@/lib/workflows/custom-tools/operations'
import { getTool, resolveToolId } from '@/tools/utils'
import {
  executeCheckDeploymentStatus,
@@ -76,6 +77,247 @@ import {

const logger = createLogger('CopilotToolExecutor')

type ManageCustomToolOperation = 'add' | 'edit' | 'delete' | 'list'

interface ManageCustomToolSchema {
  type: 'function'
  function: {
    name: string
    description?: string
    parameters: Record<string, unknown>
  }
}

interface ManageCustomToolParams {
  operation?: string
  toolId?: string
  schema?: ManageCustomToolSchema
  code?: string
  title?: string
  workspaceId?: string
}

async function executeManageCustomTool(
  rawParams: Record<string, unknown>,
  context: ExecutionContext
): Promise<ToolCallResult> {
  const params = rawParams as ManageCustomToolParams
  const operation = String(params.operation || '').toLowerCase() as ManageCustomToolOperation
  const workspaceId = params.workspaceId || context.workspaceId

  if (!operation) {
    return { success: false, error: "Missing required 'operation' argument" }
  }

  try {
    if (operation === 'list') {
      const toolsForUser = workspaceId
        ? await db
            .select()
            .from(customTools)
            .where(
              or(
                eq(customTools.workspaceId, workspaceId),
                and(isNull(customTools.workspaceId), eq(customTools.userId, context.userId))
              )
            )
            .orderBy(desc(customTools.createdAt))
        : await db
            .select()
            .from(customTools)
            .where(and(isNull(customTools.workspaceId), eq(customTools.userId, context.userId)))
            .orderBy(desc(customTools.createdAt))

      return {
        success: true,
        output: {
          success: true,
          operation,
          tools: toolsForUser,
          count: toolsForUser.length,
        },
      }
    }

    if (operation === 'add') {
      if (!workspaceId) {
        return {
          success: false,
          error: "workspaceId is required for operation 'add'",
        }
      }
      if (!params.schema || !params.code) {
        return {
          success: false,
          error: "Both 'schema' and 'code' are required for operation 'add'",
        }
      }

      const title = params.title || params.schema.function?.name
      if (!title) {
        return { success: false, error: "Missing tool title or schema.function.name for 'add'" }
      }

      const resultTools = await upsertCustomTools({
        tools: [
          {
            title,
            schema: params.schema,
            code: params.code,
          },
        ],
        workspaceId,
        userId: context.userId,
      })
      const created = resultTools.find((tool) => tool.title === title)

      return {
        success: true,
        output: {
          success: true,
          operation,
          toolId: created?.id,
          title,
          message: `Created custom tool "${title}"`,
        },
      }
    }

    if (operation === 'edit') {
      if (!workspaceId) {
        return {
          success: false,
          error: "workspaceId is required for operation 'edit'",
        }
      }
      if (!params.toolId) {
        return { success: false, error: "'toolId' is required for operation 'edit'" }
      }
      if (!params.schema && !params.code) {
        return {
          success: false,
          error: "At least one of 'schema' or 'code' is required for operation 'edit'",
        }
      }

      const workspaceTool = await db
        .select()
        .from(customTools)
        .where(and(eq(customTools.id, params.toolId), eq(customTools.workspaceId, workspaceId)))
        .limit(1)

      const legacyTool =
        workspaceTool.length === 0
          ? await db
              .select()
              .from(customTools)
              .where(
                and(
                  eq(customTools.id, params.toolId),
                  isNull(customTools.workspaceId),
                  eq(customTools.userId, context.userId)
                )
              )
              .limit(1)
          : []

      const existing = workspaceTool[0] || legacyTool[0]
      if (!existing) {
        return { success: false, error: `Custom tool not found: ${params.toolId}` }
      }

      const mergedSchema = params.schema || (existing.schema as ManageCustomToolSchema)
      const mergedCode = params.code || existing.code
      const title = params.title || mergedSchema.function?.name || existing.title

      await upsertCustomTools({
        tools: [
          {
            id: params.toolId,
            title,
            schema: mergedSchema,
            code: mergedCode,
          },
        ],
        workspaceId,
        userId: context.userId,
      })

      return {
        success: true,
        output: {
          success: true,
          operation,
          toolId: params.toolId,
          title,
          message: `Updated custom tool "${title}"`,
        },
      }
    }

    if (operation === 'delete') {
      if (!params.toolId) {
        return { success: false, error: "'toolId' is required for operation 'delete'" }
      }

      const workspaceDelete =
        workspaceId != null
          ? await db
              .delete(customTools)
              .where(
                and(eq(customTools.id, params.toolId), eq(customTools.workspaceId, workspaceId))
              )
              .returning({ id: customTools.id })
          : []

      const legacyDelete =
        workspaceDelete.length === 0
          ? await db
              .delete(customTools)
              .where(
                and(
                  eq(customTools.id, params.toolId),
                  isNull(customTools.workspaceId),
                  eq(customTools.userId, context.userId)
                )
              )
              .returning({ id: customTools.id })
          : []

      const deleted = workspaceDelete[0] || legacyDelete[0]
      if (!deleted) {
        return { success: false, error: `Custom tool not found: ${params.toolId}` }
      }

      return {
        success: true,
        output: {
          success: true,
          operation,
          toolId: params.toolId,
          message: 'Deleted custom tool',
        },
      }
    }

    return {
      success: false,
      error: `Unsupported operation for manage_custom_tool: ${operation}`,
    }
  } catch (error) {
    logger.error('manage_custom_tool execution failed', {
      operation,
      workspaceId,
      userId: context.userId,
      error: error instanceof Error ? error.message : String(error),
    })
    return {
      success: false,
      error: error instanceof Error ? error.message : 'Failed to manage custom tool',
    }
  }
}

const SERVER_TOOLS = new Set<string>([
  'get_blocks_and_tools',
  'get_blocks_metadata',
@@ -161,6 +403,19 @@ const SIM_WORKFLOW_TOOL_HANDLERS: Record<
      }
    }
  },
  oauth_request_access: async (p, _c) => {
    const providerName = (p.providerName || p.provider_name || 'the provider') as string
    return {
      success: true,
      output: {
        success: true,
        status: 'requested',
        providerName,
        message: `Requested ${providerName} OAuth connection. The user should complete the OAuth modal in the UI, then retry credential-dependent actions.`,
      },
    }
  },
  manage_custom_tool: (p, c) => executeManageCustomTool(p, c),
}
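
An illustrative invocation of the handler above, shaped the way the copilot router might dispatch it; the context values and tool schema are placeholders:

```typescript
const result = await executeManageCustomTool(
  {
    operation: 'add',
    schema: {
      type: 'function',
      function: {
        name: 'lookup_order',
        description: 'Fetch an order by id',
        parameters: { type: 'object', properties: { orderId: { type: 'string' } } },
      },
    },
    code: 'return { orderId: params.orderId }',
  },
  // Placeholder context; the real ExecutionContext carries more fields.
  { userId: 'user_123', workspaceId: 'ws_456' } as ExecutionContext
)
// On success, result.output.toolId holds the new tool's id.
```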
/**

apps/sim/lib/execution/event-buffer.ts (new file, 246 lines)
@@ -0,0 +1,246 @@
import { createLogger } from '@sim/logger'
import { getRedisClient } from '@/lib/core/config/redis'
import type { ExecutionEvent } from '@/lib/workflows/executor/execution-events'

const logger = createLogger('ExecutionEventBuffer')

const REDIS_PREFIX = 'execution:stream:'
const TTL_SECONDS = 60 * 60 // 1 hour
const EVENT_LIMIT = 1000
const RESERVE_BATCH = 100
const FLUSH_INTERVAL_MS = 15
const FLUSH_MAX_BATCH = 200

function getEventsKey(executionId: string) {
  return `${REDIS_PREFIX}${executionId}:events`
}

function getSeqKey(executionId: string) {
  return `${REDIS_PREFIX}${executionId}:seq`
}

function getMetaKey(executionId: string) {
  return `${REDIS_PREFIX}${executionId}:meta`
}

export type ExecutionStreamStatus = 'active' | 'complete' | 'error' | 'cancelled'

export interface ExecutionStreamMeta {
  status: ExecutionStreamStatus
  userId?: string
  workflowId?: string
  updatedAt?: string
}

export interface ExecutionEventEntry {
  eventId: number
  executionId: string
  event: ExecutionEvent
}

export interface ExecutionEventWriter {
  write: (event: ExecutionEvent) => Promise<ExecutionEventEntry>
  flush: () => Promise<void>
  close: () => Promise<void>
}

export async function setExecutionMeta(
  executionId: string,
  meta: Partial<ExecutionStreamMeta>
): Promise<void> {
  const redis = getRedisClient()
  if (!redis) {
    logger.warn('setExecutionMeta: Redis client unavailable', { executionId })
    return
  }
  try {
    const key = getMetaKey(executionId)
    const payload: Record<string, string> = {
      updatedAt: new Date().toISOString(),
    }
    if (meta.status) payload.status = meta.status
    if (meta.userId) payload.userId = meta.userId
    if (meta.workflowId) payload.workflowId = meta.workflowId
    await redis.hset(key, payload)
    await redis.expire(key, TTL_SECONDS)
  } catch (error) {
    logger.warn('Failed to update execution meta', {
      executionId,
      error: error instanceof Error ? error.message : String(error),
    })
  }
}

export async function getExecutionMeta(executionId: string): Promise<ExecutionStreamMeta | null> {
  const redis = getRedisClient()
  if (!redis) {
    logger.warn('getExecutionMeta: Redis client unavailable', { executionId })
    return null
  }
  try {
    const key = getMetaKey(executionId)
    const meta = await redis.hgetall(key)
    if (!meta || Object.keys(meta).length === 0) return null
    return meta as unknown as ExecutionStreamMeta
  } catch (error) {
    logger.warn('Failed to read execution meta', {
      executionId,
      error: error instanceof Error ? error.message : String(error),
    })
    return null
  }
}

export async function readExecutionEvents(
  executionId: string,
  afterEventId: number
): Promise<ExecutionEventEntry[]> {
  const redis = getRedisClient()
  if (!redis) return []
  try {
    const raw = await redis.zrangebyscore(getEventsKey(executionId), afterEventId + 1, '+inf')
    return raw
      .map((entry) => {
        try {
          return JSON.parse(entry) as ExecutionEventEntry
        } catch {
          return null
        }
      })
      .filter((entry): entry is ExecutionEventEntry => Boolean(entry))
  } catch (error) {
    logger.warn('Failed to read execution events', {
      executionId,
      error: error instanceof Error ? error.message : String(error),
    })
    return []
  }
}
|
||||
|
||||
export function createExecutionEventWriter(executionId: string): ExecutionEventWriter {
|
||||
const redis = getRedisClient()
|
||||
if (!redis) {
|
||||
logger.warn(
|
||||
'createExecutionEventWriter: Redis client unavailable, events will not be buffered',
|
||||
{
|
||||
executionId,
|
||||
}
|
||||
)
|
||||
return {
|
||||
write: async (event) => ({ eventId: 0, executionId, event }),
|
||||
flush: async () => {},
|
||||
close: async () => {},
|
||||
}
|
||||
}
|
||||
|
||||
let pending: ExecutionEventEntry[] = []
|
||||
let nextEventId = 0
|
||||
let maxReservedId = 0
|
||||
let flushTimer: ReturnType<typeof setTimeout> | null = null
|
||||
|
||||
const scheduleFlush = () => {
|
||||
if (flushTimer) return
|
||||
flushTimer = setTimeout(() => {
|
||||
flushTimer = null
|
||||
void flush()
|
||||
}, FLUSH_INTERVAL_MS)
|
||||
}
|
||||
|
||||
const reserveIds = async (minCount: number) => {
|
||||
const reserveCount = Math.max(RESERVE_BATCH, minCount)
|
||||
const newMax = await redis.incrby(getSeqKey(executionId), reserveCount)
|
||||
const startId = newMax - reserveCount + 1
|
||||
if (nextEventId === 0 || nextEventId > maxReservedId) {
|
||||
nextEventId = startId
|
||||
maxReservedId = newMax
|
||||
}
|
||||
}
|
||||
|
||||
let flushPromise: Promise<void> | null = null
|
||||
let closed = false
|
||||
const inflightWrites = new Set<Promise<ExecutionEventEntry>>()
|
||||
|
||||
const doFlush = async () => {
|
||||
if (pending.length === 0) return
|
||||
const batch = pending
|
||||
pending = []
|
||||
try {
|
||||
const key = getEventsKey(executionId)
|
||||
const zaddArgs: (string | number)[] = []
|
||||
for (const entry of batch) {
|
||||
zaddArgs.push(entry.eventId, JSON.stringify(entry))
|
||||
}
|
||||
const pipeline = redis.pipeline()
|
||||
pipeline.zadd(key, ...zaddArgs)
|
||||
pipeline.expire(key, TTL_SECONDS)
|
||||
pipeline.expire(getSeqKey(executionId), TTL_SECONDS)
|
||||
pipeline.zremrangebyrank(key, 0, -EVENT_LIMIT - 1)
|
||||
await pipeline.exec()
|
||||
} catch (error) {
|
||||
logger.warn('Failed to flush execution events', {
|
||||
executionId,
|
||||
batchSize: batch.length,
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
stack: error instanceof Error ? error.stack : undefined,
|
||||
})
|
||||
pending = batch.concat(pending)
|
||||
}
|
||||
}
|
||||
|
||||
const flush = async () => {
|
||||
if (flushPromise) {
|
||||
await flushPromise
|
||||
return
|
||||
}
|
||||
flushPromise = doFlush()
|
||||
try {
|
||||
await flushPromise
|
||||
} finally {
|
||||
flushPromise = null
|
||||
if (pending.length > 0) scheduleFlush()
|
||||
}
|
||||
}
|
||||
|
||||
const writeCore = async (event: ExecutionEvent): Promise<ExecutionEventEntry> => {
|
||||
if (closed) return { eventId: 0, executionId, event }
|
||||
if (nextEventId === 0 || nextEventId > maxReservedId) {
|
||||
await reserveIds(1)
|
||||
}
|
||||
const eventId = nextEventId++
|
||||
const entry: ExecutionEventEntry = { eventId, executionId, event }
|
||||
pending.push(entry)
|
||||
if (pending.length >= FLUSH_MAX_BATCH) {
|
||||
await flush()
|
||||
} else {
|
||||
scheduleFlush()
|
||||
}
|
||||
return entry
|
||||
}
|
||||
|
||||
const write = (event: ExecutionEvent): Promise<ExecutionEventEntry> => {
|
||||
const p = writeCore(event)
|
||||
inflightWrites.add(p)
|
||||
const remove = () => inflightWrites.delete(p)
|
||||
p.then(remove, remove)
|
||||
return p
|
||||
}
|
||||
|
||||
const close = async () => {
|
||||
closed = true
|
||||
if (flushTimer) {
|
||||
clearTimeout(flushTimer)
|
||||
flushTimer = null
|
||||
}
|
||||
if (inflightWrites.size > 0) {
|
||||
await Promise.allSettled(inflightWrites)
|
||||
}
|
||||
if (flushPromise) {
|
||||
await flushPromise
|
||||
}
|
||||
if (pending.length > 0) {
|
||||
await doFlush()
|
||||
}
|
||||
}
|
||||
|
||||
return { write, flush, close }
|
||||
}
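// Usage sketch (editorial, not part of the diff): how a producer and a consumer
// might use the buffer above. Assumes Redis is configured via getRedisClient();
// `produceAndConsume` and its parameters are illustrative names only.
import type { ExecutionEvent } from '@/lib/workflows/executor/execution-events'
import {
  createExecutionEventWriter,
  readExecutionEvents,
  setExecutionMeta,
} from '@/lib/execution/event-buffer'

async function produceAndConsume(
  executionId: string,
  workflowId: string,
  event: ExecutionEvent,
  lastSeenEventId: number
) {
  const writer = createExecutionEventWriter(executionId)
  await setExecutionMeta(executionId, { status: 'active', workflowId })

  // Writes receive monotonically increasing IDs (reserved in batches of 100 via
  // INCRBY) and are flushed to the sorted set within ~15ms or every 200 events.
  await writer.write(event)
  await writer.close() // drains in-flight writes and any pending batch
  await setExecutionMeta(executionId, { status: 'complete' })

  // A reconnecting client resumes from the last event ID it saw.
  return readExecutionEvents(executionId, lastSeenEventId)
}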
@@ -2364,6 +2364,261 @@ describe('hasWorkflowChanged', () => {
    })
  })

  describe('Trigger Config Normalization (False Positive Prevention)', () => {
    it.concurrent(
      'should not detect change when deployed has null fields but current has values from triggerConfig',
      () => {
        // Core scenario: deployed state has null individual fields, current state has
        // values populated from triggerConfig at runtime by populateTriggerFieldsFromConfig
        const deployedState = createWorkflowState({
          blocks: {
            block1: createBlock('block1', {
              type: 'starter',
              subBlocks: {
                signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
                botToken: { id: 'botToken', type: 'short-input', value: null },
                triggerConfig: {
                  id: 'triggerConfig',
                  type: 'short-input',
                  value: { signingSecret: 'secret123', botToken: 'token456' },
                },
              },
            }),
          },
        })

        const currentState = createWorkflowState({
          blocks: {
            block1: createBlock('block1', {
              type: 'starter',
              subBlocks: {
                signingSecret: { id: 'signingSecret', type: 'short-input', value: 'secret123' },
                botToken: { id: 'botToken', type: 'short-input', value: 'token456' },
                triggerConfig: {
                  id: 'triggerConfig',
                  type: 'short-input',
                  value: { signingSecret: 'secret123', botToken: 'token456' },
                },
              },
            }),
          },
        })

        expect(hasWorkflowChanged(currentState, deployedState)).toBe(false)
      }
    )

    it.concurrent(
      'should detect change when user edits a trigger field to a different value',
      () => {
        const deployedState = createWorkflowState({
          blocks: {
            block1: createBlock('block1', {
              type: 'starter',
              subBlocks: {
                signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
                triggerConfig: {
                  id: 'triggerConfig',
                  type: 'short-input',
                  value: { signingSecret: 'old-secret' },
                },
              },
            }),
          },
        })

        const currentState = createWorkflowState({
          blocks: {
            block1: createBlock('block1', {
              type: 'starter',
              subBlocks: {
                signingSecret: { id: 'signingSecret', type: 'short-input', value: 'new-secret' },
                triggerConfig: {
                  id: 'triggerConfig',
                  type: 'short-input',
                  value: { signingSecret: 'old-secret' },
                },
              },
            }),
          },
        })

        expect(hasWorkflowChanged(currentState, deployedState)).toBe(true)
      }
    )

    it.concurrent('should not detect change when both sides have no triggerConfig', () => {
      const deployedState = createWorkflowState({
        blocks: {
          block1: createBlock('block1', {
            type: 'starter',
            subBlocks: {
              signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
            },
          }),
        },
      })

      const currentState = createWorkflowState({
        blocks: {
          block1: createBlock('block1', {
            type: 'starter',
            subBlocks: {
              signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
            },
          }),
        },
      })

      expect(hasWorkflowChanged(currentState, deployedState)).toBe(false)
    })

    it.concurrent(
      'should not detect change when deployed has empty fields and triggerConfig populates them',
      () => {
        // Empty string is also treated as "empty" by normalizeTriggerConfigValues
        const deployedState = createWorkflowState({
          blocks: {
            block1: createBlock('block1', {
              type: 'starter',
              subBlocks: {
                signingSecret: { id: 'signingSecret', type: 'short-input', value: '' },
                triggerConfig: {
                  id: 'triggerConfig',
                  type: 'short-input',
                  value: { signingSecret: 'secret123' },
                },
              },
            }),
          },
        })

        const currentState = createWorkflowState({
          blocks: {
            block1: createBlock('block1', {
              type: 'starter',
              subBlocks: {
                signingSecret: { id: 'signingSecret', type: 'short-input', value: 'secret123' },
                triggerConfig: {
                  id: 'triggerConfig',
                  type: 'short-input',
                  value: { signingSecret: 'secret123' },
                },
              },
            }),
          },
        })

        expect(hasWorkflowChanged(currentState, deployedState)).toBe(false)
      }
    )

    it.concurrent('should not detect change when triggerId differs', () => {
      const deployedState = createWorkflowState({
        blocks: {
          block1: createBlock('block1', {
            type: 'starter',
            subBlocks: {
              model: { value: 'gpt-4' },
              triggerId: { value: null },
            },
          }),
        },
      })

      const currentState = createWorkflowState({
        blocks: {
          block1: createBlock('block1', {
            type: 'starter',
            subBlocks: {
              model: { value: 'gpt-4' },
              triggerId: { value: 'slack_webhook' },
            },
          }),
        },
      })

      expect(hasWorkflowChanged(currentState, deployedState)).toBe(false)
    })

    it.concurrent(
      'should not detect change for namespaced system subBlock IDs like samplePayload_slack_webhook',
      () => {
        const deployedState = createWorkflowState({
          blocks: {
            block1: createBlock('block1', {
              type: 'starter',
              subBlocks: {
                model: { value: 'gpt-4' },
                samplePayload_slack_webhook: { value: 'old payload' },
                triggerInstructions_slack_webhook: { value: 'old instructions' },
              },
            }),
          },
        })

        const currentState = createWorkflowState({
          blocks: {
            block1: createBlock('block1', {
              type: 'starter',
              subBlocks: {
                model: { value: 'gpt-4' },
                samplePayload_slack_webhook: { value: 'new payload' },
                triggerInstructions_slack_webhook: { value: 'new instructions' },
              },
            }),
          },
        })

        expect(hasWorkflowChanged(currentState, deployedState)).toBe(false)
      }
    )

    it.concurrent(
      'should handle mixed scenario: some fields from triggerConfig, some user-edited',
      () => {
        const deployedState = createWorkflowState({
          blocks: {
            block1: createBlock('block1', {
              type: 'starter',
              subBlocks: {
                signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
                botToken: { id: 'botToken', type: 'short-input', value: null },
                includeFiles: { id: 'includeFiles', type: 'switch', value: false },
                triggerConfig: {
                  id: 'triggerConfig',
                  type: 'short-input',
                  value: { signingSecret: 'secret123', botToken: 'token456' },
                },
              },
            }),
          },
        })

        const currentState = createWorkflowState({
          blocks: {
            block1: createBlock('block1', {
              type: 'starter',
              subBlocks: {
                signingSecret: { id: 'signingSecret', type: 'short-input', value: 'secret123' },
                botToken: { id: 'botToken', type: 'short-input', value: 'token456' },
                includeFiles: { id: 'includeFiles', type: 'switch', value: true },
                triggerConfig: {
                  id: 'triggerConfig',
                  type: 'short-input',
                  value: { signingSecret: 'secret123', botToken: 'token456' },
                },
              },
            }),
          },
        })

        // includeFiles changed from false to true — this IS a real change
        expect(hasWorkflowChanged(currentState, deployedState)).toBe(true)
      }
    )
  })

  describe('Trigger Runtime Metadata (Should Not Trigger Change)', () => {
    it.concurrent('should not detect change when webhookId differs', () => {
      const deployedState = createWorkflowState({

@@ -9,6 +9,7 @@ import {
  normalizeLoop,
  normalizeParallel,
  normalizeSubBlockValue,
  normalizeTriggerConfigValues,
  normalizeValue,
  normalizeVariables,
  sanitizeVariable,
@@ -172,14 +173,18 @@ export function generateWorkflowDiffSummary(
      }
    }

    // Normalize trigger config values for both states before comparison
    const normalizedCurrentSubs = normalizeTriggerConfigValues(currentSubBlocks)
    const normalizedPreviousSubs = normalizeTriggerConfigValues(previousSubBlocks)

    // Compare subBlocks using shared helper for filtering (single source of truth)
    const allSubBlockIds = filterSubBlockIds([
      ...new Set([...Object.keys(currentSubBlocks), ...Object.keys(previousSubBlocks)]),
      ...new Set([...Object.keys(normalizedCurrentSubs), ...Object.keys(normalizedPreviousSubs)]),
    ])

    for (const subId of allSubBlockIds) {
      const currentSub = currentSubBlocks[subId] as Record<string, unknown> | undefined
      const previousSub = previousSubBlocks[subId] as Record<string, unknown> | undefined
      const currentSub = normalizedCurrentSubs[subId] as Record<string, unknown> | undefined
      const previousSub = normalizedPreviousSubs[subId] as Record<string, unknown> | undefined

      if (!currentSub || !previousSub) {
        changes.push({

@@ -4,10 +4,12 @@
import { describe, expect, it } from 'vitest'
import type { Loop, Parallel } from '@/stores/workflows/workflow/types'
import {
  filterSubBlockIds,
  normalizedStringify,
  normalizeEdge,
  normalizeLoop,
  normalizeParallel,
  normalizeTriggerConfigValues,
  normalizeValue,
  sanitizeInputFormat,
  sanitizeTools,
@@ -584,4 +586,214 @@ describe('Workflow Normalization Utilities', () => {
      expect(result2).toBe(result3)
    })
  })

  describe('filterSubBlockIds', () => {
    it.concurrent('should exclude exact SYSTEM_SUBBLOCK_IDS', () => {
      const ids = ['signingSecret', 'samplePayload', 'triggerInstructions', 'botToken']
      const result = filterSubBlockIds(ids)
      expect(result).toEqual(['botToken', 'signingSecret'])
    })

    it.concurrent('should exclude namespaced SYSTEM_SUBBLOCK_IDS (prefix matching)', () => {
      const ids = [
        'signingSecret',
        'samplePayload_slack_webhook',
        'triggerInstructions_slack_webhook',
        'webhookUrlDisplay_slack_webhook',
        'botToken',
      ]
      const result = filterSubBlockIds(ids)
      expect(result).toEqual(['botToken', 'signingSecret'])
    })

    it.concurrent('should exclude exact TRIGGER_RUNTIME_SUBBLOCK_IDS', () => {
      const ids = ['webhookId', 'triggerPath', 'triggerConfig', 'triggerId', 'signingSecret']
      const result = filterSubBlockIds(ids)
      expect(result).toEqual(['signingSecret'])
    })

    it.concurrent('should not exclude IDs that merely contain a system ID substring', () => {
      const ids = ['mySamplePayload', 'notSamplePayload']
      const result = filterSubBlockIds(ids)
      expect(result).toEqual(['mySamplePayload', 'notSamplePayload'])
    })

    it.concurrent('should return sorted results', () => {
      const ids = ['zebra', 'alpha', 'middle']
      const result = filterSubBlockIds(ids)
      expect(result).toEqual(['alpha', 'middle', 'zebra'])
    })

    it.concurrent('should handle empty array', () => {
      expect(filterSubBlockIds([])).toEqual([])
    })

    it.concurrent('should handle all IDs being excluded', () => {
      const ids = ['webhookId', 'triggerPath', 'samplePayload', 'triggerConfig']
      const result = filterSubBlockIds(ids)
      expect(result).toEqual([])
    })

    it.concurrent('should exclude setupScript and scheduleInfo namespaced variants', () => {
      const ids = ['setupScript_google_sheets_row', 'scheduleInfo_cron_trigger', 'realField']
      const result = filterSubBlockIds(ids)
      expect(result).toEqual(['realField'])
    })

    it.concurrent('should exclude triggerCredentials namespaced variants', () => {
      const ids = ['triggerCredentials_slack_webhook', 'signingSecret']
      const result = filterSubBlockIds(ids)
      expect(result).toEqual(['signingSecret'])
    })
  })

  describe('normalizeTriggerConfigValues', () => {
    it.concurrent('should return subBlocks unchanged when no triggerConfig exists', () => {
      const subBlocks = {
        signingSecret: { id: 'signingSecret', type: 'short-input', value: 'secret123' },
        botToken: { id: 'botToken', type: 'short-input', value: 'token456' },
      }
      const result = normalizeTriggerConfigValues(subBlocks)
      expect(result).toEqual(subBlocks)
    })

    it.concurrent('should return subBlocks unchanged when triggerConfig value is null', () => {
      const subBlocks = {
        triggerConfig: { id: 'triggerConfig', type: 'short-input', value: null },
        signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
      }
      const result = normalizeTriggerConfigValues(subBlocks)
      expect(result).toEqual(subBlocks)
    })

    it.concurrent(
      'should return subBlocks unchanged when triggerConfig value is not an object',
      () => {
        const subBlocks = {
          triggerConfig: { id: 'triggerConfig', type: 'short-input', value: 'string-value' },
          signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
        }
        const result = normalizeTriggerConfigValues(subBlocks)
        expect(result).toEqual(subBlocks)
      }
    )

    it.concurrent('should populate null individual fields from triggerConfig', () => {
      const subBlocks = {
        triggerConfig: {
          id: 'triggerConfig',
          type: 'short-input',
          value: { signingSecret: 'secret123', botToken: 'token456' },
        },
        signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
        botToken: { id: 'botToken', type: 'short-input', value: null },
      }
      const result = normalizeTriggerConfigValues(subBlocks)
      expect((result.signingSecret as Record<string, unknown>).value).toBe('secret123')
      expect((result.botToken as Record<string, unknown>).value).toBe('token456')
    })

    it.concurrent('should populate undefined individual fields from triggerConfig', () => {
      const subBlocks = {
        triggerConfig: {
          id: 'triggerConfig',
          type: 'short-input',
          value: { signingSecret: 'secret123' },
        },
        signingSecret: { id: 'signingSecret', type: 'short-input', value: undefined },
      }
      const result = normalizeTriggerConfigValues(subBlocks)
      expect((result.signingSecret as Record<string, unknown>).value).toBe('secret123')
    })

    it.concurrent('should populate empty string individual fields from triggerConfig', () => {
      const subBlocks = {
        triggerConfig: {
          id: 'triggerConfig',
          type: 'short-input',
          value: { signingSecret: 'secret123' },
        },
        signingSecret: { id: 'signingSecret', type: 'short-input', value: '' },
      }
      const result = normalizeTriggerConfigValues(subBlocks)
      expect((result.signingSecret as Record<string, unknown>).value).toBe('secret123')
    })

    it.concurrent('should NOT overwrite existing non-empty individual field values', () => {
      const subBlocks = {
        triggerConfig: {
          id: 'triggerConfig',
          type: 'short-input',
          value: { signingSecret: 'old-secret' },
        },
        signingSecret: { id: 'signingSecret', type: 'short-input', value: 'user-edited-secret' },
      }
      const result = normalizeTriggerConfigValues(subBlocks)
      expect((result.signingSecret as Record<string, unknown>).value).toBe('user-edited-secret')
    })

    it.concurrent('should skip triggerConfig fields that are null/undefined', () => {
      const subBlocks = {
        triggerConfig: {
          id: 'triggerConfig',
          type: 'short-input',
          value: { signingSecret: null, botToken: undefined },
        },
        signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
        botToken: { id: 'botToken', type: 'short-input', value: null },
      }
      const result = normalizeTriggerConfigValues(subBlocks)
      expect((result.signingSecret as Record<string, unknown>).value).toBe(null)
      expect((result.botToken as Record<string, unknown>).value).toBe(null)
    })

    it.concurrent('should skip fields from triggerConfig that have no matching subBlock', () => {
      const subBlocks = {
        triggerConfig: {
          id: 'triggerConfig',
          type: 'short-input',
          value: { nonExistentField: 'value123' },
        },
        signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
      }
      const result = normalizeTriggerConfigValues(subBlocks)
      expect(result.nonExistentField).toBeUndefined()
      expect((result.signingSecret as Record<string, unknown>).value).toBe(null)
    })

    it.concurrent('should not mutate the original subBlocks object', () => {
      const original = {
        triggerConfig: {
          id: 'triggerConfig',
          type: 'short-input',
          value: { signingSecret: 'secret123' },
        },
        signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
      }
      normalizeTriggerConfigValues(original)
      expect((original.signingSecret as Record<string, unknown>).value).toBe(null)
    })

    it.concurrent('should preserve other subBlock properties when populating value', () => {
      const subBlocks = {
        triggerConfig: {
          id: 'triggerConfig',
          type: 'short-input',
          value: { signingSecret: 'secret123' },
        },
        signingSecret: {
          id: 'signingSecret',
          type: 'short-input',
          value: null,
          placeholder: 'Enter signing secret',
        },
      }
      const result = normalizeTriggerConfigValues(subBlocks)
      const normalized = result.signingSecret as Record<string, unknown>
      expect(normalized.value).toBe('secret123')
      expect(normalized.id).toBe('signingSecret')
      expect(normalized.type).toBe('short-input')
      expect(normalized.placeholder).toBe('Enter signing secret')
    })
  })
})

@@ -418,10 +418,48 @@ export function extractBlockFieldsForComparison(block: BlockState): ExtractedBlo
 */
export function filterSubBlockIds(subBlockIds: string[]): string[] {
  return subBlockIds
    .filter((id) => !SYSTEM_SUBBLOCK_IDS.includes(id) && !TRIGGER_RUNTIME_SUBBLOCK_IDS.includes(id))
    .filter((id) => {
      if (TRIGGER_RUNTIME_SUBBLOCK_IDS.includes(id)) return false
      if (SYSTEM_SUBBLOCK_IDS.some((sysId) => id === sysId || id.startsWith(`${sysId}_`)))
        return false
      return true
    })
    .sort()
}

/**
 * Normalizes trigger block subBlocks by populating null/empty individual fields
 * from the triggerConfig aggregate subBlock. This compensates for the runtime
 * population done by populateTriggerFieldsFromConfig, ensuring consistent
 * comparison between client state (with populated values) and deployed state
 * (with null values from DB).
 */
export function normalizeTriggerConfigValues(
  subBlocks: Record<string, unknown>
): Record<string, unknown> {
  const triggerConfigSub = subBlocks.triggerConfig as Record<string, unknown> | undefined
  const triggerConfigValue = triggerConfigSub?.value
  if (!triggerConfigValue || typeof triggerConfigValue !== 'object') {
    return subBlocks
  }

  const result = { ...subBlocks }
  for (const [fieldId, configValue] of Object.entries(
    triggerConfigValue as Record<string, unknown>
  )) {
    if (configValue === null || configValue === undefined) continue
    const existingSub = result[fieldId] as Record<string, unknown> | undefined
    if (
      existingSub &&
      (existingSub.value === null || existingSub.value === undefined || existingSub.value === '')
    ) {
      result[fieldId] = { ...existingSub, value: configValue }
    }
  }

  return result
}

/**
 * Normalizes a subBlock value with sanitization for specific subBlock types.
 * Sanitizes: tools (removes isExpanded), inputFormat (removes collapsed)

@@ -5,6 +5,7 @@ import {
  type GenerateContentConfig,
  type GenerateContentResponse,
  type GoogleGenAI,
  type Interactions,
  type Part,
  type Schema,
  type ThinkingConfig,
@@ -27,6 +28,7 @@ import {
import type { FunctionCallResponse, ProviderRequest, ProviderResponse } from '@/providers/types'
import {
  calculateCost,
  isDeepResearchModel,
  prepareToolExecution,
  prepareToolsWithUsageControl,
} from '@/providers/utils'
@@ -381,6 +383,468 @@ export interface GeminiExecutionConfig {
  providerType: GeminiProviderType
}

const DEEP_RESEARCH_POLL_INTERVAL_MS = 10_000
const DEEP_RESEARCH_MAX_DURATION_MS = 60 * 60 * 1000

/**
 * Sleeps for the specified number of milliseconds
 */
function sleep(ms: number): Promise<void> {
  return new Promise((resolve) => setTimeout(resolve, ms))
}

/**
 * Collapses a ProviderRequest into a single input string and optional system instruction
 * for the Interactions API, which takes a flat input rather than a messages array.
 *
 * Deep research is single-turn only — it takes one research query and returns a report.
 * Memory/conversation history is hidden in the UI for deep research models, so only
 * the last user message is used as input. System messages are passed via system_instruction.
 */
function collapseMessagesToInput(request: ProviderRequest): {
  input: string
  systemInstruction: string | undefined
} {
  const systemParts: string[] = []
  const userParts: string[] = []

  if (request.systemPrompt) {
    systemParts.push(request.systemPrompt)
  }

  if (request.messages) {
    for (const msg of request.messages) {
      if (msg.role === 'system' && msg.content) {
        systemParts.push(msg.content)
      } else if (msg.role === 'user' && msg.content) {
        userParts.push(msg.content)
      }
    }
  }

  return {
    input:
      userParts.length > 0
        ? userParts[userParts.length - 1]
        : 'Please conduct research on the provided topic.',
    systemInstruction: systemParts.length > 0 ? systemParts.join('\n\n') : undefined,
  }
}

/**
 * Extracts text content from a completed interaction's outputs array.
 * The outputs array can contain text, thought, google_search_result, and other types.
 * We concatenate all text outputs to get the full research report.
 */
function extractTextFromInteractionOutputs(outputs: Interactions.Interaction['outputs']): string {
  if (!outputs || outputs.length === 0) return ''

  const textParts: string[] = []
  for (const output of outputs) {
    if (output.type === 'text') {
      const text = (output as Interactions.TextContent).text
      if (text) textParts.push(text)
    }
  }

  return textParts.join('\n\n')
}

/**
 * Extracts token usage from an Interaction's Usage object.
 * The Interactions API provides total_input_tokens, total_output_tokens, total_tokens,
 * and total_reasoning_tokens (for thinking models).
 *
 * Also handles the raw API field name total_thought_tokens which the SDK may
 * map to total_reasoning_tokens.
 */
function extractInteractionUsage(usage: Interactions.Usage | undefined): {
  inputTokens: number
  outputTokens: number
  reasoningTokens: number
  totalTokens: number
} {
  if (!usage) {
    return { inputTokens: 0, outputTokens: 0, reasoningTokens: 0, totalTokens: 0 }
  }

  const usageLogger = createLogger('DeepResearchUsage')
  usageLogger.info('Raw interaction usage', { usage: JSON.stringify(usage) })

  const inputTokens = usage.total_input_tokens ?? 0
  const outputTokens = usage.total_output_tokens ?? 0
  const reasoningTokens =
    usage.total_reasoning_tokens ??
    ((usage as Record<string, unknown>).total_thought_tokens as number) ??
    0
  const totalTokens = usage.total_tokens ?? inputTokens + outputTokens

  return { inputTokens, outputTokens, reasoningTokens, totalTokens }
}

/**
 * Builds a standard ProviderResponse from a completed deep research interaction.
 */
function buildDeepResearchResponse(
  content: string,
  model: string,
  usage: {
    inputTokens: number
    outputTokens: number
    reasoningTokens: number
    totalTokens: number
  },
  providerStartTime: number,
  providerStartTimeISO: string,
  interactionId?: string
): ProviderResponse {
  const providerEndTime = Date.now()
  const duration = providerEndTime - providerStartTime

  return {
    content,
    model,
    tokens: {
      input: usage.inputTokens,
      output: usage.outputTokens,
      total: usage.totalTokens,
    },
    timing: {
      startTime: providerStartTimeISO,
      endTime: new Date(providerEndTime).toISOString(),
      duration,
      modelTime: duration,
      toolsTime: 0,
      firstResponseTime: duration,
      iterations: 1,
      timeSegments: [
        {
          type: 'model',
          name: 'Deep research',
          startTime: providerStartTime,
          endTime: providerEndTime,
          duration,
        },
      ],
    },
    cost: calculateCost(model, usage.inputTokens, usage.outputTokens),
    interactionId,
  }
}

/**
 * Creates a ReadableStream from a deep research streaming interaction.
 *
 * Deep research streaming returns InteractionSSEEvent chunks including:
 * - interaction.start: initial interaction with ID
 * - content.delta: incremental text and thought_summary updates
 * - content.start / content.stop: output boundaries
 * - interaction.complete: final event (outputs is undefined in streaming; must reconstruct)
 * - error: error events
 *
 * We stream text deltas to the client and track usage from the interaction.complete event.
 */
function createDeepResearchStream(
  stream: AsyncIterable<Interactions.InteractionSSEEvent>,
  onComplete?: (
    content: string,
    usage: {
      inputTokens: number
      outputTokens: number
      reasoningTokens: number
      totalTokens: number
    },
    interactionId?: string
  ) => void
): ReadableStream<Uint8Array> {
  const streamLogger = createLogger('DeepResearchStream')
  let fullContent = ''
  let completionUsage = { inputTokens: 0, outputTokens: 0, reasoningTokens: 0, totalTokens: 0 }
  let completedInteractionId: string | undefined

  return new ReadableStream({
    async start(controller) {
      try {
        for await (const event of stream) {
          if (event.event_type === 'content.delta') {
            const delta = (event as Interactions.ContentDelta).delta
            if (delta?.type === 'text' && 'text' in delta && delta.text) {
              fullContent += delta.text
              controller.enqueue(new TextEncoder().encode(delta.text))
            }
          } else if (event.event_type === 'interaction.complete') {
            const interaction = (event as Interactions.InteractionEvent).interaction
            if (interaction?.usage) {
              completionUsage = extractInteractionUsage(interaction.usage)
            }
            completedInteractionId = interaction?.id
          } else if (event.event_type === 'interaction.start') {
            const interaction = (event as Interactions.InteractionEvent).interaction
            if (interaction?.id) {
              completedInteractionId = interaction.id
            }
          } else if (event.event_type === 'error') {
            const errorEvent = event as { error?: { code?: string; message?: string } }
            const message = errorEvent.error?.message ?? 'Unknown deep research stream error'
            streamLogger.error('Deep research stream error', {
              code: errorEvent.error?.code,
              message,
            })
            controller.error(new Error(message))
            return
          }
        }

        onComplete?.(fullContent, completionUsage, completedInteractionId)
        controller.close()
      } catch (error) {
        streamLogger.error('Error reading deep research stream', {
          error: error instanceof Error ? error.message : String(error),
        })
        controller.error(error)
      }
    },
  })
}

/**
 * Executes a deep research request using the Interactions API.
 *
 * Deep research uses the Interactions API ({@link https://ai.google.dev/api/interactions-api}),
 * a completely different surface from generateContent. It creates a background interaction
 * that performs comprehensive research (up to 60 minutes).
 *
 * Supports both streaming and non-streaming modes:
 * - Streaming: returns a StreamingExecution with a ReadableStream of text deltas
 * - Non-streaming: polls until completion and returns a ProviderResponse
 *
 * Deep research does NOT support custom function calling tools, MCP servers,
 * or structured output (response_format). These are gracefully ignored.
 */
export async function executeDeepResearchRequest(
  config: GeminiExecutionConfig
): Promise<ProviderResponse | StreamingExecution> {
  const { ai, model, request, providerType } = config
  const logger = createLogger(providerType === 'google' ? 'GoogleProvider' : 'VertexProvider')

  logger.info('Preparing deep research request', {
    model,
    hasSystemPrompt: !!request.systemPrompt,
    hasMessages: !!request.messages?.length,
    streaming: !!request.stream,
    hasPreviousInteractionId: !!request.previousInteractionId,
  })

  if (request.tools?.length) {
    logger.warn('Deep research does not support custom tools — ignoring tools parameter')
  }
  if (request.responseFormat) {
    logger.warn(
      'Deep research does not support structured output — ignoring responseFormat parameter'
    )
  }

  const providerStartTime = Date.now()
  const providerStartTimeISO = new Date(providerStartTime).toISOString()

  try {
    const { input, systemInstruction } = collapseMessagesToInput(request)

    // Deep research requires background=true and store=true (store defaults to true,
    // but we set it explicitly per API requirements)
    const baseParams = {
      agent: model as Interactions.CreateAgentInteractionParamsNonStreaming['agent'],
      input,
      background: true,
      store: true,
      ...(systemInstruction && { system_instruction: systemInstruction }),
      ...(request.previousInteractionId && {
        previous_interaction_id: request.previousInteractionId,
      }),
      agent_config: {
        type: 'deep-research' as const,
        thinking_summaries: 'auto' as const,
      },
    }

    logger.info('Creating deep research interaction', {
      inputLength: input.length,
      hasSystemInstruction: !!systemInstruction,
      streaming: !!request.stream,
    })

    // Streaming mode: create a streaming interaction and return a StreamingExecution
    if (request.stream) {
      const streamParams: Interactions.CreateAgentInteractionParamsStreaming = {
        ...baseParams,
        stream: true,
      }

      const streamResponse = await ai.interactions.create(streamParams)
      const firstResponseTime = Date.now() - providerStartTime

      const streamingResult: StreamingExecution = {
        stream: undefined as unknown as ReadableStream<Uint8Array>,
        execution: {
          success: true,
          output: {
            content: '',
            model,
            tokens: { input: 0, output: 0, total: 0 },
            providerTiming: {
              startTime: providerStartTimeISO,
              endTime: new Date().toISOString(),
              duration: Date.now() - providerStartTime,
              modelTime: firstResponseTime,
              toolsTime: 0,
              firstResponseTime,
              iterations: 1,
              timeSegments: [
                {
                  type: 'model',
                  name: 'Deep research (streaming)',
                  startTime: providerStartTime,
                  endTime: providerStartTime + firstResponseTime,
                  duration: firstResponseTime,
                },
              ],
            },
            cost: {
              input: 0,
              output: 0,
              total: 0,
              pricing: { input: 0, output: 0, updatedAt: new Date().toISOString() },
            },
          },
          logs: [],
          metadata: {
            startTime: providerStartTimeISO,
            endTime: new Date().toISOString(),
            duration: Date.now() - providerStartTime,
          },
          isStreaming: true,
        },
      }

      streamingResult.stream = createDeepResearchStream(
        streamResponse,
        (content, usage, streamInteractionId) => {
          streamingResult.execution.output.content = content
          streamingResult.execution.output.tokens = {
            input: usage.inputTokens,
            output: usage.outputTokens,
            total: usage.totalTokens,
          }
          streamingResult.execution.output.interactionId = streamInteractionId

          const cost = calculateCost(model, usage.inputTokens, usage.outputTokens)
          streamingResult.execution.output.cost = cost

          const streamEndTime = Date.now()
          if (streamingResult.execution.output.providerTiming) {
            streamingResult.execution.output.providerTiming.endTime = new Date(
              streamEndTime
            ).toISOString()
            streamingResult.execution.output.providerTiming.duration =
              streamEndTime - providerStartTime
            const segments = streamingResult.execution.output.providerTiming.timeSegments
            if (segments?.[0]) {
              segments[0].endTime = streamEndTime
              segments[0].duration = streamEndTime - providerStartTime
            }
          }
        }
      )

      return streamingResult
    }

    // Non-streaming mode: create and poll
    const createParams: Interactions.CreateAgentInteractionParamsNonStreaming = {
      ...baseParams,
      stream: false,
    }

    const interaction = await ai.interactions.create(createParams)
    const interactionId = interaction.id

    logger.info('Deep research interaction created', { interactionId, status: interaction.status })

    // Poll until a terminal status
    const pollStartTime = Date.now()
    let result: Interactions.Interaction = interaction

    while (Date.now() - pollStartTime < DEEP_RESEARCH_MAX_DURATION_MS) {
      if (result.status === 'completed') {
        break
      }

      if (result.status === 'failed') {
        throw new Error(`Deep research interaction failed: ${interactionId}`)
      }

      if (result.status === 'cancelled') {
        throw new Error(`Deep research interaction was cancelled: ${interactionId}`)
      }

      logger.info('Deep research in progress, polling...', {
        interactionId,
        status: result.status,
        elapsedMs: Date.now() - pollStartTime,
      })

      await sleep(DEEP_RESEARCH_POLL_INTERVAL_MS)
      result = await ai.interactions.get(interactionId)
    }

    if (result.status !== 'completed') {
      throw new Error(
        `Deep research timed out after ${DEEP_RESEARCH_MAX_DURATION_MS / 1000}s (status: ${result.status})`
      )
    }

    const content = extractTextFromInteractionOutputs(result.outputs)
    const usage = extractInteractionUsage(result.usage)

    logger.info('Deep research completed', {
      interactionId,
      contentLength: content.length,
      inputTokens: usage.inputTokens,
      outputTokens: usage.outputTokens,
      reasoningTokens: usage.reasoningTokens,
      totalTokens: usage.totalTokens,
      durationMs: Date.now() - providerStartTime,
    })

    return buildDeepResearchResponse(
      content,
      model,
      usage,
      providerStartTime,
      providerStartTimeISO,
      interactionId
    )
  } catch (error) {
    const providerEndTime = Date.now()
    const duration = providerEndTime - providerStartTime

    logger.error('Error in deep research request:', {
      error: error instanceof Error ? error.message : String(error),
      stack: error instanceof Error ? error.stack : undefined,
    })

    const enhancedError = error instanceof Error ? error : new Error(String(error))
    Object.assign(enhancedError, {
      timing: {
        startTime: providerStartTimeISO,
        endTime: new Date(providerEndTime).toISOString(),
        duration,
      },
    })
    throw enhancedError
  }
}

/**
 * Executes a request using the Gemini API
 *
@@ -391,6 +855,12 @@ export async function executeGeminiRequest(
  config: GeminiExecutionConfig
): Promise<ProviderResponse | StreamingExecution> {
  const { ai, model, request, providerType } = config

  // Route deep research models to the interactions API
  if (isDeepResearchModel(model)) {
    return executeDeepResearchRequest(config)
  }

  const logger = createLogger(providerType === 'google' ? 'GoogleProvider' : 'VertexProvider')

  logger.info(`Preparing ${providerType} Gemini request`, {

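// Usage sketch (editorial, not part of the diff). Assumes a configured GoogleGenAI
// client; the model ID matches the catalog entries added below, and the request
// body here is illustrative rather than the full ProviderRequest contract.
async function runDeepResearch(ai: GoogleGenAI) {
  const result = await executeGeminiRequest({
    ai,
    model: 'deep-research-pro-preview-12-2025',
    providerType: 'google',
    request: {
      systemPrompt: 'Cite primary sources where possible.',
      messages: [{ role: 'user', content: 'Survey recent work on workflow engines.' }],
      stream: false, // the non-streaming path polls every 10s for up to 60 minutes
    } as ProviderRequest,
  })
  // A non-streaming run returns a ProviderResponse whose `interactionId` can be sent
  // back as `previousInteractionId` on a follow-up research request.
  return result
}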
@@ -46,6 +46,9 @@ export interface ModelCapabilities {
    levels: string[]
    default?: string
  }
  deepResearch?: boolean
  /** Whether this model supports conversation memory. Defaults to true if omitted. */
  memory?: boolean
}

export interface ModelDefinition {
@@ -825,7 +828,7 @@ export const PROVIDER_DEFINITIONS: Record<string, ProviderDefinition> = {
    name: 'Google',
    description: "Google's Gemini models",
    defaultModel: 'gemini-2.5-pro',
    modelPatterns: [/^gemini/],
    modelPatterns: [/^gemini/, /^deep-research/],
    capabilities: {
      toolUsageControl: true,
    },
@@ -928,6 +931,19 @@ export const PROVIDER_DEFINITIONS: Record<string, ProviderDefinition> = {
        },
        contextWindow: 1000000,
      },
      {
        id: 'deep-research-pro-preview-12-2025',
        pricing: {
          input: 2.0,
          output: 2.0,
          updatedAt: '2026-02-10',
        },
        capabilities: {
          deepResearch: true,
          memory: false,
        },
        contextWindow: 1000000,
      },
    ],
  },
  vertex: {
@@ -1038,6 +1054,19 @@ export const PROVIDER_DEFINITIONS: Record<string, ProviderDefinition> = {
        },
        contextWindow: 1000000,
      },
      {
        id: 'vertex/deep-research-pro-preview-12-2025',
        pricing: {
          input: 2.0,
          output: 2.0,
          updatedAt: '2026-02-10',
        },
        capabilities: {
          deepResearch: true,
          memory: false,
        },
        contextWindow: 1000000,
      },
    ],
  },
  deepseek: {
@@ -2480,6 +2509,37 @@ export function getThinkingLevelsForModel(modelId: string): string[] | null {
  return capability?.levels ?? null
}

/**
 * Get all models that support deep research capability
 */
export function getModelsWithDeepResearch(): string[] {
  const models: string[] = []
  for (const provider of Object.values(PROVIDER_DEFINITIONS)) {
    for (const model of provider.models) {
      if (model.capabilities.deepResearch) {
        models.push(model.id)
      }
    }
  }
  return models
}

/**
 * Get all models that explicitly disable memory support (memory: false).
 * Models without this capability default to supporting memory.
 */
export function getModelsWithoutMemory(): string[] {
  const models: string[] = []
  for (const provider of Object.values(PROVIDER_DEFINITIONS)) {
    for (const model of provider.models) {
      if (model.capabilities.memory === false) {
        models.push(model.id)
      }
    }
  }
  return models
}

/**
 * Get the max output tokens for a specific model.
 *

@@ -95,6 +95,8 @@ export interface ProviderResponse {
    total: number
    pricing: ModelPricing
  }
  /** Interaction ID returned by the Interactions API (used for multi-turn deep research) */
  interactionId?: string
}

export type ToolUsageControl = 'auto' | 'force' | 'none'
@@ -169,6 +171,8 @@ export interface ProviderRequest {
  verbosity?: string
  thinkingLevel?: string
  isDeployedContext?: boolean
  /** Previous interaction ID for multi-turn Interactions API requests (deep research follow-ups) */
  previousInteractionId?: string
}

export const providers: Record<string, ProviderConfig> = {}

@@ -12,6 +12,8 @@ import {
  getMaxOutputTokensForModel as getMaxOutputTokensForModelFromDefinitions,
  getMaxTemperature as getMaxTempFromDefinitions,
  getModelPricing as getModelPricingFromDefinitions,
  getModelsWithDeepResearch,
  getModelsWithoutMemory,
  getModelsWithReasoningEffort,
  getModelsWithTemperatureSupport,
  getModelsWithTempRange01,
@@ -953,6 +955,8 @@ export const MODELS_WITH_TEMPERATURE_SUPPORT = getModelsWithTemperatureSupport()
export const MODELS_WITH_REASONING_EFFORT = getModelsWithReasoningEffort()
export const MODELS_WITH_VERBOSITY = getModelsWithVerbosity()
export const MODELS_WITH_THINKING = getModelsWithThinking()
export const MODELS_WITH_DEEP_RESEARCH = getModelsWithDeepResearch()
export const MODELS_WITHOUT_MEMORY = getModelsWithoutMemory()
export const PROVIDERS_WITH_TOOL_USAGE_CONTROL = getProvidersWithToolUsageControl()

export function supportsTemperature(model: string): boolean {
@@ -971,6 +975,10 @@ export function supportsThinking(model: string): boolean {
  return MODELS_WITH_THINKING.includes(model.toLowerCase())
}

export function isDeepResearchModel(model: string): boolean {
  return MODELS_WITH_DEEP_RESEARCH.includes(model.toLowerCase())
}

/**
 * Get the maximum temperature value for a model
 * @returns Maximum temperature value (1 or 2) or undefined if temperature not supported

Binary file not shown.
Before Width: | Height: | Size: 45 KiB After Width: | Height: | Size: 78 KiB
Binary file not shown.
Before Width: | Height: | Size: 58 KiB After Width: | Height: | Size: 58 KiB
@@ -129,6 +129,18 @@ export const useExecutionStore = create<ExecutionState & ExecutionActions>()((se
    })
  },

  setCurrentExecutionId: (workflowId, executionId) => {
    set({
      workflowExecutions: updatedMap(get().workflowExecutions, workflowId, {
        currentExecutionId: executionId,
      }),
    })
  },

  getCurrentExecutionId: (workflowId) => {
    return getOrCreate(get().workflowExecutions, workflowId).currentExecutionId
  },

  clearRunPath: (workflowId) => {
    set({
      workflowExecutions: updatedMap(get().workflowExecutions, workflowId, {

@@ -35,6 +35,8 @@ export interface WorkflowExecutionState {
  lastRunPath: Map<string, BlockRunStatus>
  /** Maps edge IDs to their run result from the last execution */
  lastRunEdges: Map<string, EdgeRunStatus>
  /** The execution ID of the currently running execution */
  currentExecutionId: string | null
}

/**
@@ -54,6 +56,7 @@ export const defaultWorkflowExecutionState: WorkflowExecutionState = {
  debugContext: null,
  lastRunPath: new Map(),
  lastRunEdges: new Map(),
  currentExecutionId: null,
}

/**
@@ -96,6 +99,10 @@ export interface ExecutionActions {
  setEdgeRunStatus: (workflowId: string, edgeId: string, status: EdgeRunStatus) => void
  /** Clears the run path and run edges for a workflow */
  clearRunPath: (workflowId: string) => void
  /** Stores the current execution ID for a workflow */
  setCurrentExecutionId: (workflowId: string, executionId: string | null) => void
  /** Returns the current execution ID for a workflow */
  getCurrentExecutionId: (workflowId: string) => string | null
  /** Resets the entire store to its initial empty state */
  reset: () => void
  /** Stores a serializable execution snapshot for a workflow */

@@ -39,7 +39,6 @@ import {
  buildToolCallsById,
  normalizeMessagesForUI,
  persistMessages,
  persistMessagesBeacon,
  saveMessageCheckpoint,
} from '@/lib/copilot/messages'
import type { CopilotTransportMode } from '@/lib/copilot/models'
@@ -79,28 +78,6 @@ let _isPageUnloading = false
if (typeof window !== 'undefined') {
  window.addEventListener('beforeunload', () => {
    _isPageUnloading = true

    // Emergency persistence: flush any pending streaming updates to the store and
    // persist via sendBeacon (which is guaranteed to be queued during page teardown).
    // Without this, thinking blocks and in-progress content are lost on refresh.
    try {
      const state = useCopilotStore.getState()
      if (state.isSendingMessage && state.currentChat) {
        // Flush batched streaming updates into the store messages
        flushStreamingUpdates(useCopilotStore.setState.bind(useCopilotStore))
        const flushedState = useCopilotStore.getState()
        persistMessagesBeacon({
          chatId: flushedState.currentChat!.id,
          messages: flushedState.messages,
          sensitiveCredentialIds: flushedState.sensitiveCredentialIds,
          planArtifact: flushedState.streamingPlanContent || null,
          mode: flushedState.mode,
          model: flushedState.selectedModel,
        })
      }
    } catch {
      // Best-effort — don't let errors prevent page unload
    }
  })
}
function isPageUnloading(): boolean {
@@ -333,6 +310,50 @@ function parseModelKey(compositeKey: string): { provider: string; modelId: strin
  return { provider: compositeKey.slice(0, slashIdx), modelId: compositeKey.slice(slashIdx + 1) }
}

/**
 * Convert legacy/variant Claude IDs into the canonical ID shape used by the model catalog.
 *
 * Examples:
 * - claude-4.5-opus -> claude-opus-4-5
 * - claude-opus-4.6 -> claude-opus-4-6
 * - anthropic.claude-opus-4-5-20251101-v1:0 -> claude-opus-4-5 (match key only)
 */
function canonicalizeModelMatchKey(modelId: string): string {
  if (!modelId) return modelId
  const normalized = modelId.trim().toLowerCase()

  const toCanonicalClaude = (tier: string, version: string): string => {
    const normalizedVersion = version.replace(/\./g, '-')
    return `claude-${tier}-${normalizedVersion}`
  }

  const tierFirstExact = normalized.match(/^claude-(opus|sonnet|haiku)-(\d+(?:[.-]\d+)?)$/)
  if (tierFirstExact) {
    const [, tier, version] = tierFirstExact
    return toCanonicalClaude(tier, version)
  }

  const versionFirstExact = normalized.match(/^claude-(\d+(?:[.-]\d+)?)-(opus|sonnet|haiku)$/)
  if (versionFirstExact) {
    const [, version, tier] = versionFirstExact
    return toCanonicalClaude(tier, version)
  }

  const tierFirstEmbedded = normalized.match(/claude-(opus|sonnet|haiku)-(\d+(?:[.-]\d+)?)/)
  if (tierFirstEmbedded) {
    const [, tier, version] = tierFirstEmbedded
    return toCanonicalClaude(tier, version)
  }

  const versionFirstEmbedded = normalized.match(/claude-(\d+(?:[.-]\d+)?)-(opus|sonnet|haiku)/)
  if (versionFirstEmbedded) {
    const [, version, tier] = versionFirstEmbedded
    return toCanonicalClaude(tier, version)
  }

  return normalized
}
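// Editorial note: worked examples for the canonicalization above. The input IDs are
// hypothetical; outputs follow the function's own matching rules:
//   canonicalizeModelMatchKey('claude-4.5-opus')                         // 'claude-opus-4-5'
//   canonicalizeModelMatchKey('Claude-Opus-4.6')                         // 'claude-opus-4-6'
//   canonicalizeModelMatchKey('anthropic.claude-opus-4-5-20251101-v1:0') // 'claude-opus-4-5'
//   canonicalizeModelMatchKey('gpt-5')                                   // 'gpt-5' (unchanged)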
|
||||
|
||||
const MODEL_PROVIDER_PRIORITY = [
|
||||
'anthropic',
|
||||
'bedrock',
|
||||
@@ -373,12 +394,23 @@ function normalizeSelectedModelKey(selectedModel: string, models: AvailableModel
|
||||
|
||||
const { provider, modelId } = parseModelKey(selectedModel)
|
||||
const targetModelId = modelId || selectedModel
|
||||
const targetMatchKey = canonicalizeModelMatchKey(targetModelId)
|
||||
|
||||
const matches = models.filter((m) => m.id.endsWith(`/${targetModelId}`))
|
||||
const matches = models.filter((m) => {
|
||||
const candidateModelId = parseModelKey(m.id).modelId || m.id
|
||||
const candidateMatchKey = canonicalizeModelMatchKey(candidateModelId)
|
||||
return (
|
||||
candidateModelId === targetModelId ||
|
||||
m.id.endsWith(`/${targetModelId}`) ||
|
||||
candidateMatchKey === targetMatchKey
|
||||
)
|
||||
})
|
||||
if (matches.length === 0) return selectedModel
|
||||
|
||||
if (provider) {
|
||||
const sameProvider = matches.find((m) => m.provider === provider)
|
||||
const sameProvider = matches.find(
|
||||
(m) => m.provider === provider || m.id.startsWith(`${provider}/`)
|
||||
)
|
||||
if (sameProvider) return sameProvider.id
|
||||
}
|
||||
|
||||
@@ -1116,11 +1148,12 @@ export const useCopilotStore = create<CopilotStore>()(
         const chatConfig = chat.config ?? {}
         const chatMode = chatConfig.mode || get().mode
         const chatModel = chatConfig.model || get().selectedModel
+        const normalizedChatModel = normalizeSelectedModelKey(chatModel, get().availableModels)
 
         logger.debug('[Chat] Restoring chat config', {
           chatId: chat.id,
           mode: chatMode,
-          model: chatModel,
+          model: normalizedChatModel,
           hasPlanArtifact: !!planArtifact,
         })
 
@@ -1142,7 +1175,7 @@ export const useCopilotStore = create<CopilotStore>()(
           showPlanTodos: false,
           streamingPlanContent: planArtifact,
           mode: chatMode,
-          selectedModel: chatModel as CopilotStore['selectedModel'],
+          selectedModel: normalizedChatModel as CopilotStore['selectedModel'],
           suppressAutoSelect: false,
         })
 
@@ -1315,6 +1348,10 @@ export const useCopilotStore = create<CopilotStore>()(
         const refreshedConfig = updatedCurrentChat.config ?? {}
         const refreshedMode = refreshedConfig.mode || get().mode
         const refreshedModel = refreshedConfig.model || get().selectedModel
+        const normalizedRefreshedModel = normalizeSelectedModelKey(
+          refreshedModel,
+          get().availableModels
+        )
         const toolCallsById = buildToolCallsById(normalizedMessages)
 
         set({
@@ -1323,7 +1360,7 @@ export const useCopilotStore = create<CopilotStore>()(
           toolCallsById,
           streamingPlanContent: refreshedPlanArtifact,
           mode: refreshedMode,
-          selectedModel: refreshedModel as CopilotStore['selectedModel'],
+          selectedModel: normalizedRefreshedModel as CopilotStore['selectedModel'],
         })
       }
       try {
@@ -1343,11 +1380,15 @@ export const useCopilotStore = create<CopilotStore>()(
         const chatConfig = mostRecentChat.config ?? {}
         const chatMode = chatConfig.mode || get().mode
         const chatModel = chatConfig.model || get().selectedModel
+        const normalizedChatModel = normalizeSelectedModelKey(
+          chatModel,
+          get().availableModels
+        )
 
         logger.info('[Chat] Auto-selecting most recent chat with config', {
           chatId: mostRecentChat.id,
           mode: chatMode,
-          model: chatModel,
+          model: normalizedChatModel,
           hasPlanArtifact: !!planArtifact,
         })
 
@@ -1359,7 +1400,7 @@ export const useCopilotStore = create<CopilotStore>()(
           toolCallsById,
           streamingPlanContent: planArtifact,
           mode: chatMode,
-          selectedModel: chatModel as CopilotStore['selectedModel'],
+          selectedModel: normalizedChatModel as CopilotStore['selectedModel'],
         })
         try {
           await get().loadMessageCheckpoints(mostRecentChat.id)
@@ -1484,26 +1525,19 @@ export const useCopilotStore = create<CopilotStore>()(
         // Immediately put all in-progress tools into aborted state
         abortAllInProgressTools(set, get)
 
-        // Persist whatever contentBlocks/text we have to keep ordering for reloads.
-        // During page unload, use sendBeacon which is guaranteed to be queued even
-        // as the page tears down. Regular async fetch won't complete in time.
+        // Persist whatever contentBlocks/text we have to keep ordering for reloads
         const { currentChat, streamingPlanContent, mode, selectedModel } = get()
         if (currentChat) {
           try {
             const currentMessages = get().messages
-            const persistParams = {
+            void persistMessages({
               chatId: currentChat.id,
               messages: currentMessages,
               sensitiveCredentialIds: get().sensitiveCredentialIds,
               planArtifact: streamingPlanContent || null,
               mode,
               model: selectedModel,
-            }
-            if (isPageUnloading()) {
-              persistMessagesBeacon(persistParams)
-            } else {
-              void persistMessages(persistParams)
-            }
+            })
           } catch (error) {
             logger.warn('[Copilot] Failed to queue abort snapshot persistence', {
               error: error instanceof Error ? error.message : String(error),
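Review note on the branch being removed: an un-awaited `fetch` may be cancelled during page teardown, whereas `navigator.sendBeacon` hands the request to the browser's queue. A hedged sketch of the pattern the old `persistMessagesBeacon` presumably wrapped (URL and payload shape are assumptions, not the app's real endpoint):

```ts
// Hypothetical beacon-based persist; not the store's actual helper.
function persistViaBeacon(url: string, payload: unknown): boolean {
  const body = new Blob([JSON.stringify(payload)], { type: 'application/json' })
  // Returns false if the browser refused to queue it (e.g., payload too large),
  // in which case a fetch with keepalive is the usual fallback.
  return navigator.sendBeacon(url, body)
}

async function persistViaFetch(url: string, payload: unknown): Promise<void> {
  await fetch(url, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(payload),
    keepalive: true, // lets the request outlive the page in most browsers
  })
}
```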
@@ -2298,7 +2332,8 @@ export const useCopilotStore = create<CopilotStore>()(
       },
 
       setSelectedModel: async (model) => {
-        set({ selectedModel: model })
+        const normalizedModel = normalizeSelectedModelKey(model, get().availableModels)
+        set({ selectedModel: normalizedModel as CopilotStore['selectedModel'] })
       },
       setAgentPrefetch: (prefetch) => set({ agentPrefetch: prefetch }),
       loadAvailableModels: async () => {
 
@@ -224,7 +224,7 @@ export const useTerminalConsoleStore = create<ConsoleStore>()(
 
         const newEntry = get().entries[0]
 
-        if (newEntry?.error) {
+        if (newEntry?.error && newEntry.blockType !== 'cancelled') {
           notifyBlockError({
             error: newEntry.error,
             blockName: newEntry.blockName || 'Unknown Block',
 
@@ -243,6 +243,11 @@ export const useTerminalConsoleStore = create<ConsoleStore>()(
         useExecutionStore.getState().clearRunPath(workflowId)
       },
 
+      clearExecutionEntries: (executionId: string) =>
+        set((state) => ({
+          entries: state.entries.filter((e) => e.executionId !== executionId),
+        })),
+
       exportConsoleCSV: (workflowId: string) => {
         const entries = get().entries.filter((entry) => entry.workflowId === workflowId)
 
@@ -470,12 +475,24 @@ export const useTerminalConsoleStore = create<ConsoleStore>()(
       },
       merge: (persistedState, currentState) => {
         const persisted = persistedState as Partial<ConsoleStore> | undefined
-        const entries = (persisted?.entries ?? currentState.entries).map((entry, index) => {
+        const rawEntries = persisted?.entries ?? currentState.entries
+        const oneHourAgo = Date.now() - 60 * 60 * 1000
+
+        const entries = rawEntries.map((entry, index) => {
+          let updated = entry
           if (entry.executionOrder === undefined) {
-            return { ...entry, executionOrder: index + 1 }
+            updated = { ...updated, executionOrder: index + 1 }
           }
-          return entry
+          if (
+            entry.isRunning &&
+            entry.startedAt &&
+            new Date(entry.startedAt).getTime() < oneHourAgo
+          ) {
+            updated = { ...updated, isRunning: false }
+          }
+          return updated
        })
 
        return {
          ...currentState,
          entries,
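Review note: the reworked `merge` applies two normalizations to rehydrated entries — backfill a missing `executionOrder`, and clear `isRunning` on entries that started more than an hour ago (stale flags left by a tab that closed mid-run). A self-contained sketch with the entry shape reduced to the fields used here:

```ts
// Reduced shape for illustration; the real ConsoleEntry carries more fields.
interface EntryLike {
  executionOrder?: number
  isRunning?: boolean
  startedAt?: string
}

function normalizeRehydrated(entries: EntryLike[], now = Date.now()): EntryLike[] {
  const oneHourAgo = now - 60 * 60 * 1000
  return entries.map((entry, index) => {
    let updated = entry
    if (entry.executionOrder === undefined) {
      updated = { ...updated, executionOrder: index + 1 }
    }
    if (entry.isRunning && entry.startedAt && new Date(entry.startedAt).getTime() < oneHourAgo) {
      updated = { ...updated, isRunning: false }
    }
    return updated
  })
}

// An entry persisted mid-run two hours ago comes back with isRunning cleared:
normalizeRehydrated([{ isRunning: true, startedAt: new Date(Date.now() - 2 * 3_600_000).toISOString() }])
// => [{ isRunning: false, startedAt: ..., executionOrder: 1 }]
```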
@@ -51,6 +51,7 @@ export interface ConsoleStore {
   isOpen: boolean
   addConsole: (entry: Omit<ConsoleEntry, 'id' | 'timestamp'>) => ConsoleEntry
   clearWorkflowConsole: (workflowId: string) => void
+  clearExecutionEntries: (executionId: string) => void
   exportConsoleCSV: (workflowId: string) => void
   getWorkflowEntries: (workflowId: string) => ConsoleEntry[]
   toggleConsole: () => void
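The new `clearExecutionEntries` action complements `clearWorkflowConsole` by scoping removal to a single execution rather than a whole workflow. A one-line usage sketch, assuming the `getState()` access pattern these zustand stores expose:

```ts
// Drop console entries for one execution, leaving other runs' logs intact.
useTerminalConsoleStore.getState().clearExecutionEntries('exec-123') // id is illustrative
```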
apps/sim/tools/confluence/delete_label.ts (new file, 114 lines)
@@ -0,0 +1,114 @@
import { TIMESTAMP_OUTPUT } from '@/tools/confluence/types'
import type { ToolConfig } from '@/tools/types'

export interface ConfluenceDeleteLabelParams {
  accessToken: string
  domain: string
  pageId: string
  labelName: string
  cloudId?: string
}

export interface ConfluenceDeleteLabelResponse {
  success: boolean
  output: {
    ts: string
    pageId: string
    labelName: string
    deleted: boolean
  }
}

export const confluenceDeleteLabelTool: ToolConfig<
  ConfluenceDeleteLabelParams,
  ConfluenceDeleteLabelResponse
> = {
  id: 'confluence_delete_label',
  name: 'Confluence Delete Label',
  description: 'Remove a label from a Confluence page.',
  version: '1.0.0',

  oauth: {
    required: true,
    provider: 'confluence',
  },

  params: {
    accessToken: {
      type: 'string',
      required: true,
      visibility: 'hidden',
      description: 'OAuth access token for Confluence',
    },
    domain: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'Your Confluence domain (e.g., yourcompany.atlassian.net)',
    },
    pageId: {
      type: 'string',
      required: true,
      visibility: 'user-or-llm',
      description: 'Confluence page ID to remove the label from',
    },
    labelName: {
      type: 'string',
      required: true,
      visibility: 'user-or-llm',
      description: 'Name of the label to remove',
    },
    cloudId: {
      type: 'string',
      required: false,
      visibility: 'user-only',
      description:
        'Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain.',
    },
  },

  request: {
    url: () => '/api/tools/confluence/labels',
    method: 'DELETE',
    headers: (params: ConfluenceDeleteLabelParams) => ({
      Accept: 'application/json',
      Authorization: `Bearer ${params.accessToken}`,
    }),
    body: (params: ConfluenceDeleteLabelParams) => ({
      domain: params.domain,
      accessToken: params.accessToken,
      pageId: params.pageId?.trim(),
      labelName: params.labelName?.trim(),
      cloudId: params.cloudId,
    }),
  },

  transformResponse: async (response: Response) => {
    const data = await response.json()
    return {
      success: true,
      output: {
        ts: new Date().toISOString(),
        pageId: data.pageId ?? '',
        labelName: data.labelName ?? '',
        deleted: true,
      },
    }
  },

  outputs: {
    ts: TIMESTAMP_OUTPUT,
    pageId: {
      type: 'string',
      description: 'Page ID the label was removed from',
    },
    labelName: {
      type: 'string',
      description: 'Name of the removed label',
    },
    deleted: {
      type: 'boolean',
      description: 'Deletion status',
    },
  },
}
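A note on how these `ToolConfig` objects are likely consumed — this is an inference from the config shape, not something this diff shows; the real executor in sim presumably adds validation, cloud-ID resolution, and error mapping:

```ts
// Hedged sketch of an executor composing the request/transformResponse pieces.
async function runTool<P, R>(
  tool: {
    request: {
      url: (p: P) => string
      method: string
      headers: (p: P) => Record<string, string>
      body?: (p: P) => unknown
    }
    transformResponse: (res: Response) => Promise<R>
  },
  params: P
): Promise<R> {
  const res = await fetch(tool.request.url(params), {
    method: tool.request.method,
    headers: tool.request.headers(params),
    body: tool.request.body ? JSON.stringify(tool.request.body(params)) : undefined,
  })
  return tool.transformResponse(res)
}
```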
apps/sim/tools/confluence/delete_page_property.ts (new file, 105 lines)
@@ -0,0 +1,105 @@
import { TIMESTAMP_OUTPUT } from '@/tools/confluence/types'
import type { ToolConfig } from '@/tools/types'

export interface ConfluenceDeletePagePropertyParams {
  accessToken: string
  domain: string
  pageId: string
  propertyId: string
  cloudId?: string
}

export interface ConfluenceDeletePagePropertyResponse {
  success: boolean
  output: {
    ts: string
    pageId: string
    propertyId: string
    deleted: boolean
  }
}

export const confluenceDeletePagePropertyTool: ToolConfig<
  ConfluenceDeletePagePropertyParams,
  ConfluenceDeletePagePropertyResponse
> = {
  id: 'confluence_delete_page_property',
  name: 'Confluence Delete Page Property',
  description: 'Delete a content property from a Confluence page by its property ID.',
  version: '1.0.0',

  oauth: {
    required: true,
    provider: 'confluence',
  },

  params: {
    accessToken: {
      type: 'string',
      required: true,
      visibility: 'hidden',
      description: 'OAuth access token for Confluence',
    },
    domain: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'Your Confluence domain (e.g., yourcompany.atlassian.net)',
    },
    pageId: {
      type: 'string',
      required: true,
      visibility: 'user-or-llm',
      description: 'The ID of the page containing the property',
    },
    propertyId: {
      type: 'string',
      required: true,
      visibility: 'user-or-llm',
      description: 'The ID of the property to delete',
    },
    cloudId: {
      type: 'string',
      required: false,
      visibility: 'user-only',
      description:
        'Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain.',
    },
  },

  request: {
    url: () => '/api/tools/confluence/page-properties',
    method: 'DELETE',
    headers: (params: ConfluenceDeletePagePropertyParams) => ({
      Accept: 'application/json',
      Authorization: `Bearer ${params.accessToken}`,
    }),
    body: (params: ConfluenceDeletePagePropertyParams) => ({
      domain: params.domain,
      accessToken: params.accessToken,
      pageId: params.pageId?.trim(),
      propertyId: params.propertyId?.trim(),
      cloudId: params.cloudId,
    }),
  },

  transformResponse: async (response: Response) => {
    const data = await response.json()
    return {
      success: true,
      output: {
        ts: new Date().toISOString(),
        pageId: data.pageId ?? '',
        propertyId: data.propertyId ?? '',
        deleted: true,
      },
    }
  },

  outputs: {
    ts: TIMESTAMP_OUTPUT,
    pageId: { type: 'string', description: 'ID of the page' },
    propertyId: { type: 'string', description: 'ID of the deleted property' },
    deleted: { type: 'boolean', description: 'Deletion status' },
  },
}
apps/sim/tools/confluence/get_pages_by_label.ts (new file, 143 lines)
@@ -0,0 +1,143 @@
import { PAGE_ITEM_PROPERTIES, TIMESTAMP_OUTPUT } from '@/tools/confluence/types'
import type { ToolConfig } from '@/tools/types'

export interface ConfluenceGetPagesByLabelParams {
  accessToken: string
  domain: string
  labelId: string
  limit?: number
  cursor?: string
  cloudId?: string
}

export interface ConfluenceGetPagesByLabelResponse {
  success: boolean
  output: {
    ts: string
    labelId: string
    pages: Array<{
      id: string
      title: string
      status: string | null
      spaceId: string | null
      parentId: string | null
      authorId: string | null
      createdAt: string | null
      version: {
        number: number
        message?: string
        createdAt?: string
      } | null
    }>
    nextCursor: string | null
  }
}

export const confluenceGetPagesByLabelTool: ToolConfig<
  ConfluenceGetPagesByLabelParams,
  ConfluenceGetPagesByLabelResponse
> = {
  id: 'confluence_get_pages_by_label',
  name: 'Confluence Get Pages by Label',
  description: 'Retrieve all pages that have a specific label applied.',
  version: '1.0.0',

  oauth: {
    required: true,
    provider: 'confluence',
  },

  params: {
    accessToken: {
      type: 'string',
      required: true,
      visibility: 'hidden',
      description: 'OAuth access token for Confluence',
    },
    domain: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'Your Confluence domain (e.g., yourcompany.atlassian.net)',
    },
    labelId: {
      type: 'string',
      required: true,
      visibility: 'user-or-llm',
      description: 'The ID of the label to get pages for',
    },
    limit: {
      type: 'number',
      required: false,
      visibility: 'user-or-llm',
      description: 'Maximum number of pages to return (default: 50, max: 250)',
    },
    cursor: {
      type: 'string',
      required: false,
      visibility: 'user-or-llm',
      description: 'Pagination cursor from previous response',
    },
    cloudId: {
      type: 'string',
      required: false,
      visibility: 'user-only',
      description:
        'Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain.',
    },
  },

  request: {
    url: (params: ConfluenceGetPagesByLabelParams) => {
      const query = new URLSearchParams({
        domain: params.domain,
        accessToken: params.accessToken,
        labelId: params.labelId,
        limit: String(params.limit || 50),
      })
      if (params.cursor) {
        query.set('cursor', params.cursor)
      }
      if (params.cloudId) {
        query.set('cloudId', params.cloudId)
      }
      return `/api/tools/confluence/pages-by-label?${query.toString()}`
    },
    method: 'GET',
    headers: (params: ConfluenceGetPagesByLabelParams) => ({
      Accept: 'application/json',
      Authorization: `Bearer ${params.accessToken}`,
    }),
  },

  transformResponse: async (response: Response) => {
    const data = await response.json()
    return {
      success: true,
      output: {
        ts: new Date().toISOString(),
        labelId: data.labelId ?? '',
        pages: data.pages ?? [],
        nextCursor: data.nextCursor ?? null,
      },
    }
  },

  outputs: {
    ts: TIMESTAMP_OUTPUT,
    labelId: { type: 'string', description: 'ID of the label' },
    pages: {
      type: 'array',
      description: 'Array of pages with this label',
      items: {
        type: 'object',
        properties: PAGE_ITEM_PROPERTIES,
      },
    },
    nextCursor: {
      type: 'string',
      description: 'Cursor for fetching the next page of results',
      optional: true,
    },
  },
}
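Both new label tools page with an opaque cursor, so a consumer typically loops until `nextCursor` comes back null. A hedged sketch; `invokeTool` stands in for however the host invokes a tool by id:

```ts
// Drain all pages for a label by following nextCursor (invokeTool is hypothetical).
type PageResult = { output: { pages: unknown[]; nextCursor: string | null } }

async function getAllPagesForLabel(
  invokeTool: (params: Record<string, unknown>) => Promise<PageResult>,
  base: { domain: string; labelId: string }
): Promise<unknown[]> {
  const all: unknown[] = []
  let cursor: string | undefined
  do {
    const { output } = await invokeTool({ ...base, limit: 250, ...(cursor ? { cursor } : {}) })
    all.push(...output.pages)
    cursor = output.nextCursor ?? undefined
  } while (cursor)
  return all
}
```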
@@ -5,11 +5,14 @@ import { confluenceCreatePageTool } from '@/tools/confluence/create_page'
 import { confluenceCreatePagePropertyTool } from '@/tools/confluence/create_page_property'
 import { confluenceDeleteAttachmentTool } from '@/tools/confluence/delete_attachment'
 import { confluenceDeleteCommentTool } from '@/tools/confluence/delete_comment'
+import { confluenceDeleteLabelTool } from '@/tools/confluence/delete_label'
 import { confluenceDeletePageTool } from '@/tools/confluence/delete_page'
+import { confluenceDeletePagePropertyTool } from '@/tools/confluence/delete_page_property'
 import { confluenceGetBlogPostTool } from '@/tools/confluence/get_blogpost'
 import { confluenceGetPageAncestorsTool } from '@/tools/confluence/get_page_ancestors'
 import { confluenceGetPageChildrenTool } from '@/tools/confluence/get_page_children'
 import { confluenceGetPageVersionTool } from '@/tools/confluence/get_page_version'
+import { confluenceGetPagesByLabelTool } from '@/tools/confluence/get_pages_by_label'
 import { confluenceGetSpaceTool } from '@/tools/confluence/get_space'
 import { confluenceListAttachmentsTool } from '@/tools/confluence/list_attachments'
 import { confluenceListBlogPostsTool } from '@/tools/confluence/list_blogposts'
@@ -19,6 +22,7 @@ import { confluenceListLabelsTool } from '@/tools/confluence/list_labels'
 import { confluenceListPagePropertiesTool } from '@/tools/confluence/list_page_properties'
 import { confluenceListPageVersionsTool } from '@/tools/confluence/list_page_versions'
 import { confluenceListPagesInSpaceTool } from '@/tools/confluence/list_pages_in_space'
+import { confluenceListSpaceLabelsTool } from '@/tools/confluence/list_space_labels'
 import { confluenceListSpacesTool } from '@/tools/confluence/list_spaces'
 import { confluenceRetrieveTool } from '@/tools/confluence/retrieve'
 import { confluenceSearchTool } from '@/tools/confluence/search'
@@ -78,6 +82,7 @@ export {
   // Page Properties Tools
   confluenceListPagePropertiesTool,
   confluenceCreatePagePropertyTool,
+  confluenceDeletePagePropertyTool,
   // Blog Post Tools
   confluenceListBlogPostsTool,
   confluenceGetBlogPostTool,
@@ -98,6 +103,9 @@ export {
   // Label Tools
   confluenceListLabelsTool,
   confluenceAddLabelTool,
+  confluenceDeleteLabelTool,
+  confluenceGetPagesByLabelTool,
+  confluenceListSpaceLabelsTool,
   // Space Tools
   confluenceGetSpaceTool,
   confluenceListSpacesTool,
apps/sim/tools/confluence/list_space_labels.ts (new file, 134 lines)
@@ -0,0 +1,134 @@
import { LABEL_ITEM_PROPERTIES, TIMESTAMP_OUTPUT } from '@/tools/confluence/types'
import type { ToolConfig } from '@/tools/types'

export interface ConfluenceListSpaceLabelsParams {
  accessToken: string
  domain: string
  spaceId: string
  limit?: number
  cursor?: string
  cloudId?: string
}

export interface ConfluenceListSpaceLabelsResponse {
  success: boolean
  output: {
    ts: string
    spaceId: string
    labels: Array<{
      id: string
      name: string
      prefix: string
    }>
    nextCursor: string | null
  }
}

export const confluenceListSpaceLabelsTool: ToolConfig<
  ConfluenceListSpaceLabelsParams,
  ConfluenceListSpaceLabelsResponse
> = {
  id: 'confluence_list_space_labels',
  name: 'Confluence List Space Labels',
  description: 'List all labels associated with a Confluence space.',
  version: '1.0.0',

  oauth: {
    required: true,
    provider: 'confluence',
  },

  params: {
    accessToken: {
      type: 'string',
      required: true,
      visibility: 'hidden',
      description: 'OAuth access token for Confluence',
    },
    domain: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'Your Confluence domain (e.g., yourcompany.atlassian.net)',
    },
    spaceId: {
      type: 'string',
      required: true,
      visibility: 'user-or-llm',
      description: 'The ID of the Confluence space to list labels from',
    },
    limit: {
      type: 'number',
      required: false,
      visibility: 'user-or-llm',
      description: 'Maximum number of labels to return (default: 25, max: 250)',
    },
    cursor: {
      type: 'string',
      required: false,
      visibility: 'user-or-llm',
      description: 'Pagination cursor from previous response',
    },
    cloudId: {
      type: 'string',
      required: false,
      visibility: 'user-only',
      description:
        'Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain.',
    },
  },

  request: {
    url: (params: ConfluenceListSpaceLabelsParams) => {
      const query = new URLSearchParams({
        domain: params.domain,
        accessToken: params.accessToken,
        spaceId: params.spaceId,
        limit: String(params.limit || 25),
      })
      if (params.cursor) {
        query.set('cursor', params.cursor)
      }
      if (params.cloudId) {
        query.set('cloudId', params.cloudId)
      }
      return `/api/tools/confluence/space-labels?${query.toString()}`
    },
    method: 'GET',
    headers: (params: ConfluenceListSpaceLabelsParams) => ({
      Accept: 'application/json',
      Authorization: `Bearer ${params.accessToken}`,
    }),
  },

  transformResponse: async (response: Response) => {
    const data = await response.json()
    return {
      success: true,
      output: {
        ts: new Date().toISOString(),
        spaceId: data.spaceId ?? '',
        labels: data.labels ?? [],
        nextCursor: data.nextCursor ?? null,
      },
    }
  },

  outputs: {
    ts: TIMESTAMP_OUTPUT,
    spaceId: { type: 'string', description: 'ID of the space' },
    labels: {
      type: 'array',
      description: 'Array of labels on the space',
      items: {
        type: 'object',
        properties: LABEL_ITEM_PROPERTIES,
      },
    },
    nextCursor: {
      type: 'string',
      description: 'Cursor for fetching the next page of results',
      optional: true,
    },
  },
}
@@ -118,10 +118,13 @@ import {
   confluenceCreatePageTool,
   confluenceDeleteAttachmentTool,
   confluenceDeleteCommentTool,
+  confluenceDeleteLabelTool,
+  confluenceDeletePagePropertyTool,
   confluenceDeletePageTool,
   confluenceGetBlogPostTool,
   confluenceGetPageAncestorsTool,
   confluenceGetPageChildrenTool,
+  confluenceGetPagesByLabelTool,
   confluenceGetPageVersionTool,
   confluenceGetSpaceTool,
   confluenceListAttachmentsTool,
@@ -132,6 +135,7 @@ import {
   confluenceListPagePropertiesTool,
   confluenceListPagesInSpaceTool,
   confluenceListPageVersionsTool,
+  confluenceListSpaceLabelsTool,
   confluenceListSpacesTool,
   confluenceRetrieveTool,
   confluenceSearchInSpaceTool,
@@ -2667,6 +2671,10 @@ export const tools: Record<string, ToolConfig> = {
   confluence_delete_attachment: confluenceDeleteAttachmentTool,
   confluence_list_labels: confluenceListLabelsTool,
   confluence_add_label: confluenceAddLabelTool,
+  confluence_get_pages_by_label: confluenceGetPagesByLabelTool,
+  confluence_list_space_labels: confluenceListSpaceLabelsTool,
+  confluence_delete_label: confluenceDeleteLabelTool,
+  confluence_delete_page_property: confluenceDeletePagePropertyTool,
   confluence_get_space: confluenceGetSpaceTool,
   confluence_list_spaces: confluenceListSpacesTool,
   cursor_list_agents: cursorListAgentsTool,
@@ -26,6 +26,13 @@ export const s3GetObjectTool: ToolConfig = {
       visibility: 'user-only',
       description: 'Your AWS Secret Access Key',
     },
+    region: {
+      type: 'string',
+      required: false,
+      visibility: 'user-only',
+      description:
+        'Optional region override when URL does not include region (e.g., us-east-1, eu-west-1)',
+    },
     s3Uri: {
       type: 'string',
       required: true,
@@ -37,7 +44,7 @@ export const s3GetObjectTool: ToolConfig = {
   request: {
     url: (params) => {
       try {
-        const { bucketName, region, objectKey } = parseS3Uri(params.s3Uri)
+        const { bucketName, region, objectKey } = parseS3Uri(params.s3Uri, params.region)
 
         params.bucketName = bucketName
         params.region = region
@@ -46,7 +53,7 @@ export const s3GetObjectTool: ToolConfig = {
         return `https://${bucketName}.s3.${region}.amazonaws.com/${encodeS3PathComponent(objectKey)}`
       } catch (_error) {
         throw new Error(
-          'Invalid S3 Object URL format. Expected format: https://bucket-name.s3.region.amazonaws.com/path/to/file'
+          'Invalid S3 Object URL. Use a valid S3 URL and optionally provide region if the URL omits it.'
         )
       }
     },
@@ -55,7 +62,7 @@ export const s3GetObjectTool: ToolConfig = {
       try {
         // Parse S3 URI if not already parsed
         if (!params.bucketName || !params.region || !params.objectKey) {
-          const { bucketName, region, objectKey } = parseS3Uri(params.s3Uri)
+          const { bucketName, region, objectKey } = parseS3Uri(params.s3Uri, params.region)
           params.bucketName = bucketName
           params.region = region
           params.objectKey = objectKey
@@ -102,7 +109,7 @@ export const s3GetObjectTool: ToolConfig = {
   transformResponse: async (response: Response, params) => {
     // Parse S3 URI if not already parsed
     if (!params.bucketName || !params.region || !params.objectKey) {
-      const { bucketName, region, objectKey } = parseS3Uri(params.s3Uri)
+      const { bucketName, region, objectKey } = parseS3Uri(params.s3Uri, params.region)
       params.bucketName = bucketName
       params.region = region
       params.objectKey = objectKey
@@ -20,7 +20,10 @@ export function getSignatureKey(
   return kSigning
 }
 
-export function parseS3Uri(s3Uri: string): {
+export function parseS3Uri(
+  s3Uri: string,
+  fallbackRegion?: string
+): {
   bucketName: string
   region: string
   objectKey: string
@@ -28,10 +31,55 @@ export function parseS3Uri(s3Uri: string): {
   try {
     const url = new URL(s3Uri)
     const hostname = url.hostname
-    const bucketName = hostname.split('.')[0]
-    const regionMatch = hostname.match(/s3[.-]([^.]+)\.amazonaws\.com/)
-    const region = regionMatch ? regionMatch[1] : 'us-east-1'
-    const objectKey = url.pathname.startsWith('/') ? url.pathname.substring(1) : url.pathname
+    const normalizedPath = url.pathname.startsWith('/') ? url.pathname.slice(1) : url.pathname
+
+    const virtualHostedDualstackMatch = hostname.match(
+      /^(.+)\.s3\.dualstack\.([^.]+)\.amazonaws\.com(?:\.cn)?$/
+    )
+    const virtualHostedRegionalMatch = hostname.match(
+      /^(.+)\.s3[.-]([^.]+)\.amazonaws\.com(?:\.cn)?$/
+    )
+    const virtualHostedGlobalMatch = hostname.match(/^(.+)\.s3\.amazonaws\.com(?:\.cn)?$/)
+
+    const pathStyleDualstackMatch = hostname.match(
+      /^s3\.dualstack\.([^.]+)\.amazonaws\.com(?:\.cn)?$/
+    )
+    const pathStyleRegionalMatch = hostname.match(/^s3[.-]([^.]+)\.amazonaws\.com(?:\.cn)?$/)
+    const pathStyleGlobalMatch = hostname.match(/^s3\.amazonaws\.com(?:\.cn)?$/)
+
+    const isPathStyleHost = Boolean(
+      pathStyleDualstackMatch || pathStyleRegionalMatch || pathStyleGlobalMatch
+    )
+
+    const firstSlashIndex = normalizedPath.indexOf('/')
+    const pathStyleBucketName =
+      firstSlashIndex === -1 ? normalizedPath : normalizedPath.slice(0, firstSlashIndex)
+    const pathStyleObjectKey =
+      firstSlashIndex === -1 ? '' : normalizedPath.slice(firstSlashIndex + 1)
+
+    const bucketName = isPathStyleHost
+      ? pathStyleBucketName
+      : (virtualHostedDualstackMatch?.[1] ??
+        virtualHostedRegionalMatch?.[1] ??
+        virtualHostedGlobalMatch?.[1] ??
+        '')
+
+    const rawObjectKey = isPathStyleHost ? pathStyleObjectKey : normalizedPath
+    const objectKey = (() => {
+      try {
+        return decodeURIComponent(rawObjectKey)
+      } catch {
+        return rawObjectKey
+      }
+    })()
+
+    const normalizedFallbackRegion = fallbackRegion?.trim()
+    const regionFromHost =
+      virtualHostedDualstackMatch?.[2] ??
+      virtualHostedRegionalMatch?.[2] ??
+      pathStyleDualstackMatch?.[1] ??
+      pathStyleRegionalMatch?.[1]
+    const region = regionFromHost || normalizedFallbackRegion || 'us-east-1'
 
     if (!bucketName || !objectKey) {
       throw new Error('Invalid S3 URI format')
@@ -40,7 +88,7 @@ export function parseS3Uri(s3Uri: string): {
     return { bucketName, region, objectKey }
   } catch (_error) {
     throw new Error(
-      'Invalid S3 Object URL format. Expected format: https://bucket-name.s3.region.amazonaws.com/path/to/file'
+      'Invalid S3 Object URL format. Expected S3 virtual-hosted or path-style URL with object key.'
     )
   }
 }
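A few concrete inputs make the new host handling easier to review; the expected results below follow directly from the regexes above:

```ts
parseS3Uri('https://my-bucket.s3.eu-west-1.amazonaws.com/docs/a%20b.txt')
// virtual-hosted => { bucketName: 'my-bucket', region: 'eu-west-1', objectKey: 'docs/a b.txt' }

parseS3Uri('https://s3.us-east-2.amazonaws.com/my-bucket/docs/file.txt')
// path-style => { bucketName: 'my-bucket', region: 'us-east-2', objectKey: 'docs/file.txt' }

parseS3Uri('https://my-bucket.s3.dualstack.ap-south-1.amazonaws.com/key.txt')
// dualstack virtual-hosted => region 'ap-south-1'

parseS3Uri('https://my-bucket.s3.amazonaws.com/key.txt', 'eu-central-1')
// global host carries no region, so the new fallbackRegion argument applies => region 'eu-central-1'
```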
@@ -23,7 +23,12 @@ export const SYSTEM_SUBBLOCK_IDS: string[] = [
  * with default values from the trigger definition on load, which aren't present in
  * the deployed state, causing false positive change detection.
  */
-export const TRIGGER_RUNTIME_SUBBLOCK_IDS: string[] = ['webhookId', 'triggerPath', 'triggerConfig']
+export const TRIGGER_RUNTIME_SUBBLOCK_IDS: string[] = [
+  'webhookId',
+  'triggerPath',
+  'triggerConfig',
+  'triggerId',
+]
 
 /**
  * Maximum number of consecutive failures before a trigger (schedule/webhook) is auto-disabled.