This commit is contained in:
Siddharth Ganesan
2026-04-09 10:55:24 -07:00
parent b49d67e46c
commit f2fcfe7e6e
28 changed files with 801 additions and 177 deletions

View File

@@ -49,7 +49,10 @@ export async function POST(request: Request) {
headers['x-api-key'] = env.COPILOT_API_KEY
}
const controller = new AbortController()
const timeout = setTimeout(() => controller.abort(), GO_EXPLICIT_ABORT_TIMEOUT_MS)
const timeout = setTimeout(
() => controller.abort('timeout:go_explicit_abort_fetch'),
GO_EXPLICIT_ABORT_TIMEOUT_MS
)
const response = await fetch(`${SIM_AGENT_API_URL}/api/streams/explicit-abort`, {
method: 'POST',
headers,

View File

@@ -388,8 +388,7 @@ function TextEditor({
)
const isStreaming = streamingContent !== undefined
const shouldAnimateStreaming = isStreaming && streamingMode === 'append'
const revealedContent = useStreamingText(content, shouldAnimateStreaming)
const revealedContent = useStreamingText(content, false)
const textareaStuckRef = useRef(true)

View File

@@ -1,6 +1,6 @@
'use client'
import { forwardRef, memo, useCallback, useState } from 'react'
import { forwardRef, memo, useCallback, useEffect, useState } from 'react'
import { cn } from '@/lib/core/utils/cn'
import { getFileExtension } from '@/lib/uploads/utils/file-utils'
import type { PreviewMode } from '@/app/workspace/[workspaceId]/files/components/file-viewer'
@@ -101,11 +101,12 @@ export const MothershipView = memo(
const [prevActiveId, setPrevActiveId] = useState<string | null | undefined>(active?.id)
const handleCyclePreview = useCallback(() => setPreviewMode((m) => PREVIEW_CYCLE[m]), [])
// Reset preview mode to default when the active resource changes (guarded render-phase update)
if (active?.id !== prevActiveId) {
setPrevActiveId(active?.id)
setPreviewMode('preview')
}
useEffect(() => {
if (active?.id !== prevActiveId) {
setPrevActiveId(active?.id)
setPreviewMode('preview')
}
}, [active?.id, prevActiveId])
const isActivePreviewable =
canEdit &&

View File

@@ -755,9 +755,15 @@ export function useChat(
const lines = buffer.split('\n')
buffer = lines.pop() || ''
pendingLines.push(...lines)
if (pendingLines.length === 0) {
continue
}
}
const line = pendingLines.shift()!
const line = pendingLines.shift()
if (line === undefined) {
continue
}
if (isStale()) {
pendingLines.length = 0
continue
@@ -893,9 +899,18 @@ export function useChat(
const nextSession: StreamingFilePreview = {
...prevSession,
toolCallId: id,
...(typeof payload.operation === 'string'
? { operation: payload.operation }
: {}),
...(typeof payload.fileId === 'string'
? { fileId: payload.fileId, targetKind: 'file_id' as const }
: {}),
}
sessions.set(id, nextSession)
activeFilePreviewToolCallIdRef.current = id
if (nextSession.fileId) {
setActiveResourceId(nextSession.fileId)
}
setStreamingFile(nextSession)
break
}
@@ -958,9 +973,18 @@ export function useChat(
if (previewPhase === 'file_preview_content') {
const content = typeof payload.content === 'string' ? payload.content : ''
const isAppendOp = prevSession.operation === 'append'
const prevContent = streamingFileRef.current?.content ?? ''
const nextContent = isAppendOp ? prevContent + content : content
const contentMode =
typeof payload.contentMode === 'string' ? payload.contentMode : undefined
let nextContent: string
if (contentMode === 'snapshot') {
nextContent = content
} else if (contentMode === 'delta') {
nextContent = (prevSession.content ?? '') + content
} else {
const isAppendOp = prevSession.operation === 'append'
const prevContent = streamingFileRef.current?.content ?? ''
nextContent = isAppendOp ? prevContent + content : content
}
const nextSession: StreamingFilePreview = {
...prevSession,
content: nextContent,
@@ -971,6 +995,34 @@ export function useChat(
setStreamingFile(nextSession)
break
}
if (previewPhase === 'file_preview_complete') {
const fileId =
typeof payload.fileId === 'string' ? payload.fileId : prevSession.fileId
const resultData = asPayloadRecord(payload.data)
sessions.delete(id)
activeFilePreviewToolCallIdRef.current = null
if (fileId && resultData?.id) {
const fileName = (resultData.name as string) ?? prevSession.fileName ?? 'File'
const fileResource = { type: 'file' as const, id: fileId, title: fileName }
setResources((rs) => {
const without = rs.filter((r) => r.id !== 'streaming-file')
if (without.some((r) => r.type === 'file' && r.id === fileResource.id)) {
return without
}
return [...without, fileResource]
})
setActiveResourceId(fileId)
invalidateResourceQueries(queryClient, workspaceId, 'file', fileId)
}
if (!activeSubagent || activeSubagent !== FileTool.id) {
setStreamingFile(null)
streamingFileRef.current = null
}
break
}
}
if (phase === MothershipStreamV1ToolPhase.args_delta) {
@@ -1327,7 +1379,7 @@ export function useChat(
if (!isSameActiveSubagent) {
blocks.push({ type: 'subagent', content: name })
}
if (name === FileTool.id) {
if (name === FileTool.id && !isSameActiveSubagent) {
const emptyFile: StreamingFilePreview = {
toolCallId: parentToolCallId || 'file-preview',
fileName: '',
@@ -1937,7 +1989,7 @@ export function useChat(
streamGenRef.current++
streamReaderRef.current?.cancel().catch(() => {})
streamReaderRef.current = null
abortControllerRef.current?.abort()
abortControllerRef.current?.abort('user_stop:client_stopGeneration')
abortControllerRef.current = null
sendingRef.current = false
setIsSending(false)

View File

@@ -53,14 +53,15 @@ interface StreamingRevealResult {
* by DOM restructuring. It only resets when content clears (new message).
*/
export function useStreamingReveal(content: string, isStreaming: boolean): StreamingRevealResult {
const [committedEnd, setCommittedEnd] = useState(0)
const [generation, setGeneration] = useState(0)
const [revealState, setRevealState] = useState({ committedEnd: 0, generation: 0 })
const prevSplitRef = useRef(0)
useEffect(() => {
if (content.length === 0) {
prevSplitRef.current = 0
setCommittedEnd(0)
setRevealState((prev) =>
prev.committedEnd === 0 && prev.generation === 0 ? prev : { committedEnd: 0, generation: 0 }
)
return
}
@@ -69,11 +70,16 @@ export function useStreamingReveal(content: string, isStreaming: boolean): Strea
const splitPoint = findSafeSplitPoint(content)
if (splitPoint > prevSplitRef.current) {
prevSplitRef.current = splitPoint
setCommittedEnd(splitPoint)
setGeneration((g) => g + 1)
setRevealState((prev) =>
prev.committedEnd === splitPoint
? prev
: { committedEnd: splitPoint, generation: prev.generation + 1 }
)
}
}, [content, isStreaming])
const { committedEnd, generation } = revealState
if (!isStreaming) {
const preservedSplit = prevSplitRef.current

View File

@@ -221,6 +221,8 @@ interface RawBlock {
type: string
lane?: string
content?: string
/** Go persists text blocks with key "text" instead of "content" */
text?: string
channel?: string
phase?: string
kind?: string
@@ -275,7 +277,8 @@ function normalizeCanonicalBlock(block: RawBlock): PersistedContentBlock {
if (block.lane === 'main' || block.lane === 'subagent') {
result.lane = block.lane
}
if (block.content !== undefined) result.content = block.content
const blockContent = block.content ?? block.text
if (blockContent !== undefined) result.content = blockContent
if (block.channel) result.channel = block.channel as MothershipStreamV1TextChannel
if (block.phase) result.phase = block.phase as MothershipStreamV1ToolPhase
if (block.kind) result.kind = block.kind as MothershipStreamV1SpanPayloadKind
@@ -364,7 +367,7 @@ function normalizeLegacyBlock(block: RawBlock): PersistedContentBlock {
return {
type: MothershipStreamV1EventType.text,
channel: MothershipStreamV1TextChannel.assistant,
content: block.content,
content: block.content ?? block.text,
}
}

View File

@@ -13,11 +13,13 @@ export interface ToolCatalogEntry {
| 'complete_job'
| 'context_write'
| 'crawl_website'
| 'create_file'
| 'create_folder'
| 'create_job'
| 'create_workflow'
| 'create_workspace_mcp_server'
| 'debug'
| 'delete_file'
| 'delete_folder'
| 'delete_workflow'
| 'delete_workspace_mcp_server'
@@ -63,6 +65,7 @@ export interface ToolCatalogEntry {
| 'open_resource'
| 'read'
| 'redeploy'
| 'rename_file'
| 'rename_workflow'
| 'research'
| 'respond'
@@ -97,11 +100,13 @@ export interface ToolCatalogEntry {
| 'complete_job'
| 'context_write'
| 'crawl_website'
| 'create_file'
| 'create_folder'
| 'create_job'
| 'create_workflow'
| 'create_workspace_mcp_server'
| 'debug'
| 'delete_file'
| 'delete_folder'
| 'delete_workflow'
| 'delete_workspace_mcp_server'
@@ -147,6 +152,7 @@ export interface ToolCatalogEntry {
| 'open_resource'
| 'read'
| 'redeploy'
| 'rename_file'
| 'rename_workflow'
| 'research'
| 'respond'
@@ -298,6 +304,39 @@ export const CrawlWebsite: ToolCatalogEntry = {
},
}
export const CreateFile: ToolCatalogEntry = {
id: 'create_file',
name: 'create_file',
executor: 'sim',
mode: 'async',
parameters: {
type: 'object',
properties: {
contentType: {
type: 'string',
description:
'Optional MIME type override. Usually omit and let the system infer from the file extension.',
},
fileName: {
type: 'string',
description:
'Plain workspace filename including extension, e.g. "main.py" or "report.md". Must not contain slashes.',
},
},
required: ['fileName'],
},
resultSchema: {
type: 'object',
properties: {
data: { type: 'object', description: 'Contains id (the fileId) and name.' },
message: { type: 'string', description: 'Human-readable outcome.' },
success: { type: 'boolean', description: 'Whether the file was created.' },
},
required: ['success', 'message'],
},
requiredPermission: 'write',
}
export const CreateFolder: ToolCatalogEntry = {
id: 'create_folder',
name: 'create_folder',
@@ -430,6 +469,29 @@ export const Debug: ToolCatalogEntry = {
internal: true,
}
export const DeleteFile: ToolCatalogEntry = {
id: 'delete_file',
name: 'delete_file',
executor: 'sim',
mode: 'async',
parameters: {
type: 'object',
properties: {
fileId: { type: 'string', description: 'Canonical workspace file ID of the file to delete.' },
},
required: ['fileId'],
},
resultSchema: {
type: 'object',
properties: {
message: { type: 'string', description: 'Human-readable outcome.' },
success: { type: 'boolean', description: 'Whether the delete succeeded.' },
},
required: ['success', 'message'],
},
requiredPermission: 'write',
}
export const DeleteFolder: ToolCatalogEntry = {
id: 'delete_folder',
name: 'delete_folder',
@@ -1726,8 +1788,6 @@ export const Read: ToolCatalogEntry = {
parameters: {
type: 'object',
properties: {
limit: { type: 'number', description: 'Maximum number of lines to read.' },
offset: { type: 'number', description: 'Line offset to start reading from (0-indexed).' },
outputTable: {
type: 'string',
description:
@@ -1760,6 +1820,35 @@ export const Redeploy: ToolCatalogEntry = {
requiredPermission: 'admin',
}
export const RenameFile: ToolCatalogEntry = {
id: 'rename_file',
name: 'rename_file',
executor: 'sim',
mode: 'async',
parameters: {
type: 'object',
properties: {
fileId: { type: 'string', description: 'Canonical workspace file ID of the file to rename.' },
newName: {
type: 'string',
description:
'New filename including extension, e.g. "draft_v2.md". Must not contain slashes.',
},
},
required: ['fileId', 'newName'],
},
resultSchema: {
type: 'object',
properties: {
data: { type: 'object', description: 'Contains id and the new name.' },
message: { type: 'string', description: 'Human-readable outcome.' },
success: { type: 'boolean', description: 'Whether the rename succeeded.' },
},
required: ['success', 'message'],
},
requiredPermission: 'write',
}
export const RenameWorkflow: ToolCatalogEntry = {
id: 'rename_workflow',
name: 'rename_workflow',
@@ -2626,11 +2715,13 @@ export const TOOL_CATALOG: Record<string, ToolCatalogEntry> = {
[CompleteJob.id]: CompleteJob,
[ContextWrite.id]: ContextWrite,
[CrawlWebsite.id]: CrawlWebsite,
[CreateFile.id]: CreateFile,
[CreateFolder.id]: CreateFolder,
[CreateJob.id]: CreateJob,
[CreateWorkflow.id]: CreateWorkflow,
[CreateWorkspaceMcpServer.id]: CreateWorkspaceMcpServer,
[Debug.id]: Debug,
[DeleteFile.id]: DeleteFile,
[DeleteFolder.id]: DeleteFolder,
[DeleteWorkflow.id]: DeleteWorkflow,
[DeleteWorkspaceMcpServer.id]: DeleteWorkspaceMcpServer,
@@ -2676,6 +2767,7 @@ export const TOOL_CATALOG: Record<string, ToolCatalogEntry> = {
[OpenResource.id]: OpenResource,
[Read.id]: Read,
[Redeploy.id]: Redeploy,
[RenameFile.id]: RenameFile,
[RenameWorkflow.id]: RenameWorkflow,
[Research.id]: Research,
[Respond.id]: Respond,

View File

@@ -113,6 +113,42 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
create_file: {
parameters: {
type: 'object',
properties: {
contentType: {
type: 'string',
description:
'Optional MIME type override. Usually omit and let the system infer from the file extension.',
},
fileName: {
type: 'string',
description:
'Plain workspace filename including extension, e.g. "main.py" or "report.md". Must not contain slashes.',
},
},
required: ['fileName'],
},
resultSchema: {
type: 'object',
properties: {
data: {
type: 'object',
description: 'Contains id (the fileId) and name.',
},
message: {
type: 'string',
description: 'Human-readable outcome.',
},
success: {
type: 'boolean',
description: 'Whether the file was created.',
},
},
required: ['success', 'message'],
},
},
create_folder: {
parameters: {
type: 'object',
@@ -249,6 +285,32 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
delete_file: {
parameters: {
type: 'object',
properties: {
fileId: {
type: 'string',
description: 'Canonical workspace file ID of the file to delete.',
},
},
required: ['fileId'],
},
resultSchema: {
type: 'object',
properties: {
message: {
type: 'string',
description: 'Human-readable outcome.',
},
success: {
type: 'boolean',
description: 'Whether the delete succeeded.',
},
},
required: ['success', 'message'],
},
},
delete_folder: {
parameters: {
type: 'object',
@@ -1504,14 +1566,6 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
parameters: {
type: 'object',
properties: {
limit: {
type: 'number',
description: 'Maximum number of lines to read.',
},
offset: {
type: 'number',
description: 'Line offset to start reading from (0-indexed).',
},
outputTable: {
type: 'string',
description:
@@ -1538,6 +1592,41 @@ export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
},
resultSchema: undefined,
},
rename_file: {
parameters: {
type: 'object',
properties: {
fileId: {
type: 'string',
description: 'Canonical workspace file ID of the file to rename.',
},
newName: {
type: 'string',
description:
'New filename including extension, e.g. "draft_v2.md". Must not contain slashes.',
},
},
required: ['fileId', 'newName'],
},
resultSchema: {
type: 'object',
properties: {
data: {
type: 'object',
description: 'Contains id and the new name.',
},
message: {
type: 'string',
description: 'Human-readable outcome.',
},
success: {
type: 'boolean',
description: 'Whether the rename succeeded.',
},
},
required: ['success', 'message'],
},
},
rename_workflow: {
parameters: {
type: 'object',

View File

@@ -196,10 +196,6 @@ export interface StreamLoopOptions extends OrchestratorOptions {
onBeforeDispatch?: (event: StreamEvent, context: StreamingContext) => boolean | undefined
}
// Pre-resolve text handlers at module level to avoid map lookups in the hot path.
const textHandler = sseHandlers[MothershipStreamV1EventType.text]
const subagentTextHandler = subAgentHandlers[MothershipStreamV1EventType.text]
/**
* Run the SSE stream processing loop against the Go backend.
*
@@ -281,6 +277,14 @@ export async function runStreamLoop(
return
}
if (
streamEvent.type === MothershipStreamV1EventType.text &&
typeof streamEvent.payload.text === 'string'
) {
await options.onEvent?.(streamEvent)
return
}
if (
streamEvent.type === MothershipStreamV1EventType.tool &&
streamEvent.payload.phase === 'args_delta' &&
@@ -296,19 +300,6 @@ export async function runStreamLoop(
}
state.raw += delta
if (!state.started) {
state.started = true
await options.onEvent?.({
type: MothershipStreamV1EventType.tool,
payload: {
toolCallId,
toolName: 'workspace_file',
previewPhase: 'file_preview_start',
},
...(streamEvent.scope ? { scope: streamEvent.scope } : {}),
})
}
const operation = extractJsonString(state.raw, 'operation')
const targetKind = extractJsonString(state.raw, 'kind')
const fileId = extractJsonString(state.raw, 'fileId')
@@ -320,99 +311,115 @@ export async function runStreamLoop(
if (fileName) state.fileName = fileName
if (title) state.title = title
const targetKey = JSON.stringify({
operation: state.operation,
targetKind: state.targetKind,
fileId: state.fileId,
fileName: state.fileName,
title: state.title,
})
if (
state.targetKind &&
(state.targetKind === 'new_file' ? !!state.fileName : !!state.fileId) &&
state.targetKey !== targetKey
) {
state.targetKey = targetKey
await options.onEvent?.({
type: MothershipStreamV1EventType.tool,
payload: {
toolCallId,
toolName: 'workspace_file',
previewPhase: 'file_preview_target',
operation: state.operation,
target: {
kind: state.targetKind,
...(state.fileId ? { fileId: state.fileId } : {}),
...(state.fileName ? { fileName: state.fileName } : {}),
const isDocFormat = /\.(pptx|docx|pdf)$/i.test(state.fileName ?? '')
if (!isDocFormat) {
if (!state.started) {
state.started = true
await options.onEvent?.({
type: MothershipStreamV1EventType.tool,
payload: {
toolCallId,
toolName: 'workspace_file',
previewPhase: 'file_preview_start',
},
...(state.title ? { title: state.title } : {}),
},
...(streamEvent.scope ? { scope: streamEvent.scope } : {}),
})
}
...(streamEvent.scope ? { scope: streamEvent.scope } : {}),
})
}
const strategy = extractJsonString(state.raw, 'strategy')
const editMetaPayload = strategy
? {
strategy,
...(extractJsonString(state.raw, 'mode')
? { mode: extractJsonString(state.raw, 'mode') }
: {}),
...(extractJsonNumber(state.raw, 'occurrence') !== undefined
? { occurrence: extractJsonNumber(state.raw, 'occurrence') }
: {}),
...(extractJsonString(state.raw, 'search')
? { search: extractJsonString(state.raw, 'search') }
: {}),
...(extractJsonBoolean(state.raw, 'replaceAll') !== undefined
? { replaceAll: extractJsonBoolean(state.raw, 'replaceAll') }
: {}),
...(extractJsonString(state.raw, 'before_anchor')
? { before_anchor: extractJsonString(state.raw, 'before_anchor') }
: {}),
...(extractJsonString(state.raw, 'after_anchor')
? { after_anchor: extractJsonString(state.raw, 'after_anchor') }
: {}),
...(extractJsonString(state.raw, 'anchor')
? { anchor: extractJsonString(state.raw, 'anchor') }
: {}),
...(extractJsonString(state.raw, 'start_anchor')
? { start_anchor: extractJsonString(state.raw, 'start_anchor') }
: {}),
...(extractJsonString(state.raw, 'end_anchor')
? { end_anchor: extractJsonString(state.raw, 'end_anchor') }
: {}),
}
: undefined
const editMetaKey = editMetaPayload ? JSON.stringify(editMetaPayload) : undefined
if (editMetaPayload && state.editMetaKey !== editMetaKey) {
state.editMetaKey = editMetaKey
await options.onEvent?.({
type: MothershipStreamV1EventType.tool,
payload: {
toolCallId,
toolName: 'workspace_file',
previewPhase: 'file_preview_edit_meta',
edit: editMetaPayload,
},
...(streamEvent.scope ? { scope: streamEvent.scope } : {}),
const targetKey = JSON.stringify({
operation: state.operation,
targetKind: state.targetKind,
fileId: state.fileId,
fileName: state.fileName,
title: state.title,
})
}
if (
state.targetKind &&
(state.targetKind === 'new_file' ? !!state.fileName : !!state.fileId) &&
state.targetKey !== targetKey
) {
state.targetKey = targetKey
await options.onEvent?.({
type: MothershipStreamV1EventType.tool,
payload: {
toolCallId,
toolName: 'workspace_file',
previewPhase: 'file_preview_target',
operation: state.operation,
target: {
kind: state.targetKind,
...(state.fileId ? { fileId: state.fileId } : {}),
...(state.fileName ? { fileName: state.fileName } : {}),
},
...(state.title ? { title: state.title } : {}),
},
...(streamEvent.scope ? { scope: streamEvent.scope } : {}),
})
}
const streamedContent = buildPreviewContent(state.raw, strategy)
if (streamedContent !== (state.lastContentSnapshot ?? '')) {
state.lastContentSnapshot = streamedContent
await options.onEvent?.({
type: MothershipStreamV1EventType.tool,
payload: {
toolCallId,
toolName: 'workspace_file',
previewPhase: 'file_preview_content',
content: streamedContent,
},
...(streamEvent.scope ? { scope: streamEvent.scope } : {}),
})
}
const strategy = extractJsonString(state.raw, 'strategy')
const editMetaPayload = strategy
? {
strategy,
...(extractJsonString(state.raw, 'mode')
? { mode: extractJsonString(state.raw, 'mode') }
: {}),
...(extractJsonNumber(state.raw, 'occurrence') !== undefined
? { occurrence: extractJsonNumber(state.raw, 'occurrence') }
: {}),
...(extractJsonString(state.raw, 'search')
? { search: extractJsonString(state.raw, 'search') }
: {}),
...(extractJsonBoolean(state.raw, 'replaceAll') !== undefined
? { replaceAll: extractJsonBoolean(state.raw, 'replaceAll') }
: {}),
...(extractJsonString(state.raw, 'before_anchor')
? { before_anchor: extractJsonString(state.raw, 'before_anchor') }
: {}),
...(extractJsonString(state.raw, 'after_anchor')
? { after_anchor: extractJsonString(state.raw, 'after_anchor') }
: {}),
...(extractJsonString(state.raw, 'anchor')
? { anchor: extractJsonString(state.raw, 'anchor') }
: {}),
...(extractJsonString(state.raw, 'start_anchor')
? { start_anchor: extractJsonString(state.raw, 'start_anchor') }
: {}),
...(extractJsonString(state.raw, 'end_anchor')
? { end_anchor: extractJsonString(state.raw, 'end_anchor') }
: {}),
}
: undefined
const editMetaKey = editMetaPayload ? JSON.stringify(editMetaPayload) : undefined
if (editMetaPayload && state.editMetaKey !== editMetaKey) {
state.editMetaKey = editMetaKey
await options.onEvent?.({
type: MothershipStreamV1EventType.tool,
payload: {
toolCallId,
toolName: 'workspace_file',
previewPhase: 'file_preview_edit_meta',
edit: editMetaPayload,
},
...(streamEvent.scope ? { scope: streamEvent.scope } : {}),
})
}
const streamedContent = buildPreviewContent(state.raw, strategy)
if (streamedContent !== (state.lastContentSnapshot ?? '')) {
state.lastContentSnapshot = streamedContent
await options.onEvent?.({
type: MothershipStreamV1EventType.tool,
payload: {
toolCallId,
toolName: 'workspace_file',
previewPhase: 'file_preview_content',
content: streamedContent,
},
...(streamEvent.scope ? { scope: streamEvent.scope } : {}),
})
}
} // end if (!isDocFormat)
filePreviewState.set(toolCallId, state)
}

View File

@@ -74,6 +74,17 @@ export function abortPendingToolIfStreamDead(
toolCall.status = MothershipStreamV1ToolOutcome.cancelled
toolCall.endTime = Date.now()
markToolResultSeen(toolCallId)
const toolSpan = context.trace.startSpan(toolCall.name || 'unknown_tool', 'tool.execute', {
toolCallId,
toolName: toolCall.name,
cancelReason: 'stream_dead_before_dispatch',
abortSignalAborted: options.abortSignal?.aborted ?? false,
abortReason: options.abortSignal?.aborted
? String(options.abortSignal.reason ?? 'unknown')
: undefined,
wasAborted: context.wasAborted ?? false,
})
context.trace.endSpan(toolSpan, 'cancelled')
return true
}

View File

@@ -22,7 +22,7 @@ export async function finalizeStream(
requestId: string
): Promise<void> {
if (aborted) {
return handleAborted(publisher, runId, requestId)
return handleAborted(result, publisher, runId, requestId)
}
if (!result.success) {
return handleError(result, publisher, runId, requestId)
@@ -31,15 +31,29 @@ export async function finalizeStream(
}
async function handleAborted(
result: OrchestratorResult,
publisher: StreamWriter,
runId: string,
requestId: string
): Promise<void> {
logger.info(`[${requestId}] Stream aborted by explicit stop`)
const partialContentLen = result.content?.length ?? 0
const toolCallCount = result.toolCalls?.length ?? 0
const blockCount = result.contentBlocks?.length ?? 0
logger.info(`[${requestId}] Stream aborted by explicit stop`, {
partialContentLen,
toolCallCount,
blockCount,
})
if (!publisher.sawComplete) {
const partialContent = result.content || undefined
await publisher.publish({
type: MothershipStreamV1EventType.complete,
payload: { status: MothershipStreamV1CompletionStatus.cancelled },
payload: {
status: MothershipStreamV1CompletionStatus.cancelled,
...(partialContent ? { partialContent } : {}),
...(partialContentLen ? { partialContentLen } : {}),
...(toolCallCount ? { toolCallCount } : {}),
},
})
}
await publisher.flush()

View File

@@ -183,7 +183,7 @@ export async function abortActiveStream(streamId: string): Promise<boolean> {
await writeAbortMarker(streamId)
const controller = activeStreams.get(streamId)
if (!controller) return false
controller.abort()
controller.abort('user_stop:abortActiveStream')
activeStreams.delete(streamId)
return true
}
@@ -206,7 +206,7 @@ export function startAbortPoller(
try {
const shouldAbort = await hasAbortMarker(streamId)
if (shouldAbort && !abortController.signal.aborted) {
abortController.abort()
abortController.abort('redis_abort_marker:poller')
await clearAbortMarker(streamId)
}
} catch (error) {

View File

@@ -190,8 +190,28 @@ export async function executeToolAndReport(
toolCallId: toolCall.id,
toolName: toolCall.name,
argsPreview,
abortSignalAborted: execContext.abortSignal?.aborted ?? false,
})
const endToolSpan = (
status: string,
detail?: { error?: string; cancelReason?: string; resultSuccess?: boolean }
) => {
const abortDetail: Record<string, unknown> = {}
if (execContext.abortSignal?.aborted) {
abortDetail.abortSignalAborted = true
abortDetail.abortReason = String(execContext.abortSignal.reason ?? 'unknown')
}
if (options?.abortSignal?.aborted) {
abortDetail.optionsAbortReason = String(options.abortSignal.reason ?? 'unknown')
}
if (context.wasAborted) {
abortDetail.wasAborted = true
}
toolSpan.attributes = { ...toolSpan.attributes, ...abortDetail, ...detail }
context.trace.endSpan(toolSpan, status)
}
logger.info('Tool execution started', {
toolCallId: toolCall.id,
toolName: toolCall.name,
@@ -224,6 +244,10 @@ export async function executeToolAndReport(
message: 'Request aborted during tool execution',
data: { cancelled: true },
})
endToolSpan('cancelled', {
cancelReason: 'abort_during_execution',
error: result.success === false ? result.error : undefined,
})
return cancelledCompletion('Request aborted during tool execution')
}
result = await maybeWriteOutputToFile(toolCall.name, toolCall.params, result, execContext)
@@ -248,6 +272,7 @@ export async function executeToolAndReport(
message: 'Request aborted during tool post-processing',
data: { cancelled: true },
})
endToolSpan('cancelled', { cancelReason: 'abort_during_post_processing_file' })
return cancelledCompletion('Request aborted during tool post-processing')
}
result = await maybeWriteOutputToTable(toolCall.name, toolCall.params, result, execContext)
@@ -272,6 +297,7 @@ export async function executeToolAndReport(
message: 'Request aborted during tool post-processing',
data: { cancelled: true },
})
endToolSpan('cancelled', { cancelReason: 'abort_during_post_processing_table' })
return cancelledCompletion('Request aborted during tool post-processing')
}
result = await maybeWriteReadCsvToTable(toolCall.name, toolCall.params, result, execContext)
@@ -296,6 +322,7 @@ export async function executeToolAndReport(
message: 'Request aborted during tool post-processing',
data: { cancelled: true },
})
endToolSpan('cancelled', { cancelReason: 'abort_during_post_processing_csv' })
return cancelledCompletion('Request aborted during tool post-processing')
}
toolCall.status = result.success
@@ -404,7 +431,10 @@ export async function executeToolAndReport(
() => abortRequested(context, execContext, options)
)
}
context.trace.endSpan(toolSpan, result.success ? 'ok' : 'error')
endToolSpan(result.success ? 'ok' : 'error', {
resultSuccess: result.success,
...(result.success ? {} : { error: result.error || 'Tool failed' }),
})
return {
status: result.success
? MothershipStreamV1ToolOutcome.success
@@ -413,7 +443,7 @@ export async function executeToolAndReport(
data: asRecord(result.output),
}
} catch (error) {
context.trace.endSpan(toolSpan, 'error')
const thrownMessage = error instanceof Error ? error.message : String(error)
if (abortRequested(context, execContext, options)) {
toolCall.status = MothershipStreamV1ToolOutcome.cancelled
toolCall.endTime = Date.now()
@@ -435,10 +465,14 @@ export async function executeToolAndReport(
message: 'Request aborted during tool execution',
data: { cancelled: true },
})
endToolSpan('cancelled', {
cancelReason: 'abort_during_execution_catch',
error: thrownMessage,
})
return cancelledCompletion('Request aborted during tool execution')
}
toolCall.status = MothershipStreamV1ToolOutcome.error
toolCall.error = error instanceof Error ? error.message : String(error)
toolCall.error = thrownMessage
toolCall.endTime = Date.now()
logger.error('Tool execution threw', {
@@ -481,6 +515,7 @@ export async function executeToolAndReport(
},
}
await options?.onEvent?.(errorEvent)
endToolSpan('error', { error: thrownMessage })
return {
status: MothershipStreamV1ToolOutcome.error,
message: toolCall.error,

View File

@@ -1,5 +1,7 @@
import {
CreateFile,
CreateWorkflow,
DeleteFile,
DeleteWorkflow,
DownloadToWorkspaceFile,
EditWorkflow,
@@ -8,6 +10,7 @@ import {
GenerateVisualization,
Knowledge,
KnowledgeBase,
RenameFile,
UserTable,
WorkspaceFile,
} from '@/lib/copilot/generated/tool-catalog-v1'
@@ -19,6 +22,9 @@ type ResourceType = MothershipResourceType
const RESOURCE_TOOL_NAMES: Set<string> = new Set([
UserTable.id,
WorkspaceFile.id,
CreateFile.id,
RenameFile.id,
DeleteFile.id,
DownloadToWorkspaceFile.id,
CreateWorkflow.id,
EditWorkflow.id,
@@ -104,7 +110,9 @@ export function extractResourcesFromToolResult(
return []
}
case WorkspaceFile.id: {
case WorkspaceFile.id:
case CreateFile.id:
case RenameFile.id: {
const file = asRecord(data.file)
if (file.id) {
return [{ type: 'file', id: file.id as string, title: (file.name as string) || 'File' }]
@@ -217,6 +225,7 @@ export function extractResourcesFromToolResult(
const DELETE_CAPABLE_TOOL_RESOURCE_TYPE: Record<string, ResourceType> = {
[DeleteWorkflow.id]: 'workflow',
[WorkspaceFile.id]: 'file',
[DeleteFile.id]: 'file',
[UserTable.id]: 'table',
[KnowledgeBase.id]: 'knowledgebase',
}
@@ -264,6 +273,14 @@ export function extractDeletedResourcesFromToolResult(
return []
}
case DeleteFile.id: {
const fileId = (data.id as string) ?? (args.fileId as string) ?? (params?.fileId as string)
if (fileId) {
return [{ type: resourceType, id: fileId, title: (data.name as string) || 'File' }]
}
return []
}
case UserTable.id: {
if (operation !== 'delete') return []
const tableId = (args.tableId as string) ?? (params?.tableId as string)

View File

@@ -42,7 +42,11 @@ export async function executeTool(
}
if (context.abortSignal?.aborted) {
return { success: false, error: 'Execution aborted' }
logger.warn('Tool execution skipped: abort signal already set', {
toolId,
abortReason: context.abortSignal.reason ?? 'unknown',
})
return { success: false, error: 'Execution aborted: abort signal was set before tool started' }
}
const handler = handlerRegistry.get(toolId)
@@ -55,7 +59,11 @@ export async function executeTool(
return await handler(params, context)
} catch (error) {
const message = error instanceof Error ? error.message : String(error)
logger.error('Tool execution failed', { toolId, error: message })
logger.error('Tool execution failed', {
toolId,
error: message,
abortSignalAborted: context.abortSignal?.aborted ?? false,
})
return { success: false, error: message }
}
}

View File

@@ -66,7 +66,7 @@ export function isRunToolActiveForId(toolCallId: string): boolean {
export function cancelRunToolExecution(workflowId: string): void {
const controller = activeRunAbortByWorkflowId.get(workflowId)
if (!controller) return
controller.abort()
controller.abort('user_stop:cancelRunToolExecution')
activeRunAbortByWorkflowId.delete(workflowId)
}

View File

@@ -122,6 +122,28 @@ export async function executeVfsRead(
}
try {
// Coerces a loosely-typed tool argument into an optional line offset/limit:
// finite numbers pass through unchanged, non-blank strings are parsed as
// base-10 integers, and everything else (NaN, Infinity, blank strings,
// non-numeric types) yields undefined.
const parseOptionalNumber = (value: unknown): number | undefined => {
  switch (typeof value) {
    case 'number':
      return Number.isFinite(value) ? value : undefined
    case 'string': {
      if (value.trim() === '') return undefined
      const parsed = Number.parseInt(value, 10)
      return Number.isFinite(parsed) ? parsed : undefined
    }
    default:
      return undefined
  }
}
const offset = parseOptionalNumber(params.offset)
const limit = parseOptionalNumber(params.limit)
// Applies the enclosing scope's `offset`/`limit` line window to a read
// result. The window is clamped to [0, totalLines] so out-of-range values
// never throw; `totalLines` is intentionally left at the file's full size so
// callers can tell the content was windowed.
const applyWindow = <T extends { content: string; totalLines: number }>(result: T): T => {
  if (offset === undefined && limit === undefined) return result
  const lines = result.content.split('\n')
  // Missing offset means "start at the top"; negative offsets clamp to 0.
  const start = Math.max(0, Math.min(result.totalLines, offset ?? 0))
  // A negative limit is treated as zero lines; no limit means "to the end".
  const endRaw = limit !== undefined ? start + Math.max(0, limit) : result.totalLines
  const end = Math.max(start, Math.min(result.totalLines, endRaw))
  return {
    ...result,
    content: lines.slice(start, end).join('\n'),
  }
}
// Handle chat-scoped uploads via the uploads/ virtual prefix
if (path.startsWith('uploads/')) {
if (!context.chatId) {
@@ -137,11 +159,17 @@ export async function executeVfsRead(
return {
success: false,
error:
'Read result too large to return inline. Use grep on this path instead of reading it directly, or retry read with offset/limit.',
'Read result too large to return inline. Use grep on this path instead of reading it directly.',
}
}
logger.debug('vfs_read resolved chat upload', { path, totalLines: uploadResult.totalLines })
return { success: true, output: uploadResult }
const windowedUpload = applyWindow(uploadResult)
logger.debug('vfs_read resolved chat upload', {
path,
totalLines: uploadResult.totalLines,
offset,
limit,
})
return { success: true, output: windowedUpload }
}
return {
success: false,
@@ -150,11 +178,7 @@ export async function executeVfsRead(
}
const vfs = await getOrMaterializeVFS(workspaceId, context.userId)
const result = vfs.read(
path,
params.offset as number | undefined,
params.limit as number | undefined
)
const result = vfs.read(path, offset, limit)
if (!result) {
const fileContent = await vfs.readFileContent(path)
if (fileContent) {
@@ -165,16 +189,19 @@ export async function executeVfsRead(
return {
success: false,
error:
'Read result too large to return inline. Use grep on this path instead of reading it directly, or retry read with offset/limit.',
'Read result too large to return inline. Use grep on this path instead of reading it directly.',
}
}
const windowedFileContent = applyWindow(fileContent)
logger.debug('vfs_read resolved workspace file', {
path,
totalLines: fileContent.totalLines,
offset,
limit,
})
return {
success: true,
output: fileContent,
output: windowedFileContent,
}
}
@@ -193,10 +220,10 @@ export async function executeVfsRead(
return {
success: false,
error:
'Read result too large to return inline. Use grep on this path instead of reading it directly, or retry read with offset/limit.',
'Read result too large to return inline. Use grep on this path instead of reading it directly.',
}
}
logger.debug('vfs_read result', { path, totalLines: result.totalLines })
logger.debug('vfs_read result', { path, totalLines: result.totalLines, offset, limit })
return {
success: true,
output: result,

View File

@@ -34,13 +34,15 @@ export function createServerToolHandler(toolId: string): ToolHandler {
}
return { success: true, output: result }
} catch (error) {
const message = error instanceof Error ? error.message : String(error)
logger.error('Server tool execution failed', {
toolId,
error: error instanceof Error ? error.message : String(error),
error: message,
abortSignalAborted: context.abortSignal?.aborted ?? false,
})
return {
success: false,
error: error instanceof Error ? error.message : 'Server tool execution failed',
error: `[${toolId}] ${message}`,
}
}
}

View File

@@ -16,7 +16,10 @@ export function assertServerToolNotAborted(
message = 'Request aborted before tool mutation could be applied.'
): void {
if (context?.userStopSignal?.aborted) {
throw new Error(message)
const reason = context.userStopSignal.reason
? ` (reason: ${String(context.userStopSignal.reason)})`
: ''
throw new Error(`${message}${reason}`)
}
}

View File

@@ -0,0 +1,86 @@
import { createLogger } from '@sim/logger'
import { CreateFile } from '@/lib/copilot/generated/tool-catalog-v1'
import {
assertServerToolNotAborted,
type BaseServerTool,
type ServerToolContext,
} from '@/lib/copilot/tools/server/base-tool'
import {
getWorkspaceFileByName,
uploadWorkspaceFile,
} from '@/lib/uploads/contexts/workspace/workspace-file-manager'
import { inferContentType, validateFlatWorkspaceFileName } from './workspace-file'
const logger = createLogger('CreateFileServerTool')
interface CreateFileArgs {
fileName: string
contentType?: string
args?: Record<string, unknown>
}
interface CreateFileResult {
success: boolean
message: string
data?: {
id: string
name: string
contentType: string
}
}
export const createFileServerTool: BaseServerTool<CreateFileArgs, CreateFileResult> = {
name: CreateFile.id,
async execute(params: CreateFileArgs, context?: ServerToolContext): Promise<CreateFileResult> {
if (!context?.userId) {
throw new Error('Authentication required')
}
const workspaceId = context.workspaceId
if (!workspaceId) {
return { success: false, message: 'Workspace ID is required' }
}
const raw = params as Record<string, unknown>
const nested = raw.args as Record<string, unknown> | undefined
const fileName = (params.fileName as string) ?? (nested?.fileName as string) ?? ''
const explicitType =
(params.contentType as string) ?? (nested?.contentType as string) ?? undefined
const nameError = validateFlatWorkspaceFileName(fileName)
if (nameError) return { success: false, message: nameError }
const existingFile = await getWorkspaceFileByName(workspaceId, fileName)
if (existingFile) {
return { success: false, message: `File "${fileName}" already exists` }
}
const contentType = inferContentType(fileName, explicitType)
const emptyBuffer = Buffer.from('', 'utf-8')
assertServerToolNotAborted(context)
const result = await uploadWorkspaceFile(
workspaceId,
context.userId,
emptyBuffer,
fileName,
contentType
)
logger.info('File created via create_file', {
fileId: result.id,
name: fileName,
contentType,
userId: context.userId,
})
return {
success: true,
message: `File "${fileName}" created successfully`,
data: {
id: result.id,
name: result.name,
contentType,
},
}
},
}

View File

@@ -0,0 +1,61 @@
import { createLogger } from '@sim/logger'
import { DeleteFile } from '@/lib/copilot/generated/tool-catalog-v1'
import {
assertServerToolNotAborted,
type BaseServerTool,
type ServerToolContext,
} from '@/lib/copilot/tools/server/base-tool'
import {
deleteWorkspaceFile,
getWorkspaceFile,
} from '@/lib/uploads/contexts/workspace/workspace-file-manager'
const logger = createLogger('DeleteFileServerTool')
interface DeleteFileArgs {
fileId: string
args?: Record<string, unknown>
}
interface DeleteFileResult {
success: boolean
message: string
}
export const deleteFileServerTool: BaseServerTool<DeleteFileArgs, DeleteFileResult> = {
name: DeleteFile.id,
async execute(params: DeleteFileArgs, context?: ServerToolContext): Promise<DeleteFileResult> {
if (!context?.userId) {
throw new Error('Authentication required')
}
const workspaceId = context.workspaceId
if (!workspaceId) {
return { success: false, message: 'Workspace ID is required' }
}
const raw = params as Record<string, unknown>
const nested = raw.args as Record<string, unknown> | undefined
const fileId = (params.fileId as string) ?? (nested?.fileId as string) ?? ''
if (!fileId) return { success: false, message: 'fileId is required' }
const existingFile = await getWorkspaceFile(workspaceId, fileId)
if (!existingFile) {
return { success: false, message: `File with ID "${fileId}" not found` }
}
assertServerToolNotAborted(context)
await deleteWorkspaceFile(workspaceId, fileId)
logger.info('File deleted via delete_file', {
fileId,
name: existingFile.name,
userId: context.userId,
})
return {
success: true,
message: `File "${existingFile.name}" deleted successfully`,
}
},
}

View File

@@ -0,0 +1,76 @@
import { createLogger } from '@sim/logger'
import { RenameFile } from '@/lib/copilot/generated/tool-catalog-v1'
import {
assertServerToolNotAborted,
type BaseServerTool,
type ServerToolContext,
} from '@/lib/copilot/tools/server/base-tool'
import {
getWorkspaceFile,
renameWorkspaceFile,
} from '@/lib/uploads/contexts/workspace/workspace-file-manager'
import { validateFlatWorkspaceFileName } from './workspace-file'
const logger = createLogger('RenameFileServerTool')
interface RenameFileArgs {
fileId: string
newName: string
args?: Record<string, unknown>
}
interface RenameFileResult {
success: boolean
message: string
data?: {
id: string
name: string
}
}
export const renameFileServerTool: BaseServerTool<RenameFileArgs, RenameFileResult> = {
name: RenameFile.id,
async execute(params: RenameFileArgs, context?: ServerToolContext): Promise<RenameFileResult> {
if (!context?.userId) {
throw new Error('Authentication required')
}
const workspaceId = context.workspaceId
if (!workspaceId) {
return { success: false, message: 'Workspace ID is required' }
}
const raw = params as Record<string, unknown>
const nested = raw.args as Record<string, unknown> | undefined
const fileId = (params.fileId as string) ?? (nested?.fileId as string) ?? ''
const newName = (params.newName as string) ?? (nested?.newName as string) ?? ''
if (!fileId) return { success: false, message: 'fileId is required' }
const nameError = validateFlatWorkspaceFileName(newName)
if (nameError) return { success: false, message: nameError }
const existingFile = await getWorkspaceFile(workspaceId, fileId)
if (!existingFile) {
return { success: false, message: `File with ID "${fileId}" not found` }
}
assertServerToolNotAborted(context)
await renameWorkspaceFile(workspaceId, fileId, newName)
logger.info('File renamed via rename_file', {
fileId,
oldName: existingFile.name,
newName,
userId: context.userId,
})
return {
success: true,
message: `File renamed from "${existingFile.name}" to "${newName}"`,
data: {
id: fileId,
name: newName,
},
}
},
}

View File

@@ -89,13 +89,13 @@ const EXT_TO_MIME: Record<string, string> = {
'.pdf': PDF_MIME,
}
function inferContentType(fileName: string, explicitType?: string): string {
export function inferContentType(fileName: string, explicitType?: string): string {
if (explicitType) return explicitType
const ext = fileName.slice(fileName.lastIndexOf('.')).toLowerCase()
return EXT_TO_MIME[ext] || 'text/plain'
}
function validateFlatWorkspaceFileName(fileName: string): string | null {
export function validateFlatWorkspaceFileName(fileName: string): string | null {
const trimmed = fileName.trim()
if (!trimmed) return 'File name cannot be empty'
if (trimmed.includes('/')) {

View File

@@ -1,6 +1,8 @@
import { createLogger } from '@sim/logger'
import { z } from 'zod'
import {
CreateFile,
DeleteFile,
DownloadToWorkspaceFile,
GenerateImage,
GenerateVisualization,
@@ -9,6 +11,7 @@ import {
ManageCustomTool,
ManageMcpTool,
ManageSkill,
RenameFile,
UserTable,
WorkspaceFile,
} from '@/lib/copilot/generated/tool-catalog-v1'
@@ -20,7 +23,10 @@ import {
import { getBlocksMetadataServerTool } from '@/lib/copilot/tools/server/blocks/get-blocks-metadata-tool'
import { getTriggerBlocksServerTool } from '@/lib/copilot/tools/server/blocks/get-trigger-blocks'
import { searchDocumentationServerTool } from '@/lib/copilot/tools/server/docs/search-documentation'
import { createFileServerTool } from '@/lib/copilot/tools/server/files/create-file'
import { deleteFileServerTool } from '@/lib/copilot/tools/server/files/delete-file'
import { downloadToWorkspaceFileServerTool } from '@/lib/copilot/tools/server/files/download-to-workspace-file'
import { renameFileServerTool } from '@/lib/copilot/tools/server/files/rename-file'
import { workspaceFileServerTool } from '@/lib/copilot/tools/server/files/workspace-file'
import { validateGeneratedToolPayload } from '@/lib/copilot/tools/server/generated-schema'
import { generateImageServerTool } from '@/lib/copilot/tools/server/image/generate-image'
@@ -82,6 +88,9 @@ const WRITE_ACTIONS: Record<string, string[]> = {
[ManageSkill.id]: ['add', 'edit', 'delete'],
[ManageCredential.id]: ['rename', 'delete'],
[WorkspaceFile.id]: ['create', 'append', 'update', 'delete', 'rename', 'patch'],
[CreateFile.id]: ['*'],
[RenameFile.id]: ['*'],
[DeleteFile.id]: ['*'],
[DownloadToWorkspaceFile.id]: ['*'],
[GenerateVisualization.id]: ['generate'],
[GenerateImage.id]: ['generate'],
@@ -119,6 +128,9 @@ const serverToolRegistry: Record<string, BaseServerTool> = {
[knowledgeBaseServerTool.name]: knowledgeBaseServerTool,
[userTableServerTool.name]: userTableServerTool,
[workspaceFileServerTool.name]: workspaceFileServerTool,
[createFileServerTool.name]: createFileServerTool,
[renameFileServerTool.name]: renameFileServerTool,
[deleteFileServerTool.name]: deleteFileServerTool,
[downloadToWorkspaceFileServerTool.name]: downloadToWorkspaceFileServerTool,
[generateVisualizationServerTool.name]: generateVisualizationServerTool,
[generateImageServerTool.name]: generateImageServerTool,
@@ -155,7 +167,10 @@ export async function routeExecution(
}
}
assertServerToolNotAborted(context)
assertServerToolNotAborted(
context,
`User stop signal aborted ${toolName} before payload normalization`
)
// Go injects chatId/workspaceId and may wrap the model's args inside a
// nested "args" object. Unwrap that before validation so the generated
@@ -177,7 +192,7 @@ export async function routeExecution(
? tool.inputSchema.parse(normalizedPayload)
: validateGeneratedToolPayload(toolName, 'parameters', normalizedPayload)
assertServerToolNotAborted(context)
assertServerToolNotAborted(context, `User stop signal aborted ${toolName} after validation`)
// Execute
const result = await tool.execute(args, context)

View File

@@ -226,8 +226,10 @@ export function read(
const totalLines = lines.length
if (offset !== undefined || limit !== undefined) {
const start = offset ?? 0
const end = limit !== undefined ? start + limit : lines.length
const rawStart = Number.isFinite(offset) ? (offset as number) : 0
const start = Math.max(0, Math.min(totalLines, rawStart))
const rawEnd = limit !== undefined ? start + Math.max(0, limit) : totalLines
const end = Math.max(start, Math.min(totalLines, rawEnd))
return {
content: lines.slice(start, end).join('\n'),
totalLines,

View File

@@ -809,7 +809,7 @@ export class WorkspaceVFS {
isActive: chatTable.isActive,
})
.from(chatTable)
.where(eq(chatTable.workflowId, workflowId)),
.where(and(eq(chatTable.workflowId, workflowId), isNull(chatTable.archivedAt))),
db
.select({
id: form.id,
@@ -822,7 +822,7 @@ export class WorkspaceVFS {
isActive: form.isActive,
})
.from(form)
.where(eq(form.workflowId, workflowId)),
.where(and(eq(form.workflowId, workflowId), isNull(form.archivedAt))),
db
.select({
serverId: workflowMcpTool.serverId,
@@ -833,7 +833,13 @@ export class WorkspaceVFS {
})
.from(workflowMcpTool)
.innerJoin(workflowMcpServer, eq(workflowMcpTool.serverId, workflowMcpServer.id))
.where(eq(workflowMcpTool.workflowId, workflowId)),
.where(
and(
eq(workflowMcpTool.workflowId, workflowId),
isNull(workflowMcpTool.archivedAt),
isNull(workflowMcpServer.deletedAt)
)
),
db
.select({
id: a2aAgent.id,
@@ -844,7 +850,13 @@ export class WorkspaceVFS {
capabilities: a2aAgent.capabilities,
})
.from(a2aAgent)
.where(and(eq(a2aAgent.workflowId, workflowId), eq(a2aAgent.workspaceId, workspaceId))),
.where(
and(
eq(a2aAgent.workflowId, workflowId),
eq(a2aAgent.workspaceId, workspaceId),
isNull(a2aAgent.archivedAt)
)
),
isDeployed
? db
.select({

View File

@@ -655,6 +655,6 @@ export async function listFolders(workspaceId: string) {
sortOrder: workflowFolder.sortOrder,
})
.from(workflowFolder)
.where(eq(workflowFolder.workspaceId, workspaceId))
.where(and(eq(workflowFolder.workspaceId, workspaceId), isNull(workflowFolder.archivedAt)))
.orderBy(asc(workflowFolder.sortOrder), asc(workflowFolder.createdAt))
}

View File

@@ -1232,7 +1232,10 @@ async function executeToolRequest(
if (isInternalRoute) {
const controller = new AbortController()
const timeout = requestParams.timeout || DEFAULT_EXECUTION_TIMEOUT_MS
const timeoutId = setTimeout(() => controller.abort(), timeout)
const timeoutId = setTimeout(
() => controller.abort(`timeout:internal_tool_fetch:${timeout}ms`),
timeout
)
try {
response = await fetch(fullUrl, {