Merge branch 'dev' of github.com:simstudioai/sim into dev

This commit is contained in:
Vikhyath Mondreti
2026-04-09 13:35:43 -07:00
17 changed files with 5612 additions and 3459 deletions

View File

@@ -107,11 +107,19 @@ export function ToolCallItem({ toolName, displayTitle, status, streamingArgs }:
const opMatch = streamingArgs.match(/"operation"\s*:\s*"(\w+)"/)
const op = opMatch?.[1] ?? ''
const verb =
op === 'patch' || op === 'update' || op === 'rename'
? 'Editing'
: op === 'delete'
? 'Deleting'
: 'Writing'
op === 'create'
? 'Creating'
: op === 'append'
? 'Adding'
: op === 'patch'
? 'Editing'
: op === 'update'
? 'Writing'
: op === 'rename'
? 'Renaming'
: op === 'delete'
? 'Deleting'
: 'Writing'
const unescaped = titleMatch[1]
.replace(/\\u([0-9a-fA-F]{4})/g, (_, hex: string) =>
String.fromCharCode(Number.parseInt(hex, 16))

View File

@@ -155,7 +155,7 @@ const MARKDOWN_COMPONENTS = {
},
inlineCode({ children }: { children?: React.ReactNode }) {
return (
<code className='rounded bg-[var(--surface-5)] px-1.5 py-0.5 font-mono text-small font-[400] text-[var(--text-primary)] before:content-none after:content-none'>
<code className='rounded bg-[var(--surface-5)] px-1.5 py-0.5 font-[400] font-mono text-[var(--text-primary)] text-small before:content-none after:content-none'>
{children}
</code>
)

View File

@@ -41,6 +41,7 @@ const TOOL_ICONS: Record<string, IconComponent> = {
superagent: Blimp,
user_table: TableIcon,
workspace_file: File,
edit_content: File,
create_workflow: Layout,
edit_workflow: Pencil,
workflow: Hammer,

View File

@@ -103,20 +103,22 @@ export const ResourceContent = memo(function ResourceContent({
const isUpdateStream = streamOperation === 'update'
const { data: allFiles = [] } = useWorkspaceFiles(workspaceId)
const activeFileRecord = useMemo(() => {
if (!isPatchStream || resource.type !== 'file') return undefined
return allFiles.find((f) => f.id === resource.id)
}, [isPatchStream, resource, allFiles])
const previewFileId =
streamingFile?.fileId ?? (resource.type === 'file' ? resource.id : undefined)
const previewFileRecord = useMemo(() => {
if (!previewFileId) return undefined
return allFiles.find((f) => f.id === previewFileId)
}, [previewFileId, allFiles])
const isSourceMime =
activeFileRecord?.type === 'text/x-pptxgenjs' ||
activeFileRecord?.type === 'text/x-docxjs' ||
activeFileRecord?.type === 'text/x-pdflibjs'
previewFileRecord?.type === 'text/x-pptxgenjs' ||
previewFileRecord?.type === 'text/x-docxjs' ||
previewFileRecord?.type === 'text/x-pdflibjs'
const { data: fetchedFileContent } = useWorkspaceFileContent(
workspaceId,
activeFileRecord?.id ?? '',
activeFileRecord?.key ?? '',
previewFileRecord?.id ?? '',
previewFileRecord?.key ?? '',
isSourceMime
)
@@ -125,15 +127,28 @@ export const ResourceContent = memo(function ResourceContent({
if (!streamOperation) return undefined
if (isPatchStream) {
if (!fetchedFileContent) return undefined
if (fetchedFileContent === undefined) return undefined
if (!shouldApplyPatchPreview(streamingFile)) return undefined
return extractPatchPreview(streamingFile, fetchedFileContent)
}
const extracted = streamingFile.content
if (extracted.length === 0) return undefined
if (isUpdateStream) return extracted
if (isWriteStream) return extracted
if (streamOperation === 'append') {
if (streamingFile.targetKind === 'file_id') {
if (fetchedFileContent === undefined) return undefined
return buildAppendPreview(fetchedFileContent, extracted)
}
return extracted.length > 0 ? extracted : undefined
}
if (streamOperation === 'create') {
return extracted.length > 0 ? extracted : undefined
}
if (isWriteStream) return extracted.length > 0 ? extracted : undefined
return undefined
}, [
@@ -165,16 +180,11 @@ export const ResourceContent = memo(function ResourceContent({
}
}, [workspaceId, streamFileName])
// workspace_file preview events now carry whole-file snapshots, not deltas.
// Treat every live preview as replace so the viewer shows the latest snapshot.
// ResourceContent now reconstructs full-file preview text per operation,
// so the viewer can always treat streaming content as a whole-file replace.
const streamingFileMode: 'append' | 'replace' = 'replace'
// For existing file resources (not streaming-file), only pass streaming
// content for patch operations where the preview splices new content into
// the displayed file. Update operations re-stream the entire file from
// scratch which causes visual duplication of already-visible content.
const embeddedStreamingContent =
resource.id !== 'streaming-file' && isUpdateStream ? undefined : streamingExtractedContent
const embeddedStreamingContent = streamingExtractedContent
if (streamingFile && resource.id === 'streaming-file') {
return (
@@ -700,3 +710,27 @@ function extractPatchPreview(
return undefined
}
function shouldApplyPatchPreview(streamingFile: {
content: string
edit?: Record<string, unknown>
}): boolean {
const edit = streamingFile.edit ?? {}
const strategy = typeof edit.strategy === 'string' ? edit.strategy : undefined
const mode = typeof edit.mode === 'string' ? edit.mode : undefined
// delete_between is delete-only and can be previewed from intent metadata alone.
if (strategy === 'anchored' && mode === 'delete_between') {
return true
}
// For all other patch modes, keep the visible file unchanged until
// edit_content actually streams content into the target location.
return streamingFile.content.length > 0
}
function buildAppendPreview(existingContent: string, incomingContent: string): string {
if (incomingContent.length === 0) return existingContent
if (existingContent.length === 0) return incomingContent
return `${existingContent}\n${incomingContent}`
}

View File

@@ -354,6 +354,7 @@ export function useChat(
streamingFileRef.current = streamingFile
const filePreviewSessionsRef = useRef<Map<string, StreamingFilePreview>>(new Map())
const activeFilePreviewToolCallIdRef = useRef<string | null>(null)
const editContentParentToolCallIdRef = useRef<Map<string, string>>(new Map())
const [messageQueue, setMessageQueue] = useState<QueuedMessage[]>([])
const messageQueueRef = useRef<QueuedMessage[]>([])
@@ -368,6 +369,15 @@ export function useChat(
options?: { preserveExistingState?: boolean }
) => Promise<{ sawStreamError: boolean; sawComplete: boolean }>
>(async () => ({ sawStreamError: false, sawComplete: false }))
const attachToExistingStreamRef = useRef<
(opts: {
streamId: string
assistantId: string
expectedGen: number
initialBatch?: StreamBatchResponse | null
afterCursor?: string
}) => Promise<{ error: boolean; aborted: boolean }>
>(async () => ({ error: false, aborted: true }))
const retryReconnectRef = useRef<
(opts: { streamId: string; assistantId: string; gen: number }) => Promise<boolean>
>(async () => false)
@@ -518,6 +528,7 @@ export function useChat(
streamingFileRef.current = null
filePreviewSessionsRef.current.clear()
activeFilePreviewToolCallIdRef.current = null
editContentParentToolCallIdRef.current.clear()
setMessageQueue([])
}, [initialChatId, queryClient])
@@ -541,6 +552,7 @@ export function useChat(
streamingFileRef.current = null
filePreviewSessionsRef.current.clear()
activeFilePreviewToolCallIdRef.current = null
editContentParentToolCallIdRef.current.clear()
setMessageQueue([])
}, [isHomePage])
@@ -617,7 +629,7 @@ export function useChat(
const reconnectResult =
snapshotEvents.length > 0
? await attachToExistingStream({
? await attachToExistingStreamRef.current({
streamId: activeStreamId,
assistantId,
expectedGen: gen,
@@ -1015,6 +1027,10 @@ export function useChat(
sessions.set(id, nextSession)
activeFilePreviewToolCallIdRef.current = id
streamingFileRef.current = nextSession
const previewToolIdx = toolMap.get(id)
if (previewToolIdx !== undefined && blocks[previewToolIdx].toolCall) {
blocks[previewToolIdx].toolCall!.status = 'executing'
}
setStreamingFile(nextSession)
break
}
@@ -1062,11 +1078,19 @@ export function useChat(
const opMatch = tc.streamingArgs.match(/"operation"\s*:\s*"(\w+)"/)
const op = opMatch?.[1] ?? ''
const verb =
op === 'patch' || op === 'update' || op === 'rename'
? 'Editing'
: op === 'delete'
? 'Deleting'
: 'Writing'
op === 'create'
? 'Creating'
: op === 'append'
? 'Adding'
: op === 'patch'
? 'Editing'
: op === 'update'
? 'Writing'
: op === 'rename'
? 'Renaming'
: op === 'delete'
? 'Deleting'
: 'Writing'
const titleMatch = tc.streamingArgs.match(/"title"\s*:\s*"([^"]*)"/)
if (titleMatch?.[1]) {
const unescaped = titleMatch[1]
@@ -1174,7 +1198,20 @@ export function useChat(
clientExecutionStartedRef.current.delete(id)
}
if (tc.name === WorkspaceFile.id) {
const workspaceFileOperation =
tc.name === WorkspaceFile.id && typeof tc.params?.operation === 'string'
? tc.params.operation
: undefined
const shouldKeepWorkspacePreviewOpen =
tc.name === WorkspaceFile.id &&
(workspaceFileOperation === 'append' ||
workspaceFileOperation === 'update' ||
workspaceFileOperation === 'patch')
if (
(tc.name === WorkspaceFile.id || tc.name === 'edit_content') &&
!shouldKeepWorkspacePreviewOpen
) {
filePreviewSessionsRef.current.delete(id)
if (activeFilePreviewToolCallIdRef.current === id) {
activeFilePreviewToolCallIdRef.current = null
@@ -1198,6 +1235,7 @@ export function useChat(
setResources((rs) => rs.filter((r) => r.id !== 'streaming-file'))
}
}
editContentParentToolCallIdRef.current.delete(id)
break
}
@@ -1222,11 +1260,19 @@ export function useChat(
if (name === WorkspaceFile.id) {
const operation = typeof args?.operation === 'string' ? args.operation : ''
const verb =
operation === 'patch' || operation === 'update' || operation === 'rename'
? 'Editing'
: operation === 'delete'
? 'Deleting'
: 'Writing'
operation === 'create'
? 'Creating'
: operation === 'append'
? 'Adding'
: operation === 'patch'
? 'Editing'
: operation === 'update'
? 'Writing'
: operation === 'rename'
? 'Renaming'
: operation === 'delete'
? 'Deleting'
: 'Writing'
const chunkTitle = args?.title as string | undefined
const target = args ? asPayloadRecord(args.target) : undefined
const targetFileName = target?.fileName as string | undefined
@@ -1237,6 +1283,25 @@ export function useChat(
}
}
if (name === 'edit_content') {
const parentToolCallId =
activeFilePreviewToolCallIdRef.current ?? streamingFileRef.current?.toolCallId
const parentIdx =
parentToolCallId !== null && parentToolCallId !== undefined
? toolMap.get(parentToolCallId)
: undefined
if (parentIdx !== undefined && blocks[parentIdx].toolCall) {
toolMap.set(id, parentIdx)
editContentParentToolCallIdRef.current.set(id, parentToolCallId!)
const tc = blocks[parentIdx].toolCall!
tc.status = 'executing'
tc.result = undefined
tc.error = undefined
flush()
break
}
}
if (!toolMap.has(id)) {
toolMap.set(id, blocks.length)
blocks.push({
@@ -1608,6 +1673,7 @@ export function useChat(
},
[fetchStreamBatch]
)
attachToExistingStreamRef.current = attachToExistingStream
const resumeOrFinalize = useCallback(
async (opts: {
@@ -2054,6 +2120,7 @@ export function useChat(
streamingFileRef.current = null
filePreviewSessionsRef.current.clear()
activeFilePreviewToolCallIdRef.current = null
editContentParentToolCallIdRef.current.clear()
setResources((rs) => rs.filter((resource) => resource.id !== 'streaming-file'))
const execState = useExecutionStore.getState()

View File

@@ -4,6 +4,7 @@ import {
CreateWorkflow,
Debug,
Deploy,
EditContent,
EditWorkflow,
FunctionExecute,
GetPageContents,
@@ -272,6 +273,11 @@ export const TOOL_UI_METADATA: Record<string, ToolUIMetadata> = {
phaseLabel: 'Resource',
phase: 'resource',
},
[EditContent.id]: {
title: 'Writing content',
phaseLabel: 'Resource',
phase: 'resource',
},
[CreateWorkflow.id]: {
title: 'Creating workflow',
phaseLabel: 'Resource',

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -25,6 +25,7 @@ export function createStreamingContext(overrides?: Partial<StreamingContext>): S
streamComplete: false,
wasAborted: false,
errors: [],
activeFileIntent: null,
trace: new TraceCollector(),
...overrides,
}

View File

@@ -4,6 +4,7 @@ import {
MothershipStreamV1EventType,
MothershipStreamV1SpanLifecycleEvent,
MothershipStreamV1SpanPayloadKind,
MothershipStreamV1ToolPhase,
} from '@/lib/copilot/generated/mothership-stream-v1'
import { processSSEStream } from '@/lib/copilot/request/go/parser'
import {
@@ -19,61 +20,28 @@ import type {
StreamEvent,
StreamingContext,
} from '@/lib/copilot/request/types'
import { clearIntentsForWorkspace } from '@/lib/copilot/tools/server/files/file-intent-store'
const logger = createLogger('CopilotGoStream')
type FilePreviewServerState = {
raw: string
started: boolean
operation?: string
targetKind?: string
fileId?: string
fileName?: string
type FileIntent = {
toolCallId: string
operation: string
target: { kind: string; fileId?: string; fileName?: string }
title?: string
editMetaKey?: string
targetKey?: string
contentType?: string
edit?: Record<string, unknown>
}
type EditContentStreamState = {
raw: string
lastContentSnapshot?: string
}
function extractJsonString(raw: string, key: string): string | undefined {
const pattern = new RegExp(`"${key}"\\s*:\\s*"`)
const m = pattern.exec(raw)
if (!m) return undefined
const start = m.index + m[0].length
let end = -1
for (let i = start; i < raw.length; i++) {
if (raw[i] === '\\') {
i++
continue
}
if (raw[i] === '"') {
end = i
break
}
}
if (end === -1) return undefined
return raw
.slice(start, end)
.replace(/\\n/g, '\n')
.replace(/\\t/g, '\t')
.replace(/\\r/g, '\r')
.replace(/\\"/g, '"')
.replace(/\\u([0-9a-fA-F]{4})/g, (_, hex) => String.fromCharCode(Number.parseInt(hex, 16)))
.replace(/\\\\/g, '\\')
}
function extractJsonBoolean(raw: string, key: string): boolean | undefined {
const match = raw.match(new RegExp(`"${key}"\\s*:\\s*(true|false)`))
if (!match) return undefined
return match[1] === 'true'
}
function extractJsonNumber(raw: string, key: string): number | undefined {
const match = raw.match(new RegExp(`"${key}"\\s*:\\s*(\\d+)`))
if (!match) return undefined
return Number.parseInt(match[1], 10)
}
/**
* Decode a prefix of a JSON-encoded string value, handling escape sequences
* that may be incomplete at the end of a streaming chunk.
*/
function decodeJsonStringPrefix(input: string): string {
let output = ''
for (let i = 0; i < input.length; i++) {
@@ -126,9 +94,7 @@ function decodeJsonStringPrefix(input: string): string {
}
if (next === 'u') {
const hex = input.slice(i + 2, i + 6)
if (hex.length < 4 || !/^[0-9a-fA-F]{4}$/.test(hex)) {
break
}
if (hex.length < 4 || !/^[0-9a-fA-F]{4}$/.test(hex)) break
output += String.fromCharCode(Number.parseInt(hex, 16))
i += 5
continue
@@ -138,8 +104,13 @@ function decodeJsonStringPrefix(input: string): string {
return output
}
function extractStreamedContent(raw: string, preferredKey: 'content' | 'replace'): string {
const marker = `"${preferredKey}":`
/**
* Extract the streamed content string from edit_content's raw JSON args.
* Since edit_content has a single field `content`, the JSON is always
* `{"content":"..."}`. We find `"content":"` and decode everything after.
*/
function extractEditContent(raw: string): string {
const marker = '"content":'
const idx = raw.indexOf(marker)
if (idx === -1) return ''
const rest = raw.slice(idx + marker.length).trimStart()
@@ -159,13 +130,6 @@ function extractStreamedContent(raw: string, preferredKey: 'content' | 'replace'
return decodeJsonStringPrefix(inner)
}
function buildPreviewContent(raw: string, strategy?: string): string {
if (strategy === 'search_replace') {
return extractStreamedContent(raw, 'replace')
}
return extractStreamedContent(raw, 'content')
}
export class CopilotBackendError extends Error {
status?: number
body?: string
@@ -202,9 +166,10 @@ export interface StreamLoopOptions extends OrchestratorOptions {
* Handles: fetch -> parse -> normalize -> dedupe -> subagent routing -> handler dispatch.
* Callers provide the fetch URL/options and can intercept events via onBeforeDispatch.
*
* Optimised hot path: text events (the most frequent) bypass tool-call dedup
* checks and are dispatched synchronously without any await, eliminating ~4
* microtask yields per text event vs the previous async-generator + await chain.
* File preview streaming uses an intent-based approach:
* 1. workspace_file phase:call → store intent (operation, target, edit metadata)
* 2. edit_content phase:args_delta → stream content using stored intent
* 3. edit_content phase:call → consume and clear intent
*/
export async function runStreamLoop(
fetchUrl: string,
@@ -214,7 +179,7 @@ export async function runStreamLoop(
options: StreamLoopOptions
): Promise<void> {
const { timeout = ORCHESTRATION_TIMEOUT_MS, abortSignal } = options
const filePreviewState = new Map<string, FilePreviewServerState>()
const editContentState = new Map<string, EditContentStreamState>()
const fetchSpan = context.trace.startSpan(
`HTTP Request → ${new URL(fetchUrl).pathname}`,
@@ -277,144 +242,180 @@ export async function runStreamLoop(
return
}
// ── workspace_file phase:call → store intent and emit preview metadata ──
if (
streamEvent.type === MothershipStreamV1EventType.tool &&
streamEvent.payload.phase === 'args_delta' &&
streamEvent.payload.toolName === 'workspace_file' &&
streamEvent.payload.phase === MothershipStreamV1ToolPhase.call &&
streamEvent.payload.toolName === 'workspace_file'
) {
const toolCallId = streamEvent.payload.toolCallId as string | undefined
const args = (streamEvent.payload.arguments ?? streamEvent.payload.input) as
| Record<string, unknown>
| undefined
if (toolCallId && args) {
const operation = args.operation as string | undefined
const target = args.target as Record<string, unknown> | undefined
const title = args.title as string | undefined
const contentType = args.contentType as string | undefined
const edit = args.edit as Record<string, unknown> | undefined
if (operation && target) {
const targetKind = target.kind as string
const fileId = target.fileId as string | undefined
const fileName = target.fileName as string | undefined
const isContentOp =
operation === 'append' || operation === 'update' || operation === 'patch'
if (context.activeFileIntent && isContentOp) {
logger.warn(
'Orphaned workspace_file intent: content-op workspace_file arrived without edit_content for prior intent',
{
orphanedToolCallId: context.activeFileIntent.toolCallId,
orphanedOperation: context.activeFileIntent.operation,
newToolCallId: toolCallId,
newOperation: operation,
}
)
const cleared = clearIntentsForWorkspace(execContext.workspaceId)
if (cleared > 0) {
logger.warn('Cleared orphaned execution intents from store', {
cleared,
workspaceId: execContext.workspaceId,
})
}
}
context.activeFileIntent = {
toolCallId,
operation,
target: {
kind: targetKind,
...(fileId ? { fileId } : {}),
...(fileName ? { fileName } : {}),
},
...(title ? { title } : {}),
...(contentType ? { contentType } : {}),
...(edit ? { edit } : {}),
}
const isDocFormat = /\.(pptx|docx|pdf)$/i.test(fileName ?? '')
if (!isDocFormat && isContentOp) {
const scope = streamEvent.scope ? { scope: streamEvent.scope } : {}
await options.onEvent?.({
type: MothershipStreamV1EventType.tool,
payload: {
toolCallId,
toolName: 'workspace_file',
previewPhase: 'file_preview_start',
},
...scope,
})
await options.onEvent?.({
type: MothershipStreamV1EventType.tool,
payload: {
toolCallId,
toolName: 'workspace_file',
previewPhase: 'file_preview_target',
operation,
target: {
kind: targetKind,
...(fileId ? { fileId } : {}),
...(fileName ? { fileName } : {}),
},
...(title ? { title } : {}),
},
...scope,
})
if (edit) {
await options.onEvent?.({
type: MothershipStreamV1EventType.tool,
payload: {
toolCallId,
toolName: 'workspace_file',
previewPhase: 'file_preview_edit_meta',
edit,
},
...scope,
})
}
}
}
}
}
// ── edit_content phase:args_delta → stream content using stored intent ──
if (
streamEvent.type === MothershipStreamV1EventType.tool &&
streamEvent.payload.phase === MothershipStreamV1ToolPhase.args_delta &&
streamEvent.payload.toolName === 'edit_content' &&
typeof streamEvent.payload.toolCallId === 'string' &&
typeof streamEvent.payload.argumentsDelta === 'string'
) {
const toolCallId = streamEvent.payload.toolCallId as string
const delta = streamEvent.payload.argumentsDelta as string
const state = filePreviewState.get(toolCallId) ?? {
raw: '',
started: false,
}
const state = editContentState.get(toolCallId) ?? { raw: '' }
state.raw += delta
const operation = extractJsonString(state.raw, 'operation')
const targetKind = extractJsonString(state.raw, 'kind')
const fileId = extractJsonString(state.raw, 'fileId')
const fileName = extractJsonString(state.raw, 'fileName')
const title = extractJsonString(state.raw, 'title')
if (operation) state.operation = operation
if (targetKind) state.targetKind = targetKind
if (fileId) state.fileId = fileId
if (fileName) state.fileName = fileName
if (title) state.title = title
const isDocFormat = /\.(pptx|docx|pdf)$/i.test(state.fileName ?? '')
if (!isDocFormat) {
if (!state.started) {
state.started = true
await options.onEvent?.({
type: MothershipStreamV1EventType.tool,
payload: {
toolCallId,
toolName: 'workspace_file',
previewPhase: 'file_preview_start',
},
...(streamEvent.scope ? { scope: streamEvent.scope } : {}),
})
}
const targetKey = JSON.stringify({
operation: state.operation,
targetKind: state.targetKind,
fileId: state.fileId,
fileName: state.fileName,
title: state.title,
})
if (
state.targetKind &&
(state.targetKind === 'new_file' ? !!state.fileName : !!state.fileId) &&
state.targetKey !== targetKey
) {
state.targetKey = targetKey
await options.onEvent?.({
type: MothershipStreamV1EventType.tool,
payload: {
toolCallId,
toolName: 'workspace_file',
previewPhase: 'file_preview_target',
operation: state.operation,
target: {
kind: state.targetKind,
...(state.fileId ? { fileId: state.fileId } : {}),
...(state.fileName ? { fileName: state.fileName } : {}),
if (context.activeFileIntent) {
const isDocFormat = /\.(pptx|docx|pdf)$/i.test(
context.activeFileIntent.target.fileName ?? ''
)
if (!isDocFormat) {
const streamedContent = extractEditContent(state.raw)
if (streamedContent !== (state.lastContentSnapshot ?? '')) {
state.lastContentSnapshot = streamedContent
await options.onEvent?.({
type: MothershipStreamV1EventType.tool,
payload: {
toolCallId: context.activeFileIntent.toolCallId,
toolName: 'workspace_file',
previewPhase: 'file_preview_content',
content: streamedContent,
contentMode: 'snapshot',
},
...(state.title ? { title: state.title } : {}),
},
...(streamEvent.scope ? { scope: streamEvent.scope } : {}),
})
...(streamEvent.scope ? { scope: streamEvent.scope } : {}),
})
}
}
}
const strategy = extractJsonString(state.raw, 'strategy')
const editMetaPayload = strategy
? {
strategy,
...(extractJsonString(state.raw, 'mode')
? { mode: extractJsonString(state.raw, 'mode') }
: {}),
...(extractJsonNumber(state.raw, 'occurrence') !== undefined
? { occurrence: extractJsonNumber(state.raw, 'occurrence') }
: {}),
...(extractJsonString(state.raw, 'search')
? { search: extractJsonString(state.raw, 'search') }
: {}),
...(extractJsonBoolean(state.raw, 'replaceAll') !== undefined
? { replaceAll: extractJsonBoolean(state.raw, 'replaceAll') }
: {}),
...(extractJsonString(state.raw, 'before_anchor')
? { before_anchor: extractJsonString(state.raw, 'before_anchor') }
: {}),
...(extractJsonString(state.raw, 'after_anchor')
? { after_anchor: extractJsonString(state.raw, 'after_anchor') }
: {}),
...(extractJsonString(state.raw, 'anchor')
? { anchor: extractJsonString(state.raw, 'anchor') }
: {}),
...(extractJsonString(state.raw, 'start_anchor')
? { start_anchor: extractJsonString(state.raw, 'start_anchor') }
: {}),
...(extractJsonString(state.raw, 'end_anchor')
? { end_anchor: extractJsonString(state.raw, 'end_anchor') }
: {}),
}
: undefined
const editMetaKey = editMetaPayload ? JSON.stringify(editMetaPayload) : undefined
if (editMetaPayload && state.editMetaKey !== editMetaKey) {
state.editMetaKey = editMetaKey
await options.onEvent?.({
type: MothershipStreamV1EventType.tool,
payload: {
toolCallId,
toolName: 'workspace_file',
previewPhase: 'file_preview_edit_meta',
edit: editMetaPayload,
},
...(streamEvent.scope ? { scope: streamEvent.scope } : {}),
})
}
editContentState.set(toolCallId, state)
}
const streamedContent = buildPreviewContent(state.raw, strategy)
if (streamedContent !== (state.lastContentSnapshot ?? '')) {
state.lastContentSnapshot = streamedContent
await options.onEvent?.({
type: MothershipStreamV1EventType.tool,
payload: {
toolCallId,
toolName: 'workspace_file',
previewPhase: 'file_preview_content',
content: streamedContent,
contentMode: 'snapshot',
},
...(streamEvent.scope ? { scope: streamEvent.scope } : {}),
})
}
} // end if (!isDocFormat)
// ── edit_content phase:call → keep intent until result for preview completion ──
if (
streamEvent.type === MothershipStreamV1EventType.tool &&
streamEvent.payload.phase === MothershipStreamV1ToolPhase.call &&
streamEvent.payload.toolName === 'edit_content'
) {
const toolCallId = streamEvent.payload.toolCallId as string | undefined
if (toolCallId) {
editContentState.delete(toolCallId)
}
}
filePreviewState.set(toolCallId, state)
// ── edit_content phase:result → complete preview and clear intent ──
if (
streamEvent.type === MothershipStreamV1EventType.tool &&
streamEvent.payload.phase === MothershipStreamV1ToolPhase.result &&
streamEvent.payload.toolName === 'edit_content' &&
context.activeFileIntent
) {
await options.onEvent?.({
type: MothershipStreamV1EventType.tool,
payload: {
toolCallId: context.activeFileIntent.toolCallId,
toolName: 'workspace_file',
previewPhase: 'file_preview_complete',
fileId: context.activeFileIntent.target.fileId,
data:
streamEvent.payload.result !== undefined
? streamEvent.payload.result
: streamEvent.payload.data,
},
...(streamEvent.scope ? { scope: streamEvent.scope } : {}),
})
context.activeFileIntent = null
}
try {

View File

@@ -193,10 +193,17 @@ async function runCheckpointLoop(
execContext,
loopOptions
)
context.trace.endSpan(streamSpan)
const streamStatus = isAborted(options, context)
? RequestTraceV1SpanStatus.cancelled
: context.errors.length > 0
? RequestTraceV1SpanStatus.error
: RequestTraceV1SpanStatus.ok
context.trace.endSpan(streamSpan, streamStatus)
context.trace.setActiveSpan(undefined)
resumeAttempt = 0
} catch (streamError) {
context.trace.endSpan(streamSpan, RequestTraceV1SpanStatus.error)
context.trace.setActiveSpan(undefined)
if (streamError instanceof BillingLimitError) {
await handleBillingLimitResponse(streamError.userId, context, execContext, options)
break
@@ -282,6 +289,12 @@ async function runCheckpointLoop(
)
}
if (isAborted(options, context)) {
cancelPendingTools(context)
context.awaitingAsyncContinuation = undefined
break
}
const results: Array<{
callId: string
name: string
@@ -289,6 +302,11 @@ async function runCheckpointLoop(
success: boolean
}> = []
for (const toolCallId of continuation.pendingToolCallIds) {
if (isAborted(options, context)) {
cancelPendingTools(context)
context.awaitingAsyncContinuation = undefined
break
}
const tool = context.toolCalls.get(toolCallId)
if (!tool || (!tool.result && !tool.error)) {
logger.error('Missing tool result for pending tool call', {
@@ -309,6 +327,12 @@ async function runCheckpointLoop(
})
}
if (isAborted(options, context)) {
cancelPendingTools(context)
context.awaitingAsyncContinuation = undefined
break
}
logger.info('Resuming with tool results', {
checkpointId: continuation.checkpointId,
runId: continuation.runId,
@@ -324,6 +348,13 @@ async function runCheckpointLoop(
checkpointId: continuation.checkpointId,
results,
}
if (isAborted(options, context)) {
cancelPendingTools(context)
context.awaitingAsyncContinuation = undefined
break
}
logger.info('Prepared resume request payload', {
route,
streamId: context.messageId,

View File

@@ -211,6 +211,19 @@ export async function executeToolAndReport(
toolSpan.attributes = { ...toolSpan.attributes, ...abortDetail, ...detail }
context.trace.endSpan(toolSpan, status)
}
const endToolSpanFromTerminalState = () => {
const terminalStatus =
toolCall.status === MothershipStreamV1ToolOutcome.cancelled
? 'cancelled'
: toolCall.status === MothershipStreamV1ToolOutcome.success ||
toolCall.status === MothershipStreamV1ToolOutcome.skipped
? 'ok'
: 'error'
endToolSpan(terminalStatus, {
resultSuccess: toolCall.status === MothershipStreamV1ToolOutcome.success,
...(toolCall.error ? { error: toolCall.error } : {}),
})
}
logger.info('Tool execution started', {
toolCallId: toolCall.id,
@@ -221,6 +234,7 @@ export async function executeToolAndReport(
ensureHandlersRegistered()
let result = await executeTool(toolCall.name, toolCall.params || {}, execContext)
if (toolCall.endTime || isTerminalToolCallStatus(toolCall.status)) {
endToolSpanFromTerminalState()
return terminalCompletionFromToolCall(toolCall)
}
if (abortRequested(context, execContext, options)) {
@@ -394,6 +408,7 @@ export async function executeToolAndReport(
if (abortRequested(context, execContext, options)) {
toolCall.status = MothershipStreamV1ToolOutcome.cancelled
endToolSpan('cancelled', { cancelReason: 'abort_before_tool_result_delivery' })
return cancelledCompletion('Request aborted before tool result delivery')
}
@@ -418,6 +433,7 @@ export async function executeToolAndReport(
if (abortRequested(context, execContext, options)) {
toolCall.status = MothershipStreamV1ToolOutcome.cancelled
endToolSpan('cancelled', { cancelReason: 'abort_before_resource_persistence' })
return cancelledCompletion('Request aborted before resource persistence')
}

View File

@@ -86,6 +86,14 @@ export interface StreamingContext {
errors: string[]
usage?: { prompt: number; completion: number }
cost?: { input: number; output: number; total: number }
activeFileIntent?: {
toolCallId: string
operation: string
target: { kind: string; fileId?: string; fileName?: string }
title?: string
contentType?: string
edit?: Record<string, unknown>
} | null
trace: TraceCollector
}

View File

@@ -0,0 +1,287 @@
import { createLogger } from '@sim/logger'
import {
assertServerToolNotAborted,
type BaseServerTool,
type ServerToolContext,
} from '@/lib/copilot/tools/server/base-tool'
import {
generateDocxFromCode,
generatePdfFromCode,
generatePptxFromCode,
} from '@/lib/execution/doc-vm'
import { updateWorkspaceFileContent } from '@/lib/uploads/contexts/workspace/workspace-file-manager'
import { consumeLatestFileIntent } from './file-intent-store'
import { inferContentType } from './workspace-file'
const logger = createLogger('EditContentServerTool')
type EditContentArgs = {
content: string
}
type EditContentResult = {
success: boolean
message: string
data?: Record<string, unknown>
}
function getDocumentFormatInfo(fileName: string): {
isDoc: boolean
formatName?: string
sourceMime?: string
generator?: (code: string, workspaceId: string, signal?: AbortSignal) => Promise<Buffer>
} {
const lowerName = fileName.toLowerCase()
if (lowerName.endsWith('.pptx')) {
return {
isDoc: true,
formatName: 'PPTX',
sourceMime: 'text/x-pptxgenjs',
generator: generatePptxFromCode,
}
}
if (lowerName.endsWith('.docx')) {
return {
isDoc: true,
formatName: 'DOCX',
sourceMime: 'text/x-docxjs',
generator: generateDocxFromCode,
}
}
if (lowerName.endsWith('.pdf')) {
return {
isDoc: true,
formatName: 'PDF',
sourceMime: 'text/x-pdflibjs',
generator: generatePdfFromCode,
}
}
return { isDoc: false }
}
/**
 * edit_content — phase two of the two-step workspace file write protocol.
 *
 * workspace_file (append/update/patch) records a PendingFileIntent instead of
 * writing directly; this tool consumes the most recent intent for the
 * workspace and applies the provided content according to the intent's
 * operation. Document sources (pptx/docx/pdf code) are compiled before the
 * file is persisted so a broken edit never overwrites a good file.
 * Returns a { success, message, data? } envelope; only auth failures throw.
 */
export const editContentServerTool: BaseServerTool<EditContentArgs, EditContentResult> = {
  name: 'edit_content',
  async execute(params: EditContentArgs, context?: ServerToolContext): Promise<EditContentResult> {
    // Authentication is the only hard failure; everything else reports via the envelope.
    if (!context?.userId) {
      logger.error('Unauthorized attempt to use edit_content')
      throw new Error('Authentication required')
    }
    const workspaceId = context.workspaceId
    if (!workspaceId) {
      return { success: false, message: 'Workspace ID is required' }
    }
    // Some callers nest tool arguments under an `args` wrapper; accept both shapes.
    const raw = params as Record<string, unknown>
    const nested = raw.args as Record<string, unknown> | undefined
    const content =
      typeof params.content === 'string'
        ? params.content
        : typeof nested?.content === 'string'
          ? (nested.content as string)
          : undefined
    if (content === undefined) {
      return { success: false, message: 'content is required for edit_content' }
    }
    // Consuming removes the intent, so a retry requires a fresh workspace_file call.
    const intent = consumeLatestFileIntent(workspaceId)
    if (!intent) {
      return {
        success: false,
        message:
          'No workspace_file context found. Call workspace_file first, wait for it to succeed, then call edit_content in the next step. Do not emit edit_content in parallel or in the same batch as workspace_file.',
      }
    }
    try {
      const { operation, fileRecord } = intent
      const docInfo = getDocumentFormatInfo(fileRecord.name)
      // Full file content that will be persisted, built per operation below.
      let finalContent: string
      switch (operation) {
        case 'append': {
          const existing = intent.existingContent ?? ''
          // Doc sources get the appended chunk wrapped in a `{ ... }` block;
          // plain files are newline-joined (no leading newline when empty).
          finalContent = docInfo.isDoc
            ? `${existing}\n{\n${content}\n}`
            : existing
              ? `${existing}\n${content}`
              : content
          break
        }
        case 'update': {
          // Full replacement: the provided content becomes the whole file.
          finalContent = content
          break
        }
        case 'patch': {
          const existing = intent.existingContent ?? ''
          if (!intent.edit) {
            return { success: false, message: 'Patch intent missing edit metadata' }
          }
          if (intent.edit.strategy === 'search_replace') {
            // `search` was captured when the intent was stored; `content` here is
            // the replacement text. Non-null assertion mirrors that contract.
            const search = intent.edit.search!
            const firstIdx = existing.indexOf(search)
            if (firstIdx === -1) {
              return {
                success: false,
                message: `Patch failed: search string not found in file "${fileRecord.name}"`,
              }
            }
            finalContent = intent.edit.replaceAll
              ? existing.split(search).join(content)
              : existing.slice(0, firstIdx) + content + existing.slice(firstIdx + search.length)
          } else if (intent.edit.strategy === 'anchored') {
            const lines = existing.split('\n')
            const defaultOccurrence = intent.edit.occurrence ?? 1
            // Finds the Nth line whose trimmed text equals the trimmed anchor,
            // scanning strictly after `afterIndex`. Misses return index -1 plus
            // a human-readable error for the tool result.
            const findAnchorLine = (
              anchor: string,
              occurrence = defaultOccurrence,
              afterIndex = -1
            ): { index: number; error?: string } => {
              const trimmed = anchor.trim()
              let count = 0
              for (let i = afterIndex + 1; i < lines.length; i++) {
                if (lines[i].trim() === trimmed) {
                  count++
                  if (count === occurrence) return { index: i }
                }
              }
              if (count === 0) {
                return {
                  index: -1,
                  error: `Anchor line not found in "${fileRecord.name}": "${anchor.slice(0, 100)}"`,
                }
              }
              return {
                index: -1,
                error: `Anchor line occurrence ${occurrence} not found (only ${count} match${count > 1 ? 'es' : ''}) in "${fileRecord.name}": "${anchor.slice(0, 100)}"`,
              }
            }
            if (intent.edit.mode === 'replace_between') {
              // Replaces everything strictly between the anchors; both anchor
              // lines themselves are kept.
              if (!intent.edit.before_anchor || !intent.edit.after_anchor) {
                return {
                  success: false,
                  message: 'replace_between requires before_anchor and after_anchor',
                }
              }
              const before = findAnchorLine(intent.edit.before_anchor)
              if (before.error) return { success: false, message: `Patch failed: ${before.error}` }
              // after_anchor is searched only past before_anchor's line.
              const after = findAnchorLine(
                intent.edit.after_anchor,
                defaultOccurrence,
                before.index
              )
              if (after.error) return { success: false, message: `Patch failed: ${after.error}` }
              if (after.index <= before.index) {
                return {
                  success: false,
                  message: 'Patch failed: after_anchor must appear after before_anchor in the file',
                }
              }
              const newLines = [
                ...lines.slice(0, before.index + 1),
                ...content.split('\n'),
                ...lines.slice(after.index),
              ]
              finalContent = newLines.join('\n')
            } else if (intent.edit.mode === 'insert_after') {
              // Inserts the content immediately after the anchor line.
              if (!intent.edit.anchor) {
                return { success: false, message: 'insert_after requires anchor' }
              }
              const found = findAnchorLine(intent.edit.anchor)
              if (found.error) return { success: false, message: `Patch failed: ${found.error}` }
              const newLines = [
                ...lines.slice(0, found.index + 1),
                ...content.split('\n'),
                ...lines.slice(found.index + 1),
              ]
              finalContent = newLines.join('\n')
            } else if (intent.edit.mode === 'delete_between') {
              // Deletes from the start anchor line (inclusive) up to the end
              // anchor line (exclusive — the end anchor is kept).
              if (!intent.edit.start_anchor || !intent.edit.end_anchor) {
                return {
                  success: false,
                  message: 'delete_between requires start_anchor and end_anchor',
                }
              }
              const start = findAnchorLine(intent.edit.start_anchor)
              if (start.error) return { success: false, message: `Patch failed: ${start.error}` }
              const end = findAnchorLine(intent.edit.end_anchor, defaultOccurrence, start.index)
              if (end.error) return { success: false, message: `Patch failed: ${end.error}` }
              if (end.index <= start.index) {
                return {
                  success: false,
                  message: 'Patch failed: end_anchor must appear after start_anchor in the file',
                }
              }
              const newLines = [...lines.slice(0, start.index), ...lines.slice(end.index)]
              finalContent = newLines.join('\n')
            } else {
              return {
                success: false,
                message: `Unknown anchored patch mode: "${intent.edit.mode}"`,
              }
            }
          } else {
            return { success: false, message: `Unknown patch strategy: "${intent.edit.strategy}"` }
          }
          break
        }
        default:
          // operation is typed 'append' | 'update' | 'patch'; this guards malformed intents.
          return { success: false, message: `Unsupported operation in intent: ${operation}` }
      }
      // Compile doc sources first so an invalid edit never clobbers the stored file.
      if (docInfo.isDoc) {
        try {
          await docInfo.generator!(finalContent, workspaceId)
        } catch (err) {
          const msg = err instanceof Error ? err.message : String(err)
          return {
            success: false,
            message: `${docInfo.formatName} generation failed: ${msg}. Fix the content and retry.`,
          }
        }
      }
      const fileBuffer = Buffer.from(finalContent, 'utf-8')
      // Last chance to bail on client abort before the write becomes visible.
      assertServerToolNotAborted(context)
      const mime = docInfo.sourceMime || inferContentType(fileRecord.name, intent.contentType)
      await updateWorkspaceFileContent(workspaceId, intent.fileId, context.userId, fileBuffer, mime)
      const verb =
        operation === 'append' ? 'appended to' : operation === 'update' ? 'updated' : 'patched'
      logger.info(`Workspace file ${verb} via copilot (edit_content)`, {
        fileId: intent.fileId,
        name: fileRecord.name,
        operation,
        size: fileBuffer.length,
        userId: context.userId,
      })
      return {
        success: true,
        message: `File "${fileRecord.name}" ${verb} successfully (${fileBuffer.length} bytes)`,
        data: {
          id: intent.fileId,
          name: fileRecord.name,
          size: fileBuffer.length,
          contentType: mime,
        },
      }
    } catch (error) {
      const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
      logger.error('Error in edit_content tool', {
        operation: intent.operation,
        fileId: intent.fileId,
        error: errorMessage,
        userId: context.userId,
      })
      return {
        success: false,
        message: `Failed to apply content: ${errorMessage}`,
      }
    }
  },
}

View File

@@ -0,0 +1,90 @@
import type { UserFile } from '@/lib/uploads/contexts/workspace/workspace-file-manager'
/**
 * A deferred write recorded by the workspace_file tool and consumed by
 * edit_content in a later step. Captures everything needed to apply the
 * content without re-reading the file.
 */
export type PendingFileIntent = {
  // How the upcoming content should be applied to the file.
  operation: 'append' | 'update' | 'patch'
  fileId: string
  workspaceId: string
  userId: string
  // File metadata snapshot taken when the intent was stored.
  fileRecord: UserFile
  // File body at intent time; required by append and patch, unused by update.
  existingContent?: string
  // Patch parameters; present only when operation === 'patch'.
  edit?: {
    // 'search_replace' or 'anchored'.
    strategy: string
    search?: string
    replaceAll?: boolean
    // Anchored mode: 'replace_between' | 'insert_after' | 'delete_between'.
    mode?: string
    // Which match of an anchor line to use (1-based).
    occurrence?: number
    before_anchor?: string
    after_anchor?: string
    anchor?: string
    start_anchor?: string
    end_anchor?: string
  }
  contentType?: string
  title?: string
  // Epoch millis; intents older than the store TTL are purged.
  createdAt: number
}
// Intents expire after this many milliseconds.
const INTENT_TTL_MS = 60_000
// In-memory pending intents, keyed by `${workspaceId}:${fileId}`.
const store = new Map<string, PendingFileIntent>()
// Builds the composite map key for a workspace/file pair.
function buildKey(workspaceId: string, fileId: string): string {
  return `${workspaceId}:${fileId}`
}
// Removes every intent older than INTENT_TTL_MS. Map tolerates deletion
// while iterating, so a single pass is safe.
function cleanupStale(): void {
  const now = Date.now()
  for (const [key, intent] of store) {
    if (now - intent.createdAt > INTENT_TTL_MS) {
      store.delete(key)
    }
  }
}
/**
 * Records a pending intent for a file, replacing any previous intent for the
 * same file. Stale entries across the whole store are purged first.
 */
export function storeFileIntent(
  workspaceId: string,
  fileId: string,
  intent: PendingFileIntent
): void {
  cleanupStale()
  store.set(buildKey(workspaceId, fileId), intent)
}
/**
 * Returns and removes the intent for a specific file, or undefined when none
 * exists. Expired entries are purged first so a consumer can never act on
 * context captured more than INTENT_TTL_MS ago.
 */
export function consumeFileIntent(
  workspaceId: string,
  fileId: string
): PendingFileIntent | undefined {
  // Enforce the TTL on read as well as write; otherwise an expired intent
  // would still be handed out if nothing had been stored since it lapsed.
  cleanupStale()
  const key = buildKey(workspaceId, fileId)
  const intent = store.get(key)
  if (intent) {
    store.delete(key)
  }
  return intent
}
/**
 * Returns and removes the most recently created intent in a workspace, or
 * undefined when the workspace has no live intents. Expired entries are
 * purged first (see consumeFileIntent).
 */
export function consumeLatestFileIntent(workspaceId: string): PendingFileIntent | undefined {
  cleanupStale()
  let latest: PendingFileIntent | undefined
  let latestKey: string | undefined
  for (const [key, intent] of store) {
    if (intent.workspaceId === workspaceId && (!latest || intent.createdAt > latest.createdAt)) {
      latest = intent
      latestKey = key
    }
  }
  if (latestKey) {
    store.delete(latestKey)
  }
  return latest
}
/** Drops all intents for a workspace and returns how many were removed. */
export function clearIntentsForWorkspace(workspaceId: string): number {
  let cleared = 0
  for (const [key, intent] of store) {
    if (intent.workspaceId === workspaceId) {
      store.delete(key)
      cleared++
    }
  }
  return cleared
}

View File

@@ -16,9 +16,9 @@ import {
getWorkspaceFile,
getWorkspaceFileByName,
renameWorkspaceFile,
updateWorkspaceFileContent,
uploadWorkspaceFile,
} from '@/lib/uploads/contexts/workspace/workspace-file-manager'
import { storeFileIntent } from './file-intent-store'
const logger = createLogger('WorkspaceFileServerTool')
@@ -251,9 +251,6 @@ export const workspaceFileServerTool: BaseServerTool<WorkspaceFileArgs, Workspac
message: 'append requires target.kind=file_id with target.fileId',
}
}
if (normalized.content === undefined || normalized.content === null) {
return { success: false, message: 'content is required for append operation' }
}
const existingFile = await getWorkspaceFile(workspaceId, target.fileId)
if (!existingFile) {
@@ -266,53 +263,25 @@ export const workspaceFileServerTool: BaseServerTool<WorkspaceFileArgs, Workspac
}
}
const docInfo = getDocumentFormatInfo(existingFile.name)
const currentBuffer = await downloadWsFile(existingFile)
const combined = docInfo.isDoc
? `${currentBuffer.toString('utf-8')}\n{\n${normalized.content}\n}`
: `${currentBuffer.toString('utf-8')}\n${normalized.content}`
if (docInfo.isDoc) {
try {
await docInfo.generator!(combined, workspaceId)
} catch (err) {
const msg = err instanceof Error ? err.message : String(err)
return {
success: false,
message: `${docInfo.formatName} generation failed after append: ${msg}. Fix the content and retry.`,
}
}
}
const combinedBuffer = Buffer.from(combined, 'utf-8')
assertServerToolNotAborted(context)
const appendMime =
docInfo.sourceMime || inferContentType(existingFile.name, normalized.contentType)
await updateWorkspaceFileContent(
storeFileIntent(workspaceId, target.fileId, {
operation: 'append',
fileId: target.fileId,
workspaceId,
existingFile.id,
context.userId,
combinedBuffer,
appendMime
)
logger.info('Workspace file appended via copilot', {
fileId: existingFile.id,
name: existingFile.name,
appendedSize: normalized.content.length,
totalSize: combinedBuffer.length,
userId: context.userId,
fileRecord: existingFile,
existingContent: currentBuffer.toString('utf-8'),
contentType: normalized.contentType,
title: normalized.title,
createdAt: Date.now(),
})
return {
success: true,
message: `Content appended to "${existingFile.name}" (${normalized.content.length} bytes added, ${combinedBuffer.length} bytes total)`,
data: {
id: existingFile.id,
name: existingFile.name,
size: combinedBuffer.length,
contentType: appendMime,
},
message: withMessageId(
`Intent set: append to "${existingFile.name}". Wait for this success result, then call edit_content in the next step with the content to write. Do not call edit_content in parallel.`
),
data: { id: existingFile.id, name: existingFile.name, operation: 'append' },
}
}
@@ -324,9 +293,6 @@ export const workspaceFileServerTool: BaseServerTool<WorkspaceFileArgs, Workspac
message: 'update requires target.kind=file_id with target.fileId',
}
}
if (normalized.content === undefined || normalized.content === null) {
return { success: false, message: 'content is required for update operation' }
}
const fileRecord = await getWorkspaceFile(workspaceId, target.fileId)
if (!fileRecord) {
@@ -339,47 +305,23 @@ export const workspaceFileServerTool: BaseServerTool<WorkspaceFileArgs, Workspac
}
}
const docInfo = getDocumentFormatInfo(fileRecord.name)
if (docInfo.isDoc) {
try {
await docInfo.generator!(normalized.content, workspaceId)
} catch (err) {
const msg = err instanceof Error ? err.message : String(err)
return {
success: false,
message: `${docInfo.formatName} generation failed: ${msg}. Fix the code and retry.`,
}
}
}
const fileBuffer = Buffer.from(normalized.content, 'utf-8')
assertServerToolNotAborted(context)
const updateMime =
docInfo.sourceMime || inferContentType(fileRecord.name, normalized.contentType)
await updateWorkspaceFileContent(
workspaceId,
target.fileId,
context.userId,
fileBuffer,
updateMime
)
logger.info('Workspace file updated via copilot', {
storeFileIntent(workspaceId, target.fileId, {
operation: 'update',
fileId: target.fileId,
name: fileRecord.name,
size: fileBuffer.length,
workspaceId,
userId: context.userId,
fileRecord,
contentType: normalized.contentType,
title: normalized.title,
createdAt: Date.now(),
})
return {
success: true,
message: `File "${fileRecord.name}" updated successfully (${fileBuffer.length} bytes)`,
data: {
id: target.fileId,
name: fileRecord.name,
size: fileBuffer.length,
contentType: updateMime,
},
message: withMessageId(
`Intent set: update "${fileRecord.name}". Wait for this success result, then call edit_content in the next step with the replacement content. Do not call edit_content in parallel.`
),
data: { id: target.fileId, name: fileRecord.name, operation: 'update' },
}
}
@@ -468,120 +410,30 @@ export const workspaceFileServerTool: BaseServerTool<WorkspaceFileArgs, Workspac
}
const currentBuffer = await downloadWsFile(fileRecord)
let content = currentBuffer.toString('utf-8')
const existingContent = currentBuffer.toString('utf-8')
if (normalized.edit.strategy === 'anchored') {
const lines = content.split('\n')
const defaultOccurrence = normalized.edit.occurrence ?? 1
const findAnchorLine = (
anchor: string,
occurrence = defaultOccurrence,
afterIndex = -1
): { index: number; error?: string } => {
const trimmed = anchor.trim()
let count = 0
for (let i = afterIndex + 1; i < lines.length; i++) {
if (lines[i].trim() === trimmed) {
count++
if (count === occurrence) return { index: i }
}
}
if (count === 0) {
return {
index: -1,
error: `Anchor line not found in "${fileRecord.name}": "${anchor.slice(0, 100)}"`,
}
}
return {
index: -1,
error: `Anchor line occurrence ${occurrence} not found (only ${count} match${count > 1 ? 'es' : ''}) in "${fileRecord.name}": "${anchor.slice(0, 100)}"`,
}
}
if (normalized.edit.mode === 'replace_between') {
if (!normalized.edit.before_anchor || !normalized.edit.after_anchor) {
return {
success: false,
message: 'replace_between requires before_anchor and after_anchor',
}
}
const before = findAnchorLine(normalized.edit.before_anchor)
if (before.error) return { success: false, message: `Patch failed: ${before.error}` }
const after = findAnchorLine(
normalized.edit.after_anchor,
defaultOccurrence,
before.index
)
if (after.error) return { success: false, message: `Patch failed: ${after.error}` }
if (after.index <= before.index) {
return {
success: false,
message: 'Patch failed: after_anchor must appear after before_anchor in the file',
}
}
const newLines = [
...lines.slice(0, before.index + 1),
...(normalized.edit.content ?? '').split('\n'),
...lines.slice(after.index),
]
content = newLines.join('\n')
} else if (normalized.edit.mode === 'insert_after') {
if (!normalized.edit.anchor) {
return { success: false, message: 'insert_after requires anchor' }
}
const found = findAnchorLine(normalized.edit.anchor)
if (found.error) return { success: false, message: `Patch failed: ${found.error}` }
const newLines = [
...lines.slice(0, found.index + 1),
...(normalized.edit.content ?? '').split('\n'),
...lines.slice(found.index + 1),
]
content = newLines.join('\n')
} else if (normalized.edit.mode === 'delete_between') {
if (!normalized.edit.start_anchor || !normalized.edit.end_anchor) {
return {
success: false,
message: 'delete_between requires start_anchor and end_anchor',
}
}
const start = findAnchorLine(normalized.edit.start_anchor)
if (start.error) return { success: false, message: `Patch failed: ${start.error}` }
const end = findAnchorLine(normalized.edit.end_anchor, defaultOccurrence, start.index)
if (end.error) return { success: false, message: `Patch failed: ${end.error}` }
if (end.index <= start.index) {
return {
success: false,
message: 'Patch failed: end_anchor must appear after start_anchor in the file',
}
}
const newLines = [...lines.slice(0, start.index), ...lines.slice(end.index)]
content = newLines.join('\n')
} else {
return {
success: false,
message: `Unknown anchored patch mode: "${normalized.edit.mode}"`,
}
}
} else if (normalized.edit.strategy === 'search_replace') {
if (normalized.edit.strategy === 'search_replace') {
const search = normalized.edit.search
const replace = normalized.edit.replace
const firstIdx = content.indexOf(search)
const firstIdx = existingContent.indexOf(search)
if (firstIdx === -1) {
return {
success: false,
message: `Patch failed: search string not found in file "${fileRecord.name}". Search: "${search.slice(0, 100)}${search.length > 100 ? '...' : ''}"`,
}
}
if (!normalized.edit.replaceAll && content.indexOf(search, firstIdx + 1) !== -1) {
if (
!normalized.edit.replaceAll &&
existingContent.indexOf(search, firstIdx + 1) !== -1
) {
return {
success: false,
message: `Patch failed: search string is ambiguous — found at multiple locations in "${fileRecord.name}". Use a longer unique search string or replaceAll.`,
}
}
content = normalized.edit.replaceAll
? content.split(search).join(replace)
: content.slice(0, firstIdx) + replace + content.slice(firstIdx + search.length)
} else if (normalized.edit.strategy === 'anchored') {
if (!normalized.edit.mode) {
return { success: false, message: 'anchored strategy requires mode' }
}
} else {
return {
success: false,
@@ -589,46 +441,41 @@ export const workspaceFileServerTool: BaseServerTool<WorkspaceFileArgs, Workspac
}
}
const docInfo = getDocumentFormatInfo(fileRecord.name)
if (docInfo.isDoc) {
try {
await docInfo.generator!(content, workspaceId)
} catch (err) {
const msg = err instanceof Error ? err.message : String(err)
return {
success: false,
message: `Patched ${docInfo.formatName} code failed to compile: ${msg}. Fix the edit and retry.`,
}
}
}
const patchedBuffer = Buffer.from(content, 'utf-8')
assertServerToolNotAborted(context)
const patchMime = docInfo.sourceMime || inferContentType(fileRecord.name)
await updateWorkspaceFileContent(
workspaceId,
target.fileId,
context.userId,
patchedBuffer,
patchMime
)
logger.info('Workspace file patched via copilot', {
storeFileIntent(workspaceId, target.fileId, {
operation: 'patch',
fileId: target.fileId,
name: fileRecord.name,
strategy: normalized.edit.strategy,
workspaceId,
userId: context.userId,
fileRecord,
existingContent,
edit: {
strategy: normalized.edit.strategy,
...(normalized.edit.strategy === 'search_replace'
? {
search: normalized.edit.search,
replaceAll: normalized.edit.replaceAll,
}
: {
mode: normalized.edit.mode,
occurrence: normalized.edit.occurrence,
before_anchor: normalized.edit.before_anchor,
after_anchor: normalized.edit.after_anchor,
anchor: normalized.edit.anchor,
start_anchor: normalized.edit.start_anchor,
end_anchor: normalized.edit.end_anchor,
}),
},
contentType: normalized.contentType,
title: normalized.title,
createdAt: Date.now(),
})
return {
success: true,
message: `File "${fileRecord.name}" patched successfully (${normalized.edit.strategy} edit applied)`,
data: {
id: target.fileId,
name: fileRecord.name,
size: patchedBuffer.length,
contentType: patchMime,
},
message: withMessageId(
`Intent set: patch "${fileRecord.name}" (${normalized.edit.strategy}). Wait for this success result, then call edit_content in the next step with the replacement/insert content. Do not call edit_content in parallel.`
),
data: { id: target.fileId, name: fileRecord.name, operation: 'patch' },
}
}

View File

@@ -26,6 +26,7 @@ import { searchDocumentationServerTool } from '@/lib/copilot/tools/server/docs/s
import { createFileServerTool } from '@/lib/copilot/tools/server/files/create-file'
import { deleteFileServerTool } from '@/lib/copilot/tools/server/files/delete-file'
import { downloadToWorkspaceFileServerTool } from '@/lib/copilot/tools/server/files/download-to-workspace-file'
import { editContentServerTool } from '@/lib/copilot/tools/server/files/edit-content'
import { renameFileServerTool } from '@/lib/copilot/tools/server/files/rename-file'
import { workspaceFileServerTool } from '@/lib/copilot/tools/server/files/workspace-file'
import { validateGeneratedToolPayload } from '@/lib/copilot/tools/server/generated-schema'
@@ -88,6 +89,7 @@ const WRITE_ACTIONS: Record<string, string[]> = {
[ManageSkill.id]: ['add', 'edit', 'delete'],
[ManageCredential.id]: ['rename', 'delete'],
[WorkspaceFile.id]: ['create', 'append', 'update', 'delete', 'rename', 'patch'],
[editContentServerTool.name]: ['*'],
[CreateFile.id]: ['*'],
[RenameFile.id]: ['*'],
[DeleteFile.id]: ['*'],
@@ -128,6 +130,7 @@ const serverToolRegistry: Record<string, BaseServerTool> = {
[knowledgeBaseServerTool.name]: knowledgeBaseServerTool,
[userTableServerTool.name]: userTableServerTool,
[workspaceFileServerTool.name]: workspaceFileServerTool,
[editContentServerTool.name]: editContentServerTool,
[createFileServerTool.name]: createFileServerTool,
[renameFileServerTool.name]: renameFileServerTool,
[deleteFileServerTool.name]: deleteFileServerTool,