mirror of https://github.com/simstudioai/sim.git
synced 2026-04-28 03:00:29 -04:00

Checkpoint
@@ -189,7 +189,6 @@ export async function POST(req: NextRequest) {
logger.error(`[${tracker.requestId}] Failed to process contexts`, e)
}
}
}
if (Array.isArray(resourceAttachments) && resourceAttachments.length > 0) {
const results = await Promise.allSettled(
resourceAttachments.map(async (r) => {

@@ -254,6 +254,9 @@ function TextEditor({
fetchedContent.endsWith(`\n${streamingContent}`)
? fetchedContent
: `${fetchedContent}\n${streamingContent}`
// #region agent log
fetch('http://127.0.0.1:7774/ingest/b056eec6-a1ee-457f-8556-85f94314ca06',{method:'POST',headers:{'Content-Type':'application/json','X-Debug-Session-Id':'6f10b0'},body:JSON.stringify({sessionId:'6f10b0',location:'file-viewer.tsx:TextEditor-merge',message:'streaming merge',data:{streamingMode,fetchedContentLen:fetchedContent?.length,streamingContentLen:streamingContent.length,nextContentLen:nextContent.length,fetchedUndefined:fetchedContent===undefined,usedReplace:streamingMode==='replace'||fetchedContent===undefined,nextPreview:nextContent.slice(0,200)},timestamp:Date.now(),hypothesisId:'H2-H3'})}).catch(()=>{});
// #endregion
setContent(nextContent)
contentRef.current = nextContent
initializedRef.current = true

@@ -106,7 +106,12 @@ export function ToolCallItem({ toolName, displayTitle, status, streamingArgs }:
if (!titleMatch?.[1]) return null
const opMatch = streamingArgs.match(/"operation"\s*:\s*"(\w+)"/)
const op = opMatch?.[1] ?? ''
const verb = op === 'patch' || op === 'update' ? 'Editing' : 'Writing'
const verb =
op === 'patch' || op === 'update' || op === 'rename'
? 'Editing'
: op === 'delete'
? 'Deleting'
: 'Writing'
const unescaped = titleMatch[1]
.replace(/\\u([0-9a-fA-F]{4})/g, (_, hex: string) =>
String.fromCharCode(Number.parseInt(hex, 16))
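
// A minimal sketch of the operation-to-verb mapping the hunk above adds;
// `verbForOperation` is a hypothetical helper name, for illustration only.
function verbForOperation(op: string): 'Editing' | 'Deleting' | 'Writing' {
  if (op === 'patch' || op === 'update' || op === 'rename') return 'Editing'
  if (op === 'delete') return 'Deleting'
  return 'Writing' // create/append and unknown operations fall through here
}
// verbForOperation('rename') === 'Editing'
// verbForOperation('delete') === 'Deleting'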

@@ -210,7 +210,7 @@ function parseBlocks(blocks: ContentBlock[]): MessageSegment[] {
if (block.type === 'tool_call') {
if (!block.toolCall) continue
const tc = block.toolCall
if (tc.name === ToolSearchToolRegex.id || tc.name === 'set_file_context') continue
if (tc.name === ToolSearchToolRegex.id) continue
if (tc.name === ReadTool.id && isToolResultRead(tc.params)) continue
const isDispatch = SUBAGENT_KEYS.has(tc.name) && !tc.calledBy

@@ -65,7 +65,15 @@ interface ResourceContentProps {
workspaceId: string
resource: MothershipResource
previewMode?: PreviewMode
streamingFile?: { fileName: string; fileId?: string; content: string } | null
streamingFile?: {
toolCallId?: string
fileName: string
fileId?: string
targetKind?: 'new_file' | 'file_id'
operation?: string
edit?: Record<string, unknown>
content: string
} | null
genericResourceData?: GenericResourceData
}

@@ -87,11 +95,10 @@ export const ResourceContent = memo(function ResourceContent({

const streamOperation = useMemo(() => {
if (!streamingFile) return undefined
const m = streamingFile.content.match(/"operation"\s*:\s*"(\w+)"/)
return m?.[1]
return streamingFile.operation
}, [streamingFile])

const isWriteStream = streamOperation === 'write'
const isWriteStream = streamOperation === 'create' || streamOperation === 'append'
const isPatchStream = streamOperation === 'patch'
const isUpdateStream = streamOperation === 'update'
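
// Hedged sketch of the change above: the operation now arrives as a structured
// field on the preview session instead of being regex-sniffed out of the raw
// streaming JSON. The type and helper below are illustrative assumptions.
type PreviewOperation = 'create' | 'append' | 'update' | 'patch'
function streamFlags(operation?: PreviewOperation | string) {
  return {
    isWriteStream: operation === 'create' || operation === 'append',
    isPatchStream: operation === 'patch',
    isUpdateStream: operation === 'update',
  }
}
// streamFlags('append').isWriteStream === true; streamFlags(undefined) is all false.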

@@ -113,24 +120,36 @@ export const ResourceContent = memo(function ResourceContent({
isSourceMime
)

// #region agent log
if (streamingFile) {
fetch('http://127.0.0.1:7774/ingest/b056eec6-a1ee-457f-8556-85f94314ca06',{method:'POST',headers:{'Content-Type':'application/json','X-Debug-Session-Id':'6f10b0'},body:JSON.stringify({sessionId:'6f10b0',location:'resource-content.tsx:streaming-context',message:'streaming state',data:{resourceId:resource.id,resourceType:resource.type,streamOp:streamOperation,isPatch:isPatchStream,isWrite:isWriteStream,isUpdate:isUpdateStream,hasActiveFileRecord:!!activeFileRecord,hasFetchedContent:!!fetchedFileContent,fetchedContentLen:fetchedFileContent?.length,streamingFileContentLen:streamingFile.content.length,streamingFileName:streamingFile.fileName,streamingFileMode:isWriteStream?'append':'replace'},timestamp:Date.now()})}).catch(()=>{});
}
// #endregion
const streamingExtractedContent = useMemo(() => {
if (!streamingFile) return undefined
const raw = streamingFile.content

// Do not guess. Until the operation key has streamed in, we don't know
// whether the payload should append, replace, or splice into the file.
// Rendering early here can show content at the end of the file and then
// "snap" to the right place once the operation/mode becomes known.
if (!streamOperation) return undefined

if (isPatchStream) {
if (!fetchedFileContent) return undefined
return extractPatchPreview(raw, fetchedFileContent)
if (!fetchedFileContent) {
// #region agent log
fetch('http://127.0.0.1:7774/ingest/b056eec6-a1ee-457f-8556-85f94314ca06',{method:'POST',headers:{'Content-Type':'application/json','X-Debug-Session-Id':'6f10b0'},body:JSON.stringify({sessionId:'6f10b0',location:'resource-content.tsx:patch-no-fetched',message:'patch but no fetchedFileContent',data:{resourceId:resource.id,activeFileRecordId:activeFileRecord?.id},timestamp:Date.now(),hypothesisId:'H1'})}).catch(()=>{});
// #endregion
return undefined
}
const patchResult = extractPatchPreview(streamingFile, fetchedFileContent)
// #region agent log
fetch('http://127.0.0.1:7774/ingest/b056eec6-a1ee-457f-8556-85f94314ca06',{method:'POST',headers:{'Content-Type':'application/json','X-Debug-Session-Id':'6f10b0'},body:JSON.stringify({sessionId:'6f10b0',location:'resource-content.tsx:patch-result',message:'extractPatchPreview result',data:{hasPatchResult:!!patchResult,patchResultLen:patchResult?.length,fetchedLen:fetchedFileContent.length,contentPreview:streamingFile.content.slice(0,200),edit:streamingFile.edit},timestamp:Date.now(),hypothesisId:'H4'})}).catch(()=>{});
// #endregion
return patchResult
}

const extracted = extractFileContent(raw)
const extracted = streamingFile.content
if (extracted.length === 0) return undefined

// #region agent log
fetch('http://127.0.0.1:7774/ingest/b056eec6-a1ee-457f-8556-85f94314ca06',{method:'POST',headers:{'Content-Type':'application/json','X-Debug-Session-Id':'6f10b0'},body:JSON.stringify({sessionId:'6f10b0',location:'resource-content.tsx:write-update-content',message:'extracted content for write/update',data:{streamOp:streamOperation,extractedLen:extracted.length,extractedPreview:extracted.slice(0,150)},timestamp:Date.now(),hypothesisId:'H2'})}).catch(()=>{});
// #endregion

if (isUpdateStream) return extracted
if (isWriteStream) return extracted
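
// A small sketch of the render gate implemented above: nothing renders until
// the operation has streamed in, and patches additionally wait for the fetched
// file. (The `Gate` type and function name are invented for illustration.)
type Gate = 'hidden' | 'patch-preview' | 'raw-content'
function previewGate(op: string | undefined, fetched: string | undefined): Gate {
  if (!op) return 'hidden' // avoids the "snap" described in the comment above
  if (op === 'patch') return fetched ? 'patch-preview' : 'hidden'
  return 'raw-content' // create / append / update show the streamed content
}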

@@ -160,6 +179,15 @@ export const ResourceContent = memo(function ResourceContent({
const streamingFileMode: 'append' | 'replace' =
isWriteStream ? 'append' : 'replace'

// For existing file resources (not streaming-file), only pass streaming
// content for patch operations where the preview splices new content into
// the displayed file. Update operations re-stream the entire file from
// scratch which causes visual duplication of already-visible content.
const embeddedStreamingContent =
resource.id !== 'streaming-file' && isUpdateStream
? undefined
: streamingExtractedContent

if (streamingFile && resource.id === 'streaming-file') {
return (
<div className='flex h-full flex-col overflow-hidden'>

@@ -192,7 +220,7 @@ export const ResourceContent = memo(function ResourceContent({
workspaceId={workspaceId}
fileId={resource.id}
previewMode={previewMode}
streamingContent={streamingExtractedContent}
streamingContent={embeddedStreamingContent}
streamingMode={streamingFileMode}
/>
)

@@ -587,65 +615,6 @@ function EmbeddedFolder({ workspaceId, folderId }: EmbeddedFolderProps) {
)
}

function extractFileContent(raw: string): string {
const marker = '"content":'
const idx = raw.indexOf(marker)
if (idx === -1) return ''
const rest = raw.slice(idx + marker.length).trimStart()
if (!rest.startsWith('"')) return rest

// Walk the JSON string value to find the unescaped closing quote.
// While streaming, the closing quote may not have arrived yet — in that
// case we treat everything received so far as the content (no trim).
let end = -1
for (let i = 1; i < rest.length; i++) {
if (rest[i] === '\\') {
i++ // skip escaped character
continue
}
if (rest[i] === '"') {
end = i
break
}
}

const inner = end === -1 ? rest.slice(1) : rest.slice(1, end)
return inner
.replace(/\\n/g, '\n')
.replace(/\\t/g, '\t')
.replace(/\\r/g, '\r')
.replace(/\\"/g, '"')
.replace(/\\u([0-9a-fA-F]{4})/g, (_, hex) => String.fromCharCode(Number.parseInt(hex, 16)))
.replace(/\\\\/g, '\\')
}

function extractJsonString(raw: string, key: string): string | undefined {
const pattern = new RegExp(`"${key}"\\s*:\\s*"`)
const m = pattern.exec(raw)
if (!m) return undefined
const start = m.index + m[0].length
let end = -1
for (let i = start; i < raw.length; i++) {
if (raw[i] === '\\') {
i++
continue
}
if (raw[i] === '"') {
end = i
break
}
}
if (end === -1) return undefined
return raw
.slice(start, end)
.replace(/\\n/g, '\n')
.replace(/\\t/g, '\t')
.replace(/\\r/g, '\r')
.replace(/\\"/g, '"')
.replace(/\\u([0-9a-fA-F]{4})/g, (_, hex) => String.fromCharCode(Number.parseInt(hex, 16)))
.replace(/\\\\/g, '\\')
}
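
// Usage sketch for the two removed helpers above, which scanned partially
// streamed JSON tool args; the structured preview events replace them.
// The sample input is invented for illustration.
const partialArgs = '{"operation":"update","content":"line1\\nline2'
// extractFileContent tolerates the still-missing closing quote mid-stream:
//   extractFileContent(partialArgs) === 'line1\nline2'
// extractJsonString needs the closing quote, so the completed key works:
//   extractJsonString(partialArgs, 'operation') === 'update'
// while the still-open "content" value yields undefined:
//   extractJsonString(partialArgs, 'content') === undefined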

function findAnchorIndex(lines: string[], anchor: string, occurrence = 1, afterIndex = -1): number {
const trimmed = anchor.trim()
let count = 0

@@ -658,24 +627,46 @@ function findAnchorIndex(lines: string[], anchor: string, occurrence = 1, afterIndex = -1): number {
return -1
}

function extractPatchPreview(raw: string, existingContent: string): string | undefined {
const mode = extractJsonString(raw, 'mode')
function extractPatchPreview(
streamingFile: {
content: string
edit?: Record<string, unknown>
},
existingContent: string
): string | undefined {
const edit = streamingFile.edit ?? {}
const strategy = typeof edit.strategy === 'string' ? edit.strategy : undefined
const lines = existingContent.split('\n')
const occurrence =
typeof edit.occurrence === 'number' && Number.isFinite(edit.occurrence)
? edit.occurrence
: 1

if (strategy === 'search_replace') {
const search = typeof edit.search === 'string' ? edit.search : ''
if (!search) return undefined
const replace = streamingFile.content
if ((edit.replaceAll as boolean | undefined) === true) {
return existingContent.split(search).join(replace)
}
const firstIdx = existingContent.indexOf(search)
if (firstIdx === -1) return undefined
return existingContent.slice(0, firstIdx) + replace + existingContent.slice(firstIdx + search.length)
}
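
// Illustrative call into the search_replace branch above (values invented):
const searchReplacePreview = extractPatchPreview(
  { content: 'const b = 2', edit: { strategy: 'search_replace', search: 'const a = 1' } },
  'const a = 1\nconsole.log(a)'
)
// searchReplacePreview === 'const b = 2\nconsole.log(a)'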

const mode = typeof edit.mode === 'string' ? edit.mode : undefined
if (!mode) return undefined

const lines = existingContent.split('\n')
const occurrenceMatch = raw.match(/"occurrence"\s*:\s*(\d+)/)
const occurrence = occurrenceMatch ? Number.parseInt(occurrenceMatch[1], 10) : 1

if (mode === 'replace_between') {
const beforeAnchor = extractJsonString(raw, 'before_anchor')
const afterAnchor = extractJsonString(raw, 'after_anchor')
const beforeAnchor = typeof edit.before_anchor === 'string' ? edit.before_anchor : undefined
const afterAnchor = typeof edit.after_anchor === 'string' ? edit.after_anchor : undefined
if (!beforeAnchor || !afterAnchor) return undefined

const beforeIdx = findAnchorIndex(lines, beforeAnchor, occurrence)
const afterIdx = findAnchorIndex(lines, afterAnchor, occurrence, beforeIdx)
if (beforeIdx === -1 || afterIdx === -1 || afterIdx <= beforeIdx) return undefined

const newContent = extractFileContent(raw)
const newContent = streamingFile.content
const spliced = [
...lines.slice(0, beforeIdx + 1),
...(newContent.length > 0 ? newContent.split('\n') : []),

@@ -685,13 +676,13 @@ function extractPatchPreview(raw: string, existingContent: string): string | undefined {
}

if (mode === 'insert_after') {
const anchor = extractJsonString(raw, 'anchor')
const anchor = typeof edit.anchor === 'string' ? edit.anchor : undefined
if (!anchor) return undefined

const anchorIdx = findAnchorIndex(lines, anchor, occurrence)
if (anchorIdx === -1) return undefined

const newContent = extractFileContent(raw)
const newContent = streamingFile.content
const spliced = [
...lines.slice(0, anchorIdx + 1),
...(newContent.length > 0 ? newContent.split('\n') : []),

@@ -701,8 +692,8 @@ function extractPatchPreview(raw: string, existingContent: string): string | undefined {
}

if (mode === 'delete_between') {
const startAnchor = extractJsonString(raw, 'start_anchor')
const endAnchor = extractJsonString(raw, 'end_anchor')
const startAnchor = typeof edit.start_anchor === 'string' ? edit.start_anchor : undefined
const endAnchor = typeof edit.end_anchor === 'string' ? edit.end_anchor : undefined
if (!startAnchor || !endAnchor) return undefined

const startIdx = findAnchorIndex(lines, startAnchor, occurrence)

@@ -19,30 +19,29 @@ const PREVIEW_CYCLE: Record<PreviewMode, PreviewMode> = {
preview: 'editor',
} as const

function streamFileBasename(name: string): string {
const n = name.replace(/\\/g, '/').trim()
const parts = n.split('/').filter(Boolean)
return parts.length ? parts[parts.length - 1]! : n
}

function fileTitlesEquivalent(streamFileName: string, resourceTitle: string): boolean {
return streamFileBasename(streamFileName) === streamFileBasename(resourceTitle)
}
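
// Quick check of the basename matching above (paths invented): Windows
// separators are normalized before comparing the final path segment.
console.assert(fileTitlesEquivalent('docs\\notes.md', 'workspace/docs/notes.md'))
console.assert(!fileTitlesEquivalent('notes.md', 'other.md'))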

/**
* Whether the active resource should show the in-progress file stream.
* The synthetic `streaming-file` tab always shows it; a real file tab shows it when
* the streamed `fileName` matches that resource (so users who stay on the open file see live text).
* The synthetic `streaming-file` tab always shows it; a real file tab only shows it
* when the streamed fileId matches that exact resource.
*/
function shouldShowStreamingFilePanel(
streamingFile: { fileName: string; fileId?: string; content: string } | null | undefined,
streamingFile:
| {
toolCallId?: string
fileName: string
fileId?: string
targetKind?: 'new_file' | 'file_id'
operation?: string
edit?: Record<string, unknown>
content: string
}
| null
| undefined,
active: MothershipResource | null
): boolean {
if (!streamingFile || !active) return false
if (active.id === 'streaming-file') return true
if (active.type !== 'file') return false
const fn = streamingFile.fileName.trim()
if (fn && fileTitlesEquivalent(fn, active.title)) return true
if (active.id && streamingFile.fileId === active.id) return true
return false
}
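
// Illustrative call (the resource literal assumes the MothershipResource shape):
const show = shouldShowStreamingFilePanel(
  { toolCallId: 'tc_1', fileName: 'notes.md', fileId: 'wf_123', content: '' },
  { id: 'wf_123', type: 'file', title: 'notes.md' } as MothershipResource
)
// show === true: the streamed fileId (and matching title) point at the open file tab.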

@@ -59,7 +58,17 @@ interface MothershipViewProps {
onCollapse: () => void
isCollapsed: boolean
className?: string
streamingFile?: { fileName: string; fileId?: string; content: string } | null
streamingFile?:
| {
toolCallId?: string
fileName: string
fileId?: string
targetKind?: 'new_file' | 'file_id'
operation?: string
edit?: Record<string, unknown>
content: string
}
| null
genericResourceData?: GenericResourceData
}

@@ -20,7 +20,6 @@ import {
MothershipStreamV1ToolPhase,
} from '@/lib/copilot/generated/mothership-stream-v1'
import {
CreateFile,
CreateFolder,
DeleteFolder,
DeleteWorkflow,

@@ -33,7 +32,6 @@ import {
Read as ReadTool,
Redeploy,
RenameWorkflow,
SetFileContext,
ToolSearchToolRegex,
WorkspaceFile,
} from '@/lib/copilot/generated/tool-catalog-v1'

@@ -105,7 +103,7 @@ export interface UseChatReturn {
removeFromQueue: (id: string) => void
sendNow: (id: string) => Promise<void>
editQueuedMessage: (id: string) => QueuedMessage | undefined
streamingFile: { fileName: string; content: string } | null
streamingFile: StreamingFilePreview | null
genericResourceData: GenericResourceData | null
}

@@ -140,6 +138,16 @@ type StreamToolUI = {
clientExecutable?: boolean
}

type StreamingFilePreview = {
toolCallId: string
fileName: string
fileId?: string
targetKind?: 'new_file' | 'file_id'
operation?: string
edit?: Record<string, unknown>
content: string
}
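
// Example of what one preview session accumulates by the end of a patch
// stream; every field value here is invented for illustration.
const sampleSession: StreamingFilePreview = {
  toolCallId: 'tc_42',
  fileName: 'notes.md',
  fileId: 'wf_123',
  targetKind: 'file_id',
  operation: 'patch',
  edit: { strategy: 'search_replace', search: 'old line' },
  content: 'replacement text streamed so far',
}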

type StreamBatchEvent = {
eventId: number
streamId: string

@@ -341,14 +349,11 @@ export function useChat(
const activeResourceIdRef = useRef(effectiveActiveResourceId)
activeResourceIdRef.current = effectiveActiveResourceId

const [streamingFile, setStreamingFile] = useState<{
fileName: string
fileId?: string
content: string
} | null>(null)
const [streamingFile, setStreamingFile] = useState<StreamingFilePreview | null>(null)
const streamingFileRef = useRef(streamingFile)
streamingFileRef.current = streamingFile
const activeFileContextRef = useRef<{ fileId?: string; fileName?: string } | null>(null)
const filePreviewSessionsRef = useRef<Map<string, StreamingFilePreview>>(new Map())
const activeFilePreviewToolCallIdRef = useRef<string | null>(null)

const [messageQueue, setMessageQueue] = useState<QueuedMessage[]>([])
const messageQueueRef = useRef<QueuedMessage[]>([])

@@ -511,6 +516,8 @@ export function useChat(
setActiveResourceId(null)
setStreamingFile(null)
streamingFileRef.current = null
filePreviewSessionsRef.current.clear()
activeFilePreviewToolCallIdRef.current = null
setMessageQueue([])
}, [initialChatId, queryClient])

@@ -532,6 +539,8 @@ export function useChat(
setActiveResourceId(null)
setStreamingFile(null)
streamingFileRef.current = null
filePreviewSessionsRef.current.clear()
activeFilePreviewToolCallIdRef.current = null
setMessageQueue([])
}, [isHomePage])

@@ -860,6 +869,8 @@ export function useChat(
}
case MothershipStreamV1EventType.tool: {
const payload = getPayloadData(parsed)
const previewPhase =
typeof payload.previewPhase === 'string' ? payload.previewPhase : undefined
const phase =
typeof payload.phase === 'string' ? payload.phase : MothershipStreamV1ToolPhase.call
const id =

@@ -870,60 +881,43 @@
: undefined
if (!id) break

if (phase === MothershipStreamV1ToolPhase.args_delta) {
const delta =
typeof payload.argumentsDelta === 'string' ? payload.argumentsDelta : ''
if (!delta) break
if (previewPhase) {
const sessions = filePreviewSessionsRef.current
const prevSession = sessions.get(id) ?? {
toolCallId: id,
fileName: '',
content: '',
}

const toolName =
typeof payload.toolName === 'string'
? payload.toolName
: (blocks[toolMap.get(id) ?? -1]?.toolCall?.name ?? '')
const streamWorkspaceFile = toolName === WorkspaceFile.id

if (streamWorkspaceFile) {
let prev = streamingFileRef.current
if (!prev || (!prev.fileName && !prev.fileId)) {
const ctx = activeFileContextRef.current
prev = {
fileName: ctx?.fileName || prev?.fileName || '',
fileId: ctx?.fileId || prev?.fileId,
content: prev?.content || '',
}
streamingFileRef.current = prev
setStreamingFile(prev)
if (previewPhase === 'file_preview_start') {
const nextSession: StreamingFilePreview = {
...prevSession,
toolCallId: id,
}
const raw = prev.content + delta
let fileName = prev.fileName
if (!fileName) {
fileName = activeFileContextRef.current?.fileName || ''
if (!fileName) {
const match = raw.match(/"fileName"\s*:\s*"([^"]+)"/)
if (match) {
fileName = match[1]
}
}
}
const fileIdMatch = raw.match(/"fileId"\s*:\s*"([^"]+)"/)
const matchedResourceId =
fileIdMatch?.[1] || prev.fileId || activeFileContextRef.current?.fileId
const existingFileMatch =
matchedResourceId &&
resourcesRef.current.some(
(resource) => resource.type === 'file' && resource.id === matchedResourceId
)
sessions.set(id, nextSession)
activeFilePreviewToolCallIdRef.current = id
setStreamingFile(nextSession)
break
}

if (existingFileMatch) {
const hadStreamingResource = resourcesRef.current.some(
(resource) => resource.id === 'streaming-file'
)
if (hadStreamingResource) {
setResources((rs) => rs.filter((resource) => resource.id !== 'streaming-file'))
setActiveResourceId(matchedResourceId)
} else if (activeResourceIdRef.current === null) {
setActiveResourceId(matchedResourceId)
}
} else if (fileName || fileIdMatch || activeSubagent === FileTool.id) {
if (previewPhase === 'file_preview_target') {
const target = asPayloadRecord(payload.target)
const nextSession: StreamingFilePreview = {
...prevSession,
operation: typeof payload.operation === 'string' ? payload.operation : prevSession.operation,
targetKind:
target?.kind === 'new_file' || target?.kind === 'file_id'
? (target.kind as 'new_file' | 'file_id')
: prevSession.targetKind,
fileId:
typeof target?.fileId === 'string' ? target.fileId : prevSession.fileId,
fileName:
typeof target?.fileName === 'string' ? target.fileName : prevSession.fileName,
}
sessions.set(id, nextSession)
activeFilePreviewToolCallIdRef.current = id

if (nextSession.targetKind === 'new_file') {
const hasStreamingResource = resourcesRef.current.some(
(resource) => resource.id === 'streaming-file'
)

@@ -931,16 +925,55 @@ export function useChat(
addResource({
type: 'file',
id: 'streaming-file',
title: fileName || 'Writing file...',
title: nextSession.fileName || 'Writing file...',
})
setActiveResourceId('streaming-file')
}
} else if (nextSession.fileId) {
setResources((rs) => rs.filter((resource) => resource.id !== 'streaming-file'))
if (
activeResourceIdRef.current === null ||
activeResourceIdRef.current === 'streaming-file'
) {
setActiveResourceId(nextSession.fileId)
}
}
const next = { fileName, fileId: matchedResourceId, content: raw }
streamingFileRef.current = next
setStreamingFile(next)

setStreamingFile(nextSession)
break
}

if (previewPhase === 'file_preview_edit_meta') {
const nextSession: StreamingFilePreview = {
...prevSession,
edit: asPayloadRecord(payload.edit),
}
sessions.set(id, nextSession)
activeFilePreviewToolCallIdRef.current = id
setStreamingFile(nextSession)
break
}

if (previewPhase === 'file_preview_content_delta') {
const delta =
typeof payload.delta === 'string' ? payload.delta : ''
if (!delta) break
const nextSession: StreamingFilePreview = {
...prevSession,
content: (prevSession.content ?? '') + delta,
}
sessions.set(id, nextSession)
activeFilePreviewToolCallIdRef.current = id
setStreamingFile(nextSession)
break
}
}
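
// Hedged sketch of the phase order one workspace_file preview emits, as
// implied by the handler above (the exact wire contract is not in this diff):
//   1. file_preview_start         -> session created, keyed by tool call id
//   2. file_preview_target        -> operation plus target { kind, fileId?, fileName? }
//   3. file_preview_edit_meta     -> structured edit metadata for patch strategies
//   4. file_preview_content_delta -> repeated; content accumulates on the session
const PREVIEW_PHASE_ORDER = [
  'file_preview_start',
  'file_preview_target',
  'file_preview_edit_meta',
  'file_preview_content_delta',
] as const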

if (phase === MothershipStreamV1ToolPhase.args_delta) {
const delta =
typeof payload.argumentsDelta === 'string' ? payload.argumentsDelta : ''
if (!delta) break

const idx = toolMap.get(id)
if (idx !== undefined && blocks[idx].toolCall) {
const tc = blocks[idx].toolCall!

@@ -949,7 +982,12 @@
if (tc.name === WorkspaceFile.id) {
const opMatch = tc.streamingArgs.match(/"operation"\s*:\s*"(\w+)"/)
const op = opMatch?.[1] ?? ''
const verb = op === 'patch' || op === 'update' ? 'Editing' : 'Writing'
const verb =
op === 'patch' || op === 'update' || op === 'rename'
? 'Editing'
: op === 'delete'
? 'Deleting'
: 'Writing'
const titleMatch = tc.streamingArgs.match(/"title"\s*:\s*"([^"]*)"/)
if (titleMatch?.[1]) {
const unescaped = titleMatch[1]

@@ -969,21 +1007,6 @@

if (phase === MothershipStreamV1ToolPhase.result) {
const resultToolName = typeof payload.toolName === 'string' ? payload.toolName : ''
if (
(resultToolName === CreateFile.id || resultToolName === SetFileContext.id) &&
(payload.success === true ||
payload.status === MothershipStreamV1ToolOutcome.success)
) {
const resultOutput = asPayloadRecord(payload.result)
const ctxFileId =
typeof resultOutput?.fileId === 'string' ? resultOutput.fileId : undefined
const ctxFileName =
typeof resultOutput?.fileName === 'string' ? resultOutput.fileName : undefined
if (ctxFileId || ctxFileName) {
activeFileContextRef.current = { fileId: ctxFileId, fileName: ctxFileName }
}
}

const idx = toolMap.get(id)
if (idx === undefined || !blocks[idx].toolCall) {
break

@@ -1073,24 +1096,17 @@

onToolResultRef.current?.(tc.name, tc.status === 'success', tc.result?.output)

if (
(tc.name === CreateFile.id || tc.name === SetFileContext.id) &&
tc.status === 'success'
) {
const output = tc.result?.output as Record<string, unknown> | undefined
const fileId = typeof output?.fileId === 'string' ? output.fileId : undefined
const fileName =
typeof output?.fileName === 'string' ? output.fileName : undefined
if (fileId || fileName) {
activeFileContextRef.current = { fileId, fileName }
}
}

if (isWorkflowToolName(tc.name)) {
clientExecutionStartedRef.current.delete(id)
}

if (tc.name === WorkspaceFile.id) {
filePreviewSessionsRef.current.delete(id)
if (activeFilePreviewToolCallIdRef.current === id) {
activeFilePreviewToolCallIdRef.current = null
setStreamingFile(null)
streamingFileRef.current = null
}
const fileResource = extractedResources.find((r) => r.type === 'file')
if (fileResource) {
setResources((rs) => {

@@ -1116,7 +1132,7 @@
? payload.name
: 'unknown'
const isPartial = payload.partial === true
if (name === ToolSearchToolRegex.id || name === SetFileContext.id) {
if (name === ToolSearchToolRegex.id) {
break
}
const ui = getToolUI(payload)

@@ -1129,13 +1145,19 @@

if (name === WorkspaceFile.id) {
const operation = typeof args?.operation === 'string' ? args.operation : ''
const verb = operation === 'patch' || operation === 'update' ? 'Editing' : 'Writing'
const innerArgs = args ? asPayloadRecord(args.args) : undefined
const chunkTitle = innerArgs?.title as string | undefined
const verb =
operation === 'patch' || operation === 'update' || operation === 'rename'
? 'Editing'
: operation === 'delete'
? 'Deleting'
: 'Writing'
const chunkTitle = args?.title as string | undefined
const target = args ? asPayloadRecord(args.target) : undefined
const targetFileName = target?.fileName as string | undefined
if (chunkTitle) {
displayTitle = `${verb} ${chunkTitle}`
} else if (activeFileContextRef.current?.fileName) {
displayTitle = `${verb} ${activeFileContextRef.current.fileName}`
} else if (targetFileName) {
displayTitle = `${verb} ${targetFileName}`
}
}

@@ -1300,7 +1322,11 @@
blocks.push({ type: 'subagent', content: name })
}
if (name === FileTool.id) {
const emptyFile = { fileName: '', content: '' }
const emptyFile: StreamingFilePreview = {
toolCallId: parentToolCallId || 'file-preview',
fileName: '',
content: '',
}
streamingFileRef.current = emptyFile
setStreamingFile(emptyFile)
}

@@ -1950,6 +1976,8 @@
invalidateChatQueries()
setStreamingFile(null)
streamingFileRef.current = null
filePreviewSessionsRef.current.clear()
activeFilePreviewToolCallIdRef.current = null
setResources((rs) => rs.filter((resource) => resource.id !== 'streaming-file'))

const execState = useExecutionStore.getState()

@@ -5,6 +5,7 @@ import {
MothershipStreamV1ToolOutcome,
} from '@/lib/copilot/generated/mothership-stream-v1'
import {
type ChatContextKind,
type ChatMessage,
type ChatMessageAttachment,
type ChatMessageContext,

@@ -83,12 +84,14 @@ function toDisplayContexts(
): ChatMessageContext[] | undefined {
if (!contexts || contexts.length === 0) return undefined
return contexts.map((c) => ({
kind: c.kind,
kind: c.kind as ChatContextKind,
label: c.label,
...(c.workflowId ? { workflowId: c.workflowId } : {}),
...(c.knowledgeId ? { knowledgeId: c.knowledgeId } : {}),
...(c.tableId ? { tableId: c.tableId } : {}),
...(c.fileId ? { fileId: c.fileId } : {}),
...(c.folderId ? { folderId: c.folderId } : {}),
...(c.chatId ? { chatId: c.chatId } : {}),
}))
}

@@ -55,6 +55,8 @@ export interface PersistedMessageContext {
knowledgeId?: string
tableId?: string
fileId?: string
folderId?: string
chatId?: string
}

export interface PersistedMessage {

@@ -199,6 +201,8 @@ export function buildPersistedUserMessage(params: UserMessageParams): PersistedMessage
...(c.knowledgeId ? { knowledgeId: c.knowledgeId } : {}),
...(c.tableId ? { tableId: c.tableId } : {}),
...(c.fileId ? { fileId: c.fileId } : {}),
...(c.folderId ? { folderId: c.folderId } : {}),
...(c.chatId ? { chatId: c.chatId } : {}),
}))
}

@@ -462,6 +466,8 @@ export function normalizeMessage(raw: Record<string, unknown>): PersistedMessage
...(c.knowledgeId ? { knowledgeId: c.knowledgeId } : {}),
...(c.tableId ? { tableId: c.tableId } : {}),
...(c.fileId ? { fileId: c.fileId } : {}),
...(c.folderId ? { folderId: c.folderId } : {}),
...(c.chatId ? { chatId: c.chatId } : {}),
}))
}

@@ -6,12 +6,14 @@ export interface ToolCatalogEntry {
clientExecutable?: boolean;
executor: "client" | "go" | "sim" | "subagent";
hidden?: boolean;
id: "agent" | "auth" | "check_deployment_status" | "complete_job" | "context_write" | "crawl_website" | "create_file" | "create_folder" | "create_job" | "create_workflow" | "create_workspace_mcp_server" | "debug" | "delete_folder" | "delete_workflow" | "delete_workspace_mcp_server" | "deploy" | "deploy_api" | "deploy_chat" | "deploy_mcp" | "download_to_workspace_file" | "edit_workflow" | "file" | "function_execute" | "generate_api_key" | "generate_image" | "generate_visualization" | "get_block_outputs" | "get_block_upstream_references" | "get_deployed_workflow_state" | "get_deployment_version" | "get_execution_summary" | "get_job_logs" | "get_page_contents" | "get_platform_actions" | "get_workflow_data" | "get_workflow_logs" | "glob" | "grep" | "job" | "knowledge" | "knowledge_base" | "list_folders" | "list_user_workspaces" | "list_workspace_mcp_servers" | "manage_credential" | "manage_custom_tool" | "manage_job" | "manage_mcp_tool" | "manage_skill" | "materialize_file" | "move_folder" | "move_workflow" | "oauth_get_auth_link" | "oauth_request_access" | "open_resource" | "read" | "redeploy" | "rename_workflow" | "research" | "respond" | "revert_to_version" | "run" | "run_block" | "run_from_block" | "run_workflow" | "run_workflow_until_block" | "scrape_page" | "search_documentation" | "search_library_docs" | "search_online" | "search_patterns" | "set_environment_variables" | "set_file_context" | "set_global_workflow_variables" | "superagent" | "table" | "tool_search_tool_regex" | "update_job_history" | "update_workspace_mcp_server" | "user_memory" | "user_table" | "workflow" | "workspace_file";
id: "agent" | "auth" | "check_deployment_status" | "complete_job" | "context_write" | "crawl_website" | "create_folder" | "create_job" | "create_workflow" | "create_workspace_mcp_server" | "debug" | "delete_folder" | "delete_workflow" | "delete_workspace_mcp_server" | "deploy" | "deploy_api" | "deploy_chat" | "deploy_mcp" | "download_to_workspace_file" | "edit_workflow" | "file" | "function_execute" | "generate_api_key" | "generate_image" | "generate_visualization" | "get_block_outputs" | "get_block_upstream_references" | "get_deployed_workflow_state" | "get_deployment_version" | "get_execution_summary" | "get_job_logs" | "get_page_contents" | "get_platform_actions" | "get_workflow_data" | "get_workflow_logs" | "glob" | "grep" | "job" | "knowledge" | "knowledge_base" | "list_folders" | "list_user_workspaces" | "list_workspace_mcp_servers" | "manage_credential" | "manage_custom_tool" | "manage_job" | "manage_mcp_tool" | "manage_skill" | "materialize_file" | "move_folder" | "move_workflow" | "oauth_get_auth_link" | "oauth_request_access" | "open_resource" | "read" | "redeploy" | "rename_workflow" | "research" | "respond" | "revert_to_version" | "run" | "run_block" | "run_from_block" | "run_workflow" | "run_workflow_until_block" | "scrape_page" | "search_documentation" | "search_library_docs" | "search_online" | "search_patterns" | "set_environment_variables" | "set_global_workflow_variables" | "superagent" | "table" | "tool_search_tool_regex" | "update_job_history" | "update_workspace_mcp_server" | "user_memory" | "user_table" | "workflow" | "workspace_file";
internal?: boolean;
mode: "async" | "sync";
name: "agent" | "auth" | "check_deployment_status" | "complete_job" | "context_write" | "crawl_website" | "create_file" | "create_folder" | "create_job" | "create_workflow" | "create_workspace_mcp_server" | "debug" | "delete_folder" | "delete_workflow" | "delete_workspace_mcp_server" | "deploy" | "deploy_api" | "deploy_chat" | "deploy_mcp" | "download_to_workspace_file" | "edit_workflow" | "file" | "function_execute" | "generate_api_key" | "generate_image" | "generate_visualization" | "get_block_outputs" | "get_block_upstream_references" | "get_deployed_workflow_state" | "get_deployment_version" | "get_execution_summary" | "get_job_logs" | "get_page_contents" | "get_platform_actions" | "get_workflow_data" | "get_workflow_logs" | "glob" | "grep" | "job" | "knowledge" | "knowledge_base" | "list_folders" | "list_user_workspaces" | "list_workspace_mcp_servers" | "manage_credential" | "manage_custom_tool" | "manage_job" | "manage_mcp_tool" | "manage_skill" | "materialize_file" | "move_folder" | "move_workflow" | "oauth_get_auth_link" | "oauth_request_access" | "open_resource" | "read" | "redeploy" | "rename_workflow" | "research" | "respond" | "revert_to_version" | "run" | "run_block" | "run_from_block" | "run_workflow" | "run_workflow_until_block" | "scrape_page" | "search_documentation" | "search_library_docs" | "search_online" | "search_patterns" | "set_environment_variables" | "set_file_context" | "set_global_workflow_variables" | "superagent" | "table" | "tool_search_tool_regex" | "update_job_history" | "update_workspace_mcp_server" | "user_memory" | "user_table" | "workflow" | "workspace_file";
name: "agent" | "auth" | "check_deployment_status" | "complete_job" | "context_write" | "crawl_website" | "create_folder" | "create_job" | "create_workflow" | "create_workspace_mcp_server" | "debug" | "delete_folder" | "delete_workflow" | "delete_workspace_mcp_server" | "deploy" | "deploy_api" | "deploy_chat" | "deploy_mcp" | "download_to_workspace_file" | "edit_workflow" | "file" | "function_execute" | "generate_api_key" | "generate_image" | "generate_visualization" | "get_block_outputs" | "get_block_upstream_references" | "get_deployed_workflow_state" | "get_deployment_version" | "get_execution_summary" | "get_job_logs" | "get_page_contents" | "get_platform_actions" | "get_workflow_data" | "get_workflow_logs" | "glob" | "grep" | "job" | "knowledge" | "knowledge_base" | "list_folders" | "list_user_workspaces" | "list_workspace_mcp_servers" | "manage_credential" | "manage_custom_tool" | "manage_job" | "manage_mcp_tool" | "manage_skill" | "materialize_file" | "move_folder" | "move_workflow" | "oauth_get_auth_link" | "oauth_request_access" | "open_resource" | "read" | "redeploy" | "rename_workflow" | "research" | "respond" | "revert_to_version" | "run" | "run_block" | "run_from_block" | "run_workflow" | "run_workflow_until_block" | "scrape_page" | "search_documentation" | "search_library_docs" | "search_online" | "search_patterns" | "set_environment_variables" | "set_global_workflow_variables" | "superagent" | "table" | "tool_search_tool_regex" | "update_job_history" | "update_workspace_mcp_server" | "user_memory" | "user_table" | "workflow" | "workspace_file";
parameters: unknown;
requiredPermission?: "admin" | "write";
requiresConfirmation?: boolean;
resultSchema?: unknown;
subagentId?: "agent" | "auth" | "debug" | "deploy" | "file" | "job" | "knowledge" | "research" | "run" | "superagent" | "table" | "workflow";
}
|
||||
|
||||
@@ -20,6 +22,7 @@ export const Agent: ToolCatalogEntry = {
|
||||
name: "agent",
|
||||
executor: "subagent",
|
||||
mode: "async",
|
||||
parameters: {"properties":{"request":{"description":"What tool/skill/MCP action is needed.","type":"string"}},"required":["request"],"type":"object"},
|
||||
subagentId: "agent",
|
||||
internal: true,
|
||||
requiredPermission: "write",
|
||||
@@ -30,6 +33,7 @@ export const Auth: ToolCatalogEntry = {
|
||||
name: "auth",
|
||||
executor: "subagent",
|
||||
mode: "async",
|
||||
parameters: {"properties":{"request":{"description":"What authentication/credential action is needed.","type":"string"}},"required":["request"],"type":"object"},
|
||||
subagentId: "auth",
|
||||
internal: true,
|
||||
};
|
||||
@@ -39,6 +43,7 @@ export const CheckDeploymentStatus: ToolCatalogEntry = {
|
||||
name: "check_deployment_status",
|
||||
executor: "sim",
|
||||
mode: "async",
|
||||
parameters: {"type":"object","properties":{"workflowId":{"type":"string","description":"Workflow ID to check (defaults to current workflow)"}}},
|
||||
};
|
||||
|
||||
export const CompleteJob: ToolCatalogEntry = {
|
||||
@@ -46,6 +51,7 @@ export const CompleteJob: ToolCatalogEntry = {
|
||||
name: "complete_job",
|
||||
executor: "sim",
|
||||
mode: "async",
|
||||
parameters: {"type":"object","properties":{"jobId":{"type":"string","description":"The ID of the job to mark as completed."}},"required":["jobId"]},
|
||||
};
|
||||
|
||||
export const ContextWrite: ToolCatalogEntry = {
|
||||
@@ -53,6 +59,7 @@ export const ContextWrite: ToolCatalogEntry = {
|
||||
name: "context_write",
|
||||
executor: "go",
|
||||
mode: "sync",
|
||||
parameters: {"type":"object","properties":{"content":{"type":"string","description":"Full content to write to the file (replaces existing content)"},"file_path":{"type":"string","description":"Path of the file to write (e.g. 'SESSION.md')"}},"required":["file_path","content"]},
|
||||
};
|
||||
|
||||
export const CrawlWebsite: ToolCatalogEntry = {
|
||||
@@ -60,13 +67,7 @@ export const CrawlWebsite: ToolCatalogEntry = {
|
||||
name: "crawl_website",
|
||||
executor: "go",
|
||||
mode: "sync",
|
||||
};
|
||||
|
||||
export const CreateFile: ToolCatalogEntry = {
|
||||
id: "create_file",
|
||||
name: "create_file",
|
||||
executor: "sim",
|
||||
mode: "async",
|
||||
parameters: {"type":"object","properties":{"exclude_paths":{"type":"array","description":"Skip URLs matching these patterns","items":{"type":"string"}},"include_paths":{"type":"array","description":"Only crawl URLs matching these patterns","items":{"type":"string"}},"limit":{"type":"number","description":"Maximum pages to crawl (default 10, max 50)"},"max_depth":{"type":"number","description":"How deep to follow links (default 2)"},"url":{"type":"string","description":"Starting URL to crawl from"}},"required":["url"]},
|
||||
};
|
||||
|
||||
export const CreateFolder: ToolCatalogEntry = {
|
||||
@@ -74,6 +75,7 @@ export const CreateFolder: ToolCatalogEntry = {
|
||||
name: "create_folder",
|
||||
executor: "sim",
|
||||
mode: "async",
|
||||
parameters: {"type":"object","properties":{"name":{"type":"string","description":"Folder name."},"parentId":{"type":"string","description":"Optional parent folder ID."},"workspaceId":{"type":"string","description":"Optional workspace ID."}},"required":["name"]},
|
||||
requiredPermission: "write",
|
||||
};
|
||||
|
||||
@@ -82,6 +84,7 @@ export const CreateJob: ToolCatalogEntry = {
|
||||
name: "create_job",
|
||||
executor: "sim",
|
||||
mode: "async",
|
||||
parameters: {"type":"object","properties":{"cron":{"type":"string","description":"Cron expression for recurring jobs (e.g., '*/5 * * * *' for every 5 minutes, '0 9 * * *' for daily at 9 AM). Omit for one-time jobs."},"lifecycle":{"type":"string","description":"'persistent' (default) or 'until_complete'. Until_complete jobs stop when complete_job is called after the success condition is met.","enum":["persistent","until_complete"]},"maxRuns":{"type":"integer","description":"Maximum number of executions before the job auto-completes. Safety limit to prevent runaway polling."},"prompt":{"type":"string","description":"The prompt to execute when the job fires. This is sent to the Mothership as a user message."},"successCondition":{"type":"string","description":"What must happen for the job to be considered complete. Used with until_complete lifecycle (e.g., 'John has replied to the partnership email')."},"time":{"type":"string","description":"ISO 8601 datetime for one-time execution or as the start time for a cron schedule (e.g., '2026-03-06T09:00:00'). Include timezone offset or use the timezone parameter."},"timezone":{"type":"string","description":"IANA timezone for the schedule (e.g., 'America/New_York', 'Europe/London'). Defaults to UTC."},"title":{"type":"string","description":"A short, descriptive title for the job (e.g., 'Email Poller', 'Daily Report'). Used as the display name."}},"required":["title","prompt"]},
|
||||
};
|
||||
|
||||
export const CreateWorkflow: ToolCatalogEntry = {
|
||||
@@ -89,6 +92,7 @@ export const CreateWorkflow: ToolCatalogEntry = {
|
||||
name: "create_workflow",
|
||||
executor: "sim",
|
||||
mode: "async",
|
||||
parameters: {"type":"object","properties":{"description":{"type":"string","description":"Optional workflow description."},"folderId":{"type":"string","description":"Optional folder ID."},"name":{"type":"string","description":"Workflow name."},"workspaceId":{"type":"string","description":"Optional workspace ID."}},"required":["name"]},
|
||||
requiredPermission: "write",
|
||||
};
|
||||
|
||||
@@ -97,6 +101,7 @@ export const CreateWorkspaceMcpServer: ToolCatalogEntry = {
|
||||
name: "create_workspace_mcp_server",
|
||||
executor: "sim",
|
||||
mode: "async",
|
||||
parameters: {"type":"object","properties":{"description":{"type":"string","description":"Optional description for the server"},"name":{"type":"string","description":"Required: server name"},"workspaceId":{"type":"string","description":"Workspace ID (defaults to current workspace)"}},"required":["name"]},
|
||||
requiresConfirmation: true,
|
||||
requiredPermission: "admin",
|
||||
};
|
||||
@@ -106,6 +111,7 @@ export const Debug: ToolCatalogEntry = {
|
||||
name: "debug",
|
||||
executor: "subagent",
|
||||
mode: "async",
|
||||
parameters: {"properties":{"context":{"description":"Pre-gathered context: workflow state JSON, block schemas, error logs. The debug agent will skip re-reading anything included here.","type":"string"},"request":{"description":"What to debug. Include error messages, block IDs, and any context about the failure.","type":"string"}},"required":["request"],"type":"object"},
|
||||
subagentId: "debug",
|
||||
internal: true,
|
||||
};
|
||||
@@ -115,6 +121,7 @@ export const DeleteFolder: ToolCatalogEntry = {
|
||||
name: "delete_folder",
|
||||
executor: "sim",
|
||||
mode: "async",
|
||||
parameters: {"type":"object","properties":{"folderId":{"type":"string","description":"The folder ID to delete."}},"required":["folderId"]},
|
||||
requiresConfirmation: true,
|
||||
requiredPermission: "write",
|
||||
};
|
||||
@@ -124,6 +131,7 @@ export const DeleteWorkflow: ToolCatalogEntry = {
|
||||
name: "delete_workflow",
|
||||
executor: "sim",
|
||||
mode: "async",
|
||||
parameters: {"type":"object","properties":{"workflowId":{"type":"string","description":"The workflow ID to delete."}},"required":["workflowId"]},
|
||||
requiresConfirmation: true,
|
||||
requiredPermission: "write",
|
||||
};
|
||||
@@ -133,6 +141,7 @@ export const DeleteWorkspaceMcpServer: ToolCatalogEntry = {
|
||||
name: "delete_workspace_mcp_server",
|
||||
executor: "sim",
|
||||
mode: "async",
|
||||
parameters: {"type":"object","properties":{"serverId":{"type":"string","description":"Required: the MCP server ID to delete"}},"required":["serverId"]},
|
||||
requiresConfirmation: true,
|
||||
requiredPermission: "admin",
|
||||
};
|
||||
@@ -142,6 +151,7 @@ export const Deploy: ToolCatalogEntry = {
|
||||
name: "deploy",
|
||||
executor: "subagent",
|
||||
mode: "async",
|
||||
parameters: {"properties":{"request":{"description":"Detailed deployment instructions. Include deployment type (api/chat) and ALL user-specified options: identifier, title, description, authType, password, allowedEmails, welcomeMessage, outputConfigs (block outputs to display).","type":"string"}},"required":["request"],"type":"object"},
|
||||
subagentId: "deploy",
|
||||
internal: true,
|
||||
};
|
||||
@@ -151,6 +161,7 @@ export const DeployApi: ToolCatalogEntry = {
|
||||
name: "deploy_api",
|
||||
executor: "sim",
|
||||
mode: "async",
|
||||
parameters: {"type":"object","properties":{"action":{"type":"string","description":"Whether to deploy or undeploy the API endpoint","enum":["deploy","undeploy"],"default":"deploy"},"workflowId":{"type":"string","description":"Workflow ID to deploy (required in workspace context)"}}},
|
||||
requiresConfirmation: true,
|
||||
requiredPermission: "admin",
|
||||
};
|
||||
@@ -160,6 +171,7 @@ export const DeployChat: ToolCatalogEntry = {
|
||||
name: "deploy_chat",
|
||||
executor: "sim",
|
||||
mode: "async",
|
||||
parameters: {"type":"object","properties":{"action":{"type":"string","description":"Whether to deploy or undeploy the chat interface","enum":["deploy","undeploy"],"default":"deploy"},"allowedEmails":{"type":"array","description":"List of allowed emails/domains for email or SSO auth","items":{"type":"string"}},"authType":{"type":"string","description":"Authentication type: public, password, email, or sso","enum":["public","password","email","sso"],"default":"public"},"description":{"type":"string","description":"Optional description for the chat"},"identifier":{"type":"string","description":"URL slug for the chat (lowercase letters, numbers, hyphens only)"},"outputConfigs":{"type":"array","description":"Output configurations specifying which block outputs to display in chat","items":{"type":"object","properties":{"blockId":{"type":"string","description":"The block UUID"},"path":{"type":"string","description":"The output path (e.g. 'response', 'response.content')"}},"required":["blockId","path"]}},"password":{"type":"string","description":"Password for password-protected chats"},"title":{"type":"string","description":"Display title for the chat interface"},"welcomeMessage":{"type":"string","description":"Welcome message shown to users"},"workflowId":{"type":"string","description":"Workflow ID to deploy (required in workspace context)"}}},
|
||||
requiresConfirmation: true,
|
||||
requiredPermission: "admin",
|
||||
};
|
||||
@@ -169,6 +181,7 @@ export const DeployMcp: ToolCatalogEntry = {
|
||||
name: "deploy_mcp",
|
||||
executor: "sim",
|
||||
mode: "async",
|
||||
parameters: {"type":"object","properties":{"parameterDescriptions":{"type":"array","description":"Array of parameter descriptions for the tool","items":{"type":"object","properties":{"description":{"type":"string","description":"Parameter description"},"name":{"type":"string","description":"Parameter name"}},"required":["name","description"]}},"serverId":{"type":"string","description":"Required: server ID from list_workspace_mcp_servers"},"toolDescription":{"type":"string","description":"Description for the MCP tool"},"toolName":{"type":"string","description":"Name for the MCP tool (defaults to workflow name)"},"workflowId":{"type":"string","description":"Workflow ID (defaults to active workflow)"}},"required":["serverId"]},
|
||||
requiresConfirmation: true,
|
||||
requiredPermission: "admin",
|
||||
};
|
||||
@@ -178,6 +191,7 @@ export const DownloadToWorkspaceFile: ToolCatalogEntry = {
|
||||
name: "download_to_workspace_file",
|
||||
executor: "sim",
|
||||
mode: "async",
|
||||
parameters: {"type":"object","properties":{"fileName":{"type":"string","description":"Optional workspace file name to save as. If omitted, the name is inferred from the response or URL."},"url":{"type":"string","description":"Direct URL of the file to download, such as an image CDN URL ending in .png or .jpg"}},"required":["url"]},
|
||||
requiredPermission: "write",
|
||||
};
|
||||
|
||||
@@ -186,6 +200,7 @@ export const EditWorkflow: ToolCatalogEntry = {
|
||||
name: "edit_workflow",
|
||||
executor: "sim",
|
||||
mode: "async",
|
||||
parameters: {"type":"object","properties":{"operations":{"type":"array","description":"Array of edit operations","items":{"type":"object","properties":{"block_id":{"type":"string","description":"Block ID for the operation. For add operations, this will be the desired ID for the new block."},"operation_type":{"type":"string","description":"Type of operation to perform","enum":["add","edit","delete","insert_into_subflow","extract_from_subflow"]},"params":{"type":"object","description":"Parameters for the operation. \nFor edit: {\"inputs\": {\"temperature\": 0.5}} NOT {\"subBlocks\": {\"temperature\": {\"value\": 0.5}}}\nFor add: {\"type\": \"agent\", \"name\": \"My Agent\", \"inputs\": {\"model\": \"gpt-4o\"}}\nFor delete: {} (empty object)"}},"required":["operation_type","block_id","params"]}},"workflowId":{"type":"string","description":"Optional workflow ID to edit. If not provided, uses the current workflow in context."}},"required":["operations"]},
|
||||
requiredPermission: "write",
|
||||
};
|
||||
|
||||
@@ -194,6 +209,7 @@ export const File: ToolCatalogEntry = {
|
||||
name: "file",
|
||||
executor: "subagent",
|
||||
mode: "async",
|
||||
parameters: {"type":"object"},
|
||||
subagentId: "file",
|
||||
internal: true,
|
||||
};
|
||||
@@ -203,6 +219,7 @@ export const FunctionExecute: ToolCatalogEntry = {
|
||||
name: "function_execute",
|
||||
executor: "sim",
|
||||
mode: "async",
|
||||
parameters: {"type":"object","properties":{"code":{"type":"string","description":"Code to execute. For JS: raw statements auto-wrapped in async context. For Python: full script. For shell: bash script with access to pre-installed CLI tools and workspace env vars as $VAR_NAME."},"inputFiles":{"type":"array","description":"Canonical workspace file IDs to mount in the sandbox. Discover IDs via read(\"files/{name}/meta.json\") or glob(\"files/by-id/*/meta.json\"). Mounted path: /home/user/files/{fileId}/{originalName}. Example: [\"wf_123\"]","items":{"type":"string"}},"inputTables":{"type":"array","description":"Table IDs to mount as CSV files in the sandbox. Each table appears at /home/user/tables/{tableId}.csv with a header row. Example: [\"tbl_abc123\"]","items":{"type":"string"}},"language":{"type":"string","description":"Execution language.","enum":["javascript","python","shell"]},"outputFormat":{"type":"string","description":"Format for outputPath. Determines how the code result is serialized. If omitted, inferred from outputPath file extension.","enum":["json","csv","txt","md","html"]},"outputMimeType":{"type":"string","description":"MIME type for outputSandboxPath export. Required for binary files: image/png, image/jpeg, application/pdf, etc. Omit for text files."},"outputPath":{"type":"string","description":"Pipe output directly to a NEW workspace file instead of returning in context. ALWAYS use this instead of a separate workspace_file write call. Use a flat path like \"files/result.json\" — nested paths are not supported."},"outputSandboxPath":{"type":"string","description":"Path to a file created inside the sandbox that should be exported to the workspace. Use together with outputPath."},"outputTable":{"type":"string","description":"Table ID to overwrite with the code's return value. Code MUST return an array of objects where keys match column names. All existing rows are replaced. Example: \"tbl_abc123\""}},"required":["code"]},
|
||||
requiredPermission: "write",
|
||||
};
|
||||
|
||||
@@ -211,6 +228,7 @@ export const GenerateApiKey: ToolCatalogEntry = {
|
||||
name: "generate_api_key",
|
||||
executor: "sim",
|
||||
mode: "async",
|
||||
parameters: {"type":"object","properties":{"name":{"type":"string","description":"A descriptive name for the API key (e.g., 'production-key', 'dev-testing')."},"workspaceId":{"type":"string","description":"Optional workspace ID. Defaults to user's default workspace."}},"required":["name"]},
|
||||
requiresConfirmation: true,
|
||||
requiredPermission: "admin",
|
||||
};
|
||||
@@ -220,6 +238,7 @@ export const GenerateImage: ToolCatalogEntry = {
|
||||
name: "generate_image",
|
||||
executor: "sim",
|
||||
mode: "async",
|
||||
parameters: {"type":"object","properties":{"aspectRatio":{"type":"string","description":"Aspect ratio for the generated image.","enum":["1:1","16:9","9:16","4:3","3:4"]},"fileName":{"type":"string","description":"Output file name. Defaults to \"generated-image.png\". Workspace files are flat, so pass a plain file name, not a nested path."},"overwriteFileId":{"type":"string","description":"If provided, overwrites the existing workspace file with this ID instead of creating a new file. Use this when the user asks to update or redo a previously generated image. The file ID is returned by previous generate_image or generate_visualization calls (fileId field), or can be found via read(\"files/by-id/{fileId}/meta.json\")."},"prompt":{"type":"string","description":"Detailed text description of the image to generate, or editing instructions when used with editFileId."},"referenceFileIds":{"type":"array","description":"File IDs of workspace images to include as context for the generation. All images are sent alongside the prompt. Use for: editing a single image (1 file), compositing multiple images together (2+ files), style transfer, face swapping, etc. Order matters — list the primary/base image first.","items":{"type":"string"}}},"required":["prompt"]},
requiredPermission: "write",
};

@@ -228,6 +247,7 @@ export const GenerateVisualization: ToolCatalogEntry = {
name: "generate_visualization",
executor: "sim",
mode: "async",
parameters: {"type":"object","properties":{"code":{"type":"string","description":"Python code that generates a visualization using matplotlib. MUST call plt.savefig('/home/user/output.png', dpi=150, bbox_inches='tight') to produce output."},"fileName":{"type":"string","description":"Output file name. Defaults to \"chart.png\". Workspace files are flat, so pass a plain file name, not a nested path."},"inputFiles":{"type":"array","description":"Canonical workspace file IDs to mount in the sandbox. Discover IDs via read(\"files/{name}/meta.json\") or glob(\"files/by-id/*/meta.json\"). Mounted path: /home/user/files/{fileId}/{originalName}.","items":{"type":"string"}},"inputTables":{"type":"array","description":"Table IDs to mount as CSV files in the sandbox. Each table appears at /home/user/tables/{tableId}.csv with a header row. Read with pandas: pd.read_csv('/home/user/tables/tbl_xxx.csv')","items":{"type":"string"}},"overwriteFileId":{"type":"string","description":"If provided, overwrites the existing workspace file with this ID instead of creating a new file. Use this when the user asks to update or redo a previously generated chart. The file ID is returned by previous generate_visualization or generate_image calls (fileId field), or can be found via read(\"files/by-id/{fileId}/meta.json\")."}},"required":["code"]},
requiredPermission: "write",
};

@@ -236,6 +256,7 @@ export const GetBlockOutputs: ToolCatalogEntry = {
name: "get_block_outputs",
executor: "sim",
mode: "async",
parameters: {"type":"object","properties":{"blockIds":{"type":"array","description":"Optional array of block UUIDs. If provided, returns outputs only for those blocks. If not provided, returns outputs for all blocks in the workflow.","items":{"type":"string"}},"workflowId":{"type":"string","description":"Optional workflow ID. If not provided, uses the current workflow in context."}}},
};

export const GetBlockUpstreamReferences: ToolCatalogEntry = {
@@ -243,6 +264,7 @@ export const GetBlockUpstreamReferences: ToolCatalogEntry = {
name: "get_block_upstream_references",
executor: "sim",
mode: "async",
parameters: {"type":"object","properties":{"blockIds":{"type":"array","description":"Required array of block UUIDs (minimum 1). Returns what each block can reference based on its position in the workflow graph.","items":{"type":"string"}},"workflowId":{"type":"string","description":"Optional workflow ID. If not provided, uses the current workflow in context."}},"required":["blockIds"]},
};

export const GetDeployedWorkflowState: ToolCatalogEntry = {
@@ -250,6 +272,7 @@ export const GetDeployedWorkflowState: ToolCatalogEntry = {
name: "get_deployed_workflow_state",
executor: "sim",
mode: "async",
parameters: {"type":"object","properties":{"workflowId":{"type":"string","description":"Optional workflow ID. If not provided, uses the current workflow in context."}}},
};

export const GetDeploymentVersion: ToolCatalogEntry = {
@@ -257,6 +280,7 @@ export const GetDeploymentVersion: ToolCatalogEntry = {
name: "get_deployment_version",
executor: "sim",
mode: "async",
parameters: {"type":"object","properties":{"version":{"type":"number","description":"The deployment version number"},"workflowId":{"type":"string","description":"The workflow ID"}},"required":["workflowId","version"]},
};

export const GetExecutionSummary: ToolCatalogEntry = {
@@ -264,6 +288,7 @@ export const GetExecutionSummary: ToolCatalogEntry = {
name: "get_execution_summary",
executor: "sim",
mode: "async",
parameters: {"type":"object","properties":{"limit":{"type":"number","description":"Max number of executions to return (default: 10, max: 20)."},"status":{"type":"string","description":"Filter by status: 'success', 'error', or 'all' (default: 'all').","enum":["success","error","all"]},"workflowId":{"type":"string","description":"Optional workflow ID. If omitted, returns executions across all workflows in the workspace."},"workspaceId":{"type":"string","description":"Workspace ID to scope executions to."}},"required":["workspaceId"]},
};

export const GetJobLogs: ToolCatalogEntry = {
@@ -271,6 +296,7 @@ export const GetJobLogs: ToolCatalogEntry = {
name: "get_job_logs",
executor: "sim",
mode: "async",
parameters: {"type":"object","properties":{"executionId":{"type":"string","description":"Optional execution ID for a specific run."},"includeDetails":{"type":"boolean","description":"Include tool calls, outputs, and cost details."},"jobId":{"type":"string","description":"The job (schedule) ID to get logs for."},"limit":{"type":"number","description":"Max number of entries (default: 3, max: 5)"}},"required":["jobId"]},
};

export const GetPageContents: ToolCatalogEntry = {
@@ -278,6 +304,7 @@ export const GetPageContents: ToolCatalogEntry = {
name: "get_page_contents",
executor: "go",
mode: "sync",
parameters: {"type":"object","properties":{"include_highlights":{"type":"boolean","description":"Include key highlights (default false)"},"include_summary":{"type":"boolean","description":"Include AI-generated summary (default false)"},"include_text":{"type":"boolean","description":"Include full page text (default true)"},"urls":{"type":"array","description":"URLs to get content from (max 10)","items":{"type":"string"}}},"required":["urls"]},
};

export const GetPlatformActions: ToolCatalogEntry = {
@@ -285,6 +312,7 @@ export const GetPlatformActions: ToolCatalogEntry = {
name: "get_platform_actions",
executor: "sim",
mode: "async",
parameters: {"type":"object","properties":{}},
};

export const GetWorkflowData: ToolCatalogEntry = {
@@ -292,6 +320,7 @@ export const GetWorkflowData: ToolCatalogEntry = {
name: "get_workflow_data",
executor: "sim",
mode: "async",
parameters: {"type":"object","properties":{"data_type":{"type":"string","description":"The type of workflow data to retrieve","enum":["global_variables","custom_tools","mcp_tools","files"]},"workflowId":{"type":"string","description":"Optional workflow ID. If not provided, uses the current workflow in context."}},"required":["data_type"]},
};

export const GetWorkflowLogs: ToolCatalogEntry = {
@@ -299,6 +328,7 @@ export const GetWorkflowLogs: ToolCatalogEntry = {
name: "get_workflow_logs",
executor: "sim",
mode: "async",
parameters: {"type":"object","properties":{"executionId":{"type":"string","description":"Optional execution ID to get logs for a specific execution. Use with get_execution_summary to find execution IDs first."},"includeDetails":{"type":"boolean","description":"Include detailed info"},"limit":{"type":"number","description":"Max number of entries (hard limit: 3)"},"workflowId":{"type":"string","description":"Optional workflow ID. If not provided, uses the current workflow in context."}}},
};

export const Glob: ToolCatalogEntry = {
@@ -306,6 +336,7 @@ export const Glob: ToolCatalogEntry = {
name: "glob",
executor: "sim",
mode: "async",
parameters: {"type":"object","properties":{"pattern":{"type":"string","description":"Glob pattern to match file paths. Supports * (any segment) and ** (any depth)."}},"required":["pattern"]},
};

export const Grep: ToolCatalogEntry = {
@@ -313,6 +344,7 @@ export const Grep: ToolCatalogEntry = {
name: "grep",
executor: "sim",
mode: "async",
parameters: {"type":"object","properties":{"context":{"type":"number","description":"Number of lines to show before and after each match. Only applies to output_mode 'content'."},"ignoreCase":{"type":"boolean","description":"Case insensitive search (default false)."},"lineNumbers":{"type":"boolean","description":"Include line numbers in output (default true). Only applies to output_mode 'content'."},"maxResults":{"type":"number","description":"Maximum number of matches to return (default 50)."},"output_mode":{"type":"string","description":"Output mode: 'content' shows matching lines (default), 'files_with_matches' shows only file paths, 'count' shows match counts per file.","enum":["content","files_with_matches","count"]},"path":{"type":"string","description":"Optional path prefix to scope the search (e.g. 'workflows/', 'environment/', 'internal/', 'components/blocks/')."},"pattern":{"type":"string","description":"Regex pattern to search for in file contents."}},"required":["pattern"]},
};

export const Job: ToolCatalogEntry = {
@@ -320,6 +352,7 @@ export const Job: ToolCatalogEntry = {
name: "job",
executor: "subagent",
mode: "async",
parameters: {"properties":{"request":{"description":"What job action is needed.","type":"string"}},"required":["request"],"type":"object"},
subagentId: "job",
internal: true,
};
@@ -329,6 +362,7 @@ export const Knowledge: ToolCatalogEntry = {
name: "knowledge",
executor: "subagent",
mode: "async",
parameters: {"properties":{"request":{"description":"What knowledge base action is needed.","type":"string"}},"required":["request"],"type":"object"},
subagentId: "knowledge",
internal: true,
};
@@ -338,6 +372,8 @@ export const KnowledgeBase: ToolCatalogEntry = {
name: "knowledge_base",
executor: "sim",
mode: "async",
parameters: {"type":"object","properties":{"args":{"type":"object","description":"Arguments for the operation","properties":{"apiKey":{"type":"string","description":"API key for API-key-based connectors (required when connector auth mode is apiKey)"},"chunkingConfig":{"type":"object","description":"Chunking configuration (optional for 'create')","properties":{"maxSize":{"type":"number","description":"Maximum chunk size (100-4000, default: 1024)","default":1024},"minSize":{"type":"number","description":"Minimum chunk size (1-2000, default: 1)","default":1},"overlap":{"type":"number","description":"Overlap between chunks (0-500, default: 200)","default":200}}},"connectorId":{"type":"string","description":"Connector ID (required for update_connector, delete_connector, sync_connector)"},"connectorStatus":{"type":"string","description":"Connector status (optional for update_connector)","enum":["active","paused"]},"connectorType":{"type":"string","description":"Connector type from registry, e.g. 'confluence', 'google_drive', 'notion' (required for add_connector). Read knowledgebases/connectors/{type}.json for the config schema."},"credentialId":{"type":"string","description":"OAuth credential ID from environment/credentials.json (required for OAuth connectors)"},"description":{"type":"string","description":"Description of the knowledge base (optional for 'create')"},"disabledTagIds":{"type":"array","description":"Tag definition IDs to opt out of (optional for add_connector). See tagDefinitions in the connector schema."},"documentId":{"type":"string","description":"Document ID (required for delete_document, update_document)"},"enabled":{"type":"boolean","description":"Enable/disable a document (optional for update_document)"},"fileId":{"type":"string","description":"Canonical workspace file ID to add as a document (preferred for add_file). Discover via read(\"files/{name}/meta.json\") or glob(\"files/by-id/*/meta.json\")."},"filePath":{"type":"string","description":"Legacy workspace file reference for add_file. Prefer fileId."},"filename":{"type":"string","description":"New filename for a document (optional for update_document)"},"knowledgeBaseId":{"type":"string","description":"Knowledge base ID (required for get, query, add_file, list_tags, create_tag, get_tag_usage)"},"name":{"type":"string","description":"Name of the knowledge base (required for 'create')"},"query":{"type":"string","description":"Search query text (required for 'query')"},"sourceConfig":{"type":"object","description":"Connector-specific configuration matching the configFields in knowledgebases/connectors/{type}.json"},"syncIntervalMinutes":{"type":"number","description":"Sync interval in minutes: 60 (hourly), 360 (6h), 1440 (daily), 10080 (weekly), 0 (manual only). 
Default: 1440","default":1440},"tagDefinitionId":{"type":"string","description":"Tag definition ID (required for update_tag, delete_tag)"},"tagDisplayName":{"type":"string","description":"Display name for the tag (required for create_tag, optional for update_tag)"},"tagFieldType":{"type":"string","description":"Field type: text, number, date, boolean (optional for create_tag, defaults to text)","enum":["text","number","date","boolean"]},"topK":{"type":"number","description":"Number of results to return (1-50, default: 5)","default":5},"workspaceId":{"type":"string","description":"Workspace ID (required for 'create', optional filter for 'list')"}}},"operation":{"type":"string","description":"The operation to perform","enum":["create","get","query","add_file","update","delete","delete_document","update_document","list_tags","create_tag","update_tag","delete_tag","get_tag_usage","add_connector","update_connector","delete_connector","sync_connector"]}},"required":["operation","args"]},
resultSchema: {"type":"object","properties":{"data":{"type":"object","description":"Operation-specific result payload."},"message":{"type":"string","description":"Human-readable outcome summary."},"success":{"type":"boolean","description":"Whether the operation succeeded."}},"required":["success","message"]},
requiresConfirmation: true,
};

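// Illustrative knowledge_base call (shape inferred from the schema above; the KB ID is made up):
// { "operation": "query", "args": { "knowledgeBaseId": "kb_123", "query": "refund policy", "topK": 5 } }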
@@ -346,6 +382,7 @@ export const ListFolders: ToolCatalogEntry = {
name: "list_folders",
executor: "sim",
mode: "async",
parameters: {"type":"object","properties":{"workspaceId":{"type":"string","description":"Optional workspace ID to list folders for."}}},
};

export const ListUserWorkspaces: ToolCatalogEntry = {
@@ -353,6 +390,7 @@ export const ListUserWorkspaces: ToolCatalogEntry = {
name: "list_user_workspaces",
executor: "sim",
mode: "async",
parameters: {"type":"object","properties":{}},
};

export const ListWorkspaceMcpServers: ToolCatalogEntry = {
@@ -360,6 +398,7 @@ export const ListWorkspaceMcpServers: ToolCatalogEntry = {
name: "list_workspace_mcp_servers",
executor: "sim",
mode: "async",
parameters: {"type":"object","properties":{"workspaceId":{"type":"string","description":"Workspace ID (defaults to current workspace)"}}},
};

export const ManageCredential: ToolCatalogEntry = {
@@ -367,6 +406,7 @@ export const ManageCredential: ToolCatalogEntry = {
name: "manage_credential",
executor: "sim",
mode: "async",
parameters: {"type":"object","properties":{"credentialId":{"type":"string","description":"The credential ID (from environment/credentials.json)"},"displayName":{"type":"string","description":"New display name (required for rename)"},"operation":{"type":"string","description":"The operation to perform","enum":["rename","delete"]}},"required":["operation","credentialId"]},
requiresConfirmation: true,
requiredPermission: "admin",
};
@@ -376,6 +416,7 @@ export const ManageCustomTool: ToolCatalogEntry = {
name: "manage_custom_tool",
executor: "sim",
mode: "async",
parameters: {"type":"object","properties":{"code":{"type":"string","description":"The JavaScript code that executes when the tool is called (required for add). Parameters from schema are available as variables. Function body only - no signature or wrapping braces."},"operation":{"type":"string","description":"The operation to perform: 'add', 'edit', 'list', or 'delete'","enum":["add","edit","delete","list"]},"schema":{"type":"object","description":"The tool schema in OpenAI function calling format (required for add).","properties":{"function":{"type":"object","description":"The function definition","properties":{"description":{"type":"string","description":"What the function does"},"name":{"type":"string","description":"The function name (camelCase)"},"parameters":{"type":"object","description":"The function parameters schema","properties":{"properties":{"type":"object","description":"Parameter definitions as key-value pairs"},"required":{"type":"array","description":"Array of required parameter names","items":{"type":"string"}},"type":{"type":"string","description":"Must be 'object'"}},"required":["type","properties"]}},"required":["name","parameters"]},"type":{"type":"string","description":"Must be 'function'"}},"required":["type","function"]},"toolId":{"type":"string","description":"The ID of the custom tool (required for edit/delete). Must be the exact toolId from the get_workflow_data custom tool response - do not guess or construct it. DO NOT PROVIDE THE TOOL ID IF THE OPERATION IS 'ADD'."}},"required":["operation"]},
requiresConfirmation: true,
};

@@ -384,6 +425,7 @@ export const ManageJob: ToolCatalogEntry = {
name: "manage_job",
executor: "sim",
mode: "async",
parameters: {"type":"object","properties":{"args":{"type":"object","description":"Operation-specific arguments. For create: {title, prompt, cron?, time?, timezone?, lifecycle?, successCondition?, maxRuns?}. For get/delete: {jobId}. For update: {jobId, title?, prompt?, cron?, timezone?, status?, lifecycle?, successCondition?, maxRuns?}. For list: no args needed.","properties":{"cron":{"type":"string","description":"Cron expression for recurring jobs"},"jobId":{"type":"string","description":"Job ID (required for get, update, delete)"},"lifecycle":{"type":"string","description":"'persistent' (default) or 'until_complete'. Until_complete jobs stop when complete_job is called."},"maxRuns":{"type":"integer","description":"Max executions before auto-completing. Safety limit."},"prompt":{"type":"string","description":"The prompt to execute when the job fires"},"status":{"type":"string","description":"Job status: active, paused"},"successCondition":{"type":"string","description":"What must happen for the job to be considered complete (until_complete lifecycle)."},"time":{"type":"string","description":"ISO 8601 datetime for one-time jobs or cron start time"},"timezone":{"type":"string","description":"IANA timezone (e.g. America/New_York). Defaults to UTC."},"title":{"type":"string","description":"Short descriptive title for the job (e.g. 'Email Poller')"}}},"operation":{"type":"string","description":"The operation to perform: create, list, get, update, delete","enum":["create","list","get","update","delete"]}},"required":["operation"]},
};

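// Illustrative manage_job call (shape inferred from the schema above):
// { "operation": "create", "args": { "title": "Email Poller",
//   "prompt": "Check the inbox and summarize new mail",
//   "cron": "0 9 * * *", "timezone": "America/New_York" } }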
export const ManageMcpTool: ToolCatalogEntry = {
@@ -391,6 +433,7 @@ export const ManageMcpTool: ToolCatalogEntry = {
name: "manage_mcp_tool",
executor: "sim",
mode: "async",
parameters: {"type":"object","properties":{"config":{"type":"object","description":"Required for add and edit. The MCP server configuration.","properties":{"enabled":{"type":"boolean","description":"Whether the server is enabled (default: true)"},"headers":{"type":"object","description":"Optional HTTP headers to send with requests (key-value pairs)"},"name":{"type":"string","description":"Display name for the MCP server"},"timeout":{"type":"number","description":"Request timeout in milliseconds (default: 30000)"},"transport":{"type":"string","description":"Transport protocol: 'streamable-http' or 'sse'","enum":["streamable-http","sse"],"default":"streamable-http"},"url":{"type":"string","description":"The MCP server endpoint URL (required for add)"}}},"operation":{"type":"string","description":"The operation to perform: 'add', 'edit', 'list', or 'delete'","enum":["add","edit","delete","list"]},"serverId":{"type":"string","description":"Required for edit and delete. The database ID of the MCP server. DO NOT PROVIDE if operation is 'add' or 'list'."}},"required":["operation"]},
requiresConfirmation: true,
requiredPermission: "write",
};
@@ -400,6 +443,7 @@ export const ManageSkill: ToolCatalogEntry = {
name: "manage_skill",
executor: "sim",
mode: "async",
parameters: {"type":"object","properties":{"content":{"type":"string","description":"Markdown instructions for the skill. Required for add, optional for edit."},"description":{"type":"string","description":"Short description of the skill. Required for add, optional for edit."},"name":{"type":"string","description":"Skill name in kebab-case (e.g. 'my-skill'). Required for add, optional for edit."},"operation":{"type":"string","description":"The operation to perform: 'add', 'edit', 'list', or 'delete'","enum":["add","edit","delete","list"]},"skillId":{"type":"string","description":"The ID of the skill (required for edit/delete). Must be the exact ID from the VFS or list. DO NOT PROVIDE if operation is 'add' or 'list'."}},"required":["operation"]},
requiresConfirmation: true,
requiredPermission: "write",
};
@@ -409,6 +453,7 @@ export const MaterializeFile: ToolCatalogEntry = {
name: "materialize_file",
executor: "sim",
mode: "async",
parameters: {"type":"object","properties":{"fileName":{"type":"string","description":"The name of the uploaded file to materialize (e.g. \"report.pdf\")"},"knowledgeBaseId":{"type":"string","description":"ID of an existing knowledge base to add the file to (only used with operation \"knowledge_base\"). If omitted, a new KB is created."},"operation":{"type":"string","description":"What to do with the file. \"save\" promotes it to files/. \"import\" imports a workflow JSON. \"table\" converts CSV/TSV/JSON to a table. \"knowledge_base\" saves and adds to a KB. Defaults to \"save\".","enum":["save","import","table","knowledge_base"],"default":"save"},"tableName":{"type":"string","description":"Custom name for the table (only used with operation \"table\"). Defaults to the file name without extension."}},"required":["fileName"]},
requiredPermission: "write",
};

@@ -417,6 +462,7 @@ export const MoveFolder: ToolCatalogEntry = {
name: "move_folder",
executor: "sim",
mode: "async",
parameters: {"type":"object","properties":{"folderId":{"type":"string","description":"The folder ID to move."},"parentId":{"type":"string","description":"Target parent folder ID. Omit or pass empty string to move to workspace root."}},"required":["folderId"]},
requiredPermission: "write",
};

@@ -425,6 +471,7 @@ export const MoveWorkflow: ToolCatalogEntry = {
name: "move_workflow",
executor: "sim",
mode: "async",
parameters: {"type":"object","properties":{"folderId":{"type":"string","description":"Target folder ID. Omit or pass empty string to move to workspace root."},"workflowId":{"type":"string","description":"The workflow ID to move."}},"required":["workflowId"]},
requiredPermission: "write",
};

@@ -433,6 +480,7 @@ export const OauthGetAuthLink: ToolCatalogEntry = {
name: "oauth_get_auth_link",
executor: "sim",
mode: "async",
parameters: {"type":"object","properties":{"providerName":{"type":"string","description":"The name of the OAuth provider to connect (e.g., 'Slack', 'Gmail', 'Google Calendar', 'GitHub')"}},"required":["providerName"]},
};

export const OauthRequestAccess: ToolCatalogEntry = {
@@ -440,6 +488,7 @@ export const OauthRequestAccess: ToolCatalogEntry = {
name: "oauth_request_access",
executor: "sim",
mode: "async",
parameters: {"type":"object","properties":{"providerName":{"type":"string","description":"The name of the OAuth provider to connect (e.g., 'Slack', 'Gmail', 'Google Calendar')"}},"required":["providerName"]},
requiresConfirmation: true,
};

@@ -448,6 +497,7 @@ export const OpenResource: ToolCatalogEntry = {
name: "open_resource",
executor: "sim",
mode: "async",
parameters: {"type":"object","properties":{"id":{"type":"string","description":"The resource ID to open."},"type":{"type":"string","description":"The resource type to open.","enum":["workflow","table","knowledgebase","file"]}},"required":["type","id"]},
};

export const Read: ToolCatalogEntry = {
@@ -455,6 +505,7 @@ export const Read: ToolCatalogEntry = {
name: "read",
executor: "sim",
mode: "async",
parameters: {"type":"object","properties":{"limit":{"type":"number","description":"Maximum number of lines to read."},"offset":{"type":"number","description":"Line offset to start reading from (0-indexed)."},"outputTable":{"type":"string","description":"Table ID to import the file contents into (CSV/JSON). All existing rows are replaced. Example: \"tbl_abc123\""},"path":{"type":"string","description":"Path to the file to read (e.g. 'workflows/My Workflow/state.json')."}},"required":["path"]},
};

export const Redeploy: ToolCatalogEntry = {
@@ -462,6 +513,7 @@ export const Redeploy: ToolCatalogEntry = {
name: "redeploy",
executor: "sim",
mode: "async",
parameters: {"type":"object","properties":{"workflowId":{"type":"string","description":"Workflow ID to redeploy (required in workspace context)"}}},
requiresConfirmation: true,
requiredPermission: "admin",
};
@@ -471,6 +523,7 @@ export const RenameWorkflow: ToolCatalogEntry = {
name: "rename_workflow",
executor: "sim",
mode: "async",
parameters: {"type":"object","properties":{"name":{"type":"string","description":"The new name for the workflow."},"workflowId":{"type":"string","description":"The workflow ID to rename."}},"required":["workflowId","name"]},
requiredPermission: "write",
};

@@ -479,6 +532,7 @@ export const Research: ToolCatalogEntry = {
name: "research",
executor: "subagent",
mode: "async",
parameters: {"properties":{"topic":{"description":"The topic to research.","type":"string"}},"required":["topic"],"type":"object"},
subagentId: "research",
internal: true,
};
@@ -488,6 +542,7 @@ export const Respond: ToolCatalogEntry = {
name: "respond",
executor: "sim",
mode: "async",
parameters: {"additionalProperties":true,"properties":{"output":{"description":"The result — facts, status, VFS paths to persisted data, whatever the caller needs to act on.","type":"string"},"success":{"description":"Whether the task completed successfully","type":"boolean"},"type":{"description":"Optional logical result type override","type":"string"}},"required":["output","success"],"type":"object"},
internal: true,
hidden: true,
};
@@ -497,6 +552,7 @@ export const RevertToVersion: ToolCatalogEntry = {
name: "revert_to_version",
executor: "sim",
mode: "async",
parameters: {"type":"object","properties":{"version":{"type":"number","description":"The deployment version number to revert to"},"workflowId":{"type":"string","description":"The workflow ID"}},"required":["workflowId","version"]},
requiresConfirmation: true,
requiredPermission: "admin",
};
@@ -506,6 +562,7 @@ export const Run: ToolCatalogEntry = {
name: "run",
executor: "subagent",
mode: "async",
parameters: {"properties":{"context":{"description":"Pre-gathered context: workflow state, block IDs, input requirements.","type":"string"},"request":{"description":"What to run or what logs to check.","type":"string"}},"required":["request"],"type":"object"},
subagentId: "run",
internal: true,
};
@@ -515,6 +572,7 @@ export const RunBlock: ToolCatalogEntry = {
name: "run_block",
executor: "client",
mode: "async",
parameters: {"type":"object","properties":{"blockId":{"type":"string","description":"The block ID to run in isolation."},"executionId":{"type":"string","description":"Optional execution ID to load the snapshot from. Uses latest execution if omitted."},"useDeployedState":{"type":"boolean","description":"When true, runs the deployed version instead of the live draft. Default: false (draft)."},"workflowId":{"type":"string","description":"Optional workflow ID to run. If not provided, uses the current workflow in context."},"workflow_input":{"type":"object","description":"JSON object with key-value mappings where each key is an input field name"}},"required":["blockId"]},
clientExecutable: true,
requiresConfirmation: true,
};
@@ -524,6 +582,7 @@ export const RunFromBlock: ToolCatalogEntry = {
name: "run_from_block",
executor: "client",
mode: "async",
parameters: {"type":"object","properties":{"executionId":{"type":"string","description":"Optional execution ID to load the snapshot from. Uses latest execution if omitted."},"startBlockId":{"type":"string","description":"The block ID to start execution from."},"useDeployedState":{"type":"boolean","description":"When true, runs the deployed version instead of the live draft. Default: false (draft)."},"workflowId":{"type":"string","description":"Optional workflow ID to run. If not provided, uses the current workflow in context."},"workflow_input":{"type":"object","description":"JSON object with key-value mappings where each key is an input field name"}},"required":["startBlockId"]},
clientExecutable: true,
requiresConfirmation: true,
};
@@ -533,6 +592,7 @@ export const RunWorkflow: ToolCatalogEntry = {
name: "run_workflow",
executor: "client",
mode: "async",
parameters: {"type":"object","properties":{"useDeployedState":{"type":"boolean","description":"When true, runs the deployed version instead of the live draft. Default: false (draft)."},"workflowId":{"type":"string","description":"Optional workflow ID to run. If not provided, uses the current workflow in context."},"workflow_input":{"type":"object","description":"JSON object with key-value mappings where each key is an input field name"}},"required":["workflow_input"]},
clientExecutable: true,
requiresConfirmation: true,
};
@@ -542,6 +602,7 @@ export const RunWorkflowUntilBlock: ToolCatalogEntry = {
name: "run_workflow_until_block",
executor: "client",
mode: "async",
parameters: {"type":"object","properties":{"stopAfterBlockId":{"type":"string","description":"The block ID to stop after. Execution halts once this block completes."},"useDeployedState":{"type":"boolean","description":"When true, runs the deployed version instead of the live draft. Default: false (draft)."},"workflowId":{"type":"string","description":"Optional workflow ID to run. If not provided, uses the current workflow in context."},"workflow_input":{"type":"object","description":"JSON object with key-value mappings where each key is an input field name"}},"required":["stopAfterBlockId"]},
clientExecutable: true,
requiresConfirmation: true,
};
@@ -551,6 +612,7 @@ export const ScrapePage: ToolCatalogEntry = {
name: "scrape_page",
executor: "go",
mode: "sync",
parameters: {"type":"object","properties":{"include_links":{"type":"boolean","description":"Extract all links from the page (default false)"},"url":{"type":"string","description":"The URL to scrape (must include https://)"},"wait_for":{"type":"string","description":"CSS selector to wait for before scraping (for JS-heavy pages)"}},"required":["url"]},
};

export const SearchDocumentation: ToolCatalogEntry = {
@@ -558,6 +620,7 @@ export const SearchDocumentation: ToolCatalogEntry = {
name: "search_documentation",
executor: "sim",
mode: "async",
parameters: {"type":"object","properties":{"query":{"type":"string","description":"The search query"},"topK":{"type":"number","description":"Number of results (max 10)"}},"required":["query"]},
};

export const SearchLibraryDocs: ToolCatalogEntry = {
@@ -565,6 +628,7 @@ export const SearchLibraryDocs: ToolCatalogEntry = {
name: "search_library_docs",
executor: "go",
mode: "sync",
parameters: {"type":"object","properties":{"library_name":{"type":"string","description":"Name of the library to search for (e.g., 'nextjs', 'stripe', 'langchain')"},"query":{"type":"string","description":"The question or topic to find documentation for - be specific"},"version":{"type":"string","description":"Specific version (optional, e.g., '14', 'v2')"}},"required":["library_name","query"]},
};

export const SearchOnline: ToolCatalogEntry = {
@@ -572,6 +636,7 @@ export const SearchOnline: ToolCatalogEntry = {
name: "search_online",
executor: "go",
mode: "sync",
parameters: {"type":"object","properties":{"category":{"type":"string","description":"Filter by category","enum":["news","tweet","github","paper","company","research paper","linkedin profile","pdf","personal site"]},"include_text":{"type":"boolean","description":"Include page text content (default true)"},"num_results":{"type":"number","description":"Number of results (default 10, max 25)"},"query":{"type":"string","description":"Natural language search query"}},"required":["query"]},
};

export const SearchPatterns: ToolCatalogEntry = {
@@ -579,6 +644,7 @@ export const SearchPatterns: ToolCatalogEntry = {
name: "search_patterns",
executor: "go",
mode: "sync",
parameters: {"type":"object","properties":{"limit":{"type":"integer","description":"Maximum number of unique pattern examples to return (defaults to 3)."},"queries":{"type":"array","description":"Up to 3 descriptive strings explaining the workflow pattern(s) you need. Focus on intent and desired outcomes.","items":{"type":"string","description":"Example: \"how to automate wealthbox meeting notes into follow-up tasks\""}}},"required":["queries"]},
};

export const SetEnvironmentVariables: ToolCatalogEntry = {
@@ -586,23 +652,17 @@ export const SetEnvironmentVariables: ToolCatalogEntry = {
name: "set_environment_variables",
executor: "sim",
mode: "async",
parameters: {"type":"object","properties":{"variables":{"type":"array","description":"List of env vars to set","items":{"type":"object","properties":{"name":{"type":"string","description":"Variable name"},"value":{"type":"string","description":"Variable value"}},"required":["name","value"]}}},"required":["variables"]},
requiresConfirmation: true,
requiredPermission: "write",
};

export const SetFileContext: ToolCatalogEntry = {
id: "set_file_context",
name: "set_file_context",
executor: "sim",
mode: "async",
hidden: true,
};

export const SetGlobalWorkflowVariables: ToolCatalogEntry = {
id: "set_global_workflow_variables",
name: "set_global_workflow_variables",
executor: "sim",
mode: "async",
parameters: {"type":"object","properties":{"operations":{"type":"array","description":"List of operations to apply","items":{"type":"object","properties":{"name":{"type":"string"},"operation":{"type":"string","enum":["add","delete","edit"]},"type":{"type":"string","enum":["plain","number","boolean","array","object"]},"value":{"type":"string"}},"required":["operation","name","type","value"]}},"workflowId":{"type":"string","description":"Optional workflow ID. If not provided, uses the current workflow in context."}},"required":["operations"]},
requiresConfirmation: true,
requiredPermission: "write",
};
@@ -612,6 +672,7 @@ export const Superagent: ToolCatalogEntry = {
name: "superagent",
executor: "subagent",
mode: "async",
parameters: {"properties":{"task":{"description":"A single sentence — the agent has full conversation context. Do NOT pre-read credentials or look up configs. Example: 'send the email we discussed' or 'check my calendar for tomorrow'.","type":"string"}},"required":["task"],"type":"object"},
subagentId: "superagent",
internal: true,
};
@@ -621,6 +682,7 @@ export const Table: ToolCatalogEntry = {
name: "table",
executor: "subagent",
mode: "async",
parameters: {"properties":{"request":{"description":"What table action is needed.","type":"string"}},"required":["request"],"type":"object"},
subagentId: "table",
internal: true,
};
@@ -630,6 +692,7 @@ export const ToolSearchToolRegex: ToolCatalogEntry = {
name: "tool_search_tool_regex",
executor: "sim",
mode: "async",
parameters: {"properties":{"case_insensitive":{"description":"Whether the regex should be case-insensitive (default true).","type":"boolean"},"max_results":{"description":"Maximum number of tools to return (optional).","type":"integer"},"pattern":{"description":"Regular expression to match tool names or descriptions.","type":"string"}},"required":["pattern"],"type":"object"},
};

export const UpdateJobHistory: ToolCatalogEntry = {
@@ -637,6 +700,7 @@ export const UpdateJobHistory: ToolCatalogEntry = {
name: "update_job_history",
executor: "sim",
mode: "async",
parameters: {"type":"object","properties":{"jobId":{"type":"string","description":"The job ID."},"summary":{"type":"string","description":"A concise summary of what was done this run (e.g., 'Sent follow-up emails to 3 leads: Alice, Bob, Carol')."}},"required":["jobId","summary"]},
};

export const UpdateWorkspaceMcpServer: ToolCatalogEntry = {
@@ -644,6 +708,7 @@ export const UpdateWorkspaceMcpServer: ToolCatalogEntry = {
name: "update_workspace_mcp_server",
executor: "sim",
mode: "async",
parameters: {"type":"object","properties":{"description":{"type":"string","description":"New description for the server"},"isPublic":{"type":"boolean","description":"Whether the server is publicly accessible"},"name":{"type":"string","description":"New name for the server"},"serverId":{"type":"string","description":"Required: the MCP server ID to update"}},"required":["serverId"]},
requiresConfirmation: true,
requiredPermission: "admin",
};
@@ -653,6 +718,7 @@ export const UserMemory: ToolCatalogEntry = {
name: "user_memory",
executor: "go",
mode: "sync",
parameters: {"type":"object","properties":{"confidence":{"type":"number","description":"Confidence level 0-1 (default 1.0 for explicit, 0.8 for inferred)"},"correct_value":{"type":"string","description":"The correct value to replace the wrong one (for 'correct' operation)"},"key":{"type":"string","description":"Unique key for the memory (e.g., 'preferred_model', 'slack_credential')"},"limit":{"type":"number","description":"Number of results for search (default 10)"},"memory_type":{"type":"string","description":"Type of memory: 'preference', 'entity', 'history', or 'correction'","enum":["preference","entity","history","correction"]},"operation":{"type":"string","description":"Operation: 'add', 'search', 'delete', 'correct', or 'list'","enum":["add","search","delete","correct","list"]},"query":{"type":"string","description":"Search query to find relevant memories"},"source":{"type":"string","description":"Source: 'explicit' (user told you) or 'inferred' (you observed)","enum":["explicit","inferred"]},"value":{"type":"string","description":"Value to remember"}},"required":["operation"]},
};

export const UserTable: ToolCatalogEntry = {
@@ -660,6 +726,8 @@ export const UserTable: ToolCatalogEntry = {
name: "user_table",
executor: "sim",
mode: "async",
parameters: {"type":"object","properties":{"args":{"type":"object","description":"Arguments for the operation","properties":{"column":{"type":"object","description":"Column definition for add_column: { name, type, unique?, position? }"},"columnName":{"type":"string","description":"Column name (required for rename_column, update_column; use columnNames array for batch delete_column)"},"columnNames":{"type":"array","description":"Array of column names to delete at once (for delete_column). Preferred over columnName when deleting multiple columns."},"data":{"type":"object","description":"Row data as key-value pairs (required for insert_row, update_row)"},"description":{"type":"string","description":"Table description (optional for 'create')"},"fileId":{"type":"string","description":"Canonical workspace file ID for create_from_file/import_file. Discover via read(\"files/{name}/meta.json\") or glob(\"files/by-id/*/meta.json\")."},"filePath":{"type":"string","description":"Legacy workspace file reference for create_from_file/import_file. Prefer fileId."},"filter":{"type":"object","description":"MongoDB-style filter for query_rows, update_rows_by_filter, delete_rows_by_filter"},"limit":{"type":"number","description":"Maximum rows to return or affect (optional, default 100)"},"name":{"type":"string","description":"Table name (required for 'create')"},"newName":{"type":"string","description":"New column name (required for rename_column)"},"newType":{"type":"string","description":"New column type (optional for update_column). Types: string, number, boolean, date, json"},"offset":{"type":"number","description":"Number of rows to skip (optional for query_rows, default 0)"},"outputFormat":{"type":"string","description":"Explicit format override for outputPath. Usually unnecessary — the file extension determines the format automatically. Only use this to force a different format than what the extension implies.","enum":["json","csv","txt","md","html"]},"outputPath":{"type":"string","description":"Pipe query_rows results directly to a NEW workspace file. The format is auto-inferred from the file extension: .csv → CSV, .json → JSON, .md → Markdown, etc. Use .csv for tabular exports. Use a flat path like \"files/export.csv\" — nested paths are not supported."},"rowId":{"type":"string","description":"Row ID (required for get_row, update_row, delete_row)"},"rowIds":{"type":"array","description":"Array of row IDs to delete (for batch_delete_rows)"},"rows":{"type":"array","description":"Array of row data objects (required for batch_insert_rows)"},"schema":{"type":"object","description":"Table schema with columns array (required for 'create'). Each column: { name, type, unique? 
}"},"sort":{"type":"object","description":"Sort specification as { field: 'asc' | 'desc' } (optional for query_rows)"},"tableId":{"type":"string","description":"Table ID (required for most operations except 'create')"},"unique":{"type":"boolean","description":"Set column unique constraint (optional for update_column)"},"updates":{"type":"array","description":"Array of per-row updates: [{ rowId, data: { col: val } }] (for batch_update_rows)"},"values":{"type":"object","description":"Map of rowId to value for single-column batch update: { \"rowId1\": val1, \"rowId2\": val2 } (for batch_update_rows with columnName)"}}},"operation":{"type":"string","description":"The operation to perform","enum":["create","create_from_file","import_file","get","get_schema","delete","insert_row","batch_insert_rows","get_row","query_rows","update_row","delete_row","update_rows_by_filter","delete_rows_by_filter","batch_update_rows","batch_delete_rows","add_column","rename_column","delete_column","update_column"]}},"required":["operation","args"]},
resultSchema: {"type":"object","properties":{"data":{"type":"object","description":"Operation-specific result payload."},"message":{"type":"string","description":"Human-readable outcome summary."},"success":{"type":"boolean","description":"Whether the operation succeeded."}},"required":["success","message"]},
requiresConfirmation: true,
};

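// Illustrative user_table call (shape inferred from the schema above; the table ID is made up):
// { "operation": "query_rows", "args": { "tableId": "tbl_abc123",
//   "filter": { "status": "open" }, "limit": 10, "outputPath": "files/export.csv" } }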
@@ -668,6 +736,7 @@ export const Workflow: ToolCatalogEntry = {
name: "workflow",
executor: "subagent",
mode: "async",
parameters: {"properties":{"request":{"description":"A single sentence — the agent has full conversation context and VFS access. Do NOT look up IDs or pre-read data; the workflow agent does its own research. Example: 'move all the return letter workflows into a folder called Letters'.","type":"string"}},"required":["request"],"type":"object"},
subagentId: "workflow",
internal: true,
};
@@ -677,6 +746,8 @@ export const WorkspaceFile: ToolCatalogEntry = {
name: "workspace_file",
executor: "sim",
mode: "async",
parameters: {"type":"object","properties":{"operation":{"type":"string","description":"The file operation to perform.","enum":["create","append","update","patch","rename","delete"]},"target":{"type":"object","description":"Explicit file target. Use kind=new_file + fileName for create. Use kind=file_id + fileId for append, update, patch, rename, and delete. Emit target keys in this order: kind, fileId, fileName.","properties":{"kind":{"type":"string","description":"How the file target is identified.","enum":["new_file","file_id"]},"fileId":{"type":"string","description":"Canonical existing workspace file ID. Required when target.kind=file_id."},"fileName":{"type":"string","description":"Plain workspace filename including extension, e.g. \"main.py\" or \"report.docx\". Required when target.kind=new_file."}},"required":["kind"]},"title":{"type":"string","description":"Optional short UI label for create/append chunks, e.g. \"Chapter 1\" or \"Slide 3\"."},"contentType":{"type":"string","description":"Optional MIME type override. Usually omit and let the system infer from the target file extension.","enum":["text/markdown","text/html","text/plain","application/json","text/csv","application/vnd.openxmlformats-officedocument.presentationml.presentation","application/vnd.openxmlformats-officedocument.wordprocessingml.document","application/pdf"]},"edit":{"type":"object","description":"Patch metadata. Use strategy=search_replace for exact text replacement, or strategy=anchored for line-based inserts/replacements/deletions. Emit edit keys in this order: strategy, search, replace, replaceAll, mode, occurrence, before_anchor, after_anchor, anchor, start_anchor, end_anchor, content.","properties":{"strategy":{"type":"string","description":"Patch strategy.","enum":["search_replace","anchored"]},"search":{"type":"string","description":"Exact text to find when strategy=search_replace. Must match exactly once unless replaceAll=true."},"replace":{"type":"string","description":"Replacement text when strategy=search_replace."},"replaceAll":{"type":"boolean","description":"When true and strategy=search_replace, replace every match instead of requiring a unique single match."},"mode":{"type":"string","description":"Anchored edit mode when strategy=anchored.","enum":["replace_between","insert_after","delete_between"]},"occurrence":{"type":"number","description":"1-based occurrence for repeated anchor lines. Optional; defaults to 1."},"before_anchor":{"type":"string","description":"Boundary line kept before inserted replacement content. Required for mode=replace_between."},"after_anchor":{"type":"string","description":"Boundary line kept after inserted replacement content. Required for mode=replace_between."},"anchor":{"type":"string","description":"Anchor line after which new content is inserted. Required for mode=insert_after."},"start_anchor":{"type":"string","description":"First line to delete. Required for mode=delete_between."},"end_anchor":{"type":"string","description":"First line to keep after deletion. Required for mode=delete_between."},"content":{"type":"string","description":"Inserted or replacement content for anchored edits. Not used for delete_between."}}},"newName":{"type":"string","description":"New file name for rename. Must be a plain workspace filename like \"main.py\"."},"content":{"type":"string","description":"File content for create, append, or update. For .pptx/.docx/.pdf this must be JavaScript source code for the corresponding generator runtime."}},"required":["operation","target"]},
resultSchema: {"type":"object","properties":{"data":{"type":"object","description":"Optional operation metadata such as file id, file name, size, and content type."},"message":{"type":"string","description":"Human-readable summary of the outcome."},"success":{"type":"boolean","description":"Whether the file operation succeeded."}},"required":["success","message"]},
requiredPermission: "write",
};

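// Illustrative workspace_file patch call (shape inferred from the schema above; the file ID is made up):
// { "operation": "patch", "target": { "kind": "file_id", "fileId": "wf_123" },
//   "edit": { "strategy": "search_replace", "search": "v1.0", "replace": "v1.1" } }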
@@ -687,7 +758,6 @@ export const TOOL_CATALOG: Record<string, ToolCatalogEntry> = {
[CompleteJob.id]: CompleteJob,
[ContextWrite.id]: ContextWrite,
[CrawlWebsite.id]: CrawlWebsite,
[CreateFile.id]: CreateFile,
[CreateFolder.id]: CreateFolder,
[CreateJob.id]: CreateJob,
[CreateWorkflow.id]: CreateWorkflow,
@@ -753,7 +823,6 @@ export const TOOL_CATALOG: Record<string, ToolCatalogEntry> = {
[SearchOnline.id]: SearchOnline,
[SearchPatterns.id]: SearchPatterns,
[SetEnvironmentVariables.id]: SetEnvironmentVariables,
[SetFileContext.id]: SetFileContext,
[SetGlobalWorkflowVariables.id]: SetGlobalWorkflowVariables,
[Superagent.id]: Superagent,
[Table.id]: Table,

2605 apps/sim/lib/copilot/generated/tool-schemas-v1.ts Normal file
File diff suppressed because it is too large
@@ -22,6 +22,92 @@ import type {

const logger = createLogger('CopilotGoStream')

type FilePreviewServerState = {
  raw: string
  started: boolean
  operation?: string
  targetKind?: string
  fileId?: string
  fileName?: string
  title?: string
  editMetaKey?: string
  targetKey?: string
  emittedContentLength: number
}

function extractJsonString(raw: string, key: string): string | undefined {
  const pattern = new RegExp(`"${key}"\\s*:\\s*"`)
  const m = pattern.exec(raw)
  if (!m) return undefined
  const start = m.index + m[0].length
  let end = -1
  for (let i = start; i < raw.length; i++) {
    if (raw[i] === '\\') {
      i++
      continue
    }
    if (raw[i] === '"') {
      end = i
      break
    }
  }
  if (end === -1) return undefined
  return raw
    .slice(start, end)
    .replace(/\\n/g, '\n')
    .replace(/\\t/g, '\t')
    .replace(/\\r/g, '\r')
    .replace(/\\"/g, '"')
    .replace(/\\u([0-9a-fA-F]{4})/g, (_, hex) => String.fromCharCode(Number.parseInt(hex, 16)))
    .replace(/\\\\/g, '\\')
}

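// Sketch of the intended behavior (inferred, not part of the commit): on a truncated
// stream such as  {"operation":"patch","target":{"kind":"file_id"  this returns
// 'patch' for key 'operation', and undefined for any key whose closing quote has not
// arrived yet, so callers can safely re-run it on every delta.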
function extractJsonBoolean(raw: string, key: string): boolean | undefined {
  const match = raw.match(new RegExp(`"${key}"\\s*:\\s*(true|false)`))
  if (!match) return undefined
  return match[1] === 'true'
}

function extractJsonNumber(raw: string, key: string): number | undefined {
  const match = raw.match(new RegExp(`"${key}"\\s*:\\s*(\\d+)`))
  if (!match) return undefined
  return Number.parseInt(match[1], 10)
}

function extractStreamedContent(raw: string, preferredKey: 'content' | 'replace'): string {
  const marker = `"${preferredKey}":`
  const idx = raw.indexOf(marker)
  if (idx === -1) return ''
  const rest = raw.slice(idx + marker.length).trimStart()
  if (!rest.startsWith('"')) return rest
  let end = -1
  for (let i = 1; i < rest.length; i++) {
    if (rest[i] === '\\') {
      i++
      continue
    }
    if (rest[i] === '"') {
      end = i
      break
    }
  }
  const inner = end === -1 ? rest.slice(1) : rest.slice(1, end)
  return inner
    .replace(/\\n/g, '\n')
    .replace(/\\t/g, '\t')
    .replace(/\\r/g, '\r')
    .replace(/\\"/g, '"')
    .replace(/\\u([0-9a-fA-F]{4})/g, (_, hex) => String.fromCharCode(Number.parseInt(hex, 16)))
    .replace(/\\\\/g, '\\')
}

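// Unlike extractJsonString, this helper deliberately tolerates an unterminated string:
// when the closing quote has not streamed in yet, everything after the opening quote
// is returned, which is what lets the preview grow delta by delta. buildPreviewContent
// below routes search_replace patches to the "replace" key and everything else to "content".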
function buildPreviewContent(raw: string, strategy?: string): string {
  if (strategy === 'search_replace') {
    return extractStreamedContent(raw, 'replace')
  }
  return extractStreamedContent(raw, 'content')
}

export class CopilotBackendError extends Error {
  status?: number
  body?: string
@@ -74,6 +160,7 @@ export async function runStreamLoop(
  options: StreamLoopOptions
): Promise<void> {
  const { timeout = ORCHESTRATION_TIMEOUT_MS, abortSignal } = options
  const filePreviewState = new Map<string, FilePreviewServerState>()

  const fetchSpan = context.trace.startSpan(
    `HTTP Request → ${new URL(fetchUrl).pathname}`,
@@ -136,6 +223,144 @@ export async function runStreamLoop(
        return
      }

      if (
        streamEvent.type === MothershipStreamV1EventType.tool &&
        streamEvent.payload.phase === 'args_delta' &&
        streamEvent.payload.toolName === 'workspace_file' &&
        typeof streamEvent.payload.toolCallId === 'string' &&
        typeof streamEvent.payload.argumentsDelta === 'string'
      ) {
        const toolCallId = streamEvent.payload.toolCallId as string
        const delta = streamEvent.payload.argumentsDelta as string
        const state = filePreviewState.get(toolCallId) ?? {
          raw: '',
          started: false,
          emittedContentLength: 0,
        }
        state.raw += delta

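        // One FilePreviewServerState accumulates per toolCallId, so interleaved
        // workspace_file calls in the same stream cannot corrupt each other's preview.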
        if (!state.started) {
          state.started = true
          await options.onEvent?.({
            type: MothershipStreamV1EventType.tool,
            payload: {
              toolCallId,
              toolName: 'workspace_file',
              previewPhase: 'file_preview_start',
            },
            ...(streamEvent.scope ? { scope: streamEvent.scope } : {}),
          })
        }

        const operation = extractJsonString(state.raw, 'operation')
        const targetKind = extractJsonString(state.raw, 'kind')
        const fileId = extractJsonString(state.raw, 'fileId')
        const fileName = extractJsonString(state.raw, 'fileName')
        const title = extractJsonString(state.raw, 'title')
        if (operation) state.operation = operation
        if (targetKind) state.targetKind = targetKind
        if (fileId) state.fileId = fileId
        if (fileName) state.fileName = fileName
        if (title) state.title = title

        const targetKey = JSON.stringify({
          operation: state.operation,
          targetKind: state.targetKind,
          fileId: state.fileId,
          fileName: state.fileName,
          title: state.title,
        })
        if (
          state.targetKind &&
          (state.targetKind === 'new_file' ? !!state.fileName : !!state.fileId) &&
          state.targetKey !== targetKey
        ) {
          state.targetKey = targetKey
          await options.onEvent?.({
            type: MothershipStreamV1EventType.tool,
            payload: {
              toolCallId,
              toolName: 'workspace_file',
              previewPhase: 'file_preview_target',
              operation: state.operation,
              target: {
                kind: state.targetKind,
                ...(state.fileId ? { fileId: state.fileId } : {}),
                ...(state.fileName ? { fileName: state.fileName } : {}),
              },
              ...(state.title ? { title: state.title } : {}),
            },
            ...(streamEvent.scope ? { scope: streamEvent.scope } : {}),
          })
        }

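        // The serialized targetKey above acts as a cheap change detector: the
        // file_preview_target event is re-emitted only when operation/target/title
        // actually change, not on every argument delta.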
        const strategy = extractJsonString(state.raw, 'strategy')
        const editMetaPayload = strategy
          ? {
              strategy,
              ...(extractJsonString(state.raw, 'mode')
                ? { mode: extractJsonString(state.raw, 'mode') }
                : {}),
              ...(extractJsonNumber(state.raw, 'occurrence') !== undefined
                ? { occurrence: extractJsonNumber(state.raw, 'occurrence') }
                : {}),
              ...(extractJsonString(state.raw, 'search')
                ? { search: extractJsonString(state.raw, 'search') }
                : {}),
              ...(extractJsonBoolean(state.raw, 'replaceAll') !== undefined
                ? { replaceAll: extractJsonBoolean(state.raw, 'replaceAll') }
                : {}),
              ...(extractJsonString(state.raw, 'before_anchor')
                ? { before_anchor: extractJsonString(state.raw, 'before_anchor') }
                : {}),
              ...(extractJsonString(state.raw, 'after_anchor')
                ? { after_anchor: extractJsonString(state.raw, 'after_anchor') }
                : {}),
              ...(extractJsonString(state.raw, 'anchor')
                ? { anchor: extractJsonString(state.raw, 'anchor') }
                : {}),
              ...(extractJsonString(state.raw, 'start_anchor')
                ? { start_anchor: extractJsonString(state.raw, 'start_anchor') }
                : {}),
              ...(extractJsonString(state.raw, 'end_anchor')
                ? { end_anchor: extractJsonString(state.raw, 'end_anchor') }
                : {}),
            }
          : undefined
        const editMetaKey = editMetaPayload ? JSON.stringify(editMetaPayload) : undefined
        if (editMetaPayload && state.editMetaKey !== editMetaKey) {
          state.editMetaKey = editMetaKey
          await options.onEvent?.({
            type: MothershipStreamV1EventType.tool,
            payload: {
              toolCallId,
              toolName: 'workspace_file',
              previewPhase: 'file_preview_edit_meta',
              edit: editMetaPayload,
            },
            ...(streamEvent.scope ? { scope: streamEvent.scope } : {}),
          })
        }

const streamedContent = buildPreviewContent(state.raw, strategy)
|
||||
if (streamedContent.length > state.emittedContentLength) {
|
||||
const contentDelta = streamedContent.slice(state.emittedContentLength)
|
||||
state.emittedContentLength = streamedContent.length
|
||||
await options.onEvent?.({
|
||||
type: MothershipStreamV1EventType.tool,
|
||||
payload: {
|
||||
toolCallId,
|
||||
toolName: 'workspace_file',
|
||||
previewPhase: 'file_preview_content_delta',
|
||||
delta: contentDelta,
|
||||
},
|
||||
...(streamEvent.scope ? { scope: streamEvent.scope } : {}),
|
||||
})
|
||||
}
|
||||
|
||||
filePreviewState.set(toolCallId, state)
|
||||
}
|
||||
|
||||
try {
|
||||
await options.onEvent?.(streamEvent)
|
||||
} catch (error) {
|
||||
|
||||
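The hunk above leans on extractJsonString, extractJsonNumber, and extractJsonBoolean to read individual fields out of a JSON arguments buffer that is still streaming in. Those helpers are not part of this diff; a minimal sketch of the assumed behavior, limited to unique top-level keys:

// Minimal sketch, not the repo's actual helpers: tolerant extractors that pull
// one top-level field out of a possibly incomplete JSON arguments buffer.
function extractJsonString(raw: string, key: string): string | undefined {
  // Only matches once the closing quote of the value has streamed in.
  const m = raw.match(new RegExp(`"${key}"\\s*:\\s*"((?:[^"\\\\]|\\\\.)*)"`))
  if (!m) return undefined
  // Undo the common JSON escapes; a full implementation would use JSON.parse.
  return m[1].replace(/\\"/g, '"').replace(/\\n/g, '\n').replace(/\\\\/g, '\\')
}

function extractJsonNumber(raw: string, key: string): number | undefined {
  const m = raw.match(new RegExp(`"${key}"\\s*:\\s*(-?\\d+(?:\\.\\d+)?)`))
  return m ? Number(m[1]) : undefined
}

function extractJsonBoolean(raw: string, key: string): boolean | undefined {
  const m = raw.match(new RegExp(`"${key}"\\s*:\\s*(true|false)`))
  return m ? m[1] === 'true' : undefined
}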
@@ -42,6 +42,10 @@ function getOperation(params: Record<string, unknown> | undefined): string | und
  return (args.operation ?? params?.operation) as string | undefined
}

function getWorkspaceFileTarget(params: Record<string, unknown> | undefined): Record<string, unknown> {
  return asRecord(params?.target)
}

const READ_ONLY_TABLE_OPS = new Set(['get', 'get_schema', 'get_row', 'query_rows'])
const READ_ONLY_KB_OPS = new Set(['get', 'query', 'list_tags', 'get_tag_usage'])
const READ_ONLY_KNOWLEDGE_ACTIONS = new Set(['listed', 'queried'])
@@ -250,7 +254,11 @@ export function extractDeletedResourcesFromToolResult(

    case WorkspaceFile.id: {
      if (operation !== 'delete') return []
      const fileId = (data.id as string) ?? (args.fileId as string)
      const target = getWorkspaceFileTarget(params)
      const fileId =
        (data.id as string) ??
        (target.fileId as string) ??
        (args.fileId as string)
      if (fileId) {
        return [{ type: resourceType, id: fileId, title: (data.name as string) || 'File' }]
      }
@@ -2,7 +2,6 @@ import { createLogger } from '@sim/logger'
import {
  CheckDeploymentStatus,
  CompleteJob,
  CreateFile,
  CreateFolder,
  CreateJob,
  CreateWorkflow,
@@ -45,7 +44,6 @@ import {
  RunFromBlock,
  RunWorkflow,
  RunWorkflowUntilBlock,
  SetFileContext,
  SetGlobalWorkflowVariables,
  UpdateJobHistory,
  UpdateWorkspaceMcpServer,
@@ -67,8 +65,6 @@ import {
  executeRevertToVersion,
  executeUpdateWorkspaceMcpServer,
} from '../tools/handlers/deployment/manage'
import { executeCreateFile } from '../tools/handlers/files/create-file'
import { executeSetFileContext } from '../tools/handlers/files/set-file-context'
import { executeFunctionExecute } from '../tools/handlers/function-execute'
import {
  executeCompleteJob,
@@ -183,8 +179,6 @@ function buildHandlerMap(): Record<string, ToolHandler> {
    [GetPlatformActions.id]: h(executeGetPlatformActions),
    [MaterializeFile.id]: h(executeMaterializeFile),
    [FunctionExecute.id]: h(executeFunctionExecute),
    [CreateFile.id]: h(executeCreateFile),
    [SetFileContext.id]: h(executeSetFileContext),

    ...buildServerToolHandlers(),
  }
@@ -1,78 +0,0 @@
import { createLogger } from '@sim/logger'
import type { ToolExecutionContext, ToolExecutionResult } from '@/lib/copilot/tool-executor/types'
import {
  getWorkspaceFileByName,
  uploadWorkspaceFile,
} from '@/lib/uploads/contexts/workspace/workspace-file-manager'
import { getMimeTypeFromExtension } from '@/lib/uploads/utils/file-utils'

const logger = createLogger('CreateFile')

interface CreateFileParams {
  fileName: string
}

export async function executeCreateFile(
  params: CreateFileParams,
  context: ToolExecutionContext
): Promise<ToolExecutionResult> {
  const { fileName } = params
  const workspaceId = context.workspaceId

  if (!fileName) {
    return { success: false, error: 'fileName is required' }
  }
  if (!workspaceId) {
    return { success: false, error: 'workspaceId is required' }
  }

  try {
    const existing = await getWorkspaceFileByName(workspaceId, fileName)
    if (existing) {
      logger.warn('Create file rejected because file already exists', {
        fileId: existing.id,
        fileName,
        workspaceId,
      })
      return {
        success: false,
        error: `File "${existing.name}" already exists. Use set_file_context with fileId "${existing.id}" to append to it, or choose a new fileName.`,
      }
    }

    const emptyBuffer = Buffer.from('', 'utf-8')
    const extension = fileName.includes('.') ? fileName.split('.').pop() || '' : ''
    const mimeType = getMimeTypeFromExtension(extension)
    const record = await uploadWorkspaceFile(
      workspaceId,
      context.userId,
      emptyBuffer,
      fileName,
      mimeType
    )

    logger.info('File created', { fileId: record.id, fileName: record.name, workspaceId })

    return {
      success: true,
      output: {
        fileId: record.id,
        fileName: record.name,
        contentType: record.type,
        size: 0,
        message: `File "${record.name}" created. File context is now set — subsequent workspace_file.write calls will automatically target this file.`,
      },
      resources: [
        {
          type: 'file',
          id: record.id,
          title: record.name,
        },
      ],
    }
  } catch (err) {
    const msg = err instanceof Error ? err.message : String(err)
    logger.error('Failed to create file', { fileName, error: msg })
    return { success: false, error: `Failed to create file: ${msg}` }
  }
}
@@ -1,48 +0,0 @@
import { createLogger } from '@sim/logger'
import type { ToolExecutionContext, ToolExecutionResult } from '@/lib/copilot/tool-executor/types'
import { getWorkspaceFile } from '@/lib/uploads/contexts/workspace/workspace-file-manager'

const logger = createLogger('SetFileContext')

interface SetFileContextParams {
  fileId: string
}

export async function executeSetFileContext(
  params: SetFileContextParams,
  context: ToolExecutionContext
): Promise<ToolExecutionResult> {
  const { fileId } = params
  const workspaceId = context.workspaceId

  if (!fileId) {
    return { success: false, error: 'fileId is required' }
  }
  if (!workspaceId) {
    return { success: false, error: 'workspaceId is required' }
  }

  try {
    const file = await getWorkspaceFile(workspaceId, fileId)
    if (!file) {
      return { success: false, error: `File not found: ${fileId}` }
    }

    logger.info('File context set', { fileId, fileName: file.name, workspaceId })

    return {
      success: true,
      output: {
        fileId: file.id,
        fileName: file.name,
        contentType: file.type,
        size: file.size,
        message: `File context switched to "${file.name}". Subsequent workspace_file.write calls will now target this file.`,
      },
    }
  } catch (err) {
    const msg = err instanceof Error ? err.message : String(err)
    logger.error('Failed to validate file context', { fileId, error: msg })
    return { success: false, error: `Failed to validate file: ${msg}` }
  }
}
@@ -1,9 +1,9 @@
import { existsSync, readFileSync } from 'fs'
import { join } from 'path'
import { createLogger } from '@sim/logger'
import { z } from 'zod'
import { getCopilotToolDescription } from '@/lib/copilot/tools/descriptions'
import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'
import { GetBlocksMetadataInput, GetBlocksMetadataResult } from '@/lib/copilot/tools/shared/schemas'
import { getAllowedIntegrationsFromEnv, isHosted } from '@/lib/core/config/feature-flags'
import { getServiceAccountProviderForProviderId } from '@/lib/oauth/utils'
import { registry as blockRegistry } from '@/blocks/registry'
@@ -100,17 +100,20 @@ export interface CopilotBlockMetadata {
  yamlDocumentation?: string
}

const GetBlocksMetadataInputSchema = z.object({ blockIds: z.array(z.string()).min(1) })
const GetBlocksMetadataResultSchema = z.object({ metadata: z.record(z.any()) })

export const getBlocksMetadataServerTool: BaseServerTool<
  ReturnType<typeof GetBlocksMetadataInput.parse>,
  ReturnType<typeof GetBlocksMetadataResult.parse>
  z.infer<typeof GetBlocksMetadataInputSchema>,
  z.infer<typeof GetBlocksMetadataResultSchema>
> = {
  name: 'get_blocks_metadata',
  inputSchema: GetBlocksMetadataInput,
  outputSchema: GetBlocksMetadataResult,
  inputSchema: GetBlocksMetadataInputSchema,
  outputSchema: GetBlocksMetadataResultSchema,
  async execute(
    { blockIds }: ReturnType<typeof GetBlocksMetadataInput.parse>,
    { blockIds }: z.infer<typeof GetBlocksMetadataInputSchema>,
    context?: { userId: string }
  ): Promise<ReturnType<typeof GetBlocksMetadataResult.parse>> {
  ): Promise<z.infer<typeof GetBlocksMetadataResultSchema>> {
    const logger = createLogger('GetBlocksMetadataServerTool')
    logger.debug('Executing get_blocks_metadata', { count: blockIds?.length })

@@ -319,7 +322,7 @@ export const getBlocksMetadataServerTool: BaseServerTool<
      transformedResult[blockId] = transformBlockMetadata(metadata)
    }

    return GetBlocksMetadataResult.parse({ metadata: transformedResult })
    return GetBlocksMetadataResultSchema.parse({ metadata: transformedResult })
  },
}

@@ -5,7 +5,6 @@ import {
  type BaseServerTool,
  type ServerToolContext,
} from '@/lib/copilot/tools/server/base-tool'
import type { WorkspaceFileArgs, WorkspaceFileResult } from '@/lib/copilot/tools/shared/schemas'
import {
  generateDocxFromCode,
  generatePdfFromCode,
@@ -30,6 +29,57 @@ const PPTX_SOURCE_MIME = 'text/x-pptxgenjs'
const DOCX_SOURCE_MIME = 'text/x-docxjs'
const PDF_SOURCE_MIME = 'text/x-pdflibjs'

type WorkspaceFileOperation = 'create' | 'append' | 'update' | 'delete' | 'rename' | 'patch'

type WorkspaceFileTarget =
  | {
      kind: 'new_file'
      fileName: string
      fileId?: string
    }
  | {
      kind: 'file_id'
      fileId: string
      fileName?: string
    }

type WorkspaceFileEdit =
  | {
      strategy: 'search_replace'
      search: string
      replace: string
      replaceAll?: boolean
    }
  | {
      strategy: 'anchored'
      mode: 'replace_between' | 'insert_after' | 'delete_between'
      occurrence?: number
      before_anchor?: string
      after_anchor?: string
      start_anchor?: string
      end_anchor?: string
      anchor?: string
      content?: string
    }

type WorkspaceFileArgs = {
  operation: WorkspaceFileOperation
  target?: WorkspaceFileTarget
  title?: string
  content?: string
  contentType?: string
  newName?: string
  edit?: WorkspaceFileEdit
  // Legacy nested shape kept temporarily for compatibility during migration.
  args?: Record<string, unknown>
}

type WorkspaceFileResult = {
  success: boolean
  message: string
  data?: Record<string, unknown>
}

const EXT_TO_MIME: Record<string, string> = {
  '.txt': 'text/plain',
  '.md': 'text/markdown',
@@ -56,6 +106,136 @@ function validateFlatWorkspaceFileName(fileName: string): string | null {
  return null
}

function getDocumentFormatInfo(fileName: string): {
  isDoc: boolean
  formatName?: 'PPTX' | 'DOCX' | 'PDF'
  sourceMime?: string
  generator?: (code: string, workspaceId: string, signal?: AbortSignal) => Promise<Buffer>
} {
  const lowerName = fileName.toLowerCase()
  if (lowerName.endsWith('.pptx')) {
    return {
      isDoc: true,
      formatName: 'PPTX',
      sourceMime: PPTX_SOURCE_MIME,
      generator: generatePptxFromCode,
    }
  }
  if (lowerName.endsWith('.docx')) {
    return {
      isDoc: true,
      formatName: 'DOCX',
      sourceMime: DOCX_SOURCE_MIME,
      generator: generateDocxFromCode,
    }
  }
  if (lowerName.endsWith('.pdf')) {
    return {
      isDoc: true,
      formatName: 'PDF',
      sourceMime: PDF_SOURCE_MIME,
      generator: generatePdfFromCode,
    }
  }
  return { isDoc: false }
}
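The helper above centralizes the extension dispatch that the old write/update/patch cases each reimplemented inline. A quick illustration of its behavior (file names are made up):

// Illustrative only: exercising getDocumentFormatInfo with made-up names.
const deck = getDocumentFormatInfo('Q3 Deck.PPTX')
// deck.isDoc === true, deck.formatName === 'PPTX', deck.sourceMime === 'text/x-pptxgenjs'
const notes = getDocumentFormatInfo('notes.md')
// notes.isDoc === false, so plain files skip the generator round-trip entirely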

function normalizeWorkspaceFileParams(params: WorkspaceFileArgs): {
  operation: WorkspaceFileOperation
  target?: WorkspaceFileTarget
  title?: string
  content?: string
  contentType?: string
  newName?: string
  edit?: WorkspaceFileEdit
} {
  if (params.target || params.edit || params.content !== undefined || params.newName !== undefined) {
    return {
      operation: params.operation,
      target: params.target,
      title: params.title,
      content: params.content,
      contentType: params.contentType,
      newName: params.newName,
      edit: params.edit,
    }
  }

  const legacyArgs = (params.args ?? {}) as Record<string, unknown>
  const legacyOperation = params.operation
  const legacyTarget: WorkspaceFileTarget | undefined =
    legacyOperation === 'create'
      ? ({
          kind: 'new_file',
          fileName: String(legacyArgs.fileName ?? ''),
        } as WorkspaceFileTarget)
      : legacyArgs.fileId
        ? ({
            kind: 'file_id',
            fileId: String(legacyArgs.fileId),
            ...(legacyArgs.fileName ? { fileName: String(legacyArgs.fileName) } : {}),
          } as WorkspaceFileTarget)
        : legacyArgs.fileName
          ? ({
              kind: 'new_file',
              fileName: String(legacyArgs.fileName),
            } as WorkspaceFileTarget)
          : undefined

  const legacyEdit = (() => {
    const structured = legacyArgs.edit as Record<string, unknown> | undefined
    if (structured && typeof structured.mode === 'string') {
      return {
        strategy: 'anchored',
        mode: structured.mode as 'replace_between' | 'insert_after' | 'delete_between',
        occurrence:
          typeof structured.occurrence === 'number' ? structured.occurrence : undefined,
        before_anchor:
          typeof structured.before_anchor === 'string' ? structured.before_anchor : undefined,
        after_anchor:
          typeof structured.after_anchor === 'string' ? structured.after_anchor : undefined,
        start_anchor:
          typeof structured.start_anchor === 'string' ? structured.start_anchor : undefined,
        end_anchor:
          typeof structured.end_anchor === 'string' ? structured.end_anchor : undefined,
        anchor: typeof structured.anchor === 'string' ? structured.anchor : undefined,
        content: typeof structured.content === 'string' ? structured.content : undefined,
      } satisfies WorkspaceFileEdit
    }

    const edits = legacyArgs.edits as Array<{ search?: unknown; replace?: unknown }> | undefined
    if (Array.isArray(edits) && edits.length > 0) {
      const first = edits[0]
      if (typeof first?.search === 'string' && typeof first?.replace === 'string') {
        return {
          strategy: 'search_replace',
          search: first.search,
          replace: first.replace,
        } satisfies WorkspaceFileEdit
      }
    }

    return undefined
  })()

  const normalizedOperation: WorkspaceFileOperation =
    (legacyOperation as string) === 'write'
      ? legacyTarget?.kind === 'new_file'
        ? 'create'
        : 'append'
      : (legacyOperation as WorkspaceFileOperation)

  return {
    operation: normalizedOperation,
    target: legacyTarget,
    title: typeof legacyArgs.title === 'string' ? legacyArgs.title : undefined,
    content: typeof legacyArgs.content === 'string' ? legacyArgs.content : undefined,
    contentType: typeof legacyArgs.contentType === 'string' ? legacyArgs.contentType : undefined,
    newName: typeof legacyArgs.newName === 'string' ? legacyArgs.newName : undefined,
    edit: legacyEdit,
  }
}
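A quick sanity check of the legacy mapping (illustrative payloads only; the legacy operation 'write' predates the new union, hence the cast): a write carrying only a fileName becomes create, while a write carrying a fileId becomes append.

// Illustrative only: legacy payloads and what normalizeWorkspaceFileParams returns.
const created = normalizeWorkspaceFileParams({
  operation: 'write' as WorkspaceFileOperation,
  args: { fileName: 'report.md', content: '# Q3' },
})
// created.operation === 'create'; created.target = { kind: 'new_file', fileName: 'report.md' }

const appended = normalizeWorkspaceFileParams({
  operation: 'write' as WorkspaceFileOperation,
  args: { fileId: 'file_123', content: 'more text' },
})
// appended.operation === 'append'; appended.target = { kind: 'file_id', fileId: 'file_123' }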

export const workspaceFileServerTool: BaseServerTool<WorkspaceFileArgs, WorkspaceFileResult> = {
  name: WorkspaceFile.id,
  async execute(
@@ -70,9 +250,9 @@ export const workspaceFileServerTool: BaseServerTool<WorkspaceFileArgs, Workspac
      throw new Error('Authentication required')
    }

    const { operation, args = {} } = params
    const workspaceId =
      context.workspaceId || ((args as Record<string, unknown>).workspaceId as string | undefined)
    const normalized = normalizeWorkspaceFileParams(params)
    const { operation } = normalized
    const workspaceId = context.workspaceId

    if (!workspaceId) {
      return { success: false, message: 'Workspace ID is required' }
@@ -80,118 +260,42 @@ export const workspaceFileServerTool: BaseServerTool<WorkspaceFileArgs, Workspac

    try {
      switch (operation) {
        case 'write': {
          const fileName = (args as Record<string, unknown>).fileName as string | undefined
          const fileId = (args as Record<string, unknown>).fileId as string | undefined
          const content = (args as Record<string, unknown>).content as string | undefined
          const explicitType = (args as Record<string, unknown>).contentType as string | undefined

          if (!fileName) {
            return { success: false, message: 'fileName is required for write operation' }
          }
          if (content === undefined || content === null) {
            return { success: false, message: 'content is required for write operation' }
          }
          const fileNameValidationError = validateFlatWorkspaceFileName(fileName)
          if (fileNameValidationError) {
            return { success: false, message: fileNameValidationError }
          }

          const lowerName = fileName.toLowerCase()
          const isPptx = lowerName.endsWith('.pptx')
          const isDocx = lowerName.endsWith('.docx')
          const isPdf = lowerName.endsWith('.pdf')
          const isDoc = isPptx || isDocx || isPdf
          const sourceMime = isPptx
            ? PPTX_SOURCE_MIME
            : isDocx
              ? DOCX_SOURCE_MIME
              : isPdf
                ? PDF_SOURCE_MIME
                : undefined

          const existingFile = fileId
            ? await getWorkspaceFile(workspaceId, fileId)
            : await getWorkspaceFileByName(workspaceId, fileName)

          if (existingFile) {
            const currentBuffer = await downloadWsFile(existingFile)
            const combined = isDoc
              ? `${currentBuffer.toString('utf-8')}\n{\n${content}\n}`
              : `${currentBuffer.toString('utf-8')}\n${content}`

            if (isDoc) {
              const formatName = isPptx ? 'PPTX' : isDocx ? 'DOCX' : 'PDF'
              const generator = isPptx
                ? generatePptxFromCode
                : isDocx
                  ? generateDocxFromCode
                  : generatePdfFromCode
              try {
                await generator(combined, workspaceId)
              } catch (err) {
                const msg = err instanceof Error ? err.message : String(err)
                return {
                  success: false,
                  message: `${formatName} generation failed after append: ${msg}. Fix the content and retry.`,
                }
              }
            }

            const combinedBuffer = Buffer.from(combined, 'utf-8')
            assertServerToolNotAborted(context)
            await updateWorkspaceFileContent(
              workspaceId,
              existingFile.id,
              context.userId,
              combinedBuffer,
              sourceMime
            )

            logger.info('Workspace file appended via write', {
              fileId: existingFile.id,
              name: existingFile.name,
              appendedSize: content.length,
              totalSize: combinedBuffer.length,
              userId: context.userId,
            })

        case 'create': {
          const target = normalized.target
          if (!target || target.kind !== 'new_file') {
            return {
              success: true,
              message: `Content appended to "${existingFile.name}" (${content.length} bytes added, ${combinedBuffer.length} bytes total)`,
              data: {
                id: existingFile.id,
                name: existingFile.name,
                size: combinedBuffer.length,
              },
              success: false,
              message: 'create requires target.kind=new_file with target.fileName',
            }
          }

          let contentType: string
          if (isDoc) {
            const formatName = isPptx ? 'PPTX' : isDocx ? 'DOCX' : 'PDF'
            const generator = isPptx
              ? generatePptxFromCode
              : isDocx
                ? generateDocxFromCode
                : generatePdfFromCode
          const fileName = target.fileName
          const content = normalized.content ?? ''
          const explicitType = normalized.contentType
          const fileNameValidationError = validateFlatWorkspaceFileName(fileName)
          if (fileNameValidationError) return { success: false, message: fileNameValidationError }

          const existingFile = await getWorkspaceFileByName(workspaceId, fileName)
          if (existingFile) {
            return { success: false, message: `File "${fileName}" already exists` }
          }

          const docInfo = getDocumentFormatInfo(fileName)
          let contentType = inferContentType(fileName, explicitType)
          if (docInfo.isDoc) {
            try {
              await generator(content, workspaceId)
              await docInfo.generator!(content, workspaceId)
            } catch (err) {
              const msg = err instanceof Error ? err.message : String(err)
              logger.error(`${formatName} code validation failed`, { error: msg, fileName })
              return {
                success: false,
                message: `${formatName} generation failed: ${msg}. Fix the code and retry.`,
                message: `${docInfo.formatName} generation failed: ${msg}. Fix the code and retry.`,
              }
            }
            contentType = sourceMime!
          } else {
            contentType = inferContentType(fileName, explicitType)
            contentType = docInfo.sourceMime!
          }

          const fileBuffer = Buffer.from(content, 'utf-8')

          assertServerToolNotAborted(context)
          const result = await uploadWorkspaceFile(
            workspaceId,
@@ -201,7 +305,7 @@ export const workspaceFileServerTool: BaseServerTool<WorkspaceFileArgs, Workspac
            contentType
          )

          logger.info('Workspace file written via copilot', {
          logger.info('Workspace file created via copilot', {
            fileId: result.id,
            name: fileName,
            size: fileBuffer.length,
@@ -222,66 +326,127 @@ export const workspaceFileServerTool: BaseServerTool<WorkspaceFileArgs, Workspac
          }
        }

        case 'update': {
          const fileId = (args as Record<string, unknown>).fileId as string | undefined
          const content = (args as Record<string, unknown>).content as string | undefined

          if (!fileId) {
            return { success: false, message: 'fileId is required for update operation' }
        case 'append': {
          const target = normalized.target
          if (!target || target.kind !== 'file_id') {
            return {
              success: false,
              message: 'append requires target.kind=file_id with target.fileId',
            }
          }
          if (content === undefined || content === null) {
            return { success: false, message: 'content is required for update operation' }
          if (normalized.content === undefined || normalized.content === null) {
            return { success: false, message: 'content is required for append operation' }
          }

          const fileRecord = await getWorkspaceFile(workspaceId, fileId)
          if (!fileRecord) {
            return { success: false, message: `File with ID "${fileId}" not found` }
          const existingFile = await getWorkspaceFile(workspaceId, target.fileId)
          if (!existingFile) {
            return { success: false, message: `File with ID "${target.fileId}" not found` }
          }
          if (target.fileName && target.fileName !== existingFile.name) {
            return {
              success: false,
              message: `Target mismatch: fileId "${target.fileId}" is "${existingFile.name}", not "${target.fileName}"`,
            }
          }

          const updateLowerName = fileRecord.name?.toLowerCase() ?? ''
          const isPptxUpdate = updateLowerName.endsWith('.pptx')
          const isDocxUpdate = updateLowerName.endsWith('.docx')
          const isPdfUpdate = updateLowerName.endsWith('.pdf')
          const isDocUpdate = isPptxUpdate || isDocxUpdate || isPdfUpdate
          const docInfo = getDocumentFormatInfo(existingFile.name)
          const currentBuffer = await downloadWsFile(existingFile)
          const combined = docInfo.isDoc
            ? `${currentBuffer.toString('utf-8')}\n{\n${normalized.content}\n}`
            : `${currentBuffer.toString('utf-8')}\n${normalized.content}`

          if (isDocUpdate) {
            const formatName = isPptxUpdate ? 'PPTX' : isDocxUpdate ? 'DOCX' : 'PDF'
            const generator = isPptxUpdate
              ? generatePptxFromCode
              : isDocxUpdate
                ? generateDocxFromCode
                : generatePdfFromCode
          if (docInfo.isDoc) {
            try {
              await generator(content, workspaceId)
              await docInfo.generator!(combined, workspaceId)
            } catch (err) {
              const msg = err instanceof Error ? err.message : String(err)
              return {
                success: false,
                message: `${formatName} generation failed: ${msg}. Fix the code and retry.`,
                message: `${docInfo.formatName} generation failed after append: ${msg}. Fix the content and retry.`,
              }
            }
          }

          const updateSourceMime = isPptxUpdate
            ? PPTX_SOURCE_MIME
            : isDocxUpdate
              ? DOCX_SOURCE_MIME
              : isPdfUpdate
                ? PDF_SOURCE_MIME
                : undefined
          const fileBuffer = Buffer.from(content, 'utf-8')

          const combinedBuffer = Buffer.from(combined, 'utf-8')
          assertServerToolNotAborted(context)
          const appendMime = docInfo.sourceMime || inferContentType(existingFile.name, normalized.contentType)
          await updateWorkspaceFileContent(
            workspaceId,
            fileId,
            existingFile.id,
            context.userId,
            combinedBuffer,
            appendMime
          )

          logger.info('Workspace file appended via copilot', {
            fileId: existingFile.id,
            name: existingFile.name,
            appendedSize: normalized.content.length,
            totalSize: combinedBuffer.length,
            userId: context.userId,
          })

          return {
            success: true,
            message: `Content appended to "${existingFile.name}" (${normalized.content.length} bytes added, ${combinedBuffer.length} bytes total)`,
            data: {
              id: existingFile.id,
              name: existingFile.name,
              size: combinedBuffer.length,
              contentType: appendMime,
            },
          }
        }

        case 'update': {
          const target = normalized.target
          if (!target || target.kind !== 'file_id') {
            return {
              success: false,
              message: 'update requires target.kind=file_id with target.fileId',
            }
          }
          if (normalized.content === undefined || normalized.content === null) {
            return { success: false, message: 'content is required for update operation' }
          }

          const fileRecord = await getWorkspaceFile(workspaceId, target.fileId)
          if (!fileRecord) {
            return { success: false, message: `File with ID "${target.fileId}" not found` }
          }
          if (target.fileName && target.fileName !== fileRecord.name) {
            return {
              success: false,
              message: `Target mismatch: fileId "${target.fileId}" is "${fileRecord.name}", not "${target.fileName}"`,
            }
          }

          const docInfo = getDocumentFormatInfo(fileRecord.name)
          if (docInfo.isDoc) {
            try {
              await docInfo.generator!(normalized.content, workspaceId)
            } catch (err) {
              const msg = err instanceof Error ? err.message : String(err)
              return {
                success: false,
                message: `${docInfo.formatName} generation failed: ${msg}. Fix the code and retry.`,
              }
            }
          }

          const fileBuffer = Buffer.from(normalized.content, 'utf-8')
          assertServerToolNotAborted(context)
          const updateMime = docInfo.sourceMime || inferContentType(fileRecord.name, normalized.contentType)
          await updateWorkspaceFileContent(
            workspaceId,
            target.fileId,
            context.userId,
            fileBuffer,
            updateSourceMime
            updateMime
          )

          logger.info('Workspace file updated via copilot', {
            fileId,
            fileId: target.fileId,
            name: fileRecord.name,
            size: fileBuffer.length,
            userId: context.userId,
@@ -291,67 +456,70 @@ export const workspaceFileServerTool: BaseServerTool<WorkspaceFileArgs, Workspac
            success: true,
            message: `File "${fileRecord.name}" updated successfully (${fileBuffer.length} bytes)`,
            data: {
              id: fileId,
              id: target.fileId,
              name: fileRecord.name,
              size: fileBuffer.length,
              contentType: updateMime,
            },
          }
        }

        case 'rename': {
          const fileId = (args as Record<string, unknown>).fileId as string | undefined
          const newName = (args as Record<string, unknown>).newName as string | undefined

          if (!fileId) {
            return { success: false, message: 'fileId is required for rename operation' }
          const target = normalized.target
          if (!target || target.kind !== 'file_id') {
            return {
              success: false,
              message: 'rename requires target.kind=file_id with target.fileId',
            }
          }
          if (!newName) {
          if (!normalized.newName) {
            return { success: false, message: 'newName is required for rename operation' }
          }
          const fileNameValidationError = validateFlatWorkspaceFileName(newName)
          if (fileNameValidationError) {
            return { success: false, message: fileNameValidationError }
          }
          const fileNameValidationError = validateFlatWorkspaceFileName(normalized.newName)
          if (fileNameValidationError) return { success: false, message: fileNameValidationError }

          const fileRecord = await getWorkspaceFile(workspaceId, fileId)
          const fileRecord = await getWorkspaceFile(workspaceId, target.fileId)
          if (!fileRecord) {
            return { success: false, message: `File with ID "${fileId}" not found` }
            return { success: false, message: `File with ID "${target.fileId}" not found` }
          }

          const oldName = fileRecord.name
          assertServerToolNotAborted(context)
          await renameWorkspaceFile(workspaceId, fileId, newName)
          await renameWorkspaceFile(workspaceId, target.fileId, normalized.newName)

          logger.info('Workspace file renamed via copilot', {
            fileId,
            fileId: target.fileId,
            oldName,
            newName,
            newName: normalized.newName,
            userId: context.userId,
          })

          return {
            success: true,
            message: `File renamed from "${oldName}" to "${newName}"`,
            data: { id: fileId, name: newName },
            message: `File renamed from "${oldName}" to "${normalized.newName}"`,
            data: { id: target.fileId, name: normalized.newName },
          }
        }

        case 'delete': {
          const fileId = (args as Record<string, unknown>).fileId as string | undefined
          if (!fileId) {
            return { success: false, message: 'fileId is required for delete operation' }
          const target = normalized.target
          if (!target || target.kind !== 'file_id') {
            return {
              success: false,
              message: 'delete requires target.kind=file_id with target.fileId',
            }
          }

          const fileRecord = await getWorkspaceFile(workspaceId, fileId)
          const fileRecord = await getWorkspaceFile(workspaceId, target.fileId)
          if (!fileRecord) {
            return { success: false, message: `File with ID "${fileId}" not found` }
            return { success: false, message: `File with ID "${target.fileId}" not found` }
          }

          assertServerToolNotAborted(context)
          await deleteWorkspaceFile(workspaceId, fileId)
          await deleteWorkspaceFile(workspaceId, target.fileId)

          logger.info('Workspace file deleted via copilot', {
            fileId,
            fileId: target.fileId,
            name: fileRecord.name,
            userId: context.userId,
          })
@@ -359,44 +527,33 @@ export const workspaceFileServerTool: BaseServerTool<WorkspaceFileArgs, Workspac
          return {
            success: true,
            message: `File "${fileRecord.name}" deleted successfully`,
            data: { id: fileId, name: fileRecord.name },
            data: { id: target.fileId, name: fileRecord.name },
          }
        }

        case 'patch': {
          const fileId = (args as Record<string, unknown>).fileId as string | undefined
          const edit = (args as Record<string, unknown>).edit as
            | {
                mode: string
                before_anchor?: string
                after_anchor?: string
                start_anchor?: string
                end_anchor?: string
                anchor?: string
                content?: string
                occurrence?: number
              }
            | undefined
          const legacyEdits = (args as Record<string, unknown>).edits as
            | { search: string; replace: string }[]
            | undefined

          if (!fileId) {
            return { success: false, message: 'fileId is required for patch operation' }
          const target = normalized.target
          if (!target || target.kind !== 'file_id') {
            return {
              success: false,
              message: 'patch requires target.kind=file_id with target.fileId',
            }
          }
          if (!normalized.edit) {
            return { success: false, message: 'edit is required for patch operation' }
          }

          const fileRecord = await getWorkspaceFile(workspaceId, fileId)
          const fileRecord = await getWorkspaceFile(workspaceId, target.fileId)
          if (!fileRecord) {
            return { success: false, message: `File with ID "${fileId}" not found` }
            return { success: false, message: `File with ID "${target.fileId}" not found` }
          }

          const currentBuffer = await downloadWsFile(fileRecord)
          let content = currentBuffer.toString('utf-8')

          if (edit && typeof edit.mode === 'string') {
          if (normalized.edit.strategy === 'anchored') {
            const lines = content.split('\n')

            const defaultOccurrence = edit.occurrence ?? 1
            const defaultOccurrence = normalized.edit.occurrence ?? 1

            const findAnchorLine = (
              anchor: string,
@@ -423,16 +580,16 @@ export const workspaceFileServerTool: BaseServerTool<WorkspaceFileArgs, Workspac
              }
            }

            if (edit.mode === 'replace_between') {
              if (!edit.before_anchor || !edit.after_anchor) {
            if (normalized.edit.mode === 'replace_between') {
              if (!normalized.edit.before_anchor || !normalized.edit.after_anchor) {
                return {
                  success: false,
                  message: 'replace_between requires before_anchor and after_anchor',
                }
              }
              const before = findAnchorLine(edit.before_anchor)
              const before = findAnchorLine(normalized.edit.before_anchor)
              if (before.error) return { success: false, message: `Patch failed: ${before.error}` }
              const after = findAnchorLine(edit.after_anchor, defaultOccurrence, before.index)
              const after = findAnchorLine(normalized.edit.after_anchor, defaultOccurrence, before.index)
              if (after.error) return { success: false, message: `Patch failed: ${after.error}` }
              if (after.index <= before.index) {
                return {
@@ -440,36 +597,34 @@ export const workspaceFileServerTool: BaseServerTool<WorkspaceFileArgs, Workspac
                  message: 'Patch failed: after_anchor must appear after before_anchor in the file',
                }
              }

              const newLines = [
                ...lines.slice(0, before.index + 1),
                ...(edit.content ?? '').split('\n'),
                ...((normalized.edit.content ?? '').split('\n')),
                ...lines.slice(after.index),
              ]
              content = newLines.join('\n')
            } else if (edit.mode === 'insert_after') {
              if (!edit.anchor) {
            } else if (normalized.edit.mode === 'insert_after') {
              if (!normalized.edit.anchor) {
                return { success: false, message: 'insert_after requires anchor' }
              }
              const found = findAnchorLine(edit.anchor)
              const found = findAnchorLine(normalized.edit.anchor)
              if (found.error) return { success: false, message: `Patch failed: ${found.error}` }

              const newLines = [
                ...lines.slice(0, found.index + 1),
                ...(edit.content ?? '').split('\n'),
                ...((normalized.edit.content ?? '').split('\n')),
                ...lines.slice(found.index + 1),
              ]
              content = newLines.join('\n')
            } else if (edit.mode === 'delete_between') {
              if (!edit.start_anchor || !edit.end_anchor) {
            } else if (normalized.edit.mode === 'delete_between') {
              if (!normalized.edit.start_anchor || !normalized.edit.end_anchor) {
                return {
                  success: false,
                  message: 'delete_between requires start_anchor and end_anchor',
                }
              }
              const start = findAnchorLine(edit.start_anchor)
              const start = findAnchorLine(normalized.edit.start_anchor)
              if (start.error) return { success: false, message: `Patch failed: ${start.error}` }
              const end = findAnchorLine(edit.end_anchor, defaultOccurrence, start.index)
              const end = findAnchorLine(normalized.edit.end_anchor, defaultOccurrence, start.index)
              if (end.error) return { success: false, message: `Patch failed: ${end.error}` }
              if (end.index <= start.index) {
                return {
@@ -477,96 +632,79 @@ export const workspaceFileServerTool: BaseServerTool<WorkspaceFileArgs, Workspac
                  message: 'Patch failed: end_anchor must appear after start_anchor in the file',
                }
              }

              const newLines = [...lines.slice(0, start.index), ...lines.slice(end.index)]
              content = newLines.join('\n')
            } else {
              return {
                success: false,
                message: `Unknown edit mode: "${edit.mode}". Use "replace_between", "insert_after", or "delete_between".`,
                message: `Unknown anchored patch mode: "${normalized.edit.mode}"`,
              }
            }
          } else if (legacyEdits && Array.isArray(legacyEdits) && legacyEdits.length > 0) {
            for (const le of legacyEdits) {
              const firstIdx = content.indexOf(le.search)
              if (firstIdx === -1) {
                return {
                  success: false,
                  message: `Patch failed: search string not found in file "${fileRecord.name}". Search: "${le.search.slice(0, 100)}${le.search.length > 100 ? '...' : ''}"`,
                }
          } else if (normalized.edit.strategy === 'search_replace') {
            const search = normalized.edit.search
            const replace = normalized.edit.replace
            const firstIdx = content.indexOf(search)
            if (firstIdx === -1) {
              return {
                success: false,
                message: `Patch failed: search string not found in file "${fileRecord.name}". Search: "${search.slice(0, 100)}${search.length > 100 ? '...' : ''}"`,
              }
              if (content.indexOf(le.search, firstIdx + 1) !== -1) {
                return {
                  success: false,
                  message: `Patch failed: search string is ambiguous — found at multiple locations in "${fileRecord.name}". Use a longer, unique search string.`,
                }
              }
              content =
                content.slice(0, firstIdx) + le.replace + content.slice(firstIdx + le.search.length)
            }
            if (!normalized.edit.replaceAll && content.indexOf(search, firstIdx + 1) !== -1) {
              return {
                success: false,
                message: `Patch failed: search string is ambiguous — found at multiple locations in "${fileRecord.name}". Use a longer unique search string or replaceAll.`,
              }
            }
            content = normalized.edit.replaceAll
              ? content.split(search).join(replace)
              : content.slice(0, firstIdx) + replace + content.slice(firstIdx + search.length)
          } else {
            return {
              success: false,
              message: 'patch requires either an edit object (with mode) or a legacy edits array',
              message: `Unknown patch strategy: "${(normalized.edit as { strategy?: string }).strategy}"`,
            }
          }

          const patchLowerName = fileRecord.name?.toLowerCase() ?? ''
          const isPptxPatch = patchLowerName.endsWith('.pptx')
          const isDocxPatch = patchLowerName.endsWith('.docx')
          const isPdfPatch = patchLowerName.endsWith('.pdf')
          const isDocPatch = isPptxPatch || isDocxPatch || isPdfPatch

          if (isDocPatch) {
            const formatName = isPptxPatch ? 'PPTX' : isDocxPatch ? 'DOCX' : 'PDF'
            const generator = isPptxPatch
              ? generatePptxFromCode
              : isDocxPatch
                ? generateDocxFromCode
                : generatePdfFromCode
          const docInfo = getDocumentFormatInfo(fileRecord.name)
          if (docInfo.isDoc) {
            try {
              await generator(content, workspaceId)
              await docInfo.generator!(content, workspaceId)
            } catch (err) {
              const msg = err instanceof Error ? err.message : String(err)
              return {
                success: false,
                message: `Patched ${formatName} code failed to compile: ${msg}. Fix the edits and retry.`,
                message: `Patched ${docInfo.formatName} code failed to compile: ${msg}. Fix the edit and retry.`,
              }
            }
          }

          const patchSourceMime = isPptxPatch
            ? PPTX_SOURCE_MIME
            : isDocxPatch
              ? DOCX_SOURCE_MIME
              : isPdfPatch
                ? PDF_SOURCE_MIME
                : undefined
          const patchedBuffer = Buffer.from(content, 'utf-8')
          assertServerToolNotAborted(context)
          const patchMime = docInfo.sourceMime || inferContentType(fileRecord.name)
          await updateWorkspaceFileContent(
            workspaceId,
            fileId,
            target.fileId,
            context.userId,
            patchedBuffer,
            patchSourceMime
            patchMime
          )

          const editMode = edit?.mode ?? 'legacy'
          logger.info('Workspace file patched via copilot', {
            fileId,
            fileId: target.fileId,
            name: fileRecord.name,
            editMode,
            strategy: normalized.edit.strategy,
            userId: context.userId,
          })

          return {
            success: true,
            message: `File "${fileRecord.name}" patched successfully (${editMode} edit applied)`,
            message: `File "${fileRecord.name}" patched successfully (${normalized.edit.strategy} edit applied)`,
            data: {
              id: fileId,
              id: target.fileId,
              name: fileRecord.name,
              size: patchedBuffer.length,
              contentType: patchMime,
            },
          }
        }
@@ -574,7 +712,7 @@ export const workspaceFileServerTool: BaseServerTool<WorkspaceFileArgs, Workspac
        default:
          return {
            success: false,
            message: `Unknown operation: ${operation}. Supported: write, update, patch, rename, delete.`,
            message: `Unknown operation: ${operation}. Supported: create, append, update, patch, rename, delete.`,
          }
      }
    } catch (error) {

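To make the two patch strategies concrete, here is an illustrative run against a made-up file (anchors and content are invented; the behavior follows the slice logic above):

// Illustrative only: the two patch strategies applied to a made-up file.
const fileLines = ['## Setup', 'old step one', 'old step two', '## Usage']
// anchored replace_between with before_anchor '## Setup', after_anchor '## Usage',
// and content 'npm install' keeps both anchor lines and swaps what sits between:
//   ['## Setup', 'npm install', '## Usage']
// search_replace with { search: 'old', replace: 'new', replaceAll: true } rewrites
// every occurrence; the same edit without replaceAll is rejected as ambiguous
// because 'old' appears on two lines.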
apps/sim/lib/copilot/tools/server/generated-schema.ts (new file, 46 lines)
@@ -0,0 +1,46 @@
import Ajv, { type ErrorObject, type ValidateFunction } from 'ajv'
import { TOOL_RUNTIME_SCHEMAS } from '@/lib/copilot/generated/tool-schemas-v1'

const ajv = new Ajv({
  allErrors: true,
  strict: false,
})

const validatorCache = new Map<string, ValidateFunction>()

function formatErrors(errors: ErrorObject[] | null | undefined): string {
  if (!errors || errors.length === 0) return 'unknown validation error'
  return errors
    .slice(0, 5)
    .map((error) => `${error.instancePath || '/'} ${error.message || 'is invalid'}`.trim())
    .join('; ')
}

function getValidator(toolName: string, schemaKind: 'parameters' | 'resultSchema'): ValidateFunction | null {
  const cacheKey = `${toolName}:${schemaKind}`
  const cached = validatorCache.get(cacheKey)
  if (cached) return cached

  const schema = TOOL_RUNTIME_SCHEMAS[toolName]?.[schemaKind]
  if (!schema) return null

  const validator = ajv.compile(schema as object)
  validatorCache.set(cacheKey, validator)
  return validator
}

export function validateGeneratedToolPayload<T>(
  toolName: string,
  schemaKind: 'parameters' | 'resultSchema',
  payload: T
): T {
  const validator = getValidator(toolName, schemaKind)
  if (!validator) return payload

  if (!validator(payload)) {
    const label = schemaKind === 'parameters' ? 'input' : 'output'
    throw new Error(`${toolName} ${label} validation failed: ${formatErrors(validator.errors)}`)
  }

  return payload
}
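A quick illustration of the contract this file exposes (the payload is made up, and it assumes the generated catalog carries a schema for workspace_file; the error wording is Ajv's standard enum message as rendered by formatErrors):

// Illustrative only: validating a hypothetical tool call against its
// generated JSON schema. Unknown tools pass through; violations throw.
const args = validateGeneratedToolPayload('workspace_file', 'parameters', {
  operation: 'create',
  target: { kind: 'new_file', fileName: 'notes.md' },
})
// A bad payload instead throws, e.g.:
// Error: workspace_file input validation failed: /operation must be equal to one of the allowed values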
@@ -9,7 +9,6 @@ import {
  type BaseServerTool,
  type ServerToolContext,
} from '@/lib/copilot/tools/server/base-tool'
import type { KnowledgeBaseArgs, KnowledgeBaseResult } from '@/lib/copilot/tools/shared/schemas'
import { getInternalApiBaseUrl } from '@/lib/core/utils/urls'
import { generateId } from '@/lib/core/utils/uuid'
import {
@@ -40,6 +39,17 @@ import { getQueryStrategy, handleVectorOnlySearch } from '@/app/api/knowledge/se

const logger = createLogger('KnowledgeBaseServerTool')

type KnowledgeBaseArgs = {
  operation: string
  args?: Record<string, any>
}

type KnowledgeBaseResult = {
  success: boolean
  message: string
  data?: any
}

/**
 * Knowledge base tool for copilot to create, list, and get knowledge bases
 */

@@ -33,11 +33,17 @@ import { generateVisualizationServerTool } from '@/lib/copilot/tools/server/visu
import { editWorkflowServerTool } from '@/lib/copilot/tools/server/workflow/edit-workflow'
import { getExecutionSummaryServerTool } from '@/lib/copilot/tools/server/workflow/get-execution-summary'
import { getWorkflowLogsServerTool } from '@/lib/copilot/tools/server/workflow/get-workflow-logs'
import { ExecuteResponseSuccessSchema } from '@/lib/copilot/tools/shared/schemas'
import { z } from 'zod'
import { validateGeneratedToolPayload } from '@/lib/copilot/tools/server/generated-schema'

export { ExecuteResponseSuccessSchema }
export type ExecuteResponseSuccess = (typeof ExecuteResponseSuccessSchema)['_type']

const ExecuteResponseSuccessSchema = z.object({
  success: z.literal(true),
  result: z.unknown(),
})

const logger = createLogger('ServerToolRouter')

const WRITE_ACTIONS: Record<string, string[]> = {
@@ -76,7 +82,7 @@ const WRITE_ACTIONS: Record<string, string[]> = {
  [ManageMcpTool.id]: ['add', 'edit', 'delete'],
  [ManageSkill.id]: ['add', 'edit', 'delete'],
  [ManageCredential.id]: ['rename', 'delete'],
  [WorkspaceFile.id]: ['write', 'update', 'delete', 'rename', 'patch'],
  [WorkspaceFile.id]: ['create', 'append', 'update', 'delete', 'rename', 'patch'],
  [DownloadToWorkspaceFile.id]: ['*'],
  [GenerateVisualization.id]: ['generate'],
  [GenerateImage.id]: ['generate'],
@@ -153,14 +159,20 @@ export async function routeExecution(

  assertServerToolNotAborted(context)

  // Validate input if tool declares a schema
  const args = tool.inputSchema ? tool.inputSchema.parse(payload ?? {}) : (payload ?? {})
  // Validate input if tool declares a schema; otherwise fall back to the
  // generated JSON schema contract emitted from Go.
  const args = tool.inputSchema
    ? tool.inputSchema.parse(payload ?? {})
    : validateGeneratedToolPayload(toolName, 'parameters', payload ?? {})

  assertServerToolNotAborted(context)

  // Execute
  const result = await tool.execute(args, context)

  // Validate output if tool declares a schema
  return tool.outputSchema ? tool.outputSchema.parse(result) : result
  // Validate output if tool declares a schema; otherwise fall back to the
  // generated JSON schema contract emitted from Go.
  return tool.outputSchema
    ? tool.outputSchema.parse(result)
    : validateGeneratedToolPayload(toolName, 'resultSchema', result)
}

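The WRITE_ACTIONS table gates which operations count as mutations; the gating code itself sits outside this hunk. A hedged sketch of how a consumer might consult it (the helper name below is made up):

// Hedged sketch only: how a write gate could read WRITE_ACTIONS.
function isWriteAction(toolId: string, operation: string | undefined): boolean {
  const actions = WRITE_ACTIONS[toolId]
  if (!actions) return false
  return actions.includes('*') || (operation !== undefined && actions.includes(operation))
}
// After this change, isWriteAction(WorkspaceFile.id, 'append') is true,
// while the retired 'write' operation no longer matches.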
@@ -5,7 +5,6 @@ import {
  type BaseServerTool,
  type ServerToolContext,
} from '@/lib/copilot/tools/server/base-tool'
import type { UserTableArgs, UserTableResult } from '@/lib/copilot/tools/shared/schemas'
import { generateId } from '@/lib/core/utils/uuid'
import { COLUMN_TYPES } from '@/lib/table/constants'
import {
@@ -38,6 +37,17 @@ import {

const logger = createLogger('UserTableServerTool')

type UserTableArgs = {
  operation: string
  args?: Record<string, any>
}

type UserTableResult = {
  success: boolean
  message: string
  data?: any
}

const MAX_BATCH_SIZE = 1000
const SCHEMA_SAMPLE_SIZE = 100

@@ -1,274 +0,0 @@
|
||||
import { z } from 'zod'
|
||||
|
||||
// Generic envelope used by client to validate API responses
|
||||
export const ExecuteResponseSuccessSchema = z.object({
|
||||
success: z.literal(true),
|
||||
result: z.unknown(),
|
||||
})
|
||||
export type ExecuteResponseSuccess = z.infer<typeof ExecuteResponseSuccessSchema>
|
||||
|
||||
// get_blocks_metadata
|
||||
export const GetBlocksMetadataInput = z.object({ blockIds: z.array(z.string()).min(1) })
|
||||
export const GetBlocksMetadataResult = z.object({ metadata: z.record(z.any()) })
|
||||
export type GetBlocksMetadataResultType = z.infer<typeof GetBlocksMetadataResult>
|
||||
|
||||
// get_trigger_blocks
|
||||
export const GetTriggerBlocksInput = z.object({})
|
||||
export const GetTriggerBlocksResult = z.object({
|
||||
triggerBlockIds: z.array(z.string()),
|
||||
})
|
||||
export type GetTriggerBlocksResultType = z.infer<typeof GetTriggerBlocksResult>
|
||||
|
||||
// knowledge_base - shared schema used by client tool, server tool, and registry
|
||||
export const KnowledgeBaseArgsSchema = z.object({
|
||||
operation: z.enum([
|
||||
'create',
|
||||
'get',
|
||||
'query',
|
||||
'update',
|
||||
'delete',
|
||||
'add_file',
|
||||
'delete_document',
|
||||
'update_document',
|
||||
'list_tags',
|
||||
'create_tag',
|
||||
'update_tag',
|
||||
'delete_tag',
|
||||
'get_tag_usage',
|
||||
'add_connector',
|
||||
'update_connector',
|
||||
'delete_connector',
|
||||
'sync_connector',
|
||||
]),
|
||||
args: z
|
||||
.object({
|
||||
/** Name of the knowledge base (required for create) */
|
||||
name: z.string().optional(),
|
||||
/** Description of the knowledge base (optional for create) */
|
||||
description: z.string().optional(),
|
||||
/** Workspace ID to associate with (required for create, optional for list) */
|
||||
workspaceId: z.string().optional(),
|
||||
/** Knowledge base ID (required for get, query, add_file, list_tags, create_tag, get_tag_usage, add_connector) */
|
||||
knowledgeBaseId: z.string().optional(),
|
||||
/** Workspace file ID to add as a document (required for add_file). */
|
||||
fileId: z.string().optional(),
|
||||
/** Legacy workspace file reference for add_file. Prefer fileId. */
|
||||
filePath: z.string().optional(),
|
||||
/** Search query text (required for query) */
|
||||
query: z.string().optional(),
|
||||
/** Number of results to return (optional for query, defaults to 5) */
|
||||
topK: z.number().min(1).max(50).optional(),
|
||||
/** Chunking configuration (optional for create) */
|
||||
chunkingConfig: z
|
||||
.object({
|
||||
maxSize: z.number().min(100).max(4000).default(1024),
|
||||
minSize: z.number().min(1).max(2000).default(1),
|
||||
overlap: z.number().min(0).max(500).default(200),
|
||||
})
|
||||
.optional(),
|
||||
/** Tag definition ID (required for update_tag, delete_tag) */
|
||||
tagDefinitionId: z.string().optional(),
|
||||
/** Tag display name (required for create_tag, optional for update_tag) */
|
||||
tagDisplayName: z.string().optional(),
|
||||
/** Tag field type: text, number, date, boolean (optional for create_tag, defaults to text) */
|
||||
tagFieldType: z.enum(['text', 'number', 'date', 'boolean']).optional(),
|
||||
/** Connector type from registry, e.g. "confluence" (required for add_connector) */
|
||||
connectorType: z.string().optional(),
|
||||
/** OAuth credential ID from environment/credentials.json (required for OAuth connectors) */
|
||||
credentialId: z.string().optional(),
|
||||
/** API key for API key-based connectors (required for API key connectors) */
|
||||
apiKey: z.string().optional(),
|
||||
/** Connector-specific config matching the schema in knowledgebases/connectors/{type}.json */
|
||||
sourceConfig: z.record(z.unknown()).optional(),
|
||||
/** Sync interval: 60, 360, 1440, 10080, or 0 for manual only (optional for add_connector, defaults to 1440) */
|
||||
syncIntervalMinutes: z.number().int().min(0).optional(),
|
||||
/** Connector ID (required for update_connector, delete_connector, sync_connector) */
|
||||
connectorId: z.string().optional(),
|
||||
/** Connector status: "active" or "paused" (optional for update_connector) */
|
||||
connectorStatus: z.enum(['active', 'paused']).optional(),
|
||||
/** Tag definition IDs to disable (optional for add_connector) */
|
||||
disabledTagIds: z.array(z.string()).optional(),
|
||||
/** Document ID (required for delete_document, update_document) */
|
||||
documentId: z.string().optional(),
|
||||
/** Enable/disable a document (optional for update_document) */
|
||||
enabled: z.boolean().optional(),
|
||||
/** New filename for a document (optional for update_document) */
|
||||
filename: z.string().optional(),
|
||||
})
|
||||
.optional(),
|
||||
})
|
||||
export type KnowledgeBaseArgs = z.infer<typeof KnowledgeBaseArgsSchema>
|
||||
|
||||
export const KnowledgeBaseResultSchema = z.object({
|
||||
success: z.boolean(),
|
||||
message: z.string(),
|
||||
data: z.any().optional(),
|
||||
})
|
||||
export type KnowledgeBaseResult = z.infer<typeof KnowledgeBaseResultSchema>

// user_table - shared schema used by server tool and registry
export const UserTableArgsSchema = z.object({
  operation: z.enum([
    'create',
    'create_from_file',
    'import_file',
    'get',
    'get_schema',
    'delete',
    'insert_row',
    'batch_insert_rows',
    'get_row',
    'query_rows',
    'update_row',
    'delete_row',
    'update_rows_by_filter',
    'delete_rows_by_filter',
    'batch_update_rows',
    'batch_delete_rows',
    'add_column',
    'rename_column',
    'delete_column',
    'update_column',
    'rename',
  ]),
  args: z
    .object({
      name: z.string().optional(),
      description: z.string().optional(),
      schema: z.any().optional(),
      tableId: z.string().optional(),
      rowId: z.string().optional(),
      data: z.record(z.any()).optional(),
      rows: z.array(z.record(z.any())).optional(),
      updates: z.array(z.object({ rowId: z.string(), data: z.record(z.any()) })).optional(),
      rowIds: z.array(z.string()).optional(),
      values: z.record(z.any()).optional(),
      filter: z.any().optional(),
      sort: z.record(z.enum(['asc', 'desc'])).optional(),
      limit: z.number().optional(),
      offset: z.number().optional(),
      fileId: z.string().optional(),
      filePath: z.string().optional(),
      column: z
        .object({
          name: z.string(),
          type: z.string(),
          unique: z.boolean().optional(),
          position: z.number().optional(),
        })
        .optional(),
      columnName: z.string().optional(),
      columnNames: z.array(z.string()).optional(),
      newName: z.string().optional(),
      newType: z.string().optional(),
      unique: z.boolean().optional(),
    })
    .optional(),
})
export type UserTableArgs = z.infer<typeof UserTableArgsSchema>

export const UserTableResultSchema = z.object({
  success: z.boolean(),
  message: z.string(),
  data: z.any().optional(),
})
export type UserTableResult = z.infer<typeof UserTableResultSchema>
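
As a concrete instance, a typed `query_rows` call could look like this; the table id and filter payload are illustrative, and the filter shape is deliberately loose since the schema leaves it as z.any():

const queryArgs: UserTableArgs = {
  operation: 'query_rows',
  args: {
    tableId: 'tbl_123', // illustrative id
    filter: { status: 'open' }, // free-form: validated server-side, not by this schema
    sort: { createdAt: 'desc' },
    limit: 20,
  },
}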

// workspace_file - shared schema used by server tool and Go catalog
export const WorkspaceFileArgsSchema = z.object({
  operation: z.enum(['write', 'update', 'delete', 'rename', 'patch']),
  args: z
    .object({
      fileId: z.string().optional(),
      fileName: z.string().optional(),
      content: z.string().optional(),
      contentType: z.string().optional(),
      workspaceId: z.string().optional(),
      newName: z.string().optional(),
      edits: z
        .array(z.object({ search: z.string(), replace: z.string() }))
        .describe(
          'List of search/replace pairs applied sequentially — each edit operates on the result of the previous one. Search strings must be unique within the file.'
        )
        .optional(),
    })
    .optional(),
})
export type WorkspaceFileArgs = z.infer<typeof WorkspaceFileArgsSchema>
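
The `edits` contract (sequential application, search strings unique within the file) can be made concrete with a small helper; this is a sketch of the documented semantics, not the server-side implementation:

// Applies each search/replace pair to the output of the previous one,
// rejecting searches that match zero times or more than once.
function applyEdits(content: string, edits: Array<{ search: string; replace: string }>): string {
  return edits.reduce((acc, { search, replace }) => {
    const first = acc.indexOf(search)
    if (first === -1 || acc.indexOf(search, first + 1) !== -1) {
      throw new Error(`search string must appear exactly once: ${JSON.stringify(search)}`)
    }
    return acc.slice(0, first) + replace + acc.slice(first + search.length)
  }, content)
}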

export const WorkspaceFileResultSchema = z.object({
  success: z.boolean(),
  message: z.string(),
  data: z.any().optional(),
})
export type WorkspaceFileResult = z.infer<typeof WorkspaceFileResultSchema>

export const GetBlockOutputsInput = z.object({
  blockIds: z.array(z.string()).optional(),
})
export const GetBlockOutputsResult = z.object({
  blocks: z.array(
    z.object({
      blockId: z.string(),
      blockName: z.string(),
      blockType: z.string(),
      triggerMode: z.boolean().optional(),
      outputs: z.array(z.string()),
      insideSubflowOutputs: z.array(z.string()).optional(),
      outsideSubflowOutputs: z.array(z.string()).optional(),
    })
  ),
  variables: z
    .array(
      z.object({
        id: z.string(),
        name: z.string(),
        type: z.string(),
        tag: z.string(),
      })
    )
    .optional(),
})
export type GetBlockOutputsInputType = z.infer<typeof GetBlockOutputsInput>
export type GetBlockOutputsResultType = z.infer<typeof GetBlockOutputsResult>
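
A representative value of the result type, with illustrative ids and names; the comment on insideSubflowOutputs reflects an assumption based on the field name, not the source:

const sampleOutputs: GetBlockOutputsResultType = {
  blocks: [
    {
      blockId: 'blk_1',
      blockName: 'Agent 1',
      blockType: 'agent',
      outputs: ['content', 'model', 'tokens'],
      // assumption: populated only when the block sits inside a loop/parallel subflow
      insideSubflowOutputs: ['index'],
    },
  ],
  variables: [{ id: 'var_1', name: 'threshold', type: 'number', tag: 'threshold' }],
}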

export const GetBlockUpstreamReferencesInput = z.object({
  blockIds: z.array(z.string()).min(1),
})
export const GetBlockUpstreamReferencesResult = z.object({
  results: z.array(
    z.object({
      blockId: z.string(),
      blockName: z.string(),
      insideSubflows: z
        .array(
          z.object({
            blockId: z.string(),
            blockName: z.string(),
            blockType: z.string(),
          })
        )
        .optional(),
      accessibleBlocks: z.array(
        z.object({
          blockId: z.string(),
          blockName: z.string(),
          blockType: z.string(),
          triggerMode: z.boolean().optional(),
          outputs: z.array(z.string()),
          accessContext: z.enum(['inside', 'outside']).optional(),
        })
      ),
      variables: z.array(
        z.object({
          id: z.string(),
          name: z.string(),
          type: z.string(),
          tag: z.string(),
        })
      ),
    })
  ),
})
export type GetBlockUpstreamReferencesInputType = z.infer<typeof GetBlockUpstreamReferencesInput>
export type GetBlockUpstreamReferencesResultType = z.infer<typeof GetBlockUpstreamReferencesResult>
@@ -204,6 +204,49 @@ const EXTENSION_TO_MIME: Record<string, string> = {
  yml: 'application/x-yaml',
  rtf: 'application/rtf',

  // Code / plain-text source
  py: 'text/x-python',
  js: 'text/javascript',
  mjs: 'text/javascript',
  cjs: 'text/javascript',
  ts: 'text/typescript',
  tsx: 'text/typescript',
  jsx: 'text/javascript',
  go: 'text/x-go',
  rs: 'text/x-rust',
  java: 'text/x-java',
  kt: 'text/x-kotlin',
  c: 'text/x-c',
  cpp: 'text/x-c++',
  h: 'text/x-c',
  hpp: 'text/x-c++',
  cs: 'text/x-csharp',
  rb: 'text/x-ruby',
  php: 'text/x-php',
  swift: 'text/x-swift',
  sh: 'text/x-shellscript',
  bash: 'text/x-shellscript',
  zsh: 'text/x-shellscript',
  r: 'text/x-r',
  sql: 'text/x-sql',
  scala: 'text/x-scala',
  lua: 'text/x-lua',
  pl: 'text/x-perl',
  toml: 'text/x-toml',
  ini: 'text/plain',
  cfg: 'text/plain',
  conf: 'text/plain',
  env: 'text/plain',
  log: 'text/plain',
  makefile: 'text/x-makefile',
  dockerfile: 'text/x-dockerfile',
  css: 'text/css',
  scss: 'text/x-scss',
  less: 'text/x-less',
  graphql: 'text/x-graphql',
  gql: 'text/x-graphql',
  proto: 'text/x-protobuf',

  // Audio
  mp3: 'audio/mpeg',
  m4a: 'audio/mp4',

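Since `makefile` and `dockerfile` are extension-less filenames rather than true extensions, a lookup presumably falls back to the lowercased basename; a sketch of that resolution, where the helper name and octet-stream fallback are assumptions:

// Hypothetical helper over the EXTENSION_TO_MIME map extended above.
function resolveMime(fileName: string): string {
  const base = fileName.toLowerCase().split('/').pop() ?? ''
  // Use the extension when there is one; otherwise try the bare name (Makefile, Dockerfile).
  const key = base.includes('.') ? (base.split('.').pop() ?? '') : base
  return EXTENSION_TO_MIME[key] ?? 'application/octet-stream'
}
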
@@ -9,6 +9,10 @@ const DEFAULT_CATALOG_PATH = resolve(
  '../copilot/copilot/contracts/tool-catalog-v1.json'
)
const OUTPUT_PATH = resolve(ROOT, 'apps/sim/lib/copilot/generated/tool-catalog-v1.ts')
const RUNTIME_SCHEMA_OUTPUT_PATH = resolve(
  ROOT,
  'apps/sim/lib/copilot/generated/tool-schemas-v1.ts'
)

function snakeToPascal(s: string): string {
  return s.split('_').map((w) => w.charAt(0).toUpperCase() + w.slice(1)).join('')
@@ -22,7 +26,43 @@ function inferTSType(values: unknown[]): string {
  }
  if (unique.every((v) => typeof v === 'boolean')) return 'boolean'
  if (unique.every((v) => typeof v === 'number')) return 'number'
  return 'string'
  return 'unknown'
}

function renderRuntimeSchemaModule(catalog: { tools: Record<string, unknown>[] }): string {
  const lines: string[] = [
    '// AUTO-GENERATED FILE. DO NOT EDIT.',
    '// Generated from copilot/contracts/tool-catalog-v1.json',
    '//',
    '',
    'export type JsonSchema = unknown',
    '',
    'export interface ToolRuntimeSchemaEntry {',
    '  parameters?: JsonSchema;',
    '  resultSchema?: JsonSchema;',
    '}',
    '',
    'export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {',
  ]

  for (const tool of catalog.tools) {
    const id = JSON.stringify(tool.id)
    const parameters = 'parameters' in tool ? JSON.stringify(tool.parameters ?? null, null, 2) : 'undefined'
    const resultSchema =
      'resultSchema' in tool ? JSON.stringify(tool.resultSchema ?? null, null, 2) : 'undefined'
    lines.push(`  [${id}]: {`)
    lines.push(
      `    parameters: ${parameters === 'null' ? 'undefined' : parameters.replace(/\n/g, '\n    ')},`
    )
    lines.push(
      `    resultSchema: ${resultSchema === 'null' ? 'undefined' : resultSchema.replace(/\n/g, '\n    ')},`
    )
    lines.push('  },')
  }

  lines.push('}')
  lines.push('')
  return lines.join('\n')
}
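
For a single catalog entry, the emitted module would look roughly like the following; the tool id and parameter schema are invented for illustration:

// Illustrative shape of the generated tool-schemas-v1.ts (ids and schemas are examples):
export const TOOL_RUNTIME_SCHEMAS: Record<string, ToolRuntimeSchemaEntry> = {
  ["knowledge_base"]: {
    parameters: {
      "type": "object",
      "properties": { "operation": { "type": "string" } }
    },
    resultSchema: undefined,
  },
}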

function generateInterface(tools: Record<string, unknown>[]): string {
@@ -95,10 +135,12 @@ async function main() {
  lines.push('')

  const rendered = lines.join('\n')
  const runtimeSchemaRendered = renderRuntimeSchemaModule(catalog)

  if (checkOnly) {
    const existing = await readFile(OUTPUT_PATH, 'utf8').catch(() => null)
    if (existing !== rendered) {
    const existingRuntime = await readFile(RUNTIME_SCHEMA_OUTPUT_PATH, 'utf8').catch(() => null)
    if (existing !== rendered || existingRuntime !== runtimeSchemaRendered) {
      throw new Error(
        `Generated tool catalog is stale. Run: bun run mship-tools:generate`
      )
@@ -108,6 +150,8 @@ async function main() {

  await mkdir(dirname(OUTPUT_PATH), { recursive: true })
  await writeFile(OUTPUT_PATH, rendered, 'utf8')
  await mkdir(dirname(RUNTIME_SCHEMA_OUTPUT_PATH), { recursive: true })
  await writeFile(RUNTIME_SCHEMA_OUTPUT_PATH, runtimeSchemaRendered, 'utf8')
}

await main()