feat(mothership): binary file I/O for sandbox execution (base64 input/export, MIME-typed workspace upload, PDF preview via blob URL)

This commit is contained in:
Siddharth Ganesan
2026-04-04 14:19:24 -07:00
parent cac100a145
commit 0d09d11c33
7 changed files with 254 additions and 21 deletions

View File

@@ -593,6 +593,7 @@ async function maybeExportSandboxFileToWorkspace(args: {
workspaceId?: string
outputPath?: string
outputFormat?: string
outputMimeType?: string
outputSandboxPath?: string
exportedFileContent?: string
stdout: string
@@ -604,6 +605,7 @@ async function maybeExportSandboxFileToWorkspace(args: {
workspaceId,
outputPath,
outputFormat,
outputMimeType,
outputSandboxPath,
exportedFileContent,
stdout,
@@ -650,14 +652,23 @@ async function maybeExportSandboxFileToWorkspace(args: {
}
const fileName = normalizeOutputWorkspaceFileName(outputPath)
const format = resolveOutputFormat(fileName, outputFormat)
const contentType = FORMAT_TO_CONTENT_TYPE[format]
const TEXT_MIMES = new Set(Object.values(FORMAT_TO_CONTENT_TYPE))
const resolvedMimeType =
outputMimeType ||
FORMAT_TO_CONTENT_TYPE[resolveOutputFormat(fileName, outputFormat)] ||
'application/octet-stream'
const isBinary = !TEXT_MIMES.has(resolvedMimeType)
const fileBuffer = isBinary
? Buffer.from(exportedFileContent, 'base64')
: Buffer.from(exportedFileContent, 'utf-8')
const uploaded = await uploadWorkspaceFile(
resolvedWorkspaceId,
authUserId,
Buffer.from(exportedFileContent, 'utf-8'),
fileBuffer,
fileName,
contentType
resolvedMimeType
)
return NextResponse.json({
@@ -702,6 +713,7 @@ export async function POST(req: NextRequest) {
language = DEFAULT_CODE_LANGUAGE,
outputPath,
outputFormat,
outputMimeType,
outputSandboxPath,
envVars = {},
blockData = {},
@@ -815,6 +827,7 @@ export async function POST(req: NextRequest) {
workspaceId,
outputPath,
outputFormat,
outputMimeType,
outputSandboxPath,
exportedFileContent,
stdout: shellStdout,
@@ -938,6 +951,7 @@ export async function POST(req: NextRequest) {
workspaceId,
outputPath,
outputFormat,
outputMimeType,
outputSandboxPath,
exportedFileContent,
stdout,
@@ -1019,6 +1033,7 @@ export async function POST(req: NextRequest) {
workspaceId,
outputPath,
outputFormat,
outputMimeType,
outputSandboxPath,
exportedFileContent,
stdout,

View File

@@ -163,7 +163,7 @@ export function FileViewer({
}
if (category === 'iframe-previewable') {
return <IframePreview file={file} />
return <IframePreview file={file} workspaceId={workspaceId} />
}
if (category === 'image-previewable') {
@@ -454,13 +454,36 @@ function TextEditor({
)
}
const IframePreview = memo(function IframePreview({ file }: { file: WorkspaceFileRecord }) {
const serveUrl = `/api/files/serve/${encodeURIComponent(file.key)}?context=workspace`
const IframePreview = memo(function IframePreview({
file,
workspaceId,
}: {
file: WorkspaceFileRecord
workspaceId: string
}) {
const { data: fileData, isLoading } = useWorkspaceFileBinary(workspaceId, file.id, file.key)
const [blobUrl, setBlobUrl] = useState<string | null>(null)
useEffect(() => {
if (!fileData) return
const blob = new Blob([fileData], { type: 'application/pdf' })
const url = URL.createObjectURL(blob)
setBlobUrl(url)
return () => URL.revokeObjectURL(url)
}, [fileData])
if (isLoading || !blobUrl) {
return (
<div className='flex h-full items-center justify-center'>
<Skeleton className='h-[200px] w-[80%]' />
</div>
)
}
return (
<div className='flex flex-1 overflow-hidden'>
<iframe
src={serveUrl}
src={blobUrl}
className='h-full w-full border-0'
title={file.name}
onError={() => {

View File

@@ -1,6 +1,109 @@
import { createLogger } from '@sim/logger'
import { getTableById, queryRows } from '@/lib/table/service'
import {
downloadWorkspaceFile,
findWorkspaceFileRecord,
getSandboxWorkspaceFilePath,
listWorkspaceFiles,
} from '@/lib/uploads/contexts/workspace/workspace-file-manager'
import { executeTool as executeAppTool } from '@/tools'
import type { ToolExecutionContext, ToolExecutionResult } from '../../tool-executor/types'
// Module-scoped logger for this copilot function-execute tool handler.
const logger = createLogger('CopilotFunctionExecute')
// Per-file and cumulative caps on input data attached to one sandbox run.
const MAX_FILE_SIZE = 10 * 1024 * 1024 // 10 MB per file
const MAX_TOTAL_SIZE = 50 * 1024 * 1024 // 50 MB across all files

/**
 * A file staged into the execution sandbox before user code runs.
 * `content` is UTF-8 text unless `encoding` is 'base64', in which case it
 * carries base64-encoded binary data. Mirrors the E2B-side SandboxFile shape
 * so no type assertion is needed when building entries.
 */
interface SandboxFile {
  path: string
  content: string
  encoding?: 'base64'
}

/** Escapes one CSV cell per RFC 4180 (quotes cells containing `,`, `"`, or newline). */
function toCsvCell(val: unknown): string {
  if (val === null || val === undefined) return ''
  // Serialize nested objects/arrays as JSON instead of '[object Object]'.
  const str = typeof val === 'object' ? JSON.stringify(val) : String(val)
  return str.includes(',') || str.includes('"') || str.includes('\n')
    ? `"${str.replace(/"/g, '""')}"`
    : str
}

/**
 * Resolves workspace file references and table IDs into files to stage in
 * the sandbox.
 *
 * - Workspace files are downloaded and attached at their sandbox path.
 *   Text-like MIME types (text/*, JSON, XML, CSV) travel as UTF-8; anything
 *   else is base64-encoded and flagged via `encoding` so binary bytes
 *   survive transport.
 * - Tables are exported as RFC 4180 CSV under /home/user/tables/<tableId>.csv,
 *   with a header built from the union of keys across all rows.
 *
 * Files over MAX_FILE_SIZE are skipped; once MAX_TOTAL_SIZE is reached no
 * further files are attached. Missing files/tables are logged and skipped
 * rather than failing the whole execution (best-effort semantics).
 *
 * @param workspaceId - workspace scoping both file lookup and table queries
 * @param inputFiles - untrusted list; only string entries are honored
 * @param inputTables - untrusted list; only string table IDs are honored
 * @returns files to write into the sandbox before execution
 */
async function resolveInputFiles(
  workspaceId: string,
  inputFiles?: unknown[],
  inputTables?: unknown[]
): Promise<SandboxFile[]> {
  const sandboxFiles: SandboxFile[] = []
  let totalSize = 0

  if (inputFiles?.length && workspaceId) {
    const allFiles = await listWorkspaceFiles(workspaceId)
    for (const fileRef of inputFiles) {
      if (typeof fileRef !== 'string') continue
      const record = findWorkspaceFileRecord(allFiles, fileRef)
      if (!record) {
        logger.warn('Input file not found', { fileRef })
        continue
      }
      if (record.size > MAX_FILE_SIZE) {
        logger.warn('Input file exceeds size limit', { fileId: record.id, size: record.size })
        continue
      }
      if (totalSize + record.size > MAX_TOTAL_SIZE) {
        logger.warn('Total input size limit reached')
        break
      }
      const buffer = await downloadWorkspaceFile(record)
      totalSize += buffer.length
      const isText = /^text\/|application\/json|application\/xml|application\/csv/.test(
        record.type || ''
      )
      const file: SandboxFile = {
        path: getSandboxWorkspaceFilePath(record),
        content: isText ? buffer.toString('utf-8') : buffer.toString('base64'),
      }
      if (!isText) file.encoding = 'base64'
      sandboxFiles.push(file)
    }
  }

  if (inputTables?.length) {
    for (const tableId of inputTables) {
      if (typeof tableId !== 'string') continue
      const table = await getTableById(tableId)
      if (!table) {
        logger.warn('Input table not found', { tableId })
        continue
      }
      const rows = await queryRows(tableId, workspaceId, {}, 'copilot-fn-exec')
      if (!rows.rows?.length) continue
      // Union of keys across all rows so the header covers sparse rows.
      const allKeys = new Set<string>()
      for (const row of rows.rows) {
        if (row.data && typeof row.data === 'object') {
          for (const key of Object.keys(row.data as Record<string, unknown>)) {
            allKeys.add(key)
          }
        }
      }
      const headers = Array.from(allKeys)
      // Escape the header row too — column names may contain delimiters.
      const csvLines = [headers.map(toCsvCell).join(',')]
      for (const row of rows.rows) {
        const data = (row.data || {}) as Record<string, unknown>
        csvLines.push(headers.map((h) => toCsvCell(data[h])).join(','))
      }
      sandboxFiles.push({
        path: `/home/user/tables/${tableId}.csv`,
        content: csvLines.join('\n'),
      })
    }
  }

  return sandboxFiles
}
export async function executeFunctionExecute(
params: Record<string, unknown>,
context: ToolExecutionContext
@@ -14,6 +117,19 @@ export async function executeFunctionExecute(
}
}
if (context.workspaceId) {
const inputFiles = enrichedParams.inputFiles as unknown[] | undefined
const inputTables = enrichedParams.inputTables as unknown[] | undefined
if (inputFiles?.length || inputTables?.length) {
const resolved = await resolveInputFiles(context.workspaceId, inputFiles, inputTables)
if (resolved.length > 0) {
const existing = (enrichedParams._sandboxFiles as SandboxFile[]) || []
enrichedParams._sandboxFiles = [...existing, ...resolved]
}
}
}
enrichedParams._context = {
...(typeof enrichedParams._context === 'object' && enrichedParams._context !== null
? (enrichedParams._context as object)

View File

@@ -39,7 +39,16 @@ const FORMATS = {
async setup() {
const PDFLib = require('pdf-lib')
const pdf = await PDFLib.PDFDocument.create()
return { globals: { PDFLib, pdf }, pdf }
async function embedImage(dataUri) {
const base64 = dataUri.split(',')[1]
const bytes = Buffer.from(base64, 'base64')
const mime = dataUri.split(';')[0].split(':')[1] || ''
if (mime.includes('png')) return pdf.embedPng(bytes)
return pdf.embedJpg(bytes)
}
return { globals: { PDFLib, pdf, embedImage }, pdf }
},
async serialize(ctx) {
const pdf = ctx.globals.pdf

View File

@@ -6,6 +6,7 @@ import { CodeLanguage } from '@/lib/execution/languages'
export interface SandboxFile {
path: string
content: string
encoding?: 'base64'
}
export interface E2BExecutionRequest {
@@ -49,7 +50,15 @@ export async function executeInE2B(req: E2BExecutionRequest): Promise<E2BExecuti
if (req.sandboxFiles?.length) {
for (const file of req.sandboxFiles) {
await sandbox.files.write(file.path, file.content)
if (file.encoding === 'base64') {
const buf = Buffer.from(file.content, 'base64')
await sandbox.files.write(
file.path,
buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength)
)
} else {
await sandbox.files.write(file.path, file.content)
}
}
logger.info('Wrote sandbox input files', {
sandboxId,
@@ -125,9 +134,30 @@ export async function executeInE2B(req: E2BExecutionRequest): Promise<E2BExecuti
}
}
const exportedFileContent = outputSandboxPath
? await sandbox.files.read(outputSandboxPath)
: undefined
let exportedFileContent: string | undefined
if (outputSandboxPath) {
const ext = outputSandboxPath.slice(outputSandboxPath.lastIndexOf('.')).toLowerCase()
const binaryExts = new Set([
'.png',
'.jpg',
'.jpeg',
'.gif',
'.webp',
'.pdf',
'.zip',
'.mp3',
'.mp4',
'.docx',
'.pptx',
'.xlsx',
])
if (binaryExts.has(ext)) {
const b64Result = await sandbox.commands.run(`base64 -w0 "${outputSandboxPath}"`)
exportedFileContent = b64Result.stdout
} else {
exportedFileContent = await sandbox.files.read(outputSandboxPath)
}
}
return {
result,
@@ -164,7 +194,15 @@ export async function executeShellInE2B(
if (req.sandboxFiles?.length) {
for (const file of req.sandboxFiles) {
await sandbox.files.write(file.path, file.content)
if (file.encoding === 'base64') {
const buf = Buffer.from(file.content, 'base64')
await sandbox.files.write(
file.path,
buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength)
)
} else {
await sandbox.files.write(file.path, file.content)
}
}
logger.info('Wrote sandbox input files', {
sandboxId,
@@ -237,9 +275,32 @@ export async function executeShellInE2B(
cleanedStdout = filteredLines.join('\n')
}
const exportedFileContent = outputSandboxPath
? await sandbox.files.read(outputSandboxPath)
: undefined
let exportedFileContent: string | undefined
if (outputSandboxPath) {
const ext = outputSandboxPath.slice(outputSandboxPath.lastIndexOf('.')).toLowerCase()
const binaryExts = new Set([
'.png',
'.jpg',
'.jpeg',
'.gif',
'.webp',
'.pdf',
'.zip',
'.mp3',
'.mp4',
'.docx',
'.pptx',
'.xlsx',
])
if (binaryExts.has(ext)) {
const b64Result = await sandbox.commands.run(`base64 -w0 "${outputSandboxPath}"`, {
user: 'root',
})
exportedFileContent = b64Result.stdout
} else {
exportedFileContent = await sandbox.files.read(outputSandboxPath)
}
}
return { result: parsed, stdout: cleanedStdout, sandboxId, exportedFileContent }
} finally {

View File

@@ -35,30 +35,37 @@ export const functionExecuteTool: ToolConfig<CodeExecutionInput, CodeExecutionOu
outputPath: {
type: 'string',
required: false,
visibility: 'user-or-llm',
visibility: 'hidden',
description:
'Write the tool result back to a workspace file, e.g. "files/result.json" or "files/report.csv". Use for text/JSON/CSV/markdown/html outputs.',
},
outputFormat: {
type: 'string',
required: false,
visibility: 'user-or-llm',
visibility: 'hidden',
description: 'Optional format override for outputPath (json, csv, txt, md, html).',
},
outputTable: {
type: 'string',
required: false,
visibility: 'user-or-llm',
visibility: 'hidden',
description:
'Overwrite a workspace table with the code result. The code must return an array of objects.',
},
outputSandboxPath: {
type: 'string',
required: false,
visibility: 'user-or-llm',
visibility: 'hidden',
description:
'Export a file created inside the sandbox to the workspace. Provide the sandbox file path here and also set outputPath to the workspace destination.',
},
outputMimeType: {
type: 'string',
required: false,
visibility: 'hidden',
description:
'MIME type for the exported file. Required for binary files (e.g. "image/png", "application/pdf"). If omitted, inferred from outputPath extension for text formats.',
},
envVars: {
type: 'object',
required: false,
@@ -115,6 +122,7 @@ export const functionExecuteTool: ToolConfig<CodeExecutionInput, CodeExecutionOu
outputFormat: params.outputFormat,
outputTable: params.outputTable,
outputSandboxPath: params.outputSandboxPath,
outputMimeType: params.outputMimeType,
envVars: params.envVars || {},
workflowVariables: params.workflowVariables || {},
blockData: params.blockData || {},

View File

@@ -11,6 +11,7 @@ export interface CodeExecutionInput {
outputFormat?: 'json' | 'csv' | 'txt' | 'md' | 'html'
outputTable?: string
outputSandboxPath?: string
outputMimeType?: string
envVars?: Record<string, string>
workflowVariables?: Record<string, unknown>
blockData?: Record<string, unknown>