fix(mothership): file materialization tools (#3586)

* Fix ope

* File upload fixes

* Fix lint

* Materialization shows up

* Snapshot

* Fix

* Nuke migrations

* Add migs

* migs

---------

Co-authored-by: Waleed <walif6@gmail.com>
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
Co-authored-by: Lakee Sivaraya <71339072+lakeesiv@users.noreply.github.com>
Co-authored-by: Vikhyath Mondreti <vikhyath@simstudio.ai>
Co-authored-by: Vikhyath Mondreti <vikhyathvikku@gmail.com>
This commit is contained in:
Siddharth Ganesan
2026-03-14 16:56:44 -07:00
committed by GitHub
parent 75bdf46e6b
commit f077751ce8
19 changed files with 14049 additions and 52 deletions

View File

@@ -120,8 +120,8 @@ export async function verifyFileAccess(
return true
}
// 1. Workspace files: Check database first (most reliable for both local and cloud)
if (inferredContext === 'workspace') {
// 1. Workspace / mothership files: Check database first (most reliable for both local and cloud)
if (inferredContext === 'workspace' || inferredContext === 'mothership') {
return await verifyWorkspaceFileAccess(cloudKey, userId, customConfig, isLocal)
}

View File

@@ -4,6 +4,7 @@ import { sanitizeFileName } from '@/executor/constants'
import '@/lib/uploads/core/setup.server'
import { getSession } from '@/lib/auth'
import type { StorageContext } from '@/lib/uploads/config'
import { generateWorkspaceFileKey } from '@/lib/uploads/contexts/workspace/workspace-file-manager'
import { isImageFileType, resolveFileType } from '@/lib/uploads/utils/file-utils'
import {
SUPPORTED_AUDIO_EXTENSIONS,
@@ -232,6 +233,53 @@ export async function POST(request: NextRequest) {
}
}
// Handle mothership context (chat-scoped uploads to workspace S3)
if (context === 'mothership') {
if (!workspaceId) {
throw new InvalidRequestError('Mothership context requires workspaceId parameter')
}
logger.info(`Uploading mothership file: ${originalName}`)
const storageKey = generateWorkspaceFileKey(workspaceId, originalName)
const metadata: Record<string, string> = {
originalName: originalName,
uploadedAt: new Date().toISOString(),
purpose: 'mothership',
userId: session.user.id,
workspaceId,
}
const fileInfo = await storageService.uploadFile({
file: buffer,
fileName: storageKey,
contentType: file.type || 'application/octet-stream',
context: 'mothership',
preserveKey: true,
customKey: storageKey,
metadata,
})
const finalPath = usingCloudStorage ? `${fileInfo.path}?context=mothership` : fileInfo.path
uploadResults.push({
fileName: originalName,
presignedUrl: '',
fileInfo: {
path: finalPath,
key: fileInfo.key,
name: originalName,
size: buffer.length,
type: file.type || 'application/octet-stream',
},
directUploadSupported: false,
})
logger.info(`Successfully uploaded mothership file: ${fileInfo.key}`)
continue
}
// Handle copilot, chat, profile-pictures contexts
if (context === 'copilot' || context === 'chat' || context === 'profile-pictures') {
if (context === 'copilot') {

View File

@@ -31,6 +31,8 @@ const FileAttachmentSchema = z.object({
const ResourceAttachmentSchema = z.object({
type: z.enum(['workflow', 'table', 'file', 'knowledgebase']),
id: z.string().min(1),
title: z.string().optional(),
active: z.boolean().optional(),
})
const MothershipMessageSchema = z.object({
@@ -124,9 +126,19 @@ export async function POST(req: NextRequest) {
if (Array.isArray(resourceAttachments) && resourceAttachments.length > 0) {
const results = await Promise.allSettled(
resourceAttachments.map((r) =>
resolveActiveResourceContext(r.type, r.id, workspaceId, authenticatedUserId)
)
resourceAttachments.map(async (r) => {
const ctx = await resolveActiveResourceContext(
r.type,
r.id,
workspaceId,
authenticatedUserId
)
if (!ctx) return null
return {
...ctx,
tag: r.active ? '@active_tab' : '@open_tab',
}
})
)
for (const result of results) {
if (result.status === 'fulfilled' && result.value) {

View File

@@ -212,6 +212,7 @@ export function UserInput({
const files = useFileAttachments({
userId: userId || session?.user?.id,
workspaceId,
disabled: false,
isLoading: isSending,
})

View File

@@ -880,12 +880,15 @@ export function useChat(
try {
const currentActiveId = activeResourceIdRef.current
const currentResources = resourcesRef.current
const activeRes = currentActiveId
? currentResources.find((r) => r.id === currentActiveId)
: undefined
const resourceAttachments = activeRes
? [{ type: activeRes.type, id: activeRes.id }]
: undefined
const resourceAttachments =
currentResources.length > 0
? currentResources.map((r) => ({
type: r.type,
id: r.id,
title: r.title,
active: r.id === currentActiveId,
}))
: undefined
const response = await fetch(MOTHERSHIP_CHAT_API_PATH, {
method: 'POST',

View File

@@ -43,6 +43,7 @@ export interface MessageFileAttachment {
interface UseFileAttachmentsProps {
userId?: string
workspaceId?: string
disabled?: boolean
isLoading?: boolean
}
@@ -55,7 +56,7 @@ interface UseFileAttachmentsProps {
* @returns File attachment state and operations
*/
export function useFileAttachments(props: UseFileAttachmentsProps) {
const { userId, disabled, isLoading } = props
const { userId, workspaceId, disabled, isLoading } = props
const [attachedFiles, setAttachedFiles] = useState<AttachedFile[]>([])
const [isDragging, setIsDragging] = useState(false)
@@ -135,7 +136,10 @@ export function useFileAttachments(props: UseFileAttachmentsProps) {
try {
const formData = new FormData()
formData.append('file', file)
formData.append('context', 'copilot')
formData.append('context', 'mothership')
if (workspaceId) {
formData.append('workspaceId', workspaceId)
}
const uploadResponse = await fetch('/api/files/upload', {
method: 'POST',
@@ -171,7 +175,7 @@ export function useFileAttachments(props: UseFileAttachmentsProps) {
}
}
},
[userId]
[userId, workspaceId]
)
/**

View File

@@ -188,6 +188,7 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
const fileAttachments = useFileAttachments({
userId: session?.user?.id,
workspaceId,
disabled,
isLoading,
})

View File

@@ -1,9 +1,9 @@
import { createLogger } from '@sim/logger'
import { getUserSubscriptionState } from '@/lib/billing/core/subscription'
import { processFileAttachments } from '@/lib/copilot/chat-context'
import { getCopilotToolDescription } from '@/lib/copilot/tool-descriptions'
import { isHosted } from '@/lib/core/config/feature-flags'
import { createMcpToolId } from '@/lib/mcp/utils'
import { trackChatUpload } from '@/lib/uploads/contexts/workspace/workspace-file-manager'
import { getWorkflowById } from '@/lib/workflows/utils'
import { tools } from '@/tools/registry'
import { getLatestVersionTools, stripVersionSuffix } from '@/tools/utils'
@@ -126,7 +126,47 @@ export async function buildCopilotRequestPayload(
const effectiveMode = mode === 'agent' ? 'build' : mode
const transportMode = effectiveMode === 'build' ? 'agent' : effectiveMode
const processedFileContents = await processFileAttachments(fileAttachments ?? [], userId)
// Track uploaded files in the DB and build context tags instead of base64 inlining
const uploadContexts: Array<{ type: string; content: string }> = []
if (chatId && params.workspaceId && fileAttachments && fileAttachments.length > 0) {
for (const f of fileAttachments) {
const filename = (f.filename ?? f.name ?? 'file') as string
const mediaType = (f.media_type ?? f.mimeType ?? 'application/octet-stream') as string
try {
await trackChatUpload(
params.workspaceId,
userId,
chatId,
f.key,
filename,
mediaType,
f.size
)
const lines = [
`File "${filename}" (${mediaType}, ${f.size} bytes) uploaded.`,
`Read with: read("uploads/${filename}")`,
`To save permanently: materialize_file(fileName: "${filename}")`,
]
if (filename.endsWith('.json')) {
lines.push(
`To import as a workflow: materialize_file(fileName: "${filename}", operation: "import")`
)
}
uploadContexts.push({
type: 'uploaded_file',
content: lines.join('\n'),
})
} catch (err) {
logger.warn('Failed to track chat upload', {
filename,
chatId,
error: err instanceof Error ? err.message : String(err),
})
}
}
}
const allContexts = [...(contexts ?? []), ...uploadContexts]
let integrationTools: ToolSchema[] = []
@@ -170,11 +210,10 @@ export async function buildCopilotRequestPayload(
...(provider ? { provider } : {}),
mode: transportMode,
messageId: userMessageId,
...(contexts && contexts.length > 0 ? { context: contexts } : {}),
...(allContexts.length > 0 ? { context: allContexts } : {}),
...(chatId ? { chatId } : {}),
...(typeof prefetch === 'boolean' ? { prefetch } : {}),
...(implicitFeedback ? { implicitFeedback } : {}),
...(processedFileContents.length > 0 ? { fileAttachments: processedFileContents } : {}),
...(integrationTools.length > 0 ? { integrationTools } : {}),
...(commands && commands.length > 0 ? { commands } : {}),
...(params.workspaceContext ? { workspaceContext: params.workspaceContext } : {}),

View File

@@ -47,6 +47,7 @@ import {
executeManageJob,
executeUpdateJobHistory,
} from './job-tools'
import { executeMaterializeFile } from './materialize-file'
import type {
CheckDeploymentStatusParams,
CreateFolderParams,
@@ -984,6 +985,7 @@ const SIM_WORKFLOW_TOOL_HANDLERS: Record<
},
}
},
materialize_file: (p, c) => executeMaterializeFile(p, c),
manage_custom_tool: (p, c) => executeManageCustomTool(p, c),
manage_mcp_tool: (p, c) => executeManageMcpTool(p, c),
manage_skill: (p, c) => executeManageSkill(p, c),

View File

@@ -0,0 +1,221 @@
import { db } from '@sim/db'
import { workflow, workspaceFiles } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, isNull } from 'drizzle-orm'
import type { ExecutionContext, ToolCallResult } from '@/lib/copilot/orchestrator/types'
import { getServePathPrefix } from '@/lib/uploads'
import { downloadWorkspaceFile } from '@/lib/uploads/contexts/workspace/workspace-file-manager'
import { parseWorkflowJson } from '@/lib/workflows/operations/import-export'
import { saveWorkflowToNormalizedTables } from '@/lib/workflows/persistence/utils'
import { deduplicateWorkflowName } from '@/lib/workflows/utils'
import { extractWorkflowMetadata } from '@/app/api/v1/admin/types'
const logger = createLogger('MaterializeFile')
/**
 * Looks up the live chat-scoped ("mothership") upload whose original file
 * name matches within the given chat. Soft-deleted rows are excluded.
 *
 * @param fileName - Original file name as supplied at upload time.
 * @param chatId - Copilot chat the upload is scoped to.
 * @returns The matching row, or null when no live upload exists.
 */
async function findUploadRecord(fileName: string, chatId: string) {
  const [match] = await db
    .select()
    .from(workspaceFiles)
    .where(
      and(
        eq(workspaceFiles.chatId, chatId),
        eq(workspaceFiles.originalName, fileName),
        eq(workspaceFiles.context, 'mothership'),
        isNull(workspaceFiles.deletedAt)
      )
    )
    .limit(1)
  return match ?? null
}
/**
 * Converts a workspaceFiles row into the record shape the workspace file
 * download helpers expect, with a serve path that keeps the request in the
 * mothership storage context.
 */
function toFileRecord(row: typeof workspaceFiles.$inferSelect) {
  const servePath = `${getServePathPrefix()}${encodeURIComponent(row.key)}?context=mothership`
  return {
    id: row.id,
    workspaceId: row.workspaceId || '',
    name: row.originalName,
    key: row.key,
    path: servePath,
    size: row.size,
    type: row.contentType,
    uploadedBy: row.userId,
    deletedAt: row.deletedAt,
    uploadedAt: row.uploadedAt,
  }
}
/**
 * Promotes a chat-scoped upload to a permanent workspace file by rewriting
 * its context to 'workspace' and detaching it from the chat.
 *
 * @param fileName - Original name of the uploaded file to materialize.
 * @param chatId - Chat the upload currently belongs to.
 * @returns Success payload with the file id and path, or a not-found error.
 */
async function executeSave(fileName: string, chatId: string): Promise<ToolCallResult> {
  const promoted = await db
    .update(workspaceFiles)
    .set({ context: 'workspace', chatId: null })
    .where(
      and(
        eq(workspaceFiles.originalName, fileName),
        eq(workspaceFiles.chatId, chatId),
        eq(workspaceFiles.context, 'mothership'),
        isNull(workspaceFiles.deletedAt)
      )
    )
    .returning({ id: workspaceFiles.id, originalName: workspaceFiles.originalName })

  const updated = promoted[0]
  if (!updated) {
    return {
      success: false,
      error: `Upload not found: "${fileName}". Use glob("uploads/*") to list available uploads.`,
    }
  }

  logger.info('Materialized file', { fileName, fileId: updated.id, chatId })

  return {
    success: true,
    output: {
      message: `File "${fileName}" materialized. It is now available at files/${fileName} and will persist independently of this chat.`,
      fileId: updated.id,
      path: `files/${fileName}`,
    },
    resources: [{ type: 'file', id: updated.id, title: fileName }],
  }
}
/**
 * Imports a chat-scoped uploaded JSON file as a new workflow in the workspace.
 *
 * Flow: locate the upload record, download and decode its bytes, validate the
 * JSON, insert a workflow row, persist the normalized workflow state, and
 * finally attach any workflow variables.
 *
 * @param fileName - Original name of the uploaded file.
 * @param chatId - Chat the upload is scoped to.
 * @param workspaceId - Workspace the new workflow is created in.
 * @param userId - User recorded as the workflow owner.
 * @returns Success payload with the new workflow id/name, or an error result.
 */
async function executeImport(
  fileName: string,
  chatId: string,
  workspaceId: string,
  userId: string
): Promise<ToolCallResult> {
  const row = await findUploadRecord(fileName, chatId)
  if (!row) {
    return {
      success: false,
      error: `Upload not found: "${fileName}". Use glob("uploads/*") to list available uploads.`,
    }
  }

  // Fetch the raw bytes from storage and decode as UTF-8 text.
  const buffer = await downloadWorkspaceFile(toFileRecord(row))
  const content = buffer.toString('utf-8')

  // Parse once up front to get the raw object for metadata extraction below;
  // parseWorkflowJson performs its own structural validation separately.
  let parsed: unknown
  try {
    parsed = JSON.parse(content)
  } catch {
    return { success: false, error: `"${fileName}" is not valid JSON.` }
  }

  const { data: workflowData, errors } = parseWorkflowJson(content)
  if (!workflowData || errors.length > 0) {
    return {
      success: false,
      error: `Invalid workflow JSON: ${errors.join(', ')}`,
    }
  }

  // Pull display metadata (name/color/description) from the raw JSON.
  const {
    name: rawName,
    color: workflowColor,
    description: workflowDescription,
  } = extractWorkflowMetadata(parsed)
  const workflowId = crypto.randomUUID()
  const now = new Date()
  // Avoid name clashes with existing workflows in the workspace.
  const dedupedName = await deduplicateWorkflowName(rawName, workspaceId, null)

  // Insert the workflow shell first; variables are attached after the
  // normalized state has been saved successfully.
  await db.insert(workflow).values({
    id: workflowId,
    userId,
    workspaceId,
    folderId: null,
    name: dedupedName,
    description: workflowDescription,
    color: workflowColor,
    lastSynced: now,
    createdAt: now,
    updatedAt: now,
    isDeployed: false,
    runCount: 0,
    variables: {},
  })

  const saveResult = await saveWorkflowToNormalizedTables(workflowId, workflowData)
  if (!saveResult.success) {
    // Roll back the shell row so a half-imported workflow is not left behind.
    await db.delete(workflow).where(eq(workflow.id, workflowId))
    return { success: false, error: `Failed to save workflow state: ${saveResult.error}` }
  }

  if (workflowData.variables && Array.isArray(workflowData.variables)) {
    // Re-key variables by id, generating ids for entries that lack one.
    const variablesRecord: Record<
      string,
      { id: string; name: string; type: string; value: unknown }
    > = {}
    for (const v of workflowData.variables) {
      const varId = (v as { id?: string }).id || crypto.randomUUID()
      const variable = v as { name: string; type?: string; value: unknown }
      variablesRecord[varId] = {
        id: varId,
        name: variable.name,
        type: variable.type || 'string',
        value: variable.value,
      }
    }
    await db
      .update(workflow)
      .set({ variables: variablesRecord, updatedAt: new Date() })
      .where(eq(workflow.id, workflowId))
  }

  logger.info('Imported workflow from upload', {
    fileName,
    workflowId,
    workflowName: dedupedName,
    chatId,
  })

  return {
    success: true,
    output: {
      message: `Workflow "${dedupedName}" imported successfully. It is now available in the workspace and can be edited or run.`,
      workflowId,
      workflowName: dedupedName,
    },
    resources: [{ type: 'workflow', id: workflowId, title: dedupedName }],
  }
}
/**
 * Entry point for the materialize_file copilot tool.
 *
 * Validates the tool parameters and execution context, then dispatches to
 * either the workflow import path (operation 'import') or the default
 * save-to-workspace path.
 *
 * @param params - Raw tool parameters; requires fileName, optional operation.
 * @param context - Execution context; requires chatId and workspaceId.
 * @returns The result of the dispatched operation, or a validation error.
 */
export async function executeMaterializeFile(
  params: Record<string, unknown>,
  context: ExecutionContext
): Promise<ToolCallResult> {
  const fileName = params.fileName as string | undefined
  if (!fileName) {
    return { success: false, error: "Missing required parameter 'fileName'" }
  }
  if (!context.chatId) {
    return { success: false, error: 'No chat context available for materialize_file' }
  }
  if (!context.workspaceId) {
    return { success: false, error: 'No workspace context available for materialize_file' }
  }

  // Default to the non-destructive save operation when none is specified.
  const operation = (params.operation as string | undefined) || 'save'

  try {
    return operation === 'import'
      ? await executeImport(fileName, context.chatId, context.workspaceId, context.userId)
      : await executeSave(fileName, context.chatId)
  } catch (err) {
    logger.error('materialize_file failed', {
      fileName,
      operation,
      chatId: context.chatId,
      error: err instanceof Error ? err.message : String(err),
    })
    return {
      success: false,
      error: err instanceof Error ? err.message : 'Failed to materialize file',
    }
  }
}

View File

@@ -0,0 +1,86 @@
import { db } from '@sim/db'
import { workspaceFiles } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, isNull } from 'drizzle-orm'
import { type FileReadResult, readFileRecord } from '@/lib/copilot/vfs/file-reader'
import { getServePathPrefix } from '@/lib/uploads'
import type { WorkspaceFileRecord } from '@/lib/uploads/contexts/workspace/workspace-file-manager'
const logger = createLogger('UploadFileReader')
/**
 * Maps a workspaceFiles row into a WorkspaceFileRecord whose serve path
 * routes the download through the mothership storage context.
 */
function toWorkspaceFileRecord(row: typeof workspaceFiles.$inferSelect): WorkspaceFileRecord {
  const servePath = `${getServePathPrefix()}${encodeURIComponent(row.key)}?context=mothership`
  return {
    id: row.id,
    workspaceId: row.workspaceId || '',
    name: row.originalName,
    key: row.key,
    path: servePath,
    size: row.size,
    type: row.contentType,
    uploadedBy: row.userId,
    deletedAt: row.deletedAt,
    uploadedAt: row.uploadedAt,
  }
}
/**
* List all chat-scoped uploads for a given chat.
*/
/**
 * List all chat-scoped uploads for a given chat.
 *
 * Returns only live (non-soft-deleted) rows in the 'mothership' context.
 * Failures are logged and reported as an empty list rather than thrown, so
 * callers degrade gracefully.
 */
export async function listChatUploads(chatId: string): Promise<WorkspaceFileRecord[]> {
  try {
    const condition = and(
      eq(workspaceFiles.chatId, chatId),
      eq(workspaceFiles.context, 'mothership'),
      isNull(workspaceFiles.deletedAt)
    )
    const rows = await db.select().from(workspaceFiles).where(condition)
    return rows.map((row) => toWorkspaceFileRecord(row))
  } catch (err) {
    logger.warn('Failed to list chat uploads', {
      chatId,
      error: err instanceof Error ? err.message : String(err),
    })
    return []
  }
}
/**
* Read a specific uploaded file by name within a chat session.
*/
/**
 * Read a specific uploaded file by name within a chat session.
 *
 * Resolves the upload record for the given chat and file name, then delegates
 * content retrieval to readFileRecord. Returns null when the upload does not
 * exist or the lookup fails (failures are logged, not thrown).
 */
export async function readChatUpload(
  filename: string,
  chatId: string
): Promise<FileReadResult | null> {
  try {
    const [row] = await db
      .select()
      .from(workspaceFiles)
      .where(
        and(
          eq(workspaceFiles.chatId, chatId),
          eq(workspaceFiles.context, 'mothership'),
          eq(workspaceFiles.originalName, filename),
          isNull(workspaceFiles.deletedAt)
        )
      )
      .limit(1)

    if (!row) return null
    return readFileRecord(toWorkspaceFileRecord(row))
  } catch (err) {
    logger.warn('Failed to read chat upload', {
      filename,
      chatId,
      error: err instanceof Error ? err.message : String(err),
    })
    return null
  }
}

View File

@@ -1,35 +1,10 @@
import { createLogger } from '@sim/logger'
import type { ExecutionContext, ToolCallResult } from '@/lib/copilot/orchestrator/types'
import type { MothershipResource } from '@/lib/copilot/resource-types'
import { VFS_DIR_TO_RESOURCE } from '@/lib/copilot/resource-types'
import type { WorkspaceVFS } from '@/lib/copilot/vfs'
import { getOrMaterializeVFS } from '@/lib/copilot/vfs'
import { listChatUploads, readChatUpload } from './upload-file-reader'
const logger = createLogger('VfsTools')
/**
* Resolves a VFS resource path to its resource descriptor by reading the
* sibling meta.json (already in memory) for the resource ID and name.
*/
function resolveVfsResource(vfs: WorkspaceVFS, path: string): MothershipResource | null {
const segments = path.split('/')
const resourceType = VFS_DIR_TO_RESOURCE[segments[0]]
if (!resourceType || !segments[1]) return null
const metaPath = `${segments[0]}/${segments[1]}/meta.json`
const meta = vfs.read(metaPath)
if (!meta) return null
try {
const parsed = JSON.parse(meta.content)
const id = parsed?.id as string | undefined
if (!id) return null
return { type: resourceType, id, title: (parsed.name as string) || segments[1] }
} catch {
return null
}
}
export async function executeVfsGrep(
params: Record<string, unknown>,
context: ExecutionContext
@@ -89,7 +64,14 @@ export async function executeVfsGlob(
try {
const vfs = await getOrMaterializeVFS(workspaceId, context.userId)
const files = vfs.glob(pattern)
let files = vfs.glob(pattern)
if (context.chatId && (pattern === 'uploads/*' || pattern.startsWith('uploads/'))) {
const uploads = await listChatUploads(context.chatId)
const uploadPaths = uploads.map((f) => `uploads/${f.name}`)
files = [...files, ...uploadPaths]
}
logger.debug('vfs_glob result', { pattern, fileCount: files.length })
return { success: true, output: { files } }
} catch (err) {
@@ -116,6 +98,23 @@ export async function executeVfsRead(
}
try {
// Handle chat-scoped uploads via the uploads/ virtual prefix
if (path.startsWith('uploads/')) {
if (!context.chatId) {
return { success: false, error: 'No chat context available for uploads/' }
}
const filename = path.slice('uploads/'.length)
const uploadResult = await readChatUpload(filename, context.chatId)
if (uploadResult) {
logger.debug('vfs_read resolved chat upload', { path, totalLines: uploadResult.totalLines })
return { success: true, output: uploadResult }
}
return {
success: false,
error: `Upload not found: ${path}. Use glob("uploads/*") to list available uploads.`,
}
}
const vfs = await getOrMaterializeVFS(workspaceId, context.userId)
const result = vfs.read(
path,
@@ -129,12 +128,9 @@ export async function executeVfsRead(
path,
totalLines: fileContent.totalLines,
})
// Appends metadata of resource to tool response
const resource = resolveVfsResource(vfs, path)
return {
success: true,
output: fileContent,
...(resource && { resources: [resource] }),
}
}
@@ -147,12 +143,9 @@ export async function executeVfsRead(
return { success: false, error: `File not found: ${path}.${hint}` }
}
logger.debug('vfs_read result', { path, totalLines: result.totalLines })
// Appends metadata of resource to tool response
const resource = resolveVfsResource(vfs, path)
return {
success: true,
output: result,
...(resource && { resources: [resource] }),
}
} catch (err) {
logger.error('vfs_read failed', {

View File

@@ -153,6 +153,7 @@ function getS3Config(context: StorageContext): StorageConfig {
bucket: S3_EXECUTION_FILES_CONFIG.bucket,
region: S3_EXECUTION_FILES_CONFIG.region,
}
case 'mothership':
case 'workspace':
return {
bucket: S3_CONFIG.bucket,
@@ -209,6 +210,7 @@ function getBlobConfig(context: StorageContext): StorageConfig {
connectionString: BLOB_EXECUTION_FILES_CONFIG.connectionString,
containerName: BLOB_EXECUTION_FILES_CONFIG.containerName,
}
case 'mothership':
case 'workspace':
return {
accountName: BLOB_CONFIG.accountName,

View File

@@ -207,6 +207,37 @@ export async function uploadWorkspaceFile(
}
}
/**
* Track a file that was already uploaded to workspace S3 as a chat-scoped upload.
* Creates a workspaceFiles record with context='mothership' and the given chatId.
* No S3 operations -- the file is already in storage from the presigned/upload step.
*/
/**
 * Track a file that was already uploaded to workspace S3 as a chat-scoped upload.
 * Creates a workspaceFiles record with context='mothership' and the given chatId.
 * No S3 operations -- the file is already in storage from the presigned/upload step.
 *
 * @param workspaceId - Workspace the upload belongs to.
 * @param userId - User who performed the upload.
 * @param chatId - Copilot chat the upload is scoped to.
 * @param s3Key - Storage key the file was uploaded under.
 * @param fileName - Original (user-facing) file name.
 * @param contentType - MIME type reported for the file.
 * @param size - File size in bytes.
 */
export async function trackChatUpload(
  workspaceId: string,
  userId: string,
  chatId: string,
  s3Key: string,
  fileName: string,
  contentType: string,
  size: number
): Promise<void> {
  // Use a UUID instead of Date.now() + Math.random() so concurrent uploads
  // cannot collide on the primary key.
  const fileId = `wf_${crypto.randomUUID()}`
  await db.insert(workspaceFiles).values({
    id: fileId,
    key: s3Key,
    userId,
    workspaceId,
    context: 'mothership',
    chatId,
    originalName: fileName,
    contentType,
    size,
  })
  logger.info(`Tracked chat upload: ${fileName} for chat ${chatId}`)
}
/**
* Check if a file with the same name already exists in workspace
*/

View File

@@ -2,6 +2,7 @@ export type StorageContext =
| 'knowledge-base'
| 'chat'
| 'copilot'
| 'mothership'
| 'execution'
| 'workspace'
| 'profile-pictures'

View File

@@ -0,0 +1,3 @@
-- Add chat scoping to workspace_files: chat-scoped ("mothership") uploads carry
-- the owning copilot chat id; ON DELETE cascade removes them when the chat is
-- deleted. The index supports lookups of all uploads for a given chat.
ALTER TABLE "workspace_files" ADD COLUMN "chat_id" uuid;--> statement-breakpoint
ALTER TABLE "workspace_files" ADD CONSTRAINT "workspace_files_chat_id_copilot_chats_id_fk" FOREIGN KEY ("chat_id") REFERENCES "public"."copilot_chats"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
CREATE INDEX "workspace_files_chat_id_idx" ON "workspace_files" USING btree ("chat_id");

File diff suppressed because it is too large Load Diff

View File

@@ -1219,6 +1219,13 @@
"when": 1773529490946,
"tag": "0174_whole_lyja",
"breakpoints": true
},
{
"idx": 175,
"version": "7",
"when": 1773532070072,
"tag": "0175_cheerful_shockwave",
"breakpoints": true
}
]
}

View File

@@ -1041,7 +1041,8 @@ export const workspaceFiles = pgTable(
.notNull()
.references(() => user.id, { onDelete: 'cascade' }),
workspaceId: text('workspace_id').references(() => workspace.id, { onDelete: 'cascade' }),
context: text('context').notNull(), // 'workspace', 'copilot', 'chat', 'knowledge-base', 'profile-pictures', 'general', 'execution'
context: text('context').notNull(), // 'workspace', 'mothership', 'copilot', 'chat', 'knowledge-base', 'profile-pictures', 'general', 'execution'
chatId: uuid('chat_id').references(() => copilotChats.id, { onDelete: 'cascade' }),
originalName: text('original_name').notNull(),
contentType: text('content_type').notNull(),
size: integer('size').notNull(),
@@ -1056,6 +1057,7 @@ export const workspaceFiles = pgTable(
userIdIdx: index('workspace_files_user_id_idx').on(table.userId),
workspaceIdIdx: index('workspace_files_workspace_id_idx').on(table.workspaceId),
contextIdx: index('workspace_files_context_idx').on(table.context),
chatIdIdx: index('workspace_files_chat_id_idx').on(table.chatId),
deletedAtIdx: index('workspace_files_deleted_at_idx').on(table.deletedAt),
})
)