mirror of https://github.com/simstudioai/sim.git
synced 2026-01-09 15:07:55 -05:00
feat(files): added file manager table, enforce permissions for viewing files (#1766)
* feat(files): added file manager table, enforce permissions for viewing files
* rename types
* cleanup
* cleanup
* confirm local file system works with all contexts
* clean
* remove isAsync
* ignore expiresAt
* add relative imports instead of absolute ones
* absl imports
* remove redundant comments
@@ -911,49 +911,44 @@ export function createStorageProviderMocks(options: StorageProviderMockOptions =
     },
   }))
 
-  vi.doMock('@/lib/uploads/core/setup', () => ({
+  vi.doMock('@/lib/uploads/config', () => ({
     USE_S3_STORAGE: provider === 's3',
     USE_BLOB_STORAGE: provider === 'blob',
     USE_LOCAL_STORAGE: provider === 'local',
     getStorageProvider: vi.fn().mockReturnValue(provider),
+    S3_CONFIG: {
+      bucket: 'test-s3-bucket',
+      region: 'us-east-1',
+    },
+    S3_KB_CONFIG: {
+      bucket: 'test-s3-kb-bucket',
+      region: 'us-east-1',
+    },
+    S3_CHAT_CONFIG: {
+      bucket: 'test-s3-chat-bucket',
+      region: 'us-east-1',
+    },
+    BLOB_CONFIG: {
+      accountName: 'testaccount',
+      accountKey: 'testkey',
+      containerName: 'test-container',
+    },
+    BLOB_KB_CONFIG: {
+      accountName: 'testaccount',
+      accountKey: 'testkey',
+      containerName: 'test-kb-container',
+    },
+    BLOB_CHAT_CONFIG: {
+      accountName: 'testaccount',
+      accountKey: 'testkey',
+      containerName: 'test-chat-container',
+    },
   }))
 
   if (provider === 's3') {
-    vi.doMock('@/lib/uploads/s3/s3-client', () => ({
+    vi.doMock('@/lib/uploads/providers/s3/client', () => ({
      getS3Client: vi.fn().mockReturnValue({}),
-      sanitizeFilenameForMetadata: vi.fn((filename) => filename),
    }))
 
-    vi.doMock('@/lib/uploads/setup', () => ({
-      S3_CONFIG: {
-        bucket: 'test-s3-bucket',
-        region: 'us-east-1',
-      },
-      S3_KB_CONFIG: {
-        bucket: 'test-s3-kb-bucket',
-        region: 'us-east-1',
-      },
-      S3_CHAT_CONFIG: {
-        bucket: 'test-s3-chat-bucket',
-        region: 'us-east-1',
-      },
-      BLOB_CONFIG: {
-        accountName: 'testaccount',
-        accountKey: 'testkey',
-        containerName: 'test-container',
-      },
-      BLOB_KB_CONFIG: {
-        accountName: 'testaccount',
-        accountKey: 'testkey',
-        containerName: 'test-kb-container',
-      },
-      BLOB_CHAT_CONFIG: {
-        accountName: 'testaccount',
-        accountKey: 'testkey',
-        containerName: 'test-chat-container',
-      },
-    }))
 
     vi.doMock('@aws-sdk/client-s3', () => ({
       PutObjectCommand: vi.fn(),
     }))
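Note: a minimal sketch of how a test might exercise the consolidated config mock above. The helper's import path and the shape of StorageProviderMockOptions are assumptions inferred from the `provider === 's3'` checks in this hunk, not confirmed by the diff.

import { describe, expect, it } from 'vitest'
// Hypothetical path to the test utilities shown in this hunk.
import { createStorageProviderMocks } from '@/app/api/__test-utils__/utils'

describe('storage provider mocks', () => {
  it('reports s3 when mocked as s3', async () => {
    createStorageProviderMocks({ provider: 's3' })
    // vi.doMock is lazy, so import the module after registering the mock.
    const config = await import('@/lib/uploads/config')
    expect(config.getStorageProvider()).toBe('s3')
    expect(config.USE_S3_STORAGE).toBe(true)
    expect(config.S3_CONFIG.bucket).toBe('test-s3-bucket')
  })
})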
@@ -983,29 +978,9 @@ export function createStorageProviderMocks(options: StorageProviderMockOptions =
       }),
     }
 
-    vi.doMock('@/lib/uploads/blob/blob-client', () => ({
+    vi.doMock('@/lib/uploads/providers/blob/client', () => ({
       getBlobServiceClient: vi.fn().mockReturnValue(mockBlobServiceClient),
-      sanitizeFilenameForMetadata: vi.fn((filename) => filename),
     }))
 
-    vi.doMock('@/lib/uploads/setup', () => ({
-      BLOB_CONFIG: {
-        accountName: 'testaccount',
-        accountKey: 'testkey',
-        containerName: 'test-container',
-      },
-      BLOB_KB_CONFIG: {
-        accountName: 'testaccount',
-        accountKey: 'testkey',
-        containerName: 'test-kb-container',
-      },
-      BLOB_CHAT_CONFIG: {
-        accountName: 'testaccount',
-        accountKey: 'testkey',
-        containerName: 'test-chat-container',
-      },
-    }))
 
     vi.doMock('@azure/storage-blob', () => ({
       BlobSASPermissions: {
         parse: vi.fn(() => 'w'),
@@ -1355,6 +1330,25 @@ export function setupFileApiMocks(
     authMocks.setUnauthenticated()
   }
 
+  vi.doMock('@/lib/auth/hybrid', () => ({
+    checkHybridAuth: vi.fn().mockResolvedValue({
+      success: authenticated,
+      userId: authenticated ? 'test-user-id' : undefined,
+      error: authenticated ? undefined : 'Unauthorized',
+    }),
+  }))
+
+  vi.doMock('@/app/api/files/authorization', () => ({
+    verifyFileAccess: vi.fn().mockResolvedValue(true),
+    verifyWorkspaceFileAccess: vi.fn().mockResolvedValue(true),
+    verifyKBFileAccess: vi.fn().mockResolvedValue(true),
+    verifyCopilotFileAccess: vi.fn().mockResolvedValue(true),
+    lookupWorkspaceFileByKey: vi.fn().mockResolvedValue({
+      workspaceId: 'test-workspace-id',
+      uploadedBy: 'test-user-id',
+    }),
+  }))
+
   mockFileSystem({
     writeFileSuccess: true,
     readFileContent: 'test content',
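Note: an illustrative sketch of a route test built on these mocks. The setupFileApiMocks options shape is an assumption inferred from the `authenticated` flag used above, and the route import path is hypothetical.

import { describe, expect, it } from 'vitest'
// Hypothetical paths; adjust to the real test-utils and route locations.
import { setupFileApiMocks } from '@/app/api/__test-utils__/utils'

describe('files delete API', () => {
  it('returns 401 for unauthenticated callers', async () => {
    setupFileApiMocks({ authenticated: false })
    const { POST } = await import('@/app/api/files/delete/route')
    const req = new Request('http://localhost/api/files/delete', {
      method: 'POST',
      body: JSON.stringify({ filePath: '/api/files/serve/123.txt' }),
    })
    const res = await POST(req as any)
    expect(res.status).toBe(401)
  })
})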
@@ -1510,11 +1504,10 @@ export function mockUploadUtils(
     isUsingCloudStorage: vi.fn().mockReturnValue(isCloudStorage),
   }))
 
-  vi.doMock('@/lib/uploads/setup', () => ({
+  vi.doMock('@/lib/uploads/config', () => ({
     UPLOAD_DIR: '/test/uploads',
     USE_S3_STORAGE: isCloudStorage,
     USE_BLOB_STORAGE: false,
-    ensureUploadsDirectory: vi.fn().mockResolvedValue(true),
     S3_CONFIG: {
       bucket: 'test-bucket',
       region: 'test-region',
@@ -18,7 +18,6 @@ const logger = createLogger('ChatIdentifierAPI')
 export const dynamic = 'force-dynamic'
 export const runtime = 'nodejs'
 
-// This endpoint handles chat interactions via the identifier
 export async function POST(
   request: NextRequest,
   { params }: { params: Promise<{ identifier: string }> }
@@ -29,7 +28,6 @@ export async function POST(
   try {
     logger.debug(`[${requestId}] Processing chat request for identifier: ${identifier}`)
 
-    // Parse the request body once
     let parsedBody
     try {
       parsedBody = await request.json()
@@ -37,7 +35,6 @@ export async function POST(
       return addCorsHeaders(createErrorResponse('Invalid request body', 400), request)
     }
 
-    // Find the chat deployment for this identifier
     const deploymentResult = await db
       .select({
         id: chat.id,
@@ -60,13 +57,11 @@ export async function POST(
 
     const deployment = deploymentResult[0]
 
-    // Check if the chat is active
     if (!deployment.isActive) {
       logger.warn(`[${requestId}] Chat is not active: ${identifier}`)
       return addCorsHeaders(createErrorResponse('This chat is currently unavailable', 403), request)
     }
 
-    // Validate authentication with the parsed body
     const authResult = await validateChatAuth(requestId, deployment, request, parsedBody)
     if (!authResult.authorized) {
       return addCorsHeaders(
@@ -75,26 +70,20 @@ export async function POST(
       )
     }
 
-    // Use the already parsed body
     const { input, password, email, conversationId, files } = parsedBody
 
-    // If this is an authentication request (has password or email but no input),
-    // set auth cookie and return success
     if ((password || email) && !input) {
       const response = addCorsHeaders(createSuccessResponse({ authenticated: true }), request)
 
-      // Set authentication cookie
       setChatAuthCookie(response, deployment.id, deployment.authType)
 
       return response
     }
 
-    // For chat messages, create regular response (allow empty input if files are present)
     if (!input && (!files || files.length === 0)) {
       return addCorsHeaders(createErrorResponse('No input provided', 400), request)
     }
 
-    // Get the workflow and workspace owner for this chat
     const workflowResult = await db
       .select({
         isDeployed: workflow.isDeployed,
@@ -141,20 +130,22 @@ export async function POST(
       const { SSE_HEADERS } = await import('@/lib/utils')
       const { createFilteredResult } = await import('@/app/api/workflows/[id]/execute/route')
 
-      // Generate executionId early so it can be used for file uploads and workflow execution
       const executionId = crypto.randomUUID()
 
       const workflowInput: any = { input, conversationId }
       if (files && Array.isArray(files) && files.length > 0) {
-        logger.debug(`[${requestId}] Processing ${files.length} attached files`)
-
         const executionContext = {
-          workspaceId: deployment.userId,
+          workspaceId: workflowResult[0].workspaceId || '',
           workflowId: deployment.workflowId,
           executionId,
         }
 
-        const uploadedFiles = await ChatFiles.processChatFiles(files, executionContext, requestId)
+        const uploadedFiles = await ChatFiles.processChatFiles(
+          files,
+          executionContext,
+          requestId,
+          deployment.userId
+        )
 
         if (uploadedFiles.length > 0) {
           workflowInput.files = uploadedFiles
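Note: the workspaceId change above is the substantive fix in this hunk — chat file uploads are now scoped to the workflow's workspace rather than the deployer's user id. The shape below is inferred from the call site and is illustrative only, not a confirmed type from the codebase.

// Inferred from the call site above; illustrative only.
interface ChatExecutionContext {
  workspaceId: string // workflowResult[0].workspaceId, no longer deployment.userId
  workflowId: string
  executionId: string // crypto.randomUUID(), generated before execution starts
}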
@@ -205,7 +196,6 @@ export async function POST(
   }
 }
 
-// This endpoint returns information about the chat
 export async function GET(
   request: NextRequest,
   { params }: { params: Promise<{ identifier: string }> }
@@ -216,7 +206,6 @@ export async function GET(
   try {
     logger.debug(`[${requestId}] Fetching chat info for identifier: ${identifier}`)
 
-    // Find the chat deployment for this identifier
     const deploymentResult = await db
       .select({
         id: chat.id,
@@ -241,13 +230,11 @@ export async function GET(
 
     const deployment = deploymentResult[0]
 
-    // Check if the chat is active
     if (!deployment.isActive) {
       logger.warn(`[${requestId}] Chat is not active: ${identifier}`)
       return addCorsHeaders(createErrorResponse('This chat is currently unavailable', 403), request)
     }
 
-    // Check for auth cookie first
     const cookieName = `chat_auth_${deployment.id}`
     const authCookie = request.cookies.get(cookieName)
 
@@ -256,7 +243,6 @@ export async function GET(
       authCookie &&
       validateAuthToken(authCookie.value, deployment.id)
     ) {
-      // Cookie valid, return chat info
       return addCorsHeaders(
         createSuccessResponse({
           id: deployment.id,
@@ -270,7 +256,6 @@ export async function GET(
       )
     }
 
-    // If no valid cookie, proceed with standard auth check
     const authResult = await validateChatAuth(requestId, deployment, request)
     if (!authResult.authorized) {
       logger.info(
@@ -282,7 +267,6 @@ export async function GET(
       )
     }
 
-    // Return public information about the chat including auth type
     return addCorsHeaders(
       createSuccessResponse({
         id: deployment.id,
apps/sim/app/api/files/authorization.ts (new file, 647 lines)
@@ -0,0 +1,647 @@
+import { db } from '@sim/db'
+import { document, workspaceFile } from '@sim/db/schema'
+import { eq, like, or } from 'drizzle-orm'
+import { createLogger } from '@/lib/logs/console/logger'
+import { getUserEntityPermissions } from '@/lib/permissions/utils'
+import { getFileMetadata } from '@/lib/uploads'
+import type { StorageContext } from '@/lib/uploads/config'
+import {
+  BLOB_CHAT_CONFIG,
+  BLOB_KB_CONFIG,
+  S3_CHAT_CONFIG,
+  S3_KB_CONFIG,
+} from '@/lib/uploads/config'
+import type { StorageConfig } from '@/lib/uploads/core/storage-client'
+import { getFileMetadataByKey } from '@/lib/uploads/server/metadata'
+import { inferContextFromKey } from '@/lib/uploads/utils/file-utils'
+
+const logger = createLogger('FileAuthorization')
+
+export interface AuthorizationResult {
+  granted: boolean
+  reason: string
+  workspaceId?: string
+}
+
+/**
+ * Lookup workspace file by storage key from database
+ * @param key Storage key to lookup
+ * @returns Workspace file info or null if not found
+ */
+export async function lookupWorkspaceFileByKey(
+  key: string
+): Promise<{ workspaceId: string; uploadedBy: string } | null> {
+  try {
+    // Priority 1: Check new workspaceFiles table
+    const fileRecord = await getFileMetadataByKey(key, 'workspace')
+
+    if (fileRecord) {
+      return {
+        workspaceId: fileRecord.workspaceId || '',
+        uploadedBy: fileRecord.userId,
+      }
+    }
+
+    // Priority 2: Check legacy workspace_file table (for backward compatibility during migration)
+    try {
+      const [legacyFile] = await db
+        .select({
+          workspaceId: workspaceFile.workspaceId,
+          uploadedBy: workspaceFile.uploadedBy,
+        })
+        .from(workspaceFile)
+        .where(eq(workspaceFile.key, key))
+        .limit(1)
+
+      if (legacyFile) {
+        return {
+          workspaceId: legacyFile.workspaceId,
+          uploadedBy: legacyFile.uploadedBy,
+        }
+      }
+    } catch (legacyError) {
+      // Ignore errors when checking legacy table (it may not exist after migration)
+      logger.debug('Legacy workspace_file table check failed (may not exist):', legacyError)
+    }
+
+    return null
+  } catch (error) {
+    logger.error('Error looking up workspace file by key:', { key, error })
+    return null
+  }
+}
+
+/**
+ * Extract workspace ID from workspace file key pattern
+ * Pattern: {workspaceId}/{timestamp}-{random}-{filename}
+ */
+function extractWorkspaceIdFromKey(key: string): string | null {
+  // Use inferContextFromKey to check if it's a workspace file
+  const inferredContext = inferContextFromKey(key)
+  if (inferredContext !== 'workspace') {
+    return null
+  }
+
+  const parts = key.split('/')
+  const workspaceId = parts[0]
+  if (workspaceId && /^[a-f0-9-]{36}$/.test(workspaceId)) {
+    return workspaceId
+  }
+
+  return null
+}
+
+/**
+ * Verify file access based on file path patterns and metadata
+ * @param cloudKey The file key/path (e.g., "workspace_id/workflow_id/execution_id/filename" or "kb/filename")
+ * @param userId The authenticated user ID
+ * @param bucketType Optional bucket type (e.g., 'copilot', 'execution-files')
+ * @param customConfig Optional custom storage configuration
+ * @param context Optional explicit storage context
+ * @param isLocal Optional flag indicating if this is local storage
+ * @returns Promise<boolean> True if user has access, false otherwise
+ */
+export async function verifyFileAccess(
+  cloudKey: string,
+  userId: string,
+  bucketType?: string | null,
+  customConfig?: StorageConfig,
+  context?: StorageContext,
+  isLocal?: boolean
+): Promise<boolean> {
+  try {
+    // Infer context from key if not explicitly provided
+    const inferredContext = context || inferContextFromKey(cloudKey)
+
+    // 1. Workspace files: Check database first (most reliable for both local and cloud)
+    if (inferredContext === 'workspace') {
+      return await verifyWorkspaceFileAccess(cloudKey, userId, customConfig, isLocal)
+    }
+
+    // 2. Execution files: workspace_id/workflow_id/execution_id/filename
+    if (inferredContext === 'execution' || isExecutionFile(cloudKey, bucketType)) {
+      return await verifyExecutionFileAccess(cloudKey, userId, customConfig)
+    }
+
+    // 3. Copilot files: Check database first, then metadata, then path pattern (legacy)
+    if (inferredContext === 'copilot' || bucketType === 'copilot') {
+      return await verifyCopilotFileAccess(cloudKey, userId, customConfig)
+    }
+
+    // 4. KB files: kb/filename
+    if (inferredContext === 'knowledge-base') {
+      return await verifyKBFileAccess(cloudKey, userId, customConfig)
+    }
+
+    // 5. Chat files: chat/filename
+    if (inferredContext === 'chat') {
+      return await verifyChatFileAccess(cloudKey, userId, customConfig)
+    }
+
+    // 6. Regular uploads: UUID-filename or timestamp-filename
+    // Check metadata for userId/workspaceId, or database for workspace files
+    return await verifyRegularFileAccess(cloudKey, userId, customConfig, isLocal)
+  } catch (error) {
+    logger.error('Error verifying file access:', { cloudKey, userId, error })
+    // Deny access on error to be safe
+    return false
+  }
+}
+
+/**
+ * Verify access to workspace files
+ * Priority: Database lookup > Metadata > Deny
+ */
+async function verifyWorkspaceFileAccess(
+  cloudKey: string,
+  userId: string,
+  customConfig?: StorageConfig,
+  isLocal?: boolean
+): Promise<boolean> {
+  try {
+    // Priority 1: Check database (most reliable, works for both local and cloud)
+    const workspaceFileRecord = await lookupWorkspaceFileByKey(cloudKey)
+    if (workspaceFileRecord) {
+      const permission = await getUserEntityPermissions(
+        userId,
+        'workspace',
+        workspaceFileRecord.workspaceId
+      )
+      if (permission !== null) {
+        logger.debug('Workspace file access granted (database lookup)', {
+          userId,
+          workspaceId: workspaceFileRecord.workspaceId,
+          cloudKey,
+        })
+        return true
+      }
+      logger.warn('User does not have workspace access for file', {
+        userId,
+        workspaceId: workspaceFileRecord.workspaceId,
+        cloudKey,
+      })
+      return false
+    }
+
+    // Priority 2: Check metadata (works for both local and cloud files)
+    const config: StorageConfig = customConfig || {}
+    const metadata = await getFileMetadata(cloudKey, config)
+    const workspaceId = metadata.workspaceId
+
+    if (workspaceId) {
+      const permission = await getUserEntityPermissions(userId, 'workspace', workspaceId)
+      if (permission !== null) {
+        logger.debug('Workspace file access granted (metadata)', {
+          userId,
+          workspaceId,
+          cloudKey,
+        })
+        return true
+      }
+      logger.warn('User does not have workspace access for file (metadata)', {
+        userId,
+        workspaceId,
+        cloudKey,
+      })
+      return false
+    }
+
+    logger.warn('Workspace file missing authorization metadata', { cloudKey, userId })
+    return false
+  } catch (error) {
+    logger.error('Error verifying workspace file access', { cloudKey, userId, error })
+    return false
+  }
+}
+
+/**
+ * Check if file is an execution file based on path pattern
+ * Execution files have format: workspace_id/workflow_id/execution_id/filename
+ */
+function isExecutionFile(cloudKey: string, bucketType?: string | null): boolean {
+  if (bucketType === 'execution-files' || bucketType === 'execution') {
+    return true
+  }
+
+  return inferContextFromKey(cloudKey) === 'execution'
+}
+
+/**
+ * Verify access to execution files
+ * Execution files: workspace_id/workflow_id/execution_id/filename
+ */
+async function verifyExecutionFileAccess(
+  cloudKey: string,
+  userId: string,
+  customConfig?: StorageConfig
+): Promise<boolean> {
+  const parts = cloudKey.split('/')
+  if (parts.length < 3) {
+    logger.warn('Invalid execution file path format', { cloudKey })
+    return false
+  }
+
+  const workspaceId = parts[0]
+  if (!workspaceId) {
+    logger.warn('Could not extract workspaceId from execution file path', { cloudKey })
+    return false
+  }
+
+  const permission = await getUserEntityPermissions(userId, 'workspace', workspaceId)
+  if (permission === null) {
+    logger.warn('User does not have workspace access for execution file', {
+      userId,
+      workspaceId,
+      cloudKey,
+    })
+    return false
+  }
+
+  logger.debug('Execution file access granted', { userId, workspaceId, cloudKey })
+  return true
+}
+
+/**
+ * Verify access to copilot files
+ * Priority: Database lookup > Metadata > Path pattern (legacy)
+ */
+async function verifyCopilotFileAccess(
+  cloudKey: string,
+  userId: string,
+  customConfig?: StorageConfig
+): Promise<boolean> {
+  try {
+    // Priority 1: Check workspaceFiles table (new system)
+    const fileRecord = await getFileMetadataByKey(cloudKey, 'copilot')
+
+    if (fileRecord) {
+      if (fileRecord.userId === userId) {
+        logger.debug('Copilot file access granted (workspaceFiles table)', {
+          userId,
+          cloudKey,
+        })
+        return true
+      }
+      logger.warn('User does not own copilot file', {
+        userId,
+        fileUserId: fileRecord.userId,
+        cloudKey,
+      })
+      return false
+    }
+
+    // Priority 2: Check metadata (for files not yet in database)
+    const config: StorageConfig = customConfig || {}
+    const metadata = await getFileMetadata(cloudKey, config)
+    const fileUserId = metadata.userId
+
+    if (fileUserId) {
+      if (fileUserId === userId) {
+        logger.debug('Copilot file access granted (metadata)', { userId, cloudKey })
+        return true
+      }
+      logger.warn('User does not own copilot file (metadata)', {
+        userId,
+        fileUserId,
+        cloudKey,
+      })
+      return false
+    }
+
+    // Priority 3: Legacy path pattern check (userId/filename format)
+    // This handles old copilot files that may have been stored with userId prefix
+    const parts = cloudKey.split('/')
+    if (parts.length >= 2) {
+      const fileUserId = parts[0]
+      if (fileUserId && fileUserId === userId) {
+        logger.debug('Copilot file access granted (path pattern)', { userId, cloudKey })
+        return true
+      }
+      logger.warn('User does not own copilot file (path pattern)', {
+        userId,
+        fileUserId,
+        cloudKey,
+      })
+      return false
+    }
+
+    logger.warn('Copilot file missing authorization metadata', { cloudKey, userId })
+    return false
+  } catch (error) {
+    logger.error('Error verifying copilot file access', { cloudKey, userId, error })
+    return false
+  }
+}
+
+/**
+ * Verify access to KB files
+ * KB files: kb/filename
+ */
+async function verifyKBFileAccess(
+  cloudKey: string,
+  userId: string,
+  customConfig?: StorageConfig
+): Promise<boolean> {
+  try {
+    // Priority 1: Check workspaceFiles table (new system)
+    const fileRecord = await getFileMetadataByKey(cloudKey, 'knowledge-base')
+
+    if (fileRecord?.workspaceId) {
+      const permission = await getUserEntityPermissions(userId, 'workspace', fileRecord.workspaceId)
+      if (permission !== null) {
+        logger.debug('KB file access granted (workspaceFiles table)', {
+          userId,
+          workspaceId: fileRecord.workspaceId,
+          cloudKey,
+        })
+        return true
+      }
+      logger.warn('User does not have workspace access for KB file', {
+        userId,
+        workspaceId: fileRecord.workspaceId,
+        cloudKey,
+      })
+      return false
+    }
+
+    // Priority 2: Check document table via fileUrl (legacy knowledge base files)
+    try {
+      // Try to find document with matching fileUrl
+      const documents = await db
+        .select({
+          knowledgeBaseId: document.knowledgeBaseId,
+        })
+        .from(document)
+        .where(
+          or(
+            like(document.fileUrl, `%${cloudKey}%`),
+            like(document.fileUrl, `%${encodeURIComponent(cloudKey)}%`)
+          )
+        )
+        .limit(10) // Limit to avoid scanning too many
+
+      // Check each document's knowledge base for workspace access
+      for (const doc of documents) {
+        const { knowledgeBase } = await import('@sim/db/schema')
+        const [kb] = await db
+          .select({
+            workspaceId: knowledgeBase.workspaceId,
+          })
+          .from(knowledgeBase)
+          .where(eq(knowledgeBase.id, doc.knowledgeBaseId))
+          .limit(1)
+
+        if (kb?.workspaceId) {
+          const permission = await getUserEntityPermissions(userId, 'workspace', kb.workspaceId)
+          if (permission !== null) {
+            logger.debug('KB file access granted (document table lookup)', {
+              userId,
+              workspaceId: kb.workspaceId,
+              cloudKey,
+            })
+            return true
+          }
+        }
+      }
+    } catch (docError) {
+      logger.debug('Document table lookup failed:', docError)
+    }
+
+    // Priority 3: Check cloud storage metadata
+    const config: StorageConfig = customConfig || (await getKBStorageConfig())
+    const metadata = await getFileMetadata(cloudKey, config)
+    const workspaceId = metadata.workspaceId
+
+    if (workspaceId) {
+      const permission = await getUserEntityPermissions(userId, 'workspace', workspaceId)
+      if (permission !== null) {
+        logger.debug('KB file access granted (cloud metadata)', {
+          userId,
+          workspaceId,
+          cloudKey,
+        })
+        return true
+      }
+      logger.warn('User does not have workspace access for KB file', {
+        userId,
+        workspaceId,
+        cloudKey,
+      })
+      return false
+    }
+
+    logger.warn('KB file missing workspaceId in all sources', { cloudKey, userId })
+    return false
+  } catch (error) {
+    logger.error('Error verifying KB file access', { cloudKey, userId, error })
+    return false
+  }
+}
+
+/**
+ * Verify access to chat files
+ * Chat files: chat/filename
+ */
+async function verifyChatFileAccess(
+  cloudKey: string,
+  userId: string,
+  customConfig?: StorageConfig
+): Promise<boolean> {
+  try {
+    const config: StorageConfig = customConfig || (await getChatStorageConfig())
+
+    const metadata = await getFileMetadata(cloudKey, config)
+    const workspaceId = metadata.workspaceId
+
+    if (!workspaceId) {
+      logger.warn('Chat file missing workspaceId in metadata', { cloudKey, userId })
+      return false
+    }
+
+    const permission = await getUserEntityPermissions(userId, 'workspace', workspaceId)
+    if (permission === null) {
+      logger.warn('User does not have workspace access for chat file', {
+        userId,
+        workspaceId,
+        cloudKey,
+      })
+      return false
+    }
+
+    logger.debug('Chat file access granted', { userId, workspaceId, cloudKey })
+    return true
+  } catch (error) {
+    logger.error('Error verifying chat file access', { cloudKey, userId, error })
+    return false
+  }
+}
+
+/**
+ * Verify access to regular uploads
+ * Regular uploads: UUID-filename or timestamp-filename
+ * Priority: Database lookup (for workspace files) > Metadata > Deny
+ */
+async function verifyRegularFileAccess(
+  cloudKey: string,
+  userId: string,
+  customConfig?: StorageConfig,
+  isLocal?: boolean
+): Promise<boolean> {
+  try {
+    // Priority 1: Check if this might be a workspace file (check database)
+    // This handles legacy files that might not have metadata
+    const workspaceFileRecord = await lookupWorkspaceFileByKey(cloudKey)
+    if (workspaceFileRecord) {
+      const permission = await getUserEntityPermissions(
+        userId,
+        'workspace',
+        workspaceFileRecord.workspaceId
+      )
+      if (permission !== null) {
+        logger.debug('Regular file access granted (workspace file from database)', {
+          userId,
+          workspaceId: workspaceFileRecord.workspaceId,
+          cloudKey,
+        })
+        return true
+      }
+      logger.warn('User does not have workspace access for file', {
+        userId,
+        workspaceId: workspaceFileRecord.workspaceId,
+        cloudKey,
+      })
+      return false
+    }
+
+    // Priority 2: Check metadata (works for both local and cloud files)
+    const config: StorageConfig = customConfig || {}
+    const metadata = await getFileMetadata(cloudKey, config)
+    const fileUserId = metadata.userId
+    const workspaceId = metadata.workspaceId
+
+    // If file has userId, verify ownership
+    if (fileUserId) {
+      if (fileUserId === userId) {
+        logger.debug('Regular file access granted (userId match)', { userId, cloudKey })
+        return true
+      }
+      logger.warn('User does not own file', { userId, fileUserId, cloudKey })
+      return false
+    }
+
+    // If file has workspaceId, verify workspace membership
+    if (workspaceId) {
+      const permission = await getUserEntityPermissions(userId, 'workspace', workspaceId)
+      if (permission !== null) {
+        logger.debug('Regular file access granted (workspace membership)', {
+          userId,
+          workspaceId,
+          cloudKey,
+        })
+        return true
+      }
+      logger.warn('User does not have workspace access for file', {
+        userId,
+        workspaceId,
+        cloudKey,
+      })
+      return false
+    }
+
+    // No ownership info available - deny access for security
+    logger.warn('File missing ownership metadata', { cloudKey, userId })
+    return false
+  } catch (error) {
+    logger.error('Error verifying regular file access', { cloudKey, userId, error })
+    return false
+  }
+}
+
+/**
+ * Unified authorization function that returns structured result
+ */
+export async function authorizeFileAccess(
+  key: string,
+  userId: string,
+  context?: StorageContext,
+  storageConfig?: StorageConfig,
+  isLocal?: boolean
+): Promise<AuthorizationResult> {
+  const granted = await verifyFileAccess(key, userId, null, storageConfig, context, isLocal)
+
+  if (granted) {
+    let workspaceId: string | undefined
+    const inferredContext = context || inferContextFromKey(key)
+
+    if (inferredContext === 'workspace') {
+      const record = await lookupWorkspaceFileByKey(key)
+      workspaceId = record?.workspaceId
+    } else {
+      const extracted = extractWorkspaceIdFromKey(key)
+      if (extracted) {
+        workspaceId = extracted
+      }
+    }
+
+    return {
+      granted: true,
+      reason: 'Access granted',
+      workspaceId,
+    }
+  }
+
+  return {
+    granted: false,
+    reason: 'Access denied - insufficient permissions or file not found',
+  }
+}
+
+/**
+ * Get KB storage configuration based on current storage provider
+ */
+async function getKBStorageConfig(): Promise<StorageConfig> {
+  const { USE_S3_STORAGE, USE_BLOB_STORAGE } = await import('@/lib/uploads/config')
+
+  if (USE_BLOB_STORAGE) {
+    return {
+      containerName: BLOB_KB_CONFIG.containerName,
+      accountName: BLOB_KB_CONFIG.accountName,
+      accountKey: BLOB_KB_CONFIG.accountKey,
+      connectionString: BLOB_KB_CONFIG.connectionString,
+    }
+  }
+
+  if (USE_S3_STORAGE) {
+    return {
+      bucket: S3_KB_CONFIG.bucket,
+      region: S3_KB_CONFIG.region,
+    }
+  }
+
+  return {}
+}
+
+/**
+ * Get chat storage configuration based on current storage provider
+ */
+async function getChatStorageConfig(): Promise<StorageConfig> {
+  const { USE_S3_STORAGE, USE_BLOB_STORAGE } = await import('@/lib/uploads/config')
+
+  if (USE_BLOB_STORAGE) {
+    return {
+      containerName: BLOB_CHAT_CONFIG.containerName,
+      accountName: BLOB_CHAT_CONFIG.accountName,
+      accountKey: BLOB_CHAT_CONFIG.accountKey,
+      connectionString: BLOB_CHAT_CONFIG.connectionString,
+    }
+  }
+
+  if (USE_S3_STORAGE) {
+    return {
+      bucket: S3_CHAT_CONFIG.bucket,
+      region: S3_CHAT_CONFIG.region,
+    }
+  }
+
+  return {}
+}
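Note: a sketch of how a caller might consume the unified entry point defined above. Only authorizeFileAccess and its signature come from this file; the surrounding handler wiring is illustrative.

import { authorizeFileAccess } from '@/app/api/files/authorization'

// Hypothetical guard for a file-serving handler.
async function assertCanServe(key: string, userId: string): Promise<string | undefined> {
  const result = await authorizeFileAccess(key, userId) // context inferred from the key
  if (!result.granted) {
    // Surface a 404-style error rather than 403 so key existence is not leaked,
    // mirroring the FileNotFoundError pattern used by the routes below.
    throw new Error(result.reason)
  }
  return result.workspaceId // undefined for purely user-owned files
}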
@@ -1,14 +1,17 @@
 import type { NextRequest } from 'next/server'
+import { NextResponse } from 'next/server'
+import { checkHybridAuth } from '@/lib/auth/hybrid'
 import { createLogger } from '@/lib/logs/console/logger'
-import type { StorageContext } from '@/lib/uploads/core/config-resolver'
+import type { StorageContext } from '@/lib/uploads/config'
-import { deleteFile } from '@/lib/uploads/core/storage-service'
+import { deleteFile, hasCloudStorage } from '@/lib/uploads/core/storage-service'
+import { extractStorageKey, inferContextFromKey } from '@/lib/uploads/utils/file-utils'
+import { verifyFileAccess } from '@/app/api/files/authorization'
 import {
   createErrorResponse,
   createOptionsResponse,
   createSuccessResponse,
-  extractBlobKey,
   extractFilename,
-  extractS3Key,
+  FileNotFoundError,
   InvalidRequestError,
   isBlobPath,
   isCloudPath,
@@ -24,20 +27,44 @@ const logger = createLogger('FilesDeleteAPI')
  */
 export async function POST(request: NextRequest) {
   try {
+    const authResult = await checkHybridAuth(request, { requireWorkflowId: false })
+
+    if (!authResult.success || !authResult.userId) {
+      logger.warn('Unauthorized file delete request', {
+        error: authResult.error || 'Missing userId',
+      })
+      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
+    }
+
+    const userId = authResult.userId
     const requestData = await request.json()
     const { filePath, context } = requestData
 
-    logger.info('File delete request received:', { filePath, context })
+    logger.info('File delete request received:', { filePath, context, userId })
 
     if (!filePath) {
       throw new InvalidRequestError('No file path provided')
     }
 
     try {
-      const key = extractStorageKey(filePath)
+      const key = extractStorageKeyFromPath(filePath)
 
       const storageContext: StorageContext = context || inferContextFromKey(key)
 
+      const hasAccess = await verifyFileAccess(
+        key,
+        userId,
+        null,
+        undefined,
+        storageContext,
+        !hasCloudStorage() // isLocal
+      )
+
+      if (!hasAccess) {
+        logger.warn('Unauthorized file delete attempt', { userId, key, context: storageContext })
+        throw new FileNotFoundError(`File not found: ${key}`)
+      }
+
      logger.info(`Deleting file with key: ${key}, context: ${storageContext}`)
 
      await deleteFile({
@@ -53,6 +80,11 @@ export async function POST(request: NextRequest)
       })
     } catch (error) {
       logger.error('Error deleting file:', error)
+
+      if (error instanceof FileNotFoundError) {
+        return createErrorResponse(error)
+      }
+
       return createErrorResponse(
         error instanceof Error ? error : new Error('Failed to delete file')
       )
@@ -64,71 +96,20 @@ export async function POST(request: NextRequest)
 }
 
 /**
- * Extract storage key from file path (works for S3, Blob, and local paths)
+ * Extract storage key from file path
  */
-function extractStorageKey(filePath: string): string {
+function extractStorageKeyFromPath(filePath: string): string {
-  if (isS3Path(filePath)) {
+  if (isS3Path(filePath) || isBlobPath(filePath) || filePath.startsWith('/api/files/serve/')) {
-    return extractS3Key(filePath)
+    return extractStorageKey(filePath)
   }
 
-  if (isBlobPath(filePath)) {
-    return extractBlobKey(filePath)
-  }
-
-  // Handle "/api/files/serve/<key>" paths
-  if (filePath.startsWith('/api/files/serve/')) {
-    const pathWithoutQuery = filePath.split('?')[0]
-    return decodeURIComponent(pathWithoutQuery.substring('/api/files/serve/'.length))
-  }
-
-  // For local files, extract filename
   if (!isCloudPath(filePath)) {
     return extractFilename(filePath)
   }
 
-  // As a last resort, assume the incoming string is already a raw key
   return filePath
 }
 
-/**
- * Infer storage context from file key structure
- *
- * Key patterns:
- * - KB: kb/{uuid}-{filename}
- * - Workspace: {workspaceId}/{timestamp}-{random}-{filename}
- * - Execution: {workspaceId}/{workflowId}/{executionId}/{filename}
- * - Copilot: {timestamp}-{random}-{filename} (ambiguous - prefer explicit context)
- * - Chat: Uses execution context (same pattern as execution files)
- * - General: {timestamp}-{random}-{filename} (fallback for ambiguous patterns)
- */
-function inferContextFromKey(key: string): StorageContext {
-  // KB files always start with 'kb/' prefix
-  if (key.startsWith('kb/')) {
-    return 'knowledge-base'
-  }
-
-  // Execution files: three or more UUID segments (workspace/workflow/execution/...)
-  // Pattern: {uuid}/{uuid}/{uuid}/{filename}
-  const segments = key.split('/')
-  if (segments.length >= 4 && segments[0].match(/^[a-f0-9-]{36}$/)) {
-    return 'execution'
-  }
-
-  // Workspace files: UUID-like ID followed by timestamp pattern
-  // Pattern: {uuid}/{timestamp}-{random}-{filename}
-  if (key.match(/^[a-f0-9-]{36}\/\d+-[a-z0-9]+-/)) {
-    return 'workspace'
-  }
-
-  // Copilot/General files: timestamp-random-filename (no path segments)
-  // Pattern: {timestamp}-{random}-{filename}
-  if (key.match(/^\d+-[a-z0-9]+-/)) {
-    return 'general'
-  }
-
-  return 'general'
-}
-
 /**
  * Handle CORS preflight requests
  */
@@ -1,9 +1,10 @@
 import { type NextRequest, NextResponse } from 'next/server'
+import { checkHybridAuth } from '@/lib/auth/hybrid'
 import { createLogger } from '@/lib/logs/console/logger'
-import type { StorageContext } from '@/lib/uploads/core/config-resolver'
+import type { StorageContext } from '@/lib/uploads/config'
-import { generatePresignedDownloadUrl, hasCloudStorage } from '@/lib/uploads/core/storage-service'
+import { hasCloudStorage } from '@/lib/uploads/core/storage-service'
-import { getBaseUrl } from '@/lib/urls/utils'
+import { verifyFileAccess } from '@/app/api/files/authorization'
-import { createErrorResponse } from '@/app/api/files/utils'
+import { createErrorResponse, FileNotFoundError } from '@/app/api/files/utils'
 
 const logger = createLogger('FileDownload')
 
@@ -11,14 +12,34 @@ export const dynamic = 'force-dynamic'
 
 export async function POST(request: NextRequest) {
   try {
+    const authResult = await checkHybridAuth(request, { requireWorkflowId: false })
+
+    if (!authResult.success || !authResult.userId) {
+      logger.warn('Unauthorized download URL request', {
+        error: authResult.error || 'Missing userId',
+      })
+      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
+    }
+
+    const userId = authResult.userId
     const body = await request.json()
-    const { key, name, isExecutionFile, context } = body
+    const { key, name, isExecutionFile, context, url } = body
 
     if (!key) {
       return createErrorResponse(new Error('File key is required'), 400)
     }
 
-    logger.info(`Generating download URL for file: ${name || key}`)
+    if (key.startsWith('url/')) {
+      if (!url) {
+        return createErrorResponse(new Error('URL is required for URL-type files'), 400)
+      }
+
+      return NextResponse.json({
+        downloadUrl: url,
+        expiresIn: null,
+        fileName: name || key.split('/').pop() || 'download',
+      })
+    }
+
     let storageContext: StorageContext = context || 'general'
 
@@ -27,41 +48,37 @@ export async function POST(request: NextRequest)
       logger.info(`Using execution context for file: ${key}`)
     }
 
-    if (hasCloudStorage()) {
-      try {
-        const downloadUrl = await generatePresignedDownloadUrl(
-          key,
-          storageContext,
-          5 * 60 // 5 minutes
-        )
-
-        logger.info(`Generated download URL for ${storageContext} file: ${key}`)
-
-        return NextResponse.json({
-          downloadUrl,
-          expiresIn: 300, // 5 minutes in seconds
-          fileName: name || key.split('/').pop() || 'download',
-        })
-      } catch (error) {
-        logger.error(`Failed to generate presigned URL for ${key}:`, error)
-        return createErrorResponse(
-          error instanceof Error ? error : new Error('Failed to generate download URL'),
-          500
-        )
-      }
-    } else {
-      const downloadUrl = `${getBaseUrl()}/api/files/serve/${encodeURIComponent(key)}?context=${storageContext}`
-
-      logger.info(`Using local storage path for file: ${key}`)
-
-      return NextResponse.json({
-        downloadUrl,
-        expiresIn: null,
-        fileName: name || key.split('/').pop() || 'download',
-      })
+    const hasAccess = await verifyFileAccess(
+      key,
+      userId,
+      isExecutionFile ? 'execution' : null,
+      undefined,
+      storageContext,
+      !hasCloudStorage()
+    )
+
+    if (!hasAccess) {
+      logger.warn('Unauthorized download URL request', { userId, key, context: storageContext })
+      throw new FileNotFoundError(`File not found: ${key}`)
     }
+
+    const { getBaseUrl } = await import('@/lib/urls/utils')
+    const downloadUrl = `${getBaseUrl()}/api/files/serve/${encodeURIComponent(key)}?context=${storageContext}`
+
+    logger.info(`Generated download URL for ${storageContext} file: ${key}`)
+
+    return NextResponse.json({
+      downloadUrl,
+      expiresIn: null,
+      fileName: name || key.split('/').pop() || 'download',
+    })
   } catch (error) {
     logger.error('Error in file download endpoint:', error)
+
+    if (error instanceof FileNotFoundError) {
+      return createErrorResponse(error)
+    }
+
     return createErrorResponse(
       error instanceof Error ? error : new Error('Internal server error'),
       500
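Note: for reference, a client-side sketch of the contract this handler now enforces. The body fields and response shape come from the handler above; the mount point /api/files/download is assumed from context, not stated in the diff.

async function requestDownloadUrl(key: string, name?: string) {
  const res = await fetch('/api/files/download', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ key, name }),
  })
  if (res.status === 401) throw new Error('Sign in required')
  if (!res.ok) throw new Error('File not found or access denied')
  // Local and cloud keys now both resolve through /api/files/serve/<key>.
  const { downloadUrl, fileName } = (await res.json()) as {
    downloadUrl: string
    fileName: string
  }
  return { downloadUrl, fileName }
}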
@@ -1,5 +1,10 @@
+import { db } from '@sim/db'
+import { workflow, workflowExecutionLogs } from '@sim/db/schema'
+import { eq } from 'drizzle-orm'
 import { type NextRequest, NextResponse } from 'next/server'
+import { checkHybridAuth } from '@/lib/auth/hybrid'
 import { createLogger } from '@/lib/logs/console/logger'
+import { getUserEntityPermissions } from '@/lib/permissions/utils'
 import {
   generateExecutionFileDownloadUrl,
   getExecutionFiles,
@@ -17,6 +22,16 @@ export async function GET(
   { params }: { params: Promise<{ executionId: string; fileId: string }> }
 ) {
   try {
+    const authResult = await checkHybridAuth(request, { requireWorkflowId: false })
+
+    if (!authResult.success || !authResult.userId) {
+      logger.warn('Unauthorized execution file download request', {
+        error: authResult.error || 'Missing userId',
+      })
+      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
+    }
+
+    const userId = authResult.userId
     const { executionId, fileId } = await params
 
     if (!executionId || !fileId) {
@@ -25,6 +40,49 @@ export async function GET(
 
     logger.info(`Generating download URL for file ${fileId} in execution ${executionId}`)
 
+    const [executionLog] = await db
+      .select({
+        workflowId: workflowExecutionLogs.workflowId,
+      })
+      .from(workflowExecutionLogs)
+      .where(eq(workflowExecutionLogs.executionId, executionId))
+      .limit(1)
+
+    if (!executionLog) {
+      return NextResponse.json({ error: 'Execution not found' }, { status: 404 })
+    }
+
+    const [workflowData] = await db
+      .select({
+        workspaceId: workflow.workspaceId,
+      })
+      .from(workflow)
+      .where(eq(workflow.id, executionLog.workflowId))
+      .limit(1)
+
+    if (!workflowData) {
+      return NextResponse.json({ error: 'Workflow not found' }, { status: 404 })
+    }
+
+    if (!workflowData.workspaceId) {
+      logger.warn('Workflow missing workspaceId', {
+        workflowId: executionLog.workflowId,
+        executionId,
+      })
+      return NextResponse.json({ error: 'Workflow not found' }, { status: 404 })
+    }
+
+    const permission = await getUserEntityPermissions(userId, 'workspace', workflowData.workspaceId)
+    if (permission === null) {
+      logger.warn('User does not have workspace access for execution file', {
+        userId,
+        workspaceId: workflowData.workspaceId,
+        executionId,
+        fileId,
+      })
+      return NextResponse.json({ error: 'File not found' }, { status: 404 })
+    }
+
     const executionFiles = await getExecutionFiles(executionId)
 
     if (executionFiles.length === 0) {
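Note: the gate added above resolves access through two lookups before touching any file. A condensed restatement of that chain (table, schema, and helper names are taken from the diff itself; the standalone function is illustrative):

// executionId -> workflowId -> workspaceId -> permission; deny on any gap.
async function canDownloadExecutionFile(userId: string, executionId: string): Promise<boolean> {
  const [log] = await db
    .select({ workflowId: workflowExecutionLogs.workflowId })
    .from(workflowExecutionLogs)
    .where(eq(workflowExecutionLogs.executionId, executionId))
    .limit(1)
  if (!log) return false

  const [wf] = await db
    .select({ workspaceId: workflow.workspaceId })
    .from(workflow)
    .where(eq(workflow.id, log.workflowId))
    .limit(1)
  if (!wf?.workspaceId) return false

  return (await getUserEntityPermissions(userId, 'workspace', wf.workspaceId)) !== null
}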
@@ -50,7 +50,7 @@ export async function POST(request: NextRequest)
     const config = getStorageConfig(context)
 
     if (storageProvider === 's3') {
-      const { initiateS3MultipartUpload } = await import('@/lib/uploads/providers/s3/s3-client')
+      const { initiateS3MultipartUpload } = await import('@/lib/uploads/providers/s3/client')
 
       const result = await initiateS3MultipartUpload({
         fileName,
@@ -68,9 +68,7 @@ export async function POST(request: NextRequest)
       })
     }
     if (storageProvider === 'blob') {
-      const { initiateMultipartUpload } = await import(
-        '@/lib/uploads/providers/blob/blob-client'
-      )
+      const { initiateMultipartUpload } = await import('@/lib/uploads/providers/blob/client')
 
       const result = await initiateMultipartUpload({
         fileName,
@@ -107,16 +105,16 @@ export async function POST(request: NextRequest) {
|
|||||||
const config = getStorageConfig(context)
|
const config = getStorageConfig(context)
|
||||||
|
|
||||||
if (storageProvider === 's3') {
|
if (storageProvider === 's3') {
|
||||||
const { getS3MultipartPartUrls } = await import('@/lib/uploads/providers/s3/s3-client')
|
const { getS3MultipartPartUrls } = await import('@/lib/uploads/providers/s3/client')
|
||||||
|
|
||||||
const presignedUrls = await getS3MultipartPartUrls(key, uploadId, partNumbers)
|
const presignedUrls = await getS3MultipartPartUrls(key, uploadId, partNumbers)
|
||||||
|
|
||||||
return NextResponse.json({ presignedUrls })
|
return NextResponse.json({ presignedUrls })
|
||||||
}
|
}
|
||||||
if (storageProvider === 'blob') {
|
if (storageProvider === 'blob') {
|
||||||
const { getMultipartPartUrls } = await import('@/lib/uploads/providers/blob/blob-client')
|
const { getMultipartPartUrls } = await import('@/lib/uploads/providers/blob/client')
|
||||||
|
|
||||||
const presignedUrls = await getMultipartPartUrls(key, uploadId, partNumbers, {
|
const presignedUrls = await getMultipartPartUrls(key, partNumbers, {
|
||||||
containerName: config.containerName!,
|
containerName: config.containerName!,
|
||||||
accountName: config.accountName!,
|
accountName: config.accountName!,
|
||||||
accountKey: config.accountKey,
|
accountKey: config.accountKey,
|
||||||
@@ -145,7 +143,7 @@ export async function POST(request: NextRequest) {
|
|||||||
|
|
||||||
if (storageProvider === 's3') {
|
if (storageProvider === 's3') {
|
||||||
const { completeS3MultipartUpload } = await import(
|
const { completeS3MultipartUpload } = await import(
|
||||||
'@/lib/uploads/providers/s3/s3-client'
|
'@/lib/uploads/providers/s3/client'
|
||||||
)
|
)
|
||||||
const parts = upload.parts // S3 format: { ETag, PartNumber }
|
const parts = upload.parts // S3 format: { ETag, PartNumber }
|
||||||
|
|
||||||
@@ -160,11 +158,11 @@ export async function POST(request: NextRequest) {
|
|||||||
}
|
}
|
||||||
if (storageProvider === 'blob') {
|
if (storageProvider === 'blob') {
|
||||||
const { completeMultipartUpload } = await import(
|
const { completeMultipartUpload } = await import(
|
||||||
'@/lib/uploads/providers/blob/blob-client'
|
'@/lib/uploads/providers/blob/client'
|
||||||
)
|
)
|
||||||
const parts = upload.parts // Azure format: { blockId, partNumber }
|
const parts = upload.parts // Azure format: { blockId, partNumber }
|
||||||
|
|
||||||
const result = await completeMultipartUpload(key, uploadId, parts, {
|
const result = await completeMultipartUpload(key, parts, {
|
||||||
containerName: config.containerName!,
|
containerName: config.containerName!,
|
||||||
accountName: config.accountName!,
|
accountName: config.accountName!,
|
||||||
accountKey: config.accountKey,
|
accountKey: config.accountKey,
|
||||||
@@ -190,7 +188,7 @@ export async function POST(request: NextRequest) {
|
|||||||
const { uploadId, key, parts } = data
|
const { uploadId, key, parts } = data
|
||||||
|
|
||||||
if (storageProvider === 's3') {
|
if (storageProvider === 's3') {
|
||||||
const { completeS3MultipartUpload } = await import('@/lib/uploads/providers/s3/s3-client')
|
const { completeS3MultipartUpload } = await import('@/lib/uploads/providers/s3/client')
|
||||||
|
|
||||||
const result = await completeS3MultipartUpload(key, uploadId, parts)
|
const result = await completeS3MultipartUpload(key, uploadId, parts)
|
||||||
|
|
||||||
@@ -204,11 +202,9 @@ export async function POST(request: NextRequest) {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
if (storageProvider === 'blob') {
|
if (storageProvider === 'blob') {
|
||||||
const { completeMultipartUpload } = await import(
|
const { completeMultipartUpload } = await import('@/lib/uploads/providers/blob/client')
|
||||||
'@/lib/uploads/providers/blob/blob-client'
|
|
||||||
)
|
|
||||||
|
|
||||||
const result = await completeMultipartUpload(key, uploadId, parts, {
|
const result = await completeMultipartUpload(key, parts, {
|
||||||
containerName: config.containerName!,
|
containerName: config.containerName!,
|
||||||
accountName: config.accountName!,
|
accountName: config.accountName!,
|
||||||
accountKey: config.accountKey,
|
accountKey: config.accountKey,
|
||||||
@@ -238,15 +234,15 @@ export async function POST(request: NextRequest) {
|
|||||||
const config = getStorageConfig(context as StorageContext)
|
const config = getStorageConfig(context as StorageContext)
|
||||||
|
|
||||||
if (storageProvider === 's3') {
|
if (storageProvider === 's3') {
|
||||||
const { abortS3MultipartUpload } = await import('@/lib/uploads/providers/s3/s3-client')
|
const { abortS3MultipartUpload } = await import('@/lib/uploads/providers/s3/client')
|
||||||
|
|
||||||
await abortS3MultipartUpload(key, uploadId)
|
await abortS3MultipartUpload(key, uploadId)
|
||||||
|
|
||||||
logger.info(`Aborted S3 multipart upload for key ${key} (context: ${context})`)
|
logger.info(`Aborted S3 multipart upload for key ${key} (context: ${context})`)
|
||||||
} else if (storageProvider === 'blob') {
|
} else if (storageProvider === 'blob') {
|
||||||
const { abortMultipartUpload } = await import('@/lib/uploads/providers/blob/blob-client')
|
const { abortMultipartUpload } = await import('@/lib/uploads/providers/blob/client')
|
||||||
|
|
||||||
await abortMultipartUpload(key, uploadId, {
|
await abortMultipartUpload(key, {
|
||||||
containerName: config.containerName!,
|
containerName: config.containerName!,
|
||||||
accountName: config.accountName!,
|
accountName: config.accountName!,
|
||||||
accountKey: config.accountKey,
|
accountKey: config.accountKey,
|
||||||
|
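Beyond the provider modules moving to providers/s3/client and providers/blob/client, the Azure helpers drop the uploadId argument. Unlike S3 multipart uploads, Azure block blobs have no server-side upload session: blocks are staged directly against the blob and committed by listing their ids, so the key and the block list are all that is needed. A minimal sketch with @azure/storage-blob (connection-string wiring is an assumption here, not the project's helper):

// Why Azure needs no uploadId: commit is just "put block list" on the blob itself.
import { BlobServiceClient } from '@azure/storage-blob'

async function completeBlobMultipart(
  connectionString: string,
  containerName: string,
  key: string,
  parts: { blockId: string; partNumber: number }[]
) {
  const service = BlobServiceClient.fromConnectionString(connectionString)
  const blob = service.getContainerClient(containerName).getBlockBlobClient(key)
  // Order matters: commit blocks in part order, identified only by blockId.
  const blockIds = [...parts].sort((a, b) => a.partNumber - b.partNumber).map((p) => p.blockId)
  await blob.commitBlockList(blockIds)
}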
@@ -7,7 +7,6 @@ import { NextRequest } from 'next/server'
  */
 import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
 import { createMockRequest, setupFileApiMocks } from '@/app/api/__test-utils__/utils'
-import { POST } from '@/app/api/files/parse/route'

 const mockJoin = vi.fn((...args: string[]): string => {
   if (args[0] === '/test/uploads') {
@@ -18,7 +17,11 @@ const mockJoin = vi.fn((...args: string[]): string => {

 describe('File Parse API Route', () => {
   beforeEach(() => {
-    vi.resetAllMocks()
+    vi.resetModules()
+
+    setupFileApiMocks({
+      authenticated: true,
+    })

     vi.doMock('@/lib/file-parsers', () => ({
       isSupportedFileType: vi.fn().mockReturnValue(true),
@@ -50,8 +53,6 @@ describe('File Parse API Route', () => {
   })

   it('should handle missing file path', async () => {
-    setupFileApiMocks()
-
     const req = createMockRequest('POST', {})
     const { POST } = await import('@/app/api/files/parse/route')

@@ -66,6 +67,7 @@ describe('File Parse API Route', () => {
     setupFileApiMocks({
       cloudEnabled: false,
       storageProvider: 'local',
+      authenticated: true,
     })

     const req = createMockRequest('POST', {
@@ -91,6 +93,7 @@ describe('File Parse API Route', () => {
     setupFileApiMocks({
       cloudEnabled: true,
       storageProvider: 's3',
+      authenticated: true,
     })

     const req = createMockRequest('POST', {
@@ -114,6 +117,7 @@ describe('File Parse API Route', () => {
     setupFileApiMocks({
       cloudEnabled: false,
       storageProvider: 'local',
+      authenticated: true,
     })

     const req = createMockRequest('POST', {
@@ -135,6 +139,7 @@ describe('File Parse API Route', () => {
     setupFileApiMocks({
       cloudEnabled: true,
       storageProvider: 's3',
+      authenticated: true,
     })

     const req = createMockRequest('POST', {
@@ -159,6 +164,7 @@ describe('File Parse API Route', () => {
     setupFileApiMocks({
       cloudEnabled: true,
       storageProvider: 's3',
+      authenticated: true,
     })

     const req = createMockRequest('POST', {
@@ -183,6 +189,7 @@ describe('File Parse API Route', () => {
     setupFileApiMocks({
       cloudEnabled: true,
       storageProvider: 's3',
+      authenticated: true,
     })

     const downloadFileMock = vi.fn().mockRejectedValue(new Error('Access denied'))
@@ -211,6 +218,7 @@ describe('File Parse API Route', () => {
     setupFileApiMocks({
       cloudEnabled: false,
       storageProvider: 'local',
+      authenticated: true,
     })

     vi.doMock('fs/promises', () => ({
@@ -236,7 +244,10 @@ describe('File Parse API Route', () => {

 describe('Files Parse API - Path Traversal Security', () => {
   beforeEach(() => {
-    vi.clearAllMocks()
+    vi.resetModules()
+    setupFileApiMocks({
+      authenticated: true,
+    })
   })

   describe('Path Traversal Prevention', () => {
@@ -257,11 +268,14 @@ describe('Files Parse API - Path Traversal Security', () => {
         }),
       })

+      const { POST } = await import('@/app/api/files/parse/route')
       const response = await POST(request)
       const result = await response.json()

       expect(result.success).toBe(false)
-      expect(result.error).toMatch(/Access denied|Invalid path|Path outside allowed directory/)
+      expect(result.error).toMatch(
+        /Access denied|Invalid path|Path outside allowed directory|Unauthorized/
+      )
     }
   })

@@ -280,11 +294,12 @@ describe('Files Parse API - Path Traversal Security', () => {
         }),
       })

+      const { POST } = await import('@/app/api/files/parse/route')
       const response = await POST(request)
       const result = await response.json()

       expect(result.success).toBe(false)
-      expect(result.error).toMatch(/Access denied|Invalid path/)
+      expect(result.error).toMatch(/Access denied|Invalid path|Unauthorized/)
     }
   })

@@ -305,11 +320,12 @@ describe('Files Parse API - Path Traversal Security', () => {
         }),
       })

+      const { POST } = await import('@/app/api/files/parse/route')
       const response = await POST(request)
       const result = await response.json()

       expect(result.success).toBe(false)
-      expect(result.error).toMatch(/Access denied|Path outside allowed directory/)
+      expect(result.error).toMatch(/Access denied|Path outside allowed directory|Unauthorized/)
     }
   })

@@ -328,6 +344,7 @@ describe('Files Parse API - Path Traversal Security', () => {
       }),
     })

+    const { POST } = await import('@/app/api/files/parse/route')
     const response = await POST(request)
     const result = await response.json()

@@ -354,11 +371,14 @@ describe('Files Parse API - Path Traversal Security', () => {
         }),
       })

+      const { POST } = await import('@/app/api/files/parse/route')
       const response = await POST(request)
       const result = await response.json()

       expect(result.success).toBe(false)
-      expect(result.error).toMatch(/Access denied|Invalid path|Path outside allowed directory/)
+      expect(result.error).toMatch(
+        /Access denied|Invalid path|Path outside allowed directory|Unauthorized/
+      )
     }
   })

@@ -377,6 +397,7 @@ describe('Files Parse API - Path Traversal Security', () => {
       }),
     })

+    const { POST } = await import('@/app/api/files/parse/route')
     const response = await POST(request)
     const result = await response.json()

@@ -394,6 +415,7 @@ describe('Files Parse API - Path Traversal Security', () => {
       }),
     })

+    const { POST } = await import('@/app/api/files/parse/route')
     const response = await POST(request)
     const result = await response.json()

@@ -407,6 +429,7 @@ describe('Files Parse API - Path Traversal Security', () => {
       body: JSON.stringify({}),
     })

+    const { POST } = await import('@/app/api/files/parse/route')
    const response = await POST(request)
    const result = await response.json()

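The recurring edit in this test file is a single pattern: vi.resetModules() in beforeEach, per-test vi.doMock(...), and a dynamic await import(...) of the route. Unlike vi.mock, vi.doMock is not hoisted, so it only affects modules imported after it runs; resetting the module registry is what lets each test load a fresh copy of the route bound to its own mocks. The skeleton, reduced to its moving parts (the paths are the ones used above):

// doMock is unhoisted: the order reset modules -> register mocks -> import is essential.
import { beforeEach, it, vi } from 'vitest'

beforeEach(() => {
  vi.resetModules() // drop cached modules so the next import re-evaluates them
})

it('uses the per-test mock', async () => {
  vi.doMock('@/lib/auth/hybrid', () => ({
    checkHybridAuth: vi.fn().mockResolvedValue({ success: true, userId: 'test-user-id' }),
  }))
  const { POST } = await import('@/app/api/files/parse/route') // sees the mock above
  // ... exercise POST against a mock request
})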
@@ -4,31 +4,28 @@ import fsPromises, { readFile } from 'fs/promises'
 import path from 'path'
 import binaryExtensionsList from 'binary-extensions'
 import { type NextRequest, NextResponse } from 'next/server'
+import { checkHybridAuth } from '@/lib/auth/hybrid'
 import { isSupportedFileType, parseFile } from '@/lib/file-parsers'
 import { createLogger } from '@/lib/logs/console/logger'
+import { getUserEntityPermissions } from '@/lib/permissions/utils'
 import { validateExternalUrl } from '@/lib/security/input-validation'
 import { isUsingCloudStorage, type StorageContext, StorageService } from '@/lib/uploads'
 import { UPLOAD_DIR_SERVER } from '@/lib/uploads/core/setup.server'
-import { extractStorageKey } from '@/lib/uploads/utils/file-utils'
+import { getFileMetadataByKey } from '@/lib/uploads/server/metadata'
+import {
+  extractCleanFilename,
+  extractStorageKey,
+  extractWorkspaceIdFromExecutionKey,
+  getViewerUrl,
+  inferContextFromKey,
+} from '@/lib/uploads/utils/file-utils'
+import { verifyFileAccess } from '@/app/api/files/authorization'
 import '@/lib/uploads/core/setup.server'

 export const dynamic = 'force-dynamic'

 const logger = createLogger('FilesParseAPI')

-/**
- * Infer storage context from file key pattern
- */
-function inferContextFromKey(key: string): StorageContext {
-  if (key.startsWith('kb/')) return 'knowledge-base'
-
-  const segments = key.split('/')
-  if (segments.length >= 4 && segments[0].match(/^[a-f0-9-]{36}$/)) return 'execution'
-  if (key.match(/^[a-f0-9-]{36}\/\d+-[a-z0-9]+-/)) return 'workspace'
-
-  return 'general'
-}
-
 const MAX_DOWNLOAD_SIZE_BYTES = 100 * 1024 * 1024 // 100 MB
 const DOWNLOAD_TIMEOUT_MS = 30000 // 30 seconds

@@ -37,6 +34,8 @@ interface ParseResult {
   content?: string
   error?: string
   filePath: string
+  originalName?: string // Original filename from database (for workspace files)
+  viewerUrl?: string | null // Viewer URL for the file if available
   metadata?: {
     fileType: string
     size: number
@@ -82,6 +81,23 @@ export async function POST(request: NextRequest) {
   const startTime = Date.now()

   try {
+    const authResult = await checkHybridAuth(request, { requireWorkflowId: true })
+
+    if (!authResult.success) {
+      logger.warn('Unauthorized file parse request', {
+        error: authResult.error || 'Authentication failed',
+      })
+      return NextResponse.json({ success: false, error: 'Unauthorized' }, { status: 401 })
+    }
+
+    if (!authResult.userId) {
+      logger.warn('File parse request missing userId', {
+        authType: authResult.authType,
+      })
+      return NextResponse.json({ success: false, error: 'User context required' }, { status: 401 })
+    }
+
+    const userId = authResult.userId
     const requestData = await request.json()
     const { filePath, fileType, workspaceId } = requestData

@@ -89,7 +105,7 @@ export async function POST(request: NextRequest) {
       return NextResponse.json({ success: false, error: 'No file path provided' }, { status: 400 })
     }

-    logger.info('File parse request received:', { filePath, fileType, workspaceId })
+    logger.info('File parse request received:', { filePath, fileType, workspaceId, userId })

     if (Array.isArray(filePath)) {
       const results = []
@@ -103,22 +119,25 @@ export async function POST(request: NextRequest) {
          continue
        }

-        const result = await parseFileSingle(path, fileType, workspaceId)
+        const result = await parseFileSingle(path, fileType, workspaceId, userId)
        if (result.metadata) {
          result.metadata.processingTime = Date.now() - startTime
        }

        if (result.success) {
+          const displayName =
+            result.originalName || extractCleanFilename(result.filePath) || 'unknown'
          results.push({
            success: true,
            output: {
              content: result.content,
-              name: result.filePath.split('/').pop() || 'unknown',
+              name: displayName,
              fileType: result.metadata?.fileType || 'application/octet-stream',
              size: result.metadata?.size || 0,
              binary: false,
            },
            filePath: result.filePath,
+            viewerUrl: result.viewerUrl,
          })
        } else {
          results.push(result)
@@ -131,22 +150,25 @@ export async function POST(request: NextRequest) {
       })
     }

-    const result = await parseFileSingle(filePath, fileType, workspaceId)
+    const result = await parseFileSingle(filePath, fileType, workspaceId, userId)

     if (result.metadata) {
       result.metadata.processingTime = Date.now() - startTime
     }

     if (result.success) {
+      const displayName = result.originalName || extractCleanFilename(result.filePath) || 'unknown'
       return NextResponse.json({
         success: true,
         output: {
           content: result.content,
-          name: result.filePath.split('/').pop() || 'unknown',
+          name: displayName,
           fileType: result.metadata?.fileType || 'application/octet-stream',
           size: result.metadata?.size || 0,
-          binary: false, // We only return text content
+          binary: false,
         },
+        filePath: result.filePath,
+        viewerUrl: result.viewerUrl,
       })
     }

@@ -169,8 +191,9 @@ export async function POST(request: NextRequest) {
  */
 async function parseFileSingle(
   filePath: string,
-  fileType?: string,
-  workspaceId?: string
+  fileType: string,
+  workspaceId: string,
+  userId: string
 ): Promise<ParseResult> {
   logger.info('Parsing file:', filePath)

@@ -192,18 +215,18 @@ async function parseFileSingle(
   }

   if (filePath.includes('/api/files/serve/')) {
-    return handleCloudFile(filePath, fileType)
+    return handleCloudFile(filePath, fileType, undefined, userId)
   }

   if (filePath.startsWith('http://') || filePath.startsWith('https://')) {
-    return handleExternalUrl(filePath, fileType, workspaceId)
+    return handleExternalUrl(filePath, fileType, workspaceId, userId)
   }

   if (isUsingCloudStorage()) {
-    return handleCloudFile(filePath, fileType)
+    return handleCloudFile(filePath, fileType, undefined, userId)
   }

-  return handleLocalFile(filePath, fileType)
+  return handleLocalFile(filePath, fileType, userId)
 }

 /**
@@ -239,8 +262,9 @@ function validateFilePath(filePath: string): { isValid: boolean; error?: string
  */
 async function handleExternalUrl(
   url: string,
-  fileType?: string,
-  workspaceId?: string
+  fileType: string,
+  workspaceId: string,
+  userId: string
 ): Promise<ParseResult> {
   try {
     logger.info('Fetching external URL:', url)
@@ -267,7 +291,7 @@ async function handleExternalUrl(
       BLOB_EXECUTION_FILES_CONFIG,
       USE_S3_STORAGE,
       USE_BLOB_STORAGE,
-    } = await import('@/lib/uploads/core/setup')
+    } = await import('@/lib/uploads/config')

     let isExecutionFile = false
     try {
@@ -291,6 +315,20 @@ async function handleExternalUrl(
     const shouldCheckWorkspace = workspaceId && !isExecutionFile

     if (shouldCheckWorkspace) {
+      const permission = await getUserEntityPermissions(userId, 'workspace', workspaceId)
+      if (permission === null) {
+        logger.warn('User does not have workspace access for file parse', {
+          userId,
+          workspaceId,
+          filename,
+        })
+        return {
+          success: false,
+          error: 'File not found',
+          filePath: url,
+        }
+      }
+
       const { fileExistsInWorkspace, listWorkspaceFiles } = await import(
         '@/lib/uploads/contexts/workspace'
       )
@@ -303,7 +341,7 @@ async function handleExternalUrl(

       if (existingFile) {
         const storageFilePath = `/api/files/serve/${existingFile.key}`
-        return handleCloudFile(storageFilePath, fileType, 'workspace')
+        return handleCloudFile(storageFilePath, fileType, 'workspace', userId)
       }
     }
   }
@@ -330,13 +368,18 @@ async function handleExternalUrl(

     if (shouldCheckWorkspace) {
       try {
-        const { getSession } = await import('@/lib/auth')
-        const { uploadWorkspaceFile } = await import('@/lib/uploads/contexts/workspace')
-
-        const session = await getSession()
-        if (session?.user?.id) {
+        const permission = await getUserEntityPermissions(userId, 'workspace', workspaceId)
+        if (permission !== 'admin' && permission !== 'write') {
+          logger.warn('User does not have write permission for workspace file save', {
+            userId,
+            workspaceId,
+            filename,
+            permission,
+          })
+        } else {
+          const { uploadWorkspaceFile } = await import('@/lib/uploads/contexts/workspace')
           const mimeType = response.headers.get('content-type') || getMimeType(extension)
-          await uploadWorkspaceFile(workspaceId, session.user.id, buffer, filename, mimeType)
+          await uploadWorkspaceFile(workspaceId, userId, buffer, filename, mimeType)
           logger.info(`Saved URL file to workspace storage: ${filename}`)
         }
       } catch (saveError) {
@@ -370,8 +413,9 @@ async function handleExternalUrl(
  */
 async function handleCloudFile(
   filePath: string,
-  fileType?: string,
-  explicitContext?: string
+  fileType: string,
+  explicitContext: string | undefined,
+  userId: string
 ): Promise<ParseResult> {
   try {
     const cloudKey = extractStorageKey(filePath)
@@ -379,24 +423,86 @@ async function handleCloudFile(
     logger.info('Extracted cloud key:', cloudKey)

     const context = (explicitContext as StorageContext) || inferContextFromKey(cloudKey)

+    const hasAccess = await verifyFileAccess(
+      cloudKey,
+      userId,
+      null,
+      undefined,
+      context,
+      false // isLocal
+    )
+
+    if (!hasAccess) {
+      logger.warn('Unauthorized cloud file parse attempt', { userId, key: cloudKey, context })
+      return {
+        success: false,
+        error: 'File not found',
+        filePath,
+      }
+    }
+
+    let originalFilename: string | undefined
+    if (context === 'workspace') {
+      try {
+        const fileRecord = await getFileMetadataByKey(cloudKey, 'workspace')
+
+        if (fileRecord) {
+          originalFilename = fileRecord.originalName
+          logger.debug(`Found original filename for workspace file: ${originalFilename}`)
+        }
+      } catch (dbError) {
+        logger.debug(`Failed to lookup original filename for ${cloudKey}:`, dbError)
+      }
+    }
+
     const fileBuffer = await StorageService.downloadFile({ key: cloudKey, context })
     logger.info(
       `Downloaded file from ${context} storage (${explicitContext ? 'explicit' : 'inferred'}): ${cloudKey}, size: ${fileBuffer.length} bytes`
     )

-    const filename = cloudKey.split('/').pop() || cloudKey
+    const filename = originalFilename || cloudKey.split('/').pop() || cloudKey
     const extension = path.extname(filename).toLowerCase().substring(1)

+    const normalizedFilePath = `/api/files/serve/${encodeURIComponent(cloudKey)}?context=${context}`
+    let workspaceIdFromKey: string | undefined

+    if (context === 'execution') {
+      workspaceIdFromKey = extractWorkspaceIdFromExecutionKey(cloudKey) || undefined
+    } else if (context === 'workspace') {
+      const segments = cloudKey.split('/')
+      if (segments.length >= 2 && /^[a-f0-9-]{36}$/.test(segments[0])) {
+        workspaceIdFromKey = segments[0]
+      }
+    }

+    const viewerUrl = getViewerUrl(cloudKey, workspaceIdFromKey)

+    let parseResult: ParseResult
     if (extension === 'pdf') {
-      return await handlePdfBuffer(fileBuffer, filename, fileType, filePath)
+      parseResult = await handlePdfBuffer(fileBuffer, filename, fileType, normalizedFilePath)
+    } else if (extension === 'csv') {
+      parseResult = await handleCsvBuffer(fileBuffer, filename, fileType, normalizedFilePath)
+    } else if (isSupportedFileType(extension)) {
+      parseResult = await handleGenericTextBuffer(
+        fileBuffer,
+        filename,
+        extension,
+        fileType,
+        normalizedFilePath
+      )
+    } else {
+      parseResult = handleGenericBuffer(fileBuffer, filename, extension, fileType)
+      parseResult.filePath = normalizedFilePath
     }
-    if (extension === 'csv') {
-      return await handleCsvBuffer(fileBuffer, filename, fileType, filePath)
+    if (originalFilename) {
+      parseResult.originalName = originalFilename
     }
-    if (isSupportedFileType(extension)) {
-      return await handleGenericTextBuffer(fileBuffer, filename, extension, fileType, filePath)
-    }
-    return handleGenericBuffer(fileBuffer, filename, extension, fileType)
+    parseResult.viewerUrl = viewerUrl
+    return parseResult
   } catch (error) {
     logger.error(`Error handling cloud file ${filePath}:`, error)

@@ -416,9 +522,33 @@ async function handleCloudFile(
 /**
  * Handle local file
  */
-async function handleLocalFile(filePath: string, fileType?: string): Promise<ParseResult> {
+async function handleLocalFile(
+  filePath: string,
+  fileType: string,
+  userId: string
+): Promise<ParseResult> {
   try {
     const filename = filePath.split('/').pop() || filePath

+    const context = inferContextFromKey(filename)
+    const hasAccess = await verifyFileAccess(
+      filename,
+      userId,
+      null,
+      undefined,
+      context,
+      true // isLocal
+    )
+
+    if (!hasAccess) {
+      logger.warn('Unauthorized local file parse attempt', { userId, filename })
+      return {
+        success: false,
+        error: 'File not found',
+        filePath,
+      }
+    }
+
     const fullPath = path.join(UPLOAD_DIR_SERVER, filename)

     logger.info('Processing local file:', fullPath)
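Both routes in this commit lose their private copies of the key-pattern heuristic in favor of the shared inferContextFromKey from @/lib/uploads/utils/file-utils. Reconstructed from the deleted copies, the heuristic reads roughly as follows (the context union is trimmed here; the real module exports more members):

// A self-contained sketch of the shared heuristic, reconstructed from the
// deleted in-route copies shown above (the real one lives in file-utils):
type InferredContext = 'knowledge-base' | 'workspace' | 'execution' | 'general'

function inferContextFromKeySketch(key: string): InferredContext {
  if (key.startsWith('kb/')) return 'knowledge-base' // KB keys carry a fixed prefix
  if (/^[a-f0-9-]{36}\/\d+-[a-z0-9]+-/.test(key)) return 'workspace' // {uuid}/{ts}-{rand}-{name}
  const segments = key.split('/')
  if (segments.length >= 4 && /^[a-f0-9-]{36}$/.test(segments[0])) return 'execution' // {uuid}/{uuid}/{uuid}/{file}
  return 'general' // flat {ts}-{rand}-{name} keys stay ambiguous; pass context explicitly
}

The two deleted copies checked the workspace and execution patterns in opposite orders; for well-formed keys the patterns do not overlap, so either order yields the same answer, which is presumably why consolidating them was safe.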
@@ -1,8 +1,8 @@
 import { type NextRequest, NextResponse } from 'next/server'
 import { getSession } from '@/lib/auth'
 import { createLogger } from '@/lib/logs/console/logger'
-import type { StorageContext } from '@/lib/uploads/core/config-resolver'
-import { USE_BLOB_STORAGE } from '@/lib/uploads/core/setup'
+import type { StorageContext } from '@/lib/uploads/config'
+import { USE_BLOB_STORAGE } from '@/lib/uploads/config'
 import {
   generateBatchPresignedUploadUrls,
   hasCloudStorage,
@@ -2,8 +2,8 @@ import { type NextRequest, NextResponse } from 'next/server'
 import { getSession } from '@/lib/auth'
 import { createLogger } from '@/lib/logs/console/logger'
 import { CopilotFiles } from '@/lib/uploads'
-import type { StorageContext } from '@/lib/uploads/core/config-resolver'
-import { USE_BLOB_STORAGE } from '@/lib/uploads/core/setup'
+import type { StorageContext } from '@/lib/uploads/config'
+import { USE_BLOB_STORAGE } from '@/lib/uploads/config'
 import { generatePresignedUploadUrl, hasCloudStorage } from '@/lib/uploads/core/storage-service'
 import { validateFileType } from '@/lib/uploads/utils/validation'
 import { createErrorResponse } from '@/app/api/files/utils'
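Both hunks are the same one-line move: StorageContext and the storage flags now come from a single @/lib/uploads/config module instead of core/config-resolver and core/setup. Judging only by what the routes in this diff import from it, the module's surface is roughly the following (the definitions are assumptions; only the exported names are attested):

// Rough shape of the consolidated config module, inferred from its importers:
export type StorageContext =
  | 'general'
  | 'workspace'
  | 'execution'
  | 'knowledge-base'
  | 'copilot'
  | 'chat'
  | 'profile-pictures'

// Assumed env wiring; the real flags may be derived differently.
export const USE_S3_STORAGE = process.env.STORAGE_PROVIDER === 's3'
export const USE_BLOB_STORAGE = process.env.STORAGE_PROVIDER === 'blob'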
@@ -16,6 +16,17 @@ describe('File Serve API Route', () => {
       withUploadUtils: true,
     })

+    vi.doMock('@/lib/auth/hybrid', () => ({
+      checkHybridAuth: vi.fn().mockResolvedValue({
+        success: true,
+        userId: 'test-user-id',
+      }),
+    }))
+
+    vi.doMock('@/app/api/files/authorization', () => ({
+      verifyFileAccess: vi.fn().mockResolvedValue(true),
+    }))
+
     vi.doMock('fs', () => ({
       existsSync: vi.fn().mockReturnValue(true),
     }))
@@ -45,8 +56,7 @@ describe('File Serve API Route', () => {
       getContentType: vi.fn().mockReturnValue('text/plain'),
       isS3Path: vi.fn().mockReturnValue(false),
       isBlobPath: vi.fn().mockReturnValue(false),
-      extractS3Key: vi.fn().mockImplementation((path) => path.split('/').pop()),
-      extractBlobKey: vi.fn().mockImplementation((path) => path.split('/').pop()),
+      extractStorageKey: vi.fn().mockImplementation((path) => path.split('/').pop()),
       extractFilename: vi.fn().mockImplementation((path) => path.split('/').pop()),
       findLocalFile: vi.fn().mockReturnValue('/test/uploads/test-file.txt'),
     }))
@@ -99,12 +109,22 @@ describe('File Serve API Route', () => {
       getContentType: vi.fn().mockReturnValue('text/plain'),
       isS3Path: vi.fn().mockReturnValue(false),
       isBlobPath: vi.fn().mockReturnValue(false),
-      extractS3Key: vi.fn().mockImplementation((path) => path.split('/').pop()),
-      extractBlobKey: vi.fn().mockImplementation((path) => path.split('/').pop()),
+      extractStorageKey: vi.fn().mockImplementation((path) => path.split('/').pop()),
       extractFilename: vi.fn().mockImplementation((path) => path.split('/').pop()),
       findLocalFile: vi.fn().mockReturnValue('/test/uploads/nested/path/file.txt'),
     }))

+    vi.doMock('@/lib/auth/hybrid', () => ({
+      checkHybridAuth: vi.fn().mockResolvedValue({
+        success: true,
+        userId: 'test-user-id',
+      }),
+    }))
+
+    vi.doMock('@/app/api/files/authorization', () => ({
+      verifyFileAccess: vi.fn().mockResolvedValue(true),
+    }))
+
     const req = new NextRequest('http://localhost:3000/api/files/serve/nested/path/file.txt')
     const params = { path: ['nested', 'path', 'file.txt'] }
     const { GET } = await import('@/app/api/files/serve/[...path]/route')
@@ -142,6 +162,17 @@ describe('File Serve API Route', () => {
       USE_BLOB_STORAGE: false,
     }))

+    vi.doMock('@/lib/auth/hybrid', () => ({
+      checkHybridAuth: vi.fn().mockResolvedValue({
+        success: true,
+        userId: 'test-user-id',
+      }),
+    }))
+
+    vi.doMock('@/app/api/files/authorization', () => ({
+      verifyFileAccess: vi.fn().mockResolvedValue(true),
+    }))
+
     vi.doMock('@/app/api/files/utils', () => ({
       FileNotFoundError: class FileNotFoundError extends Error {
         constructor(message: string) {
@@ -167,8 +198,7 @@ describe('File Serve API Route', () => {
       getContentType: vi.fn().mockReturnValue('image/png'),
       isS3Path: vi.fn().mockReturnValue(false),
       isBlobPath: vi.fn().mockReturnValue(false),
-      extractS3Key: vi.fn().mockImplementation((path) => path.split('/').pop()),
-      extractBlobKey: vi.fn().mockImplementation((path) => path.split('/').pop()),
+      extractStorageKey: vi.fn().mockImplementation((path) => path.split('/').pop()),
       extractFilename: vi.fn().mockImplementation((path) => path.split('/').pop()),
       findLocalFile: vi.fn().mockReturnValue('/test/uploads/test-file.txt'),
     }))
@@ -197,6 +227,17 @@ describe('File Serve API Route', () => {
       readFile: vi.fn().mockRejectedValue(new Error('ENOENT: no such file or directory')),
     }))

+    vi.doMock('@/lib/auth/hybrid', () => ({
+      checkHybridAuth: vi.fn().mockResolvedValue({
+        success: true,
+        userId: 'test-user-id',
+      }),
+    }))
+
+    vi.doMock('@/app/api/files/authorization', () => ({
+      verifyFileAccess: vi.fn().mockResolvedValue(false), // File not found = no access
+    }))
+
     vi.doMock('@/app/api/files/utils', () => ({
       FileNotFoundError: class FileNotFoundError extends Error {
         constructor(message: string) {
@@ -214,8 +255,7 @@ describe('File Serve API Route', () => {
       getContentType: vi.fn().mockReturnValue('text/plain'),
       isS3Path: vi.fn().mockReturnValue(false),
       isBlobPath: vi.fn().mockReturnValue(false),
-      extractS3Key: vi.fn(),
-      extractBlobKey: vi.fn(),
+      extractStorageKey: vi.fn(),
       extractFilename: vi.fn(),
       findLocalFile: vi.fn().mockReturnValue(null),
     }))
@@ -246,7 +286,24 @@ describe('File Serve API Route', () => {

   for (const test of contentTypeTests) {
     it(`should serve ${test.ext} file with correct content type`, async () => {
+      vi.doMock('@/lib/auth/hybrid', () => ({
+        checkHybridAuth: vi.fn().mockResolvedValue({
+          success: true,
+          userId: 'test-user-id',
+        }),
+      }))
+
+      vi.doMock('@/app/api/files/authorization', () => ({
+        verifyFileAccess: vi.fn().mockResolvedValue(true),
+      }))
+
       vi.doMock('@/app/api/files/utils', () => ({
+        FileNotFoundError: class FileNotFoundError extends Error {
+          constructor(message: string) {
+            super(message)
+            this.name = 'FileNotFoundError'
+          }
+        },
         getContentType: () => test.contentType,
         findLocalFile: () => `/test/uploads/file.${test.ext}`,
         createFileResponse: (obj: { buffer: Buffer; contentType: string; filename: string }) =>
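Every test in this suite now layers two extra mocks under the route: checkHybridAuth resolving to an authenticated user, and verifyFileAccess resolving true. The one test that flips verifyFileAccess to false documents the fail-closed contract, with denial surfacing as not-found rather than forbidden. An illustrative companion test in the same style (assuming, as the suite does elsewhere, that the route maps FileNotFoundError to a 404 response):

// Illustrative denial-path test; request URL and filename are hypothetical.
it('returns 404 when file access is denied', async () => {
  vi.doMock('@/lib/auth/hybrid', () => ({
    checkHybridAuth: vi.fn().mockResolvedValue({ success: true, userId: 'test-user-id' }),
  }))
  vi.doMock('@/app/api/files/authorization', () => ({
    verifyFileAccess: vi.fn().mockResolvedValue(false), // denial is reported as not-found
  }))

  const req = new NextRequest('http://localhost:3000/api/files/serve/secret.txt')
  const params = { path: ['secret.txt'] }
  const { GET } = await import('@/app/api/files/serve/[...path]/route')
  const response = await GET(req, { params })

  expect(response.status).toBe(404) // not 403: file existence is not leaked
})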
@@ -4,8 +4,10 @@ import { NextResponse } from 'next/server'
 import { checkHybridAuth } from '@/lib/auth/hybrid'
 import { createLogger } from '@/lib/logs/console/logger'
 import { CopilotFiles, isUsingCloudStorage } from '@/lib/uploads'
-import type { StorageContext } from '@/lib/uploads/core/config-resolver'
+import type { StorageContext } from '@/lib/uploads/config'
 import { downloadFile } from '@/lib/uploads/core/storage-service'
+import { inferContextFromKey } from '@/lib/uploads/utils/file-utils'
+import { verifyFileAccess } from '@/app/api/files/authorization'
 import {
   createErrorResponse,
   createFileResponse,
@@ -31,8 +33,11 @@ export async function GET(

   const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

-  if (!authResult.success) {
-    logger.warn('Unauthorized file access attempt', { path, error: authResult.error })
+  if (!authResult.success || !authResult.userId) {
+    logger.warn('Unauthorized file access attempt', {
+      path,
+      error: authResult.error || 'Missing userId',
+    })
     return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
   }

@@ -47,7 +52,7 @@ export async function GET(
   const legacyBucketType = request.nextUrl.searchParams.get('bucket')

   if (isUsingCloudStorage() || isCloudPath) {
-    return await handleCloudProxy(cloudKey, contextParam, legacyBucketType, userId)
+    return await handleCloudProxy(cloudKey, userId, contextParam, legacyBucketType)
   }

   return await handleLocalFile(fullPath, userId)
@@ -62,8 +67,26 @@ export async function GET(
   }
 }

-async function handleLocalFile(filename: string, userId?: string): Promise<NextResponse> {
+async function handleLocalFile(filename: string, userId: string): Promise<NextResponse> {
   try {
+    const contextParam: StorageContext | undefined = inferContextFromKey(filename) as
+      | StorageContext
+      | undefined
+
+    const hasAccess = await verifyFileAccess(
+      filename,
+      userId,
+      null,
+      undefined,
+      contextParam,
+      true // isLocal = true
+    )
+
+    if (!hasAccess) {
+      logger.warn('Unauthorized local file access attempt', { userId, filename })
+      throw new FileNotFoundError(`File not found: ${filename}`)
+    }
+
     const filePath = findLocalFile(filename)

     if (!filePath) {
@@ -86,44 +109,11 @@ async function handleLocalFile(filename: string, userId?: string): Promise<NextResponse> {
   }
 }

-/**
- * Infer storage context from file key pattern
- */
-function inferContextFromKey(key: string): StorageContext {
-  // KB files always start with 'kb/' prefix
-  if (key.startsWith('kb/')) {
-    return 'knowledge-base'
-  }
-
-  // Workspace files: UUID-like ID followed by timestamp pattern
-  // Pattern: {uuid}/{timestamp}-{random}-{filename}
-  if (key.match(/^[a-f0-9-]{36}\/\d+-[a-z0-9]+-/)) {
-    return 'workspace'
-  }
-
-  // Execution files: three UUID segments (workspace/workflow/execution)
-  // Pattern: {uuid}/{uuid}/{uuid}/{filename}
-  const segments = key.split('/')
-  if (segments.length >= 4 && segments[0].match(/^[a-f0-9-]{36}$/)) {
-    return 'execution'
-  }
-
-  // Copilot files: timestamp-random-filename (no path segments)
-  // Pattern: {timestamp}-{random}-{filename}
-  // NOTE: This is ambiguous with other contexts - prefer explicit context parameter
-  if (key.match(/^\d+-[a-z0-9]+-/)) {
-    // Could be copilot, general, or chat - default to general
-    return 'general'
-  }
-
-  return 'general'
-}
-
 async function handleCloudProxy(
   cloudKey: string,
+  userId: string,
   contextParam?: string | null,
-  legacyBucketType?: string | null,
-  userId?: string
+  legacyBucketType?: string | null
 ): Promise<NextResponse> {
   try {
     let context: StorageContext
@@ -139,6 +129,20 @@ async function handleCloudProxy(
       logger.info(`Inferred context: ${context} from key pattern: ${cloudKey}`)
     }

+    const hasAccess = await verifyFileAccess(
+      cloudKey,
+      userId,
+      legacyBucketType || null,
+      undefined,
+      context,
+      false // isLocal = false
+    )
+
+    if (!hasAccess) {
+      logger.warn('Unauthorized cloud file access attempt', { userId, key: cloudKey, context })
+      throw new FileNotFoundError(`File not found: ${cloudKey}`)
+    }
+
     let fileBuffer: Buffer

     if (context === 'copilot') {
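One detail worth noting in handleCloudProxy: userId moves from a trailing optional parameter to a required second parameter. With the old shape a caller could omit it and still type-check, silently skipping the access check; the new shape makes that a compile error. In miniature (stub functions for illustration only):

// Old shape: trailing optional userId — forgetting it still compiles.
function handleProxyOld(key: string, ctx?: string | null, bucket?: string | null, userId?: string) {}
// New shape: userId required up front — forgetting it is a compile error.
function handleProxyNew(key: string, userId: string, ctx?: string | null, bucket?: string | null) {}

handleProxyOld('kb/doc.pdf', 'knowledge-base') // authorization silently skipped
handleProxyNew('kb/doc.pdf', 'user_123', 'knowledge-base') // userId cannot be omitted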
|||||||
@@ -2,6 +2,10 @@ import { type NextRequest, NextResponse } from 'next/server'
|
|||||||
import { createLogger } from '@/lib/logs/console/logger'
|
import { createLogger } from '@/lib/logs/console/logger'
|
||||||
import '@/lib/uploads/core/setup.server'
|
import '@/lib/uploads/core/setup.server'
|
||||||
import { getSession } from '@/lib/auth'
|
import { getSession } from '@/lib/auth'
|
||||||
|
import { getUserEntityPermissions } from '@/lib/permissions/utils'
|
||||||
|
import type { StorageContext } from '@/lib/uploads/config'
|
||||||
|
import { isImageFileType } from '@/lib/uploads/utils/file-utils'
|
||||||
|
import { validateFileType } from '@/lib/uploads/utils/validation'
|
||||||
import {
|
import {
|
||||||
createErrorResponse,
|
createErrorResponse,
|
||||||
createOptionsResponse,
|
createOptionsResponse,
|
||||||
@@ -26,9 +30,6 @@ const ALLOWED_EXTENSIONS = new Set([
|
|||||||
'yml',
|
'yml',
|
||||||
])
|
])
|
||||||
|
|
||||||
/**
|
|
||||||
* Validates file extension against allowlist
|
|
||||||
*/
|
|
||||||
function validateFileExtension(filename: string): boolean {
|
function validateFileExtension(filename: string): boolean {
|
||||||
const extension = filename.split('.').pop()?.toLowerCase()
|
const extension = filename.split('.').pop()?.toLowerCase()
|
||||||
if (!extension) return false
|
if (!extension) return false
|
||||||
@@ -57,6 +58,12 @@ export async function POST(request: NextRequest) {
|
|||||||
const workflowId = formData.get('workflowId') as string | null
|
const workflowId = formData.get('workflowId') as string | null
|
||||||
const executionId = formData.get('executionId') as string | null
|
const executionId = formData.get('executionId') as string | null
|
||||||
const workspaceId = formData.get('workspaceId') as string | null
|
const workspaceId = formData.get('workspaceId') as string | null
|
||||||
|
const contextParam = formData.get('context') as string | null
|
||||||
|
|
||||||
|
// Determine context: explicit > workspace > execution > general
|
||||||
|
const context: StorageContext =
|
||||||
|
(contextParam as StorageContext) ||
|
||||||
|
(workspaceId ? 'workspace' : workflowId && executionId ? 'execution' : 'general')
|
||||||
|
|
||||||
const storageService = await import('@/lib/uploads/core/storage-service')
|
const storageService = await import('@/lib/uploads/core/storage-service')
|
||||||
const usingCloudStorage = storageService.hasCloudStorage()
|
const usingCloudStorage = storageService.hasCloudStorage()
|
||||||
@@ -68,6 +75,8 @@ export async function POST(request: NextRequest) {
|
|||||||
)
|
)
|
||||||
} else if (workspaceId) {
|
} else if (workspaceId) {
|
||||||
logger.info(`Uploading files for workspace-scoped storage: workspace=${workspaceId}`)
|
logger.info(`Uploading files for workspace-scoped storage: workspace=${workspaceId}`)
|
||||||
|
} else if (contextParam) {
|
||||||
|
logger.info(`Uploading files for ${contextParam} context`)
|
||||||
}
|
}
|
||||||
|
|
||||||
const uploadResults = []
|
const uploadResults = []
|
||||||
@@ -96,15 +105,82 @@ export async function POST(request: NextRequest) {
        },
        buffer,
        originalName,
-       file.type
+       file.type,
+       session.user.id // userId available from session
      )

      uploadResults.push(userFile)
      continue
    }

-    // Priority 2: Workspace-scoped storage (persistent, no expiry)
-    if (workspaceId) {
+    // Priority 2: Knowledge-base files (must check BEFORE workspace to avoid duplicate file check)
+    if (context === 'knowledge-base') {
+      // Validate file type for knowledge base
+      const validationError = validateFileType(originalName, file.type)
+      if (validationError) {
+        throw new InvalidRequestError(validationError.message)
+      }
+
+      if (workspaceId) {
+        const permission = await getUserEntityPermissions(
+          session.user.id,
+          'workspace',
+          workspaceId
+        )
+        if (permission === null) {
+          return NextResponse.json(
+            { error: 'Insufficient permissions for workspace' },
+            { status: 403 }
+          )
+        }
+      }
+
+      logger.info(`Uploading knowledge-base file: ${originalName}`)
+
+      const metadata: Record<string, string> = {
+        originalName: originalName,
+        uploadedAt: new Date().toISOString(),
+        purpose: 'knowledge-base',
+        userId: session.user.id,
+      }
+
+      if (workspaceId) {
+        metadata.workspaceId = workspaceId
+      }
+
+      const fileInfo = await storageService.uploadFile({
+        file: buffer,
+        fileName: originalName,
+        contentType: file.type,
+        context: 'knowledge-base',
+        metadata,
+      })
+
+      const finalPath = usingCloudStorage
+        ? `${fileInfo.path}?context=knowledge-base`
+        : fileInfo.path
+
+      const uploadResult = {
+        fileName: originalName,
+        presignedUrl: '', // Not used for server-side uploads
+        fileInfo: {
+          path: finalPath,
+          key: fileInfo.key,
+          name: originalName,
+          size: buffer.length,
+          type: file.type,
+        },
+        directUploadSupported: false,
+      }
+
+      logger.info(`Successfully uploaded knowledge-base file: ${fileInfo.key}`)
+      uploadResults.push(uploadResult)
+      continue
+    }
+
+    // Priority 3: Workspace-scoped storage (persistent, no expiry)
+    // Only if context is NOT explicitly set to something else
+    if (workspaceId && !contextParam) {
      try {
        const { uploadWorkspaceFile } = await import('@/lib/uploads/contexts/workspace')
        const userFile = await uploadWorkspaceFile(
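// Illustrative sketch: the knowledge-base branch expects validateFileType to
// return an error object (or null) rather than throw. The shape below is
// inferred purely from `validationError.message` at the call site; the real
// helper and its accepted-type list may differ.
interface FileValidationError {
  code: string
  message: string
}

function validateFileType(fileName: string, mimeType: string): FileValidationError | null {
  const accepted = ['application/pdf', 'text/plain', 'text/csv', 'text/markdown'] // subset, for illustration
  if (!accepted.includes(mimeType)) {
    return {
      code: 'unsupported_file_type',
      message: `File "${fileName}" has an unsupported type: ${mimeType}`,
    }
  }
  return null
}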
@@ -118,7 +194,6 @@ export async function POST(request: NextRequest) {
        uploadResults.push(userFile)
        continue
      } catch (workspaceError) {
-       // Check error type
        const errorMessage =
          workspaceError instanceof Error ? workspaceError.message : 'Upload failed'
        const isDuplicate = errorMessage.includes('already exists')
@@ -128,7 +203,6 @@ export async function POST(request: NextRequest) {

        logger.warn(`Workspace file upload failed: ${errorMessage}`)

-       // Determine appropriate status code
        let statusCode = 500
        if (isDuplicate) statusCode = 409
        else if (isStorageLimitError) statusCode = 413
@@ -144,15 +218,90 @@ export async function POST(request: NextRequest) {
      }
    }

+    // Priority 4: Context-specific uploads (copilot, chat, profile-pictures)
+    if (context === 'copilot' || context === 'chat' || context === 'profile-pictures') {
+      if (!isImageFileType(file.type)) {
+        throw new InvalidRequestError(
+          `Only image files (JPEG, PNG, GIF, WebP, SVG) are allowed for ${context} uploads`
+        )
+      }
+
+      if (context === 'chat' && workspaceId) {
+        const permission = await getUserEntityPermissions(
+          session.user.id,
+          'workspace',
+          workspaceId
+        )
+        if (permission === null) {
+          return NextResponse.json(
+            { error: 'Insufficient permissions for workspace' },
+            { status: 403 }
+          )
+        }
+      }
+
+      logger.info(`Uploading ${context} file: ${originalName}`)
+
+      const metadata: Record<string, string> = {
+        originalName: originalName,
+        uploadedAt: new Date().toISOString(),
+        purpose: context,
+        userId: session.user.id,
+      }
+
+      if (workspaceId && context === 'chat') {
+        metadata.workspaceId = workspaceId
+      }
+
+      const fileInfo = await storageService.uploadFile({
+        file: buffer,
+        fileName: originalName,
+        contentType: file.type,
+        context,
+        metadata,
+      })
+
+      const finalPath = usingCloudStorage ? `${fileInfo.path}?context=${context}` : fileInfo.path
+
+      const uploadResult = {
+        fileName: originalName,
+        presignedUrl: '', // Not used for server-side uploads
+        fileInfo: {
+          path: finalPath,
+          key: fileInfo.key,
+          name: originalName,
+          size: buffer.length,
+          type: file.type,
+        },
+        directUploadSupported: false,
+      }
+
+      logger.info(`Successfully uploaded ${context} file: ${fileInfo.key}`)
+      uploadResults.push(uploadResult)
+      continue
+    }
+
+    // Priority 5: General uploads (fallback)
    try {
      logger.info(`Uploading file (general context): ${originalName}`)

-      const storageService = await import('@/lib/uploads/core/storage-service')
+      const metadata: Record<string, string> = {
+        originalName: originalName,
+        uploadedAt: new Date().toISOString(),
+        purpose: 'general',
+        userId: session.user.id,
+      }
+
+      if (workspaceId) {
+        metadata.workspaceId = workspaceId
+      }
+
      const fileInfo = await storageService.uploadFile({
        file: buffer,
        fileName: originalName,
        contentType: file.type,
        context: 'general',
+        metadata,
      })

      let downloadUrl: string | undefined
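// Summary annotation (not diff content): the rewritten handler now falls through
// five ordered branches per file. Priority 1 sits above this excerpt, so its
// exact shape is an inference from the userFile/session wiring shown here.
// 1. execution-scoped files            -> execution upload helper (now receives session.user.id)
// 2. context === 'knowledge-base'      -> storageService.uploadFile({ context: 'knowledge-base' })
// 3. workspaceId && !contextParam      -> uploadWorkspaceFile(...)  (409 on duplicates, 413 on storage limits)
// 4. copilot | chat | profile-pictures -> image-only uploads via storageService.uploadFile({ context })
// 5. fallback                          -> storageService.uploadFile({ context: 'general' })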
@@ -2,7 +2,7 @@ import { existsSync } from 'fs'
 import { join, resolve, sep } from 'path'
 import { NextResponse } from 'next/server'
 import { createLogger } from '@/lib/logs/console/logger'
-import { UPLOAD_DIR } from '@/lib/uploads/core/setup'
+import { UPLOAD_DIR } from '@/lib/uploads/config'

 const logger = createLogger('FilesUtils')

@@ -102,22 +102,6 @@ export function isCloudPath(path: string): boolean {
   return isS3Path(path) || isBlobPath(path)
 }

-export function extractStorageKey(path: string, storageType: 's3' | 'blob'): string {
-  const prefix = `/api/files/serve/${storageType}/`
-  if (path.includes(prefix)) {
-    return decodeURIComponent(path.split(prefix)[1])
-  }
-  return path
-}
-
-export function extractS3Key(path: string): string {
-  return extractStorageKey(path, 's3')
-}
-
-export function extractBlobKey(path: string): string {
-  return extractStorageKey(path, 'blob')
-}
-
 export function extractFilename(path: string): string {
   let filename: string

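// Sketch: the provider-specific extractors deleted above give way to a single
// extractStorageKey exported from '@/lib/uploads/utils/file-utils' (imported by
// later hunks). A plausible provider-agnostic form, assuming serve URLs now look
// like /api/files/serve/<encoded key>[?context=...] — an assumption, not the
// confirmed implementation:
export function extractStorageKey(path: string): string {
  const prefix = '/api/files/serve/'
  const idx = path.indexOf(prefix)
  if (idx === -1) return path
  const rest = path.slice(idx + prefix.length)
  return decodeURIComponent(rest.split('?')[0]) // drop any ?context=... query
}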
@@ -2,10 +2,8 @@ import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
 import { checkHybridAuth } from '@/lib/auth/hybrid'
 import { createLogger } from '@/lib/logs/console/logger'
-import {
-  downloadFileFromStorage,
-  processFilesToUserFiles,
-} from '@/lib/uploads/utils/file-processing'
+import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
+import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
 import { generateRequestId } from '@/lib/utils'

 export const dynamic = 'force-dynamic'
@@ -2,10 +2,8 @@ import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
 import { checkHybridAuth } from '@/lib/auth/hybrid'
 import { createLogger } from '@/lib/logs/console/logger'
-import {
-  downloadFileFromStorage,
-  processFilesToUserFiles,
-} from '@/lib/uploads/utils/file-processing'
+import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
+import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
 import { generateRequestId } from '@/lib/utils'
 import { base64UrlEncode, buildMimeMessage } from '@/tools/gmail/utils'

@@ -2,10 +2,8 @@ import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
 import { checkHybridAuth } from '@/lib/auth/hybrid'
 import { createLogger } from '@/lib/logs/console/logger'
-import {
-  downloadFileFromStorage,
-  processFilesToUserFiles,
-} from '@/lib/uploads/utils/file-processing'
+import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
+import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
 import { generateRequestId } from '@/lib/utils'
 import { base64UrlEncode, buildMimeMessage } from '@/tools/gmail/utils'

@@ -2,10 +2,8 @@ import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
 import { checkHybridAuth } from '@/lib/auth/hybrid'
 import { createLogger } from '@/lib/logs/console/logger'
-import {
-  downloadFileFromStorage,
-  processSingleFileToUserFile,
-} from '@/lib/uploads/utils/file-processing'
+import { processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
+import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
 import { generateRequestId } from '@/lib/utils'
 import {
   GOOGLE_WORKSPACE_MIME_TYPES,
@@ -2,10 +2,8 @@ import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
 import { checkHybridAuth } from '@/lib/auth/hybrid'
 import { createLogger } from '@/lib/logs/console/logger'
-import {
-  downloadFileFromStorage,
-  processFilesToUserFiles,
-} from '@/lib/uploads/utils/file-processing'
+import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
+import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
 import { generateRequestId } from '@/lib/utils'

 export const dynamic = 'force-dynamic'
@@ -2,10 +2,8 @@ import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
 import { checkHybridAuth } from '@/lib/auth/hybrid'
 import { createLogger } from '@/lib/logs/console/logger'
-import {
-  downloadFileFromStorage,
-  processFilesToUserFiles,
-} from '@/lib/uploads/utils/file-processing'
+import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
+import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
 import { generateRequestId } from '@/lib/utils'

 export const dynamic = 'force-dynamic'
@@ -2,28 +2,16 @@ import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
 import { checkHybridAuth } from '@/lib/auth/hybrid'
 import { createLogger } from '@/lib/logs/console/logger'
-import { type StorageContext, StorageService } from '@/lib/uploads'
-import { extractStorageKey } from '@/lib/uploads/utils/file-utils'
+import { StorageService } from '@/lib/uploads'
+import { extractStorageKey, inferContextFromKey } from '@/lib/uploads/utils/file-utils'
 import { getBaseUrl } from '@/lib/urls/utils'
 import { generateRequestId } from '@/lib/utils'
+import { verifyFileAccess } from '@/app/api/files/authorization'

 export const dynamic = 'force-dynamic'

 const logger = createLogger('MistralParseAPI')

-/**
- * Infer storage context from file key pattern
- */
-function inferContextFromKey(key: string): StorageContext {
-  if (key.startsWith('kb/')) return 'knowledge-base'
-
-  const segments = key.split('/')
-  if (segments.length >= 4 && segments[0].match(/^[a-f0-9-]{36}$/)) return 'execution'
-  if (key.match(/^[a-f0-9-]{36}\/\d+-[a-z0-9]+-/)) return 'workspace'
-
-  return 'general'
-}
-
 const MistralParseSchema = z.object({
   apiKey: z.string().min(1, 'API key is required'),
   filePath: z.string().min(1, 'File path is required'),
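// Worked examples (illustrative keys) against the patterns in the deleted
// inferContextFromKey, which this commit moves into '@/lib/uploads/utils/file-utils':
// 'kb/doc-123.pdf'                                                   -> 'knowledge-base'
// '123e4567-e89b-12d3-a456-426614174000/wf-1/exec-1/report.pdf'      -> 'execution'  (36-char id + >= 4 segments)
// '123e4567-e89b-12d3-a456-426614174000/1712000000-ab12cd-notes.pdf' -> 'workspace'  (36-char id + timestamped name)
// 'uploads/misc.txt'                                                 -> 'general'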
@@ -40,36 +28,61 @@ export async function POST(request: NextRequest) {
  try {
    const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

-    if (!authResult.success) {
-      logger.warn(`[${requestId}] Unauthorized Mistral parse attempt: ${authResult.error}`)
+    if (!authResult.success || !authResult.userId) {
+      logger.warn(`[${requestId}] Unauthorized Mistral parse attempt`, {
+        error: authResult.error || 'Missing userId',
+      })
      return NextResponse.json(
        {
          success: false,
-          error: authResult.error || 'Authentication required',
+          error: authResult.error || 'Unauthorized',
        },
        { status: 401 }
      )
    }

+    const userId = authResult.userId
    const body = await request.json()
    const validatedData = MistralParseSchema.parse(body)

    logger.info(`[${requestId}] Mistral parse request`, {
      filePath: validatedData.filePath,
      isWorkspaceFile: validatedData.filePath.includes('/api/files/serve/'),
+      userId,
    })

    let fileUrl = validatedData.filePath

-    // Check if it's an internal workspace file path
    if (validatedData.filePath?.includes('/api/files/serve/')) {
      try {
        const storageKey = extractStorageKey(validatedData.filePath)

-        // Infer context from key pattern
        const context = inferContextFromKey(storageKey)

-        // Generate 5-minute presigned URL for external API access
+        const hasAccess = await verifyFileAccess(
+          storageKey,
+          userId,
+          null,
+          undefined,
+          context,
+          false // isLocal
+        )
+
+        if (!hasAccess) {
+          logger.warn(`[${requestId}] Unauthorized presigned URL generation attempt`, {
+            userId,
+            key: storageKey,
+            context,
+          })
+          return NextResponse.json(
+            {
+              success: false,
+              error: 'File not found',
+            },
+            { status: 404 }
+          )
+        }
+
        fileUrl = await StorageService.generatePresignedDownloadUrl(storageKey, context, 5 * 60)
        logger.info(`[${requestId}] Generated presigned URL for ${context} file`)
      } catch (error) {
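// Annotation: when verifyFileAccess denies the caller, the route deliberately
// answers 404 'File not found' rather than 403, so unauthorized callers cannot
// confirm that a storage key exists. Illustrative client call — the endpoint
// path is a guess from the 'MistralParseAPI' logger and the body values are
// placeholders:
const res = await fetch('/api/tools/mistral/parse', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    apiKey: 'sk-placeholder',
    filePath: '/api/files/serve/kb%2Fdoc-123.pdf',
  }),
})
// 401 -> unauthenticated, 404 -> no access (or missing key), 200 -> parse result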
@@ -83,12 +96,10 @@ export async function POST(request: NextRequest) {
        )
      }
    } else if (validatedData.filePath?.startsWith('/')) {
-      // Convert relative path to absolute URL
      const baseUrl = getBaseUrl()
      fileUrl = `${baseUrl}${validatedData.filePath}`
    }

-    // Call Mistral API with the resolved URL
    const mistralBody: any = {
      model: 'mistral-ocr-latest',
      document: {
@@ -3,10 +3,8 @@ import * as XLSX from 'xlsx'
 import { z } from 'zod'
 import { checkHybridAuth } from '@/lib/auth/hybrid'
 import { createLogger } from '@/lib/logs/console/logger'
-import {
-  downloadFileFromStorage,
-  processSingleFileToUserFile,
-} from '@/lib/uploads/utils/file-processing'
+import { processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
+import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
 import { generateRequestId } from '@/lib/utils'

 export const dynamic = 'force-dynamic'
@@ -2,10 +2,8 @@ import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
 import { checkHybridAuth } from '@/lib/auth/hybrid'
 import { createLogger } from '@/lib/logs/console/logger'
-import {
-  downloadFileFromStorage,
-  processFilesToUserFiles,
-} from '@/lib/uploads/utils/file-processing'
+import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
+import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
 import { generateRequestId } from '@/lib/utils'

 export const dynamic = 'force-dynamic'
@@ -2,10 +2,8 @@ import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
 import { checkHybridAuth } from '@/lib/auth/hybrid'
 import { createLogger } from '@/lib/logs/console/logger'
-import {
-  downloadFileFromStorage,
-  processFilesToUserFiles,
-} from '@/lib/uploads/utils/file-processing'
+import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
+import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
 import { generateRequestId } from '@/lib/utils'

 export const dynamic = 'force-dynamic'
@@ -3,10 +3,8 @@ import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
 import { checkHybridAuth } from '@/lib/auth/hybrid'
 import { createLogger } from '@/lib/logs/console/logger'
-import {
-  downloadFileFromStorage,
-  processSingleFileToUserFile,
-} from '@/lib/uploads/utils/file-processing'
+import { processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
+import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
 import { generateRequestId } from '@/lib/utils'

 export const dynamic = 'force-dynamic'
@@ -2,10 +2,8 @@ import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
 import { checkHybridAuth } from '@/lib/auth/hybrid'
 import { createLogger } from '@/lib/logs/console/logger'
-import {
-  downloadFileFromStorage,
-  processFilesToUserFiles,
-} from '@/lib/uploads/utils/file-processing'
+import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
+import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
 import { generateRequestId } from '@/lib/utils'

 export const dynamic = 'force-dynamic'
@@ -2,10 +2,8 @@ import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
 import { checkHybridAuth } from '@/lib/auth/hybrid'
 import { createLogger } from '@/lib/logs/console/logger'
-import {
-  downloadFileFromStorage,
-  processFilesToUserFiles,
-} from '@/lib/uploads/utils/file-processing'
+import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
+import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
 import { generateRequestId } from '@/lib/utils'

 export const dynamic = 'force-dynamic'
@@ -2,10 +2,8 @@ import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
 import { checkHybridAuth } from '@/lib/auth/hybrid'
 import { createLogger } from '@/lib/logs/console/logger'
-import {
-  downloadFileFromStorage,
-  processFilesToUserFiles,
-} from '@/lib/uploads/utils/file-processing'
+import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
+import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
 import { generateRequestId } from '@/lib/utils'
 import { convertMarkdownToHTML } from '@/tools/telegram/utils'

@@ -2,10 +2,8 @@ import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
 import { checkHybridAuth } from '@/lib/auth/hybrid'
 import { createLogger } from '@/lib/logs/console/logger'
-import {
-  downloadFileFromStorage,
-  processSingleFileToUserFile,
-} from '@/lib/uploads/utils/file-processing'
+import { processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
+import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
 import { generateRequestId } from '@/lib/utils'

 export const dynamic = 'force-dynamic'
@@ -565,7 +565,34 @@ export async function POST(
      input: rawInput,
    } = extractExecutionParams(request as NextRequest, parsedBody)

-    // Generate executionId early so it can be used for file uploads
+    let authenticatedUserId: string
+    let triggerType: TriggerType = 'manual'
+
+    if (finalIsSecureMode) {
+      authenticatedUserId = validation.workflow.userId
+      triggerType = 'manual'
+    } else {
+      const session = await getSession()
+      const apiKeyHeader = request.headers.get('X-API-Key')
+
+      if (session?.user?.id && !apiKeyHeader) {
+        authenticatedUserId = session.user.id
+        triggerType = 'manual'
+      } else if (apiKeyHeader) {
+        const auth = await authenticateApiKeyFromHeader(apiKeyHeader)
+        if (!auth.success || !auth.userId) {
+          return createErrorResponse('Unauthorized', 401)
+        }
+        authenticatedUserId = auth.userId
+        triggerType = 'api'
+        if (auth.keyId) {
+          void updateApiKeyLastUsed(auth.keyId).catch(() => {})
+        }
+      } else {
+        return createErrorResponse('Authentication required', 401)
+      }
+    }
+
    const executionId = uuidv4()

    let processedInput = rawInput
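// Sketch: this hunk plus the matching deletion further down is a pure move —
// the caller is now authenticated before executionId is generated, so the file
// upload path can receive authenticatedUserId instead of the unrelated isAsync
// flag. The precedence, distilled with simplified stand-in inputs:
type TriggerType = 'manual' | 'api'

function resolveCaller(opts: {
  secureModeOwnerId?: string // set when finalIsSecureMode
  sessionUserId?: string // set when a cookie session exists
  apiKeyUserId?: string // set when the X-API-Key header authenticated
}): { userId: string; triggerType: TriggerType } | null {
  if (opts.secureModeOwnerId) return { userId: opts.secureModeOwnerId, triggerType: 'manual' }
  if (opts.sessionUserId && !opts.apiKeyUserId)
    return { userId: opts.sessionUserId, triggerType: 'manual' }
  if (opts.apiKeyUserId) return { userId: opts.apiKeyUserId, triggerType: 'api' }
  return null // the route maps this to a 401 response
}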
@@ -609,7 +636,7 @@ export async function POST(
          fieldValue,
          executionContext,
          requestId,
-          isAsync
+          authenticatedUserId
        )

        if (uploadedFiles.length > 0) {
@@ -633,34 +660,6 @@ export async function POST(

    const input = processedInput

-    let authenticatedUserId: string
-    let triggerType: TriggerType = 'manual'
-
-    if (finalIsSecureMode) {
-      authenticatedUserId = validation.workflow.userId
-      triggerType = 'manual'
-    } else {
-      const session = await getSession()
-      const apiKeyHeader = request.headers.get('X-API-Key')
-
-      if (session?.user?.id && !apiKeyHeader) {
-        authenticatedUserId = session.user.id
-        triggerType = 'manual'
-      } else if (apiKeyHeader) {
-        const auth = await authenticateApiKeyFromHeader(apiKeyHeader)
-        if (!auth.success || !auth.userId) {
-          return createErrorResponse('Unauthorized', 401)
-        }
-        authenticatedUserId = auth.userId
-        triggerType = 'api'
-        if (auth.keyId) {
-          void updateApiKeyLastUsed(auth.keyId).catch(() => {})
-        }
-      } else {
-        return createErrorResponse('Authentication required', 401)
-      }
-    }
-
    const userSubscription = await getHighestPrioritySubscription(authenticatedUserId)

    if (isAsync) {
@@ -1,7 +1,6 @@
 import { type NextRequest, NextResponse } from 'next/server'
 import { getSession } from '@/lib/auth'
 import { createLogger } from '@/lib/logs/console/logger'
-import { StorageService } from '@/lib/uploads'
 import { getWorkspaceFile } from '@/lib/uploads/contexts/workspace'
 import { generateRequestId } from '@/lib/utils'
 import { verifyWorkspaceMembership } from '@/app/api/workflows/utils'
@@ -12,8 +11,8 @@ const logger = createLogger('WorkspaceFileDownloadAPI')

 /**
  * POST /api/workspaces/[id]/files/[fileId]/download
- * Generate presigned download URL (requires read permission)
- * Reuses execution file helper pattern for 5-minute presigned URLs
+ * Return authenticated file serve URL (requires read permission)
+ * Uses /api/files/serve endpoint which enforces authentication and context
  */
 export async function POST(
   request: NextRequest,
@@ -28,7 +27,6 @@ export async function POST(
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

-    // Check workspace permissions (requires read)
    const userPermission = await verifyWorkspaceMembership(session.user.id, workspaceId)
    if (!userPermission) {
      logger.warn(
@@ -42,20 +40,18 @@ export async function POST(
      return NextResponse.json({ error: 'File not found' }, { status: 404 })
    }

-    // Generate 5-minute presigned URL using unified storage service
-    const downloadUrl = await StorageService.generatePresignedDownloadUrl(
-      fileRecord.key,
-      'workspace',
-      5 * 60 // 5 minutes
-    )
+    const { getBaseUrl } = await import('@/lib/urls/utils')
+    const serveUrl = `${getBaseUrl()}/api/files/serve/${encodeURIComponent(fileRecord.key)}?context=workspace`
+    const viewerUrl = `${getBaseUrl()}/workspace/${workspaceId}/files/${fileId}/view`

    logger.info(`[${requestId}] Generated download URL for workspace file: ${fileRecord.name}`)

    return NextResponse.json({
      success: true,
-      downloadUrl,
+      downloadUrl: serveUrl,
+      viewerUrl: viewerUrl,
      fileName: fileRecord.name,
-      expiresIn: 300, // 5 minutes
+      expiresIn: null,
    })
  } catch (error) {
    logger.error(`[${requestId}] Error generating download URL:`, error)
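// Illustrative response shape after this change (placeholder ids and host);
// expiresIn is null because the serve URL is session-authenticated rather than
// a time-limited presigned URL:
const exampleDownloadResponse = {
  success: true,
  downloadUrl: 'https://example.com/api/files/serve/<encoded-key>?context=workspace',
  viewerUrl: 'https://example.com/workspace/ws_123/files/file_456/view',
  fileName: 'report.pdf',
  expiresIn: null,
}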
@@ -0,0 +1,27 @@
+'use client'
+
+import { createLogger } from '@/lib/logs/console/logger'
+import type { WorkspaceFileRecord } from '@/lib/uploads/contexts/workspace'
+
+const logger = createLogger('FileViewer')
+
+interface FileViewerProps {
+  file: WorkspaceFileRecord
+}
+
+export function FileViewer({ file }: FileViewerProps) {
+  const serveUrl = `/api/files/serve/${encodeURIComponent(file.key)}?context=workspace`
+
+  return (
+    <div className='fixed inset-0 z-50 bg-white'>
+      <iframe
+        src={serveUrl}
+        className='h-full w-full border-0'
+        title={file.name}
+        onError={(e) => {
+          logger.error(`Failed to load file: ${file.name}`)
+        }}
+      />
+    </div>
+  )
+}
@@ -0,0 +1,37 @@
+import { redirect } from 'next/navigation'
+import { getSession } from '@/lib/auth'
+import { getWorkspaceFile } from '@/lib/uploads/contexts/workspace'
+import { verifyWorkspaceMembership } from '@/app/api/workflows/utils'
+import { FileViewer } from './file-viewer'
+
+interface FileViewerPageProps {
+  params: Promise<{
+    workspaceId: string
+    fileId: string
+  }>
+}
+
+export default async function FileViewerPage({ params }: FileViewerPageProps) {
+  const { workspaceId, fileId } = await params
+
+  try {
+    const session = await getSession()
+    if (!session?.user?.id) {
+      redirect('/')
+    }
+
+    const hasPermission = await verifyWorkspaceMembership(session.user.id, workspaceId)
+    if (!hasPermission) {
+      redirect(`/workspace/${workspaceId}`)
+    }
+
+    const fileRecord = await getWorkspaceFile(workspaceId, fileId)
+    if (!fileRecord) {
+      redirect(`/workspace/${workspaceId}`)
+    }
+
+    return <FileViewer file={fileRecord} />
+  } catch (error) {
+    redirect(`/workspace/${workspaceId}`)
+  }
+}
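// Annotation: the viewerUrl built by the download route resolves to this page.
// Implied file layout, assuming Next.js App Router conventions (paths inferred,
// not shown in the stripped diff):
// app/workspace/[workspaceId]/files/[fileId]/view/page.tsx        -> FileViewerPage (server: session + membership + record checks)
// app/workspace/[workspaceId]/files/[fileId]/view/file-viewer.tsx -> FileViewer (client: renders the authenticated iframe)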
@@ -2,16 +2,14 @@

 import { useRef, useState } from 'react'
 import { AlertCircle, Check, Loader2, X } from 'lucide-react'
+import { useParams } from 'next/navigation'
 import { Button } from '@/components/ui/button'
 import { Dialog, DialogContent, DialogHeader, DialogTitle } from '@/components/ui/dialog'
 import { Label } from '@/components/ui/label'
 import { Progress } from '@/components/ui/progress'
 import { createLogger } from '@/lib/logs/console/logger'
-import {
-  ACCEPT_ATTRIBUTE,
-  ACCEPTED_FILE_TYPES,
-  MAX_FILE_SIZE,
-} from '@/lib/uploads/utils/validation'
+import { formatFileSize, validateKnowledgeBaseFile } from '@/lib/uploads/utils/file-utils'
+import { ACCEPT_ATTRIBUTE } from '@/lib/uploads/utils/validation'
 import { getDocumentIcon } from '@/app/workspace/[workspaceId]/knowledge/components'
 import { useKnowledgeUpload } from '@/app/workspace/[workspaceId]/knowledge/hooks/use-knowledge-upload'

@@ -40,6 +38,8 @@ export function UploadModal({
   chunkingConfig,
   onUploadComplete,
 }: UploadModalProps) {
+  const params = useParams()
+  const workspaceId = params.workspaceId as string
   const fileInputRef = useRef<HTMLInputElement>(null)
   const [files, setFiles] = useState<FileWithPreview[]>([])

@@ -47,6 +47,7 @@ export function UploadModal({
   const [isDragging, setIsDragging] = useState(false)

   const { isUploading, uploadProgress, uploadError, uploadFiles, clearError } = useKnowledgeUpload({
+    workspaceId,
     onUploadComplete: () => {
       logger.info(`Successfully uploaded ${files.length} files`)
       onUploadComplete?.()
@@ -65,13 +66,7 @@ export function UploadModal({
  }

  const validateFile = (file: File): string | null => {
-    if (file.size > MAX_FILE_SIZE) {
-      return `File "${file.name}" is too large. Maximum size is 100MB.`
-    }
-    if (!ACCEPTED_FILE_TYPES.includes(file.type)) {
-      return `File "${file.name}" has an unsupported format. Please use PDF, DOC, DOCX, TXT, CSV, XLS, XLSX, MD, PPT, PPTX, HTML, JSON, YAML, or YML files.`
-    }
-    return null
+    return validateKnowledgeBaseFile(file)
  }

  const processFiles = (fileList: FileList | File[]) => {
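// Sketch: both modals now delegate to a shared validateKnowledgeBaseFile. A
// plausible implementation, reconstructed from the two inline validators it
// replaces — the real helper in file-utils may differ:
declare const ACCEPTED_FILE_TYPES: string[] // still defined in '@/lib/uploads/utils/validation'
const MAX_FILE_SIZE = 100 * 1024 * 1024 // 100MB, per the removed inline checks

function validateKnowledgeBaseFile(file: File): string | null {
  if (file.size > MAX_FILE_SIZE) {
    return `File "${file.name}" is too large. Maximum size is 100MB.`
  }
  if (!ACCEPTED_FILE_TYPES.includes(file.type)) {
    return `File "${file.name}" has an unsupported format. Please use PDF, DOC, DOCX, TXT, CSV, XLS, XLSX, MD, PPT, PPTX, HTML, JSON, YAML, or YML files.`
  }
  return null
}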
@@ -153,14 +148,6 @@ export function UploadModal({
    return <IconComponent className='h-10 w-8' />
  }

-  const formatFileSize = (bytes: number): string => {
-    if (bytes === 0) return '0 B'
-    const k = 1024
-    const sizes = ['B', 'KB', 'MB', 'GB']
-    const i = Math.floor(Math.log(bytes) / Math.log(k))
-    return `${Number.parseFloat((bytes / k ** i).toFixed(1))} ${sizes[i]}`
-  }
-
  return (
    <Dialog open={open} onOpenChange={handleClose}>
      <DialogContent className='flex max-h-[95vh] flex-col overflow-hidden sm:max-w-[600px]'>
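// The deleted local formatFileSize (duplicated verbatim in CreateModal below) is
// what the new file-utils import presumably provides. Spot checks of the removed
// logic:
// formatFileSize(0)       -> '0 B'
// formatFileSize(1536)    -> '1.5 KB'  (1536 / 1024 = 1.5)
// formatFileSize(1048576) -> '1 MB'    (the trailing '.0' is trimmed by Number.parseFloat)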
@@ -281,12 +268,7 @@ export function UploadModal({
            </div>
          )}

-          {fileError && (
-            <div className='rounded-md border border-destructive/50 bg-destructive/10 px-3 py-2 text-destructive text-sm'>
-              {fileError}
-            </div>
-          )}
-
+          {/* Show upload error first, then file error only if no upload error */}
          {uploadError && (
            <div className='rounded-md border border-destructive/50 bg-destructive/10 px-3 py-2'>
              <div className='flex items-start gap-2'>
@@ -295,6 +277,12 @@ export function UploadModal({
            </div>
          )}

+          {fileError && !uploadError && (
+            <div className='rounded-md border border-destructive/50 bg-destructive/10 px-3 py-2 text-destructive text-sm'>
+              {fileError}
+            </div>
+          )}
        </div>
      </div>
@@ -14,11 +14,8 @@ import { Label } from '@/components/ui/label'
 import { Progress } from '@/components/ui/progress'
 import { Textarea } from '@/components/ui/textarea'
 import { createLogger } from '@/lib/logs/console/logger'
-import {
-  ACCEPT_ATTRIBUTE,
-  ACCEPTED_FILE_TYPES,
-  MAX_FILE_SIZE,
-} from '@/lib/uploads/utils/validation'
+import { formatFileSize, validateKnowledgeBaseFile } from '@/lib/uploads/utils/file-utils'
+import { ACCEPT_ATTRIBUTE } from '@/lib/uploads/utils/validation'
 import { getDocumentIcon } from '@/app/workspace/[workspaceId]/knowledge/components'
 import { useKnowledgeUpload } from '@/app/workspace/[workspaceId]/knowledge/hooks/use-knowledge-upload'
 import type { KnowledgeBaseData } from '@/stores/knowledge/store'
@@ -84,9 +81,9 @@ export function CreateModal({ open, onOpenChange, onKnowledgeBaseCreated }: Crea
  const dropZoneRef = useRef<HTMLDivElement>(null)

  const { uploadFiles, isUploading, uploadProgress, uploadError, clearError } = useKnowledgeUpload({
+    workspaceId,
    onUploadComplete: (uploadedFiles) => {
      logger.info(`Successfully uploaded ${uploadedFiles.length} files`)
-      // Files uploaded and document records created - processing will continue in background
    },
  })

@@ -97,7 +94,6 @@ export function CreateModal({ open, onOpenChange, onKnowledgeBaseCreated }: Crea
    onOpenChange(open)
  }

-  // Cleanup file preview URLs when component unmounts to prevent memory leaks
  useEffect(() => {
    return () => {
      files.forEach((file) => {
@@ -126,19 +122,15 @@ export function CreateModal({ open, onOpenChange, onKnowledgeBaseCreated }: Crea
    mode: 'onSubmit',
  })

-  // Watch the name field to enable/disable the submit button
  const nameValue = watch('name')

-  // Reset state when modal opens/closes
  useEffect(() => {
    if (open) {
-      // Reset states when modal opens
      setSubmitStatus(null)
      setFileError(null)
      setFiles([])
      setIsDragging(false)
      setDragCounter(0)
-      // Reset form to default values
      reset({
        name: '',
        description: '',
@@ -159,23 +151,13 @@ export function CreateModal({ open, onOpenChange, onKnowledgeBaseCreated }: Crea
    let hasError = false

    for (const file of Array.from(fileList)) {
-      // Check file size
-      if (file.size > MAX_FILE_SIZE) {
-        setFileError(`File ${file.name} is too large. Maximum size is 100MB per file.`)
+      const validationError = validateKnowledgeBaseFile(file)
+      if (validationError) {
+        setFileError(validationError)
        hasError = true
        continue
      }

-      // Check file type
-      if (!ACCEPTED_FILE_TYPES.includes(file.type)) {
-        setFileError(
-          `File ${file.name} has an unsupported format. Please use PDF, DOC, DOCX, TXT, CSV, XLS, XLSX, MD, PPT, PPTX, HTML, JSON, YAML, or YML.`
-        )
-        hasError = true
-        continue
-      }
-
-      // Create file with preview (using file icon since these aren't images)
      const fileWithPreview = Object.assign(file, {
        preview: URL.createObjectURL(file),
      }) as FileWithPreview
@@ -190,7 +172,6 @@ export function CreateModal({ open, onOpenChange, onKnowledgeBaseCreated }: Crea
      logger.error('Error processing files:', error)
      setFileError('An error occurred while processing files. Please try again.')
    } finally {
-      // Reset the input
      if (fileInputRef.current) {
        fileInputRef.current.value = ''
      }
@@ -203,7 +184,6 @@ export function CreateModal({ open, onOpenChange, onKnowledgeBaseCreated }: Crea
    }
  }

-  // Handle drag events
  const handleDragEnter = (e: React.DragEvent) => {
    e.preventDefault()
    e.stopPropagation()
@@ -231,7 +211,6 @@ export function CreateModal({ open, onOpenChange, onKnowledgeBaseCreated }: Crea
  const handleDragOver = (e: React.DragEvent) => {
    e.preventDefault()
    e.stopPropagation()
-    // Add visual feedback for valid drop zone
    e.dataTransfer.dropEffect = 'copy'
  }

@@ -248,7 +227,6 @@ export function CreateModal({ open, onOpenChange, onKnowledgeBaseCreated }: Crea

  const removeFile = (index: number) => {
    setFiles((prev) => {
-      // Revoke the URL to avoid memory leaks
      URL.revokeObjectURL(prev[index].preview)
      return prev.filter((_, i) => i !== index)
    })
@@ -259,20 +237,11 @@ export function CreateModal({ open, onOpenChange, onKnowledgeBaseCreated }: Crea
    return <IconComponent className='h-10 w-8' />
  }

-  const formatFileSize = (bytes: number): string => {
-    if (bytes === 0) return '0 B'
-    const k = 1024
-    const sizes = ['B', 'KB', 'MB', 'GB']
-    const i = Math.floor(Math.log(bytes) / Math.log(k))
-    return `${Number.parseFloat((bytes / k ** i).toFixed(1))} ${sizes[i]}`
-  }
-
  const onSubmit = async (data: FormValues) => {
    setIsSubmitting(true)
    setSubmitStatus(null)

    try {
-      // First create the knowledge base
      const knowledgeBasePayload = {
        name: data.name,
        description: data.description || undefined,
@@ -371,14 +340,7 @@ export function CreateModal({ open, onOpenChange, onKnowledgeBaseCreated }: Crea
            className='scrollbar-thin scrollbar-thumb-muted-foreground/20 hover:scrollbar-thumb-muted-foreground/25 scrollbar-track-transparent min-h-0 flex-1 overflow-y-auto px-6'
          >
            <div className='flex min-h-full flex-col py-4'>
-              {submitStatus && submitStatus.type === 'error' && (
-                <Alert variant='destructive' className='mb-6'>
-                  <AlertCircle className='h-4 w-4' />
-                  <AlertTitle>Error</AlertTitle>
-                  <AlertDescription>{submitStatus.message}</AlertDescription>
-                </Alert>
-              )}
-
+              {/* Show upload error first, then submit error only if no upload error */}
              {uploadError && (
                <Alert variant='destructive' className='mb-6'>
                  <AlertCircle className='h-4 w-4' />
@@ -387,6 +349,14 @@ export function CreateModal({ open, onOpenChange, onKnowledgeBaseCreated }: Crea
                </Alert>
              )}

+              {submitStatus && submitStatus.type === 'error' && !uploadError && (
+                <Alert variant='destructive' className='mb-6'>
+                  <AlertCircle className='h-4 w-4' />
+                  <AlertTitle>Error</AlertTitle>
+                  <AlertDescription>{submitStatus.message}</AlertDescription>
+                </Alert>
+              )}
+
              {/* Form Fields Section - Fixed at top */}
              <div className='flex-shrink-0 space-y-4'>
                <div className='space-y-2'>
@@ -52,6 +52,7 @@ export interface ProcessingOptions {
 export interface UseKnowledgeUploadOptions {
   onUploadComplete?: (uploadedFiles: UploadedFile[]) => void
   onError?: (error: UploadError) => void
+  workspaceId?: string
 }

 class KnowledgeUploadError extends Error {
@@ -355,23 +356,13 @@ export function useKnowledgeUpload(options: UseKnowledgeUploadOptions = {}) {
    const timeoutId = setTimeout(() => controller.abort(), timeoutMs)

    try {
-      presignedData = presignedOverride ?? (await getPresignedData(file, timeoutMs, controller))
-
-      if (presignedData.directUploadSupported) {
-        if (file.size > UPLOAD_CONFIG.LARGE_FILE_THRESHOLD) {
-          return await uploadFileInChunks(file, presignedData, timeoutMs, fileIndex)
-        }
-        return await uploadFileDirectly(file, presignedData, timeoutMs, controller, fileIndex)
+      // For large files (>50MB), use multipart upload
+      if (file.size > UPLOAD_CONFIG.LARGE_FILE_THRESHOLD) {
+        presignedData = presignedOverride ?? (await getPresignedData(file, timeoutMs, controller))
+        return await uploadFileInChunks(file, presignedData, timeoutMs, fileIndex)
      }

-      if (file.size > UPLOAD_CONFIG.DIRECT_UPLOAD_THRESHOLD) {
-        throw new DirectUploadError(
-          `File ${file.name} is too large (${(file.size / 1024 / 1024).toFixed(2)}MB) for upload. Cloud storage must be configured for files over 4MB.`,
-          { fileSize: file.size, limit: UPLOAD_CONFIG.DIRECT_UPLOAD_THRESHOLD }
-        )
-      }
-
-      logger.warn(`Using API upload fallback for ${file.name} - cloud storage not configured`)
+      // For all other files, use server-side upload
      return await uploadFileThroughAPI(file, timeoutMs)
    } finally {
      clearTimeout(timeoutId)
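// Sketch of the routing rule the rewritten try-block encodes (the 50MB figure
// comes from the new comment; the threshold constant's exact value is assumed):
const UPLOAD_CONFIG = { LARGE_FILE_THRESHOLD: 50 * 1024 * 1024 }

function chooseUploadPath(fileSize: number): 'multipart-presigned' | 'server-side' {
  // Large files still take the presigned multipart path; everything else now
  // posts through the server at /api/files/upload.
  return fileSize > UPLOAD_CONFIG.LARGE_FILE_THRESHOLD ? 'multipart-presigned' : 'server-side'
}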
@@ -729,6 +720,11 @@ export function useKnowledgeUpload(options: UseKnowledgeUploadOptions = {}) {
    try {
      const formData = new FormData()
      formData.append('file', file)
+      formData.append('context', 'knowledge-base')
+
+      if (options.workspaceId) {
+        formData.append('workspaceId', options.workspaceId)
+      }
+
      const uploadResponse = await fetch('/api/files/upload', {
        method: 'POST',
@@ -752,8 +748,9 @@ export function useKnowledgeUpload(options: UseKnowledgeUploadOptions = {}) {

      const uploadResult = await uploadResponse.json()

-      // Validate upload result structure
-      if (!uploadResult.path) {
+      const filePath = uploadResult.fileInfo?.path || uploadResult.path
+
+      if (!filePath) {
        throw new DirectUploadError(
          `Invalid upload response for ${file.name}: missing file path`,
          uploadResult
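// Annotation: the fallback expression accepts both response shapes the upload
// endpoint can produce (field values illustrative):
// { fileName, presignedUrl: '', fileInfo: { path: '/api/files/serve/...' } } -> fileInfo.path  (new server-side shape)
// { path: '/api/files/serve/...' }                                           -> path           (older direct shape)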
@@ -762,9 +759,7 @@ export function useKnowledgeUpload(options: UseKnowledgeUploadOptions = {}) {

      return createUploadedFile(
        file.name,
-        uploadResult.path.startsWith('http')
-          ? uploadResult.path
-          : `${window.location.origin}${uploadResult.path}`,
+        filePath.startsWith('http') ? filePath : `${window.location.origin}${filePath}`,
        file.size,
        file.type,
        file
@@ -2,8 +2,10 @@

 import { useState } from 'react'
 import { Download, Loader2 } from 'lucide-react'
+import { useRouter } from 'next/navigation'
 import { Button } from '@/components/ui/button'
 import { createLogger } from '@/lib/logs/console/logger'
+import { extractWorkspaceIdFromExecutionKey, getViewerUrl } from '@/lib/uploads/utils/file-utils'

 const logger = createLogger('FileDownload')

@@ -22,12 +24,19 @@ interface FileDownloadProps {
  }
  isExecutionFile?: boolean // Flag to indicate this is an execution file
  className?: string
+  workspaceId?: string // Optional workspace ID (can be extracted from file key if not provided)
 }

-export function FileDownload({ file, isExecutionFile = false, className }: FileDownloadProps) {
+export function FileDownload({
+  file,
+  isExecutionFile = false,
+  className,
+  workspaceId,
+}: FileDownloadProps) {
  const [isDownloading, setIsDownloading] = useState(false)
+  const router = useRouter()

-  const handleDownload = async () => {
+  const handleDownload = () => {
    if (isDownloading) return

    setIsDownloading(true)
@@ -35,34 +44,50 @@ export function FileDownload({ file, isExecutionFile = false, className }: FileD
|
|||||||
try {
|
try {
|
||||||
logger.info(`Initiating download for file: ${file.name}`)
|
logger.info(`Initiating download for file: ${file.name}`)
|
||||||
|
|
||||||
// Generate a fresh download URL
|
if (file.key.startsWith('url/')) {
|
||||||
const response = await fetch('/api/files/download', {
|
if (file.url) {
|
||||||
method: 'POST',
|
window.open(file.url, '_blank')
|
||||||
headers: {
|
logger.info(`Opened URL-type file directly: ${file.url}`)
|
||||||
'Content-Type': 'application/json',
|
return
|
||||||
},
|
}
|
||||||
body: JSON.stringify({
|
throw new Error('URL is required for URL-type files')
|
||||||
key: file.key,
|
|
||||||
name: file.name,
|
|
||||||
storageProvider: file.storageProvider,
|
|
||||||
bucketName: file.bucketName,
|
|
||||||
isExecutionFile, // Add flag to indicate execution file
|
|
||||||
}),
|
|
||||||
})
|
|
||||||
|
|
||||||
if (!response.ok) {
|
|
||||||
const errorData = await response.json().catch(() => ({ error: response.statusText }))
|
|
||||||
throw new Error(errorData.error || `Failed to generate download URL: ${response.status}`)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
const { downloadUrl, fileName } = await response.json()
|
let resolvedWorkspaceId = workspaceId
|
||||||
|
if (!resolvedWorkspaceId && isExecutionFile) {
|
||||||
|
resolvedWorkspaceId = extractWorkspaceIdFromExecutionKey(file.key) || undefined
|
||||||
|
} else if (!resolvedWorkspaceId) {
|
||||||
|
const segments = file.key.split('/')
|
||||||
|
if (segments.length >= 2 && /^[a-f0-9-]{36}$/.test(segments[0])) {
|
||||||
|
resolvedWorkspaceId = segments[0]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// Open the download URL in a new tab
|
if (isExecutionFile) {
|
||||||
window.open(downloadUrl, '_blank')
|
const serveUrl =
|
||||||
|
file.url || `/api/files/serve/${encodeURIComponent(file.key)}?context=execution`
|
||||||
|
window.open(serveUrl, '_blank')
|
||||||
|
logger.info(`Opened execution file serve URL: ${serveUrl}`)
|
||||||
|
} else {
|
||||||
|
const viewerUrl = resolvedWorkspaceId ? getViewerUrl(file.key, resolvedWorkspaceId) : null
|
||||||
|
|
||||||
logger.info(`Download initiated for file: ${fileName}`)
|
if (viewerUrl) {
|
||||||
|
router.push(viewerUrl)
|
||||||
|
logger.info(`Navigated to viewer URL: ${viewerUrl}`)
|
||||||
|
} else {
|
||||||
|
logger.warn(
|
||||||
|
`Could not construct viewer URL for file: ${file.name}, falling back to serve URL`
|
||||||
|
)
|
||||||
|
const serveUrl =
|
||||||
|
file.url || `/api/files/serve/${encodeURIComponent(file.key)}?context=workspace`
|
||||||
|
window.open(serveUrl, '_blank')
|
||||||
|
}
|
||||||
|
}
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error(`Failed to download file ${file.name}:`, error)
|
logger.error(`Failed to download file ${file.name}:`, error)
|
||||||
|
if (file.url) {
|
||||||
|
window.open(file.url, '_blank')
|
||||||
|
}
|
||||||
} finally {
|
} finally {
|
||||||
setIsDownloading(false)
|
setIsDownloading(false)
|
||||||
}
|
}
|
||||||
|
|||||||
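The non-execution branch above prefers an in-app viewer route and only falls back to the serve URL when no workspace can be resolved. A minimal sketch of that fallback extraction (the key value here is illustrative; the regex and segment check mirror the hunk above):

    // Workspace-scoped keys are assumed to look like "<workspaceId>/<fileName>",
    // where the first segment is a 36-character UUID.
    const fileKey = '123e4567-e89b-12d3-a456-426614174000/report.pdf'
    const segments = fileKey.split('/')
    const workspaceId =
      segments.length >= 2 && /^[a-f0-9-]{36}$/.test(segments[0]) ? segments[0] : undefined
    // workspaceId === '123e4567-e89b-12d3-a456-426614174000'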
@@ -36,7 +36,7 @@ export const FileAttachmentDisplay = memo(({ fileAttachments }: FileAttachmentDi
       return fileUrls[cacheKey]
     }

-    const url = `/api/files/serve/${encodeURIComponent(file.key)}?bucket=copilot`
+    const url = `/api/files/serve/${encodeURIComponent(file.key)}?context=copilot`
     setFileUrls((prev) => ({ ...prev, [cacheKey]: url }))
     return url
   }
@@ -596,53 +596,33 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
       setAttachedFiles((prev) => [...prev, tempFile])

       try {
-        // Request presigned URL
-        const presignedResponse = await fetch('/api/files/presigned?type=copilot', {
+        const formData = new FormData()
+        formData.append('file', file)
+        formData.append('context', 'copilot')
+
+        const uploadResponse = await fetch('/api/files/upload', {
           method: 'POST',
-          headers: {
-            'Content-Type': 'application/json',
-          },
-          body: JSON.stringify({
-            fileName: file.name,
-            contentType: file.type,
-            fileSize: file.size,
-            userId,
-          }),
+          body: formData,
         })

-        if (!presignedResponse.ok) {
-          throw new Error('Failed to get presigned URL')
-        }
-
-        const presignedData = await presignedResponse.json()
-
-        logger.info(`Uploading file: ${presignedData.presignedUrl}`)
-        const uploadHeaders = presignedData.uploadHeaders || {}
-        const uploadResponse = await fetch(presignedData.presignedUrl, {
-          method: 'PUT',
-          headers: {
-            'Content-Type': file.type,
-            ...uploadHeaders,
-          },
-          body: file,
-        })
-
-        logger.info(`Upload response status: ${uploadResponse.status}`)
-
         if (!uploadResponse.ok) {
-          const errorText = await uploadResponse.text()
-          logger.error(`Upload failed: ${errorText}`)
-          throw new Error(`Failed to upload file: ${uploadResponse.status} ${errorText}`)
+          const errorData = await uploadResponse.json().catch(() => ({
+            error: `Upload failed: ${uploadResponse.status}`,
+          }))
+          throw new Error(errorData.error || `Failed to upload file: ${uploadResponse.status}`)
         }

-        // Update file entry with success
+        const uploadData = await uploadResponse.json()
+
+        logger.info(`File uploaded successfully: ${uploadData.fileInfo?.path || uploadData.path}`)
+
         setAttachedFiles((prev) =>
           prev.map((f) =>
             f.id === tempFile.id
               ? {
                   ...f,
-                  path: presignedData.fileInfo.path,
-                  key: presignedData.fileInfo.key, // Store the actual storage key
+                  path: uploadData.fileInfo?.path || uploadData.path || uploadData.url,
+                  key: uploadData.fileInfo?.key || uploadData.key,
                   uploading: false,
                 }
               : f
@@ -650,7 +630,6 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
         )
       } catch (error) {
         logger.error(`File upload failed: ${error}`)
-        // Remove failed upload
         setAttachedFiles((prev) => prev.filter((f) => f.id !== tempFile.id))
       }
     }
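Both hunks above move the copilot attachment flow from a presigned-URL PUT to a single multipart POST. A hedged sketch of the new client call (assuming the /api/files/upload route reads the `context` field to pick a storage destination, as the diff suggests; the helper name is illustrative):

    async function uploadWithContext(file: File, context: string): Promise<string> {
      const formData = new FormData()
      formData.append('file', file)
      formData.append('context', context) // e.g. 'copilot' or 'profile-pictures'

      const res = await fetch('/api/files/upload', { method: 'POST', body: formData })
      if (!res.ok) {
        const err = await res.json().catch(() => ({ error: `Upload failed: ${res.status}` }))
        throw new Error(err.error || `Failed to upload file: ${res.status}`)
      }
      const data = await res.json()
      // The route may return either a flat shape or a nested fileInfo object.
      return data.fileInfo?.path || data.path || data.url
    }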
@@ -48,54 +48,9 @@ export function useProfilePictureUpload({

   const uploadFileToServer = useCallback(async (file: File): Promise<string> => {
     try {
-      const presignedResponse = await fetch('/api/files/presigned?type=profile-pictures', {
-        method: 'POST',
-        headers: {
-          'Content-Type': 'application/json',
-        },
-        body: JSON.stringify({
-          fileName: file.name,
-          contentType: file.type,
-          fileSize: file.size,
-        }),
-      })
-
-      if (presignedResponse.ok) {
-        const presignedData = await presignedResponse.json()
-
-        logger.info('Presigned URL response:', presignedData)
-
-        if (presignedData.directUploadSupported && presignedData.presignedUrl) {
-          const uploadHeaders: Record<string, string> = {
-            'Content-Type': file.type,
-          }
-
-          if (presignedData.uploadHeaders) {
-            Object.assign(uploadHeaders, presignedData.uploadHeaders)
-          }
-
-          const uploadResponse = await fetch(presignedData.presignedUrl, {
-            method: 'PUT',
-            body: file,
-            headers: uploadHeaders,
-          })
-
-          logger.info(`Upload response status: ${uploadResponse.status}`)
-
-          if (!uploadResponse.ok) {
-            const responseText = await uploadResponse.text()
-            logger.error(`Direct upload failed: ${uploadResponse.status} - ${responseText}`)
-            throw new Error(`Direct upload failed: ${uploadResponse.status} - ${responseText}`)
-          }
-
-          const publicUrl = presignedData.fileInfo.path
-          logger.info(`Profile picture uploaded successfully via direct upload: ${publicUrl}`)
-          return publicUrl
-        }
-      }
-
       const formData = new FormData()
       formData.append('file', file)
+      formData.append('context', 'profile-pictures')

       const response = await fetch('/api/files/upload', {
         method: 'POST',
@@ -108,7 +63,7 @@ export function useProfilePictureUpload({
       }

       const data = await response.json()
-      const publicUrl = data.path
+      const publicUrl = data.fileInfo?.path || data.path || data.url
       logger.info(`Profile picture uploaded successfully via server upload: ${publicUrl}`)
       return publicUrl
     } catch (error) {
@@ -179,18 +179,7 @@ export function FileUploads() {
   const handleDownload = async (file: WorkspaceFileRecord) => {
     if (!workspaceId) return

-    try {
-      const response = await fetch(`/api/workspaces/${workspaceId}/files/${file.id}/download`, {
-        method: 'POST',
-      })
-      const data = await response.json()
-
-      if (data.success && data.downloadUrl) {
-        window.open(data.downloadUrl, '_blank')
-      }
-    } catch (error) {
-      logger.error('Error downloading file:', error)
-    }
+    window.open(`/workspace/${workspaceId}/files/${file.id}/view`, '_blank')
   }

   const handleDelete = async (file: WorkspaceFileRecord) => {
@@ -376,9 +365,13 @@ export function FileUploads() {
                   <TableCell className='px-3'>
                     <div className='flex min-w-0 items-center gap-2'>
                       <Icon className='h-3.5 w-3.5 shrink-0 text-muted-foreground' />
-                      <span className='min-w-0 truncate font-normal' title={file.name}>
+                      <button
+                        onClick={() => handleDownload(file)}
+                        className='min-w-0 truncate text-left font-normal hover:underline'
+                        title={file.name}
+                      >
                         {truncateMiddle(file.name)}
-                      </span>
+                      </button>
                     </div>
                   </TableCell>
                   <TableCell className='whitespace-nowrap px-3 text-[12px] text-muted-foreground'>
@@ -428,7 +428,8 @@ async function executeWebhookJobInternal(
       const uploadedFiles = await processExecutionFiles(
         fieldValue,
         executionContext,
-        requestId
+        requestId,
+        payload.userId
       )

       if (uploadedFiles.length > 0) {
@@ -50,7 +50,7 @@ export async function checkHybridAuth(
       const bodyText = await clonedRequest.text()
       if (bodyText) {
         const body = JSON.parse(bodyText)
-        workflowId = body.workflowId
+        workflowId = body.workflowId || body._context?.workflowId
       }
     } catch {
       // Ignore JSON parse errors
@@ -14,7 +14,7 @@ export async function processExecutionFile(
   file: { type: string; data: string; name: string; mime?: string },
   executionContext: { workspaceId: string; workflowId: string; executionId: string },
   requestId: string,
-  isAsync?: boolean
+  userId?: string
 ): Promise<UserFile | null> {
   if (file.type === 'file' && file.data && file.name) {
     const dataUrlPrefix = 'data:'
@@ -49,7 +49,7 @@ export async function processExecutionFile(
       buffer,
       file.name,
       mimeType || file.mime || 'application/octet-stream',
-      isAsync
+      userId
     )

     logger.debug(`[${requestId}] Successfully uploaded ${file.name}`)
@@ -79,7 +79,7 @@ export async function processExecutionFiles(
   fieldValue: any,
   executionContext: { workspaceId: string; workflowId: string; executionId: string },
   requestId: string,
-  isAsync?: boolean
+  userId?: string
 ): Promise<UserFile[]> {
   if (!fieldValue || typeof fieldValue !== 'object') {
     return []
@@ -91,7 +91,7 @@ export async function processExecutionFiles(

   for (const file of files) {
     try {
-      const userFile = await processExecutionFile(file, fullContext, requestId, isAsync)
+      const userFile = await processExecutionFile(file, fullContext, requestId, userId)

       if (userFile) {
         uploadedFiles.push(userFile)
@@ -4,6 +4,7 @@ import { parseBuffer, parseFile } from '@/lib/file-parsers'
 import { retryWithExponentialBackoff } from '@/lib/knowledge/documents/utils'
 import { createLogger } from '@/lib/logs/console/logger'
 import { StorageService } from '@/lib/uploads'
+import { downloadFileFromUrl } from '@/lib/uploads/utils/file-utils.server'
 import { mistralParserTool } from '@/tools/mistral/parser'

 const logger = createLogger('DocumentProcessor')
@@ -170,11 +171,18 @@ async function handleFileForOCR(fileUrl: string, filename: string, mimeType: str
   const buffer = await downloadFileWithTimeout(fileUrl)

   try {
+    const metadata: Record<string, string> = {
+      originalName: filename,
+      uploadedAt: new Date().toISOString(),
+      purpose: 'knowledge-base',
+    }
+
     const cloudResult = await StorageService.uploadFile({
       file: buffer,
       fileName: filename,
       contentType: mimeType,
       context: 'knowledge-base',
+      metadata,
     })

     const httpsUrl = await StorageService.generatePresignedDownloadUrl(
@@ -192,34 +200,7 @@ async function handleFileForOCR(fileUrl: string, filename: string, mimeType: str
 }

 async function downloadFileWithTimeout(fileUrl: string): Promise<Buffer> {
-  const controller = new AbortController()
-  const timeoutId = setTimeout(() => controller.abort(), TIMEOUTS.FILE_DOWNLOAD)
-
-  try {
-    const isInternalFileServe = fileUrl.includes('/api/files/serve/')
-    const headers: HeadersInit = {}
-
-    if (isInternalFileServe) {
-      const { generateInternalToken } = await import('@/lib/auth/internal')
-      const token = await generateInternalToken()
-      headers.Authorization = `Bearer ${token}`
-    }
-
-    const response = await fetch(fileUrl, { signal: controller.signal, headers })
-    clearTimeout(timeoutId)
-
-    if (!response.ok) {
-      throw new Error(`Failed to download file: ${response.statusText}`)
-    }
-
-    return Buffer.from(await response.arrayBuffer())
-  } catch (error) {
-    clearTimeout(timeoutId)
-    if (error instanceof Error && error.name === 'AbortError') {
-      throw new Error('File download timed out')
-    }
-    throw error
-  }
+  return downloadFileFromUrl(fileUrl, TIMEOUTS.FILE_DOWNLOAD)
 }

 async function downloadFileForBase64(fileUrl: string): Promise<Buffer> {
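The timeout wrapper now delegates to a shared helper. Based on the deleted body above, a sketch of what downloadFileFromUrl plausibly covers (AbortController-based timeout; the real implementation lives in '@/lib/uploads/utils/file-utils.server' and may also attach the internal auth header for /api/files/serve/ URLs):

    async function downloadFileFromUrlSketch(fileUrl: string, timeoutMs: number): Promise<Buffer> {
      const controller = new AbortController()
      const timeoutId = setTimeout(() => controller.abort(), timeoutMs)
      try {
        const response = await fetch(fileUrl, { signal: controller.signal })
        if (!response.ok) throw new Error(`Failed to download file: ${response.statusText}`)
        return Buffer.from(await response.arrayBuffer())
      } finally {
        clearTimeout(timeoutId)
      }
    }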
@@ -1,43 +1,108 @@
-import {
-  BLOB_CHAT_CONFIG,
-  BLOB_CONFIG,
-  BLOB_COPILOT_CONFIG,
-  BLOB_EXECUTION_FILES_CONFIG,
-  BLOB_KB_CONFIG,
-  BLOB_PROFILE_PICTURES_CONFIG,
-  S3_CHAT_CONFIG,
-  S3_CONFIG,
-  S3_COPILOT_CONFIG,
-  S3_EXECUTION_FILES_CONFIG,
-  S3_KB_CONFIG,
-  S3_PROFILE_PICTURES_CONFIG,
-  USE_BLOB_STORAGE,
-  USE_S3_STORAGE,
-} from '@/lib/uploads/core/setup'
-
-export type StorageContext =
-  | 'general'
-  | 'knowledge-base'
-  | 'chat'
-  | 'copilot'
-  | 'execution'
-  | 'workspace'
-  | 'profile-pictures'
-
-export interface StorageConfig {
-  // S3 config
-  bucket?: string
-  region?: string
-  // Blob config
-  containerName?: string
-  accountName?: string
-  accountKey?: string
-  connectionString?: string
-}
+import { env } from '@/lib/env'
+import type { StorageConfig, StorageContext } from '@/lib/uploads/shared/types'
+
+export type { StorageConfig, StorageContext } from '@/lib/uploads/shared/types'
+export const UPLOAD_DIR = '/uploads'
+
+const hasS3Config = !!(env.S3_BUCKET_NAME && env.AWS_REGION)
+const hasBlobConfig = !!(
+  env.AZURE_STORAGE_CONTAINER_NAME &&
+  ((env.AZURE_ACCOUNT_NAME && env.AZURE_ACCOUNT_KEY) || env.AZURE_CONNECTION_STRING)
+)
+
+export const USE_BLOB_STORAGE = hasBlobConfig
+export const USE_S3_STORAGE = hasS3Config && !USE_BLOB_STORAGE
+
+export const S3_CONFIG = {
+  bucket: env.S3_BUCKET_NAME || '',
+  region: env.AWS_REGION || '',
+}
+
+export const BLOB_CONFIG = {
+  accountName: env.AZURE_ACCOUNT_NAME || '',
+  accountKey: env.AZURE_ACCOUNT_KEY || '',
+  connectionString: env.AZURE_CONNECTION_STRING || '',
+  containerName: env.AZURE_STORAGE_CONTAINER_NAME || '',
+}
+
+export const S3_KB_CONFIG = {
+  bucket: env.S3_KB_BUCKET_NAME || '',
+  region: env.AWS_REGION || '',
+}
+
+export const S3_EXECUTION_FILES_CONFIG = {
+  bucket: env.S3_EXECUTION_FILES_BUCKET_NAME || 'sim-execution-files',
+  region: env.AWS_REGION || '',
+}
+
+export const BLOB_KB_CONFIG = {
+  accountName: env.AZURE_ACCOUNT_NAME || '',
+  accountKey: env.AZURE_ACCOUNT_KEY || '',
+  connectionString: env.AZURE_CONNECTION_STRING || '',
+  containerName: env.AZURE_STORAGE_KB_CONTAINER_NAME || '',
+}
+
+export const BLOB_EXECUTION_FILES_CONFIG = {
+  accountName: env.AZURE_ACCOUNT_NAME || '',
+  accountKey: env.AZURE_ACCOUNT_KEY || '',
+  connectionString: env.AZURE_CONNECTION_STRING || '',
+  containerName: env.AZURE_STORAGE_EXECUTION_FILES_CONTAINER_NAME || 'sim-execution-files',
+}
+
+export const S3_CHAT_CONFIG = {
+  bucket: env.S3_CHAT_BUCKET_NAME || '',
+  region: env.AWS_REGION || '',
+}
+
+export const BLOB_CHAT_CONFIG = {
+  accountName: env.AZURE_ACCOUNT_NAME || '',
+  accountKey: env.AZURE_ACCOUNT_KEY || '',
+  connectionString: env.AZURE_CONNECTION_STRING || '',
+  containerName: env.AZURE_STORAGE_CHAT_CONTAINER_NAME || '',
+}
+
+export const S3_COPILOT_CONFIG = {
+  bucket: env.S3_COPILOT_BUCKET_NAME || '',
+  region: env.AWS_REGION || '',
+}
+
+export const BLOB_COPILOT_CONFIG = {
+  accountName: env.AZURE_ACCOUNT_NAME || '',
+  accountKey: env.AZURE_ACCOUNT_KEY || '',
+  connectionString: env.AZURE_CONNECTION_STRING || '',
+  containerName: env.AZURE_STORAGE_COPILOT_CONTAINER_NAME || '',
+}
+
+export const S3_PROFILE_PICTURES_CONFIG = {
+  bucket: env.S3_PROFILE_PICTURES_BUCKET_NAME || '',
+  region: env.AWS_REGION || '',
+}
+
+export const BLOB_PROFILE_PICTURES_CONFIG = {
+  accountName: env.AZURE_ACCOUNT_NAME || '',
+  accountKey: env.AZURE_ACCOUNT_KEY || '',
+  connectionString: env.AZURE_CONNECTION_STRING || '',
+  containerName: env.AZURE_STORAGE_PROFILE_PICTURES_CONTAINER_NAME || '',
+}
+
+/**
+ * Get the current storage provider as a human-readable string
+ */
+export function getStorageProvider(): 'Azure Blob' | 'S3' | 'Local' {
+  if (USE_BLOB_STORAGE) return 'Azure Blob'
+  if (USE_S3_STORAGE) return 'S3'
+  return 'Local'
+}
+
+/**
+ * Check if we're using any cloud storage (S3 or Blob)
+ */
+export function isUsingCloudStorage(): boolean {
+  return USE_S3_STORAGE || USE_BLOB_STORAGE
+}

 /**
  * Get the appropriate storage configuration for a given context
- * Automatically selects between S3 and Blob based on USE_BLOB_STORAGE/USE_S3_STORAGE flags
  */
 export function getStorageConfig(context: StorageContext): StorageConfig {
   if (USE_BLOB_STORAGE) {
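Note the provider precedence the two flags encode: Azure Blob wins whenever it is fully configured, S3 is used only when Blob is not, and local disk is the fallback. For example (values illustrative):

    // Both providers configured: USE_BLOB_STORAGE === true, USE_S3_STORAGE === false
    //   -> getStorageProvider() === 'Azure Blob'
    // Only S3 configured: USE_S3_STORAGE === true -> 'S3'
    // Neither configured: both flags false -> 'Local'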
@@ -48,7 +113,6 @@ export function getStorageConfig(context: StorageContext): StorageConfig {
     return getS3Config(context)
   }

-  // Local storage doesn't need config
   return {}
 }

@@ -78,7 +142,6 @@ function getS3Config(context: StorageContext): StorageConfig {
         region: S3_EXECUTION_FILES_CONFIG.region,
       }
     case 'workspace':
-      // Workspace files use general bucket but with custom key structure
       return {
         bucket: S3_CONFIG.bucket,
         region: S3_CONFIG.region,
@@ -130,7 +193,6 @@ function getBlobConfig(context: StorageContext): StorageConfig {
         containerName: BLOB_EXECUTION_FILES_CONFIG.containerName,
       }
     case 'workspace':
-      // Workspace files use general container but with custom key structure
       return {
         accountName: BLOB_CONFIG.accountName,
         accountKey: BLOB_CONFIG.accountKey,
@@ -172,6 +234,5 @@ export function isStorageContextConfigured(context: StorageContext): boolean {
     return !!(config.bucket && config.region)
   }

-  // Local storage is always available
   return true
 }
@@ -29,12 +29,14 @@ export interface ChatExecutionContext {
  * @param files Array of chat file attachments
  * @param executionContext Execution context for temporary storage
  * @param requestId Unique request identifier for logging/tracing
+ * @param userId User ID for file metadata (optional)
  * @returns Array of UserFile objects with upload results
  */
 export async function processChatFiles(
   files: ChatFile[],
   executionContext: ChatExecutionContext,
-  requestId: string
+  requestId: string,
+  userId?: string
 ): Promise<UserFile[]> {
   logger.info(
     `Processing ${files.length} chat files for execution ${executionContext.executionId}`,
@@ -51,7 +53,12 @@ export async function processChatFiles(
     mime: file.type,
   }))

-  const userFiles = await processExecutionFiles(transformedFiles, executionContext, requestId)
+  const userFiles = await processExecutionFiles(
+    transformedFiles,
+    executionContext,
+    requestId,
+    userId
+  )

   logger.info(`Successfully processed ${userFiles.length} chat files`, {
     requestId,
@@ -75,8 +82,9 @@ export async function processChatFiles(
 export async function uploadChatFile(
   file: ChatFile,
   executionContext: ChatExecutionContext,
-  requestId: string
+  requestId: string,
+  userId?: string
 ): Promise<UserFile> {
-  const [userFile] = await processChatFiles([file], executionContext, requestId)
+  const [userFile] = await processChatFiles([file], executionContext, requestId, userId)
   return userFile
 }
@@ -4,8 +4,8 @@ import {
   downloadFile,
   generatePresignedDownloadUrl,
   generatePresignedUploadUrl,
-  type PresignedUrlResponse,
 } from '@/lib/uploads/core/storage-service'
+import type { PresignedUrlResponse } from '@/lib/uploads/shared/types'

 const logger = createLogger('CopilotFileManager')

@@ -61,12 +61,6 @@ export async function generateCopilotUploadUrl(
 ): Promise<PresignedUrlResponse> {
   const { fileName, contentType, fileSize, userId, expirationSeconds = 3600 } = options

-  logger.info(`Generating copilot upload URL for: ${fileName}`, {
-    userId,
-    contentType,
-    fileSize,
-  })
-
   if (!userId?.trim()) {
     throw new Error('Authenticated user session is required for copilot uploads')
   }
@@ -103,8 +97,6 @@ export async function generateCopilotUploadUrl(
  * @throws Error if file not found or download fails
  */
 export async function downloadCopilotFile(key: string): Promise<Buffer> {
-  logger.info(`Downloading copilot file: ${key}`)
-
   try {
     const fileBuffer = await downloadFile({
       key,
@@ -136,8 +128,6 @@ export async function processCopilotAttachments(
   attachments: CopilotFileAttachment[],
   requestId: string
 ): Promise<Array<{ buffer: Buffer; attachment: CopilotFileAttachment }>> {
-  logger.info(`Processing ${attachments.length} copilot attachments`, { requestId })
-
   const results: Array<{ buffer: Buffer; attachment: CopilotFileAttachment }> = []

   for (const attachment of attachments) {
@@ -173,8 +163,6 @@ export async function generateCopilotDownloadUrl(
   key: string,
   expirationSeconds = 3600
 ): Promise<string> {
-  logger.info(`Generating copilot download URL for: ${key}`)
-
   const downloadUrl = await generatePresignedDownloadUrl(key, 'copilot', expirationSeconds)

   logger.info(`Generated copilot download URL for: ${key}`)
@@ -188,8 +176,6 @@ export async function generateCopilotDownloadUrl(
  * @param key File storage key
  */
 export async function deleteCopilotFile(key: string): Promise<void> {
-  logger.info(`Deleting copilot file: ${key}`)
-
   await deleteFile({
     key,
     context: 'copilot',
@@ -1,10 +1,4 @@
 import { createLogger } from '@/lib/logs/console/logger'
-import {
-  deleteFile,
-  downloadFile,
-  generatePresignedDownloadUrl,
-  uploadFile,
-} from '@/lib/uploads/core/storage-service'
 import type { UserFile } from '@/executor/types'
 import type { ExecutionContext } from './execution-file-helpers'
 import {
@@ -23,13 +17,14 @@ export async function uploadExecutionFile(
   fileBuffer: Buffer,
   fileName: string,
   contentType: string,
-  isAsync?: boolean
+  userId?: string
 ): Promise<UserFile> {
   logger.info(`Uploading execution file: ${fileName} for execution ${context.executionId}`)
   logger.debug(`File upload context:`, {
     workspaceId: context.workspaceId,
     workflowId: context.workflowId,
     executionId: context.executionId,
+    userId: userId || 'not provided',
     fileName,
     bufferSize: fileBuffer.length,
   })
@@ -39,9 +34,19 @@ export async function uploadExecutionFile(

   logger.info(`Generated storage key: "${storageKey}" for file: ${fileName}`)

-  const urlExpirationSeconds = isAsync ? 10 * 60 : 5 * 60
+  const metadata: Record<string, string> = {
+    originalName: fileName,
+    uploadedAt: new Date().toISOString(),
+    purpose: 'execution',
+    workspaceId: context.workspaceId,
+  }
+
+  if (userId) {
+    metadata.userId = userId
+  }

   try {
+    const { uploadFile } = await import('@/lib/uploads/core/storage-service')
     const fileInfo = await uploadFile({
       file: fileBuffer,
       fileName: storageKey,
@@ -49,34 +54,15 @@ export async function uploadExecutionFile(
       context: 'execution',
       preserveKey: true, // Don't add timestamp prefix
       customKey: storageKey, // Use exact execution-scoped key
+      metadata, // Pass metadata for cloud storage and database tracking
     })

-    logger.info(`Upload returned key: "${fileInfo.key}" for file: ${fileName}`)
-    logger.info(`Original storage key was: "${storageKey}"`)
-    logger.info(`Keys match: ${fileInfo.key === storageKey}`)
-
-    let directUrl: string | undefined
-
-    try {
-      logger.info(
-        `Generating presigned URL with key: "${fileInfo.key}" (expiration: ${urlExpirationSeconds / 60} minutes)`
-      )
-      directUrl = await generatePresignedDownloadUrl(
-        fileInfo.key,
-        'execution',
-        urlExpirationSeconds
-      )
-      logger.info(`Generated presigned URL for execution file`)
-    } catch (error) {
-      logger.warn(`Failed to generate presigned URL for ${fileName}:`, error)
-    }
-
     const userFile: UserFile = {
       id: fileId,
       name: fileName,
       size: fileBuffer.length,
       type: contentType,
-      url: directUrl || `/api/files/serve/${fileInfo.key}`, // Use presigned URL (5 or 10 min), fallback to serve path
+      url: `/api/files/serve/${fileInfo.key}`, // Always use internal serve path for consistency
       key: fileInfo.key,
       uploadedAt: new Date().toISOString(),
       expiresAt: getFileExpirationDate(),
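The url change above is the permission-enforcement piece of this hunk: instead of handing out a short-lived presigned link at upload time, execution files now always carry a relative serve path, so access is checked by the serve route on every request. Illustratively:

    // Before: url was a time-limited presigned cloud URL (bearer link)
    // After:  url = `/api/files/serve/${fileInfo.key}` (authenticated per request)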
@@ -100,6 +86,7 @@ export async function downloadExecutionFile(userFile: UserFile): Promise<Buffer>
   logger.info(`Downloading execution file: ${userFile.name}`)

   try {
+    const { downloadFile } = await import('@/lib/uploads/core/storage-service')
     const fileBuffer = await downloadFile({
       key: userFile.key,
       context: 'execution',
@@ -125,6 +112,7 @@ export async function generateExecutionFileDownloadUrl(userFile: UserFile): Prom
   logger.info(`File key: "${userFile.key}"`)

   try {
+    const { generatePresignedDownloadUrl } = await import('@/lib/uploads/core/storage-service')
     const downloadUrl = await generatePresignedDownloadUrl(
       userFile.key,
       'execution',
@@ -148,6 +136,7 @@ export async function deleteExecutionFile(userFile: UserFile): Promise<void> {
   logger.info(`Deleting execution file: ${userFile.name}`)

   try {
+    const { deleteFile } = await import('@/lib/uploads/core/storage-service')
     await deleteFile({
       key: userFile.key,
       context: 'execution',
@@ -4,7 +4,7 @@
  */

 import { db } from '@sim/db'
-import { workspaceFile } from '@sim/db/schema'
+import { workspaceFiles } from '@sim/db/schema'
 import { and, eq } from 'drizzle-orm'
 import {
   checkStorageQuota,
@@ -15,10 +15,10 @@ import { createLogger } from '@/lib/logs/console/logger'
 import {
   deleteFile,
   downloadFile,
-  generatePresignedDownloadUrl,
   hasCloudStorage,
   uploadFile,
 } from '@/lib/uploads/core/storage-service'
+import { getFileMetadataByKey, insertFileMetadata } from '@/lib/uploads/server/metadata'
 import type { UserFile } from '@/executor/types'

 const logger = createLogger('WorkspaceFileStorage')
@@ -71,11 +71,19 @@ export async function uploadWorkspaceFile(
   }

   const storageKey = generateWorkspaceFileKey(workspaceId, fileName)
-  const fileId = `wf_${Date.now()}_${Math.random().toString(36).substring(2, 9)}`
+  let fileId = `wf_${Date.now()}_${Math.random().toString(36).substring(2, 9)}`

   try {
     logger.info(`Generated storage key: ${storageKey}`)

+    const metadata: Record<string, string> = {
+      originalName: fileName,
+      uploadedAt: new Date().toISOString(),
+      purpose: 'workspace',
+      userId: userId,
+      workspaceId: workspaceId,
+    }
+
     const uploadResult = await uploadFile({
       file: fileBuffer,
       fileName: storageKey, // Use the full storageKey as fileName
@@ -83,20 +91,47 @@ export async function uploadWorkspaceFile(
       context: 'workspace',
       preserveKey: true, // Don't add timestamp prefix
       customKey: storageKey, // Explicitly set the key
+      metadata, // Pass metadata for cloud storage consistency
     })

     logger.info(`Upload returned key: ${uploadResult.key}`)

-    await db.insert(workspaceFile).values({
-      id: fileId,
-      workspaceId,
-      name: fileName,
-      key: uploadResult.key, // This is what actually got stored in S3
-      size: fileBuffer.length,
-      type: contentType,
-      uploadedBy: userId,
-      uploadedAt: new Date(),
-    })
+    const usingCloudStorage = hasCloudStorage()
+
+    if (!usingCloudStorage) {
+      const metadataRecord = await insertFileMetadata({
+        id: fileId,
+        key: uploadResult.key,
+        userId,
+        workspaceId,
+        context: 'workspace',
+        originalName: fileName,
+        contentType,
+        size: fileBuffer.length,
+      })
+      fileId = metadataRecord.id
+      logger.info(`Stored metadata in database for local file: ${uploadResult.key}`)
+    } else {
+      const existing = await getFileMetadataByKey(uploadResult.key, 'workspace')
+
+      if (!existing) {
+        logger.warn(`Metadata not found for cloud file ${uploadResult.key}, inserting...`)
+        const metadataRecord = await insertFileMetadata({
+          id: fileId,
+          key: uploadResult.key,
+          userId,
+          workspaceId,
+          context: 'workspace',
+          originalName: fileName,
+          contentType,
+          size: fileBuffer.length,
+        })
+        fileId = metadataRecord.id
+      } else {
+        fileId = existing.id
+        logger.info(`Using existing metadata record for cloud file: ${uploadResult.key}`)
+      }
+    }

     logger.info(`Successfully uploaded workspace file: ${fileName} with key: ${uploadResult.key}`)

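An illustrative payload for insertFileMetadata, mirroring the hunk above (field values are made up; the helper's real signature lives in '@/lib/uploads/server/metadata'):

    const metadataRecord = await insertFileMetadata({
      id: 'wf_1700000000000_ab12cd3', // pre-generated fallback id
      key: 'workspace-id/report.pdf',
      userId: 'user_123',
      workspaceId: 'workspace-id',
      context: 'workspace',
      originalName: 'report.pdf',
      contentType: 'application/pdf',
      size: 1024,
    })
    // The record's id becomes the canonical fileId, superseding the wf_* fallback.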
@@ -106,29 +141,19 @@ export async function uploadWorkspaceFile(
       logger.error(`Failed to update storage tracking:`, storageError)
     }

-    let presignedUrl: string | undefined
-
-    if (hasCloudStorage()) {
-      try {
-        presignedUrl = await generatePresignedDownloadUrl(
-          uploadResult.key,
-          'workspace',
-          24 * 60 * 60 // 24 hours
-        )
-      } catch (error) {
-        logger.warn(`Failed to generate presigned URL for ${fileName}:`, error)
-      }
-    }
+    const { getServePathPrefix } = await import('@/lib/uploads')
+    const pathPrefix = getServePathPrefix()
+    const serveUrl = `${pathPrefix}${encodeURIComponent(uploadResult.key)}?context=workspace`

     return {
       id: fileId,
       name: fileName,
       size: fileBuffer.length,
       type: contentType,
-      url: presignedUrl || uploadResult.path, // Use presigned URL for external access
+      url: serveUrl, // Use authenticated serve URL (enforces context)
       key: uploadResult.key,
       uploadedAt: new Date().toISOString(),
-      expiresAt: new Date(Date.now() + 365 * 24 * 60 * 60 * 1000).toISOString(), // 1 year
+      expiresAt: new Date(Date.now() + 100 * 365 * 24 * 60 * 60 * 1000).toISOString(), // Far future date (effectively never expires)
       context: 'workspace',
     }
   } catch (error) {
@@ -149,8 +174,14 @@ export async function fileExistsInWorkspace(
   try {
     const existing = await db
       .select()
-      .from(workspaceFile)
-      .where(and(eq(workspaceFile.workspaceId, workspaceId), eq(workspaceFile.name, fileName)))
+      .from(workspaceFiles)
+      .where(
+        and(
+          eq(workspaceFiles.workspaceId, workspaceId),
+          eq(workspaceFiles.originalName, fileName),
+          eq(workspaceFiles.context, 'workspace')
+        )
+      )
       .limit(1)

     return existing.length > 0
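The same three-way filter (workspace, name or id, context) recurs in the queries below; drizzle-orm's and()/eq() compose it into a single WHERE clause. A self-contained sketch (the declared names stand in for the real schema and inputs):

    import { and, eq } from 'drizzle-orm'

    declare const workspaceFiles: any // stands in for the @sim/db/schema table
    declare const workspaceId: string
    declare const fileName: string

    const whereClause = and(
      eq(workspaceFiles.workspaceId, workspaceId),
      eq(workspaceFiles.originalName, fileName),
      eq(workspaceFiles.context, 'workspace')
    )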
@@ -167,16 +198,25 @@ export async function listWorkspaceFiles(workspaceId: string): Promise<Workspace
   try {
     const files = await db
       .select()
-      .from(workspaceFile)
-      .where(eq(workspaceFile.workspaceId, workspaceId))
-      .orderBy(workspaceFile.uploadedAt)
+      .from(workspaceFiles)
+      .where(
+        and(eq(workspaceFiles.workspaceId, workspaceId), eq(workspaceFiles.context, 'workspace'))
+      )
+      .orderBy(workspaceFiles.uploadedAt)

     const { getServePathPrefix } = await import('@/lib/uploads')
     const pathPrefix = getServePathPrefix()

     return files.map((file) => ({
-      ...file,
+      id: file.id,
+      workspaceId: file.workspaceId || workspaceId, // Use query workspaceId as fallback (should never be null for workspace files)
+      name: file.originalName,
+      key: file.key,
       path: `${pathPrefix}${encodeURIComponent(file.key)}?context=workspace`,
+      size: file.size,
+      type: file.contentType,
+      uploadedBy: file.userId,
+      uploadedAt: file.uploadedAt,
     }))
   } catch (error) {
     logger.error(`Failed to list workspace files for ${workspaceId}:`, error)
@@ -194,8 +234,14 @@ export async function getWorkspaceFile(
   try {
     const files = await db
       .select()
-      .from(workspaceFile)
-      .where(and(eq(workspaceFile.id, fileId), eq(workspaceFile.workspaceId, workspaceId)))
+      .from(workspaceFiles)
+      .where(
+        and(
+          eq(workspaceFiles.id, fileId),
+          eq(workspaceFiles.workspaceId, workspaceId),
+          eq(workspaceFiles.context, 'workspace')
+        )
+      )
       .limit(1)

     if (files.length === 0) return null
@@ -203,9 +249,17 @@ export async function getWorkspaceFile(
     const { getServePathPrefix } = await import('@/lib/uploads')
     const pathPrefix = getServePathPrefix()

+    const file = files[0]
     return {
-      ...files[0],
-      path: `${pathPrefix}${encodeURIComponent(files[0].key)}?context=workspace`,
+      id: file.id,
+      workspaceId: file.workspaceId || workspaceId, // Use query workspaceId as fallback (should never be null for workspace files)
+      name: file.originalName,
+      key: file.key,
+      path: `${pathPrefix}${encodeURIComponent(file.key)}?context=workspace`,
+      size: file.size,
+      type: file.contentType,
+      uploadedBy: file.userId,
+      uploadedAt: file.uploadedAt,
     }
   } catch (error) {
     logger.error(`Failed to get workspace file ${fileId}:`, error)
@@ -254,8 +308,14 @@ export async function deleteWorkspaceFile(workspaceId: string, fileId: string):
     })

     await db
-      .delete(workspaceFile)
-      .where(and(eq(workspaceFile.id, fileId), eq(workspaceFile.workspaceId, workspaceId)))
+      .delete(workspaceFiles)
+      .where(
+        and(
+          eq(workspaceFiles.id, fileId),
+          eq(workspaceFiles.workspaceId, workspaceId),
+          eq(workspaceFiles.context, 'workspace')
+        )
+      )

     try {
       await decrementStorageUsage(fileRecord.uploadedBy, fileRecord.size)
@@ -3,7 +3,7 @@ import { mkdir } from 'fs/promises'
 import path, { join } from 'path'
 import { env } from '@/lib/env'
 import { createLogger } from '@/lib/logs/console/logger'
-import { getStorageProvider, USE_BLOB_STORAGE, USE_S3_STORAGE } from '@/lib/uploads/core/setup'
+import { getStorageProvider, USE_BLOB_STORAGE, USE_S3_STORAGE } from '@/lib/uploads/config'

 const logger = createLogger('UploadsSetup')

@@ -1,106 +0,0 @@
-import { env } from '@/lib/env'
-
-// Client-safe configuration - no Node.js modules
-export const UPLOAD_DIR = '/uploads'
-
-// Check if S3 is configured (has required credentials)
-const hasS3Config = !!(env.S3_BUCKET_NAME && env.AWS_REGION)
-
-// Check if Azure Blob is configured (has required credentials)
-const hasBlobConfig = !!(
-  env.AZURE_STORAGE_CONTAINER_NAME &&
-  ((env.AZURE_ACCOUNT_NAME && env.AZURE_ACCOUNT_KEY) || env.AZURE_CONNECTION_STRING)
-)
-
-// Storage configuration flags - auto-detect based on available credentials
-// Priority: Blob > S3 > Local (if both are configured, Blob takes priority)
-export const USE_BLOB_STORAGE = hasBlobConfig
-export const USE_S3_STORAGE = hasS3Config && !USE_BLOB_STORAGE
-
-export const S3_CONFIG = {
-  bucket: env.S3_BUCKET_NAME || '',
-  region: env.AWS_REGION || '',
-}
-
-export const BLOB_CONFIG = {
-  accountName: env.AZURE_ACCOUNT_NAME || '',
-  accountKey: env.AZURE_ACCOUNT_KEY || '',
-  connectionString: env.AZURE_CONNECTION_STRING || '',
-  containerName: env.AZURE_STORAGE_CONTAINER_NAME || '',
-}
-
-export const S3_KB_CONFIG = {
-  bucket: env.S3_KB_BUCKET_NAME || '',
-  region: env.AWS_REGION || '',
-}
-
-export const S3_EXECUTION_FILES_CONFIG = {
-  bucket: env.S3_EXECUTION_FILES_BUCKET_NAME || 'sim-execution-files',
-  region: env.AWS_REGION || '',
-}
-
-export const BLOB_KB_CONFIG = {
-  accountName: env.AZURE_ACCOUNT_NAME || '',
-  accountKey: env.AZURE_ACCOUNT_KEY || '',
-  connectionString: env.AZURE_CONNECTION_STRING || '',
-  containerName: env.AZURE_STORAGE_KB_CONTAINER_NAME || '',
-}
-
-export const BLOB_EXECUTION_FILES_CONFIG = {
-  accountName: env.AZURE_ACCOUNT_NAME || '',
-  accountKey: env.AZURE_ACCOUNT_KEY || '',
-  connectionString: env.AZURE_CONNECTION_STRING || '',
-  containerName: env.AZURE_STORAGE_EXECUTION_FILES_CONTAINER_NAME || 'sim-execution-files',
-}
-
-export const S3_CHAT_CONFIG = {
-  bucket: env.S3_CHAT_BUCKET_NAME || '',
-  region: env.AWS_REGION || '',
-}
-
-export const BLOB_CHAT_CONFIG = {
-  accountName: env.AZURE_ACCOUNT_NAME || '',
-  accountKey: env.AZURE_ACCOUNT_KEY || '',
-  connectionString: env.AZURE_CONNECTION_STRING || '',
-  containerName: env.AZURE_STORAGE_CHAT_CONTAINER_NAME || '',
-}
-
-export const S3_COPILOT_CONFIG = {
-  bucket: env.S3_COPILOT_BUCKET_NAME || '',
-  region: env.AWS_REGION || '',
-}
-
-export const BLOB_COPILOT_CONFIG = {
-  accountName: env.AZURE_ACCOUNT_NAME || '',
-  accountKey: env.AZURE_ACCOUNT_KEY || '',
-  connectionString: env.AZURE_CONNECTION_STRING || '',
-  containerName: env.AZURE_STORAGE_COPILOT_CONTAINER_NAME || '',
-}
-
-export const S3_PROFILE_PICTURES_CONFIG = {
-  bucket: env.S3_PROFILE_PICTURES_BUCKET_NAME || '',
-  region: env.AWS_REGION || '',
-}
-
-export const BLOB_PROFILE_PICTURES_CONFIG = {
-  accountName: env.AZURE_ACCOUNT_NAME || '',
-  accountKey: env.AZURE_ACCOUNT_KEY || '',
-  connectionString: env.AZURE_CONNECTION_STRING || '',
-  containerName: env.AZURE_STORAGE_PROFILE_PICTURES_CONTAINER_NAME || '',
-}
-
-/**
- * Get the current storage provider as a human-readable string
- */
-export function getStorageProvider(): 'Azure Blob' | 'S3' | 'Local' {
-  if (USE_BLOB_STORAGE) return 'Azure Blob'
-  if (USE_S3_STORAGE) return 'S3'
-  return 'Local'
-}
-
-/**
- * Check if we're using any cloud storage (S3 or Blob)
- */
-export function isUsingCloudStorage(): boolean {
-  return USE_S3_STORAGE || USE_BLOB_STORAGE
-}
@@ -1,28 +1,35 @@
 import { createLogger } from '@/lib/logs/console/logger'
-import { USE_BLOB_STORAGE, USE_S3_STORAGE } from '@/lib/uploads/core/setup'
-import type { CustomBlobConfig } from '@/lib/uploads/providers/blob/blob-client'
-import type { CustomS3Config } from '@/lib/uploads/providers/s3/s3-client'
+import { USE_BLOB_STORAGE, USE_S3_STORAGE } from '@/lib/uploads/config'
+import type { BlobConfig } from '@/lib/uploads/providers/blob/types'
+import type { S3Config } from '@/lib/uploads/providers/s3/types'
+import type { FileInfo, StorageConfig } from '@/lib/uploads/shared/types'
+import { sanitizeFileKey } from '@/lib/uploads/utils/file-utils'

 const logger = createLogger('StorageClient')

-// Client-safe type definitions
-export type FileInfo = {
-  path: string
-  key: string
-  name: string
-  size: number
-  type: string
-}
-
-export type CustomStorageConfig = {
-  // S3 config
-  bucket?: string
-  region?: string
-  // Blob config
-  containerName?: string
-  accountName?: string
-  accountKey?: string
-  connectionString?: string
-}
+export type { FileInfo, StorageConfig } from '@/lib/uploads/shared/types'
+
+/**
+ * Validate and resolve local file path ensuring it's within the allowed directory
+ * @param key File key/name
+ * @param uploadDir Upload directory path
+ * @returns Resolved file path
+ * @throws Error if path is invalid or outside allowed directory
+ */
+async function validateLocalFilePath(key: string, uploadDir: string): Promise<string> {
+  const { join, resolve, sep } = await import('path')
+
+  const safeKey = sanitizeFileKey(key)
+  const filePath = join(uploadDir, safeKey)
+
+  const resolvedPath = resolve(filePath)
+  const allowedDir = resolve(uploadDir)
+
+  if (!resolvedPath.startsWith(allowedDir + sep) && resolvedPath !== allowedDir) {
+    throw new Error('Invalid file path')
+  }
+
+  return filePath
+}

 /**
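A usage sketch for the new path guard above (paths illustrative). The key is first sanitized; if the joined path still resolves outside the upload directory, the guard throws rather than serving the file:

    // Well-formed key resolves inside the upload dir and is returned:
    await validateLocalFilePath('abc-uuid-report.pdf', '/uploads')
    // -> '/uploads/abc-uuid-report.pdf'
    // Any key whose resolved path escapes '/uploads' fails the
    // startsWith(allowedDir + sep) check and throws 'Invalid file path'.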
@@ -53,7 +60,7 @@ export async function uploadFile(
   file: Buffer,
   fileName: string,
   contentType: string,
-  customConfig: CustomStorageConfig,
+  customConfig: StorageConfig,
   size?: number
 ): Promise<FileInfo>
 
@@ -61,16 +68,25 @@ export async function uploadFile(
   file: Buffer,
   fileName: string,
   contentType: string,
-  configOrSize?: CustomStorageConfig | number,
+  configOrSize?: StorageConfig | number,
   size?: number
 ): Promise<FileInfo> {
   if (USE_BLOB_STORAGE) {
-    logger.info(`Uploading file to Azure Blob Storage: ${fileName}`)
-    const { uploadToBlob } = await import('@/lib/uploads/providers/blob/blob-client')
+    const { uploadToBlob } = await import('@/lib/uploads/providers/blob/client')
     if (typeof configOrSize === 'object') {
-      const blobConfig: CustomBlobConfig = {
-        containerName: configOrSize.containerName!,
-        accountName: configOrSize.accountName!,
+      if (!configOrSize.containerName || !configOrSize.accountName) {
+        throw new Error(
+          'Blob configuration missing required properties: containerName and accountName'
+        )
+      }
+      if (!configOrSize.connectionString && !configOrSize.accountKey) {
+        throw new Error(
+          'Blob configuration missing authentication: either connectionString or accountKey must be provided'
+        )
+      }
+      const blobConfig: BlobConfig = {
+        containerName: configOrSize.containerName,
+        accountName: configOrSize.accountName,
         accountKey: configOrSize.accountKey,
         connectionString: configOrSize.connectionString,
       }
@@ -80,25 +96,26 @@ export async function uploadFile(
   }
 
   if (USE_S3_STORAGE) {
-    logger.info(`Uploading file to S3: ${fileName}`)
-    const { uploadToS3 } = await import('@/lib/uploads/providers/s3/s3-client')
+    const { uploadToS3 } = await import('@/lib/uploads/providers/s3/client')
     if (typeof configOrSize === 'object') {
-      const s3Config: CustomS3Config = {
-        bucket: configOrSize.bucket!,
-        region: configOrSize.region!,
+      if (!configOrSize.bucket || !configOrSize.region) {
+        throw new Error('S3 configuration missing required properties: bucket and region')
+      }
+      const s3Config: S3Config = {
+        bucket: configOrSize.bucket,
+        region: configOrSize.region,
       }
       return uploadToS3(file, fileName, contentType, s3Config, size)
     }
     return uploadToS3(file, fileName, contentType, configOrSize)
   }
 
-  logger.info(`Uploading file to local storage: ${fileName}`)
   const { writeFile } = await import('fs/promises')
   const { join } = await import('path')
   const { v4: uuidv4 } = await import('uuid')
   const { UPLOAD_DIR_SERVER } = await import('@/lib/uploads/core/setup.server')
 
-  const safeFileName = fileName.replace(/[^a-zA-Z0-9.-]/g, '_').replace(/\.\./g, '')
+  const safeFileName = sanitizeFileKey(fileName)
   const uniqueKey = `${uuidv4()}-${safeFileName}`
   const filePath = join(UPLOAD_DIR_SERVER, uniqueKey)
 
@@ -106,9 +123,7 @@ export async function uploadFile(
     await writeFile(filePath, file)
   } catch (error) {
     logger.error(`Failed to write file to local storage: ${fileName}`, error)
-    throw new Error(
-      `Failed to write file to local storage: ${error instanceof Error ? error.message : 'Unknown error'}`
-    )
+    throw error
   }
 
   const fileSize = typeof configOrSize === 'number' ? configOrSize : size || file.length
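With the overload change, a caller-supplied `StorageConfig` is validated before use instead of being force-unwrapped with `!`. A hedged usage sketch (the bucket and region values are placeholders):

```ts
import { uploadFile } from '@/lib/uploads/core/storage-client'

async function demo(file: Buffer) {
  // Custom-config overload: missing bucket/region now throws up front.
  await uploadFile(file, 'hello.txt', 'text/plain', {
    bucket: 'example-bucket', // placeholder
    region: 'us-east-1', // placeholder
  })

  // The size-only overload is unchanged.
  await uploadFile(file, 'hello.txt', 'text/plain', file.length)
}
```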
@@ -135,19 +150,25 @@ export async function downloadFile(key: string): Promise<Buffer>
  * @param customConfig Custom storage configuration
  * @returns File buffer
  */
-export async function downloadFile(key: string, customConfig: CustomStorageConfig): Promise<Buffer>
+export async function downloadFile(key: string, customConfig: StorageConfig): Promise<Buffer>
 
-export async function downloadFile(
-  key: string,
-  customConfig?: CustomStorageConfig
-): Promise<Buffer> {
+export async function downloadFile(key: string, customConfig?: StorageConfig): Promise<Buffer> {
   if (USE_BLOB_STORAGE) {
-    logger.info(`Downloading file from Azure Blob Storage: ${key}`)
-    const { downloadFromBlob } = await import('@/lib/uploads/providers/blob/blob-client')
+    const { downloadFromBlob } = await import('@/lib/uploads/providers/blob/client')
     if (customConfig) {
-      const blobConfig: CustomBlobConfig = {
-        containerName: customConfig.containerName!,
-        accountName: customConfig.accountName!,
+      if (!customConfig.containerName || !customConfig.accountName) {
+        throw new Error(
+          'Blob configuration missing required properties: containerName and accountName'
+        )
+      }
+      if (!customConfig.connectionString && !customConfig.accountKey) {
+        throw new Error(
+          'Blob configuration missing authentication: either connectionString or accountKey must be provided'
+        )
+      }
+      const blobConfig: BlobConfig = {
+        containerName: customConfig.containerName,
+        accountName: customConfig.accountName,
         accountKey: customConfig.accountKey,
         connectionString: customConfig.connectionString,
       }
@@ -157,31 +178,24 @@ export async function downloadFile(
   }
 
   if (USE_S3_STORAGE) {
-    logger.info(`Downloading file from S3: ${key}`)
-    const { downloadFromS3 } = await import('@/lib/uploads/providers/s3/s3-client')
+    const { downloadFromS3 } = await import('@/lib/uploads/providers/s3/client')
    if (customConfig) {
-      const s3Config: CustomS3Config = {
-        bucket: customConfig.bucket!,
-        region: customConfig.region!,
+      if (!customConfig.bucket || !customConfig.region) {
+        throw new Error('S3 configuration missing required properties: bucket and region')
+      }
+      const s3Config: S3Config = {
+        bucket: customConfig.bucket,
+        region: customConfig.region,
       }
       return downloadFromS3(key, s3Config)
     }
     return downloadFromS3(key)
   }
 
-  logger.info(`Downloading file from local storage: ${key}`)
   const { readFile } = await import('fs/promises')
-  const { join, resolve, sep } = await import('path')
   const { UPLOAD_DIR_SERVER } = await import('@/lib/uploads/core/setup.server')
 
-  const safeKey = key.replace(/\.\./g, '').replace(/[/\\]/g, '')
-  const filePath = join(UPLOAD_DIR_SERVER, safeKey)
-
-  const resolvedPath = resolve(filePath)
-  const allowedDir = resolve(UPLOAD_DIR_SERVER)
-  if (!resolvedPath.startsWith(allowedDir + sep) && resolvedPath !== allowedDir) {
-    throw new Error('Invalid file path')
-  }
+  const filePath = await validateLocalFilePath(key, UPLOAD_DIR_SERVER)
 
   try {
     return await readFile(filePath)
@@ -199,40 +213,31 @@ export async function downloadFile(
  */
 export async function deleteFile(key: string): Promise<void> {
   if (USE_BLOB_STORAGE) {
-    logger.info(`Deleting file from Azure Blob Storage: ${key}`)
-    const { deleteFromBlob } = await import('@/lib/uploads/providers/blob/blob-client')
+    const { deleteFromBlob } = await import('@/lib/uploads/providers/blob/client')
     return deleteFromBlob(key)
   }
 
   if (USE_S3_STORAGE) {
-    logger.info(`Deleting file from S3: ${key}`)
-    const { deleteFromS3 } = await import('@/lib/uploads/providers/s3/s3-client')
+    const { deleteFromS3 } = await import('@/lib/uploads/providers/s3/client')
     return deleteFromS3(key)
   }
 
-  logger.info(`Deleting file from local storage: ${key}`)
   const { unlink } = await import('fs/promises')
-  const { join, resolve, sep } = await import('path')
   const { UPLOAD_DIR_SERVER } = await import('@/lib/uploads/core/setup.server')
 
-  const safeKey = key.replace(/\.\./g, '').replace(/[/\\]/g, '')
-  const filePath = join(UPLOAD_DIR_SERVER, safeKey)
-
-  const resolvedPath = resolve(filePath)
-  const allowedDir = resolve(UPLOAD_DIR_SERVER)
-  if (!resolvedPath.startsWith(allowedDir + sep) && resolvedPath !== allowedDir) {
-    throw new Error('Invalid file path')
-  }
+  const filePath = await validateLocalFilePath(key, UPLOAD_DIR_SERVER)
 
   try {
     await unlink(filePath)
   } catch (error) {
-    if ((error as NodeJS.ErrnoException).code === 'ENOENT') {
-      logger.warn(`File not found during deletion: ${key}`)
-      return
+    // File deletion is idempotent - if file doesn't exist, that's fine
+    if ((error as NodeJS.ErrnoException).code !== 'ENOENT') {
+      throw error
     }
-    throw error
   }
 
+  const { deleteFileMetadata } = await import('../server/metadata')
+  await deleteFileMetadata(key)
 }
 
 /**
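The rewritten catch block makes local deletion idempotent: a missing file (`ENOENT`) is treated as already deleted, and only other errors propagate. The same pattern in isolation:

```ts
import { unlink } from 'fs/promises'

// Idempotent delete: swallow ENOENT, rethrow everything else.
async function deleteIfExists(filePath: string): Promise<void> {
  try {
    await unlink(filePath)
  } catch (error) {
    if ((error as NodeJS.ErrnoException).code !== 'ENOENT') {
      throw error
    }
  }
}
```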
@@ -244,13 +249,6 @@ export function getStorageProvider(): 'blob' | 's3' | 'local' {
   return 'local'
 }
 
-/**
- * Check if we're using cloud storage (either S3 or Blob)
- */
-export function isUsingCloudStorage(): boolean {
-  return USE_BLOB_STORAGE || USE_S3_STORAGE
-}
-
 /**
  * Get the appropriate serve path prefix based on storage provider
  */
@@ -259,3 +257,80 @@ export function getServePathPrefix(): string {
   if (USE_S3_STORAGE) return '/api/files/serve/s3/'
   return '/api/files/serve/'
 }
+
+/**
+ * Get file metadata from storage provider
+ * @param key File key/name
+ * @param customConfig Optional custom storage configuration
+ * @returns File metadata object with userId, workspaceId, originalName, uploadedAt, etc.
+ */
+export async function getFileMetadata(
+  key: string,
+  customConfig?: StorageConfig
+): Promise<Record<string, string>> {
+  const { getFileMetadataByKey } = await import('../server/metadata')
+  const metadataRecord = await getFileMetadataByKey(key)
+
+  if (metadataRecord) {
+    return {
+      userId: metadataRecord.userId,
+      workspaceId: metadataRecord.workspaceId || '',
+      originalName: metadataRecord.originalName,
+      uploadedAt: metadataRecord.uploadedAt.toISOString(),
+      purpose: metadataRecord.context,
+    }
+  }
+
+  if (USE_BLOB_STORAGE) {
+    const { getBlobServiceClient } = await import('@/lib/uploads/providers/blob/client')
+    const { BLOB_CONFIG } = await import('@/lib/uploads/config')
+
+    let blobServiceClient = await getBlobServiceClient()
+    let containerName = BLOB_CONFIG.containerName
+
+    if (customConfig) {
+      const { BlobServiceClient, StorageSharedKeyCredential } = await import('@azure/storage-blob')
+      if (customConfig.connectionString) {
+        blobServiceClient = BlobServiceClient.fromConnectionString(customConfig.connectionString)
+      } else if (customConfig.accountName && customConfig.accountKey) {
+        const credential = new StorageSharedKeyCredential(
+          customConfig.accountName,
+          customConfig.accountKey
+        )
+        blobServiceClient = new BlobServiceClient(
+          `https://${customConfig.accountName}.blob.core.windows.net`,
+          credential
+        )
+      }
+      containerName = customConfig.containerName || containerName
+    }
+
+    const containerClient = blobServiceClient.getContainerClient(containerName)
+    const blockBlobClient = containerClient.getBlockBlobClient(key)
+    const properties = await blockBlobClient.getProperties()
+    return properties.metadata || {}
+  }
+
+  if (USE_S3_STORAGE) {
+    const { getS3Client } = await import('@/lib/uploads/providers/s3/client')
+    const { HeadObjectCommand } = await import('@aws-sdk/client-s3')
+    const { S3_CONFIG } = await import('@/lib/uploads/config')
+
+    const s3Client = getS3Client()
+    const bucket = customConfig?.bucket || S3_CONFIG.bucket
+
+    if (!bucket) {
+      throw new Error('S3 bucket not configured')
+    }
+
+    const command = new HeadObjectCommand({
+      Bucket: bucket,
+      Key: key,
+    })
+
+    const response = await s3Client.send(command)
+    return response.Metadata || {}
+  }
+
+  return {}
+}
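`getFileMetadata` resolves metadata database-first and only falls back to a provider lookup (Blob `getProperties` or S3 `HeadObject`) when no row exists. A hedged usage sketch (the key value and logged fields are placeholders):

```ts
import { getFileMetadata } from '@/lib/uploads/core/storage-client'

async function logOwner(key: string): Promise<void> {
  const metadata = await getFileMetadata(key)
  // When the DB row exists these come from the metadata table;
  // otherwise they are whatever the provider stored on the object.
  console.log(metadata.userId, metadata.originalName, metadata.purpose)
}
```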

@@ -1,44 +1,84 @@
 import { createLogger } from '@/lib/logs/console/logger'
-import { USE_BLOB_STORAGE, USE_S3_STORAGE } from '@/lib/uploads/core/setup'
-import { getStorageConfig, type StorageContext } from './config-resolver'
-import type { FileInfo } from './storage-client'
+import { getStorageConfig, USE_BLOB_STORAGE, USE_S3_STORAGE } from '@/lib/uploads/config'
+import type { BlobConfig } from '@/lib/uploads/providers/blob/types'
+import type { S3Config } from '@/lib/uploads/providers/s3/types'
+import type {
+  DeleteFileOptions,
+  DownloadFileOptions,
+  FileInfo,
+  GeneratePresignedUrlOptions,
+  PresignedUrlResponse,
+  StorageConfig,
+  StorageContext,
+  UploadFileOptions,
+} from '@/lib/uploads/shared/types'
+import {
+  sanitizeFileKey,
+  sanitizeFilenameForMetadata,
+  sanitizeStorageMetadata,
+} from '@/lib/uploads/utils/file-utils'
 
 const logger = createLogger('StorageService')
 
-export interface UploadFileOptions {
-  file: Buffer
-  fileName: string
-  contentType: string
-  context: StorageContext
-  preserveKey?: boolean // Skip timestamp prefix (for workspace/execution files)
-  customKey?: string // Provide exact key to use (overrides fileName)
-  metadata?: Record<string, string>
+/**
+ * Create a Blob config from StorageConfig
+ * @throws Error if required properties are missing
+ */
+function createBlobConfig(config: StorageConfig): BlobConfig {
+  if (!config.containerName || !config.accountName) {
+    throw new Error('Blob configuration missing required properties: containerName and accountName')
+  }
+
+  if (!config.connectionString && !config.accountKey) {
+    throw new Error(
+      'Blob configuration missing authentication: either connectionString or accountKey must be provided'
+    )
+  }
+
+  return {
+    containerName: config.containerName,
+    accountName: config.accountName,
+    accountKey: config.accountKey,
+    connectionString: config.connectionString,
+  }
 }
 
-export interface DownloadFileOptions {
-  key: string
-  context?: StorageContext
+/**
+ * Create an S3 config from StorageConfig
+ * @throws Error if required properties are missing
+ */
+function createS3Config(config: StorageConfig): S3Config {
+  if (!config.bucket || !config.region) {
+    throw new Error('S3 configuration missing required properties: bucket and region')
+  }
+
+  return {
+    bucket: config.bucket,
+    region: config.region,
+  }
 }
 
-export interface DeleteFileOptions {
-  key: string
-  context?: StorageContext
-}
-
-export interface GeneratePresignedUrlOptions {
-  fileName: string
-  contentType: string
-  fileSize: number
-  context: StorageContext
-  userId?: string
-  expirationSeconds?: number
-  metadata?: Record<string, string>
-}
-
-export interface PresignedUrlResponse {
-  url: string
-  key: string
-  uploadHeaders?: Record<string, string>
+/**
+ * Insert file metadata into the database
+ */
+async function insertFileMetadataHelper(
+  key: string,
+  metadata: Record<string, string>,
+  context: StorageContext,
+  fileName: string,
+  contentType: string,
+  fileSize: number
+): Promise<void> {
+  const { insertFileMetadata } = await import('../server/metadata')
+  await insertFileMetadata({
+    key,
+    userId: metadata.userId,
+    workspaceId: metadata.workspaceId || null,
+    context,
+    originalName: metadata.originalName || fileName,
+    contentType,
+    size: fileSize,
+  })
 }
 
 /**
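Both helpers apply the same narrowing pattern: the shared `StorageConfig` has every field optional, so required fields are checked once and the return value is typed with them required. A standalone sketch of the S3 case (the local type names below are illustrative stand-ins for the real types):

```ts
// Sketch: narrow an all-optional config into a provider config, failing fast.
type StorageConfigLike = { bucket?: string; region?: string }
type S3ConfigLike = { bucket: string; region: string }

function toS3Config(config: StorageConfigLike): S3ConfigLike {
  if (!config.bucket || !config.region) {
    throw new Error('S3 configuration missing required properties: bucket and region')
  }
  // After the guard, TypeScript knows both fields are strings.
  return { bucket: config.bucket, region: config.region }
}
```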
@@ -54,44 +94,69 @@ export async function uploadFile(options: UploadFileOptions): Promise<FileInfo>
   const keyToUse = customKey || fileName
 
   if (USE_BLOB_STORAGE) {
-    const { uploadToBlob } = await import('../providers/blob/blob-client')
-    const blobConfig = {
-      containerName: config.containerName!,
-      accountName: config.accountName!,
-      accountKey: config.accountKey,
-      connectionString: config.connectionString,
+    const { uploadToBlob } = await import('@/lib/uploads/providers/blob/client')
+    const uploadResult = await uploadToBlob(
+      file,
+      keyToUse,
+      contentType,
+      createBlobConfig(config),
+      file.length,
+      metadata
+    )
+
+    if (metadata) {
+      await insertFileMetadataHelper(
+        uploadResult.key,
+        metadata,
+        context,
+        fileName,
+        contentType,
+        file.length
+      )
     }
 
-    return uploadToBlob(file, keyToUse, contentType, blobConfig, file.length)
+    return uploadResult
   }
 
   if (USE_S3_STORAGE) {
-    const { uploadToS3 } = await import('../providers/s3/s3-client')
-    const s3Config = {
-      bucket: config.bucket!,
-      region: config.region!,
+    const { uploadToS3 } = await import('@/lib/uploads/providers/s3/client')
+    const uploadResult = await uploadToS3(
+      file,
+      keyToUse,
+      contentType,
+      createS3Config(config),
+      file.length,
+      preserveKey,
+      metadata
+    )
+
+    if (metadata) {
+      await insertFileMetadataHelper(
+        uploadResult.key,
+        metadata,
+        context,
+        fileName,
+        contentType,
+        file.length
+      )
     }
 
-    return uploadToS3(file, keyToUse, contentType, s3Config, file.length, preserveKey)
+    return uploadResult
   }
 
-  logger.info('Using local file storage')
   const { writeFile } = await import('fs/promises')
   const { join } = await import('path')
   const { v4: uuidv4 } = await import('uuid')
   const { UPLOAD_DIR_SERVER } = await import('./setup.server')
 
-  const safeKey = keyToUse.replace(/[^a-zA-Z0-9.-]/g, '_').replace(/\.\./g, '')
+  const safeKey = sanitizeFileKey(keyToUse)
   const uniqueKey = `${uuidv4()}-${safeKey}`
   const filePath = join(UPLOAD_DIR_SERVER, uniqueKey)
 
-  try {
-    await writeFile(filePath, file)
-  } catch (error) {
-    logger.error(`Failed to write file to local storage: ${fileName}`, error)
-    throw new Error(
-      `Failed to write file to local storage: ${error instanceof Error ? error.message : 'Unknown error'}`
-    )
+  await writeFile(filePath, file)
+
+  if (metadata) {
+    await insertFileMetadataHelper(uniqueKey, metadata, context, fileName, contentType, file.length)
   }
 
   return {
@@ -109,29 +174,17 @@ export async function uploadFile(options: UploadFileOptions): Promise<FileInfo>
 export async function downloadFile(options: DownloadFileOptions): Promise<Buffer> {
   const { key, context } = options
 
-  logger.info(`Downloading file: ${key}${context ? ` (context: ${context})` : ''}`)
-
   if (context) {
     const config = getStorageConfig(context)
 
     if (USE_BLOB_STORAGE) {
-      const { downloadFromBlob } = await import('../providers/blob/blob-client')
-      const blobConfig = {
-        containerName: config.containerName!,
-        accountName: config.accountName!,
-        accountKey: config.accountKey,
-        connectionString: config.connectionString,
-      }
-      return downloadFromBlob(key, blobConfig)
+      const { downloadFromBlob } = await import('@/lib/uploads/providers/blob/client')
+      return downloadFromBlob(key, createBlobConfig(config))
     }
 
     if (USE_S3_STORAGE) {
-      const { downloadFromS3 } = await import('../providers/s3/s3-client')
-      const s3Config = {
-        bucket: config.bucket!,
-        region: config.region!,
-      }
-      return downloadFromS3(key, s3Config)
+      const { downloadFromS3 } = await import('@/lib/uploads/providers/s3/client')
+      return downloadFromS3(key, createS3Config(config))
     }
   }
 
@@ -145,33 +198,21 @@ export async function downloadFile(options: DownloadFileOptions): Promise<Buffer
 export async function deleteFile(options: DeleteFileOptions): Promise<void> {
   const { key, context } = options
 
-  logger.info(`Deleting file: ${key}${context ? ` (context: ${context})` : ''}`)
-
   if (context) {
     const config = getStorageConfig(context)
 
     if (USE_BLOB_STORAGE) {
-      const { deleteFromBlob } = await import('../providers/blob/blob-client')
-      const blobConfig = {
-        containerName: config.containerName!,
-        accountName: config.accountName!,
-        accountKey: config.accountKey,
-        connectionString: config.connectionString,
-      }
-      return deleteFromBlob(key, blobConfig)
+      const { deleteFromBlob } = await import('@/lib/uploads/providers/blob/client')
+      return deleteFromBlob(key, createBlobConfig(config))
     }
 
     if (USE_S3_STORAGE) {
-      const { deleteFromS3 } = await import('../providers/s3/s3-client')
-      const s3Config = {
-        bucket: config.bucket!,
-        region: config.region!,
-      }
-      return deleteFromS3(key, s3Config)
+      const { deleteFromS3 } = await import('@/lib/uploads/providers/s3/client')
+      return deleteFromS3(key, createS3Config(config))
     }
   }
 
-  const { deleteFile: defaultDelete } = await import('./storage-client')
+  const { deleteFile: defaultDelete } = await import('@/lib/uploads/core/storage-client')
   return defaultDelete(key)
 }
 
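A hedged usage sketch of the context-aware service API after this change (the context value and metadata keys are placeholders):

```ts
import * as StorageService from '@/lib/uploads/core/storage-service'

async function roundTrip(file: Buffer) {
  const info = await StorageService.uploadFile({
    file,
    fileName: 'report.pdf',
    contentType: 'application/pdf',
    context: 'workspace', // placeholder StorageContext value
    metadata: { userId: 'user_123', originalName: 'report.pdf' }, // placeholders
  })

  const bytes = await StorageService.downloadFile({ key: info.key, context: 'workspace' })
  await StorageService.deleteFile({ key: info.key, context: 'workspace' })
  return bytes
}
```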
@@ -191,14 +232,12 @@ export async function generatePresignedUploadUrl(
     metadata = {},
   } = options
 
-  logger.info(`Generating presigned upload URL for ${context}: ${fileName}`)
-
   const allMetadata = {
     ...metadata,
-    originalname: fileName,
-    uploadedat: new Date().toISOString(),
+    originalName: fileName,
+    uploadedAt: new Date().toISOString(),
     purpose: context,
-    ...(userId && { userid: userId }),
+    ...(userId && { userId }),
   }
 
   const config = getStorageConfig(context)
@@ -237,7 +276,7 @@ async function generateS3PresignedUrl(
   config: { bucket?: string; region?: string },
   expirationSeconds: number
 ): Promise<PresignedUrlResponse> {
-  const { getS3Client, sanitizeFilenameForMetadata } = await import('../providers/s3/s3-client')
+  const { getS3Client } = await import('@/lib/uploads/providers/s3/client')
   const { PutObjectCommand } = await import('@aws-sdk/client-s3')
   const { getSignedUrl } = await import('@aws-sdk/s3-request-presigner')
 
@@ -245,13 +284,9 @@ async function generateS3PresignedUrl(
     throw new Error('S3 configuration missing bucket or region')
   }
 
-  const sanitizedMetadata: Record<string, string> = {}
-  for (const [key, value] of Object.entries(metadata)) {
-    if (key === 'originalname') {
-      sanitizedMetadata[key] = sanitizeFilenameForMetadata(value)
-    } else {
-      sanitizedMetadata[key] = value
-    }
+  const sanitizedMetadata = sanitizeStorageMetadata(metadata, 2000)
+  if (sanitizedMetadata.originalName) {
+    sanitizedMetadata.originalName = sanitizeFilenameForMetadata(sanitizedMetadata.originalName)
   }
 
   const command = new PutObjectCommand({
@@ -285,7 +320,7 @@ async function generateBlobPresignedUrl(
   },
   expirationSeconds: number
 ): Promise<PresignedUrlResponse> {
-  const { getBlobServiceClient } = await import('../providers/blob/blob-client')
+  const { getBlobServiceClient } = await import('@/lib/uploads/providers/blob/client')
   const { BlobSASPermissions, generateBlobSASQueryParameters, StorageSharedKeyCredential } =
     await import('@azure/storage-blob')
 
@@ -293,7 +328,7 @@ async function generateBlobPresignedUrl(
     throw new Error('Blob configuration missing container name')
   }
 
-  const blobServiceClient = getBlobServiceClient()
+  const blobServiceClient = await getBlobServiceClient()
   const containerClient = blobServiceClient.getContainerClient(config.containerName)
   const blobClient = containerClient.getBlockBlobClient(key)
 
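Two fixes land together here: metadata keys move to consistent camelCase (`originalName`, `uploadedAt`, `userId`), and sanitization is delegated to the shared utils, with only the filename additionally scrubbed to printable ASCII. A rough sketch of that ordering; the filename scrub mirrors the helper removed from the blob client later in this diff, and both function bodies below are illustrative stand-ins for the real utils:

```ts
// Stand-in for the helper in '@/lib/uploads/utils/file-utils'.
function sanitizeFilenameForMetadata(filename: string): string {
  return (
    filename
      .replace(/[^\x20-\x7E]/g, '') // printable ASCII only
      .replace(/["\\]/g, '') // strip characters problematic in HTTP headers
      .replace(/\s+/g, ' ')
      .trim() || 'file' // fallback if everything was stripped
  )
}

function prepareS3Metadata(metadata: Record<string, string>): Record<string, string> {
  // Real code calls sanitizeStorageMetadata(metadata, 2000) first.
  const sanitized = { ...metadata }
  if (sanitized.originalName) {
    sanitized.originalName = sanitizeFilenameForMetadata(sanitized.originalName)
  }
  return sanitized
}
```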
@@ -351,8 +386,6 @@ export async function generateBatchPresignedUploadUrls(
   userId?: string,
   expirationSeconds?: number
 ): Promise<PresignedUrlResponse[]> {
-  logger.info(`Generating ${files.length} presigned upload URLs for ${context}`)
-
   const results: PresignedUrlResponse[] = []
 
   for (const file of files) {
@@ -378,34 +411,16 @@ export async function generatePresignedDownloadUrl(
   context: StorageContext,
   expirationSeconds = 3600
 ): Promise<string> {
-  logger.info(`Generating presigned download URL for ${context}: ${key}`)
-
   const config = getStorageConfig(context)
 
   if (USE_S3_STORAGE) {
-    const { getPresignedUrlWithConfig } = await import('../providers/s3/s3-client')
-    return getPresignedUrlWithConfig(
-      key,
-      {
-        bucket: config.bucket!,
-        region: config.region!,
-      },
-      expirationSeconds
-    )
+    const { getPresignedUrlWithConfig } = await import('@/lib/uploads/providers/s3/client')
+    return getPresignedUrlWithConfig(key, createS3Config(config), expirationSeconds)
   }
 
   if (USE_BLOB_STORAGE) {
-    const { getPresignedUrlWithConfig } = await import('../providers/blob/blob-client')
-    return getPresignedUrlWithConfig(
-      key,
-      {
-        containerName: config.containerName!,
-        accountName: config.accountName!,
-        accountKey: config.accountKey,
-        connectionString: config.connectionString,
-      },
-      expirationSeconds
-    )
+    const { getPresignedUrlWithConfig } = await import('@/lib/uploads/providers/blob/client')
+    return getPresignedUrlWithConfig(key, createBlobConfig(config), expirationSeconds)
   }
 
   return `/api/files/serve/${encodeURIComponent(key)}`

@@ -1,19 +1,21 @@
+export {
+  getStorageConfig,
+  isUsingCloudStorage,
+  type StorageConfig,
+  type StorageContext,
+  UPLOAD_DIR,
+  USE_BLOB_STORAGE,
+  USE_S3_STORAGE,
+} from '@/lib/uploads/config'
 export * as ChatFiles from '@/lib/uploads/contexts/chat'
 export * as CopilotFiles from '@/lib/uploads/contexts/copilot'
 export * as ExecutionFiles from '@/lib/uploads/contexts/execution'
 export * as WorkspaceFiles from '@/lib/uploads/contexts/workspace'
-export { getStorageConfig, type StorageContext } from '@/lib/uploads/core/config-resolver'
 export {
-  UPLOAD_DIR,
-  USE_BLOB_STORAGE,
-  USE_S3_STORAGE,
-} from '@/lib/uploads/core/setup'
-export {
-  type CustomStorageConfig,
   type FileInfo,
+  getFileMetadata,
   getServePathPrefix,
   getStorageProvider,
-  isUsingCloudStorage,
 } from '@/lib/uploads/core/storage-client'
 export * as StorageService from '@/lib/uploads/core/storage-service'
 export {
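For consumers, the net effect of the index change is a single import surface: configuration flags, the config resolver, and `isUsingCloudStorage` all come from `@/lib/uploads/config` via the barrel. A hedged sketch (assuming the barrel resolves as `@/lib/uploads`):

```ts
import { getStorageConfig, isUsingCloudStorage, type StorageContext } from '@/lib/uploads'

function describeStorage(context: StorageContext): string {
  const config = getStorageConfig(context)
  return isUsingCloudStorage()
    ? `cloud storage (bucket: ${config.bucket ?? 'n/a'})`
    : 'local file system'
}
```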

@@ -90,7 +90,7 @@ describe('Azure Blob Storage Client', () => {
 
   describe('uploadToBlob', () => {
     it('should upload a file to Azure Blob Storage', async () => {
-      const { uploadToBlob } = await import('@/lib/uploads/providers/blob/blob-client')
+      const { uploadToBlob } = await import('@/lib/uploads/providers/blob/client')
 
       const testBuffer = Buffer.from('test file content')
       const fileName = 'test-file.txt'
@@ -120,7 +120,7 @@ describe('Azure Blob Storage Client', () => {
     })
 
     it('should handle custom blob configuration', async () => {
-      const { uploadToBlob } = await import('@/lib/uploads/providers/blob/blob-client')
+      const { uploadToBlob } = await import('@/lib/uploads/providers/blob/client')
 
       const testBuffer = Buffer.from('test file content')
       const fileName = 'test-file.txt'
@@ -143,7 +143,7 @@ describe('Azure Blob Storage Client', () => {
 
   describe('downloadFromBlob', () => {
     it('should download a file from Azure Blob Storage', async () => {
-      const { downloadFromBlob } = await import('@/lib/uploads/providers/blob/blob-client')
+      const { downloadFromBlob } = await import('@/lib/uploads/providers/blob/client')
 
       const testKey = 'test-file-key'
       const testContent = Buffer.from('downloaded content')
@@ -172,7 +172,7 @@ describe('Azure Blob Storage Client', () => {
 
   describe('deleteFromBlob', () => {
     it('should delete a file from Azure Blob Storage', async () => {
-      const { deleteFromBlob } = await import('@/lib/uploads/providers/blob/blob-client')
+      const { deleteFromBlob } = await import('@/lib/uploads/providers/blob/client')
 
       const testKey = 'test-file-key'
 
@@ -187,7 +187,7 @@ describe('Azure Blob Storage Client', () => {
 
   describe('getPresignedUrl', () => {
     it('should generate a presigned URL for Azure Blob Storage', async () => {
-      const { getPresignedUrl } = await import('@/lib/uploads/providers/blob/blob-client')
+      const { getPresignedUrl } = await import('@/lib/uploads/providers/blob/client')
 
       const testKey = 'test-file-key'
       const expiresIn = 3600
@@ -211,9 +211,7 @@ describe('Azure Blob Storage Client', () => {
     ]
 
     it.each(testCases)('should sanitize "$input" to "$expected"', async ({ input, expected }) => {
-      const { sanitizeFilenameForMetadata } = await import(
-        '@/lib/uploads/providers/blob/blob-client'
-      )
+      const { sanitizeFilenameForMetadata } = await import('@/lib/uploads/utils/file-utils')
      expect(sanitizeFilenameForMetadata(input)).toBe(expected)
     })
   })
 })

@@ -1,28 +1,31 @@
-import {
-  BlobSASPermissions,
-  BlobServiceClient,
-  type BlockBlobClient,
-  generateBlobSASQueryParameters,
-  StorageSharedKeyCredential,
-} from '@azure/storage-blob'
 import { createLogger } from '@/lib/logs/console/logger'
-import { BLOB_CONFIG } from '@/lib/uploads/core/setup'
+import { BLOB_CONFIG } from '@/lib/uploads/config'
+import type {
+  AzureMultipartPart,
+  AzureMultipartUploadInit,
+  AzurePartUploadUrl,
+  BlobConfig,
+} from '@/lib/uploads/providers/blob/types'
+import type { FileInfo } from '@/lib/uploads/shared/types'
+import { sanitizeStorageMetadata } from '@/lib/uploads/utils/file-utils'
+
+type BlobServiceClientInstance = Awaited<
+  ReturnType<typeof import('@azure/storage-blob').BlobServiceClient.fromConnectionString>
+>
 
 const logger = createLogger('BlobClient')
 
-// Lazily create a single Blob service client instance.
-let _blobServiceClient: BlobServiceClient | null = null
+let _blobServiceClient: BlobServiceClientInstance | null = null
 
-export function getBlobServiceClient(): BlobServiceClient {
+export async function getBlobServiceClient(): Promise<BlobServiceClientInstance> {
   if (_blobServiceClient) return _blobServiceClient
 
+  const { BlobServiceClient, StorageSharedKeyCredential } = await import('@azure/storage-blob')
   const { accountName, accountKey, connectionString } = BLOB_CONFIG
 
   if (connectionString) {
-    // Use connection string if provided
     _blobServiceClient = BlobServiceClient.fromConnectionString(connectionString)
   } else if (accountName && accountKey) {
-    // Use account name and key
     const sharedKeyCredential = new StorageSharedKeyCredential(accountName, accountKey)
     _blobServiceClient = new BlobServiceClient(
       `https://${accountName}.blob.core.windows.net`,
@@ -37,97 +40,40 @@ export function getBlobServiceClient(): BlobServiceClient {
   return _blobServiceClient
 }
 
-/**
- * Sanitize a filename for use in blob metadata headers
- * Azure blob metadata headers must contain only ASCII printable characters
- * and cannot contain certain special characters
- */
-export function sanitizeFilenameForMetadata(filename: string): string {
-  return (
-    filename
-      // Remove non-ASCII characters (keep only printable ASCII 0x20-0x7E)
-      .replace(/[^\x20-\x7E]/g, '')
-      // Remove characters that are problematic in HTTP headers
-      .replace(/["\\]/g, '')
-      // Replace multiple spaces with single space
-      .replace(/\s+/g, ' ')
-      // Trim whitespace
-      .trim() ||
-    // Provide fallback if completely sanitized
-    'file'
-  )
-}
-
-/**
- * File information structure
- */
-export interface FileInfo {
-  path: string // Path to access the file
-  key: string // Blob name or local filename
-  name: string // Original filename
-  size: number // File size in bytes
-  type: string // MIME type
-}
-
-/**
- * Custom Blob configuration
- */
-export interface CustomBlobConfig {
-  containerName: string
-  accountName: string
-  accountKey?: string
-  connectionString?: string
-}
-
 /**
  * Upload a file to Azure Blob Storage
  * @param file Buffer containing file data
  * @param fileName Original file name
  * @param contentType MIME type of the file
- * @param size File size in bytes (optional, will use buffer length if not provided)
+ * @param configOrSize Custom Blob configuration OR file size in bytes (optional)
+ * @param size File size in bytes (required if configOrSize is BlobConfig, optional otherwise)
+ * @param metadata Optional metadata to store with the file
  * @returns Object with file information
 */
 export async function uploadToBlob(
   file: Buffer,
   fileName: string,
   contentType: string,
-  size?: number
-): Promise<FileInfo>
-
-/**
- * Upload a file to Azure Blob Storage with custom container configuration
- * @param file Buffer containing file data
- * @param fileName Original file name
- * @param contentType MIME type of the file
- * @param customConfig Custom Blob configuration (container and account info)
- * @param size File size in bytes (optional, will use buffer length if not provided)
- * @returns Object with file information
- */
-export async function uploadToBlob(
-  file: Buffer,
-  fileName: string,
-  contentType: string,
-  customConfig: CustomBlobConfig,
-  size?: number
+  configOrSize?: BlobConfig | number,
+  size?: number,
+  metadata?: Record<string, string>
 ): Promise<FileInfo>
 
 export async function uploadToBlob(
   file: Buffer,
   fileName: string,
   contentType: string,
-  configOrSize?: CustomBlobConfig | number,
-  size?: number
+  configOrSize?: BlobConfig | number,
+  size?: number,
+  metadata?: Record<string, string>
 ): Promise<FileInfo> {
-  // Handle overloaded parameters
-  let config: CustomBlobConfig
+  let config: BlobConfig
   let fileSize: number
 
   if (typeof configOrSize === 'object') {
-    // Custom config provided
    config = configOrSize
     fileSize = size ?? file.length
   } else {
-    // Use default config
     config = {
       containerName: BLOB_CONFIG.containerName,
       accountName: BLOB_CONFIG.accountName,
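`getBlobServiceClient` becomes async because the Azure SDK is now loaded with a dynamic import, while the module-level variable still memoizes the instance; the `BlobServiceClientInstance` alias recovers the client type without a static import, keeping the SDK out of client bundles. The general shape of the pattern, as a standalone sketch:

```ts
// Recover the instance type from the SDK without importing it statically.
type BlobClient = ReturnType<
  typeof import('@azure/storage-blob').BlobServiceClient.fromConnectionString
>

let cached: BlobClient | null = null

// Lazy async singleton: the SDK is imported on first call only,
// and later calls reuse the cached instance.
async function getClient(connectionString: string): Promise<BlobClient> {
  if (cached) return cached
  const { BlobServiceClient } = await import('@azure/storage-blob')
  cached = BlobServiceClient.fromConnectionString(connectionString)
  return cached
}
```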
@@ -140,18 +86,24 @@ export async function uploadToBlob(
   const safeFileName = fileName.replace(/\s+/g, '-') // Replace spaces with hyphens
   const uniqueKey = `${Date.now()}-${safeFileName}`
 
-  const blobServiceClient = getBlobServiceClient()
+  const blobServiceClient = await getBlobServiceClient()
   const containerClient = blobServiceClient.getContainerClient(config.containerName)
   const blockBlobClient = containerClient.getBlockBlobClient(uniqueKey)
 
+  const blobMetadata: Record<string, string> = {
+    originalName: encodeURIComponent(fileName), // Encode filename to prevent invalid characters in HTTP headers
+    uploadedAt: new Date().toISOString(),
+  }
+
+  if (metadata) {
+    Object.assign(blobMetadata, sanitizeStorageMetadata(metadata, 8000))
+  }
+
   await blockBlobClient.upload(file, fileSize, {
     blobHTTPHeaders: {
       blobContentType: contentType,
     },
-    metadata: {
-      originalName: encodeURIComponent(fileName), // Encode filename to prevent invalid characters in HTTP headers
-      uploadedAt: new Date().toISOString(),
-    },
+    metadata: blobMetadata,
   })
 
   const servePath = `/api/files/serve/blob/${encodeURIComponent(uniqueKey)}`
@@ -159,7 +111,7 @@ export async function uploadToBlob(
   return {
     path: servePath,
     key: uniqueKey,
-    name: fileName, // Return the actual original filename in the response
+    name: fileName,
     size: fileSize,
     type: contentType,
   }
@@ -172,7 +124,9 @@ export async function uploadToBlob(
  * @returns Presigned URL
  */
 export async function getPresignedUrl(key: string, expiresIn = 3600) {
-  const blobServiceClient = getBlobServiceClient()
+  const { BlobSASPermissions, generateBlobSASQueryParameters, StorageSharedKeyCredential } =
+    await import('@azure/storage-blob')
+  const blobServiceClient = await getBlobServiceClient()
   const containerClient = blobServiceClient.getContainerClient(BLOB_CONFIG.containerName)
   const blockBlobClient = containerClient.getBlockBlobClient(key)
 
@@ -207,10 +161,16 @@ export async function getPresignedUrl(key: string, expiresIn = 3600) {
  */
 export async function getPresignedUrlWithConfig(
   key: string,
-  customConfig: CustomBlobConfig,
+  customConfig: BlobConfig,
   expiresIn = 3600
 ) {
-  let tempBlobServiceClient: BlobServiceClient
+  const {
+    BlobServiceClient,
+    BlobSASPermissions,
+    generateBlobSASQueryParameters,
+    StorageSharedKeyCredential,
+  } = await import('@azure/storage-blob')
+  let tempBlobServiceClient: BlobServiceClientInstance
 
   if (customConfig.connectionString) {
     tempBlobServiceClient = BlobServiceClient.fromConnectionString(customConfig.connectionString)
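`getPresignedUrl` and `getPresignedUrlWithConfig` now pull the SAS helpers from the same dynamic import. For reference, the rough shape of shared-key SAS presigning with this SDK (account name and key are placeholders, and error handling is omitted):

```ts
async function presignRead(containerName: string, blobName: string, expiresIn: number) {
  const { BlobSASPermissions, generateBlobSASQueryParameters, StorageSharedKeyCredential } =
    await import('@azure/storage-blob')

  const credential = new StorageSharedKeyCredential('accountname', 'accountkey') // placeholders
  const sas = generateBlobSASQueryParameters(
    {
      containerName,
      blobName,
      permissions: BlobSASPermissions.parse('r'), // read-only
      expiresOn: new Date(Date.now() + expiresIn * 1000),
    },
    credential
  )
  return `https://accountname.blob.core.windows.net/${containerName}/${blobName}?${sas}`
}
```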
@@ -267,13 +227,11 @@ export async function downloadFromBlob(key: string): Promise<Buffer>
  * @param customConfig Custom Blob configuration
  * @returns File buffer
  */
-export async function downloadFromBlob(key: string, customConfig: CustomBlobConfig): Promise<Buffer>
+export async function downloadFromBlob(key: string, customConfig: BlobConfig): Promise<Buffer>
 
-export async function downloadFromBlob(
-  key: string,
-  customConfig?: CustomBlobConfig
-): Promise<Buffer> {
-  let blobServiceClient: BlobServiceClient
+export async function downloadFromBlob(key: string, customConfig?: BlobConfig): Promise<Buffer> {
+  const { BlobServiceClient, StorageSharedKeyCredential } = await import('@azure/storage-blob')
+  let blobServiceClient: BlobServiceClientInstance
   let containerName: string
 
   if (customConfig) {
@@ -293,7 +251,7 @@ export async function downloadFromBlob(
     }
     containerName = customConfig.containerName
   } else {
-    blobServiceClient = getBlobServiceClient()
+    blobServiceClient = await getBlobServiceClient()
     containerName = BLOB_CONFIG.containerName
   }
 
@@ -320,10 +278,11 @@ export async function deleteFromBlob(key: string): Promise<void>
  * @param key Blob name
  * @param customConfig Custom Blob configuration
  */
-export async function deleteFromBlob(key: string, customConfig: CustomBlobConfig): Promise<void>
+export async function deleteFromBlob(key: string, customConfig: BlobConfig): Promise<void>
 
-export async function deleteFromBlob(key: string, customConfig?: CustomBlobConfig): Promise<void> {
-  let blobServiceClient: BlobServiceClient
+export async function deleteFromBlob(key: string, customConfig?: BlobConfig): Promise<void> {
+  const { BlobServiceClient, StorageSharedKeyCredential } = await import('@azure/storage-blob')
+  let blobServiceClient: BlobServiceClientInstance
   let containerName: string
 
   if (customConfig) {
@@ -343,7 +302,7 @@ export async function deleteFromBlob(key: string, customConfig?: CustomBlobConfi
     }
     containerName = customConfig.containerName
   } else {
-    blobServiceClient = getBlobServiceClient()
+    blobServiceClient = await getBlobServiceClient()
     containerName = BLOB_CONFIG.containerName
   }
 
@@ -369,40 +328,16 @@ async function streamToBuffer(readableStream: NodeJS.ReadableStream): Promise<Bu
   })
 }
 
-// Multipart upload interfaces
-export interface AzureMultipartUploadInit {
-  fileName: string
-  contentType: string
-  fileSize: number
-  customConfig?: CustomBlobConfig
-}
-
-export interface AzureMultipartUploadResult {
-  uploadId: string
-  key: string
-  blockBlobClient: BlockBlobClient
-}
-
-export interface AzurePartUploadUrl {
-  partNumber: number
-  blockId: string
-  url: string
-}
-
-export interface AzureMultipartPart {
-  blockId: string
-  partNumber: number
-}
-
 /**
  * Initiate a multipart upload for Azure Blob Storage
  */
 export async function initiateMultipartUpload(
   options: AzureMultipartUploadInit
 ): Promise<{ uploadId: string; key: string }> {
+  const { BlobServiceClient, StorageSharedKeyCredential } = await import('@azure/storage-blob')
   const { fileName, contentType, customConfig } = options
 
-  let blobServiceClient: BlobServiceClient
+  let blobServiceClient: BlobServiceClientInstance
   let containerName: string
 
   if (customConfig) {
@@ -422,23 +357,19 @@ export async function initiateMultipartUpload(
     }
     containerName = customConfig.containerName
   } else {
-    blobServiceClient = getBlobServiceClient()
+    blobServiceClient = await getBlobServiceClient()
     containerName = BLOB_CONFIG.containerName
   }
 
-  // Create unique key for the blob
   const safeFileName = fileName.replace(/\s+/g, '-').replace(/[^a-zA-Z0-9.-]/g, '_')
   const { v4: uuidv4 } = await import('uuid')
   const uniqueKey = `kb/${uuidv4()}-${safeFileName}`
 
-  // Generate a unique upload ID (Azure doesn't have native multipart like S3)
   const uploadId = uuidv4()
 
-  // Store the blob client reference for later use (in a real implementation, you'd use Redis or similar)
   const containerClient = blobServiceClient.getContainerClient(containerName)
   const blockBlobClient = containerClient.getBlockBlobClient(uniqueKey)
 
-  // Set metadata to track the multipart upload
   await blockBlobClient.setMetadata({
     uploadId,
     fileName: encodeURIComponent(fileName),
@@ -458,11 +389,16 @@ export async function initiateMultipartUpload(
|
|||||||
*/
|
*/
|
||||||
export async function getMultipartPartUrls(
|
export async function getMultipartPartUrls(
|
||||||
key: string,
|
key: string,
|
||||||
_uploadId: string, // Not used in Azure Blob, kept for interface consistency
|
|
||||||
partNumbers: number[],
|
partNumbers: number[],
|
||||||
customConfig?: CustomBlobConfig
|
customConfig?: BlobConfig
|
||||||
): Promise<AzurePartUploadUrl[]> {
|
): Promise<AzurePartUploadUrl[]> {
|
||||||
let blobServiceClient: BlobServiceClient
|
const {
|
||||||
|
BlobServiceClient,
|
||||||
|
BlobSASPermissions,
|
||||||
|
generateBlobSASQueryParameters,
|
||||||
|
StorageSharedKeyCredential,
|
||||||
|
} = await import('@azure/storage-blob')
|
||||||
|
let blobServiceClient: BlobServiceClientInstance
|
||||||
let containerName: string
|
let containerName: string
|
||||||
let accountName: string
|
let accountName: string
|
||||||
let accountKey: string
|
let accountKey: string
|
||||||
@@ -470,7 +406,6 @@ export async function getMultipartPartUrls(
|
|||||||
if (customConfig) {
|
if (customConfig) {
|
||||||
if (customConfig.connectionString) {
|
if (customConfig.connectionString) {
|
||||||
blobServiceClient = BlobServiceClient.fromConnectionString(customConfig.connectionString)
|
blobServiceClient = BlobServiceClient.fromConnectionString(customConfig.connectionString)
|
||||||
// Extract account name from connection string
|
|
||||||
const match = customConfig.connectionString.match(/AccountName=([^;]+)/)
|
const match = customConfig.connectionString.match(/AccountName=([^;]+)/)
|
||||||
if (!match) throw new Error('Cannot extract account name from connection string')
|
if (!match) throw new Error('Cannot extract account name from connection string')
|
||||||
accountName = match[1]
|
accountName = match[1]
|
||||||
@@ -494,7 +429,7 @@ export async function getMultipartPartUrls(
|
|||||||
}
|
}
|
||||||
containerName = customConfig.containerName
|
containerName = customConfig.containerName
|
||||||
} else {
|
} else {
|
||||||
blobServiceClient = getBlobServiceClient()
|
blobServiceClient = await getBlobServiceClient()
|
||||||
containerName = BLOB_CONFIG.containerName
|
containerName = BLOB_CONFIG.containerName
|
||||||
accountName = BLOB_CONFIG.accountName
|
accountName = BLOB_CONFIG.accountName
|
||||||
accountKey =
|
accountKey =
|
||||||
@@ -508,13 +443,10 @@ export async function getMultipartPartUrls(
|
|||||||
const blockBlobClient = containerClient.getBlockBlobClient(key)
|
const blockBlobClient = containerClient.getBlockBlobClient(key)
|
||||||
|
|
||||||
return partNumbers.map((partNumber) => {
|
return partNumbers.map((partNumber) => {
|
||||||
// Azure uses block IDs instead of part numbers
|
|
||||||
// Block IDs must be base64 encoded and all the same length
|
|
||||||
const blockId = Buffer.from(`block-${partNumber.toString().padStart(6, '0')}`).toString(
|
const blockId = Buffer.from(`block-${partNumber.toString().padStart(6, '0')}`).toString(
|
||||||
'base64'
|
'base64'
|
||||||
)
|
)
|
||||||
|
|
||||||
// Generate SAS token for uploading this specific block
|
|
||||||
const sasOptions = {
|
const sasOptions = {
|
||||||
containerName,
|
containerName,
|
||||||
blobName: key,
|
blobName: key,
|
||||||
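For context on the block-ID scheme above: Azure Blob Storage has no native upload IDs or part numbers; each part is staged as a block, and every block ID within a blob must be base64-encoded and decode to the same length, which is why the part number is zero-padded first. A minimal sketch of the same lifecycle done directly with @azure/storage-blob (connection string, container, and key values are hypothetical):

import { BlobServiceClient } from '@azure/storage-blob'

// Same padding scheme as above: equal-length IDs before base64-encoding.
const blockIdFor = (partNumber: number): string =>
  Buffer.from(`block-${partNumber.toString().padStart(6, '0')}`).toString('base64')

async function uploadInBlocks(connectionString: string, container: string, key: string, parts: Buffer[]) {
  const blob = BlobServiceClient.fromConnectionString(connectionString)
    .getContainerClient(container)
    .getBlockBlobClient(key)
  const ids: string[] = []
  for (let i = 0; i < parts.length; i++) {
    const id = blockIdFor(i + 1)
    await blob.stageBlock(id, parts[i], parts[i].length) // upload one block
    ids.push(id)
  }
  await blob.commitBlockList(ids) // assemble the committed blob in this order
}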
@@ -541,11 +473,11 @@ export async function getMultipartPartUrls(
  */
 export async function completeMultipartUpload(
   key: string,
-  _uploadId: string, // Not used in Azure Blob, kept for interface consistency
-  parts: Array<{ blockId: string; partNumber: number }>,
-  customConfig?: CustomBlobConfig
+  parts: AzureMultipartPart[],
+  customConfig?: BlobConfig
 ): Promise<{ location: string; path: string; key: string }> {
-  let blobServiceClient: BlobServiceClient
+  const { BlobServiceClient, StorageSharedKeyCredential } = await import('@azure/storage-blob')
+  let blobServiceClient: BlobServiceClientInstance
   let containerName: string
 
   if (customConfig) {
@@ -565,7 +497,7 @@ export async function completeMultipartUpload(
     }
     containerName = customConfig.containerName
   } else {
-    blobServiceClient = getBlobServiceClient()
+    blobServiceClient = await getBlobServiceClient()
     containerName = BLOB_CONFIG.containerName
   }
 
@@ -598,12 +530,9 @@ export async function completeMultipartUpload(
 /**
  * Abort multipart upload by deleting the blob if it exists
  */
-export async function abortMultipartUpload(
-  key: string,
-  _uploadId: string, // Not used in Azure Blob, kept for interface consistency
-  customConfig?: CustomBlobConfig
-): Promise<void> {
-  let blobServiceClient: BlobServiceClient
+export async function abortMultipartUpload(key: string, customConfig?: BlobConfig): Promise<void> {
+  const { BlobServiceClient, StorageSharedKeyCredential } = await import('@azure/storage-blob')
+  let blobServiceClient: BlobServiceClientInstance
   let containerName: string
 
   if (customConfig) {
@@ -623,7 +552,7 @@ export async function abortMultipartUpload(
     }
     containerName = customConfig.containerName
   } else {
-    blobServiceClient = getBlobServiceClient()
+    blobServiceClient = await getBlobServiceClient()
     containerName = BLOB_CONFIG.containerName
   }
 
@@ -1,11 +1,14 @@
 export {
-  type CustomBlobConfig,
   deleteFromBlob,
   downloadFromBlob,
-  type FileInfo,
   getBlobServiceClient,
   getPresignedUrl,
   getPresignedUrlWithConfig,
-  sanitizeFilenameForMetadata,
   uploadToBlob,
-} from '@/lib/uploads/providers/blob/blob-client'
+} from '@/lib/uploads/providers/blob/client'
+export type {
+  AzureMultipartPart,
+  AzureMultipartUploadInit,
+  AzurePartUploadUrl,
+  BlobConfig,
+} from '@/lib/uploads/providers/blob/types'
apps/sim/lib/uploads/providers/blob/types.ts (Normal file, 24 lines)
@@ -0,0 +1,24 @@
+export interface BlobConfig {
+  containerName: string
+  accountName: string
+  accountKey?: string
+  connectionString?: string
+}
+
+export interface AzureMultipartUploadInit {
+  fileName: string
+  contentType: string
+  fileSize: number
+  customConfig?: BlobConfig
+}
+
+export interface AzurePartUploadUrl {
+  partNumber: number
+  blockId: string
+  url: string
+}
+
+export interface AzureMultipartPart {
+  blockId: string
+  partNumber: number
+}
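Taken together, these types describe the three-step client flow: initiate, PUT each part against its SAS URL, then complete with the collected block IDs. A hedged sketch, assuming the multipart functions are re-exported from '@/lib/uploads/providers/blob' alongside the types (the barrel above only shows part of that file), and that staging a block is a plain PUT against the returned URL:

import {
  initiateMultipartUpload,
  getMultipartPartUrls,
  completeMultipartUpload,
} from '@/lib/uploads/providers/blob'
import type { AzureMultipartPart } from '@/lib/uploads/providers/blob'

async function uploadLargeBlob(chunks: Buffer[], fileName: string) {
  const { key } = await initiateMultipartUpload({
    fileName,
    contentType: 'application/octet-stream',
    fileSize: chunks.reduce((n, c) => n + c.length, 0),
  })
  const partUrls = await getMultipartPartUrls(key, chunks.map((_, i) => i + 1))
  const parts: AzureMultipartPart[] = []
  for (const { url, blockId, partNumber } of partUrls) {
    await fetch(url, { method: 'PUT', body: chunks[partNumber - 1] }) // stage block via SAS
    parts.push({ blockId, partNumber })
  }
  return completeMultipartUpload(key, parts) // commits the block list server-side
}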
@@ -68,7 +68,7 @@ describe('S3 Client', () => {
   it('should upload a file to S3 and return file info', async () => {
     mockSend.mockResolvedValueOnce({})
 
-    const { uploadToS3 } = await import('@/lib/uploads/providers/s3/s3-client')
+    const { uploadToS3 } = await import('@/lib/uploads/providers/s3/client')
 
     const file = Buffer.from('test content')
     const fileName = 'test-file.txt'
@@ -101,7 +101,7 @@ describe('S3 Client', () => {
   it('should handle spaces in filenames', async () => {
     mockSend.mockResolvedValueOnce({})
 
-    const { uploadToS3 } = await import('@/lib/uploads/providers/s3/s3-client')
+    const { uploadToS3 } = await import('@/lib/uploads/providers/s3/client')
 
     const testFile = Buffer.from('test file content')
     const fileName = 'test file with spaces.txt'
@@ -121,7 +121,7 @@ describe('S3 Client', () => {
   it('should use provided size if available', async () => {
     mockSend.mockResolvedValueOnce({})
 
-    const { uploadToS3 } = await import('@/lib/uploads/providers/s3/s3-client')
+    const { uploadToS3 } = await import('@/lib/uploads/providers/s3/client')
 
     const testFile = Buffer.from('test file content')
     const fileName = 'test-file.txt'
@@ -137,7 +137,7 @@ describe('S3 Client', () => {
     const error = new Error('Upload failed')
     mockSend.mockRejectedValueOnce(error)
 
-    const { uploadToS3 } = await import('@/lib/uploads/providers/s3/s3-client')
+    const { uploadToS3 } = await import('@/lib/uploads/providers/s3/client')
 
     const testFile = Buffer.from('test file content')
     const fileName = 'test-file.txt'
@@ -151,7 +151,7 @@ describe('S3 Client', () => {
   it('should generate a presigned URL for a file', async () => {
     mockGetSignedUrl.mockResolvedValueOnce('https://example.com/presigned-url')
 
-    const { getPresignedUrl } = await import('@/lib/uploads/providers/s3/s3-client')
+    const { getPresignedUrl } = await import('@/lib/uploads/providers/s3/client')
 
     const key = 'test-file.txt'
     const expiresIn = 1800
@@ -171,7 +171,7 @@ describe('S3 Client', () => {
   it('should use default expiration if not provided', async () => {
     mockGetSignedUrl.mockResolvedValueOnce('https://example.com/presigned-url')
 
-    const { getPresignedUrl } = await import('@/lib/uploads/providers/s3/s3-client')
+    const { getPresignedUrl } = await import('@/lib/uploads/providers/s3/client')
 
     const key = 'test-file.txt'
 
@@ -188,7 +188,7 @@ describe('S3 Client', () => {
     const error = new Error('Presigned URL generation failed')
     mockGetSignedUrl.mockRejectedValueOnce(error)
 
-    const { getPresignedUrl } = await import('@/lib/uploads/providers/s3/s3-client')
+    const { getPresignedUrl } = await import('@/lib/uploads/providers/s3/client')
 
     const key = 'test-file.txt'
 
@@ -216,7 +216,7 @@ describe('S3 Client', () => {
       $metadata: { httpStatusCode: 200 },
     })
 
-    const { downloadFromS3 } = await import('@/lib/uploads/providers/s3/s3-client')
+    const { downloadFromS3 } = await import('@/lib/uploads/providers/s3/client')
 
     const key = 'test-file.txt'
 
@@ -247,7 +247,7 @@ describe('S3 Client', () => {
       $metadata: { httpStatusCode: 200 },
     })
 
-    const { downloadFromS3 } = await import('@/lib/uploads/providers/s3/s3-client')
+    const { downloadFromS3 } = await import('@/lib/uploads/providers/s3/client')
 
     const key = 'test-file.txt'
 
@@ -258,7 +258,7 @@ describe('S3 Client', () => {
     const error = new Error('Download failed')
     mockSend.mockRejectedValueOnce(error)
 
-    const { downloadFromS3 } = await import('@/lib/uploads/providers/s3/s3-client')
+    const { downloadFromS3 } = await import('@/lib/uploads/providers/s3/client')
 
     const key = 'test-file.txt'
 
@@ -270,7 +270,7 @@ describe('S3 Client', () => {
   it('should delete a file from S3', async () => {
     mockSend.mockResolvedValueOnce({})
 
-    const { deleteFromS3 } = await import('@/lib/uploads/providers/s3/s3-client')
+    const { deleteFromS3 } = await import('@/lib/uploads/providers/s3/client')
 
     const key = 'test-file.txt'
 
@@ -288,7 +288,7 @@ describe('S3 Client', () => {
     const error = new Error('Delete failed')
     mockSend.mockRejectedValueOnce(error)
 
-    const { deleteFromS3 } = await import('@/lib/uploads/providers/s3/s3-client')
+    const { deleteFromS3 } = await import('@/lib/uploads/providers/s3/client')
 
     const key = 'test-file.txt'
 
@@ -315,7 +315,7 @@ describe('S3 Client', () => {
     }))
 
     vi.resetModules()
-    const { getS3Client } = await import('@/lib/uploads/providers/s3/s3-client')
+    const { getS3Client } = await import('@/lib/uploads/providers/s3/client')
     const { S3Client } = await import('@aws-sdk/client-s3')
 
     const client = getS3Client()
@@ -348,7 +348,7 @@ describe('S3 Client', () => {
     }))
 
     vi.resetModules()
-    const { getS3Client } = await import('@/lib/uploads/providers/s3/s3-client')
+    const { getS3Client } = await import('@/lib/uploads/providers/s3/client')
     const { S3Client } = await import('@aws-sdk/client-s3')
 
     const client = getS3Client()
@@ -10,9 +10,19 @@ import {
 } from '@aws-sdk/client-s3'
 import { getSignedUrl } from '@aws-sdk/s3-request-presigner'
 import { env } from '@/lib/env'
-import { S3_CONFIG, S3_KB_CONFIG } from '@/lib/uploads/core/setup'
+import { S3_CONFIG, S3_KB_CONFIG } from '@/lib/uploads/config'
+import type {
+  S3Config,
+  S3MultipartPart,
+  S3MultipartUploadInit,
+  S3PartUploadUrl,
+} from '@/lib/uploads/providers/s3/types'
+import type { FileInfo } from '@/lib/uploads/shared/types'
+import {
+  sanitizeFilenameForMetadata,
+  sanitizeStorageMetadata,
+} from '@/lib/uploads/utils/file-utils'
 
-// Lazily create a single S3 client instance.
 let _s3Client: S3Client | null = null
 
 export function getS3Client(): S3Client {
@@ -26,8 +36,6 @@ export function getS3Client(): S3Client {
     )
   }
 
-  // Only pass explicit credentials if both environment variables are available.
-  // Otherwise, fall back to the AWS SDK default credential provider chain (e.g. EC2/ECS roles, shared config files, etc.).
   _s3Client = new S3Client({
     region,
     credentials:
@@ -42,133 +50,80 @@ export function getS3Client(): S3Client {
   return _s3Client
 }
 
-/**
- * Sanitize a filename for use in S3 metadata headers
- * S3 metadata headers must contain only ASCII printable characters (0x20-0x7E)
- * and cannot contain certain special characters
- */
-export function sanitizeFilenameForMetadata(filename: string): string {
-  return (
-    filename
-      // Remove non-ASCII characters (keep only printable ASCII 0x20-0x7E)
-      .replace(/[^\x20-\x7E]/g, '')
-      // Remove characters that are problematic in HTTP headers
-      .replace(/["\\]/g, '')
-      // Replace multiple spaces with single space
-      .replace(/\s+/g, ' ')
-      // Trim whitespace
-      .trim() ||
-    // Provide fallback if completely sanitized
-    'file'
-  )
-}
-
-/**
- * File information structure
- */
-export interface FileInfo {
-  path: string // Path to access the file
-  key: string // S3 key or local filename
-  name: string // Original filename
-  size: number // File size in bytes
-  type: string // MIME type
-}
-
-/**
- * Custom S3 configuration
- */
-export interface CustomS3Config {
-  bucket: string
-  region: string
-}
-
 /**
  * Upload a file to S3
  * @param file Buffer containing file data
  * @param fileName Original file name
  * @param contentType MIME type of the file
- * @param size File size in bytes (optional, will use buffer length if not provided)
- * @returns Object with file information
- */
-export async function uploadToS3(
-  file: Buffer,
-  fileName: string,
-  contentType: string,
-  size?: number
-): Promise<FileInfo>
-
-/**
- * Upload a file to S3 with custom bucket configuration
- * @param file Buffer containing file data
- * @param fileName Original file name
- * @param contentType MIME type of the file
- * @param customConfig Custom S3 configuration (bucket and region)
- * @param size File size in bytes (optional, will use buffer length if not provided)
+ * @param configOrSize Custom S3 configuration OR file size in bytes (optional)
+ * @param size File size in bytes (required if configOrSize is S3Config, optional otherwise)
  * @param skipTimestampPrefix Skip adding timestamp prefix to filename (default: false)
+ * @param metadata Optional metadata to store with the file
  * @returns Object with file information
  */
 export async function uploadToS3(
   file: Buffer,
   fileName: string,
   contentType: string,
-  customConfig: CustomS3Config,
+  configOrSize?: S3Config | number,
   size?: number,
-  skipTimestampPrefix?: boolean
+  skipTimestampPrefix?: boolean,
+  metadata?: Record<string, string>
 ): Promise<FileInfo>
 
 export async function uploadToS3(
   file: Buffer,
   fileName: string,
   contentType: string,
-  configOrSize?: CustomS3Config | number,
+  configOrSize?: S3Config | number,
   size?: number,
-  skipTimestampPrefix?: boolean
+  skipTimestampPrefix?: boolean,
+  metadata?: Record<string, string>
 ): Promise<FileInfo> {
-  // Handle overloaded parameters
-  let config: CustomS3Config
+  let config: S3Config
   let fileSize: number
   let shouldSkipTimestamp: boolean
 
   if (typeof configOrSize === 'object') {
-    // Custom config provided
     config = configOrSize
     fileSize = size ?? file.length
     shouldSkipTimestamp = skipTimestampPrefix ?? false
   } else {
-    // Use default config
     config = { bucket: S3_CONFIG.bucket, region: S3_CONFIG.region }
    fileSize = configOrSize ?? file.length
-    shouldSkipTimestamp = size === undefined ? false : (skipTimestampPrefix ?? false)
+    shouldSkipTimestamp = skipTimestampPrefix ?? false
   }
 
-  // Create filename - optionally skip timestamp prefix
   const safeFileName = fileName.replace(/\s+/g, '-') // Replace spaces with hyphens
   const uniqueKey = shouldSkipTimestamp ? safeFileName : `${Date.now()}-${safeFileName}`
 
   const s3Client = getS3Client()
 
-  // Upload the file to S3
+  const s3Metadata: Record<string, string> = {
+    originalName: sanitizeFilenameForMetadata(fileName),
+    uploadedAt: new Date().toISOString(),
+  }
+
+  if (metadata) {
+    Object.assign(s3Metadata, sanitizeStorageMetadata(metadata, 2000))
+  }
+
   await s3Client.send(
     new PutObjectCommand({
       Bucket: config.bucket,
       Key: uniqueKey,
       Body: file,
       ContentType: contentType,
-      // Add some useful metadata with sanitized values
-      Metadata: {
-        originalName: encodeURIComponent(fileName), // Encode filename to prevent invalid characters in HTTP headers
-        uploadedAt: new Date().toISOString(),
-      },
+      Metadata: s3Metadata,
     })
   )
 
-  // Create a path for API to serve the file
   const servePath = `/api/files/serve/s3/${encodeURIComponent(uniqueKey)}`
 
   return {
     path: servePath,
     key: uniqueKey,
-    name: fileName, // Return the actual original filename in the response
+    name: fileName,
     size: fileSize,
     type: contentType,
   }
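The overload pair above means the fourth argument is either a plain file size or an S3Config. Both call shapes, with hypothetical buffer, bucket, and metadata values:

const buf = Buffer.from('example')

// Default-bucket shape: fourth argument is the size.
const a = await uploadToS3(buf, 'report.pdf', 'application/pdf', buf.length)

// Custom-config shape: fourth argument is an S3Config, size follows.
const b = await uploadToS3(
  buf,
  'report.pdf',
  'application/pdf',
  { bucket: 'my-custom-bucket', region: 'us-east-1' },
  buf.length,
  true, // skipTimestampPrefix keeps the key stable across re-uploads
  { workflowId: 'wf_123' } // merged into the S3 metadata after sanitization
)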
@@ -198,7 +153,7 @@ export async function getPresignedUrl(key: string, expiresIn = 3600) {
  */
 export async function getPresignedUrlWithConfig(
   key: string,
-  customConfig: CustomS3Config,
+  customConfig: S3Config,
   expiresIn = 3600
 ) {
   const command = new GetObjectCommand({
@@ -222,9 +177,9 @@ export async function downloadFromS3(key: string): Promise<Buffer>
  * @param customConfig Custom S3 configuration
  * @returns File buffer
  */
-export async function downloadFromS3(key: string, customConfig: CustomS3Config): Promise<Buffer>
+export async function downloadFromS3(key: string, customConfig: S3Config): Promise<Buffer>
 
-export async function downloadFromS3(key: string, customConfig?: CustomS3Config): Promise<Buffer> {
+export async function downloadFromS3(key: string, customConfig?: S3Config): Promise<Buffer> {
   const config = customConfig || { bucket: S3_CONFIG.bucket, region: S3_CONFIG.region }
 
   const command = new GetObjectCommand({
@@ -235,31 +190,6 @@ export async function downloadFromS3(key: string, customConfig?: CustomS3Config)
   const response = await getS3Client().send(command)
   const stream = response.Body as any
 
-  // Convert stream to buffer
-  return new Promise<Buffer>((resolve, reject) => {
-    const chunks: Buffer[] = []
-    stream.on('data', (chunk: Buffer) => chunks.push(chunk))
-    stream.on('end', () => resolve(Buffer.concat(chunks)))
-    stream.on('error', reject)
-  })
-}
-
-/**
- * Download a file from S3 with custom bucket configuration
- * @param key S3 object key
- * @param customConfig Custom S3 configuration
- * @returns File buffer
- */
-export async function downloadFromS3WithConfig(key: string, customConfig: CustomS3Config) {
-  const command = new GetObjectCommand({
-    Bucket: customConfig.bucket,
-    Key: key,
-  })
-
-  const response = await getS3Client().send(command)
-  const stream = response.Body as any
-
-  // Convert stream to buffer
   return new Promise<Buffer>((resolve, reject) => {
     const chunks: Buffer[] = []
     stream.on('data', (chunk: Buffer) => chunks.push(chunk))
@@ -279,9 +209,9 @@ export async function deleteFromS3(key: string): Promise<void>
  * @param key S3 object key
  * @param customConfig Custom S3 configuration
  */
-export async function deleteFromS3(key: string, customConfig: CustomS3Config): Promise<void>
+export async function deleteFromS3(key: string, customConfig: S3Config): Promise<void>
 
-export async function deleteFromS3(key: string, customConfig?: CustomS3Config): Promise<void> {
+export async function deleteFromS3(key: string, customConfig?: S3Config): Promise<void> {
   const config = customConfig || { bucket: S3_CONFIG.bucket, region: S3_CONFIG.region }
 
   await getS3Client().send(
@@ -292,24 +222,6 @@ export async function deleteFromS3(key: string, customConfig?: CustomS3Config):
   )
 }
 
-// Multipart upload interfaces
-export interface S3MultipartUploadInit {
-  fileName: string
-  contentType: string
-  fileSize: number
-  customConfig?: CustomS3Config
-}
-
-export interface S3PartUploadUrl {
-  partNumber: number
-  url: string
-}
-
-export interface S3MultipartPart {
-  ETag: string
-  PartNumber: number
-}
-
 /**
  * Initiate a multipart upload for S3
  */
@@ -321,7 +233,6 @@ export async function initiateS3MultipartUpload(
   const config = customConfig || { bucket: S3_KB_CONFIG.bucket, region: S3_KB_CONFIG.region }
   const s3Client = getS3Client()
 
-  // Create unique key for the object
   const safeFileName = fileName.replace(/\s+/g, '-').replace(/[^a-zA-Z0-9.-]/g, '_')
   const { v4: uuidv4 } = await import('uuid')
   const uniqueKey = `kb/${uuidv4()}-${safeFileName}`
@@ -356,7 +267,7 @@ export async function getS3MultipartPartUrls(
   key: string,
   uploadId: string,
   partNumbers: number[],
-  customConfig?: CustomS3Config
+  customConfig?: S3Config
 ): Promise<S3PartUploadUrl[]> {
   const config = customConfig || { bucket: S3_KB_CONFIG.bucket, region: S3_KB_CONFIG.region }
   const s3Client = getS3Client()
@@ -385,7 +296,7 @@ export async function completeS3MultipartUpload(
   key: string,
   uploadId: string,
   parts: S3MultipartPart[],
-  customConfig?: CustomS3Config
+  customConfig?: S3Config
 ): Promise<{ location: string; path: string; key: string }> {
   const config = customConfig || { bucket: S3_KB_CONFIG.bucket, region: S3_KB_CONFIG.region }
   const s3Client = getS3Client()
@@ -417,7 +328,7 @@ export async function completeS3MultipartUpload(
 export async function abortS3MultipartUpload(
   key: string,
   uploadId: string,
-  customConfig?: CustomS3Config
+  customConfig?: S3Config
 ): Promise<void> {
   const config = customConfig || { bucket: S3_KB_CONFIG.bucket, region: S3_KB_CONFIG.region }
   const s3Client = getS3Client()
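A hedged sketch of the round trip these four functions support, assuming initiateS3MultipartUpload accepts an S3MultipartUploadInit and returns { uploadId, key } like its Azure counterpart, and that each presigned part upload echoes the part's ETag response header (which S3 requires back on complete):

async function multipartUpload(chunks: Buffer[], fileName: string) {
  const { uploadId, key } = await initiateS3MultipartUpload({
    fileName,
    contentType: 'application/octet-stream',
    fileSize: chunks.reduce((n, c) => n + c.length, 0),
  })
  try {
    const urls = await getS3MultipartPartUrls(key, uploadId, chunks.map((_, i) => i + 1))
    const parts: S3MultipartPart[] = []
    for (const { partNumber, url } of urls) {
      const res = await fetch(url, { method: 'PUT', body: chunks[partNumber - 1] })
      parts.push({ ETag: res.headers.get('etag') ?? '', PartNumber: partNumber })
    }
    return await completeS3MultipartUpload(key, uploadId, parts)
  } catch (err) {
    await abortS3MultipartUpload(key, uploadId) // uncompleted parts otherwise linger and bill
    throw err
  }
}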
@@ -1,11 +1,14 @@
 export {
-  type CustomS3Config,
   deleteFromS3,
   downloadFromS3,
-  type FileInfo,
   getPresignedUrl,
   getPresignedUrlWithConfig,
   getS3Client,
-  sanitizeFilenameForMetadata,
   uploadToS3,
-} from '@/lib/uploads/providers/s3/s3-client'
+} from '@/lib/uploads/providers/s3/client'
+export type {
+  S3Config,
+  S3MultipartPart,
+  S3MultipartUploadInit,
+  S3PartUploadUrl,
+} from '@/lib/uploads/providers/s3/types'
apps/sim/lib/uploads/providers/s3/types.ts (Normal file, 21 lines)
@@ -0,0 +1,21 @@
+export interface S3Config {
+  bucket: string
+  region: string
+}
+
+export interface S3MultipartUploadInit {
+  fileName: string
+  contentType: string
+  fileSize: number
+  customConfig?: S3Config
+}
+
+export interface S3PartUploadUrl {
+  partNumber: number
+  url: string
+}
+
+export interface S3MultipartPart {
+  ETag: string
+  PartNumber: number
+}
apps/sim/lib/uploads/server/metadata.ts (Normal file, 202 lines)
@@ -0,0 +1,202 @@
+import { db } from '@sim/db'
+import { workspaceFiles } from '@sim/db/schema'
+import { and, eq } from 'drizzle-orm'
+import { createLogger } from '@/lib/logs/console/logger'
+import type { StorageContext } from '../shared/types'
+
+const logger = createLogger('FileMetadata')
+
+export interface FileMetadataRecord {
+  id: string
+  key: string
+  userId: string
+  workspaceId: string | null
+  context: string
+  originalName: string
+  contentType: string
+  size: number
+  uploadedAt: Date
+}
+
+export interface FileMetadataInsertOptions {
+  key: string
+  userId: string
+  workspaceId?: string | null
+  context: StorageContext
+  originalName: string
+  contentType: string
+  size: number
+  id?: string // Optional - will generate UUID if not provided
+}
+
+export interface FileMetadataQueryOptions {
+  context?: StorageContext
+  workspaceId?: string
+  userId?: string
+}
+
+/**
+ * Insert file metadata into workspaceFiles table
+ * Handles duplicate key errors gracefully by returning existing record
+ */
+export async function insertFileMetadata(
+  options: FileMetadataInsertOptions
+): Promise<FileMetadataRecord> {
+  const { key, userId, workspaceId, context, originalName, contentType, size, id } = options
+
+  const existing = await db
+    .select()
+    .from(workspaceFiles)
+    .where(eq(workspaceFiles.key, key))
+    .limit(1)
+
+  if (existing.length > 0) {
+    return {
+      id: existing[0].id,
+      key: existing[0].key,
+      userId: existing[0].userId,
+      workspaceId: existing[0].workspaceId,
+      context: existing[0].context,
+      originalName: existing[0].originalName,
+      contentType: existing[0].contentType,
+      size: existing[0].size,
+      uploadedAt: existing[0].uploadedAt,
+    }
+  }
+
+  const fileId = id || (await import('uuid')).v4()
+
+  try {
+    await db.insert(workspaceFiles).values({
+      id: fileId,
+      key,
+      userId,
+      workspaceId: workspaceId || null,
+      context,
+      originalName,
+      contentType,
+      size,
+      uploadedAt: new Date(),
+    })
+
+    return {
+      id: fileId,
+      key,
+      userId,
+      workspaceId: workspaceId || null,
+      context,
+      originalName,
+      contentType,
+      size,
+      uploadedAt: new Date(),
+    }
+  } catch (error) {
+    if (
+      (error as any)?.code === '23505' ||
+      (error instanceof Error && error.message.includes('unique'))
+    ) {
+      const existingAfterError = await db
+        .select()
+        .from(workspaceFiles)
+        .where(eq(workspaceFiles.key, key))
+        .limit(1)
+
+      if (existingAfterError.length > 0) {
+        return {
+          id: existingAfterError[0].id,
+          key: existingAfterError[0].key,
+          userId: existingAfterError[0].userId,
+          workspaceId: existingAfterError[0].workspaceId,
+          context: existingAfterError[0].context,
+          originalName: existingAfterError[0].originalName,
+          contentType: existingAfterError[0].contentType,
+          size: existingAfterError[0].size,
+          uploadedAt: existingAfterError[0].uploadedAt,
+        }
+      }
+    }
+
+    logger.error(`Failed to insert file metadata for key: ${key}`, error)
+    throw error
+  }
+}
+
+/**
+ * Get file metadata by key with optional context filter
+ */
+export async function getFileMetadataByKey(
+  key: string,
+  context?: StorageContext
+): Promise<FileMetadataRecord | null> {
+  const conditions = [eq(workspaceFiles.key, key)]
+
+  if (context) {
+    conditions.push(eq(workspaceFiles.context, context))
+  }
+
+  const [record] = await db
+    .select()
+    .from(workspaceFiles)
+    .where(conditions.length > 1 ? and(...conditions) : conditions[0])
+    .limit(1)
+
+  if (!record) {
+    return null
+  }
+
+  return {
+    id: record.id,
+    key: record.key,
+    userId: record.userId,
+    workspaceId: record.workspaceId,
+    context: record.context,
+    originalName: record.originalName,
+    contentType: record.contentType,
+    size: record.size,
+    uploadedAt: record.uploadedAt,
+  }
+}
+
+/**
+ * Get file metadata by context with optional workspaceId/userId filters
+ */
+export async function getFileMetadataByContext(
+  context: StorageContext,
+  options?: FileMetadataQueryOptions
+): Promise<FileMetadataRecord[]> {
+  const conditions = [eq(workspaceFiles.context, context)]
+
+  if (options?.workspaceId) {
+    conditions.push(eq(workspaceFiles.workspaceId, options.workspaceId))
+  }
+
+  if (options?.userId) {
+    conditions.push(eq(workspaceFiles.userId, options.userId))
+  }
+
+  const records = await db
+    .select()
+    .from(workspaceFiles)
+    .where(conditions.length > 1 ? and(...conditions) : conditions[0])
+    .orderBy(workspaceFiles.uploadedAt)
+
+  return records.map((record) => ({
+    id: record.id,
+    key: record.key,
+    userId: record.userId,
+    workspaceId: record.workspaceId,
+    context: record.context,
+    originalName: record.originalName,
+    contentType: record.contentType,
+    size: record.size,
+    uploadedAt: record.uploadedAt,
+  }))
+}
+
+/**
+ * Delete file metadata by key
+ */
+export async function deleteFileMetadata(key: string): Promise<boolean> {
+  await db.delete(workspaceFiles).where(eq(workspaceFiles.key, key))
+  return true
+}
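Why insertFileMetadata tolerates duplicates: two writers racing on the same key both resolve to the one stored row, first via the pre-select and, if the race slips past that, via the Postgres 23505 unique-violation fallback. Illustrative values:

const options = {
  key: 'kb/123-report.pdf',
  userId: 'user_1',
  context: 'knowledge-base' as const,
  originalName: 'report.pdf',
  contentType: 'application/pdf',
  size: 1024,
}
const first = await insertFileMetadata(options)
const second = await insertFileMetadata(options) // returns the existing record, does not throw
console.assert(first.id === second.id)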
apps/sim/lib/uploads/shared/types.ts (Normal file, 61 lines)
@@ -0,0 +1,61 @@
+export type StorageContext =
+  | 'general'
+  | 'knowledge-base'
+  | 'chat'
+  | 'copilot'
+  | 'execution'
+  | 'workspace'
+  | 'profile-pictures'
+
+export interface FileInfo {
+  path: string
+  key: string
+  name: string
+  size: number
+  type: string
+}
+
+export interface StorageConfig {
+  bucket?: string
+  region?: string
+  containerName?: string
+  accountName?: string
+  accountKey?: string
+  connectionString?: string
+}
+
+export interface UploadFileOptions {
+  file: Buffer
+  fileName: string
+  contentType: string
+  context: StorageContext
+  preserveKey?: boolean
+  customKey?: string
+  metadata?: Record<string, string>
+}
+
+export interface DownloadFileOptions {
+  key: string
+  context?: StorageContext
+}
+
+export interface DeleteFileOptions {
+  key: string
+  context?: StorageContext
+}
+
+export interface GeneratePresignedUrlOptions {
+  fileName: string
+  contentType: string
+  fileSize: number
+  context: StorageContext
+  userId?: string
+  expirationSeconds?: number
+  metadata?: Record<string, string>
+}
+
+export interface PresignedUrlResponse {
+  url: string
+  key: string
+  uploadHeaders?: Record<string, string>
+}
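These shared types are the provider-agnostic surface: callers name a context, not a bucket or container. A small sketch, assuming downloadFile from '@/lib/uploads/core/storage-service' (the module the server utilities below import) accepts DownloadFileOptions; the key is illustrative:

import { downloadFile } from '@/lib/uploads/core/storage-service'
import type { DownloadFileOptions } from '@/lib/uploads/shared/types'

const opts: DownloadFileOptions = { key: 'kb/123-report.pdf', context: 'knowledge-base' }
const buffer = await downloadFile(opts) // resolved against S3, Azure Blob, or local storage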
@@ -1,138 +0,0 @@
-import type { Logger } from '@/lib/logs/console/logger'
-import { type StorageContext, StorageService } from '@/lib/uploads'
-import { downloadExecutionFile, isExecutionFile } from '@/lib/uploads/contexts/execution'
-import { extractStorageKey } from '@/lib/uploads/utils/file-utils'
-import type { UserFile } from '@/executor/types'
-
-/**
- * Converts a single raw file object to UserFile format
- * @param file - Raw file object
- * @param requestId - Request ID for logging
- * @param logger - Logger instance
- * @returns UserFile object
- * @throws Error if file has no storage key
- */
-export function processSingleFileToUserFile(
-  file: any,
-  requestId: string,
-  logger: Logger
-): UserFile {
-  // Already a UserFile (from variable reference)
-  if (file.id && file.key && file.uploadedAt) {
-    return file as UserFile
-  }
-
-  // Extract storage key from path or key property
-  const storageKey = file.key || (file.path ? extractStorageKey(file.path) : null)
-
-  if (!storageKey) {
-    logger.warn(`[${requestId}] File has no storage key: ${file.name || 'unknown'}`)
-    throw new Error(`File has no storage key: ${file.name || 'unknown'}`)
-  }
-
-  const userFile: UserFile = {
-    id: file.id || `file-${Date.now()}`,
-    name: file.name,
-    url: file.url || file.path,
-    size: file.size,
-    type: file.type || 'application/octet-stream',
-    key: storageKey,
-    uploadedAt: file.uploadedAt || new Date().toISOString(),
-    expiresAt: file.expiresAt || new Date(Date.now() + 24 * 60 * 60 * 1000).toISOString(),
-  }
-
-  logger.info(`[${requestId}] Converted file to UserFile: ${userFile.name} (key: ${userFile.key})`)
-  return userFile
-}
-
-/**
- * Converts raw file objects (from file-upload or variable references) to UserFile format
- * @param files - Array of raw file objects
- * @param requestId - Request ID for logging
- * @param logger - Logger instance
- * @returns Array of UserFile objects
- */
-export function processFilesToUserFiles(
-  files: any[],
-  requestId: string,
-  logger: Logger
-): UserFile[] {
-  const userFiles: UserFile[] = []
-
-  for (const file of files) {
-    try {
-      const userFile = processSingleFileToUserFile(file, requestId, logger)
-      userFiles.push(userFile)
-    } catch (error) {
-      // Log and skip files that can't be processed
-      logger.warn(
-        `[${requestId}] Skipping file: ${error instanceof Error ? error.message : 'Unknown error'}`
-      )
-    }
-  }
-
-  return userFiles
-}
-
-/**
- * Infer storage context from file key pattern
- * @param key - File storage key
- * @returns Inferred storage context
- */
-function inferContextFromKey(key: string): StorageContext {
-  // KB files always start with 'kb/' prefix
-  if (key.startsWith('kb/')) {
-    return 'knowledge-base'
-  }
-
-  // Execution files: three or more UUID segments
-  // Pattern: {uuid}/{uuid}/{uuid}/{filename}
-  const segments = key.split('/')
-  if (segments.length >= 4 && segments[0].match(/^[a-f0-9-]{36}$/)) {
-    return 'execution'
-  }
-
-  // Workspace files: UUID-like ID followed by timestamp pattern
-  // Pattern: {uuid}/{timestamp}-{random}-{filename}
-  if (key.match(/^[a-f0-9-]{36}\/\d+-[a-z0-9]+-/)) {
-    return 'workspace'
-  }
-
-  // Default to general for all other patterns
-  return 'general'
-}
-
-/**
- * Downloads a file from storage (execution or regular)
- * @param userFile - UserFile object
- * @param requestId - Request ID for logging
- * @param logger - Logger instance
- * @returns Buffer containing file data
- */
-export async function downloadFileFromStorage(
-  userFile: UserFile,
-  requestId: string,
-  logger: Logger
-): Promise<Buffer> {
-  let buffer: Buffer
-
-  if (isExecutionFile(userFile)) {
-    logger.info(`[${requestId}] Downloading from execution storage: ${userFile.key}`)
-    buffer = await downloadExecutionFile(userFile)
-  } else if (userFile.key) {
-    // Use explicit context from file if available, otherwise infer from key pattern (fallback)
-    const context = (userFile.context as StorageContext) || inferContextFromKey(userFile.key)
-    logger.info(
-      `[${requestId}] Downloading from ${context} storage (${userFile.context ? 'explicit' : 'inferred'}): ${userFile.key}`
-    )
-
-    buffer = await StorageService.downloadFile({
-      key: userFile.key,
-      context,
-    })
-  } else {
-    throw new Error('File has no key - cannot download')
-  }
-
-  return buffer
-}
apps/sim/lib/uploads/utils/file-utils.server.ts (Normal file, 97 lines)
@@ -0,0 +1,97 @@
+'use server'
+
+import type { Logger } from '@/lib/logs/console/logger'
+import type { StorageContext } from '@/lib/uploads'
+import type { UserFile } from '@/executor/types'
+import { inferContextFromKey } from './file-utils'
+
+/**
+ * Check if a file is from execution storage based on its key pattern
+ * Execution files have keys in format: workspaceId/workflowId/executionId/filename
+ * Regular files have keys in format: timestamp-random-filename or just filename
+ */
+function isExecutionFile(file: UserFile): boolean {
+  if (!file.key) {
+    return false
+  }
+
+  // Execution files have at least 3 slashes in their key (4 parts)
+  // e.g., "workspace123/workflow456/execution789/document.pdf"
+  const parts = file.key.split('/')
+  return parts.length >= 4 && !file.key.startsWith('/api/') && !file.key.startsWith('http')
+}
+
+/**
+ * Download a file from a URL (internal or external)
+ * For internal URLs, uses direct storage access (server-side only)
+ * For external URLs, uses HTTP fetch
+ */
+export async function downloadFileFromUrl(fileUrl: string, timeoutMs = 180000): Promise<Buffer> {
+  const { isInternalFileUrl } = await import('./file-utils')
+  const { parseInternalFileUrl } = await import('./file-utils')
+  const controller = new AbortController()
+  const timeoutId = setTimeout(() => controller.abort(), timeoutMs)
+
+  try {
+    if (isInternalFileUrl(fileUrl)) {
+      const { key, context } = parseInternalFileUrl(fileUrl, 'knowledge-base')
+      const { downloadFile } = await import('@/lib/uploads/core/storage-service')
+      const buffer = await downloadFile({ key, context })
+      clearTimeout(timeoutId)
+      return buffer
+    }
+
+    const response = await fetch(fileUrl, { signal: controller.signal })
+    clearTimeout(timeoutId)
+
+    if (!response.ok) {
+      throw new Error(`Failed to download file: ${response.statusText}`)
+    }
+
+    return Buffer.from(await response.arrayBuffer())
+  } catch (error) {
+    clearTimeout(timeoutId)
+    if (error instanceof Error && error.name === 'AbortError') {
+      throw new Error('File download timed out')
+    }
+    throw error
+  }
+}
+
+/**
+ * Downloads a file from storage (execution or regular)
+ * @param userFile - UserFile object
+ * @param requestId - Request ID for logging
+ * @param logger - Logger instance
+ * @returns Buffer containing file data
+ */
+export async function downloadFileFromStorage(
+  userFile: UserFile,
+  requestId: string,
+  logger: Logger
+): Promise<Buffer> {
+  let buffer: Buffer
+
+  if (isExecutionFile(userFile)) {
+    logger.info(`[${requestId}] Downloading from execution storage: ${userFile.key}`)
+    const { downloadExecutionFile } = await import(
+      '@/lib/uploads/contexts/execution/execution-file-manager'
+    )
+    buffer = await downloadExecutionFile(userFile)
+  } else if (userFile.key) {
+    const context = (userFile.context as StorageContext) || inferContextFromKey(userFile.key)
+    logger.info(
+      `[${requestId}] Downloading from ${context} storage (${userFile.context ? 'explicit' : 'inferred'}): ${userFile.key}`
+    )
+
+    const { downloadFile } = await import('@/lib/uploads/core/storage-service')
+    buffer = await downloadFile({
+      key: userFile.key,
+      context,
+    })
+  } else {
+    throw new Error('File has no key - cannot download')
+  }
+
+  return buffer
+}
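The two dispatch paths of downloadFileFromUrl, with illustrative URLs:

// Internal serve URL: bypasses HTTP and reads straight from storage.
const internal = await downloadFileFromUrl('/api/files/serve/s3/kb%2F123-report.pdf')

// External URL: plain fetch with the abort-based timeout (30 s here).
const external = await downloadFileFromUrl('https://example.com/report.pdf', 30_000)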
@@ -1,3 +1,8 @@
+import type { Logger } from '@/lib/logs/console/logger'
+import type { StorageContext } from '@/lib/uploads'
+import type { UserFile } from '@/executor/types'
+import { ACCEPTED_FILE_TYPES } from './validation'
+
 export interface FileAttachment {
   id: string
   key: string
@@ -143,13 +148,71 @@ export function getMimeTypeFromExtension(extension: string): string {
   return extensionMimeMap[extension.toLowerCase()] || 'application/octet-stream'
 }
 
+/**
+ * Format bytes to human-readable file size
+ * @param bytes - File size in bytes
+ * @param options - Formatting options
+ * @returns Formatted string (e.g., "1.5 MB", "500 KB")
+ */
+export function formatFileSize(
+  bytes: number,
+  options?: { includeBytes?: boolean; precision?: number }
+): string {
+  if (bytes === 0) return '0 Bytes'
+
+  const k = 1024
+  const sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB']
+  const precision = options?.precision ?? 1
+  const includeBytes = options?.includeBytes ?? false
+
+  const i = Math.floor(Math.log(bytes) / Math.log(k))
+
+  if (i === 0 && !includeBytes) {
+    return '0 Bytes'
+  }
+
+  const value = bytes / k ** i
+  const formattedValue = Number.parseFloat(value.toFixed(precision))
+
+  return `${formattedValue} ${sizes[i]}`
+}
+
+/**
+ * Validate file size and type for knowledge base uploads (client-side)
+ * @param file - File object to validate
+ * @param maxSizeBytes - Maximum file size in bytes (default: 100MB)
+ * @returns Error message string if validation fails, null if valid
+ */
+export function validateKnowledgeBaseFile(
+  file: File,
+  maxSizeBytes: number = 100 * 1024 * 1024
+): string | null {
+  if (file.size > maxSizeBytes) {
+    const maxSizeMB = Math.round(maxSizeBytes / (1024 * 1024))
+    return `File "${file.name}" is too large. Maximum size is ${maxSizeMB}MB.`
+  }
+
+  if (!ACCEPTED_FILE_TYPES.includes(file.type)) {
+    return `File "${file.name}" has an unsupported format. Please use PDF, DOC, DOCX, TXT, CSV, XLS, XLSX, MD, PPT, PPTX, HTML, JSON, YAML, or YML files.`
+  }
+
+  return null
+}
+
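Worked examples of formatFileSize, derived from the function above (the exponent i picks the unit, then the value is rounded to the requested precision):

formatFileSize(1536)                         // '1.5 KB'   (1536 / 1024 = 1.5)
formatFileSize(512)                          // '0 Bytes'  (byte-sized values suppressed by default)
formatFileSize(512, { includeBytes: true })  // '512 Bytes'
formatFileSize(1572864, { precision: 2 })    // '1.5 MB'   (1572864 / 1024^2 = 1.5)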
 /**
  * Extract storage key from a file path
- * Handles various path formats: /api/files/serve/xyz, /api/files/serve/s3/xyz, etc.
- * Strips query parameters from the path before extracting the key.
  */
 export function extractStorageKey(filePath: string): string {
-  const pathWithoutQuery = filePath.split('?')[0]
+  let pathWithoutQuery = filePath.split('?')[0]
+
+  try {
+    if (pathWithoutQuery.startsWith('http://') || pathWithoutQuery.startsWith('https://')) {
+      const url = new URL(pathWithoutQuery)
+      pathWithoutQuery = url.pathname
+    }
+  } catch {
+    // If URL parsing fails, use the original path
+  }
+
   if (pathWithoutQuery.includes('/api/files/serve/s3/')) {
     return decodeURIComponent(pathWithoutQuery.split('/api/files/serve/s3/')[1])
@@ -162,3 +225,269 @@ export function extractStorageKey(filePath: string): string {
   }
   return pathWithoutQuery
 }
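What the URL-normalization change to extractStorageKey buys, derived from the function above (inputs are illustrative):

extractStorageKey('/api/files/serve/s3/kb%2F123-report.pdf')
// -> 'kb/123-report.pdf' (decoded, as before)

extractStorageKey('https://app.example.com/api/files/serve/s3/report.pdf?context=workspace')
// -> 'report.pdf' (host and query string are now stripped before key extraction)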
+
+/**
+ * Check if a URL is an internal file serve URL
+ */
+export function isInternalFileUrl(fileUrl: string): boolean {
+  return fileUrl.includes('/api/files/serve/')
+}
+
+/**
+ * Infer storage context from file key pattern
+ */
+export function inferContextFromKey(key: string): StorageContext {
+  // KB files always start with 'kb/' prefix
+  if (key.startsWith('kb/')) {
+    return 'knowledge-base'
+  }
+
+  // Execution files: three or more UUID segments (workspace/workflow/execution/...)
+  // Pattern: {uuid}/{uuid}/{uuid}/{filename}
+  const segments = key.split('/')
+  if (segments.length >= 4 && segments[0].match(/^[a-f0-9-]{36}$/)) {
+    return 'execution'
+  }
+
+  // Workspace files: UUID-like ID followed by timestamp pattern
+  // Pattern: {uuid}/{timestamp}-{random}-{filename}
+  if (key.match(/^[a-f0-9-]{36}\/\d+-[a-z0-9]+-/)) {
+    return 'workspace'
+  }
+
+  // Copilot/General files: timestamp-random-filename (no path segments)
+  // Pattern: {timestamp}-{random}-{filename}
+  // NOTE: This is ambiguous - prefer explicit context parameter
+  if (key.match(/^\d+-[a-z0-9]+-/)) {
+    return 'general'
+  }
+
+  return 'general'
+}
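Key shapes and the contexts inferContextFromKey resolves them to, derived from the branches above (keys are illustrative):

inferContextFromKey('kb/0b1c2d3e-report.pdf')
// -> 'knowledge-base' (kb/ prefix)

inferContextFromKey(
  'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa/bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb/cccccccc-cccc-cccc-cccc-cccccccccccc/trace.json'
)
// -> 'execution' (>= 4 segments, UUID-shaped first segment)

inferContextFromKey('aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa/1714752000-ab12cd-logo.png')
// -> 'workspace' (UUID directory + timestamp-random prefix)

inferContextFromKey('1714752000-ab12cd-notes.txt')
// -> 'general' (ambiguous flat key; pass an explicit context when known)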
|
|
||||||
/**
 * Extract storage key and context from an internal file URL
 * @param fileUrl - Internal file URL (e.g., /api/files/serve/key?context=workspace)
 * @param defaultContext - Default context if not found in URL params
 * @returns Object with storage key and context
 */
export function parseInternalFileUrl(
  fileUrl: string,
  defaultContext: StorageContext = 'general'
): { key: string; context: StorageContext } {
  const key = extractStorageKey(fileUrl)

  if (!key) {
    throw new Error('Could not extract storage key from internal file URL')
  }

  const url = new URL(fileUrl.startsWith('http') ? fileUrl : `http://localhost${fileUrl}`)
  const contextParam = url.searchParams.get('context')

  const context = (contextParam as StorageContext) || inferContextFromKey(key) || defaultContext

  return { key, context }
}
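
A hypothetical round trip through the parser, assuming the elided plain /api/files/serve/ branch of extractStorageKey decodes the trailing segment:

const { key, context } = parseInternalFileUrl('/api/files/serve/kb%2Fguide.pdf?context=knowledge-base')
// key === 'kb/guide.pdf', context === 'knowledge-base'
// Without the ?context= param, context falls back to inferContextFromKey(key), then defaultContext.
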
/**
 * Raw file input that can be converted to UserFile
 * Supports various file object formats from different sources
 */
export interface RawFileInput {
  id?: string
  key?: string
  path?: string
  url?: string
  name: string
  size: number
  type?: string
  uploadedAt?: string | Date
  expiresAt?: string | Date
  [key: string]: unknown // Allow additional properties for flexibility
}

/**
 * Converts a single raw file object to UserFile format
 * @param file - Raw file object
 * @param requestId - Request ID for logging
 * @param logger - Logger instance
 * @returns UserFile object
 * @throws Error if file has no storage key
 */
export function processSingleFileToUserFile(
  file: RawFileInput,
  requestId: string,
  logger: Logger
): UserFile {
  if (file.id && file.key && file.uploadedAt) {
    return file as UserFile
  }

  const storageKey = file.key || (file.path ? extractStorageKey(file.path) : null)

  if (!storageKey) {
    logger.warn(`[${requestId}] File has no storage key: ${file.name || 'unknown'}`)
    throw new Error(`File has no storage key: ${file.name || 'unknown'}`)
  }

  const userFile: UserFile = {
    id: file.id || `file-${Date.now()}`,
    name: file.name,
    url: file.url || file.path || '',
    size: file.size,
    type: file.type || 'application/octet-stream',
    key: storageKey,
    uploadedAt: file.uploadedAt
      ? typeof file.uploadedAt === 'string'
        ? file.uploadedAt
        : file.uploadedAt.toISOString()
      : new Date().toISOString(),
    expiresAt: file.expiresAt
      ? typeof file.expiresAt === 'string'
        ? file.expiresAt
        : file.expiresAt.toISOString()
      : new Date(Date.now() + 24 * 60 * 60 * 1000).toISOString(),
  }

  logger.info(`[${requestId}] Converted file to UserFile: ${userFile.name} (key: ${userFile.key})`)
  return userFile
}
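
A minimal, hypothetical invocation (logger and field values are placeholders, not from this diff):

const userFile = processSingleFileToUserFile(
  { name: 'report.csv', size: 2048, path: '/api/files/serve/s3/report.csv' },
  'req-123',
  logger
)
// -> key 'report.csv', type 'application/octet-stream' (default),
//    uploadedAt = now, expiresAt = now + 24h
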
/**
 * Converts raw file objects (from file-upload or variable references) to UserFile format
 * @param files - Array of raw file objects
 * @param requestId - Request ID for logging
 * @param logger - Logger instance
 * @returns Array of UserFile objects
 */
export function processFilesToUserFiles(
  files: RawFileInput[],
  requestId: string,
  logger: Logger
): UserFile[] {
  const userFiles: UserFile[] = []

  for (const file of files) {
    try {
      const userFile = processSingleFileToUserFile(file, requestId, logger)
      userFiles.push(userFile)
    } catch (error) {
      logger.warn(
        `[${requestId}] Skipping file: ${error instanceof Error ? error.message : 'Unknown error'}`
      )
    }
  }

  return userFiles
}
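
Because each conversion is wrapped in try/catch, one bad entry is logged and skipped instead of failing the batch (hypothetical inputs):

const files = processFilesToUserFiles(
  [
    { name: 'ok.txt', size: 10, key: 'ok.txt' },
    { name: 'broken.txt', size: 10 }, // no key or path: skipped with a warning
  ],
  'req-123',
  logger
)
// files.length === 1
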
/**
 * Sanitize a filename for use in storage metadata headers
 * Storage metadata headers must contain only ASCII printable characters (0x20-0x7E)
 * and cannot contain certain special characters
 */
export function sanitizeFilenameForMetadata(filename: string): string {
  return (
    filename
      // Remove non-ASCII characters (keep only printable ASCII 0x20-0x7E)
      .replace(/[^\x20-\x7E]/g, '')
      // Remove characters that are problematic in HTTP headers
      .replace(/["\\]/g, '')
      // Replace multiple spaces with single space
      .replace(/\s+/g, ' ')
      // Trim whitespace
      .trim() ||
    // Provide fallback if completely sanitized
    'file'
  )
}
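
Illustrative inputs (invented filenames):

sanitizeFilenameForMetadata('naïve café.pdf') // 'nave caf.pdf' (non-ASCII stripped)
sanitizeFilenameForMetadata('日本語.txt')      // '.txt'
sanitizeFilenameForMetadata('""')             // 'file' (fallback when nothing survives)
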
/**
 * Sanitize metadata values for storage providers
 * Removes non-printable ASCII characters and limits length
 * @param metadata Original metadata object
 * @param maxLength Maximum length per value (Azure Blob: 8000, S3: 2000)
 * @returns Sanitized metadata object
 */
export function sanitizeStorageMetadata(
  metadata: Record<string, string>,
  maxLength: number
): Record<string, string> {
  const sanitized: Record<string, string> = {}
  for (const [key, value] of Object.entries(metadata)) {
    const sanitizedValue = String(value)
      .replace(/[^\x20-\x7E]/g, '')
      .replace(/["\\]/g, '')
      .substring(0, maxLength)
    if (sanitizedValue) {
      sanitized[key] = sanitizedValue
    }
  }
  return sanitized
}
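
For example, with the S3 limit named above (metadata values invented):

sanitizeStorageMetadata({ originalName: 'naïve.pdf', note: '日本語' }, 2000)
// -> { originalName: 'nave.pdf' }  ('note' is dropped: nothing survives sanitization)
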
/**
 * Sanitize a file key/path for local storage
 * Removes dangerous characters and prevents path traversal
 * @param key Original file key/path
 * @returns Sanitized key safe for filesystem use
 */
export function sanitizeFileKey(key: string): string {
  return key.replace(/[^a-zA-Z0-9.-]/g, '_').replace(/\.\./g, '')
}
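
Note that '/' is outside the allowed set, so path separators are flattened to underscores before the '..' removal runs (hypothetical input):

sanitizeFileKey('../etc/passwd') // '.._etc_passwd' then '_etc_passwd'
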
/**
 * Extract clean filename from URL or path, stripping query parameters
 * Handles both internal serve URLs (/api/files/serve/...) and external URLs
 * @param urlOrPath URL or path string that may contain query parameters
 * @returns Clean filename without query parameters
 */
export function extractCleanFilename(urlOrPath: string): string {
  const withoutQuery = urlOrPath.split('?')[0]

  try {
    const url = new URL(
      withoutQuery.startsWith('http') ? withoutQuery : `http://localhost${withoutQuery}`
    )
    const pathname = url.pathname
    const filename = pathname.split('/').pop() || 'unknown'
    return decodeURIComponent(filename)
  } catch {
    const filename = withoutQuery.split('/').pop() || 'unknown'
    return decodeURIComponent(filename)
  }
}
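
Illustrative calls (URLs invented):

extractCleanFilename('/api/files/serve/s3/My%20Report.pdf?context=workspace') // 'My Report.pdf'
extractCleanFilename('https://cdn.example.com/a/b/photo.png')                 // 'photo.png'
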
/**
 * Extract workspaceId from execution file key pattern
 * Execution files have format: workspaceId/workflowId/executionId/filename
 * @param key File storage key
 * @returns workspaceId if key matches execution file pattern, null otherwise
 */
export function extractWorkspaceIdFromExecutionKey(key: string): string | null {
  const segments = key.split('/')
  if (segments.length >= 4) {
    const workspaceId = segments[0]
    if (workspaceId && /^[a-f0-9-]{36}$/.test(workspaceId)) {
      return workspaceId
    }
  }
  return null
}

/**
 * Construct viewer URL for a file
 * Viewer URL format: /workspace/{workspaceId}/files/{fileKey}/view
 * @param fileKey File storage key
 * @param workspaceId Optional workspace ID (will be extracted from key if not provided)
 * @returns Viewer URL string or null if workspaceId cannot be determined
 */
export function getViewerUrl(fileKey: string, workspaceId?: string): string | null {
  const resolvedWorkspaceId = workspaceId || extractWorkspaceIdFromExecutionKey(fileKey)

  if (!resolvedWorkspaceId) {
    return null
  }

  return `/workspace/${resolvedWorkspaceId}/files/${fileKey}/view`
}
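
The two helpers compose: for an execution key, the workspace ID is recovered from the key itself (UUIDs invented):

const key =
  '0b1c2d3e-4f50-6172-8394-a5b6c7d8e9f0/9e8d7c6b-5a49-3827-1605-f4e3d2c1b0a9/11111111-2222-3333-4444-555555555555/out.json'
getViewerUrl(key)                              // '/workspace/0b1c2d3e-4f50-6172-8394-a5b6c7d8e9f0/files/<key>/view'
getViewerUrl('1761769369858-x7k2q9-notes.txt') // null (no workspace ID recoverable)
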
@@ -1,3 +1,2 @@
-export * from './file-processing'
 export * from './file-utils'
 export * from './validation'

@@ -450,17 +450,23 @@ async function handleInternalRequest(
): Promise<ToolResponse> {
  const requestId = generateRequestId()

  const requestParams = formatRequestParams(tool, params)

  try {
    const baseUrl = getBaseUrl()
    const endpointUrl =
      typeof tool.request.url === 'function' ? tool.request.url(params) : tool.request.url

    const fullUrlObj = new URL(endpointUrl, baseUrl)
    const isInternalRoute = endpointUrl.startsWith('/api/')

    if (isInternalRoute) {
      const workflowId = params._context?.workflowId
      if (workflowId) {
        fullUrlObj.searchParams.set('workflowId', workflowId)
      }
    }

    const fullUrl = fullUrlObj.toString()

    // For custom tools, validate parameters on the client side before sending
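
The added branch means an internal route carries the caller's workflowId as a query parameter, which the file routes can then authorize against. Sketched with invented values:

const u = new URL('/api/files/serve/abc.txt', 'http://localhost:3000')
u.searchParams.set('workflowId', 'wf-42')
u.toString() // 'http://localhost:3000/api/files/serve/abc.txt?workflowId=wf-42'
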
packages/db/migrations/0102_eminent_amphibian.sql (new file, 19 lines)
@@ -0,0 +1,19 @@
CREATE TABLE "workspace_files" (
  "id" text PRIMARY KEY NOT NULL,
  "key" text NOT NULL,
  "user_id" text NOT NULL,
  "workspace_id" text,
  "context" text NOT NULL,
  "original_name" text NOT NULL,
  "content_type" text NOT NULL,
  "size" integer NOT NULL,
  "uploaded_at" timestamp DEFAULT now() NOT NULL,
  CONSTRAINT "workspace_files_key_unique" UNIQUE("key")
);
--> statement-breakpoint
ALTER TABLE "workspace_files" ADD CONSTRAINT "workspace_files_user_id_user_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."user"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "workspace_files" ADD CONSTRAINT "workspace_files_workspace_id_workspace_id_fk" FOREIGN KEY ("workspace_id") REFERENCES "public"."workspace"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
CREATE INDEX "workspace_files_key_idx" ON "workspace_files" USING btree ("key");--> statement-breakpoint
CREATE INDEX "workspace_files_user_id_idx" ON "workspace_files" USING btree ("user_id");--> statement-breakpoint
CREATE INDEX "workspace_files_workspace_id_idx" ON "workspace_files" USING btree ("workspace_id");--> statement-breakpoint
CREATE INDEX "workspace_files_context_idx" ON "workspace_files" USING btree ("context");

packages/db/migrations/meta/0102_snapshot.json (new file, 7250 lines; file diff suppressed because it is too large)

@@ -708,6 +708,13 @@
      "when": 1761631932261,
      "tag": "0101_missing_doc_processing",
      "breakpoints": true
    },
    {
      "idx": 102,
      "version": "7",
      "when": 1761769369858,
      "tag": "0102_eminent_amphibian",
      "breakpoints": true
    }
  ]
}

@@ -751,6 +751,29 @@ export const workspaceFile = pgTable(
  })
)

export const workspaceFiles = pgTable(
  'workspace_files',
  {
    id: text('id').primaryKey(),
    key: text('key').notNull().unique(),
    userId: text('user_id')
      .notNull()
      .references(() => user.id, { onDelete: 'cascade' }),
    workspaceId: text('workspace_id').references(() => workspace.id, { onDelete: 'cascade' }),
    context: text('context').notNull(), // 'workspace', 'copilot', 'chat', 'knowledge-base', 'profile-pictures', 'general', 'execution'
    originalName: text('original_name').notNull(),
    contentType: text('content_type').notNull(),
    size: integer('size').notNull(),
    uploadedAt: timestamp('uploaded_at').notNull().defaultNow(),
  },
  (table) => ({
    keyIdx: index('workspace_files_key_idx').on(table.key),
    userIdIdx: index('workspace_files_user_id_idx').on(table.userId),
    workspaceIdIdx: index('workspace_files_workspace_id_idx').on(table.workspaceId),
    contextIdx: index('workspace_files_context_idx').on(table.context),
  })
)

export const permissionTypeEnum = pgEnum('permission_type', ['admin', 'write', 'read'])

export const workspaceInvitationStatusEnum = pgEnum('workspace_invitation_status', [
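
The indexes above line up with the lookups the file routes need. A hedged sketch of one such query (the db handle, fileKey, and workspaceId are assumed, not part of this diff; and/eq come from drizzle-orm):

import { and, eq } from 'drizzle-orm'

// Resolve a stored file by its unique key, scoped to its workspace,
// before checking whether the requesting user may view it.
const rows = await db
  .select()
  .from(workspaceFiles)
  .where(and(eq(workspaceFiles.key, fileKey), eq(workspaceFiles.workspaceId, workspaceId)))
  .limit(1)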