diff --git a/apps/sim/app/api/organizations/[id]/workspaces/route.ts b/apps/sim/app/api/organizations/[id]/workspaces/route.ts
deleted file mode 100644
index 6669c8a8b..000000000
--- a/apps/sim/app/api/organizations/[id]/workspaces/route.ts
+++ /dev/null
@@ -1,204 +0,0 @@
-import { db } from '@sim/db'
-import { member, permissions, user, workspace } from '@sim/db/schema'
-import { createLogger } from '@sim/logger'
-import { and, eq, or } from 'drizzle-orm'
-import { type NextRequest, NextResponse } from 'next/server'
-import { getSession } from '@/lib/auth'
-
-const logger = createLogger('OrganizationWorkspacesAPI')
-
-/**
- * GET /api/organizations/[id]/workspaces
- * Get workspaces related to the organization with optional filtering
- * Query parameters:
- * - ?available=true - Only workspaces where user can invite others (admin permissions)
- * - ?member=userId - Workspaces where specific member has access
- */
-export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
-  try {
-    const session = await getSession()
-
-    if (!session?.user?.id) {
-      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
-    }
-
-    const { id: organizationId } = await params
-    const url = new URL(request.url)
-    const availableOnly = url.searchParams.get('available') === 'true'
-    const memberId = url.searchParams.get('member')
-
-    // Verify user is a member of this organization
-    const memberEntry = await db
-      .select()
-      .from(member)
-      .where(and(eq(member.organizationId, organizationId), eq(member.userId, session.user.id)))
-      .limit(1)
-
-    if (memberEntry.length === 0) {
-      return NextResponse.json(
-        {
-          error: 'Forbidden - Not a member of this organization',
-        },
-        { status: 403 }
-      )
-    }
-
-    const userRole = memberEntry[0].role
-    const hasAdminAccess = ['owner', 'admin'].includes(userRole)
-
-    if (availableOnly) {
-      // Get workspaces where user has admin permissions (can invite others)
-      const availableWorkspaces = await db
-        .select({
-          id: workspace.id,
-          name: workspace.name,
-          ownerId: workspace.ownerId,
-          createdAt: workspace.createdAt,
-          isOwner: eq(workspace.ownerId, session.user.id),
-          permissionType: permissions.permissionType,
-        })
-        .from(workspace)
-        .leftJoin(
-          permissions,
-          and(
-            eq(permissions.entityType, 'workspace'),
-            eq(permissions.entityId, workspace.id),
-            eq(permissions.userId, session.user.id)
-          )
-        )
-        .where(
-          or(
-            // User owns the workspace
-            eq(workspace.ownerId, session.user.id),
-            // User has admin permission on the workspace
-            and(
-              eq(permissions.userId, session.user.id),
-              eq(permissions.entityType, 'workspace'),
-              eq(permissions.permissionType, 'admin')
-            )
-          )
-        )
-
-      // Filter and format the results
-      const workspacesWithInvitePermission = availableWorkspaces
-        .filter((workspace) => {
-          // Include if user owns the workspace OR has admin permission
-          return workspace.isOwner || workspace.permissionType === 'admin'
-        })
-        .map((workspace) => ({
-          id: workspace.id,
-          name: workspace.name,
-          isOwner: workspace.isOwner,
-          canInvite: true, // All returned workspaces have invite permission
-          createdAt: workspace.createdAt,
-        }))
-
-      logger.info('Retrieved available workspaces for organization member', {
-        organizationId,
-        userId: session.user.id,
-        workspaceCount: workspacesWithInvitePermission.length,
-      })
-
-      return NextResponse.json({
-        success: true,
-        data: {
-          workspaces: workspacesWithInvitePermission,
-          totalCount: workspacesWithInvitePermission.length,
-          filter: 'available',
-        },
-      })
-    }
-
-    if (memberId && hasAdminAccess) {
-      // Get workspaces where specific member has access (admin only)
-      const memberWorkspaces = await db
-        .select({
-          id: workspace.id,
-          name: workspace.name,
-          ownerId: workspace.ownerId,
-          isOwner: eq(workspace.ownerId, memberId),
-          permissionType: permissions.permissionType,
-          createdAt: permissions.createdAt,
-        })
-        .from(workspace)
-        .leftJoin(
-          permissions,
-          and(
-            eq(permissions.entityType, 'workspace'),
-            eq(permissions.entityId, workspace.id),
-            eq(permissions.userId, memberId)
-          )
-        )
-        .where(
-          or(
-            // Member owns the workspace
-            eq(workspace.ownerId, memberId),
-            // Member has permissions on the workspace
-            and(eq(permissions.userId, memberId), eq(permissions.entityType, 'workspace'))
-          )
-        )
-
-      const formattedWorkspaces = memberWorkspaces.map((workspace) => ({
-        id: workspace.id,
-        name: workspace.name,
-        isOwner: workspace.isOwner,
-        permission: workspace.permissionType,
-        joinedAt: workspace.createdAt,
-        createdAt: workspace.createdAt,
-      }))
-
-      return NextResponse.json({
-        success: true,
-        data: {
-          workspaces: formattedWorkspaces,
-          totalCount: formattedWorkspaces.length,
-          filter: 'member',
-          memberId,
-        },
-      })
-    }
-
-    // Default: Get all workspaces (basic info only for regular members)
-    if (!hasAdminAccess) {
-      return NextResponse.json({
-        success: true,
-        data: {
-          workspaces: [],
-          totalCount: 0,
-          message: 'Workspace access information is only available to organization admins',
-        },
-      })
-    }
-
-    // For admins: Get summary of all workspaces
-    const allWorkspaces = await db
-      .select({
-        id: workspace.id,
-        name: workspace.name,
-        ownerId: workspace.ownerId,
-        createdAt: workspace.createdAt,
-        ownerName: user.name,
-      })
-      .from(workspace)
-      .leftJoin(user, eq(workspace.ownerId, user.id))
-
-    return NextResponse.json({
-      success: true,
-      data: {
-        workspaces: allWorkspaces,
-        totalCount: allWorkspaces.length,
-        filter: 'all',
-      },
-      userRole,
-      hasAdminAccess,
-    })
-  } catch (error) {
-    logger.error('Failed to get organization workspaces', { error })
-    return NextResponse.json(
-      {
-        error: 'Internal server error',
-      },
-      { status: 500 }
-    )
-  }
-}
diff --git a/apps/sim/app/api/tools/supabase/storage-upload/route.ts b/apps/sim/app/api/tools/supabase/storage-upload/route.ts
new file mode 100644
index 000000000..46122fc19
--- /dev/null
+++ b/apps/sim/app/api/tools/supabase/storage-upload/route.ts
@@ -0,0 +1,257 @@
+import { createLogger } from '@sim/logger'
+import { type NextRequest, NextResponse } from 'next/server'
+import { z } from 'zod'
+import { checkInternalAuth } from '@/lib/auth/hybrid'
+import { generateRequestId } from '@/lib/core/utils/request'
+import { processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
+import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
+
+export const dynamic = 'force-dynamic'
+
+const logger = createLogger('SupabaseStorageUploadAPI')
+
+const SupabaseStorageUploadSchema = z.object({
+  projectId: z.string().min(1, 'Project ID is required'),
+  apiKey: z.string().min(1, 'API key is required'),
+  bucket: z.string().min(1, 'Bucket name is required'),
+  fileName: z.string().min(1, 'File name is required'),
+  path: z.string().optional().nullable(),
+  fileData: z.any(),
+  contentType: z.string().optional().nullable(),
+  upsert: z.boolean().optional().default(false),
+})
+
+export async function POST(request: NextRequest) {
+  const requestId = generateRequestId()
+
+  try {
+    const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
+
+    if (!authResult.success) {
+      logger.warn(
+        `[${requestId}] Unauthorized Supabase storage upload attempt: ${authResult.error}`
+      )
+      return NextResponse.json(
+        {
+          success: false,
+          error: authResult.error || 'Authentication required',
+        },
+        { status: 401 }
+      )
+    }
+
+    logger.info(
+      `[${requestId}] Authenticated Supabase storage upload request via ${authResult.authType}`,
+      {
+        userId: authResult.userId,
+      }
+    )
+
+    const body = await request.json()
+    const validatedData = SupabaseStorageUploadSchema.parse(body)
+
+    const fileData = validatedData.fileData
+    const isStringInput = typeof fileData === 'string'
+
+    logger.info(`[${requestId}] Uploading to Supabase Storage`, {
+      bucket: validatedData.bucket,
+      fileName: validatedData.fileName,
+      path: validatedData.path,
+      fileDataType: isStringInput ? 'string' : 'object',
+    })
+
+    if (!fileData) {
+      return NextResponse.json(
+        {
+          success: false,
+          error: 'fileData is required',
+        },
+        { status: 400 }
+      )
+    }
+
+    let uploadBody: Buffer
+    let uploadContentType: string | undefined
+
+    if (isStringInput) {
+      let content = fileData as string
+
+      const dataUrlMatch = content.match(/^data:([^;]+);base64,(.+)$/s)
+      if (dataUrlMatch) {
+        const [, mimeType, base64Data] = dataUrlMatch
+        content = base64Data
+        if (!validatedData.contentType) {
+          uploadContentType = mimeType
+        }
+        logger.info(`[${requestId}] Extracted base64 from data URL (MIME: ${mimeType})`)
+      }
+
+      const cleanedContent = content.replace(/[\s\r\n]/g, '')
+      const isLikelyBase64 = /^[A-Za-z0-9+/]*={0,2}$/.test(cleanedContent)
+
+      if (isLikelyBase64 && cleanedContent.length >= 4) {
+        try {
+          uploadBody = Buffer.from(cleanedContent, 'base64')
+
+          const expectedMinSize = Math.floor(cleanedContent.length * 0.7)
+          const expectedMaxSize = Math.ceil(cleanedContent.length * 0.8)
+
+          if (
+            uploadBody.length >= expectedMinSize &&
+            uploadBody.length <= expectedMaxSize &&
+            uploadBody.length > 0
+          ) {
+            logger.info(
+              `[${requestId}] Decoded base64 content: ${cleanedContent.length} chars -> ${uploadBody.length} bytes`
+            )
+          } else {
+            const reEncoded = uploadBody.toString('base64')
+            if (reEncoded !== cleanedContent) {
+              logger.info(
+                `[${requestId}] Content looked like base64 but re-encoding didn't match, using as plain text`
+              )
+              uploadBody = Buffer.from(content, 'utf-8')
+            } else {
+              logger.info(
+                `[${requestId}] Decoded base64 content (verified): ${uploadBody.length} bytes`
+              )
+            }
+          }
+        } catch (decodeError) {
+          logger.info(
+            `[${requestId}] Failed to decode as base64, using as plain text: ${decodeError}`
+          )
+          uploadBody = Buffer.from(content, 'utf-8')
+        }
+      } else {
+        uploadBody = Buffer.from(content, 'utf-8')
+        logger.info(`[${requestId}] Using content as plain text (${uploadBody.length} bytes)`)
+      }
+
+      uploadContentType =
+        uploadContentType || validatedData.contentType || 'application/octet-stream'
+    } else {
+      const rawFile = fileData
+      logger.info(`[${requestId}] Processing file object: ${rawFile.name || 'unknown'}`)
+
+      let userFile
+      try {
+        userFile = processSingleFileToUserFile(rawFile, requestId, logger)
+      } catch (error) {
+        return NextResponse.json(
+          {
+            success: false,
+            error: error instanceof Error ? error.message : 'Failed to process file',
+          },
+          { status: 400 }
+        )
+      }
+
+      const buffer = await downloadFileFromStorage(userFile, requestId, logger)
+
+      uploadBody = buffer
+      uploadContentType = validatedData.contentType || userFile.type || 'application/octet-stream'
+    }
+
+    let fullPath = validatedData.fileName
+    if (validatedData.path) {
+      const folderPath = validatedData.path.endsWith('/')
+        ? validatedData.path
+        : `${validatedData.path}/`
+      fullPath = `${folderPath}${validatedData.fileName}`
+    }
+
+    const supabaseUrl = `https://${validatedData.projectId}.supabase.co/storage/v1/object/${validatedData.bucket}/${fullPath}`
+
+    const headers: Record<string, string> = {
+      apikey: validatedData.apiKey,
+      Authorization: `Bearer ${validatedData.apiKey}`,
+      'Content-Type': uploadContentType,
+    }
+
+    if (validatedData.upsert) {
+      headers['x-upsert'] = 'true'
+    }
+
+    logger.info(`[${requestId}] Sending to Supabase: ${supabaseUrl}`, {
+      contentType: uploadContentType,
+      bodySize: uploadBody.length,
+      upsert: validatedData.upsert,
+    })
+
+    const response = await fetch(supabaseUrl, {
+      method: 'POST',
+      headers,
+      body: new Uint8Array(uploadBody),
+    })
+
+    if (!response.ok) {
+      const errorText = await response.text()
+      let errorData
+      try {
+        errorData = JSON.parse(errorText)
+      } catch {
+        errorData = { message: errorText }
+      }
+
+      logger.error(`[${requestId}] Supabase Storage upload failed:`, {
+        status: response.status,
+        statusText: response.statusText,
+        error: errorData,
+      })
+
+      return NextResponse.json(
+        {
+          success: false,
+          error: errorData.message || errorData.error || `Upload failed: ${response.statusText}`,
+          details: errorData,
+        },
+        { status: response.status }
+      )
+    }
+
+    const result = await response.json()
+
+    logger.info(`[${requestId}] File uploaded successfully to Supabase Storage`, {
+      bucket: validatedData.bucket,
+      path: fullPath,
+    })
+
+    const publicUrl = `https://${validatedData.projectId}.supabase.co/storage/v1/object/public/${validatedData.bucket}/${fullPath}`
+
+    return NextResponse.json({
+      success: true,
+      output: {
+        message: 'Successfully uploaded file to storage',
+        results: {
+          ...result,
+          path: fullPath,
+          bucket: validatedData.bucket,
+          publicUrl,
+        },
+      },
+    })
+  } catch (error) {
+    if (error instanceof z.ZodError) {
+      logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
+      return NextResponse.json(
+        {
+          success: false,
+          error: 'Invalid request data',
+          details: error.errors,
+        },
+        { status: 400 }
+      )
+    }
+
+    logger.error(`[${requestId}] Error uploading to Supabase Storage:`, error)
+
+    return NextResponse.json(
+      {
+        success: false,
+        error: error instanceof Error ? error.message : 'Internal server error',
+      },
+      { status: 500 }
+    )
+  }
+}
diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/sub-block.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/sub-block.tsx
index 017a51c0b..d3bf23e40 100644
--- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/sub-block.tsx
+++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/sub-block.tsx
@@ -338,6 +338,11 @@ const arePropsEqual = (prevProps: SubBlockProps, nextProps: SubBlockProps): bool
   const configEqual =
     prevProps.config.id === nextProps.config.id && prevProps.config.type === nextProps.config.type
 
+  const canonicalToggleEqual =
+    !!prevProps.canonicalToggle === !!nextProps.canonicalToggle &&
+    prevProps.canonicalToggle?.mode === nextProps.canonicalToggle?.mode &&
+    prevProps.canonicalToggle?.disabled === nextProps.canonicalToggle?.disabled
+
   return (
     prevProps.blockId === nextProps.blockId &&
     configEqual &&
@@ -346,8 +351,7 @@ const arePropsEqual = (prevProps: SubBlockProps, nextProps: SubBlockProps): bool
     prevProps.disabled === nextProps.disabled &&
     prevProps.fieldDiffStatus === nextProps.fieldDiffStatus &&
     prevProps.allowExpandInPreview === nextProps.allowExpandInPreview &&
-    prevProps.canonicalToggle?.mode === nextProps.canonicalToggle?.mode &&
-    prevProps.canonicalToggle?.disabled === nextProps.canonicalToggle?.disabled
+    canonicalToggleEqual
   )
 }
 
diff --git a/apps/sim/blocks/blocks/supabase.ts b/apps/sim/blocks/blocks/supabase.ts
index e8254a66a..8b5fc75f7 100644
--- a/apps/sim/blocks/blocks/supabase.ts
+++ b/apps/sim/blocks/blocks/supabase.ts
@@ -661,12 +661,25 @@ Return ONLY the PostgREST filter expression - no explanations, no markdown, no e
       placeholder: 'folder/subfolder/',
       condition: { field: 'operation', value: 'storage_upload' },
     },
+    {
+      id: 'file',
+      title: 'File',
+      type: 'file-upload',
+      canonicalParamId: 'fileData',
+      placeholder: 'Upload file to storage',
+      condition: { field: 'operation', value: 'storage_upload' },
+      mode: 'basic',
+      multiple: false,
+      required: true,
+    },
     {
       id: 'fileContent',
       title: 'File Content',
       type: 'code',
+      canonicalParamId: 'fileData',
       placeholder: 'Base64 encoded for binary files, or plain text',
       condition: { field: 'operation', value: 'storage_upload' },
+      mode: 'advanced',
       required: true,
     },
     {
diff --git a/apps/sim/hooks/use-collaborative-workflow.ts b/apps/sim/hooks/use-collaborative-workflow.ts
index d559969af..ac407625f 100644
--- a/apps/sim/hooks/use-collaborative-workflow.ts
+++ b/apps/sim/hooks/use-collaborative-workflow.ts
@@ -951,14 +951,25 @@ export function useCollaborativeWorkflow() {
 
   const collaborativeSetBlockCanonicalMode = useCallback(
     (id: string, canonicalId: string, canonicalMode: 'basic' | 'advanced') => {
-      executeQueuedOperation(
-        BLOCK_OPERATIONS.UPDATE_CANONICAL_MODE,
-        OPERATION_TARGETS.BLOCK,
-        { id, canonicalId, canonicalMode },
-        () => useWorkflowStore.getState().setBlockCanonicalMode(id, canonicalId, canonicalMode)
-      )
+      if (isBaselineDiffView) {
+        return
+      }
+
+      useWorkflowStore.getState().setBlockCanonicalMode(id, canonicalId, canonicalMode)
+
+      const operationId = crypto.randomUUID()
+      addToQueue({
+        id: operationId,
+        operation: {
+          operation: BLOCK_OPERATIONS.UPDATE_CANONICAL_MODE,
+          target: OPERATION_TARGETS.BLOCK,
+          payload: { id, canonicalId, canonicalMode },
+        },
+        workflowId: activeWorkflowId || '',
+        userId: session?.user?.id || 'unknown',
+      })
     },
-    [executeQueuedOperation]
+    [isBaselineDiffView, activeWorkflowId, addToQueue, session?.user?.id]
   )
 
   const collaborativeBatchToggleBlockHandles = useCallback(
diff --git a/apps/sim/stores/operation-queue/store.ts b/apps/sim/stores/operation-queue/store.ts
index 07e9381ce..b5a23d8d4 100644
--- a/apps/sim/stores/operation-queue/store.ts
+++ b/apps/sim/stores/operation-queue/store.ts
@@ -27,6 +27,9 @@ export function registerEmitFunctions(
   emitSubblockUpdate = subblockEmit
   emitVariableUpdate = variableEmit
   currentRegisteredWorkflowId = workflowId
+  if (workflowId) {
+    useOperationQueueStore.getState().processNextOperation()
+  }
 }
 
 let currentRegisteredWorkflowId: string | null = null
@@ -262,16 +265,14 @@ export const useOperationQueueStore = create((set, get) =>
      return
    }
 
-    const nextOperation = currentRegisteredWorkflowId
-      ? state.operations.find(
-          (op) => op.status === 'pending' && op.workflowId === currentRegisteredWorkflowId
-        )
-      : state.operations.find((op) => op.status === 'pending')
-    if (!nextOperation) {
+    if (!currentRegisteredWorkflowId) {
       return
    }
 
-    if (currentRegisteredWorkflowId && nextOperation.workflowId !== currentRegisteredWorkflowId) {
+    const nextOperation = state.operations.find(
+      (op) => op.status === 'pending' && op.workflowId === currentRegisteredWorkflowId
+    )
+    if (!nextOperation) {
      return
    }
 
diff --git a/apps/sim/tools/supabase/storage_upload.ts b/apps/sim/tools/supabase/storage_upload.ts
index d01faab30..35ba036c5 100644
--- a/apps/sim/tools/supabase/storage_upload.ts
+++ b/apps/sim/tools/supabase/storage_upload.ts
@@ -38,11 +38,12 @@ export const storageUploadTool: ToolConfig<
       visibility: 'user-or-llm',
       description: 'Optional folder path (e.g., "folder/subfolder/")',
     },
-    fileContent: {
-      type: 'string',
+    fileData: {
+      type: 'json',
       required: true,
       visibility: 'user-or-llm',
-      description: 'The file content (base64 encoded for binary files, or plain text)',
+      description:
+        'File to upload - UserFile object (basic mode) or string content (advanced mode: base64 or plain text). Supports data URLs.',
     },
     contentType: {
       type: 'string',
@@ -65,65 +66,28 @@ export const storageUploadTool: ToolConfig<
   },
 
   request: {
-    url: (params) => {
-      // Combine folder path and fileName, ensuring proper formatting
-      let fullPath = params.fileName
-      if (params.path) {
-        // Ensure path ends with / and doesn't have double slashes
-        const folderPath = params.path.endsWith('/') ? params.path : `${params.path}/`
-        fullPath = `${folderPath}${params.fileName}`
-      }
-      return `https://${params.projectId}.supabase.co/storage/v1/object/${params.bucket}/${fullPath}`
-    },
+    url: '/api/tools/supabase/storage-upload',
     method: 'POST',
-    headers: (params) => {
-      const headers: Record<string, string> = {
-        apikey: params.apiKey,
-        Authorization: `Bearer ${params.apiKey}`,
-      }
-
-      if (params.contentType) {
-        headers['Content-Type'] = params.contentType
-      }
-
-      if (params.upsert) {
-        headers['x-upsert'] = 'true'
-      }
-
-      return headers
-    },
-    body: (params) => {
-      // Return the file content wrapped in an object
-      // The actual upload will need to handle this appropriately
-      return {
-        content: params.fileContent,
-      }
-    },
-  },
-
-  transformResponse: async (response: Response) => {
-    let data
-    try {
-      data = await response.json()
-    } catch (parseError) {
-      throw new Error(`Failed to parse Supabase storage upload response: ${parseError}`)
-    }
-
-    return {
-      success: true,
-      output: {
-        message: 'Successfully uploaded file to storage',
-        results: data,
-      },
-      error: undefined,
-    }
+    headers: () => ({
+      'Content-Type': 'application/json',
+    }),
+    body: (params) => ({
+      projectId: params.projectId,
+      apiKey: params.apiKey,
+      bucket: params.bucket,
+      fileName: params.fileName,
+      path: params.path,
+      fileData: params.fileData,
+      contentType: params.contentType,
+      upsert: params.upsert,
+    }),
   },
 
   outputs: {
     message: { type: 'string', description: 'Operation status message' },
     results: {
       type: 'object',
-      description: 'Upload result including file path and metadata',
+      description: 'Upload result including file path, bucket, and public URL',
     },
   },
 }
diff --git a/apps/sim/tools/supabase/types.ts b/apps/sim/tools/supabase/types.ts
index 2ccb6232b..dd810a395 100644
--- a/apps/sim/tools/supabase/types.ts
+++ b/apps/sim/tools/supabase/types.ts
@@ -136,7 +136,7 @@ export interface SupabaseStorageUploadParams {
   bucket: string
   fileName: string
   path?: string
-  fileContent: string
+  fileData: any // UserFile object (basic mode) or string (advanced mode: base64/plain text)
   contentType?: string
   upsert?: boolean
 }