Mirror of https://github.com/simstudioai/sim.git, synced 2026-01-26 07:18:38 -05:00

Compare commits: fix/copilo ... fix/hackat

4 Commits
| Author | SHA1 | Date |
|---|---|---|
|  | 4b0b0d03c9 |  |
|  | 2f297686f6 |  |
|  | a6adbafe16 |  |
|  | 19cde17eb8 |  |
@@ -1,204 +0,0 @@ (file deleted)
import { db } from '@sim/db'
import { member, permissions, user, workspace } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, or } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'

const logger = createLogger('OrganizationWorkspacesAPI')

/**
 * GET /api/organizations/[id]/workspaces
 * Get workspaces related to the organization with optional filtering
 * Query parameters:
 * - ?available=true - Only workspaces where user can invite others (admin permissions)
 * - ?member=userId - Workspaces where specific member has access
 */
export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
  try {
    const session = await getSession()

    if (!session?.user?.id) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const { id: organizationId } = await params
    const url = new URL(request.url)
    const availableOnly = url.searchParams.get('available') === 'true'
    const memberId = url.searchParams.get('member')

    // Verify user is a member of this organization
    const memberEntry = await db
      .select()
      .from(member)
      .where(and(eq(member.organizationId, organizationId), eq(member.userId, session.user.id)))
      .limit(1)

    if (memberEntry.length === 0) {
      return NextResponse.json(
        {
          error: 'Forbidden - Not a member of this organization',
        },
        { status: 403 }
      )
    }

    const userRole = memberEntry[0].role
    const hasAdminAccess = ['owner', 'admin'].includes(userRole)

    if (availableOnly) {
      // Get workspaces where user has admin permissions (can invite others)
      const availableWorkspaces = await db
        .select({
          id: workspace.id,
          name: workspace.name,
          ownerId: workspace.ownerId,
          createdAt: workspace.createdAt,
          isOwner: eq(workspace.ownerId, session.user.id),
          permissionType: permissions.permissionType,
        })
        .from(workspace)
        .leftJoin(
          permissions,
          and(
            eq(permissions.entityType, 'workspace'),
            eq(permissions.entityId, workspace.id),
            eq(permissions.userId, session.user.id)
          )
        )
        .where(
          or(
            // User owns the workspace
            eq(workspace.ownerId, session.user.id),
            // User has admin permission on the workspace
            and(
              eq(permissions.userId, session.user.id),
              eq(permissions.entityType, 'workspace'),
              eq(permissions.permissionType, 'admin')
            )
          )
        )

      // Filter and format the results
      const workspacesWithInvitePermission = availableWorkspaces
        .filter((workspace) => {
          // Include if user owns the workspace OR has admin permission
          return workspace.isOwner || workspace.permissionType === 'admin'
        })
        .map((workspace) => ({
          id: workspace.id,
          name: workspace.name,
          isOwner: workspace.isOwner,
          canInvite: true, // All returned workspaces have invite permission
          createdAt: workspace.createdAt,
        }))

      logger.info('Retrieved available workspaces for organization member', {
        organizationId,
        userId: session.user.id,
        workspaceCount: workspacesWithInvitePermission.length,
      })

      return NextResponse.json({
        success: true,
        data: {
          workspaces: workspacesWithInvitePermission,
          totalCount: workspacesWithInvitePermission.length,
          filter: 'available',
        },
      })
    }

    if (memberId && hasAdminAccess) {
      // Get workspaces where specific member has access (admin only)
      const memberWorkspaces = await db
        .select({
          id: workspace.id,
          name: workspace.name,
          ownerId: workspace.ownerId,
          isOwner: eq(workspace.ownerId, memberId),
          permissionType: permissions.permissionType,
          createdAt: permissions.createdAt,
        })
        .from(workspace)
        .leftJoin(
          permissions,
          and(
            eq(permissions.entityType, 'workspace'),
            eq(permissions.entityId, workspace.id),
            eq(permissions.userId, memberId)
          )
        )
        .where(
          or(
            // Member owns the workspace
            eq(workspace.ownerId, memberId),
            // Member has permissions on the workspace
            and(eq(permissions.userId, memberId), eq(permissions.entityType, 'workspace'))
          )
        )

      const formattedWorkspaces = memberWorkspaces.map((workspace) => ({
        id: workspace.id,
        name: workspace.name,
        isOwner: workspace.isOwner,
        permission: workspace.permissionType,
        joinedAt: workspace.createdAt,
        createdAt: workspace.createdAt,
      }))

      return NextResponse.json({
        success: true,
        data: {
          workspaces: formattedWorkspaces,
          totalCount: formattedWorkspaces.length,
          filter: 'member',
          memberId,
        },
      })
    }

    // Default: Get all workspaces (basic info only for regular members)
    if (!hasAdminAccess) {
      return NextResponse.json({
        success: true,
        data: {
          workspaces: [],
          totalCount: 0,
          message: 'Workspace access information is only available to organization admins',
        },
      })
    }

    // For admins: Get summary of all workspaces
    const allWorkspaces = await db
      .select({
        id: workspace.id,
        name: workspace.name,
        ownerId: workspace.ownerId,
        createdAt: workspace.createdAt,
        ownerName: user.name,
      })
      .from(workspace)
      .leftJoin(user, eq(workspace.ownerId, user.id))

    return NextResponse.json({
      success: true,
      data: {
        workspaces: allWorkspaces,
        totalCount: allWorkspaces.length,
        filter: 'all',
      },
      userRole,
      hasAdminAccess,
    })
  } catch (error) {
    logger.error('Failed to get organization workspaces', { error })
    return NextResponse.json(
      {
        error: 'Internal server error',
      },
      { status: 500 }
    )
  }
}
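For context, a minimal sketch of how a client could have exercised the removed endpoint's query parameters; the organization ID and response handling are illustrative, and the call relies on the session cookie that getSession() reads server-side:

// Hypothetical client call; 'org_123' is a placeholder organization ID.
const res = await fetch('/api/organizations/org_123/workspaces?available=true', {
  credentials: 'include', // send the auth session cookie
})
const { data } = await res.json()
// data.workspaces -> [{ id, name, isOwner, canInvite: true, createdAt }, ...]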
apps/sim/app/api/tools/supabase/storage-upload/route.ts (new file, 257 lines)
@@ -0,0 +1,257 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'

export const dynamic = 'force-dynamic'

const logger = createLogger('SupabaseStorageUploadAPI')

const SupabaseStorageUploadSchema = z.object({
  projectId: z.string().min(1, 'Project ID is required'),
  apiKey: z.string().min(1, 'API key is required'),
  bucket: z.string().min(1, 'Bucket name is required'),
  fileName: z.string().min(1, 'File name is required'),
  path: z.string().optional().nullable(),
  fileData: z.any(),
  contentType: z.string().optional().nullable(),
  upsert: z.boolean().optional().default(false),
})

export async function POST(request: NextRequest) {
  const requestId = generateRequestId()

  try {
    const authResult = await checkInternalAuth(request, { requireWorkflowId: false })

    if (!authResult.success) {
      logger.warn(
        `[${requestId}] Unauthorized Supabase storage upload attempt: ${authResult.error}`
      )
      return NextResponse.json(
        {
          success: false,
          error: authResult.error || 'Authentication required',
        },
        { status: 401 }
      )
    }

    logger.info(
      `[${requestId}] Authenticated Supabase storage upload request via ${authResult.authType}`,
      {
        userId: authResult.userId,
      }
    )

    const body = await request.json()
    const validatedData = SupabaseStorageUploadSchema.parse(body)

    const fileData = validatedData.fileData
    const isStringInput = typeof fileData === 'string'

    logger.info(`[${requestId}] Uploading to Supabase Storage`, {
      bucket: validatedData.bucket,
      fileName: validatedData.fileName,
      path: validatedData.path,
      fileDataType: isStringInput ? 'string' : 'object',
    })

    if (!fileData) {
      return NextResponse.json(
        {
          success: false,
          error: 'fileData is required',
        },
        { status: 400 }
      )
    }

    let uploadBody: Buffer
    let uploadContentType: string | undefined

    if (isStringInput) {
      let content = fileData as string

      const dataUrlMatch = content.match(/^data:([^;]+);base64,(.+)$/s)
      if (dataUrlMatch) {
        const [, mimeType, base64Data] = dataUrlMatch
        content = base64Data
        if (!validatedData.contentType) {
          uploadContentType = mimeType
        }
        logger.info(`[${requestId}] Extracted base64 from data URL (MIME: ${mimeType})`)
      }

      const cleanedContent = content.replace(/[\s\r\n]/g, '')
      const isLikelyBase64 = /^[A-Za-z0-9+/]*={0,2}$/.test(cleanedContent)

      if (isLikelyBase64 && cleanedContent.length >= 4) {
        try {
          uploadBody = Buffer.from(cleanedContent, 'base64')

          const expectedMinSize = Math.floor(cleanedContent.length * 0.7)
          const expectedMaxSize = Math.ceil(cleanedContent.length * 0.8)

          if (
            uploadBody.length >= expectedMinSize &&
            uploadBody.length <= expectedMaxSize &&
            uploadBody.length > 0
          ) {
            logger.info(
              `[${requestId}] Decoded base64 content: ${cleanedContent.length} chars -> ${uploadBody.length} bytes`
            )
          } else {
            const reEncoded = uploadBody.toString('base64')
            if (reEncoded !== cleanedContent) {
              logger.info(
                `[${requestId}] Content looked like base64 but re-encoding didn't match, using as plain text`
              )
              uploadBody = Buffer.from(content, 'utf-8')
            } else {
              logger.info(
                `[${requestId}] Decoded base64 content (verified): ${uploadBody.length} bytes`
              )
            }
          }
        } catch (decodeError) {
          logger.info(
            `[${requestId}] Failed to decode as base64, using as plain text: ${decodeError}`
          )
          uploadBody = Buffer.from(content, 'utf-8')
        }
      } else {
        uploadBody = Buffer.from(content, 'utf-8')
        logger.info(`[${requestId}] Using content as plain text (${uploadBody.length} bytes)`)
      }

      uploadContentType =
        uploadContentType || validatedData.contentType || 'application/octet-stream'
    } else {
      const rawFile = fileData
      logger.info(`[${requestId}] Processing file object: ${rawFile.name || 'unknown'}`)

      let userFile
      try {
        userFile = processSingleFileToUserFile(rawFile, requestId, logger)
      } catch (error) {
        return NextResponse.json(
          {
            success: false,
            error: error instanceof Error ? error.message : 'Failed to process file',
          },
          { status: 400 }
        )
      }

      const buffer = await downloadFileFromStorage(userFile, requestId, logger)

      uploadBody = buffer
      uploadContentType = validatedData.contentType || userFile.type || 'application/octet-stream'
    }

    let fullPath = validatedData.fileName
    if (validatedData.path) {
      const folderPath = validatedData.path.endsWith('/')
        ? validatedData.path
        : `${validatedData.path}/`
      fullPath = `${folderPath}${validatedData.fileName}`
    }

    const supabaseUrl = `https://${validatedData.projectId}.supabase.co/storage/v1/object/${validatedData.bucket}/${fullPath}`

    const headers: Record<string, string> = {
      apikey: validatedData.apiKey,
      Authorization: `Bearer ${validatedData.apiKey}`,
      'Content-Type': uploadContentType,
    }

    if (validatedData.upsert) {
      headers['x-upsert'] = 'true'
    }

    logger.info(`[${requestId}] Sending to Supabase: ${supabaseUrl}`, {
      contentType: uploadContentType,
      bodySize: uploadBody.length,
      upsert: validatedData.upsert,
    })

    const response = await fetch(supabaseUrl, {
      method: 'POST',
      headers,
      body: new Uint8Array(uploadBody),
    })

    if (!response.ok) {
      const errorText = await response.text()
      let errorData
      try {
        errorData = JSON.parse(errorText)
      } catch {
        errorData = { message: errorText }
      }

      logger.error(`[${requestId}] Supabase Storage upload failed:`, {
        status: response.status,
        statusText: response.statusText,
        error: errorData,
      })

      return NextResponse.json(
        {
          success: false,
          error: errorData.message || errorData.error || `Upload failed: ${response.statusText}`,
          details: errorData,
        },
        { status: response.status }
      )
    }

    const result = await response.json()

    logger.info(`[${requestId}] File uploaded successfully to Supabase Storage`, {
      bucket: validatedData.bucket,
      path: fullPath,
    })

    const publicUrl = `https://${validatedData.projectId}.supabase.co/storage/v1/object/public/${validatedData.bucket}/${fullPath}`

    return NextResponse.json({
      success: true,
      output: {
        message: 'Successfully uploaded file to storage',
        results: {
          ...result,
          path: fullPath,
          bucket: validatedData.bucket,
          publicUrl,
        },
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
      return NextResponse.json(
        {
          success: false,
          error: 'Invalid request data',
          details: error.errors,
        },
        { status: 400 }
      )
    }

    logger.error(`[${requestId}] Error uploading to Supabase Storage:`, error)

    return NextResponse.json(
      {
        success: false,
        error: error instanceof Error ? error.message : 'Internal server error',
      },
      { status: 500 }
    )
  }
}
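As an illustration of the decoding branches above, here is a payload the new route accepts in advanced (string) mode; the project ID and key are placeholders, and the data URL exercises both the MIME extraction and the base64 verification path:

// Hypothetical request body for POST /api/tools/supabase/storage-upload.
const payload = {
  projectId: 'myproject',     // placeholder Supabase project ref
  apiKey: 'service-role-key', // placeholder key
  bucket: 'avatars',
  fileName: 'logo.png',
  path: 'images/',
  fileData: 'data:image/png;base64,iVBORw0KGgoAAA...', // truncated for brevity
  upsert: true,
}
// The route decodes the data URL, infers contentType 'image/png',
// and uploads to images/logo.png in bucket 'avatars'.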
@@ -338,6 +338,11 @@ const arePropsEqual = (prevProps: SubBlockProps, nextProps: SubBlockProps): bool
   const configEqual =
     prevProps.config.id === nextProps.config.id && prevProps.config.type === nextProps.config.type
 
+  const canonicalToggleEqual =
+    !!prevProps.canonicalToggle === !!nextProps.canonicalToggle &&
+    prevProps.canonicalToggle?.mode === nextProps.canonicalToggle?.mode &&
+    prevProps.canonicalToggle?.disabled === nextProps.canonicalToggle?.disabled
+
   return (
     prevProps.blockId === nextProps.blockId &&
     configEqual &&
@@ -346,8 +351,7 @@ const arePropsEqual = (prevProps: SubBlockProps, nextProps: SubBlockProps): bool
     prevProps.disabled === nextProps.disabled &&
     prevProps.fieldDiffStatus === nextProps.fieldDiffStatus &&
     prevProps.allowExpandInPreview === nextProps.allowExpandInPreview &&
-    prevProps.canonicalToggle?.mode === nextProps.canonicalToggle?.mode &&
-    prevProps.canonicalToggle?.disabled === nextProps.canonicalToggle?.disabled
+    canonicalToggleEqual
   )
 }
@@ -214,15 +214,6 @@ export const A2ABlock: BlockConfig<A2AResponse> = {
     ],
     config: {
       tool: (params) => params.operation as string,
-      params: (params) => {
-        const { fileUpload, fileReference, ...rest } = params
-        const hasFileUpload = Array.isArray(fileUpload) ? fileUpload.length > 0 : !!fileUpload
-        const files = hasFileUpload ? fileUpload : fileReference
-        return {
-          ...rest,
-          ...(files ? { files } : {}),
-        }
-      },
     },
   },
   inputs: {
@@ -661,12 +661,25 @@ Return ONLY the PostgREST filter expression - no explanations, no markdown, no e
       placeholder: 'folder/subfolder/',
       condition: { field: 'operation', value: 'storage_upload' },
     },
+    {
+      id: 'file',
+      title: 'File',
+      type: 'file-upload',
+      canonicalParamId: 'fileData',
+      placeholder: 'Upload file to storage',
+      condition: { field: 'operation', value: 'storage_upload' },
+      mode: 'basic',
+      multiple: false,
+      required: true,
+    },
     {
       id: 'fileContent',
       title: 'File Content',
       type: 'code',
+      canonicalParamId: 'fileData',
       placeholder: 'Base64 encoded for binary files, or plain text',
       condition: { field: 'operation', value: 'storage_upload' },
+      mode: 'advanced',
      required: true,
     },
     {
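Both sub-blocks above declare canonicalParamId: 'fileData', so the basic file picker and the advanced code field feed the same tool parameter. A sketch of the two shapes the tool can receive (field values are illustrative, not the framework's exact UserFile shape):

// Basic mode: the file-upload sub-block resolves to a file object (illustrative shape).
const basicParams = { operation: 'storage_upload', fileData: { name: 'report.pdf', url: '/files/report.pdf' } }
// Advanced mode: the code sub-block resolves to a string (base64 or plain text).
const advancedParams = { operation: 'storage_upload', fileData: 'hello world' }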
@@ -680,6 +680,10 @@ export function useCollaborativeWorkflow() {
       previousPositions?: Map<string, { x: number; y: number; parentId?: string }>
     }
   ) => {
+    if (isBaselineDiffView) {
+      return
+    }
+
     if (!isInActiveRoom()) {
       logger.debug('Skipping batch position update - not in active workflow')
       return
@@ -725,7 +729,7 @@ export function useCollaborativeWorkflow() {
         }
       }
     },
-    [addToQueue, activeWorkflowId, session?.user?.id, isInActiveRoom, undoRedo]
+    [isBaselineDiffView, addToQueue, activeWorkflowId, session?.user?.id, isInActiveRoom, undoRedo]
   )
 
   const collaborativeUpdateBlockName = useCallback(
@@ -817,6 +821,10 @@ export function useCollaborativeWorkflow() {
 
   const collaborativeBatchToggleBlockEnabled = useCallback(
     (ids: string[]) => {
+      if (isBaselineDiffView) {
+        return
+      }
+
       if (ids.length === 0) return
 
       const previousStates: Record<string, boolean> = {}
@@ -849,7 +857,7 @@ export function useCollaborativeWorkflow() {
 
       undoRedo.recordBatchToggleEnabled(validIds, previousStates)
     },
-    [addToQueue, activeWorkflowId, session?.user?.id, undoRedo]
+    [isBaselineDiffView, addToQueue, activeWorkflowId, session?.user?.id, undoRedo]
   )
 
   const collaborativeBatchUpdateParent = useCallback(
@@ -861,6 +869,10 @@ export function useCollaborativeWorkflow() {
       affectedEdges: Edge[]
     }>
   ) => {
+    if (isBaselineDiffView) {
+      return
+    }
+
     if (!isInActiveRoom()) {
       logger.debug('Skipping batch update parent - not in active workflow')
       return
@@ -931,7 +943,7 @@ export function useCollaborativeWorkflow() {
 
       logger.debug('Batch updated parent for blocks', { updateCount: updates.length })
     },
-    [isInActiveRoom, undoRedo, addToQueue, activeWorkflowId, session?.user?.id]
+    [isBaselineDiffView, isInActiveRoom, undoRedo, addToQueue, activeWorkflowId, session?.user?.id]
   )
 
   const collaborativeToggleBlockAdvancedMode = useCallback(
@@ -951,18 +963,37 @@ export function useCollaborativeWorkflow() {
 
   const collaborativeSetBlockCanonicalMode = useCallback(
     (id: string, canonicalId: string, canonicalMode: 'basic' | 'advanced') => {
-      executeQueuedOperation(
-        BLOCK_OPERATIONS.UPDATE_CANONICAL_MODE,
-        OPERATION_TARGETS.BLOCK,
-        { id, canonicalId, canonicalMode },
-        () => useWorkflowStore.getState().setBlockCanonicalMode(id, canonicalId, canonicalMode)
-      )
+      if (isBaselineDiffView) {
+        return
+      }
+
+      useWorkflowStore.getState().setBlockCanonicalMode(id, canonicalId, canonicalMode)
+
+      if (!activeWorkflowId) {
+        return
+      }
+
+      const operationId = crypto.randomUUID()
+      addToQueue({
+        id: operationId,
+        operation: {
+          operation: BLOCK_OPERATIONS.UPDATE_CANONICAL_MODE,
+          target: OPERATION_TARGETS.BLOCK,
+          payload: { id, canonicalId, canonicalMode },
+        },
+        workflowId: activeWorkflowId,
+        userId: session?.user?.id || 'unknown',
+      })
     },
-    [executeQueuedOperation]
+    [isBaselineDiffView, activeWorkflowId, addToQueue, session?.user?.id]
   )
 
   const collaborativeBatchToggleBlockHandles = useCallback(
     (ids: string[]) => {
+      if (isBaselineDiffView) {
+        return
+      }
+
       if (ids.length === 0) return
 
       const previousStates: Record<string, boolean> = {}
@@ -995,11 +1026,15 @@ export function useCollaborativeWorkflow() {
 
       undoRedo.recordBatchToggleHandles(validIds, previousStates)
     },
-    [addToQueue, activeWorkflowId, session?.user?.id, undoRedo]
+    [isBaselineDiffView, addToQueue, activeWorkflowId, session?.user?.id, undoRedo]
   )
 
   const collaborativeBatchAddEdges = useCallback(
     (edges: Edge[], options?: { skipUndoRedo?: boolean }) => {
+      if (isBaselineDiffView) {
+        return false
+      }
+
       if (!isInActiveRoom()) {
         logger.debug('Skipping batch add edges - not in active workflow')
         return false
@@ -1035,11 +1070,15 @@ export function useCollaborativeWorkflow() {
 
       return true
     },
-    [addToQueue, activeWorkflowId, session?.user?.id, isInActiveRoom, undoRedo]
+    [isBaselineDiffView, addToQueue, activeWorkflowId, session?.user?.id, isInActiveRoom, undoRedo]
   )
 
   const collaborativeBatchRemoveEdges = useCallback(
     (edgeIds: string[], options?: { skipUndoRedo?: boolean }) => {
+      if (isBaselineDiffView) {
+        return false
+      }
+
       if (!isInActiveRoom()) {
         logger.debug('Skipping batch remove edges - not in active workflow')
         return false
@@ -1089,7 +1128,7 @@ export function useCollaborativeWorkflow() {
       logger.info('Batch removed edges', { count: validEdgeIds.length })
       return true
     },
-    [isInActiveRoom, addToQueue, activeWorkflowId, session, undoRedo]
+    [isBaselineDiffView, isInActiveRoom, addToQueue, activeWorkflowId, session, undoRedo]
   )
 
   const collaborativeSetSubblockValue = useCallback(
@@ -1165,6 +1204,10 @@ export function useCollaborativeWorkflow() {
     (blockId: string, subblockId: string, value: any) => {
       if (isApplyingRemoteChange.current) return
 
+      if (isBaselineDiffView) {
+        return
+      }
+
       if (!isInActiveRoom()) {
         logger.debug('Skipping tag selection - not in active workflow', {
           currentWorkflowId,
@@ -1192,7 +1235,14 @@ export function useCollaborativeWorkflow() {
         userId: session?.user?.id || 'unknown',
       })
     },
-    [addToQueue, currentWorkflowId, activeWorkflowId, session?.user?.id, isInActiveRoom]
+    [
+      isBaselineDiffView,
+      addToQueue,
+      currentWorkflowId,
+      activeWorkflowId,
+      session?.user?.id,
+      isInActiveRoom,
+    ]
   )
 
   const collaborativeUpdateLoopType = useCallback(
@@ -1538,6 +1588,10 @@ export function useCollaborativeWorkflow() {
 
   const collaborativeBatchRemoveBlocks = useCallback(
     (blockIds: string[], options?: { skipUndoRedo?: boolean }) => {
+      if (isBaselineDiffView) {
+        return false
+      }
+
       if (!isInActiveRoom()) {
         logger.debug('Skipping batch remove blocks - not in active workflow')
         return false
@@ -1619,6 +1673,7 @@ export function useCollaborativeWorkflow() {
       return true
     },
     [
+      isBaselineDiffView,
       addToQueue,
       activeWorkflowId,
       session?.user?.id,
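Every callback touched above gains the same early return, so collaborative edits become no-ops while the baseline diff is displayed. A condensed sketch of the repeated pattern (names from the diff, body elided):

// Illustrative shape of the guard added across the hook's callbacks.
const collaborativeDoSomething = useCallback(
  (ids: string[]) => {
    if (isBaselineDiffView) {
      return // read-only while viewing the baseline diff: never enqueue edits
    }
    // ...validate, apply locally, and addToQueue(...) as before...
  },
  [isBaselineDiffView /* plus the callback's existing dependencies */]
)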
@@ -27,6 +27,9 @@ export function registerEmitFunctions(
   emitSubblockUpdate = subblockEmit
   emitVariableUpdate = variableEmit
   currentRegisteredWorkflowId = workflowId
+  if (workflowId) {
+    useOperationQueueStore.getState().processNextOperation()
+  }
 }
 
 let currentRegisteredWorkflowId: string | null = null
@@ -262,16 +265,14 @@ export const useOperationQueueStore = create<OperationQueueState>((set, get) =>
       return
     }
 
-    const nextOperation = currentRegisteredWorkflowId
-      ? state.operations.find(
-          (op) => op.status === 'pending' && op.workflowId === currentRegisteredWorkflowId
-        )
-      : state.operations.find((op) => op.status === 'pending')
-    if (!nextOperation) {
+    if (!currentRegisteredWorkflowId) {
       return
     }
 
-    if (currentRegisteredWorkflowId && nextOperation.workflowId !== currentRegisteredWorkflowId) {
+    const nextOperation = state.operations.find(
+      (op) => op.status === 'pending' && op.workflowId === currentRegisteredWorkflowId
+    )
+    if (!nextOperation) {
       return
     }
 
@@ -38,11 +38,12 @@ export const storageUploadTool: ToolConfig<
       visibility: 'user-or-llm',
       description: 'Optional folder path (e.g., "folder/subfolder/")',
     },
-    fileContent: {
-      type: 'string',
+    fileData: {
+      type: 'json',
       required: true,
       visibility: 'user-or-llm',
-      description: 'The file content (base64 encoded for binary files, or plain text)',
+      description:
+        'File to upload - UserFile object (basic mode) or string content (advanced mode: base64 or plain text). Supports data URLs.',
     },
     contentType: {
       type: 'string',
@@ -65,65 +66,28 @@ export const storageUploadTool: ToolConfig<
   },
 
   request: {
-    url: (params) => {
-      // Combine folder path and fileName, ensuring proper formatting
-      let fullPath = params.fileName
-      if (params.path) {
-        // Ensure path ends with / and doesn't have double slashes
-        const folderPath = params.path.endsWith('/') ? params.path : `${params.path}/`
-        fullPath = `${folderPath}${params.fileName}`
-      }
-      return `https://${params.projectId}.supabase.co/storage/v1/object/${params.bucket}/${fullPath}`
-    },
+    url: '/api/tools/supabase/storage-upload',
     method: 'POST',
-    headers: (params) => {
-      const headers: Record<string, string> = {
-        apikey: params.apiKey,
-        Authorization: `Bearer ${params.apiKey}`,
-      }
-
-      if (params.contentType) {
-        headers['Content-Type'] = params.contentType
-      }
-
-      if (params.upsert) {
-        headers['x-upsert'] = 'true'
-      }
-
-      return headers
-    },
-    body: (params) => {
-      // Return the file content wrapped in an object
-      // The actual upload will need to handle this appropriately
-      return {
-        content: params.fileContent,
-      }
-    },
+    headers: () => ({
+      'Content-Type': 'application/json',
+    }),
+    body: (params) => ({
+      projectId: params.projectId,
+      apiKey: params.apiKey,
+      bucket: params.bucket,
+      fileName: params.fileName,
+      path: params.path,
+      fileData: params.fileData,
+      contentType: params.contentType,
+      upsert: params.upsert,
+    }),
   },
 
-  transformResponse: async (response: Response) => {
-    let data
-    try {
-      data = await response.json()
-    } catch (parseError) {
-      throw new Error(`Failed to parse Supabase storage upload response: ${parseError}`)
-    }
-
-    return {
-      success: true,
-      output: {
-        message: 'Successfully uploaded file to storage',
-        results: data,
-      },
-      error: undefined,
-    }
-  },
-
   outputs: {
     message: { type: 'string', description: 'Operation status message' },
     results: {
       type: 'object',
-      description: 'Upload result including file path and metadata',
+      description: 'Upload result including file path, bucket, and public URL',
     },
   },
 }
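To make the rewiring concrete, here is what the rewritten request config resolves to for one invocation (values are placeholders). The tool now POSTs to the internal proxy route, so the Supabase key travels in the JSON body rather than in request headers, and the old client-side transformResponse is dropped because the proxy already returns the final { success, output } shape:

// Illustrative resolution of the new request config.
const params = { projectId: 'myproject', apiKey: 'service-role-key', bucket: 'avatars', fileName: 'logo.png', path: 'images', fileData: 'iVBORw0KGgo...', upsert: true }
// url           -> '/api/tools/supabase/storage-upload' (now a static string)
// headers()     -> { 'Content-Type': 'application/json' }
// body(params)  -> { projectId, apiKey, bucket, fileName, path, fileData, contentType: undefined, upsert: true }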
@@ -136,7 +136,7 @@ export interface SupabaseStorageUploadParams {
   bucket: string
   fileName: string
   path?: string
-  fileContent: string
+  fileData: any // UserFile object (basic mode) or string (advanced mode: base64/plain text)
   contentType?: string
   upsert?: boolean
 }