mirror of https://github.com/simstudioai/sim.git (synced 2026-01-26 15:28:03 -05:00)

Compare commits: fix/hackat...python-sdk (38 commits)
| SHA1 |
|---|
| d63a5cb504 |
| 8bd5d41723 |
| c12931bc50 |
| e9c4251c1c |
| cc2be33d6b |
| 45371e521e |
| 0ce0f98aa5 |
| dff1c9d083 |
| b09f683072 |
| a8bb0db660 |
| af82820a28 |
| 4372841797 |
| 5e8c843241 |
| 7bf3d73ee6 |
| 7ffc11a738 |
| be578e2ed7 |
| f415e5edc4 |
| 13a6e6c3fa |
| f5ab7f21ae |
| bfb6fffe38 |
| 4fbec0a43f |
| 585f5e365b |
| 3792bdd252 |
| eb5d1f3e5b |
| 54ab82c8dd |
| f895bf469b |
| dd3209af06 |
| b6ba3b50a7 |
| b304233062 |
| 57e4b49bd6 |
| e12dd204ed |
| 3d9d9cbc54 |
| 0f4ec962ad |
| 4827866f9a |
| 3e697d9ed9 |
| 4431a1a484 |
| 4d1a9a3f22 |
| eb07a080fb |
apps/sim/app/api/organizations/[id]/workspaces/route.ts (new file, +204 lines)

@@ -0,0 +1,204 @@
```ts
import { db } from '@sim/db'
import { member, permissions, user, workspace } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, or } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'

const logger = createLogger('OrganizationWorkspacesAPI')

/**
 * GET /api/organizations/[id]/workspaces
 * Get workspaces related to the organization with optional filtering
 * Query parameters:
 * - ?available=true - Only workspaces where user can invite others (admin permissions)
 * - ?member=userId - Workspaces where specific member has access
 */
export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
  try {
    const session = await getSession()

    if (!session?.user?.id) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const { id: organizationId } = await params
    const url = new URL(request.url)
    const availableOnly = url.searchParams.get('available') === 'true'
    const memberId = url.searchParams.get('member')

    // Verify user is a member of this organization
    const memberEntry = await db
      .select()
      .from(member)
      .where(and(eq(member.organizationId, organizationId), eq(member.userId, session.user.id)))
      .limit(1)

    if (memberEntry.length === 0) {
      return NextResponse.json(
        {
          error: 'Forbidden - Not a member of this organization',
        },
        { status: 403 }
      )
    }

    const userRole = memberEntry[0].role
    const hasAdminAccess = ['owner', 'admin'].includes(userRole)

    if (availableOnly) {
      // Get workspaces where user has admin permissions (can invite others)
      const availableWorkspaces = await db
        .select({
          id: workspace.id,
          name: workspace.name,
          ownerId: workspace.ownerId,
          createdAt: workspace.createdAt,
          isOwner: eq(workspace.ownerId, session.user.id),
          permissionType: permissions.permissionType,
        })
        .from(workspace)
        .leftJoin(
          permissions,
          and(
            eq(permissions.entityType, 'workspace'),
            eq(permissions.entityId, workspace.id),
            eq(permissions.userId, session.user.id)
          )
        )
        .where(
          or(
            // User owns the workspace
            eq(workspace.ownerId, session.user.id),
            // User has admin permission on the workspace
            and(
              eq(permissions.userId, session.user.id),
              eq(permissions.entityType, 'workspace'),
              eq(permissions.permissionType, 'admin')
            )
          )
        )

      // Filter and format the results
      const workspacesWithInvitePermission = availableWorkspaces
        .filter((workspace) => {
          // Include if user owns the workspace OR has admin permission
          return workspace.isOwner || workspace.permissionType === 'admin'
        })
        .map((workspace) => ({
          id: workspace.id,
          name: workspace.name,
          isOwner: workspace.isOwner,
          canInvite: true, // All returned workspaces have invite permission
          createdAt: workspace.createdAt,
        }))

      logger.info('Retrieved available workspaces for organization member', {
        organizationId,
        userId: session.user.id,
        workspaceCount: workspacesWithInvitePermission.length,
      })

      return NextResponse.json({
        success: true,
        data: {
          workspaces: workspacesWithInvitePermission,
          totalCount: workspacesWithInvitePermission.length,
          filter: 'available',
        },
      })
    }

    if (memberId && hasAdminAccess) {
      // Get workspaces where specific member has access (admin only)
      const memberWorkspaces = await db
        .select({
          id: workspace.id,
          name: workspace.name,
          ownerId: workspace.ownerId,
          isOwner: eq(workspace.ownerId, memberId),
          permissionType: permissions.permissionType,
          createdAt: permissions.createdAt,
        })
        .from(workspace)
        .leftJoin(
          permissions,
          and(
            eq(permissions.entityType, 'workspace'),
            eq(permissions.entityId, workspace.id),
            eq(permissions.userId, memberId)
          )
        )
        .where(
          or(
            // Member owns the workspace
            eq(workspace.ownerId, memberId),
            // Member has permissions on the workspace
            and(eq(permissions.userId, memberId), eq(permissions.entityType, 'workspace'))
          )
        )

      const formattedWorkspaces = memberWorkspaces.map((workspace) => ({
        id: workspace.id,
        name: workspace.name,
        isOwner: workspace.isOwner,
        permission: workspace.permissionType,
        joinedAt: workspace.createdAt,
        createdAt: workspace.createdAt,
      }))

      return NextResponse.json({
        success: true,
        data: {
          workspaces: formattedWorkspaces,
          totalCount: formattedWorkspaces.length,
          filter: 'member',
          memberId,
        },
      })
    }

    // Default: Get all workspaces (basic info only for regular members)
    if (!hasAdminAccess) {
      return NextResponse.json({
        success: true,
        data: {
          workspaces: [],
          totalCount: 0,
          message: 'Workspace access information is only available to organization admins',
        },
      })
    }

    // For admins: Get summary of all workspaces
    const allWorkspaces = await db
      .select({
        id: workspace.id,
        name: workspace.name,
        ownerId: workspace.ownerId,
        createdAt: workspace.createdAt,
        ownerName: user.name,
      })
      .from(workspace)
      .leftJoin(user, eq(workspace.ownerId, user.id))

    return NextResponse.json({
      success: true,
      data: {
        workspaces: allWorkspaces,
        totalCount: allWorkspaces.length,
        filter: 'all',
      },
      userRole,
      hasAdminAccess,
    })
  } catch (error) {
    logger.error('Failed to get organization workspaces', { error })
    return NextResponse.json(
      {
        error: 'Internal server error',
      },
      { status: 500 }
    )
  }
}
```
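For reference, a minimal sketch of calling the new endpoint from a browser client; the organization and member ids are hypothetical placeholders, and a same-origin session cookie is assumed:

```ts
// Minimal sketch of the three query modes (ids are hypothetical placeholders).
const orgId = 'org_123'

// Workspaces the current user can invite others into
const available = await fetch(`/api/organizations/${orgId}/workspaces?available=true`)

// Workspaces a specific member can access (admins only; others fall through to the default)
const byMember = await fetch(`/api/organizations/${orgId}/workspaces?member=user_456`)

// Default: all-workspace summary for admins, an empty list for regular members
const all = await fetch(`/api/organizations/${orgId}/workspaces`)

const { data } = await available.json()
console.log(data.filter, data.totalCount, data.workspaces)
```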
Deleted file (257 lines; path not preserved in this capture)

@@ -1,257 +0,0 @@
```ts
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'

export const dynamic = 'force-dynamic'

const logger = createLogger('SupabaseStorageUploadAPI')

const SupabaseStorageUploadSchema = z.object({
  projectId: z.string().min(1, 'Project ID is required'),
  apiKey: z.string().min(1, 'API key is required'),
  bucket: z.string().min(1, 'Bucket name is required'),
  fileName: z.string().min(1, 'File name is required'),
  path: z.string().optional().nullable(),
  fileData: z.any(),
  contentType: z.string().optional().nullable(),
  upsert: z.boolean().optional().default(false),
})

export async function POST(request: NextRequest) {
  const requestId = generateRequestId()

  try {
    const authResult = await checkInternalAuth(request, { requireWorkflowId: false })

    if (!authResult.success) {
      logger.warn(
        `[${requestId}] Unauthorized Supabase storage upload attempt: ${authResult.error}`
      )
      return NextResponse.json(
        {
          success: false,
          error: authResult.error || 'Authentication required',
        },
        { status: 401 }
      )
    }

    logger.info(
      `[${requestId}] Authenticated Supabase storage upload request via ${authResult.authType}`,
      {
        userId: authResult.userId,
      }
    )

    const body = await request.json()
    const validatedData = SupabaseStorageUploadSchema.parse(body)

    const fileData = validatedData.fileData
    const isStringInput = typeof fileData === 'string'

    logger.info(`[${requestId}] Uploading to Supabase Storage`, {
      bucket: validatedData.bucket,
      fileName: validatedData.fileName,
      path: validatedData.path,
      fileDataType: isStringInput ? 'string' : 'object',
    })

    if (!fileData) {
      return NextResponse.json(
        {
          success: false,
          error: 'fileData is required',
        },
        { status: 400 }
      )
    }

    let uploadBody: Buffer
    let uploadContentType: string | undefined

    if (isStringInput) {
      let content = fileData as string

      const dataUrlMatch = content.match(/^data:([^;]+);base64,(.+)$/s)
      if (dataUrlMatch) {
        const [, mimeType, base64Data] = dataUrlMatch
        content = base64Data
        if (!validatedData.contentType) {
          uploadContentType = mimeType
        }
        logger.info(`[${requestId}] Extracted base64 from data URL (MIME: ${mimeType})`)
      }

      const cleanedContent = content.replace(/[\s\r\n]/g, '')
      const isLikelyBase64 = /^[A-Za-z0-9+/]*={0,2}$/.test(cleanedContent)

      if (isLikelyBase64 && cleanedContent.length >= 4) {
        try {
          uploadBody = Buffer.from(cleanedContent, 'base64')

          const expectedMinSize = Math.floor(cleanedContent.length * 0.7)
          const expectedMaxSize = Math.ceil(cleanedContent.length * 0.8)

          if (
            uploadBody.length >= expectedMinSize &&
            uploadBody.length <= expectedMaxSize &&
            uploadBody.length > 0
          ) {
            logger.info(
              `[${requestId}] Decoded base64 content: ${cleanedContent.length} chars -> ${uploadBody.length} bytes`
            )
          } else {
            const reEncoded = uploadBody.toString('base64')
            if (reEncoded !== cleanedContent) {
              logger.info(
                `[${requestId}] Content looked like base64 but re-encoding didn't match, using as plain text`
              )
              uploadBody = Buffer.from(content, 'utf-8')
            } else {
              logger.info(
                `[${requestId}] Decoded base64 content (verified): ${uploadBody.length} bytes`
              )
            }
          }
        } catch (decodeError) {
          logger.info(
            `[${requestId}] Failed to decode as base64, using as plain text: ${decodeError}`
          )
          uploadBody = Buffer.from(content, 'utf-8')
        }
      } else {
        uploadBody = Buffer.from(content, 'utf-8')
        logger.info(`[${requestId}] Using content as plain text (${uploadBody.length} bytes)`)
      }

      uploadContentType =
        uploadContentType || validatedData.contentType || 'application/octet-stream'
    } else {
      const rawFile = fileData
      logger.info(`[${requestId}] Processing file object: ${rawFile.name || 'unknown'}`)

      let userFile
      try {
        userFile = processSingleFileToUserFile(rawFile, requestId, logger)
      } catch (error) {
        return NextResponse.json(
          {
            success: false,
            error: error instanceof Error ? error.message : 'Failed to process file',
          },
          { status: 400 }
        )
      }

      const buffer = await downloadFileFromStorage(userFile, requestId, logger)

      uploadBody = buffer
      uploadContentType = validatedData.contentType || userFile.type || 'application/octet-stream'
    }

    let fullPath = validatedData.fileName
    if (validatedData.path) {
      const folderPath = validatedData.path.endsWith('/')
        ? validatedData.path
        : `${validatedData.path}/`
      fullPath = `${folderPath}${validatedData.fileName}`
    }

    const supabaseUrl = `https://${validatedData.projectId}.supabase.co/storage/v1/object/${validatedData.bucket}/${fullPath}`

    const headers: Record<string, string> = {
      apikey: validatedData.apiKey,
      Authorization: `Bearer ${validatedData.apiKey}`,
      'Content-Type': uploadContentType,
    }

    if (validatedData.upsert) {
      headers['x-upsert'] = 'true'
    }

    logger.info(`[${requestId}] Sending to Supabase: ${supabaseUrl}`, {
      contentType: uploadContentType,
      bodySize: uploadBody.length,
      upsert: validatedData.upsert,
    })

    const response = await fetch(supabaseUrl, {
      method: 'POST',
      headers,
      body: new Uint8Array(uploadBody),
    })

    if (!response.ok) {
      const errorText = await response.text()
      let errorData
      try {
        errorData = JSON.parse(errorText)
      } catch {
        errorData = { message: errorText }
      }

      logger.error(`[${requestId}] Supabase Storage upload failed:`, {
        status: response.status,
        statusText: response.statusText,
        error: errorData,
      })

      return NextResponse.json(
        {
          success: false,
          error: errorData.message || errorData.error || `Upload failed: ${response.statusText}`,
          details: errorData,
        },
        { status: response.status }
      )
    }

    const result = await response.json()

    logger.info(`[${requestId}] File uploaded successfully to Supabase Storage`, {
      bucket: validatedData.bucket,
      path: fullPath,
    })

    const publicUrl = `https://${validatedData.projectId}.supabase.co/storage/v1/object/public/${validatedData.bucket}/${fullPath}`

    return NextResponse.json({
      success: true,
      output: {
        message: 'Successfully uploaded file to storage',
        results: {
          ...result,
          path: fullPath,
          bucket: validatedData.bucket,
          publicUrl,
        },
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
      return NextResponse.json(
        {
          success: false,
          error: 'Invalid request data',
          details: error.errors,
        },
        { status: 400 }
      )
    }

    logger.error(`[${requestId}] Error uploading to Supabase Storage:`, error)

    return NextResponse.json(
      {
        success: false,
        error: error instanceof Error ? error.message : 'Internal server error',
      },
      { status: 500 }
    )
  }
}
```
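One detail of the deleted handler worth spelling out: base64 encodes 3 bytes as 4 characters, so a genuine payload decodes to roughly 0.75x its character count, which is what the 0.7-0.8 window above checks before trusting the decode. A standalone sketch of that heuristic, extracted from the deleted code for illustration (it omits the re-encode verification branch):

```ts
// Standalone sketch of the deleted route's decode heuristic (extracted for
// illustration; no longer part of the codebase).
function decodeFlexible(content: string): Buffer {
  const cleaned = content.replace(/[\s\r\n]/g, '')
  const isLikelyBase64 = /^[A-Za-z0-9+/]*={0,2}$/.test(cleaned)
  if (isLikelyBase64 && cleaned.length >= 4) {
    const decoded = Buffer.from(cleaned, 'base64')
    // 4 base64 chars decode to 3 bytes, so real base64 lands near a 0.75 size ratio
    if (
      decoded.length > 0 &&
      decoded.length >= Math.floor(cleaned.length * 0.7) &&
      decoded.length <= Math.ceil(cleaned.length * 0.8)
    ) {
      return decoded
    }
  }
  // Anything that fails the shape or ratio check is treated as plain text
  return Buffer.from(content, 'utf-8')
}
```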
```diff
@@ -338,11 +338,6 @@ const arePropsEqual = (prevProps: SubBlockProps, nextProps: SubBlockProps): bool
   const configEqual =
     prevProps.config.id === nextProps.config.id && prevProps.config.type === nextProps.config.type
 
-  const canonicalToggleEqual =
-    !!prevProps.canonicalToggle === !!nextProps.canonicalToggle &&
-    prevProps.canonicalToggle?.mode === nextProps.canonicalToggle?.mode &&
-    prevProps.canonicalToggle?.disabled === nextProps.canonicalToggle?.disabled
-
   return (
     prevProps.blockId === nextProps.blockId &&
     configEqual &&
@@ -351,7 +346,8 @@ const arePropsEqual = (prevProps: SubBlockProps, nextProps: SubBlockProps): bool
     prevProps.disabled === nextProps.disabled &&
     prevProps.fieldDiffStatus === nextProps.fieldDiffStatus &&
     prevProps.allowExpandInPreview === nextProps.allowExpandInPreview &&
-    canonicalToggleEqual
+    prevProps.canonicalToggle?.mode === nextProps.canonicalToggle?.mode &&
+    prevProps.canonicalToggle?.disabled === nextProps.canonicalToggle?.disabled
   )
 }
```
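A comparator with this shape is typically the second argument to React.memo; a minimal wiring sketch, where the component body is an assumption not shown in this diff:

```ts
import { memo } from 'react'

// Hypothetical wiring: React.memo skips re-rendering whenever arePropsEqual
// returns true for the previous and next props.
const SubBlock = memo(function SubBlockInner(props: SubBlockProps) {
  // ...render the sub-block UI...
  return null
}, arePropsEqual)
```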
```diff
@@ -214,6 +214,15 @@ export const A2ABlock: BlockConfig<A2AResponse> = {
     ],
     config: {
       tool: (params) => params.operation as string,
+      params: (params) => {
+        const { fileUpload, fileReference, ...rest } = params
+        const hasFileUpload = Array.isArray(fileUpload) ? fileUpload.length > 0 : !!fileUpload
+        const files = hasFileUpload ? fileUpload : fileReference
+        return {
+          ...rest,
+          ...(files ? { files } : {}),
+        }
+      },
     },
   },
   inputs: {
```
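To make the fallback behavior of the new params mapper concrete, here it is reproduced as a standalone function with hypothetical inputs:

```ts
// The params mapper from the hunk above, reproduced standalone so the
// fileUpload -> fileReference fallback can be exercised directly.
const toToolParams = (params: Record<string, any>) => {
  const { fileUpload, fileReference, ...rest } = params
  const hasFileUpload = Array.isArray(fileUpload) ? fileUpload.length > 0 : !!fileUpload
  const files = hasFileUpload ? fileUpload : fileReference
  return { ...rest, ...(files ? { files } : {}) }
}

// An empty upload array does not count as "has files", so fileReference wins:
console.log(toToolParams({ operation: 'send', fileUpload: [], fileReference: [{ id: 'f1' }] }))
// -> { operation: 'send', files: [{ id: 'f1' }] }
```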
```diff
@@ -661,25 +661,12 @@ Return ONLY the PostgREST filter expression - no explanations, no markdown, no e
       placeholder: 'folder/subfolder/',
       condition: { field: 'operation', value: 'storage_upload' },
     },
-    {
-      id: 'file',
-      title: 'File',
-      type: 'file-upload',
-      canonicalParamId: 'fileData',
-      placeholder: 'Upload file to storage',
-      condition: { field: 'operation', value: 'storage_upload' },
-      mode: 'basic',
-      multiple: false,
-      required: true,
-    },
     {
       id: 'fileContent',
       title: 'File Content',
       type: 'code',
-      canonicalParamId: 'fileData',
       placeholder: 'Base64 encoded for binary files, or plain text',
       condition: { field: 'operation', value: 'storage_upload' },
-      mode: 'advanced',
       required: true,
     },
     {
```
```diff
@@ -680,10 +680,6 @@ export function useCollaborativeWorkflow() {
       previousPositions?: Map<string, { x: number; y: number; parentId?: string }>
     }
     ) => {
-      if (isBaselineDiffView) {
-        return
-      }
-
       if (!isInActiveRoom()) {
         logger.debug('Skipping batch position update - not in active workflow')
         return
@@ -729,7 +725,7 @@ export function useCollaborativeWorkflow() {
         }
       }
     },
-    [isBaselineDiffView, addToQueue, activeWorkflowId, session?.user?.id, isInActiveRoom, undoRedo]
+    [addToQueue, activeWorkflowId, session?.user?.id, isInActiveRoom, undoRedo]
   )
 
   const collaborativeUpdateBlockName = useCallback(
@@ -821,10 +817,6 @@ export function useCollaborativeWorkflow() {
 
   const collaborativeBatchToggleBlockEnabled = useCallback(
     (ids: string[]) => {
-      if (isBaselineDiffView) {
-        return
-      }
-
       if (ids.length === 0) return
 
       const previousStates: Record<string, boolean> = {}
@@ -857,7 +849,7 @@ export function useCollaborativeWorkflow() {
 
       undoRedo.recordBatchToggleEnabled(validIds, previousStates)
     },
-    [isBaselineDiffView, addToQueue, activeWorkflowId, session?.user?.id, undoRedo]
+    [addToQueue, activeWorkflowId, session?.user?.id, undoRedo]
   )
 
   const collaborativeBatchUpdateParent = useCallback(
@@ -869,10 +861,6 @@ export function useCollaborativeWorkflow() {
         affectedEdges: Edge[]
       }>
     ) => {
-      if (isBaselineDiffView) {
-        return
-      }
-
       if (!isInActiveRoom()) {
         logger.debug('Skipping batch update parent - not in active workflow')
         return
@@ -943,7 +931,7 @@ export function useCollaborativeWorkflow() {
 
       logger.debug('Batch updated parent for blocks', { updateCount: updates.length })
     },
-    [isBaselineDiffView, isInActiveRoom, undoRedo, addToQueue, activeWorkflowId, session?.user?.id]
+    [isInActiveRoom, undoRedo, addToQueue, activeWorkflowId, session?.user?.id]
   )
 
   const collaborativeToggleBlockAdvancedMode = useCallback(
@@ -963,37 +951,18 @@ export function useCollaborativeWorkflow() {
 
   const collaborativeSetBlockCanonicalMode = useCallback(
     (id: string, canonicalId: string, canonicalMode: 'basic' | 'advanced') => {
-      if (isBaselineDiffView) {
-        return
-      }
-
-      useWorkflowStore.getState().setBlockCanonicalMode(id, canonicalId, canonicalMode)
-
-      if (!activeWorkflowId) {
-        return
-      }
-
-      const operationId = crypto.randomUUID()
-
-      addToQueue({
-        id: operationId,
-        operation: {
-          operation: BLOCK_OPERATIONS.UPDATE_CANONICAL_MODE,
-          target: OPERATION_TARGETS.BLOCK,
-          payload: { id, canonicalId, canonicalMode },
-        },
-        workflowId: activeWorkflowId,
-        userId: session?.user?.id || 'unknown',
-      })
+      executeQueuedOperation(
+        BLOCK_OPERATIONS.UPDATE_CANONICAL_MODE,
+        OPERATION_TARGETS.BLOCK,
+        { id, canonicalId, canonicalMode },
+        () => useWorkflowStore.getState().setBlockCanonicalMode(id, canonicalId, canonicalMode)
+      )
     },
-    [isBaselineDiffView, activeWorkflowId, addToQueue, session?.user?.id]
+    [executeQueuedOperation]
   )
 
   const collaborativeBatchToggleBlockHandles = useCallback(
     (ids: string[]) => {
-      if (isBaselineDiffView) {
-        return
-      }
-
       if (ids.length === 0) return
 
       const previousStates: Record<string, boolean> = {}
@@ -1026,15 +995,11 @@ export function useCollaborativeWorkflow() {
 
       undoRedo.recordBatchToggleHandles(validIds, previousStates)
     },
-    [isBaselineDiffView, addToQueue, activeWorkflowId, session?.user?.id, undoRedo]
+    [addToQueue, activeWorkflowId, session?.user?.id, undoRedo]
   )
 
   const collaborativeBatchAddEdges = useCallback(
     (edges: Edge[], options?: { skipUndoRedo?: boolean }) => {
-      if (isBaselineDiffView) {
-        return false
-      }
-
       if (!isInActiveRoom()) {
         logger.debug('Skipping batch add edges - not in active workflow')
         return false
@@ -1070,15 +1035,11 @@ export function useCollaborativeWorkflow() {
 
       return true
     },
-    [isBaselineDiffView, addToQueue, activeWorkflowId, session?.user?.id, isInActiveRoom, undoRedo]
+    [addToQueue, activeWorkflowId, session?.user?.id, isInActiveRoom, undoRedo]
   )
 
   const collaborativeBatchRemoveEdges = useCallback(
     (edgeIds: string[], options?: { skipUndoRedo?: boolean }) => {
-      if (isBaselineDiffView) {
-        return false
-      }
-
       if (!isInActiveRoom()) {
         logger.debug('Skipping batch remove edges - not in active workflow')
         return false
@@ -1128,7 +1089,7 @@ export function useCollaborativeWorkflow() {
       logger.info('Batch removed edges', { count: validEdgeIds.length })
       return true
     },
-    [isBaselineDiffView, isInActiveRoom, addToQueue, activeWorkflowId, session, undoRedo]
+    [isInActiveRoom, addToQueue, activeWorkflowId, session, undoRedo]
   )
 
   const collaborativeSetSubblockValue = useCallback(
@@ -1204,10 +1165,6 @@ export function useCollaborativeWorkflow() {
     (blockId: string, subblockId: string, value: any) => {
       if (isApplyingRemoteChange.current) return
 
-      if (isBaselineDiffView) {
-        return
-      }
-
       if (!isInActiveRoom()) {
         logger.debug('Skipping tag selection - not in active workflow', {
           currentWorkflowId,
@@ -1235,14 +1192,7 @@ export function useCollaborativeWorkflow() {
         userId: session?.user?.id || 'unknown',
       })
     },
-    [
-      isBaselineDiffView,
-      addToQueue,
-      currentWorkflowId,
-      activeWorkflowId,
-      session?.user?.id,
-      isInActiveRoom,
-    ]
+    [addToQueue, currentWorkflowId, activeWorkflowId, session?.user?.id, isInActiveRoom]
   )
 
   const collaborativeUpdateLoopType = useCallback(
@@ -1588,10 +1538,6 @@ export function useCollaborativeWorkflow() {
 
   const collaborativeBatchRemoveBlocks = useCallback(
     (blockIds: string[], options?: { skipUndoRedo?: boolean }) => {
-      if (isBaselineDiffView) {
-        return false
-      }
-
       if (!isInActiveRoom()) {
         logger.debug('Skipping batch remove blocks - not in active workflow')
         return false
@@ -1673,7 +1619,6 @@ export function useCollaborativeWorkflow() {
       return true
     },
     [
-      isBaselineDiffView,
       addToQueue,
       activeWorkflowId,
       session?.user?.id,
```
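The pattern across these hunks: each callback's inline isBaselineDiffView guard and hand-rolled addToQueue call give way to a shared executeQueuedOperation helper. The helper's implementation is not part of this diff; the sketch below reconstructs it from the inline code it replaces, so its signature and guard placement are assumptions:

```ts
// Hedged reconstruction of executeQueuedOperation from the code it replaces;
// the real helper is not shown in this diff.
const executeQueuedOperation = useCallback(
  (operation: string, target: string, payload: unknown, applyLocal: () => void) => {
    if (isBaselineDiffView) return // assumed: the guard moves into the helper
    applyLocal() // optimistic local store update
    if (!activeWorkflowId) return
    addToQueue({
      id: crypto.randomUUID(),
      operation: { operation, target, payload },
      workflowId: activeWorkflowId,
      userId: session?.user?.id || 'unknown',
    })
  },
  [isBaselineDiffView, activeWorkflowId, addToQueue, session?.user?.id]
)
```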
```diff
@@ -27,9 +27,6 @@ export function registerEmitFunctions(
   emitSubblockUpdate = subblockEmit
   emitVariableUpdate = variableEmit
   currentRegisteredWorkflowId = workflowId
-  if (workflowId) {
-    useOperationQueueStore.getState().processNextOperation()
-  }
 }
 
 let currentRegisteredWorkflowId: string | null = null
@@ -265,14 +262,16 @@ export const useOperationQueueStore = create<OperationQueueState>((set, get) =>
       return
     }
 
-    if (!currentRegisteredWorkflowId) {
+    const nextOperation = currentRegisteredWorkflowId
+      ? state.operations.find(
+          (op) => op.status === 'pending' && op.workflowId === currentRegisteredWorkflowId
+        )
+      : state.operations.find((op) => op.status === 'pending')
+    if (!nextOperation) {
       return
     }
 
-    const nextOperation = state.operations.find(
-      (op) => op.status === 'pending' && op.workflowId === currentRegisteredWorkflowId
-    )
-    if (!nextOperation) {
+    if (currentRegisteredWorkflowId && nextOperation.workflowId !== currentRegisteredWorkflowId) {
       return
     }
 
```
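The second hunk changes operation selection: previously processNextOperation bailed out when no workflow was registered, whereas now it falls back to any pending operation and only enforces the workflow match when one is registered. A simplified, runnable rendering of the new rule:

```ts
// Simplified rendering of the new selection rule (types reduced for illustration).
type QueuedOp = { status: 'pending' | 'processing' | 'done'; workflowId: string }

function pickNext(
  operations: QueuedOp[],
  registeredWorkflowId: string | null
): QueuedOp | undefined {
  const next = registeredWorkflowId
    ? operations.find((op) => op.status === 'pending' && op.workflowId === registeredWorkflowId)
    : operations.find((op) => op.status === 'pending')
  if (!next) return undefined
  // Defensive re-check mirroring the hunk; with a registered workflow the find
  // above already guarantees the match, so this mainly guards future changes.
  if (registeredWorkflowId && next.workflowId !== registeredWorkflowId) return undefined
  return next
}
```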
```diff
@@ -38,12 +38,11 @@ export const storageUploadTool: ToolConfig<
       visibility: 'user-or-llm',
       description: 'Optional folder path (e.g., "folder/subfolder/")',
     },
-    fileData: {
-      type: 'json',
+    fileContent: {
+      type: 'string',
       required: true,
       visibility: 'user-or-llm',
-      description:
-        'File to upload - UserFile object (basic mode) or string content (advanced mode: base64 or plain text). Supports data URLs.',
+      description: 'The file content (base64 encoded for binary files, or plain text)',
     },
     contentType: {
       type: 'string',
@@ -66,28 +65,65 @@ export const storageUploadTool: ToolConfig<
   },
 
   request: {
-    url: '/api/tools/supabase/storage-upload',
+    url: (params) => {
+      // Combine folder path and fileName, ensuring proper formatting
+      let fullPath = params.fileName
+      if (params.path) {
+        // Ensure path ends with / and doesn't have double slashes
+        const folderPath = params.path.endsWith('/') ? params.path : `${params.path}/`
+        fullPath = `${folderPath}${params.fileName}`
+      }
+      return `https://${params.projectId}.supabase.co/storage/v1/object/${params.bucket}/${fullPath}`
+    },
     method: 'POST',
-    headers: () => ({
-      'Content-Type': 'application/json',
-    }),
-    body: (params) => ({
-      projectId: params.projectId,
-      apiKey: params.apiKey,
-      bucket: params.bucket,
-      fileName: params.fileName,
-      path: params.path,
-      fileData: params.fileData,
-      contentType: params.contentType,
-      upsert: params.upsert,
-    }),
+    headers: (params) => {
+      const headers: Record<string, string> = {
+        apikey: params.apiKey,
+        Authorization: `Bearer ${params.apiKey}`,
+      }
+
+      if (params.contentType) {
+        headers['Content-Type'] = params.contentType
+      }
+
+      if (params.upsert) {
+        headers['x-upsert'] = 'true'
+      }
+
+      return headers
+    },
+    body: (params) => {
+      // Return the file content wrapped in an object
+      // The actual upload will need to handle this appropriately
+      return {
+        content: params.fileContent,
+      }
+    },
   },
 
+  transformResponse: async (response: Response) => {
+    let data
+    try {
+      data = await response.json()
+    } catch (parseError) {
+      throw new Error(`Failed to parse Supabase storage upload response: ${parseError}`)
+    }
+
+    return {
+      success: true,
+      output: {
+        message: 'Successfully uploaded file to storage',
+        results: data,
+      },
+      error: undefined,
+    }
+  },
+
   outputs: {
     message: { type: 'string', description: 'Operation status message' },
     results: {
       type: 'object',
-      description: 'Upload result including file path, bucket, and public URL',
+      description: 'Upload result including file path and metadata',
     },
   },
 }
```
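The rewritten tool talks to Supabase Storage's object endpoint directly rather than going through the deleted proxy route. Below is a rough sketch of the equivalent request, with hypothetical project, bucket, path, and key values; note that the tool's body function above still wraps the content in a JSON object, which its own comment flags as needing handling downstream:

```ts
// Rough equivalent of the request the rewritten tool issues; project ref,
// bucket, path, and key below are hypothetical placeholders.
const projectId = 'myproject'
const apiKey = process.env.SUPABASE_KEY ?? ''

const res = await fetch(
  `https://${projectId}.supabase.co/storage/v1/object/my-bucket/folder/notes.txt`,
  {
    method: 'POST',
    headers: {
      apikey: apiKey,
      Authorization: `Bearer ${apiKey}`,
      'Content-Type': 'text/plain', // only sent when contentType is provided
      'x-upsert': 'true', // only sent when upsert is requested
    },
    body: 'plain text or base64-encoded file content',
  }
)
console.log(res.status, await res.json())
```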
```diff
@@ -136,7 +136,7 @@ export interface SupabaseStorageUploadParams {
   bucket: string
   fileName: string
   path?: string
-  fileData: any // UserFile object (basic mode) or string (advanced mode: base64/plain text)
+  fileContent: string
   contentType?: string
   upsert?: boolean
 }
```