Compare commits


6 Commits

Author | SHA1 | Message | Date
Siddharth Ganesan | 80eb2a8aa1 | Fix | 2026-01-26 12:25:01 -08:00
Siddharth Ganesan | 315d9ee3f9 | Lint | 2026-01-26 10:21:01 -08:00
Siddharth Ganesan | 62b06d00de | Fix comments | 2026-01-25 14:33:27 -08:00
Siddharth Ganesan | 2a630859fb | Fix validation | 2026-01-25 14:31:12 -08:00
Siddharth Ganesan | 3533bd009d | Fix greptile | 2026-01-25 13:27:51 -08:00
Siddharth Ganesan | 43402fde1c | Fix | 2026-01-25 13:15:07 -08:00
16 changed files with 514 additions and 468 deletions

View File

@@ -0,0 +1,204 @@
import { db } from '@sim/db'
import { member, permissions, user, workspace } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, or } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
const logger = createLogger('OrganizationWorkspacesAPI')
/**
* GET /api/organizations/[id]/workspaces
* Get workspaces related to the organization with optional filtering
* Query parameters:
* - ?available=true - Only workspaces where user can invite others (admin permissions)
* - ?member=userId - Workspaces where specific member has access
*/
export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
try {
const session = await getSession()
if (!session?.user?.id) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const { id: organizationId } = await params
const url = new URL(request.url)
const availableOnly = url.searchParams.get('available') === 'true'
const memberId = url.searchParams.get('member')
// Verify user is a member of this organization
const memberEntry = await db
.select()
.from(member)
.where(and(eq(member.organizationId, organizationId), eq(member.userId, session.user.id)))
.limit(1)
if (memberEntry.length === 0) {
return NextResponse.json(
{
error: 'Forbidden - Not a member of this organization',
},
{ status: 403 }
)
}
const userRole = memberEntry[0].role
const hasAdminAccess = ['owner', 'admin'].includes(userRole)
if (availableOnly) {
// Get workspaces where user has admin permissions (can invite others)
const availableWorkspaces = await db
.select({
id: workspace.id,
name: workspace.name,
ownerId: workspace.ownerId,
createdAt: workspace.createdAt,
isOwner: eq(workspace.ownerId, session.user.id),
permissionType: permissions.permissionType,
})
.from(workspace)
.leftJoin(
permissions,
and(
eq(permissions.entityType, 'workspace'),
eq(permissions.entityId, workspace.id),
eq(permissions.userId, session.user.id)
)
)
.where(
or(
// User owns the workspace
eq(workspace.ownerId, session.user.id),
// User has admin permission on the workspace
and(
eq(permissions.userId, session.user.id),
eq(permissions.entityType, 'workspace'),
eq(permissions.permissionType, 'admin')
)
)
)
// Filter and format the results
const workspacesWithInvitePermission = availableWorkspaces
.filter((workspace) => {
// Include if user owns the workspace OR has admin permission
return workspace.isOwner || workspace.permissionType === 'admin'
})
.map((workspace) => ({
id: workspace.id,
name: workspace.name,
isOwner: workspace.isOwner,
canInvite: true, // All returned workspaces have invite permission
createdAt: workspace.createdAt,
}))
logger.info('Retrieved available workspaces for organization member', {
organizationId,
userId: session.user.id,
workspaceCount: workspacesWithInvitePermission.length,
})
return NextResponse.json({
success: true,
data: {
workspaces: workspacesWithInvitePermission,
totalCount: workspacesWithInvitePermission.length,
filter: 'available',
},
})
}
if (memberId && hasAdminAccess) {
// Get workspaces where specific member has access (admin only)
const memberWorkspaces = await db
.select({
id: workspace.id,
name: workspace.name,
ownerId: workspace.ownerId,
isOwner: eq(workspace.ownerId, memberId),
permissionType: permissions.permissionType,
createdAt: permissions.createdAt,
})
.from(workspace)
.leftJoin(
permissions,
and(
eq(permissions.entityType, 'workspace'),
eq(permissions.entityId, workspace.id),
eq(permissions.userId, memberId)
)
)
.where(
or(
// Member owns the workspace
eq(workspace.ownerId, memberId),
// Member has permissions on the workspace
and(eq(permissions.userId, memberId), eq(permissions.entityType, 'workspace'))
)
)
const formattedWorkspaces = memberWorkspaces.map((workspace) => ({
id: workspace.id,
name: workspace.name,
isOwner: workspace.isOwner,
permission: workspace.permissionType,
joinedAt: workspace.createdAt,
createdAt: workspace.createdAt,
}))
return NextResponse.json({
success: true,
data: {
workspaces: formattedWorkspaces,
totalCount: formattedWorkspaces.length,
filter: 'member',
memberId,
},
})
}
// Default: Get all workspaces (basic info only for regular members)
if (!hasAdminAccess) {
return NextResponse.json({
success: true,
data: {
workspaces: [],
totalCount: 0,
message: 'Workspace access information is only available to organization admins',
},
})
}
// For admins: Get summary of all workspaces
const allWorkspaces = await db
.select({
id: workspace.id,
name: workspace.name,
ownerId: workspace.ownerId,
createdAt: workspace.createdAt,
ownerName: user.name,
})
.from(workspace)
.leftJoin(user, eq(workspace.ownerId, user.id))
return NextResponse.json({
success: true,
data: {
workspaces: allWorkspaces,
totalCount: allWorkspaces.length,
filter: 'all',
},
userRole,
hasAdminAccess,
})
} catch (error) {
logger.error('Failed to get organization workspaces', { error })
return NextResponse.json(
{
error: 'Internal server error',
},
{ status: 500 }
)
}
}
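
A quick sketch of how a client might exercise the new route's query modes (the helper name and error handling are illustrative, not part of the diff; the response shape follows the handler above):

// Hypothetical client helper for the new endpoint
async function fetchInvitableWorkspaces(organizationId: string) {
  const res = await fetch(`/api/organizations/${organizationId}/workspaces?available=true`)
  if (!res.ok) throw new Error(`Request failed with status ${res.status}`)
  const { data } = await res.json()
  // Each entry carries { id, name, isOwner, canInvite, createdAt }
  return data.workspaces
}
// Admin-only views: ?member=<userId> for one member's access, or no query for the summary.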

View File

@@ -1,257 +0,0 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
export const dynamic = 'force-dynamic'
const logger = createLogger('SupabaseStorageUploadAPI')
const SupabaseStorageUploadSchema = z.object({
projectId: z.string().min(1, 'Project ID is required'),
apiKey: z.string().min(1, 'API key is required'),
bucket: z.string().min(1, 'Bucket name is required'),
fileName: z.string().min(1, 'File name is required'),
path: z.string().optional().nullable(),
fileData: z.any(),
contentType: z.string().optional().nullable(),
upsert: z.boolean().optional().default(false),
})
export async function POST(request: NextRequest) {
const requestId = generateRequestId()
try {
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
if (!authResult.success) {
logger.warn(
`[${requestId}] Unauthorized Supabase storage upload attempt: ${authResult.error}`
)
return NextResponse.json(
{
success: false,
error: authResult.error || 'Authentication required',
},
{ status: 401 }
)
}
logger.info(
`[${requestId}] Authenticated Supabase storage upload request via ${authResult.authType}`,
{
userId: authResult.userId,
}
)
const body = await request.json()
const validatedData = SupabaseStorageUploadSchema.parse(body)
const fileData = validatedData.fileData
const isStringInput = typeof fileData === 'string'
logger.info(`[${requestId}] Uploading to Supabase Storage`, {
bucket: validatedData.bucket,
fileName: validatedData.fileName,
path: validatedData.path,
fileDataType: isStringInput ? 'string' : 'object',
})
if (!fileData) {
return NextResponse.json(
{
success: false,
error: 'fileData is required',
},
{ status: 400 }
)
}
let uploadBody: Buffer
let uploadContentType: string | undefined
if (isStringInput) {
let content = fileData as string
const dataUrlMatch = content.match(/^data:([^;]+);base64,(.+)$/s)
if (dataUrlMatch) {
const [, mimeType, base64Data] = dataUrlMatch
content = base64Data
if (!validatedData.contentType) {
uploadContentType = mimeType
}
logger.info(`[${requestId}] Extracted base64 from data URL (MIME: ${mimeType})`)
}
const cleanedContent = content.replace(/[\s\r\n]/g, '')
const isLikelyBase64 = /^[A-Za-z0-9+/]*={0,2}$/.test(cleanedContent)
if (isLikelyBase64 && cleanedContent.length >= 4) {
try {
uploadBody = Buffer.from(cleanedContent, 'base64')
const expectedMinSize = Math.floor(cleanedContent.length * 0.7)
const expectedMaxSize = Math.ceil(cleanedContent.length * 0.8)
if (
uploadBody.length >= expectedMinSize &&
uploadBody.length <= expectedMaxSize &&
uploadBody.length > 0
) {
logger.info(
`[${requestId}] Decoded base64 content: ${cleanedContent.length} chars -> ${uploadBody.length} bytes`
)
} else {
const reEncoded = uploadBody.toString('base64')
if (reEncoded !== cleanedContent) {
logger.info(
`[${requestId}] Content looked like base64 but re-encoding didn't match, using as plain text`
)
uploadBody = Buffer.from(content, 'utf-8')
} else {
logger.info(
`[${requestId}] Decoded base64 content (verified): ${uploadBody.length} bytes`
)
}
}
} catch (decodeError) {
logger.info(
`[${requestId}] Failed to decode as base64, using as plain text: ${decodeError}`
)
uploadBody = Buffer.from(content, 'utf-8')
}
} else {
uploadBody = Buffer.from(content, 'utf-8')
logger.info(`[${requestId}] Using content as plain text (${uploadBody.length} bytes)`)
}
uploadContentType =
uploadContentType || validatedData.contentType || 'application/octet-stream'
} else {
const rawFile = fileData
logger.info(`[${requestId}] Processing file object: ${rawFile.name || 'unknown'}`)
let userFile
try {
userFile = processSingleFileToUserFile(rawFile, requestId, logger)
} catch (error) {
return NextResponse.json(
{
success: false,
error: error instanceof Error ? error.message : 'Failed to process file',
},
{ status: 400 }
)
}
const buffer = await downloadFileFromStorage(userFile, requestId, logger)
uploadBody = buffer
uploadContentType = validatedData.contentType || userFile.type || 'application/octet-stream'
}
let fullPath = validatedData.fileName
if (validatedData.path) {
const folderPath = validatedData.path.endsWith('/')
? validatedData.path
: `${validatedData.path}/`
fullPath = `${folderPath}${validatedData.fileName}`
}
const supabaseUrl = `https://${validatedData.projectId}.supabase.co/storage/v1/object/${validatedData.bucket}/${fullPath}`
const headers: Record<string, string> = {
apikey: validatedData.apiKey,
Authorization: `Bearer ${validatedData.apiKey}`,
'Content-Type': uploadContentType,
}
if (validatedData.upsert) {
headers['x-upsert'] = 'true'
}
logger.info(`[${requestId}] Sending to Supabase: ${supabaseUrl}`, {
contentType: uploadContentType,
bodySize: uploadBody.length,
upsert: validatedData.upsert,
})
const response = await fetch(supabaseUrl, {
method: 'POST',
headers,
body: new Uint8Array(uploadBody),
})
if (!response.ok) {
const errorText = await response.text()
let errorData
try {
errorData = JSON.parse(errorText)
} catch {
errorData = { message: errorText }
}
logger.error(`[${requestId}] Supabase Storage upload failed:`, {
status: response.status,
statusText: response.statusText,
error: errorData,
})
return NextResponse.json(
{
success: false,
error: errorData.message || errorData.error || `Upload failed: ${response.statusText}`,
details: errorData,
},
{ status: response.status }
)
}
const result = await response.json()
logger.info(`[${requestId}] File uploaded successfully to Supabase Storage`, {
bucket: validatedData.bucket,
path: fullPath,
})
const publicUrl = `https://${validatedData.projectId}.supabase.co/storage/v1/object/public/${validatedData.bucket}/${fullPath}`
return NextResponse.json({
success: true,
output: {
message: 'Successfully uploaded file to storage',
results: {
...result,
path: fullPath,
bucket: validatedData.bucket,
publicUrl,
},
},
})
} catch (error) {
if (error instanceof z.ZodError) {
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
return NextResponse.json(
{
success: false,
error: 'Invalid request data',
details: error.errors,
},
{ status: 400 }
)
}
logger.error(`[${requestId}] Error uploading to Supabase Storage:`, error)
return NextResponse.json(
{
success: false,
error: error instanceof Error ? error.message : 'Internal server error',
},
{ status: 500 }
)
}
}
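
For reference, the base64 heuristic this deleted route used, condensed into a standalone sketch: valid base64 decodes to roughly 3/4 of its character count, which is why the route accepted decoded sizes in the 0.7-0.8 window and otherwise fell back to a round-trip re-encode check (the function name is illustrative):

function decodeMaybeBase64(content: string): Buffer {
  const cleaned = content.replace(/[\s\r\n]/g, '')
  // Not base64-shaped (or too short): treat as plain text
  if (!/^[A-Za-z0-9+/]*={0,2}$/.test(cleaned) || cleaned.length < 4) {
    return Buffer.from(content, 'utf-8')
  }
  const decoded = Buffer.from(cleaned, 'base64')
  const inWindow =
    decoded.length > 0 &&
    decoded.length >= Math.floor(cleaned.length * 0.7) &&
    decoded.length <= Math.ceil(cleaned.length * 0.8)
  // Outside the size window, only accept if re-encoding reproduces the input exactly
  if (inWindow || decoded.toString('base64') === cleaned) {
    return decoded
  }
  return Buffer.from(content, 'utf-8')
}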

View File

@@ -338,11 +338,6 @@ const arePropsEqual = (prevProps: SubBlockProps, nextProps: SubBlockProps): bool
const configEqual =
prevProps.config.id === nextProps.config.id && prevProps.config.type === nextProps.config.type
-  const canonicalToggleEqual =
-    !!prevProps.canonicalToggle === !!nextProps.canonicalToggle &&
-    prevProps.canonicalToggle?.mode === nextProps.canonicalToggle?.mode &&
-    prevProps.canonicalToggle?.disabled === nextProps.canonicalToggle?.disabled
return (
prevProps.blockId === nextProps.blockId &&
configEqual &&
@@ -351,7 +346,8 @@ const arePropsEqual = (prevProps: SubBlockProps, nextProps: SubBlockProps): bool
prevProps.disabled === nextProps.disabled &&
prevProps.fieldDiffStatus === nextProps.fieldDiffStatus &&
prevProps.allowExpandInPreview === nextProps.allowExpandInPreview &&
-    canonicalToggleEqual
+    prevProps.canonicalToggle?.mode === nextProps.canonicalToggle?.mode &&
+    prevProps.canonicalToggle?.disabled === nextProps.canonicalToggle?.disabled
)
}
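
Note the simplification stays correct for the missing-toggle case because optional chaining yields undefined on both sides; what it drops is the old `!!prev === !!next` presence check, so an empty toggle object and no toggle at all now compare equal. A tiny illustration (values made up):

const prev = { canonicalToggle: undefined as { mode?: string; disabled?: boolean } | undefined }
const next = { canonicalToggle: {} as { mode?: string; disabled?: boolean } }
console.log(prev.canonicalToggle?.mode === next.canonicalToggle?.mode) // true
console.log(prev.canonicalToggle?.disabled === next.canonicalToggle?.disabled) // true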

View File

@@ -214,6 +214,15 @@ export const A2ABlock: BlockConfig<A2AResponse> = {
],
config: {
tool: (params) => params.operation as string,
+      params: (params) => {
+        const { fileUpload, fileReference, ...rest } = params
+        const hasFileUpload = Array.isArray(fileUpload) ? fileUpload.length > 0 : !!fileUpload
+        const files = hasFileUpload ? fileUpload : fileReference
+        return {
+          ...rest,
+          ...(files ? { files } : {}),
+        }
+      },
},
},
inputs: {
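
The new params mapper prefers an actual upload over a stored reference and omits the key entirely when neither is set; a runnable mirror of the logic (inputs are hypothetical):

const resolveFiles = (params: { fileUpload?: unknown[]; fileReference?: string }) => {
  const { fileUpload, fileReference, ...rest } = params
  const hasFileUpload = Array.isArray(fileUpload) ? fileUpload.length > 0 : !!fileUpload
  const files = hasFileUpload ? fileUpload : fileReference
  return { ...rest, ...(files ? { files } : {}) }
}
console.log(resolveFiles({ fileUpload: [{}], fileReference: 'doc-1' })) // { files: [{}] }
console.log(resolveFiles({ fileUpload: [], fileReference: 'doc-1' }))   // { files: 'doc-1' }
console.log(resolveFiles({ fileUpload: [] }))                           // {}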

View File

@@ -581,18 +581,6 @@ export const GmailV2Block: BlockConfig<GmailToolResponse> = {
results: { type: 'json', description: 'Search/read summary results' },
attachments: { type: 'json', description: 'Downloaded attachments (if enabled)' },
-    // Draft-specific outputs
-    draftId: {
-      type: 'string',
-      description: 'Draft ID',
-      condition: { field: 'operation', value: 'draft_gmail' },
-    },
-    messageId: {
-      type: 'string',
-      description: 'Gmail message ID for the draft',
-      condition: { field: 'operation', value: 'draft_gmail' },
-    },
// Trigger outputs (unchanged)
email_id: { type: 'string', description: 'Gmail message ID' },
thread_id: { type: 'string', description: 'Gmail thread ID' },

View File

@@ -661,25 +661,12 @@ Return ONLY the PostgREST filter expression - no explanations, no markdown, no e
placeholder: 'folder/subfolder/',
condition: { field: 'operation', value: 'storage_upload' },
},
-    {
-      id: 'file',
-      title: 'File',
-      type: 'file-upload',
-      canonicalParamId: 'fileData',
-      placeholder: 'Upload file to storage',
-      condition: { field: 'operation', value: 'storage_upload' },
-      mode: 'basic',
-      multiple: false,
-      required: true,
-    },
    {
      id: 'fileContent',
      title: 'File Content',
      type: 'code',
-      canonicalParamId: 'fileData',
      placeholder: 'Base64 encoded for binary files, or plain text',
      condition: { field: 'operation', value: 'storage_upload' },
-      mode: 'advanced',
      required: true,
    },
{

View File

@@ -1,9 +1,7 @@
+import { getBlockOutputs } from '@/lib/workflows/blocks/block-outputs'
import { normalizeName } from '@/executor/constants'
import type { ExecutionContext } from '@/executor/types'
import type { OutputSchema } from '@/executor/utils/block-reference'
-import type { SerializedBlock } from '@/serializer/types'
-import type { ToolConfig } from '@/tools/types'
-import { getTool } from '@/tools/utils'
export interface BlockDataCollection {
blockData: Record<string, unknown>
@@ -11,32 +9,6 @@ export interface BlockDataCollection {
blockOutputSchemas: Record<string, OutputSchema>
}
-export function getBlockSchema(
-  block: SerializedBlock,
-  toolConfig?: ToolConfig
-): OutputSchema | undefined {
-  const isTrigger =
-    block.metadata?.category === 'triggers' ||
-    (block.config?.params as Record<string, unknown> | undefined)?.triggerMode === true
-  // Triggers use saved outputs (defines the trigger payload schema)
-  if (isTrigger && block.outputs && Object.keys(block.outputs).length > 0) {
-    return block.outputs as OutputSchema
-  }
-  // When a tool is selected, tool outputs are the source of truth
-  if (toolConfig?.outputs && Object.keys(toolConfig.outputs).length > 0) {
-    return toolConfig.outputs as OutputSchema
-  }
-  // Fallback to saved outputs for blocks without tools
-  if (block.outputs && Object.keys(block.outputs).length > 0) {
-    return block.outputs as OutputSchema
-  }
-  return undefined
-}
export function collectBlockData(ctx: ExecutionContext): BlockDataCollection {
const blockData: Record<string, unknown> = {}
const blockNameMapping: Record<string, string> = {}
@@ -46,21 +18,24 @@ export function collectBlockData(ctx: ExecutionContext): BlockDataCollection {
if (state.output !== undefined) {
blockData[id] = state.output
}
-  }
-  const workflowBlocks = ctx.workflow?.blocks ?? []
-  for (const block of workflowBlocks) {
-    const id = block.id
+    const workflowBlock = ctx.workflow?.blocks?.find((b) => b.id === id)
+    if (!workflowBlock) continue
-    if (block.metadata?.name) {
-      blockNameMapping[normalizeName(block.metadata.name)] = id
+    if (workflowBlock.metadata?.name) {
+      blockNameMapping[normalizeName(workflowBlock.metadata.name)] = id
    }
-    const toolId = block.config?.tool
-    const toolConfig = toolId ? getTool(toolId) : undefined
-    const schema = getBlockSchema(block, toolConfig)
-    if (schema && Object.keys(schema).length > 0) {
-      blockOutputSchemas[id] = schema
+    const blockType = workflowBlock.metadata?.id
+    if (blockType) {
+      const params = workflowBlock.config?.params as Record<string, unknown> | undefined
+      const subBlocks = params
+        ? Object.fromEntries(Object.entries(params).map(([k, v]) => [k, { value: v }]))
+        : undefined
+      const schema = getBlockOutputs(blockType, subBlocks)
+      if (schema && Object.keys(schema).length > 0) {
+        blockOutputSchemas[id] = schema
}
}
}
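
The reshaping step exists because getBlockOutputs expects subBlock values wrapped as { value }; a minimal illustration with made-up params:

const params = { operation: 'storage_upload', bucket: 'avatars' }
const subBlocks = Object.fromEntries(Object.entries(params).map(([k, v]) => [k, { value: v }]))
console.log(subBlocks) // { operation: { value: 'storage_upload' }, bucket: { value: 'avatars' } }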

View File

@@ -378,30 +378,8 @@ function buildManualTriggerOutput(
return mergeFilesIntoOutput(output, workflowInput)
}
-function buildIntegrationTriggerOutput(
-  workflowInput: unknown,
-  structuredInput: Record<string, unknown>,
-  hasStructured: boolean
-): NormalizedBlockOutput {
-  const output: NormalizedBlockOutput = {}
-  if (hasStructured) {
-    for (const [key, value] of Object.entries(structuredInput)) {
-      output[key] = value
-    }
-  }
-  if (isPlainObject(workflowInput)) {
-    for (const [key, value] of Object.entries(workflowInput)) {
-      if (value !== undefined && value !== null) {
-        output[key] = value
-      } else if (!Object.hasOwn(output, key)) {
-        output[key] = value
-      }
-    }
-  }
-  return mergeFilesIntoOutput(output, workflowInput)
+function buildIntegrationTriggerOutput(workflowInput: unknown): NormalizedBlockOutput {
+  return isPlainObject(workflowInput) ? (workflowInput as NormalizedBlockOutput) : {}
}
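
The rewrite makes the integration-trigger path a plain passthrough: structured input is no longer merged and non-object payloads collapse to an empty output. Illustrative calls:

buildIntegrationTriggerOutput({ email: { id: 'm1' } }) // -> { email: { id: 'm1' } }
buildIntegrationTriggerOutput('raw-body')              // -> {}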
function extractSubBlocks(block: SerializedBlock): Record<string, unknown> | undefined {
@@ -450,7 +428,7 @@ export function buildStartBlockOutput(options: StartBlockOutputOptions): Normali
return buildManualTriggerOutput(finalInput, workflowInput)
case StartBlockPath.EXTERNAL_TRIGGER:
-      return buildIntegrationTriggerOutput(workflowInput, structuredInput, hasStructured)
+      return buildIntegrationTriggerOutput(workflowInput)
case StartBlockPath.LEGACY_STARTER:
return buildLegacyStarterOutput(

View File

@@ -1,10 +1,10 @@
+import { getBlockOutputs } from '@/lib/workflows/blocks/block-outputs'
import {
isReference,
normalizeName,
parseReferencePath,
SPECIAL_REFERENCE_PREFIXES,
} from '@/executor/constants'
-import { getBlockSchema } from '@/executor/utils/block-data'
import {
InvalidFieldError,
type OutputSchema,
@@ -67,9 +67,15 @@ export class BlockResolver implements Resolver {
blockData[blockId] = output
}
+      const blockType = block.metadata?.id
+      const params = block.config?.params as Record<string, unknown> | undefined
+      const subBlocks = params
+        ? Object.fromEntries(Object.entries(params).map(([k, v]) => [k, { value: v }]))
+        : undefined
      const toolId = block.config?.tool
      const toolConfig = toolId ? getTool(toolId) : undefined
-      const outputSchema = getBlockSchema(block, toolConfig)
+      const outputSchema =
+        toolConfig?.outputs ?? (blockType ? getBlockOutputs(blockType, subBlocks) : block.outputs)
if (outputSchema && Object.keys(outputSchema).length > 0) {
blockOutputSchemas[blockId] = outputSchema

View File

@@ -680,10 +680,6 @@ export function useCollaborativeWorkflow() {
previousPositions?: Map<string, { x: number; y: number; parentId?: string }>
}
) => {
-      if (isBaselineDiffView) {
-        return
-      }
if (!isInActiveRoom()) {
logger.debug('Skipping batch position update - not in active workflow')
return
@@ -729,7 +725,7 @@ export function useCollaborativeWorkflow() {
}
}
},
-    [isBaselineDiffView, addToQueue, activeWorkflowId, session?.user?.id, isInActiveRoom, undoRedo]
+    [addToQueue, activeWorkflowId, session?.user?.id, isInActiveRoom, undoRedo]
)
const collaborativeUpdateBlockName = useCallback(
@@ -821,10 +817,6 @@ export function useCollaborativeWorkflow() {
const collaborativeBatchToggleBlockEnabled = useCallback(
(ids: string[]) => {
-      if (isBaselineDiffView) {
-        return
-      }
if (ids.length === 0) return
const previousStates: Record<string, boolean> = {}
@@ -857,7 +849,7 @@ export function useCollaborativeWorkflow() {
undoRedo.recordBatchToggleEnabled(validIds, previousStates)
},
-    [isBaselineDiffView, addToQueue, activeWorkflowId, session?.user?.id, undoRedo]
+    [addToQueue, activeWorkflowId, session?.user?.id, undoRedo]
)
const collaborativeBatchUpdateParent = useCallback(
@@ -869,10 +861,6 @@ export function useCollaborativeWorkflow() {
affectedEdges: Edge[]
}>
) => {
-      if (isBaselineDiffView) {
-        return
-      }
if (!isInActiveRoom()) {
logger.debug('Skipping batch update parent - not in active workflow')
return
@@ -943,7 +931,7 @@ export function useCollaborativeWorkflow() {
logger.debug('Batch updated parent for blocks', { updateCount: updates.length })
},
-    [isBaselineDiffView, isInActiveRoom, undoRedo, addToQueue, activeWorkflowId, session?.user?.id]
+    [isInActiveRoom, undoRedo, addToQueue, activeWorkflowId, session?.user?.id]
)
const collaborativeToggleBlockAdvancedMode = useCallback(
@@ -963,37 +951,18 @@ export function useCollaborativeWorkflow() {
const collaborativeSetBlockCanonicalMode = useCallback(
(id: string, canonicalId: string, canonicalMode: 'basic' | 'advanced') => {
-      if (isBaselineDiffView) {
-        return
-      }
-      useWorkflowStore.getState().setBlockCanonicalMode(id, canonicalId, canonicalMode)
-      if (!activeWorkflowId) {
-        return
-      }
-      const operationId = crypto.randomUUID()
-      addToQueue({
-        id: operationId,
-        operation: {
-          operation: BLOCK_OPERATIONS.UPDATE_CANONICAL_MODE,
-          target: OPERATION_TARGETS.BLOCK,
-          payload: { id, canonicalId, canonicalMode },
-        },
-        workflowId: activeWorkflowId,
-        userId: session?.user?.id || 'unknown',
-      })
+      executeQueuedOperation(
+        BLOCK_OPERATIONS.UPDATE_CANONICAL_MODE,
+        OPERATION_TARGETS.BLOCK,
+        { id, canonicalId, canonicalMode },
+        () => useWorkflowStore.getState().setBlockCanonicalMode(id, canonicalId, canonicalMode)
+      )
    },
-    [isBaselineDiffView, activeWorkflowId, addToQueue, session?.user?.id]
+    [executeQueuedOperation]
)
const collaborativeBatchToggleBlockHandles = useCallback(
(ids: string[]) => {
-      if (isBaselineDiffView) {
-        return
-      }
if (ids.length === 0) return
const previousStates: Record<string, boolean> = {}
@@ -1026,15 +995,11 @@ export function useCollaborativeWorkflow() {
undoRedo.recordBatchToggleHandles(validIds, previousStates)
},
-    [isBaselineDiffView, addToQueue, activeWorkflowId, session?.user?.id, undoRedo]
+    [addToQueue, activeWorkflowId, session?.user?.id, undoRedo]
)
const collaborativeBatchAddEdges = useCallback(
(edges: Edge[], options?: { skipUndoRedo?: boolean }) => {
-      if (isBaselineDiffView) {
-        return false
-      }
if (!isInActiveRoom()) {
logger.debug('Skipping batch add edges - not in active workflow')
return false
@@ -1070,15 +1035,11 @@ export function useCollaborativeWorkflow() {
return true
},
-    [isBaselineDiffView, addToQueue, activeWorkflowId, session?.user?.id, isInActiveRoom, undoRedo]
+    [addToQueue, activeWorkflowId, session?.user?.id, isInActiveRoom, undoRedo]
)
const collaborativeBatchRemoveEdges = useCallback(
(edgeIds: string[], options?: { skipUndoRedo?: boolean }) => {
-      if (isBaselineDiffView) {
-        return false
-      }
if (!isInActiveRoom()) {
logger.debug('Skipping batch remove edges - not in active workflow')
return false
@@ -1128,7 +1089,7 @@ export function useCollaborativeWorkflow() {
logger.info('Batch removed edges', { count: validEdgeIds.length })
return true
},
-    [isBaselineDiffView, isInActiveRoom, addToQueue, activeWorkflowId, session, undoRedo]
+    [isInActiveRoom, addToQueue, activeWorkflowId, session, undoRedo]
)
const collaborativeSetSubblockValue = useCallback(
@@ -1204,10 +1165,6 @@ export function useCollaborativeWorkflow() {
(blockId: string, subblockId: string, value: any) => {
if (isApplyingRemoteChange.current) return
-      if (isBaselineDiffView) {
-        return
-      }
if (!isInActiveRoom()) {
logger.debug('Skipping tag selection - not in active workflow', {
currentWorkflowId,
@@ -1235,14 +1192,7 @@ export function useCollaborativeWorkflow() {
userId: session?.user?.id || 'unknown',
})
},
-    [
-      isBaselineDiffView,
-      addToQueue,
-      currentWorkflowId,
-      activeWorkflowId,
-      session?.user?.id,
-      isInActiveRoom,
-    ]
+    [addToQueue, currentWorkflowId, activeWorkflowId, session?.user?.id, isInActiveRoom]
)
const collaborativeUpdateLoopType = useCallback(
@@ -1588,10 +1538,6 @@ export function useCollaborativeWorkflow() {
const collaborativeBatchRemoveBlocks = useCallback(
(blockIds: string[], options?: { skipUndoRedo?: boolean }) => {
-      if (isBaselineDiffView) {
-        return false
-      }
if (!isInActiveRoom()) {
logger.debug('Skipping batch remove blocks - not in active workflow')
return false
@@ -1673,7 +1619,6 @@ export function useCollaborativeWorkflow() {
return true
},
[
-      isBaselineDiffView,
addToQueue,
activeWorkflowId,
session?.user?.id,

View File

@@ -10,6 +10,7 @@ import {
type KnowledgeBaseArgs,
} from '@/lib/copilot/tools/shared/schemas'
import { useCopilotStore } from '@/stores/panel/copilot/store'
+import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
/**
* Client tool for knowledge base operations
@@ -102,7 +103,19 @@ export class KnowledgeBaseClientTool extends BaseClientTool {
const logger = createLogger('KnowledgeBaseClientTool')
try {
this.setState(ClientToolCallState.executing)
-      const payload: KnowledgeBaseArgs = { ...(args || { operation: 'list' }) }
+      // Get the workspace ID from the workflow registry hydration state
+      const { hydration } = useWorkflowRegistry.getState()
+      const workspaceId = hydration.workspaceId
+      // Build payload with workspace ID included in args
+      const payload: KnowledgeBaseArgs = {
+        ...(args || { operation: 'list' }),
+        args: {
+          ...(args?.args || {}),
+          workspaceId: args?.args?.workspaceId || workspaceId || undefined,
+        },
+      }
const res = await fetch('/api/copilot/execute-copilot-server-tool', {
method: 'POST',

View File

@@ -2508,6 +2508,10 @@ async function validateWorkflowSelectorIds(
for (const subBlockConfig of blockConfig.subBlocks) {
if (!SELECTOR_TYPES.has(subBlockConfig.type)) continue
+      // Skip oauth-input - credentials are pre-validated before edit application
+      // This allows existing collaborator credentials to remain untouched
+      if (subBlockConfig.type === 'oauth-input') continue
const subBlockValue = blockData.subBlocks?.[subBlockConfig.id]?.value
if (!subBlockValue) continue
@@ -2573,6 +2577,156 @@ async function validateWorkflowSelectorIds(
return errors
}
/**
* Pre-validates credential and apiKey inputs in operations before they are applied.
* - Validates oauth-input (credential) IDs belong to the user
* - Filters out apiKey inputs for hosted models when isHosted is true
* Returns validation errors for any removed inputs.
*/
async function preValidateCredentialInputs(
operations: EditWorkflowOperation[],
context: { userId: string }
): Promise<{ filteredOperations: EditWorkflowOperation[]; errors: ValidationError[] }> {
const { isHosted } = await import('@/lib/core/config/feature-flags')
const { getHostedModels } = await import('@/providers/utils')
const logger = createLogger('PreValidateCredentials')
const errors: ValidationError[] = []
// Collect credential and apiKey inputs that need validation/filtering
const credentialInputs: Array<{
operationIndex: number
blockId: string
blockType: string
fieldName: string
value: string
}> = []
const hostedApiKeyInputs: Array<{
operationIndex: number
blockId: string
blockType: string
model: string
}> = []
const hostedModelsLower = isHosted ? new Set(getHostedModels().map((m) => m.toLowerCase())) : null
operations.forEach((op, opIndex) => {
if (!op.params?.inputs || !op.params?.type) return
const blockConfig = getBlock(op.params.type)
if (!blockConfig) return
// Find oauth-input subblocks
for (const subBlockConfig of blockConfig.subBlocks) {
if (subBlockConfig.type !== 'oauth-input') continue
const inputValue = op.params.inputs[subBlockConfig.id]
if (!inputValue || typeof inputValue !== 'string' || inputValue.trim() === '') continue
credentialInputs.push({
operationIndex: opIndex,
blockId: op.block_id,
blockType: op.params.type,
fieldName: subBlockConfig.id,
value: inputValue,
})
}
// Check for apiKey inputs on hosted models
if (hostedModelsLower && op.params.inputs.apiKey) {
const modelValue = op.params.inputs.model
if (modelValue && typeof modelValue === 'string') {
if (hostedModelsLower.has(modelValue.toLowerCase())) {
hostedApiKeyInputs.push({
operationIndex: opIndex,
blockId: op.block_id,
blockType: op.params.type,
model: modelValue,
})
}
}
}
})
const hasCredentialsToValidate = credentialInputs.length > 0
const hasHostedApiKeysToFilter = hostedApiKeyInputs.length > 0
if (!hasCredentialsToValidate && !hasHostedApiKeysToFilter) {
return { filteredOperations: operations, errors }
}
// Deep clone operations so we can modify them
const filteredOperations = structuredClone(operations)
// Filter out apiKey inputs for hosted models and add validation errors
if (hasHostedApiKeysToFilter) {
logger.info('Filtering apiKey inputs for hosted models', { count: hostedApiKeyInputs.length })
for (const apiKeyInput of hostedApiKeyInputs) {
const op = filteredOperations[apiKeyInput.operationIndex]
if (op.params?.inputs?.apiKey) {
op.params.inputs.apiKey = undefined
logger.debug('Filtered apiKey for hosted model', {
blockId: apiKeyInput.blockId,
model: apiKeyInput.model,
})
errors.push({
blockId: apiKeyInput.blockId,
blockType: apiKeyInput.blockType,
field: 'apiKey',
value: '[redacted]',
error: `Cannot set API key for hosted model "${apiKeyInput.model}" - API keys are managed by the platform when using hosted models`,
})
}
}
}
// Validate credential inputs
if (hasCredentialsToValidate) {
logger.info('Pre-validating credential inputs', {
credentialCount: credentialInputs.length,
userId: context.userId,
})
const allCredentialIds = credentialInputs.map((c) => c.value)
const validationResult = await validateSelectorIds('oauth-input', allCredentialIds, context)
const invalidSet = new Set(validationResult.invalid)
if (invalidSet.size > 0) {
for (const credInput of credentialInputs) {
if (!invalidSet.has(credInput.value)) continue
const op = filteredOperations[credInput.operationIndex]
if (op.params?.inputs?.[credInput.fieldName]) {
delete op.params.inputs[credInput.fieldName]
logger.info('Removed invalid credential from operation', {
blockId: credInput.blockId,
field: credInput.fieldName,
invalidValue: credInput.value,
})
}
const warningInfo = validationResult.warning ? `. ${validationResult.warning}` : ''
errors.push({
blockId: credInput.blockId,
blockType: credInput.blockType,
field: credInput.fieldName,
value: credInput.value,
error: `Invalid credential ID "${credInput.value}" - credential does not exist or user doesn't have access${warningInfo}`,
})
}
logger.warn('Filtered out invalid credentials', {
invalidCount: invalidSet.size,
})
}
}
return { filteredOperations, errors }
}
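
A hypothetical invocation showing both filtering paths (the operation shape is abbreviated; the block type, model, and IDs are placeholders):

const ops = [
  { block_id: 'b1', params: { type: 'agent', inputs: { model: 'gpt-4o', apiKey: 'sk-...' } } },
] as unknown as EditWorkflowOperation[]
const { filteredOperations, errors } = await preValidateCredentialInputs(ops, { userId: 'user_123' })
// On a hosted deployment the apiKey input is cleared and an error with value '[redacted]' is
// recorded; invalid oauth credential IDs are deleted from inputs with a per-field error.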
async function getCurrentWorkflowStateFromDb(
workflowId: string
): Promise<{ workflowState: any; subBlockValues: Record<string, Record<string, any>> }> {
@@ -2657,12 +2811,28 @@ export const editWorkflowServerTool: BaseServerTool<EditWorkflowParams, any> = {
// Get permission config for the user
const permissionConfig = context?.userId ? await getUserPermissionConfig(context.userId) : null
+    // Pre-validate credential and apiKey inputs before applying operations
+    // This filters out invalid credentials and apiKeys for hosted models
+    let operationsToApply = operations
+    const credentialErrors: ValidationError[] = []
+    if (context?.userId) {
+      const { filteredOperations, errors: credErrors } = await preValidateCredentialInputs(
+        operations,
+        { userId: context.userId }
+      )
+      operationsToApply = filteredOperations
+      credentialErrors.push(...credErrors)
+    }
    // Apply operations directly to the workflow state
    const {
      state: modifiedWorkflowState,
      validationErrors,
      skippedItems,
-    } = applyOperationsToWorkflowState(workflowState, operations, permissionConfig)
+    } = applyOperationsToWorkflowState(workflowState, operationsToApply, permissionConfig)
+    // Add credential validation errors
+    validationErrors.push(...credentialErrors)
// Get workspaceId for selector validation
let workspaceId: string | undefined

View File

@@ -618,6 +618,13 @@ export function getToolOutputs(
}
}
+/**
+ * Generates output paths for a tool-based block.
+ *
+ * @param blockConfig - The block configuration containing tools config
+ * @param subBlocks - SubBlock values for tool selection and condition evaluation
+ * @returns Array of output paths for the tool, or empty array on error
+ */
export function getToolOutputPaths(
blockConfig: BlockConfig,
subBlocks?: Record<string, SubBlockWithValue>
@@ -627,22 +634,12 @@ export function getToolOutputPaths(
if (!outputs || Object.keys(outputs).length === 0) return []
if (subBlocks && blockConfig.outputs) {
+    const filteredBlockOutputs = filterOutputsByCondition(blockConfig.outputs, subBlocks)
+    const allowedKeys = new Set(Object.keys(filteredBlockOutputs))
    const filteredOutputs: Record<string, any> = {}
    for (const [key, value] of Object.entries(outputs)) {
      const blockOutput = blockConfig.outputs[key]
      if (!blockOutput || typeof blockOutput !== 'object') {
        filteredOutputs[key] = value
        continue
      }
-      const condition = 'condition' in blockOutput ? blockOutput.condition : undefined
-      if (condition) {
-        if (evaluateOutputCondition(condition, subBlocks)) {
-          filteredOutputs[key] = value
-        }
-      } else {
+      if (allowedKeys.has(key)) {
filteredOutputs[key] = value
}
}
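
How the allowedKeys path plays out for a conditional output, using the draft outputs removed from the Gmail block earlier in this diff as the shape (values illustrative):

// outputs:   { draftId: { type: 'string', condition: { field: 'operation', value: 'draft_gmail' } } }
// subBlocks: { operation: { value: 'send_gmail' } }
// filterOutputsByCondition drops draftId, so allowedKeys.has('draftId') is false
// and the draftId output path is omitted for this operation.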

View File

@@ -27,9 +27,6 @@ export function registerEmitFunctions(
emitSubblockUpdate = subblockEmit
emitVariableUpdate = variableEmit
currentRegisteredWorkflowId = workflowId
-  if (workflowId) {
-    useOperationQueueStore.getState().processNextOperation()
-  }
}
let currentRegisteredWorkflowId: string | null = null
@@ -265,14 +262,16 @@ export const useOperationQueueStore = create<OperationQueueState>((set, get) =>
return
}
-      if (!currentRegisteredWorkflowId) {
-        return
-      }
-      const nextOperation = state.operations.find(
-        (op) => op.status === 'pending' && op.workflowId === currentRegisteredWorkflowId
-      )
+      const nextOperation = currentRegisteredWorkflowId
+        ? state.operations.find(
+            (op) => op.status === 'pending' && op.workflowId === currentRegisteredWorkflowId
+          )
+        : state.operations.find((op) => op.status === 'pending')
      if (!nextOperation) {
        return
      }
+      if (currentRegisteredWorkflowId && nextOperation.workflowId !== currentRegisteredWorkflowId) {
+        return
+      }

View File

@@ -38,12 +38,11 @@ export const storageUploadTool: ToolConfig<
visibility: 'user-or-llm',
description: 'Optional folder path (e.g., "folder/subfolder/")',
},
-    fileData: {
-      type: 'json',
+    fileContent: {
+      type: 'string',
      required: true,
      visibility: 'user-or-llm',
-      description:
-        'File to upload - UserFile object (basic mode) or string content (advanced mode: base64 or plain text). Supports data URLs.',
+      description: 'The file content (base64 encoded for binary files, or plain text)',
},
contentType: {
type: 'string',
@@ -66,28 +65,65 @@ export const storageUploadTool: ToolConfig<
},
request: {
-    url: '/api/tools/supabase/storage-upload',
+    url: (params) => {
+      // Combine folder path and fileName, ensuring proper formatting
+      let fullPath = params.fileName
+      if (params.path) {
+        // Ensure path ends with / and doesn't have double slashes
+        const folderPath = params.path.endsWith('/') ? params.path : `${params.path}/`
+        fullPath = `${folderPath}${params.fileName}`
+      }
+      return `https://${params.projectId}.supabase.co/storage/v1/object/${params.bucket}/${fullPath}`
+    },
    method: 'POST',
-    headers: () => ({
-      'Content-Type': 'application/json',
-    }),
-    body: (params) => ({
-      projectId: params.projectId,
-      apiKey: params.apiKey,
-      bucket: params.bucket,
-      fileName: params.fileName,
-      path: params.path,
-      fileData: params.fileData,
-      contentType: params.contentType,
-      upsert: params.upsert,
-    }),
+    headers: (params) => {
+      const headers: Record<string, string> = {
+        apikey: params.apiKey,
+        Authorization: `Bearer ${params.apiKey}`,
+      }
+      if (params.contentType) {
+        headers['Content-Type'] = params.contentType
+      }
+      if (params.upsert) {
+        headers['x-upsert'] = 'true'
+      }
+      return headers
+    },
+    body: (params) => {
+      // Return the file content wrapped in an object
+      // The actual upload will need to handle this appropriately
+      return {
+        content: params.fileContent,
+      }
+    },
},
+  transformResponse: async (response: Response) => {
+    let data
+    try {
+      data = await response.json()
+    } catch (parseError) {
+      throw new Error(`Failed to parse Supabase storage upload response: ${parseError}`)
+    }
+    return {
+      success: true,
+      output: {
+        message: 'Successfully uploaded file to storage',
+        results: data,
+      },
+      error: undefined,
+    }
+  },
outputs: {
message: { type: 'string', description: 'Operation status message' },
results: {
type: 'object',
-      description: 'Upload result including file path, bucket, and public URL',
+      description: 'Upload result including file path and metadata',
},
},
}
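
The reworked tool now targets Supabase Storage's object endpoint directly. A minimal sketch of the equivalent raw request (project ref, bucket, key, and token below are placeholders); note the committed body() wraps the content in an object, which presumably gets serialized upstream, while the endpoint itself ultimately receives the file bytes:

async function uploadExample() {
  const projectId = 'abcd1234'
  const token = 'service-or-anon-key'
  const res = await fetch(`https://${projectId}.supabase.co/storage/v1/object/avatars/notes/readme.txt`, {
    method: 'POST',
    headers: {
      apikey: token,
      Authorization: `Bearer ${token}`,
      'Content-Type': 'text/plain',
      'x-upsert': 'true',
    },
    body: 'hello world',
  })
  if (!res.ok) throw new Error(`Upload failed: ${res.status}`)
}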

View File

@@ -136,7 +136,7 @@ export interface SupabaseStorageUploadParams {
bucket: string
fileName: string
path?: string
-  fileData: any // UserFile object (basic mode) or string (advanced mode: base64/plain text)
+  fileContent: string
contentType?: string
upsert?: boolean
}