Compare commits

...

4 Commits

Author SHA1 Message Date
Adam Gough
1e13f6ee75 duplicated subflow 2025-09-15 00:18:11 -07:00
Adam Gough
2950353952 added duplication 2025-09-13 15:32:26 -07:00
Adam Gough
3e5d3735dc changed search for folders and workflows in logs (#1327)
Co-authored-by: Adam Gough <adamgough@Mac.attlocal.net>
2025-09-13 12:38:50 -07:00
Waleed
172d51e061 fix(security): fix ssrf vuln and path validation for files route (#1325)
* update infra and remove railway

* fix(security): fix ssrf vuln

* fix path validation for file serve

* Revert "update infra and remove railway"

This reverts commit abfa2f8d51.

* lint

* ack PR comments
2025-09-13 11:32:10 -07:00
13 changed files with 935 additions and 225 deletions

View File

@@ -1,5 +1,5 @@
import { describe, expect, it } from 'vitest'
import { createFileResponse, extractFilename } from './utils'
import { createFileResponse, extractFilename, findLocalFile } from './utils'
describe('extractFilename', () => {
describe('legitimate file paths', () => {
@@ -325,3 +325,91 @@ describe('extractFilename', () => {
})
})
})
describe('findLocalFile - Path Traversal Security Tests', () => {
describe('path traversal attack prevention', () => {
it.concurrent('should reject classic path traversal attacks', () => {
const maliciousInputs = [
'../../../etc/passwd',
'..\\..\\..\\windows\\system32\\config\\sam',
'../../../../etc/shadow',
'../config.json',
'..\\config.ini',
]
maliciousInputs.forEach((input) => {
const result = findLocalFile(input)
expect(result).toBeNull()
})
})
it.concurrent('should reject encoded path traversal attempts', () => {
const encodedInputs = [
'%2e%2e%2f%2e%2e%2f%65%74%63%2f%70%61%73%73%77%64', // ../../etc/passwd
'..%2f..%2fetc%2fpasswd',
'..%5c..%5cconfig.ini',
]
encodedInputs.forEach((input) => {
const result = findLocalFile(input)
expect(result).toBeNull()
})
})
it.concurrent('should reject mixed path separators', () => {
const mixedInputs = ['../..\\config.txt', '..\\../secret.ini', '/..\\..\\system32']
mixedInputs.forEach((input) => {
const result = findLocalFile(input)
expect(result).toBeNull()
})
})
it.concurrent('should reject filenames with dangerous characters', () => {
const dangerousInputs = [
'file:with:colons.txt',
'file|with|pipes.txt',
'file?with?questions.txt',
'file*with*asterisks.txt',
]
dangerousInputs.forEach((input) => {
const result = findLocalFile(input)
expect(result).toBeNull()
})
})
it.concurrent('should reject null and empty inputs', () => {
expect(findLocalFile('')).toBeNull()
expect(findLocalFile(' ')).toBeNull()
expect(findLocalFile('\t\n')).toBeNull()
})
it.concurrent('should reject filenames that become empty after sanitization', () => {
const emptyAfterSanitization = ['../..', '..\\..\\', '////', '....', '..']
emptyAfterSanitization.forEach((input) => {
const result = findLocalFile(input)
expect(result).toBeNull()
})
})
})
describe('security validation passes for legitimate files', () => {
it.concurrent('should accept properly formatted filenames without throwing errors', () => {
const legitimateInputs = [
'document.pdf',
'image.png',
'data.csv',
'report-2024.doc',
'file_with_underscores.txt',
'file-with-dashes.json',
]
legitimateInputs.forEach((input) => {
// Should not throw security errors for legitimate filenames
expect(() => findLocalFile(input)).not.toThrow()
})
})
})
})

View File

@@ -1,8 +1,11 @@
import { existsSync } from 'fs'
import { join } from 'path'
import { join, resolve, sep } from 'path'
import { NextResponse } from 'next/server'
import { createLogger } from '@/lib/logs/console/logger'
import { UPLOAD_DIR } from '@/lib/uploads/setup'
const logger = createLogger('FilesUtils')
/**
* Response type definitions
*/
@@ -192,18 +195,71 @@ export function extractFilename(path: string): string {
}
/**
* Find a file in possible local storage locations
* Sanitize filename to prevent path traversal attacks
*/
export function findLocalFile(filename: string): string | null {
const possiblePaths = [join(UPLOAD_DIR, filename), join(process.cwd(), 'uploads', filename)]
for (const path of possiblePaths) {
if (existsSync(path)) {
return path
}
function sanitizeFilename(filename: string): string {
if (!filename || typeof filename !== 'string') {
throw new Error('Invalid filename provided')
}
return null
const sanitized = filename
.replace(/\.\./g, '') // Remove .. sequences
.replace(/[/\\]/g, '') // Remove path separators
.replace(/^\./g, '') // Remove leading dots
.trim()
if (!sanitized || sanitized.length === 0) {
throw new Error('Invalid or empty filename after sanitization')
}
if (
sanitized.includes(':') ||
sanitized.includes('|') ||
sanitized.includes('?') ||
sanitized.includes('*') ||
sanitized.includes('\x00') || // Null bytes
/[\x00-\x1F\x7F]/.test(sanitized) // Control characters
) {
throw new Error('Filename contains invalid characters')
}
return sanitized
}
/**
* Find a file in possible local storage locations with proper path validation
*/
export function findLocalFile(filename: string): string | null {
try {
const sanitizedFilename = sanitizeFilename(filename)
const possiblePaths = [
join(UPLOAD_DIR, sanitizedFilename),
join(process.cwd(), 'uploads', sanitizedFilename),
]
for (const path of possiblePaths) {
const resolvedPath = resolve(path)
const allowedDirs = [resolve(UPLOAD_DIR), resolve(process.cwd(), 'uploads')]
const isWithinAllowedDir = allowedDirs.some(
(allowedDir) => resolvedPath.startsWith(allowedDir + sep) || resolvedPath === allowedDir
)
if (!isWithinAllowedDir) {
continue // Skip this path as it's outside allowed directories
}
if (existsSync(resolvedPath)) {
return resolvedPath
}
}
return null
} catch (error) {
logger.error('Error in findLocalFile:', error)
return null
}
}
const SAFE_INLINE_TYPES = new Set([

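For reference, the containment check added to findLocalFile follows the standard Node.js pattern: resolve the candidate path and accept it only if it stays under an allowed root. A minimal standalone sketch of that pattern (directory names here are illustrative, not taken from the codebase):

import { join, resolve, sep } from 'path'

// True only when candidate resolves to a location inside allowedRoot.
// Comparing against root + sep (as the code above does) prevents prefix
// tricks such as "/uploads-evil" slipping past an allowed root of "/uploads".
function isInside(allowedRoot: string, candidate: string): boolean {
  const root = resolve(allowedRoot)
  const resolved = resolve(candidate)
  return resolved === root || resolved.startsWith(root + sep)
}

isInside('./uploads', join('./uploads', 'report.pdf')) // true
isInside('./uploads', join('./uploads', '../secrets/.env')) // false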
View File

@@ -48,8 +48,52 @@ describe('Function Execute API Route', () => {
vi.clearAllMocks()
})
describe('Security Tests', () => {
it.concurrent('should create secure fetch in VM context', async () => {
const req = createMockRequest('POST', {
code: 'return "test"',
useLocalVM: true,
})
const { POST } = await import('@/app/api/function/execute/route')
await POST(req)
expect(mockCreateContext).toHaveBeenCalled()
const contextArgs = mockCreateContext.mock.calls[0][0]
expect(contextArgs).toHaveProperty('fetch')
expect(typeof contextArgs.fetch).toBe('function')
expect(contextArgs.fetch.name).toBe('secureFetch')
})
it.concurrent('should block SSRF attacks through secure fetch wrapper', async () => {
const { validateProxyUrl } = await import('@/lib/security/url-validation')
expect(validateProxyUrl('http://169.254.169.254/latest/meta-data/').isValid).toBe(false)
expect(validateProxyUrl('http://127.0.0.1:8080/admin').isValid).toBe(false)
expect(validateProxyUrl('http://192.168.1.1/config').isValid).toBe(false)
expect(validateProxyUrl('http://10.0.0.1/internal').isValid).toBe(false)
})
it.concurrent('should allow legitimate external URLs', async () => {
const { validateProxyUrl } = await import('@/lib/security/url-validation')
expect(validateProxyUrl('https://api.github.com/user').isValid).toBe(true)
expect(validateProxyUrl('https://httpbin.org/get').isValid).toBe(true)
expect(validateProxyUrl('http://example.com/api').isValid).toBe(true)
})
it.concurrent('should block dangerous protocols', async () => {
const { validateProxyUrl } = await import('@/lib/security/url-validation')
expect(validateProxyUrl('file:///etc/passwd').isValid).toBe(false)
expect(validateProxyUrl('ftp://internal.server/files').isValid).toBe(false)
expect(validateProxyUrl('gopher://old.server/menu').isValid).toBe(false)
})
})
describe('Basic Function Execution', () => {
it('should execute simple JavaScript code successfully', async () => {
it.concurrent('should execute simple JavaScript code successfully', async () => {
const req = createMockRequest('POST', {
code: 'return "Hello World"',
timeout: 5000,
@@ -66,7 +110,7 @@ describe('Function Execute API Route', () => {
expect(data.output).toHaveProperty('executionTime')
})
it('should handle missing code parameter', async () => {
it.concurrent('should handle missing code parameter', async () => {
const req = createMockRequest('POST', {
timeout: 5000,
})
@@ -80,7 +124,7 @@ describe('Function Execute API Route', () => {
expect(data).toHaveProperty('error')
})
it('should use default timeout when not provided', async () => {
it.concurrent('should use default timeout when not provided', async () => {
const req = createMockRequest('POST', {
code: 'return "test"',
useLocalVM: true,
@@ -100,7 +144,7 @@ describe('Function Execute API Route', () => {
})
describe('Template Variable Resolution', () => {
it('should resolve environment variables with {{var_name}} syntax', async () => {
it.concurrent('should resolve environment variables with {{var_name}} syntax', async () => {
const req = createMockRequest('POST', {
code: 'return {{API_KEY}}',
useLocalVM: true,
@@ -116,7 +160,7 @@ describe('Function Execute API Route', () => {
// The code should be resolved to: return "secret-key-123"
})
it('should resolve tag variables with <tag_name> syntax', async () => {
it.concurrent('should resolve tag variables with <tag_name> syntax', async () => {
const req = createMockRequest('POST', {
code: 'return <email>',
useLocalVM: true,
@@ -132,7 +176,7 @@ describe('Function Execute API Route', () => {
// The code should be resolved with the email object
})
it('should NOT treat email addresses as template variables', async () => {
it.concurrent('should NOT treat email addresses as template variables', async () => {
const req = createMockRequest('POST', {
code: 'return "Email sent to user"',
useLocalVM: true,
@@ -151,7 +195,7 @@ describe('Function Execute API Route', () => {
// Should not try to replace <waleed@sim.ai> as a template variable
})
it('should only match valid variable names in angle brackets', async () => {
it.concurrent('should only match valid variable names in angle brackets', async () => {
const req = createMockRequest('POST', {
code: 'return <validVar> + "<invalid@email.com>" + <another_valid>',
useLocalVM: true,
@@ -170,64 +214,70 @@ describe('Function Execute API Route', () => {
})
describe('Gmail Email Data Handling', () => {
it('should handle Gmail webhook data with email addresses containing angle brackets', async () => {
const gmailData = {
email: {
id: '123',
from: 'Waleed Latif <waleed@sim.ai>',
to: 'User <user@example.com>',
subject: 'Test Email',
bodyText: 'Hello world',
},
rawEmail: {
id: '123',
payload: {
headers: [
{ name: 'From', value: 'Waleed Latif <waleed@sim.ai>' },
{ name: 'To', value: 'User <user@example.com>' },
],
it.concurrent(
'should handle Gmail webhook data with email addresses containing angle brackets',
async () => {
const gmailData = {
email: {
id: '123',
from: 'Waleed Latif <waleed@sim.ai>',
to: 'User <user@example.com>',
subject: 'Test Email',
bodyText: 'Hello world',
},
},
rawEmail: {
id: '123',
payload: {
headers: [
{ name: 'From', value: 'Waleed Latif <waleed@sim.ai>' },
{ name: 'To', value: 'User <user@example.com>' },
],
},
},
}
const req = createMockRequest('POST', {
code: 'return <email>',
useLocalVM: true,
params: gmailData,
})
const { POST } = await import('@/app/api/function/execute/route')
const response = await POST(req)
expect(response.status).toBe(200)
const data = await response.json()
expect(data.success).toBe(true)
}
)
const req = createMockRequest('POST', {
code: 'return <email>',
useLocalVM: true,
params: gmailData,
})
it.concurrent(
'should properly serialize complex email objects with special characters',
async () => {
const complexEmailData = {
email: {
from: 'Test User <test@example.com>',
bodyHtml: '<div>HTML content with "quotes" and \'apostrophes\'</div>',
bodyText: 'Text with\nnewlines\tand\ttabs',
},
}
const { POST } = await import('@/app/api/function/execute/route')
const response = await POST(req)
const req = createMockRequest('POST', {
code: 'return <email>',
useLocalVM: true,
params: complexEmailData,
})
expect(response.status).toBe(200)
const data = await response.json()
expect(data.success).toBe(true)
})
const { POST } = await import('@/app/api/function/execute/route')
const response = await POST(req)
it('should properly serialize complex email objects with special characters', async () => {
const complexEmailData = {
email: {
from: 'Test User <test@example.com>',
bodyHtml: '<div>HTML content with "quotes" and \'apostrophes\'</div>',
bodyText: 'Text with\nnewlines\tand\ttabs',
},
expect(response.status).toBe(200)
}
const req = createMockRequest('POST', {
code: 'return <email>',
useLocalVM: true,
params: complexEmailData,
})
const { POST } = await import('@/app/api/function/execute/route')
const response = await POST(req)
expect(response.status).toBe(200)
})
)
})
describe('Custom Tools', () => {
it('should handle custom tool execution with direct parameter access', async () => {
it.concurrent('should handle custom tool execution with direct parameter access', async () => {
const req = createMockRequest('POST', {
code: 'return location + " weather is sunny"',
useLocalVM: true,
@@ -246,7 +296,7 @@ describe('Function Execute API Route', () => {
})
describe('Security and Edge Cases', () => {
it('should handle malformed JSON in request body', async () => {
it.concurrent('should handle malformed JSON in request body', async () => {
const req = new NextRequest('http://localhost:3000/api/function/execute', {
method: 'POST',
body: 'invalid json{',
@@ -259,7 +309,7 @@ describe('Function Execute API Route', () => {
expect(response.status).toBe(500)
})
it('should handle timeout parameter', async () => {
it.concurrent('should handle timeout parameter', async () => {
const req = createMockRequest('POST', {
code: 'return "test"',
useLocalVM: true,
@@ -277,7 +327,7 @@ describe('Function Execute API Route', () => {
)
})
it('should handle empty parameters object', async () => {
it.concurrent('should handle empty parameters object', async () => {
const req = createMockRequest('POST', {
code: 'return "no params"',
useLocalVM: true,
@@ -485,7 +535,7 @@ SyntaxError: Invalid or unexpected token
expect(data.debug.lineContent).toBe('return a + b + c + d;')
})
it('should provide helpful suggestions for common syntax errors', async () => {
it.concurrent('should provide helpful suggestions for common syntax errors', async () => {
const mockScript = vi.fn().mockImplementation(() => {
const error = new Error('Unexpected end of input')
error.name = 'SyntaxError'
@@ -517,7 +567,7 @@ SyntaxError: Invalid or unexpected token
})
describe('Utility Functions', () => {
it('should properly escape regex special characters', async () => {
it.concurrent('should properly escape regex special characters', async () => {
// This tests the escapeRegExp function indirectly
const req = createMockRequest('POST', {
code: 'return {{special.chars+*?}}',
@@ -534,7 +584,7 @@ SyntaxError: Invalid or unexpected token
// Should handle special regex characters in variable names
})
it('should handle JSON serialization edge cases', async () => {
it.concurrent('should handle JSON serialization edge cases', async () => {
// Test with complex but not circular data first
const req = createMockRequest('POST', {
code: 'return <complexData>',

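The implementation of validateProxyUrl is not part of this diff; the assertions above imply it rejects non-HTTP(S) schemes as well as loopback, link-local, and private-range hosts while allowing ordinary external URLs. A rough sketch consistent with those tests follows; the blocklist patterns are assumptions, not the actual rules in @/lib/security/url-validation:

// Hypothetical sketch only; the real validateProxyUrl may resolve DNS,
// use CIDR math, or apply additional checks.
function validateUrlSketch(raw: string): { isValid: boolean; error?: string } {
  let url: URL
  try {
    url = new URL(raw)
  } catch {
    return { isValid: false, error: 'Malformed URL' }
  }
  if (url.protocol !== 'http:' && url.protocol !== 'https:') {
    return { isValid: false, error: `Blocked protocol: ${url.protocol}` }
  }
  const host = url.hostname
  const isInternal =
    host === 'localhost' ||
    host === '127.0.0.1' ||
    host === '169.254.169.254' || // cloud metadata endpoint
    /^10\./.test(host) ||
    /^192\.168\./.test(host) ||
    /^172\.(1[6-9]|2\d|3[01])\./.test(host)
  if (isInternal) {
    return { isValid: false, error: `Blocked internal host: ${host}` }
  }
  return { isValid: true }
}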
View File

@@ -4,6 +4,7 @@ import { env, isTruthy } from '@/lib/env'
import { executeInE2B } from '@/lib/execution/e2b'
import { CodeLanguage, DEFAULT_CODE_LANGUAGE, isValidCodeLanguage } from '@/lib/execution/languages'
import { createLogger } from '@/lib/logs/console/logger'
import { validateProxyUrl } from '@/lib/security/url-validation'
import { generateRequestId } from '@/lib/utils'
export const dynamic = 'force-dynamic'
export const runtime = 'nodejs'
@@ -11,6 +12,29 @@ export const maxDuration = 60
const logger = createLogger('FunctionExecuteAPI')
function createSecureFetch(requestId: string) {
const originalFetch = (globalThis as any).fetch || require('node-fetch').default
return async function secureFetch(input: any, init?: any) {
const url = typeof input === 'string' ? input : input?.url || input
if (!url || typeof url !== 'string') {
throw new Error('Invalid URL provided to fetch')
}
const validation = validateProxyUrl(url)
if (!validation.isValid) {
logger.warn(`[${requestId}] Blocked fetch request due to SSRF validation`, {
url: url.substring(0, 100),
error: validation.error,
})
throw new Error(`Security Error: ${validation.error}`)
}
return originalFetch(input, init)
}
}
// Constants for E2B code wrapping line counts
const E2B_JS_WRAPPER_LINES = 3 // Lines before user code: ';(async () => {', ' try {', ' const __sim_result = await (async () => {'
const E2B_PYTHON_WRAPPER_LINES = 1 // Lines before user code: 'def __sim_main__():'
@@ -737,7 +761,7 @@ export async function POST(req: NextRequest) {
params: executionParams,
environmentVariables: envVars,
...contextVariables,
fetch: (globalThis as any).fetch || require('node-fetch').default,
fetch: createSecureFetch(requestId),
console: {
log: (...args: any[]) => {
const logMessage = `${args

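With this change, user code running in the local VM receives the wrapping secureFetch instead of the raw global fetch, so an SSRF attempt fails inside the sandbox before any network request is made. An illustration of the resulting behavior (URLs are examples; the error text follows the wrapper above):

// Inside user-supplied function code executed in the VM context:
await fetch('https://api.github.com/user') // allowed, forwarded to the real fetch
await fetch('http://169.254.169.254/latest/meta-data/') // throws 'Security Error: ...'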
View File

@@ -10,6 +10,7 @@ import {
loadWorkflowFromNormalizedTables,
saveWorkflowToNormalizedTables,
} from '@/lib/workflows/db-helpers'
import { updateBlockReferences } from '@/lib/workflows/reference-utils'
import { sanitizeAgentToolsInBlocks } from '@/lib/workflows/validation'
import { getUserId } from '@/app/api/auth/oauth/utils'
import { getAllBlocks, getBlock } from '@/blocks'
@@ -31,36 +32,6 @@ const YamlWorkflowRequestSchema = z.object({
createCheckpoint: z.boolean().optional().default(false),
})
function updateBlockReferences(
value: any,
blockIdMapping: Map<string, string>,
requestId: string
): any {
if (typeof value === 'string') {
// Replace references in string values
for (const [oldId, newId] of blockIdMapping.entries()) {
if (value.includes(oldId)) {
value = value.replaceAll(`<${oldId}.`, `<${newId}.`).replaceAll(`%${oldId}.`, `%${newId}.`)
}
}
return value
}
if (Array.isArray(value)) {
return value.map((item) => updateBlockReferences(item, blockIdMapping, requestId))
}
if (value && typeof value === 'object') {
const result: Record<string, any> = {}
for (const [key, val] of Object.entries(value)) {
result[key] = updateBlockReferences(val, blockIdMapping, requestId)
}
return result
}
return value
}
/**
* Helper function to create a checkpoint before workflow changes
*/

View File

@@ -1,14 +1,21 @@
import { useEffect, useState } from 'react'
import { useEffect, useMemo, useState } from 'react'
import { Check, ChevronDown } from 'lucide-react'
import { useParams } from 'next/navigation'
import { Button } from '@/components/ui/button'
import {
Command,
CommandEmpty,
CommandGroup,
CommandInput,
CommandItem,
CommandList,
} from '@/components/ui/command'
import {
DropdownMenu,
DropdownMenuContent,
DropdownMenuItem,
DropdownMenuSeparator,
DropdownMenuTrigger,
} from '@/components/ui/dropdown-menu'
import { createLogger } from '@/lib/logs/console/logger'
import { useFolderStore } from '@/stores/folders/store'
import { useFilterStore } from '@/stores/logs/filters/store'
@@ -26,6 +33,8 @@ export default function FolderFilter() {
const workspaceId = params.workspaceId as string
const [folders, setFolders] = useState<FolderOption[]>([])
const [loading, setLoading] = useState(true)
const [search, setSearch] = useState('')
const logger = useMemo(() => createLogger('LogsFolderFilter'), [])
// Fetch all available folders from the API
useEffect(() => {
@@ -62,7 +71,7 @@ export default function FolderFilter() {
setFolders(folderOptions)
}
} catch (error) {
console.error('Failed to fetch folders:', error)
logger.error('Failed to fetch folders', { error })
} finally {
setLoading(false)
}
@@ -105,49 +114,53 @@ export default function FolderFilter() {
</DropdownMenuTrigger>
<DropdownMenuContent
align='start'
className='max-h-[300px] w-[200px] overflow-y-auto rounded-lg border-[#E5E5E5] bg-[#FFFFFF] shadow-xs dark:border-[#414141] dark:bg-[var(--surface-elevated)]'
className='w-[200px] rounded-lg border-[#E5E5E5] bg-[#FFFFFF] p-0 shadow-xs dark:border-[#414141] dark:bg-[var(--surface-elevated)]'
>
<DropdownMenuItem
key='all'
onSelect={(e) => {
e.preventDefault()
clearSelections()
}}
className='flex cursor-pointer items-center justify-between rounded-md px-3 py-2 font-[380] text-card-foreground text-sm hover:bg-secondary/50 focus:bg-secondary/50'
>
<span>All folders</span>
{folderIds.length === 0 && <Check className='h-4 w-4 text-muted-foreground' />}
</DropdownMenuItem>
{!loading && folders.length > 0 && <DropdownMenuSeparator />}
{!loading &&
folders.map((folder) => (
<DropdownMenuItem
key={folder.id}
onSelect={(e) => {
e.preventDefault()
toggleFolderId(folder.id)
}}
className='flex cursor-pointer items-center justify-between rounded-md px-3 py-2 font-[380] text-card-foreground text-sm hover:bg-secondary/50 focus:bg-secondary/50'
>
<div className='flex items-center'>
<span className='truncate' title={folder.path}>
{folder.path}
</span>
</div>
{isFolderSelected(folder.id) && <Check className='h-4 w-4 text-muted-foreground' />}
</DropdownMenuItem>
))}
{loading && (
<DropdownMenuItem
disabled
className='rounded-md px-3 py-2 font-[380] text-muted-foreground text-sm'
>
Loading folders...
</DropdownMenuItem>
)}
<Command>
<CommandInput placeholder='Search folders...' onValueChange={(v) => setSearch(v)} />
<CommandList>
<CommandEmpty>{loading ? 'Loading folders...' : 'No folders found.'}</CommandEmpty>
<CommandGroup>
<CommandItem
value='all-folders'
onSelect={() => {
clearSelections()
}}
className='cursor-pointer'
>
<span>All folders</span>
{folderIds.length === 0 && (
<Check className='ml-auto h-4 w-4 text-muted-foreground' />
)}
</CommandItem>
{useMemo(() => {
const q = search.trim().toLowerCase()
const filtered = q
? folders.filter((f) => (f.path || f.name).toLowerCase().includes(q))
: folders
return filtered.map((folder) => (
<CommandItem
key={folder.id}
value={`${folder.path || folder.name}`}
onSelect={() => {
toggleFolderId(folder.id)
}}
className='cursor-pointer'
>
<div className='flex items-center'>
<span className='truncate' title={folder.path}>
{folder.path}
</span>
</div>
{isFolderSelected(folder.id) && (
<Check className='ml-auto h-4 w-4 text-muted-foreground' />
)}
</CommandItem>
))
}, [folders, search, folderIds])}
</CommandGroup>
</CommandList>
</Command>
</DropdownMenuContent>
</DropdownMenu>
)

View File

@@ -1,13 +1,20 @@
import { useEffect, useState } from 'react'
import { useEffect, useMemo, useState } from 'react'
import { Check, ChevronDown } from 'lucide-react'
import { Button } from '@/components/ui/button'
import {
Command,
CommandEmpty,
CommandGroup,
CommandInput,
CommandItem,
CommandList,
} from '@/components/ui/command'
import {
DropdownMenu,
DropdownMenuContent,
DropdownMenuItem,
DropdownMenuSeparator,
DropdownMenuTrigger,
} from '@/components/ui/dropdown-menu'
import { createLogger } from '@/lib/logs/console/logger'
import { useFilterStore } from '@/stores/logs/filters/store'
interface WorkflowOption {
@@ -20,6 +27,8 @@ export default function Workflow() {
const { workflowIds, toggleWorkflowId, setWorkflowIds } = useFilterStore()
const [workflows, setWorkflows] = useState<WorkflowOption[]>([])
const [loading, setLoading] = useState(true)
const [search, setSearch] = useState('')
const logger = useMemo(() => createLogger('LogsWorkflowFilter'), [])
// Fetch all available workflows from the API
useEffect(() => {
@@ -37,7 +46,7 @@ export default function Workflow() {
setWorkflows(workflowOptions)
}
} catch (error) {
console.error('Failed to fetch workflows:', error)
logger.error('Failed to fetch workflows', { error })
} finally {
setLoading(false)
}
@@ -80,57 +89,55 @@ export default function Workflow() {
</DropdownMenuTrigger>
<DropdownMenuContent
align='start'
className='max-h-[300px] w-[180px] overflow-y-auto rounded-lg border-[#E5E5E5] bg-[#FFFFFF] shadow-xs dark:border-[#414141] dark:bg-[var(--surface-elevated)]'
style={{
scrollbarWidth: 'none',
msOverflowStyle: 'none',
}}
className='w-[180px] rounded-lg border-[#E5E5E5] bg-[#FFFFFF] p-0 shadow-xs dark:border-[#414141] dark:bg-[var(--surface-elevated)]'
>
<DropdownMenuItem
key='all'
onSelect={(e) => {
e.preventDefault()
clearSelections()
}}
className='flex cursor-pointer items-center justify-between rounded-md px-3 py-2 font-[380] text-card-foreground text-sm hover:bg-secondary/50 focus:bg-secondary/50'
>
<span>All workflows</span>
{workflowIds.length === 0 && <Check className='h-4 w-4 text-muted-foreground' />}
</DropdownMenuItem>
{!loading && workflows.length > 0 && <DropdownMenuSeparator />}
{!loading &&
workflows.map((workflow) => (
<DropdownMenuItem
key={workflow.id}
onSelect={(e) => {
e.preventDefault()
toggleWorkflowId(workflow.id)
}}
className='flex cursor-pointer items-center justify-between rounded-md px-3 py-2 font-[380] text-card-foreground text-sm hover:bg-secondary/50 focus:bg-secondary/50'
>
<div className='flex items-center'>
<div
className='mr-2 h-2 w-2 rounded-full'
style={{ backgroundColor: workflow.color }}
/>
{workflow.name}
</div>
{isWorkflowSelected(workflow.id) && (
<Check className='h-4 w-4 text-muted-foreground' />
)}
</DropdownMenuItem>
))}
{loading && (
<DropdownMenuItem
disabled
className='rounded-md px-3 py-2 font-[380] text-muted-foreground text-sm'
>
Loading workflows...
</DropdownMenuItem>
)}
<Command>
<CommandInput placeholder='Search workflows...' onValueChange={(v) => setSearch(v)} />
<CommandList>
<CommandEmpty>{loading ? 'Loading workflows...' : 'No workflows found.'}</CommandEmpty>
<CommandGroup>
<CommandItem
value='all-workflows'
onSelect={() => {
clearSelections()
}}
className='cursor-pointer'
>
<span>All workflows</span>
{workflowIds.length === 0 && (
<Check className='ml-auto h-4 w-4 text-muted-foreground' />
)}
</CommandItem>
{useMemo(() => {
const q = search.trim().toLowerCase()
const filtered = q
? workflows.filter((w) => w.name.toLowerCase().includes(q))
: workflows
return filtered.map((workflow) => (
<CommandItem
key={workflow.id}
value={`${workflow.name}`}
onSelect={() => {
toggleWorkflowId(workflow.id)
}}
className='cursor-pointer'
>
<div className='flex items-center'>
<div
className='mr-2 h-2 w-2 rounded-full'
style={{ backgroundColor: workflow.color }}
/>
{workflow.name}
</div>
{isWorkflowSelected(workflow.id) && (
<Check className='ml-auto h-4 w-4 text-muted-foreground' />
)}
</CommandItem>
))
}, [workflows, search, workflowIds])}
</CommandGroup>
</CommandList>
</Command>
</DropdownMenuContent>
</DropdownMenu>
)

View File

@@ -1,16 +1,19 @@
import type React from 'react'
import { memo, useMemo, useRef } from 'react'
import { Trash2 } from 'lucide-react'
import { Copy, Trash2 } from 'lucide-react'
import { Handle, type NodeProps, Position, useReactFlow } from 'reactflow'
import { StartIcon } from '@/components/icons'
import { Button } from '@/components/ui/button'
import { Card } from '@/components/ui/card'
import { createLogger } from '@/lib/logs/console/logger'
import { cn } from '@/lib/utils'
import { type DiffStatus, hasDiffStatus } from '@/lib/workflows/diff/types'
import { IterationBadges } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/components/iteration-badges/iteration-badges'
import { useCurrentWorkflow } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks'
import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'
const logger = createLogger('SubflowNode')
const SubflowNodeStyles: React.FC = () => {
return (
<style jsx global>{`
@@ -74,7 +77,7 @@ export interface SubflowNodeData {
export const SubflowNodeComponent = memo(({ data, id }: NodeProps<SubflowNodeData>) => {
const { getNodes } = useReactFlow()
const { collaborativeRemoveBlock } = useCollaborativeWorkflow()
const { collaborativeRemoveBlock, collaborativeDuplicateSubflow } = useCollaborativeWorkflow()
const blockRef = useRef<HTMLDivElement>(null)
const currentWorkflow = useCurrentWorkflow()
@@ -171,18 +174,37 @@ export const SubflowNodeComponent = memo(({ data, id }: NodeProps<SubflowNodeDat
}}
>
{!isPreview && (
<Button
variant='ghost'
size='sm'
onClick={(e) => {
e.stopPropagation()
collaborativeRemoveBlock(id)
}}
className='absolute top-2 right-2 z-20 text-gray-500 opacity-0 transition-opacity duration-200 hover:text-red-600 group-hover:opacity-100'
<div
className='absolute top-2 right-2 z-20 flex gap-1 opacity-0 transition-opacity duration-200 group-hover:opacity-100'
style={{ pointerEvents: 'auto' }}
>
<Trash2 className='h-4 w-4' />
</Button>
<Button
variant='ghost'
size='sm'
onClick={(e) => {
e.stopPropagation()
try {
collaborativeDuplicateSubflow(id)
} catch (err) {
logger.error('Failed to duplicate subflow', { err })
}
}}
className='text-gray-500 hover:text-slate-900'
>
<Copy className='h-4 w-4' />
</Button>
<Button
variant='ghost'
size='sm'
onClick={(e) => {
e.stopPropagation()
collaborativeRemoveBlock(id)
}}
className='text-gray-500 hover:text-red-600'
>
<Trash2 className='h-4 w-4' />
</Button>
</div>
)}
{/* Subflow Start */}

View File

@@ -2,6 +2,7 @@ import { useCallback, useEffect, useRef } from 'react'
import type { Edge } from 'reactflow'
import { useSession } from '@/lib/auth-client'
import { createLogger } from '@/lib/logs/console/logger'
import { updateBlockReferences } from '@/lib/workflows/reference-utils'
import { getBlock } from '@/blocks'
import { resolveOutputType } from '@/blocks/utils'
import { useSocket } from '@/contexts/socket-context'
@@ -254,6 +255,75 @@ export function useCollaborativeWorkflow() {
}
}
break
case 'duplicate-with-children': {
// Apply a duplicated subflow subtree from a remote collaborator
const parent = payload.parent
const children = Array.isArray(payload.children) ? payload.children : []
const edges = Array.isArray(payload.edges) ? payload.edges : []
// Add parent block
workflowStore.addBlock(
parent.id,
parent.type,
parent.name,
parent.position,
parent.data,
parent.parentId,
parent.extent,
{
enabled: parent.enabled,
horizontalHandles: parent.horizontalHandles,
isWide: parent.isWide,
advancedMode: parent.advancedMode,
triggerMode: parent.triggerMode ?? false,
height: parent.height,
}
)
// Add children blocks
children.forEach((child: any) => {
workflowStore.addBlock(
child.id,
child.type,
child.name,
child.position,
child.data,
child.parentId,
child.extent,
{
enabled: child.enabled,
horizontalHandles: child.horizontalHandles,
isWide: child.isWide,
advancedMode: child.advancedMode,
triggerMode: child.triggerMode ?? false,
height: child.height,
}
)
// Apply subblock values for collaborators to see immediately
if (child.subBlocks && typeof child.subBlocks === 'object') {
Object.entries(child.subBlocks).forEach(([subblockId, subblock]) => {
const value = (subblock as any)?.value
if (value !== undefined) {
subBlockStore.setValue(child.id, subblockId, value)
}
})
}
})
// Add internal edges
edges.forEach((edge: any) => {
workflowStore.addEdge({
id: edge.id,
source: edge.source,
target: edge.target,
sourceHandle: edge.sourceHandle,
targetHandle: edge.targetHandle,
})
})
break
}
}
} else if (target === 'variable') {
switch (operation) {
@@ -1061,6 +1131,222 @@ export function useCollaborativeWorkflow() {
]
)
const collaborativeDuplicateSubflow = useCallback(
(subflowId: string) => {
if (isShowingDiff) {
logger.debug('Skipping subflow duplication in diff mode')
return
}
if (!isInActiveRoom()) {
logger.debug('Skipping subflow duplication - not in active workflow', {
currentWorkflowId,
activeWorkflowId,
subflowId,
})
return
}
const parent = workflowStore.blocks[subflowId]
if (!parent || (parent.type !== 'loop' && parent.type !== 'parallel')) return
const newParentId = crypto.randomUUID()
const parentOffsetPosition = {
x: parent.position.x + 250,
y: parent.position.y + 20,
}
// Name bump similar to duplicateBlock
// Build a set of existing names to ensure uniqueness across the workflow
const existingNames = new Set(Object.values(workflowStore.blocks).map((b) => b.name))
const match = parent.name.match(/(.*?)(\d+)?$/)
let newParentName = match?.[2]
? `${match[1]}${Number.parseInt(match[2]) + 1}`
: `${parent.name} 1`
if (existingNames.has(newParentName)) {
const base = match ? match[1] : `${parent.name} `
let idx = match?.[2] ? Number.parseInt(match[2]) + 1 : 1
while (existingNames.has(`${base}${idx}`)) idx++
newParentName = `${base}${idx}`
}
existingNames.add(newParentName)
// Collect children and internal edges
const allBlocks = workflowStore.blocks
const children = Object.values(allBlocks).filter((b) => b.data?.parentId === subflowId)
const childIdSet = new Set(children.map((c) => c.id))
const allEdges = workflowStore.edges
const startHandle = parent.type === 'loop' ? 'loop-start-source' : 'parallel-start-source'
const internalEdges = allEdges.filter(
(e) =>
(e.source === subflowId && e.sourceHandle === startHandle && childIdSet.has(e.target)) ||
(childIdSet.has(e.source) && childIdSet.has(e.target))
)
// Build ID map
const idMap = new Map<string, string>()
idMap.set(subflowId, newParentId)
children.forEach((c) => idMap.set(c.id, crypto.randomUUID()))
// Construct parent payload
const parentPayload: any = {
id: newParentId,
sourceId: subflowId,
type: parent.type,
name: newParentName,
position: parentOffsetPosition,
data: parent.data ? JSON.parse(JSON.stringify(parent.data)) : {},
subBlocks: {},
outputs: parent.outputs ? JSON.parse(JSON.stringify(parent.outputs)) : {},
parentId: parent.data?.parentId || null,
extent: parent.data?.extent || null,
enabled: parent.enabled ?? true,
horizontalHandles: parent.horizontalHandles ?? true,
isWide: parent.isWide ?? false,
advancedMode: parent.advancedMode ?? false,
triggerMode: false,
height: parent.height || 0,
}
// Optimistic add of parent
workflowStore.addBlock(
newParentId,
parent.type,
newParentName,
parentOffsetPosition,
parentPayload.data,
parentPayload.parentId,
parentPayload.extent,
{
enabled: parentPayload.enabled,
horizontalHandles: parentPayload.horizontalHandles,
isWide: parentPayload.isWide,
advancedMode: parentPayload.advancedMode,
triggerMode: false,
height: parentPayload.height,
}
)
// Build children payloads, copy subblocks with values and update references
const activeId = activeWorkflowId || ''
const subblockValuesForWorkflow = subBlockStore.workflowValues[activeId] || {}
const childPayloads = children.map((child) => {
const newId = idMap.get(child.id) as string
// Name bump logic identical to duplicateBlock
const childNameMatch = child.name.match(/(.*?)(\d+)?$/)
let newChildName = childNameMatch?.[2]
? `${childNameMatch[1]}${Number.parseInt(childNameMatch[2]) + 1}`
: `${child.name} 1`
if (existingNames.has(newChildName)) {
const base = childNameMatch ? childNameMatch[1] : `${child.name} `
let idx = childNameMatch?.[2] ? Number.parseInt(childNameMatch[2]) + 1 : 1
while (existingNames.has(`${base}${idx}`)) idx++
newChildName = `${base}${idx}`
}
existingNames.add(newChildName)
const clonedSubBlocks = child.subBlocks ? JSON.parse(JSON.stringify(child.subBlocks)) : {}
const values = subblockValuesForWorkflow[child.id] || {}
Object.entries(values).forEach(([subblockId, value]) => {
const processed = updateBlockReferences(value, idMap, 'duplicate-subflow')
if (!clonedSubBlocks[subblockId]) {
clonedSubBlocks[subblockId] = { id: subblockId, type: 'unknown', value: processed }
} else {
clonedSubBlocks[subblockId].value = processed
}
})
// Optimistic add child
workflowStore.addBlock(
newId,
child.type,
newChildName,
child.position,
{
...(child.data ? JSON.parse(JSON.stringify(child.data)) : {}),
parentId: newParentId,
extent: 'parent',
},
newParentId,
'parent',
{
enabled: child.enabled,
horizontalHandles: child.horizontalHandles,
isWide: child.isWide,
advancedMode: child.advancedMode,
triggerMode: child.triggerMode ?? false,
height: child.height,
}
)
// Apply subblock values locally for immediate feedback
Object.entries(clonedSubBlocks).forEach(([subblockId, sub]) => {
const v = (sub as any)?.value
if (v !== undefined) {
subBlockStore.setValue(newId, subblockId, v)
}
})
return {
id: newId,
sourceId: child.id,
type: child.type,
name: newChildName,
position: child.position,
data: {
...(child.data ? JSON.parse(JSON.stringify(child.data)) : {}),
parentId: newParentId,
extent: 'parent',
},
subBlocks: clonedSubBlocks,
outputs: child.outputs ? JSON.parse(JSON.stringify(child.outputs)) : {},
parentId: newParentId,
extent: 'parent',
enabled: child.enabled ?? true,
horizontalHandles: child.horizontalHandles ?? true,
isWide: child.isWide ?? false,
advancedMode: child.advancedMode ?? false,
triggerMode: child.triggerMode ?? false,
height: child.height || 0,
}
})
// Duplicate internal edges with remapped IDs
const edgePayloads = internalEdges.map((e) => ({
id: crypto.randomUUID(),
source: idMap.get(e.source) || e.source,
target: idMap.get(e.target) || e.target,
sourceHandle: e.sourceHandle,
targetHandle: e.targetHandle,
}))
// Optimistic add edges
edgePayloads.forEach((edge) => workflowStore.addEdge(edge))
// Queue server op
executeQueuedOperation(
'duplicate-with-children',
'subflow',
{
parent: parentPayload,
children: childPayloads,
edges: edgePayloads,
},
() => {}
)
},
[
isShowingDiff,
isInActiveRoom,
currentWorkflowId,
activeWorkflowId,
workflowStore,
subBlockStore,
executeQueuedOperation,
]
)
const collaborativeUpdateLoopType = useCallback(
(loopId: string, loopType: 'for' | 'forEach') => {
const currentBlock = workflowStore.blocks[loopId]
@@ -1311,6 +1597,7 @@ export function useCollaborativeWorkflow() {
collaborativeRemoveEdge,
collaborativeSetSubblockValue,
collaborativeSetTagSelection,
collaborativeDuplicateSubflow,
// Collaborative variable operations
collaborativeUpdateVariable,

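One detail worth isolating from collaborativeDuplicateSubflow is the name-bump logic, which mirrors duplicateBlock: a trailing number is incremented, otherwise " 1" is appended, and the candidate advances until it no longer collides with an existing name. Extracted as a small helper for clarity (this refactor is illustrative, not part of the diff):

// "Loop" -> "Loop 1", "Loop 2" -> "Loop 3"; collisions advance the index.
function bumpName(name: string, existingNames: Set<string>): string {
  const match = name.match(/(.*?)(\d+)?$/)
  let candidate = match?.[2]
    ? `${match[1]}${Number.parseInt(match[2]) + 1}`
    : `${name} 1`
  if (existingNames.has(candidate)) {
    const base = match ? match[1] : `${name} `
    let idx = match?.[2] ? Number.parseInt(match[2]) + 1 : 1
    while (existingNames.has(`${base}${idx}`)) idx++
    candidate = `${base}${idx}`
  }
  return candidate
}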
View File

@@ -0,0 +1,47 @@
import { createLogger } from '@/lib/logs/console/logger'
const logger = createLogger('WorkflowReferenceUtils')
/**
* Recursively update block ID references in a value using a provided ID mapping.
* Handles strings, arrays, and objects. Strings are searched for `"<oldId."` and `"%oldId."` patterns.
*/
export function updateBlockReferences(
value: any,
blockIdMapping: Map<string, string>,
contextId?: string
): any {
try {
if (typeof value === 'string') {
let result = value
for (const [oldId, newId] of blockIdMapping.entries()) {
if (result.includes(oldId)) {
result = result
.replaceAll(`<${oldId}.`, `<${newId}.`)
.replaceAll(`%${oldId}.`, `%${newId}.`)
}
}
return result
}
if (Array.isArray(value)) {
return value.map((item) => updateBlockReferences(item, blockIdMapping, contextId))
}
if (value && typeof value === 'object') {
const result: Record<string, any> = {}
for (const [key, val] of Object.entries(value)) {
result[key] = updateBlockReferences(val, blockIdMapping, contextId)
}
return result
}
return value
} catch (err) {
logger.warn('Failed to update block references', {
contextId,
error: err instanceof Error ? err.message : String(err),
})
return value
}
}

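A quick usage sketch of the extracted helper, showing how block references inside subblock values are remapped when a subflow is duplicated (IDs shortened for readability):

import { updateBlockReferences } from '@/lib/workflows/reference-utils'

const idMap = new Map([['old-block-id', 'new-block-id']])

updateBlockReferences('<old-block-id.output> / %old-block-id.count', idMap)
// => '<new-block-id.output> / %new-block-id.count'

updateBlockReferences({ expr: ['<old-block-id.items>'] }, idMap, 'duplicate-subflow')
// => { expr: ['<new-block-id.items>'] }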
View File

@@ -868,6 +868,108 @@ async function handleSubflowOperationTx(
break
}
case 'duplicate-with-children': {
// Validate required structure
const parent = payload?.parent
const children = Array.isArray(payload?.children) ? payload.children : []
const edges = Array.isArray(payload?.edges) ? payload.edges : []
if (!parent || !parent.id || !parent.type || !parent.name || !parent.position) {
throw new Error('Invalid payload for subflow duplication: missing parent fields')
}
if (!isSubflowBlockType(parent.type)) {
throw new Error('Invalid subflow type for duplication')
}
// Insert parent block
await tx.insert(workflowBlocks).values({
id: parent.id,
workflowId,
type: parent.type,
name: parent.name,
positionX: parent.position.x,
positionY: parent.position.y,
data: parent.data || {},
subBlocks: parent.subBlocks || {},
outputs: parent.outputs || {},
parentId: parent.parentId || null,
extent: parent.extent || null,
enabled: parent.enabled ?? true,
horizontalHandles: parent.horizontalHandles ?? true,
isWide: parent.isWide ?? false,
advancedMode: parent.advancedMode ?? false,
height: parent.height || 0,
})
// Create subflow entry for parent
const subflowConfig =
parent.type === SubflowType.LOOP
? {
id: parent.id,
nodes: [],
iterations: parent.data?.count || DEFAULT_LOOP_ITERATIONS,
loopType: parent.data?.loopType || 'for',
forEachItems: parent.data?.collection || '',
}
: {
id: parent.id,
nodes: [],
distribution: parent.data?.collection || '',
...(parent.data?.parallelType ? { parallelType: parent.data.parallelType } : {}),
...(parent.data?.count ? { count: parent.data.count } : {}),
}
await tx.insert(workflowSubflows).values({
id: parent.id,
workflowId,
type: parent.type,
config: subflowConfig,
})
// Insert child blocks
for (const child of children) {
await tx.insert(workflowBlocks).values({
id: child.id,
workflowId,
type: child.type,
name: child.name,
positionX: child.position.x,
positionY: child.position.y,
data: child.data || {},
subBlocks: child.subBlocks || {},
outputs: child.outputs || {},
parentId: parent.id,
extent: 'parent',
enabled: child.enabled ?? true,
horizontalHandles: child.horizontalHandles ?? true,
isWide: child.isWide ?? false,
advancedMode: child.advancedMode ?? false,
height: child.height || 0,
})
}
// Insert internal edges
for (const edge of edges) {
await tx.insert(workflowEdges).values({
id: edge.id,
workflowId,
sourceBlockId: edge.source,
targetBlockId: edge.target,
sourceHandle: edge.sourceHandle || null,
targetHandle: edge.targetHandle || null,
})
}
// Update subflow node list with newly inserted children
await updateSubflowNodeList(tx, workflowId, parent.id)
logger.debug(
`[SERVER] Duplicated subflow subtree ${parent.id} with ${children.length} children and ${edges.length} edges`
)
break
}
// Add other subflow operations as needed
default:
logger.warn(`Unknown subflow operation: ${operation}`)

View File

@@ -105,6 +105,7 @@ export async function verifyOperationPermission(
'update-trigger-mode',
'toggle-handles',
'duplicate',
'duplicate-with-children',
],
write: [
'add',
@@ -119,6 +120,7 @@ export async function verifyOperationPermission(
'update-trigger-mode',
'toggle-handles',
'duplicate',
'duplicate-with-children',
],
read: ['update-position'], // Read-only users can only move things around
}

View File

@@ -67,18 +67,59 @@ export const EdgeOperationSchema = z.object({
operationId: z.string().optional(),
})
export const SubflowOperationSchema = z.object({
operation: z.enum(['add', 'remove', 'update']),
target: z.literal('subflow'),
payload: z.object({
id: z.string(),
type: z.enum(['loop', 'parallel']).optional(),
config: z.record(z.any()).optional(),
}),
timestamp: z.number(),
operationId: z.string().optional(),
// Shared schemas for subflow duplication
const BlockInsertPayloadSchema = z.object({
id: z.string(),
sourceId: z.string().optional(),
type: z.string(),
name: z.string(),
position: PositionSchema,
data: z.record(z.any()).optional(),
subBlocks: z.record(z.any()).optional(),
outputs: z.record(z.any()).optional(),
parentId: z.string().nullable().optional(),
extent: z.enum(['parent']).nullable().optional(),
enabled: z.boolean().optional(),
horizontalHandles: z.boolean().optional(),
isWide: z.boolean().optional(),
advancedMode: z.boolean().optional(),
triggerMode: z.boolean().optional(),
height: z.number().optional(),
})
const EdgeInsertPayloadSchema = z.object({
id: z.string(),
source: z.string(),
target: z.string(),
sourceHandle: z.string().nullable().optional(),
targetHandle: z.string().nullable().optional(),
})
export const SubflowOperationSchema = z.union([
z.object({
operation: z.literal('update'),
target: z.literal('subflow'),
payload: z.object({
id: z.string(),
type: z.enum(['loop', 'parallel']).optional(),
config: z.record(z.any()).optional(),
}),
timestamp: z.number(),
operationId: z.string().optional(),
}),
z.object({
operation: z.literal('duplicate-with-children'),
target: z.literal('subflow'),
payload: z.object({
parent: BlockInsertPayloadSchema,
children: z.array(BlockInsertPayloadSchema),
edges: z.array(EdgeInsertPayloadSchema),
}),
timestamp: z.number(),
operationId: z.string().optional(),
}),
])
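As an illustration, here is a minimal 'duplicate-with-children' operation that this union accepts (PositionSchema is assumed to be an { x, y } object; IDs and block types are placeholders):

SubflowOperationSchema.parse({
  operation: 'duplicate-with-children',
  target: 'subflow',
  payload: {
    parent: { id: 'new-loop-id', type: 'loop', name: 'Loop 2', position: { x: 350, y: 120 } },
    children: [
      {
        id: 'new-child-id',
        type: 'agent',
        name: 'Agent 2',
        position: { x: 40, y: 60 },
        parentId: 'new-loop-id',
        extent: 'parent',
      },
    ],
    edges: [
      {
        id: 'new-edge-id',
        source: 'new-loop-id',
        target: 'new-child-id',
        sourceHandle: 'loop-start-source',
      },
    ],
  },
  timestamp: Date.now(),
})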
export const VariableOperationSchema = z.union([
z.object({
operation: z.literal('add'),