mirror of https://github.com/simstudioai/sim.git
synced 2026-01-24 14:27:56 -05:00

Compare commits
1 Commits
fix/nested ... improvemen

| Author | SHA1 | Date |
|---|---|---|
| | 2f6ef2bf11 | |

apps/sim/app/api/workspaces/[id]/export/route.ts
Normal file, 111 lines
@@ -0,0 +1,111 @@
+import { db } from '@sim/db'
+import { workflow, workflowFolder } from '@sim/db/schema'
+import { eq } from 'drizzle-orm'
+import { type NextRequest, NextResponse } from 'next/server'
+import { getSession } from '@/lib/auth'
+import { createLogger } from '@/lib/logs/console/logger'
+import { loadBulkWorkflowsFromNormalizedTables } from '@/lib/workflows/persistence/utils'
+import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
+
+const logger = createLogger('WorkspaceExportAPI')
+
+/**
+ * GET /api/workspaces/[id]/export
+ * Export all workspace data (workflows with states, folders) in a single request.
+ * Much more efficient than fetching each workflow individually.
+ */
+export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
+  const startTime = Date.now()
+  const { id: workspaceId } = await params
+
+  try {
+    const session = await getSession()
+    if (!session?.user?.id) {
+      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
+    }
+
+    // Check if user has access to this workspace
+    const userPermission = await getUserEntityPermissions(session.user.id, 'workspace', workspaceId)
+    if (!userPermission) {
+      return NextResponse.json({ error: 'Workspace not found or access denied' }, { status: 404 })
+    }
+
+    // Fetch all workflows and folders in parallel (2 queries)
+    const [workflows, folders] = await Promise.all([
+      db.select().from(workflow).where(eq(workflow.workspaceId, workspaceId)),
+      db.select().from(workflowFolder).where(eq(workflowFolder.workspaceId, workspaceId)),
+    ])
+
+    const workflowIds = workflows.map((w) => w.id)
+
+    // Bulk load all workflow states (3 queries total via inArray)
+    const workflowStates = await loadBulkWorkflowsFromNormalizedTables(workflowIds)
+
+    // Build export data
+    const workflowsExport = workflows.map((w) => {
+      const state = workflowStates.get(w.id)
+
+      // Build the workflow state with defaults if no normalized data
+      const workflowState = state
+        ? {
+            blocks: state.blocks,
+            edges: state.edges,
+            loops: state.loops,
+            parallels: state.parallels,
+            lastSaved: Date.now(),
+            isDeployed: w.isDeployed || false,
+            deployedAt: w.deployedAt,
+          }
+        : {
+            blocks: {},
+            edges: [],
+            loops: {},
+            parallels: {},
+            lastSaved: Date.now(),
+            isDeployed: w.isDeployed || false,
+            deployedAt: w.deployedAt,
+          }
+
+      // Extract variables from workflow record
+      const variables = Object.values((w.variables as Record<string, any>) || {}).map((v: any) => ({
+        id: v.id,
+        name: v.name,
+        type: v.type,
+        value: v.value,
+      }))
+
+      return {
+        workflow: {
+          id: w.id,
+          name: w.name,
+          description: w.description,
+          color: w.color,
+          folderId: w.folderId,
+        },
+        state: workflowState,
+        variables,
+      }
+    })
+
+    const foldersExport = folders.map((f) => ({
+      id: f.id,
+      name: f.name,
+      parentId: f.parentId,
+    }))
+
+    const elapsed = Date.now() - startTime
+    logger.info(`Exported workspace ${workspaceId} in ${elapsed}ms`, {
+      workflowsCount: workflowsExport.length,
+      foldersCount: foldersExport.length,
+    })
+
+    return NextResponse.json({
+      workflows: workflowsExport,
+      folders: foldersExport,
+    })
+  } catch (error) {
+    const elapsed = Date.now() - startTime
+    logger.error(`Error exporting workspace ${workspaceId} after ${elapsed}ms:`, error)
+    return NextResponse.json({ error: 'Failed to export workspace' }, { status: 500 })
+  }
+}
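For reference, a minimal sketch of how a client might consume this new endpoint. Only the URL, status codes, and top-level response shape come from the route above; the helper name and field typings are illustrative assumptions.

// Hypothetical client-side helper; field types are approximate.
async function fetchWorkspaceExport(workspaceId: string) {
  const response = await fetch(`/api/workspaces/${workspaceId}/export`)
  if (!response.ok) {
    // The route returns 401 (unauthenticated), 404 (no workspace access), or 500 (export failure)
    throw new Error(`Export failed with status ${response.status}`)
  }
  // Shape per the route: { workflows: [{ workflow, state, variables }], folders: [{ id, name, parentId }] }
  return (await response.json()) as {
    workflows: Array<{ workflow: Record<string, unknown>; state: Record<string, unknown>; variables: unknown[] }>
    folders: Array<{ id: string; name: string; parentId: string | null }>
  }
}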
@@ -1,9 +1,6 @@
 import { useCallback, useState } from 'react'
 import { createLogger } from '@/lib/logs/console/logger'
-import {
-  exportWorkspaceToZip,
-  type WorkflowExportData,
-} from '@/lib/workflows/operations/import-export'
+import { exportWorkspaceToZip } from '@/lib/workflows/operations/import-export'

 const logger = createLogger('useExportWorkspace')

@@ -18,8 +15,7 @@ interface UseExportWorkspaceProps {
  * Hook for managing workspace export to ZIP.
  *
  * Handles:
- * - Fetching all workflows and folders from workspace
- * - Fetching workflow states and variables
+ * - Fetching all workflows and folders from workspace via bulk export endpoint
  * - Creating ZIP file with all workspace data
  * - Downloading the ZIP file
  * - Loading state management
@@ -42,74 +38,13 @@ export function useExportWorkspace({ onSuccess }: UseExportWorkspaceProps = {})
     try {
       logger.info('Exporting workspace', { workspaceId })

-      // Fetch all workflows in workspace
-      const workflowsResponse = await fetch(`/api/workflows?workspaceId=${workspaceId}`)
-      if (!workflowsResponse.ok) {
-        throw new Error('Failed to fetch workflows')
-      }
-      const { data: workflows } = await workflowsResponse.json()
-
-      // Fetch all folders in workspace
-      const foldersResponse = await fetch(`/api/folders?workspaceId=${workspaceId}`)
-      if (!foldersResponse.ok) {
-        throw new Error('Failed to fetch folders')
-      }
-      const foldersData = await foldersResponse.json()
-
-      // Export each workflow
-      const workflowsToExport: WorkflowExportData[] = []
-
-      for (const workflow of workflows) {
-        try {
-          const workflowResponse = await fetch(`/api/workflows/${workflow.id}`)
-          if (!workflowResponse.ok) {
-            logger.warn(`Failed to fetch workflow ${workflow.id}`)
-            continue
-          }
-
-          const { data: workflowData } = await workflowResponse.json()
-          if (!workflowData?.state) {
-            logger.warn(`Workflow ${workflow.id} has no state`)
-            continue
-          }
-
-          const variablesResponse = await fetch(`/api/workflows/${workflow.id}/variables`)
-          let workflowVariables: any[] = []
-          if (variablesResponse.ok) {
-            const variablesData = await variablesResponse.json()
-            workflowVariables = Object.values(variablesData?.data || {}).map((v: any) => ({
-              id: v.id,
-              name: v.name,
-              type: v.type,
-              value: v.value,
-            }))
-          }
-
-          workflowsToExport.push({
-            workflow: {
-              id: workflow.id,
-              name: workflow.name,
-              description: workflow.description,
-              color: workflow.color,
-              folderId: workflow.folderId,
-            },
-            state: workflowData.state,
-            variables: workflowVariables,
-          })
-        } catch (error) {
-          logger.error(`Failed to export workflow ${workflow.id}:`, error)
-        }
+      // Single API call to get all workspace data (workflows with states + folders)
+      const response = await fetch(`/api/workspaces/${workspaceId}/export`)
+      if (!response.ok) {
+        throw new Error('Failed to export workspace')
       }

-      const foldersToExport: Array<{
-        id: string
-        name: string
-        parentId: string | null
-      }> = (foldersData.folders || []).map((folder: any) => ({
-        id: folder.id,
-        name: folder.name,
-        parentId: folder.parentId,
-      }))
+      const { workflows: workflowsToExport, folders: foldersToExport } = await response.json()

       const zipBlob = await exportWorkspaceToZip(
         workspaceName,
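For context, a minimal sketch of how this hook might be wired into a component. The names exportWorkspace and isExporting are assumptions about the hook's return value, which the diff above does not show; the import of useExportWorkspace is omitted because the hook's file path is not visible here.

// Illustrative only: return-value names are assumed, not taken from the diff.
function ExportWorkspaceButton({ workspaceId, workspaceName }: { workspaceId: string; workspaceName: string }) {
  const { exportWorkspace, isExporting } = useExportWorkspace({
    onSuccess: () => console.log('Workspace export downloaded'),
  })

  return (
    <button disabled={isExporting} onClick={() => exportWorkspace(workspaceId, workspaceName)}>
      {isExporting ? 'Exporting...' : 'Export workspace'}
    </button>
  )
}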
@@ -9,7 +9,7 @@ import {
   workflowSubflows,
 } from '@sim/db'
 import type { InferSelectModel } from 'drizzle-orm'
-import { and, desc, eq, sql } from 'drizzle-orm'
+import { and, desc, eq, inArray, sql } from 'drizzle-orm'
 import type { Edge } from 'reactflow'
 import { v4 as uuidv4 } from 'uuid'
 import { createLogger } from '@/lib/logs/console/logger'

@@ -602,6 +602,178 @@ export async function deployWorkflow(params: {
   }
 }

+/**
+ * Bulk load workflow states for multiple workflows in a single set of queries.
+ * Much more efficient than calling loadWorkflowFromNormalizedTables for each workflow.
+ */
+export async function loadBulkWorkflowsFromNormalizedTables(
+  workflowIds: string[]
+): Promise<Map<string, NormalizedWorkflowData>> {
+  const result = new Map<string, NormalizedWorkflowData>()
+
+  if (workflowIds.length === 0) {
+    return result
+  }
+
+  try {
+    // Load all components for all workflows in parallel (just 3 queries total)
+    const [allBlocks, allEdges, allSubflows] = await Promise.all([
+      db.select().from(workflowBlocks).where(inArray(workflowBlocks.workflowId, workflowIds)),
+      db.select().from(workflowEdges).where(inArray(workflowEdges.workflowId, workflowIds)),
+      db.select().from(workflowSubflows).where(inArray(workflowSubflows.workflowId, workflowIds)),
+    ])
+
+    // Group blocks by workflow
+    const blocksByWorkflow = new Map<string, typeof allBlocks>()
+    for (const block of allBlocks) {
+      const existing = blocksByWorkflow.get(block.workflowId) || []
+      existing.push(block)
+      blocksByWorkflow.set(block.workflowId, existing)
+    }
+
+    // Group edges by workflow
+    const edgesByWorkflow = new Map<string, typeof allEdges>()
+    for (const edge of allEdges) {
+      const existing = edgesByWorkflow.get(edge.workflowId) || []
+      existing.push(edge)
+      edgesByWorkflow.set(edge.workflowId, existing)
+    }
+
+    // Group subflows by workflow
+    const subflowsByWorkflow = new Map<string, typeof allSubflows>()
+    for (const subflow of allSubflows) {
+      const existing = subflowsByWorkflow.get(subflow.workflowId) || []
+      existing.push(subflow)
+      subflowsByWorkflow.set(subflow.workflowId, existing)
+    }
+
+    // Process each workflow
+    for (const workflowId of workflowIds) {
+      const blocks = blocksByWorkflow.get(workflowId) || []
+      const edges = edgesByWorkflow.get(workflowId) || []
+      const subflows = subflowsByWorkflow.get(workflowId) || []
+
+      // Skip workflows with no blocks (not migrated yet)
+      if (blocks.length === 0) {
+        continue
+      }
+
+      // Convert blocks to the expected format
+      const blocksMap: Record<string, BlockState> = {}
+      blocks.forEach((block) => {
+        const blockData = block.data || {}
+
+        const assembled: BlockState = {
+          id: block.id,
+          type: block.type,
+          name: block.name,
+          position: {
+            x: Number(block.positionX),
+            y: Number(block.positionY),
+          },
+          enabled: block.enabled,
+          horizontalHandles: block.horizontalHandles,
+          advancedMode: block.advancedMode,
+          triggerMode: block.triggerMode,
+          height: Number(block.height),
+          subBlocks: (block.subBlocks as BlockState['subBlocks']) || {},
+          outputs: (block.outputs as BlockState['outputs']) || {},
+          data: blockData,
+        }
+
+        blocksMap[block.id] = assembled
+      })
+
+      // Sanitize any invalid custom tools in agent blocks
+      const { blocks: sanitizedBlocks } = sanitizeAgentToolsInBlocks(blocksMap)
+
+      // Migrate old agent block format to new messages array format
+      const migratedBlocks = migrateAgentBlocksToMessagesFormat(sanitizedBlocks)
+
+      // Convert edges to the expected format
+      const edgesArray: Edge[] = edges.map((edge) => ({
+        id: edge.id,
+        source: edge.sourceBlockId,
+        target: edge.targetBlockId,
+        sourceHandle: edge.sourceHandle ?? undefined,
+        targetHandle: edge.targetHandle ?? undefined,
+        type: 'default',
+        data: {},
+      }))
+
+      // Convert subflows to loops and parallels
+      const loops: Record<string, Loop> = {}
+      const parallels: Record<string, Parallel> = {}
+
+      subflows.forEach((subflow) => {
+        const config = (subflow.config ?? {}) as Partial<Loop & Parallel>
+
+        if (subflow.type === SUBFLOW_TYPES.LOOP) {
+          const loopType =
+            (config as Loop).loopType === 'for' ||
+            (config as Loop).loopType === 'forEach' ||
+            (config as Loop).loopType === 'while' ||
+            (config as Loop).loopType === 'doWhile'
+              ? (config as Loop).loopType
+              : 'for'
+
+          const loop: Loop = {
+            id: subflow.id,
+            nodes: Array.isArray((config as Loop).nodes) ? (config as Loop).nodes : [],
+            iterations:
+              typeof (config as Loop).iterations === 'number' ? (config as Loop).iterations : 1,
+            loopType,
+            forEachItems: (config as Loop).forEachItems ?? '',
+            whileCondition: (config as Loop).whileCondition ?? '',
+            doWhileCondition: (config as Loop).doWhileCondition ?? '',
+          }
+          loops[subflow.id] = loop
+
+          // Sync block.data with loop config
+          if (migratedBlocks[subflow.id]) {
+            const block = migratedBlocks[subflow.id]
+            migratedBlocks[subflow.id] = {
+              ...block,
+              data: {
+                ...block.data,
+                collection: loop.forEachItems ?? block.data?.collection ?? '',
+                whileCondition: loop.whileCondition ?? block.data?.whileCondition ?? '',
+                doWhileCondition: loop.doWhileCondition ?? block.data?.doWhileCondition ?? '',
+              },
+            }
+          }
+        } else if (subflow.type === SUBFLOW_TYPES.PARALLEL) {
+          const parallel: Parallel = {
+            id: subflow.id,
+            nodes: Array.isArray((config as Parallel).nodes) ? (config as Parallel).nodes : [],
+            count: typeof (config as Parallel).count === 'number' ? (config as Parallel).count : 5,
+            distribution: (config as Parallel).distribution ?? '',
+            parallelType:
+              (config as Parallel).parallelType === 'count' ||
+              (config as Parallel).parallelType === 'collection'
+                ? (config as Parallel).parallelType
+                : 'count',
+          }
+          parallels[subflow.id] = parallel
+        }
+      })
+
+      result.set(workflowId, {
+        blocks: migratedBlocks,
+        edges: edgesArray,
+        loops,
+        parallels,
+        isFromNormalizedTables: true,
+      })
+    }
+
+    return result
+  } catch (error) {
+    logger.error('Error bulk loading workflows from normalized tables:', error)
+    return result
+  }
+}
+
 /**
  * Regenerates all IDs in a workflow state to avoid conflicts when duplicating or using templates
  * Returns a new state with all IDs regenerated and references updated
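For reference, a minimal sketch of calling the new bulk loader from server-side code. The function signature and import path come from the diff and route above; the caller name and the workflow IDs are placeholders.

// Illustrative server-side usage; workflow IDs are placeholders.
import { loadBulkWorkflowsFromNormalizedTables } from '@/lib/workflows/persistence/utils'

async function logBlockCounts(workflowIds: string[]) {
  // One bulk call issues 3 queries total (blocks, edges, subflows) instead of 3 per workflow.
  const states = await loadBulkWorkflowsFromNormalizedTables(workflowIds)
  for (const id of workflowIds) {
    const state = states.get(id)
    // Workflows with no normalized blocks are simply absent from the map.
    console.log(id, state ? Object.keys(state.blocks).length : 'not migrated')
  }
}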