Mirror of https://github.com/simstudioai/sim.git
Synced 2026-01-11 07:58:06 -05:00

Compare commits
2 commits: v0.5.34 ... improvemen
| Author | SHA1 | Date |
|---|---|---|
|  | 2f6ef2bf11 |  |
|  | 7ef1150383 |  |
@@ -188,7 +188,6 @@ DATABASE_URL="postgresql://postgres:your_password@localhost:5432/simstudio"

Then run the migrations:

```bash
cd apps/sim # Required so drizzle picks up the correct .env file
bunx drizzle-kit migrate --config=./drizzle.config.ts
```
@@ -109,7 +109,7 @@ export default function Footer({ fullWidth = false }: FooterProps) {
  {FOOTER_BLOCKS.map((block) => (
    <Link
      key={block}
      href={`https://docs.sim.ai/blocks/${block.toLowerCase().replaceAll(' ', '-')}`}
      href={`https://docs.sim.ai/blocks/${block.toLowerCase().replace(' ', '-')}`}
      target='_blank'
      rel='noopener noreferrer'
      className='text-[14px] text-muted-foreground transition-colors hover:text-foreground'
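The extracted view drops the diff's +/− markers, so only the two variants of the changed `href` line survive above. For reference, the two standard String calls behave differently on multi-word block names (the names below are made up for illustration):

```ts
const oneSpace = 'Knowledge Base'
oneSpace.toLowerCase().replace(' ', '-')    // 'knowledge-base' (first match only)
oneSpace.toLowerCase().replaceAll(' ', '-') // 'knowledge-base' (every match)

const twoSpaces = 'My Cool Block'
twoSpaces.toLowerCase().replace(' ', '-')    // 'my-cool block'
twoSpaces.toLowerCase().replaceAll(' ', '-') // 'my-cool-block'
```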
apps/sim/app/api/workspaces/[id]/export/route.ts (Normal file, 111 lines)
@@ -0,0 +1,111 @@
import { db } from '@sim/db'
import { workflow, workflowFolder } from '@sim/db/schema'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console/logger'
import { loadBulkWorkflowsFromNormalizedTables } from '@/lib/workflows/persistence/utils'
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'

const logger = createLogger('WorkspaceExportAPI')

/**
 * GET /api/workspaces/[id]/export
 * Export all workspace data (workflows with states, folders) in a single request.
 * Much more efficient than fetching each workflow individually.
 */
export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
  const startTime = Date.now()
  const { id: workspaceId } = await params

  try {
    const session = await getSession()
    if (!session?.user?.id) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    // Check if user has access to this workspace
    const userPermission = await getUserEntityPermissions(session.user.id, 'workspace', workspaceId)
    if (!userPermission) {
      return NextResponse.json({ error: 'Workspace not found or access denied' }, { status: 404 })
    }

    // Fetch all workflows and folders in parallel (2 queries)
    const [workflows, folders] = await Promise.all([
      db.select().from(workflow).where(eq(workflow.workspaceId, workspaceId)),
      db.select().from(workflowFolder).where(eq(workflowFolder.workspaceId, workspaceId)),
    ])

    const workflowIds = workflows.map((w) => w.id)

    // Bulk load all workflow states (3 queries total via inArray)
    const workflowStates = await loadBulkWorkflowsFromNormalizedTables(workflowIds)

    // Build export data
    const workflowsExport = workflows.map((w) => {
      const state = workflowStates.get(w.id)

      // Build the workflow state with defaults if no normalized data
      const workflowState = state
        ? {
            blocks: state.blocks,
            edges: state.edges,
            loops: state.loops,
            parallels: state.parallels,
            lastSaved: Date.now(),
            isDeployed: w.isDeployed || false,
            deployedAt: w.deployedAt,
          }
        : {
            blocks: {},
            edges: [],
            loops: {},
            parallels: {},
            lastSaved: Date.now(),
            isDeployed: w.isDeployed || false,
            deployedAt: w.deployedAt,
          }

      // Extract variables from workflow record
      const variables = Object.values((w.variables as Record<string, any>) || {}).map((v: any) => ({
        id: v.id,
        name: v.name,
        type: v.type,
        value: v.value,
      }))

      return {
        workflow: {
          id: w.id,
          name: w.name,
          description: w.description,
          color: w.color,
          folderId: w.folderId,
        },
        state: workflowState,
        variables,
      }
    })

    const foldersExport = folders.map((f) => ({
      id: f.id,
      name: f.name,
      parentId: f.parentId,
    }))

    const elapsed = Date.now() - startTime
    logger.info(`Exported workspace ${workspaceId} in ${elapsed}ms`, {
      workflowsCount: workflowsExport.length,
      foldersCount: foldersExport.length,
    })

    return NextResponse.json({
      workflows: workflowsExport,
      folders: foldersExport,
    })
  } catch (error) {
    const elapsed = Date.now() - startTime
    logger.error(`Error exporting workspace ${workspaceId} after ${elapsed}ms:`, error)
    return NextResponse.json({ error: 'Failed to export workspace' }, { status: 500 })
  }
}
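A minimal sketch of how a client could consume this endpoint. The response shape follows from the handler above; the helper name and the workspace id are illustrative assumptions:

```ts
// Hypothetical helper; 'ws_123' is a made-up workspace id.
async function fetchWorkspaceExport(workspaceId: string) {
  const res = await fetch(`/api/workspaces/${workspaceId}/export`)
  if (!res.ok) throw new Error(`Export failed: ${res.status}`)
  // Per the handler above: { workflows: [{ workflow, state, variables }], folders: [...] }
  return res.json()
}

// Usage:
// const { workflows, folders } = await fetchWorkspaceExport('ws_123')
```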
@@ -101,6 +101,9 @@ const ACTION_VERBS = [
  'Generated',
  'Rendering',
  'Rendered',
  'Sleeping',
  'Slept',
  'Resumed',
] as const

/**
@@ -580,6 +583,11 @@ export function ToolCall({ toolCall: toolCallProp, toolCallId, onStateChange }:
    (toolCall.state === (ClientToolCallState.executing as any) ||
      toolCall.state === ('executing' as any))

  const showWake =
    toolCall.name === 'sleep' &&
    (toolCall.state === (ClientToolCallState.executing as any) ||
      toolCall.state === ('executing' as any))

  const handleStateChange = (state: any) => {
    forceUpdate({})
    onStateChange?.(state)

@@ -1102,6 +1110,37 @@ export function ToolCall({ toolCall: toolCallProp, toolCallId, onStateChange }:
            Move to Background
          </Button>
        </div>
      ) : showWake ? (
        <div className='mt-[8px]'>
          <Button
            onClick={async () => {
              try {
                const instance = getClientTool(toolCall.id)
                // Get elapsed seconds before waking
                const elapsedSeconds = instance?.getElapsedSeconds?.() || 0
                // Transition to background state locally so UI updates immediately
                // Pass elapsed seconds in the result so dynamic text can use it
                instance?.setState?.((ClientToolCallState as any).background, {
                  result: { _elapsedSeconds: elapsedSeconds },
                })
                // Update the tool call params in the store to include elapsed time for display
                const { updateToolCallParams } = useCopilotStore.getState()
                updateToolCallParams?.(toolCall.id, { _elapsedSeconds: Math.round(elapsedSeconds) })
                await instance?.markToolComplete?.(
                  200,
                  `User woke you up after ${Math.round(elapsedSeconds)} seconds`
                )
                // Optionally force a re-render; store should sync state from server
                forceUpdate({})
                onStateChange?.('background')
              } catch {}
            }}
            variant='primary'
            title='Wake'
          >
            Wake
          </Button>
        </div>
      ) : null}
    </div>
  )
@@ -1,9 +1,6 @@
import { useCallback, useState } from 'react'
import { createLogger } from '@/lib/logs/console/logger'
import {
  exportWorkspaceToZip,
  type WorkflowExportData,
} from '@/lib/workflows/operations/import-export'
import { exportWorkspaceToZip } from '@/lib/workflows/operations/import-export'

const logger = createLogger('useExportWorkspace')

@@ -18,8 +15,7 @@ interface UseExportWorkspaceProps {
 * Hook for managing workspace export to ZIP.
 *
 * Handles:
 * - Fetching all workflows and folders from workspace
 * - Fetching workflow states and variables
 * - Fetching all workflows and folders from workspace via bulk export endpoint
 * - Creating ZIP file with all workspace data
 * - Downloading the ZIP file
 * - Loading state management

@@ -42,74 +38,13 @@ export function useExportWorkspace({ onSuccess }: UseExportWorkspaceProps = {})
    try {
      logger.info('Exporting workspace', { workspaceId })

      // Fetch all workflows in workspace
      const workflowsResponse = await fetch(`/api/workflows?workspaceId=${workspaceId}`)
      if (!workflowsResponse.ok) {
        throw new Error('Failed to fetch workflows')
      }
      const { data: workflows } = await workflowsResponse.json()

      // Fetch all folders in workspace
      const foldersResponse = await fetch(`/api/folders?workspaceId=${workspaceId}`)
      if (!foldersResponse.ok) {
        throw new Error('Failed to fetch folders')
      }
      const foldersData = await foldersResponse.json()

      // Export each workflow
      const workflowsToExport: WorkflowExportData[] = []

      for (const workflow of workflows) {
        try {
          const workflowResponse = await fetch(`/api/workflows/${workflow.id}`)
          if (!workflowResponse.ok) {
            logger.warn(`Failed to fetch workflow ${workflow.id}`)
            continue
          }

          const { data: workflowData } = await workflowResponse.json()
          if (!workflowData?.state) {
            logger.warn(`Workflow ${workflow.id} has no state`)
            continue
          }

          const variablesResponse = await fetch(`/api/workflows/${workflow.id}/variables`)
          let workflowVariables: any[] = []
          if (variablesResponse.ok) {
            const variablesData = await variablesResponse.json()
            workflowVariables = Object.values(variablesData?.data || {}).map((v: any) => ({
              id: v.id,
              name: v.name,
              type: v.type,
              value: v.value,
            }))
          }

          workflowsToExport.push({
            workflow: {
              id: workflow.id,
              name: workflow.name,
              description: workflow.description,
              color: workflow.color,
              folderId: workflow.folderId,
            },
            state: workflowData.state,
            variables: workflowVariables,
          })
        } catch (error) {
          logger.error(`Failed to export workflow ${workflow.id}:`, error)
        }
      // Single API call to get all workspace data (workflows with states + folders)
      const response = await fetch(`/api/workspaces/${workspaceId}/export`)
      if (!response.ok) {
        throw new Error('Failed to export workspace')
      }

      const foldersToExport: Array<{
        id: string
        name: string
        parentId: string | null
      }> = (foldersData.folders || []).map((folder: any) => ({
        id: folder.id,
        name: folder.name,
        parentId: folder.parentId,
      }))
      const { workflows: workflowsToExport, folders: foldersToExport } = await response.json()

      const zipBlob = await exportWorkspaceToZip(
        workspaceName,
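For reference, the payload the hook destructures matches the route added earlier. A hedged sketch of its shape; the field names are taken from the route handler, while the alias name and the nullability guesses are ours:

```ts
// Shape of GET /api/workspaces/[id]/export, per the route handler above.
// Nullability is a best guess; deployedAt arrives JSON-serialized.
interface WorkspaceExportPayload {
  workflows: Array<{
    workflow: {
      id: string
      name: string
      description: string | null
      color: string
      folderId: string | null
    }
    state: {
      blocks: Record<string, unknown>
      edges: unknown[]
      loops: Record<string, unknown>
      parallels: Record<string, unknown>
      lastSaved: number
      isDeployed: boolean
      deployedAt: string | null
    }
    variables: Array<{ id: string; name: string; type: string; value: unknown }>
  }>
  folders: Array<{ id: string; name: string; parentId: string | null }>
}
```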
@@ -33,6 +33,7 @@ export const ToolIds = z.enum([
  'knowledge_base',
  'manage_custom_tool',
  'manage_mcp_tool',
  'sleep',
])
export type ToolId = z.infer<typeof ToolIds>

@@ -252,6 +253,14 @@ export const ToolArgSchemas = {
      .optional()
      .describe('Required for add and edit operations. The MCP server configuration.'),
  }),

  sleep: z.object({
    seconds: z
      .number()
      .min(0)
      .max(180)
      .describe('The number of seconds to sleep (0-180, max 3 minutes)'),
  }),
} as const
export type ToolArgSchemaMap = typeof ToolArgSchemas

@@ -318,6 +327,7 @@ export const ToolSSESchemas = {
  knowledge_base: toolCallSSEFor('knowledge_base', ToolArgSchemas.knowledge_base),
  manage_custom_tool: toolCallSSEFor('manage_custom_tool', ToolArgSchemas.manage_custom_tool),
  manage_mcp_tool: toolCallSSEFor('manage_mcp_tool', ToolArgSchemas.manage_mcp_tool),
  sleep: toolCallSSEFor('sleep', ToolArgSchemas.sleep),
} as const
export type ToolSSESchemaMap = typeof ToolSSESchemas

@@ -552,6 +562,11 @@ export const ToolResultSchemas = {
    serverName: z.string().optional(),
    message: z.string().optional(),
  }),
  sleep: z.object({
    success: z.boolean(),
    seconds: z.number(),
    message: z.string().optional(),
  }),
} as const
export type ToolResultSchemaMap = typeof ToolResultSchemas
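Since `ToolArgSchemas.sleep` is a plain zod object, the bounds can be exercised directly. A quick sketch (the import path is assumed, not confirmed by the diff):

```ts
import { ToolArgSchemas } from '@/lib/copilot/tools/shared/schemas' // assumed path

ToolArgSchemas.sleep.parse({ seconds: 30 })              // ok
ToolArgSchemas.sleep.safeParse({ seconds: 500 }).success // false: exceeds .max(180)
ToolArgSchemas.sleep.safeParse({ seconds: -1 }).success  // false: below .min(0)
```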
apps/sim/lib/copilot/tools/client/other/sleep.ts (Normal file, 144 lines)
@@ -0,0 +1,144 @@
import { Loader2, MinusCircle, Moon, XCircle } from 'lucide-react'
import {
  BaseClientTool,
  type BaseClientToolMetadata,
  ClientToolCallState,
} from '@/lib/copilot/tools/client/base-tool'
import { createLogger } from '@/lib/logs/console/logger'

/** Maximum sleep duration in seconds (3 minutes) */
const MAX_SLEEP_SECONDS = 180

/** Track sleep start times for calculating elapsed time on wake */
const sleepStartTimes: Record<string, number> = {}

interface SleepArgs {
  seconds?: number
}

/**
 * Format seconds into a human-readable duration string
 */
function formatDuration(seconds: number): string {
  if (seconds >= 60) {
    const minutes = Math.round(seconds / 60)
    return `${minutes} minute${minutes !== 1 ? 's' : ''}`
  }
  return `${seconds} second${seconds !== 1 ? 's' : ''}`
}

export class SleepClientTool extends BaseClientTool {
  static readonly id = 'sleep'

  constructor(toolCallId: string) {
    super(toolCallId, SleepClientTool.id, SleepClientTool.metadata)
  }

  static readonly metadata: BaseClientToolMetadata = {
    displayNames: {
      [ClientToolCallState.generating]: { text: 'Preparing to sleep', icon: Loader2 },
      [ClientToolCallState.pending]: { text: 'Sleeping', icon: Loader2 },
      [ClientToolCallState.executing]: { text: 'Sleeping', icon: Loader2 },
      [ClientToolCallState.success]: { text: 'Finished sleeping', icon: Moon },
      [ClientToolCallState.error]: { text: 'Sleep interrupted', icon: XCircle },
      [ClientToolCallState.rejected]: { text: 'Sleep skipped', icon: MinusCircle },
      [ClientToolCallState.aborted]: { text: 'Sleep aborted', icon: MinusCircle },
      [ClientToolCallState.background]: { text: 'Resumed', icon: Moon },
    },
    // No interrupt - auto-execute immediately
    getDynamicText: (params, state) => {
      const seconds = params?.seconds
      if (typeof seconds === 'number' && seconds > 0) {
        const displayTime = formatDuration(seconds)
        switch (state) {
          case ClientToolCallState.success:
            return `Slept for ${displayTime}`
          case ClientToolCallState.executing:
          case ClientToolCallState.pending:
            return `Sleeping for ${displayTime}`
          case ClientToolCallState.generating:
            return `Preparing to sleep for ${displayTime}`
          case ClientToolCallState.error:
            return `Failed to sleep for ${displayTime}`
          case ClientToolCallState.rejected:
            return `Skipped sleeping for ${displayTime}`
          case ClientToolCallState.aborted:
            return `Aborted sleeping for ${displayTime}`
          case ClientToolCallState.background: {
            // Calculate elapsed time from when sleep started
            const elapsedSeconds = params?._elapsedSeconds
            if (typeof elapsedSeconds === 'number' && elapsedSeconds > 0) {
              return `Resumed after ${formatDuration(Math.round(elapsedSeconds))}`
            }
            return 'Resumed early'
          }
        }
      }
      return undefined
    },
  }

  /**
   * Get elapsed seconds since sleep started
   */
  getElapsedSeconds(): number {
    const startTime = sleepStartTimes[this.toolCallId]
    if (!startTime) return 0
    return (Date.now() - startTime) / 1000
  }

  async handleReject(): Promise<void> {
    await super.handleReject()
    this.setState(ClientToolCallState.rejected)
  }

  async handleAccept(args?: SleepArgs): Promise<void> {
    const logger = createLogger('SleepClientTool')

    // Use a timeout slightly longer than max sleep (3 minutes + buffer)
    const timeoutMs = (MAX_SLEEP_SECONDS + 30) * 1000

    await this.executeWithTimeout(async () => {
      const params = args || {}
      logger.debug('handleAccept() called', {
        toolCallId: this.toolCallId,
        state: this.getState(),
        hasArgs: !!args,
        seconds: params.seconds,
      })

      // Validate and clamp seconds
      let seconds = typeof params.seconds === 'number' ? params.seconds : 0
      if (seconds < 0) seconds = 0
      if (seconds > MAX_SLEEP_SECONDS) seconds = MAX_SLEEP_SECONDS

      logger.debug('Starting sleep', { seconds })

      // Track start time for elapsed calculation
      sleepStartTimes[this.toolCallId] = Date.now()

      this.setState(ClientToolCallState.executing)

      try {
        // Sleep for the specified duration
        await new Promise((resolve) => setTimeout(resolve, seconds * 1000))

        logger.debug('Sleep completed successfully')
        this.setState(ClientToolCallState.success)
        await this.markToolComplete(200, `Slept for ${seconds} seconds`)
      } catch (error) {
        const message = error instanceof Error ? error.message : String(error)
        logger.error('Sleep failed', { error: message })
        this.setState(ClientToolCallState.error)
        await this.markToolComplete(500, message)
      } finally {
        // Clean up start time tracking
        delete sleepStartTimes[this.toolCallId]
      }
    }, timeoutMs)
  }

  async execute(args?: SleepArgs): Promise<void> {
    // Auto-execute without confirmation - go straight to executing
    await this.handleAccept(args)
  }
}
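For reference, a few sample outputs of formatDuration across the allowed 0-180 range, pluralizing on the rounded minute count:

```ts
formatDuration(1)   // '1 second'
formatDuration(45)  // '45 seconds'
formatDuration(60)  // '1 minute'
formatDuration(90)  // '2 minutes' (1.5 rounds up)
formatDuration(180) // '3 minutes'
```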
@@ -8,6 +8,7 @@ import { createLogger } from '@/lib/logs/console/logger'
import { getBlockOutputs } from '@/lib/workflows/blocks/block-outputs'
import { extractAndPersistCustomTools } from '@/lib/workflows/persistence/custom-tools-persistence'
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'
import { isValidKey } from '@/lib/workflows/sanitization/key-validation'
import { validateWorkflowState } from '@/lib/workflows/sanitization/validation'
import { getAllBlocks, getBlock } from '@/blocks/registry'
import type { SubBlockConfig } from '@/blocks/types'

@@ -850,13 +851,18 @@ function applyOperationsToWorkflowState(
   * Reorder operations to ensure correct execution sequence:
   * 1. delete - Remove blocks first to free up IDs and clean state
   * 2. extract_from_subflow - Extract blocks from subflows before modifications
   * 3. add - Create new blocks so they exist before being referenced
   * 3. add - Create new blocks (sorted by connection dependencies)
   * 4. insert_into_subflow - Insert blocks into subflows (sorted by parent dependency)
   * 5. edit - Edit existing blocks last, so connections to newly added blocks work
   *
   * This ordering is CRITICAL: edit operations may reference blocks being added
   * in the same batch (e.g., connecting block A to newly added block B).
   * Without proper ordering, the target block wouldn't exist yet.
   * This ordering is CRITICAL: operations may reference blocks being added/inserted
   * in the same batch. Without proper ordering, target blocks wouldn't exist yet.
   *
   * For add operations, we use a two-pass approach:
   * - Pass 1: Create all blocks (without connections)
   * - Pass 2: Add all connections (now all blocks exist)
   * This ensures that if block A connects to block B, and both are being added,
   * B will exist when we try to create the edge from A to B.
   */
  const deletes = operations.filter((op) => op.operation_type === 'delete')
  const extracts = operations.filter((op) => op.operation_type === 'extract_from_subflow')

@@ -868,6 +874,8 @@ function applyOperationsToWorkflowState(
  // This handles cases where a loop/parallel is being added along with its children
  const sortedInserts = topologicalSortInserts(inserts, adds)

  // We'll process add operations in two passes (handled in the switch statement below)
  // This is tracked via a separate flag to know which pass we're in
  const orderedOperations: EditWorkflowOperation[] = [
    ...deletes,
    ...extracts,

@@ -877,15 +885,46 @@ function applyOperationsToWorkflowState(
  ]

  logger.info('Operations after reordering:', {
    order: orderedOperations.map(
    totalOperations: orderedOperations.length,
    deleteCount: deletes.length,
    extractCount: extracts.length,
    addCount: adds.length,
    insertCount: sortedInserts.length,
    editCount: edits.length,
    operationOrder: orderedOperations.map(
      (op) =>
        `${op.operation_type}:${op.block_id}${op.params?.subflowId ? `(parent:${op.params.subflowId})` : ''}`
    ),
  })

  // Two-pass processing for add operations:
  // Pass 1: Create all blocks (without connections)
  // Pass 2: Add all connections (all blocks now exist)
  const addOperationsWithConnections: Array<{
    blockId: string
    connections: Record<string, any>
  }> = []

  for (const operation of orderedOperations) {
    const { operation_type, block_id, params } = operation

    // CRITICAL: Validate block_id is a valid string and not "undefined"
    // This prevents undefined keys from being set in the workflow state
    if (!isValidKey(block_id)) {
      logSkippedItem(skippedItems, {
        type: 'missing_required_params',
        operationType: operation_type,
        blockId: String(block_id || 'invalid'),
        reason: `Invalid block_id "${block_id}" (type: ${typeof block_id}) - operation skipped. Block IDs must be valid non-empty strings.`,
      })
      logger.error('Invalid block_id detected in operation', {
        operation_type,
        block_id,
        block_id_type: typeof block_id,
      })
      continue
    }

    logger.debug(`Executing operation: ${operation_type} for block ${block_id}`, {
      params: params ? Object.keys(params) : [],
      currentBlockCount: Object.keys(modifiedState.blocks).length,

@@ -1128,6 +1167,22 @@ function applyOperationsToWorkflowState(

        // Add new nested blocks
        Object.entries(params.nestedNodes).forEach(([childId, childBlock]: [string, any]) => {
          // Validate childId is a valid string
          if (!isValidKey(childId)) {
            logSkippedItem(skippedItems, {
              type: 'missing_required_params',
              operationType: 'add_nested_node',
              blockId: String(childId || 'invalid'),
              reason: `Invalid childId "${childId}" in nestedNodes - child block skipped`,
            })
            logger.error('Invalid childId detected in nestedNodes', {
              parentBlockId: block_id,
              childId,
              childId_type: typeof childId,
            })
            return
          }

          const childBlockState = createBlockFromParams(
            childId,
            childBlock,

@@ -1360,6 +1415,22 @@ function applyOperationsToWorkflowState(
        // Handle nested nodes (for loops/parallels created from scratch)
        if (params.nestedNodes) {
          Object.entries(params.nestedNodes).forEach(([childId, childBlock]: [string, any]) => {
            // Validate childId is a valid string
            if (!isValidKey(childId)) {
              logSkippedItem(skippedItems, {
                type: 'missing_required_params',
                operationType: 'add_nested_node',
                blockId: String(childId || 'invalid'),
                reason: `Invalid childId "${childId}" in nestedNodes - child block skipped`,
              })
              logger.error('Invalid childId detected in nestedNodes', {
                parentBlockId: block_id,
                childId,
                childId_type: typeof childId,
              })
              return
            }

            const childBlockState = createBlockFromParams(
              childId,
              childBlock,

@@ -1368,21 +1439,22 @@ function applyOperationsToWorkflowState(
            )
            modifiedState.blocks[childId] = childBlockState

            // Defer connection processing to ensure all blocks exist first
            if (childBlock.connections) {
              addConnectionsAsEdges(
                modifiedState,
                childId,
                childBlock.connections,
                logger,
                skippedItems
              )
              addOperationsWithConnections.push({
                blockId: childId,
                connections: childBlock.connections,
              })
            }
          })
        }

        // Add connections as edges
        // Defer connection processing to ensure all blocks exist first (pass 2)
        if (params.connections) {
          addConnectionsAsEdges(modifiedState, block_id, params.connections, logger, skippedItems)
          addOperationsWithConnections.push({
            blockId: block_id,
            connections: params.connections,
          })
        }
        break
      }

@@ -1506,13 +1578,18 @@ function applyOperationsToWorkflowState(
          modifiedState.blocks[block_id] = newBlock
        }

        // Add/update connections as edges
        // Defer connection processing to ensure all blocks exist first
        // This is particularly important when multiple blocks are being inserted
        // and they have connections to each other
        if (params.connections) {
          // Remove existing edges from this block
          // Remove existing edges from this block first
          modifiedState.edges = modifiedState.edges.filter((edge: any) => edge.source !== block_id)

          // Add new connections
          addConnectionsAsEdges(modifiedState, block_id, params.connections, logger, skippedItems)
          // Add to deferred connections list
          addOperationsWithConnections.push({
            blockId: block_id,
            connections: params.connections,
          })
        }
        break
      }

@@ -1562,6 +1639,34 @@ function applyOperationsToWorkflowState(
    }
  }

  // Pass 2: Add all deferred connections from add/insert operations
  // Now all blocks exist (from add, insert, and edit operations), so connections can be safely created
  // This ensures that if block A connects to block B, and both are being added/inserted,
  // B will exist when we create the edge from A to B
  if (addOperationsWithConnections.length > 0) {
    logger.info('Processing deferred connections from add/insert operations', {
      deferredConnectionCount: addOperationsWithConnections.length,
      totalBlocks: Object.keys(modifiedState.blocks).length,
    })

    for (const { blockId, connections } of addOperationsWithConnections) {
      // Verify the source block still exists (it might have been deleted by a later operation)
      if (!modifiedState.blocks[blockId]) {
        logger.warn('Source block no longer exists for deferred connection', {
          blockId,
          availableBlocks: Object.keys(modifiedState.blocks),
        })
        continue
      }

      addConnectionsAsEdges(modifiedState, blockId, connections, logger, skippedItems)
    }

    logger.info('Finished processing deferred connections', {
      totalEdges: modifiedState.edges.length,
    })
  }

  // Regenerate loops and parallels after modifications
  modifiedState.loops = generateLoopBlocks(modifiedState.blocks)
  modifiedState.parallels = generateParallelBlocks(modifiedState.blocks)
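The deferred-connection idea in isolation, as a minimal sketch (the types and names below are ours, not the project's): create every block in pass 1, wire edges in pass 2, so an edge can point at a block added later in the same batch.

```ts
type AddOp = { blockId: string; connections?: Record<string, string> }

function applyAddsTwoPass(ops: AddOp[]) {
  const blocks: Record<string, AddOp> = {}
  const deferred: Array<{ source: string; connections: Record<string, string> }> = []

  // Pass 1: create all blocks, deferring their connections
  for (const op of ops) {
    blocks[op.blockId] = op
    if (op.connections) deferred.push({ source: op.blockId, connections: op.connections })
  }

  // Pass 2: every block exists now, so edges can reference blocks added later in the batch
  const edges: Array<{ source: string; target: string }> = []
  for (const { source, connections } of deferred) {
    for (const target of Object.values(connections)) {
      if (blocks[target]) edges.push({ source, target })
    }
  }
  return { blocks, edges }
}

// 'a' connects to 'b' even though 'b' is added after 'a':
applyAddsTwoPass([{ blockId: 'a', connections: { out: 'b' } }, { blockId: 'b' }])
```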
@@ -2,6 +2,7 @@ import type { Edge } from 'reactflow'
import { v4 as uuidv4 } from 'uuid'
import { createLogger } from '@/lib/logs/console/logger'
import type { BlockWithDiff } from '@/lib/workflows/diff/types'
import { isValidKey } from '@/lib/workflows/sanitization/key-validation'
import { mergeSubblockState } from '@/stores/workflows/utils'
import type { BlockState, WorkflowState } from '@/stores/workflows/workflow/types'

@@ -537,6 +538,17 @@ export class WorkflowDiffEngine {

    // First pass: build ID mappings
    for (const [proposedId, proposedBlock] of Object.entries(proposedState.blocks)) {
      // CRITICAL: Skip invalid block IDs to prevent "undefined" keys in workflow state
      if (!isValidKey(proposedId)) {
        logger.error('Invalid proposedId detected in proposed state', {
          proposedId,
          proposedId_type: typeof proposedId,
          blockType: proposedBlock?.type,
          blockName: proposedBlock?.name,
        })
        continue
      }

      const key = `${proposedBlock.type}:${proposedBlock.name}`

      // Check if this block exists in current state by type:name

@@ -552,7 +564,31 @@ export class WorkflowDiffEngine {

    // Second pass: build final blocks with mapped IDs
    for (const [proposedId, proposedBlock] of Object.entries(proposedState.blocks)) {
      // CRITICAL: Skip invalid block IDs to prevent "undefined" keys in workflow state
      if (!isValidKey(proposedId)) {
        logger.error('Invalid proposedId detected in proposed state (second pass)', {
          proposedId,
          proposedId_type: typeof proposedId,
          blockType: proposedBlock?.type,
          blockName: proposedBlock?.name,
        })
        continue
      }

      const finalId = idMap[proposedId]

      // CRITICAL: Validate finalId before using as key
      if (!isValidKey(finalId)) {
        logger.error('Invalid finalId generated from idMap', {
          proposedId,
          finalId,
          finalId_type: typeof finalId,
          blockType: proposedBlock?.type,
          blockName: proposedBlock?.name,
        })
        continue
      }

      const key = `${proposedBlock.type}:${proposedBlock.name}`
      const existingBlock = existingBlockMap[key]?.block

@@ -617,6 +653,8 @@ export class WorkflowDiffEngine {
    const { generateLoopBlocks, generateParallelBlocks } = await import(
      '@/stores/workflows/workflow/utils'
    )

    // Build the proposed state
    const finalProposedState: WorkflowState = {
      blocks: finalBlocks,
      edges: finalEdges,

@@ -625,6 +663,9 @@ export class WorkflowDiffEngine {
      lastSaved: Date.now(),
    }

    // Use the proposed state directly - validation happens at the source
    const fullyCleanedState = finalProposedState

    // Transfer block heights from baseline workflow for better measurements in diff view
    // If editing on top of diff, this transfers from the diff (which already has good heights)
    // Otherwise transfers from original workflow

@@ -694,7 +735,7 @@ export class WorkflowDiffEngine {
      '@/lib/workflows/autolayout/constants'
    )

    const layoutedBlocks = applyTargetedLayout(finalBlocks, finalProposedState.edges, {
    const layoutedBlocks = applyTargetedLayout(finalBlocks, fullyCleanedState.edges, {
      changedBlockIds: impactedBlockArray,
      horizontalSpacing: DEFAULT_HORIZONTAL_SPACING,
      verticalSpacing: DEFAULT_VERTICAL_SPACING,

@@ -742,7 +783,7 @@ export class WorkflowDiffEngine {

    const layoutResult = applyNativeAutoLayout(
      finalBlocks,
      finalProposedState.edges,
      fullyCleanedState.edges,
      DEFAULT_LAYOUT_OPTIONS
    )

@@ -824,7 +865,7 @@ export class WorkflowDiffEngine {
    })

    // Create edge identifiers for proposed state
    finalEdges.forEach((edge) => {
    fullyCleanedState.edges.forEach((edge) => {
      const edgeId = `${edge.source}-${edge.sourceHandle || 'source'}-${edge.target}-${edge.targetHandle || 'target'}`
      proposedEdgeSet.add(edgeId)
    })

@@ -863,21 +904,21 @@ export class WorkflowDiffEngine {
      }
    }

    // Apply diff markers to blocks
    // Apply diff markers to blocks in the fully cleaned state
    if (computed) {
      for (const id of computed.new_blocks || []) {
        if (finalBlocks[id]) {
          finalBlocks[id].is_diff = 'new'
        if (fullyCleanedState.blocks[id]) {
          ;(fullyCleanedState.blocks[id] as any).is_diff = 'new'
        }
      }
      for (const id of computed.edited_blocks || []) {
        if (finalBlocks[id]) {
          finalBlocks[id].is_diff = 'edited'
        if (fullyCleanedState.blocks[id]) {
          ;(fullyCleanedState.blocks[id] as any).is_diff = 'edited'

          // Also mark specific subblocks that changed
          if (computed.field_diffs?.[id]) {
            const fieldDiff = computed.field_diffs[id]
            const block = finalBlocks[id]
            const block = fullyCleanedState.blocks[id]

            // Apply diff markers to changed subblocks
            for (const changedField of fieldDiff.changed_fields) {

@@ -889,12 +930,12 @@ export class WorkflowDiffEngine {
          }
        }
      }
      // Note: We don't remove deleted blocks from finalBlocks, just mark them
      // Note: We don't remove deleted blocks from fullyCleanedState, just mark them
    }

    // Store the diff
    // Store the diff with the fully sanitized state
    this.currentDiff = {
      proposedState: finalProposedState,
      proposedState: fullyCleanedState,
      diffAnalysis: computed,
      metadata: {
        source: 'workflow_state',

@@ -903,10 +944,10 @@ export class WorkflowDiffEngine {
    }

    logger.info('Successfully created diff from workflow state', {
      blockCount: Object.keys(finalProposedState.blocks).length,
      edgeCount: finalProposedState.edges.length,
      hasLoops: Object.keys(finalProposedState.loops || {}).length > 0,
      hasParallels: Object.keys(finalProposedState.parallels || {}).length > 0,
      blockCount: Object.keys(fullyCleanedState.blocks).length,
      edgeCount: fullyCleanedState.edges.length,
      hasLoops: Object.keys(fullyCleanedState.loops || {}).length > 0,
      hasParallels: Object.keys(fullyCleanedState.parallels || {}).length > 0,
      newBlocks: computed?.new_blocks?.length || 0,
      editedBlocks: computed?.edited_blocks?.length || 0,
      deletedBlocks: computed?.deleted_blocks?.length || 0,

@@ -1096,6 +1137,17 @@ export function stripWorkflowDiffMarkers(state: WorkflowState): WorkflowState {
  const cleanBlocks: Record<string, BlockState> = {}

  for (const [blockId, block] of Object.entries(state.blocks || {})) {
    // Validate block ID at the source - skip invalid IDs
    if (!isValidKey(blockId)) {
      logger.error('Invalid blockId detected in stripWorkflowDiffMarkers', {
        blockId,
        blockId_type: typeof blockId,
        blockType: block?.type,
        blockName: block?.name,
      })
      continue
    }

    const cleanBlock: BlockState = structuredClone(block)
    const blockWithDiff = cleanBlock as BlockState & BlockWithDiff
    blockWithDiff.is_diff = undefined
@@ -9,7 +9,7 @@ import {
  workflowSubflows,
} from '@sim/db'
import type { InferSelectModel } from 'drizzle-orm'
import { and, desc, eq, sql } from 'drizzle-orm'
import { and, desc, eq, inArray, sql } from 'drizzle-orm'
import type { Edge } from 'reactflow'
import { v4 as uuidv4 } from 'uuid'
import { createLogger } from '@/lib/logs/console/logger'
@@ -602,6 +602,178 @@ export async function deployWorkflow(params: {
  }
}

/**
 * Bulk load workflow states for multiple workflows in a single set of queries.
 * Much more efficient than calling loadWorkflowFromNormalizedTables for each workflow.
 */
export async function loadBulkWorkflowsFromNormalizedTables(
  workflowIds: string[]
): Promise<Map<string, NormalizedWorkflowData>> {
  const result = new Map<string, NormalizedWorkflowData>()

  if (workflowIds.length === 0) {
    return result
  }

  try {
    // Load all components for all workflows in parallel (just 3 queries total)
    const [allBlocks, allEdges, allSubflows] = await Promise.all([
      db.select().from(workflowBlocks).where(inArray(workflowBlocks.workflowId, workflowIds)),
      db.select().from(workflowEdges).where(inArray(workflowEdges.workflowId, workflowIds)),
      db.select().from(workflowSubflows).where(inArray(workflowSubflows.workflowId, workflowIds)),
    ])

    // Group blocks by workflow
    const blocksByWorkflow = new Map<string, typeof allBlocks>()
    for (const block of allBlocks) {
      const existing = blocksByWorkflow.get(block.workflowId) || []
      existing.push(block)
      blocksByWorkflow.set(block.workflowId, existing)
    }

    // Group edges by workflow
    const edgesByWorkflow = new Map<string, typeof allEdges>()
    for (const edge of allEdges) {
      const existing = edgesByWorkflow.get(edge.workflowId) || []
      existing.push(edge)
      edgesByWorkflow.set(edge.workflowId, existing)
    }

    // Group subflows by workflow
    const subflowsByWorkflow = new Map<string, typeof allSubflows>()
    for (const subflow of allSubflows) {
      const existing = subflowsByWorkflow.get(subflow.workflowId) || []
      existing.push(subflow)
      subflowsByWorkflow.set(subflow.workflowId, existing)
    }

    // Process each workflow
    for (const workflowId of workflowIds) {
      const blocks = blocksByWorkflow.get(workflowId) || []
      const edges = edgesByWorkflow.get(workflowId) || []
      const subflows = subflowsByWorkflow.get(workflowId) || []

      // Skip workflows with no blocks (not migrated yet)
      if (blocks.length === 0) {
        continue
      }

      // Convert blocks to the expected format
      const blocksMap: Record<string, BlockState> = {}
      blocks.forEach((block) => {
        const blockData = block.data || {}

        const assembled: BlockState = {
          id: block.id,
          type: block.type,
          name: block.name,
          position: {
            x: Number(block.positionX),
            y: Number(block.positionY),
          },
          enabled: block.enabled,
          horizontalHandles: block.horizontalHandles,
          advancedMode: block.advancedMode,
          triggerMode: block.triggerMode,
          height: Number(block.height),
          subBlocks: (block.subBlocks as BlockState['subBlocks']) || {},
          outputs: (block.outputs as BlockState['outputs']) || {},
          data: blockData,
        }

        blocksMap[block.id] = assembled
      })

      // Sanitize any invalid custom tools in agent blocks
      const { blocks: sanitizedBlocks } = sanitizeAgentToolsInBlocks(blocksMap)

      // Migrate old agent block format to new messages array format
      const migratedBlocks = migrateAgentBlocksToMessagesFormat(sanitizedBlocks)

      // Convert edges to the expected format
      const edgesArray: Edge[] = edges.map((edge) => ({
        id: edge.id,
        source: edge.sourceBlockId,
        target: edge.targetBlockId,
        sourceHandle: edge.sourceHandle ?? undefined,
        targetHandle: edge.targetHandle ?? undefined,
        type: 'default',
        data: {},
      }))

      // Convert subflows to loops and parallels
      const loops: Record<string, Loop> = {}
      const parallels: Record<string, Parallel> = {}

      subflows.forEach((subflow) => {
        const config = (subflow.config ?? {}) as Partial<Loop & Parallel>

        if (subflow.type === SUBFLOW_TYPES.LOOP) {
          const loopType =
            (config as Loop).loopType === 'for' ||
            (config as Loop).loopType === 'forEach' ||
            (config as Loop).loopType === 'while' ||
            (config as Loop).loopType === 'doWhile'
              ? (config as Loop).loopType
              : 'for'

          const loop: Loop = {
            id: subflow.id,
            nodes: Array.isArray((config as Loop).nodes) ? (config as Loop).nodes : [],
            iterations:
              typeof (config as Loop).iterations === 'number' ? (config as Loop).iterations : 1,
            loopType,
            forEachItems: (config as Loop).forEachItems ?? '',
            whileCondition: (config as Loop).whileCondition ?? '',
            doWhileCondition: (config as Loop).doWhileCondition ?? '',
          }
          loops[subflow.id] = loop

          // Sync block.data with loop config
          if (migratedBlocks[subflow.id]) {
            const block = migratedBlocks[subflow.id]
            migratedBlocks[subflow.id] = {
              ...block,
              data: {
                ...block.data,
                collection: loop.forEachItems ?? block.data?.collection ?? '',
                whileCondition: loop.whileCondition ?? block.data?.whileCondition ?? '',
                doWhileCondition: loop.doWhileCondition ?? block.data?.doWhileCondition ?? '',
              },
            }
          }
        } else if (subflow.type === SUBFLOW_TYPES.PARALLEL) {
          const parallel: Parallel = {
            id: subflow.id,
            nodes: Array.isArray((config as Parallel).nodes) ? (config as Parallel).nodes : [],
            count: typeof (config as Parallel).count === 'number' ? (config as Parallel).count : 5,
            distribution: (config as Parallel).distribution ?? '',
            parallelType:
              (config as Parallel).parallelType === 'count' ||
              (config as Parallel).parallelType === 'collection'
                ? (config as Parallel).parallelType
                : 'count',
          }
          parallels[subflow.id] = parallel
        }
      })

      result.set(workflowId, {
        blocks: migratedBlocks,
        edges: edgesArray,
        loops,
        parallels,
        isFromNormalizedTables: true,
      })
    }

    return result
  } catch (error) {
    logger.error('Error bulk loading workflows from normalized tables:', error)
    return result
  }
}
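A quick sketch of calling the bulk loader directly; the ids are made up, while the export route above derives them from a workspace query:

```ts
const states = await loadBulkWorkflowsFromNormalizedTables(['wf_1', 'wf_2']) // hypothetical ids
for (const [workflowId, data] of states) {
  // Workflows with no normalized blocks are simply absent from the map
  console.log(workflowId, Object.keys(data.blocks).length, data.edges.length)
}
```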

/**
 * Regenerates all IDs in a workflow state to avoid conflicts when duplicating or using templates
 * Returns a new state with all IDs regenerated and references updated
apps/sim/lib/workflows/sanitization/key-validation.ts (Normal file, 9 lines)
@@ -0,0 +1,9 @@
/**
 * Checks if a key is valid (not undefined, null, empty, or literal "undefined"/"null")
 * Use this to validate BEFORE setting a dynamic key on any object.
 */
export function isValidKey(key: unknown): key is string {
  return (
    !!key && typeof key === 'string' && key !== 'undefined' && key !== 'null' && key.trim() !== ''
  )
}
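The guard in action (values below are illustrative):

```ts
isValidKey('block_123') // true
isValidKey('')          // false (empty)
isValidKey('   ')       // false (whitespace only)
isValidKey('undefined') // false (stringified undefined, e.g. from `${undefined}`)
isValidKey(null)        // false
isValidKey(42)          // false (not a string)

// Typical use before writing a dynamic key:
const blocks: Record<string, unknown> = {}
const id: unknown = 'block_123' // hypothetical incoming id
if (isValidKey(id)) blocks[id] = {}
```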
@@ -32,6 +32,7 @@ import { SearchDocumentationClientTool } from '@/lib/copilot/tools/client/other/
import { SearchErrorsClientTool } from '@/lib/copilot/tools/client/other/search-errors'
import { SearchOnlineClientTool } from '@/lib/copilot/tools/client/other/search-online'
import { SearchPatternsClientTool } from '@/lib/copilot/tools/client/other/search-patterns'
import { SleepClientTool } from '@/lib/copilot/tools/client/other/sleep'
import { createExecutionContext, getTool } from '@/lib/copilot/tools/client/registry'
import { GetCredentialsClientTool } from '@/lib/copilot/tools/client/user/get-credentials'
import { SetEnvironmentVariablesClientTool } from '@/lib/copilot/tools/client/user/set-environment-variables'

@@ -104,6 +105,7 @@ const CLIENT_TOOL_INSTANTIATORS: Record<string, (id: string) => any> = {
  navigate_ui: (id) => new NavigateUIClientTool(id),
  manage_custom_tool: (id) => new ManageCustomToolClientTool(id),
  manage_mcp_tool: (id) => new ManageMcpToolClientTool(id),
  sleep: (id) => new SleepClientTool(id),
}

// Read-only static metadata for class-based tools (no instances)

@@ -141,6 +143,7 @@ export const CLASS_TOOL_METADATA: Record<string, BaseClientToolMetadata | undefi
  navigate_ui: (NavigateUIClientTool as any)?.metadata,
  manage_custom_tool: (ManageCustomToolClientTool as any)?.metadata,
  manage_mcp_tool: (ManageMcpToolClientTool as any)?.metadata,
  sleep: (SleepClientTool as any)?.metadata,
}

function ensureClientToolInstance(toolName: string | undefined, toolCallId: string | undefined) {

@@ -2260,6 +2263,22 @@ export const useCopilotStore = create<CopilotStore>()(
        set({ toolCallsById: map })
      } catch {}
    },

    updateToolCallParams: (toolCallId: string, params: Record<string, any>) => {
      try {
        if (!toolCallId) return
        const map = { ...get().toolCallsById }
        const current = map[toolCallId]
        if (!current) return
        const updatedParams = { ...current.params, ...params }
        map[toolCallId] = {
          ...current,
          params: updatedParams,
          display: resolveToolDisplay(current.name, current.state, toolCallId, updatedParams),
        }
        set({ toolCallsById: map })
      } catch {}
    },
    updatePreviewToolCallState: (
      toolCallState: 'accepted' | 'rejected' | 'error',
      toolCallId?: string

@@ -178,6 +178,7 @@ export interface CopilotActions {
    toolCallId?: string
  ) => void
  setToolCallState: (toolCall: any, newState: ClientToolCallState, options?: any) => void
  updateToolCallParams: (toolCallId: string, params: Record<string, any>) => void
  sendDocsMessage: (query: string, options?: { stream?: boolean; topK?: number }) => Promise<void>
  saveChatMessages: (chatId: string) => Promise<void>
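A quick sketch of the new store action in use outside a component, mirroring the Wake handler earlier in this diff; the tool call id is made up:

```ts
const { updateToolCallParams } = useCopilotStore.getState()
// Shallow-merges into the existing params and recomputes the display text
updateToolCallParams('toolcall_abc', { _elapsedSeconds: 42 })
```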