Mirror of https://github.com/simstudioai/sim.git, synced 2026-01-10 07:27:57 -05:00

fix(copilot): deprecate yaml, json import/export, deprecate build_workflow tool, convert copilot to json-based (#1488)

* Temp commit
* Edit workflow self contained
* Remove build_workflow
* Base bad version
* fix lint
* Sanitize workflows for copilot
* Fixes
* Fix import/export buttons
* fix autolayout
* fix lint
* fix training logic to work with json
* Add claude sonnet 4.5 to copilot
* Lint
* Update copilot url
* Update default model and fix build errors
* Fix tests

Co-authored-by: Vikhyath Mondreti <vikhyath@simstudio.ai>
Committed by GitHub
Parent: 333442909b
Commit: 6a664663cf
@@ -10,7 +10,6 @@ type: object
required:
  - type
  - name
  - inputs
  - connections
properties:
  type:
@@ -22,21 +21,23 @@ properties:
    description: Display name for this loop block
  inputs:
    type: object
    required:
      - loopType
    description: Optional. If omitted, defaults will be applied.
    properties:
      loopType:
        type: string
        enum: [for, forEach]
        description: Type of loop to execute
        default: for
      iterations:
        type: number
        description: Number of iterations (for 'for' loops)
        default: 5
        minimum: 1
        maximum: 1000
      collection:
        type: string
        description: Collection to iterate over (for 'forEach' loops)
        default: ""
      maxConcurrency:
        type: number
        description: Maximum concurrent executions
@@ -45,13 +46,10 @@ properties:
        maximum: 10
  connections:
    type: object
    required:
      - loop
    properties:
      # Nested format (recommended)
      loop:
        type: object
        required:
          - start
        properties:
          start:
            type: string
@@ -59,26 +57,37 @@ properties:
          end:
            type: string
            description: Target block ID for loop completion (optional)
      # Direct handle format (alternative)
      loop-start-source:
        type: string | string[]
        description: Target block ID to execute inside the loop (direct format)
      loop-end-source:
        type: string | string[]
        description: Target block ID for loop completion (direct format, optional)
      error:
        type: string
        description: Target block ID for error handling
    note: Use either the nested 'loop' format OR the direct 'loop-start-source' format, not both
```

## Connection Configuration

-Loop blocks use a special connection format with a `loop` section:
+Loop blocks support two connection formats:

### Direct Handle Format (Alternative)

```yaml
connections:
  loop:
    start: <string> # Target block ID to execute inside the loop
    end: <string> # Target block ID after loop completion (optional)
  loop-start-source: <string> # Target block ID to execute inside the loop
  loop-end-source: <string> # Target block ID after loop completion (optional)
  error: <string> # Target block ID for error handling (optional)
```

Both formats work identically. Use whichever you prefer.

## Child Block Configuration

-Blocks inside a loop must have their `parentId` set to the loop block ID:
+Blocks inside a loop must have their `parentId` set to the loop block ID. The `extent` property is automatically set to `'parent'` and doesn't need to be specified:

```yaml
loop-1:
@@ -261,6 +270,59 @@ process-task:
    success: task-completed
```

### Direct Handle Format Example

The same loop can be written using the direct handle format:

```yaml
my-loop:
  type: loop
  name: "Process Items"
  inputs:
    loopType: forEach
    collection: <start.items>
  connections:
    loop-start-source: process-item # Direct handle format
    loop-end-source: final-results # Direct handle format
    error: handle-error

process-item:
  type: agent
  name: "Process Item"
  parentId: my-loop
  inputs:
    systemPrompt: "Process this item"
    userPrompt: <loop.currentItem>
    model: gpt-4o
    apiKey: '{{OPENAI_API_KEY}}'
```

### Minimal Loop Example (Using Defaults)

You can omit the `inputs` section entirely, and defaults will be applied:

```yaml
simple-loop:
  type: loop
  name: "Simple Loop"
  # No inputs section - defaults to loopType: 'for', iterations: 5
  connections:
    loop-start-source: process-step
    loop-end-source: complete

process-step:
  type: agent
  name: "Process Step"
  parentId: simple-loop
  inputs:
    systemPrompt: "Execute step"
    userPrompt: "Step <loop.index>"
    model: gpt-4o
    apiKey: '{{OPENAI_API_KEY}}'
```

This loop will execute 5 iterations by default.

## Loop Variables

Inside loop child blocks, these special variables are available:

@@ -230,7 +230,7 @@ describe('Copilot Chat API Route', () => {
        userId: 'user-123',
        stream: true,
        streamToolCalls: true,
-       model: 'gpt-5',
+       model: 'claude-4.5-sonnet',
        mode: 'agent',
        messageId: 'mock-uuid-1234-5678',
        version: '1.0.0',
@@ -300,7 +300,7 @@ describe('Copilot Chat API Route', () => {
        userId: 'user-123',
        stream: true,
        streamToolCalls: true,
-       model: 'gpt-5',
+       model: 'claude-4.5-sonnet',
        mode: 'agent',
        messageId: 'mock-uuid-1234-5678',
        version: '1.0.0',
@@ -358,7 +358,7 @@ describe('Copilot Chat API Route', () => {
        userId: 'user-123',
        stream: true,
        streamToolCalls: true,
-       model: 'gpt-5',
+       model: 'claude-4.5-sonnet',
        mode: 'agent',
        messageId: 'mock-uuid-1234-5678',
        version: '1.0.0',
@@ -450,7 +450,7 @@ describe('Copilot Chat API Route', () => {
        userId: 'user-123',
        stream: true,
        streamToolCalls: true,
-       model: 'gpt-5',
+       model: 'claude-4.5-sonnet',
        mode: 'ask',
        messageId: 'mock-uuid-1234-5678',
        version: '1.0.0',

@@ -48,10 +48,11 @@ const ChatMessageSchema = z.object({
        'gpt-4.1',
        'o3',
        'claude-4-sonnet',
+       'claude-4.5-sonnet',
        'claude-4.1-opus',
      ])
      .optional()
-     .default('gpt-5'),
+     .default('claude-4.5-sonnet'),
    mode: z.enum(['ask', 'agent']).optional().default('agent'),
    prefetch: z.boolean().optional(),
    createNewChat: z.boolean().optional().default(false),
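
For reference, a minimal sketch of how the new default behaves, assuming only the zod dependency (the enum here is abridged to the entries visible in the hunk above):

```typescript
import { z } from 'zod'

// Mirrors the copilot model field after this change: an optional enum that
// now falls back to 'claude-4.5-sonnet' instead of 'gpt-5'.
const ModelSchema = z
  .enum(['gpt-4.1', 'o3', 'claude-4-sonnet', 'claude-4.5-sonnet', 'claude-4.1-opus'])
  .optional()
  .default('claude-4.5-sonnet')

console.log(ModelSchema.parse(undefined)) // 'claude-4.5-sonnet'
console.log(ModelSchema.parse('o3'))      // 'o3'
```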
@@ -51,13 +51,9 @@ export async function POST(request: NextRequest) {

    logger.info('Sending training data to agent indexer', {
      title,
-     operationsCount: operations.length,
+     operationsCount: Array.isArray(operations) ? operations.length : 0,
    })
-
-   const wrappedOperations = {
-     operations: operations,
-   }

    // Forward to agent indexer
    const upstreamUrl = `${baseUrl}/operations/add`
    const upstreamResponse = await fetch(upstreamUrl, {
@@ -71,7 +67,7 @@ export async function POST(request: NextRequest) {
        prompt,
        input,
        output,
-       operations: wrappedOperations,
+       operations: { operations },
      }),
    })
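
A minimal sketch of the resulting request body after this change; the `operations` value here is hypothetical, but the field names and the `{ operations: { operations } }` wire format are taken from the diff above:

```typescript
// Hypothetical training operations; the route forwards whatever it received.
const operations: unknown = [{ type: 'edit_workflow' }]

// Defensive count used for logging: a non-array input no longer throws.
const operationsCount = Array.isArray(operations) ? operations.length : 0

// Body posted to `${baseUrl}/operations/add`. The intermediate
// `wrappedOperations` variable is gone, but the wire format is unchanged.
const body = JSON.stringify({
  prompt: 'example prompt', // placeholder values
  input: {},
  output: {},
  operations: { operations },
})
```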
@@ -6,13 +6,9 @@ import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console/logger'
import { getUserEntityPermissions } from '@/lib/permissions/utils'
import { simAgentClient } from '@/lib/sim-agent/client'
import { generateRequestId } from '@/lib/utils'
import { applyAutoLayout } from '@/lib/workflows/autolayout'
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/db-helpers'
import { getAllBlocks } from '@/blocks/registry'
import type { BlockConfig } from '@/blocks/types'
import { resolveOutputType } from '@/blocks/utils'
import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/workflow/utils'

export const dynamic = 'force-dynamic'

@@ -120,115 +116,39 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
      return NextResponse.json({ error: 'Could not load workflow data' }, { status: 500 })
    }

    // Create workflow state for autolayout
    const workflowState = {
      blocks: currentWorkflowData.blocks,
      edges: currentWorkflowData.edges,
      loops: currentWorkflowData.loops || {},
      parallels: currentWorkflowData.parallels || {},
    }

    const autoLayoutOptions = {
      strategy: layoutOptions.strategy,
      direction: layoutOptions.direction,
      spacing: {
        horizontal: layoutOptions.spacing?.horizontal || 500,
        vertical: layoutOptions.spacing?.vertical || 400,
        layer: layoutOptions.spacing?.layer || 700,
      horizontalSpacing: layoutOptions.spacing?.horizontal || 550,
      verticalSpacing: layoutOptions.spacing?.vertical || 200,
      padding: {
        x: layoutOptions.padding?.x || 150,
        y: layoutOptions.padding?.y || 150,
      },
      alignment: layoutOptions.alignment,
      padding: {
        x: layoutOptions.padding?.x || 250,
        y: layoutOptions.padding?.y || 250,
      },
    }

    // Gather block registry and utilities for sim-agent
    const blocks = getAllBlocks()
    const blockRegistry = blocks.reduce(
      (acc, block) => {
        const blockType = block.type
        acc[blockType] = {
          ...block,
          id: blockType,
          subBlocks: block.subBlocks || [],
          outputs: block.outputs || {},
        } as any
        return acc
      },
      {} as Record<string, BlockConfig>
    const layoutResult = applyAutoLayout(
      currentWorkflowData.blocks,
      currentWorkflowData.edges,
      currentWorkflowData.loops || {},
      currentWorkflowData.parallels || {},
      autoLayoutOptions
    )

    const autoLayoutResult = await simAgentClient.makeRequest('/api/yaml/autolayout', {
      body: {
        workflowState,
        options: autoLayoutOptions,
        blockRegistry,
        utilities: {
          generateLoopBlocks: generateLoopBlocks.toString(),
          generateParallelBlocks: generateParallelBlocks.toString(),
          resolveOutputType: resolveOutputType.toString(),
        },
      },
    })

    // Log the full response for debugging
    logger.info(`[${requestId}] Sim-agent autolayout response:`, {
      success: autoLayoutResult.success,
      status: autoLayoutResult.status,
      error: autoLayoutResult.error,
      hasData: !!autoLayoutResult.data,
      hasWorkflowState: !!autoLayoutResult.data?.workflowState,
      hasBlocks: !!autoLayoutResult.data?.blocks,
      dataKeys: autoLayoutResult.data ? Object.keys(autoLayoutResult.data) : [],
    })

    if (
      !autoLayoutResult.success ||
      (!autoLayoutResult.data?.workflowState && !autoLayoutResult.data?.blocks)
    ) {
    if (!layoutResult.success || !layoutResult.blocks) {
      logger.error(`[${requestId}] Auto layout failed:`, {
        success: autoLayoutResult.success,
        error: autoLayoutResult.error,
        status: autoLayoutResult.status,
        fullResponse: autoLayoutResult,
      })
      const errorMessage =
        autoLayoutResult.error ||
        (autoLayoutResult.status === 401
          ? 'Unauthorized - check API key'
          : autoLayoutResult.status === 404
            ? 'Sim-agent service not found'
            : `HTTP ${autoLayoutResult.status}`)

      return NextResponse.json(
        {
          error: 'Auto layout failed',
          details: errorMessage,
        },
        { status: 500 }
      )
    }

    // Handle both response formats from sim-agent
    const layoutedBlocks =
      autoLayoutResult.data?.workflowState?.blocks || autoLayoutResult.data?.blocks

    if (!layoutedBlocks) {
      logger.error(`[${requestId}] No blocks returned from sim-agent:`, {
        responseData: autoLayoutResult.data,
        error: layoutResult.error,
      })
      return NextResponse.json(
        {
          error: 'Auto layout failed',
          details: 'No blocks returned from sim-agent',
          details: layoutResult.error || 'Unknown error',
        },
        { status: 500 }
      )
    }

    const elapsed = Date.now() - startTime
    const blockCount = Object.keys(layoutedBlocks).length
    const blockCount = Object.keys(layoutResult.blocks).length

    logger.info(`[${requestId}] Autolayout completed successfully in ${elapsed}ms`, {
      blockCount,
@@ -236,7 +156,6 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
      workflowId,
    })

    // Return the layouted blocks to the frontend - let the store handle saving
    return NextResponse.json({
      success: true,
      message: `Autolayout applied successfully to ${blockCount} blocks`,
@@ -245,7 +164,7 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
        direction: layoutOptions.direction,
        blockCount,
        elapsed: `${elapsed}ms`,
        layoutedBlocks: layoutedBlocks,
        layoutedBlocks: layoutResult.blocks,
      },
    })
  } catch (error) {
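
The route now lays blocks out in-process instead of round-tripping through the sim-agent service. A minimal sketch of the new call, with the option names and the `{ success, blocks, error }` result shape taken from the diff above (the workflow data itself is hypothetical):

```typescript
import { applyAutoLayout } from '@/lib/workflows/autolayout'

// Hypothetical input; in the route it comes from loadWorkflowFromNormalizedTables(workflowId).
const currentWorkflowData: any = { blocks: {}, edges: [], loops: {}, parallels: {} }

const layoutResult = applyAutoLayout(
  currentWorkflowData.blocks,
  currentWorkflowData.edges,
  currentWorkflowData.loops || {},
  currentWorkflowData.parallels || {},
  {
    horizontalSpacing: 550, // defaults the route uses when no spacing is supplied
    verticalSpacing: 200,
    padding: { x: 150, y: 150 },
    alignment: 'center',
  }
)

if (layoutResult.success && layoutResult.blocks) {
  // layoutResult.blocks carries the re-positioned block map.
  console.log(Object.keys(layoutResult.blocks).length, 'blocks laid out')
} else {
  console.error('Auto layout failed:', layoutResult.error)
}
```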
@@ -1,864 +0,0 @@
import crypto from 'crypto'
import { db } from '@sim/db'
import { customTools, workflowCheckpoints, workflow as workflowTable } from '@sim/db/schema'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { simAgentClient } from '@/lib/sim-agent/client'
import { SIM_AGENT_API_URL_DEFAULT } from '@/lib/sim-agent/constants'
import { generateRequestId } from '@/lib/utils'
import {
  loadWorkflowFromNormalizedTables,
  saveWorkflowToNormalizedTables,
} from '@/lib/workflows/db-helpers'
import { sanitizeAgentToolsInBlocks, validateWorkflowState } from '@/lib/workflows/validation'
import { getUserId } from '@/app/api/auth/oauth/utils'
import { getAllBlocks, getBlock } from '@/blocks'
import type { BlockConfig } from '@/blocks/types'
import { resolveOutputType } from '@/blocks/utils'
import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/workflow/utils'

const logger = createLogger('YamlWorkflowAPI')
const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT

const YamlWorkflowRequestSchema = z.object({
  yamlContent: z.string().min(1, 'YAML content is required'),
  description: z.string().optional(),
  chatId: z.string().optional(),
  source: z.enum(['copilot', 'editor', 'import']).default('editor'),
  applyAutoLayout: z.boolean().optional().default(false),
  createCheckpoint: z.boolean().optional().default(false),
})

function updateBlockReferences(
  value: any,
  blockIdMapping: Map<string, string>,
  requestId: string
): any {
  if (typeof value === 'string') {
    // Replace references in string values
    for (const [oldId, newId] of blockIdMapping.entries()) {
      if (value.includes(oldId)) {
        value = value.replaceAll(`<${oldId}.`, `<${newId}.`).replaceAll(`%${oldId}.`, `%${newId}.`)
      }
    }
    return value
  }

  if (Array.isArray(value)) {
    return value.map((item) => updateBlockReferences(item, blockIdMapping, requestId))
  }

  if (value && typeof value === 'object') {
    const result: Record<string, any> = {}
    for (const [key, val] of Object.entries(value)) {
      result[key] = updateBlockReferences(val, blockIdMapping, requestId)
    }
    return result
  }

  return value
}
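
To illustrate what this deleted helper did: it rewrote `<blockId.field>` and `%blockId.` references whenever blocks were re-keyed during import. A small sketch against the function above, with hypothetical IDs:

```typescript
const blockIdMapping = new Map([['agent-old', 'agent-new']]) // hypothetical IDs

const prompt = 'Summarize <agent-old.content> and report %agent-old.tokens'
const rewritten = updateBlockReferences(prompt, blockIdMapping, 'req-1')
// -> 'Summarize <agent-new.content> and report %agent-new.tokens'
```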
/**
 * Helper function to create a checkpoint before workflow changes
 */
async function createWorkflowCheckpoint(
  userId: string,
  workflowId: string,
  chatId: string,
  requestId: string
): Promise<boolean> {
  try {
    logger.info(`[${requestId}] Creating checkpoint before workflow edit`)

    // Get current workflow state
    const currentWorkflowData = await loadWorkflowFromNormalizedTables(workflowId)

    if (currentWorkflowData) {
      // Generate YAML from current state
      // Gather block registry and utilities for sim-agent
      const allBlockConfigs = getAllBlocks()
      const blockRegistry = allBlockConfigs.reduce(
        (acc, block) => {
          const blockType = block.type
          acc[blockType] = {
            ...block,
            id: blockType,
            subBlocks: block.subBlocks || [],
            outputs: block.outputs || {},
          } as any
          return acc
        },
        {} as Record<string, BlockConfig>
      )

      const generateResponse = await fetch(`${env.SIM_AGENT_API_URL}/api/workflow/to-yaml`, {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
        },
        body: JSON.stringify({
          workflowState: currentWorkflowData,
          blockRegistry,
          utilities: {
            generateLoopBlocks: generateLoopBlocks.toString(),
            generateParallelBlocks: generateParallelBlocks.toString(),
            resolveOutputType: resolveOutputType.toString(),
          },
        }),
      })

      if (!generateResponse.ok) {
        const errorText = await generateResponse.text()
        throw new Error(`Failed to generate YAML: ${errorText}`)
      }

      const generateResult = await generateResponse.json()
      if (!generateResult.success || !generateResult.yaml) {
        throw new Error(generateResult.error || 'Failed to generate YAML')
      }
      const currentYaml = generateResult.yaml

      // Create checkpoint using new workflow_checkpoints table
      await db.insert(workflowCheckpoints).values({
        userId,
        workflowId,
        chatId,
        workflowState: currentWorkflowData, // Store JSON workflow state
      })

      logger.info(`[${requestId}] Checkpoint created successfully`)
      return true
    }
    logger.warn(`[${requestId}] Could not load current workflow state for checkpoint`)
    return false
  } catch (error) {
    logger.error(`[${requestId}] Failed to create checkpoint:`, error)
    return false
  }
}
async function upsertCustomToolsFromBlocks(
  userId: string,
  blocks: Record<string, any>,
  requestId: string
): Promise<{ created: number; updated: number }> {
  try {
    // Collect custom tools from all agent blocks
    const collected: Array<{ title: string; schema: any; code: string }> = []

    for (const block of Object.values(blocks)) {
      if (!block || block.type !== 'agent') continue
      const toolsSub = block.subBlocks?.tools
      if (!toolsSub) continue

      let value = toolsSub.value
      if (!value) continue
      if (typeof value === 'string') {
        try {
          value = JSON.parse(value)
        } catch {
          continue
        }
      }
      if (!Array.isArray(value)) continue

      for (const tool of value) {
        if (
          tool &&
          tool.type === 'custom-tool' &&
          tool.schema &&
          tool.schema.function &&
          tool.schema.function.name &&
          typeof tool.code === 'string'
        ) {
          collected.push({
            title: tool.title || tool.schema.function.name,
            schema: tool.schema,
            code: tool.code,
          })
        }
      }
    }

    if (collected.length === 0) return { created: 0, updated: 0 }

    // Ensure unique by function name
    const byName = new Map<string, { title: string; schema: any; code: string }>()
    for (const t of collected) {
      const name = t.schema.function.name
      if (!byName.has(name)) byName.set(name, t)
    }

    // Load existing user's tools
    const existing = await db.select().from(customTools).where(eq(customTools.userId, userId))

    const existingByName = new Map<string, (typeof existing)[number]>()
    for (const row of existing) {
      try {
        const fnName = (row.schema as any)?.function?.name
        if (fnName) existingByName.set(fnName, row as any)
      } catch {}
    }

    let created = 0
    let updated = 0
    const now = new Date()

    // Upsert by function name
    for (const [name, tool] of byName.entries()) {
      const match = existingByName.get(name)
      if (!match) {
        await db.insert(customTools).values({
          id: crypto.randomUUID(),
          userId,
          title: tool.title,
          schema: tool.schema,
          code: tool.code,
          createdAt: now,
          updatedAt: now,
        })
        created++
      } else {
        await db
          .update(customTools)
          .set({ title: tool.title, schema: tool.schema, code: tool.code, updatedAt: now })
          .where(eq(customTools.id, match.id))
        updated++
      }
    }

    logger.info(`[${requestId}] Upserted custom tools from YAML`, { created, updated })
    return { created, updated }
  } catch (err) {
    logger.warn(`[${requestId}] Failed to upsert custom tools from YAML`, {
      error: err instanceof Error ? err.message : String(err),
    })
    return { created: 0, updated: 0 }
  }
}
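
The upsert keys tools by their schema's function name. A standalone sketch of that dedupe step with hypothetical tools, mirroring the helper above:

```typescript
type CollectedTool = { title: string; schema: any; code: string }

// Two hypothetical tools sharing a function name; the first occurrence wins,
// exactly as in the helper above.
const collected: CollectedTool[] = [
  { title: 'Lookup', schema: { function: { name: 'lookup' } }, code: 'return 1' },
  { title: 'Lookup v2', schema: { function: { name: 'lookup' } }, code: 'return 2' },
]

const byName = new Map<string, CollectedTool>()
for (const t of collected) {
  const name = t.schema.function.name
  if (!byName.has(name)) byName.set(name, t)
}
// byName.size === 1; each surviving entry is then inserted or updated by
// matching the same function name against the user's existing customTools rows.
```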
/**
 * Convert blocks with 'inputs' field to standard 'subBlocks' structure
 * This handles trigger blocks that may come from YAML/copilot with legacy format
 */
function normalizeBlockStructure(blocks: Record<string, any>): Record<string, any> {
  const normalizedBlocks: Record<string, any> = {}

  for (const [blockId, block] of Object.entries(blocks)) {
    const normalizedBlock = { ...block }

    // Normalize position coordinates (handle both uppercase and lowercase)
    if (block.position) {
      normalizedBlock.position = {
        x: block.position.x ?? block.position.X ?? 0,
        y: block.position.y ?? block.position.Y ?? 0,
      }
    }

    // Convert any inputs map into subBlocks for consistency (applies to all blocks)
    if (block.inputs) {
      // Convert inputs.inputFormat to subBlocks.inputFormat
      if (block.inputs.inputFormat) {
        if (!normalizedBlock.subBlocks) {
          normalizedBlock.subBlocks = {}
        }

        normalizedBlock.subBlocks.inputFormat = {
          id: 'inputFormat',
          type: 'input-format',
          value: block.inputs.inputFormat,
        }
      }

      // Copy all inputs fields to subBlocks (creating entries as needed)
      for (const [inputKey, inputValue] of Object.entries(block.inputs)) {
        if (!normalizedBlock.subBlocks) {
          normalizedBlock.subBlocks = {}
        }
        if (!normalizedBlock.subBlocks[inputKey]) {
          normalizedBlock.subBlocks[inputKey] = {
            id: inputKey,
            type: 'short-input', // Default type, may need adjustment based on actual field
            value: inputValue,
          }
        } else {
          normalizedBlock.subBlocks[inputKey].value = inputValue
        }
      }

      // Remove the inputs field after conversion
      normalizedBlock.inputs = undefined
    }

    normalizedBlocks[blockId] = normalizedBlock
  }

  return normalizedBlocks
}
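
To make the conversion concrete, here is what the helper above produces for a hypothetical block that still uses the legacy `inputs` map:

```typescript
const legacy = {
  'start-1': {
    type: 'api_trigger',
    position: { X: 100, Y: 80 }, // uppercase coordinates are tolerated
    inputs: { inputFormat: [{ name: 'items', type: 'array' }] },
  },
}

const normalized = normalizeBlockStructure(legacy)
// normalized['start-1'].position  -> { x: 100, y: 80 }
// normalized['start-1'].subBlocks -> { inputFormat: { id: 'inputFormat', type: 'input-format', value: [...] } }
// normalized['start-1'].inputs    -> undefined
```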
/**
 * PUT /api/workflows/[id]/yaml
 * Consolidated YAML workflow saving endpoint
 * Handles copilot edits, imports, and text editor saves
 */
export async function PUT(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
  const requestId = generateRequestId()
  const startTime = Date.now()
  const { id: workflowId } = await params

  try {
    // Parse and validate request
    const body = await request.json()
    const { yamlContent, description, chatId, source, applyAutoLayout, createCheckpoint } =
      YamlWorkflowRequestSchema.parse(body)

    logger.info(`[${requestId}] Processing ${source} YAML workflow save`, {
      workflowId,
      yamlLength: yamlContent.length,
      hasDescription: !!description,
      hasChatId: !!chatId,
      applyAutoLayout,
      createCheckpoint,
    })

    // Get and validate user
    const userId = await getUserId(requestId, workflowId)
    if (!userId) {
      return NextResponse.json({ error: 'Unauthorized or workflow not found' }, { status: 403 })
    }

    // Create checkpoint if requested (typically for copilot)
    if (createCheckpoint && chatId) {
      await createWorkflowCheckpoint(userId, workflowId, chatId, requestId)
    }

    // Convert YAML to workflow state by calling sim-agent directly
    // Gather block registry and utilities for sim-agent
    const allBlockTypes = getAllBlocks()
    const blockRegistry = allBlockTypes.reduce(
      (acc, block) => {
        const blockType = block.type
        acc[blockType] = {
          ...block,
          id: blockType,
          subBlocks: block.subBlocks || [],
          outputs: block.outputs || {},
        } as any
        return acc
      },
      {} as Record<string, BlockConfig>
    )

    const conversionResponse = await fetch(`${SIM_AGENT_API_URL}/api/yaml/to-workflow`, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
      },
      body: JSON.stringify({
        yamlContent,
        blockRegistry,
        utilities: {
          generateLoopBlocks: generateLoopBlocks.toString(),
          generateParallelBlocks: generateParallelBlocks.toString(),
          resolveOutputType: resolveOutputType.toString(),
        },
        options: {
          generateNewIds: false, // We'll handle ID generation manually for now
          preservePositions: true,
        },
      }),
    })

    if (!conversionResponse.ok) {
      const errorText = await conversionResponse.text()
      logger.error(`[${requestId}] Sim agent API error:`, {
        status: conversionResponse.status,
        error: errorText,
      })
      return NextResponse.json({
        success: false,
        message: 'Failed to convert YAML to workflow',
        errors: [`Sim agent API error: ${conversionResponse.statusText}`],
        warnings: [],
      })
    }

    const conversionResult = await conversionResponse.json()

    const workflowState = conversionResult.workflowState || conversionResult.diff?.proposedState

    if (!conversionResult.success || !workflowState) {
      logger.error(`[${requestId}] YAML conversion failed`, {
        errors: conversionResult.errors,
        warnings: conversionResult.warnings,
      })
      return NextResponse.json({
        success: false,
        message: 'Failed to convert YAML to workflow',
        errors: conversionResult.errors,
        warnings: conversionResult.warnings || [],
      })
    }

    // Normalize blocks that use 'inputs' field to standard 'subBlocks' structure
    if (workflowState.blocks) {
      workflowState.blocks = normalizeBlockStructure(workflowState.blocks)
    }

    // Validate the workflow state before persisting
    const validation = validateWorkflowState(workflowState, { sanitize: true })

    if (!validation.valid) {
      logger.error(`[${requestId}] Workflow validation failed`, {
        errors: validation.errors,
        warnings: validation.warnings,
      })
      return NextResponse.json({
        success: false,
        message: 'Invalid workflow structure',
        errors: validation.errors,
        warnings: validation.warnings || [],
      })
    }

    // Use sanitized state if available
    const finalWorkflowState = validation.sanitizedState || workflowState

    if (validation.warnings.length > 0) {
      logger.warn(`[${requestId}] Workflow validation warnings`, {
        warnings: validation.warnings,
      })
    }

    // Ensure all blocks have required fields
    Object.entries(finalWorkflowState.blocks).forEach(([blockId, block]) => {
      const blockData = block as any
      if (!blockData.id) blockData.id = blockId
      if (!blockData.position) {
        blockData.position = { x: 0, y: 0 }
      }
      if (blockData.enabled === undefined) {
        blockData.enabled = true
      }
      if (blockData.horizontalHandles === undefined) {
        blockData.horizontalHandles = true
      }
      if (blockData.isWide === undefined) {
        blockData.isWide = false
      }
      if (blockData.height === undefined) {
        blockData.height = 0
      }
      if (!blockData.subBlocks) {
        blockData.subBlocks = {}
      }
      if (!blockData.outputs) {
        blockData.outputs = {}
      }
    })
    const blocks = Object.values(finalWorkflowState.blocks) as Array<{
      id: string
      type: string
      name: string
      position: { x: number; y: number }
      subBlocks?: Record<string, any>
      inputs?: Record<string, any>
      triggerMode?: boolean
      data?: Record<string, any>
      parentId?: string
      extent?: string
    }>
    const edges = finalWorkflowState.edges
    const warnings = conversionResult.warnings || []

    // Create workflow state
    const newWorkflowState: any = {
      blocks: {} as Record<string, any>,
      edges: [] as any[],
      loops: {} as Record<string, any>,
      parallels: {} as Record<string, any>,
      lastSaved: Date.now(),
      isDeployed: false,
      deployedAt: undefined,
      deploymentStatuses: {} as Record<string, any>,
      hasActiveSchedule: false,
      hasActiveWebhook: false,
    }

    // Process blocks with proper configuration setup and assign new IDs
    const blockIdMapping = new Map<string, string>()

    for (const block of blocks) {
      const newId = crypto.randomUUID()
      blockIdMapping.set(block.id, newId)

      // Get block configuration for proper setup
      const blockConfig = getBlock(block.type)

      if (!blockConfig && (block.type === 'loop' || block.type === 'parallel')) {
        // Handle loop/parallel blocks (they don't have regular block configs)
        // Preserve parentId if it exists (though loop/parallel shouldn't have parents)
        const containerData = block.data || {}
        if (block.parentId) {
          containerData.parentId = block.parentId
          containerData.extent = block.extent || 'parent'
        }

        newWorkflowState.blocks[newId] = {
          id: newId,
          type: block.type,
          name: block.name,
          position: block.position,
          subBlocks: {},
          outputs: {},
          enabled: true,
          horizontalHandles: true,
          isWide: false,
          advancedMode: false,
          height: 0,
          data: containerData,
        }
        logger.debug(`[${requestId}] Processed loop/parallel block: ${block.id} -> ${newId}`)
      } else if (blockConfig) {
        // Handle regular blocks with proper configuration
        const subBlocks: Record<string, any> = {}

        // Set up subBlocks from block configuration
        blockConfig.subBlocks.forEach((subBlock) => {
          subBlocks[subBlock.id] = {
            id: subBlock.id,
            type: subBlock.type,
            value: null,
          }
        })

        // Also ensure we have subBlocks for any existing subBlocks from conversion
        // This handles cases where hidden fields or dynamic configurations exist
        if (block.subBlocks) {
          Object.keys(block.subBlocks).forEach((subBlockKey) => {
            if (!subBlocks[subBlockKey]) {
              subBlocks[subBlockKey] = {
                id: subBlockKey,
                type: block.subBlocks![subBlockKey].type || 'short-input',
                value: block.subBlocks![subBlockKey].value || null,
              }
            }
          })
        }

        // Handle blocks that have inputs instead of subBlocks (from YAML/copilot format)
        // This is especially important for trigger configuration
        if (block.inputs) {
          Object.entries(block.inputs).forEach(([inputKey, inputValue]) => {
            const matchingSubBlock = blockConfig.subBlocks.find((sb) => sb.id === inputKey)
            if (!subBlocks[inputKey]) {
              subBlocks[inputKey] = {
                id: inputKey,
                type:
                  matchingSubBlock?.type ||
                  (inputKey === 'triggerConfig' ? 'trigger-config' : 'short-input'),
                value: inputValue,
              }
            } else if (inputValue !== undefined) {
              subBlocks[inputKey].value = inputValue
            }
          })
        }

        // Set up outputs from block configuration
        const outputs = resolveOutputType(blockConfig.outputs)

        // Preserve parentId if it exists in the imported block
        const blockData = block.data || {}
        if (block.parentId) {
          blockData.parentId = block.parentId
          blockData.extent = block.extent || 'parent'
        }

        newWorkflowState.blocks[newId] = {
          id: newId,
          type: block.type,
          name: block.name,
          position: block.position,
          subBlocks,
          outputs,
          enabled: true,
          horizontalHandles: true,
          isWide: false,
          advancedMode: false,
          height: 0,
          triggerMode: block.triggerMode || false, // Preserve triggerMode from imported block
          data: blockData,
        }

        logger.debug(`[${requestId}] Processed regular block: ${block.id} -> ${newId}`, {
          blockType: block.type,
          hasTriggerMode: block.triggerMode,
          hasInputs: !!block.inputs,
          inputKeys: block.inputs ? Object.keys(block.inputs) : [],
          subBlockKeys: Object.keys(subBlocks),
        })
      } else {
        logger.warn(`[${requestId}] Unknown block type: ${block.type}`)
      }
    }

    // Set subblock values with block reference mapping
    for (const block of blocks) {
      const newId = blockIdMapping.get(block.id)
      if (!newId || !newWorkflowState.blocks[newId]) continue

      if (block.subBlocks && typeof block.subBlocks === 'object') {
        Object.entries(block.subBlocks).forEach(([key, subBlock]: [string, any]) => {
          if (newWorkflowState.blocks[newId].subBlocks[key] && subBlock.value !== undefined) {
            // Update block references in values to use new mapped IDs
            const processedValue = updateBlockReferences(subBlock.value, blockIdMapping, requestId)
            newWorkflowState.blocks[newId].subBlocks[key].value = processedValue
          }
        })
      }
    }

    // Update parent-child relationships with mapped IDs
    logger.info(`[${requestId}] Block ID mapping:`, Object.fromEntries(blockIdMapping))
    for (const [newId, blockData] of Object.entries(newWorkflowState.blocks)) {
      const block = blockData as any
      if (block.data?.parentId) {
        logger.info(
          `[${requestId}] Found child block ${block.name} with parentId: ${block.data.parentId}`
        )
        const mappedParentId = blockIdMapping.get(block.data.parentId)
        if (mappedParentId) {
          logger.info(
            `[${requestId}] Updating parent reference: ${block.data.parentId} -> ${mappedParentId}`
          )
          block.data.parentId = mappedParentId
          // Ensure extent is set for child blocks
          if (!block.data.extent) {
            block.data.extent = 'parent'
          }
        } else {
          logger.error(
            `[${requestId}] ❌ Parent block not found for mapping: ${block.data.parentId}`
          )
          logger.error(`[${requestId}] Available mappings:`, Array.from(blockIdMapping.keys()))
          // Remove invalid parent reference
          block.data.parentId = undefined
          block.data.extent = undefined
        }
      }
    }

    // Process edges with mapped IDs and handles
    for (const edge of edges) {
      const sourceId = blockIdMapping.get(edge.source)
      const targetId = blockIdMapping.get(edge.target)

      if (sourceId && targetId) {
        const newEdgeId = crypto.randomUUID()
        newWorkflowState.edges.push({
          id: newEdgeId,
          source: sourceId,
          target: targetId,
          sourceHandle: edge.sourceHandle,
          targetHandle: edge.targetHandle,
          type: edge.type || 'default',
        })
      } else {
        logger.warn(
          `[${requestId}] Skipping edge - missing blocks: ${edge.source} -> ${edge.target}`
        )
      }
    }
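
The same ID mapping drives the edge rewiring above; a small standalone sketch with hypothetical IDs showing why edges whose endpoints are unknown get dropped:

```typescript
const blockIdMapping = new Map([
  ['start', 'uuid-a'],
  ['agent-1', 'uuid-b'],
])

const importedEdges = [
  { source: 'start', target: 'agent-1', sourceHandle: 'source', targetHandle: 'target' },
  { source: 'agent-1', target: 'missing-block' }, // skipped with a warning in the route
]

const remapped = importedEdges.flatMap((edge) => {
  const source = blockIdMapping.get(edge.source)
  const target = blockIdMapping.get(edge.target)
  return source && target ? [{ ...edge, source, target }] : []
})
// remapped.length === 1
```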
    // Generate loop and parallel configurations
    const loops = generateLoopBlocks(newWorkflowState.blocks)
    const parallels = generateParallelBlocks(newWorkflowState.blocks)
    newWorkflowState.loops = loops
    newWorkflowState.parallels = parallels

    logger.info(`[${requestId}] Generated workflow state`, {
      blocksCount: Object.keys(newWorkflowState.blocks).length,
      edgesCount: newWorkflowState.edges.length,
      loopsCount: Object.keys(loops).length,
      parallelsCount: Object.keys(parallels).length,
    })

    // Apply intelligent autolayout if requested
    if (applyAutoLayout) {
      try {
        logger.info(`[${requestId}] Applying autolayout`)

        // Create workflow state for autolayout
        const workflowStateForLayout = {
          blocks: newWorkflowState.blocks,
          edges: newWorkflowState.edges,
          loops: newWorkflowState.loops || {},
          parallels: newWorkflowState.parallels || {},
        }

        const autoLayoutOptions = {
          strategy: 'smart' as const,
          direction: 'auto' as const,
          spacing: {
            horizontal: 500,
            vertical: 400,
            layer: 700,
          },
          alignment: 'center' as const,
          padding: {
            x: 250,
            y: 250,
          },
        }

        // Gather block registry and utilities for sim-agent
        const blocks = getAllBlocks()
        const blockRegistry = blocks.reduce(
          (acc, block) => {
            const blockType = block.type
            acc[blockType] = {
              ...block,
              id: blockType,
              subBlocks: block.subBlocks || [],
              outputs: block.outputs || {},
            } as any
            return acc
          },
          {} as Record<string, BlockConfig>
        )

        const autoLayoutResult = await simAgentClient.makeRequest('/api/yaml/autolayout', {
          body: {
            workflowState: workflowStateForLayout,
            options: autoLayoutOptions,
            blockRegistry,
            utilities: {
              generateLoopBlocks: generateLoopBlocks.toString(),
              generateParallelBlocks: generateParallelBlocks.toString(),
              resolveOutputType: resolveOutputType.toString(),
            },
          },
        })

        if (autoLayoutResult.success && autoLayoutResult.data?.workflowState) {
          newWorkflowState.blocks = autoLayoutResult.data.workflowState.blocks
        } else {
          logger.warn(
            `[${requestId}] Auto layout failed, using original positions:`,
            autoLayoutResult.error
          )
        }
        logger.info(`[${requestId}] Autolayout completed successfully`)
      } catch (layoutError) {
        logger.warn(`[${requestId}] Autolayout failed, using original positions:`, layoutError)
      }
    }

    // Sanitize custom tools in agent blocks before saving
    const { blocks: sanitizedBlocks, warnings: sanitationWarnings } = sanitizeAgentToolsInBlocks(
      newWorkflowState.blocks
    )
    if (sanitationWarnings.length > 0) {
      logger.warn(`[${requestId}] Tool sanitation produced ${sanitationWarnings.length} warning(s)`)
    }
    newWorkflowState.blocks = sanitizedBlocks

    // Upsert custom tools from blocks
    await upsertCustomToolsFromBlocks(userId, newWorkflowState.blocks, requestId)

    // Save to database
    const saveResult = await saveWorkflowToNormalizedTables(workflowId, newWorkflowState)

    if (!saveResult.success) {
      logger.error(`[${requestId}] Failed to save workflow state:`, saveResult.error)
      return NextResponse.json({
        success: false,
        message: `Database save failed: ${saveResult.error || 'Unknown error'}`,
        errors: [saveResult.error || 'Database save failed'],
        warnings: [...warnings, ...sanitationWarnings],
      })
    }

    // Update workflow's lastSynced timestamp
    await db
      .update(workflowTable)
      .set({
        lastSynced: new Date(),
        updatedAt: new Date(),
      })
      .where(eq(workflowTable.id, workflowId))

    // Notify socket server for real-time collaboration (for copilot and editor)
    if (source === 'copilot' || source === 'editor') {
      try {
        const socketUrl = env.SOCKET_SERVER_URL || 'http://localhost:3002'
        await fetch(`${socketUrl}/api/copilot-workflow-edit`, {
          method: 'POST',
          headers: { 'Content-Type': 'application/json' },
          body: JSON.stringify({
            workflowId,
            description: description || `${source} edited workflow`,
          }),
        })
        logger.info(`[${requestId}] Notified socket server`)
      } catch (socketError) {
        logger.warn(`[${requestId}] Failed to notify socket server:`, socketError)
      }
    }

    const elapsed = Date.now() - startTime
    const totalBlocksInWorkflow = Object.keys(newWorkflowState.blocks).length
    const summary = `Successfully saved workflow with ${totalBlocksInWorkflow} blocks and ${newWorkflowState.edges.length} connections.`

    logger.info(`[${requestId}] YAML workflow save completed in ${elapsed}ms`, {
      success: true,
      blocksCount: totalBlocksInWorkflow,
      edgesCount: newWorkflowState.edges.length,
    })

    return NextResponse.json({
      success: true,
      message: description ? `Workflow updated: ${description}` : 'Workflow updated successfully',
      summary,
      data: {
        blocksCount: totalBlocksInWorkflow,
        edgesCount: newWorkflowState.edges.length,
        loopsCount: Object.keys(loops).length,
        parallelsCount: Object.keys(parallels).length,
      },
      errors: [],
      warnings: [...warnings, ...sanitationWarnings],
    })
  } catch (error) {
    const elapsed = Date.now() - startTime
    logger.error(`[${requestId}] YAML workflow save failed in ${elapsed}ms:`, error)

    if (error instanceof z.ZodError) {
      return NextResponse.json(
        {
          success: false,
          message: 'Invalid request data',
          errors: error.errors.map((e) => `${e.path.join('.')}: ${e.message}`),
          warnings: [],
        },
        { status: 400 }
      )
    }

    return NextResponse.json(
      {
        success: false,
        message: `Failed to save YAML workflow: ${error instanceof Error ? error.message : 'Unknown error'}`,
        errors: [error instanceof Error ? error.message : 'Unknown error'],
        warnings: [],
      },
      { status: 500 }
    )
  }
}
@@ -25,6 +25,37 @@ export async function POST(request: NextRequest) {
      )
    }

    // Ensure loop blocks have their data populated with defaults
    if (workflowState.blocks) {
      Object.entries(workflowState.blocks).forEach(([blockId, block]: [string, any]) => {
        if (block.type === 'loop') {
          // Ensure data field exists
          if (!block.data) {
            block.data = {}
          }

          // Apply defaults if not set
          if (!block.data.loopType) {
            block.data.loopType = 'for'
          }
          if (!block.data.count && block.data.count !== 0) {
            block.data.count = 5
          }
          if (!block.data.collection) {
            block.data.collection = ''
          }
          if (!block.data.maxConcurrency) {
            block.data.maxConcurrency = 1
          }

          logger.debug(`[${requestId}] Applied defaults to loop block ${blockId}:`, {
            loopType: block.data.loopType,
            count: block.data.count,
          })
        }
      })
    }

    // Gather block registry and utilities for sim-agent
    const blocks = getAllBlocks()
    const blockRegistry = blocks.reduce(
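
The defaulting logic above (shared verbatim by this POST handler and the GET handler in the next hunk) can be summarized as a small helper; a sketch using the same field names and the exact conditions from the diff:

```typescript
function applyLoopDefaults(block: { type: string; data?: Record<string, any> }) {
  if (block.type !== 'loop') return block
  if (!block.data) block.data = {}
  if (!block.data.loopType) block.data.loopType = 'for'
  // An explicit count of 0 is preserved; only a missing/falsy count falls back to 5.
  if (!block.data.count && block.data.count !== 0) block.data.count = 5
  if (!block.data.collection) block.data.collection = ''
  if (!block.data.maxConcurrency) block.data.maxConcurrency = 1
  return block
}

// applyLoopDefaults({ type: 'loop' }).data
// -> { loopType: 'for', count: 5, collection: '', maxConcurrency: 1 }
```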
@@ -119,6 +119,37 @@ export async function GET(request: NextRequest) {
      )
    }

    // Ensure loop blocks have their data populated with defaults
    if (workflowState.blocks) {
      Object.entries(workflowState.blocks).forEach(([blockId, block]: [string, any]) => {
        if (block.type === 'loop') {
          // Ensure data field exists
          if (!block.data) {
            block.data = {}
          }

          // Apply defaults if not set
          if (!block.data.loopType) {
            block.data.loopType = 'for'
          }
          if (!block.data.count && block.data.count !== 0) {
            block.data.count = 5
          }
          if (!block.data.collection) {
            block.data.collection = ''
          }
          if (!block.data.maxConcurrency) {
            block.data.maxConcurrency = 1
          }

          logger.debug(`[${requestId}] Applied defaults to loop block ${blockId}:`, {
            loopType: block.data.loopType,
            count: block.data.count,
          })
        }
      })
    }

    // Gather block registry and utilities for sim-agent
    const blocks = getAllBlocks()
    const blockRegistry = blocks.reduce(

@@ -1,25 +1,11 @@
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { SIM_AGENT_API_URL_DEFAULT } from '@/lib/sim-agent'
import { generateRequestId } from '@/lib/utils'
import { getAllBlocks } from '@/blocks/registry'
import type { BlockConfig } from '@/blocks/types'
import { resolveOutputType } from '@/blocks/utils'
import {
  convertLoopBlockToLoop,
  convertParallelBlockToParallel,
  findAllDescendantNodes,
  findChildNodes,
  generateLoopBlocks,
  generateParallelBlocks,
} from '@/stores/workflows/workflow/utils'
import { applyAutoLayout } from '@/lib/workflows/autolayout'

const logger = createLogger('YamlAutoLayoutAPI')

const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT

const AutoLayoutRequestSchema = z.object({
  workflowState: z.object({
    blocks: z.record(z.any()),
@@ -60,148 +46,54 @@ export async function POST(request: NextRequest) {
      blockCount: Object.keys(workflowState.blocks).length,
      edgeCount: workflowState.edges.length,
      strategy: options?.strategy || 'smart',
      simAgentUrl: SIM_AGENT_API_URL,
    })

    // Gather block registry and utilities
    const blocks = getAllBlocks()
    const blockRegistry = blocks.reduce(
      (acc, block) => {
        const blockType = block.type
        acc[blockType] = {
          ...block,
          id: blockType,
          subBlocks: block.subBlocks || [],
          outputs: block.outputs || {},
        } as any
        return acc
    const autoLayoutOptions = {
      horizontalSpacing: options?.spacing?.horizontal || 550,
      verticalSpacing: options?.spacing?.vertical || 200,
      padding: {
        x: options?.padding?.x || 150,
        y: options?.padding?.y || 150,
      },
      {} as Record<string, BlockConfig>
    )

    // Log sample block data for debugging
    const sampleBlockId = Object.keys(workflowState.blocks)[0]
    if (sampleBlockId) {
      logger.info(`[${requestId}] Sample block data:`, {
        blockId: sampleBlockId,
        blockType: workflowState.blocks[sampleBlockId].type,
        hasPosition: !!workflowState.blocks[sampleBlockId].position,
        position: workflowState.blocks[sampleBlockId].position,
      })
      alignment: options?.alignment || 'center',
    }

    logger.info(`[${requestId}] Calling sim-agent autolayout with strategy:`, {
      strategy: options?.strategy || 'smart (default)',
      direction: options?.direction || 'auto (default)',
      spacing: options?.spacing,
      alignment: options?.alignment || 'center (default)',
    })
    const layoutResult = applyAutoLayout(
      workflowState.blocks,
      workflowState.edges,
      workflowState.loops || {},
      workflowState.parallels || {},
      autoLayoutOptions
    )

    // Call sim-agent API
    const response = await fetch(`${SIM_AGENT_API_URL}/api/yaml/autolayout`, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
      },
      body: JSON.stringify({
        workflowState: {
          blocks: workflowState.blocks,
          edges: workflowState.edges,
          loops: workflowState.loops || {},
          parallels: workflowState.parallels || {},
        },
        options: {
          strategy: 'smart',
          direction: 'auto',
          spacing: {
            horizontal: 500,
            vertical: 400,
            layer: 700,
          },
          alignment: 'center',
          padding: {
            x: 250,
            y: 250,
          },
          ...options, // Allow override of defaults
        },
        blockRegistry,

        utilities: {
          generateLoopBlocks: generateLoopBlocks.toString(),
          generateParallelBlocks: generateParallelBlocks.toString(),
          resolveOutputType: resolveOutputType.toString(),
          convertLoopBlockToLoop: convertLoopBlockToLoop.toString(),
          convertParallelBlockToParallel: convertParallelBlockToParallel.toString(),
          findChildNodes: findChildNodes.toString(),
          findAllDescendantNodes: findAllDescendantNodes.toString(),
        },
      }),
    })

    if (!response.ok) {
      const errorText = await response.text()

      // Try to parse the error as JSON for better error messages
      let errorMessage = `Sim agent API error: ${response.statusText}`

      // Check if it's a 404 error
      if (response.status === 404) {
        errorMessage =
          'Auto-layout endpoint not found on sim agent. Please ensure the /api/yaml/autolayout endpoint is implemented in the sim agent service.'
      } else {
        try {
          const errorJson = JSON.parse(errorText)
          if (errorJson.errors && Array.isArray(errorJson.errors)) {
            errorMessage = errorJson.errors.join(', ')
          } else if (errorJson.error) {
            errorMessage = errorJson.error
          }
        } catch (e) {
          // If not JSON, use the raw text
          errorMessage = errorText || errorMessage
        }
      }

      logger.error(`[${requestId}] Sim agent API error:`, {
        status: response.status,
        error: errorText,
        parsedError: errorMessage,
    if (!layoutResult.success || !layoutResult.blocks) {
      logger.error(`[${requestId}] Auto layout failed:`, {
        error: layoutResult.error,
      })

      return NextResponse.json(
        { success: false, errors: [errorMessage] },
        { status: response.status }
        {
          success: false,
          errors: [layoutResult.error || 'Unknown auto layout error'],
        },
        { status: 500 }
      )
    }

    const result = await response.json()

    logger.info(`[${requestId}] Sim agent response summary:`, {
      success: result.success,
      hasBlocks: !!result.blocks,
      blockCount: result.blocks ? Object.keys(result.blocks).length : 0,
      responseKeys: Object.keys(result),
    logger.info(`[${requestId}] Auto layout completed successfully:`, {
      success: true,
      blockCount: Object.keys(layoutResult.blocks).length,
    })

    // Transform the response to match the expected format
    const transformedResponse = {
      success: result.success,
      success: true,
      workflowState: {
        blocks: result.blocks || {},
        edges: workflowState.edges || [],
        blocks: layoutResult.blocks,
        edges: workflowState.edges,
        loops: workflowState.loops || {},
        parallels: workflowState.parallels || {},
      },
      errors: result.errors,
    }

    logger.info(`[${requestId}] Transformed response:`, {
      success: transformedResponse.success,
      blockCount: Object.keys(transformedResponse.workflowState.blocks).length,
      hasWorkflowState: true,
    })

    return NextResponse.json(transformedResponse)
  } catch (error) {
    logger.error(`[${requestId}] Auto layout failed:`, error)
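
After this rewrite the endpoint computes the layout locally and responds with the laid-out state directly. A sketch of a client-side call against the route, with the response shape taken from `transformedResponse` above (the URL and fetch usage are assumptions, not confirmed API surface):

```typescript
async function requestAutoLayout(workflowState: {
  blocks: Record<string, any>
  edges: any[]
  loops?: Record<string, any>
  parallels?: Record<string, any>
}) {
  const res = await fetch('/api/yaml/autolayout', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ workflowState, options: { alignment: 'center' } }),
  })

  // Response shape taken from `transformedResponse` above.
  return (await res.json()) as {
    success: boolean
    workflowState?: {
      blocks: Record<string, any>
      edges: any[]
      loops: Record<string, any>
      parallels: Record<string, any>
    }
    errors?: string[]
  }
}
```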
@@ -1,450 +0,0 @@
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { SIM_AGENT_API_URL_DEFAULT } from '@/lib/sim-agent'
import { generateRequestId } from '@/lib/utils'
import { validateWorkflowState } from '@/lib/workflows/validation'
import { getAllBlocks } from '@/blocks/registry'
import type { BlockConfig } from '@/blocks/types'
import { resolveOutputType } from '@/blocks/utils'
import {
  convertLoopBlockToLoop,
  convertParallelBlockToParallel,
  findAllDescendantNodes,
  findChildNodes,
  generateLoopBlocks,
  generateParallelBlocks,
} from '@/stores/workflows/workflow/utils'

const logger = createLogger('YamlDiffCreateAPI')

const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT

const CreateDiffRequestSchema = z.object({
  yamlContent: z.string().min(1),
  diffAnalysis: z
    .object({
      new_blocks: z.array(z.string()),
      edited_blocks: z.array(z.string()),
      deleted_blocks: z.array(z.string()),
      field_diffs: z
        .record(
          z.object({
            changed_fields: z.array(z.string()),
            unchanged_fields: z.array(z.string()),
          })
        )
        .optional(),
      edge_diff: z
        .object({
          new_edges: z.array(z.string()),
          deleted_edges: z.array(z.string()),
          unchanged_edges: z.array(z.string()),
        })
        .optional(),
    })
    .optional(),
  options: z
    .object({
      applyAutoLayout: z.boolean().optional(),
      layoutOptions: z.any().optional(),
    })
    .optional(),
  currentWorkflowState: z
    .object({
      blocks: z.record(z.any()),
      edges: z.array(z.any()),
      loops: z.record(z.any()).optional(),
      parallels: z.record(z.any()).optional(),
    })
    .optional(),
})

/**
 * Convert blocks with 'inputs' field to standard 'subBlocks' structure
 * This handles trigger blocks that may come from YAML/copilot with legacy format
 */
function normalizeBlockStructure(blocks: Record<string, any>): Record<string, any> {
  const normalizedBlocks: Record<string, any> = {}

  for (const [blockId, block] of Object.entries(blocks)) {
    const normalizedBlock = { ...block }

    // Check if this is a trigger block with 'inputs' field
    if (
      block.inputs &&
      (block.type === 'api_trigger' ||
        block.type === 'input_trigger' ||
        block.type === 'starter' ||
        block.type === 'chat_trigger' ||
        block.type === 'generic_webhook')
    ) {
      // Convert inputs.inputFormat to subBlocks.inputFormat
      if (block.inputs.inputFormat) {
        if (!normalizedBlock.subBlocks) {
          normalizedBlock.subBlocks = {}
        }

        normalizedBlock.subBlocks.inputFormat = {
          id: 'inputFormat',
          type: 'input-format',
          value: block.inputs.inputFormat,
        }
      }

      // Copy any other inputs fields to subBlocks
      for (const [inputKey, inputValue] of Object.entries(block.inputs)) {
        if (inputKey !== 'inputFormat' && !normalizedBlock.subBlocks[inputKey]) {
          normalizedBlock.subBlocks[inputKey] = {
            id: inputKey,
            type: 'short-input', // Default type, may need adjustment based on actual field
            value: inputValue,
          }
        }
      }

      // Remove the inputs field after conversion
      normalizedBlock.inputs = undefined
    }

    normalizedBlocks[blockId] = normalizedBlock
  }

  return normalizedBlocks
}

export async function POST(request: NextRequest) {
  const requestId = generateRequestId()

  logger.info(`[${requestId}] ===== YAML DIFF CREATE API CALLED =====`)

  try {
    const body = await request.json()
    logger.info(`[${requestId}] Request body received, parsing...`)
    const { yamlContent, diffAnalysis, options } = CreateDiffRequestSchema.parse(body)
    logger.info(`[${requestId}] Request parsed successfully`)

    // Get current workflow state for comparison
    // Note: This endpoint is stateless, so we need to get this from the request
    const currentWorkflowState = (body as any).currentWorkflowState

    // Ensure currentWorkflowState has all required properties with proper defaults if provided
    if (currentWorkflowState) {
      if (!currentWorkflowState.loops) {
        currentWorkflowState.loops = {}
      }
      if (!currentWorkflowState.parallels) {
        currentWorkflowState.parallels = {}
      }
    }

    logger.info(`[${requestId}] Creating diff from YAML`, {
      contentLength: yamlContent.length,
      hasDiffAnalysis: !!diffAnalysis,
      hasOptions: !!options,
      options: options,
      hasCurrentWorkflowState: !!currentWorkflowState,
      currentBlockCount: currentWorkflowState
        ? Object.keys(currentWorkflowState.blocks || {}).length
        : 0,
    })

    // Gather block registry
    const blocks = getAllBlocks()
    const blockRegistry = blocks.reduce(
      (acc, block) => {
        const blockType = block.type
        acc[blockType] = {
          ...block,
          id: blockType,
          subBlocks: block.subBlocks || [],
          outputs: block.outputs || {},
        } as any
        return acc
      },
      {} as Record<string, BlockConfig>
    )

    // Call sim-agent API
    const response = await fetch(`${SIM_AGENT_API_URL}/api/yaml/diff/create`, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
      },
      body: JSON.stringify({
        yamlContent,
        diffAnalysis,
        blockRegistry,
        currentWorkflowState, // Pass current state for comparison

        utilities: {
          generateLoopBlocks: generateLoopBlocks.toString(),
          generateParallelBlocks: generateParallelBlocks.toString(),
          resolveOutputType: resolveOutputType.toString(),
          convertLoopBlockToLoop: convertLoopBlockToLoop.toString(),
          convertParallelBlockToParallel: convertParallelBlockToParallel.toString(),
          findChildNodes: findChildNodes.toString(),
          findAllDescendantNodes: findAllDescendantNodes.toString(),
        },
        options,
      }),
    })

    if (!response.ok) {
      const errorText = await response.text()
      logger.error(`[${requestId}] Sim agent API error:`, {
        status: response.status,
        error: errorText,
      })
      return NextResponse.json(
        { success: false, errors: [`Sim agent API error: ${response.statusText}`] },
        { status: response.status }
      )
    }

    const result = await response.json()

    // Log the full response to see if auto-layout is happening
    logger.info(`[${requestId}] Full sim agent response:`, JSON.stringify(result, null, 2))
|
||||
|
||||
// Log detailed block information to debug parent-child relationships
|
||||
if (result.success) {
|
||||
const blocks = result.diff?.proposedState?.blocks || result.blocks || {}
|
||||
logger.info(`[${requestId}] Sim agent blocks with parent-child info:`)
|
||||
Object.entries(blocks).forEach(([blockId, block]: [string, any]) => {
|
||||
if (block.data?.parentId || block.parentId) {
|
||||
logger.info(`[${requestId}] Child block ${blockId} (${block.name}):`, {
|
||||
type: block.type,
|
||||
parentId: block.data?.parentId || block.parentId,
|
||||
extent: block.data?.extent || block.extent,
|
||||
hasDataField: !!block.data,
|
||||
dataKeys: block.data ? Object.keys(block.data) : [],
|
||||
})
|
||||
}
|
||||
if (block.type === 'loop' || block.type === 'parallel') {
|
||||
logger.info(`[${requestId}] Container block ${blockId} (${block.name}):`, {
|
||||
type: block.type,
|
||||
hasData: !!block.data,
|
||||
dataKeys: block.data ? Object.keys(block.data) : [],
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
// Log existing loops/parallels from sim-agent
|
||||
const loops = result.diff?.proposedState?.loops || result.loops || {}
|
||||
const parallels = result.diff?.proposedState?.parallels || result.parallels || {}
|
||||
logger.info(`[${requestId}] Sim agent loops:`, loops)
|
||||
logger.info(`[${requestId}] Sim agent parallels:`, parallels)
|
||||
}
|
||||
|
||||
// Log diff analysis specifically
|
||||
if (result.diff?.diffAnalysis) {
|
||||
logger.info(`[${requestId}] Diff analysis received:`, {
|
||||
new_blocks: result.diff.diffAnalysis.new_blocks || [],
|
||||
edited_blocks: result.diff.diffAnalysis.edited_blocks || [],
|
||||
deleted_blocks: result.diff.diffAnalysis.deleted_blocks || [],
|
||||
has_field_diffs: !!result.diff.diffAnalysis.field_diffs,
|
||||
has_edge_diff: !!result.diff.diffAnalysis.edge_diff,
|
||||
})
|
||||
} else {
|
||||
logger.warn(`[${requestId}] No diff analysis in response!`)
|
||||
}
|
||||
|
||||
// Post-process the result to ensure loops and parallels are properly generated
|
||||
const finalResult = result
|
||||
|
||||
if (result.success && result.diff?.proposedState) {
|
||||
// Normalize blocks that use 'inputs' field to standard 'subBlocks' structure
|
||||
if (result.diff.proposedState.blocks) {
|
||||
result.diff.proposedState.blocks = normalizeBlockStructure(result.diff.proposedState.blocks)
|
||||
}
|
||||
|
||||
// Validate the proposed workflow state
|
||||
const validation = validateWorkflowState(result.diff.proposedState, { sanitize: true })
|
||||
|
||||
if (!validation.valid) {
|
||||
logger.error(`[${requestId}] Proposed workflow state validation failed`, {
|
||||
errors: validation.errors,
|
||||
warnings: validation.warnings,
|
||||
})
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
errors: validation.errors,
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
// Use sanitized state if available
|
||||
if (validation.sanitizedState) {
|
||||
result.diff.proposedState = validation.sanitizedState
|
||||
}
|
||||
|
||||
if (validation.warnings.length > 0) {
|
||||
logger.warn(`[${requestId}] Proposed workflow validation warnings`, {
|
||||
warnings: validation.warnings,
|
||||
})
|
||||
// Include warnings in the response
|
||||
if (!result.warnings) {
|
||||
result.warnings = []
|
||||
}
|
||||
result.warnings.push(...validation.warnings)
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Successfully created diff with normalized and validated blocks`)
|
||||
|
||||
// First, fix parent-child relationships based on edges
|
||||
const blocks = result.diff.proposedState.blocks
|
||||
const edges = result.diff.proposedState.edges || []
|
||||
|
||||
// Find all loop and parallel blocks
|
||||
const containerBlocks = Object.values(blocks).filter(
|
||||
(block: any) => block.type === 'loop' || block.type === 'parallel'
|
||||
)
|
||||
|
||||
// For each container, find its children based on loop-start edges
|
||||
containerBlocks.forEach((container: any) => {
|
||||
// Log all edges from this container to debug
|
||||
const allEdgesFromContainer = edges.filter((edge: any) => edge.source === container.id)
|
||||
logger.info(
|
||||
`[${requestId}] All edges from container ${container.id}:`,
|
||||
allEdgesFromContainer.map((e: any) => ({
|
||||
id: e.id,
|
||||
sourceHandle: e.sourceHandle,
|
||||
target: e.target,
|
||||
}))
|
||||
)
|
||||
|
||||
const childEdges = edges.filter(
|
||||
(edge: any) => edge.source === container.id && edge.sourceHandle === 'loop-start-source'
|
||||
)
|
||||
|
||||
childEdges.forEach((edge: any) => {
|
||||
const childBlock = blocks[edge.target]
|
||||
if (childBlock) {
|
||||
// Ensure data field exists
|
||||
if (!childBlock.data) {
|
||||
childBlock.data = {}
|
||||
}
|
||||
// Set parentId and extent
|
||||
childBlock.data.parentId = container.id
|
||||
childBlock.data.extent = 'parent'
|
||||
|
||||
logger.info(`[${requestId}] Fixed parent-child relationship:`, {
|
||||
parent: container.id,
|
||||
parentName: container.name,
|
||||
child: childBlock.id,
|
||||
childName: childBlock.name,
|
||||
})
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
// Now regenerate loops and parallels with the fixed relationships
|
||||
const loops = generateLoopBlocks(result.diff.proposedState.blocks)
|
||||
const parallels = generateParallelBlocks(result.diff.proposedState.blocks)
|
||||
|
||||
result.diff.proposedState.loops = loops
|
||||
result.diff.proposedState.parallels = parallels
|
||||
|
||||
logger.info(`[${requestId}] Regenerated loops and parallels after fixing parent-child:`, {
|
||||
loopsCount: Object.keys(loops).length,
|
||||
parallelsCount: Object.keys(parallels).length,
|
||||
loops: Object.keys(loops).map((id) => ({
|
||||
id,
|
||||
nodes: loops[id].nodes,
|
||||
})),
|
||||
})
|
||||
}
|
||||
|
||||
// If the sim agent returned blocks directly (when auto-layout is applied),
|
||||
// transform it to the expected diff format
|
||||
if (result.success && result.blocks && !result.diff) {
|
||||
logger.info(`[${requestId}] Transforming sim agent blocks response to diff format`)
|
||||
|
||||
// Normalize blocks that use 'inputs' field to standard 'subBlocks' structure
|
||||
result.blocks = normalizeBlockStructure(result.blocks)
|
||||
|
||||
// First, fix parent-child relationships based on edges
|
||||
const blocks = result.blocks
|
||||
const edges = result.edges || []
|
||||
|
||||
// Find all loop and parallel blocks
|
||||
const containerBlocks = Object.values(blocks).filter(
|
||||
(block: any) => block.type === 'loop' || block.type === 'parallel'
|
||||
)
|
||||
|
||||
// For each container, find its children based on loop-start edges
|
||||
containerBlocks.forEach((container: any) => {
|
||||
const childEdges = edges.filter(
|
||||
(edge: any) => edge.source === container.id && edge.sourceHandle === 'loop-start-source'
|
||||
)
|
||||
|
||||
childEdges.forEach((edge: any) => {
|
||||
const childBlock = blocks[edge.target]
|
||||
if (childBlock) {
|
||||
// Ensure data field exists
|
||||
if (!childBlock.data) {
|
||||
childBlock.data = {}
|
||||
}
|
||||
// Set parentId and extent
|
||||
childBlock.data.parentId = container.id
|
||||
childBlock.data.extent = 'parent'
|
||||
|
||||
logger.info(`[${requestId}] Fixed parent-child relationship (auto-layout):`, {
|
||||
parent: container.id,
|
||||
parentName: container.name,
|
||||
child: childBlock.id,
|
||||
childName: childBlock.name,
|
||||
})
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
// Generate loops and parallels for the blocks with fixed relationships
|
||||
const loops = generateLoopBlocks(result.blocks)
|
||||
const parallels = generateParallelBlocks(result.blocks)
|
||||
|
||||
const transformedResult = {
|
||||
success: result.success,
|
||||
diff: {
|
||||
proposedState: {
|
||||
blocks: result.blocks,
|
||||
edges: result.edges || [],
|
||||
loops: loops,
|
||||
parallels: parallels,
|
||||
},
|
||||
diffAnalysis: diffAnalysis,
|
||||
metadata: result.metadata || {
|
||||
source: 'sim-agent',
|
||||
timestamp: Date.now(),
|
||||
},
|
||||
},
|
||||
errors: result.errors || [],
|
||||
}
|
||||
|
||||
return NextResponse.json(transformedResult)
|
||||
}
|
||||
|
||||
return NextResponse.json(finalResult)
|
||||
} catch (error) {
|
||||
logger.error(`[${requestId}] Diff creation failed:`, error)
|
||||
|
||||
if (error instanceof z.ZodError) {
|
||||
return NextResponse.json(
|
||||
{ success: false, errors: error.errors.map((e) => e.message) },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
errors: [error instanceof Error ? error.message : 'Unknown error'],
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -1,300 +0,0 @@
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { SIM_AGENT_API_URL_DEFAULT } from '@/lib/sim-agent'
import { generateRequestId } from '@/lib/utils'
import { getAllBlocks } from '@/blocks/registry'
import type { BlockConfig } from '@/blocks/types'
import { resolveOutputType } from '@/blocks/utils'
import {
  convertLoopBlockToLoop,
  convertParallelBlockToParallel,
  findAllDescendantNodes,
  findChildNodes,
  generateLoopBlocks,
  generateParallelBlocks,
} from '@/stores/workflows/workflow/utils'

const logger = createLogger('YamlDiffMergeAPI')

const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT

const MergeDiffRequestSchema = z.object({
  existingDiff: z.object({
    proposedState: z.object({
      blocks: z.record(z.any()),
      edges: z.array(z.any()),
      loops: z.record(z.any()).optional(),
      parallels: z.record(z.any()).optional(),
    }),
    diffAnalysis: z.any().optional(),
    metadata: z.object({
      source: z.string(),
      timestamp: z.number(),
    }),
  }),
  yamlContent: z.string().min(1),
  diffAnalysis: z.any().optional(),
  options: z
    .object({
      applyAutoLayout: z.boolean().optional(),
      layoutOptions: z.any().optional(),
    })
    .optional(),
})

export async function POST(request: NextRequest) {
  const requestId = generateRequestId()

  try {
    const body = await request.json()
    const { existingDiff, yamlContent, diffAnalysis, options } = MergeDiffRequestSchema.parse(body)

    // Ensure existingDiff.proposedState has all required properties with proper defaults
    if (!existingDiff.proposedState.loops) {
      existingDiff.proposedState.loops = {}
    }
    if (!existingDiff.proposedState.parallels) {
      existingDiff.proposedState.parallels = {}
    }

    logger.info(`[${requestId}] Merging diff from YAML`, {
      contentLength: yamlContent.length,
      existingBlockCount: Object.keys(existingDiff.proposedState.blocks).length,
      hasDiffAnalysis: !!diffAnalysis,
      hasOptions: !!options,
      options: options,
    })

    // Gather block registry
    const blocks = getAllBlocks()
    const blockRegistry = blocks.reduce(
      (acc, block) => {
        const blockType = block.type
        acc[blockType] = {
          ...block,
          id: blockType,
          subBlocks: block.subBlocks || [],
          outputs: block.outputs || {},
        } as any
        return acc
      },
      {} as Record<string, BlockConfig>
    )

    // Call sim-agent API
    const response = await fetch(`${SIM_AGENT_API_URL}/api/yaml/diff/merge`, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
      },
      body: JSON.stringify({
        existingDiff,
        yamlContent,
        diffAnalysis,
        blockRegistry,

        utilities: {
          generateLoopBlocks: generateLoopBlocks.toString(),
          generateParallelBlocks: generateParallelBlocks.toString(),
          resolveOutputType: resolveOutputType.toString(),
          convertLoopBlockToLoop: convertLoopBlockToLoop.toString(),
          convertParallelBlockToParallel: convertParallelBlockToParallel.toString(),
          findChildNodes: findChildNodes.toString(),
          findAllDescendantNodes: findAllDescendantNodes.toString(),
        },
        options,
      }),
    })

    if (!response.ok) {
      const errorText = await response.text()
      logger.error(`[${requestId}] Sim agent API error:`, {
        status: response.status,
        error: errorText,
      })
      return NextResponse.json(
        { success: false, errors: [`Sim agent API error: ${response.statusText}`] },
        { status: response.status }
      )
    }

    const result = await response.json()

    // Log the full response to see if auto-layout is happening
    logger.info(`[${requestId}] Full sim agent response:`, JSON.stringify(result, null, 2))

    // Log detailed block information to debug parent-child relationships
    if (result.success) {
      const blocks = result.diff?.proposedState?.blocks || result.blocks || {}
      logger.info(`[${requestId}] Sim agent blocks with parent-child info:`)
      Object.entries(blocks).forEach(([blockId, block]: [string, any]) => {
        if (block.data?.parentId || block.parentId) {
          logger.info(`[${requestId}] Child block ${blockId} (${block.name}):`, {
            type: block.type,
            parentId: block.data?.parentId || block.parentId,
            extent: block.data?.extent || block.extent,
            hasDataField: !!block.data,
            dataKeys: block.data ? Object.keys(block.data) : [],
          })
        }
        if (block.type === 'loop' || block.type === 'parallel') {
          logger.info(`[${requestId}] Container block ${blockId} (${block.name}):`, {
            type: block.type,
            hasData: !!block.data,
            dataKeys: block.data ? Object.keys(block.data) : [],
          })
        }
      })

      // Log existing loops/parallels from sim-agent
      const loops = result.diff?.proposedState?.loops || result.loops || {}
      const parallels = result.diff?.proposedState?.parallels || result.parallels || {}
      logger.info(`[${requestId}] Sim agent loops:`, loops)
      logger.info(`[${requestId}] Sim agent parallels:`, parallels)
    }

    // Post-process the result to ensure loops and parallels are properly generated
    const finalResult = result

    if (result.success && result.diff?.proposedState) {
      // First, fix parent-child relationships based on edges
      const blocks = result.diff.proposedState.blocks
      const edges = result.diff.proposedState.edges || []

      // Find all loop and parallel blocks
      const containerBlocks = Object.values(blocks).filter(
        (block: any) => block.type === 'loop' || block.type === 'parallel'
      )

      // For each container, find its children based on loop-start edges
      containerBlocks.forEach((container: any) => {
        const childEdges = edges.filter(
          (edge: any) => edge.source === container.id && edge.sourceHandle === 'loop-start-source'
        )

        childEdges.forEach((edge: any) => {
          const childBlock = blocks[edge.target]
          if (childBlock) {
            // Ensure data field exists
            if (!childBlock.data) {
              childBlock.data = {}
            }
            // Set parentId and extent
            childBlock.data.parentId = container.id
            childBlock.data.extent = 'parent'

            logger.info(`[${requestId}] Fixed parent-child relationship:`, {
              parent: container.id,
              parentName: container.name,
              child: childBlock.id,
              childName: childBlock.name,
            })
          }
        })
      })

      // Now regenerate loops and parallels with the fixed relationships
      const loops = generateLoopBlocks(result.diff.proposedState.blocks)
      const parallels = generateParallelBlocks(result.diff.proposedState.blocks)

      result.diff.proposedState.loops = loops
      result.diff.proposedState.parallels = parallels

      logger.info(`[${requestId}] Regenerated loops and parallels after fixing parent-child:`, {
        loopsCount: Object.keys(loops).length,
        parallelsCount: Object.keys(parallels).length,
        loops: Object.keys(loops).map((id) => ({
          id,
          nodes: loops[id].nodes,
        })),
      })
    }

    // If the sim agent returned blocks directly (when auto-layout is applied),
    // transform it to the expected diff format
    if (result.success && result.blocks && !result.diff) {
      logger.info(`[${requestId}] Transforming sim agent blocks response to diff format`)

      // First, fix parent-child relationships based on edges
      const blocks = result.blocks
      const edges = result.edges || []

      // Find all loop and parallel blocks
      const containerBlocks = Object.values(blocks).filter(
        (block: any) => block.type === 'loop' || block.type === 'parallel'
      )

      // For each container, find its children based on loop-start edges
      containerBlocks.forEach((container: any) => {
        const childEdges = edges.filter(
          (edge: any) => edge.source === container.id && edge.sourceHandle === 'loop-start-source'
        )

        childEdges.forEach((edge: any) => {
          const childBlock = blocks[edge.target]
          if (childBlock) {
            // Ensure data field exists
            if (!childBlock.data) {
              childBlock.data = {}
            }
            // Set parentId and extent
            childBlock.data.parentId = container.id
            childBlock.data.extent = 'parent'

            logger.info(`[${requestId}] Fixed parent-child relationship (auto-layout):`, {
              parent: container.id,
              parentName: container.name,
              child: childBlock.id,
              childName: childBlock.name,
            })
          }
        })
      })

      // Generate loops and parallels for the blocks with fixed relationships
      const loops = generateLoopBlocks(result.blocks)
      const parallels = generateParallelBlocks(result.blocks)

      const transformedResult = {
        success: result.success,
        diff: {
          proposedState: {
            blocks: result.blocks,
            edges: result.edges || existingDiff.proposedState.edges || [],
            loops: loops,
            parallels: parallels,
          },
          diffAnalysis: diffAnalysis,
          metadata: result.metadata || {
            source: 'sim-agent',
            timestamp: Date.now(),
          },
        },
        errors: result.errors || [],
      }

      return NextResponse.json(transformedResult)
    }

    return NextResponse.json(finalResult)
  } catch (error) {
    logger.error(`[${requestId}] Diff merge failed:`, error)

    if (error instanceof z.ZodError) {
      return NextResponse.json(
        { success: false, errors: error.errors.map((e) => e.message) },
        { status: 400 }
      )
    }

    return NextResponse.json(
      {
        success: false,
        errors: [error instanceof Error ? error.message : 'Unknown error'],
      },
      { status: 500 }
    )
  }
}
@@ -1,99 +0,0 @@
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { SIM_AGENT_API_URL_DEFAULT } from '@/lib/sim-agent'
import { generateRequestId } from '@/lib/utils'
import { getAllBlocks } from '@/blocks/registry'
import type { BlockConfig } from '@/blocks/types'
import { resolveOutputType } from '@/blocks/utils'
import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/workflow/utils'

const logger = createLogger('YamlGenerateAPI')

const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT

const GenerateRequestSchema = z.object({
  workflowState: z.any(), // Let the yaml service handle validation
  subBlockValues: z.record(z.record(z.any())).optional(),
})

export async function POST(request: NextRequest) {
  const requestId = generateRequestId()

  try {
    const body = await request.json()
    const { workflowState, subBlockValues } = GenerateRequestSchema.parse(body)

    logger.info(`[${requestId}] Generating YAML from workflow`, {
      blocksCount: workflowState.blocks ? Object.keys(workflowState.blocks).length : 0,
      edgesCount: workflowState.edges ? workflowState.edges.length : 0,
    })

    // Gather block registry and utilities
    const blocks = getAllBlocks()
    const blockRegistry = blocks.reduce(
      (acc, block) => {
        const blockType = block.type
        acc[blockType] = {
          ...block,
          id: blockType,
          subBlocks: block.subBlocks || [],
          outputs: block.outputs || {},
        } as any
        return acc
      },
      {} as Record<string, BlockConfig>
    )

    // Call sim-agent API
    const response = await fetch(`${SIM_AGENT_API_URL}/api/workflow/to-yaml`, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
      },
      body: JSON.stringify({
        workflowState,
        subBlockValues,
        blockRegistry,
        utilities: {
          generateLoopBlocks: generateLoopBlocks.toString(),
          generateParallelBlocks: generateParallelBlocks.toString(),
          resolveOutputType: resolveOutputType.toString(),
        },
      }),
    })

    if (!response.ok) {
      const errorText = await response.text()
      logger.error(`[${requestId}] Sim agent API error:`, {
        status: response.status,
        error: errorText,
      })
      return NextResponse.json(
        { success: false, error: `Sim agent API error: ${response.statusText}` },
        { status: response.status }
      )
    }

    const result = await response.json()
    return NextResponse.json(result)
  } catch (error) {
    logger.error(`[${requestId}] YAML generation failed:`, error)

    if (error instanceof z.ZodError) {
      return NextResponse.json(
        { success: false, error: error.errors.map((e) => e.message).join(', ') },
        { status: 400 }
      )
    }

    return NextResponse.json(
      {
        success: false,
        error: error instanceof Error ? error.message : 'Unknown error',
      },
      { status: 500 }
    )
  }
}
@@ -1,45 +0,0 @@
import { NextResponse } from 'next/server'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { SIM_AGENT_API_URL_DEFAULT } from '@/lib/sim-agent'
import { generateRequestId } from '@/lib/utils'

const logger = createLogger('YamlHealthAPI')

const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT

export async function GET() {
  const requestId = generateRequestId()

  try {
    logger.info(`[${requestId}] Checking YAML service health`)

    // Check sim-agent health
    const response = await fetch(`${SIM_AGENT_API_URL}/health`, {
      method: 'GET',
      headers: {
        'Content-Type': 'application/json',
      },
    })

    const isHealthy = response.ok

    return NextResponse.json({
      success: true,
      healthy: isHealthy,
      service: 'yaml',
    })
  } catch (error) {
    logger.error(`[${requestId}] YAML health check failed:`, error)

    return NextResponse.json(
      {
        success: false,
        healthy: false,
        service: 'yaml',
        error: error instanceof Error ? error.message : 'Unknown error',
      },
      { status: 500 }
    )
  }
}
@@ -1,96 +0,0 @@
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { SIM_AGENT_API_URL_DEFAULT } from '@/lib/sim-agent'
import { generateRequestId } from '@/lib/utils'
import { getAllBlocks } from '@/blocks/registry'
import type { BlockConfig } from '@/blocks/types'
import { resolveOutputType } from '@/blocks/utils'
import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/workflow/utils'

const logger = createLogger('YamlParseAPI')

const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT

const ParseRequestSchema = z.object({
  yamlContent: z.string(),
})

export async function POST(request: NextRequest) {
  const requestId = generateRequestId()

  try {
    const body = await request.json()
    const { yamlContent } = ParseRequestSchema.parse(body)

    logger.info(`[${requestId}] Parsing YAML`, {
      contentLength: yamlContent.length,
    })

    // Gather block registry and utilities
    const blocks = getAllBlocks()
    const blockRegistry = blocks.reduce(
      (acc, block) => {
        const blockType = block.type
        acc[blockType] = {
          ...block,
          id: blockType,
          subBlocks: block.subBlocks || [],
          outputs: block.outputs || {},
        } as any
        return acc
      },
      {} as Record<string, BlockConfig>
    )

    // Call sim-agent API
    const response = await fetch(`${SIM_AGENT_API_URL}/api/yaml/parse`, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
      },
      body: JSON.stringify({
        yamlContent,
        blockRegistry,
        utilities: {
          generateLoopBlocks: generateLoopBlocks.toString(),
          generateParallelBlocks: generateParallelBlocks.toString(),
          resolveOutputType: resolveOutputType.toString(),
        },
      }),
    })

    if (!response.ok) {
      const errorText = await response.text()
      logger.error(`[${requestId}] Sim agent API error:`, {
        status: response.status,
        error: errorText,
      })
      return NextResponse.json(
        { success: false, errors: [`Sim agent API error: ${response.statusText}`] },
        { status: response.status }
      )
    }

    const result = await response.json()
    return NextResponse.json(result)
  } catch (error) {
    logger.error(`[${requestId}] YAML parse failed:`, error)

    if (error instanceof z.ZodError) {
      return NextResponse.json(
        { success: false, errors: error.errors.map((e) => e.message) },
        { status: 400 }
      )
    }

    return NextResponse.json(
      {
        success: false,
        errors: [error instanceof Error ? error.message : 'Unknown error'],
      },
      { status: 500 }
    )
  }
}
@@ -1,106 +0,0 @@
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { SIM_AGENT_API_URL_DEFAULT } from '@/lib/sim-agent'
import { generateRequestId } from '@/lib/utils'
import { getAllBlocks } from '@/blocks/registry'
import type { BlockConfig } from '@/blocks/types'
import { resolveOutputType } from '@/blocks/utils'
import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/workflow/utils'

const logger = createLogger('YamlToWorkflowAPI')

const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT

const ConvertRequestSchema = z.object({
  yamlContent: z.string().min(1),
  options: z
    .object({
      generateNewIds: z.boolean().optional(),
      preservePositions: z.boolean().optional(),
      existingBlocks: z.record(z.any()).optional(),
    })
    .optional(),
})

export async function POST(request: NextRequest) {
  const requestId = generateRequestId()

  try {
    const body = await request.json()
    const { yamlContent, options } = ConvertRequestSchema.parse(body)

    logger.info(`[${requestId}] Converting YAML to workflow`, {
      contentLength: yamlContent.length,
      hasOptions: !!options,
    })

    // Gather block registry and utilities
    const blocks = getAllBlocks()
    const blockRegistry = blocks.reduce(
      (acc, block) => {
        const blockType = block.type
        acc[blockType] = {
          ...block,
          id: blockType,
          subBlocks: block.subBlocks || [],
          outputs: block.outputs || {},
        } as any
        return acc
      },
      {} as Record<string, BlockConfig>
    )

    // Call sim-agent API
    const response = await fetch(`${SIM_AGENT_API_URL}/api/yaml/to-workflow`, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
      },
      body: JSON.stringify({
        yamlContent,
        blockRegistry,
        utilities: {
          generateLoopBlocks: generateLoopBlocks.toString(),
          generateParallelBlocks: generateParallelBlocks.toString(),
          resolveOutputType: resolveOutputType.toString(),
        },
        options,
      }),
    })

    if (!response.ok) {
      const errorText = await response.text()
      logger.error(`[${requestId}] Sim agent API error:`, {
        status: response.status,
        error: errorText,
      })
      return NextResponse.json(
        { success: false, errors: [`Sim agent API error: ${response.statusText}`], warnings: [] },
        { status: response.status }
      )
    }

    const result = await response.json()
    return NextResponse.json(result)
  } catch (error) {
    logger.error(`[${requestId}] YAML to workflow conversion failed:`, error)

    if (error instanceof z.ZodError) {
      return NextResponse.json(
        { success: false, errors: error.errors.map((e) => e.message), warnings: [] },
        { status: 400 }
      )
    }

    return NextResponse.json(
      {
        success: false,
        errors: [error instanceof Error ? error.message : 'Unknown error'],
        warnings: [],
      },
      { status: 500 }
    )
  }
}
@@ -1,10 +1,11 @@
'use client'

import { useState } from 'react'
import { Upload } from 'lucide-react'
import { ArrowDownToLine } from 'lucide-react'
import { Button } from '@/components/ui/button'
import { Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui/tooltip'
import { createLogger } from '@/lib/logs/console/logger'
import { useWorkflowJsonStore } from '@/stores/workflows/json/store'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'

const logger = createLogger('ExportControls')
@@ -16,6 +17,7 @@ interface ExportControlsProps {
export function ExportControls({ disabled = false }: ExportControlsProps) {
  const [isExporting, setIsExporting] = useState(false)
  const { workflows, activeWorkflowId } = useWorkflowRegistry()
  const { getJson } = useWorkflowJsonStore()

  const currentWorkflow = activeWorkflowId ? workflows[activeWorkflowId] : null

@@ -35,7 +37,7 @@ export function ExportControls({ disabled = false }: ExportControlsProps) {
    }
  }

  const handleExportYaml = async () => {
  const handleExportJson = async () => {
    if (!currentWorkflow || !activeWorkflowId) {
      logger.warn('No active workflow to export')
      return
@@ -43,25 +45,18 @@ export function ExportControls({ disabled = false }: ExportControlsProps) {

    setIsExporting(true)
    try {
      // Use the new database-based export endpoint
      const response = await fetch(`/api/workflows/yaml/export?workflowId=${activeWorkflowId}`)
      // Get the JSON from the store
      const jsonContent = await getJson()

      if (!response.ok) {
        const errorData = await response.json().catch(() => null)
        throw new Error(errorData?.error || `Failed to export YAML: ${response.statusText}`)
      if (!jsonContent) {
        throw new Error('Failed to generate JSON')
      }

      const result = await response.json()

      if (!result.success || !result.yaml) {
        throw new Error(result.error || 'Failed to export YAML')
      }

      const filename = `${currentWorkflow.name.replace(/[^a-z0-9]/gi, '-')}.yaml`
      downloadFile(result.yaml, filename, 'text/yaml')
      logger.info('Workflow exported as YAML from database')
      const filename = `${currentWorkflow.name.replace(/[^a-z0-9]/gi, '-')}.json`
      downloadFile(jsonContent, filename, 'application/json')
      logger.info('Workflow exported as JSON')
    } catch (error) {
      logger.error('Failed to export workflow as YAML:', error)
      logger.error('Failed to export workflow as JSON:', error)
    } finally {
      setIsExporting(false)
    }
@@ -73,26 +68,21 @@ export function ExportControls({ disabled = false }: ExportControlsProps) {
    if (disabled) return 'Export not available'
    if (!currentWorkflow) return 'No workflow to export'
    if (isExporting) return 'Exporting...'
    return 'Export as YAML'
    return 'Export workflow as JSON'
  }

  return (
    <Tooltip>
      <TooltipTrigger asChild>
        {isDisabled ? (
          <div className='inline-flex h-12 w-12 cursor-not-allowed items-center justify-center rounded-[11px] border bg-card text-card-foreground opacity-50 shadow-xs transition-colors'>
            <Upload className='h-4 w-4' />
          </div>
        ) : (
          <Button
            variant='outline'
            onClick={handleExportYaml}
            className='h-12 w-12 rounded-[11px] border bg-card text-card-foreground shadow-xs hover:bg-secondary'
          >
            <Upload className='h-5 w-5' />
            <span className='sr-only'>Export as YAML</span>
          </Button>
        )}
        <Button
          variant='outline'
          onClick={handleExportJson}
          disabled={isDisabled}
          className='h-12 w-12 rounded-[11px] border bg-card text-card-foreground shadow-xs hover:bg-secondary'
        >
          <ArrowDownToLine className='h-5 w-5' />
          <span className='sr-only'>Export</span>
        </Button>
      </TooltipTrigger>
      <TooltipContent>{getTooltipText()}</TooltipContent>
    </Tooltip>

@@ -213,7 +213,7 @@ export function DiffControls() {
          const b = blocks[bi]
          if (b?.type === 'tool_call') {
            const tn = b.toolCall?.name
            if (tn === 'build_workflow' || tn === 'edit_workflow') {
            if (tn === 'edit_workflow') {
              id = b.toolCall?.id
              break outer
            }
@@ -221,9 +221,7 @@
        }
      }
      if (!id) {
        const candidates = Object.values(toolCallsById).filter(
          (t) => t.name === 'build_workflow' || t.name === 'edit_workflow'
        )
        const candidates = Object.values(toolCallsById).filter((t) => t.name === 'edit_workflow')
        id = candidates.length ? candidates[candidates.length - 1].id : undefined
      }
      if (id) updatePreviewToolCallState('accepted', id)
@@ -264,7 +262,7 @@ export function DiffControls() {
          const b = blocks[bi]
          if (b?.type === 'tool_call') {
            const tn = b.toolCall?.name
            if (tn === 'build_workflow' || tn === 'edit_workflow') {
            if (tn === 'edit_workflow') {
              id = b.toolCall?.id
              break outer
            }
@@ -272,9 +270,7 @@
        }
      }
      if (!id) {
        const candidates = Object.values(toolCallsById).filter(
          (t) => t.name === 'build_workflow' || t.name === 'edit_workflow'
        )
        const candidates = Object.values(toolCallsById).filter((t) => t.name === 'edit_workflow')
        id = candidates.length ? candidates[candidates.length - 1].id : undefined
      }
      if (id) updatePreviewToolCallState('rejected', id)

@@ -64,7 +64,7 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
    const { getPreviewByToolCall, getLatestPendingPreview } = usePreviewStore()

    // Import COPILOT_TOOL_IDS - placing it here since it's needed in multiple functions
    const WORKFLOW_TOOL_NAMES = ['build_workflow', 'edit_workflow']
    const WORKFLOW_TOOL_NAMES = ['edit_workflow']

    // Get checkpoints for this message if it's a user message
    const messageCheckpoints = isUser ? allMessageCheckpoints[message.id] || [] : []
@@ -118,7 +118,7 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
        .map((block) => (block as any).toolCall),
    ]

    // Find workflow tools (build_workflow or edit_workflow)
    // Find workflow tools (edit_workflow)
    const workflowTools = allToolCalls.filter((toolCall) =>
      WORKFLOW_TOOL_NAMES.includes(toolCall?.name)
    )

@@ -1789,12 +1789,13 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
      { value: 'gpt-4.1', label: 'gpt-4.1' },
      { value: 'o3', label: 'o3' },
      { value: 'claude-4-sonnet', label: 'claude-4-sonnet' },
      { value: 'claude-4.5-sonnet', label: 'claude-4.5-sonnet' },
      { value: 'claude-4.1-opus', label: 'claude-4.1-opus' },
    ] as const

    const getCollapsedModeLabel = () => {
      const model = modelOptions.find((m) => m.value === selectedModel)
      return model ? model.label : 'GPT-5 Default'
      return model ? model.label : 'Claude 4.5 Sonnet'
    }

    const getModelIcon = () => {
@@ -1806,7 +1807,9 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
      if (['gpt-5-high', 'o3', 'claude-4.1-opus'].includes(selectedModel)) {
        return <BrainCircuit className={`h-3 w-3 ${colorClass}`} />
      }
      if (['gpt-5', 'gpt-5-medium', 'claude-4-sonnet'].includes(selectedModel)) {
      if (
        ['gpt-5', 'gpt-5-medium', 'claude-4-sonnet', 'claude-4.5-sonnet'].includes(selectedModel)
      ) {
        return <Brain className={`h-3 w-3 ${colorClass}`} />
      }
      if (['gpt-4o', 'gpt-4.1', 'gpt-5-fast'].includes(selectedModel)) {
@@ -3222,9 +3225,12 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
                              )
                            }
                            if (
                              ['gpt-5', 'gpt-5-medium', 'claude-4-sonnet'].includes(
                                modelValue
                              )
                              [
                                'gpt-5',
                                'gpt-5-medium',
                                'claude-4-sonnet',
                                'claude-4.5-sonnet',
                              ].includes(modelValue)
                            ) {
                              return <Brain className='h-3 w-3 text-muted-foreground' />
                            }
@@ -3293,9 +3299,11 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
                          <div className='space-y-0.5'>
                            {modelOptions
                              .filter((option) =>
                                ['claude-4-sonnet', 'claude-4.1-opus'].includes(
                                  option.value
                                )
                                [
                                  'claude-4-sonnet',
                                  'claude-4.5-sonnet',
                                  'claude-4.1-opus',
                                ].includes(option.value)
                              )
                              .map(renderModelOption)}
                          </div>

@@ -17,7 +17,6 @@ import type {
  MessageFileAttachment,
  UserInputRef,
} from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/user-input/user-input'
import { COPILOT_TOOL_IDS } from '@/stores/copilot/constants'
import { usePreviewStore } from '@/stores/copilot/preview-store'
import { useCopilotStore } from '@/stores/copilot/store'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
@@ -292,29 +291,6 @@ export const Copilot = forwardRef<CopilotRef, CopilotProps>(({ panelWidth }, ref
    }
  }, [isSendingMessage, abortMessage])

  // Watch for completed preview_workflow tool calls in the new format
  useEffect(() => {
    if (!messages.length) return

    const lastMessage = messages[messages.length - 1]
    if (lastMessage.role !== 'assistant' || !lastMessage.toolCalls) return

    // Check for completed preview_workflow tool calls
    const previewToolCall = lastMessage.toolCalls.find(
      (tc) =>
        tc.name === COPILOT_TOOL_IDS.BUILD_WORKFLOW &&
        tc.state === 'success' &&
        !isToolCallSeen(tc.id)
    )

    if (previewToolCall) {
      logger.info('Preview workflow completed via native SSE')
      // Mark as seen to prevent duplicate processing
      markToolCallAsSeen(previewToolCall.id)
      // Tool call handling logic would go here if needed
    }
  }, [messages, isToolCallSeen, markToolCallAsSeen])

  // Handle new chat creation
  const handleStartNewChat = useCallback(() => {
    // Preview clearing is now handled automatically by the copilot store

@@ -28,6 +28,7 @@ import { ScrollArea } from '@/components/ui/scroll-area'
import { Tabs, TabsContent, TabsList, TabsTrigger } from '@/components/ui/tabs'
import { Textarea } from '@/components/ui/textarea'
import { cn } from '@/lib/utils'
import { sanitizeForCopilot } from '@/lib/workflows/json-sanitizer'
import { formatEditSequence } from '@/lib/workflows/training/compute-edit-sequence'
import { useCopilotTrainingStore } from '@/stores/copilot-training/store'

@@ -102,73 +103,19 @@ export function TrainingModal() {

  const sendToIndexer = async (dataset: any) => {
    try {
      // Extract subblock values from the workflow states
      const extractSubBlockValues = (state: any) => {
        const subBlockValues: Record<string, Record<string, any>> = {}
      // Sanitize workflow states to remove UI-specific data (positions, lastSaved, etc)
      const sanitizedInput = sanitizeForCopilot(dataset.startState)
      const sanitizedOutput = sanitizeForCopilot(dataset.endState)

        if (state.blocks) {
          for (const [blockId, block] of Object.entries(state.blocks)) {
            if ((block as any).subBlocks) {
              const blockSubValues: Record<string, any> = {}
              for (const [subBlockId, subBlock] of Object.entries((block as any).subBlocks)) {
                if ((subBlock as any).value !== undefined) {
                  blockSubValues[subBlockId] = (subBlock as any).value
                }
              }
              if (Object.keys(blockSubValues).length > 0) {
                subBlockValues[blockId] = blockSubValues
              }
            }
          }
        }

        return subBlockValues
      }

      const startSubBlockValues = extractSubBlockValues(dataset.startState)
      const endSubBlockValues = extractSubBlockValues(dataset.endState)

      // Convert both states to YAML in parallel
      const [startYamlResponse, endYamlResponse] = await Promise.all([
        fetch('/api/workflows/yaml/convert', {
          method: 'POST',
          headers: { 'Content-Type': 'application/json' },
          body: JSON.stringify({
            workflowState: dataset.startState,
            subBlockValues: startSubBlockValues,
          }),
        }),
        fetch('/api/workflows/yaml/convert', {
          method: 'POST',
          headers: { 'Content-Type': 'application/json' },
          body: JSON.stringify({
            workflowState: dataset.endState,
            subBlockValues: endSubBlockValues,
          }),
        }),
      ])

      if (!startYamlResponse.ok) {
        throw new Error('Failed to convert start state to YAML')
      }
      if (!endYamlResponse.ok) {
        throw new Error('Failed to convert end state to YAML')
      }

      const [startResult, endResult] = await Promise.all([
        startYamlResponse.json(),
        endYamlResponse.json(),
      ])

      // Now send to the indexer with YAML states
      // Send to the indexer with sanitized JSON workflow states
      const response = await fetch('/api/copilot/training', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({
          title: dataset.title,
          prompt: dataset.prompt,
          input: startResult.yaml, // YAML string
          output: endResult.yaml, // YAML string
          input: sanitizedInput,
          output: sanitizedOutput,
          operations: dataset.editSequence,
        }),
      })

@@ -57,11 +57,14 @@ export const WorkflowEdge = ({
    sourceHandle?: string | null,
    targetHandle?: string | null
  ): string => {
    // The sim agent generates edge identifiers in the format: sourceId-source-targetId-target
    return `${sourceId}-source-${targetId}-target`
    // The diff analysis generates edge identifiers in the format: sourceId-sourceHandle-targetId-targetHandle
    // Use actual handle names, defaulting to 'source' and 'target' if not provided
    const actualSourceHandle = sourceHandle || 'source'
    const actualTargetHandle = targetHandle || 'target'
    return `${sourceId}-${actualSourceHandle}-${targetId}-${actualTargetHandle}`
  }

  // Generate edge identifier using the exact same logic as the sim agent
  // Generate edge identifier using the exact same logic as the diff engine
  const edgeIdentifier = generateEdgeIdentity(source, target, sourceHandle, targetHandle)

  // Debug logging to understand what's happening
@@ -117,8 +120,12 @@ export const WorkflowEdge = ({
  // Determine edge diff status
  let edgeDiffStatus: EdgeDiffStatus = null

  // Check if edge is directly marked as deleted (for reconstructed edges)
  if (data?.isDeleted) {
    edgeDiffStatus = 'deleted'
  }
  // Only attempt to determine diff status if all required data is available
  if (diffAnalysis?.edge_diff && edgeIdentifier && isDiffReady) {
  else if (diffAnalysis?.edge_diff && edgeIdentifier && isDiffReady) {
    if (isShowingDiff) {
      // In diff view, show new edges
      if (diffAnalysis.edge_diff.new_edges.includes(edgeIdentifier)) {

@@ -1,5 +1,5 @@
import { dump as yamlDump } from 'js-yaml'
import { createLogger } from '@/lib/logs/console/logger'
import { useWorkflowJsonStore } from '@/stores/workflows/json/store'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
@@ -70,34 +70,11 @@ export function generateFullWorkflowData() {
 */
export async function exportWorkflow(format: EditorFormat): Promise<string> {
  try {
    if (format === 'yaml') {
      // Get the active workflow ID from registry
      const { activeWorkflowId } = useWorkflowRegistry.getState()

      if (!activeWorkflowId) {
        throw new Error('No active workflow to export')
      }

      // Call the new database-based export endpoint
      const response = await fetch(`/api/workflows/yaml/export?workflowId=${activeWorkflowId}`)

      if (!response.ok) {
        const errorData = await response.json().catch(() => null)
        throw new Error(errorData?.error || `Failed to generate YAML: ${response.statusText}`)
      }

      const result = await response.json()

      if (!result.success || !result.yaml) {
        throw new Error(result.error || 'Failed to generate YAML')
      }
      return result.yaml
    }
    // Generate full JSON format
    const fullData = generateFullWorkflowData()
    return JSON.stringify(fullData, null, 2)
    // Always use JSON format now
    const { getJson } = useWorkflowJsonStore.getState()
    return await getJson()
  } catch (error) {
    logger.error(`Failed to export workflow as ${format}:`, error)
    logger.error(`Failed to export workflow:`, error)
    throw error
  }
}
@@ -106,11 +83,6 @@ export async function exportWorkflow(format: EditorFormat): Promise<string> {
 * Parse workflow content based on format
 */
export async function parseWorkflowContent(content: string, format: EditorFormat): Promise<any> {
  if (format === 'yaml') {
    // For now, we'll parse YAML on the server when it's being saved
    // The workflow-text-editor should handle the actual conversion
    throw new Error('YAML parsing should be handled by the server when saving the workflow')
  }
  return JSON.parse(content)
}

@@ -122,22 +94,6 @@ export function convertBetweenFormats(
  fromFormat: EditorFormat,
  toFormat: EditorFormat
): string {
  if (fromFormat === toFormat) return content

  try {
    const parsed = parseWorkflowContent(content, fromFormat)

    if (toFormat === 'yaml') {
      return yamlDump(parsed, {
        indent: 2,
        lineWidth: -1,
        noRefs: true,
        sortKeys: false,
      })
    }
    return JSON.stringify(parsed, null, 2)
  } catch (error) {
    logger.error(`Failed to convert from ${fromFormat} to ${toFormat}:`, error)
    throw error
  }
  // Always JSON now
  return content
}

@@ -16,10 +16,7 @@ import { createLogger } from '@/lib/logs/console/logger'
import { cn } from '@/lib/utils'
import { applyWorkflowDiff } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-text-editor/workflow-applier'
import { exportWorkflow } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-text-editor/workflow-exporter'
import {
  type EditorFormat,
  WorkflowTextEditor,
} from '@/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-text-editor/workflow-text-editor'
import { WorkflowTextEditor } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-text-editor/workflow-text-editor'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'

const logger = createLogger('WorkflowTextEditorModal')
@@ -34,7 +31,6 @@ export function WorkflowTextEditorModal({
  className,
}: WorkflowTextEditorModalProps) {
  const [isOpen, setIsOpen] = useState(false)
  const [format, setFormat] = useState<EditorFormat>('yaml')
  const [initialContent, setInitialContent] = useState('')
  const [isLoading, setIsLoading] = useState(false)

@@ -44,47 +40,31 @@
  useEffect(() => {
    if (isOpen && activeWorkflowId) {
      setIsLoading(true)
      exportWorkflow(format)
      exportWorkflow('json')
        .then((content) => {
          setInitialContent(content)
        })
        .catch((error) => {
          logger.error('Failed to export workflow:', error)
          setInitialContent('# Error loading workflow content')
          setInitialContent('// Error loading workflow content')
        })
        .finally(() => {
          setIsLoading(false)
        })
    }
  }, [isOpen, format, activeWorkflowId])

  // Handle format changes
  const handleFormatChange = useCallback((newFormat: EditorFormat) => {
    setFormat(newFormat)
  }, [])
  }, [isOpen, activeWorkflowId])

  // Handle save operation
  const handleSave = useCallback(
    async (content: string, contentFormat: EditorFormat) => {
      console.log('🔥 WorkflowTextEditorModal.handleSave called!', {
        contentFormat,
        contentLength: content.length,
        activeWorkflowId,
      })

    async (content: string) => {
      if (!activeWorkflowId) {
        return { success: false, errors: ['No active workflow'] }
      }

      try {
        logger.info('Applying workflow changes from text editor', { format: contentFormat })
        logger.info('Applying workflow changes from JSON editor')

        console.log('🔥 About to call applyWorkflowDiff!', { contentFormat })

        // Apply changes using the simplified approach
        const applyResult = await applyWorkflowDiff(content, contentFormat)

        console.log('🔥 applyWorkflowDiff returned!', { success: applyResult.success })
        const applyResult = await applyWorkflowDiff(content, 'json')

        if (applyResult.success) {
          logger.info('Successfully applied workflow changes', {
@@ -93,7 +73,7 @@

          // Update initial content to reflect current state
          try {
            const updatedContent = await exportWorkflow(contentFormat)
            const updatedContent = await exportWorkflow('json')
            setInitialContent(updatedContent)
          } catch (error) {
            logger.error('Failed to refresh content after save:', error)
@@ -158,10 +138,10 @@

      <DialogContent className='flex h-[85vh] w-[90vw] max-w-6xl flex-col p-0'>
        <DialogHeader className='flex-shrink-0 border-b px-6 py-4'>
          <DialogTitle>Workflow Text Editor</DialogTitle>
          <DialogTitle>Workflow JSON Editor</DialogTitle>
          <DialogDescription>
            Edit your workflow as YAML or JSON. Changes will completely replace the current workflow
            when you save.
            Edit your workflow as JSON. Changes will completely replace the current workflow when
            you save.
          </DialogDescription>
        </DialogHeader>

@@ -176,9 +156,7 @@
          ) : (
            <WorkflowTextEditor
              initialValue={initialContent}
              format={format}
              onSave={handleSave}
              onFormatChange={handleFormatChange}
              disabled={isDisabled}
              className='h-full rounded-none border-0'
            />

@@ -1,11 +1,9 @@
'use client'

import { useCallback, useEffect, useState } from 'react'
import { dump as yamlDump, load as yamlLoad } from 'js-yaml'
import { AlertCircle, Check, FileCode, Save } from 'lucide-react'
import { Alert, AlertDescription } from '@/components/ui/alert'
import { Button } from '@/components/ui/button'
import { Tabs, TabsList, TabsTrigger } from '@/components/ui/tabs'
import { Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui/tooltip'
import { createLogger } from '@/lib/logs/console/logger'
import { cn } from '@/lib/utils'
@@ -13,7 +11,7 @@ import { CodeEditor } from '../workflow-block/components/sub-block/components/to

const logger = createLogger('WorkflowTextEditor')

export type EditorFormat = 'yaml' | 'json'
export type EditorFormat = 'json'

interface ValidationError {
  line?: number
@@ -23,26 +21,18 @@ interface ValidationError {

interface WorkflowTextEditorProps {
  initialValue: string
  format: EditorFormat
  onSave: (
    content: string,
    format: EditorFormat
  ) => Promise<{ success: boolean; errors?: string[]; warnings?: string[] }>
  onFormatChange?: (format: EditorFormat) => void
  onSave: (content: string) => Promise<{ success: boolean; errors?: string[]; warnings?: string[] }>
  className?: string
  disabled?: boolean
}

export function WorkflowTextEditor({
  initialValue,
  format,
  onSave,
  onFormatChange,
  className,
  disabled = false,
}: WorkflowTextEditorProps) {
  const [content, setContent] = useState(initialValue)
  const [currentFormat, setCurrentFormat] = useState<EditorFormat>(format)
  const [validationErrors, setValidationErrors] = useState<ValidationError[]>([])
  const [isSaving, setIsSaving] = useState(false)
  const [saveResult, setSaveResult] = useState<{
@@ -52,25 +42,19 @@ export function WorkflowTextEditor({
  } | null>(null)
  const [hasUnsavedChanges, setHasUnsavedChanges] = useState(false)

  // Validate content based on format
  const validateSyntax = useCallback((text: string, fmt: EditorFormat): ValidationError[] => {
  // Validate JSON syntax
  const validateSyntax = useCallback((text: string): ValidationError[] => {
    const errors: ValidationError[] = []

    if (!text.trim()) {
      return errors // Empty content is valid
      return errors
    }

    try {
      if (fmt === 'yaml') {
        // Basic YAML syntax validation using js-yaml
        yamlLoad(text)
      } else if (fmt === 'json') {
        JSON.parse(text)
      }
      JSON.parse(text)
    } catch (error: any) {
      const errorMessage = error instanceof Error ? error.message : 'Parse error'

      // Extract line/column info if available
      const lineMatch = errorMessage.match(/line (\d+)/i)
      const columnMatch = errorMessage.match(/column (\d+)/i)

@@ -84,39 +68,6 @@ export function WorkflowTextEditor({
    return errors
  }, [])

  // Convert between formats
  const convertFormat = useCallback(
    (text: string, fromFormat: EditorFormat, toFormat: EditorFormat): string => {
      if (fromFormat === toFormat || !text.trim()) {
        return text
      }

      try {
        let parsed: any

        if (fromFormat === 'yaml') {
          // Use basic YAML parsing for synchronous conversion
          parsed = yamlLoad(text)
        } else {
          parsed = JSON.parse(text)
        }

        if (toFormat === 'yaml') {
          return yamlDump(parsed, {
            indent: 2,
            lineWidth: -1,
            noRefs: true,
          })
        }
        return JSON.stringify(parsed, null, 2)
      } catch (error) {
        logger.warn(`Failed to convert from ${fromFormat} to ${toFormat}:`, error)
        return text // Return original if conversion fails
      }
    },
    []
  )

  // Handle content changes
  const handleContentChange = useCallback(
    (newContent: string) => {
@@ -124,34 +75,13 @@ export function WorkflowTextEditor({
      setHasUnsavedChanges(newContent !== initialValue)

      // Validate on change
      const errors = validateSyntax(newContent, currentFormat)
      const errors = validateSyntax(newContent)
      setValidationErrors(errors)

      // Clear save result when editing
      setSaveResult(null)
    },
    [initialValue, currentFormat, validateSyntax]
  )

  // Handle format changes
  const handleFormatChange = useCallback(
    (newFormat: EditorFormat) => {
      if (newFormat === currentFormat) return

      // Convert content to new format
      const convertedContent = convertFormat(content, currentFormat, newFormat)

      setCurrentFormat(newFormat)
      setContent(convertedContent)

      // Validate converted content
      const errors = validateSyntax(convertedContent, newFormat)
      setValidationErrors(errors)

      // Notify parent
      onFormatChange?.(newFormat)
    },
    [content, currentFormat, convertFormat, validateSyntax, onFormatChange]
    [initialValue, validateSyntax]
  )

  // Handle save
@@ -165,7 +95,7 @@ export function WorkflowTextEditor({
    setSaveResult(null)

    try {
      const result = await onSave(content, currentFormat)
      const result = await onSave(content)
      setSaveResult(result)

      if (result.success) {
@@ -183,7 +113,7 @@ export function WorkflowTextEditor({
    } finally {
      setIsSaving(false)
    }
  }, [content, currentFormat, validationErrors, onSave])
  }, [content, validationErrors, onSave])

  // Update content when initialValue changes
  useEffect(() => {
@@ -196,9 +126,6 @@ export function WorkflowTextEditor({
  const isValid = validationErrors.length === 0
  const canSave = isValid && hasUnsavedChanges && !disabled

  // Get editor language for syntax highlighting
  const editorLanguage = currentFormat === 'yaml' ? 'javascript' : 'json' // yaml highlighting not available, use js

  return (
    <div className={cn('flex h-full flex-col bg-background', className)}>
      {/* Header with controls */}
@@ -206,22 +133,9 @@ export function WorkflowTextEditor({
      <div className='mb-3 flex items-center justify-between'>
        <div className='flex items-center gap-2'>
          <FileCode className='h-5 w-5' />
          <span className='font-semibold'>Workflow Text Editor</span>
          <span className='font-semibold'>Workflow JSON Editor</span>
        </div>
        <div className='flex items-center gap-2'>
          <Tabs
            value={currentFormat}
            onValueChange={(value) => handleFormatChange(value as EditorFormat)}
          >
            <TabsList className='grid w-fit grid-cols-2'>
              <TabsTrigger value='yaml' disabled={disabled}>
                YAML
              </TabsTrigger>
              <TabsTrigger value='json' disabled={disabled}>
                JSON
              </TabsTrigger>
            </TabsList>
          </Tabs>
          <Tooltip>
            <TooltipTrigger asChild>
              <Button
@@ -252,7 +166,7 @@ export function WorkflowTextEditor({
          {isValid ? (
            <div className='flex items-center gap-1 text-green-600'>
              <Check className='h-4 w-4' />
              Valid {currentFormat.toUpperCase()}
              Valid JSON
            </div>
          ) : (
            <div className='flex items-center gap-1 text-red-600'>
@@ -333,8 +247,8 @@ export function WorkflowTextEditor({
          <CodeEditor
            value={content}
            onChange={handleContentChange}
            language={editorLanguage}
            placeholder={`Enter ${currentFormat.toUpperCase()} workflow definition...`}
            language='json'
            placeholder='Enter JSON workflow definition...'
            className={cn(
              'h-full w-full overflow-auto rounded-md border',
              !isValid && 'border-red-500',

@@ -3,20 +3,20 @@ import { createLogger } from '@/lib/logs/console/logger'
const logger = createLogger('AutoLayoutUtils')

/**
 * Default auto layout options
 * Default auto layout options (now using native compact spacing)
 */
export const DEFAULT_AUTO_LAYOUT_OPTIONS: AutoLayoutOptions = {
  strategy: 'smart',
  direction: 'auto',
  spacing: {
    horizontal: 250,
    horizontal: 550,
    vertical: 200,
    layer: 350,
    layer: 550,
  },
  alignment: 'center',
  padding: {
    x: 125,
    y: 125,
    x: 150,
    y: 150,
  },
}


@@ -146,25 +146,54 @@ const WorkflowContent = React.memo(() => {

      // Parse deleted edge identifiers to reconstruct edges
      diffAnalysis.edge_diff.deleted_edges.forEach((edgeIdentifier) => {
        // Edge identifier format: "sourceId-source-targetId-target"
        // Parse this to extract the components
        const match = edgeIdentifier.match(/^([^-]+)-source-([^-]+)-target$/)
        if (match) {
          const [, sourceId, targetId] = match
        // Edge identifier format: "sourceId-sourceHandle-targetId-targetHandle"
        // Split by '-' and extract components
        const parts = edgeIdentifier.split('-')
        if (parts.length >= 4) {
          // Find the index where targetId starts (after the source handle)
          // We need to handle cases where IDs contain hyphens
          let sourceEndIndex = -1
          let targetStartIndex = -1

          // Only reconstruct if both blocks still exist
          if (blocks[sourceId] && blocks[targetId]) {
            // Generate a unique edge ID
            const edgeId = `deleted-edge-${sourceId}-${targetId}`
          // Look for valid handle names to identify boundaries
          const validHandles = ['source', 'target', 'success', 'error', 'default', 'condition']

            reconstructedEdges.push({
              id: edgeId,
              source: sourceId,
              target: targetId,
              sourceHandle: null, // Default handle
              targetHandle: null, // Default handle
              type: 'workflowEdge',
            })
          for (let i = 1; i < parts.length - 1; i++) {
            if (validHandles.includes(parts[i])) {
              sourceEndIndex = i
              // Find the next part that could be the start of targetId
              for (let j = i + 1; j < parts.length - 1; j++) {
                // Check if this could be a valid target ID start
                if (parts[j].length > 0) {
                  targetStartIndex = j
                  break
                }
              }
              break
            }
          }

          if (sourceEndIndex > 0 && targetStartIndex > 0) {
            const sourceId = parts.slice(0, sourceEndIndex).join('-')
            const sourceHandle = parts[sourceEndIndex]
            const targetHandle = parts[parts.length - 1]
            const targetId = parts.slice(targetStartIndex, parts.length - 1).join('-')

            // Only reconstruct if both blocks still exist
            if (blocks[sourceId] && blocks[targetId]) {
              // Generate a unique edge ID
              const edgeId = `deleted-${sourceId}-${sourceHandle}-${targetId}-${targetHandle}`

              reconstructedEdges.push({
                id: edgeId,
                source: sourceId,
                target: targetId,
                sourceHandle,
                targetHandle,
                type: 'workflowEdge',
                data: { isDeleted: true }, // Mark as deleted for styling
              })
            }
          }
        }
      })

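The replacement parser above boils down to: split on `-`, find the first token that matches a known handle name, and treat everything before it as the source ID and everything between it and the final token as the target ID. A minimal standalone sketch of that logic (the helper name is hypothetical; the handle list is the same `validHandles` set as in the diff):

```typescript
const VALID_HANDLES = ['source', 'target', 'success', 'error', 'default', 'condition']

// Hypothetical helper mirroring the parsing above. Block IDs may themselves
// contain hyphens, so we anchor on the first recognizable handle name instead
// of splitting naively into four fields.
function parseEdgeIdentifier(identifier: string) {
  const parts = identifier.split('-')
  if (parts.length < 4) return null
  for (let i = 1; i < parts.length - 1; i++) {
    if (VALID_HANDLES.includes(parts[i])) {
      return {
        sourceId: parts.slice(0, i).join('-'),
        sourceHandle: parts[i],
        targetId: parts.slice(i + 1, parts.length - 1).join('-'),
        targetHandle: parts[parts.length - 1],
      }
    }
  }
  return null
}

// e.g. parseEdgeIdentifier('agent-1-source-router-2-target')
// -> { sourceId: 'agent-1', sourceHandle: 'source', targetId: 'router-2', targetHandle: 'target' }
```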
@@ -11,14 +11,14 @@ import { cn } from '@/lib/utils'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
import { useFolderStore } from '@/stores/folders/store'
import { useWorkflowDiffStore } from '@/stores/workflow-diff/store'
import { parseWorkflowJson } from '@/stores/workflows/json/importer'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { parseWorkflowYaml } from '@/stores/workflows/yaml/importer'

const logger = createLogger('CreateMenu')

const TIMERS = {
  LONG_PRESS_DELAY: 500,
  CLOSE_DELAY: 300,
  CLOSE_DELAY: 600,
} as const

interface CreateMenuProps {
@@ -117,18 +117,18 @@ export function CreateMenu({ onCreateWorkflow, isCreatingWorkflow = false }: Cre
  const handleDirectImport = useCallback(
    async (content: string, filename?: string) => {
      if (!content.trim()) {
        logger.error('YAML content is required')
        logger.error('JSON content is required')
        return
      }

      setIsImporting(true)

      try {
        // First validate the YAML without importing
        const { data: yamlWorkflow, errors: parseErrors } = parseWorkflowYaml(content)
        // First validate the JSON without importing
        const { data: workflowData, errors: parseErrors } = parseWorkflowJson(content)

        if (!yamlWorkflow || parseErrors.length > 0) {
          logger.error('Failed to parse YAML:', { errors: parseErrors })
        if (!workflowData || parseErrors.length > 0) {
          logger.error('Failed to parse JSON:', { errors: parseErrors })
          return
        }

@@ -136,7 +136,7 @@ export function CreateMenu({ onCreateWorkflow, isCreatingWorkflow = false }: Cre
        const getWorkflowName = () => {
          if (filename) {
            // Remove file extension and use the filename
            const nameWithoutExtension = filename.replace(/\.(ya?ml)$/i, '')
            const nameWithoutExtension = filename.replace(/\.json$/i, '')
            return (
              nameWithoutExtension.trim() || `Imported Workflow - ${new Date().toLocaleString()}`
            )
@@ -151,45 +151,66 @@ export function CreateMenu({ onCreateWorkflow, isCreatingWorkflow = false }: Cre
        // Create a new workflow
        const newWorkflowId = await createWorkflow({
          name: getWorkflowName(),
          description: 'Workflow imported from YAML',
          description: 'Workflow imported from JSON',
          workspaceId,
        })

        // Use the new consolidated YAML endpoint to import the workflow
        const response = await fetch(`/api/workflows/${newWorkflowId}/yaml`, {
        // Load the imported workflow state into stores immediately (optimistic update)
        const { useWorkflowStore } = await import('@/stores/workflows/workflow/store')
        const { useSubBlockStore } = await import('@/stores/workflows/subblock/store')

        // Set the workflow as active in the registry to prevent reload
        useWorkflowRegistry.setState({ activeWorkflowId: newWorkflowId })

        // Set the workflow state immediately
        useWorkflowStore.setState({
          blocks: workflowData.blocks || {},
          edges: workflowData.edges || [],
          loops: workflowData.loops || {},
          parallels: workflowData.parallels || {},
          lastSaved: Date.now(),
        })

        // Initialize subblock store with the imported blocks
        useSubBlockStore.getState().initializeFromWorkflow(newWorkflowId, workflowData.blocks || {})

        // Also set subblock values if they exist in the imported data
        const subBlockStore = useSubBlockStore.getState()
        Object.entries(workflowData.blocks).forEach(([blockId, block]: [string, any]) => {
          if (block.subBlocks) {
            Object.entries(block.subBlocks).forEach(([subBlockId, subBlock]: [string, any]) => {
              if (subBlock.value !== null && subBlock.value !== undefined) {
                subBlockStore.setValue(blockId, subBlockId, subBlock.value)
              }
            })
          }
        })

        // Navigate to the new workflow after setting state
        router.push(`/workspace/${workspaceId}/w/${newWorkflowId}`)

        logger.info('Workflow imported successfully from JSON')

        // Save to database in the background (fire and forget)
        fetch(`/api/workflows/${newWorkflowId}/state`, {
          method: 'PUT',
          headers: {
            'Content-Type': 'application/json',
          },
          body: JSON.stringify({
            yamlContent: content,
            description: 'Workflow imported from YAML',
            source: 'import',
            applyAutoLayout: true,
            createCheckpoint: false,
          }),
          body: JSON.stringify(workflowData),
        })

        if (!response.ok) {
          const errorData = await response.json()
          logger.error('Import failed:', {
            message: errorData.message || `HTTP ${response.status}: ${response.statusText}`,
          .then((response) => {
            if (!response.ok) {
              logger.error('Failed to persist imported workflow to database')
            } else {
              logger.info('Imported workflow persisted to database')
            }
          })
          .catch((error) => {
            logger.error('Failed to persist imported workflow:', error)
          })
          return
        }

        const result = await response.json()

        // Navigate to the new workflow AFTER import is complete
        if (result.success) {
          logger.info('Navigating to imported workflow')
          router.push(`/workspace/${workspaceId}/w/${newWorkflowId}`)
          logger.info('YAML import completed successfully')
        } else {
          logger.error('Import failed:', { errors: result.errors || [] })
        }
      } catch (error) {
        logger.error('Failed to import YAML workflow:', { error })
        logger.error('Failed to import workflow:', { error })
      } finally {
        setIsImporting(false)
      }
@@ -230,9 +251,9 @@ export function CreateMenu({ onCreateWorkflow, isCreatingWorkflow = false }: Cre
      e.preventDefault()
      e.stopPropagation()
      clearAllTimers()
      handleCreateWorkflow()
      setIsOpen(true)
    },
    [clearAllTimers, handleCreateWorkflow]
    [clearAllTimers]
  )

  const handleContextMenu = useCallback((e: React.MouseEvent) => {
@@ -371,7 +392,7 @@ export function CreateMenu({ onCreateWorkflow, isCreatingWorkflow = false }: Cre
          <input
            ref={fileInputRef}
            type='file'
            accept='.yaml,.yml'
            accept='.json'
            style={{ display: 'none' }}
            onChange={handleFileChange}
          />

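The new import path is an optimistic update: the parsed JSON hydrates the client stores, navigation happens immediately, and persistence becomes a background PUT that only logs on failure. A condensed sketch of just the fire-and-forget half, mirroring the fetch call in the diff (store hydration and rollback are elided; the real code simply logs a failed save):

```typescript
// Sketch only, assuming a browser/Node 18+ environment with a global fetch.
// The endpoint and payload follow the diff above; error handling is illustrative.
function persistWorkflowInBackground(workflowId: string, workflowData: unknown): void {
  fetch(`/api/workflows/${workflowId}/state`, {
    method: 'PUT',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(workflowData),
  })
    .then((response) => {
      if (!response.ok) {
        console.error('Failed to persist imported workflow to database')
      }
    })
    .catch((error) => {
      console.error('Failed to persist imported workflow:', error)
    })
}
```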
@@ -66,6 +66,7 @@ export interface SendMessageRequest {
    | 'gpt-4.1'
    | 'o3'
    | 'claude-4-sonnet'
    | 'claude-4.5-sonnet'
    | 'claude-4.1-opus'
  prefetch?: boolean
  createNewChat?: boolean

@@ -432,8 +432,7 @@ export function InlineToolCall({
  } else if (state === (ClientToolCallState as any).error || state === 'error') {
    colorClass = 'text-red-500'
  } else if (state === (ClientToolCallState as any).success || state === 'success') {
    const isBuildOrEdit =
      toolCall.name === 'build_workflow' || toolCall.name === 'edit_workflow'
    const isBuildOrEdit = toolCall.name === 'edit_workflow'
    colorClass = isBuildOrEdit ? 'text-[var(--brand-primary-hover-hex)]' : 'text-green-600'
  }


@@ -3,6 +3,7 @@ import { copilotChats, document, knowledgeBase, templates } from '@sim/db/schema'
import { and, eq, isNull } from 'drizzle-orm'
import { createLogger } from '@/lib/logs/console/logger'
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/db-helpers'
import { sanitizeForCopilot } from '@/lib/workflows/json-sanitizer'
import type { ChatContext } from '@/stores/copilot/types'

export type AgentContextType =
@@ -260,12 +261,14 @@ async function processWorkflowFromDb(
      loops: normalized.loops || {},
      parallels: normalized.parallels || {},
    }
    // Sanitize workflow state for copilot (remove UI-specific data like positions)
    const sanitizedState = sanitizeForCopilot(workflowState)
    // Match get-user-workflow format: just the workflow state JSON
    const content = JSON.stringify(workflowState, null, 2)
    logger.info('Processed workflow context', {
    const content = JSON.stringify(sanitizedState, null, 2)
    logger.info('Processed sanitized workflow context', {
      workflowId,
      blocks: Object.keys(workflowState.blocks || {}).length,
      edges: workflowState.edges.length,
      blocks: Object.keys(sanitizedState.blocks || {}).length,
      edges: sanitizedState.edges.length,
    })
    // Use the provided kind for the type
    return { type: kind, tag, content }

@@ -3,7 +3,6 @@ import { z } from 'zod'
// Tool IDs supported by the new Copilot runtime
export const ToolIds = z.enum([
  'get_user_workflow',
  'build_workflow',
  'edit_workflow',
  'run_workflow',
  'get_workflow_console',
@@ -11,6 +10,7 @@ export const ToolIds = z.enum([
  'get_blocks_metadata',
  'get_trigger_examples',
  'get_examples_rag',
  'get_operations_examples',
  'search_documentation',
  'search_online',
  'make_api_request',
@@ -71,10 +71,6 @@ export const ToolArgSchemas = {
  // New
  oauth_request_access: z.object({}),

  build_workflow: z.object({
    yamlContent: z.string(),
  }),

  edit_workflow: z.object({
    operations: z
      .array(
@@ -108,10 +104,6 @@ export const ToolArgSchemas = {
    blockIds: StringArray.min(1),
  }),

  get_build_workflow_examples: z.object({
    exampleIds: StringArray.min(1),
  }),

  get_edit_workflow_examples: z.object({
    exampleIds: StringArray.min(1),
  }),
@@ -122,6 +114,10 @@ export const ToolArgSchemas = {
    query: z.string(),
  }),

  get_operations_examples: z.object({
    query: z.string(),
  }),

  search_documentation: z.object({
    query: z.string(),
    topK: NumberOptional,
@@ -200,7 +196,6 @@ export const ToolSSESchemas = {
    'set_global_workflow_variables',
    ToolArgSchemas.set_global_workflow_variables
  ),
  build_workflow: toolCallSSEFor('build_workflow', ToolArgSchemas.build_workflow),
  edit_workflow: toolCallSSEFor('edit_workflow', ToolArgSchemas.edit_workflow),
  run_workflow: toolCallSSEFor('run_workflow', ToolArgSchemas.run_workflow),
  get_workflow_console: toolCallSSEFor('get_workflow_console', ToolArgSchemas.get_workflow_console),
@@ -210,6 +205,10 @@ export const ToolSSESchemas = {

  get_trigger_examples: toolCallSSEFor('get_trigger_examples', ToolArgSchemas.get_trigger_examples),
  get_examples_rag: toolCallSSEFor('get_examples_rag', ToolArgSchemas.get_examples_rag),
  get_operations_examples: toolCallSSEFor(
    'get_operations_examples',
    ToolArgSchemas.get_operations_examples
  ),
  search_documentation: toolCallSSEFor('search_documentation', ToolArgSchemas.search_documentation),
  search_online: toolCallSSEFor('search_online', ToolArgSchemas.search_online),
  make_api_request: toolCallSSEFor('make_api_request', ToolArgSchemas.make_api_request),
@@ -286,7 +285,6 @@ export const ToolResultSchemas = {
    message: z.string().optional(),
  }),

  build_workflow: BuildOrEditWorkflowResult,
  edit_workflow: BuildOrEditWorkflowResult,
  run_workflow: z.object({
    executionId: z.string().optional(),
@@ -298,11 +296,6 @@ export const ToolResultSchemas = {
  get_blocks_metadata: z.object({ metadata: z.record(z.any()) }),
  get_trigger_blocks: z.object({ triggerBlockIds: z.array(z.string()) }),
  get_block_best_practices: z.object({ bestPractices: z.array(z.any()) }),
  get_build_workflow_examples: z.object({
    examples: z.array(
      z.object({ id: z.string(), title: z.string().optional(), yamlContent: z.string().optional() })
    ),
  }),
  get_edit_workflow_examples: z.object({
    examples: z.array(
      z.object({
@@ -330,6 +323,15 @@ export const ToolResultSchemas = {
      })
    ),
  }),
  get_operations_examples: z.object({
    examples: z.array(
      z.object({
        id: z.string(),
        title: z.string().optional(),
        operations: z.array(z.any()).optional(),
      })
    ),
  }),
  search_documentation: z.object({ results: z.array(z.any()) }),
  search_online: z.object({ results: z.array(z.any()) }),
  make_api_request: z.object({

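With `build_workflow` removed, `edit_workflow` becomes the single mutating tool, and its payload is a list of operations rather than a YAML document. The operations schema is truncated above, but the server-side handler later in this diff reads `operation_type`, `block_id`, and `params`, so a payload might look like the following (field names inferred from that handler, not from the full zod schema):

```typescript
// Inferred shape - illustrative only; the authoritative schema lives in
// ToolArgSchemas.edit_workflow, which is truncated in this diff.
const operations = [
  { operation_type: 'add', block_id: 'agent-1', params: { type: 'agent', name: 'Agent 1' } },
  {
    operation_type: 'edit',
    block_id: 'api-1',
    params: { inputs: { url: 'https://example.com' }, connections: { success: 'agent-1' } },
  },
  { operation_type: 'delete', block_id: 'legacy-block' },
]
```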
@@ -1,123 +0,0 @@
import { Grid2x2, Grid2x2Check, Grid2x2X, Loader2, MinusCircle, XCircle } from 'lucide-react'
import {
  BaseClientTool,
  type BaseClientToolMetadata,
  ClientToolCallState,
} from '@/lib/copilot/tools/client/base-tool'
import {
  BuildWorkflowInput,
  BuildWorkflowResult,
  ExecuteResponseSuccessSchema,
} from '@/lib/copilot/tools/shared/schemas'
import { createLogger } from '@/lib/logs/console/logger'
import { useWorkflowDiffStore } from '@/stores/workflow-diff/store'

interface BuildWorkflowArgs {
  yamlContent: string
  description?: string
}

export class BuildWorkflowClientTool extends BaseClientTool {
  static readonly id = 'build_workflow'
  private lastResult: any | undefined

  constructor(toolCallId: string) {
    super(toolCallId, BuildWorkflowClientTool.id, BuildWorkflowClientTool.metadata)
  }

  static readonly metadata: BaseClientToolMetadata = {
    displayNames: {
      [ClientToolCallState.generating]: { text: 'Building your workflow', icon: Loader2 },
      [ClientToolCallState.executing]: { text: 'Building your workflow', icon: Loader2 },
      [ClientToolCallState.success]: { text: 'Built your workflow', icon: Grid2x2Check },
      [ClientToolCallState.error]: { text: 'Failed to build your workflow', icon: XCircle },
      [ClientToolCallState.review]: { text: 'Review your workflow', icon: Grid2x2 },
      [ClientToolCallState.rejected]: { text: 'Rejected workflow changes', icon: Grid2x2X },
      [ClientToolCallState.aborted]: { text: 'Aborted building your workflow', icon: MinusCircle },
      [ClientToolCallState.pending]: { text: 'Building your workflow', icon: Loader2 },
    },
  }

  async handleAccept(): Promise<void> {
    const logger = createLogger('BuildWorkflowClientTool')
    logger.info('handleAccept called', {
      toolCallId: this.toolCallId,
      state: this.getState(),
      hasResult: this.lastResult !== undefined,
    })
    this.setState(ClientToolCallState.success)
    await this.markToolComplete(200, 'Workflow accepted', this.lastResult)
    this.setState(ClientToolCallState.success)
  }

  async handleReject(): Promise<void> {
    const logger = createLogger('BuildWorkflowClientTool')
    logger.info('handleReject called', {
      toolCallId: this.toolCallId,
      state: this.getState(),
    })
    this.setState(ClientToolCallState.rejected)
    await this.markToolComplete(200, 'Workflow rejected')
  }

  async execute(args?: BuildWorkflowArgs): Promise<void> {
    const logger = createLogger('BuildWorkflowClientTool')
    try {
      logger.info('execute called', { toolCallId: this.toolCallId, argsProvided: !!args })
      this.setState(ClientToolCallState.executing)

      const { yamlContent, description } = BuildWorkflowInput.parse(args || {})
      logger.info('parsed input', {
        yamlLength: yamlContent?.length || 0,
        hasDescription: !!description,
      })

      const res = await fetch('/api/copilot/execute-copilot-server-tool', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ toolName: 'build_workflow', payload: { yamlContent, description } }),
      })
      if (!res.ok) {
        const errorText = await res.text().catch(() => '')
        try {
          const errorJson = JSON.parse(errorText)
          throw new Error(errorJson.error || errorText || `Server error (${res.status})`)
        } catch {
          throw new Error(errorText || `Server error (${res.status})`)
        }
      }

      const json = await res.json()
      const parsed = ExecuteResponseSuccessSchema.parse(json)
      const result = BuildWorkflowResult.parse(parsed.result)
      this.lastResult = result
      logger.info('server result parsed', {
        success: result.success,
        hasWorkflowState: !!(result as any).workflowState,
        yamlLength: result.yamlContent?.length || 0,
      })

      // Populate diff preview immediately (without marking complete yet)
      try {
        const diffStore = useWorkflowDiffStore.getState()
        await diffStore.setProposedChanges(result.yamlContent)
        logger.info('diff proposed changes set')
      } catch (e) {
        const logArg: any = e
        logger.warn('Failed to set proposed changes in diff store', logArg)
      }

      // Mark complete as soon as the diff view is available so LLM stream continues
      await this.markToolComplete(200, 'Workflow diff ready for review', result)

      // Move tool into review and stash the result on the tool instance
      logger.info('setting review state')
      this.setState(ClientToolCallState.review, { result })
    } catch (error: any) {
      const message = error instanceof Error ? error.message : String(error)
      logger.error('execute error', { message })
      await this.markToolComplete(500, message)
      this.setState(ClientToolCallState.error)
    }
  }
}
@@ -148,22 +148,29 @@ export class EditWorkflowClientTool extends BaseClientTool {
      const result = parsed.result as any
      this.lastResult = result
      logger.info('server result parsed', {
        hasYaml: !!result?.yamlContent,
        yamlLength: (result?.yamlContent || '').length,
        hasWorkflowState: !!result?.workflowState,
        blocksCount: result?.workflowState
          ? Object.keys(result.workflowState.blocks || {}).length
          : 0,
      })

      // Update diff via YAML so colors/highlights persist
      try {
        if (!this.hasAppliedDiff) {
          const diffStore = useWorkflowDiffStore.getState()
          await diffStore.setProposedChanges(result.yamlContent)
          logger.info('diff proposed changes set for edit_workflow')
          this.hasAppliedDiff = true
        } else {
          logger.info('skipping diff apply (already applied)')
      // Update diff directly with workflow state - no YAML conversion needed!
      if (result.workflowState) {
        try {
          if (!this.hasAppliedDiff) {
            const diffStore = useWorkflowDiffStore.getState()
            await diffStore.setProposedChanges(result.workflowState)
            logger.info('diff proposed changes set for edit_workflow with direct workflow state')
            this.hasAppliedDiff = true
          } else {
            logger.info('skipping diff apply (already applied)')
          }
        } catch (e) {
          logger.warn('Failed to set proposed changes in diff store', e as any)
          throw new Error('Failed to create workflow diff')
        }
      } catch (e) {
        logger.warn('Failed to set proposed changes in diff store', e as any)
      } else {
        throw new Error('No workflow state returned from server')
      }

      // Mark complete early to unblock LLM stream

@@ -5,6 +5,7 @@ import {
  ClientToolCallState,
} from '@/lib/copilot/tools/client/base-tool'
import { createLogger } from '@/lib/logs/console/logger'
import { sanitizeForCopilot } from '@/lib/workflows/json-sanitizer'
import { useWorkflowDiffStore } from '@/stores/workflow-diff/store'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { mergeSubblockState } from '@/stores/workflows/utils'
@@ -130,11 +131,14 @@ export class GetUserWorkflowClientTool extends BaseClientTool {
      return
    }

    // Sanitize workflow state for copilot (remove UI-specific data)
    const sanitizedState = sanitizeForCopilot(workflowState)

    // Convert to JSON string for transport
    let workflowJson = ''
    try {
      workflowJson = JSON.stringify(workflowState, null, 2)
      logger.info('Successfully stringified workflow state', {
      workflowJson = JSON.stringify(sanitizedState, null, 2)
      logger.info('Successfully stringified sanitized workflow state', {
        workflowId,
        jsonLength: workflowJson.length,
      })

@@ -5,6 +5,7 @@ import {
  ClientToolCallState,
} from '@/lib/copilot/tools/client/base-tool'
import { createLogger } from '@/lib/logs/console/logger'
import { sanitizeForCopilot } from '@/lib/workflows/json-sanitizer'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'

const logger = createLogger('GetWorkflowFromNameClientTool')
@@ -82,7 +83,9 @@ export class GetWorkflowFromNameClientTool extends BaseClientTool {
        loops: wf.state.loops || {},
        parallels: wf.state.parallels || {},
      }
      const userWorkflow = JSON.stringify(workflowState, null, 2)
      // Sanitize workflow state for copilot (remove UI-specific data)
      const sanitizedState = sanitizeForCopilot(workflowState)
      const userWorkflow = JSON.stringify(sanitizedState, null, 2)

      await this.markToolComplete(200, `Retrieved workflow ${workflowName}`, { userWorkflow })
      this.setState(ClientToolCallState.success)

@@ -10,12 +10,9 @@ import { searchOnlineServerTool } from '@/lib/copilot/tools/server/other/search-
import { getEnvironmentVariablesServerTool } from '@/lib/copilot/tools/server/user/get-environment-variables'
import { getOAuthCredentialsServerTool } from '@/lib/copilot/tools/server/user/get-oauth-credentials'
import { setEnvironmentVariablesServerTool } from '@/lib/copilot/tools/server/user/set-environment-variables'
import { buildWorkflowServerTool } from '@/lib/copilot/tools/server/workflow/build-workflow'
import { editWorkflowServerTool } from '@/lib/copilot/tools/server/workflow/edit-workflow'
import { getWorkflowConsoleServerTool } from '@/lib/copilot/tools/server/workflow/get-workflow-console'
import {
  BuildWorkflowInput,
  BuildWorkflowResult,
  ExecuteResponseSuccessSchema,
  GetBlocksAndToolsInput,
  GetBlocksAndToolsResult,
@@ -38,7 +35,6 @@ const logger = createLogger('ServerToolRouter')
serverToolRegistry[getBlocksAndToolsServerTool.name] = getBlocksAndToolsServerTool
serverToolRegistry[getBlocksMetadataServerTool.name] = getBlocksMetadataServerTool
serverToolRegistry[getTriggerBlocksServerTool.name] = getTriggerBlocksServerTool
serverToolRegistry[buildWorkflowServerTool.name] = buildWorkflowServerTool
serverToolRegistry[editWorkflowServerTool.name] = editWorkflowServerTool
serverToolRegistry[getWorkflowConsoleServerTool.name] = getWorkflowConsoleServerTool
serverToolRegistry[searchDocumentationServerTool.name] = searchDocumentationServerTool
@@ -80,9 +76,6 @@ export async function routeExecution(
  if (toolName === 'get_trigger_blocks') {
    args = GetTriggerBlocksInput.parse(args)
  }
  if (toolName === 'build_workflow') {
    args = BuildWorkflowInput.parse(args)
  }

  const result = await tool.execute(args, context)

@@ -95,9 +88,6 @@ export async function routeExecution(
  if (toolName === 'get_trigger_blocks') {
    return GetTriggerBlocksResult.parse(result)
  }
  if (toolName === 'build_workflow') {
    return BuildWorkflowResult.parse(result)
  }

  return result
}

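For context, the registration lines deleted above participate in a plain name-keyed registry that `routeExecution` consults. A reduced sketch of that pattern, with simplified types (the real router also runs per-tool zod parsing on arguments and results):

```typescript
// Reduced sketch of the server-tool registry pattern used in the router above.
interface ServerTool {
  name: string
  execute(args: unknown): Promise<unknown>
}

const registry: Record<string, ServerTool> = {}

function register(tool: ServerTool): void {
  registry[tool.name] = tool
}

async function route(toolName: string, args: unknown): Promise<unknown> {
  const tool = registry[toolName]
  if (!tool) throw new Error(`Unknown tool: ${toolName}`)
  return tool.execute(args) // per-tool schema validation omitted in this sketch
}
```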
@@ -1,111 +0,0 @@
import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'
import type { BuildWorkflowInput, BuildWorkflowResult } from '@/lib/copilot/tools/shared/schemas'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { SIM_AGENT_API_URL_DEFAULT } from '@/lib/sim-agent/constants'
import { validateWorkflowState } from '@/lib/workflows/validation'
import { getAllBlocks } from '@/blocks/registry'
import type { BlockConfig } from '@/blocks/types'
import { resolveOutputType } from '@/blocks/utils'
import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/workflow/utils'

const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT

export const buildWorkflowServerTool: BaseServerTool<
  ReturnType<typeof BuildWorkflowInput.parse>,
  ReturnType<typeof BuildWorkflowResult.parse>
> = {
  name: 'build_workflow',
  async execute({
    yamlContent,
    description,
  }: ReturnType<typeof BuildWorkflowInput.parse>): Promise<
    ReturnType<typeof BuildWorkflowResult.parse>
  > {
    const logger = createLogger('BuildWorkflowServerTool')
    logger.info('Building workflow for copilot', {
      yamlLength: yamlContent.length,
      description,
    })

    try {
      const blocks = getAllBlocks()
      const blockRegistry = blocks.reduce(
        (acc, block) => {
          const blockType = (block as any).type
          ;(acc as any)[blockType] = {
            ...(block as any),
            id: blockType,
            subBlocks: (block as any).subBlocks || [],
            outputs: (block as any).outputs || {},
          }
          return acc
        },
        {} as Record<string, BlockConfig>
      )

      const response = await fetch(`${SIM_AGENT_API_URL}/api/yaml/to-workflow`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({
          yamlContent,
          blockRegistry,
          utilities: {
            generateLoopBlocks: generateLoopBlocks.toString(),
            generateParallelBlocks: generateParallelBlocks.toString(),
            resolveOutputType: resolveOutputType.toString(),
          },
          options: { generateNewIds: true, preservePositions: false },
        }),
      })

      if (!response.ok) {
        const errorText = await response.text().catch(() => '')
        throw new Error(`Sim agent API error: ${response.statusText}`)
      }

      const conversionResult = await response.json()

      if (!conversionResult.success || !conversionResult.workflowState) {
        logger.error('YAML conversion failed', {
          errors: conversionResult.errors,
          warnings: conversionResult.warnings,
        })
        throw new Error(conversionResult.errors?.join(', ') || 'Failed to convert YAML to workflow')
      }

      const workflowState = conversionResult.workflowState

      // Validate the workflow state before returning
      const validation = validateWorkflowState(workflowState, { sanitize: true })

      if (!validation.valid) {
        logger.error('Generated workflow state is invalid', {
          errors: validation.errors,
          warnings: validation.warnings,
        })
        throw new Error(`Invalid workflow: ${validation.errors.join('; ')}`)
      }

      if (validation.warnings.length > 0) {
        logger.warn('Workflow validation warnings', {
          warnings: validation.warnings,
        })
      }

      // Use sanitized state if available
      const finalWorkflowState = validation.sanitizedState || workflowState

      return {
        success: true,
        workflowState: finalWorkflowState,
        yamlContent,
        message: `Successfully built workflow with ${Object.keys(finalWorkflowState.blocks).length} blocks`,
        description: description || 'Built workflow',
      }
    } catch (error: any) {
      logger.error('Error building workflow', error)
      throw error
    }
  },
}
@@ -1,14 +1,12 @@
import crypto from 'crypto'
import { db } from '@sim/db'
import { workflow as workflowTable } from '@sim/db/schema'
import { eq } from 'drizzle-orm'
import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { SIM_AGENT_API_URL_DEFAULT } from '@/lib/sim-agent/constants'
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/db-helpers'
import { validateWorkflowState } from '@/lib/workflows/validation'
import { getAllBlocks } from '@/blocks/registry'
import type { BlockConfig } from '@/blocks/types'
import { resolveOutputType } from '@/blocks/utils'
import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/workflow/utils'

@@ -24,46 +22,26 @@ interface EditWorkflowParams {
  currentUserWorkflow?: string
}

const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT

async function applyOperationsToYaml(
  currentYaml: string,
/**
 * Apply operations directly to the workflow JSON state
 */
function applyOperationsToWorkflowState(
  workflowState: any,
  operations: EditWorkflowOperation[]
): Promise<string> {
  const blocks = getAllBlocks()
  const blockRegistry = blocks.reduce(
    (acc, block) => {
      const blockType = (block as any).type
      ;(acc as any)[blockType] = {
        ...(block as any),
        id: blockType,
        subBlocks: (block as any).subBlocks || [],
        outputs: (block as any).outputs || {},
      }
      return acc
    },
    {} as Record<string, BlockConfig>
  )
): any {
  // Deep clone the workflow state to avoid mutations
  const modifiedState = JSON.parse(JSON.stringify(workflowState))

  const parseResponse = await fetch(`${SIM_AGENT_API_URL}/api/yaml/parse`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      yamlContent: currentYaml,
      blockRegistry,
      utilities: {
        generateLoopBlocks: generateLoopBlocks.toString(),
        generateParallelBlocks: generateParallelBlocks.toString(),
        resolveOutputType: resolveOutputType.toString(),
      },
    }),
  // Log initial state
  const logger = createLogger('EditWorkflowServerTool')
  logger.debug('Initial blocks before operations:', {
    blockCount: Object.keys(modifiedState.blocks || {}).length,
    blockTypes: Object.entries(modifiedState.blocks || {}).map(([id, block]: [string, any]) => ({
      id,
      type: block.type,
      hasType: block.type !== undefined,
    })),
  })
  if (!parseResponse.ok) throw new Error(`Sim agent API error: ${parseResponse.statusText}`)
  const parseResult = await parseResponse.json()
  if (!parseResult.success || !parseResult.data || parseResult.errors?.length > 0) {
    throw new Error(`Invalid YAML format: ${parseResult.errors?.join(', ') || 'Unknown error'}`)
  }
  const workflowData = parseResult.data

  // Reorder operations: delete -> add -> edit to ensure consistent application semantics
  const deletes = operations.filter((op) => op.operation_type === 'delete')
@@ -73,115 +51,265 @@ async function applyOperationsToYaml(
|
||||
|
||||
for (const operation of orderedOperations) {
|
||||
const { operation_type, block_id, params } = operation
|
||||
|
||||
switch (operation_type) {
|
||||
case 'delete':
|
||||
if (workflowData.blocks[block_id]) {
|
||||
const childBlocksToRemove: string[] = []
|
||||
Object.entries(workflowData.blocks).forEach(([childId, child]: [string, any]) => {
|
||||
if (child.parentId === block_id) childBlocksToRemove.push(childId)
|
||||
})
|
||||
delete workflowData.blocks[block_id]
|
||||
childBlocksToRemove.forEach((childId) => delete workflowData.blocks[childId])
|
||||
const allDeleted = [block_id, ...childBlocksToRemove]
|
||||
Object.values(workflowData.blocks).forEach((block: any) => {
|
||||
if (!block.connections) return
|
||||
Object.keys(block.connections).forEach((key) => {
|
||||
const value = block.connections[key]
|
||||
if (typeof value === 'string') {
|
||||
if (allDeleted.includes(value)) delete block.connections[key]
|
||||
} else if (Array.isArray(value)) {
|
||||
block.connections[key] = value.filter((item: any) =>
|
||||
typeof item === 'string'
|
||||
? !allDeleted.includes(item)
|
||||
: !allDeleted.includes(item?.block)
|
||||
)
|
||||
if (block.connections[key].length === 0) delete block.connections[key]
|
||||
} else if (typeof value === 'object' && value?.block) {
|
||||
if (allDeleted.includes(value.block)) delete block.connections[key]
|
||||
case 'delete': {
|
||||
if (modifiedState.blocks[block_id]) {
|
||||
// Find all child blocks to remove
|
||||
const blocksToRemove = new Set<string>([block_id])
|
||||
const findChildren = (parentId: string) => {
|
||||
Object.entries(modifiedState.blocks).forEach(([childId, child]: [string, any]) => {
|
||||
if (child.data?.parentId === parentId) {
|
||||
blocksToRemove.add(childId)
|
||||
findChildren(childId)
|
||||
}
|
||||
})
|
||||
})
|
||||
}
|
||||
findChildren(block_id)
|
||||
|
||||
// Remove blocks
|
||||
blocksToRemove.forEach((id) => delete modifiedState.blocks[id])
|
||||
|
||||
// Remove edges connected to deleted blocks
|
||||
modifiedState.edges = modifiedState.edges.filter(
|
||||
(edge: any) => !blocksToRemove.has(edge.source) && !blocksToRemove.has(edge.target)
|
||||
)
|
||||
}
|
||||
break
|
||||
case 'edit':
|
||||
if (workflowData.blocks[block_id]) {
|
||||
const block = workflowData.blocks[block_id]
|
||||
if (params?.inputs) {
|
||||
if (!block.inputs) block.inputs = {}
|
||||
Object.assign(block.inputs, params.inputs)
|
||||
}
|
||||
|
||||
case 'edit': {
|
||||
if (modifiedState.blocks[block_id]) {
|
||||
const block = modifiedState.blocks[block_id]
|
||||
|
||||
// Ensure block has essential properties
|
||||
if (!block.type) {
|
||||
logger.warn(`Block ${block_id} missing type property, skipping edit`, {
|
||||
blockKeys: Object.keys(block),
|
||||
blockData: JSON.stringify(block),
|
||||
})
|
||||
break
|
||||
}
|
||||
if (params?.connections) {
|
||||
if (!block.connections) block.connections = {}
|
||||
Object.entries(params.connections).forEach(([key, value]) => {
|
||||
if (value === null) delete block.connections[key]
|
||||
else (block.connections as any)[key] = value
|
||||
|
||||
// Update inputs (convert to subBlocks format)
|
||||
if (params?.inputs) {
|
||||
if (!block.subBlocks) block.subBlocks = {}
|
||||
Object.entries(params.inputs).forEach(([key, value]) => {
|
||||
if (!block.subBlocks[key]) {
|
||||
block.subBlocks[key] = {
|
||||
id: key,
|
||||
type: 'short-input',
|
||||
value: value,
|
||||
}
|
||||
} else {
|
||||
block.subBlocks[key].value = value
|
||||
}
|
||||
})
|
||||
}
|
||||
if (params?.type) block.type = params.type
|
||||
if (params?.name) block.name = params.name
|
||||
// Handle trigger mode toggle and clean incoming edges when enabling
|
||||
|
||||
// Update basic properties
|
||||
if (params?.type !== undefined) block.type = params.type
|
||||
if (params?.name !== undefined) block.name = params.name
|
||||
|
||||
// Handle trigger mode toggle
|
||||
if (typeof params?.triggerMode === 'boolean') {
|
||||
// Set triggerMode as a top-level block property
|
||||
block.triggerMode = params.triggerMode
|
||||
|
||||
if (params.triggerMode === true) {
|
||||
// Remove all incoming connections where this block is referenced as a target
|
||||
Object.values(workflowData.blocks).forEach((other: any) => {
|
||||
if (!other?.connections) return
|
||||
Object.keys(other.connections).forEach((handle) => {
|
||||
const value = other.connections[handle]
|
||||
if (typeof value === 'string') {
|
||||
if (value === block_id) delete other.connections[handle]
|
||||
} else if (Array.isArray(value)) {
|
||||
other.connections[handle] = value.filter((item: any) =>
|
||||
typeof item === 'string' ? item !== block_id : item?.block !== block_id
|
||||
)
|
||||
if (other.connections[handle].length === 0) delete other.connections[handle]
|
||||
} else if (typeof value === 'object' && value?.block) {
|
||||
if (value.block === block_id) delete other.connections[handle]
|
||||
}
|
||||
})
|
||||
})
|
||||
// Remove all incoming edges when enabling trigger mode
|
||||
modifiedState.edges = modifiedState.edges.filter(
|
||||
(edge: any) => edge.target !== block_id
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
// Handle connections update (convert to edges)
|
||||
if (params?.connections) {
|
||||
// Remove existing edges from this block
|
||||
modifiedState.edges = modifiedState.edges.filter(
|
||||
(edge: any) => edge.source !== block_id
|
||||
)
|
||||
|
||||
// Add new edges based on connections
|
||||
Object.entries(params.connections).forEach(([connectionType, targets]) => {
|
||||
if (targets === null) return
|
||||
|
||||
// Map semantic connection names to actual React Flow handle IDs
|
||||
// 'success' in YAML/connections maps to 'source' handle in React Flow
|
||||
const mapConnectionTypeToHandle = (type: string): string => {
|
||||
if (type === 'success') return 'source'
|
||||
if (type === 'error') return 'error'
|
||||
// Conditions and other types pass through as-is
|
||||
return type
|
||||
}
|
||||
|
||||
const actualSourceHandle = mapConnectionTypeToHandle(connectionType)
|
||||
|
||||
const addEdge = (targetBlock: string, targetHandle?: string) => {
|
||||
modifiedState.edges.push({
|
||||
id: crypto.randomUUID(),
|
||||
source: block_id,
|
||||
sourceHandle: actualSourceHandle,
|
||||
target: targetBlock,
|
||||
targetHandle: targetHandle || 'target',
|
||||
type: 'default',
|
||||
})
|
||||
}
|
||||
|
||||
if (typeof targets === 'string') {
|
||||
addEdge(targets)
|
||||
} else if (Array.isArray(targets)) {
|
||||
targets.forEach((target: any) => {
|
||||
if (typeof target === 'string') {
|
||||
addEdge(target)
|
||||
} else if (target?.block) {
|
||||
addEdge(target.block, target.handle)
|
||||
}
|
||||
})
|
||||
} else if (typeof targets === 'object' && (targets as any)?.block) {
|
||||
addEdge((targets as any).block, (targets as any).handle)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
// Handle edge removal
|
||||
if (params?.removeEdges && Array.isArray(params.removeEdges)) {
|
||||
params.removeEdges.forEach(({ targetBlockId, sourceHandle = 'default' }) => {
|
||||
const value = block.connections?.[sourceHandle]
|
||||
if (typeof value === 'string') {
|
||||
if (value === targetBlockId) delete (block.connections as any)[sourceHandle]
|
||||
} else if (Array.isArray(value)) {
|
||||
;(block.connections as any)[sourceHandle] = value.filter((item: any) =>
|
||||
typeof item === 'string' ? item !== targetBlockId : item?.block !== targetBlockId
|
||||
)
|
||||
if ((block.connections as any)[sourceHandle].length === 0)
|
||||
delete (block.connections as any)[sourceHandle]
|
||||
} else if (typeof value === 'object' && value?.block) {
|
||||
if (value.block === targetBlockId) delete (block.connections as any)[sourceHandle]
|
||||
params.removeEdges.forEach(({ targetBlockId, sourceHandle = 'source' }) => {
|
||||
modifiedState.edges = modifiedState.edges.filter(
|
||||
(edge: any) =>
|
||||
!(
|
||||
edge.source === block_id &&
|
||||
edge.target === targetBlockId &&
|
||||
edge.sourceHandle === sourceHandle
|
||||
)
|
||||
)
|
||||
})
|
||||
}
|
||||
}
|
||||
break
|
||||
}
|
||||
|
||||
case 'add': {
|
||||
if (params?.type && params?.name) {
|
||||
// Get block configuration
|
||||
const blockConfig = getAllBlocks().find((block) => block.type === params.type)
|
||||
|
||||
// Create new block with proper structure
|
||||
const newBlock: any = {
|
||||
id: block_id,
|
||||
type: params.type,
|
||||
name: params.name,
|
||||
position: { x: 0, y: 0 }, // Default position
|
||||
enabled: true,
|
||||
horizontalHandles: true,
|
||||
isWide: false,
|
||||
advancedMode: false,
|
||||
height: 0,
|
||||
triggerMode: false,
|
||||
subBlocks: {},
|
||||
outputs: blockConfig ? resolveOutputType(blockConfig.outputs) : {},
|
||||
data: {},
|
||||
}
|
||||
|
||||
// Add inputs as subBlocks
|
||||
if (params.inputs) {
|
||||
Object.entries(params.inputs).forEach(([key, value]) => {
|
||||
newBlock.subBlocks[key] = {
|
||||
id: key,
|
||||
type: 'short-input',
|
||||
value: value,
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
// Set up subBlocks from block configuration
|
||||
if (blockConfig) {
|
||||
blockConfig.subBlocks.forEach((subBlock) => {
|
||||
if (!newBlock.subBlocks[subBlock.id]) {
|
||||
newBlock.subBlocks[subBlock.id] = {
|
||||
id: subBlock.id,
|
||||
type: subBlock.type,
|
||||
value: null,
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
modifiedState.blocks[block_id] = newBlock
|
||||
|
||||
// Add connections as edges
|
||||
if (params.connections) {
|
||||
Object.entries(params.connections).forEach(([sourceHandle, targets]) => {
|
||||
const addEdge = (targetBlock: string, targetHandle?: string) => {
|
||||
modifiedState.edges.push({
|
||||
id: crypto.randomUUID(),
|
||||
source: block_id,
|
||||
sourceHandle: sourceHandle,
|
||||
target: targetBlock,
|
||||
targetHandle: targetHandle || 'target',
|
||||
type: 'default',
|
||||
})
|
||||
}
|
||||
|
||||
if (typeof targets === 'string') {
|
||||
addEdge(targets)
|
||||
} else if (Array.isArray(targets)) {
|
||||
targets.forEach((target: any) => {
|
||||
if (typeof target === 'string') {
|
||||
addEdge(target)
|
||||
} else if (target?.block) {
|
||||
addEdge(target.block, target.handle)
|
||||
}
|
||||
})
|
||||
} else if (typeof targets === 'object' && (targets as any)?.block) {
|
||||
addEdge((targets as any).block, (targets as any).handle)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
break
|
||||
case 'add':
|
||||
if (params?.type && params?.name) {
|
||||
workflowData.blocks[block_id] = {
|
||||
type: params.type,
|
||||
name: params.name,
|
||||
inputs: params.inputs || {},
|
||||
connections: params.connections || {},
|
||||
}
|
||||
}
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const { dump: yamlDump } = await import('js-yaml')
|
||||
return yamlDump(workflowData)
|
||||
// Regenerate loops and parallels after modifications
|
||||
modifiedState.loops = generateLoopBlocks(modifiedState.blocks)
|
||||
modifiedState.parallels = generateParallelBlocks(modifiedState.blocks)
|
||||
|
||||
// Validate all blocks have types before returning
|
||||
const blocksWithoutType = Object.entries(modifiedState.blocks)
|
||||
.filter(([_, block]: [string, any]) => !block.type || block.type === undefined)
|
||||
.map(([id, block]: [string, any]) => ({ id, block }))
|
||||
|
||||
if (blocksWithoutType.length > 0) {
|
||||
logger.error('Blocks without type after operations:', {
|
||||
blocksWithoutType: blocksWithoutType.map(({ id, block }) => ({
|
||||
id,
|
||||
type: block.type,
|
||||
name: block.name,
|
||||
keys: Object.keys(block),
|
||||
})),
|
||||
})
|
||||
|
||||
// Attempt to fix by removing type-less blocks
|
||||
blocksWithoutType.forEach(({ id }) => {
|
||||
delete modifiedState.blocks[id]
|
||||
})
|
||||
|
||||
// Remove edges connected to removed blocks
|
||||
const removedIds = new Set(blocksWithoutType.map(({ id }) => id))
|
||||
modifiedState.edges = modifiedState.edges.filter(
|
||||
(edge: any) => !removedIds.has(edge.source) && !removedIds.has(edge.target)
|
||||
)
|
||||
}
|
||||
|
||||
return modifiedState
|
||||
}
|
||||
|
||||
async function getCurrentWorkflowStateFromDb(
  workflowId: string
): Promise<{ workflowState: any; subBlockValues: Record<string, Record<string, any>> }> {
  const logger = createLogger('EditWorkflowServerTool')
  const [workflowRecord] = await db
    .select()
    .from(workflowTable)
@@ -190,11 +318,34 @@ async function getCurrentWorkflowStateFromDb(
  if (!workflowRecord) throw new Error(`Workflow ${workflowId} not found in database`)
  const normalized = await loadWorkflowFromNormalizedTables(workflowId)
  if (!normalized) throw new Error('Workflow has no normalized data')

  // Validate and fix blocks without types
  const blocks = { ...normalized.blocks }
  const invalidBlocks: string[] = []

  Object.entries(blocks).forEach(([id, block]: [string, any]) => {
    if (!block.type) {
      logger.warn(`Block ${id} loaded without type from database`, {
        blockKeys: Object.keys(block),
        blockName: block.name,
      })
      invalidBlocks.push(id)
    }
  })

  // Remove invalid blocks
  invalidBlocks.forEach((id) => delete blocks[id])

  // Remove edges connected to invalid blocks
  const edges = normalized.edges.filter(
    (edge: any) => !invalidBlocks.includes(edge.source) && !invalidBlocks.includes(edge.target)
  )

  const workflowState: any = {
    blocks: normalized.blocks,
    edges: normalized.edges,
    loops: normalized.loops,
    parallels: normalized.parallels,
    blocks,
    edges,
    loops: normalized.loops || {},
    parallels: normalized.parallels || {},
  }
  const subBlockValues: Record<string, Record<string, any>> = {}
  Object.entries(normalized.blocks).forEach(([blockId, block]) => {
@@ -220,104 +371,25 @@ export const editWorkflowServerTool: BaseServerTool<EditWorkflowParams, any> = {
      hasCurrentUserWorkflow: !!currentUserWorkflow,
    })

    const blocks = getAllBlocks()
    const blockRegistry = blocks.reduce(
      (acc, block) => {
        const blockType = (block as any).type
        ;(acc as any)[blockType] = {
          ...(block as any),
          id: blockType,
          subBlocks: (block as any).subBlocks || [],
          outputs: (block as any).outputs || {},
        }
        return acc
      },
      {} as Record<string, BlockConfig>
    )

    // Get current workflow as YAML via sim-agent
    let currentYaml: string
    {
      // Prepare workflowState and subBlockValues
      let workflowState: any | undefined
      let subBlockValues: Record<string, Record<string, any>> | undefined
      if (currentUserWorkflow) {
        try {
          workflowState = JSON.parse(currentUserWorkflow)
          // Extract subBlockValues from provided state
          subBlockValues = {}
          Object.entries(workflowState.blocks || {}).forEach(([blockId, block]: [string, any]) => {
            ;(subBlockValues as any)[blockId] = {}
            Object.entries(block.subBlocks || {}).forEach(([subId, sub]: [string, any]) => {
              if (sub?.value !== undefined) (subBlockValues as any)[blockId][subId] = sub.value
            })
          })
        } catch {}
      } else {
        const fromDb = await getCurrentWorkflowStateFromDb(workflowId)
        workflowState = fromDb.workflowState
        subBlockValues = fromDb.subBlockValues
    // Get current workflow state
    let workflowState: any
    if (currentUserWorkflow) {
      try {
        workflowState = JSON.parse(currentUserWorkflow)
      } catch (error) {
        logger.error('Failed to parse currentUserWorkflow', error)
        throw new Error('Invalid currentUserWorkflow format')
      }

      // Log the workflow state to see if triggerMode is present
      logger.info('Workflow state being sent to sim-agent for YAML conversion:', {
        blockCount: Object.keys(workflowState.blocks || {}).length,
        blocksWithTriggerMode: Object.entries(workflowState.blocks || {})
          .filter(([_, block]: [string, any]) => block.triggerMode === true)
          .map(([id]) => id),
      })

      const resp = await fetch(`${SIM_AGENT_API_URL}/api/workflow/to-yaml`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({
          workflowState,
          subBlockValues,
          blockRegistry,
          utilities: {
            generateLoopBlocks: generateLoopBlocks.toString(),
            generateParallelBlocks: generateParallelBlocks.toString(),
            resolveOutputType: resolveOutputType.toString(),
          },
        }),
      })
      if (!resp.ok) throw new Error(`Sim agent API error: ${resp.statusText}`)
      const json = await resp.json()
      if (!json.success || !json.yaml) throw new Error(json.error || 'Failed to generate YAML')
      currentYaml = json.yaml
    } else {
      const fromDb = await getCurrentWorkflowStateFromDb(workflowId)
      workflowState = fromDb.workflowState
    }

    const modifiedYaml = await applyOperationsToYaml(currentYaml, operations)

    // Convert the modified YAML back to workflow state for validation
    const validationResponse = await fetch(`${SIM_AGENT_API_URL}/api/yaml/to-workflow`, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({
        yamlContent: modifiedYaml,
        blockRegistry,
        utilities: {
          generateLoopBlocks: generateLoopBlocks.toString(),
          generateParallelBlocks: generateParallelBlocks.toString(),
          resolveOutputType: resolveOutputType.toString(),
        },
        options: { generateNewIds: false, preservePositions: true },
      }),
    })

    if (!validationResponse.ok) {
      throw new Error(`Failed to validate edited workflow: ${validationResponse.statusText}`)
    }

    const validationResult = await validationResponse.json()
    if (!validationResult.success || !validationResult.workflowState) {
      throw new Error(
        validationResult.errors?.join(', ') || 'Failed to convert edited YAML to workflow'
      )
    }
    // Apply operations directly to the workflow state
    const modifiedWorkflowState = applyOperationsToWorkflowState(workflowState, operations)

    // Validate the workflow state
    const validation = validateWorkflowState(validationResult.workflowState, { sanitize: true })
    const validation = validateWorkflowState(modifiedWorkflowState, { sanitize: true })

    if (!validation.valid) {
      logger.error('Edited workflow state is invalid', {
@@ -333,13 +405,18 @@ export const editWorkflowServerTool: BaseServerTool<EditWorkflowParams, any> = {
      })
    }

    logger.info('edit_workflow generated modified YAML', {
    logger.info('edit_workflow successfully applied operations', {
      operationCount: operations.length,
      modifiedYamlLength: modifiedYaml.length,
      blocksCount: Object.keys(modifiedWorkflowState.blocks).length,
      edgesCount: modifiedWorkflowState.edges.length,
      validationErrors: validation.errors.length,
      validationWarnings: validation.warnings.length,
    })

    return { success: true, yamlContent: modifiedYaml }
    // Return the modified workflow state for the client to convert to YAML if needed
    return {
      success: true,
      workflowState: validation.sanitizedState || modifiedWorkflowState,
    }
  },
}

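For contrast with the deprecated YAML flow, a minimal consumer sketch; the `execute` entry point and the exact result type are assumptions based on the `BaseServerTool` usage above, not confirmed API:

```ts
// Old result shape: { success: true, yamlContent: modifiedYaml }
// New result shape: { success: true, workflowState: ... } - JSON all the way.
const result = await editWorkflowServerTool.execute({ workflowId, operations })
if (result.success) {
  // workflowState is validation.sanitizedState || modifiedWorkflowState;
  // the client converts to YAML itself only if it still needs YAML.
  console.log(Object.keys(result.workflowState.blocks).length, 'blocks after edit')
}
```
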
@@ -34,23 +34,3 @@ export const GetTriggerBlocksResult = z.object({
  triggerBlockIds: z.array(z.string()),
})
export type GetTriggerBlocksResultType = z.infer<typeof GetTriggerBlocksResult>

// build_workflow
export const BuildWorkflowInput = z.object({
  yamlContent: z.string(),
  description: z.string().optional(),
})
export const BuildWorkflowResult = z.object({
  success: z.boolean(),
  message: z.string(),
  yamlContent: z.string(),
  description: z.string().optional(),
  workflowState: z.unknown().optional(),
  data: z
    .object({
      blocksCount: z.number(),
      edgesCount: z.number(),
    })
    .optional(),
})
export type BuildWorkflowResultType = z.infer<typeof BuildWorkflowResult>

@@ -1,2 +1,2 @@
export const SIM_AGENT_API_URL_DEFAULT = 'https://d2vaeznw6mw0n7.cloudfront.net'
export const SIM_AGENT_API_URL_DEFAULT = 'https://copilot.sim.ai'
export const SIM_AGENT_VERSION = '1.0.0'

apps/sim/lib/workflows/autolayout/containers.ts (new file, 95 lines)
@@ -0,0 +1,95 @@
import { createLogger } from '@/lib/logs/console/logger'
import type { BlockState } from '@/stores/workflows/workflow/types'
import { assignLayers, groupByLayer } from './layering'
import { calculatePositions } from './positioning'
import type { Edge, LayoutOptions } from './types'
import { DEFAULT_CONTAINER_HEIGHT, DEFAULT_CONTAINER_WIDTH, getBlocksByParent } from './utils'

const logger = createLogger('AutoLayout:Containers')

const CONTAINER_PADDING = 150
const CONTAINER_HORIZONTAL_PADDING = 180
const CONTAINER_VERTICAL_PADDING = 100

export function layoutContainers(
  blocks: Record<string, BlockState>,
  edges: Edge[],
  options: LayoutOptions = {}
): void {
  const { root, children } = getBlocksByParent(blocks)

  const containerOptions: LayoutOptions = {
    horizontalSpacing: options.horizontalSpacing ? options.horizontalSpacing * 0.85 : 400,
    verticalSpacing: options.verticalSpacing ? options.verticalSpacing : 200,
    padding: { x: CONTAINER_HORIZONTAL_PADDING, y: CONTAINER_VERTICAL_PADDING },
    alignment: options.alignment,
  }

  for (const [parentId, childIds] of children.entries()) {
    const parentBlock = blocks[parentId]
    if (!parentBlock) continue

    logger.debug('Processing container', { parentId, childCount: childIds.length })

    const childBlocks: Record<string, BlockState> = {}
    for (const childId of childIds) {
      childBlocks[childId] = blocks[childId]
    }

    const childEdges = edges.filter(
      (edge) => childIds.includes(edge.source) && childIds.includes(edge.target)
    )

    if (Object.keys(childBlocks).length === 0) {
      continue
    }

    const childNodes = assignLayers(childBlocks, childEdges)
    const childLayers = groupByLayer(childNodes)
    calculatePositions(childLayers, containerOptions)

    let minX = Number.POSITIVE_INFINITY
    let minY = Number.POSITIVE_INFINITY
    let maxX = Number.NEGATIVE_INFINITY
    let maxY = Number.NEGATIVE_INFINITY

    // Normalize positions to start from padding offset
    for (const node of childNodes.values()) {
      minX = Math.min(minX, node.position.x)
      minY = Math.min(minY, node.position.y)
      maxX = Math.max(maxX, node.position.x + node.dimensions.width)
      maxY = Math.max(maxY, node.position.y + node.dimensions.height)
    }

    // Adjust all child positions to start at proper padding from container edges
    const xOffset = CONTAINER_HORIZONTAL_PADDING - minX
    const yOffset = CONTAINER_VERTICAL_PADDING - minY

    for (const node of childNodes.values()) {
      childBlocks[node.id].position = {
        x: node.position.x + xOffset,
        y: node.position.y + yOffset,
      }
    }

    const calculatedWidth = maxX - minX + CONTAINER_PADDING * 2
    const calculatedHeight = maxY - minY + CONTAINER_PADDING * 2

    const containerWidth = Math.max(calculatedWidth, DEFAULT_CONTAINER_WIDTH)
    const containerHeight = Math.max(calculatedHeight, DEFAULT_CONTAINER_HEIGHT)

    if (!parentBlock.data) {
      parentBlock.data = {}
    }

    parentBlock.data.width = containerWidth
    parentBlock.data.height = containerHeight

    logger.debug('Container dimensions calculated', {
      parentId,
      width: containerWidth,
      height: containerHeight,
      childCount: childIds.length,
    })
  }
}

apps/sim/lib/workflows/autolayout/incremental.ts (new file, 131 lines)
@@ -0,0 +1,131 @@
import { createLogger } from '@/lib/logs/console/logger'
import type { BlockState } from '@/stores/workflows/workflow/types'
import type { AdjustmentOptions, Edge } from './types'
import { boxesOverlap, createBoundingBox, getBlockDimensions } from './utils'

const logger = createLogger('AutoLayout:Incremental')

const DEFAULT_SHIFT_SPACING = 550

export function adjustForNewBlock(
  blocks: Record<string, BlockState>,
  edges: Edge[],
  newBlockId: string,
  options: AdjustmentOptions = {}
): void {
  const newBlock = blocks[newBlockId]
  if (!newBlock) {
    logger.warn('New block not found in blocks', { newBlockId })
    return
  }

  const shiftSpacing = options.horizontalSpacing ?? DEFAULT_SHIFT_SPACING

  const incomingEdges = edges.filter((e) => e.target === newBlockId)
  const outgoingEdges = edges.filter((e) => e.source === newBlockId)

  if (incomingEdges.length === 0 && outgoingEdges.length === 0) {
    logger.debug('New block has no connections, no adjustment needed', { newBlockId })
    return
  }

  const sourceBlocks = incomingEdges
    .map((e) => blocks[e.source])
    .filter((b) => b !== undefined && b.id !== newBlockId)

  if (sourceBlocks.length > 0) {
    const avgSourceX = sourceBlocks.reduce((sum, b) => sum + b.position.x, 0) / sourceBlocks.length
    const avgSourceY = sourceBlocks.reduce((sum, b) => sum + b.position.y, 0) / sourceBlocks.length
    const maxSourceX = Math.max(...sourceBlocks.map((b) => b.position.x))

    newBlock.position = {
      x: maxSourceX + shiftSpacing,
      y: avgSourceY,
    }

    logger.debug('Positioned new block based on source blocks', {
      newBlockId,
      position: newBlock.position,
      sourceCount: sourceBlocks.length,
    })
  }

  const targetBlocks = outgoingEdges
    .map((e) => blocks[e.target])
    .filter((b) => b !== undefined && b.id !== newBlockId)

  if (targetBlocks.length > 0 && sourceBlocks.length === 0) {
    const minTargetX = Math.min(...targetBlocks.map((b) => b.position.x))
    const avgTargetY = targetBlocks.reduce((sum, b) => sum + b.position.y, 0) / targetBlocks.length

    newBlock.position = {
      x: Math.max(150, minTargetX - shiftSpacing),
      y: avgTargetY,
    }

    logger.debug('Positioned new block based on target blocks', {
      newBlockId,
      position: newBlock.position,
      targetCount: targetBlocks.length,
    })
  }

  const newBlockDims = getBlockDimensions(newBlock)
  const newBlockBox = createBoundingBox(newBlock.position, newBlockDims)

  const blocksToShift: Array<{ block: BlockState; shiftAmount: number }> = []

  for (const [id, block] of Object.entries(blocks)) {
    if (id === newBlockId) continue
    if (block.data?.parentId) continue

    if (block.position.x >= newBlock.position.x) {
      const blockDims = getBlockDimensions(block)
      const blockBox = createBoundingBox(block.position, blockDims)

      if (boxesOverlap(newBlockBox, blockBox, 50)) {
        const requiredShift = newBlock.position.x + newBlockDims.width + 50 - block.position.x
        if (requiredShift > 0) {
          blocksToShift.push({ block, shiftAmount: requiredShift })
        }
      }
    }
  }

  if (blocksToShift.length > 0) {
    logger.debug('Shifting blocks to accommodate new block', {
      newBlockId,
      shiftCount: blocksToShift.length,
    })

    for (const { block, shiftAmount } of blocksToShift) {
      block.position.x += shiftAmount
    }
  }
}

export function compactHorizontally(blocks: Record<string, BlockState>, edges: Edge[]): void {
  const blockArray = Object.values(blocks).filter((b) => !b.data?.parentId)

  blockArray.sort((a, b) => a.position.x - b.position.x)

  const MIN_SPACING = 500

  for (let i = 1; i < blockArray.length; i++) {
    const prevBlock = blockArray[i - 1]
    const currentBlock = blockArray[i]

    const prevDims = getBlockDimensions(prevBlock)
    const expectedX = prevBlock.position.x + prevDims.width + MIN_SPACING

    if (currentBlock.position.x > expectedX + 150) {
      const shift = currentBlock.position.x - expectedX
      currentBlock.position.x = expectedX

      logger.debug('Compacted block horizontally', {
        blockId: currentBlock.id,
        shift,
      })
    }
  }
}
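A minimal usage sketch (not part of the commit): `BlockState` carries more fields in the real store types, so the literals below are typed as `any` and only fill what `adjustForNewBlock` actually reads.

```ts
import { adjustForNewBlock } from '@/lib/workflows/autolayout/incremental'

const blocks: Record<string, any> = {
  a: { id: 'a', type: 'starter', position: { x: 150, y: 300 }, data: {} },
  b: { id: 'b', type: 'agent', position: { x: 0, y: 0 }, data: {} },
}
const edges = [{ id: 'e1', source: 'a', target: 'b' }]

// 'b' has one incoming edge from 'a', so it is placed one shift to the right:
adjustForNewBlock(blocks, edges, 'b')
console.log(blocks.b.position) // { x: 700, y: 300 } - maxSourceX + DEFAULT_SHIFT_SPACING
```
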
apps/sim/lib/workflows/autolayout/index.ts (new file, 102 lines)
@@ -0,0 +1,102 @@
import { createLogger } from '@/lib/logs/console/logger'
import type { BlockState } from '@/stores/workflows/workflow/types'
import { layoutContainers } from './containers'
import { adjustForNewBlock as adjustForNewBlockInternal, compactHorizontally } from './incremental'
import { assignLayers, groupByLayer } from './layering'
import { calculatePositions } from './positioning'
import type { AdjustmentOptions, Edge, LayoutOptions, LayoutResult, Loop, Parallel } from './types'
import { getBlocksByParent } from './utils'

const logger = createLogger('AutoLayout')

export function applyAutoLayout(
  blocks: Record<string, BlockState>,
  edges: Edge[],
  loops: Record<string, Loop> = {},
  parallels: Record<string, Parallel> = {},
  options: LayoutOptions = {}
): LayoutResult {
  try {
    logger.info('Starting auto layout', {
      blockCount: Object.keys(blocks).length,
      edgeCount: edges.length,
      loopCount: Object.keys(loops).length,
      parallelCount: Object.keys(parallels).length,
    })

    const blocksCopy: Record<string, BlockState> = JSON.parse(JSON.stringify(blocks))

    const { root: rootBlockIds } = getBlocksByParent(blocksCopy)

    const rootBlocks: Record<string, BlockState> = {}
    for (const id of rootBlockIds) {
      rootBlocks[id] = blocksCopy[id]
    }

    const rootEdges = edges.filter(
      (edge) => rootBlockIds.includes(edge.source) && rootBlockIds.includes(edge.target)
    )

    if (Object.keys(rootBlocks).length > 0) {
      const nodes = assignLayers(rootBlocks, rootEdges)
      const layers = groupByLayer(nodes)
      calculatePositions(layers, options)

      for (const node of nodes.values()) {
        blocksCopy[node.id].position = node.position
      }
    }

    layoutContainers(blocksCopy, edges, options)

    logger.info('Auto layout completed successfully', {
      blockCount: Object.keys(blocksCopy).length,
    })

    return {
      blocks: blocksCopy,
      success: true,
    }
  } catch (error) {
    logger.error('Auto layout failed', { error })
    return {
      blocks,
      success: false,
      error: error instanceof Error ? error.message : 'Unknown error',
    }
  }
}

export function adjustForNewBlock(
  blocks: Record<string, BlockState>,
  edges: Edge[],
  newBlockId: string,
  options: AdjustmentOptions = {}
): LayoutResult {
  try {
    logger.info('Adjusting layout for new block', { newBlockId })

    const blocksCopy: Record<string, BlockState> = JSON.parse(JSON.stringify(blocks))

    adjustForNewBlockInternal(blocksCopy, edges, newBlockId, options)

    if (!options.preservePositions) {
      compactHorizontally(blocksCopy, edges)
    }

    return {
      blocks: blocksCopy,
      success: true,
    }
  } catch (error) {
    logger.error('Failed to adjust layout for new block', { newBlockId, error })
    return {
      blocks,
      success: false,
      error: error instanceof Error ? error.message : 'Unknown error',
    }
  }
}

export type { LayoutOptions, LayoutResult, AdjustmentOptions, Edge, Loop, Parallel }
export { getBlockDimensions, isContainerType } from './utils'
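A usage sketch for the public entry point (abbreviated block shapes, typed as `any` for brevity). Note the deep copy: the caller's blocks are never mutated, and on failure the original blocks come back with `success: false`.

```ts
import { applyAutoLayout } from '@/lib/workflows/autolayout'

const blocks: Record<string, any> = {
  start: { id: 'start', type: 'starter', position: { x: 0, y: 0 }, data: {} },
  step: { id: 'step', type: 'agent', position: { x: 0, y: 0 }, data: {} },
}
const edges = [{ id: 'e1', source: 'start', target: 'step' }]

const result = applyAutoLayout(blocks, edges)
if (result.success) {
  // 'step' sits one layer to the right of 'start' after layering + positioning
  console.log(result.blocks.step.position.x > result.blocks.start.position.x) // true
}
```
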
apps/sim/lib/workflows/autolayout/layering.ts (new file, 94 lines)
@@ -0,0 +1,94 @@
import { createLogger } from '@/lib/logs/console/logger'
import type { BlockState } from '@/stores/workflows/workflow/types'
import type { Edge, GraphNode } from './types'
import { getBlockDimensions, isStarterBlock } from './utils'

const logger = createLogger('AutoLayout:Layering')

export function assignLayers(
  blocks: Record<string, BlockState>,
  edges: Edge[]
): Map<string, GraphNode> {
  const nodes = new Map<string, GraphNode>()

  for (const [id, block] of Object.entries(blocks)) {
    nodes.set(id, {
      id,
      block,
      dimensions: getBlockDimensions(block),
      incoming: new Set(),
      outgoing: new Set(),
      layer: 0,
      position: { ...block.position },
    })
  }

  for (const edge of edges) {
    const sourceNode = nodes.get(edge.source)
    const targetNode = nodes.get(edge.target)

    if (sourceNode && targetNode) {
      sourceNode.outgoing.add(edge.target)
      targetNode.incoming.add(edge.source)
    }
  }

  const starterNodes = Array.from(nodes.values()).filter(
    (node) => node.incoming.size === 0 || isStarterBlock(node.block)
  )

  if (starterNodes.length === 0 && nodes.size > 0) {
    const firstNode = Array.from(nodes.values())[0]
    starterNodes.push(firstNode)
    logger.warn('No starter blocks found, using first block as starter', { blockId: firstNode.id })
  }

  const visited = new Set<string>()
  const queue: Array<{ nodeId: string; layer: number }> = []

  for (const starter of starterNodes) {
    starter.layer = 0
    queue.push({ nodeId: starter.id, layer: 0 })
  }

  while (queue.length > 0) {
    const { nodeId, layer } = queue.shift()!

    if (visited.has(nodeId)) {
      continue
    }

    visited.add(nodeId)
    const node = nodes.get(nodeId)!
    node.layer = Math.max(node.layer, layer)

    for (const targetId of node.outgoing) {
      const targetNode = nodes.get(targetId)
      if (targetNode) {
        queue.push({ nodeId: targetId, layer: layer + 1 })
      }
    }
  }

  for (const node of nodes.values()) {
    if (!visited.has(node.id)) {
      logger.debug('Isolated node detected, assigning to layer 0', { blockId: node.id })
      node.layer = 0
    }
  }

  return nodes
}

export function groupByLayer(nodes: Map<string, GraphNode>): Map<number, GraphNode[]> {
  const layers = new Map<number, GraphNode[]>()

  for (const node of nodes.values()) {
    if (!layers.has(node.layer)) {
      layers.set(node.layer, [])
    }
    layers.get(node.layer)!.push(node)
  }

  return layers
}
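A small sketch of the layering pass (shapes abbreviated): each block's layer is its BFS distance from the nearest starter block.

```ts
import { assignLayers, groupByLayer } from '@/lib/workflows/autolayout/layering'

const blocks: Record<string, any> = {
  a: { id: 'a', type: 'starter', position: { x: 0, y: 0 } },
  b: { id: 'b', type: 'agent', position: { x: 0, y: 0 } },
  c: { id: 'c', type: 'agent', position: { x: 0, y: 0 } },
}
const edges = [
  { id: 'e1', source: 'a', target: 'b' },
  { id: 'e2', source: 'b', target: 'c' },
]

const nodes = assignLayers(blocks, edges)
// a -> layer 0, b -> layer 1, c -> layer 2
const layers = groupByLayer(nodes)
// Map { 0 => [a], 1 => [b], 2 => [c] }
```
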
apps/sim/lib/workflows/autolayout/positioning.ts (new file, 124 lines)
@@ -0,0 +1,124 @@
import { createLogger } from '@/lib/logs/console/logger'
import type { GraphNode, LayoutOptions } from './types'
import { boxesOverlap, createBoundingBox } from './utils'

const logger = createLogger('AutoLayout:Positioning')

const DEFAULT_HORIZONTAL_SPACING = 550
const DEFAULT_VERTICAL_SPACING = 200
const DEFAULT_PADDING = { x: 150, y: 150 }

export function calculatePositions(
  layers: Map<number, GraphNode[]>,
  options: LayoutOptions = {}
): void {
  const horizontalSpacing = options.horizontalSpacing ?? DEFAULT_HORIZONTAL_SPACING
  const verticalSpacing = options.verticalSpacing ?? DEFAULT_VERTICAL_SPACING
  const padding = options.padding ?? DEFAULT_PADDING
  const alignment = options.alignment ?? 'center'

  const layerNumbers = Array.from(layers.keys()).sort((a, b) => a - b)

  // Calculate positions for each layer
  for (const layerNum of layerNumbers) {
    const nodesInLayer = layers.get(layerNum)!
    const xPosition = padding.x + layerNum * horizontalSpacing

    // Calculate total height needed for this layer
    const totalHeight = nodesInLayer.reduce(
      (sum, node, idx) => sum + node.dimensions.height + (idx > 0 ? verticalSpacing : 0),
      0
    )

    // Start Y position based on alignment
    let yOffset: number
    switch (alignment) {
      case 'start':
        yOffset = padding.y
        break
      case 'center':
        // Center the layer vertically
        yOffset = Math.max(padding.y, 300 - totalHeight / 2)
        break
      case 'end':
        yOffset = 600 - totalHeight - padding.y
        break
      default:
        yOffset = padding.y
        break
    }

    // Position each node in the layer
    for (const node of nodesInLayer) {
      node.position = {
        x: xPosition,
        y: yOffset,
      }

      yOffset += node.dimensions.height + verticalSpacing
    }
  }

  // Resolve any overlaps
  resolveOverlaps(Array.from(layers.values()).flat(), verticalSpacing)
}

function resolveOverlaps(nodes: GraphNode[], verticalSpacing: number): void {
  const MAX_ITERATIONS = 20
  let iteration = 0
  let hasOverlap = true

  while (hasOverlap && iteration < MAX_ITERATIONS) {
    hasOverlap = false
    iteration++

    // Sort nodes by position for consistent processing
    const sortedNodes = [...nodes].sort((a, b) => {
      if (a.layer !== b.layer) return a.layer - b.layer
      return a.position.y - b.position.y
    })

    for (let i = 0; i < sortedNodes.length; i++) {
      for (let j = i + 1; j < sortedNodes.length; j++) {
        const node1 = sortedNodes[i]
        const node2 = sortedNodes[j]

        const box1 = createBoundingBox(node1.position, node1.dimensions)
        const box2 = createBoundingBox(node2.position, node2.dimensions)

        // Check for overlap with margin
        if (boxesOverlap(box1, box2, 30)) {
          hasOverlap = true

          // If in same layer, shift vertically
          if (node1.layer === node2.layer) {
            const totalHeight = node1.dimensions.height + node2.dimensions.height + verticalSpacing
            const midpoint = (node1.position.y + node2.position.y) / 2

            node1.position.y = midpoint - node1.dimensions.height / 2 - verticalSpacing / 2
            node2.position.y = midpoint + node2.dimensions.height / 2 + verticalSpacing / 2
          } else {
            // Different layers - shift the later one down
            const requiredSpace = box1.y + box1.height + verticalSpacing
            if (node2.position.y < requiredSpace) {
              node2.position.y = requiredSpace
            }
          }

          logger.debug('Resolved overlap between blocks', {
            block1: node1.id,
            block2: node2.id,
            sameLayer: node1.layer === node2.layer,
            iteration,
          })
        }
      }
    }
  }

  if (hasOverlap) {
    logger.warn('Could not fully resolve all overlaps after max iterations', {
      iterations: MAX_ITERATIONS,
    })
  }
}
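A positioning sketch built on the layering helpers (default options; abbreviated shapes): layer n lands at x = 150 + n * 550, and 'center' alignment starts each column at max(150, 300 - totalHeight / 2).

```ts
import { assignLayers, groupByLayer } from '@/lib/workflows/autolayout/layering'
import { calculatePositions } from '@/lib/workflows/autolayout/positioning'

const blocks: Record<string, any> = {
  a: { id: 'a', type: 'starter', position: { x: 0, y: 0 } },
  b: { id: 'b', type: 'agent', position: { x: 0, y: 0 } },
}
const nodes = assignLayers(blocks, [{ id: 'e1', source: 'a', target: 'b' }])
calculatePositions(groupByLayer(nodes))

console.log(nodes.get('a')!.position) // { x: 150, y: 250 }
console.log(nodes.get('b')!.position) // { x: 700, y: 250 } (100px-tall default blocks)
```
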
apps/sim/lib/workflows/autolayout/types.ts (new file, 68 lines)
@@ -0,0 +1,68 @@
import type { BlockState, Position } from '@/stores/workflows/workflow/types'

export interface LayoutOptions {
  horizontalSpacing?: number
  verticalSpacing?: number
  padding?: { x: number; y: number }
  alignment?: 'start' | 'center' | 'end'
}

export interface LayoutResult {
  blocks: Record<string, BlockState>
  success: boolean
  error?: string
}

export interface Edge {
  id: string
  source: string
  target: string
  sourceHandle?: string | null
  targetHandle?: string | null
}

export interface Loop {
  id: string
  nodes: string[]
  iterations: number
  loopType: 'for' | 'forEach'
}

export interface Parallel {
  id: string
  nodes: string[]
  count?: number
  parallelType?: 'count' | 'collection'
}

export interface BlockDimensions {
  width: number
  height: number
}

export interface BoundingBox {
  x: number
  y: number
  width: number
  height: number
}

export interface LayerInfo {
  layer: number
  order: number
}

export interface GraphNode {
  id: string
  block: BlockState
  dimensions: BlockDimensions
  incoming: Set<string>
  outgoing: Set<string>
  layer: number
  position: Position
}

export interface AdjustmentOptions extends LayoutOptions {
  preservePositions?: boolean
  minimalShift?: boolean
}
apps/sim/lib/workflows/autolayout/utils.ts (new file, 74 lines)
@@ -0,0 +1,74 @@
import type { BlockState } from '@/stores/workflows/workflow/types'
import type { BlockDimensions, BoundingBox } from './types'

export const DEFAULT_BLOCK_WIDTH = 350
export const DEFAULT_BLOCK_WIDTH_WIDE = 480
export const DEFAULT_BLOCK_HEIGHT = 100
export const DEFAULT_CONTAINER_WIDTH = 500
export const DEFAULT_CONTAINER_HEIGHT = 300

export function isContainerType(blockType: string): boolean {
  return blockType === 'loop' || blockType === 'parallel'
}

export function getBlockDimensions(block: BlockState): BlockDimensions {
  if (isContainerType(block.type)) {
    return {
      width: block.data?.width ? Math.max(block.data.width, 400) : DEFAULT_CONTAINER_WIDTH,
      height: block.data?.height ? Math.max(block.data.height, 200) : DEFAULT_CONTAINER_HEIGHT,
    }
  }

  return {
    width: block.isWide ? DEFAULT_BLOCK_WIDTH_WIDE : DEFAULT_BLOCK_WIDTH,
    height: Math.max(block.height || DEFAULT_BLOCK_HEIGHT, DEFAULT_BLOCK_HEIGHT),
  }
}

export function createBoundingBox(
  position: { x: number; y: number },
  dimensions: BlockDimensions
): BoundingBox {
  return {
    x: position.x,
    y: position.y,
    width: dimensions.width,
    height: dimensions.height,
  }
}

export function boxesOverlap(box1: BoundingBox, box2: BoundingBox, margin = 0): boolean {
  return !(
    box1.x + box1.width + margin <= box2.x ||
    box2.x + box2.width + margin <= box1.x ||
    box1.y + box1.height + margin <= box2.y ||
    box2.y + box2.height + margin <= box1.y
  )
}

export function getBlocksByParent(blocks: Record<string, BlockState>): {
  root: string[]
  children: Map<string, string[]>
} {
  const root: string[] = []
  const children = new Map<string, string[]>()

  for (const [id, block] of Object.entries(blocks)) {
    const parentId = block.data?.parentId

    if (!parentId) {
      root.push(id)
    } else {
      if (!children.has(parentId)) {
        children.set(parentId, [])
      }
      children.get(parentId)!.push(id)
    }
  }

  return { root, children }
}

export function isStarterBlock(block: BlockState): boolean {
  return block.type === 'starter' || block.type === 'webhook' || block.type === 'schedule'
}
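A quick sketch of the overlap test: the `margin` parameter effectively widens both boxes, so "overlap" really means "closer than `margin` pixels".

```ts
import { boxesOverlap, createBoundingBox } from '@/lib/workflows/autolayout/utils'

// Two 100px-wide boxes with a 30px horizontal gap between them:
const a = createBoundingBox({ x: 0, y: 0 }, { width: 100, height: 100 })
const b = createBoundingBox({ x: 130, y: 0 }, { width: 100, height: 100 })

console.log(boxesOverlap(a, b)) // false - the 30px gap keeps them apart
console.log(boxesOverlap(a, b, 50)) // true - they are within the 50px margin
```
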
@@ -1,3 +1,5 @@
import type { Edge } from 'reactflow'
import { v4 as uuidv4 } from 'uuid'
import { createLogger } from '@/lib/logs/console/logger'
import { mergeSubblockState } from '@/stores/workflows/utils'
import type { BlockState, WorkflowState } from '@/stores/workflows/workflow/types'
@@ -5,6 +7,82 @@ import type { BlockWithDiff } from './types'

const logger = createLogger('WorkflowDiffEngine')

// Helper function to check if a block has changed
function hasBlockChanged(currentBlock: BlockState, proposedBlock: BlockState): boolean {
  // Compare key fields that indicate a change
  if (currentBlock.type !== proposedBlock.type) return true
  if (currentBlock.name !== proposedBlock.name) return true
  if (currentBlock.enabled !== proposedBlock.enabled) return true
  if (currentBlock.triggerMode !== proposedBlock.triggerMode) return true

  // Compare subBlocks
  const currentSubKeys = Object.keys(currentBlock.subBlocks || {})
  const proposedSubKeys = Object.keys(proposedBlock.subBlocks || {})

  if (currentSubKeys.length !== proposedSubKeys.length) return true

  for (const key of currentSubKeys) {
    if (!proposedSubKeys.includes(key)) return true
    const currentSub = currentBlock.subBlocks[key]
    const proposedSub = proposedBlock.subBlocks?.[key]
    if (!proposedSub) return true
    if (JSON.stringify(currentSub.value) !== JSON.stringify(proposedSub.value)) return true
  }

  return false
}

// Helper function to compute field differences between blocks
function computeFieldDiff(
  currentBlock: BlockState,
  proposedBlock: BlockState
): {
  changedFields: string[]
  unchangedFields: string[]
} {
  const changedFields: string[] = []
  const unchangedFields: string[] = []

  // Check basic fields
  const fieldsToCheck = ['type', 'name', 'enabled', 'triggerMode', 'horizontalHandles', 'isWide']
  for (const field of fieldsToCheck) {
    const currentValue = (currentBlock as any)[field]
    const proposedValue = (proposedBlock as any)[field]
    if (JSON.stringify(currentValue) !== JSON.stringify(proposedValue)) {
      changedFields.push(field)
    } else if (currentValue !== undefined) {
      unchangedFields.push(field)
    }
  }

  // Check subBlocks - use just the key name for UI compatibility
  const currentSubKeys = Object.keys(currentBlock.subBlocks || {})
  const proposedSubKeys = Object.keys(proposedBlock.subBlocks || {})
  const allSubKeys = new Set([...currentSubKeys, ...proposedSubKeys])

  for (const key of allSubKeys) {
    const currentSub = currentBlock.subBlocks?.[key]
    const proposedSub = proposedBlock.subBlocks?.[key]

    if (!currentSub && proposedSub) {
      // New subblock
      changedFields.push(key)
    } else if (currentSub && !proposedSub) {
      // Deleted subblock
      changedFields.push(key)
    } else if (currentSub && proposedSub) {
      // Check if value changed
      if (JSON.stringify(currentSub.value) !== JSON.stringify(proposedSub.value)) {
        changedFields.push(key)
      } else {
        unchangedFields.push(key)
      }
    }
  }

  return { changedFields, unchangedFields }
}
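To make the comparison concrete, a toy example (abbreviated `BlockState` shapes typed as `any`, and same-module scope assumed since these helpers are not exported). Only the model value differs between the two blocks:

```ts
const current: any = {
  type: 'agent',
  name: 'Agent 1',
  enabled: true,
  subBlocks: { model: { id: 'model', type: 'dropdown', value: 'gpt-4o' } },
}
const proposed: any = {
  ...current,
  subBlocks: { model: { id: 'model', type: 'dropdown', value: 'claude-sonnet-4.5' } },
}

hasBlockChanged(current, proposed) // true - the model value differs
computeFieldDiff(current, proposed)
// => { changedFields: ['model'], unchangedFields: ['type', 'name', 'enabled'] }
```
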

export interface DiffMetadata {
  source: string
  timestamp: number
@@ -190,6 +268,351 @@ export class WorkflowDiffEngine {
    }
  }

  /**
   * Create a diff from a WorkflowState object directly (more efficient than YAML)
   * This follows the same logic as sim-agent's YamlDiffCreate handler
   */
  async createDiffFromWorkflowState(
    proposedState: WorkflowState,
    diffAnalysis?: DiffAnalysis
  ): Promise<DiffResult & { diff?: WorkflowDiff }> {
    try {
      logger.info('WorkflowDiffEngine.createDiffFromWorkflowState called with:', {
        blockCount: Object.keys(proposedState.blocks || {}).length,
        edgeCount: proposedState.edges?.length || 0,
        hasDiffAnalysis: !!diffAnalysis,
      })

      // Get current workflow state for comparison
      const { useWorkflowStore } = await import('@/stores/workflows/workflow/store')
      const currentWorkflowState = useWorkflowStore.getState().getWorkflowState()

      // Merge subblock values from subblock store to ensure manual edits are included
      let mergedBaseline: WorkflowState = currentWorkflowState
      try {
        mergedBaseline = {
          ...currentWorkflowState,
          blocks: mergeSubblockState(currentWorkflowState.blocks),
        }
        logger.info('Merged subblock values into baseline for diff creation', {
          blockCount: Object.keys(mergedBaseline.blocks || {}).length,
        })
      } catch (mergeError) {
        logger.warn('Failed to merge subblock values into baseline; proceeding with raw state', {
          error: mergeError instanceof Error ? mergeError.message : String(mergeError),
        })
      }

      // Build a map of existing blocks by type:name for matching
      const existingBlockMap: Record<string, { id: string; block: BlockState }> = {}
      for (const [id, block] of Object.entries(mergedBaseline.blocks)) {
        const key = `${block.type}:${block.name}`
        existingBlockMap[key] = { id, block }
      }

      // Create ID mapping - preserve existing IDs where blocks match by type:name
      const idMap: Record<string, string> = {}
      const finalBlocks: Record<string, BlockState & BlockWithDiff> = {}

      // First pass: build ID mappings
      for (const [proposedId, proposedBlock] of Object.entries(proposedState.blocks)) {
        const key = `${proposedBlock.type}:${proposedBlock.name}`

        // Check if this block exists in current state by type:name
        if (existingBlockMap[key]) {
          // Preserve existing ID
          idMap[proposedId] = existingBlockMap[key].id
        } else {
          // Generate new ID for truly new blocks
          const newId = uuidv4()
          idMap[proposedId] = newId
        }
      }

      // Second pass: build final blocks with mapped IDs
      for (const [proposedId, proposedBlock] of Object.entries(proposedState.blocks)) {
        const finalId = idMap[proposedId]
        const key = `${proposedBlock.type}:${proposedBlock.name}`
        const existingBlock = existingBlockMap[key]?.block

        // Merge with existing block if found, otherwise use proposed
        const finalBlock: BlockState & BlockWithDiff = existingBlock
          ? {
              ...existingBlock,
              ...proposedBlock,
              id: finalId,
              // Preserve position from proposed or fallback to existing
              position: proposedBlock.position || existingBlock.position,
            }
          : {
              ...proposedBlock,
              id: finalId,
            }

        finalBlocks[finalId] = finalBlock
      }

      // Map edges with new IDs and standardized handles
      const edgeMap = new Map<string, Edge>()

      proposedState.edges.forEach((edge) => {
        const source = idMap[edge.source] || edge.source
        const target = idMap[edge.target] || edge.target
        const sourceHandle = edge.sourceHandle || 'source'
        const targetHandle = edge.targetHandle || 'target'

        // Create a unique key for deduplication
        const edgeKey = `${source}-${sourceHandle}-${target}-${targetHandle}`

        // Only add if we haven't seen this edge combination before
        if (!edgeMap.has(edgeKey)) {
          edgeMap.set(edgeKey, {
            ...edge,
            id: uuidv4(), // Use UUID for unique edge IDs
            source,
            target,
            sourceHandle,
            targetHandle,
            type: edge.type || 'workflowEdge',
          })
        }
      })

      const finalEdges: Edge[] = Array.from(edgeMap.values())

      // Build final proposed state
      const finalProposedState: WorkflowState = {
        blocks: finalBlocks,
        edges: finalEdges,
        loops: proposedState.loops || {},
        parallels: proposedState.parallels || {},
        lastSaved: Date.now(),
      }

      // Ensure loops and parallels are generated
      if (Object.keys(finalProposedState.loops).length === 0) {
        const { generateLoopBlocks } = await import('@/stores/workflows/workflow/utils')
        finalProposedState.loops = generateLoopBlocks(finalProposedState.blocks)
      }
      if (Object.keys(finalProposedState.parallels).length === 0) {
        const { generateParallelBlocks } = await import('@/stores/workflows/workflow/utils')
        finalProposedState.parallels = generateParallelBlocks(finalProposedState.blocks)
      }

      // Apply autolayout to the proposed state
      logger.info('Applying autolayout to proposed workflow state')
      try {
        const { applyAutoLayout: applyNativeAutoLayout } = await import(
          '@/lib/workflows/autolayout'
        )

        const autoLayoutOptions = {
          horizontalSpacing: 550,
          verticalSpacing: 200,
          padding: {
            x: 150,
            y: 150,
          },
          alignment: 'center' as const,
        }

        const layoutResult = applyNativeAutoLayout(
          finalBlocks,
          finalProposedState.edges,
          finalProposedState.loops || {},
          finalProposedState.parallels || {},
          autoLayoutOptions
        )

        if (layoutResult.success && layoutResult.blocks) {
          Object.entries(layoutResult.blocks).forEach(([id, layoutBlock]) => {
            if (finalBlocks[id]) {
              finalBlocks[id].position = layoutBlock.position
            }
          })
          logger.info('Successfully applied autolayout to proposed state', {
            blocksLayouted: Object.keys(layoutResult.blocks).length,
          })
        } else {
          logger.warn('Autolayout failed, using default positions', {
            error: layoutResult.error,
          })
        }
      } catch (layoutError) {
        logger.warn('Error applying autolayout, using default positions', {
          error: layoutError instanceof Error ? layoutError.message : String(layoutError),
        })
      }

      // Compute diff analysis if not provided
      let computed = diffAnalysis
      if (!computed) {
        // Generate diff analysis between current and proposed states
        const currentIds = new Set(Object.keys(mergedBaseline.blocks))
        const proposedIds = new Set(Object.keys(finalBlocks))

        const newBlocks: string[] = []
        const editedBlocks: string[] = []
        const deletedBlocks: string[] = []

        // Find new and edited blocks
        for (const [id, block] of Object.entries(finalBlocks)) {
          if (!currentIds.has(id)) {
            newBlocks.push(id)
          } else {
            // Check if block was edited by comparing key fields
            const currentBlock = mergedBaseline.blocks[id]
            if (hasBlockChanged(currentBlock, block)) {
              editedBlocks.push(id)
            }
          }
        }

        // Find deleted blocks
        for (const id of currentIds) {
          if (!proposedIds.has(id)) {
            deletedBlocks.push(id)
          }
        }

        // Compute field diffs for edited blocks
        const fieldDiffs: Record<string, { changed_fields: string[]; unchanged_fields: string[] }> =
          {}
        for (const id of editedBlocks) {
          const currentBlock = mergedBaseline.blocks[id]
          const proposedBlock = finalBlocks[id]
          const { changedFields, unchangedFields } = computeFieldDiff(currentBlock, proposedBlock)
          if (changedFields.length > 0) {
            fieldDiffs[id] = {
              changed_fields: changedFields,
              unchanged_fields: unchangedFields,
            }
          }
        }

        // Compute edge diffs
        const currentEdgeSet = new Set<string>()
        const proposedEdgeSet = new Set<string>()

        // Create edge identifiers for current state (using sim-agent format)
        mergedBaseline.edges.forEach((edge: any) => {
          const edgeId = `${edge.source}-${edge.sourceHandle || 'source'}-${edge.target}-${edge.targetHandle || 'target'}`
          currentEdgeSet.add(edgeId)
        })

        // Create edge identifiers for proposed state
        finalEdges.forEach((edge) => {
          const edgeId = `${edge.source}-${edge.sourceHandle || 'source'}-${edge.target}-${edge.targetHandle || 'target'}`
          proposedEdgeSet.add(edgeId)
        })

        // Classify edges
        const newEdges: string[] = []
        const deletedEdges: string[] = []
        const unchangedEdges: string[] = []

        // Find new edges (in proposed but not current)
        proposedEdgeSet.forEach((edgeId) => {
          if (!currentEdgeSet.has(edgeId)) {
            newEdges.push(edgeId)
          } else {
            unchangedEdges.push(edgeId)
          }
        })

        // Find deleted edges (in current but not proposed)
        currentEdgeSet.forEach((edgeId) => {
          if (!proposedEdgeSet.has(edgeId)) {
            deletedEdges.push(edgeId)
          }
        })

        computed = {
          new_blocks: newBlocks,
          edited_blocks: editedBlocks,
          deleted_blocks: deletedBlocks,
          field_diffs: Object.keys(fieldDiffs).length > 0 ? fieldDiffs : undefined,
          edge_diff: {
            new_edges: newEdges,
            deleted_edges: deletedEdges,
            unchanged_edges: unchangedEdges,
          },
        }
      }

      // Apply diff markers to blocks
      if (computed) {
        for (const id of computed.new_blocks || []) {
          if (finalBlocks[id]) {
            finalBlocks[id].is_diff = 'new'
          }
        }
        for (const id of computed.edited_blocks || []) {
          if (finalBlocks[id]) {
            finalBlocks[id].is_diff = 'edited'

            // Also mark specific subblocks that changed
            if (computed.field_diffs?.[id]) {
              const fieldDiff = computed.field_diffs[id]
              const block = finalBlocks[id]

              // Apply diff markers to changed subblocks
              for (const changedField of fieldDiff.changed_fields) {
                if (block.subBlocks?.[changedField]) {
                  // Add a diff marker to the subblock itself
                  ;(block.subBlocks[changedField] as any).is_diff = 'changed'
                }
              }
            }
          }
        }
        // Note: We don't remove deleted blocks from finalBlocks, just mark them
      }

      // Store the diff
      this.currentDiff = {
        proposedState: finalProposedState,
        diffAnalysis: computed,
        metadata: {
          source: 'workflow_state',
          timestamp: Date.now(),
        },
      }

      logger.info('Successfully created diff from workflow state', {
        blockCount: Object.keys(finalProposedState.blocks).length,
        edgeCount: finalProposedState.edges.length,
        hasLoops: Object.keys(finalProposedState.loops || {}).length > 0,
        hasParallels: Object.keys(finalProposedState.parallels || {}).length > 0,
        newBlocks: computed?.new_blocks?.length || 0,
        editedBlocks: computed?.edited_blocks?.length || 0,
        deletedBlocks: computed?.deleted_blocks?.length || 0,
        newEdges: computed?.edge_diff?.new_edges?.length || 0,
        deletedEdges: computed?.edge_diff?.deleted_edges?.length || 0,
        unchangedEdges: computed?.edge_diff?.unchanged_edges?.length || 0,
      })

      if (computed?.edge_diff?.deleted_edges && computed.edge_diff.deleted_edges.length > 0) {
        logger.info('Deleted edges detected:', {
          deletedEdges: computed.edge_diff.deleted_edges,
        })
      }

      return {
        success: true,
        diff: this.currentDiff,
      }
    } catch (error) {
      logger.error('Failed to create diff from workflow state:', error)
      return {
        success: false,
        errors: [
          error instanceof Error ? error.message : 'Failed to create diff from workflow state',
        ],
      }
    }
  }

  /**
   * Merge new YAML content into existing diff
   * Used for cumulative updates within the same message

apps/sim/lib/workflows/json-sanitizer.ts (new file, 370 lines)
@@ -0,0 +1,370 @@
import type { Edge } from 'reactflow'
import type {
  BlockState,
  Loop,
  Parallel,
  Position,
  WorkflowState,
} from '@/stores/workflows/workflow/types'

/**
 * Sanitized workflow state for copilot (removes all UI-specific data)
 */
export interface CopilotWorkflowState {
  blocks: Record<string, CopilotBlockState>
  edges: CopilotEdge[]
  loops: Record<string, Loop>
  parallels: Record<string, Parallel>
}

/**
 * Block state for copilot (no positions, no UI dimensions)
 */
export interface CopilotBlockState {
  id: string
  type: string
  name: string
  subBlocks: BlockState['subBlocks']
  outputs: BlockState['outputs']
  enabled: boolean
  advancedMode?: boolean
  triggerMode?: boolean
  // Keep semantic data only (no width/height)
  data?: {
    parentId?: string
    extent?: 'parent'
    loopType?: 'for' | 'forEach'
    parallelType?: 'collection' | 'count'
    collection?: any
    count?: number
  }
}

/**
 * Edge state for copilot (only semantic connection data)
 */
export interface CopilotEdge {
  id: string
  source: string
  target: string
  sourceHandle?: string
  targetHandle?: string
}

/**
 * Export workflow state (includes positions but removes secrets)
 */
export interface ExportWorkflowState {
  version: string
  exportedAt: string
  state: {
    blocks: Record<string, BlockState>
    edges: Edge[]
    loops: Record<string, Loop>
    parallels: Record<string, Parallel>
  }
}

/**
 * Sanitize workflow state for copilot by removing all UI-specific data
 * Copilot doesn't need to see positions, dimensions, or visual styling
 */
export function sanitizeForCopilot(state: WorkflowState): CopilotWorkflowState {
  const sanitizedBlocks: Record<string, CopilotBlockState> = {}

  // Sanitize blocks - remove position and UI-only fields
  Object.entries(state.blocks).forEach(([blockId, block]) => {
    const sanitizedData: CopilotBlockState['data'] = block.data
      ? {
          // Keep semantic fields only
          ...(block.data.parentId !== undefined && { parentId: block.data.parentId }),
          ...(block.data.extent !== undefined && { extent: block.data.extent }),
          ...(block.data.loopType !== undefined && { loopType: block.data.loopType }),
          ...(block.data.parallelType !== undefined && { parallelType: block.data.parallelType }),
          ...(block.data.collection !== undefined && { collection: block.data.collection }),
          ...(block.data.count !== undefined && { count: block.data.count }),
        }
      : undefined

    sanitizedBlocks[blockId] = {
      id: block.id,
      type: block.type,
      name: block.name,
      subBlocks: block.subBlocks,
      outputs: block.outputs,
      enabled: block.enabled,
      ...(block.advancedMode !== undefined && { advancedMode: block.advancedMode }),
      ...(block.triggerMode !== undefined && { triggerMode: block.triggerMode }),
      ...(sanitizedData && Object.keys(sanitizedData).length > 0 && { data: sanitizedData }),
    }
  })

  // Sanitize edges - keep only semantic connection data
  const sanitizedEdges: CopilotEdge[] = state.edges.map((edge) => ({
    id: edge.id,
    source: edge.source,
    target: edge.target,
    ...(edge.sourceHandle !== undefined &&
      edge.sourceHandle !== null && { sourceHandle: edge.sourceHandle }),
    ...(edge.targetHandle !== undefined &&
      edge.targetHandle !== null && { targetHandle: edge.targetHandle }),
  }))

  return {
    blocks: sanitizedBlocks,
    edges: sanitizedEdges,
    loops: state.loops || {},
    parallels: state.parallels || {},
  }
}
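A sanitization sketch (the input block is abbreviated; a real `WorkflowState` block carries more UI fields):

```ts
import { sanitizeForCopilot } from '@/lib/workflows/json-sanitizer'

const state: any = {
  blocks: {
    b1: {
      id: 'b1', type: 'agent', name: 'Agent 1', enabled: true,
      position: { x: 700, y: 300 }, isWide: true, height: 120,
      subBlocks: {}, outputs: {},
    },
  },
  edges: [],
  loops: {},
  parallels: {},
}

const copilotState = sanitizeForCopilot(state)
// copilotState.blocks.b1 keeps id, type, name, subBlocks, outputs, enabled
// (plus advancedMode/triggerMode/data when set) - position, isWide, and
// height are gone, which is exactly what the copilot should not see.
```
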

/**
 * Sanitize workflow state for export by removing secrets but keeping positions
 * Users need positions to restore the visual layout when importing
 */
export function sanitizeForExport(state: WorkflowState): ExportWorkflowState {
  // Deep clone to avoid mutating original state
  const clonedState = JSON.parse(
    JSON.stringify({
      blocks: state.blocks,
      edges: state.edges,
      loops: state.loops || {},
      parallels: state.parallels || {},
    })
  )

  // Remove sensitive data from subblocks
  Object.values(clonedState.blocks).forEach((block: any) => {
    if (block.subBlocks) {
      Object.entries(block.subBlocks).forEach(([key, subBlock]: [string, any]) => {
        // Clear OAuth credentials and API keys using regex patterns
        if (
          /credential|oauth|api[_-]?key|token|secret|auth|password|bearer/i.test(key) ||
          /credential|oauth|api[_-]?key|token|secret|auth|password|bearer/i.test(
            subBlock.type || ''
          ) ||
          (typeof subBlock.value === 'string' &&
            /credential|oauth|api[_-]?key|token|secret|auth|password|bearer/i.test(subBlock.value))
        ) {
          subBlock.value = ''
        }
      })
    }

    // Also clear from data field if present
    if (block.data) {
      Object.entries(block.data).forEach(([key, value]: [string, any]) => {
        if (/credential|oauth|api[_-]?key|token|secret|auth|password|bearer/i.test(key)) {
          block.data[key] = ''
        }
      })
    }
  })

  return {
    version: '1.0',
    exportedAt: new Date().toISOString(),
    state: clonedState,
  }
}
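A redaction sketch: a subblock whose key matches the credential regex is blanked, while the block position survives so the layout can be restored on import.

```ts
import { sanitizeForExport } from '@/lib/workflows/json-sanitizer'

const stateWithSecrets: any = {
  blocks: {
    b1: {
      id: 'b1', type: 'agent', name: 'Agent 1', enabled: true,
      position: { x: 150, y: 300 },
      subBlocks: { apiKey: { id: 'apiKey', type: 'short-input', value: 'sk-secret' } },
      outputs: {},
    },
  },
  edges: [],
  loops: {},
  parallels: {},
}

const exported = sanitizeForExport(stateWithSecrets)
// 'apiKey' matches /api[_-]?key/i, so:
// exported.state.blocks.b1.subBlocks.apiKey.value === ''
// exported.state.blocks.b1.position is untouched; exported.version === '1.0'
```
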

/**
 * Validate that edges reference existing blocks
 */
export function validateEdges(
  blocks: Record<string, any>,
  edges: CopilotEdge[]
): {
  valid: boolean
  errors: string[]
} {
  const errors: string[] = []
  const blockIds = new Set(Object.keys(blocks))

  edges.forEach((edge, index) => {
    if (!blockIds.has(edge.source)) {
      errors.push(`Edge ${index} references non-existent source block: ${edge.source}`)
    }
    if (!blockIds.has(edge.target)) {
      errors.push(`Edge ${index} references non-existent target block: ${edge.target}`)
    }
  })

  return {
    valid: errors.length === 0,
    errors,
  }
}

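A dangling-edge sketch:

```ts
import { validateEdges } from '@/lib/workflows/json-sanitizer'

const check = validateEdges({ a: {}, b: {} }, [
  { id: 'e1', source: 'a', target: 'b' },
  { id: 'e2', source: 'b', target: 'ghost' },
])

console.log(check.valid) // false
console.log(check.errors[0]) // 'Edge 1 references non-existent target block: ghost'
```
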
/**
|
||||
* Generate position for a new block based on its connections
|
||||
* Uses compact horizontal spacing and intelligent positioning
|
||||
*/
|
||||
export function generatePositionForNewBlock(
|
||||
blockId: string,
|
||||
edges: CopilotEdge[],
|
||||
existingBlocks: Record<string, BlockState>
|
||||
): Position {
|
||||
const HORIZONTAL_SPACING = 550
|
||||
const VERTICAL_SPACING = 200
|
||||
|
||||
const incomingEdges = edges.filter((e) => e.target === blockId)
|
||||
|
||||
if (incomingEdges.length > 0) {
|
||||
const sourceBlocks = incomingEdges
|
||||
.map((e) => existingBlocks[e.source])
|
||||
.filter((b) => b !== undefined)
|
||||
|
||||
if (sourceBlocks.length > 0) {
|
||||
const rightmostX = Math.max(...sourceBlocks.map((b) => b.position.x))
|
||||
const avgY = sourceBlocks.reduce((sum, b) => sum + b.position.y, 0) / sourceBlocks.length
|
||||
|
||||
return {
|
||||
x: rightmostX + HORIZONTAL_SPACING,
|
||||
y: avgY,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const outgoingEdges = edges.filter((e) => e.source === blockId)
|
||||
|
||||
if (outgoingEdges.length > 0) {
|
||||
const targetBlocks = outgoingEdges
|
||||
.map((e) => existingBlocks[e.target])
|
||||
.filter((b) => b !== undefined)
|
||||
|
||||
if (targetBlocks.length > 0) {
|
||||
const leftmostX = Math.min(...targetBlocks.map((b) => b.position.x))
|
||||
const avgY = targetBlocks.reduce((sum, b) => sum + b.position.y, 0) / targetBlocks.length
|
||||
|
||||
return {
|
||||
x: Math.max(150, leftmostX - HORIZONTAL_SPACING),
|
||||
y: avgY,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const existingPositions = Object.values(existingBlocks).map((b) => b.position)
|
||||
if (existingPositions.length > 0) {
|
||||
const maxY = Math.max(...existingPositions.map((p) => p.y))
|
||||
return {
|
||||
x: 150,
|
||||
y: maxY + VERTICAL_SPACING,
|
||||
}
|
||||
}
|
||||
|
||||
return { x: 150, y: 300 }
|
||||
}
|
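The spacing constants make placement easy to predict; a worked example (assuming the function is exported from the same module, with stand-in blocks cast loosely):

```typescript
import { generatePositionForNewBlock } from '@/lib/workflows/json-sanitizer'

// Two upstream blocks feed the new block; it lands 550px right of the
// rightmost source, vertically centered between them.
const existing = {
  s1: { position: { x: 150, y: 200 } },
  s2: { position: { x: 700, y: 400 } },
} as any
const edges = [
  { id: 'e1', source: 's1', target: 'new' },
  { id: 'e2', source: 's2', target: 'new' },
] as any

const pos = generatePositionForNewBlock('new', edges, existing)
// pos === { x: 700 + 550, y: (200 + 400) / 2 }  →  { x: 1250, y: 300 }
```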
/**
 * Merge sanitized copilot state with full UI state
 * Preserves positions for existing blocks, generates positions for new blocks
 */
export function mergeWithUIState(
  sanitized: CopilotWorkflowState,
  fullState: WorkflowState
): WorkflowState {
  const mergedBlocks: Record<string, BlockState> = {}
  const existingBlocks = fullState.blocks

  // Convert sanitized edges to full edges for position generation
  const sanitizedEdges = sanitized.edges

  // Process each block from sanitized state
  Object.entries(sanitized.blocks).forEach(([blockId, sanitizedBlock]) => {
    const existingBlock = existingBlocks[blockId]

    if (existingBlock) {
      // Existing block - preserve position and UI fields, update semantic fields
      mergedBlocks[blockId] = {
        ...existingBlock,
        // Update semantic fields from sanitized
        type: sanitizedBlock.type,
        name: sanitizedBlock.name,
        subBlocks: sanitizedBlock.subBlocks,
        outputs: sanitizedBlock.outputs,
        enabled: sanitizedBlock.enabled,
        advancedMode: sanitizedBlock.advancedMode,
        triggerMode: sanitizedBlock.triggerMode,
        // Merge data carefully
        data: sanitizedBlock.data
          ? {
              ...existingBlock.data,
              ...sanitizedBlock.data,
            }
          : existingBlock.data,
      }
    } else {
      // New block - generate position
      const position = generatePositionForNewBlock(blockId, sanitizedEdges, existingBlocks)

      mergedBlocks[blockId] = {
        id: sanitizedBlock.id,
        type: sanitizedBlock.type,
        name: sanitizedBlock.name,
        position,
        subBlocks: sanitizedBlock.subBlocks,
        outputs: sanitizedBlock.outputs,
        enabled: sanitizedBlock.enabled,
        horizontalHandles: true,
        isWide: false,
        height: 0,
        advancedMode: sanitizedBlock.advancedMode,
        triggerMode: sanitizedBlock.triggerMode,
        data: sanitizedBlock.data
          ? {
              ...sanitizedBlock.data,
              // Add UI dimensions if it's a container
              ...(sanitizedBlock.type === 'loop' || sanitizedBlock.type === 'parallel'
                ? {
                    width: 500,
                    height: 300,
                    type: 'subflowNode',
                  }
                : {}),
            }
          : undefined,
      }
    }
  })

  // Convert sanitized edges to full edges
  const mergedEdges: Edge[] = sanitized.edges.map((edge) => {
    // Try to find existing edge to preserve styling
    const existingEdge = fullState.edges.find(
      (e) =>
        e.source === edge.source &&
        e.target === edge.target &&
        e.sourceHandle === edge.sourceHandle &&
        e.targetHandle === edge.targetHandle
    )

    if (existingEdge) {
      return existingEdge
    }

    // New edge - create with defaults
    return {
      id: edge.id,
      source: edge.source,
      target: edge.target,
      sourceHandle: edge.sourceHandle,
      targetHandle: edge.targetHandle,
      type: 'default',
      data: {},
    } as Edge
  })

  return {
    blocks: mergedBlocks,
    edges: mergedEdges,
    loops: sanitized.loops,
    parallels: sanitized.parallels,
    lastSaved: Date.now(),
    // Preserve deployment info
    isDeployed: fullState.isDeployed,
    deployedAt: fullState.deployedAt,
    deploymentStatuses: fullState.deploymentStatuses,
    hasActiveWebhook: fullState.hasActiveWebhook,
  }
}
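In context, this is the step that turns a position-free copilot proposal back into a renderable canvas; a hedged sketch of the call site:

```typescript
import type { CopilotWorkflowState } from '@/lib/workflows/json-sanitizer'
import { mergeWithUIState } from '@/lib/workflows/json-sanitizer'
import type { WorkflowState } from '@/stores/workflows/workflow/types'

declare const proposed: CopilotWorkflowState // sanitized state from the copilot
declare const current: WorkflowState // full canvas state

const merged = mergeWithUIState(proposed, current)
// Blocks that already exist keep their position and UI flags; brand-new blocks
// get positions from generatePositionForNewBlock; deployment fields
// (isDeployed, deployedAt, ...) are carried over from `current`.
```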
@@ -1,4 +1,4 @@
import type { WorkflowState } from '@/stores/workflows/workflow/types'
import type { CopilotWorkflowState } from '@/lib/workflows/json-sanitizer'

export interface EditOperation {
  operation_type: 'add' | 'edit' | 'delete'
@@ -10,6 +10,20 @@ export interface EditOperation {
    inputs?: Record<string, any>
    connections?: Record<string, any>
    removeEdges?: Array<{ targetBlockId: string; sourceHandle?: string }>
    loopConfig?: {
      nodes?: string[]
      iterations?: number
      loopType?: 'for' | 'forEach'
      forEachItems?: any
    }
    parallelConfig?: {
      nodes?: string[]
      distribution?: any
      count?: number
      parallelType?: 'count' | 'collection'
    }
    parentId?: string
    extent?: 'parent'
  }
}

@@ -20,16 +34,18 @@ export interface WorkflowDiff {
    blocksModified: number
    blocksDeleted: number
    edgesChanged: number
    subflowsChanged: number
  }
}

/**
 * Compute the edit sequence (operations) needed to transform startState into endState
 * This analyzes the differences and generates operations that can recreate the changes
 * Works with sanitized CopilotWorkflowState (no positions, only semantic data)
 */
export function computeEditSequence(
  startState: WorkflowState,
  endState: WorkflowState
  startState: CopilotWorkflowState,
  endState: CopilotWorkflowState
): WorkflowDiff {
  const operations: EditOperation[] = []
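For orientation, a hypothetical `add` operation for a block nested inside a loop, using the widened `params` shape above (field values are illustrative, import elided):

```typescript
// Uses the EditOperation interface defined above.
const op: EditOperation = {
  operation_type: 'add',
  block_id: 'agent1',
  params: {
    type: 'agent',
    name: 'Agent 1',
    inputs: { prompt: 'Summarize the input' },
    connections: {},
    parentId: 'loop1',
    extent: 'parent',
    loopConfig: { nodes: ['agent1'], iterations: 5, loopType: 'for' },
  },
}
```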
@@ -37,12 +53,17 @@ export function computeEditSequence(
  const endBlocks = endState.blocks || {}
  const startEdges = startState.edges || []
  const endEdges = endState.edges || []
  const startLoops = startState.loops || {}
  const endLoops = endState.loops || {}
  const startParallels = startState.parallels || {}
  const endParallels = endState.parallels || {}

  // Track statistics
  let blocksAdded = 0
  let blocksModified = 0
  let blocksDeleted = 0
  let edgesChanged = 0
  let subflowsChanged = 0

  // 1. Find deleted blocks (exist in start but not in end)
  for (const blockId in startBlocks) {
@@ -59,7 +80,7 @@ export function computeEditSequence(
  for (const blockId in endBlocks) {
    if (!(blockId in startBlocks)) {
      const block = endBlocks[blockId]
      const addParams: Record<string, any> = {
      const addParams: EditOperation['params'] = {
        type: block.type,
        name: block.name,
        inputs: extractInputValues(block),
@@ -67,6 +88,25 @@ export function computeEditSequence(
        triggerMode: Boolean(block?.triggerMode),
      }

      // Add loop/parallel configuration if this block is in a subflow
      const loopConfig = findLoopConfigForBlock(blockId, endLoops)
      if (loopConfig) {
        ;(addParams as any).loopConfig = loopConfig
        subflowsChanged++
      }

      const parallelConfig = findParallelConfigForBlock(blockId, endParallels)
      if (parallelConfig) {
        ;(addParams as any).parallelConfig = parallelConfig
        subflowsChanged++
      }

      // Add parent-child relationship if present
      if (block.data?.parentId) {
        addParams.parentId = block.data.parentId
        addParams.extent = block.data.extent
      }

      operations.push({
        operation_type: 'add',
        block_id: blockId,
@@ -81,7 +121,17 @@ export function computeEditSequence(
    if (blockId in startBlocks) {
      const startBlock = startBlocks[blockId]
      const endBlock = endBlocks[blockId]
      const changes = computeBlockChanges(startBlock, endBlock, blockId, startEdges, endEdges)
      const changes = computeBlockChanges(
        startBlock,
        endBlock,
        blockId,
        startEdges,
        endEdges,
        startLoops,
        endLoops,
        startParallels,
        endParallels
      )

      if (changes) {
        operations.push({
@@ -93,10 +143,21 @@ export function computeEditSequence(
        if (changes.connections || changes.removeEdges) {
          edgesChanged++
        }
        if (changes.loopConfig || changes.parallelConfig) {
          subflowsChanged++
        }
      }
    }
  }

  // 4. Check for standalone loop/parallel changes (not tied to specific blocks)
  const loopChanges = detectSubflowChanges(startLoops, endLoops, 'loop')
  const parallelChanges = detectSubflowChanges(startParallels, endParallels, 'parallel')

  if (loopChanges > 0 || parallelChanges > 0) {
    subflowsChanged += loopChanges + parallelChanges
  }

  return {
    operations,
    summary: {
@@ -104,6 +165,7 @@ export function computeEditSequence(
      blocksModified,
      blocksDeleted,
      edgesChanged,
      subflowsChanged,
    },
  }
}
@@ -171,6 +233,101 @@ function extractConnections(
  return connections
}
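End to end, the function turns two sanitized snapshots into a replayable operation list; a hedged sketch of a one-block rename (imports elided; exact `params` contents depend on computeBlockChanges internals not shown here):

```typescript
import type { CopilotWorkflowState } from '@/lib/workflows/json-sanitizer'

declare const before: CopilotWorkflowState // snapshot prior to the edit
declare const after: CopilotWorkflowState // same workflow with one block renamed

const { operations, summary } = computeEditSequence(before, after)
// Roughly:
// operations === [{ operation_type: 'edit', block_id: 'agent1', params: { /* changed fields */ } }]
// summary.blocksModified === 1, with the other counters at 0.
```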
/**
 * Find loop configuration for a block
 */
function findLoopConfigForBlock(
  blockId: string,
  loops: Record<string, any>
):
  | {
      nodes?: string[]
      iterations?: number
      loopType?: 'for' | 'forEach'
      forEachItems?: any
    }
  | undefined {
  for (const loop of Object.values(loops)) {
    if (loop.id === blockId || loop.nodes?.includes(blockId)) {
      return {
        nodes: loop.nodes,
        iterations: loop.iterations,
        loopType: loop.loopType,
        forEachItems: loop.forEachItems,
      }
    }
  }
  return undefined
}

/**
 * Find parallel configuration for a block
 */
function findParallelConfigForBlock(
  blockId: string,
  parallels: Record<string, any>
):
  | {
      nodes?: string[]
      distribution?: any
      count?: number
      parallelType?: 'count' | 'collection'
    }
  | undefined {
  for (const parallel of Object.values(parallels)) {
    if (parallel.id === blockId || parallel.nodes?.includes(blockId)) {
      return {
        nodes: parallel.nodes,
        distribution: parallel.distribution,
        count: parallel.count,
        parallelType: parallel.parallelType,
      }
    }
  }
  return undefined
}

/**
 * Detect changes in subflow configurations
 */
function detectSubflowChanges(
  startSubflows: Record<string, any>,
  endSubflows: Record<string, any>,
  type: 'loop' | 'parallel'
): number {
  let changes = 0

  // Check for added/removed subflows
  const startIds = new Set(Object.keys(startSubflows))
  const endIds = new Set(Object.keys(endSubflows))

  for (const id of endIds) {
    if (!startIds.has(id)) {
      changes++ // New subflow
    }
  }

  for (const id of startIds) {
    if (!endIds.has(id)) {
      changes++ // Removed subflow
    }
  }

  // Check for modified subflows
  for (const id of endIds) {
    if (startIds.has(id)) {
      const startSubflow = startSubflows[id]
      const endSubflow = endSubflows[id]

      if (JSON.stringify(startSubflow) !== JSON.stringify(endSubflow)) {
        changes++ // Modified subflow
      }
    }
  }

  return changes
}

/**
 * Compute what changed in a block between two states
 */
@@ -189,7 +346,11 @@ function computeBlockChanges(
    target: string
    sourceHandle?: string | null
    targetHandle?: string | null
  }>
  }>,
  startLoops: Record<string, any>,
  endLoops: Record<string, any>,
  startParallels: Record<string, any>,
  endParallels: Record<string, any>
): Record<string, any> | null {
  const changes: Record<string, any> = {}
  let hasChanges = false
@@ -268,6 +429,42 @@ function computeBlockChanges(
    hasChanges = true
  }

  // Check loop membership changes
  const startLoopConfig = findLoopConfigForBlock(blockId, startLoops)
  const endLoopConfig = findLoopConfigForBlock(blockId, endLoops)

  if (JSON.stringify(startLoopConfig) !== JSON.stringify(endLoopConfig)) {
    if (endLoopConfig) {
      ;(changes as any).loopConfig = endLoopConfig
    }
    hasChanges = true
  }

  // Check parallel membership changes
  const startParallelConfig = findParallelConfigForBlock(blockId, startParallels)
  const endParallelConfig = findParallelConfigForBlock(blockId, endParallels)

  if (JSON.stringify(startParallelConfig) !== JSON.stringify(endParallelConfig)) {
    if (endParallelConfig) {
      ;(changes as any).parallelConfig = endParallelConfig
    }
    hasChanges = true
  }

  // Check parent-child relationship changes
  const startParentId = startBlock.data?.parentId
  const endParentId = endBlock.data?.parentId
  const startExtent = startBlock.data?.extent
  const endExtent = endBlock.data?.extent

  if (startParentId !== endParentId || startExtent !== endExtent) {
    if (endParentId) {
      changes.parentId = endParentId
      changes.extent = endExtent
    }
    hasChanges = true
  }

  return hasChanges ? changes : null
}

@@ -288,6 +485,9 @@ export function formatEditSequence(operations: EditOperation[]): string[] {
      if (op.params?.inputs) changes.push('inputs')
      if (op.params?.connections) changes.push('connections')
      if (op.params?.removeEdges) changes.push(`remove ${op.params.removeEdges.length} edge(s)`)
      if ((op.params as any)?.loopConfig) changes.push('loop configuration')
      if ((op.params as any)?.parallelConfig) changes.push('parallel configuration')
      if (op.params?.parentId) changes.push('parent-child relationship')
      return `Edit block "${op.block_id}": ${changes.join(', ')}`
    }
    default:
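A hedged sketch of the summary line the `edit` branch above produces for an operation touching inputs and a loop (import elided; the input is cast loosely since only the visible branches matter here):

```typescript
const lines = formatEditSequence([
  {
    operation_type: 'edit',
    block_id: 'agent1',
    params: { inputs: { prompt: 'x' }, loopConfig: { iterations: 10 } },
  } as any,
])
// lines[0] === 'Edit block "agent1": inputs, loop configuration'
```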
@@ -154,6 +154,14 @@ export function validateWorkflowState(

    // Check if block type exists
    const blockConfig = getBlock(block.type)

    // Special handling for container blocks (loop and parallel)
    if (block.type === 'loop' || block.type === 'parallel') {
      // These are valid container types, they don't need block configs
      sanitizedBlocks[blockId] = block
      continue
    }

    if (!blockConfig) {
      errors.push(`Block ${block.name || blockId}: unknown block type '${block.type}'`)
      if (options.sanitize) {

@@ -10,11 +10,9 @@ export const API_ENDPOINTS = {
export const COPILOT_TOOL_DISPLAY_NAMES: Record<string, string> = {
  search_documentation: 'Searching documentation',
  get_user_workflow: 'Analyzing your workflow',
  build_workflow: 'Building your workflow',
  get_blocks_and_tools: 'Getting block information',
  get_blocks_metadata: 'Getting block metadata',
  get_yaml_structure: 'Analyzing workflow structure',
  get_build_workflow_examples: 'Viewing workflow examples',
  get_edit_workflow_examples: 'Viewing workflow examples',
  get_environment_variables: 'Viewing environment variables',
  set_environment_variables: 'Setting environment variables',
@@ -29,11 +27,9 @@ export const COPILOT_TOOL_DISPLAY_NAMES: Record<string, string> = {
export const COPILOT_TOOL_PAST_TENSE: Record<string, string> = {
  search_documentation: 'Searched documentation',
  get_user_workflow: 'Analyzed your workflow',
  build_workflow: 'Built your workflow',
  get_blocks_and_tools: 'Retrieved block information',
  get_blocks_metadata: 'Retrieved block metadata',
  get_yaml_structure: 'Analyzed workflow structure',
  get_build_workflow_examples: 'Viewed workflow examples',
  get_edit_workflow_examples: 'Viewed workflow examples',
  get_environment_variables: 'Found environment variables',
  set_environment_variables: 'Set environment variables',
@@ -48,11 +44,9 @@ export const COPILOT_TOOL_PAST_TENSE: Record<string, string> = {
export const COPILOT_TOOL_ERROR_NAMES: Record<string, string> = {
  search_documentation: 'Errored searching documentation',
  get_user_workflow: 'Errored analyzing your workflow',
  build_workflow: 'Errored building your workflow',
  get_blocks_and_tools: 'Errored getting block information',
  get_blocks_metadata: 'Errored getting block metadata',
  get_yaml_structure: 'Errored analyzing workflow structure',
  get_build_workflow_examples: 'Errored getting workflow examples',
  get_edit_workflow_examples: 'Errored getting workflow examples',
  get_environment_variables: 'Errored getting environment variables',
  set_environment_variables: 'Errored setting environment variables',

@@ -1,6 +1,7 @@
import { create } from 'zustand'
import { devtools } from 'zustand/middleware'
import { createLogger } from '@/lib/logs/console/logger'
import { sanitizeForCopilot } from '@/lib/workflows/json-sanitizer'
import {
  computeEditSequence,
  type EditOperation,
@@ -126,8 +127,12 @@ export const useCopilotTrainingStore = create<CopilotTrainingState>()(
        const endSnapshot = captureWorkflowSnapshot()
        const duration = state.startTime ? Date.now() - state.startTime : 0

        // Sanitize snapshots for compute-edit-sequence (it works with sanitized state)
        const sanitizedStart = sanitizeForCopilot(state.startSnapshot!)
        const sanitizedEnd = sanitizeForCopilot(endSnapshot)

        // Compute the edit sequence
        const { operations, summary } = computeEditSequence(state.startSnapshot, endSnapshot)
        const { operations, summary } = computeEditSequence(sanitizedStart, sanitizedEnd)

        // Get workflow ID from the store
        const { activeWorkflowId } = useWorkflowStore.getState() as any

@@ -1,7 +1,6 @@
// Tool IDs
export const COPILOT_TOOL_IDS = {
  GET_USER_WORKFLOW: 'get_user_workflow',
  BUILD_WORKFLOW: 'build_workflow',
  EDIT_WORKFLOW: 'edit_workflow',
  SEARCH_DOCUMENTATION: 'search_documentation',
  GET_BLOCKS_AND_TOOLS: 'get_blocks_and_tools',

@@ -206,7 +206,7 @@ export const usePreviewStore = create<PreviewStore>()(
        if (message.role === 'assistant' && message.toolCalls) {
          message.toolCalls.forEach((toolCall: CopilotToolCall) => {
            if (
              toolCall.name === COPILOT_TOOL_IDS.BUILD_WORKFLOW &&
              toolCall.name === COPILOT_TOOL_IDS.EDIT_WORKFLOW &&
              toolCall.state === 'success' &&
              toolCall.id
            ) {

@@ -32,7 +32,6 @@ import { createExecutionContext, getTool } from '@/lib/copilot/tools/client/regi
import { GetEnvironmentVariablesClientTool } from '@/lib/copilot/tools/client/user/get-environment-variables'
import { GetOAuthCredentialsClientTool } from '@/lib/copilot/tools/client/user/get-oauth-credentials'
import { SetEnvironmentVariablesClientTool } from '@/lib/copilot/tools/client/user/set-environment-variables'
import { BuildWorkflowClientTool } from '@/lib/copilot/tools/client/workflow/build-workflow'
import { EditWorkflowClientTool } from '@/lib/copilot/tools/client/workflow/edit-workflow'
import { GetGlobalWorkflowVariablesClientTool } from '@/lib/copilot/tools/client/workflow/get-global-workflow-variables'
import { GetUserWorkflowClientTool } from '@/lib/copilot/tools/client/workflow/get-user-workflow'
@@ -84,7 +83,6 @@ const CLIENT_TOOL_INSTANTIATORS: Record<string, (id: string) => any> = {
  gdrive_request_access: (id) => new GDriveRequestAccessClientTool(id),
  oauth_request_access: (id) => new OAuthRequestAccessClientTool(id),
  edit_workflow: (id) => new EditWorkflowClientTool(id),
  build_workflow: (id) => new BuildWorkflowClientTool(id),
  get_user_workflow: (id) => new GetUserWorkflowClientTool(id),
  list_user_workflows: (id) => new ListUserWorkflowsClientTool(id),
  get_workflow_from_name: (id) => new GetWorkflowFromNameClientTool(id),
@@ -114,7 +112,6 @@ export const CLASS_TOOL_METADATA: Record<string, BaseClientToolMetadata | undefi
  mark_todo_in_progress: (MarkTodoInProgressClientTool as any)?.metadata,
  gdrive_request_access: (GDriveRequestAccessClientTool as any)?.metadata,
  edit_workflow: (EditWorkflowClientTool as any)?.metadata,
  build_workflow: (BuildWorkflowClientTool as any)?.metadata,
  get_user_workflow: (GetUserWorkflowClientTool as any)?.metadata,
  list_user_workflows: (ListUserWorkflowsClientTool as any)?.metadata,
  get_workflow_from_name: (GetWorkflowFromNameClientTool as any)?.metadata,
@@ -1275,7 +1272,7 @@ async function* parseSSEStream(
// Initial state (subset required for UI/streaming)
const initialState = {
  mode: 'agent' as const,
  selectedModel: 'gpt-5' as CopilotStore['selectedModel'],
  selectedModel: 'claude-4.5-sonnet' as CopilotStore['selectedModel'],
  agentPrefetch: true,
  isCollapsed: false,
  currentChat: null as CopilotChat | null,
@@ -1809,7 +1806,7 @@ export const useCopilotStore = create<CopilotStore>()(
          const b = blocks[bi]
          if (b?.type === 'tool_call') {
            const tn = b.toolCall?.name
            if (tn === 'build_workflow' || tn === 'edit_workflow') {
            if (tn === 'edit_workflow') {
              id = b.toolCall?.id
              break outer
            }
@@ -1818,9 +1815,7 @@ export const useCopilotStore = create<CopilotStore>()(
        }
        // Fallback to map if not found in messages
        if (!id) {
          const candidates = Object.values(toolCallsById).filter(
            (t) => t.name === 'build_workflow' || t.name === 'edit_workflow'
          )
          const candidates = Object.values(toolCallsById).filter((t) => t.name === 'edit_workflow')
          id = candidates.length ? candidates[candidates.length - 1].id : undefined
        }
      }

@@ -77,6 +77,7 @@ export interface CopilotState {
    | 'gpt-4.1'
    | 'o3'
    | 'claude-4-sonnet'
    | 'claude-4.5-sonnet'
    | 'claude-4.1-opus'
  agentPrefetch: boolean
  isCollapsed: boolean

@@ -2,7 +2,7 @@ import { create } from 'zustand'
import { devtools } from 'zustand/middleware'
import { getClientTool } from '@/lib/copilot/tools/client/manager'
import { createLogger } from '@/lib/logs/console/logger'
import { type DiffAnalysis, WorkflowDiffEngine } from '@/lib/workflows/diff'
import { type DiffAnalysis, type WorkflowDiff, WorkflowDiffEngine } from '@/lib/workflows/diff'
import { validateWorkflowState } from '@/lib/workflows/validation'
import { Serializer } from '@/serializer'
import { useWorkflowRegistry } from '../workflows/registry/store'
@@ -119,14 +119,26 @@ export const useWorkflowDiffStore = create<WorkflowDiffState & WorkflowDiffActio

    _batchedStateUpdate: batchedUpdate,

    setProposedChanges: async (yamlContent: string, diffAnalysis?: DiffAnalysis) => {
    setProposedChanges: async (
      proposedContent: string | WorkflowState,
      diffAnalysis?: DiffAnalysis
    ) => {
      // PERFORMANCE OPTIMIZATION: Immediate state update to prevent UI flicker
      batchedUpdate({ isDiffReady: false, diffError: null })

      // Clear any existing diff state to ensure a fresh start
      diffEngine.clearDiff()

      const result = await diffEngine.createDiffFromYaml(yamlContent, diffAnalysis)
      let result: { success: boolean; diff?: WorkflowDiff; errors?: string[] }

      // Handle both YAML string and direct WorkflowState object
      if (typeof proposedContent === 'string') {
        // Legacy YAML path (for backward compatibility)
        result = await diffEngine.createDiffFromYaml(proposedContent, diffAnalysis)
      } else {
        // Direct WorkflowState path (new, more efficient)
        result = await diffEngine.createDiffFromWorkflowState(proposedContent, diffAnalysis)
      }

      if (result.success && result.diff) {
        // Validate proposed workflow using serializer round-trip to catch canvas-breaking issues
@@ -421,7 +433,7 @@ export const useWorkflowDiffStore = create<WorkflowDiffState & WorkflowDiffActio
          for (const b of m.contentBlocks as any[]) {
            if (b?.type === 'tool_call') {
              const tn = b.toolCall?.name
              if (tn === 'build_workflow' || tn === 'edit_workflow') {
              if (tn === 'edit_workflow') {
                toolCallId = b.toolCall?.id
                break outer
              }
@@ -431,7 +443,7 @@ export const useWorkflowDiffStore = create<WorkflowDiffState & WorkflowDiffActio
        // Fallback to toolCallsById map if not found in messages
        if (!toolCallId) {
          const candidates = Object.values(toolCallsById).filter(
            (t: any) => t.name === 'build_workflow' || t.name === 'edit_workflow'
            (t: any) => t.name === 'edit_workflow'
          ) as any[]
          toolCallId = candidates.length ? candidates[candidates.length - 1].id : undefined
        }
@@ -487,7 +499,7 @@ export const useWorkflowDiffStore = create<WorkflowDiffState & WorkflowDiffActio
          for (const b of m.contentBlocks as any[]) {
            if (b?.type === 'tool_call') {
              const tn = b.toolCall?.name
              if (tn === 'build_workflow' || tn === 'edit_workflow') {
              if (tn === 'edit_workflow') {
                toolCallId = b.toolCall?.id
                break outer
              }
@@ -497,7 +509,7 @@ export const useWorkflowDiffStore = create<WorkflowDiffState & WorkflowDiffActio
        // Fallback to toolCallsById map if not found in messages
        if (!toolCallId) {
          const candidates = Object.values(toolCallsById).filter(
            (t: any) => t.name === 'build_workflow' || t.name === 'edit_workflow'
            (t: any) => t.name === 'edit_workflow'
          ) as any[]
          toolCallId = candidates.length ? candidates[candidates.length - 1].id : undefined
        }
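With the widened signature, callers can hand the diff store either representation; a sketch (the store is the one defined in this file, import elided; top-level await for brevity):

```typescript
import type { WorkflowState } from '@/stores/workflows/workflow/types'

declare const yamlContent: string // legacy copilot output
declare const proposedState: WorkflowState // JSON-based copilot output

const { setProposedChanges } = useWorkflowDiffStore.getState()

await setProposedChanges(yamlContent) // routed through createDiffFromYaml
await setProposedChanges(proposedState) // routed through createDiffFromWorkflowState
```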
apps/sim/stores/workflows/json/importer.ts (new file, 230 lines)
@@ -0,0 +1,230 @@
import { v4 as uuidv4 } from 'uuid'
import { createLogger } from '@/lib/logs/console/logger'
import type { WorkflowState } from '../workflow/types'

const logger = createLogger('WorkflowJsonImporter')

/**
 * Generate new IDs for all blocks and edges to avoid conflicts
 */
function regenerateIds(workflowState: WorkflowState): WorkflowState {
  const blockIdMap = new Map<string, string>()
  const newBlocks: WorkflowState['blocks'] = {}

  // First pass: create new IDs for all blocks
  Object.entries(workflowState.blocks).forEach(([oldId, block]) => {
    const newId = uuidv4()
    blockIdMap.set(oldId, newId)
    newBlocks[newId] = {
      ...block,
      id: newId,
    }
  })

  // Second pass: update edges with new block IDs
  const newEdges = workflowState.edges.map((edge) => ({
    ...edge,
    id: uuidv4(), // Generate new edge ID
    source: blockIdMap.get(edge.source) || edge.source,
    target: blockIdMap.get(edge.target) || edge.target,
  }))

  // Third pass: update loops with new block IDs
  // CRITICAL: Loop IDs must match their block IDs (loops are keyed by their block ID)
  const newLoops: WorkflowState['loops'] = {}
  if (workflowState.loops) {
    Object.entries(workflowState.loops).forEach(([oldLoopId, loop]) => {
      // Map the loop ID using the block ID mapping (loop ID = block ID)
      const newLoopId = blockIdMap.get(oldLoopId) || oldLoopId
      newLoops[newLoopId] = {
        ...loop,
        id: newLoopId,
        nodes: loop.nodes.map((nodeId) => blockIdMap.get(nodeId) || nodeId),
      }
    })
  }

  // Fourth pass: update parallels with new block IDs
  // CRITICAL: Parallel IDs must match their block IDs (parallels are keyed by their block ID)
  const newParallels: WorkflowState['parallels'] = {}
  if (workflowState.parallels) {
    Object.entries(workflowState.parallels).forEach(([oldParallelId, parallel]) => {
      // Map the parallel ID using the block ID mapping (parallel ID = block ID)
      const newParallelId = blockIdMap.get(oldParallelId) || oldParallelId
      newParallels[newParallelId] = {
        ...parallel,
        id: newParallelId,
        nodes: parallel.nodes.map((nodeId) => blockIdMap.get(nodeId) || nodeId),
      }
    })
  }

  // Fifth pass: update any block references in subblock values
  Object.entries(newBlocks).forEach(([blockId, block]) => {
    if (block.subBlocks) {
      Object.entries(block.subBlocks).forEach(([subBlockId, subBlock]) => {
        if (subBlock.value && typeof subBlock.value === 'string') {
          // Replace any block references in the value
          let updatedValue = subBlock.value
          blockIdMap.forEach((newId, oldId) => {
            // Replace references like <blockId.output> with new IDs
            const regex = new RegExp(`<${oldId}\\.`, 'g')
            updatedValue = updatedValue.replace(regex, `<${newId}.`)
          })
          block.subBlocks[subBlockId] = {
            ...subBlock,
            value: updatedValue,
          }
        }
      })
    }

    // Update parentId references in block.data
    if (block.data?.parentId) {
      const newParentId = blockIdMap.get(block.data.parentId)
      if (newParentId) {
        block.data.parentId = newParentId
      } else {
        // Parent ID not in mapping - this shouldn't happen but log it
        logger.warn(`Block ${blockId} references unmapped parent ${block.data.parentId}`)
        // Remove invalid parent reference
        block.data.parentId = undefined
        block.data.extent = undefined
      }
    }
  })

  return {
    blocks: newBlocks,
    edges: newEdges,
    loops: newLoops,
    parallels: newParallels,
  }
}
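The fifth pass is the subtle one: template references embedded in subblock values must follow the remap. A minimal, self-contained reproduction of that step:

```typescript
// Assume block 'old-1' was remapped to 'new-1'; '<other.output>' has no mapping.
const blockIdMap = new Map([['old-1', 'new-1']])

let value = 'Result: <old-1.output> and <other.output>'
blockIdMap.forEach((newId, oldId) => {
  // Same regex as above: rewrite '<oldId.' prefixes only
  value = value.replace(new RegExp(`<${oldId}\\.`, 'g'), `<${newId}.`)
})
// value === 'Result: <new-1.output> and <other.output>'
```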
export function parseWorkflowJson(
  jsonContent: string,
  regenerateIdsFlag = true
): {
  data: WorkflowState | null
  errors: string[]
} {
  const errors: string[] = []

  try {
    // Parse JSON content
    let data: any
    try {
      data = JSON.parse(jsonContent)
    } catch (parseError) {
      errors.push(
        `Invalid JSON: ${parseError instanceof Error ? parseError.message : 'Parse error'}`
      )
      return { data: null, errors }
    }

    // Validate top-level structure
    if (!data || typeof data !== 'object') {
      errors.push('Invalid JSON: Root must be an object')
      return { data: null, errors }
    }

    // Handle new export format (version/exportedAt/state) or old format (blocks/edges at root)
    let workflowData: any
    if (data.version && data.state) {
      // New format with versioning
      logger.info('Parsing workflow JSON with version', {
        version: data.version,
        exportedAt: data.exportedAt,
      })
      workflowData = data.state
    } else {
      // Old format - blocks/edges at root level
      logger.info('Parsing legacy workflow JSON format')
      workflowData = data
    }

    // Validate required fields
    if (!workflowData.blocks || typeof workflowData.blocks !== 'object') {
      errors.push('Missing or invalid field: blocks')
      return { data: null, errors }
    }

    if (!Array.isArray(workflowData.edges)) {
      errors.push('Missing or invalid field: edges (must be an array)')
      return { data: null, errors }
    }

    // Validate blocks have required fields
    Object.entries(workflowData.blocks).forEach(([blockId, block]: [string, any]) => {
      if (!block || typeof block !== 'object') {
        errors.push(`Invalid block ${blockId}: must be an object`)
        return
      }

      if (!block.id) {
        errors.push(`Block ${blockId} missing required field: id`)
      }
      if (!block.type) {
        errors.push(`Block ${blockId} missing required field: type`)
      }
      if (
        !block.position ||
        typeof block.position.x !== 'number' ||
        typeof block.position.y !== 'number'
      ) {
        errors.push(`Block ${blockId} missing or invalid position`)
      }
    })

    // Validate edges have required fields
    workflowData.edges.forEach((edge: any, index: number) => {
      if (!edge || typeof edge !== 'object') {
        errors.push(`Invalid edge at index ${index}: must be an object`)
        return
      }

      if (!edge.id) {
        errors.push(`Edge at index ${index} missing required field: id`)
      }
      if (!edge.source) {
        errors.push(`Edge at index ${index} missing required field: source`)
      }
      if (!edge.target) {
        errors.push(`Edge at index ${index} missing required field: target`)
      }
    })

    // If there are errors, return null
    if (errors.length > 0) {
      return { data: null, errors }
    }

    // Construct the workflow state with defaults
    let workflowState: WorkflowState = {
      blocks: workflowData.blocks || {},
      edges: workflowData.edges || [],
      loops: workflowData.loops || {},
      parallels: workflowData.parallels || {},
    }

    // Regenerate IDs if requested (default: true)
    if (regenerateIdsFlag) {
      workflowState = regenerateIds(workflowState)
      logger.info('Regenerated IDs for imported workflow to avoid conflicts')
    }

    logger.info('Successfully parsed workflow JSON', {
      blocksCount: Object.keys(workflowState.blocks).length,
      edgesCount: workflowState.edges.length,
      loopsCount: Object.keys(workflowState.loops).length,
      parallelsCount: Object.keys(workflowState.parallels).length,
    })

    return { data: workflowState, errors: [] }
  } catch (error) {
    logger.error('Failed to parse workflow JSON:', error)
    errors.push(`Unexpected error: ${error instanceof Error ? error.message : 'Unknown error'}`)
    return { data: null, errors }
  }
}
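Typical import flow (the module path is assumed from the file location above; `fileContents` stands for whatever the user uploaded):

```typescript
import { parseWorkflowJson } from '@/stores/workflows/json/importer'

declare const fileContents: string

const { data, errors } = parseWorkflowJson(fileContents)
if (!data) {
  console.error('Import failed:', errors)
} else {
  // data.blocks/edges/loops/parallels carry fresh UUIDs by default;
  // pass `false` as the second argument to keep the original IDs.
}
```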
apps/sim/stores/workflows/json/store.ts (new file, 89 lines)
@@ -0,0 +1,89 @@
import { create } from 'zustand'
import { devtools } from 'zustand/middleware'
import { createLogger } from '@/lib/logs/console/logger'
import { type ExportWorkflowState, sanitizeForExport } from '@/lib/workflows/json-sanitizer'
import { getWorkflowWithValues } from '@/stores/workflows'
import { useWorkflowRegistry } from '../registry/store'

const logger = createLogger('WorkflowJsonStore')

interface WorkflowJsonStore {
  json: string
  lastGenerated?: number

  generateJson: () => void
  getJson: () => Promise<string>
  refreshJson: () => void
}

export const useWorkflowJsonStore = create<WorkflowJsonStore>()(
  devtools(
    (set, get) => ({
      json: '',
      lastGenerated: undefined,

      generateJson: () => {
        // Get the active workflow ID from registry
        const { activeWorkflowId } = useWorkflowRegistry.getState()

        if (!activeWorkflowId) {
          logger.warn('No active workflow to generate JSON for')
          return
        }

        try {
          // Get the workflow state with merged subblock values
          const workflow = getWorkflowWithValues(activeWorkflowId)

          if (!workflow || !workflow.state) {
            logger.warn('No workflow state found for ID:', activeWorkflowId)
            return
          }

          const workflowState = workflow.state

          // Sanitize for export (keeps positions, removes secrets, adds version)
          const exportState: ExportWorkflowState = sanitizeForExport(workflowState)

          // Convert to formatted JSON
          const jsonString = JSON.stringify(exportState, null, 2)

          set({
            json: jsonString,
            lastGenerated: Date.now(),
          })

          logger.info('Workflow JSON generated successfully', {
            version: exportState.version,
            exportedAt: exportState.exportedAt,
            blocksCount: Object.keys(exportState.state.blocks).length,
            edgesCount: exportState.state.edges.length,
            jsonLength: jsonString.length,
          })
        } catch (error) {
          logger.error('Failed to generate JSON:', error)
        }
      },

      getJson: async () => {
        const currentTime = Date.now()
        const { json, lastGenerated } = get()

        // Auto-refresh if data is stale (older than 1 second) or never generated
        if (!lastGenerated || currentTime - lastGenerated > 1000) {
          get().generateJson()
          return get().json
        }

        return json
      },

      refreshJson: () => {
        get().generateJson()
      },
    }),
    {
      name: 'workflow-json-store',
    }
  )
)
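Consumers read through `getJson`, which regenerates anything older than a second (module path assumed from the file location above):

```typescript
import { useWorkflowJsonStore } from '@/stores/workflows/json/store'

const json = await useWorkflowJsonStore.getState().getJson()
// `json` is the pretty-printed ExportWorkflowState produced by sanitizeForExport,
// so credential-like values are already blanked before anything leaves the app.
```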
@@ -238,6 +238,7 @@ function calculateBlockPositions(
  const startX = 150
  const startY = 300

  // First pass: position all blocks as if they're root level
  layers.forEach((layer, layerIndex) => {
    const layerX = startX + layerIndex * horizontalSpacing

@@ -247,6 +248,22 @@
    })
  })

  // Second pass: adjust positions for child blocks to be relative to their parent
  Object.entries(yamlWorkflow.blocks).forEach(([blockId, block]) => {
    if (block.parentId && positions[blockId] && positions[block.parentId]) {
      // Convert absolute position to relative position within parent
      const parentPos = positions[block.parentId]
      const childPos = positions[blockId]

      // Calculate relative position inside the parent container
      // Start child blocks at a reasonable offset inside the parent
      positions[blockId] = {
        x: 50 + (childPos.x - parentPos.x) * 0.3, // Scale down and offset
        y: 100 + (childPos.y - parentPos.y) * 0.3, // Scale down and offset
      }
    }
  })

  return positions
}
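To make the scaling concrete, assume the first pass put a parent at (700, 300) and its child one layer to the right at (1250, 300), i.e. a horizontal spacing of 550:

```typescript
const parentPos = { x: 700, y: 300 }
const childPos = { x: 1250, y: 300 }

const relative = {
  x: 50 + (childPos.x - parentPos.x) * 0.3, // 50 + 550 * 0.3 = 215
  y: 100 + (childPos.y - parentPos.y) * 0.3, // 100 + 0 * 0.3 = 100
}
// The child renders at (215, 100) inside the parent container.
```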
@@ -345,13 +362,31 @@ export function convertYamlToWorkflow(yamlWorkflow: YamlWorkflow): ImportResult
    // Add container-specific data
    if (yamlBlock.type === 'loop' || yamlBlock.type === 'parallel') {
      // For loop/parallel blocks, map the inputs to the data field since they don't use subBlocks
      importedBlock.data = {
        width: 500,
        height: 300,
        type: 'subflowNode',
        // Map YAML inputs to data properties for loop/parallel blocks
        ...(yamlBlock.inputs || {}),
      const inputs = yamlBlock.inputs || {}

      // Apply defaults for loop blocks
      if (yamlBlock.type === 'loop') {
        importedBlock.data = {
          width: 500,
          height: 300,
          type: 'subflowNode',
          loopType: inputs.loopType || 'for',
          count: inputs.iterations || inputs.count || 5,
          collection: inputs.collection || '',
          maxConcurrency: inputs.maxConcurrency || 1,
          // Include any other inputs provided
          ...inputs,
        }
      } else {
        // Parallel blocks
        importedBlock.data = {
          width: 500,
          height: 300,
          type: 'subflowNode',
          ...inputs,
        }
      }

      // Clear inputs since they're now in data
      importedBlock.inputs = {}
    }
@@ -359,13 +394,13 @@ export function convertYamlToWorkflow(yamlWorkflow: YamlWorkflow): ImportResult
    // Handle parent-child relationships for nested blocks
    if (yamlBlock.parentId) {
      importedBlock.parentId = yamlBlock.parentId
      importedBlock.extent = 'parent'
      importedBlock.extent = 'parent' // Always 'parent' when parentId exists
      // Also add to data for consistency with how the system works
      if (!importedBlock.data) {
        importedBlock.data = {}
      }
      importedBlock.data.parentId = yamlBlock.parentId
      importedBlock.data.extent = 'parent'
      importedBlock.data.extent = 'parent' // Always 'parent' when parentId exists
    }

    blocks.push(importedBlock)
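So a loop block imported with no `inputs` at all still ends up fully populated; roughly, per the defaults above:

```typescript
// Hypothetical result of importing a bare `type: loop` block:
const data = {
  width: 500,
  height: 300,
  type: 'subflowNode',
  loopType: 'for', // default loop type
  count: 5, // default iteration count
  collection: '', // empty forEach collection
  maxConcurrency: 1, // sequential by default
}
```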
@@ -31,6 +31,11 @@ export interface ConnectionsFormat {
    start?: string | string[]
    end?: string | string[]
  }
  // Direct handle format (alternative to nested format above)
  'loop-start-source'?: string | string[]
  'loop-end-source'?: string | string[]
  'parallel-start-source'?: string | string[]
  'parallel-end-source'?: string | string[]
  // Legacy format support
  incoming?: Array<{
    source: string
@@ -561,6 +566,60 @@ function parseNewFormatConnections(
      }
    }
  }

  // Parse direct handle formats (alternative to nested format)
  // This allows using 'loop-start-source' directly instead of 'loop.start'
  if (connections['loop-start-source']) {
    const targets = Array.isArray(connections['loop-start-source'])
      ? connections['loop-start-source']
      : [connections['loop-start-source']]
    targets.forEach((target) => {
      if (typeof target === 'string') {
        edges.push(createEdge(blockId, target, 'loop-start-source', 'target'))
      } else {
        errors.push(`Invalid loop-start-source target in block '${blockId}': must be a string`)
      }
    })
  }

  if (connections['loop-end-source']) {
    const targets = Array.isArray(connections['loop-end-source'])
      ? connections['loop-end-source']
      : [connections['loop-end-source']]
    targets.forEach((target) => {
      if (typeof target === 'string') {
        edges.push(createEdge(blockId, target, 'loop-end-source', 'target'))
      } else {
        errors.push(`Invalid loop-end-source target in block '${blockId}': must be a string`)
      }
    })
  }

  if (connections['parallel-start-source']) {
    const targets = Array.isArray(connections['parallel-start-source'])
      ? connections['parallel-start-source']
      : [connections['parallel-start-source']]
    targets.forEach((target) => {
      if (typeof target === 'string') {
        edges.push(createEdge(blockId, target, 'parallel-start-source', 'target'))
      } else {
        errors.push(`Invalid parallel-start-source target in block '${blockId}': must be a string`)
      }
    })
  }

  if (connections['parallel-end-source']) {
    const targets = Array.isArray(connections['parallel-end-source'])
      ? connections['parallel-end-source']
      : [connections['parallel-end-source']]
    targets.forEach((target) => {
      if (typeof target === 'string') {
        edges.push(createEdge(blockId, target, 'parallel-end-source', 'target'))
      } else {
        errors.push(`Invalid parallel-end-source target in block '${blockId}': must be a string`)
      }
    })
  }
}
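The four branches are identical except for the handle name; should this grow further, a hypothetical helper (a sketch only, not part of this change) could collapse them:

```typescript
type DirectHandle =
  | 'loop-start-source'
  | 'loop-end-source'
  | 'parallel-start-source'
  | 'parallel-end-source'

// Assumes the same `createEdge`, `edges`, and `errors` shapes used above.
function parseDirectHandle(
  connections: Record<string, unknown>,
  blockId: string,
  handle: DirectHandle,
  edges: unknown[],
  errors: string[],
  createEdge: (source: string, target: string, sourceHandle: string, targetHandle: string) => unknown
): void {
  const raw = connections[handle]
  if (!raw) return
  const targets = Array.isArray(raw) ? raw : [raw]
  targets.forEach((target) => {
    if (typeof target === 'string') {
      edges.push(createEdge(blockId, target, handle, 'target'))
    } else {
      errors.push(`Invalid ${handle} target in block '${blockId}': must be a string`)
    }
  })
}
```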
function parseLegacyOutgoingConnections(