Mirror of https://github.com/simstudioai/sim.git (synced 2026-01-08 22:48:14 -05:00)
feat(autolayout): add fitToView on autolayout and reduce horizontal spacing between blocks (#2575)
* feat(autolayout): add fitToView on autolayout and reduce horizontal spacing between blocks
* remove additional yaml code
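At a glance, the feature pairs the existing auto-layout utility with React Flow's viewport animation. A condensed sketch of the pattern the use-auto-layout diff below introduces (useLayoutThenFit is a hypothetical name for illustration; the fitView options are the values this commit adds):

import { useCallback } from 'react'
import { useReactFlow } from 'reactflow'

// Hypothetical condensed sketch of the pattern added in this commit:
// run the layout pass first, then animate the viewport to fit all blocks.
function useLayoutThenFit(applyLayout: () => Promise<{ success: boolean }>) {
  const { fitView } = useReactFlow()

  return useCallback(async () => {
    const result = await applyLayout()
    if (result.success) {
      // Defer one frame so React Flow picks up the new node positions
      // before the viewport animation starts.
      requestAnimationFrame(() => fitView({ padding: 0.8, duration: 600 }))
    }
    return result
  }, [applyLayout, fitView])
}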
@@ -1066,7 +1066,6 @@ export async function GET(req: NextRequest) {
       model: chat.model,
       messages: Array.isArray(chat.messages) ? chat.messages : [],
       messageCount: Array.isArray(chat.messages) ? chat.messages.length : 0,
-      previewYaml: null, // Not needed for chat list
       planArtifact: chat.planArtifact || null,
       config: chat.config || null,
       createdAt: chat.createdAt,
@@ -1,117 +0,0 @@
-import { type NextRequest, NextResponse } from 'next/server'
-import { simAgentClient } from '@/lib/copilot/client'
-import { generateRequestId } from '@/lib/core/utils/request'
-import { createLogger } from '@/lib/logs/console/logger'
-import { getAllBlocks } from '@/blocks/registry'
-import type { BlockConfig } from '@/blocks/types'
-import { resolveOutputType } from '@/blocks/utils'
-import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/workflow/utils'
-
-const logger = createLogger('WorkflowYamlAPI')
-
-export async function POST(request: NextRequest) {
-  const requestId = generateRequestId()
-
-  try {
-    logger.info(`[${requestId}] Converting workflow JSON to YAML`)
-
-    const body = await request.json()
-    const { workflowState, subBlockValues, includeMetadata = false } = body
-
-    if (!workflowState) {
-      return NextResponse.json(
-        { success: false, error: 'workflowState is required' },
-        { status: 400 }
-      )
-    }
-
-    // Ensure loop blocks have their data populated with defaults
-    if (workflowState.blocks) {
-      Object.entries(workflowState.blocks).forEach(([blockId, block]: [string, any]) => {
-        if (block.type === 'loop') {
-          // Ensure data field exists
-          if (!block.data) {
-            block.data = {}
-          }
-
-          // Apply defaults if not set
-          if (!block.data.loopType) {
-            block.data.loopType = 'for'
-          }
-          if (!block.data.count && block.data.count !== 0) {
-            block.data.count = 5
-          }
-          if (!block.data.collection) {
-            block.data.collection = ''
-          }
-          if (!block.data.maxConcurrency) {
-            block.data.maxConcurrency = 1
-          }
-
-          logger.debug(`[${requestId}] Applied defaults to loop block ${blockId}:`, {
-            loopType: block.data.loopType,
-            count: block.data.count,
-          })
-        }
-      })
-    }
-
-    // Gather block registry and utilities for sim-agent
-    const blocks = getAllBlocks()
-    const blockRegistry = blocks.reduce(
-      (acc, block) => {
-        const blockType = block.type
-        acc[blockType] = {
-          ...block,
-          id: blockType,
-          subBlocks: block.subBlocks || [],
-          outputs: block.outputs || {},
-        } as any
-        return acc
-      },
-      {} as Record<string, BlockConfig>
-    )
-
-    // Call sim-agent directly
-    const result = await simAgentClient.makeRequest('/api/workflow/to-yaml', {
-      body: {
-        workflowState,
-        subBlockValues,
-        blockRegistry,
-        utilities: {
-          generateLoopBlocks: generateLoopBlocks.toString(),
-          generateParallelBlocks: generateParallelBlocks.toString(),
-          resolveOutputType: resolveOutputType.toString(),
-        },
-      },
-    })
-
-    if (!result.success || !result.data?.yaml) {
-      return NextResponse.json(
-        {
-          success: false,
-          error: result.error || 'Failed to generate YAML',
-        },
-        { status: result.status || 500 }
-      )
-    }
-
-    logger.info(`[${requestId}] Successfully generated YAML`, {
-      yamlLength: result.data.yaml.length,
-    })
-
-    return NextResponse.json({
-      success: true,
-      yaml: result.data.yaml,
-    })
-  } catch (error) {
-    logger.error(`[${requestId}] YAML generation failed`, error)
-    return NextResponse.json(
-      {
-        success: false,
-        error: `Failed to generate YAML: ${error instanceof Error ? error.message : 'Unknown error'}`,
-      },
-      { status: 500 }
-    )
-  }
-}
@@ -1,210 +0,0 @@
-import { db } from '@sim/db'
-import { workflow } from '@sim/db/schema'
-import { eq } from 'drizzle-orm'
-import { type NextRequest, NextResponse } from 'next/server'
-import { getSession } from '@/lib/auth'
-import { simAgentClient } from '@/lib/copilot/client'
-import { generateRequestId } from '@/lib/core/utils/request'
-import { createLogger } from '@/lib/logs/console/logger'
-import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'
-import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
-import { getAllBlocks } from '@/blocks/registry'
-import type { BlockConfig } from '@/blocks/types'
-import { resolveOutputType } from '@/blocks/utils'
-import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/workflow/utils'
-
-const logger = createLogger('WorkflowYamlExportAPI')
-
-export async function GET(request: NextRequest) {
-  const requestId = generateRequestId()
-  const url = new URL(request.url)
-  const workflowId = url.searchParams.get('workflowId')
-
-  try {
-    logger.info(`[${requestId}] Exporting workflow YAML from database: ${workflowId}`)
-
-    if (!workflowId) {
-      return NextResponse.json({ success: false, error: 'workflowId is required' }, { status: 400 })
-    }
-
-    // Get the session for authentication
-    const session = await getSession()
-    if (!session?.user?.id) {
-      logger.warn(`[${requestId}] Unauthorized access attempt for workflow ${workflowId}`)
-      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
-    }
-
-    const userId = session.user.id
-
-    // Fetch the workflow from database
-    const workflowData = await db
-      .select()
-      .from(workflow)
-      .where(eq(workflow.id, workflowId))
-      .then((rows) => rows[0])
-
-    if (!workflowData) {
-      logger.warn(`[${requestId}] Workflow ${workflowId} not found`)
-      return NextResponse.json({ error: 'Workflow not found' }, { status: 404 })
-    }
-
-    // Check if user has access to this workflow
-    let hasAccess = false
-
-    // Case 1: User owns the workflow
-    if (workflowData.userId === userId) {
-      hasAccess = true
-    }
-
-    // Case 2: Workflow belongs to a workspace the user has permissions for
-    if (!hasAccess && workflowData.workspaceId) {
-      const userPermission = await getUserEntityPermissions(
-        userId,
-        'workspace',
-        workflowData.workspaceId
-      )
-      if (userPermission !== null) {
-        hasAccess = true
-      }
-    }
-
-    if (!hasAccess) {
-      logger.warn(`[${requestId}] User ${userId} denied access to workflow ${workflowId}`)
-      return NextResponse.json({ error: 'Access denied' }, { status: 403 })
-    }
-
-    // Try to load from normalized tables first
-    logger.debug(`[${requestId}] Attempting to load workflow ${workflowId} from normalized tables`)
-    const normalizedData = await loadWorkflowFromNormalizedTables(workflowId)
-
-    let workflowState: any
-    const subBlockValues: Record<string, Record<string, any>> = {}
-
-    if (normalizedData) {
-      logger.debug(`[${requestId}] Found normalized data for workflow ${workflowId}:`, {
-        blocksCount: Object.keys(normalizedData.blocks).length,
-        edgesCount: normalizedData.edges.length,
-      })
-
-      // Use normalized table data - construct state from normalized tables
-      workflowState = {
-        deploymentStatuses: {},
-        blocks: normalizedData.blocks,
-        edges: normalizedData.edges,
-        loops: normalizedData.loops,
-        parallels: normalizedData.parallels,
-        lastSaved: Date.now(),
-        isDeployed: workflowData.isDeployed || false,
-        deployedAt: workflowData.deployedAt,
-      }
-
-      // Extract subblock values from the normalized blocks
-      Object.entries(normalizedData.blocks).forEach(([blockId, block]: [string, any]) => {
-        subBlockValues[blockId] = {}
-        if (block.subBlocks) {
-          Object.entries(block.subBlocks).forEach(([subBlockId, subBlock]: [string, any]) => {
-            if (subBlock && typeof subBlock === 'object' && 'value' in subBlock) {
-              subBlockValues[blockId][subBlockId] = subBlock.value
-            }
-          })
-        }
-      })
-
-      logger.info(`[${requestId}] Loaded workflow ${workflowId} from normalized tables`)
-    } else {
-      return NextResponse.json(
-        { success: false, error: 'Workflow has no normalized data' },
-        { status: 400 }
-      )
-    }
-
-    // Ensure loop blocks have their data populated with defaults
-    if (workflowState.blocks) {
-      Object.entries(workflowState.blocks).forEach(([blockId, block]: [string, any]) => {
-        if (block.type === 'loop') {
-          // Ensure data field exists
-          if (!block.data) {
-            block.data = {}
-          }
-
-          // Apply defaults if not set
-          if (!block.data.loopType) {
-            block.data.loopType = 'for'
-          }
-          if (!block.data.count && block.data.count !== 0) {
-            block.data.count = 5
-          }
-          if (!block.data.collection) {
-            block.data.collection = ''
-          }
-          if (!block.data.maxConcurrency) {
-            block.data.maxConcurrency = 1
-          }
-
-          logger.debug(`[${requestId}] Applied defaults to loop block ${blockId}:`, {
-            loopType: block.data.loopType,
-            count: block.data.count,
-          })
-        }
-      })
-    }
-
-    // Gather block registry and utilities for sim-agent
-    const blocks = getAllBlocks()
-    const blockRegistry = blocks.reduce(
-      (acc, block) => {
-        const blockType = block.type
-        acc[blockType] = {
-          ...block,
-          id: blockType,
-          subBlocks: block.subBlocks || [],
-          outputs: block.outputs || {},
-        } as any
-        return acc
-      },
-      {} as Record<string, BlockConfig>
-    )
-
-    // Call sim-agent directly
-    const result = await simAgentClient.makeRequest('/api/workflow/to-yaml', {
-      body: {
-        workflowState,
-        subBlockValues,
-        blockRegistry,
-        utilities: {
-          generateLoopBlocks: generateLoopBlocks.toString(),
-          generateParallelBlocks: generateParallelBlocks.toString(),
-          resolveOutputType: resolveOutputType.toString(),
-        },
-      },
-    })
-
-    if (!result.success || !result.data?.yaml) {
-      return NextResponse.json(
-        {
-          success: false,
-          error: result.error || 'Failed to generate YAML',
-        },
-        { status: result.status || 500 }
-      )
-    }
-
-    logger.info(`[${requestId}] Successfully generated YAML from database`, {
-      yamlLength: result.data.yaml.length,
-    })
-
-    return NextResponse.json({
-      success: true,
-      yaml: result.data.yaml,
-    })
-  } catch (error) {
-    logger.error(`[${requestId}] YAML export failed`, error)
-    return NextResponse.json(
-      {
-        success: false,
-        error: `Failed to export YAML: ${error instanceof Error ? error.message : 'Unknown error'}`,
-      },
-      { status: 500 }
-    )
-  }
-}
@@ -14,7 +14,6 @@ const logger = createLogger('DiffControls')

 export const DiffControls = memo(function DiffControls() {
   const isTerminalResizing = useTerminalStore((state) => state.isResizing)
-  // Optimized: Single diff store subscription
   const {
     isShowingDiff,
     isDiffReady,
@@ -38,12 +37,10 @@ export const DiffControls = memo(function DiffControls() {
     )
   )

-  // Optimized: Single copilot store subscription for needed values
-  const { updatePreviewToolCallState, clearPreviewYaml, currentChat, messages } = useCopilotStore(
+  const { updatePreviewToolCallState, currentChat, messages } = useCopilotStore(
     useCallback(
       (state) => ({
         updatePreviewToolCallState: state.updatePreviewToolCallState,
-        clearPreviewYaml: state.clearPreviewYaml,
         currentChat: state.currentChat,
         messages: state.messages,
       }),
@@ -222,11 +219,6 @@ export const DiffControls = memo(function DiffControls() {
       logger.warn('Failed to create checkpoint before accept:', error)
     })

-    // Clear preview YAML immediately
-    await clearPreviewYaml().catch((error) => {
-      logger.warn('Failed to clear preview YAML:', error)
-    })
-
     // Resolve target toolCallId for build/edit and update to terminal success state in the copilot store
     try {
       const { toolCallsById, messages } = useCopilotStore.getState()
@@ -266,16 +258,11 @@ export const DiffControls = memo(function DiffControls() {
       logger.error('Workflow update failed:', errorMessage)
       alert(`Failed to save workflow changes: ${errorMessage}`)
     }
-  }, [createCheckpoint, clearPreviewYaml, updatePreviewToolCallState, acceptChanges])
+  }, [createCheckpoint, updatePreviewToolCallState, acceptChanges])

   const handleReject = useCallback(() => {
     logger.info('Rejecting proposed changes (optimistic)')

-    // Clear preview YAML immediately
-    clearPreviewYaml().catch((error) => {
-      logger.warn('Failed to clear preview YAML:', error)
-    })
-
     // Resolve target toolCallId for build/edit and update to terminal rejected state in the copilot store
     try {
       const { toolCallsById, messages } = useCopilotStore.getState()
@@ -306,7 +293,7 @@ export const DiffControls = memo(function DiffControls() {
     rejectChanges().catch((error) => {
       logger.error('Failed to reject changes (background):', error)
     })
-  }, [clearPreviewYaml, updatePreviewToolCallState, rejectChanges])
+  }, [updatePreviewToolCallState, rejectChanges])

   // Don't show anything if no diff is available or diff is not ready
   if (!hasActiveDiff || !isDiffReady) {
@@ -65,56 +65,6 @@ export function useMessageFeedback(
     return null
   }, [messages, message.id])

-  /**
-   * Extracts workflow YAML from workflow tool calls
-   */
-  const getWorkflowYaml = useCallback(() => {
-    const allToolCalls = [
-      ...(message.toolCalls || []),
-      ...(message.contentBlocks || [])
-        .filter((block) => block.type === 'tool_call')
-        .map((block) => (block as any).toolCall),
-    ]
-
-    const workflowTools = allToolCalls.filter((toolCall) =>
-      WORKFLOW_TOOL_NAMES.includes(toolCall?.name)
-    )
-
-    for (const toolCall of workflowTools) {
-      const yamlContent =
-        toolCall.result?.yamlContent ||
-        toolCall.result?.data?.yamlContent ||
-        toolCall.input?.yamlContent ||
-        toolCall.input?.data?.yamlContent
-
-      if (yamlContent && typeof yamlContent === 'string' && yamlContent.trim()) {
-        return yamlContent
-      }
-    }
-
-    if (currentChat?.previewYaml?.trim()) {
-      return currentChat.previewYaml
-    }
-
-    for (const toolCall of workflowTools) {
-      if (toolCall.id) {
-        const preview = getPreviewByToolCall(toolCall.id)
-        if (preview?.yamlContent?.trim()) {
-          return preview.yamlContent
-        }
-      }
-    }
-
-    if (workflowTools.length > 0 && workflowId) {
-      const latestPreview = getLatestPendingPreview(workflowId, currentChat?.id)
-      if (latestPreview?.yamlContent?.trim()) {
-        return latestPreview.yamlContent
-      }
-    }
-
-    return null
-  }, [message, currentChat, workflowId, getPreviewByToolCall, getLatestPendingPreview])
-
   /**
    * Submits feedback to the API
    */
@@ -137,20 +87,14 @@ export function useMessageFeedback(
        return
      }

-      const workflowYaml = getWorkflowYaml()
-
      try {
-        const requestBody: any = {
+        const requestBody = {
          chatId: currentChat.id,
          userQuery,
          agentResponse,
          isPositiveFeedback: isPositive,
        }

-        if (workflowYaml) {
-          requestBody.workflowYaml = workflowYaml
-        }
-
        const response = await fetch('/api/copilot/feedback', {
          method: 'POST',
          headers: {
@@ -168,7 +112,7 @@ export function useMessageFeedback(
        logger.error('Error submitting feedback:', error)
      }
    },
-    [currentChat, getLastUserQuery, getFullAssistantContent, message, getWorkflowYaml]
+    [currentChat, getLastUserQuery, getFullAssistantContent, message]
  )

  /**
@@ -37,6 +37,7 @@ import {
   useUsageLimits,
 } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/hooks'
 import { Variables } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/variables/variables'
+import { useAutoLayout } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-auto-layout'
 import { useWorkflowExecution } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-workflow-execution'
 import { useDeleteWorkflow, useImportWorkflow } from '@/app/workspace/[workspaceId]/w/hooks'
 import { useChatStore } from '@/stores/chat/store'
@@ -99,6 +100,7 @@ export function Panel() {
     hydration.phase === 'state-loading'
   const { getJson } = useWorkflowJsonStore()
   const { blocks } = useWorkflowStore()
+  const { handleAutoLayout: autoLayoutWithFitView } = useAutoLayout(activeWorkflowId || null)

   // Delete workflow hook
   const { isDeleting, handleDeleteWorkflow } = useDeleteWorkflow({
@@ -201,22 +203,11 @@

     setIsAutoLayouting(true)
     try {
-      // Use the standalone auto layout utility for immediate frontend updates
-      const { applyAutoLayoutAndUpdateStore } = await import('../../utils')
-
-      const result = await applyAutoLayoutAndUpdateStore(activeWorkflowId!)
-
-      if (result.success) {
-        logger.info('Auto layout completed successfully')
-      } else {
-        logger.error('Auto layout failed:', result.error)
-      }
-    } catch (error) {
-      logger.error('Auto layout error:', error)
+      await autoLayoutWithFitView()
     } finally {
       setIsAutoLayouting(false)
     }
-  }, [isExecuting, userPermissions.canEdit, isAutoLayouting, activeWorkflowId])
+  }, [isExecuting, userPermissions.canEdit, isAutoLayouting, autoLayoutWithFitView])

   /**
    * Handles exporting workflow as JSON
@@ -1,14 +1,21 @@
 import { useCallback } from 'react'
-import type { AutoLayoutOptions } from '../utils/auto-layout-utils'
-import { applyAutoLayoutAndUpdateStore as applyAutoLayoutStandalone } from '../utils/auto-layout-utils'
+import { useReactFlow } from 'reactflow'
+import { createLogger } from '@/lib/logs/console/logger'
+import type { AutoLayoutOptions } from '@/app/workspace/[workspaceId]/w/[workflowId]/utils/auto-layout-utils'
+import { applyAutoLayoutAndUpdateStore as applyAutoLayoutStandalone } from '@/app/workspace/[workspaceId]/w/[workflowId]/utils/auto-layout-utils'

 export type { AutoLayoutOptions }

+const logger = createLogger('useAutoLayout')
+
 /**
  * Hook providing auto-layout functionality for workflows
  * Binds workflowId context and provides memoized callback for React components
+ * Includes automatic fitView animation after successful layout
  */
 export function useAutoLayout(workflowId: string | null) {
+  const { fitView } = useReactFlow()
+
   const applyAutoLayoutAndUpdateStore = useCallback(
     async (options: AutoLayoutOptions = {}) => {
       if (!workflowId) {
@@ -19,7 +26,34 @@ export function useAutoLayout(workflowId: string | null) {
     [workflowId]
   )

+  /**
+   * Applies auto-layout and animates to fit all blocks in view
+   */
+  const handleAutoLayout = useCallback(async () => {
+    try {
+      const result = await applyAutoLayoutAndUpdateStore()
+
+      if (result.success) {
+        logger.info('Auto layout completed successfully')
+        requestAnimationFrame(() => {
+          fitView({ padding: 0.8, duration: 600 })
+        })
+      } else {
+        logger.error('Auto layout failed:', result.error)
+      }
+
+      return result
+    } catch (error) {
+      logger.error('Auto layout error:', error)
+      return {
+        success: false,
+        error: error instanceof Error ? error.message : 'Unknown error',
+      }
+    }
+  }, [applyAutoLayoutAndUpdateStore, fitView])
+
   return {
     applyAutoLayoutAndUpdateStore,
+    handleAutoLayout,
   }
 }
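For reference, components consume the hook as in the two hunks that follow. A minimal consumer sketch mirroring those call sites (it assumes the component renders inside a ReactFlowProvider, since the hook calls useReactFlow internally):

const { handleAutoLayout: autoLayoutWithFitView } = useAutoLayout(activeWorkflowId || null)

const handleAutoLayout = useCallback(async () => {
  if (Object.keys(blocks).length === 0) return // nothing to lay out
  await autoLayoutWithFitView() // layout pass, then animated fitView
}, [blocks, autoLayoutWithFitView])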
@@ -198,7 +198,7 @@ const WorkflowContent = React.memo(() => {
     return resizeLoopNodes(updateNodeDimensions)
   }, [resizeLoopNodes, updateNodeDimensions])

-  const { applyAutoLayoutAndUpdateStore } = useAutoLayout(activeWorkflowId || null)
+  const { handleAutoLayout: autoLayoutWithFitView } = useAutoLayout(activeWorkflowId || null)

   const isWorkflowEmpty = useMemo(() => Object.keys(blocks).length === 0, [blocks])

@@ -441,19 +441,8 @@ const WorkflowContent = React.memo(() => {
   /** Applies auto-layout to the workflow canvas. */
   const handleAutoLayout = useCallback(async () => {
     if (Object.keys(blocks).length === 0) return
-
-    try {
-      const result = await applyAutoLayoutAndUpdateStore()
-
-      if (result.success) {
-        logger.info('Auto layout completed successfully')
-      } else {
-        logger.error('Auto layout failed:', result.error)
-      }
-    } catch (error) {
-      logger.error('Auto layout error:', error)
-    }
-  }, [blocks, applyAutoLayoutAndUpdateStore])
+    await autoLayoutWithFitView()
+  }, [blocks, autoLayoutWithFitView])

   const debouncedAutoLayout = useCallback(() => {
     const debounceTimer = setTimeout(() => {
@@ -19,6 +19,7 @@ const sleep = async (ms: number, options: SleepOptions = {}): Promise<boolean> =
   }

   return new Promise((resolve) => {
+    // biome-ignore lint/style/useConst: Variable is assigned after closure definitions that reference it
     let mainTimeoutId: NodeJS.Timeout | undefined
     let checkIntervalId: NodeJS.Timeout | undefined
     let resolved = false
@@ -2,6 +2,7 @@ import { useCallback, useEffect, useRef } from 'react'
 import type { Edge } from 'reactflow'
 import { useSession } from '@/lib/auth/auth-client'
 import { createLogger } from '@/lib/logs/console/logger'
+import { DEFAULT_DUPLICATE_OFFSET } from '@/lib/workflows/autolayout/constants'
 import { getBlockOutputs } from '@/lib/workflows/blocks/block-outputs'
 import { TriggerUtils } from '@/lib/workflows/triggers/triggers'
 import { useSocket } from '@/app/workspace/providers/socket-provider'
@@ -1326,8 +1327,8 @@ export function useCollaborativeWorkflow() {
       // Generate new ID and calculate position
       const newId = crypto.randomUUID()
       const offsetPosition = {
-        x: sourceBlock.position.x + 250,
-        y: sourceBlock.position.y + 20,
+        x: sourceBlock.position.x + DEFAULT_DUPLICATE_OFFSET.x,
+        y: sourceBlock.position.y + DEFAULT_DUPLICATE_OFFSET.y,
       }

       const newName = getUniqueBlockName(sourceBlock.name, workflowStore.blocks)
@@ -40,7 +40,6 @@ export interface CopilotChat {
   model: string
   messages: CopilotMessage[]
   messageCount: number
-  previewYaml: string | null
   planArtifact: string | null
   config: CopilotChatConfig | null
   createdAt: Date
@@ -41,7 +41,6 @@ export const ToolIds = z.enum([
 ])
 export type ToolId = z.infer<typeof ToolIds>

-// Base SSE wrapper for tool_call events emitted by the LLM
 const ToolCallSSEBase = z.object({
   type: z.literal('tool_call'),
   data: z.object({
@@ -53,18 +52,14 @@ const ToolCallSSEBase = z.object({
 })
 export type ToolCallSSE = z.infer<typeof ToolCallSSEBase>

-// Reusable small schemas
 const StringArray = z.array(z.string())
 const BooleanOptional = z.boolean().optional()
 const NumberOptional = z.number().optional()

-// Tool argument schemas (per SSE examples provided)
 export const ToolArgSchemas = {
   get_user_workflow: z.object({}),
-  // New tools
   list_user_workflows: z.object({}),
   get_workflow_from_name: z.object({ workflow_name: z.string() }),
-  // Workflow data tool (variables, custom tools, MCP tools, files)
   get_workflow_data: z.object({
     data_type: z.enum(['global_variables', 'custom_tools', 'mcp_tools', 'files']),
   }),
@@ -377,7 +372,6 @@ export type ToolSSESchemaMap = typeof ToolSSESchemas
 // Known result schemas per tool (what tool_result.result should conform to)
 // Note: Where legacy variability exists, schema captures the common/expected shape for new runtime.
 const BuildOrEditWorkflowResult = z.object({
-  yamlContent: z.string(),
   description: z.string().optional(),
   workflowState: z.unknown().optional(),
   data: z
@@ -411,14 +405,9 @@
 })

 export const ToolResultSchemas = {
-  get_user_workflow: z.object({ yamlContent: z.string() }).or(z.string()),
-  // New tools
+  get_user_workflow: z.string(),
   list_user_workflows: z.object({ workflow_names: z.array(z.string()) }),
-  get_workflow_from_name: z
-    .object({ yamlContent: z.string() })
-    .or(z.object({ userWorkflow: z.string() }))
-    .or(z.string()),
-  // Workflow data tool results (variables, custom tools, MCP tools, files)
+  get_workflow_from_name: z.object({ userWorkflow: z.string() }).or(z.string()),
   get_workflow_data: z.union([
     z.object({
       variables: z.array(z.object({ id: z.string(), name: z.string(), value: z.any() })),
@@ -462,7 +451,6 @@ export const ToolResultSchemas = {
   set_global_workflow_variables: z
     .object({ variables: z.record(z.any()) })
     .or(z.object({ message: z.any().optional(), data: z.any().optional() })),
-  // New
   oauth_request_access: z.object({
     granted: z.boolean().optional(),
     message: z.string().optional(),
@@ -685,7 +673,6 @@ export const ToolResultSchemas = {
 } as const
 export type ToolResultSchemaMap = typeof ToolResultSchemas

-// Consolidated registry entry per tool
 export const ToolRegistry = Object.freeze(
   (Object.keys(ToolArgSchemas) as ToolId[]).reduce(
     (acc, toolId) => {
@@ -703,7 +690,6 @@ export const ToolRegistry = Object.freeze(
 )
 export type ToolRegistryMap = typeof ToolRegistry

-// Convenience helper types inferred from schemas
 export type InferArgs<T extends ToolId> = z.infer<(typeof ToolArgSchemas)[T]>
 export type InferResult<T extends ToolId> = z.infer<(typeof ToolResultSchemas)[T]>
 export type InferToolCallSSE<T extends ToolId> = z.infer<(typeof ToolSSESchemas)[T]>
@@ -11,13 +11,21 @@ export { BLOCK_DIMENSIONS, CONTAINER_DIMENSIONS } from '@/lib/workflows/blocks/b
 /**
  * Horizontal spacing between layers (columns)
  */
-export const DEFAULT_HORIZONTAL_SPACING = 250
+export const DEFAULT_HORIZONTAL_SPACING = 180

 /**
  * Vertical spacing between blocks in the same layer
  */
 export const DEFAULT_VERTICAL_SPACING = 200

+/**
+ * Default offset when duplicating blocks
+ */
+export const DEFAULT_DUPLICATE_OFFSET = {
+  x: 180,
+  y: 20,
+} as const
+
 /**
  * General container padding for layout calculations
  */
@@ -78,15 +86,10 @@ export const DEFAULT_LAYOUT_OPTIONS = {
 }

 /**
- * Default horizontal spacing for containers (tighter than root level)
- */
-export const DEFAULT_CONTAINER_HORIZONTAL_SPACING = 250
-
-/**
- * Container-specific layout options (tighter spacing for nested layouts)
+ * Container-specific layout options (same spacing as root level for consistency)
  */
 export const CONTAINER_LAYOUT_OPTIONS = {
-  horizontalSpacing: DEFAULT_CONTAINER_HORIZONTAL_SPACING,
+  horizontalSpacing: DEFAULT_HORIZONTAL_SPACING,
   verticalSpacing: DEFAULT_VERTICAL_SPACING,
   padding: { x: CONTAINER_PADDING_X, y: CONTAINER_PADDING_Y },
 }
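The new DEFAULT_DUPLICATE_OFFSET constant replaces the hard-coded duplicate offsets shown in the two store hunks further down. Roughly, the before/after at each call site:

import { DEFAULT_DUPLICATE_OFFSET } from '@/lib/workflows/autolayout/constants'

// Before: magic numbers repeated at each duplicate-block call site
// const offsetPosition = { x: block.position.x + 250, y: block.position.y + 20 }

// After: one shared constant (the x offset also tightens from 250 to 180,
// matching the reduced DEFAULT_HORIZONTAL_SPACING)
const offsetPosition = {
  x: block.position.x + DEFAULT_DUPLICATE_OFFSET.x,
  y: block.position.y + DEFAULT_DUPLICATE_OFFSET.y,
}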
@@ -8,7 +8,6 @@ import type { CopilotMessage, CopilotToolCall } from '@/stores/panel/copilot/typ
 export interface PreviewData {
   id: string
   workflowState: any
-  yamlContent: string
   description?: string
   timestamp: number
   status: 'pending' | 'accepted' | 'rejected'
@@ -2520,14 +2520,6 @@
       return messageCheckpoints[messageId] || []
     },

-    // Preview YAML (stubbed/no-op)
-    setPreviewYaml: async (_yamlContent: string) => {},
-    clearPreviewYaml: async () => {
-      set((state) => ({
-        currentChat: state.currentChat ? { ...state.currentChat, previewYaml: null } : null,
-      }))
-    },
-
     // Handle streaming response
     handleStreamingResponse: async (
       stream: ReadableStream,
@@ -2685,7 +2677,6 @@
           model: selectedModel,
           messages: get().messages,
           messageCount: get().messages.length,
-          previewYaml: null,
           planArtifact: streamingPlanContent || null,
           config: {
             mode,
@@ -2843,10 +2834,6 @@
       }
     },

-    // Diff updates are out of scope for minimal store
-    updateDiffStore: async (_yamlContent: string) => {},
-    updateDiffStoreWithWorkflowState: async (_workflowState: any) => {},
-
     setSelectedModel: async (model) => {
       logger.info('[Context Usage] Model changed', { from: get().selectedModel, to: model })
       set({ selectedModel: model })
@@ -188,9 +188,6 @@ export interface CopilotActions {
   revertToCheckpoint: (checkpointId: string) => Promise<void>
   getCheckpointsForMessage: (messageId: string) => any[]

-  setPreviewYaml: (yamlContent: string) => Promise<void>
-  clearPreviewYaml: () => Promise<void>
-
   clearMessages: () => void
   clearError: () => void
   clearSaveError: () => void
@@ -217,8 +214,6 @@ export interface CopilotActions {
     triggerUserMessageId?: string
   ) => Promise<void>
   handleNewChatCreation: (newChatId: string) => Promise<void>
-  updateDiffStore: (yamlContent: string, toolName?: string) => Promise<void>
-  updateDiffStoreWithWorkflowState: (workflowState: any, toolName?: string) => Promise<void>
   executeIntegrationTool: (toolCallId: string) => Promise<void>
   skipIntegrationTool: (toolCallId: string) => void
   loadAutoAllowedTools: () => Promise<void>
@@ -2,6 +2,7 @@ import type { Edge } from 'reactflow'
 import { create } from 'zustand'
 import { devtools } from 'zustand/middleware'
 import { createLogger } from '@/lib/logs/console/logger'
+import { DEFAULT_DUPLICATE_OFFSET } from '@/lib/workflows/autolayout/constants'
 import { getBlockOutputs } from '@/lib/workflows/blocks/block-outputs'
 import { TriggerUtils } from '@/lib/workflows/triggers/triggers'
 import { getBlock } from '@/blocks'
@@ -591,8 +592,8 @@ export const useWorkflowStore = create<WorkflowStore>()(

         const newId = crypto.randomUUID()
         const offsetPosition = {
-          x: block.position.x + 250,
-          y: block.position.y + 20,
+          x: block.position.x + DEFAULT_DUPLICATE_OFFSET.x,
+          y: block.position.y + DEFAULT_DUPLICATE_OFFSET.y,
         }

         const newName = getUniqueBlockName(block.name, get().blocks)
@@ -1,504 +0,0 @@
|
|||||||
import { load as yamlParse } from 'js-yaml'
|
|
||||||
import { v4 as uuidv4 } from 'uuid'
|
|
||||||
import { createLogger } from '@/lib/logs/console/logger'
|
|
||||||
import { getBlock } from '@/blocks'
|
|
||||||
import { normalizeName } from '@/executor/constants'
|
|
||||||
import {
|
|
||||||
type ConnectionsFormat,
|
|
||||||
expandConditionInputs,
|
|
||||||
type ImportedEdge,
|
|
||||||
parseBlockConnections,
|
|
||||||
validateBlockReferences,
|
|
||||||
validateBlockStructure,
|
|
||||||
} from '@/stores/workflows/yaml/parsing-utils'
|
|
||||||
|
|
||||||
const logger = createLogger('WorkflowYamlImporter')
|
|
||||||
|
|
||||||
interface YamlBlock {
|
|
||||||
type: string
|
|
||||||
name: string
|
|
||||||
inputs?: Record<string, any>
|
|
||||||
connections?: ConnectionsFormat
|
|
||||||
parentId?: string // Add parentId for nested blocks
|
|
||||||
}
|
|
||||||
|
|
||||||
interface YamlWorkflow {
|
|
||||||
version: string
|
|
||||||
blocks: Record<string, YamlBlock>
|
|
||||||
}
|
|
||||||
|
|
||||||
interface ImportedBlock {
|
|
||||||
id: string
|
|
||||||
type: string
|
|
||||||
name: string
|
|
||||||
inputs: Record<string, any>
|
|
||||||
position: { x: number; y: number }
|
|
||||||
data?: Record<string, any>
|
|
||||||
parentId?: string
|
|
||||||
extent?: 'parent'
|
|
||||||
}
|
|
||||||
|
|
||||||
interface ImportResult {
|
|
||||||
blocks: ImportedBlock[]
|
|
||||||
edges: ImportedEdge[]
|
|
||||||
errors: string[]
|
|
||||||
warnings: string[]
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Parse YAML content and validate its structure
|
|
||||||
*/
|
|
||||||
export function parseWorkflowYaml(yamlContent: string): {
|
|
||||||
data: YamlWorkflow | null
|
|
||||||
errors: string[]
|
|
||||||
} {
|
|
||||||
const errors: string[] = []
|
|
||||||
|
|
||||||
try {
|
|
||||||
const data = yamlParse(yamlContent) as unknown
|
|
||||||
|
|
||||||
// Validate top-level structure
|
|
||||||
if (!data || typeof data !== 'object') {
|
|
||||||
errors.push('Invalid YAML: Root must be an object')
|
|
||||||
return { data: null, errors }
|
|
||||||
}
|
|
||||||
|
|
||||||
// Type guard to check if data has the expected structure
|
|
||||||
const parsedData = data as Record<string, unknown>
|
|
||||||
|
|
||||||
if (!parsedData.version) {
|
|
||||||
errors.push('Missing required field: version')
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!parsedData.blocks || typeof parsedData.blocks !== 'object') {
|
|
||||||
errors.push('Missing or invalid field: blocks')
|
|
||||||
return { data: null, errors }
|
|
||||||
}
|
|
||||||
|
|
||||||
// Validate blocks structure
|
|
||||||
const blocks = parsedData.blocks as Record<string, unknown>
|
|
||||||
Object.entries(blocks).forEach(([blockId, block]: [string, unknown]) => {
|
|
||||||
if (!block || typeof block !== 'object') {
|
|
||||||
errors.push(`Invalid block definition for '${blockId}': must be an object`)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
const blockData = block as Record<string, unknown>
|
|
||||||
|
|
||||||
if (!blockData.type || typeof blockData.type !== 'string') {
|
|
||||||
errors.push(`Invalid block '${blockId}': missing or invalid 'type' field`)
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!blockData.name || typeof blockData.name !== 'string') {
|
|
||||||
errors.push(`Invalid block '${blockId}': missing or invalid 'name' field`)
|
|
||||||
}
|
|
||||||
|
|
||||||
if (blockData.inputs && typeof blockData.inputs !== 'object') {
|
|
||||||
errors.push(`Invalid block '${blockId}': 'inputs' must be an object`)
|
|
||||||
}
|
|
||||||
|
|
||||||
if (blockData.preceding && !Array.isArray(blockData.preceding)) {
|
|
||||||
errors.push(`Invalid block '${blockId}': 'preceding' must be an array`)
|
|
||||||
}
|
|
||||||
|
|
||||||
if (blockData.following && !Array.isArray(blockData.following)) {
|
|
||||||
errors.push(`Invalid block '${blockId}': 'following' must be an array`)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
if (errors.length > 0) {
|
|
||||||
return { data: null, errors }
|
|
||||||
}
|
|
||||||
|
|
||||||
return { data: parsedData as unknown as YamlWorkflow, errors: [] }
|
|
||||||
} catch (error) {
|
|
||||||
errors.push(`YAML parsing error: ${error instanceof Error ? error.message : 'Unknown error'}`)
|
|
||||||
return { data: null, errors }
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Validate that block types exist and are valid
|
|
||||||
*/
|
|
||||||
function validateBlockTypes(yamlWorkflow: YamlWorkflow): { errors: string[]; warnings: string[] } {
|
|
||||||
const errors: string[] = []
|
|
||||||
const warnings: string[] = []
|
|
||||||
|
|
||||||
// Precompute counts that are used in validations to avoid O(n^2) checks
|
|
||||||
const apiTriggerCount = Object.values(yamlWorkflow.blocks).filter(
|
|
||||||
(b) => b.type === 'api_trigger'
|
|
||||||
).length
|
|
||||||
|
|
||||||
Object.entries(yamlWorkflow.blocks).forEach(([blockId, block]) => {
|
|
||||||
// Use shared structure validation
|
|
||||||
const { errors: structureErrors, warnings: structureWarnings } = validateBlockStructure(
|
|
||||||
blockId,
|
|
||||||
block
|
|
||||||
)
|
|
||||||
errors.push(...structureErrors)
|
|
||||||
warnings.push(...structureWarnings)
|
|
||||||
|
|
||||||
// Check if block type exists
|
|
||||||
const blockConfig = getBlock(block.type)
|
|
||||||
|
|
||||||
// Special handling for container blocks
|
|
||||||
if (block.type === 'loop' || block.type === 'parallel') {
|
|
||||||
// These are valid container types
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!blockConfig) {
|
|
||||||
errors.push(`Unknown block type '${block.type}' for block '${blockId}'`)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// Validate inputs against block configuration
|
|
||||||
if (block.inputs && blockConfig.subBlocks) {
|
|
||||||
Object.keys(block.inputs).forEach((inputKey) => {
|
|
||||||
const subBlockConfig = blockConfig.subBlocks.find((sb) => sb.id === inputKey)
|
|
||||||
if (!subBlockConfig) {
|
|
||||||
warnings.push(
|
|
||||||
`Block '${blockId}' has unknown input '${inputKey}' for type '${block.type}'`
|
|
||||||
)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
// Enforce only one API trigger in YAML (single check outside the loop)
|
|
||||||
if (apiTriggerCount > 1) {
|
|
||||||
errors.push('Only one API trigger is allowed per workflow (YAML contains multiple).')
|
|
||||||
}
|
|
||||||
|
|
||||||
return { errors, warnings }
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Validates block names are non-empty and unique (by normalized name).
|
|
||||||
*/
|
|
||||||
function validateBlockNames(blocks: Record<string, YamlBlock>): string[] {
|
|
||||||
const errors: string[] = []
|
|
||||||
const seen = new Map<string, string>()
|
|
||||||
|
|
||||||
for (const [blockId, block] of Object.entries(blocks)) {
|
|
||||||
const normalized = normalizeName(block.name)
|
|
||||||
|
|
||||||
if (!normalized) {
|
|
||||||
errors.push(`Block "${blockId}" has empty name`)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
const existingBlockId = seen.get(normalized)
|
|
||||||
if (existingBlockId) {
|
|
||||||
errors.push(
|
|
||||||
`Block "${blockId}" has same name as "${existingBlockId}" (normalized: "${normalized}")`
|
|
||||||
)
|
|
||||||
} else {
|
|
||||||
seen.set(normalized, blockId)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return errors
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Calculate positions for blocks based on their connections
|
|
||||||
* Uses a simple layered approach similar to the auto-layout algorithm
|
|
||||||
*/
|
|
||||||
function calculateBlockPositions(
|
|
||||||
yamlWorkflow: YamlWorkflow
|
|
||||||
): Record<string, { x: number; y: number }> {
|
|
||||||
const positions: Record<string, { x: number; y: number }> = {}
|
|
||||||
const blockIds = Object.keys(yamlWorkflow.blocks)
|
|
||||||
|
|
||||||
// Find starter blocks (no incoming connections)
|
|
||||||
const starterBlocks = blockIds.filter((id) => {
|
|
||||||
const block = yamlWorkflow.blocks[id]
|
|
||||||
return !block.connections?.incoming || block.connections.incoming.length === 0
|
|
||||||
})
|
|
||||||
|
|
||||||
// If no starter blocks found, use first block as starter
|
|
||||||
if (starterBlocks.length === 0 && blockIds.length > 0) {
|
|
||||||
starterBlocks.push(blockIds[0])
|
|
||||||
}
|
|
||||||
|
|
||||||
// Build layers
|
|
||||||
const layers: string[][] = []
|
|
||||||
const visited = new Set<string>()
|
|
||||||
const queue = [...starterBlocks]
|
|
||||||
|
|
||||||
// BFS to organize blocks into layers
|
|
||||||
while (queue.length > 0) {
|
|
||||||
const currentLayer: string[] = []
|
|
||||||
const currentLayerSize = queue.length
|
|
||||||
|
|
||||||
for (let i = 0; i < currentLayerSize; i++) {
|
|
||||||
const blockId = queue.shift()!
|
|
||||||
if (visited.has(blockId)) continue
|
|
||||||
|
|
||||||
visited.add(blockId)
|
|
||||||
currentLayer.push(blockId)
|
|
||||||
|
|
||||||
// Add following blocks to queue
|
|
||||||
const block = yamlWorkflow.blocks[blockId]
|
|
||||||
if (block.connections?.outgoing) {
|
|
||||||
block.connections.outgoing.forEach((connection) => {
|
|
||||||
if (!visited.has(connection.target)) {
|
|
||||||
queue.push(connection.target)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (currentLayer.length > 0) {
|
|
||||||
layers.push(currentLayer)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Add any remaining blocks as isolated layer
|
|
||||||
const remainingBlocks = blockIds.filter((id) => !visited.has(id))
|
|
||||||
if (remainingBlocks.length > 0) {
|
|
||||||
layers.push(remainingBlocks)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Calculate positions
|
|
||||||
const horizontalSpacing = 600
|
|
||||||
const verticalSpacing = 200
|
|
||||||
const startX = 150
|
|
||||||
const startY = 300
|
|
||||||
|
|
||||||
// First pass: position all blocks as if they're root level
|
|
||||||
layers.forEach((layer, layerIndex) => {
|
|
||||||
const layerX = startX + layerIndex * horizontalSpacing
|
|
||||||
|
|
||||||
layer.forEach((blockId, blockIndex) => {
|
|
||||||
const blockY = startY + (blockIndex - layer.length / 2) * verticalSpacing
|
|
||||||
positions[blockId] = { x: layerX, y: blockY }
|
|
||||||
})
|
|
||||||
})
|
|
||||||
|
|
||||||
// Second pass: adjust positions for child blocks to be relative to their parent
|
|
||||||
Object.entries(yamlWorkflow.blocks).forEach(([blockId, block]) => {
|
|
||||||
if (block.parentId && positions[blockId] && positions[block.parentId]) {
|
|
||||||
// Convert absolute position to relative position within parent
|
|
||||||
const parentPos = positions[block.parentId]
|
|
||||||
const childPos = positions[blockId]
|
|
||||||
|
|
||||||
// Calculate relative position inside the parent container
|
|
||||||
// Start child blocks at a reasonable offset inside the parent
|
|
||||||
positions[blockId] = {
|
|
||||||
x: 50 + (childPos.x - parentPos.x) * 0.3, // Scale down and offset
|
|
||||||
y: 100 + (childPos.y - parentPos.y) * 0.3, // Scale down and offset
|
|
||||||
}
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
return positions
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
 * Sort blocks to ensure parents are processed before children
 * This ensures proper creation order for nested blocks
 */
function sortBlocksByParentChildOrder(blocks: ImportedBlock[]): ImportedBlock[] {
  const sorted: ImportedBlock[] = []
  const processed = new Set<string>()
  const visiting = new Set<string>() // Track blocks currently being processed to detect cycles

  // Create a map for quick lookup
  const blockMap = new Map<string, ImportedBlock>()
  blocks.forEach((block) => blockMap.set(block.id, block))

  // Process blocks recursively, ensuring parents are added first
  function processBlock(block: ImportedBlock) {
    if (processed.has(block.id)) {
      return // Already processed
    }

    if (visiting.has(block.id)) {
      // Circular dependency detected - break the cycle by processing this block without its parent
      logger.warn(`Circular parent-child dependency detected for block ${block.id}, breaking cycle`)
      sorted.push(block)
      processed.add(block.id)
      return
    }

    visiting.add(block.id)

    // If this block has a parent, ensure the parent is processed first
    if (block.parentId) {
      const parentBlock = blockMap.get(block.parentId)
      if (parentBlock && !processed.has(block.parentId)) {
        processBlock(parentBlock)
      }
    }

    // Now process this block
    visiting.delete(block.id)
    sorted.push(block)
    processed.add(block.id)
  }

  // Process all blocks
  blocks.forEach((block) => processBlock(block))

  return sorted
}
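
// Illustrative usage (hypothetical blocks): a child listed before its parent
// still comes out parent-first, so block creation can rely on the order.
//
//   const ordered = sortBlocksByParentChildOrder([
//     { id: 'child', type: 'agent', name: 'Agent', inputs: {}, position: { x: 0, y: 0 }, parentId: 'loop1' },
//     { id: 'loop1', type: 'loop', name: 'Loop', inputs: {}, position: { x: 0, y: 0 } },
//   ])
//   // ordered.map((b) => b.id) => ['loop1', 'child']
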
/**
 * Convert YAML workflow to importable format
 */
export function convertYamlToWorkflow(yamlWorkflow: YamlWorkflow): ImportResult {
  const errors: string[] = []
  const warnings: string[] = []
  const blocks: ImportedBlock[] = []
  const edges: ImportedEdge[] = []

  // Validate block references
  const referenceErrors = validateBlockReferences(yamlWorkflow.blocks)
  errors.push(...referenceErrors)

  // Validate block types
  const { errors: typeErrors, warnings: typeWarnings } = validateBlockTypes(yamlWorkflow)
  errors.push(...typeErrors)
  warnings.push(...typeWarnings)

  // Validate block names (non-empty and unique)
  const nameErrors = validateBlockNames(yamlWorkflow.blocks)
  errors.push(...nameErrors)

  if (errors.length > 0) {
    return { blocks: [], edges: [], errors, warnings }
  }

  const positions = calculateBlockPositions(yamlWorkflow)

  Object.entries(yamlWorkflow.blocks).forEach(([blockId, yamlBlock]) => {
    const position = positions[blockId] || { x: 100, y: 100 }

    const processedInputs =
      yamlBlock.type === 'condition'
        ? expandConditionInputs(blockId, yamlBlock.inputs || {})
        : yamlBlock.inputs || {}

    const importedBlock: ImportedBlock = {
      id: blockId,
      type: yamlBlock.type,
      name: yamlBlock.name,
      inputs: processedInputs,
      position,
    }

    // Add container-specific data
    if (yamlBlock.type === 'loop' || yamlBlock.type === 'parallel') {
      // For loop/parallel blocks, map the inputs to the data field since they don't use subBlocks
      const inputs = yamlBlock.inputs || {}

      // Apply defaults for loop blocks
      if (yamlBlock.type === 'loop') {
        importedBlock.data = {
          width: 500,
          height: 300,
          type: 'subflowNode',
          loopType: inputs.loopType || 'for',
          count: inputs.iterations || inputs.count || 5,
          collection: inputs.collection || '',
          maxConcurrency: inputs.maxConcurrency || 1,
          // Include any other inputs provided
          ...inputs,
        }
      } else {
        // Parallel blocks
        importedBlock.data = {
          width: 500,
          height: 300,
          type: 'subflowNode',
          ...inputs,
        }
      }

      // Clear inputs since they're now in data
      importedBlock.inputs = {}
    }

    // Handle parent-child relationships for nested blocks
    if (yamlBlock.parentId) {
      importedBlock.parentId = yamlBlock.parentId
      importedBlock.extent = 'parent' // Always 'parent' when parentId exists
      // Also add to data for consistency with how the system works
      if (!importedBlock.data) {
        importedBlock.data = {}
      }
      importedBlock.data.parentId = yamlBlock.parentId
      importedBlock.data.extent = 'parent' // Always 'parent' when parentId exists
    }

    blocks.push(importedBlock)
  })

  // Convert edges from connections using shared parser
  Object.entries(yamlWorkflow.blocks).forEach(([blockId, yamlBlock]) => {
    const {
      edges: blockEdges,
      errors: connectionErrors,
      warnings: connectionWarnings,
    } = parseBlockConnections(blockId, yamlBlock.connections, yamlBlock.type)

    edges.push(...blockEdges)
    errors.push(...connectionErrors)
    warnings.push(...connectionWarnings)
  })

  // Sort blocks to ensure parents are created before children
  const sortedBlocks = sortBlocksByParentChildOrder(blocks)

  return { blocks: sortedBlocks, edges, errors, warnings }
}
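
// Illustrative input (hypothetical; only the `blocks` field of YamlWorkflow is
// relied on above): a two-block workflow using the grouped connections format.
//
//   const result = convertYamlToWorkflow({
//     blocks: {
//       start: { type: 'starter', name: 'Start', connections: { success: 'agent' } },
//       agent: { type: 'agent', name: 'Agent', inputs: { model: 'gpt-4o' } },
//     },
//   } as YamlWorkflow)
//   // result.blocks has two entries; result.edges has one start -> agent edge
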
/**
 * Create smart ID mapping that preserves existing block IDs and generates new ones for new blocks
 */
function createSmartIdMapping(
  yamlBlocks: ImportedBlock[],
  existingBlocks: Record<string, any>,
  activeWorkflowId: string,
  forceNewIds = false
): Map<string, string> {
  const yamlIdToActualId = new Map<string, string>()
  const existingBlockIds = new Set(Object.keys(existingBlocks))

  logger.info('Creating smart ID mapping', {
    activeWorkflowId,
    yamlBlockCount: yamlBlocks.length,
    existingBlockCount: Object.keys(existingBlocks).length,
    existingBlockIds: Array.from(existingBlockIds),
    yamlBlockIds: yamlBlocks.map((b) => b.id),
    forceNewIds,
  })

  for (const block of yamlBlocks) {
    if (forceNewIds || !existingBlockIds.has(block.id)) {
      // Force new ID or block ID doesn't exist in current workflow - generate new UUID
      const newId = uuidv4()
      yamlIdToActualId.set(block.id, newId)
      logger.info(
        `🆕 Mapping new block: ${block.id} -> ${newId} (${forceNewIds ? 'forced new ID' : `not found in workflow ${activeWorkflowId}`})`
      )
    } else {
      // Block ID exists in current workflow - preserve it
      yamlIdToActualId.set(block.id, block.id)
      logger.info(
        `✅ Preserving existing block ID: ${block.id} (exists in workflow ${activeWorkflowId})`
      )
    }
  }

  logger.info('Smart ID mapping completed', {
    mappings: Array.from(yamlIdToActualId.entries()),
    preservedCount: Array.from(yamlIdToActualId.entries()).filter(([old, new_]) => old === new_)
      .length,
    newCount: Array.from(yamlIdToActualId.entries()).filter(([old, new_]) => old !== new_).length,
  })

  return yamlIdToActualId
}
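
// Illustrative usage (hypothetical IDs): 'a' already exists in the workflow and
// keeps its ID; 'b' is new and receives a freshly generated UUID.
//
//   const mapping = createSmartIdMapping(
//     [{ id: 'a' }, { id: 'b' }] as ImportedBlock[],
//     { a: { /* existing block state */ } },
//     'workflow-123'
//   )
//   // mapping.get('a') === 'a'; mapping.get('b') is a new uuidv4()
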
@@ -1,714 +0,0 @@
import { v4 as uuidv4 } from 'uuid'
import { createLogger } from '@/lib/logs/console/logger'
import { EDGE } from '@/executor/constants'

const logger = createLogger('YamlParsingUtils')

export interface ImportedEdge {
  id: string
  source: string
  target: string
  sourceHandle: string
  targetHandle: string
  type: string
}

export interface ParsedConnections {
  edges: ImportedEdge[]
  errors: string[]
  warnings: string[]
}

export interface ConnectionsFormat {
  // New format - grouped by handle type
  success?: string | string[]
  error?: string | string[]
  conditions?: Record<string, string | string[]>
  loop?: {
    start?: string | string[]
    end?: string | string[]
  }
  parallel?: {
    start?: string | string[]
    end?: string | string[]
  }
  // Direct handle format (alternative to nested format above)
  'loop-start-source'?: string | string[]
  'loop-end-source'?: string | string[]
  'parallel-start-source'?: string | string[]
  'parallel-end-source'?: string | string[]
  // Legacy format support
  incoming?: Array<{
    source: string
    sourceHandle?: string
    targetHandle?: string
  }>
  outgoing?: Array<{
    target: string
    sourceHandle?: string
    targetHandle?: string
  }>
}
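
// Illustrative value (hypothetical block IDs): a single connections object can
// mix handle types, and each handle accepts either a string or a string array.
//
//   const example: ConnectionsFormat = {
//     success: 'next-block',
//     error: ['error-handler', 'audit-log'],
//     conditions: { if: 'path-a', else: 'path-b' },
//     loop: { start: 'loop-body', end: 'after-loop' },
//   }
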
/**
 * Parse block connections from both new grouped format and legacy format
 */
export function parseBlockConnections(
  blockId: string,
  connections: ConnectionsFormat | undefined,
  blockType?: string
): ParsedConnections {
  const edges: ImportedEdge[] = []
  const errors: string[] = []
  const warnings: string[] = []

  if (!connections) {
    return { edges, errors, warnings }
  }

  // Handle new grouped format
  if (hasNewFormat(connections)) {
    parseNewFormatConnections(blockId, connections, edges, errors, warnings, blockType)
  }

  // Handle legacy format (for backwards compatibility)
  if (connections.outgoing) {
    parseLegacyOutgoingConnections(blockId, connections.outgoing, edges, errors, warnings)
  }

  return { edges, errors, warnings }
}
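
// Illustrative usage (hypothetical IDs): the grouped format expands into one
// ImportedEdge per target, with handles such as 'source' and 'error'.
//
//   const { edges, errors, warnings } = parseBlockConnections('start', {
//     success: 'agent',
//     error: 'error-handler',
//   })
//   // edges.length === 2; edges[0].sourceHandle === 'source'
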
/**
 * Generate connections in the new grouped format from edges
 */
export function generateBlockConnections(
  blockId: string,
  edges: ImportedEdge[] | any[]
): ConnectionsFormat {
  const connections: ConnectionsFormat = {}

  const outgoingEdges = edges.filter((edge) => edge.source === blockId)

  if (outgoingEdges.length === 0) {
    return connections
  }

  // Group edges by source handle type
  const successTargets: string[] = []
  const errorTargets: string[] = []
  const conditionTargets: Record<string, string[]> = {}
  const loopTargets: { start: string[]; end: string[] } = { start: [], end: [] }
  const parallelTargets: { start: string[]; end: string[] } = { start: [], end: [] }

  // Track condition ordering for clean sequential else-if naming
  const rawConditionIds: string[] = []

  for (const edge of outgoingEdges) {
    const handle = edge.sourceHandle ?? 'source'

    if (handle === 'source') {
      successTargets.push(edge.target)
    } else if (handle === 'error') {
      errorTargets.push(edge.target)
    } else if (handle.startsWith(EDGE.CONDITION_PREFIX)) {
      const rawConditionId = extractConditionId(handle)
      rawConditionIds.push(rawConditionId)

      if (!conditionTargets[rawConditionId]) {
        conditionTargets[rawConditionId] = []
      }
      conditionTargets[rawConditionId].push(edge.target)
    } else if (handle === 'loop-start-source') {
      loopTargets.start.push(edge.target)
    } else if (handle === 'loop-end-source') {
      loopTargets.end.push(edge.target)
    } else if (handle === 'parallel-start-source') {
      parallelTargets.start.push(edge.target)
    } else if (handle === 'parallel-end-source') {
      parallelTargets.end.push(edge.target)
    }
  }

  // Create clean condition mapping for timestamp-based else-if IDs
  const cleanConditionTargets: Record<string, string[]> = {}
  let elseIfCount = 0

  Object.entries(conditionTargets).forEach(([rawId, targets]) => {
    let cleanId = rawId

    // Simple check: if this is exactly 'else', keep it as 'else'
    if (rawId === 'else') {
      cleanId = 'else'
    }
    // Convert timestamp-based else-if IDs to clean sequential format
    else if (rawId.startsWith('else-if-') && /else-if-\d+$/.test(rawId)) {
      elseIfCount++
      if (elseIfCount === 1) {
        cleanId = 'else-if'
      } else {
        cleanId = `else-if-${elseIfCount}`
      }
    }

    cleanConditionTargets[cleanId] = targets
  })

  // After processing all conditions, check if we need to convert the last else-if to else
  // If we have more than the expected number of else-if conditions, the last one should be else
  const conditionKeys = Object.keys(cleanConditionTargets)
  const hasElse = conditionKeys.includes('else')
  const elseIfKeys = conditionKeys.filter((key) => key.startsWith('else-if'))

  if (!hasElse && elseIfKeys.length > 0) {
    // Find the highest numbered else-if and convert it to else
    const highestElseIf = elseIfKeys.sort((a, b) => {
      const aNum = a === 'else-if' ? 1 : Number.parseInt(a.replace('else-if-', ''))
      const bNum = b === 'else-if' ? 1 : Number.parseInt(b.replace('else-if-', ''))
      return bNum - aNum
    })[0]

    // Move the targets from the highest else-if to else
    cleanConditionTargets.else = cleanConditionTargets[highestElseIf]
    delete cleanConditionTargets[highestElseIf]
  }

  // Add to connections object (use single values for single targets, arrays for multiple)
  if (successTargets.length > 0) {
    connections.success = successTargets.length === 1 ? successTargets[0] : successTargets
  }

  if (errorTargets.length > 0) {
    connections.error = errorTargets.length === 1 ? errorTargets[0] : errorTargets
  }

  if (Object.keys(cleanConditionTargets).length > 0) {
    connections.conditions = {}

    // Sort condition keys to maintain consistent order: if, else-if, else-if-2, ..., else
    const sortedConditionKeys = Object.keys(cleanConditionTargets).sort((a, b) => {
      // Define the order priority
      const getOrder = (key: string): number => {
        if (key === 'if') return 0
        if (key === 'else-if') return 1
        if (key.startsWith('else-if-')) {
          const num = Number.parseInt(key.replace('else-if-', ''), 10)
          return 1 + num // else-if-2 = 3, else-if-3 = 4, etc.
        }
        if (key === 'else') return 1000 // Always last
        return 500 // Other conditions in the middle
      }

      return getOrder(a) - getOrder(b)
    })

    // Build the connections object in the correct order
    for (const conditionId of sortedConditionKeys) {
      const targets = cleanConditionTargets[conditionId]
      connections.conditions[conditionId] = targets.length === 1 ? targets[0] : targets
    }
  }

  if (loopTargets.start.length > 0 || loopTargets.end.length > 0) {
    connections.loop = {}
    if (loopTargets.start.length > 0) {
      connections.loop.start =
        loopTargets.start.length === 1 ? loopTargets.start[0] : loopTargets.start
    }
    if (loopTargets.end.length > 0) {
      connections.loop.end = loopTargets.end.length === 1 ? loopTargets.end[0] : loopTargets.end
    }
  }

  if (parallelTargets.start.length > 0 || parallelTargets.end.length > 0) {
    connections.parallel = {}
    if (parallelTargets.start.length > 0) {
      connections.parallel.start =
        parallelTargets.start.length === 1 ? parallelTargets.start[0] : parallelTargets.start
    }
    if (parallelTargets.end.length > 0) {
      connections.parallel.end =
        parallelTargets.end.length === 1 ? parallelTargets.end[0] : parallelTargets.end
    }
  }

  return connections
}
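
// Illustrative round trip (hypothetical edges): grouping two outgoing edges of
// one block back into the grouped connections format. Single targets collapse
// to plain strings rather than one-element arrays.
//
//   const connections = generateBlockConnections('start', [
//     { id: 'e1', source: 'start', target: 'agent', sourceHandle: 'source', targetHandle: 'target', type: 'workflowEdge' },
//     { id: 'e2', source: 'start', target: 'fallback', sourceHandle: 'error', targetHandle: 'target', type: 'workflowEdge' },
//   ])
//   // connections => { success: 'agent', error: 'fallback' }
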
/**
 * Validate block structure (type, name, inputs)
 */
export function validateBlockStructure(
  blockId: string,
  block: any
): { errors: string[]; warnings: string[] } {
  const errors: string[] = []
  const warnings: string[] = []

  if (!block || typeof block !== 'object') {
    errors.push(`Invalid block definition for '${blockId}': must be an object`)
    return { errors, warnings }
  }

  if (!block.type || typeof block.type !== 'string') {
    errors.push(`Invalid block '${blockId}': missing or invalid 'type' field`)
  }

  if (!block.name || typeof block.name !== 'string') {
    errors.push(`Invalid block '${blockId}': missing or invalid 'name' field`)
  }

  if (block.inputs && typeof block.inputs !== 'object') {
    errors.push(`Invalid block '${blockId}': 'inputs' must be an object`)
  }

  return { errors, warnings }
}
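
// Illustrative check (hypothetical block): a block missing its 'name' field
// yields exactly one error and no warnings.
//
//   const { errors } = validateBlockStructure('agent-1', { type: 'agent' })
//   // errors => ["Invalid block 'agent-1': missing or invalid 'name' field"]
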
/**
 * Clean up condition inputs to remove UI state and use semantic format
 * Preserves actual condition IDs that match connections
 */
export function cleanConditionInputs(
  blockId: string,
  inputs: Record<string, any>
): Record<string, any> {
  const cleanInputs = { ...inputs }

  // Handle condition blocks specially
  if (cleanInputs.conditions) {
    try {
      // Parse the JSON string conditions
      const conditions =
        typeof cleanInputs.conditions === 'string'
          ? JSON.parse(cleanInputs.conditions)
          : cleanInputs.conditions

      if (Array.isArray(conditions)) {
        // Convert to clean format, preserving actual IDs for connection mapping
        const tempConditions: Array<{ key: string; value: string }> = []

        // Track else-if count for clean numbering
        let elseIfCount = 0

        conditions.forEach((condition: any) => {
          if (condition.title && condition.value !== undefined) {
            // Create clean semantic keys instead of preserving timestamps
            let key = condition.title
            if (condition.title === 'else if') {
              elseIfCount++
              if (elseIfCount === 1) {
                key = 'else-if'
              } else {
                key = `else-if-${elseIfCount}`
              }
            }

            const stringValue = String(condition.value || '')
            if (stringValue.trim()) {
              tempConditions.push({ key, value: stringValue.trim() })
            }
          }
        })

        // Sort conditions to maintain consistent order: if, else-if, else-if-2, ..., else
        tempConditions.sort((a, b) => {
          const getOrder = (key: string): number => {
            if (key === 'if') return 0
            if (key === 'else-if') return 1
            if (key.startsWith('else-if-')) {
              const num = Number.parseInt(key.replace('else-if-', ''), 10)
              return 1 + num // else-if-2 = 3, else-if-3 = 4, etc.
            }
            if (key === 'else') return 1000 // Always last
            return 500 // Other conditions in the middle
          }

          return getOrder(a.key) - getOrder(b.key)
        })

        // Build the final ordered object
        const cleanConditions: Record<string, string> = {}
        tempConditions.forEach(({ key, value }) => {
          cleanConditions[key] = value
        })

        // Replace the verbose format with clean format
        if (Object.keys(cleanConditions).length > 0) {
          cleanInputs.conditions = cleanConditions
        } else {
          cleanInputs.conditions = undefined
        }
      }
    } catch (error) {
      // If parsing fails, leave as-is with a warning
      logger.warn(`Failed to clean condition inputs for block ${blockId}:`, error)
    }
  }

  return cleanInputs
}
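
// Illustrative before/after (hypothetical IDs): UI state fields are dropped,
// timestamped titles collapse into semantic keys, and empty values are skipped.
//
//   cleanConditionInputs('cond-1', {
//     conditions: JSON.stringify([
//       { id: 'cond-1-if-1752111795510', title: 'if', value: '<x> > 5', showTags: false },
//       { id: 'cond-1-else-1752111795511', title: 'else', value: '', showTags: false },
//     ]),
//   })
//   // => { conditions: { if: '<x> > 5' } }
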
/**
 * Convert clean condition inputs back to internal format for import
 */
export function expandConditionInputs(
  blockId: string,
  inputs: Record<string, any>
): Record<string, any> {
  const expandedInputs = { ...inputs }

  // Handle clean condition format
  if (
    expandedInputs.conditions &&
    typeof expandedInputs.conditions === 'object' &&
    !Array.isArray(expandedInputs.conditions)
  ) {
    const conditionsObj = expandedInputs.conditions as Record<string, string>
    const conditionsArray: any[] = []

    Object.entries(conditionsObj).forEach(([key, value]) => {
      const conditionId = `${blockId}-${key}`

      // Determine display title from key
      let title = key
      if (key.startsWith('else-if')) {
        title = 'else if'
      }

      conditionsArray.push({
        id: conditionId,
        title: title,
        value: String(value || ''),
        showTags: false,
        showEnvVars: false,
        searchTerm: '',
        cursorPosition: 0,
        activeSourceBlockId: null,
      })
    })

    // Add default else if not present and no existing else key
    const hasElse = Object.keys(conditionsObj).some((key) => key === 'else')
    if (!hasElse) {
      conditionsArray.push({
        id: `${blockId}-else`,
        title: 'else',
        value: '',
        showTags: false,
        showEnvVars: false,
        searchTerm: '',
        cursorPosition: 0,
        activeSourceBlockId: null,
      })
    }

    expandedInputs.conditions = JSON.stringify(conditionsArray)
  }

  return expandedInputs
}
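
// Illustrative expansion (hypothetical IDs): the inverse of cleanConditionInputs.
// The clean map becomes the JSON array the UI stores, and a default empty
// 'else' branch is appended when none is present.
//
//   expandConditionInputs('cond-1', { conditions: { if: '<x> > 5' } })
//   // => conditions is a JSON string of two entries:
//   //    { id: 'cond-1-if', title: 'if', value: '<x> > 5', ... }
//   //    { id: 'cond-1-else', title: 'else', value: '', ... }
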
/**
 * Validate that block references in connections exist
 */
export function validateBlockReferences(blocks: Record<string, any>): string[] {
  const errors: string[] = []
  const blockIds = new Set(Object.keys(blocks))

  Object.entries(blocks).forEach(([blockId, block]) => {
    if (!block.connections) return

    const { edges } = parseBlockConnections(blockId, block.connections, block.type)

    edges.forEach((edge) => {
      if (!blockIds.has(edge.target)) {
        errors.push(`Block '${blockId}' references non-existent target block '${edge.target}'`)
      }
    })

    // Check parent references
    if (block.parentId && !blockIds.has(block.parentId)) {
      errors.push(`Block '${blockId}' references non-existent parent block '${block.parentId}'`)
    }
  })

  return errors
}
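
// Illustrative check (hypothetical IDs): a connection pointing at a block that
// is not defined in the map is reported by ID.
//
//   validateBlockReferences({
//     start: { type: 'starter', name: 'Start', connections: { success: 'missing' } },
//   })
//   // => ["Block 'start' references non-existent target block 'missing'"]
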
// Helper functions

function hasNewFormat(connections: ConnectionsFormat): boolean {
  return !!(
    connections.success ||
    connections.error ||
    connections.conditions ||
    connections.loop ||
    connections.parallel ||
    // Direct handle keys are also parsed by parseNewFormatConnections, so they
    // must count as the new format or those connections would be silently dropped
    connections['loop-start-source'] ||
    connections['loop-end-source'] ||
    connections['parallel-start-source'] ||
    connections['parallel-end-source']
  )
}
function parseNewFormatConnections(
  blockId: string,
  connections: ConnectionsFormat,
  edges: ImportedEdge[],
  errors: string[],
  warnings: string[],
  blockType?: string
) {
  // Parse success connections
  if (connections.success) {
    const targets = Array.isArray(connections.success) ? connections.success : [connections.success]
    targets.forEach((target) => {
      if (typeof target === 'string') {
        edges.push(createEdge(blockId, target, 'source', 'target'))
      } else {
        errors.push(`Invalid success target in block '${blockId}': must be a string`)
      }
    })
  }

  // Parse error connections
  if (connections.error) {
    const targets = Array.isArray(connections.error) ? connections.error : [connections.error]
    targets.forEach((target) => {
      if (typeof target === 'string') {
        edges.push(createEdge(blockId, target, 'error', 'target'))
      } else {
        errors.push(`Invalid error target in block '${blockId}': must be a string`)
      }
    })
  }

  // Parse condition connections
  if (connections.conditions) {
    if (typeof connections.conditions !== 'object') {
      errors.push(`Invalid conditions in block '${blockId}': must be an object`)
    } else {
      Object.entries(connections.conditions).forEach(([conditionId, targets]) => {
        const targetArray = Array.isArray(targets) ? targets : [targets]
        targetArray.forEach((target) => {
          if (typeof target === 'string') {
            // Create condition handle based on block type and condition ID
            const sourceHandle = createConditionHandle(blockId, conditionId, blockType)
            edges.push(createEdge(blockId, target, sourceHandle, 'target'))
          } else {
            errors.push(
              `Invalid condition target for '${conditionId}' in block '${blockId}': must be a string`
            )
          }
        })
      })
    }
  }

  // Parse loop connections
  if (connections.loop) {
    if (typeof connections.loop !== 'object') {
      errors.push(`Invalid loop connections in block '${blockId}': must be an object`)
    } else {
      if (connections.loop.start) {
        const targets = Array.isArray(connections.loop.start)
          ? connections.loop.start
          : [connections.loop.start]
        targets.forEach((target) => {
          if (typeof target === 'string') {
            edges.push(createEdge(blockId, target, 'loop-start-source', 'target'))
          } else {
            errors.push(`Invalid loop start target in block '${blockId}': must be a string`)
          }
        })
      }

      if (connections.loop.end) {
        const targets = Array.isArray(connections.loop.end)
          ? connections.loop.end
          : [connections.loop.end]
        targets.forEach((target) => {
          if (typeof target === 'string') {
            edges.push(createEdge(blockId, target, 'loop-end-source', 'target'))
          } else {
            errors.push(`Invalid loop end target in block '${blockId}': must be a string`)
          }
        })
      }
    }
  }

  // Parse parallel connections
  if (connections.parallel) {
    if (typeof connections.parallel !== 'object') {
      errors.push(`Invalid parallel connections in block '${blockId}': must be an object`)
    } else {
      if (connections.parallel.start) {
        const targets = Array.isArray(connections.parallel.start)
          ? connections.parallel.start
          : [connections.parallel.start]
        targets.forEach((target) => {
          if (typeof target === 'string') {
            edges.push(createEdge(blockId, target, 'parallel-start-source', 'target'))
          } else {
            errors.push(`Invalid parallel start target in block '${blockId}': must be a string`)
          }
        })
      }

      if (connections.parallel.end) {
        const targets = Array.isArray(connections.parallel.end)
          ? connections.parallel.end
          : [connections.parallel.end]
        targets.forEach((target) => {
          if (typeof target === 'string') {
            edges.push(createEdge(blockId, target, 'parallel-end-source', 'target'))
          } else {
            errors.push(`Invalid parallel end target in block '${blockId}': must be a string`)
          }
        })
      }
    }
  }

  // Parse direct handle formats (alternative to nested format)
  // This allows using 'loop-start-source' directly instead of 'loop.start'
  if (connections['loop-start-source']) {
    const targets = Array.isArray(connections['loop-start-source'])
      ? connections['loop-start-source']
      : [connections['loop-start-source']]
    targets.forEach((target) => {
      if (typeof target === 'string') {
        edges.push(createEdge(blockId, target, 'loop-start-source', 'target'))
      } else {
        errors.push(`Invalid loop-start-source target in block '${blockId}': must be a string`)
      }
    })
  }

  if (connections['loop-end-source']) {
    const targets = Array.isArray(connections['loop-end-source'])
      ? connections['loop-end-source']
      : [connections['loop-end-source']]
    targets.forEach((target) => {
      if (typeof target === 'string') {
        edges.push(createEdge(blockId, target, 'loop-end-source', 'target'))
      } else {
        errors.push(`Invalid loop-end-source target in block '${blockId}': must be a string`)
      }
    })
  }

  if (connections['parallel-start-source']) {
    const targets = Array.isArray(connections['parallel-start-source'])
      ? connections['parallel-start-source']
      : [connections['parallel-start-source']]
    targets.forEach((target) => {
      if (typeof target === 'string') {
        edges.push(createEdge(blockId, target, 'parallel-start-source', 'target'))
      } else {
        errors.push(`Invalid parallel-start-source target in block '${blockId}': must be a string`)
      }
    })
  }

  if (connections['parallel-end-source']) {
    const targets = Array.isArray(connections['parallel-end-source'])
      ? connections['parallel-end-source']
      : [connections['parallel-end-source']]
    targets.forEach((target) => {
      if (typeof target === 'string') {
        edges.push(createEdge(blockId, target, 'parallel-end-source', 'target'))
      } else {
        errors.push(`Invalid parallel-end-source target in block '${blockId}': must be a string`)
      }
    })
  }
}
function parseLegacyOutgoingConnections(
  blockId: string,
  outgoing: Array<{ target: string; sourceHandle?: string; targetHandle?: string }>,
  edges: ImportedEdge[],
  errors: string[],
  warnings: string[]
) {
  warnings.push(
    `Block '${blockId}' uses legacy connection format - consider upgrading to the new grouped format`
  )

  outgoing.forEach((connection) => {
    if (!connection.target) {
      errors.push(`Missing target in outgoing connection for block '${blockId}'`)
      return
    }

    edges.push(
      createEdge(
        blockId,
        connection.target,
        connection.sourceHandle || 'source',
        connection.targetHandle || 'target'
      )
    )
  })
}
function createEdge(
  source: string,
  target: string,
  sourceHandle: string,
  targetHandle: string
): ImportedEdge {
  return {
    id: uuidv4(),
    source,
    target,
    sourceHandle,
    targetHandle,
    type: 'workflowEdge',
  }
}
function createConditionHandle(blockId: string, conditionId: string, blockType?: string): string {
  // For condition blocks, create the handle format that the system expects
  if (blockType === 'condition') {
    // Map semantic condition IDs to the internal format the system expects
    const actualConditionId = `${blockId}-${conditionId}`
    return `${EDGE.CONDITION_PREFIX}${actualConditionId}`
  }
  // For other blocks that might have conditions, use a more explicit format
  return `${EDGE.CONDITION_PREFIX}${blockId}-${conditionId}`
}
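
// Illustrative output: assuming EDGE.CONDITION_PREFIX is 'condition-' (as the
// sample handle in extractConditionId below suggests), the 'if' branch of a
// condition block 'cond-1' yields the handle 'condition-cond-1-if'. Note that
// both branches currently build the same string; the split is kept so the
// non-condition case can diverge later.
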
function extractConditionId(sourceHandle: string): string {
  // Extract condition ID from handle like "condition-blockId-semantic-key"
  // Example: "condition-e23e6318-bcdc-4572-a76b-5015e3950121-else-if-1752111795510"

  if (!sourceHandle.startsWith(EDGE.CONDITION_PREFIX)) {
    return sourceHandle
  }

  // Remove condition prefix
  const withoutPrefix = sourceHandle.substring(EDGE.CONDITION_PREFIX.length)

  // Special case: check if this ends with "-else" (the auto-added else condition)
  if (withoutPrefix.endsWith('-else')) {
    return 'else'
  }

  // Find the first UUID pattern (36 characters with 4 hyphens in specific positions)
  // UUID format: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
  const uuidRegex = /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}-(.+)$/i
  const match = withoutPrefix.match(uuidRegex)

  if (match) {
    // Extract everything after the UUID - return raw ID for further processing
    return match[1]
  }

  // Fallback for legacy format or simpler cases
  const parts = sourceHandle.split('-')
  if (parts.length >= 2) {
    return parts[parts.length - 1]
  }

  return sourceHandle
}
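
// Illustrative extraction: for the sample handle in the comment above,
// 'condition-e23e6318-bcdc-4572-a76b-5015e3950121-else-if-1752111795510',
// the UUID regex strips the prefix and the block UUID and returns the raw ID
// 'else-if-1752111795510', which generateBlockConnections then renumbers into
// a clean sequential key such as 'else-if'.
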
@@ -1,175 +0,0 @@
import { create } from 'zustand'
import { devtools } from 'zustand/middleware'
import { createLogger } from '@/lib/logs/console/logger'
import { useWorkflowRegistry } from '../registry/store'
import { useSubBlockStore } from '../subblock/store'
import { useWorkflowStore } from '../workflow/store'

const logger = createLogger('WorkflowYamlStore')

interface WorkflowYamlState {
  yaml: string
  lastGenerated?: number
}

interface WorkflowYamlActions {
  generateYaml: () => Promise<void>
  getYaml: () => Promise<string>
  refreshYaml: () => void
}

type WorkflowYamlStore = WorkflowYamlState & WorkflowYamlActions
/**
 * Get subblock values organized by block for the shared utility
 */
function getSubBlockValues() {
  const workflowState = useWorkflowStore.getState()
  const subBlockStore = useSubBlockStore.getState()

  const subBlockValues: Record<string, Record<string, any>> = {}
  Object.entries(workflowState.blocks).forEach(([blockId]) => {
    subBlockValues[blockId] = {}
    // Get all subblock values for this block
    Object.keys(workflowState.blocks[blockId].subBlocks || {}).forEach((subBlockId) => {
      const value = subBlockStore.getValue(blockId, subBlockId)
      if (value !== undefined) {
        subBlockValues[blockId][subBlockId] = value
      }
    })
  })

  return subBlockValues
}
// Track if subscriptions have been initialized
let subscriptionsInitialized = false

// Track timeout IDs for cleanup
let workflowRefreshTimeoutId: NodeJS.Timeout | null = null
let subBlockRefreshTimeoutId: NodeJS.Timeout | null = null

// Initialize subscriptions lazily
function initializeSubscriptions() {
  if (subscriptionsInitialized) return
  subscriptionsInitialized = true

  // Auto-refresh YAML when workflow state changes
  let lastWorkflowState: { blockCount: number; edgeCount: number } | null = null

  useWorkflowStore.subscribe((state) => {
    const currentState = {
      blockCount: Object.keys(state.blocks).length,
      edgeCount: state.edges.length,
    }

    // Only refresh if the structure has changed
    if (
      !lastWorkflowState ||
      lastWorkflowState.blockCount !== currentState.blockCount ||
      lastWorkflowState.edgeCount !== currentState.edgeCount
    ) {
      lastWorkflowState = currentState

      // Clear existing timeout to properly debounce
      if (workflowRefreshTimeoutId) {
        clearTimeout(workflowRefreshTimeoutId)
      }

      // Debounce the refresh to avoid excessive updates
      const refreshYaml = useWorkflowYamlStore.getState().refreshYaml
      workflowRefreshTimeoutId = setTimeout(() => {
        refreshYaml()
        workflowRefreshTimeoutId = null
      }, 100)
    }
  })

  // Subscribe to subblock store changes
  let lastSubBlockChangeTime = 0

  useSubBlockStore.subscribe((state) => {
    const currentTime = Date.now()

    // Debounce rapid changes
    if (currentTime - lastSubBlockChangeTime > 100) {
      lastSubBlockChangeTime = currentTime

      // Clear existing timeout to properly debounce
      if (subBlockRefreshTimeoutId) {
        clearTimeout(subBlockRefreshTimeoutId)
      }

      const refreshYaml = useWorkflowYamlStore.getState().refreshYaml
      subBlockRefreshTimeoutId = setTimeout(() => {
        refreshYaml()
        subBlockRefreshTimeoutId = null
      }, 100)
    }
  })
}
export const useWorkflowYamlStore = create<WorkflowYamlStore>()(
  devtools(
    (set, get) => ({
      yaml: '',
      lastGenerated: undefined,

      generateYaml: async () => {
        // Initialize subscriptions on first use
        initializeSubscriptions()

        // Get the active workflow ID from registry
        const { activeWorkflowId } = useWorkflowRegistry.getState()

        if (!activeWorkflowId) {
          logger.warn('No active workflow to generate YAML for')
          return
        }

        // Call the new database-based export endpoint
        const response = await fetch(`/api/workflows/yaml/export?workflowId=${activeWorkflowId}`)

        if (!response.ok) {
          const errorData = await response.json().catch(() => null)
          logger.error('Failed to generate YAML:', errorData?.error || response.statusText)
          return
        }

        const result = await response.json()

        if (result.success && result.yaml) {
          set({
            yaml: result.yaml,
            lastGenerated: Date.now(),
          })
        } else {
          logger.error('Failed to generate YAML:', result.error)
        }
      },

      getYaml: async () => {
        // Initialize subscriptions on first use
        initializeSubscriptions()

        const currentTime = Date.now()
        const { yaml, lastGenerated } = get()

        // Auto-refresh if data is stale (older than 1 second) or never generated
        if (!lastGenerated || currentTime - lastGenerated > 1000) {
          await get().generateYaml()
          return get().yaml
        }

        return yaml
      },

      refreshYaml: () => {
        get().generateYaml()
      },
    }),
    {
      name: 'workflow-yaml-store',
    }
  )
)
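
// Illustrative usage (hypothetical call sites): inside a React component the
// store is read as a hook; outside React, getState() gives imperative access.
//
//   const yaml = useWorkflowYamlStore((state) => state.yaml)
//   const freshYaml = await useWorkflowYamlStore.getState().getYaml()
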