Compare commits

..

20 Commits

Author SHA1 Message Date
Waleed Latif
d75cc1ed84 v0.3.30: duplication, control bar fixes 2025-08-18 08:57:26 -07:00
Waleed Latif
5a8a703ecb fix(duplicate): fixed detached state on duplication (#1011) 2025-08-18 08:51:18 -07:00
Waleed Latif
6f64188b8d fix(control-bar): fix icons styling in disabled state (#1010) 2025-08-18 08:22:06 -07:00
Vikhyath Mondreti
60a9a25553 Merge pull request #1009 from simstudioai/staging
update migration file for notekeeping purpose
2025-08-18 01:59:02 -07:00
Vikhyath Mondreti
52fa388f81 update migration file for notekeeping purpose 2025-08-18 01:56:34 -07:00
Vikhyath Mondreti
5c56cbd558 Merge pull request #1008 from simstudioai/staging
reduce batch size to prevent timeouts
2025-08-18 01:11:49 -07:00
Vikhyath Mondreti
dc19525a6f reduce batch size to prevent timeouts 2025-08-18 01:10:47 -07:00
Vikhyath Mondreti
3873f44875 Merge pull request #1007 from simstudioai/staging
syntax issue in migration
2025-08-18 00:59:53 -07:00
Vikhyath Mondreti
09b95f41ea syntax issue in migration 2025-08-18 00:58:09 -07:00
Vikhyath Mondreti
af60ccd188 fix: migration mem issues bypass
2025-08-18 00:50:20 -07:00
Vikhyath Mondreti
eb75afd115 make logs migration batched to prevent mem issues (#1005) 2025-08-18 00:42:38 -07:00
Waleed Latif
fdb8256468 fix(subflow): remove all edges when removing a block from a subflow (#1003) 2025-08-18 00:21:26 -07:00
Vikhyath Mondreti
570c07bf2a Merge pull request #1004 from simstudioai/staging
v0.3.29: copilot fixes, remove block from subflow, code cleanups
2025-08-18 00:18:44 -07:00
Adam Gough
5c16e7d390 fix(subflow): add ability to remove block from subflow and refactor to consolidate subflow code (#983)
* added logic to remove blocks from subflows

* refactored logic into just subflow-node

* bun run lint

* added subflow test

* added a safety check for data.parentId

* added state update logic

* bun run lint

* removed old logic

* removed any

* added tests

* added type safety

* removed test script

* type safety

---------

Co-authored-by: Adam Gough <adamgough@Mac.attlocal.net>
Co-authored-by: waleedlatif1 <walif6@gmail.com>
2025-08-17 22:25:31 -07:00
Waleed Latif
bd38062705 fix(workflow-error): allow users to delete workflows with invalid configs/state (#1000)
* fix(workflow-error): allow users to delete workflows with invalid configs/state

* cleanup
2025-08-17 22:23:41 -07:00
Siddharth Ganesan
d7fd4a9618 feat(copilot): diff improvements (#1002)
* Fix abort

* Cred updates

* Updates

* Fix sheet id showing up in diff view

* Update diff view

* Text overflow

* Optimistic accept

* Serialization catching

* Depth 0 fix

* Fix icons

* Updates

* Lint
2025-08-16 15:09:48 -07:00
Vikhyath Mondreti
d972bab206 fix(logs-sidebar): remove message and fix race condition for quickly switching b/w logs (#1001) 2025-08-16 15:05:39 -07:00
Vikhyath Mondreti
f254d70624 improvement(logs): cleanup code (#999) 2025-08-16 13:44:00 -07:00
Waleed Latif
8748e1d5f9 improvement(db): remove deprecated 'state' column from workflow table (#994)
* improvement(db): remove deprecated 'state' column from workflow table

* removed extraneous logs

* update sockets envvar
2025-08-16 13:04:49 -07:00
Siddharth Ganesan
133a32e6d3 Fix abort (#998) 2025-08-16 11:10:09 -07:00
65 changed files with 14162 additions and 2694 deletions

View File

@@ -65,6 +65,7 @@ export async function POST(req: NextRequest) {
if (!Number.isNaN(limit) && limit > 0 && currentUsage >= limit) {
// Usage exceeded
logger.info('[API VALIDATION] Usage exceeded', { userId, currentUsage, limit })
return new NextResponse(null, { status: 402 })
}
}

View File

@@ -371,7 +371,42 @@ export async function POST(req: NextRequest) {
(currentChat?.conversationId as string | undefined) || conversationId
// If we have a conversationId, only send the most recent user message; else send full history
const messagesForAgent = effectiveConversationId ? [messages[messages.length - 1]] : messages
const latestUserMessage =
[...messages].reverse().find((m) => m?.role === 'user') || messages[messages.length - 1]
const messagesForAgent = effectiveConversationId ? [latestUserMessage] : messages
const requestPayload = {
messages: messagesForAgent,
workflowId,
userId: authenticatedUserId,
stream: stream,
streamToolCalls: true,
mode: mode,
provider: providerToUse,
...(effectiveConversationId ? { conversationId: effectiveConversationId } : {}),
...(typeof depth === 'number' ? { depth } : {}),
...(session?.user?.name && { userName: session.user.name }),
}
// Log the payload being sent to the streaming endpoint
try {
logger.info(`[${tracker.requestId}] Sending payload to sim agent streaming endpoint`, {
url: `${SIM_AGENT_API_URL}/api/chat-completion-streaming`,
provider: providerToUse,
mode,
stream,
workflowId,
hasConversationId: !!effectiveConversationId,
depth: typeof depth === 'number' ? depth : undefined,
messagesCount: requestPayload.messages.length,
})
// Full payload as JSON string
logger.info(
`[${tracker.requestId}] Full streaming payload: ${JSON.stringify(requestPayload)}`
)
} catch (e) {
logger.warn(`[${tracker.requestId}] Failed to log payload preview for streaming endpoint`, e)
}
const simAgentResponse = await fetch(`${SIM_AGENT_API_URL}/api/chat-completion-streaming`, {
method: 'POST',
@@ -379,18 +414,7 @@ export async function POST(req: NextRequest) {
'Content-Type': 'application/json',
...(env.COPILOT_API_KEY ? { 'x-api-key': env.COPILOT_API_KEY } : {}),
},
body: JSON.stringify({
messages: messagesForAgent,
workflowId,
userId: authenticatedUserId,
stream: stream,
streamToolCalls: true,
mode: mode,
provider: providerToUse,
...(effectiveConversationId ? { conversationId: effectiveConversationId } : {}),
...(typeof depth === 'number' ? { depth } : {}),
...(session?.user?.name && { userName: session.user.name }),
}),
body: JSON.stringify(requestPayload),
})
if (!simAgentResponse.ok) {
@@ -690,7 +714,7 @@ export async function POST(req: NextRequest) {
)
}
const responseId = responseIdFromDone || responseIdFromStart
const responseId = responseIdFromDone
// Update chat in database immediately (without title)
await db
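The hunk above consolidates the previously inline request body into a single requestPayload object and selects the latest user message by role instead of taking the last array element blindly. A minimal, hedged TypeScript sketch of that selection logic (message type simplified for illustration):

// Simplified message shape for illustration; the real type lives in the route's imports.
type ChatMessage = { role: string; content: string }

// Most recent user-authored message, falling back to the last message if none exists.
function pickLatestUserMessage(messages: ChatMessage[]): ChatMessage {
  return [...messages].reverse().find((m) => m?.role === 'user') ?? messages[messages.length - 1]
}

// With an existing conversationId only the latest user message is forwarded;
// otherwise the full history is sent to the agent.
function buildMessagesForAgent(messages: ChatMessage[], conversationId?: string): ChatMessage[] {
  return conversationId ? [pickLatestUserMessage(messages)] : messages
}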

View File

@@ -46,20 +46,7 @@ export async function GET(
startedAt: workflowLog.startedAt.toISOString(),
endedAt: workflowLog.endedAt?.toISOString(),
totalDurationMs: workflowLog.totalDurationMs,
blockStats: {
total: workflowLog.blockCount,
success: workflowLog.successCount,
error: workflowLog.errorCount,
skipped: workflowLog.skippedCount,
},
cost: {
total: workflowLog.totalCost ? Number.parseFloat(workflowLog.totalCost) : null,
input: workflowLog.totalInputCost ? Number.parseFloat(workflowLog.totalInputCost) : null,
output: workflowLog.totalOutputCost
? Number.parseFloat(workflowLog.totalOutputCost)
: null,
},
totalTokens: workflowLog.totalTokens,
cost: workflowLog.cost || null,
},
}

View File

@@ -0,0 +1,102 @@
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console/logger'
import { db } from '@/db'
import { permissions, workflow, workflowExecutionLogs } from '@/db/schema'
const logger = createLogger('LogDetailsByIdAPI')
export const revalidate = 0
export async function GET(_request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const requestId = crypto.randomUUID().slice(0, 8)
try {
const session = await getSession()
if (!session?.user?.id) {
logger.warn(`[${requestId}] Unauthorized log details access attempt`)
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const userId = session.user.id
const { id } = await params
const rows = await db
.select({
id: workflowExecutionLogs.id,
workflowId: workflowExecutionLogs.workflowId,
executionId: workflowExecutionLogs.executionId,
stateSnapshotId: workflowExecutionLogs.stateSnapshotId,
level: workflowExecutionLogs.level,
trigger: workflowExecutionLogs.trigger,
startedAt: workflowExecutionLogs.startedAt,
endedAt: workflowExecutionLogs.endedAt,
totalDurationMs: workflowExecutionLogs.totalDurationMs,
executionData: workflowExecutionLogs.executionData,
cost: workflowExecutionLogs.cost,
files: workflowExecutionLogs.files,
createdAt: workflowExecutionLogs.createdAt,
workflowName: workflow.name,
workflowDescription: workflow.description,
workflowColor: workflow.color,
workflowFolderId: workflow.folderId,
workflowUserId: workflow.userId,
workflowWorkspaceId: workflow.workspaceId,
workflowCreatedAt: workflow.createdAt,
workflowUpdatedAt: workflow.updatedAt,
})
.from(workflowExecutionLogs)
.innerJoin(workflow, eq(workflowExecutionLogs.workflowId, workflow.id))
.innerJoin(
permissions,
and(
eq(permissions.entityType, 'workspace'),
eq(permissions.entityId, workflow.workspaceId),
eq(permissions.userId, userId)
)
)
.where(eq(workflowExecutionLogs.id, id))
.limit(1)
const log = rows[0]
if (!log) {
return NextResponse.json({ error: 'Not found' }, { status: 404 })
}
const workflowSummary = {
id: log.workflowId,
name: log.workflowName,
description: log.workflowDescription,
color: log.workflowColor,
folderId: log.workflowFolderId,
userId: log.workflowUserId,
workspaceId: log.workflowWorkspaceId,
createdAt: log.workflowCreatedAt,
updatedAt: log.workflowUpdatedAt,
}
const response = {
id: log.id,
workflowId: log.workflowId,
executionId: log.executionId,
level: log.level,
duration: log.totalDurationMs ? `${log.totalDurationMs}ms` : null,
trigger: log.trigger,
createdAt: log.startedAt.toISOString(),
files: log.files || undefined,
workflow: workflowSummary,
executionData: {
totalDuration: log.totalDurationMs,
...(log.executionData as any),
enhanced: true,
},
cost: log.cost as any,
}
return NextResponse.json({ data: response })
} catch (error: any) {
logger.error(`[${requestId}] log details fetch error`, error)
return NextResponse.json({ error: error.message }, { status: 500 })
}
}
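A hedged client-side sketch of consuming this new route; the /api/logs/by-id/:id path and the { data } envelope match the handler above and the logs page diff further down, while the helper name is hypothetical:

// Fetch one log's details; resolves to undefined on any non-OK response.
async function fetchLogDetailsById(id: string, signal?: AbortSignal): Promise<unknown | undefined> {
  const res = await fetch(`/api/logs/by-id/${id}`, { signal })
  if (!res.ok) return undefined
  const body = await res.json()
  return body?.data
}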

View File

@@ -99,21 +99,13 @@ export async function GET(request: NextRequest) {
executionId: workflowExecutionLogs.executionId,
stateSnapshotId: workflowExecutionLogs.stateSnapshotId,
level: workflowExecutionLogs.level,
message: workflowExecutionLogs.message,
trigger: workflowExecutionLogs.trigger,
startedAt: workflowExecutionLogs.startedAt,
endedAt: workflowExecutionLogs.endedAt,
totalDurationMs: workflowExecutionLogs.totalDurationMs,
blockCount: workflowExecutionLogs.blockCount,
successCount: workflowExecutionLogs.successCount,
errorCount: workflowExecutionLogs.errorCount,
skippedCount: workflowExecutionLogs.skippedCount,
totalCost: workflowExecutionLogs.totalCost,
totalInputCost: workflowExecutionLogs.totalInputCost,
totalOutputCost: workflowExecutionLogs.totalOutputCost,
totalTokens: workflowExecutionLogs.totalTokens,
executionData: workflowExecutionLogs.executionData,
cost: workflowExecutionLogs.cost,
files: workflowExecutionLogs.files,
metadata: workflowExecutionLogs.metadata,
createdAt: workflowExecutionLogs.createdAt,
})
.from(workflowExecutionLogs)

View File

@@ -1,4 +1,4 @@
import { and, desc, eq, gte, inArray, lte, or, type SQL, sql } from 'drizzle-orm'
import { and, desc, eq, gte, inArray, lte, type SQL, sql } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
@@ -44,8 +44,7 @@ function extractBlockExecutionsFromTraceSpans(traceSpans: any[]): any[] {
export const revalidate = 0
const QueryParamsSchema = z.object({
includeWorkflow: z.coerce.boolean().optional().default(false),
includeBlocks: z.coerce.boolean().optional().default(false),
details: z.enum(['basic', 'full']).optional().default('basic'),
limit: z.coerce.number().optional().default(100),
offset: z.coerce.number().optional().default(0),
level: z.string().optional(),
@@ -81,20 +80,12 @@ export async function GET(request: NextRequest) {
executionId: workflowExecutionLogs.executionId,
stateSnapshotId: workflowExecutionLogs.stateSnapshotId,
level: workflowExecutionLogs.level,
message: workflowExecutionLogs.message,
trigger: workflowExecutionLogs.trigger,
startedAt: workflowExecutionLogs.startedAt,
endedAt: workflowExecutionLogs.endedAt,
totalDurationMs: workflowExecutionLogs.totalDurationMs,
blockCount: workflowExecutionLogs.blockCount,
successCount: workflowExecutionLogs.successCount,
errorCount: workflowExecutionLogs.errorCount,
skippedCount: workflowExecutionLogs.skippedCount,
totalCost: workflowExecutionLogs.totalCost,
totalInputCost: workflowExecutionLogs.totalInputCost,
totalOutputCost: workflowExecutionLogs.totalOutputCost,
totalTokens: workflowExecutionLogs.totalTokens,
metadata: workflowExecutionLogs.metadata,
executionData: workflowExecutionLogs.executionData,
cost: workflowExecutionLogs.cost,
files: workflowExecutionLogs.files,
createdAt: workflowExecutionLogs.createdAt,
workflowName: workflow.name,
@@ -163,13 +154,8 @@ export async function GET(request: NextRequest) {
// Filter by search query
if (params.search) {
const searchTerm = `%${params.search}%`
conditions = and(
conditions,
or(
sql`${workflowExecutionLogs.message} ILIKE ${searchTerm}`,
sql`${workflowExecutionLogs.executionId} ILIKE ${searchTerm}`
)
)
// With message removed, restrict search to executionId only
conditions = and(conditions, sql`${workflowExecutionLogs.executionId} ILIKE ${searchTerm}`)
}
// Execute the query using the optimized join
@@ -290,31 +276,20 @@ export async function GET(request: NextRequest) {
const enhancedLogs = logs.map((log) => {
const blockExecutions = blockExecutionsByExecution[log.executionId] || []
// Use stored trace spans from metadata if available, otherwise create from block executions
const storedTraceSpans = (log.metadata as any)?.traceSpans
// Use stored trace spans if available, otherwise create from block executions
const storedTraceSpans = (log.executionData as any)?.traceSpans
const traceSpans =
storedTraceSpans && Array.isArray(storedTraceSpans) && storedTraceSpans.length > 0
? storedTraceSpans
: createTraceSpans(blockExecutions)
// Use extracted cost summary if available, otherwise use stored values
// Prefer stored cost JSON; otherwise synthesize from blocks
const costSummary =
blockExecutions.length > 0
? extractCostSummary(blockExecutions)
: {
input: Number(log.totalInputCost) || 0,
output: Number(log.totalOutputCost) || 0,
total: Number(log.totalCost) || 0,
tokens: {
total: log.totalTokens || 0,
prompt: (log.metadata as any)?.tokenBreakdown?.prompt || 0,
completion: (log.metadata as any)?.tokenBreakdown?.completion || 0,
},
models: (log.metadata as any)?.models || {},
}
log.cost && Object.keys(log.cost as any).length > 0
? (log.cost as any)
: extractCostSummary(blockExecutions)
// Build workflow object from joined data
const workflow = {
const workflowSummary = {
id: log.workflowId,
name: log.workflowName,
description: log.workflowDescription,
@@ -329,67 +304,28 @@ export async function GET(request: NextRequest) {
return {
id: log.id,
workflowId: log.workflowId,
executionId: log.executionId,
executionId: params.details === 'full' ? log.executionId : undefined,
level: log.level,
message: log.message,
duration: log.totalDurationMs ? `${log.totalDurationMs}ms` : null,
trigger: log.trigger,
createdAt: log.startedAt.toISOString(),
files: log.files || undefined,
workflow: params.includeWorkflow ? workflow : undefined,
metadata: {
totalDuration: log.totalDurationMs,
cost: costSummary,
blockStats: {
total: log.blockCount,
success: log.successCount,
error: log.errorCount,
skipped: log.skippedCount,
},
traceSpans,
blockExecutions,
enhanced: true,
},
files: params.details === 'full' ? log.files || undefined : undefined,
workflow: workflowSummary,
executionData:
params.details === 'full'
? {
totalDuration: log.totalDurationMs,
traceSpans,
blockExecutions,
enhanced: true,
}
: undefined,
cost:
params.details === 'full'
? (costSummary as any)
: { total: (costSummary as any)?.total || 0 },
}
})
// Include block execution data if requested
if (params.includeBlocks) {
// Block executions are now extracted from stored trace spans in metadata
const blockLogsByExecution: Record<string, any[]> = {}
logs.forEach((log) => {
const storedTraceSpans = (log.metadata as any)?.traceSpans
if (storedTraceSpans && Array.isArray(storedTraceSpans)) {
blockLogsByExecution[log.executionId] =
extractBlockExecutionsFromTraceSpans(storedTraceSpans)
} else {
blockLogsByExecution[log.executionId] = []
}
})
// Add block logs to metadata
const logsWithBlocks = enhancedLogs.map((log) => ({
...log,
metadata: {
...log.metadata,
blockExecutions: blockLogsByExecution[log.executionId] || [],
},
}))
return NextResponse.json(
{
data: logsWithBlocks,
total: Number(count),
page: Math.floor(params.offset / params.limit) + 1,
pageSize: params.limit,
totalPages: Math.ceil(Number(count) / params.limit),
},
{ status: 200 }
)
}
// Return basic logs
return NextResponse.json(
{
data: enhancedLogs,
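Per the hunks above, details=basic trims each row to summary fields (no executionId, files, or executionData, and cost reduced to its total), while details=full keeps the richer shape. A hedged sketch of the two variants as TypeScript types, with names hypothetical and fields derived from the selections above:

// Summary rows returned for details=basic (used by the list view).
interface BasicLogRow {
  id: string
  workflowId: string
  level: string
  trigger: string
  duration: string | null
  createdAt: string
  workflow: { id: string; name: string; [key: string]: unknown }
  cost: { total: number }
}

// details=full additionally exposes execution details and the full cost breakdown.
interface FullLogRow extends BasicLogRow {
  executionId: string
  files?: unknown[]
  executionData: {
    totalDuration: number | null
    traceSpans: unknown[]
    blockExecutions: unknown[]
    enhanced: boolean
  }
  cost: {
    total: number
    input?: number
    output?: number
    tokens?: { prompt: number; completion: number }
    models?: Record<string, unknown>
  }
}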

View File

@@ -80,7 +80,6 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
workspaceId: workspaceId,
name: `${templateData.name} (copy)`,
description: templateData.description,
state: templateData.state,
color: templateData.color,
userId: session.user.id,
createdAt: now,
@@ -158,9 +157,6 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
}))
}
// Update the workflow with the corrected state
await tx.update(workflow).set({ state: updatedState }).where(eq(workflow.id, newWorkflowId))
// Insert blocks and edges
if (blockEntries.length > 0) {
await tx.insert(workflowBlocks).values(blockEntries)

View File

@@ -7,7 +7,7 @@ import { createLogger } from '@/lib/logs/console/logger'
import { getUserEntityPermissions } from '@/lib/permissions/utils'
import { db } from '@/db'
import { workflow, workflowBlocks, workflowEdges, workflowSubflows } from '@/db/schema'
import type { LoopConfig, ParallelConfig, WorkflowState } from '@/stores/workflows/workflow/types'
import type { LoopConfig, ParallelConfig } from '@/stores/workflows/workflow/types'
const logger = createLogger('WorkflowDuplicateAPI')
@@ -90,7 +90,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
folderId: folderId || source.folderId,
name,
description: description || source.description,
state: source.state, // We'll update this later with new block IDs
color: color || source.color,
lastSynced: now,
createdAt: now,
@@ -112,9 +111,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
// Create a mapping from old block IDs to new block IDs
const blockIdMapping = new Map<string, string>()
// Initialize state for updating with new block IDs
let updatedState: WorkflowState = source.state as WorkflowState
if (sourceBlocks.length > 0) {
// First pass: Create all block ID mappings
sourceBlocks.forEach((block) => {
@@ -265,86 +261,10 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
)
}
// Update the JSON state to use new block IDs
if (updatedState && typeof updatedState === 'object') {
updatedState = JSON.parse(JSON.stringify(updatedState)) as WorkflowState
// Update blocks object keys
if (updatedState.blocks && typeof updatedState.blocks === 'object') {
const newBlocks = {} as Record<string, (typeof updatedState.blocks)[string]>
for (const [oldId, blockData] of Object.entries(updatedState.blocks)) {
const newId = blockIdMapping.get(oldId) || oldId
newBlocks[newId] = {
...blockData,
id: newId,
// Update data.parentId and extent in the JSON state as well
data: (() => {
const block = blockData as any
if (block.data && typeof block.data === 'object' && block.data.parentId) {
return {
...block.data,
parentId: blockIdMapping.get(block.data.parentId) || block.data.parentId,
extent: 'parent', // Ensure extent is set for child blocks
}
}
return block.data
})(),
}
}
updatedState.blocks = newBlocks
}
// Update edges array
if (updatedState.edges && Array.isArray(updatedState.edges)) {
updatedState.edges = updatedState.edges.map((edge) => ({
...edge,
id: crypto.randomUUID(),
source: blockIdMapping.get(edge.source) || edge.source,
target: blockIdMapping.get(edge.target) || edge.target,
}))
}
// Update loops and parallels if they exist
if (updatedState.loops && typeof updatedState.loops === 'object') {
const newLoops = {} as Record<string, (typeof updatedState.loops)[string]>
for (const [oldId, loopData] of Object.entries(updatedState.loops)) {
const newId = blockIdMapping.get(oldId) || oldId
const loopConfig = loopData as any
newLoops[newId] = {
...loopConfig,
id: newId,
// Update node references in loop config
nodes: loopConfig.nodes
? loopConfig.nodes.map((nodeId: string) => blockIdMapping.get(nodeId) || nodeId)
: [],
}
}
updatedState.loops = newLoops
}
if (updatedState.parallels && typeof updatedState.parallels === 'object') {
const newParallels = {} as Record<string, (typeof updatedState.parallels)[string]>
for (const [oldId, parallelData] of Object.entries(updatedState.parallels)) {
const newId = blockIdMapping.get(oldId) || oldId
const parallelConfig = parallelData as any
newParallels[newId] = {
...parallelConfig,
id: newId,
// Update node references in parallel config
nodes: parallelConfig.nodes
? parallelConfig.nodes.map((nodeId: string) => blockIdMapping.get(nodeId) || nodeId)
: [],
}
}
updatedState.parallels = newParallels
}
}
// Update the workflow state with the new block IDs
// Update the workflow timestamp
await tx
.update(workflow)
.set({
state: updatedState,
updatedAt: now,
})
.where(eq(workflow.id, newWorkflowId))
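With the JSON state column gone, duplication only rewrites block references in the normalized rows; the removed block above performed the same remapping inside the JSON blob. A hedged sketch of the reference-remapping idea the route still relies on (helper name hypothetical, blockIdMapping as built earlier in this route):

// Rewrite node ID references through the old-to-new block ID map; unknown IDs pass through.
function remapNodeIds(nodes: string[] | undefined, blockIdMapping: Map<string, string>): string[] {
  return (nodes ?? []).map((nodeId) => blockIdMapping.get(nodeId) ?? nodeId)
}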

View File

@@ -89,7 +89,14 @@ describe('Workflow By ID API Route', () => {
userId: 'user-123',
name: 'Test Workflow',
workspaceId: null,
state: { blocks: {}, edges: [] },
}
const mockNormalizedData = {
blocks: {},
edges: [],
loops: {},
parallels: {},
isFromNormalizedTables: true,
}
vi.doMock('@/lib/auth', () => ({
@@ -110,6 +117,10 @@ describe('Workflow By ID API Route', () => {
},
}))
vi.doMock('@/lib/workflows/db-helpers', () => ({
loadWorkflowFromNormalizedTables: vi.fn().mockResolvedValue(mockNormalizedData),
}))
const req = new NextRequest('http://localhost:3000/api/workflows/workflow-123')
const params = Promise.resolve({ id: 'workflow-123' })
@@ -127,7 +138,14 @@ describe('Workflow By ID API Route', () => {
userId: 'other-user',
name: 'Test Workflow',
workspaceId: 'workspace-456',
state: { blocks: {}, edges: [] },
}
const mockNormalizedData = {
blocks: {},
edges: [],
loops: {},
parallels: {},
isFromNormalizedTables: true,
}
vi.doMock('@/lib/auth', () => ({
@@ -148,6 +166,10 @@ describe('Workflow By ID API Route', () => {
},
}))
vi.doMock('@/lib/workflows/db-helpers', () => ({
loadWorkflowFromNormalizedTables: vi.fn().mockResolvedValue(mockNormalizedData),
}))
vi.doMock('@/lib/permissions/utils', () => ({
getUserEntityPermissions: vi.fn().mockResolvedValue('read'),
hasAdminPermission: vi.fn().mockResolvedValue(false),
@@ -170,7 +192,6 @@ describe('Workflow By ID API Route', () => {
userId: 'other-user',
name: 'Test Workflow',
workspaceId: 'workspace-456',
state: { blocks: {}, edges: [] },
}
vi.doMock('@/lib/auth', () => ({
@@ -213,7 +234,6 @@ describe('Workflow By ID API Route', () => {
userId: 'user-123',
name: 'Test Workflow',
workspaceId: null,
state: { blocks: {}, edges: [] },
}
const mockNormalizedData = {

View File

@@ -120,8 +120,6 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
logger.debug(`[${requestId}] Attempting to load workflow ${workflowId} from normalized tables`)
const normalizedData = await loadWorkflowFromNormalizedTables(workflowId)
const finalWorkflowData = { ...workflowData }
if (normalizedData) {
logger.debug(`[${requestId}] Found normalized data for workflow ${workflowId}:`, {
blocksCount: Object.keys(normalizedData.blocks).length,
@@ -131,38 +129,31 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
loops: normalizedData.loops,
})
// Use normalized table data - reconstruct complete state object
// First get any existing state properties, then override with normalized data
const existingState =
workflowData.state && typeof workflowData.state === 'object' ? workflowData.state : {}
finalWorkflowData.state = {
// Default values for expected properties
deploymentStatuses: {},
hasActiveWebhook: false,
// Preserve any existing state properties
...existingState,
// Override with normalized data (this takes precedence)
blocks: normalizedData.blocks,
edges: normalizedData.edges,
loops: normalizedData.loops,
parallels: normalizedData.parallels,
lastSaved: Date.now(),
isDeployed: workflowData.isDeployed || false,
deployedAt: workflowData.deployedAt,
// Construct response object with workflow data and state from normalized tables
const finalWorkflowData = {
...workflowData,
state: {
// Default values for expected properties
deploymentStatuses: {},
hasActiveWebhook: false,
// Data from normalized tables
blocks: normalizedData.blocks,
edges: normalizedData.edges,
loops: normalizedData.loops,
parallels: normalizedData.parallels,
lastSaved: Date.now(),
isDeployed: workflowData.isDeployed || false,
deployedAt: workflowData.deployedAt,
},
}
logger.info(`[${requestId}] Loaded workflow ${workflowId} from normalized tables`)
} else {
// Fallback to JSON blob
logger.info(
`[${requestId}] Using JSON blob for workflow ${workflowId} - no normalized data found`
)
const elapsed = Date.now() - startTime
logger.info(`[${requestId}] Successfully fetched workflow ${workflowId} in ${elapsed}ms`)
return NextResponse.json({ data: finalWorkflowData }, { status: 200 })
}
const elapsed = Date.now() - startTime
logger.info(`[${requestId}] Successfully fetched workflow ${workflowId} in ${elapsed}ms`)
return NextResponse.json({ data: finalWorkflowData }, { status: 200 })
return NextResponse.json({ error: 'Workflow has no normalized data' }, { status: 400 })
} catch (error: any) {
const elapsed = Date.now() - startTime
logger.error(`[${requestId}] Error fetching workflow ${workflowId} after ${elapsed}ms`, error)

View File

@@ -220,7 +220,6 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
.set({
lastSynced: new Date(),
updatedAt: new Date(),
state: saveResult.jsonBlob, // Also update JSON blob for backward compatibility
})
.where(eq(workflow.id, workflowId))

View File

@@ -18,14 +18,12 @@ import { db } from '@/db'
import { workflowCheckpoints, workflow as workflowTable } from '@/db/schema'
import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/workflow/utils'
// Sim Agent API configuration
const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT
export const dynamic = 'force-dynamic'
const logger = createLogger('WorkflowYamlAPI')
// Request schema for YAML workflow operations
const YamlWorkflowRequestSchema = z.object({
yamlContent: z.string().min(1, 'YAML content is required'),
description: z.string().optional(),
@@ -647,14 +645,13 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
.set({
lastSynced: new Date(),
updatedAt: new Date(),
state: saveResult.jsonBlob,
})
.where(eq(workflowTable.id, workflowId))
// Notify socket server for real-time collaboration (for copilot and editor)
if (source === 'copilot' || source === 'editor') {
try {
const socketUrl = process.env.SOCKET_URL || 'http://localhost:3002'
const socketUrl = env.SOCKET_SERVER_URL || 'http://localhost:3002'
await fetch(`${socketUrl}/api/copilot-workflow-edit`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },

View File

@@ -151,7 +151,6 @@ export async function POST(req: NextRequest) {
folderId: folderId || null,
name,
description,
state: initialState,
color,
lastSynced: now,
createdAt: now,

View File

@@ -85,14 +85,10 @@ export async function GET(request: NextRequest) {
edgesCount: normalizedData.edges.length,
})
// Use normalized table data - reconstruct complete state object
const existingState =
workflowData.state && typeof workflowData.state === 'object' ? workflowData.state : {}
// Use normalized table data - construct state from normalized tables
workflowState = {
deploymentStatuses: {},
hasActiveWebhook: false,
...existingState,
blocks: normalizedData.blocks,
edges: normalizedData.edges,
loops: normalizedData.loops,
@@ -116,33 +112,10 @@ export async function GET(request: NextRequest) {
logger.info(`[${requestId}] Loaded workflow ${workflowId} from normalized tables`)
} else {
// Fallback to JSON blob
logger.info(
`[${requestId}] Using JSON blob for workflow ${workflowId} - no normalized data found`
return NextResponse.json(
{ success: false, error: 'Workflow has no normalized data' },
{ status: 400 }
)
if (!workflowData.state || typeof workflowData.state !== 'object') {
return NextResponse.json(
{ success: false, error: 'Workflow has no valid state data' },
{ status: 400 }
)
}
workflowState = workflowData.state as any
// Extract subblock values from JSON blob state
if (workflowState.blocks) {
Object.entries(workflowState.blocks).forEach(([blockId, block]: [string, any]) => {
subBlockValues[blockId] = {}
if (block.subBlocks) {
Object.entries(block.subBlocks).forEach(([subBlockId, subBlock]: [string, any]) => {
if (subBlock && typeof subBlock === 'object' && 'value' in subBlock) {
subBlockValues[blockId][subBlockId] = subBlock.value
}
})
}
})
}
}
// Gather block registry and utilities for sim-agent

View File

@@ -113,64 +113,6 @@ async function createWorkspace(userId: string, name: string) {
// Create initial workflow for the workspace with start block
const starterId = crypto.randomUUID()
const initialState = {
blocks: {
[starterId]: {
id: starterId,
type: 'starter',
name: 'Start',
position: { x: 100, y: 100 },
subBlocks: {
startWorkflow: {
id: 'startWorkflow',
type: 'dropdown',
value: 'manual',
},
webhookPath: {
id: 'webhookPath',
type: 'short-input',
value: '',
},
webhookSecret: {
id: 'webhookSecret',
type: 'short-input',
value: '',
},
scheduleType: {
id: 'scheduleType',
type: 'dropdown',
value: 'daily',
},
minutesInterval: {
id: 'minutesInterval',
type: 'short-input',
value: '',
},
minutesStartingAt: {
id: 'minutesStartingAt',
type: 'short-input',
value: '',
},
},
outputs: {
response: { type: { input: 'any' } },
},
enabled: true,
horizontalHandles: true,
isWide: false,
advancedMode: false,
height: 95,
},
},
edges: [],
subflows: {},
variables: {},
metadata: {
version: '1.0.0',
createdAt: now.toISOString(),
updatedAt: now.toISOString(),
},
}
// Create the workflow
await tx.insert(workflow).values({
@@ -180,7 +122,6 @@ async function createWorkspace(userId: string, name: string) {
folderId: null,
name: 'default-agent',
description: 'Your first workflow - start building here!',
state: initialState,
color: '#3972F6',
lastSynced: now,
createdAt: now,

View File

@@ -14,7 +14,8 @@
}
.workflow-container .react-flow__node-loopNode,
.workflow-container .react-flow__node-parallelNode {
.workflow-container .react-flow__node-parallelNode,
.workflow-container .react-flow__node-subflowNode {
z-index: -1 !important;
}

View File

@@ -1,7 +1,7 @@
'use client'
import { useEffect, useMemo, useRef, useState } from 'react'
import { ChevronDown, ChevronUp, Eye, X } from 'lucide-react'
import { ChevronDown, ChevronUp, Eye, Loader2, X } from 'lucide-react'
import { Button } from '@/components/ui/button'
import { CopyButton } from '@/components/ui/copy-button'
import { ScrollArea } from '@/components/ui/scroll-area'
@@ -209,29 +209,30 @@ export function Sidebar({
}
}, [log?.id])
const isLoadingDetails = useMemo(() => {
if (!log) return false
// Only show while we expect details to arrive (has executionId)
if (!log.executionId) return false
const hasEnhanced = !!log.executionData?.enhanced
const hasAnyDetails = hasEnhanced || !!log.cost || Array.isArray(log.executionData?.traceSpans)
return !hasAnyDetails
}, [log])
const formattedContent = useMemo(() => {
if (!log) return null
let blockInput: Record<string, any> | undefined
if (log.metadata?.blockInput) {
blockInput = log.metadata.blockInput
} else if (log.metadata?.traceSpans) {
const blockIdMatch = log.message.match(/Block .+?(\d+)/i)
const blockId = blockIdMatch ? blockIdMatch[1] : null
if (blockId) {
const matchingSpan = log.metadata.traceSpans.find(
(span) => span.blockId === blockId || span.name.includes(`Block ${blockId}`)
)
if (matchingSpan?.input) {
blockInput = matchingSpan.input
}
if (log.executionData?.blockInput) {
blockInput = log.executionData.blockInput
} else if (log.executionData?.traceSpans) {
const firstSpanWithInput = log.executionData.traceSpans.find((s) => s.input)
if (firstSpanWithInput?.input) {
blockInput = firstSpanWithInput.input as any
}
}
return formatJsonContent(log.message, blockInput)
return null
}, [log])
useEffect(() => {
@@ -243,22 +244,16 @@ export function Sidebar({
// Determine if this is a workflow execution log
const isWorkflowExecutionLog = useMemo(() => {
if (!log) return false
// Check if message contains workflow execution phrases (success or failure)
return (
log.message.toLowerCase().includes('workflow executed') ||
log.message.toLowerCase().includes('execution completed') ||
log.message.toLowerCase().includes('workflow execution failed') ||
log.message.toLowerCase().includes('execution failed') ||
(log.trigger === 'manual' && log.duration) ||
// Also check if we have enhanced logging metadata with trace spans
(log.metadata?.enhanced && log.metadata?.traceSpans)
(log.trigger === 'manual' && !!log.duration) ||
(log.executionData?.enhanced && log.executionData?.traceSpans)
)
}, [log])
// Helper to determine if we have cost information to display
// All workflow executions now have cost info (base charge + any model costs)
const hasCostInfo = useMemo(() => {
return isWorkflowExecutionLog && log?.metadata?.cost
return isWorkflowExecutionLog && log?.cost
}, [log, isWorkflowExecutionLog])
const isWorkflowWithCost = useMemo(() => {
@@ -490,6 +485,14 @@ export function Sidebar({
</div>
)}
{/* Suspense while details load (positioned after summary fields) */}
{isLoadingDetails && (
<div className='flex w-full items-center justify-start gap-2 py-2 text-muted-foreground'>
<Loader2 className='h-4 w-4 animate-spin' />
<span className='text-sm'>Loading details</span>
</div>
)}
{/* Files */}
{log.files && log.files.length > 0 && (
<div>
@@ -541,19 +544,15 @@ export function Sidebar({
</div>
)}
{/* Message Content */}
<div className='w-full pb-2'>
<h3 className='mb-1 font-medium text-muted-foreground text-xs'>Message</h3>
<div className='w-full'>{formattedContent}</div>
</div>
{/* end suspense */}
{/* Trace Spans (if available and this is a workflow execution log) */}
{isWorkflowExecutionLog && log.metadata?.traceSpans && (
{isWorkflowExecutionLog && log.executionData?.traceSpans && (
<div className='w-full'>
<div className='w-full overflow-x-hidden'>
<TraceSpansDisplay
traceSpans={log.metadata.traceSpans}
totalDuration={log.metadata.totalDuration}
traceSpans={log.executionData.traceSpans}
totalDuration={log.executionData.totalDuration}
onExpansionChange={handleTraceSpanToggle}
/>
</div>
@@ -561,11 +560,11 @@ export function Sidebar({
)}
{/* Tool Calls (if available) */}
{log.metadata?.toolCalls && log.metadata.toolCalls.length > 0 && (
{log.executionData?.toolCalls && log.executionData.toolCalls.length > 0 && (
<div className='w-full'>
<h3 className='mb-1 font-medium text-muted-foreground text-xs'>Tool Calls</h3>
<div className='w-full overflow-x-hidden rounded-md bg-secondary/30 p-3'>
<ToolCallsDisplay metadata={log.metadata} />
<ToolCallsDisplay metadata={log.executionData} />
</div>
</div>
)}
@@ -584,86 +583,80 @@ export function Sidebar({
</div>
<div className='flex items-center justify-between'>
<span className='text-muted-foreground text-sm'>Model Input:</span>
<span className='text-sm'>
{formatCost(log.metadata?.cost?.input || 0)}
</span>
<span className='text-sm'>{formatCost(log.cost?.input || 0)}</span>
</div>
<div className='flex items-center justify-between'>
<span className='text-muted-foreground text-sm'>Model Output:</span>
<span className='text-sm'>
{formatCost(log.metadata?.cost?.output || 0)}
</span>
<span className='text-sm'>{formatCost(log.cost?.output || 0)}</span>
</div>
<div className='mt-1 flex items-center justify-between border-t pt-2'>
<span className='text-muted-foreground text-sm'>Total:</span>
<span className='text-foreground text-sm'>
{formatCost(log.metadata?.cost?.total || 0)}
{formatCost(log.cost?.total || 0)}
</span>
</div>
<div className='flex items-center justify-between'>
<span className='text-muted-foreground text-xs'>Tokens:</span>
<span className='text-muted-foreground text-xs'>
{log.metadata?.cost?.tokens?.prompt || 0} in /{' '}
{log.metadata?.cost?.tokens?.completion || 0} out
{log.cost?.tokens?.prompt || 0} in / {log.cost?.tokens?.completion || 0}{' '}
out
</span>
</div>
</div>
{/* Models Breakdown */}
{log.metadata?.cost?.models &&
Object.keys(log.metadata?.cost?.models).length > 0 && (
<div className='border-t'>
<button
onClick={() => setIsModelsExpanded(!isModelsExpanded)}
className='flex w-full items-center justify-between p-3 text-left transition-colors hover:bg-muted/50'
>
<span className='font-medium text-muted-foreground text-xs'>
Model Breakdown (
{Object.keys(log.metadata?.cost?.models || {}).length})
</span>
{isModelsExpanded ? (
<ChevronUp className='h-3 w-3 text-muted-foreground' />
) : (
<ChevronDown className='h-3 w-3 text-muted-foreground' />
)}
</button>
{log.cost?.models && Object.keys(log.cost?.models).length > 0 && (
<div className='border-t'>
<button
onClick={() => setIsModelsExpanded(!isModelsExpanded)}
className='flex w-full items-center justify-between p-3 text-left transition-colors hover:bg-muted/50'
>
<span className='font-medium text-muted-foreground text-xs'>
Model Breakdown ({Object.keys(log.cost?.models || {}).length})
</span>
{isModelsExpanded ? (
<ChevronUp className='h-3 w-3 text-muted-foreground' />
) : (
<ChevronDown className='h-3 w-3 text-muted-foreground' />
)}
</button>
{isModelsExpanded && (
<div className='space-y-3 border-t bg-muted/30 p-3'>
{Object.entries(log.metadata?.cost?.models || {}).map(
([model, cost]: [string, any]) => (
<div key={model} className='space-y-1'>
<div className='font-medium font-mono text-xs'>{model}</div>
<div className='space-y-1 text-xs'>
<div className='flex justify-between'>
<span className='text-muted-foreground'>Input:</span>
<span>{formatCost(cost.input || 0)}</span>
</div>
<div className='flex justify-between'>
<span className='text-muted-foreground'>Output:</span>
<span>{formatCost(cost.output || 0)}</span>
</div>
<div className='flex justify-between border-t pt-1'>
<span className='text-muted-foreground'>Total:</span>
<span className='font-medium'>
{formatCost(cost.total || 0)}
</span>
</div>
<div className='flex justify-between'>
<span className='text-muted-foreground'>Tokens:</span>
<span>
{cost.tokens?.prompt || 0} in /{' '}
{cost.tokens?.completion || 0} out
</span>
</div>
{isModelsExpanded && (
<div className='space-y-3 border-t bg-muted/30 p-3'>
{Object.entries(log.cost?.models || {}).map(
([model, cost]: [string, any]) => (
<div key={model} className='space-y-1'>
<div className='font-medium font-mono text-xs'>{model}</div>
<div className='space-y-1 text-xs'>
<div className='flex justify-between'>
<span className='text-muted-foreground'>Input:</span>
<span>{formatCost(cost.input || 0)}</span>
</div>
<div className='flex justify-between'>
<span className='text-muted-foreground'>Output:</span>
<span>{formatCost(cost.output || 0)}</span>
</div>
<div className='flex justify-between border-t pt-1'>
<span className='text-muted-foreground'>Total:</span>
<span className='font-medium'>
{formatCost(cost.total || 0)}
</span>
</div>
<div className='flex justify-between'>
<span className='text-muted-foreground'>Tokens:</span>
<span>
{cost.tokens?.prompt || 0} in /{' '}
{cost.tokens?.completion || 0} out
</span>
</div>
</div>
)
)}
</div>
)}
</div>
)}
</div>
)
)}
</div>
)}
</div>
)}
{isWorkflowWithCost && (
<div className='border-t bg-muted p-3 text-muted-foreground text-xs'>
@@ -688,7 +681,7 @@ export function Sidebar({
executionId={log.executionId}
workflowName={log.workflow?.name}
trigger={log.trigger || undefined}
traceSpans={log.metadata?.traceSpans}
traceSpans={log.executionData?.traceSpans}
isOpen={isFrozenCanvasOpen}
onClose={() => setIsFrozenCanvasOpen(false)}
/>

View File

@@ -85,6 +85,10 @@ export default function Logs() {
const [selectedLog, setSelectedLog] = useState<WorkflowLog | null>(null)
const [selectedLogIndex, setSelectedLogIndex] = useState<number>(-1)
const [isSidebarOpen, setIsSidebarOpen] = useState(false)
const [isDetailsLoading, setIsDetailsLoading] = useState(false)
const detailsCacheRef = useRef<Map<string, any>>(new Map())
const detailsAbortRef = useRef<AbortController | null>(null)
const currentDetailsIdRef = useRef<string | null>(null)
const selectedRowRef = useRef<HTMLTableRowElement | null>(null)
const loaderRef = useRef<HTMLDivElement>(null)
const scrollContainerRef = useRef<HTMLDivElement>(null)
@@ -116,13 +120,122 @@ export default function Logs() {
const index = logs.findIndex((l) => l.id === log.id)
setSelectedLogIndex(index)
setIsSidebarOpen(true)
setIsDetailsLoading(true)
// Fetch details for current, previous, and next concurrently with cache
const currentId = log.id
const prevId = index > 0 ? logs[index - 1]?.id : undefined
const nextId = index < logs.length - 1 ? logs[index + 1]?.id : undefined
// Abort any previous details fetch batch
if (detailsAbortRef.current) {
try {
detailsAbortRef.current.abort()
} catch {
/* no-op */
}
}
const controller = new AbortController()
detailsAbortRef.current = controller
currentDetailsIdRef.current = currentId
const idsToFetch: Array<{ id: string; merge: boolean }> = []
const cachedCurrent = currentId ? detailsCacheRef.current.get(currentId) : undefined
if (currentId && !cachedCurrent) idsToFetch.push({ id: currentId, merge: true })
if (prevId && !detailsCacheRef.current.has(prevId))
idsToFetch.push({ id: prevId, merge: false })
if (nextId && !detailsCacheRef.current.has(nextId))
idsToFetch.push({ id: nextId, merge: false })
// Merge cached current immediately
if (cachedCurrent) {
setSelectedLog((prev) =>
prev && prev.id === currentId
? ({ ...(prev as any), ...(cachedCurrent as any) } as any)
: prev
)
setIsDetailsLoading(false)
}
if (idsToFetch.length === 0) return
Promise.all(
idsToFetch.map(async ({ id, merge }) => {
try {
const res = await fetch(`/api/logs/by-id/${id}`, { signal: controller.signal })
if (!res.ok) return
const body = await res.json()
const detailed = body?.data
if (detailed) {
detailsCacheRef.current.set(id, detailed)
if (merge && id === currentId) {
setSelectedLog((prev) =>
prev && prev.id === id ? ({ ...(prev as any), ...(detailed as any) } as any) : prev
)
if (currentDetailsIdRef.current === id) setIsDetailsLoading(false)
}
}
} catch (e: any) {
if (e?.name === 'AbortError') return
}
})
).catch(() => {})
}
const handleNavigateNext = useCallback(() => {
if (selectedLogIndex < logs.length - 1) {
const nextIndex = selectedLogIndex + 1
setSelectedLogIndex(nextIndex)
setSelectedLog(logs[nextIndex])
const nextLog = logs[nextIndex]
setSelectedLog(nextLog)
// Abort any previous details fetch batch
if (detailsAbortRef.current) {
try {
detailsAbortRef.current.abort()
} catch {
/* no-op */
}
}
const controller = new AbortController()
detailsAbortRef.current = controller
const cached = detailsCacheRef.current.get(nextLog.id)
if (cached) {
setSelectedLog((prev) =>
prev && prev.id === nextLog.id ? ({ ...(prev as any), ...(cached as any) } as any) : prev
)
} else {
const prevId = nextIndex > 0 ? logs[nextIndex - 1]?.id : undefined
const afterId = nextIndex < logs.length - 1 ? logs[nextIndex + 1]?.id : undefined
const idsToFetch: Array<{ id: string; merge: boolean }> = []
if (nextLog.id && !detailsCacheRef.current.has(nextLog.id))
idsToFetch.push({ id: nextLog.id, merge: true })
if (prevId && !detailsCacheRef.current.has(prevId))
idsToFetch.push({ id: prevId, merge: false })
if (afterId && !detailsCacheRef.current.has(afterId))
idsToFetch.push({ id: afterId, merge: false })
Promise.all(
idsToFetch.map(async ({ id, merge }) => {
try {
const res = await fetch(`/api/logs/by-id/${id}`, { signal: controller.signal })
if (!res.ok) return
const body = await res.json()
const detailed = body?.data
if (detailed) {
detailsCacheRef.current.set(id, detailed)
if (merge && id === nextLog.id) {
setSelectedLog((prev) =>
prev && prev.id === id
? ({ ...(prev as any), ...(detailed as any) } as any)
: prev
)
}
}
} catch (e: any) {
if (e?.name === 'AbortError') return
}
})
).catch(() => {})
}
}
}, [selectedLogIndex, logs])
@@ -130,7 +243,57 @@ export default function Logs() {
if (selectedLogIndex > 0) {
const prevIndex = selectedLogIndex - 1
setSelectedLogIndex(prevIndex)
setSelectedLog(logs[prevIndex])
const prevLog = logs[prevIndex]
setSelectedLog(prevLog)
// Abort any previous details fetch batch
if (detailsAbortRef.current) {
try {
detailsAbortRef.current.abort()
} catch {
/* no-op */
}
}
const controller = new AbortController()
detailsAbortRef.current = controller
const cached = detailsCacheRef.current.get(prevLog.id)
if (cached) {
setSelectedLog((prev) =>
prev && prev.id === prevLog.id ? ({ ...(prev as any), ...(cached as any) } as any) : prev
)
} else {
const beforeId = prevIndex > 0 ? logs[prevIndex - 1]?.id : undefined
const afterId = prevIndex < logs.length - 1 ? logs[prevIndex + 1]?.id : undefined
const idsToFetch: Array<{ id: string; merge: boolean }> = []
if (prevLog.id && !detailsCacheRef.current.has(prevLog.id))
idsToFetch.push({ id: prevLog.id, merge: true })
if (beforeId && !detailsCacheRef.current.has(beforeId))
idsToFetch.push({ id: beforeId, merge: false })
if (afterId && !detailsCacheRef.current.has(afterId))
idsToFetch.push({ id: afterId, merge: false })
Promise.all(
idsToFetch.map(async ({ id, merge }) => {
try {
const res = await fetch(`/api/logs/by-id/${id}`, { signal: controller.signal })
if (!res.ok) return
const body = await res.json()
const detailed = body?.data
if (detailed) {
detailsCacheRef.current.set(id, detailed)
if (merge && id === prevLog.id) {
setSelectedLog((prev) =>
prev && prev.id === id
? ({ ...(prev as any), ...(detailed as any) } as any)
: prev
)
}
}
} catch (e: any) {
if (e?.name === 'AbortError') return
}
})
).catch(() => {})
}
}
}, [selectedLogIndex, logs])
@@ -160,7 +323,7 @@ export default function Logs() {
// Get fresh query params by calling buildQueryParams from store
const { buildQueryParams: getCurrentQueryParams } = useFilterStore.getState()
const queryParams = getCurrentQueryParams(pageNum, LOGS_PER_PAGE)
const response = await fetch(`/api/logs?${queryParams}`)
const response = await fetch(`/api/logs?${queryParams}&details=basic`)
if (!response.ok) {
throw new Error(`Error fetching logs: ${response.statusText}`)
@@ -262,7 +425,7 @@ export default function Logs() {
// Build query params inline to avoid dependency issues
const params = new URLSearchParams()
params.set('includeWorkflow', 'true')
params.set('details', 'basic')
params.set('limit', LOGS_PER_PAGE.toString())
params.set('offset', '0') // Always start from page 1
params.set('workspaceId', workspaceId)
@@ -482,7 +645,7 @@ export default function Logs() {
{/* Header */}
<div>
<div className='border-border border-b'>
<div className='grid min-w-[600px] grid-cols-[120px_80px_120px_80px_1fr] gap-2 px-2 pb-3 md:grid-cols-[140px_90px_140px_90px_1fr] md:gap-3 lg:min-w-0 lg:grid-cols-[160px_100px_160px_100px_1fr] lg:gap-4 xl:grid-cols-[160px_100px_160px_100px_100px_1fr_100px]'>
<div className='grid min-w-[600px] grid-cols-[120px_80px_120px_120px] gap-2 px-2 pb-3 md:grid-cols-[140px_90px_140px_120px] md:gap-3 lg:min-w-0 lg:grid-cols-[160px_100px_160px_120px] lg:gap-4 xl:grid-cols-[160px_100px_160px_120px_120px_100px]'>
<div className='font-[480] font-sans text-[13px] text-muted-foreground leading-normal'>
Time
</div>
@@ -493,14 +656,12 @@ export default function Logs() {
Workflow
</div>
<div className='font-[480] font-sans text-[13px] text-muted-foreground leading-normal'>
ID
Cost
</div>
<div className='hidden font-[480] font-sans text-[13px] text-muted-foreground leading-normal xl:block'>
Trigger
</div>
<div className='font-[480] font-sans text-[13px] text-muted-foreground leading-normal'>
Message
</div>
<div className='hidden font-[480] font-sans text-[13px] text-muted-foreground leading-normal xl:block'>
Duration
</div>
@@ -547,7 +708,7 @@ export default function Logs() {
}`}
onClick={() => handleLogClick(log)}
>
<div className='grid min-w-[600px] grid-cols-[120px_80px_120px_80px_1fr] items-center gap-2 px-2 py-4 md:grid-cols-[140px_90px_140px_90px_1fr] md:gap-3 lg:min-w-0 lg:grid-cols-[160px_100px_160px_100px_1fr] lg:gap-4 xl:grid-cols-[160px_100px_160px_100px_100px_1fr_100px]'>
<div className='grid min-w-[600px] grid-cols-[120px_80px_120px_120px] items-center gap-2 px-2 py-4 md:grid-cols-[140px_90px_140px_120px] md:gap-3 lg:min-w-0 lg:grid-cols-[160px_100px_160px_120px] lg:gap-4 xl:grid-cols-[160px_100px_160px_120px_120px_100px]'>
{/* Time */}
<div>
<div className='text-[13px]'>
@@ -584,10 +745,12 @@ export default function Logs() {
</div>
</div>
{/* ID */}
{/* Cost */}
<div>
<div className='font-medium text-muted-foreground text-xs'>
#{log.id.slice(-4)}
{typeof (log as any)?.cost?.total === 'number'
? `$${((log as any).cost.total as number).toFixed(4)}`
: '—'}
</div>
</div>
@@ -614,11 +777,6 @@ export default function Logs() {
)}
</div>
{/* Message */}
<div className='min-w-0'>
<div className='truncate font-[420] text-[13px]'>{log.message}</div>
</div>
{/* Duration */}
<div className='hidden xl:block'>
<div className='text-muted-foreground text-xs'>
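The cache, abort, and merge logic above is repeated nearly verbatim in the log click handler, handleNavigateNext, and the previous-log handler. A hedged sketch of a shared helper the three call sites could delegate to (name and signature hypothetical, endpoint as used above):

// Fetch details for the given log IDs, skipping IDs that are already cached, and
// merge the details of the log flagged as "current" into state when they arrive.
async function prefetchLogDetails(
  ids: Array<{ id: string; merge: boolean }>,
  cache: Map<string, unknown>,
  controller: AbortController,
  mergeIntoSelected: (id: string, detailed: unknown) => void
): Promise<void> {
  await Promise.all(
    ids
      .filter(({ id }) => !cache.has(id))
      .map(async ({ id, merge }) => {
        try {
          const res = await fetch(`/api/logs/by-id/${id}`, { signal: controller.signal })
          if (!res.ok) return
          const body = await res.json()
          const detailed = body?.data
          if (!detailed) return
          cache.set(id, detailed)
          if (merge) mergeIntoSelected(id, detailed)
        } catch (e: any) {
          if (e?.name === 'AbortError') return
        }
      })
  )
}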

View File

@@ -32,7 +32,6 @@ import {
TooltipTrigger,
} from '@/components/ui'
import { useSession } from '@/lib/auth-client'
import { isDev } from '@/lib/environment'
import { createLogger } from '@/lib/logs/console/logger'
import { cn } from '@/lib/utils'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
@@ -341,10 +340,11 @@ export function ControlBar({ hasValidationErrors = false }: ControlBarProps) {
* Handle deleting the current workflow
*/
const handleDeleteWorkflow = () => {
if (!activeWorkflowId || !userPermissions.canEdit) return
const currentWorkflowId = params.workflowId as string
if (!currentWorkflowId || !userPermissions.canEdit) return
const sidebarWorkflows = getSidebarOrderedWorkflows()
const currentIndex = sidebarWorkflows.findIndex((w) => w.id === activeWorkflowId)
const currentIndex = sidebarWorkflows.findIndex((w) => w.id === currentWorkflowId)
// Find next workflow: try next, then previous
let nextWorkflowId: string | null = null
@@ -363,8 +363,8 @@ export function ControlBar({ hasValidationErrors = false }: ControlBarProps) {
router.push(`/workspace/${workspaceId}`)
}
// Remove the workflow from the registry
useWorkflowRegistry.getState().removeWorkflow(activeWorkflowId)
// Remove the workflow from the registry using the URL parameter
useWorkflowRegistry.getState().removeWorkflow(currentWorkflowId)
}
// Helper function to open subscription settings
@@ -413,7 +413,7 @@ export function ControlBar({ hasValidationErrors = false }: ControlBarProps) {
<Tooltip>
<TooltipTrigger asChild>
<div className='inline-flex h-12 w-12 cursor-not-allowed items-center justify-center rounded-[11px] border bg-card text-card-foreground opacity-50 shadow-xs transition-colors'>
<Trash2 className='h-5 w-5' />
<Trash2 className='h-4 w-4' />
</div>
</TooltipTrigger>
<TooltipContent>{getTooltipText()}</TooltipContent>
@@ -498,7 +498,7 @@ export function ControlBar({ hasValidationErrors = false }: ControlBarProps) {
<TooltipTrigger asChild>
{isDisabled ? (
<div className='inline-flex h-12 w-12 cursor-not-allowed items-center justify-center rounded-[11px] border bg-card text-card-foreground opacity-50 shadow-xs transition-colors'>
<Copy className='h-5 w-5' />
<Copy className='h-4 w-4' />
</div>
) : (
<Button
@@ -563,9 +563,9 @@ export function ControlBar({ hasValidationErrors = false }: ControlBarProps) {
{isDisabled ? (
<div className='inline-flex h-12 w-12 cursor-not-allowed items-center justify-center rounded-[11px] border bg-card text-card-foreground opacity-50 shadow-xs transition-colors'>
{isAutoLayouting ? (
<RefreshCw className='h-5 w-5 animate-spin' />
<RefreshCw className='h-4 w-4 animate-spin' />
) : (
<Layers className='h-5 w-5' />
<Layers className='h-4 w-4' />
)}
</div>
) : (
@@ -721,7 +721,7 @@ export function ControlBar({ hasValidationErrors = false }: ControlBarProps) {
<TooltipTrigger asChild>
{isDisabled ? (
<div className='inline-flex h-12 w-12 cursor-not-allowed items-center justify-center rounded-[11px] border bg-card text-card-foreground opacity-50 shadow-xs transition-colors'>
<Store className='h-5 w-5' />
<Store className='h-4 w-4' />
</div>
) : (
<Button
@@ -775,7 +775,7 @@ export function ControlBar({ hasValidationErrors = false }: ControlBarProps) {
isDebugging && 'text-amber-500'
)}
>
<Bug className='h-5 w-5' />
<Bug className='h-4 w-4' />
</div>
) : (
<Button variant='outline' onClick={handleDebugToggle} className={buttonClass}>
@@ -1002,8 +1002,7 @@ export function ControlBar({ hasValidationErrors = false }: ControlBarProps) {
{renderToggleButton()}
{isExpanded && <ExportControls />}
{isExpanded && renderAutoLayoutButton()}
{!isDev && isExpanded && renderDuplicateButton()}
{isDev && renderDuplicateButton()}
{renderDuplicateButton()}
{renderDeleteButton()}
{!isDebugging && renderDebugModeToggle()}
{isExpanded && renderPublishButton()}

View File

@@ -196,22 +196,17 @@ export function DiffControls() {
logger.warn('Failed to clear preview YAML:', error)
})
// Accept changes with automatic backup and rollback on failure
await acceptChanges()
// Accept changes without blocking the UI; errors will be logged by the store handler
acceptChanges().catch((error) => {
logger.error('Failed to accept changes (background):', error)
})
logger.info('Successfully accepted and saved workflow changes')
// Show success feedback if needed
logger.info('Accept triggered; UI will update optimistically')
} catch (error) {
logger.error('Failed to accept changes:', error)
// Show error notification to user
// Note: The acceptChanges function has already rolled back the state
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
// You could add toast notification here
console.error('Workflow update failed:', errorMessage)
// Optionally show user-facing error dialog
alert(`Failed to save workflow changes: ${errorMessage}`)
}
}

View File

@@ -4,6 +4,8 @@ import { Component, type ReactNode, useEffect } from 'react'
import { BotIcon } from 'lucide-react'
import { Card } from '@/components/ui/card'
import { createLogger } from '@/lib/logs/console/logger'
import { ControlBar } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/control-bar/control-bar'
import { Panel } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/panel'
const logger = createLogger('ErrorBoundary')
@@ -22,18 +24,32 @@ export function ErrorUI({
fullScreen = false,
}: ErrorUIProps) {
const containerClass = fullScreen
? 'flex items-center justify-center w-full h-screen bg-muted/40'
: 'flex items-center justify-center w-full h-full bg-muted/40'
? 'flex flex-col w-full h-screen bg-muted/40'
: 'flex flex-col w-full h-full bg-muted/40'
return (
<div className={containerClass}>
<Card className='max-w-md space-y-4 p-6 text-center'>
<div className='flex justify-center'>
<BotIcon className='h-16 w-16 text-muted-foreground' />
{/* Control bar */}
<ControlBar hasValidationErrors={false} />
{/* Main content area */}
<div className='relative flex flex-1'>
{/* Error message */}
<div className='flex flex-1 items-center justify-center'>
<Card className='max-w-md space-y-4 p-6 text-center'>
<div className='flex justify-center'>
<BotIcon className='h-16 w-16 text-muted-foreground' />
</div>
<h3 className='font-semibold text-lg'>{title}</h3>
<p className='text-muted-foreground'>{message}</p>
</Card>
</div>
<h3 className='font-semibold text-lg'>{title}</h3>
<p className='text-muted-foreground'>{message}</p>
</Card>
{/* Console panel */}
<div className='fixed top-0 right-0 z-10'>
<Panel />
</div>
</div>
</div>
)
}

View File

@@ -2,8 +2,7 @@ export { ControlBar } from './control-bar/control-bar'
export { ErrorBoundary } from './error/index'
export { Panel } from './panel/panel'
export { SkeletonLoading } from './skeleton-loading/skeleton-loading'
export { LoopNodeComponent } from './subflows/loop/loop-node'
export { ParallelNodeComponent } from './subflows/parallel/parallel-node'
export { SubflowNodeComponent } from './subflows/subflow-node'
export { WandPromptBar } from './wand-prompt-bar/wand-prompt-bar'
export { WorkflowBlock } from './workflow-block/workflow-block'
export { WorkflowEdge } from './workflow-edge/workflow-edge'

View File

@@ -10,12 +10,12 @@ import {
} from 'react'
import {
ArrowUp,
Boxes,
Brain,
BrainCircuit,
BrainCog,
Check,
FileText,
Image,
Infinity as InfinityIcon,
Loader2,
MessageCircle,
Package,
@@ -435,14 +435,14 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
}
const getDepthLabel = () => {
if (agentDepth === 0) return 'Lite'
if (agentDepth === 0) return 'Fast'
if (agentDepth === 1) return 'Auto'
if (agentDepth === 2) return 'Pro'
return 'Max'
}
const getDepthLabelFor = (value: 0 | 1 | 2 | 3) => {
if (value === 0) return 'Lite'
if (value === 0) return 'Fast'
if (value === 1) return 'Auto'
if (value === 2) return 'Pro'
return 'Max'
@@ -459,9 +459,9 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
const getDepthIconFor = (value: 0 | 1 | 2 | 3) => {
if (value === 0) return <Zap className='h-3 w-3 text-muted-foreground' />
if (value === 1) return <Boxes className='h-3 w-3 text-muted-foreground' />
if (value === 2) return <BrainCircuit className='h-3 w-3 text-muted-foreground' />
return <BrainCog className='h-3 w-3 text-muted-foreground' />
if (value === 1) return <InfinityIcon className='h-3 w-3 text-muted-foreground' />
if (value === 2) return <Brain className='h-3 w-3 text-muted-foreground' />
return <BrainCircuit className='h-3 w-3 text-muted-foreground' />
}
const getDepthIcon = () => getDepthIconFor(agentDepth)
@@ -654,7 +654,7 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
)}
>
<span className='flex items-center gap-1.5'>
<Boxes className='h-3 w-3 text-muted-foreground' />
<InfinityIcon className='h-3 w-3 text-muted-foreground' />
Auto
</span>
{agentDepth === 1 && (
@@ -682,7 +682,7 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
>
<span className='flex items-center gap-1.5'>
<Zap className='h-3 w-3 text-muted-foreground' />
Lite
Fast
</span>
{agentDepth === 0 && (
<Check className='h-3 w-3 text-muted-foreground' />
@@ -709,7 +709,7 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
)}
>
<span className='flex items-center gap-1.5'>
<BrainCircuit className='h-3 w-3 text-muted-foreground' />
<Brain className='h-3 w-3 text-muted-foreground' />
Pro
</span>
{agentDepth === 2 && (
@@ -737,7 +737,7 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
)}
>
<span className='flex items-center gap-1.5'>
<BrainCog className='h-3 w-3 text-muted-foreground' />
<BrainCircuit className='h-3 w-3 text-muted-foreground' />
Max
</span>
{agentDepth === 3 && (

View File

@@ -0,0 +1,388 @@
import { beforeEach, describe, expect, it, vi } from 'vitest'
// Mock hooks
const mockCollaborativeUpdates = {
collaborativeUpdateLoopType: vi.fn(),
collaborativeUpdateParallelType: vi.fn(),
collaborativeUpdateIterationCount: vi.fn(),
collaborativeUpdateIterationCollection: vi.fn(),
}
const mockStoreData = {
loops: {},
parallels: {},
}
vi.mock('@/hooks/use-collaborative-workflow', () => ({
useCollaborativeWorkflow: () => mockCollaborativeUpdates,
}))
vi.mock('@/stores/workflows/workflow/store', () => ({
useWorkflowStore: () => mockStoreData,
}))
vi.mock('@/components/ui/badge', () => ({
Badge: ({ children, ...props }: any) => (
<div data-testid='badge' {...props}>
{children}
</div>
),
}))
vi.mock('@/components/ui/input', () => ({
Input: (props: any) => <input data-testid='input' {...props} />,
}))
vi.mock('@/components/ui/popover', () => ({
Popover: ({ children }: any) => <div data-testid='popover'>{children}</div>,
PopoverContent: ({ children }: any) => <div data-testid='popover-content'>{children}</div>,
PopoverTrigger: ({ children }: any) => <div data-testid='popover-trigger'>{children}</div>,
}))
vi.mock('@/components/ui/tag-dropdown', () => ({
checkTagTrigger: vi.fn(() => ({ show: false })),
TagDropdown: ({ children }: any) => <div data-testid='tag-dropdown'>{children}</div>,
}))
vi.mock('react-simple-code-editor', () => ({
default: (props: any) => <textarea data-testid='code-editor' {...props} />,
}))
describe('IterationBadges', () => {
const defaultProps = {
nodeId: 'test-node-1',
data: {
width: 500,
height: 300,
isPreview: false,
},
iterationType: 'loop' as const,
}
beforeEach(() => {
vi.clearAllMocks()
mockStoreData.loops = {}
mockStoreData.parallels = {}
})
describe('Component Interface', () => {
it.concurrent('should accept required props', () => {
expect(defaultProps.nodeId).toBeDefined()
expect(defaultProps.data).toBeDefined()
expect(defaultProps.iterationType).toBeDefined()
})
it.concurrent('should handle loop iteration type prop', () => {
const loopProps = { ...defaultProps, iterationType: 'loop' as const }
expect(loopProps.iterationType).toBe('loop')
})
it.concurrent('should handle parallel iteration type prop', () => {
const parallelProps = { ...defaultProps, iterationType: 'parallel' as const }
expect(parallelProps.iterationType).toBe('parallel')
})
})
describe('Configuration System', () => {
it.concurrent('should use correct config for loop type', () => {
const CONFIG = {
loop: {
typeLabels: { for: 'For Loop', forEach: 'For Each' },
typeKey: 'loopType' as const,
storeKey: 'loops' as const,
maxIterations: 100,
configKeys: {
iterations: 'iterations' as const,
items: 'forEachItems' as const,
},
},
}
expect(CONFIG.loop.typeLabels.for).toBe('For Loop')
expect(CONFIG.loop.typeLabels.forEach).toBe('For Each')
expect(CONFIG.loop.maxIterations).toBe(100)
expect(CONFIG.loop.storeKey).toBe('loops')
})
it.concurrent('should use correct config for parallel type', () => {
const CONFIG = {
parallel: {
typeLabels: { count: 'Parallel Count', collection: 'Parallel Each' },
typeKey: 'parallelType' as const,
storeKey: 'parallels' as const,
maxIterations: 20,
configKeys: {
iterations: 'count' as const,
items: 'distribution' as const,
},
},
}
expect(CONFIG.parallel.typeLabels.count).toBe('Parallel Count')
expect(CONFIG.parallel.typeLabels.collection).toBe('Parallel Each')
expect(CONFIG.parallel.maxIterations).toBe(20)
expect(CONFIG.parallel.storeKey).toBe('parallels')
})
})
describe('Type Determination Logic', () => {
it.concurrent('should default to "for" for loop type', () => {
type IterationType = 'loop' | 'parallel'
const determineDefaultType = (iterationType: IterationType) => {
return iterationType === 'loop' ? 'for' : 'count'
}
const currentType = determineDefaultType('loop')
expect(currentType).toBe('for')
})
it.concurrent('should default to "count" for parallel type', () => {
type IterationType = 'loop' | 'parallel'
const determineDefaultType = (iterationType: IterationType) => {
return iterationType === 'loop' ? 'for' : 'count'
}
const currentType = determineDefaultType('parallel')
expect(currentType).toBe('count')
})
it.concurrent('should use explicit loopType when provided', () => {
type IterationType = 'loop' | 'parallel'
const determineType = (explicitType: string | undefined, iterationType: IterationType) => {
return explicitType || (iterationType === 'loop' ? 'for' : 'count')
}
const currentType = determineType('forEach', 'loop')
expect(currentType).toBe('forEach')
})
it.concurrent('should use explicit parallelType when provided', () => {
type IterationType = 'loop' | 'parallel'
const determineType = (explicitType: string | undefined, iterationType: IterationType) => {
return explicitType || (iterationType === 'loop' ? 'for' : 'count')
}
const currentType = determineType('collection', 'parallel')
expect(currentType).toBe('collection')
})
})
describe('Count Mode Detection', () => {
it.concurrent('should be in count mode for loop + for combination', () => {
type IterationType = 'loop' | 'parallel'
type LoopType = 'for' | 'forEach'
type ParallelType = 'count' | 'collection'
const iterationType: IterationType = 'loop'
const currentType: LoopType = 'for'
const isCountMode = iterationType === 'loop' && currentType === 'for'
expect(isCountMode).toBe(true)
})
it.concurrent('should be in count mode for parallel + count combination', () => {
type IterationType = 'loop' | 'parallel'
type ParallelType = 'count' | 'collection'
const iterationType: IterationType = 'parallel'
const currentType: ParallelType = 'count'
const isCountMode = iterationType === 'parallel' && currentType === 'count'
expect(isCountMode).toBe(true)
})
it.concurrent('should not be in count mode for loop + forEach combination', () => {
type IterationType = 'loop' | 'parallel'
const testCountMode = (iterationType: IterationType, currentType: string) => {
return iterationType === 'loop' && currentType === 'for'
}
const isCountMode = testCountMode('loop', 'forEach')
expect(isCountMode).toBe(false)
})
it.concurrent('should not be in count mode for parallel + collection combination', () => {
type IterationType = 'loop' | 'parallel'
const testCountMode = (iterationType: IterationType, currentType: string) => {
return iterationType === 'parallel' && currentType === 'count'
}
const isCountMode = testCountMode('parallel', 'collection')
expect(isCountMode).toBe(false)
})
})
describe('Configuration Values', () => {
it.concurrent('should handle default iteration count', () => {
const data = { count: undefined }
const configIterations = data.count ?? 5
expect(configIterations).toBe(5)
})
it.concurrent('should use provided iteration count', () => {
const data = { count: 10 }
const configIterations = data.count ?? 5
expect(configIterations).toBe(10)
})
it.concurrent('should handle string collection', () => {
const collection = '[1, 2, 3, 4, 5]'
const collectionString =
typeof collection === 'string' ? collection : JSON.stringify(collection) || ''
expect(collectionString).toBe('[1, 2, 3, 4, 5]')
})
it.concurrent('should handle object collection', () => {
const collection = { items: [1, 2, 3] }
const collectionString =
typeof collection === 'string' ? collection : JSON.stringify(collection) || ''
expect(collectionString).toBe('{"items":[1,2,3]}')
})
it.concurrent('should handle array collection', () => {
const collection = [1, 2, 3, 4, 5]
const collectionString =
typeof collection === 'string' ? collection : JSON.stringify(collection) || ''
expect(collectionString).toBe('[1,2,3,4,5]')
})
})
describe('Preview Mode Handling', () => {
it.concurrent('should handle preview mode for loops', () => {
const previewProps = {
...defaultProps,
data: { ...defaultProps.data, isPreview: true },
iterationType: 'loop' as const,
}
expect(previewProps.data.isPreview).toBe(true)
// In preview mode, collaborative functions shouldn't be called
expect(mockCollaborativeUpdates.collaborativeUpdateLoopType).not.toHaveBeenCalled()
})
it.concurrent('should handle preview mode for parallels', () => {
const previewProps = {
...defaultProps,
data: { ...defaultProps.data, isPreview: true },
iterationType: 'parallel' as const,
}
expect(previewProps.data.isPreview).toBe(true)
// In preview mode, collaborative functions shouldn't be called
expect(mockCollaborativeUpdates.collaborativeUpdateParallelType).not.toHaveBeenCalled()
})
})
describe('Store Integration', () => {
it.concurrent('should access loops store for loop iteration type', () => {
const nodeId = 'loop-node-1'
;(mockStoreData.loops as any)[nodeId] = { iterations: 10 }
const nodeConfig = (mockStoreData.loops as any)[nodeId]
expect(nodeConfig).toBeDefined()
expect(nodeConfig.iterations).toBe(10)
})
it.concurrent('should access parallels store for parallel iteration type', () => {
const nodeId = 'parallel-node-1'
;(mockStoreData.parallels as any)[nodeId] = { count: 5 }
const nodeConfig = (mockStoreData.parallels as any)[nodeId]
expect(nodeConfig).toBeDefined()
expect(nodeConfig.count).toBe(5)
})
it.concurrent('should handle missing node configuration gracefully', () => {
const nodeId = 'missing-node'
const nodeConfig = (mockStoreData.loops as any)[nodeId]
expect(nodeConfig).toBeUndefined()
})
})
describe('Max Iterations Limits', () => {
it.concurrent('should enforce max iterations for loops (100)', () => {
const maxIterations = 100
const testValue = 150
const clampedValue = Math.min(maxIterations, testValue)
expect(clampedValue).toBe(100)
})
it.concurrent('should enforce max iterations for parallels (20)', () => {
const maxIterations = 20
const testValue = 50
const clampedValue = Math.min(maxIterations, testValue)
expect(clampedValue).toBe(20)
})
it.concurrent('should allow values within limits', () => {
const loopMaxIterations = 100
const parallelMaxIterations = 20
expect(Math.min(loopMaxIterations, 50)).toBe(50)
expect(Math.min(parallelMaxIterations, 10)).toBe(10)
})
})
describe('Collaborative Update Functions', () => {
it.concurrent('should have the correct collaborative functions available', () => {
expect(mockCollaborativeUpdates.collaborativeUpdateLoopType).toBeDefined()
expect(mockCollaborativeUpdates.collaborativeUpdateParallelType).toBeDefined()
expect(mockCollaborativeUpdates.collaborativeUpdateIterationCount).toBeDefined()
expect(mockCollaborativeUpdates.collaborativeUpdateIterationCollection).toBeDefined()
})
it.concurrent('should call correct function for loop type updates', () => {
const handleTypeChange = (newType: string, iterationType: string, nodeId: string) => {
if (iterationType === 'loop') {
mockCollaborativeUpdates.collaborativeUpdateLoopType(nodeId, newType)
} else {
mockCollaborativeUpdates.collaborativeUpdateParallelType(nodeId, newType)
}
}
handleTypeChange('forEach', 'loop', 'test-node')
expect(mockCollaborativeUpdates.collaborativeUpdateLoopType).toHaveBeenCalledWith(
'test-node',
'forEach'
)
})
it.concurrent('should call correct function for parallel type updates', () => {
const handleTypeChange = (newType: string, iterationType: string, nodeId: string) => {
if (iterationType === 'loop') {
mockCollaborativeUpdates.collaborativeUpdateLoopType(nodeId, newType)
} else {
mockCollaborativeUpdates.collaborativeUpdateParallelType(nodeId, newType)
}
}
handleTypeChange('collection', 'parallel', 'test-node')
expect(mockCollaborativeUpdates.collaborativeUpdateParallelType).toHaveBeenCalledWith(
'test-node',
'collection'
)
})
})
describe('Input Sanitization', () => {
it.concurrent('should sanitize numeric input by removing non-digits', () => {
const testInput = 'abc123def456'
const sanitized = testInput.replace(/[^0-9]/g, '')
expect(sanitized).toBe('123456')
})
it.concurrent('should handle empty input', () => {
const testInput = ''
const sanitized = testInput.replace(/[^0-9]/g, '')
expect(sanitized).toBe('')
})
it.concurrent('should preserve valid numeric input', () => {
const testInput = '42'
const sanitized = testInput.replace(/[^0-9]/g, '')
expect(sanitized).toBe('42')
})
})
})
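A compact sketch of the per-kind limits and input sanitization these tests exercise; the constants mirror the CONFIG literals and clamping values above, while sanitizeCount is a hypothetical helper name.
// Sketch only: strip non-digits, then clamp to the per-kind maximum (100 for loops, 20 for parallels).
type IterationType = 'loop' | 'parallel'
const MAX_ITERATIONS: Record<IterationType, number> = { loop: 100, parallel: 20 }
function sanitizeCount(raw: string, kind: IterationType): number {
  const digits = raw.replace(/[^0-9]/g, '')
  const parsed = digits === '' ? 0 : Number.parseInt(digits, 10)
  return Math.min(MAX_ITERATIONS[kind], parsed)
}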

View File

@@ -1,452 +0,0 @@
import { beforeEach, describe, expect, it, vi } from 'vitest'
import { LoopNodeComponent } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/loop/loop-node'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
vi.mock('@/stores/workflows/workflow/store', () => ({
useWorkflowStore: vi.fn(),
}))
vi.mock('@/lib/logs/logger', () => ({
createLogger: vi.fn(() => ({
debug: vi.fn(),
info: vi.fn(),
warn: vi.fn(),
error: vi.fn(),
})),
}))
vi.mock('reactflow', () => ({
Handle: ({ id, type, position }: any) => ({ id, type, position }),
Position: {
Top: 'top',
Bottom: 'bottom',
Left: 'left',
Right: 'right',
},
useReactFlow: () => ({
getNodes: vi.fn(() => []),
}),
NodeResizer: ({ isVisible }: any) => ({ isVisible }),
memo: (component: any) => component,
}))
vi.mock('react', async () => {
const actual = await vi.importActual('react')
return {
...actual,
memo: (component: any) => component,
useMemo: (fn: any) => fn(),
useRef: () => ({ current: null }),
}
})
vi.mock('@/components/ui/button', () => ({
Button: ({ children, onClick, ...props }: any) => ({ children, onClick, ...props }),
}))
vi.mock('@/components/ui/card', () => ({
Card: ({ children, ...props }: any) => ({ children, ...props }),
}))
vi.mock('@/components/icons', async (importOriginal) => {
const actual = (await importOriginal()) as any
return {
...actual,
// Override specific icons if needed for testing
StartIcon: ({ className }: any) => ({ className }),
}
})
vi.mock('@/lib/utils', () => ({
cn: (...classes: any[]) => classes.filter(Boolean).join(' '),
}))
vi.mock('@/app/workspace/[workspaceId]/w/[workflowId]/components/loop-badges', () => ({
LoopBadges: ({ loopId }: any) => ({ loopId }),
}))
describe('LoopNodeComponent', () => {
const mockRemoveBlock = vi.fn()
const mockGetNodes = vi.fn()
const defaultProps = {
id: 'loop-1',
type: 'loopNode',
data: {
width: 500,
height: 300,
state: 'valid',
},
selected: false,
zIndex: 1,
isConnectable: true,
xPos: 0,
yPos: 0,
dragging: false,
}
beforeEach(() => {
vi.clearAllMocks()
;(useWorkflowStore as any).mockImplementation((selector: any) => {
const state = {
removeBlock: mockRemoveBlock,
}
return selector(state)
})
mockGetNodes.mockReturnValue([])
})
describe('Component Definition and Structure', () => {
it('should be defined as a function component', () => {
expect(LoopNodeComponent).toBeDefined()
expect(typeof LoopNodeComponent).toBe('function')
})
it('should have correct display name', () => {
expect(LoopNodeComponent.displayName).toBe('LoopNodeComponent')
})
it('should be a memoized component', () => {
expect(LoopNodeComponent).toBeDefined()
})
})
describe('Props Validation and Type Safety', () => {
it('should accept NodeProps interface', () => {
const validProps = {
id: 'test-id',
type: 'loopNode' as const,
data: {
width: 400,
height: 300,
state: 'valid' as const,
},
selected: false,
zIndex: 1,
isConnectable: true,
xPos: 0,
yPos: 0,
dragging: false,
}
expect(() => {
const _component: typeof LoopNodeComponent = LoopNodeComponent
expect(_component).toBeDefined()
}).not.toThrow()
})
it('should handle different data configurations', () => {
const configurations = [
{ width: 500, height: 300, state: 'valid' },
{ width: 800, height: 600, state: 'invalid' },
{ width: 0, height: 0, state: 'pending' },
{},
]
configurations.forEach((data) => {
const props = { ...defaultProps, data }
expect(() => {
const _component: typeof LoopNodeComponent = LoopNodeComponent
expect(_component).toBeDefined()
}).not.toThrow()
})
})
})
describe('Store Integration', () => {
it('should integrate with workflow store', () => {
expect(useWorkflowStore).toBeDefined()
const mockState = { removeBlock: mockRemoveBlock }
const selector = vi.fn((state) => state.removeBlock)
expect(() => {
selector(mockState)
}).not.toThrow()
expect(selector(mockState)).toBe(mockRemoveBlock)
})
it('should handle removeBlock function', () => {
expect(mockRemoveBlock).toBeDefined()
expect(typeof mockRemoveBlock).toBe('function')
mockRemoveBlock('test-id')
expect(mockRemoveBlock).toHaveBeenCalledWith('test-id')
})
})
describe('Component Logic Tests', () => {
it('should handle nesting level calculation logic', () => {
const testCases = [
{ nodes: [], parentId: undefined, expectedLevel: 0 },
{ nodes: [{ id: 'parent', data: {} }], parentId: 'parent', expectedLevel: 1 },
{
nodes: [
{ id: 'parent', data: { parentId: 'grandparent' } },
{ id: 'grandparent', data: {} },
],
parentId: 'parent',
expectedLevel: 2,
},
]
testCases.forEach(({ nodes, parentId, expectedLevel }) => {
mockGetNodes.mockReturnValue(nodes)
// Simulate the nesting level calculation logic
let level = 0
let currentParentId = parentId
while (currentParentId) {
level++
const parentNode = nodes.find((n) => n.id === currentParentId)
if (!parentNode) break
currentParentId = parentNode.data?.parentId
}
expect(level).toBe(expectedLevel)
})
})
it('should handle nested styles generation', () => {
// Test the nested styles logic
const testCases = [
{ nestingLevel: 0, state: 'valid', expectedBg: 'rgba(34,197,94,0.05)' },
{ nestingLevel: 0, state: 'invalid', expectedBg: 'transparent' },
{ nestingLevel: 1, state: 'valid', expectedBg: '#e2e8f030' },
{ nestingLevel: 2, state: 'valid', expectedBg: '#cbd5e130' },
]
testCases.forEach(({ nestingLevel, state, expectedBg }) => {
// Simulate the getNestedStyles logic
const styles: Record<string, string> = {
backgroundColor: state === 'valid' ? 'rgba(34,197,94,0.05)' : 'transparent',
}
if (nestingLevel > 0) {
const colors = ['#e2e8f0', '#cbd5e1', '#94a3b8', '#64748b', '#475569']
const colorIndex = (nestingLevel - 1) % colors.length
styles.backgroundColor = `${colors[colorIndex]}30`
}
expect(styles.backgroundColor).toBe(expectedBg)
})
})
})
describe('Component Configuration', () => {
it('should handle different dimensions', () => {
const dimensionTests = [
{ width: 500, height: 300 },
{ width: 800, height: 600 },
{ width: 0, height: 0 },
{ width: 10000, height: 10000 },
]
dimensionTests.forEach(({ width, height }) => {
const data = { width, height, state: 'valid' }
expect(data.width).toBe(width)
expect(data.height).toBe(height)
})
})
it('should handle different states', () => {
const stateTests = ['valid', 'invalid', 'pending', 'executing']
stateTests.forEach((state) => {
const data = { width: 500, height: 300, state }
expect(data.state).toBe(state)
})
})
})
describe('Event Handling Logic', () => {
it('should handle delete button click logic', () => {
const mockEvent = {
stopPropagation: vi.fn(),
}
// Simulate the delete button click handler
const handleDelete = (e: any, nodeId: string) => {
e.stopPropagation()
mockRemoveBlock(nodeId)
}
handleDelete(mockEvent, 'test-id')
expect(mockEvent.stopPropagation).toHaveBeenCalled()
expect(mockRemoveBlock).toHaveBeenCalledWith('test-id')
})
it('should handle event propagation prevention', () => {
const mockEvent = {
stopPropagation: vi.fn(),
}
// Test that stopPropagation is called
mockEvent.stopPropagation()
expect(mockEvent.stopPropagation).toHaveBeenCalled()
})
})
describe('Component Data Handling', () => {
it('should handle missing data properties gracefully', () => {
const testCases = [
undefined,
{},
{ width: 500 },
{ height: 300 },
{ state: 'valid' },
{ width: 500, height: 300 },
]
testCases.forEach((data) => {
const props = { ...defaultProps, data }
// Test default values logic
const width = Math.max(0, data?.width || 500)
const height = Math.max(0, data?.height || 300)
expect(width).toBeGreaterThanOrEqual(0)
expect(height).toBeGreaterThanOrEqual(0)
})
})
it('should handle parent ID relationships', () => {
const testCases = [
{ parentId: undefined, hasParent: false },
{ parentId: 'parent-1', hasParent: true },
{ parentId: '', hasParent: false },
]
testCases.forEach(({ parentId, hasParent }) => {
const data = { ...defaultProps.data, parentId }
expect(Boolean(data.parentId)).toBe(hasParent)
})
})
})
describe('Edge Cases and Error Handling', () => {
it('should handle circular parent references', () => {
// Test circular reference prevention
const nodes = [
{ id: 'node1', data: { parentId: 'node2' } },
{ id: 'node2', data: { parentId: 'node1' } },
]
mockGetNodes.mockReturnValue(nodes)
// Test the actual component's nesting level calculation logic
// This simulates the real useMemo logic from the component
let level = 0
let currentParentId = 'node1'
const visited = new Set<string>()
// This is the actual logic pattern used in the component
while (currentParentId) {
// If we've seen this parent before, we have a cycle - break immediately
if (visited.has(currentParentId)) {
break
}
visited.add(currentParentId)
level++
const parentNode = nodes.find((n) => n.id === currentParentId)
if (!parentNode) break
currentParentId = parentNode.data?.parentId
}
// With proper circular reference detection, we should stop at level 2
// (node1 -> node2, then detect cycle when trying to go back to node1)
expect(level).toBe(2)
expect(visited.has('node1')).toBe(true)
expect(visited.has('node2')).toBe(true)
})
it('should handle complex circular reference chains', () => {
// Test more complex circular reference scenarios
const nodes = [
{ id: 'node1', data: { parentId: 'node2' } },
{ id: 'node2', data: { parentId: 'node3' } },
{ id: 'node3', data: { parentId: 'node1' } }, // Creates a 3-node cycle
]
mockGetNodes.mockReturnValue(nodes)
let level = 0
let currentParentId = 'node1'
const visited = new Set<string>()
while (currentParentId) {
if (visited.has(currentParentId)) {
break // Cycle detected
}
visited.add(currentParentId)
level++
const parentNode = nodes.find((n) => n.id === currentParentId)
if (!parentNode) break
currentParentId = parentNode.data?.parentId
}
// Should traverse node1 -> node2 -> node3, then detect cycle
expect(level).toBe(3)
expect(visited.size).toBe(3)
})
it('should handle self-referencing nodes', () => {
// Test node that references itself
const nodes = [
{ id: 'node1', data: { parentId: 'node1' } }, // Self-reference
]
mockGetNodes.mockReturnValue(nodes)
let level = 0
let currentParentId = 'node1'
const visited = new Set<string>()
while (currentParentId) {
if (visited.has(currentParentId)) {
break // Cycle detected immediately
}
visited.add(currentParentId)
level++
const parentNode = nodes.find((n) => n.id === currentParentId)
if (!parentNode) break
currentParentId = parentNode.data?.parentId
}
// Should detect self-reference immediately after first iteration
expect(level).toBe(1)
expect(visited.has('node1')).toBe(true)
})
it('should handle extreme values', () => {
const extremeValues = [
{ width: Number.MAX_SAFE_INTEGER, height: Number.MAX_SAFE_INTEGER },
{ width: -1, height: -1 },
{ width: 0, height: 0 },
{ width: null, height: null },
]
extremeValues.forEach((data) => {
expect(() => {
const width = data.width || 500
const height = data.height || 300
expect(typeof width).toBe('number')
expect(typeof height).toBe('number')
}).not.toThrow()
})
})
})
})
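The cycle-safe parent traversal these tests describe can be summarized as a small standalone helper; SketchNode and computeNestingLevel are hypothetical names, and the node shape is the minimal one the tests assume.
// Sketch only: walk up parentId links, counting levels and stopping on missing parents or cycles.
interface SketchNode {
  id: string
  data?: { parentId?: string }
}
function computeNestingLevel(nodes: SketchNode[], startParentId?: string): number {
  let level = 0
  let currentParentId = startParentId
  const visited = new Set<string>()
  while (currentParentId) {
    if (visited.has(currentParentId)) break // cycle detected
    visited.add(currentParentId)
    level++
    const parentNode = nodes.find((n) => n.id === currentParentId)
    if (!parentNode) break
    currentParentId = parentNode.data?.parentId
  }
  return level
}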

View File

@@ -1,585 +0,0 @@
import { beforeEach, describe, expect, it, vi } from 'vitest'
import { ParallelNodeComponent } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/parallel/parallel-node'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
vi.mock('@/stores/workflows/workflow/store', () => ({
useWorkflowStore: vi.fn(),
}))
vi.mock('@/lib/logs/logger', () => ({
createLogger: vi.fn(() => ({
debug: vi.fn(),
info: vi.fn(),
warn: vi.fn(),
error: vi.fn(),
})),
}))
vi.mock('reactflow', () => ({
Handle: ({ id, type, position }: any) => ({ id, type, position }),
Position: {
Top: 'top',
Bottom: 'bottom',
Left: 'left',
Right: 'right',
},
useReactFlow: () => ({
getNodes: vi.fn(() => []),
}),
NodeResizer: ({ isVisible }: any) => ({ isVisible }),
memo: (component: any) => component,
}))
vi.mock('react', async () => {
const actual = await vi.importActual('react')
return {
...actual,
memo: (component: any) => component,
useMemo: (fn: any) => fn(),
useRef: () => ({ current: null }),
}
})
vi.mock('@/components/ui/button', () => ({
Button: ({ children, onClick, ...props }: any) => ({ children, onClick, ...props }),
}))
vi.mock('@/components/ui/card', () => ({
Card: ({ children, ...props }: any) => ({ children, ...props }),
}))
vi.mock('@/blocks/registry', () => ({
getBlock: vi.fn(() => ({
name: 'Mock Block',
description: 'Mock block description',
icon: () => null,
subBlocks: [],
outputs: {},
})),
getAllBlocks: vi.fn(() => ({})),
}))
vi.mock('@/lib/utils', () => ({
cn: (...classes: any[]) => classes.filter(Boolean).join(' '),
}))
vi.mock(
'@/app/workspace/[workspaceId]/w/[workflowId]/components/parallel-node/components/parallel-badges',
() => ({
ParallelBadges: ({ parallelId }: any) => ({ parallelId }),
})
)
describe('ParallelNodeComponent', () => {
const mockRemoveBlock = vi.fn()
const mockGetNodes = vi.fn()
const defaultProps = {
id: 'parallel-1',
type: 'parallelNode',
data: {
width: 500,
height: 300,
state: 'valid',
},
selected: false,
zIndex: 1,
isConnectable: true,
xPos: 0,
yPos: 0,
dragging: false,
}
beforeEach(() => {
vi.clearAllMocks()
;(useWorkflowStore as any).mockImplementation((selector: any) => {
const state = {
removeBlock: mockRemoveBlock,
}
return selector(state)
})
mockGetNodes.mockReturnValue([])
})
describe('Component Definition and Structure', () => {
it.concurrent('should be defined as a function component', () => {
expect(ParallelNodeComponent).toBeDefined()
expect(typeof ParallelNodeComponent).toBe('function')
})
it.concurrent('should have correct display name', () => {
expect(ParallelNodeComponent.displayName).toBe('ParallelNodeComponent')
})
it.concurrent('should be a memoized component', () => {
expect(ParallelNodeComponent).toBeDefined()
})
})
describe('Props Validation and Type Safety', () => {
it.concurrent('should accept NodeProps interface', () => {
expect(() => {
const _component: typeof ParallelNodeComponent = ParallelNodeComponent
expect(_component).toBeDefined()
}).not.toThrow()
})
it.concurrent('should handle different data configurations', () => {
const configurations = [
{ width: 500, height: 300, state: 'valid' },
{ width: 800, height: 600, state: 'invalid' },
{ width: 0, height: 0, state: 'pending' },
{},
]
configurations.forEach((data) => {
const props = { ...defaultProps, data }
expect(() => {
const _component: typeof ParallelNodeComponent = ParallelNodeComponent
expect(_component).toBeDefined()
}).not.toThrow()
})
})
})
describe('Store Integration', () => {
it.concurrent('should integrate with workflow store', () => {
expect(useWorkflowStore).toBeDefined()
const mockState = { removeBlock: mockRemoveBlock }
const selector = vi.fn((state) => state.removeBlock)
expect(() => {
selector(mockState)
}).not.toThrow()
expect(selector(mockState)).toBe(mockRemoveBlock)
})
it.concurrent('should handle removeBlock function', () => {
expect(mockRemoveBlock).toBeDefined()
expect(typeof mockRemoveBlock).toBe('function')
mockRemoveBlock('test-id')
expect(mockRemoveBlock).toHaveBeenCalledWith('test-id')
})
})
describe('Component Logic Tests', () => {
it.concurrent('should handle nesting level calculation logic', () => {
const testCases = [
{ nodes: [], parentId: undefined, expectedLevel: 0 },
{ nodes: [{ id: 'parent', data: {} }], parentId: 'parent', expectedLevel: 1 },
{
nodes: [
{ id: 'parent', data: { parentId: 'grandparent' } },
{ id: 'grandparent', data: {} },
],
parentId: 'parent',
expectedLevel: 2,
},
]
testCases.forEach(({ nodes, parentId, expectedLevel }) => {
mockGetNodes.mockReturnValue(nodes)
let level = 0
let currentParentId = parentId
while (currentParentId) {
level++
const parentNode = nodes.find((n) => n.id === currentParentId)
if (!parentNode) break
currentParentId = parentNode.data?.parentId
}
expect(level).toBe(expectedLevel)
})
})
it.concurrent('should handle nested styles generation for parallel nodes', () => {
const testCases = [
{ nestingLevel: 0, state: 'valid', expectedBg: 'rgba(254,225,43,0.05)' },
{ nestingLevel: 0, state: 'invalid', expectedBg: 'transparent' },
{ nestingLevel: 1, state: 'valid', expectedBg: '#e2e8f030' },
{ nestingLevel: 2, state: 'valid', expectedBg: '#cbd5e130' },
]
testCases.forEach(({ nestingLevel, state, expectedBg }) => {
const styles: Record<string, string> = {
backgroundColor: state === 'valid' ? 'rgba(254,225,43,0.05)' : 'transparent',
}
if (nestingLevel > 0) {
const colors = ['#e2e8f0', '#cbd5e1', '#94a3b8', '#64748b', '#475569']
const colorIndex = (nestingLevel - 1) % colors.length
styles.backgroundColor = `${colors[colorIndex]}30`
}
expect(styles.backgroundColor).toBe(expectedBg)
})
})
})
describe('Parallel-Specific Features', () => {
it.concurrent('should handle parallel execution states', () => {
const parallelStates = ['valid', 'invalid', 'executing', 'completed', 'pending']
parallelStates.forEach((state) => {
const data = { width: 500, height: 300, state }
expect(data.state).toBe(state)
const isExecuting = state === 'executing'
const isCompleted = state === 'completed'
expect(typeof isExecuting).toBe('boolean')
expect(typeof isCompleted).toBe('boolean')
})
})
it.concurrent('should handle parallel node color scheme', () => {
const parallelColors = {
background: 'rgba(254,225,43,0.05)',
ring: '#FEE12B',
startIcon: '#FEE12B',
}
expect(parallelColors.background).toContain('254,225,43')
expect(parallelColors.ring).toBe('#FEE12B')
expect(parallelColors.startIcon).toBe('#FEE12B')
})
it.concurrent('should differentiate from loop node styling', () => {
const loopColors = {
background: 'rgba(34,197,94,0.05)',
ring: '#2FB3FF',
startIcon: '#2FB3FF',
}
const parallelColors = {
background: 'rgba(254,225,43,0.05)',
ring: '#FEE12B',
startIcon: '#FEE12B',
}
expect(parallelColors.background).not.toBe(loopColors.background)
expect(parallelColors.ring).not.toBe(loopColors.ring)
expect(parallelColors.startIcon).not.toBe(loopColors.startIcon)
})
})
describe('Component Configuration', () => {
it.concurrent('should handle different dimensions', () => {
const dimensionTests = [
{ width: 500, height: 300 },
{ width: 800, height: 600 },
{ width: 0, height: 0 },
{ width: 10000, height: 10000 },
]
dimensionTests.forEach(({ width, height }) => {
const data = { width, height, state: 'valid' }
expect(data.width).toBe(width)
expect(data.height).toBe(height)
})
})
it.concurrent('should handle different states', () => {
const stateTests = ['valid', 'invalid', 'pending', 'executing', 'completed']
stateTests.forEach((state) => {
const data = { width: 500, height: 300, state }
expect(data.state).toBe(state)
})
})
})
describe('Event Handling Logic', () => {
it.concurrent('should handle delete button click logic', () => {
const mockEvent = {
stopPropagation: vi.fn(),
}
const handleDelete = (e: any, nodeId: string) => {
e.stopPropagation()
mockRemoveBlock(nodeId)
}
handleDelete(mockEvent, 'test-id')
expect(mockEvent.stopPropagation).toHaveBeenCalled()
expect(mockRemoveBlock).toHaveBeenCalledWith('test-id')
})
it.concurrent('should handle event propagation prevention', () => {
const mockEvent = {
stopPropagation: vi.fn(),
}
mockEvent.stopPropagation()
expect(mockEvent.stopPropagation).toHaveBeenCalled()
})
})
describe('Component Data Handling', () => {
it.concurrent('should handle missing data properties gracefully', () => {
const testCases = [
undefined,
{},
{ width: 500 },
{ height: 300 },
{ state: 'valid' },
{ width: 500, height: 300 },
]
testCases.forEach((data) => {
const props = { ...defaultProps, data }
// Test default values logic
const width = data?.width || 500
const height = data?.height || 300
expect(width).toBeGreaterThanOrEqual(0)
expect(height).toBeGreaterThanOrEqual(0)
})
})
it.concurrent('should handle parent ID relationships', () => {
const testCases = [
{ parentId: undefined, hasParent: false },
{ parentId: 'parent-1', hasParent: true },
{ parentId: '', hasParent: false },
]
testCases.forEach(({ parentId, hasParent }) => {
const data = { ...defaultProps.data, parentId }
expect(Boolean(data.parentId)).toBe(hasParent)
})
})
})
describe('Handle Configuration', () => {
it.concurrent('should have correct handle IDs for parallel nodes', () => {
const handleIds = {
startSource: 'parallel-start-source',
endSource: 'parallel-end-source',
}
expect(handleIds.startSource).toContain('parallel')
expect(handleIds.endSource).toContain('parallel')
expect(handleIds.startSource).not.toContain('loop')
expect(handleIds.endSource).not.toContain('loop')
})
it.concurrent('should handle different handle positions', () => {
const positions = {
left: 'left',
right: 'right',
top: 'top',
bottom: 'bottom',
}
Object.values(positions).forEach((position) => {
expect(typeof position).toBe('string')
expect(position.length).toBeGreaterThan(0)
})
})
})
describe('Edge Cases and Error Handling', () => {
it.concurrent('should handle circular parent references', () => {
// Test circular reference prevention
const nodes = [
{ id: 'node1', data: { parentId: 'node2' } },
{ id: 'node2', data: { parentId: 'node1' } },
]
mockGetNodes.mockReturnValue(nodes)
// Test the actual component's nesting level calculation logic
// This simulates the real useMemo logic from the component
let level = 0
let currentParentId = 'node1'
const visited = new Set<string>()
// This is the actual logic pattern used in the component
while (currentParentId) {
// If we've seen this parent before, we have a cycle - break immediately
if (visited.has(currentParentId)) {
break
}
visited.add(currentParentId)
level++
const parentNode = nodes.find((n) => n.id === currentParentId)
if (!parentNode) break
currentParentId = parentNode.data?.parentId
}
// With proper circular reference detection, we should stop at level 2
// (node1 -> node2, then detect cycle when trying to go back to node1)
expect(level).toBe(2)
expect(visited.has('node1')).toBe(true)
expect(visited.has('node2')).toBe(true)
})
it.concurrent('should handle complex circular reference chains', () => {
// Test more complex circular reference scenarios
const nodes = [
{ id: 'node1', data: { parentId: 'node2' } },
{ id: 'node2', data: { parentId: 'node3' } },
{ id: 'node3', data: { parentId: 'node1' } }, // Creates a 3-node cycle
]
mockGetNodes.mockReturnValue(nodes)
let level = 0
let currentParentId = 'node1'
const visited = new Set<string>()
while (currentParentId) {
if (visited.has(currentParentId)) {
break // Cycle detected
}
visited.add(currentParentId)
level++
const parentNode = nodes.find((n) => n.id === currentParentId)
if (!parentNode) break
currentParentId = parentNode.data?.parentId
}
// Should traverse node1 -> node2 -> node3, then detect cycle
expect(level).toBe(3)
expect(visited.size).toBe(3)
})
it.concurrent('should handle self-referencing nodes', () => {
// Test node that references itself
const nodes = [
{ id: 'node1', data: { parentId: 'node1' } }, // Self-reference
]
mockGetNodes.mockReturnValue(nodes)
let level = 0
let currentParentId = 'node1'
const visited = new Set<string>()
while (currentParentId) {
if (visited.has(currentParentId)) {
break // Cycle detected immediately
}
visited.add(currentParentId)
level++
const parentNode = nodes.find((n) => n.id === currentParentId)
if (!parentNode) break
currentParentId = parentNode.data?.parentId
}
// Should detect self-reference immediately after first iteration
expect(level).toBe(1)
expect(visited.has('node1')).toBe(true)
})
it.concurrent('should handle extreme values', () => {
const extremeValues = [
{ width: Number.MAX_SAFE_INTEGER, height: Number.MAX_SAFE_INTEGER },
{ width: -1, height: -1 },
{ width: 0, height: 0 },
{ width: null, height: null },
]
extremeValues.forEach((data) => {
expect(() => {
const width = data.width || 500
const height = data.height || 300
expect(typeof width).toBe('number')
expect(typeof height).toBe('number')
}).not.toThrow()
})
})
it.concurrent('should handle negative position values', () => {
const positions = [
{ xPos: -100, yPos: -200 },
{ xPos: 0, yPos: 0 },
{ xPos: 1000, yPos: 2000 },
]
positions.forEach(({ xPos, yPos }) => {
const props = { ...defaultProps, xPos, yPos }
expect(props.xPos).toBe(xPos)
expect(props.yPos).toBe(yPos)
expect(typeof props.xPos).toBe('number')
expect(typeof props.yPos).toBe('number')
})
})
})
describe('Component Comparison with Loop Node', () => {
it.concurrent('should have similar structure to loop node but different type', () => {
expect(defaultProps.type).toBe('parallelNode')
expect(defaultProps.id).toContain('parallel')
// Should not be a loop node
expect(defaultProps.type).not.toBe('loopNode')
expect(defaultProps.id).not.toContain('loop')
})
it.concurrent('should handle the same prop structure as loop node', () => {
// Test that parallel node accepts the same prop structure as loop node
const sharedPropStructure = {
id: 'test-parallel',
type: 'parallelNode' as const,
data: {
width: 400,
height: 300,
state: 'valid' as const,
},
selected: false,
zIndex: 1,
isConnectable: true,
xPos: 0,
yPos: 0,
dragging: false,
}
expect(() => {
const _component: typeof ParallelNodeComponent = ParallelNodeComponent
expect(_component).toBeDefined()
}).not.toThrow()
// Verify the structure
expect(sharedPropStructure.type).toBe('parallelNode')
expect(sharedPropStructure.data.width).toBe(400)
expect(sharedPropStructure.data.height).toBe(300)
})
it.concurrent('should maintain consistency with loop node interface', () => {
const baseProps = [
'id',
'type',
'data',
'selected',
'zIndex',
'isConnectable',
'xPos',
'yPos',
'dragging',
]
baseProps.forEach((prop) => {
expect(defaultProps).toHaveProperty(prop)
})
})
})
})

View File

@@ -1,273 +0,0 @@
import type React from 'react'
import { memo, useMemo, useRef } from 'react'
import { Trash2 } from 'lucide-react'
import { Handle, type NodeProps, Position, useReactFlow } from 'reactflow'
import { StartIcon } from '@/components/icons'
import { Button } from '@/components/ui/button'
import { Card } from '@/components/ui/card'
import { cn } from '@/lib/utils'
import { IterationBadges } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/components/iteration-badges/iteration-badges'
import { useCurrentWorkflow } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks'
import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'
const ParallelNodeStyles: React.FC = () => {
return (
<style jsx global>{`
@keyframes parallel-node-pulse {
0% {
box-shadow: 0 0 0 0 rgba(139, 195, 74, 0.3);
}
70% {
box-shadow: 0 0 0 6px rgba(139, 195, 74, 0);
}
100% {
box-shadow: 0 0 0 0 rgba(139, 195, 74, 0);
}
}
.parallel-node-drag-over {
animation: parallel-node-pulse 1.2s cubic-bezier(0.4, 0, 0.6, 1)
infinite;
border-style: solid !important;
background-color: rgba(139, 195, 74, 0.08) !important;
box-shadow: 0 0 0 8px rgba(139, 195, 74, 0.1);
}
/* Make resizer handles more visible */
.react-flow__resize-control {
z-index: 10;
pointer-events: all !important;
}
/* Ensure parent borders are visible when hovering over resize controls */
.react-flow__node-group:hover,
.hover-highlight {
border-color: #1e293b !important;
}
/* Ensure hover effects work well */
.group-node-container:hover .react-flow__resize-control.bottom-right {
opacity: 1 !important;
visibility: visible !important;
}
/* React Flow position transitions within parallel blocks */
.react-flow__node[data-parent-node-id] {
transition: transform 0.05s ease;
pointer-events: all;
}
/* Prevent jumpy drag behavior */
.parallel-drop-container .react-flow__node {
transform-origin: center;
position: absolute;
}
/* Remove default border from React Flow group nodes */
.react-flow__node-group {
border: none;
background-color: transparent;
outline: none;
box-shadow: none;
}
/* Ensure child nodes stay within parent bounds */
.react-flow__node[data-parent-node-id] .react-flow__handle {
z-index: 30;
}
/* Enhanced drag detection */
.react-flow__node-group.dragging-over {
background-color: rgba(139, 195, 74, 0.05);
transition: all 0.2s ease-in-out;
}
`}</style>
)
}
export const ParallelNodeComponent = memo(({ data, selected, id }: NodeProps) => {
const { getNodes } = useReactFlow()
const { collaborativeRemoveBlock } = useCollaborativeWorkflow()
const blockRef = useRef<HTMLDivElement>(null)
// Use the clean abstraction for current workflow state
const currentWorkflow = useCurrentWorkflow()
const currentBlock = currentWorkflow.getBlockById(id)
const diffStatus =
currentWorkflow.isDiffMode && currentBlock ? (currentBlock as any).is_diff : undefined
// Check if this is preview mode
const isPreview = data?.isPreview || false
// Determine nesting level by counting parents
const nestingLevel = useMemo(() => {
const maxDepth = 100 // Prevent infinite loops
let level = 0
let currentParentId = data?.parentId
while (currentParentId && level < maxDepth) {
level++
const parentNode = getNodes().find((n) => n.id === currentParentId)
if (!parentNode) break
currentParentId = parentNode.data?.parentId
}
return level
}, [id, data?.parentId, getNodes])
// Generate different background styles based on nesting level
const getNestedStyles = () => {
// Base styles
const styles: Record<string, string> = {
backgroundColor: 'rgba(0, 0, 0, 0.02)',
}
// Apply nested styles
if (nestingLevel > 0) {
// Each nesting level gets a different color
const colors = ['#e2e8f0', '#cbd5e1', '#94a3b8', '#64748b', '#475569']
const colorIndex = (nestingLevel - 1) % colors.length
styles.backgroundColor = `${colors[colorIndex]}30` // Slightly more visible background
}
return styles
}
const nestedStyles = getNestedStyles()
return (
<>
<ParallelNodeStyles />
<div className='group relative'>
<Card
ref={blockRef}
className={cn(
'relative cursor-default select-none',
'transition-block-bg transition-ring',
'z-[20]',
data?.state === 'valid',
nestingLevel > 0 &&
`border border-[0.5px] ${nestingLevel % 2 === 0 ? 'border-slate-300/60' : 'border-slate-400/60'}`,
data?.hasNestedError && 'border-2 border-red-500 bg-red-50/50',
// Diff highlighting
diffStatus === 'new' && 'bg-green-50/50 ring-2 ring-green-500 dark:bg-green-900/10',
diffStatus === 'edited' &&
'bg-orange-50/50 ring-2 ring-orange-500 dark:bg-orange-900/10'
)}
style={{
width: data.width || 500,
height: data.height || 300,
position: 'relative',
overflow: 'visible',
...nestedStyles,
pointerEvents: isPreview ? 'none' : 'all',
}}
data-node-id={id}
data-type='parallelNode'
data-nesting-level={nestingLevel}
>
{/* Critical drag handle that controls only the parallel node movement */}
{!isPreview && (
<div
className='workflow-drag-handle absolute top-0 right-0 left-0 z-10 h-10 cursor-move'
style={{ pointerEvents: 'auto' }}
/>
)}
{/* Custom visible resize handle */}
{!isPreview && (
<div
className='absolute right-2 bottom-2 z-20 flex h-8 w-8 cursor-se-resize items-center justify-center text-muted-foreground'
style={{ pointerEvents: 'auto' }}
/>
)}
{/* Child nodes container - Set pointerEvents to allow dragging of children */}
<div
className='h-[calc(100%-10px)] p-4'
data-dragarea='true'
style={{
position: 'relative',
minHeight: '100%',
pointerEvents: isPreview ? 'none' : 'auto',
}}
>
{/* Delete button - styled like in action-bar.tsx */}
{!isPreview && (
<Button
variant='ghost'
size='sm'
onClick={(e) => {
e.stopPropagation()
collaborativeRemoveBlock(id)
}}
className='absolute top-2 right-2 z-20 text-gray-500 opacity-0 transition-opacity duration-200 hover:text-red-600 group-hover:opacity-100'
style={{ pointerEvents: 'auto' }}
>
<Trash2 className='h-4 w-4' />
</Button>
)}
{/* Parallel Start Block */}
<div
className='-translate-y-1/2 absolute top-1/2 left-8 flex h-10 w-10 transform items-center justify-center rounded-md bg-[#FEE12B] p-2'
style={{ pointerEvents: isPreview ? 'none' : 'auto' }}
data-parent-id={id}
data-node-role='parallel-start'
data-extent='parent'
>
<StartIcon className='h-6 w-6 text-white' />
<Handle
type='source'
position={Position.Right}
id='parallel-start-source'
className='!w-[6px] !h-4 !bg-slate-300 dark:!bg-slate-500 !rounded-[2px] !border-none !z-[30] hover:!w-[10px] hover:!right-[-10px] hover:!rounded-r-full hover:!rounded-l-none !cursor-crosshair transition-[colors] duration-150'
style={{
right: '-6px',
top: '50%',
transform: 'translateY(-50%)',
pointerEvents: 'auto',
}}
data-parent-id={id}
/>
</div>
</div>
{/* Input handle on left middle */}
<Handle
type='target'
position={Position.Left}
className='!w-[7px] !h-5 !bg-slate-300 dark:!bg-slate-500 !rounded-[2px] !border-none !z-[30] hover:!w-[10px] hover:!left-[-10px] hover:!rounded-l-full hover:!rounded-r-none !cursor-crosshair transition-[colors] duration-150'
style={{
left: '-7px',
top: '50%',
transform: 'translateY(-50%)',
pointerEvents: 'auto',
}}
/>
{/* Output handle on right middle */}
<Handle
type='source'
position={Position.Right}
className='!w-[7px] !h-5 !bg-slate-300 dark:!bg-slate-500 !rounded-[2px] !border-none !z-[30] hover:!w-[10px] hover:!right-[-10px] hover:!rounded-r-full hover:!rounded-l-none !cursor-crosshair transition-[colors] duration-150'
style={{
right: '-7px',
top: '50%',
transform: 'translateY(-50%)',
pointerEvents: 'auto',
}}
id='parallel-end-source'
/>
{/* Parallel Configuration Badges */}
<IterationBadges nodeId={id} data={data} iterationType='parallel' />
</Card>
</div>
</>
)
})
ParallelNodeComponent.displayName = 'ParallelNodeComponent'

View File

@@ -0,0 +1,579 @@
import { beforeEach, describe, expect, it, vi } from 'vitest'
import { SubflowNodeComponent } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/subflow-node'
// Shared spies used across mocks
const mockRemoveBlock = vi.fn()
const mockGetNodes = vi.fn()
// Mocks
vi.mock('@/hooks/use-collaborative-workflow', () => ({
useCollaborativeWorkflow: vi.fn(() => ({
collaborativeRemoveBlock: mockRemoveBlock,
})),
}))
vi.mock('@/lib/logs/console/logger', () => ({
createLogger: vi.fn(() => ({
debug: vi.fn(),
info: vi.fn(),
warn: vi.fn(),
error: vi.fn(),
})),
}))
vi.mock('reactflow', () => ({
Handle: ({ id, type, position }: any) => ({ id, type, position }),
Position: {
Top: 'top',
Bottom: 'bottom',
Left: 'left',
Right: 'right',
},
useReactFlow: () => ({
getNodes: mockGetNodes,
}),
memo: (component: any) => component,
}))
vi.mock('react', async () => {
const actual = await vi.importActual<any>('react')
return {
...actual,
memo: (component: any) => component,
useMemo: (fn: any) => fn(),
useRef: () => ({ current: null }),
}
})
vi.mock('@/components/ui/button', () => ({
Button: ({ children, onClick, ...props }: any) => ({ children, onClick, ...props }),
}))
vi.mock('@/components/ui/card', () => ({
Card: ({ children, ...props }: any) => ({ children, ...props }),
}))
vi.mock('@/components/icons', async (importOriginal) => {
const actual = (await importOriginal()) as any
return {
...actual,
StartIcon: ({ className }: any) => ({ className }),
}
})
vi.mock('@/lib/utils', () => ({
cn: (...classes: any[]) => classes.filter(Boolean).join(' '),
}))
vi.mock(
'@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/components/iteration-badges/iteration-badges',
() => ({
IterationBadges: ({ nodeId, iterationType }: any) => ({ nodeId, iterationType }),
})
)
describe('SubflowNodeComponent', () => {
const defaultProps = {
id: 'subflow-1',
type: 'subflowNode',
data: {
width: 500,
height: 300,
isPreview: false,
kind: 'loop' as const,
},
selected: false,
zIndex: 1,
isConnectable: true,
xPos: 0,
yPos: 0,
dragging: false,
}
beforeEach(() => {
vi.clearAllMocks()
mockGetNodes.mockReturnValue([])
})
describe('Component Definition and Structure', () => {
it.concurrent('should be defined as a function component', () => {
expect(SubflowNodeComponent).toBeDefined()
expect(typeof SubflowNodeComponent).toBe('function')
})
it.concurrent('should have correct display name', () => {
expect(SubflowNodeComponent.displayName).toBe('SubflowNodeComponent')
})
it.concurrent('should be a memoized component', () => {
expect(SubflowNodeComponent).toBeDefined()
})
})
describe('Props Validation and Type Safety', () => {
it.concurrent('should accept NodeProps interface', () => {
const validProps = {
id: 'test-id',
type: 'subflowNode' as const,
data: {
width: 400,
height: 300,
isPreview: true,
kind: 'parallel' as const,
},
selected: false,
zIndex: 1,
isConnectable: true,
xPos: 0,
yPos: 0,
dragging: false,
}
expect(() => {
const _component: typeof SubflowNodeComponent = SubflowNodeComponent
expect(_component).toBeDefined()
expect(validProps.type).toBe('subflowNode')
}).not.toThrow()
})
it.concurrent('should handle different data configurations', () => {
const configurations = [
{ width: 500, height: 300, isPreview: false, kind: 'loop' as const },
{ width: 800, height: 600, isPreview: true, kind: 'parallel' as const },
{ width: 0, height: 0, isPreview: false, kind: 'loop' as const },
{ kind: 'loop' as const },
]
configurations.forEach((data) => {
const props = { ...defaultProps, data }
expect(() => {
const _component: typeof SubflowNodeComponent = SubflowNodeComponent
expect(_component).toBeDefined()
expect(props.data).toBeDefined()
}).not.toThrow()
})
})
})
describe('Hook Integration', () => {
it.concurrent('should provide collaborativeRemoveBlock', () => {
expect(mockRemoveBlock).toBeDefined()
expect(typeof mockRemoveBlock).toBe('function')
mockRemoveBlock('test-id')
expect(mockRemoveBlock).toHaveBeenCalledWith('test-id')
})
})
describe('Component Logic Tests', () => {
it.concurrent('should handle nesting level calculation logic', () => {
const testCases = [
{ nodes: [], parentId: undefined, expectedLevel: 0 },
{ nodes: [{ id: 'parent', data: {} }], parentId: 'parent', expectedLevel: 1 },
{
nodes: [
{ id: 'parent', data: { parentId: 'grandparent' } },
{ id: 'grandparent', data: {} },
],
parentId: 'parent',
expectedLevel: 2,
},
]
testCases.forEach(({ nodes, parentId, expectedLevel }) => {
mockGetNodes.mockReturnValue(nodes)
// Simulate the nesting level calculation logic
let level = 0
let currentParentId = parentId
while (currentParentId) {
level++
const parentNode = nodes.find((n) => n.id === currentParentId)
if (!parentNode) break
currentParentId = parentNode.data?.parentId
}
expect(level).toBe(expectedLevel)
})
})
it.concurrent('should handle nested styles generation', () => {
// Test the nested styles logic
const testCases = [
{ nestingLevel: 0, expectedBg: 'rgba(34,197,94,0.05)' },
{ nestingLevel: 1, expectedBg: '#e2e8f030' },
{ nestingLevel: 2, expectedBg: '#cbd5e130' },
]
testCases.forEach(({ nestingLevel, expectedBg }) => {
// Simulate the getNestedStyles logic
const styles: Record<string, string> = {
backgroundColor: 'rgba(34,197,94,0.05)',
}
if (nestingLevel > 0) {
const colors = ['#e2e8f0', '#cbd5e1', '#94a3b8', '#64748b', '#475569']
const colorIndex = (nestingLevel - 1) % colors.length
styles.backgroundColor = `${colors[colorIndex]}30`
}
expect(styles.backgroundColor).toBe(expectedBg)
})
})
})
describe('Component Configuration', () => {
it.concurrent('should handle different dimensions', () => {
const dimensionTests = [
{ width: 500, height: 300 },
{ width: 800, height: 600 },
{ width: 0, height: 0 },
{ width: 10000, height: 10000 },
]
dimensionTests.forEach(({ width, height }) => {
const data = { width, height }
expect(data.width).toBe(width)
expect(data.height).toBe(height)
})
})
})
describe('Event Handling Logic', () => {
it.concurrent('should handle delete button click logic (simulated)', () => {
const mockEvent = { stopPropagation: vi.fn() }
const handleDelete = (e: any, nodeId: string) => {
e.stopPropagation()
mockRemoveBlock(nodeId)
}
handleDelete(mockEvent, 'test-id')
expect(mockEvent.stopPropagation).toHaveBeenCalled()
expect(mockRemoveBlock).toHaveBeenCalledWith('test-id')
})
it.concurrent('should handle event propagation prevention', () => {
const mockEvent = { stopPropagation: vi.fn() }
mockEvent.stopPropagation()
expect(mockEvent.stopPropagation).toHaveBeenCalled()
})
})
describe('Component Data Handling', () => {
it.concurrent('should handle missing data properties gracefully', () => {
const testCases = [
undefined,
{},
{ width: 500 },
{ height: 300 },
{ width: 500, height: 300 },
]
testCases.forEach((data: any) => {
const props = { ...defaultProps, data }
const width = Math.max(0, data?.width || 500)
const height = Math.max(0, data?.height || 300)
expect(width).toBeGreaterThanOrEqual(0)
expect(height).toBeGreaterThanOrEqual(0)
expect(props.type).toBe('subflowNode')
})
})
it.concurrent('should handle parent ID relationships', () => {
const testCases = [
{ parentId: undefined, hasParent: false },
{ parentId: 'parent-1', hasParent: true },
{ parentId: '', hasParent: false },
]
testCases.forEach(({ parentId, hasParent }) => {
const data = { ...defaultProps.data, parentId }
expect(Boolean(data.parentId)).toBe(hasParent)
})
})
})
describe('Loop vs Parallel Kind Specific Tests', () => {
it.concurrent('should generate correct handle IDs for loop kind', () => {
const loopData = { ...defaultProps.data, kind: 'loop' as const }
const startHandleId = loopData.kind === 'loop' ? 'loop-start-source' : 'parallel-start-source'
const endHandleId = loopData.kind === 'loop' ? 'loop-end-source' : 'parallel-end-source'
expect(startHandleId).toBe('loop-start-source')
expect(endHandleId).toBe('loop-end-source')
})
it.concurrent('should generate correct handle IDs for parallel kind', () => {
type SubflowKind = 'loop' | 'parallel'
const testHandleGeneration = (kind: SubflowKind) => {
const startHandleId = kind === 'loop' ? 'loop-start-source' : 'parallel-start-source'
const endHandleId = kind === 'loop' ? 'loop-end-source' : 'parallel-end-source'
return { startHandleId, endHandleId }
}
const result = testHandleGeneration('parallel')
expect(result.startHandleId).toBe('parallel-start-source')
expect(result.endHandleId).toBe('parallel-end-source')
})
it.concurrent('should generate correct background colors for loop kind', () => {
const loopData = { ...defaultProps.data, kind: 'loop' as const }
const startBg = loopData.kind === 'loop' ? '#2FB3FF' : '#FEE12B'
expect(startBg).toBe('#2FB3FF')
})
it.concurrent('should generate correct background colors for parallel kind', () => {
type SubflowKind = 'loop' | 'parallel'
const testBgGeneration = (kind: SubflowKind) => {
return kind === 'loop' ? '#2FB3FF' : '#FEE12B'
}
const startBg = testBgGeneration('parallel')
expect(startBg).toBe('#FEE12B')
})
it.concurrent('should demonstrate handle ID generation for any kind', () => {
type SubflowKind = 'loop' | 'parallel'
const testKind = (kind: SubflowKind) => {
const data = { kind }
const startHandleId = data.kind === 'loop' ? 'loop-start-source' : 'parallel-start-source'
const endHandleId = data.kind === 'loop' ? 'loop-end-source' : 'parallel-end-source'
return { startHandleId, endHandleId }
}
const loopResult = testKind('loop')
expect(loopResult.startHandleId).toBe('loop-start-source')
expect(loopResult.endHandleId).toBe('loop-end-source')
const parallelResult = testKind('parallel')
expect(parallelResult.startHandleId).toBe('parallel-start-source')
expect(parallelResult.endHandleId).toBe('parallel-end-source')
})
it.concurrent('should pass correct iterationType to IterationBadges for loop', () => {
const loopProps = { ...defaultProps, data: { ...defaultProps.data, kind: 'loop' as const } }
// Mock IterationBadges should receive the kind as iterationType
expect(loopProps.data.kind).toBe('loop')
})
it.concurrent('should pass correct iterationType to IterationBadges for parallel', () => {
const parallelProps = {
...defaultProps,
data: { ...defaultProps.data, kind: 'parallel' as const },
}
// Mock IterationBadges should receive the kind as iterationType
expect(parallelProps.data.kind).toBe('parallel')
})
it.concurrent('should handle both kinds in configuration arrays', () => {
const bothKinds = ['loop', 'parallel'] as const
bothKinds.forEach((kind) => {
const data = { ...defaultProps.data, kind }
expect(['loop', 'parallel']).toContain(data.kind)
// Test handle ID generation for both kinds
const startHandleId = data.kind === 'loop' ? 'loop-start-source' : 'parallel-start-source'
const endHandleId = data.kind === 'loop' ? 'loop-end-source' : 'parallel-end-source'
const startBg = data.kind === 'loop' ? '#2FB3FF' : '#FEE12B'
if (kind === 'loop') {
expect(startHandleId).toBe('loop-start-source')
expect(endHandleId).toBe('loop-end-source')
expect(startBg).toBe('#2FB3FF')
} else {
expect(startHandleId).toBe('parallel-start-source')
expect(endHandleId).toBe('parallel-end-source')
expect(startBg).toBe('#FEE12B')
}
})
})
it.concurrent('should maintain consistent styling behavior across both kinds', () => {
const loopProps = { ...defaultProps, data: { ...defaultProps.data, kind: 'loop' as const } }
const parallelProps = {
...defaultProps,
data: { ...defaultProps.data, kind: 'parallel' as const },
}
// Both should have same base properties except kind-specific ones
expect(loopProps.data.width).toBe(parallelProps.data.width)
expect(loopProps.data.height).toBe(parallelProps.data.height)
expect(loopProps.data.isPreview).toBe(parallelProps.data.isPreview)
// But different kinds
expect(loopProps.data.kind).toBe('loop')
expect(parallelProps.data.kind).toBe('parallel')
})
})
describe('Integration with IterationBadges', () => {
it.concurrent('should pass nodeId to IterationBadges', () => {
const testId = 'test-subflow-123'
const props = { ...defaultProps, id: testId }
// Verify the props would be passed correctly
expect(props.id).toBe(testId)
})
it.concurrent('should pass data object to IterationBadges', () => {
const testData = { ...defaultProps.data, customProperty: 'test' }
const props = { ...defaultProps, data: testData }
// Verify the data object structure
expect(props.data).toEqual(testData)
expect(props.data.kind).toBeDefined()
})
it.concurrent('should pass iterationType matching the kind', () => {
const loopProps = { ...defaultProps, data: { ...defaultProps.data, kind: 'loop' as const } }
const parallelProps = {
...defaultProps,
data: { ...defaultProps.data, kind: 'parallel' as const },
}
// The iterationType should match the kind
expect(loopProps.data.kind).toBe('loop')
expect(parallelProps.data.kind).toBe('parallel')
})
})
describe('CSS Class Generation', () => {
it.concurrent('should generate proper CSS classes for nested loops', () => {
const nestingLevel = 2
const expectedBorderClass =
nestingLevel > 0 &&
`border border-[0.5px] ${nestingLevel % 2 === 0 ? 'border-slate-300/60' : 'border-slate-400/60'}`
expect(expectedBorderClass).toBeTruthy()
expect(expectedBorderClass).toContain('border-slate-300/60') // even nesting level
})
it.concurrent('should generate proper CSS classes for odd nested levels', () => {
const nestingLevel = 3
const expectedBorderClass =
nestingLevel > 0 &&
`border border-[0.5px] ${nestingLevel % 2 === 0 ? 'border-slate-300/60' : 'border-slate-400/60'}`
expect(expectedBorderClass).toBeTruthy()
expect(expectedBorderClass).toContain('border-slate-400/60') // odd nesting level
})
it.concurrent('should handle error state styling', () => {
const hasNestedError = true
const errorClasses = hasNestedError && 'border-2 border-red-500 bg-red-50/50'
expect(errorClasses).toBe('border-2 border-red-500 bg-red-50/50')
})
it.concurrent('should handle diff status styling', () => {
const diffStatuses = ['new', 'edited'] as const
diffStatuses.forEach((status) => {
let diffClass = ''
if (status === 'new') {
diffClass = 'bg-green-50/50 ring-2 ring-green-500 dark:bg-green-900/10'
} else if (status === 'edited') {
diffClass = 'bg-orange-50/50 ring-2 ring-orange-500 dark:bg-orange-900/10'
}
expect(diffClass).toBeTruthy()
if (status === 'new') {
expect(diffClass).toContain('ring-green-500')
} else {
expect(diffClass).toContain('ring-orange-500')
}
})
})
})
describe('Edge Cases and Error Handling', () => {
it.concurrent('should handle circular parent references', () => {
const nodes = [
{ id: 'node1', data: { parentId: 'node2' } },
{ id: 'node2', data: { parentId: 'node1' } },
]
mockGetNodes.mockReturnValue(nodes)
let level = 0
let currentParentId = 'node1'
const visited = new Set<string>()
while (currentParentId) {
if (visited.has(currentParentId)) {
break
}
visited.add(currentParentId)
level++
const parentNode = nodes.find((n) => n.id === currentParentId)
if (!parentNode) break
currentParentId = parentNode.data?.parentId
}
expect(level).toBe(2)
expect(visited.has('node1')).toBe(true)
expect(visited.has('node2')).toBe(true)
})
it.concurrent('should handle complex circular reference chains', () => {
const nodes = [
{ id: 'node1', data: { parentId: 'node2' } },
{ id: 'node2', data: { parentId: 'node3' } },
{ id: 'node3', data: { parentId: 'node1' } },
]
mockGetNodes.mockReturnValue(nodes)
let level = 0
let currentParentId = 'node1'
const visited = new Set<string>()
while (currentParentId) {
if (visited.has(currentParentId)) {
break
}
visited.add(currentParentId)
level++
const parentNode = nodes.find((n) => n.id === currentParentId)
if (!parentNode) break
currentParentId = parentNode.data?.parentId
}
expect(level).toBe(3)
expect(visited.size).toBe(3)
})
it.concurrent('should handle self-referencing nodes', () => {
const nodes = [{ id: 'node1', data: { parentId: 'node1' } }]
mockGetNodes.mockReturnValue(nodes)
let level = 0
let currentParentId = 'node1'
const visited = new Set<string>()
while (currentParentId) {
if (visited.has(currentParentId)) {
break
}
visited.add(currentParentId)
level++
const parentNode = nodes.find((n) => n.id === currentParentId)
if (!parentNode) break
currentParentId = parentNode.data?.parentId
}
expect(level).toBe(1)
expect(visited.has('node1')).toBe(true)
})
})
})
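The edge-case tests above all exercise the same cycle-safe parent walk that the component uses to compute nesting depth. A minimal standalone sketch of that traversal, assuming nodes shaped like the mocks in these tests (the FlowNode type and computeNestingLevel name are illustrative, not part of the codebase):

interface FlowNode {
  id: string
  data?: { parentId?: string }
}

// Walk up the parent chain; the visited set terminates the walk on circular or self references.
function computeNestingLevel(nodes: FlowNode[], startParentId?: string): number {
  let level = 0
  let currentParentId = startParentId
  const visited = new Set<string>()
  while (currentParentId) {
    if (visited.has(currentParentId)) break
    visited.add(currentParentId)
    level++
    const parentNode = nodes.find((n) => n.id === currentParentId)
    if (!parentNode) break
    currentParentId = parentNode.data?.parentId
  }
  return level
}

With the two-node cycle from the first test, computeNestingLevel(nodes, 'node1') returns 2, matching the expected level above.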

View File

@@ -6,60 +6,54 @@ import { StartIcon } from '@/components/icons'
import { Button } from '@/components/ui/button'
import { Card } from '@/components/ui/card'
import { cn } from '@/lib/utils'
import { type DiffStatus, hasDiffStatus } from '@/lib/workflows/diff/types'
import { IterationBadges } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/components/iteration-badges/iteration-badges'
import { useCurrentWorkflow } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks'
import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'
// Add these styles to your existing global CSS file or create a separate CSS module
const LoopNodeStyles: React.FC = () => {
const SubflowNodeStyles: React.FC = () => {
return (
<style jsx global>{`
@keyframes loop-node-pulse {
0% { box-shadow: 0 0 0 0 rgba(64, 224, 208, 0.3); }
70% { box-shadow: 0 0 0 6px rgba(64, 224, 208, 0); }
100% { box-shadow: 0 0 0 0 rgba(64, 224, 208, 0); }
0% { box-shadow: 0 0 0 0 rgba(47, 179, 255, 0.3); }
70% { box-shadow: 0 0 0 6px rgba(47, 179, 255, 0); }
100% { box-shadow: 0 0 0 0 rgba(47, 179, 255, 0); }
}
@keyframes parallel-node-pulse {
0% { box-shadow: 0 0 0 0 rgba(139, 195, 74, 0.3); }
70% { box-shadow: 0 0 0 6px rgba(139, 195, 74, 0); }
100% { box-shadow: 0 0 0 0 rgba(139, 195, 74, 0); }
}
.loop-node-drag-over {
animation: loop-node-pulse 1.2s cubic-bezier(0.4, 0, 0.6, 1) infinite;
border-style: solid !important;
background-color: rgba(47, 179, 255, 0.08) !important;
box-shadow: 0 0 0 8px rgba(47, 179, 255, 0.1);
}
/* Ensure parent borders are visible when hovering over resize controls */
.parallel-node-drag-over {
animation: parallel-node-pulse 1.2s cubic-bezier(0.4, 0, 0.6, 1) infinite;
border-style: solid !important;
background-color: rgba(139, 195, 74, 0.08) !important;
box-shadow: 0 0 0 8px rgba(139, 195, 74, 0.1);
}
.react-flow__node-group:hover,
.hover-highlight {
border-color: #1e293b !important;
}
/* Ensure hover effects work well */
.group-node-container:hover .react-flow__resize-control.bottom-right {
opacity: 1 !important;
visibility: visible !important;
}
/* Prevent jumpy drag behavior */
.loop-drop-container .react-flow__node {
transform-origin: center;
position: absolute;
}
/* Remove default border from React Flow group nodes */
.react-flow__node-group {
border: none;
background-color: transparent;
outline: none;
box-shadow: none;
}
/* Ensure child nodes stay within parent bounds */
.react-flow__node[data-parent-node-id] .react-flow__handle {
z-index: 30;
}
/* Enhanced drag detection */
.react-flow__node-group.dragging-over {
background-color: rgba(34,197,94,0.05);
transition: all 0.2s ease-in-out;
@@ -68,21 +62,30 @@ const LoopNodeStyles: React.FC = () => {
)
}
export const LoopNodeComponent = memo(({ data, selected, id }: NodeProps) => {
export interface SubflowNodeData {
width?: number
height?: number
parentId?: string
extent?: 'parent'
hasNestedError?: boolean
isPreview?: boolean
kind: 'loop' | 'parallel'
}
export const SubflowNodeComponent = memo(({ data, id }: NodeProps<SubflowNodeData>) => {
const { getNodes } = useReactFlow()
const { collaborativeRemoveBlock } = useCollaborativeWorkflow()
const blockRef = useRef<HTMLDivElement>(null)
// Use the clean abstraction for current workflow state
const currentWorkflow = useCurrentWorkflow()
const currentBlock = currentWorkflow.getBlockById(id)
const diffStatus =
currentWorkflow.isDiffMode && currentBlock ? (currentBlock as any).is_diff : undefined
const diffStatus: DiffStatus =
currentWorkflow.isDiffMode && currentBlock && hasDiffStatus(currentBlock)
? currentBlock.is_diff
: undefined
// Check if this is preview mode
const isPreview = data?.isPreview || false
// Determine nesting level by counting parents
const nestingLevel = useMemo(() => {
let level = 0
let currentParentId = data?.parentId
@@ -97,42 +100,37 @@ export const LoopNodeComponent = memo(({ data, selected, id }: NodeProps) => {
return level
}, [id, data?.parentId, getNodes])
// Generate different background styles based on nesting level
const getNestedStyles = () => {
// Base styles
const styles: Record<string, string> = {
backgroundColor: 'rgba(0, 0, 0, 0.02)',
}
// Apply nested styles
if (nestingLevel > 0) {
// Each nesting level gets a different color
const colors = ['#e2e8f0', '#cbd5e1', '#94a3b8', '#64748b', '#475569']
const colorIndex = (nestingLevel - 1) % colors.length
styles.backgroundColor = `${colors[colorIndex]}30` // Slightly more visible background
styles.backgroundColor = `${colors[colorIndex]}30`
}
return styles
}
const nestedStyles = getNestedStyles()
const startHandleId = data.kind === 'loop' ? 'loop-start-source' : 'parallel-start-source'
const endHandleId = data.kind === 'loop' ? 'loop-end-source' : 'parallel-end-source'
const startBg = data.kind === 'loop' ? '#2FB3FF' : '#FEE12B'
return (
<>
<LoopNodeStyles />
<SubflowNodeStyles />
<div className='group relative'>
<Card
ref={blockRef}
className={cn(
' relative cursor-default select-none',
'relative cursor-default select-none',
'transition-block-bg transition-ring',
'z-[20]',
data?.state === 'valid',
nestingLevel > 0 &&
`border border-[0.5px] ${nestingLevel % 2 === 0 ? 'border-slate-300/60' : 'border-slate-400/60'}`,
data?.hasNestedError && 'border-2 border-red-500 bg-red-50/50',
// Diff highlighting
diffStatus === 'new' && 'bg-green-50/50 ring-2 ring-green-500 dark:bg-green-900/10',
diffStatus === 'edited' &&
'bg-orange-50/50 ring-2 ring-orange-500 dark:bg-orange-900/10'
@@ -146,10 +144,9 @@ export const LoopNodeComponent = memo(({ data, selected, id }: NodeProps) => {
pointerEvents: isPreview ? 'none' : 'all',
}}
data-node-id={id}
data-type='loopNode'
data-type='subflowNode'
data-nesting-level={nestingLevel}
>
{/* Critical drag handle that controls only the loop node movement */}
{!isPreview && (
<div
className='workflow-drag-handle absolute top-0 right-0 left-0 z-10 h-10 cursor-move'
@@ -157,7 +154,6 @@ export const LoopNodeComponent = memo(({ data, selected, id }: NodeProps) => {
/>
)}
{/* Custom visible resize handle */}
{!isPreview && (
<div
className='absolute right-2 bottom-2 z-20 flex h-8 w-8 cursor-se-resize items-center justify-center text-muted-foreground'
@@ -165,7 +161,6 @@ export const LoopNodeComponent = memo(({ data, selected, id }: NodeProps) => {
/>
)}
{/* Child nodes container - Enable pointer events to allow dragging of children */}
<div
className='h-[calc(100%-10px)] p-4'
data-dragarea='true'
@@ -175,7 +170,6 @@ export const LoopNodeComponent = memo(({ data, selected, id }: NodeProps) => {
pointerEvents: isPreview ? 'none' : 'auto',
}}
>
{/* Delete button - styled like in action-bar.tsx */}
{!isPreview && (
<Button
variant='ghost'
@@ -191,12 +185,12 @@ export const LoopNodeComponent = memo(({ data, selected, id }: NodeProps) => {
</Button>
)}
{/* Loop Start Block */}
{/* Subflow Start */}
<div
className='-translate-y-1/2 absolute top-1/2 left-8 flex h-10 w-10 transform items-center justify-center rounded-md bg-[#2FB3FF] p-2'
style={{ pointerEvents: isPreview ? 'none' : 'auto' }}
className='-translate-y-1/2 absolute top-1/2 left-8 flex h-10 w-10 transform items-center justify-center rounded-md p-2'
style={{ pointerEvents: isPreview ? 'none' : 'auto', backgroundColor: startBg }}
data-parent-id={id}
data-node-role='loop-start'
data-node-role={`${data.kind}-start`}
data-extent='parent'
>
<StartIcon className='h-6 w-6 text-white' />
@@ -204,7 +198,7 @@ export const LoopNodeComponent = memo(({ data, selected, id }: NodeProps) => {
<Handle
type='source'
position={Position.Right}
id='loop-start-source'
id={startHandleId}
className='!w-[6px] !h-4 !bg-slate-300 dark:!bg-slate-500 !rounded-[2px] !border-none !z-[30] hover:!w-[10px] hover:!right-[-10px] hover:!rounded-r-full hover:!rounded-l-none !cursor-crosshair transition-[colors] duration-150'
style={{
right: '-6px',
@@ -241,15 +235,14 @@ export const LoopNodeComponent = memo(({ data, selected, id }: NodeProps) => {
transform: 'translateY(-50%)',
pointerEvents: 'auto',
}}
id='loop-end-source'
id={endHandleId}
/>
{/* Loop Configuration Badges */}
<IterationBadges nodeId={id} data={data} iterationType='loop' />
<IterationBadges nodeId={id} data={data} iterationType={data.kind} />
</Card>
</div>
</>
)
})
LoopNodeComponent.displayName = 'LoopNodeComponent'
SubflowNodeComponent.displayName = 'SubflowNodeComponent'
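With loop and parallel collapsed into one component, handle IDs and the start-block color are derived from data.kind, so a canvas node only needs to carry that kind in its data. A hypothetical node object for this component (the inline data type mirrors SubflowNodeData above; the id and position are illustrative):

import type { Node } from 'reactflow'

// Illustrative only: a loop container rendered by SubflowNodeComponent.
const loopNode: Node<{ kind: 'loop' | 'parallel'; width?: number; height?: number; isPreview?: boolean }> = {
  id: 'loop-1',
  type: 'subflowNode',
  position: { x: 0, y: 0 },
  dragHandle: '.workflow-drag-handle',
  data: { kind: 'loop', width: 500, height: 300 },
}

This is the same shape the canvas builds later in this diff, where block.type ('loop' or 'parallel') is mapped onto data.kind.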

View File

@@ -1,4 +1,4 @@
import { ArrowLeftRight, ArrowUpDown, Circle, CircleOff, Copy, Trash2 } from 'lucide-react'
import { ArrowLeftRight, ArrowUpDown, Circle, CircleOff, Copy, LogOut, Trash2 } from 'lucide-react'
import { Button } from '@/components/ui/button'
import { Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui/tooltip'
import { cn } from '@/lib/utils'
@@ -23,6 +23,10 @@ export function ActionBar({ blockId, blockType, disabled = false }: ActionBarPro
const horizontalHandles = useWorkflowStore(
(state) => state.blocks[blockId]?.horizontalHandles ?? false
)
const parentId = useWorkflowStore((state) => state.blocks[blockId]?.data?.parentId)
const parentType = useWorkflowStore((state) =>
parentId ? state.blocks[parentId]?.type : undefined
)
const userPermissions = useUserPermissionsContext()
const isStarterBlock = blockType === 'starter'
@@ -102,6 +106,33 @@ export function ActionBar({ blockId, blockType, disabled = false }: ActionBarPro
</Tooltip>
)}
{/* Remove from subflow - only show when inside loop/parallel */}
{!isStarterBlock && parentId && (parentType === 'loop' || parentType === 'parallel') && (
<Tooltip>
<TooltipTrigger asChild>
<Button
variant='ghost'
size='sm'
onClick={() => {
if (!disabled && userPermissions.canEdit) {
window.dispatchEvent(
new CustomEvent('remove-from-subflow', { detail: { blockId } })
)
}
}}
className={cn(
'text-gray-500',
(disabled || !userPermissions.canEdit) && 'cursor-not-allowed opacity-50'
)}
disabled={disabled || !userPermissions.canEdit}
>
<LogOut className='h-4 w-4' />
</Button>
</TooltipTrigger>
<TooltipContent side='right'>{getTooltipMessage('Remove From Subflow')}</TooltipContent>
</Tooltip>
)}
<Tooltip>
<TooltipTrigger asChild>
<Button

View File

@@ -34,6 +34,7 @@ interface FileSelectorInputProps {
disabled: boolean
isPreview?: boolean
previewValue?: any | null
previewContextValues?: Record<string, any>
}
export function FileSelectorInput({
@@ -42,6 +43,7 @@ export function FileSelectorInput({
disabled,
isPreview = false,
previewValue,
previewContextValues,
}: FileSelectorInputProps) {
const { getValue } = useSubBlockStore()
const { collaborativeSetSubblockValue } = useCollaborativeWorkflow()
@@ -49,6 +51,23 @@ export function FileSelectorInput({
const params = useParams()
const workflowIdFromUrl = (params?.workflowId as string) || activeWorkflowId || ''
// Helper to coerce various preview value shapes into a string ID
const coerceToIdString = (val: unknown): string => {
if (!val) return ''
if (typeof val === 'string') return val
if (typeof val === 'number') return String(val)
if (typeof val === 'object') {
const obj = val as Record<string, any>
return (obj.id ||
obj.fileId ||
obj.value ||
obj.documentId ||
obj.spreadsheetId ||
'') as string
}
return ''
}
// Use the proper hook to get the current value and setter
const [storeValue, setStoreValue] = useSubBlockValue(blockId, subBlock.id)
const [selectedFileId, setSelectedFileId] = useState<string>('')
@@ -108,19 +127,37 @@ export function FileSelectorInput({
const isMicrosoftSharePoint = provider === 'microsoft' && subBlock.serviceId === 'sharepoint'
const isMicrosoftPlanner = provider === 'microsoft-planner'
// For Confluence and Jira, we need the domain and credentials
const domain = isConfluence || isJira ? (getValue(blockId, 'domain') as string) || '' : ''
const jiraCredential = isJira ? (getValue(blockId, 'credential') as string) || '' : ''
const domain =
isConfluence || isJira
? (isPreview && previewContextValues?.domain?.value) ||
(getValue(blockId, 'domain') as string) ||
''
: ''
const jiraCredential = isJira
? (isPreview && previewContextValues?.credential?.value) ||
(getValue(blockId, 'credential') as string) ||
''
: ''
// For Discord, we need the bot token and server ID
const botToken = isDiscord ? (getValue(blockId, 'botToken') as string) || '' : ''
const serverId = isDiscord ? (getValue(blockId, 'serverId') as string) || '' : ''
const botToken = isDiscord
? (isPreview && previewContextValues?.botToken?.value) ||
(getValue(blockId, 'botToken') as string) ||
''
: ''
const serverId = isDiscord
? (isPreview && previewContextValues?.serverId?.value) ||
(getValue(blockId, 'serverId') as string) ||
''
: ''
// Use preview value when in preview mode, otherwise use store value
const value = isPreview ? previewValue : storeValue
// Keep local selection in sync with store value (and preview)
useEffect(() => {
const effective = isPreview && previewValue !== undefined ? previewValue : storeValue
if (typeof effective === 'string' && effective !== '') {
const raw = isPreview && previewValue !== undefined ? previewValue : storeValue
const effective = coerceToIdString(raw)
if (effective) {
if (isJira) {
setSelectedIssueId(effective)
} else if (isDiscord) {
@@ -385,7 +422,7 @@ export function FileSelectorInput({
<TooltipTrigger asChild>
<div className='w-full'>
<MicrosoftFileSelector
value={selectedFileId}
value={coerceToIdString(selectedFileId)}
onChange={handleFileChange}
provider='microsoft-excel'
requiredScopes={subBlock.requiredScopes || []}
@@ -418,7 +455,7 @@ export function FileSelectorInput({
<TooltipTrigger asChild>
<div className='w-full'>
<MicrosoftFileSelector
value={selectedFileId}
value={coerceToIdString(selectedFileId)}
onChange={handleFileChange}
provider='microsoft-word'
requiredScopes={subBlock.requiredScopes || []}
@@ -450,7 +487,7 @@ export function FileSelectorInput({
<TooltipTrigger asChild>
<div className='w-full'>
<MicrosoftFileSelector
value={selectedFileId}
value={coerceToIdString(selectedFileId)}
onChange={handleFileChange}
provider='microsoft'
requiredScopes={subBlock.requiredScopes || []}
@@ -482,7 +519,7 @@ export function FileSelectorInput({
<TooltipTrigger asChild>
<div className='w-full'>
<MicrosoftFileSelector
value={selectedFileId}
value={coerceToIdString(selectedFileId)}
onChange={handleFileChange}
provider='microsoft'
requiredScopes={subBlock.requiredScopes || []}
@@ -662,11 +699,9 @@ export function FileSelectorInput({
// Default to Google Drive picker
return (
<GoogleDrivePicker
value={
(isPreview && previewValue !== undefined
? (previewValue as string)
: (storeValue as string)) || ''
}
value={coerceToIdString(
(isPreview && previewValue !== undefined ? previewValue : storeValue) as any
)}
onChange={(val, info) => {
setSelectedFileId(val)
setFileInfo(info || null)
@@ -682,7 +717,11 @@ export function FileSelectorInput({
onFileInfoChange={setFileInfo}
clientId={clientId}
apiKey={apiKey}
credentialId={(getValue(blockId, 'credential') as string) || ''}
credentialId={
((isPreview && previewContextValues?.credential?.value) ||
(getValue(blockId, 'credential') as string) ||
'') as string
}
workflowId={workflowIdFromUrl}
/>
)

View File

@@ -389,6 +389,8 @@ export function LongInput({
fontFamily: 'inherit',
lineHeight: 'inherit',
height: `${height}px`,
wordBreak: 'break-word',
whiteSpace: 'pre-wrap',
}}
/>
<div
@@ -397,7 +399,7 @@ export function LongInput({
style={{
fontFamily: 'inherit',
lineHeight: 'inherit',
width: textareaRef.current ? `${textareaRef.current.clientWidth}px` : '100%',
width: '100%',
height: `${height}px`,
overflow: 'hidden',
}}

View File

@@ -55,6 +55,7 @@ interface ToolInputProps {
isPreview?: boolean
previewValue?: any
disabled?: boolean
allowExpandInPreview?: boolean
}
interface StoredTool {
@@ -105,6 +106,7 @@ function FileSelectorSyncWrapper({
onChange,
uiComponent,
disabled,
previewContextValues,
}: {
blockId: string
paramId: string
@@ -112,6 +114,7 @@ function FileSelectorSyncWrapper({
onChange: (value: string) => void
uiComponent: any
disabled: boolean
previewContextValues?: Record<string, any>
}) {
return (
<GenericSyncWrapper blockId={blockId} paramId={paramId} value={value} onChange={onChange}>
@@ -128,6 +131,7 @@ function FileSelectorSyncWrapper({
placeholder: uiComponent.placeholder,
}}
disabled={disabled}
previewContextValues={previewContextValues}
/>
</GenericSyncWrapper>
)
@@ -398,6 +402,7 @@ export function ToolInput({
isPreview = false,
previewValue,
disabled = false,
allowExpandInPreview,
}: ToolInputProps) {
const [storeValue, setStoreValue] = useSubBlockValue(blockId, subBlockId)
const [open, setOpen] = useState(false)
@@ -775,8 +780,19 @@ export function ToolInput({
)
}
// Local expansion overrides for preview/diff mode
const [previewExpanded, setPreviewExpanded] = useState<Record<number, boolean>>({})
const toggleToolExpansion = (toolIndex: number) => {
if (isPreview || disabled) return
if ((isPreview && !allowExpandInPreview) || disabled) return
if (isPreview) {
setPreviewExpanded((prev) => ({
...prev,
[toolIndex]: !(prev[toolIndex] ?? !!selectedTools[toolIndex]?.isExpanded),
}))
return
}
setStoreValue(
selectedTools.map((tool, index) =>
@@ -929,7 +945,8 @@ export function ToolInput({
param: ToolParameterConfig,
value: string,
onChange: (value: string) => void,
toolIndex?: number
toolIndex?: number,
currentToolParams?: Record<string, string>
) => {
// Create unique blockId for tool parameters to avoid conflicts with main block
const uniqueBlockId = toolIndex !== undefined ? `${blockId}-tool-${toolIndex}` : blockId
@@ -1076,6 +1093,7 @@ export function ToolInput({
onChange={onChange}
uiComponent={uiComponent}
disabled={disabled}
previewContextValues={currentToolParams as any}
/>
)
@@ -1363,6 +1381,9 @@ export function ToolInput({
const oauthConfig = !isCustomTool ? getToolOAuthConfig(currentToolId) : null
// Tools are always expandable so users can access the interface
const isExpandedForDisplay = isPreview
? (previewExpanded[toolIndex] ?? !!tool.isExpanded)
: !!tool.isExpanded
return (
<div
@@ -1458,29 +1479,27 @@ export function ToolInput({
</span>
<span
className={`font-medium text-xs ${
tool.usageControl === 'force'
? 'block text-muted-foreground'
: 'hidden'
tool.usageControl === 'force' ? 'block' : 'hidden'
}`}
>
Force
</span>
<span
className={`font-medium text-xs ${
tool.usageControl === 'none'
? 'block text-muted-foreground'
: 'hidden'
tool.usageControl === 'none' ? 'block' : 'hidden'
}`}
>
Deny
None
</span>
</Toggle>
</TooltipTrigger>
<TooltipContent side='bottom' className='max-w-[240px] p-2'>
<p className='text-xs'>
<TooltipContent className='max-w-[280px] p-2' side='top'>
<p className='text-muted-foreground text-xs'>
Control how the model uses this tool in its response.
{tool.usageControl === 'auto' && (
<span>
<span className='font-medium'>Auto:</span> Let the agent decide
{' '}
<span className='font-medium'>Auto:</span> Let the model decide
when to use the tool
</span>
)}
@@ -1511,7 +1530,7 @@ export function ToolInput({
</div>
</div>
{!isCustomTool && tool.isExpanded && (
{!isCustomTool && isExpandedForDisplay && (
<div className='space-y-3 overflow-visible p-3'>
{/* Operation dropdown for tools with multiple operations */}
{(() => {
@@ -1660,7 +1679,8 @@ export function ToolInput({
param,
tool.params[param.id] || '',
(value) => handleParamChange(toolIndex, param.id, value),
toolIndex
toolIndex,
tool.params
)
) : (
<ShortInput

View File

@@ -3,6 +3,7 @@ import { useEffect, useState } from 'react'
import { AlertTriangle, Info } from 'lucide-react'
import { Label, Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui'
import { cn } from '@/lib/utils'
import type { FieldDiffStatus } from '@/lib/workflows/diff/types'
import {
ChannelSelectorInput,
CheckboxList,
@@ -43,7 +44,8 @@ interface SubBlockProps {
isPreview?: boolean
subBlockValues?: Record<string, any>
disabled?: boolean
fieldDiffStatus?: 'changed' | 'unchanged'
fieldDiffStatus?: FieldDiffStatus
allowExpandInPreview?: boolean
}
export function SubBlock({
@@ -54,6 +56,7 @@ export function SubBlock({
subBlockValues,
disabled = false,
fieldDiffStatus,
allowExpandInPreview,
}: SubBlockProps) {
const [isValidJson, setIsValidJson] = useState(true)
@@ -211,7 +214,8 @@ export function SubBlock({
subBlockId={config.id}
isPreview={isPreview}
previewValue={previewValue}
disabled={isDisabled}
disabled={allowExpandInPreview ? false : isDisabled}
allowExpandInPreview={allowExpandInPreview}
/>
)
case 'checkbox-list':
@@ -355,6 +359,7 @@ export function SubBlock({
disabled={isDisabled}
isPreview={isPreview}
previewValue={previewValue}
previewContextValues={subBlockValues}
/>
)
case 'project-selector':

View File

@@ -8,6 +8,7 @@ import { Card } from '@/components/ui/card'
import { Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui/tooltip'
import { parseCronToHumanReadable } from '@/lib/schedules/utils'
import { cn, validateName } from '@/lib/utils'
import { type DiffStatus, hasDiffStatus } from '@/lib/workflows/diff/types'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
import type { BlockConfig, SubBlockConfig, SubBlockType } from '@/blocks/types'
import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'
@@ -76,12 +77,16 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
: (currentBlock?.enabled ?? true)
// Get diff status from the block itself (set by diff engine)
const diffStatus =
currentWorkflow.isDiffMode && currentBlock ? (currentBlock as any).is_diff : undefined
const diffStatus: DiffStatus =
currentWorkflow.isDiffMode && currentBlock && hasDiffStatus(currentBlock)
? currentBlock.is_diff
: undefined
// Get field-level diff information
// Get field-level diff information for this specific block
const fieldDiff =
currentWorkflow.isDiffMode && currentBlock ? (currentBlock as any).field_diffs : undefined
currentWorkflow.isDiffMode && currentBlock && hasDiffStatus(currentBlock)
? currentBlock.field_diffs?.[id]
: undefined
// Debug: Log diff status for this block
useEffect(() => {
@@ -151,6 +156,24 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
const blockAdvancedMode = useWorkflowStore((state) => state.blocks[id]?.advancedMode ?? false)
const blockTriggerMode = useWorkflowStore((state) => state.blocks[id]?.triggerMode ?? false)
// Local UI state for diff mode controls
const [diffIsWide, setDiffIsWide] = useState<boolean>(isWide)
const [diffAdvancedMode, setDiffAdvancedMode] = useState<boolean>(blockAdvancedMode)
const [diffTriggerMode, setDiffTriggerMode] = useState<boolean>(blockTriggerMode)
useEffect(() => {
if (currentWorkflow.isDiffMode) {
setDiffIsWide(isWide)
setDiffAdvancedMode(blockAdvancedMode)
setDiffTriggerMode(blockTriggerMode)
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [currentWorkflow.isDiffMode, id])
const displayIsWide = currentWorkflow.isDiffMode ? diffIsWide : isWide
const displayAdvancedMode = currentWorkflow.isDiffMode ? diffAdvancedMode : blockAdvancedMode
const displayTriggerMode = currentWorkflow.isDiffMode ? diffTriggerMode : blockTriggerMode
// Collaborative workflow actions
const {
collaborativeUpdateBlockName,
@@ -414,6 +437,8 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
const isAdvancedMode = useWorkflowStore.getState().blocks[blockId]?.advancedMode ?? false
const isTriggerMode = useWorkflowStore.getState().blocks[blockId]?.triggerMode ?? false
const effectiveAdvanced = currentWorkflow.isDiffMode ? displayAdvancedMode : isAdvancedMode
const effectiveTrigger = currentWorkflow.isDiffMode ? displayTriggerMode : isTriggerMode
// Filter visible blocks and those that meet their conditions
const visibleSubBlocks = subBlocks.filter((block) => {
@@ -423,18 +448,18 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
if (block.type === ('trigger-config' as SubBlockType)) {
// Show trigger-config blocks when in trigger mode OR for pure trigger blocks
const isPureTriggerBlock = config?.triggers?.enabled && config.category === 'triggers'
return isTriggerMode || isPureTriggerBlock
return effectiveTrigger || isPureTriggerBlock
}
if (isTriggerMode && block.type !== ('trigger-config' as SubBlockType)) {
if (effectiveTrigger && block.type !== ('trigger-config' as SubBlockType)) {
// In trigger mode, hide all non-trigger-config blocks
return false
}
// Filter by mode if specified
if (block.mode) {
if (block.mode === 'basic' && isAdvancedMode) return false
if (block.mode === 'advanced' && !isAdvancedMode) return false
if (block.mode === 'basic' && effectiveAdvanced) return false
if (block.mode === 'advanced' && !effectiveAdvanced) return false
}
// If there's no condition, the block should be shown
@@ -562,7 +587,7 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
className={cn(
'relative cursor-default select-none shadow-md',
'transition-block-bg transition-ring',
isWide ? 'w-[480px]' : 'w-[320px]',
displayIsWide ? 'w-[480px]' : 'w-[320px]',
!isEnabled && 'shadow-sm',
isActive && 'animate-pulse-ring ring-2 ring-blue-500',
isPending && 'ring-2 ring-amber-500',
@@ -658,7 +683,7 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
onClick={handleNameClick}
title={name}
style={{
maxWidth: !isEnabled ? (isWide ? '200px' : '140px') : '180px',
maxWidth: !isEnabled ? (displayIsWide ? '200px' : '140px') : '180px',
}}
>
{name}
@@ -758,26 +783,30 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
variant='ghost'
size='sm'
onClick={() => {
if (userPermissions.canEdit) {
if (currentWorkflow.isDiffMode) {
setDiffAdvancedMode((prev) => !prev)
} else if (userPermissions.canEdit) {
collaborativeToggleBlockAdvancedMode(id)
}
}}
className={cn(
'h-7 p-1 text-gray-500',
blockAdvancedMode && 'text-[var(--brand-primary-hex)]',
!userPermissions.canEdit && 'cursor-not-allowed opacity-50'
displayAdvancedMode && 'text-[var(--brand-primary-hex)]',
!userPermissions.canEdit &&
!currentWorkflow.isDiffMode &&
'cursor-not-allowed opacity-50'
)}
disabled={!userPermissions.canEdit}
disabled={!userPermissions.canEdit && !currentWorkflow.isDiffMode}
>
<Code className='h-5 w-5' />
</Button>
</TooltipTrigger>
<TooltipContent side='top'>
{!userPermissions.canEdit
{!userPermissions.canEdit && !currentWorkflow.isDiffMode
? userPermissions.isOfflineMode
? 'Connection lost - please refresh'
: 'Read-only mode'
: blockAdvancedMode
: displayAdvancedMode
? 'Switch to Basic Mode'
: 'Switch to Advanced Mode'}
</TooltipContent>
@@ -791,27 +820,31 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
variant='ghost'
size='sm'
onClick={() => {
if (userPermissions.canEdit) {
if (currentWorkflow.isDiffMode) {
setDiffTriggerMode((prev) => !prev)
} else if (userPermissions.canEdit) {
// Toggle trigger mode using collaborative function
collaborativeToggleBlockTriggerMode(id)
}
}}
className={cn(
'h-7 p-1 text-gray-500',
blockTriggerMode && 'text-[#22C55E]',
!userPermissions.canEdit && 'cursor-not-allowed opacity-50'
displayTriggerMode && 'text-[#22C55E]',
!userPermissions.canEdit &&
!currentWorkflow.isDiffMode &&
'cursor-not-allowed opacity-50'
)}
disabled={!userPermissions.canEdit}
disabled={!userPermissions.canEdit && !currentWorkflow.isDiffMode}
>
<Zap className='h-5 w-5' />
</Button>
</TooltipTrigger>
<TooltipContent side='top'>
{!userPermissions.canEdit
{!userPermissions.canEdit && !currentWorkflow.isDiffMode
? userPermissions.isOfflineMode
? 'Connection lost - please refresh'
: 'Read-only mode'
: blockTriggerMode
: displayTriggerMode
? 'Switch to Action Mode'
: 'Switch to Trigger Mode'}
</TooltipContent>
@@ -892,17 +925,21 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
variant='ghost'
size='sm'
onClick={() => {
if (userPermissions.canEdit) {
if (currentWorkflow.isDiffMode) {
setDiffIsWide((prev) => !prev)
} else if (userPermissions.canEdit) {
collaborativeToggleBlockWide(id)
}
}}
className={cn(
'h-7 p-1 text-gray-500',
!userPermissions.canEdit && 'cursor-not-allowed opacity-50'
!userPermissions.canEdit &&
!currentWorkflow.isDiffMode &&
'cursor-not-allowed opacity-50'
)}
disabled={!userPermissions.canEdit}
disabled={!userPermissions.canEdit && !currentWorkflow.isDiffMode}
>
{isWide ? (
{displayIsWide ? (
<RectangleHorizontal className='h-5 w-5' />
) : (
<RectangleVertical className='h-5 w-5' />
@@ -910,11 +947,11 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
</Button>
</TooltipTrigger>
<TooltipContent side='top'>
{!userPermissions.canEdit
{!userPermissions.canEdit && !currentWorkflow.isDiffMode
? userPermissions.isOfflineMode
? 'Connection lost - please refresh'
: 'Read-only mode'
: isWide
: displayIsWide
? 'Narrow Block'
: 'Expand Block'}
</TooltipContent>
@@ -942,8 +979,13 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
blockId={id}
config={subBlock}
isConnecting={isConnecting}
isPreview={data.isPreview}
subBlockValues={data.subBlockValues}
isPreview={data.isPreview || currentWorkflow.isDiffMode}
subBlockValues={
data.subBlockValues ||
(currentWorkflow.isDiffMode && currentBlock
? (currentBlock as any).subBlocks
: undefined)
}
disabled={!userPermissions.canEdit}
fieldDiffStatus={
fieldDiff?.changed_fields?.includes(subBlock.id)
@@ -952,6 +994,7 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
? 'unchanged'
: undefined
}
allowExpandInPreview={currentWorkflow.isDiffMode}
/>
</div>
))}

View File

@@ -1,6 +1,7 @@
import { useEffect } from 'react'
import { X } from 'lucide-react'
import { BaseEdge, EdgeLabelRenderer, type EdgeProps, getSmoothStepPath } from 'reactflow'
import type { EdgeDiffStatus } from '@/lib/workflows/diff/types'
import { useWorkflowDiffStore } from '@/stores/workflow-diff'
import { useCurrentWorkflow } from '../../hooks'
@@ -114,7 +115,7 @@ export const WorkflowEdge = ({
}, [diffAnalysis, id, currentWorkflow.blocks, currentWorkflow.edges, isShowingDiff])
// Determine edge diff status
let edgeDiffStatus: 'new' | 'deleted' | 'unchanged' | null = null
let edgeDiffStatus: EdgeDiffStatus = null
// Only attempt to determine diff status if all required data is available
if (diffAnalysis?.edge_diff && edgeIdentifier && isDiffReady) {

View File

@@ -14,7 +14,8 @@ const isContainerType = (blockType: string): boolean => {
blockType === 'loop' ||
blockType === 'parallel' ||
blockType === 'loopNode' ||
blockType === 'parallelNode'
blockType === 'parallelNode' ||
blockType === 'subflowNode'
)
}
@@ -325,7 +326,10 @@ export const updateNodeParent = (
} else if (currentParentId) {
const absolutePosition = getNodeAbsolutePosition(nodeId, getNodes)
// First set the absolute position so the node visually stays in place
updateBlockPosition(nodeId, absolutePosition)
// Then clear the parent relationship in the store (empty string removes parentId/extent)
updateParentId(nodeId, '', 'parent')
}
resizeLoopNodes()

View File

@@ -18,8 +18,7 @@ import { ControlBar } from '@/app/workspace/[workspaceId]/w/[workflowId]/compone
import { DiffControls } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/diff-controls'
import { ErrorBoundary } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/error/index'
import { Panel } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/panel'
import { LoopNodeComponent } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/loop/loop-node'
import { ParallelNodeComponent } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/parallel/parallel-node'
import { SubflowNodeComponent } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/subflow-node'
import { WorkflowBlock } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/workflow-block'
import { WorkflowEdge } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-edge/workflow-edge'
import { useCurrentWorkflow } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks'
@@ -48,8 +47,7 @@ const logger = createLogger('Workflow')
// Define custom node and edge types
const nodeTypes: NodeTypes = {
workflowBlock: WorkflowBlock,
loopNode: LoopNodeComponent,
parallelNode: ParallelNodeComponent,
subflowNode: SubflowNodeComponent,
}
const edgeTypes: EdgeTypes = { workflowEdge: WorkflowEdge }
@@ -343,6 +341,35 @@ const WorkflowContent = React.memo(() => {
}
}, [debouncedAutoLayout])
// Listen for explicit remove-from-subflow actions from ActionBar
useEffect(() => {
const handleRemoveFromSubflow = (event: Event) => {
const customEvent = event as CustomEvent<{ blockId: string }>
const { blockId } = customEvent.detail || ({} as any)
if (!blockId) return
try {
// Remove parent-child relationship while preserving absolute position
updateNodeParent(blockId, null)
// Remove all edges connected to this block
const connectedEdges = edgesForDisplay.filter(
(e) => e.source === blockId || e.target === blockId
)
connectedEdges.forEach((edge) => {
removeEdge(edge.id)
})
} catch (err) {
logger.error('Failed to remove from subflow', { err })
}
}
window.addEventListener('remove-from-subflow', handleRemoveFromSubflow as EventListener)
return () =>
window.removeEventListener('remove-from-subflow', handleRemoveFromSubflow as EventListener)
}, [getNodes, updateNodeParent, removeEdge, edgesForDisplay])
// Handle drops
const findClosestOutput = useCallback(
(newNodePosition: { x: number; y: number }): BlockData | null => {
@@ -451,7 +478,7 @@ const WorkflowContent = React.memo(() => {
{
width: 500,
height: 300,
type: type === 'loop' ? 'loopNode' : 'parallelNode',
type: 'subflowNode',
},
undefined,
undefined,
@@ -571,7 +598,7 @@ const WorkflowContent = React.memo(() => {
addBlock(id, data.type, name, relativePosition, {
width: 500,
height: 300,
type: data.type === 'loop' ? 'loopNode' : 'parallelNode',
type: 'subflowNode',
parentId: containerInfo.loopId,
extent: 'parent',
})
@@ -607,7 +634,7 @@ const WorkflowContent = React.memo(() => {
{
width: 500,
height: 300,
type: data.type === 'loop' ? 'loopNode' : 'parallelNode',
type: 'subflowNode',
},
undefined,
undefined,
@@ -657,10 +684,12 @@ const WorkflowContent = React.memo(() => {
const containerNode = getNodes().find((n) => n.id === containerInfo.loopId)
const containerType = containerNode?.type
if (containerType === 'loopNode' || containerType === 'parallelNode') {
if (containerType === 'subflowNode') {
// Connect from the container's start node to the new block
const startSourceHandle =
containerType === 'loopNode' ? 'loop-start-source' : 'parallel-start-source'
(containerNode?.data as any)?.kind === 'loop'
? 'loop-start-source'
: 'parallel-start-source'
addEdge({
id: crypto.randomUUID(),
@@ -781,9 +810,15 @@ const WorkflowContent = React.memo(() => {
if (containerElement) {
// Determine the type of container node for appropriate styling
const containerNode = getNodes().find((n) => n.id === containerInfo.loopId)
if (containerNode?.type === 'loopNode') {
if (
containerNode?.type === 'subflowNode' &&
(containerNode.data as any)?.kind === 'loop'
) {
containerElement.classList.add('loop-node-drag-over')
} else if (containerNode?.type === 'parallelNode') {
} else if (
containerNode?.type === 'subflowNode' &&
(containerNode.data as any)?.kind === 'parallel'
) {
containerElement.classList.add('parallel-node-drag-over')
}
document.body.style.cursor = 'copy'
@@ -918,31 +953,11 @@ const WorkflowContent = React.memo(() => {
}
// Handle container nodes differently
if (block.type === 'loop') {
if (block.type === 'loop' || block.type === 'parallel') {
const hasNestedError = nestedSubflowErrors.has(block.id)
nodeArray.push({
id: block.id,
type: 'loopNode',
position: block.position,
parentId: block.data?.parentId,
extent: block.data?.extent || undefined,
dragHandle: '.workflow-drag-handle',
data: {
...block.data,
width: block.data?.width || 500,
height: block.data?.height || 300,
hasNestedError,
},
})
return
}
// Handle parallel nodes
if (block.type === 'parallel') {
const hasNestedError = nestedSubflowErrors.has(block.id)
nodeArray.push({
id: block.id,
type: 'parallelNode',
type: 'subflowNode',
position: block.position,
parentId: block.data?.parentId,
extent: block.data?.extent || undefined,
@@ -952,6 +967,7 @@ const WorkflowContent = React.memo(() => {
width: block.data?.width || 500,
height: block.data?.height || 300,
hasNestedError,
kind: block.type === 'loop' ? 'loop' : 'parallel',
},
})
return
@@ -1191,13 +1207,13 @@ const WorkflowContent = React.memo(() => {
const intersectingNodes = getNodes()
.filter((n) => {
// Only consider container nodes that aren't the dragged node
if ((n.type !== 'loopNode' && n.type !== 'parallelNode') || n.id === node.id) return false
if (n.type !== 'subflowNode' || n.id === node.id) return false
// Skip if this container is already the parent of the node being dragged
if (n.id === currentParentId) return false
// Skip self-nesting: prevent a container from becoming its own descendant
if (node.type === 'loopNode' || node.type === 'parallelNode') {
if (node.type === 'subflowNode') {
// Get the full hierarchy of the potential parent
const hierarchy = getNodeHierarchyWrapper(n.id)
@@ -1212,14 +1228,14 @@ const WorkflowContent = React.memo(() => {
// Get dimensions based on node type
const nodeWidth =
node.type === 'loopNode' || node.type === 'parallelNode'
node.type === 'subflowNode'
? node.data?.width || 500
: node.type === 'condition'
? 250
: 350
const nodeHeight =
node.type === 'loopNode' || node.type === 'parallelNode'
node.type === 'subflowNode'
? node.data?.height || 300
: node.type === 'condition'
? 150
@@ -1286,9 +1302,15 @@ const WorkflowContent = React.memo(() => {
)
if (containerElement) {
// Apply appropriate class based on container type
if (bestContainerMatch.container.type === 'loopNode') {
if (
bestContainerMatch.container.type === 'subflowNode' &&
(bestContainerMatch.container.data as any)?.kind === 'loop'
) {
containerElement.classList.add('loop-node-drag-over')
} else if (bestContainerMatch.container.type === 'parallelNode') {
} else if (
bestContainerMatch.container.type === 'subflowNode' &&
(bestContainerMatch.container.data as any)?.kind === 'parallel'
) {
containerElement.classList.add('parallel-node-drag-over')
}
document.body.style.cursor = 'copy'
@@ -1356,7 +1378,7 @@ const WorkflowContent = React.memo(() => {
}
// If we're dragging a container node, do additional checks to prevent circular references
if ((node.type === 'loopNode' || node.type === 'parallelNode') && potentialParentId) {
if (node.type === 'subflowNode' && potentialParentId) {
// Get the hierarchy of the potential parent container
const parentHierarchy = getNodeHierarchyWrapper(potentialParentId)
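The 'Remove From Subflow' action spans two files in this diff: the ActionBar dispatches a window-level CustomEvent carrying the block id, and the canvas listener above detaches the block and drops its edges. A condensed sketch of that contract (handler name and the example block id are illustrative):

// Dispatch side (ActionBar): request that the canvas detach this block from its loop/parallel parent.
window.dispatchEvent(new CustomEvent('remove-from-subflow', { detail: { blockId: 'block-123' } }))

// Listening side (canvas): read the block id and perform the detach.
const onRemoveFromSubflow = (event: Event) => {
  const { blockId } = (event as CustomEvent<{ blockId: string }>).detail ?? { blockId: '' }
  if (!blockId) return
  // ...call updateNodeParent(blockId, null) and remove the block's connected edges, as above
}
window.addEventListener('remove-from-subflow', onRemoveFromSubflow as EventListener)

Keeping the event name in sync on both sides is the whole contract; the listener is registered once per canvas mount and removed on unmount, as in the useEffect above.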

View File

@@ -202,9 +202,6 @@ export function Copilot() {
<div className='flex items-center justify-between gap-4'>
<div className='min-w-0 flex-1'>
<div className='rounded bg-muted/50 px-2 py-1 font-mono text-sm'>{value}</div>
<p className='mt-1 text-muted-foreground text-xs'>
Key ID: <span className='font-mono'>{k.id}</span>
</p>
</div>
<div className='flex items-center gap-2'>
<TooltipProvider>

View File

@@ -15,8 +15,7 @@ import 'reactflow/dist/style.css'
import { createLogger } from '@/lib/logs/console/logger'
import { cn } from '@/lib/utils'
import { LoopNodeComponent } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/loop/loop-node'
import { ParallelNodeComponent } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/parallel/parallel-node'
import { SubflowNodeComponent } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/subflow-node'
import { WorkflowBlock } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/workflow-block'
import { WorkflowEdge } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-edge/workflow-edge'
import { getBlock } from '@/blocks'
@@ -39,8 +38,7 @@ interface WorkflowPreviewProps {
// Define node types - the components now handle preview mode internally
const nodeTypes: NodeTypes = {
workflowBlock: WorkflowBlock,
loopNode: LoopNodeComponent,
parallelNode: ParallelNodeComponent,
subflowNode: SubflowNodeComponent,
}
// Define edge types
@@ -131,7 +129,7 @@ export function WorkflowPreview({
if (block.type === 'loop') {
nodeArray.push({
id: block.id,
type: 'loopNode',
type: 'subflowNode',
position: absolutePosition,
parentId: block.data?.parentId,
extent: block.data?.extent || undefined,
@@ -142,6 +140,7 @@ export function WorkflowPreview({
height: block.data?.height || 300,
state: 'valid',
isPreview: true,
kind: 'loop',
},
})
return
@@ -150,7 +149,7 @@ export function WorkflowPreview({
if (block.type === 'parallel') {
nodeArray.push({
id: block.id,
type: 'parallelNode',
type: 'subflowNode',
position: absolutePosition,
parentId: block.data?.parentId,
extent: block.data?.extent || undefined,
@@ -161,6 +160,7 @@ export function WorkflowPreview({
height: block.data?.height || 300,
state: 'valid',
isPreview: true,
kind: 'parallel',
},
})
return

View File

@@ -5,12 +5,14 @@ import { BlockPathCalculator } from '@/lib/block-path-calculator'
import { extractFieldsFromSchema, parseResponseFormatSafely } from '@/lib/response-format'
import { cn } from '@/lib/utils'
import { getBlock } from '@/blocks'
import type { BlockConfig } from '@/blocks/types'
import { Serializer } from '@/serializer'
import { useVariablesStore } from '@/stores/panel/variables/store'
import type { Variable } from '@/stores/panel/variables/types'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
import type { BlockState } from '@/stores/workflows/workflow/types'
import { getTool } from '@/tools/utils'
import { getTriggersByProvider } from '@/triggers'
@@ -51,8 +53,8 @@ export const checkTagTrigger = (text: string, cursorPosition: number): { show: b
const BLOCK_COLORS = {
VARIABLE: '#2F8BFF',
DEFAULT: '#2F55FF',
LOOP: '#8857E6',
PARALLEL: '#FF5757',
LOOP: '#2FB3FF',
PARALLEL: '#FEE12B',
} as const
const TAG_PREFIXES = {
@@ -73,11 +75,11 @@ const getSubBlockValue = (blockId: string, property: string): any => {
const createTagEventHandlers = (
tag: string,
group: any,
group: BlockTagGroup | undefined,
tagIndex: number,
handleTagSelect: (tag: string, group?: any) => void,
handleTagSelect: (tag: string, group?: BlockTagGroup) => void,
setSelectedIndex: (index: number) => void,
setHoveredNested: (value: any) => void
setHoveredNested: (value: { tag: string; index: number } | null) => void
) => ({
onMouseEnter: () => {
setSelectedIndex(tagIndex >= 0 ? tagIndex : 0)
@@ -96,8 +98,8 @@ const createTagEventHandlers = (
})
const getOutputTypeForPath = (
block: any,
blockConfig: any,
block: BlockState,
blockConfig: BlockConfig | null,
blockId: string,
outputPath: string
): string => {
@@ -137,7 +139,9 @@ const getOutputTypeForPath = (
// For API mode, check inputFormat for custom field types
const inputFormatValue = getSubBlockValue(blockId, 'inputFormat')
if (inputFormatValue && Array.isArray(inputFormatValue)) {
const field = inputFormatValue.find((f: any) => f.name === outputPath)
const field = inputFormatValue.find(
(f: { name?: string; type?: string }) => f.name === outputPath
)
if (field?.type) {
return field.type
}
@@ -224,7 +228,7 @@ const generateOutputPathsWithTypes = (
return paths
}
const generateToolOutputPaths = (blockConfig: any, operation: string): string[] => {
const generateToolOutputPaths = (blockConfig: BlockConfig, operation: string): string[] => {
if (!blockConfig?.tools?.config?.tool) return []
try {
@@ -244,7 +248,7 @@ const generateToolOutputPaths = (blockConfig: any, operation: string): string[]
}
}
const getToolOutputType = (blockConfig: any, operation: string, path: string): string => {
const getToolOutputType = (blockConfig: BlockConfig, operation: string, path: string): string => {
if (!blockConfig?.tools?.config?.tool) return 'any'
try {
@@ -366,9 +370,9 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
const metricsValue = getSubBlockValue(activeSourceBlockId, 'metrics')
if (metricsValue && Array.isArray(metricsValue) && metricsValue.length > 0) {
const validMetrics = metricsValue.filter((metric: any) => metric?.name)
const validMetrics = metricsValue.filter((metric: { name?: string }) => metric?.name)
blockTags = validMetrics.map(
(metric: any) => `${normalizedBlockName}.${metric.name.toLowerCase()}`
(metric: { name: string }) => `${normalizedBlockName}.${metric.name.toLowerCase()}`
)
} else {
const outputPaths = generateOutputPaths(blockConfig.outputs)
@@ -402,8 +406,8 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
inputFormatValue.length > 0
) {
blockTags = inputFormatValue
.filter((field: any) => field.name && field.name.trim() !== '')
.map((field: any) => `${normalizedBlockName}.${field.name}`)
.filter((field: { name?: string }) => field.name && field.name.trim() !== '')
.map((field: { name: string }) => `${normalizedBlockName}.${field.name}`)
} else {
blockTags = [normalizedBlockName]
}
@@ -556,9 +560,14 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
)
let containingParallelBlockId: string | null = null
if (containingParallel) {
const [parallelId] = containingParallel
const [parallelId, parallel] = containingParallel
containingParallelBlockId = parallelId
const contextualTags: string[] = ['index', 'currentItem', 'items']
const parallelType = parallel.parallelType || 'count'
const contextualTags: string[] = ['index']
if (parallelType === 'collection') {
contextualTags.push('currentItem')
contextualTags.push('items')
}
const containingParallelBlock = blocks[parallelId]
if (containingParallelBlock) {
@@ -629,9 +638,9 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
const metricsValue = getSubBlockValue(accessibleBlockId, 'metrics')
if (metricsValue && Array.isArray(metricsValue) && metricsValue.length > 0) {
const validMetrics = metricsValue.filter((metric: any) => metric?.name)
const validMetrics = metricsValue.filter((metric: { name?: string }) => metric?.name)
blockTags = validMetrics.map(
(metric: any) => `${normalizedBlockName}.${metric.name.toLowerCase()}`
(metric: { name: string }) => `${normalizedBlockName}.${metric.name.toLowerCase()}`
)
} else {
const outputPaths = generateOutputPaths(blockConfig.outputs)
@@ -665,8 +674,8 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
inputFormatValue.length > 0
) {
blockTags = inputFormatValue
.filter((field: any) => field.name && field.name.trim() !== '')
.map((field: any) => `${normalizedBlockName}.${field.name}`)
.filter((field: { name?: string }) => field.name && field.name.trim() !== '')
.map((field: { name: string }) => `${normalizedBlockName}.${field.name}`)
} else {
blockTags = [normalizedBlockName]
}
@@ -880,8 +889,8 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
liveCursor = activeEl.selectionStart ?? cursorPosition
// Prefer the active element value if present. This ensures we include the most
// recently typed character(s) that might not yet be reflected in React state.
if (typeof (activeEl as any).value === 'string') {
liveValue = (activeEl as any).value
if ('value' in activeEl && typeof activeEl.value === 'string') {
liveValue = activeEl.value
}
}
}
@@ -1289,7 +1298,7 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
tagDescription = getOutputTypeForPath(
block,
blockConfig,
blockConfig || null,
group.blockId,
outputPath
)
@@ -1429,7 +1438,7 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
childType = getOutputTypeForPath(
block,
blockConfig,
blockConfig || null,
group.blockId,
childOutputPath
)

View File

@@ -0,0 +1 @@
ALTER TABLE "workflow" DROP COLUMN "state";

View File

@@ -0,0 +1,167 @@
-- One-shot data migration to create/populate execution_data & cost, then drop legacy columns
-- Safe on reruns and across differing prior schemas
-- Note: depending on runner timeouts, this migration may need to be run manually
-- 1) Ensure execution_data exists (prefer rename if only metadata exists)
DO $$
BEGIN
IF EXISTS (
SELECT 1 FROM information_schema.columns
WHERE table_schema = 'public' AND table_name = 'workflow_execution_logs' AND column_name = 'metadata'
) AND NOT EXISTS (
SELECT 1 FROM information_schema.columns
WHERE table_schema = 'public' AND table_name = 'workflow_execution_logs' AND column_name = 'execution_data'
) THEN
EXECUTE 'ALTER TABLE workflow_execution_logs RENAME COLUMN metadata TO execution_data';
END IF;
END $$;--> statement-breakpoint
ALTER TABLE "workflow_execution_logs"
ADD COLUMN IF NOT EXISTS "execution_data" jsonb NOT NULL DEFAULT '{}'::jsonb,
ADD COLUMN IF NOT EXISTS "cost" jsonb;--> statement-breakpoint
-- 2) Process the backfill in batches to avoid large temporary files on big datasets
DO $$
DECLARE
v_batch_size integer := 500; -- keep batches small to avoid timeouts/spills
v_rows_updated integer := 0;
v_rows_selected integer := 0;
v_last_id text := '';
v_last_created_at timestamp := '1970-01-01 00:00:00';
BEGIN
-- modest per-statement timeout; adjust based on observed per-batch runtime
PERFORM set_config('statement_timeout', '180s', true);
LOOP
CREATE TEMP TABLE IF NOT EXISTS _tmp_candidate_ids(id text, created_at timestamp) ON COMMIT DROP;
TRUNCATE _tmp_candidate_ids;
INSERT INTO _tmp_candidate_ids(id, created_at)
SELECT id, created_at
FROM workflow_execution_logs
WHERE (created_at, id) > (v_last_created_at, v_last_id) AND cost IS NULL
ORDER BY created_at, id
LIMIT v_batch_size;
SELECT COUNT(*) INTO v_rows_selected FROM _tmp_candidate_ids;
EXIT WHEN v_rows_selected = 0;
SELECT created_at, id
INTO v_last_created_at, v_last_id
FROM _tmp_candidate_ids
ORDER BY created_at DESC, id DESC
LIMIT 1;
WITH RECURSIVE
spans AS (
SELECT l.id, s.span
FROM workflow_execution_logs l
JOIN _tmp_candidate_ids c ON c.id = l.id
LEFT JOIN LATERAL jsonb_array_elements(
COALESCE(
CASE
WHEN jsonb_typeof(l.execution_data->'traceSpans') = 'array' THEN l.execution_data->'traceSpans'
ELSE '[]'::jsonb
END
)
) s(span) ON true
UNION ALL
SELECT spans.id, c.span
FROM spans
JOIN LATERAL jsonb_array_elements(COALESCE(spans.span->'children','[]'::jsonb)) c(span) ON true
),
agg AS (
SELECT id,
SUM(COALESCE((span->'cost'->>'input')::numeric,0)) AS agg_input,
SUM(COALESCE((span->'cost'->>'output')::numeric,0)) AS agg_output,
SUM(COALESCE((span->'cost'->>'total')::numeric,0)) AS agg_total,
SUM(COALESCE((span->'cost'->'tokens'->>'prompt')::numeric, COALESCE((span->'tokens'->>'prompt')::numeric,0))) AS agg_tokens_prompt,
SUM(COALESCE((span->'cost'->'tokens'->>'completion')::numeric, COALESCE((span->'tokens'->>'completion')::numeric,0))) AS agg_tokens_completion,
SUM(COALESCE((span->'cost'->'tokens'->>'total')::numeric, COALESCE((span->'tokens'->>'total')::numeric,0))) AS agg_tokens_total
FROM spans
GROUP BY id
),
model_rows AS (
SELECT id,
(span->'cost'->>'model') AS model,
COALESCE((span->'cost'->>'input')::numeric,0) AS input,
COALESCE((span->'cost'->>'output')::numeric,0) AS output,
COALESCE((span->'cost'->>'total')::numeric,0) AS total,
COALESCE((span->'cost'->'tokens'->>'prompt')::numeric,0) AS tokens_prompt,
COALESCE((span->'cost'->'tokens'->>'completion')::numeric,0) AS tokens_completion,
COALESCE((span->'cost'->'tokens'->>'total')::numeric,0) AS tokens_total
FROM spans
WHERE span ? 'cost' AND (span->'cost'->>'model') IS NOT NULL
),
model_sums AS (
SELECT id,
model,
SUM(input) AS input,
SUM(output) AS output,
SUM(total) AS total,
SUM(tokens_prompt) AS tokens_prompt,
SUM(tokens_completion) AS tokens_completion,
SUM(tokens_total) AS tokens_total
FROM model_rows
GROUP BY id, model
),
models AS (
SELECT id,
jsonb_object_agg(model, jsonb_build_object(
'input', input,
'output', output,
'total', total,
'tokens', jsonb_build_object(
'prompt', tokens_prompt,
'completion', tokens_completion,
'total', tokens_total
)
)) AS models
FROM model_sums
GROUP BY id
),
tb AS (
SELECT l.id,
NULLIF((l.execution_data->'tokenBreakdown'->>'prompt')::numeric, 0) AS prompt,
NULLIF((l.execution_data->'tokenBreakdown'->>'completion')::numeric, 0) AS completion
FROM workflow_execution_logs l
JOIN _tmp_candidate_ids c ON c.id = l.id
)
UPDATE workflow_execution_logs AS l
SET cost = jsonb_strip_nulls(
jsonb_build_object(
'total', COALESCE((to_jsonb(l)->>'total_cost')::numeric, NULLIF(agg.agg_total,0)),
'input', COALESCE((to_jsonb(l)->>'total_input_cost')::numeric, NULLIF(agg.agg_input,0)),
'output', COALESCE((to_jsonb(l)->>'total_output_cost')::numeric, NULLIF(agg.agg_output,0)),
'tokens', CASE
WHEN (to_jsonb(l) ? 'total_tokens') OR tb.prompt IS NOT NULL OR tb.completion IS NOT NULL OR NULLIF(agg.agg_tokens_total,0) IS NOT NULL THEN
jsonb_strip_nulls(
jsonb_build_object(
'total', COALESCE((to_jsonb(l)->>'total_tokens')::numeric, NULLIF(agg.agg_tokens_total,0)),
'prompt', COALESCE(tb.prompt, NULLIF(agg.agg_tokens_prompt,0)),
'completion', COALESCE(tb.completion, NULLIF(agg.agg_tokens_completion,0))
)
)
ELSE NULL
END,
'models', models.models
)
)
FROM agg
LEFT JOIN models ON models.id = agg.id
LEFT JOIN tb ON tb.id = agg.id
WHERE l.id = agg.id;
GET DIAGNOSTICS v_rows_updated = ROW_COUNT;
-- keyset cursor (created_at, id) advances each batch; the loop exits when no rows are selected
END LOOP;
END $$;--> statement-breakpoint
-- 3) Drop legacy columns now that backfill is complete
ALTER TABLE "workflow_execution_logs"
DROP COLUMN IF EXISTS "message",
DROP COLUMN IF EXISTS "block_count",
DROP COLUMN IF EXISTS "success_count",
DROP COLUMN IF EXISTS "error_count",
DROP COLUMN IF EXISTS "skipped_count",
DROP COLUMN IF EXISTS "total_cost",
DROP COLUMN IF EXISTS "total_input_cost",
DROP COLUMN IF EXISTS "total_output_cost",
DROP COLUMN IF EXISTS "total_tokens",
DROP COLUMN IF EXISTS "metadata";

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -519,6 +519,20 @@
"when": 1755304368539,
"tag": "0074_abnormal_dreadnoughts",
"breakpoints": true
},
{
"idx": 75,
"version": "7",
"when": 1755319635487,
"tag": "0075_lush_moonstone",
"breakpoints": true
},
{
"idx": 76,
"version": "7",
"when": 1755375658161,
"tag": "0076_damp_vector",
"breakpoints": true
}
]
}

View File

@@ -121,8 +121,6 @@ export const workflow = pgTable(
folderId: text('folder_id').references(() => workflowFolder.id, { onDelete: 'set null' }),
name: text('name').notNull(),
description: text('description'),
// DEPRECATED: Use normalized tables (workflow_blocks, workflow_edges, workflow_subflows) instead
state: json('state').notNull(),
color: text('color').notNull().default('#3972F6'),
lastSynced: timestamp('last_synced').notNull(),
createdAt: timestamp('created_at').notNull(),
@@ -130,7 +128,6 @@ export const workflow = pgTable(
isDeployed: boolean('is_deployed').notNull().default(false),
deployedState: json('deployed_state'),
deployedAt: timestamp('deployed_at'),
// When set, only this API key is authorized for execution
pinnedApiKey: text('pinned_api_key'),
collaborators: json('collaborators').notNull().default('[]'),
runCount: integer('run_count').notNull().default(0),
@@ -285,24 +282,14 @@ export const workflowExecutionLogs = pgTable(
.references(() => workflowExecutionSnapshots.id),
level: text('level').notNull(), // 'info', 'error'
message: text('message').notNull(),
trigger: text('trigger').notNull(), // 'api', 'webhook', 'schedule', 'manual', 'chat'
startedAt: timestamp('started_at').notNull(),
endedAt: timestamp('ended_at'),
totalDurationMs: integer('total_duration_ms'),
blockCount: integer('block_count').notNull().default(0),
successCount: integer('success_count').notNull().default(0),
errorCount: integer('error_count').notNull().default(0),
skippedCount: integer('skipped_count').notNull().default(0),
totalCost: decimal('total_cost', { precision: 10, scale: 6 }),
totalInputCost: decimal('total_input_cost', { precision: 10, scale: 6 }),
totalOutputCost: decimal('total_output_cost', { precision: 10, scale: 6 }),
totalTokens: integer('total_tokens'),
metadata: jsonb('metadata').notNull().default('{}'),
executionData: jsonb('execution_data').notNull().default('{}'),
cost: jsonb('cost'),
files: jsonb('files'), // File metadata for execution files
createdAt: timestamp('created_at').notNull().defaultNow(),
},

View File

@@ -402,11 +402,11 @@ export const SERVER_TOOL_METADATA: Record<ServerToolId, ToolMetadata> = {
id: SERVER_TOOL_IDS.GET_OAUTH_CREDENTIALS,
displayConfig: {
states: {
executing: { displayName: 'Retrieving OAuth credentials', icon: 'spinner' },
success: { displayName: 'Retrieved OAuth credentials', icon: 'key' },
rejected: { displayName: 'Skipped retrieving OAuth credentials', icon: 'skip' },
errored: { displayName: 'Failed to retrieve OAuth credentials', icon: 'error' },
aborted: { displayName: 'Retrieving OAuth credentials aborted', icon: 'x' },
executing: { displayName: 'Retrieving login IDs', icon: 'spinner' },
success: { displayName: 'Retrieved login IDs', icon: 'key' },
rejected: { displayName: 'Skipped retrieving login IDs', icon: 'skip' },
errored: { displayName: 'Failed to retrieve login IDs', icon: 'error' },
aborted: { displayName: 'Retrieving login IDs aborted', icon: 'x' },
},
},
schema: {

View File

@@ -1,6 +1,7 @@
import { eq } from 'drizzle-orm'
import { jwtDecode } from 'jwt-decode'
import { createLogger } from '@/lib/logs/console/logger'
import { refreshTokenIfNeeded } from '@/app/api/auth/oauth/utils'
import { db } from '@/db'
import { account, user } from '@/db/schema'
import { BaseCopilotTool } from '../base'
@@ -15,6 +16,7 @@ interface OAuthCredentialItem {
provider: string
lastUsed: string
isDefault: boolean
accessToken: string | null
}
interface GetOAuthCredentialsResult {
@@ -55,6 +57,9 @@ class GetOAuthCredentialsTool extends BaseCopilotTool<
const credentials: OAuthCredentialItem[] = []
// Short request id for log correlation
const requestId = crypto.randomUUID().slice(0, 8)
for (const acc of accounts) {
const providerId = acc.providerId
const [baseProvider, featureType = 'default'] = providerId.split('-')
@@ -90,12 +95,26 @@ class GetOAuthCredentialsTool extends BaseCopilotTool<
displayName = `${acc.accountId} (${baseProvider})`
}
// Ensure we return a valid access token, refreshing if needed
let accessToken: string | null = acc.accessToken ?? null
try {
const { accessToken: refreshedToken } = await refreshTokenIfNeeded(
requestId,
acc as any,
acc.id
)
accessToken = refreshedToken || accessToken
} catch (_error) {
// If refresh fails, we still return whatever we had (may be null)
}
credentials.push({
id: acc.id,
name: displayName,
provider: providerId,
lastUsed: acc.updatedAt.toISOString(),
isDefault: featureType === 'default',
accessToken,
})
}
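A short hypothetical sketch (not part of this diff) of how a caller might use the newly returned accessToken; since the refresh can fail, the field may still be null.

// Hypothetical consumer (not in this diff): build an Authorization header from a
// returned credential; accessToken may be null when refresh failed.
function buildAuthHeader(credential: { accessToken: string | null }): Record<string, string> {
  return credential.accessToken ? { Authorization: `Bearer ${credential.accessToken}` } : {}
}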

View File

@@ -352,24 +352,8 @@ async function getUserWorkflow(workflowId: string): Promise<string> {
}
})
})
} else if (workflowRecord.state) {
// Fallback to JSON blob
const jsonState = workflowRecord.state as any
workflowState = {
blocks: jsonState.blocks || {},
edges: jsonState.edges || [],
loops: jsonState.loops || {},
parallels: jsonState.parallels || {},
}
// For JSON blob, subblock values are embedded in the block state
Object.entries((workflowState.blocks as any) || {}).forEach(([blockId, block]) => {
subBlockValues[blockId] = {}
Object.entries((block as any).subBlocks || {}).forEach(([subBlockId, subBlock]) => {
if ((subBlock as any).value !== undefined) {
subBlockValues[blockId][subBlockId] = (subBlock as any).value
}
})
})
} else {
throw new Error('Workflow has no normalized data')
}
if (!workflowState || !workflowState.blocks) {

View File

@@ -39,15 +39,10 @@ interface ExecutionEntry {
id: string
executionId: string
level: string
message: string
trigger: string
startedAt: string
endedAt: string | null
durationMs: number | null
blockCount: number
successCount: number
errorCount: number
skippedCount: number
totalCost: number | null
totalTokens: number | null
blockExecutions: BlockExecution[]
@@ -124,18 +119,12 @@ async function getWorkflowConsole(
id: workflowExecutionLogs.id,
executionId: workflowExecutionLogs.executionId,
level: workflowExecutionLogs.level,
message: workflowExecutionLogs.message,
trigger: workflowExecutionLogs.trigger,
startedAt: workflowExecutionLogs.startedAt,
endedAt: workflowExecutionLogs.endedAt,
totalDurationMs: workflowExecutionLogs.totalDurationMs,
blockCount: workflowExecutionLogs.blockCount,
successCount: workflowExecutionLogs.successCount,
errorCount: workflowExecutionLogs.errorCount,
skippedCount: workflowExecutionLogs.skippedCount,
totalCost: workflowExecutionLogs.totalCost,
totalTokens: workflowExecutionLogs.totalTokens,
metadata: workflowExecutionLogs.metadata,
executionData: workflowExecutionLogs.executionData,
cost: workflowExecutionLogs.cost,
})
.from(workflowExecutionLogs)
.where(eq(workflowExecutionLogs.workflowId, workflowId))
@@ -144,9 +133,8 @@ async function getWorkflowConsole(
// Format the response with detailed block execution data
const formattedEntries: ExecutionEntry[] = executionLogs.map((log) => {
// Extract trace spans from metadata
const metadata = log.metadata as any
const traceSpans = metadata?.traceSpans || []
// Extract trace spans from execution data
const traceSpans = (log.executionData as any)?.traceSpans || []
const blockExecutions = extractBlockExecutionsFromTraceSpans(traceSpans)
// Try to find the final output from the last executed block
@@ -172,17 +160,12 @@ async function getWorkflowConsole(
id: log.id,
executionId: log.executionId,
level: log.level,
message: log.message,
trigger: log.trigger,
startedAt: log.startedAt.toISOString(),
endedAt: log.endedAt?.toISOString() || null,
durationMs: log.totalDurationMs,
blockCount: log.blockCount,
successCount: log.successCount,
errorCount: log.errorCount,
skippedCount: log.skippedCount || 0,
totalCost: log.totalCost ? Number.parseFloat(log.totalCost.toString()) : null,
totalTokens: log.totalTokens,
totalCost: (log.cost as any)?.total ?? null,
totalTokens: (log.cost as any)?.tokens?.total ?? null,
blockExecutions: includeDetails ? blockExecutions : [],
output: finalOutput,
}

View File

@@ -59,20 +59,11 @@ export class ExecutionLogger implements IExecutionLoggerService {
executionId,
stateSnapshotId: snapshotResult.snapshot.id,
level: 'info',
message: `${this.getTriggerPrefix(trigger.type)} execution started`,
trigger: trigger.type,
startedAt: startTime,
endedAt: null,
totalDurationMs: null,
blockCount: 0,
successCount: 0,
errorCount: 0,
skippedCount: 0,
totalCost: null,
totalInputCost: null,
totalOutputCost: null,
totalTokens: null,
metadata: {
executionData: {
environment,
trigger,
},
@@ -88,20 +79,11 @@ export class ExecutionLogger implements IExecutionLoggerService {
executionId: workflowLog.executionId,
stateSnapshotId: workflowLog.stateSnapshotId,
level: workflowLog.level as 'info' | 'error',
message: workflowLog.message,
trigger: workflowLog.trigger as ExecutionTrigger['type'],
startedAt: workflowLog.startedAt.toISOString(),
endedAt: workflowLog.endedAt?.toISOString() || workflowLog.startedAt.toISOString(),
totalDurationMs: workflowLog.totalDurationMs || 0,
blockCount: workflowLog.blockCount,
successCount: workflowLog.successCount,
errorCount: workflowLog.errorCount,
skippedCount: workflowLog.skippedCount,
totalCost: Number(workflowLog.totalCost) || 0,
totalInputCost: Number(workflowLog.totalInputCost) || 0,
totalOutputCost: Number(workflowLog.totalOutputCost) || 0,
totalTokens: workflowLog.totalTokens || 0,
metadata: workflowLog.metadata as WorkflowExecutionLog['metadata'],
executionData: workflowLog.executionData as WorkflowExecutionLog['executionData'],
createdAt: workflowLog.createdAt.toISOString(),
},
snapshot: snapshotResult.snapshot,
@@ -151,7 +133,6 @@ export class ExecutionLogger implements IExecutionLoggerService {
})
const level = hasErrors ? 'error' : 'info'
const message = hasErrors ? 'Workflow execution failed' : 'Workflow execution completed'
// Extract files from trace spans and final output
const executionFiles = this.extractFilesFromExecution(traceSpans, finalOutput)
@@ -160,19 +141,10 @@ export class ExecutionLogger implements IExecutionLoggerService {
.update(workflowExecutionLogs)
.set({
level,
message,
endedAt: new Date(endedAt),
totalDurationMs,
blockCount: 0,
successCount: 0,
errorCount: 0,
skippedCount: 0,
totalCost: costSummary.totalCost.toString(),
totalInputCost: costSummary.totalInputCost.toString(),
totalOutputCost: costSummary.totalOutputCost.toString(),
totalTokens: costSummary.totalTokens,
files: executionFiles.length > 0 ? executionFiles : null,
metadata: {
executionData: {
traceSpans,
finalOutput,
tokenBreakdown: {
@@ -182,6 +154,17 @@ export class ExecutionLogger implements IExecutionLoggerService {
},
models: costSummary.models,
},
cost: {
total: costSummary.totalCost,
input: costSummary.totalInputCost,
output: costSummary.totalOutputCost,
tokens: {
prompt: costSummary.totalPromptTokens,
completion: costSummary.totalCompletionTokens,
total: costSummary.totalTokens,
},
models: costSummary.models,
},
})
.where(eq(workflowExecutionLogs.executionId, executionId))
.returning()
@@ -205,20 +188,12 @@ export class ExecutionLogger implements IExecutionLoggerService {
executionId: updatedLog.executionId,
stateSnapshotId: updatedLog.stateSnapshotId,
level: updatedLog.level as 'info' | 'error',
message: updatedLog.message,
trigger: updatedLog.trigger as ExecutionTrigger['type'],
startedAt: updatedLog.startedAt.toISOString(),
endedAt: updatedLog.endedAt?.toISOString() || endedAt,
totalDurationMs: updatedLog.totalDurationMs || totalDurationMs,
blockCount: updatedLog.blockCount,
successCount: updatedLog.successCount,
errorCount: updatedLog.errorCount,
skippedCount: updatedLog.skippedCount,
totalCost: Number(updatedLog.totalCost) || 0,
totalInputCost: Number(updatedLog.totalInputCost) || 0,
totalOutputCost: Number(updatedLog.totalOutputCost) || 0,
totalTokens: updatedLog.totalTokens || 0,
metadata: updatedLog.metadata as WorkflowExecutionLog['metadata'],
executionData: updatedLog.executionData as WorkflowExecutionLog['executionData'],
cost: updatedLog.cost as any,
createdAt: updatedLog.createdAt.toISOString(),
}
}
@@ -238,20 +213,12 @@ export class ExecutionLogger implements IExecutionLoggerService {
executionId: workflowLog.executionId,
stateSnapshotId: workflowLog.stateSnapshotId,
level: workflowLog.level as 'info' | 'error',
message: workflowLog.message,
trigger: workflowLog.trigger as ExecutionTrigger['type'],
startedAt: workflowLog.startedAt.toISOString(),
endedAt: workflowLog.endedAt?.toISOString() || workflowLog.startedAt.toISOString(),
totalDurationMs: workflowLog.totalDurationMs || 0,
blockCount: workflowLog.blockCount,
successCount: workflowLog.successCount,
errorCount: workflowLog.errorCount,
skippedCount: workflowLog.skippedCount,
totalCost: Number(workflowLog.totalCost) || 0,
totalInputCost: Number(workflowLog.totalInputCost) || 0,
totalOutputCost: Number(workflowLog.totalOutputCost) || 0,
totalTokens: workflowLog.totalTokens || 0,
metadata: workflowLog.metadata as WorkflowExecutionLog['metadata'],
executionData: workflowLog.executionData as WorkflowExecutionLog['executionData'],
cost: workflowLog.cost as any,
createdAt: workflowLog.createdAt.toISOString(),
}
}

View File

@@ -82,19 +82,10 @@ export interface WorkflowExecutionLog {
executionId: string
stateSnapshotId: string
level: 'info' | 'error'
message: string
trigger: ExecutionTrigger['type']
startedAt: string
endedAt: string
totalDurationMs: number
blockCount: number
successCount: number
errorCount: number
skippedCount: number
totalCost: number
totalInputCost: number
totalOutputCost: number
totalTokens: number
files?: Array<{
id: string
name: string
@@ -107,9 +98,10 @@ export interface WorkflowExecutionLog {
storageProvider?: 's3' | 'blob' | 'local'
bucketName?: string
}>
metadata: {
environment: ExecutionEnvironment
trigger: ExecutionTrigger
// Execution details
executionData: {
environment?: ExecutionEnvironment
trigger?: ExecutionTrigger
traceSpans?: TraceSpan[]
errorDetails?: {
blockId: string
@@ -118,6 +110,22 @@ export interface WorkflowExecutionLog {
stackTrace?: string
}
}
// Top-level cost information
cost?: {
input?: number
output?: number
total?: number
tokens?: { prompt?: number; completion?: number; total?: number }
models?: Record<
string,
{
input?: number
output?: number
total?: number
tokens?: { prompt?: number; completion?: number; total?: number }
}
>
}
duration?: string
createdAt: string
}

View File

@@ -1,5 +1,6 @@
import { createLogger } from '@/lib/logs/console/logger'
import type { BlockState, WorkflowState } from '@/stores/workflows/workflow/types'
import type { BlockWithDiff } from './types'
const logger = createLogger('WorkflowDiffEngine')
@@ -334,10 +335,10 @@ export class WorkflowDiffEngine {
for (const [blockId, block] of Object.entries(state.blocks)) {
const cleanBlock: BlockState = { ...block }
// Remove diff markers using bracket notation to avoid TypeScript errors
;(cleanBlock as any).is_diff = undefined
;(cleanBlock as any).field_diff = undefined
// Remove diff markers using proper typing
const blockWithDiff = cleanBlock as BlockState & BlockWithDiff
blockWithDiff.is_diff = undefined
blockWithDiff.field_diffs = undefined
// Ensure outputs is never null/undefined
if (cleanBlock.outputs === undefined || cleanBlock.outputs === null) {

View File

@@ -0,0 +1,18 @@
/**
* Type definitions for workflow diff functionality
*/
export type DiffStatus = 'new' | 'edited' | undefined
export type FieldDiffStatus = 'changed' | 'unchanged'
export type EdgeDiffStatus = 'new' | 'deleted' | 'unchanged' | null
export interface BlockWithDiff {
is_diff?: DiffStatus
field_diffs?: Record<string, { changed_fields: string[]; unchanged_fields: string[] }>
}
export function hasDiffStatus(block: any): block is BlockWithDiff {
return block && typeof block === 'object' && ('is_diff' in block || 'field_diffs' in block)
}
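A brief usage sketch (hypothetical, not part of this diff) showing how the hasDiffStatus guard above could strip diff markers from a block before rendering:

// Hypothetical helper (not in this diff): drop diff markers from a block record
// using the hasDiffStatus guard exported above.
import { hasDiffStatus } from './types'

export function stripDiffMarkers(block: Record<string, unknown>): Record<string, unknown> {
  if (hasDiffStatus(block)) {
    const { is_diff, field_diffs, ...rest } = block
    return rest
  }
  return block
}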

View File

@@ -1,171 +0,0 @@
#!/usr/bin/env bun
import { db } from '@/db'
import { user, workflow, workspace } from '@/db/schema'
const testWorkflowState = {
blocks: {
'start-block-123': {
id: 'start-block-123',
type: 'starter',
name: 'Start',
position: {
x: 100,
y: 100,
},
subBlocks: {
startWorkflow: {
id: 'startWorkflow',
type: 'dropdown',
value: 'manual',
},
},
outputs: {
response: {
input: 'any',
},
},
enabled: true,
horizontalHandles: true,
isWide: false,
advancedMode: false,
height: 90,
},
'loop-block-456': {
id: 'loop-block-456',
type: 'loop',
name: 'For Loop',
position: {
x: 400,
y: 100,
},
subBlocks: {},
outputs: {},
enabled: true,
horizontalHandles: true,
isWide: false,
advancedMode: false,
height: 0,
data: {
width: 400,
height: 200,
type: 'loopNode',
},
},
'function-block-789': {
id: 'function-block-789',
type: 'function',
name: 'Return X',
position: {
x: 50,
y: 50,
},
subBlocks: {
code: {
id: 'code',
type: 'code',
value: "return 'X'",
},
},
outputs: {
response: {
result: 'any',
stdout: 'string',
},
},
enabled: true,
horizontalHandles: true,
isWide: false,
advancedMode: false,
height: 144,
data: {
parentId: 'loop-block-456',
extent: 'parent',
},
},
},
edges: [
{
id: 'edge-start-to-loop',
source: 'start-block-123',
target: 'loop-block-456',
sourceHandle: 'source',
targetHandle: 'target',
},
{
id: 'edge-loop-to-function',
source: 'loop-block-456',
target: 'function-block-789',
sourceHandle: 'loop-start-source',
targetHandle: 'target',
},
],
loops: {
'loop-block-456': {
id: 'loop-block-456',
nodes: ['function-block-789'],
iterations: 3,
loopType: 'for',
forEachItems: '',
},
},
parallels: {},
lastSaved: Date.now(),
isDeployed: false,
}
async function insertTestWorkflow() {
try {
console.log('🔍 Finding first workspace and user...')
// Get the first workspace
const workspaces = await db.select().from(workspace).limit(1)
if (workspaces.length === 0) {
throw new Error('No workspaces found. Please create a workspace first.')
}
// Get the first user
const users = await db.select().from(user).limit(1)
if (users.length === 0) {
throw new Error('No users found. Please create a user first.')
}
const workspaceId = workspaces[0].id
const userId = users[0].id
console.log(`✅ Using workspace: ${workspaceId}`)
console.log(`✅ Using user: ${userId}`)
// Insert workflow with old JSON state format
const testWorkflowId = `test-migration-workflow-${Date.now()}`
const now = new Date()
await db.insert(workflow).values({
id: testWorkflowId,
name: 'Test Migration Workflow (Old JSON Format)',
workspaceId: workspaceId,
userId: userId,
state: testWorkflowState, // This is the old JSON format
lastSynced: now,
createdAt: now,
updatedAt: now,
isDeployed: false,
isPublished: false,
})
console.log(`✅ Inserted test workflow with old JSON format: ${testWorkflowId}`)
console.log(`🌐 Access it at: http://localhost:3000/w/${testWorkflowId}`)
console.log('')
console.log('📋 Test steps:')
console.log('1. Open the workflow in your browser')
console.log('2. Verify it renders correctly with all blocks and connections')
console.log('3. Try editing some subblock values')
console.log('4. Run the migration script')
console.log('5. Verify it still works after migration')
} catch (error) {
console.error('❌ Error inserting test workflow:', error)
process.exit(1)
}
}
insertTestWorkflow()

View File

@@ -1,306 +0,0 @@
#!/usr/bin/env bun
import { readFileSync } from 'fs'
import { and, eq, inArray, isNotNull } from 'drizzle-orm'
import { nanoid } from 'nanoid'
import { db } from '@/db'
import { workflow, workflowBlocks, workflowEdges, workflowSubflows } from '@/db/schema'
interface WorkflowState {
blocks: Record<string, any>
edges: any[]
loops?: Record<string, any>
parallels?: Record<string, any>
lastSaved?: number
isDeployed?: boolean
}
async function migrateWorkflowStates(specificWorkflowIds?: string[] | null) {
try {
if (specificWorkflowIds) {
console.log(`🔍 Finding ${specificWorkflowIds.length} specific workflows...`)
} else {
console.log('🔍 Finding workflows with old JSON state format...')
}
// Build the where condition based on whether we have specific IDs
const whereCondition = specificWorkflowIds
? and(
isNotNull(workflow.state), // Has JSON state
inArray(workflow.id, specificWorkflowIds) // Only specific IDs
)
: and(
isNotNull(workflow.state) // Has JSON state
// We'll check for normalized data existence per workflow
)
// Find workflows that have state but no normalized table entries
const workflowsToMigrate = await db
.select({
id: workflow.id,
name: workflow.name,
state: workflow.state,
})
.from(workflow)
.where(whereCondition)
console.log(`📊 Found ${workflowsToMigrate.length} workflows with JSON state`)
if (specificWorkflowIds) {
const foundIds = workflowsToMigrate.map((w) => w.id)
const missingIds = specificWorkflowIds.filter((id) => !foundIds.includes(id))
if (missingIds.length > 0) {
console.log(`⚠️ Warning: ${missingIds.length} specified workflow IDs not found:`)
missingIds.forEach((id) => console.log(` - ${id}`))
}
console.log('')
}
let migratedCount = 0
let skippedCount = 0
let errorCount = 0
for (const wf of workflowsToMigrate) {
try {
// Check if this workflow already has normalized data
const existingBlocks = await db
.select({ id: workflowBlocks.id })
.from(workflowBlocks)
.where(eq(workflowBlocks.workflowId, wf.id))
.limit(1)
if (existingBlocks.length > 0) {
console.log(`⏭️ Skipping ${wf.name} (${wf.id}) - already has normalized data`)
skippedCount++
continue
}
console.log(`🔄 Migrating ${wf.name} (${wf.id})...`)
const state = wf.state as WorkflowState
if (!state || !state.blocks) {
console.log(`⚠️ Skipping ${wf.name} - invalid state format`)
skippedCount++
continue
}
// Clean up invalid blocks (those without an id field) before migration
const originalBlockCount = Object.keys(state.blocks).length
const validBlocks: Record<string, any> = {}
let removedBlockCount = 0
for (const [blockKey, block] of Object.entries(state.blocks)) {
if (block && typeof block === 'object' && block.id) {
// Valid block - has an id field
validBlocks[blockKey] = block
} else {
// Invalid block - missing id field
console.log(` 🗑️ Removing invalid block ${blockKey} (no id field)`)
removedBlockCount++
}
}
if (removedBlockCount > 0) {
console.log(
` 🧹 Cleaned up ${removedBlockCount} invalid blocks (${originalBlockCount} → ${Object.keys(validBlocks).length})`
)
state.blocks = validBlocks
}
await db.transaction(async (tx) => {
// Migrate blocks - generate new IDs and create mapping
const blocks = Object.values(state.blocks)
console.log(` 📦 Migrating ${blocks.length} blocks...`)
// Create mapping from old block IDs to new block IDs
const blockIdMapping: Record<string, string> = {}
for (const block of blocks) {
const newBlockId = nanoid()
blockIdMapping[block.id] = newBlockId
await tx.insert(workflowBlocks).values({
id: newBlockId,
workflowId: wf.id,
type: block.type,
name: block.name,
positionX: String(block.position?.x || 0),
positionY: String(block.position?.y || 0),
enabled: block.enabled ?? true,
horizontalHandles: block.horizontalHandles ?? true,
isWide: block.isWide ?? false,
advancedMode: block.advancedMode ?? false,
triggerMode: block.triggerMode ?? false,
height: String(block.height || 0),
subBlocks: block.subBlocks || {},
outputs: block.outputs || {},
data: block.data || {},
parentId: block.data?.parentId ? blockIdMapping[block.data.parentId] || null : null,
})
}
// Migrate edges - use new block IDs
const edges = state.edges || []
console.log(` 🔗 Migrating ${edges.length} edges...`)
for (const edge of edges) {
const newSourceId = blockIdMapping[edge.source]
const newTargetId = blockIdMapping[edge.target]
// Skip edges that reference blocks that don't exist in our mapping
if (!newSourceId || !newTargetId) {
console.log(` ⚠️ Skipping edge ${edge.id} - references missing blocks`)
continue
}
await tx.insert(workflowEdges).values({
id: nanoid(),
workflowId: wf.id,
sourceBlockId: newSourceId,
targetBlockId: newTargetId,
sourceHandle: edge.sourceHandle || null,
targetHandle: edge.targetHandle || null,
})
}
// Migrate loops - update node IDs to use new block IDs
const loops = state.loops || {}
const loopIds = Object.keys(loops)
console.log(` 🔄 Migrating ${loopIds.length} loops...`)
for (const loopId of loopIds) {
const loop = loops[loopId]
// Map old node IDs to new block IDs
const updatedNodes = (loop.nodes || [])
.map((nodeId: string) => blockIdMapping[nodeId])
.filter(Boolean)
await tx.insert(workflowSubflows).values({
id: nanoid(),
workflowId: wf.id,
type: 'loop',
config: {
id: loop.id,
nodes: updatedNodes,
iterationCount: loop.iterations || 5,
iterationType: loop.loopType || 'for',
collection: loop.forEachItems || '',
},
})
}
// Migrate parallels - update node IDs to use new block IDs
const parallels = state.parallels || {}
const parallelIds = Object.keys(parallels)
console.log(` ⚡ Migrating ${parallelIds.length} parallels...`)
for (const parallelId of parallelIds) {
const parallel = parallels[parallelId]
// Map old node IDs to new block IDs
const updatedNodes = (parallel.nodes || [])
.map((nodeId: string) => blockIdMapping[nodeId])
.filter(Boolean)
await tx.insert(workflowSubflows).values({
id: nanoid(),
workflowId: wf.id,
type: 'parallel',
config: {
id: parallel.id,
nodes: updatedNodes,
parallelCount: 2, // Default parallel count
collection: parallel.distribution || '',
},
})
}
})
console.log(`✅ Successfully migrated ${wf.name}`)
migratedCount++
} catch (error) {
console.error(`❌ Error migrating ${wf.name} (${wf.id}):`, error)
errorCount++
}
}
console.log('')
console.log('📊 Migration Summary:')
console.log(`✅ Migrated: ${migratedCount} workflows`)
console.log(`⏭️ Skipped: ${skippedCount} workflows`)
console.log(`❌ Errors: ${errorCount} workflows`)
console.log('')
if (migratedCount > 0) {
console.log('🎉 Migration completed successfully!')
console.log('')
console.log('📋 Next steps:')
console.log('1. Test the migrated workflows in your browser')
console.log('2. Verify all blocks, edges, and subflows work correctly')
console.log('3. Check that editing and collaboration still work')
console.log('4. Once confirmed, the workflow.state JSON field can be deprecated')
}
} catch (error) {
console.error('❌ Migration failed:', error)
process.exit(1)
}
}
// Add command line argument parsing
const args = process.argv.slice(2)
const dryRun = args.includes('--dry-run')
const showHelp = args.includes('--help') || args.includes('-h')
if (showHelp) {
console.log('🔄 Workflow State Migration Script')
console.log('')
console.log('Usage:')
console.log(' bun run scripts/migrate-workflow-states.ts [options]')
console.log('')
console.log('Options:')
console.log(' --dry-run Show what would be migrated without making changes')
console.log(' --file <path> Migrate only workflow IDs listed in file (comma-separated)')
console.log(' --help, -h Show this help message')
console.log('')
console.log('Examples:')
console.log(' bun run scripts/migrate-workflow-states.ts')
console.log(' bun run scripts/migrate-workflow-states.ts --dry-run')
console.log(' bun run scripts/migrate-workflow-states.ts --file workflow-ids.txt')
console.log(' bun run scripts/migrate-workflow-states.ts --dry-run --file workflow-ids.txt')
console.log('')
console.log('File format (workflow-ids.txt):')
console.log(' abc-123,def-456,ghi-789')
console.log('')
process.exit(0)
}
// Parse --file flag for workflow IDs
let specificWorkflowIds: string[] | null = null
const fileIndex = args.findIndex((arg) => arg === '--file')
if (fileIndex !== -1 && args[fileIndex + 1]) {
const filePath = args[fileIndex + 1]
try {
console.log(`📁 Reading workflow IDs from file: ${filePath}`)
const fileContent = readFileSync(filePath, 'utf-8')
specificWorkflowIds = fileContent
.split(',')
.map((id) => id.trim())
.filter((id) => id.length > 0)
console.log(`📋 Found ${specificWorkflowIds.length} workflow IDs in file`)
console.log('')
} catch (error) {
console.error(`❌ Error reading file ${filePath}:`, error)
process.exit(1)
}
}
if (dryRun) {
console.log('🔍 DRY RUN MODE - No changes will be made')
console.log('')
}
if (specificWorkflowIds) {
console.log('🎯 TARGETED MIGRATION - Only migrating specified workflow IDs')
console.log('')
}
migrateWorkflowStates(specificWorkflowIds)

View File

@@ -125,15 +125,11 @@ export async function getWorkflowState(workflowId: string) {
if (normalizedData) {
// Use normalized data as source of truth
const existingState = workflowData[0].state || {}
const finalState = {
// Default values for expected properties
deploymentStatuses: {},
hasActiveWebhook: false,
// Preserve any existing state properties
...existingState,
// Override with normalized data (this takes precedence)
// Data from normalized tables
blocks: normalizedData.blocks,
edges: normalizedData.edges,
loops: normalizedData.loops,
@@ -494,11 +490,25 @@ async function handleBlockOperationTx(
throw new Error('Missing block ID for update parent operation')
}
// Fetch current parent to update subflow node list when detaching or reparenting
const [existing] = await tx
.select({
id: workflowBlocks.id,
parentId: workflowBlocks.parentId,
})
.from(workflowBlocks)
.where(and(eq(workflowBlocks.id, payload.id), eq(workflowBlocks.workflowId, workflowId)))
.limit(1)
const isRemovingFromParent = !payload.parentId
const updateResult = await tx
.update(workflowBlocks)
.set({
parentId: payload.parentId || null,
extent: payload.extent || null,
parentId: isRemovingFromParent ? null : payload.parentId || null,
extent: isRemovingFromParent ? null : payload.extent || null,
// When removing from a subflow, also clear data JSON entirely
...(isRemovingFromParent ? { data: {} } : {}),
updatedAt: new Date(),
})
.where(and(eq(workflowBlocks.id, payload.id), eq(workflowBlocks.workflowId, workflowId)))
@@ -508,13 +518,19 @@ async function handleBlockOperationTx(
throw new Error(`Block ${payload.id} not found in workflow ${workflowId}`)
}
// If the block now has a parent, update the parent's subflow node list
// If the block now has a parent, update the new parent's subflow node list
if (payload.parentId) {
await updateSubflowNodeList(tx, workflowId, payload.parentId)
}
// If the block had a previous parent, update that parent's node list as well
if (existing?.parentId && existing.parentId !== payload.parentId) {
await updateSubflowNodeList(tx, workflowId, existing.parentId)
}
logger.debug(
`Updated block parent: ${payload.id} -> parent: ${payload.parentId}, extent: ${payload.extent}`
`Updated block parent: ${payload.id} -> parent: ${payload.parentId || 'null'}, extent: ${payload.extent || 'null'}${
isRemovingFromParent ? ' (cleared data JSON)' : ''
}`
)
break
}
@@ -811,7 +827,7 @@ async function handleSubflowOperationTx(
collection: payload.config.forEachItems,
width: 500,
height: 300,
type: 'loopNode',
type: 'subflowNode',
},
updatedAt: new Date(),
})
@@ -822,7 +838,7 @@ async function handleSubflowOperationTx(
...payload.config,
width: 500,
height: 300,
type: 'parallelNode',
type: 'subflowNode',
}
// Include count if provided

View File

@@ -1814,7 +1814,7 @@ async function* parseSSEStream(
const COPILOT_AUTH_REQUIRED_MESSAGE =
'*Authorization failed. An API key must be configured in order to use the copilot. You can configure an API key at [sim.ai](https://sim.ai).*'
const COPILOT_USAGE_EXCEEDED_MESSAGE =
'*Usage limit exceeded, please upgrade your plan at [sim.ai](https://sim.ai) to continue using the copilot*'
'*Usage limit exceeded, please upgrade your plan or top up credits at [sim.ai](https://sim.ai) to continue using the copilot*'
/**
* Copilot store using the new unified API

View File

@@ -261,8 +261,6 @@ export const useFilterStore = create<FilterState>((set, get) => ({
buildQueryParams: (page: number, limit: number) => {
const { workspaceId, timeRange, level, workflowIds, folderIds, searchQuery, triggers } = get()
const params = new URLSearchParams()
params.set('includeWorkflow', 'true')
params.set('limit', limit.toString())
params.set('offset', ((page - 1) * limit).toString())

View File

@@ -71,9 +71,8 @@ export interface TraceSpan {
export interface WorkflowLog {
id: string
workflowId: string
executionId: string | null
executionId?: string | null
level: string
message: string
duration: string | null
trigger: string | null
createdAt: string
@@ -90,10 +89,10 @@ export interface WorkflowLog {
storageProvider?: 's3' | 'blob' | 'local'
bucketName?: string
}>
metadata?: ToolCallMetadata & {
cost?: CostMetadata
executionData?: ToolCallMetadata & {
traceSpans?: TraceSpan[]
totalDuration?: number
cost?: CostMetadata
blockInput?: Record<string, any>
enhanced?: boolean

View File

@@ -2,6 +2,7 @@ import { create } from 'zustand'
import { devtools } from 'zustand/middleware'
import { createLogger } from '@/lib/logs/console/logger'
import { type DiffAnalysis, WorkflowDiffEngine } from '@/lib/workflows/diff'
import { Serializer } from '@/serializer'
import { useWorkflowRegistry } from '../workflows/registry/store'
import { useSubBlockStore } from '../workflows/subblock/store'
import { useWorkflowStore } from '../workflows/workflow/store'
@@ -47,6 +48,8 @@ interface WorkflowDiffState {
source: string
timestamp: number
} | null
// Store validation error when proposed diff is invalid for the canvas
diffError?: string | null
// PERFORMANCE OPTIMIZATION: Cache frequently accessed computed values
_cachedDisplayState?: WorkflowState
_lastDisplayStateHash?: string
@@ -105,6 +108,7 @@ export const useWorkflowDiffStore = create<WorkflowDiffState & WorkflowDiffActio
diffWorkflow: null,
diffAnalysis: null,
diffMetadata: null,
diffError: null,
_cachedDisplayState: undefined,
_lastDisplayStateHash: undefined,
@@ -112,7 +116,7 @@ export const useWorkflowDiffStore = create<WorkflowDiffState & WorkflowDiffActio
setProposedChanges: async (yamlContent: string, diffAnalysis?: DiffAnalysis) => {
// PERFORMANCE OPTIMIZATION: Immediate state update to prevent UI flicker
batchedUpdate({ isDiffReady: false })
batchedUpdate({ isDiffReady: false, diffError: null })
// Clear any existing diff state to ensure a fresh start
diffEngine.clearDiff()
@@ -120,6 +124,28 @@ export const useWorkflowDiffStore = create<WorkflowDiffState & WorkflowDiffActio
const result = await diffEngine.createDiffFromYaml(yamlContent, diffAnalysis)
if (result.success && result.diff) {
// Validate proposed workflow using serializer round-trip to catch canvas-breaking issues
try {
const proposed = result.diff.proposedState
const serializer = new Serializer()
const serialized = serializer.serializeWorkflow(
proposed.blocks,
proposed.edges,
proposed.loops,
proposed.parallels,
false // do not enforce user-only required params at diff time
)
// Ensure we can deserialize back without errors
serializer.deserializeWorkflow(serialized)
} catch (e: any) {
const message =
e instanceof Error ? e.message : 'Invalid workflow in proposed changes'
logger.error('[DiffStore] Diff validation failed:', { message, error: e })
// Do not mark ready; store error and keep diff hidden
batchedUpdate({ isDiffReady: false, diffError: message, isShowingDiff: false })
return
}
// PERFORMANCE OPTIMIZATION: Log diff analysis efficiently
if (result.diff.diffAnalysis) {
const analysis = result.diff.diffAnalysis
@@ -138,6 +164,7 @@ export const useWorkflowDiffStore = create<WorkflowDiffState & WorkflowDiffActio
diffWorkflow: result.diff.proposedState,
diffAnalysis: result.diff.diffAnalysis || null,
diffMetadata: result.diff.metadata,
diffError: null,
_cachedDisplayState: undefined, // Clear cache
_lastDisplayStateHash: undefined,
})
@@ -145,7 +172,10 @@ export const useWorkflowDiffStore = create<WorkflowDiffState & WorkflowDiffActio
logger.info('Diff created successfully')
} else {
logger.error('Failed to create diff:', result.errors)
batchedUpdate({ isDiffReady: false })
batchedUpdate({
isDiffReady: false,
diffError: result.errors?.join(', ') || 'Failed to create diff',
})
throw new Error(result.errors?.join(', ') || 'Failed to create diff')
}
},
@@ -154,11 +184,31 @@ export const useWorkflowDiffStore = create<WorkflowDiffState & WorkflowDiffActio
logger.info('Merging proposed changes via YAML')
// First, set isDiffReady to false to prevent premature rendering
batchedUpdate({ isDiffReady: false })
batchedUpdate({ isDiffReady: false, diffError: null })
const result = await diffEngine.mergeDiffFromYaml(yamlContent, diffAnalysis)
if (result.success && result.diff) {
// Validate proposed workflow using serializer round-trip to catch canvas-breaking issues
try {
const proposed = result.diff.proposedState
const serializer = new Serializer()
const serialized = serializer.serializeWorkflow(
proposed.blocks,
proposed.edges,
proposed.loops,
proposed.parallels,
false
)
serializer.deserializeWorkflow(serialized)
} catch (e: any) {
const message =
e instanceof Error ? e.message : 'Invalid workflow in proposed changes'
logger.error('[DiffStore] Diff validation failed on merge:', { message, error: e })
batchedUpdate({ isDiffReady: false, diffError: message, isShowingDiff: false })
return
}
// Set all state at once, with isDiffReady true
batchedUpdate({
isShowingDiff: true,
@@ -166,12 +216,16 @@ export const useWorkflowDiffStore = create<WorkflowDiffState & WorkflowDiffActio
diffWorkflow: result.diff.proposedState,
diffAnalysis: result.diff.diffAnalysis || null,
diffMetadata: result.diff.metadata,
diffError: null,
})
logger.info('Diff merged successfully')
} else {
logger.error('Failed to merge diff:', result.errors)
// Reset isDiffReady on failure
batchedUpdate({ isDiffReady: false })
batchedUpdate({
isDiffReady: false,
diffError: result.errors?.join(', ') || 'Failed to merge diff',
})
throw new Error(result.errors?.join(', ') || 'Failed to merge diff')
}
},
@@ -185,6 +239,7 @@ export const useWorkflowDiffStore = create<WorkflowDiffState & WorkflowDiffActio
diffWorkflow: null,
diffAnalysis: null,
diffMetadata: null,
diffError: null,
})
},
@@ -248,48 +303,49 @@ export const useWorkflowDiffStore = create<WorkflowDiffState & WorkflowDiffActio
logger.info('Successfully applied diff workflow to main store')
// Persist to database
try {
logger.info('Persisting accepted diff changes to database')
const response = await fetch(`/api/workflows/${activeWorkflowId}/state`, {
method: 'PUT',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({
...cleanState,
lastSaved: Date.now(),
}),
})
if (!response.ok) {
const errorData = await response.json()
logger.error('Failed to persist accepted diff to database:', errorData)
throw new Error(errorData.error || `Failed to save: ${response.statusText}`)
}
const result = await response.json()
logger.info('Successfully persisted accepted diff to database', {
blocksCount: result.blocksCount,
edgesCount: result.edgesCount,
})
} catch (persistError) {
logger.error('Failed to persist accepted diff to database:', persistError)
// Don't throw here - the store is already updated, so the UI is correct
logger.warn('Diff was applied to local stores but not persisted to database')
}
// Clear the diff
// Optimistically clear the diff immediately so UI updates instantly
get().clearDiff()
// Update copilot tool call state to 'accepted'
try {
const { useCopilotStore } = await import('@/stores/copilot/store')
useCopilotStore.getState().updatePreviewToolCallState('accepted')
} catch (error) {
logger.warn('Failed to update copilot tool call state after accept:', error)
}
// Fire-and-forget: persist to database and update copilot state in the background
;(async () => {
try {
logger.info('Persisting accepted diff changes to database')
const response = await fetch(`/api/workflows/${activeWorkflowId}/state`, {
method: 'PUT',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({
...cleanState,
lastSaved: Date.now(),
}),
})
if (!response.ok) {
const errorData = await response.json().catch(() => ({}))
logger.error('Failed to persist accepted diff to database:', errorData)
} else {
const result = await response.json().catch(() => ({}))
logger.info('Successfully persisted accepted diff to database', {
blocksCount: (result as any)?.blocksCount,
edgesCount: (result as any)?.edgesCount,
})
}
} catch (persistError) {
logger.error('Failed to persist accepted diff to database:', persistError)
logger.warn('Diff was applied to local stores but not persisted to database')
}
// Update copilot tool call state to 'accepted'
try {
const { useCopilotStore } = await import('@/stores/copilot/store')
useCopilotStore.getState().updatePreviewToolCallState('accepted')
} catch (error) {
logger.warn('Failed to update copilot tool call state after accept:', error)
}
})()
} catch (error) {
logger.error('Failed to accept changes:', error)
throw error
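Earlier in this store diff a diffError field was added; a hypothetical consumer sketch (not in this diff) for surfacing validation failures, with the store import path assumed:

// Hypothetical consumer (not in this diff) of the new diffError state; the import path
// is assumed. getState() usage mirrors the copilot store calls above.
import { useWorkflowDiffStore } from '@/stores/workflow-diff/store'

export function getDiffErrorMessage(): string | null {
  const { diffError, isDiffReady } = useWorkflowDiffStore.getState()
  return !isDiffReady && diffError ? diffError : null
}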

View File

@@ -1212,10 +1212,6 @@ export const useWorkflowRegistry = create<WorkflowRegistry>()(
}))
}
// Set as active workflow and update store
set({ activeWorkflowId: id })
useWorkflowStore.setState(newState)
// Workflow has already been persisted to the database via the duplication endpoint
logger.info(

View File

@@ -264,19 +264,16 @@ export const useWorkflowStore = create<WorkflowStoreWithHistory>()(
const absolutePosition = { ...block.position }
// Handle empty or null parentId (removing from parent)
// On removal, clear the data JSON entirely per normalized DB contract
const newData = !parentId
? { ...block.data } // Remove parentId and extent if empty
? {}
: {
...block.data,
parentId,
extent,
}
// Remove parentId and extent properties for empty parent ID
if (!parentId && newData.parentId) {
newData.parentId = undefined
newData.extent = undefined
}
// For removal we already set data to {}; for setting a parent keep as-is
const newState = {
blocks: {

View File

@@ -338,7 +338,7 @@ export function convertYamlToWorkflow(yamlWorkflow: YamlWorkflow): ImportResult
importedBlock.data = {
width: 500,
height: 300,
type: yamlBlock.type === 'loop' ? 'loopNode' : 'parallelNode',
type: 'subflowNode',
// Map YAML inputs to data properties for loop/parallel blocks
...(yamlBlock.inputs || {}),
}