Compare commits


30 Commits

Author SHA1 Message Date
Waleed Latif
d75cc1ed84 v0.3.30: duplication, control bar fixes 2025-08-18 08:57:26 -07:00
Waleed Latif
5a8a703ecb fix(duplicate): fixed detached state on duplication (#1011) 2025-08-18 08:51:18 -07:00
Waleed Latif
6f64188b8d fix(control-bar): fix icons styling in disabled state (#1010) 2025-08-18 08:22:06 -07:00
Vikhyath Mondreti
60a9a25553 Merge pull request #1009 from simstudioai/staging
update migration file for notekeeping purpose
2025-08-18 01:59:02 -07:00
Vikhyath Mondreti
52fa388f81 update migration file for notekeeping purpose 2025-08-18 01:56:34 -07:00
Vikhyath Mondreti
5c56cbd558 Merge pull request #1008 from simstudioai/staging
reduce batch size to prevent timeouts
2025-08-18 01:11:49 -07:00
Vikhyath Mondreti
dc19525a6f reduce batch size to prevent timeouts 2025-08-18 01:10:47 -07:00
Vikhyath Mondreti
3873f44875 Merge pull request #1007 from simstudioai/staging
syntax issue in migration
2025-08-18 00:59:53 -07:00
Vikhyath Mondreti
09b95f41ea syntax issue in migration 2025-08-18 00:58:09 -07:00
Vikhyath Mondreti
af60ccd188 fix: migration mem issues bypass
fix: migration mem issues bypass
2025-08-18 00:50:20 -07:00
Vikhyath Mondreti
eb75afd115 make logs migration batched to prevent mem issues (#1005) 2025-08-18 00:42:38 -07:00
Waleed Latif
fdb8256468 fix(subflow): remove all edges when removing a block from a subflow (#1003) 2025-08-18 00:21:26 -07:00
Vikhyath Mondreti
570c07bf2a Merge pull request #1004 from simstudioai/staging
v0.3.29: copilot fixes, remove block from subflow, code cleanups
2025-08-18 00:18:44 -07:00
Adam Gough
5c16e7d390 fix(subflow): add ability to remove block from subflow and refactor to consolidate subflow code (#983)
* added logic to remove blocks from subflows

* refactored logic into just subflow-node

* bun run lint

* added subflow test

* added a safety check for data.parentId

* added state update logic

* bun run lint

* removed old logic

* removed any

* added tests

* added type safety

* removed test script

* type safety

---------

Co-authored-by: Adam Gough <adamgough@Mac.attlocal.net>
Co-authored-by: waleedlatif1 <walif6@gmail.com>
2025-08-17 22:25:31 -07:00
Waleed Latif
bd38062705 fix(workflow-error): allow users to delete workflows with invalid configs/state (#1000)
* fix(workflow-error): allow users to delete workflows with invalid configs/state

* cleanup
2025-08-17 22:23:41 -07:00
Siddharth Ganesan
d7fd4a9618 feat(copilot): diff improvements (#1002)
* Fix abort

* Cred updates

* Updates

* Fix sheet id showing up in diff view

* Update diff view

* Text overflow

* Optimistic accept

* Serialization catching

* Depth 0 fix

* Fix icons

* Updates

* Lint
2025-08-16 15:09:48 -07:00
Vikhyath Mondreti
d972bab206 fix(logs-sidebar): remove message and fix race condition for quickly switching b/w logs (#1001) 2025-08-16 15:05:39 -07:00
Vikhyath Mondreti
f254d70624 improvement(logs): cleanup code (#999) 2025-08-16 13:44:00 -07:00
Waleed Latif
8748e1d5f9 improvement(db): remove deprecated 'state' column from workflow table (#994)
* improvement(db): remove deprecated  column from workflow table

* removed extraneous logs

* update sockets envvar
2025-08-16 13:04:49 -07:00
Siddharth Ganesan
133a32e6d3 Fix abort (#998) 2025-08-16 11:10:09 -07:00
Waleed Latif
97b6bcc43d v0.3.28: autolayout, export, copilot, kb ui improvements 2025-08-16 09:12:17 -07:00
Waleed Latif
42917ce641 fix(agent): stringify input into user prompt for agent (#984) 2025-08-15 19:36:49 -07:00
Waleed Latif
5f6d219223 fix(kb-ui): fixed upload files modal ui, processing ui to match the rest of the kb (#991)
* fix(kb-ui): fixed upload files modal, processing ui to match the rest of the kb

* more ui fixes

* ack PR comments

* fix help modal
2025-08-15 19:35:50 -07:00
Siddharth Ganesan
bab74307f4 fix(ishosted): make ishosted true on staging (#993)
* Add staging to ishosted

* www
2025-08-15 18:36:32 -07:00
Siddharth Ganesan
16aaa37dad improvement(agent): enable autolayout, export, copilot (#992)
* Enable autolayout, export, and copilot in dev

* Updates
2025-08-15 18:29:34 -07:00
Siddharth Ganesan
c6166a9483 feat(copilot): generate agent api key (#989)
* Add skeleton copilot to settings modal and add migration for copilot api keys

* Add hash index on encrypted key

* Security 1

* Remove sim agent api key

* Fix api key stuff

* Auth

* Status code handling

* Update env key

* Copilot api key ui

* Update copilot costs

* Add copilot stats

* Lint

* Remove logs

* Remove migrations

* Remove another migration

* Updates

* Hide if hosted

* Fix test

* Lint

* Lint

* Fixes

* Lint

---------

Co-authored-by: Waleed Latif <walif6@gmail.com>
Co-authored-by: Vikhyath Mondreti <vikhyathvikku@gmail.com>
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
Co-authored-by: Vikhyath Mondreti <vikhyath@simstudio.ai>
Co-authored-by: Adam Gough <77861281+aadamgough@users.noreply.github.com>
Co-authored-by: Adam Gough <adamgough@Mac.attlocal.net>
2025-08-15 18:05:54 -07:00
Waleed Latif
0258a1b4ce fix(loading): fix workflow detached on first load (#987) 2025-08-15 17:26:47 -07:00
Vikhyath Mondreti
4d4aefa346 fix(envvar): clear separation between server-side and client-side billing envvar (#988) 2025-08-15 16:41:02 -07:00
Vikhyath Mondreti
a0cf003abf Merge pull request #986 from simstudioai/staging
attempt to fix build issues (#985)
2025-08-15 15:22:26 -07:00
Vikhyath Mondreti
2e027dd77d attempt to fix build issues (#985) 2025-08-15 15:21:34 -07:00
104 changed files with 21119 additions and 3018 deletions

View File

@@ -3,8 +3,7 @@ import { eq, sql } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalApiKey } from '@/lib/copilot/utils'
import { env } from '@/lib/env'
import { isBillingEnabled, isProd } from '@/lib/environment'
import { isBillingEnabled } from '@/lib/environment'
import { createLogger } from '@/lib/logs/console/logger'
import { db } from '@/db'
import { userStats } from '@/db/schema'
@@ -17,6 +16,7 @@ const UpdateCostSchema = z.object({
input: z.number().min(0, 'Input tokens must be a non-negative number'),
output: z.number().min(0, 'Output tokens must be a non-negative number'),
model: z.string().min(1, 'Model is required'),
multiplier: z.number().min(0),
})
/**
@@ -75,27 +75,27 @@ export async function POST(req: NextRequest) {
)
}
const { userId, input, output, model } = validation.data
const { userId, input, output, model, multiplier } = validation.data
logger.info(`[${requestId}] Processing cost update`, {
userId,
input,
output,
model,
multiplier,
})
const finalPromptTokens = input
const finalCompletionTokens = output
const totalTokens = input + output
// Calculate cost using COPILOT_COST_MULTIPLIER (only in production, like normal executions)
const copilotMultiplier = isProd ? env.COPILOT_COST_MULTIPLIER || 1 : 1
// Calculate cost using provided multiplier (required)
const costResult = calculateCost(
model,
finalPromptTokens,
finalCompletionTokens,
false,
copilotMultiplier
multiplier
)
logger.info(`[${requestId}] Cost calculation result`, {
@@ -104,7 +104,7 @@ export async function POST(req: NextRequest) {
promptTokens: finalPromptTokens,
completionTokens: finalCompletionTokens,
totalTokens: totalTokens,
copilotMultiplier,
multiplier,
costResult,
})
@@ -127,6 +127,10 @@ export async function POST(req: NextRequest) {
totalTokensUsed: totalTokens,
totalCost: costToStore.toString(),
currentPeriodCost: costToStore.toString(),
// Copilot usage tracking
totalCopilotCost: costToStore.toString(),
totalCopilotTokens: totalTokens,
totalCopilotCalls: 1,
lastActive: new Date(),
})
@@ -141,6 +145,10 @@ export async function POST(req: NextRequest) {
totalTokensUsed: sql`total_tokens_used + ${totalTokens}`,
totalCost: sql`total_cost + ${costToStore}`,
currentPeriodCost: sql`current_period_cost + ${costToStore}`,
// Copilot usage tracking increments
totalCopilotCost: sql`total_copilot_cost + ${costToStore}`,
totalCopilotTokens: sql`total_copilot_tokens + ${totalTokens}`,
totalCopilotCalls: sql`total_copilot_calls + 1`,
totalApiCalls: sql`total_api_calls`,
lastActive: new Date(),
}
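
The hunk above replaces the server-side COPILOT_COST_MULTIPLIER lookup (previously applied only in production) with a required multiplier field on the request body. A minimal sketch of the updated payload, assuming hypothetical values and a placeholder path, since the route's URL is not visible in this diff:

// Sketch only: field names come from UpdateCostSchema above; the path and values are placeholders.
const body = {
  userId: 'user_123',   // required by the handler's validation.data destructure
  input: 1200,          // prompt tokens, non-negative
  output: 450,          // completion tokens, non-negative
  model: 'gpt-4o',      // hypothetical model identifier
  multiplier: 1.5,      // now required; formerly env.COPILOT_COST_MULTIPLIER, applied only in prod
}

await fetch('/api/copilot/cost', {  // placeholder path
  method: 'POST',
  headers: { 'Content-Type': 'application/json' }, // the route also verifies an internal key via checkInternalApiKey
  body: JSON.stringify(body),
})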

View File

@@ -0,0 +1,70 @@
import { createCipheriv, createHash, createHmac, randomBytes } from 'crypto'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { generateApiKey } from '@/lib/utils'
import { db } from '@/db'
import { copilotApiKeys } from '@/db/schema'
const logger = createLogger('CopilotApiKeysGenerate')
function deriveKey(keyString: string): Buffer {
return createHash('sha256').update(keyString, 'utf8').digest()
}
function encryptRandomIv(plaintext: string, keyString: string): string {
const key = deriveKey(keyString)
const iv = randomBytes(16)
const cipher = createCipheriv('aes-256-gcm', key, iv)
let encrypted = cipher.update(plaintext, 'utf8', 'hex')
encrypted += cipher.final('hex')
const authTag = cipher.getAuthTag().toString('hex')
return `${iv.toString('hex')}:${encrypted}:${authTag}`
}
function computeLookup(plaintext: string, keyString: string): string {
// Deterministic, constant-time comparable MAC: HMAC-SHA256(DB_KEY, plaintext)
return createHmac('sha256', Buffer.from(keyString, 'utf8'))
.update(plaintext, 'utf8')
.digest('hex')
}
export async function POST(req: NextRequest) {
try {
const session = await getSession()
if (!session?.user?.id) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
if (!env.AGENT_API_DB_ENCRYPTION_KEY) {
logger.error('AGENT_API_DB_ENCRYPTION_KEY is not set')
return NextResponse.json({ error: 'Server not configured' }, { status: 500 })
}
const userId = session.user.id
// Generate and prefix the key (strip the generic sim_ prefix from the random part)
const rawKey = generateApiKey().replace(/^sim_/, '')
const plaintextKey = `sk-sim-copilot-${rawKey}`
// Encrypt with random IV for confidentiality
const dbEncrypted = encryptRandomIv(plaintextKey, env.AGENT_API_DB_ENCRYPTION_KEY)
// Compute deterministic lookup value for O(1) search
const lookup = computeLookup(plaintextKey, env.AGENT_API_DB_ENCRYPTION_KEY)
const [inserted] = await db
.insert(copilotApiKeys)
.values({ userId, apiKeyEncrypted: dbEncrypted, apiKeyLookup: lookup })
.returning({ id: copilotApiKeys.id })
return NextResponse.json(
{ success: true, key: { id: inserted.id, apiKey: plaintextKey } },
{ status: 201 }
)
} catch (error) {
logger.error('Failed to generate copilot API key', { error })
return NextResponse.json({ error: 'Failed to generate copilot API key' }, { status: 500 })
}
}
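
This route derives an AES-256-GCM key from AGENT_API_DB_ENCRYPTION_KEY, encrypts the plaintext key with a random IV into an iv:ciphertext:authTag string, and stores a deterministic HMAC-SHA256 lookup for O(1) search. A self-contained round-trip sketch of that format (the matching decrypt helper appears in the next file); the secret and key values are placeholders:

import { createCipheriv, createDecipheriv, createHash, createHmac, randomBytes } from 'crypto'

// Same derivation as deriveKey above: SHA-256 of the secret yields a 32-byte AES key.
const deriveKey = (secret: string) => createHash('sha256').update(secret, 'utf8').digest()

function encrypt(plaintext: string, secret: string): string {
  const iv = randomBytes(16)
  const cipher = createCipheriv('aes-256-gcm', deriveKey(secret), iv)
  const encrypted = cipher.update(plaintext, 'utf8', 'hex') + cipher.final('hex')
  return `${iv.toString('hex')}:${encrypted}:${cipher.getAuthTag().toString('hex')}`
}

function decrypt(value: string, secret: string): string {
  const [ivHex, encryptedHex, authTagHex] = value.split(':')
  if (!ivHex || !encryptedHex || !authTagHex) throw new Error('Invalid encrypted format')
  const decipher = createDecipheriv('aes-256-gcm', deriveKey(secret), Buffer.from(ivHex, 'hex'))
  decipher.setAuthTag(Buffer.from(authTagHex, 'hex'))
  return decipher.update(encryptedHex, 'hex', 'utf8') + decipher.final('utf8')
}

// Deterministic lookup, as in computeLookup above: equal keys always map to the same MAC.
const lookup = (plaintext: string, secret: string) =>
  createHmac('sha256', Buffer.from(secret, 'utf8')).update(plaintext, 'utf8').digest('hex')

const secret = 'example-db-encryption-key' // stands in for env.AGENT_API_DB_ENCRYPTION_KEY
const apiKey = 'sk-sim-copilot-abc123'     // placeholder key shape
console.log(decrypt(encrypt(apiKey, secret), secret) === apiKey) // true: round trip succeeds
console.log(lookup(apiKey, secret) === lookup(apiKey, secret))   // true: lookup is deterministic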

View File

@@ -0,0 +1,85 @@
import { createDecipheriv, createHash } from 'crypto'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { db } from '@/db'
import { copilotApiKeys } from '@/db/schema'
const logger = createLogger('CopilotApiKeys')
function deriveKey(keyString: string): Buffer {
return createHash('sha256').update(keyString, 'utf8').digest()
}
function decryptWithKey(encryptedValue: string, keyString: string): string {
const parts = encryptedValue.split(':')
if (parts.length !== 3) {
throw new Error('Invalid encrypted value format')
}
const [ivHex, encryptedHex, authTagHex] = parts
const key = deriveKey(keyString)
const iv = Buffer.from(ivHex, 'hex')
const decipher = createDecipheriv('aes-256-gcm', key, iv)
decipher.setAuthTag(Buffer.from(authTagHex, 'hex'))
let decrypted = decipher.update(encryptedHex, 'hex', 'utf8')
decrypted += decipher.final('utf8')
return decrypted
}
export async function GET(request: NextRequest) {
try {
const session = await getSession()
if (!session?.user?.id) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
if (!env.AGENT_API_DB_ENCRYPTION_KEY) {
logger.error('AGENT_API_DB_ENCRYPTION_KEY is not set')
return NextResponse.json({ error: 'Server not configured' }, { status: 500 })
}
const userId = session.user.id
const rows = await db
.select({ id: copilotApiKeys.id, apiKeyEncrypted: copilotApiKeys.apiKeyEncrypted })
.from(copilotApiKeys)
.where(eq(copilotApiKeys.userId, userId))
const keys = rows.map((row) => ({
id: row.id,
apiKey: decryptWithKey(row.apiKeyEncrypted, env.AGENT_API_DB_ENCRYPTION_KEY as string),
}))
return NextResponse.json({ keys }, { status: 200 })
} catch (error) {
logger.error('Failed to get copilot API keys', { error })
return NextResponse.json({ error: 'Failed to get keys' }, { status: 500 })
}
}
export async function DELETE(request: NextRequest) {
try {
const session = await getSession()
if (!session?.user?.id) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const userId = session.user.id
const url = new URL(request.url)
const id = url.searchParams.get('id')
if (!id) {
return NextResponse.json({ error: 'id is required' }, { status: 400 })
}
await db
.delete(copilotApiKeys)
.where(and(eq(copilotApiKeys.userId, userId), eq(copilotApiKeys.id, id)))
return NextResponse.json({ success: true }, { status: 200 })
} catch (error) {
logger.error('Failed to delete copilot API key', { error })
return NextResponse.json({ error: 'Failed to delete key' }, { status: 500 })
}
}
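
A hedged client-side sketch of the list and delete handlers above; the mount path is not shown in this diff, so the URL is a placeholder. GET returns { keys: [{ id, apiKey }] } with decrypted keys, and DELETE takes the key id as an ?id= query parameter:

// Placeholder path; response shapes come from the GET/DELETE handlers above.
const BASE = '/api/copilot/api-keys'

async function listKeys(): Promise<Array<{ id: string; apiKey: string }>> {
  const res = await fetch(BASE)
  if (!res.ok) throw new Error(`Failed to list keys: ${res.status}`)
  const { keys } = await res.json()
  return keys
}

async function deleteKey(id: string): Promise<void> {
  const res = await fetch(`${BASE}?id=${encodeURIComponent(id)}`, { method: 'DELETE' })
  if (!res.ok) throw new Error(`Failed to delete key: ${res.status}`) // 400 if id is missing, 401 if unauthenticated
}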

View File

@@ -0,0 +1,79 @@
import { createHmac } from 'crypto'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { db } from '@/db'
import { copilotApiKeys, userStats } from '@/db/schema'
const logger = createLogger('CopilotApiKeysValidate')
function computeLookup(plaintext: string, keyString: string): string {
// Deterministic MAC: HMAC-SHA256(DB_KEY, plaintext)
return createHmac('sha256', Buffer.from(keyString, 'utf8'))
.update(plaintext, 'utf8')
.digest('hex')
}
export async function POST(req: NextRequest) {
try {
if (!env.AGENT_API_DB_ENCRYPTION_KEY) {
logger.error('AGENT_API_DB_ENCRYPTION_KEY is not set')
return NextResponse.json({ error: 'Server not configured' }, { status: 500 })
}
const body = await req.json().catch(() => null)
const apiKey = typeof body?.apiKey === 'string' ? body.apiKey : undefined
if (!apiKey) {
return new NextResponse(null, { status: 401 })
}
const lookup = computeLookup(apiKey, env.AGENT_API_DB_ENCRYPTION_KEY)
// Find matching API key and its user
const rows = await db
.select({ id: copilotApiKeys.id, userId: copilotApiKeys.userId })
.from(copilotApiKeys)
.where(eq(copilotApiKeys.apiKeyLookup, lookup))
.limit(1)
if (rows.length === 0) {
return new NextResponse(null, { status: 401 })
}
const { userId } = rows[0]
// Check usage for the associated user
const usage = await db
.select({
currentPeriodCost: userStats.currentPeriodCost,
totalCost: userStats.totalCost,
currentUsageLimit: userStats.currentUsageLimit,
})
.from(userStats)
.where(eq(userStats.userId, userId))
.limit(1)
if (usage.length > 0) {
const currentUsage = Number.parseFloat(
(usage[0].currentPeriodCost?.toString() as string) ||
(usage[0].totalCost as unknown as string) ||
'0'
)
const limit = Number.parseFloat((usage[0].currentUsageLimit as unknown as string) || '0')
if (!Number.isNaN(limit) && limit > 0 && currentUsage >= limit) {
// Usage exceeded
logger.info('[API VALIDATION] Usage exceeded', { userId, currentUsage, limit })
return new NextResponse(null, { status: 402 })
}
}
// Valid and within usage limits
return new NextResponse(null, { status: 200 })
} catch (error) {
logger.error('Error validating copilot API key', { error })
return NextResponse.json({ error: 'Failed to validate key' }, { status: 500 })
}
}
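
The validate handler replies with bare status codes: 200 when the HMAC lookup matches a stored key and usage is under the limit, 401 for a missing or unknown key, 402 when currentPeriodCost meets or exceeds currentUsageLimit, and 500 if the encryption key is unset. A minimal caller sketch with a placeholder path:

// Placeholder path; only the status-code semantics come from the handler above.
async function validateCopilotKey(apiKey: string): Promise<'valid' | 'invalid' | 'usage-exceeded'> {
  const res = await fetch('/api/copilot/api-keys/validate', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ apiKey }),
  })
  if (res.status === 200) return 'valid'
  if (res.status === 402) return 'usage-exceeded' // key is known but the user's usage limit is reached
  return 'invalid' // 401 for unknown keys; other statuses treated as invalid here
}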

View File

@@ -104,7 +104,7 @@ describe('Copilot Chat API Route', () => {
vi.doMock('@/lib/env', () => ({
env: {
SIM_AGENT_API_URL: 'http://localhost:8000',
SIM_AGENT_API_KEY: 'test-sim-agent-key',
COPILOT_API_KEY: 'test-sim-agent-key',
},
}))

View File

@@ -1,3 +1,4 @@
import { createCipheriv, createDecipheriv, createHash, randomBytes } from 'crypto'
import { and, desc, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
@@ -13,6 +14,7 @@ import { getCopilotModel } from '@/lib/copilot/config'
import { TITLE_GENERATION_SYSTEM_PROMPT, TITLE_GENERATION_USER_PROMPT } from '@/lib/copilot/prompts'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { SIM_AGENT_API_URL_DEFAULT } from '@/lib/sim-agent'
import { downloadFile } from '@/lib/uploads'
import { downloadFromS3WithConfig } from '@/lib/uploads/s3/s3-client'
import { S3_COPILOT_CONFIG, USE_S3_STORAGE } from '@/lib/uploads/setup'
@@ -23,6 +25,37 @@ import { createAnthropicFileContent, isSupportedFileType } from './file-utils'
const logger = createLogger('CopilotChatAPI')
// Sim Agent API configuration
const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT
function deriveKey(keyString: string): Buffer {
return createHash('sha256').update(keyString, 'utf8').digest()
}
function decryptWithKey(encryptedValue: string, keyString: string): string {
const [ivHex, encryptedHex, authTagHex] = encryptedValue.split(':')
if (!ivHex || !encryptedHex || !authTagHex) {
throw new Error('Invalid encrypted format')
}
const key = deriveKey(keyString)
const iv = Buffer.from(ivHex, 'hex')
const decipher = createDecipheriv('aes-256-gcm', key, iv)
decipher.setAuthTag(Buffer.from(authTagHex, 'hex'))
let decrypted = decipher.update(encryptedHex, 'hex', 'utf8')
decrypted += decipher.final('utf8')
return decrypted
}
function encryptWithKey(plaintext: string, keyString: string): string {
const key = deriveKey(keyString)
const iv = randomBytes(16)
const cipher = createCipheriv('aes-256-gcm', key, iv)
let encrypted = cipher.update(plaintext, 'utf8', 'hex')
encrypted += cipher.final('hex')
const authTag = cipher.getAuthTag().toString('hex')
return `${iv.toString('hex')}:${encrypted}:${authTag}`
}
// Schema for file attachments
const FileAttachmentSchema = z.object({
id: z.string(),
@@ -48,10 +81,6 @@ const ChatMessageSchema = z.object({
conversationId: z.string().optional(),
})
// Sim Agent API configuration
const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || 'http://localhost:8000'
const SIM_AGENT_API_KEY = env.SIM_AGENT_API_KEY
/**
* Generate a chat title using LLM
*/
@@ -179,6 +208,7 @@ export async function POST(req: NextRequest) {
hasImplicitFeedback: !!implicitFeedback,
provider: provider || 'openai',
hasConversationId: !!conversationId,
depth,
})
// Handle chat context
@@ -341,34 +371,64 @@ export async function POST(req: NextRequest) {
(currentChat?.conversationId as string | undefined) || conversationId
// If we have a conversationId, only send the most recent user message; else send full history
const messagesForAgent = effectiveConversationId ? [messages[messages.length - 1]] : messages
const latestUserMessage =
[...messages].reverse().find((m) => m?.role === 'user') || messages[messages.length - 1]
const messagesForAgent = effectiveConversationId ? [latestUserMessage] : messages
const requestPayload = {
messages: messagesForAgent,
workflowId,
userId: authenticatedUserId,
stream: stream,
streamToolCalls: true,
mode: mode,
provider: providerToUse,
...(effectiveConversationId ? { conversationId: effectiveConversationId } : {}),
...(typeof depth === 'number' ? { depth } : {}),
...(session?.user?.name && { userName: session.user.name }),
}
// Log the payload being sent to the streaming endpoint
try {
logger.info(`[${tracker.requestId}] Sending payload to sim agent streaming endpoint`, {
url: `${SIM_AGENT_API_URL}/api/chat-completion-streaming`,
provider: providerToUse,
mode,
stream,
workflowId,
hasConversationId: !!effectiveConversationId,
depth: typeof depth === 'number' ? depth : undefined,
messagesCount: requestPayload.messages.length,
})
// Full payload as JSON string
logger.info(
`[${tracker.requestId}] Full streaming payload: ${JSON.stringify(requestPayload)}`
)
} catch (e) {
logger.warn(`[${tracker.requestId}] Failed to log payload preview for streaming endpoint`, e)
}
const simAgentResponse = await fetch(`${SIM_AGENT_API_URL}/api/chat-completion-streaming`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
...(SIM_AGENT_API_KEY && { 'x-api-key': SIM_AGENT_API_KEY }),
...(env.COPILOT_API_KEY ? { 'x-api-key': env.COPILOT_API_KEY } : {}),
},
body: JSON.stringify({
messages: messagesForAgent,
workflowId,
userId: authenticatedUserId,
stream: stream,
streamToolCalls: true,
mode: mode,
provider: providerToUse,
...(effectiveConversationId ? { conversationId: effectiveConversationId } : {}),
...(typeof depth === 'number' ? { depth } : {}),
...(session?.user?.name && { userName: session.user.name }),
}),
body: JSON.stringify(requestPayload),
})
if (!simAgentResponse.ok) {
const errorText = await simAgentResponse.text()
if (simAgentResponse.status === 401 || simAgentResponse.status === 402) {
// Rethrow status only; client will render appropriate assistant message
return new NextResponse(null, { status: simAgentResponse.status })
}
const errorText = await simAgentResponse.text().catch(() => '')
logger.error(`[${tracker.requestId}] Sim agent API error:`, {
status: simAgentResponse.status,
error: errorText,
})
return NextResponse.json(
{ error: `Sim agent API error: ${simAgentResponse.statusText}` },
{ status: simAgentResponse.status }
@@ -654,7 +714,7 @@ export async function POST(req: NextRequest) {
)
}
const responseId = responseIdFromDone || responseIdFromStart
const responseId = responseIdFromDone
// Update chat in database immediately (without title)
await db

View File

@@ -48,11 +48,6 @@ async function updateToolCallStatus(
while (Date.now() - startTime < timeout) {
const exists = await redis.exists(key)
if (exists) {
logger.info('Tool call found in Redis, updating status', {
toolCallId,
key,
pollDuration: Date.now() - startTime,
})
break
}
@@ -79,27 +74,8 @@ async function updateToolCallStatus(
timestamp: new Date().toISOString(),
}
// Log what we're about to update in Redis
logger.info('About to update Redis with tool call data', {
toolCallId,
key,
toolCallData,
serializedData: JSON.stringify(toolCallData),
providedStatus: status,
providedMessage: message,
messageIsUndefined: message === undefined,
messageIsNull: message === null,
})
await redis.set(key, JSON.stringify(toolCallData), 'EX', 86400) // Keep 24 hour expiry
logger.info('Tool call status updated in Redis', {
toolCallId,
key,
status,
message,
pollDuration: Date.now() - startTime,
})
return true
} catch (error) {
logger.error('Failed to update tool call status in Redis', {
@@ -131,13 +107,6 @@ export async function POST(req: NextRequest) {
const body = await req.json()
const { toolCallId, status, message } = ConfirmationSchema.parse(body)
logger.info(`[${tracker.requestId}] Tool call confirmation request`, {
userId: authenticatedUserId,
toolCallId,
status,
message,
})
// Update the tool call status in Redis
const updated = await updateToolCallStatus(toolCallId, status, message)
@@ -153,13 +122,6 @@ export async function POST(req: NextRequest) {
}
const duration = tracker.getDuration()
logger.info(`[${tracker.requestId}] Tool call confirmation completed`, {
userId: authenticatedUserId,
toolCallId,
status,
internalStatus: status,
duration,
})
return NextResponse.json({
success: true,

View File

@@ -69,12 +69,6 @@ async function pollRedisForTool(
const pollInterval = 1000 // 1 second
const startTime = Date.now()
logger.info('Starting to poll Redis for tool call status', {
toolCallId,
timeout,
pollInterval,
})
while (Date.now() - startTime < timeout) {
try {
const redisValue = await redis.get(key)
@@ -112,23 +106,6 @@ async function pollRedisForTool(
rawRedisValue: redisValue,
})
logger.info('Tool call status resolved', {
toolCallId,
status,
message,
duration: Date.now() - startTime,
rawRedisValue: redisValue,
parsedAsJSON: redisValue
? (() => {
try {
return JSON.parse(redisValue)
} catch {
return 'failed-to-parse'
}
})()
: null,
})
// Special logging for set environment variables tool when Redis status is found
if (toolCallId && (status === 'accepted' || status === 'rejected')) {
logger.info('SET_ENV_VARS: Redis polling found status update', {

View File

@@ -178,7 +178,7 @@ export function findLocalFile(filename: string): string | null {
* Create a file response with appropriate headers
*/
export function createFileResponse(file: FileResponse): NextResponse {
return new NextResponse(file.buffer, {
return new NextResponse(file.buffer as BodyInit, {
status: 200,
headers: {
'Content-Type': file.contentType,

View File

@@ -1,6 +1,7 @@
import { type NextRequest, NextResponse } from 'next/server'
import { Resend } from 'resend'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { getEmailDomain } from '@/lib/urls/utils'
@@ -9,7 +10,6 @@ const resend = env.RESEND_API_KEY ? new Resend(env.RESEND_API_KEY) : null
const logger = createLogger('HelpAPI')
const helpFormSchema = z.object({
email: z.string().email('Invalid email address'),
subject: z.string().min(1, 'Subject is required'),
message: z.string().min(1, 'Message is required'),
type: z.enum(['bug', 'feedback', 'feature_request', 'other']),
@@ -19,6 +19,15 @@ export async function POST(req: NextRequest) {
const requestId = crypto.randomUUID().slice(0, 8)
try {
// Get user session
const session = await getSession()
if (!session?.user?.email) {
logger.warn(`[${requestId}] Unauthorized help request attempt`)
return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
}
const email = session.user.email
// Check if Resend API key is configured
if (!resend) {
logger.error(`[${requestId}] RESEND_API_KEY not configured`)
@@ -35,7 +44,6 @@ export async function POST(req: NextRequest) {
const formData = await req.formData()
// Extract form fields
const email = formData.get('email') as string
const subject = formData.get('subject') as string
const message = formData.get('message') as string
const type = formData.get('type') as string
@@ -47,7 +55,6 @@ export async function POST(req: NextRequest) {
// Validate the form data
const result = helpFormSchema.safeParse({
email,
subject,
message,
type,
@@ -97,9 +104,9 @@ ${message}
}
// Send email using Resend
const { data, error } = await resend.emails.send({
from: `Sim <noreply@${getEmailDomain()}>`,
to: [`help@${getEmailDomain()}`],
const { error } = await resend.emails.send({
from: `Sim <noreply@${env.EMAIL_DOMAIN || getEmailDomain()}>`,
to: [`help@${env.EMAIL_DOMAIN || getEmailDomain()}`],
subject: `[${type.toUpperCase()}] ${subject}`,
replyTo: email,
text: emailText,
@@ -121,7 +128,7 @@ ${message}
// Send confirmation email to the user
await resend.emails
.send({
from: `Sim <noreply@${getEmailDomain()}>`,
from: `Sim <noreply@${env.EMAIL_DOMAIN || getEmailDomain()}>`,
to: [email],
subject: `Your ${type} request has been received: ${subject}`,
text: `
@@ -137,7 +144,7 @@ ${images.length > 0 ? `You attached ${images.length} image(s).` : ''}
Best regards,
The Sim Team
`,
replyTo: `help@${getEmailDomain()}`,
replyTo: `help@${env.EMAIL_DOMAIN || getEmailDomain()}`,
})
.catch((err) => {
logger.warn(`[${requestId}] Failed to send confirmation email`, err)

View File

@@ -46,20 +46,7 @@ export async function GET(
startedAt: workflowLog.startedAt.toISOString(),
endedAt: workflowLog.endedAt?.toISOString(),
totalDurationMs: workflowLog.totalDurationMs,
blockStats: {
total: workflowLog.blockCount,
success: workflowLog.successCount,
error: workflowLog.errorCount,
skipped: workflowLog.skippedCount,
},
cost: {
total: workflowLog.totalCost ? Number.parseFloat(workflowLog.totalCost) : null,
input: workflowLog.totalInputCost ? Number.parseFloat(workflowLog.totalInputCost) : null,
output: workflowLog.totalOutputCost
? Number.parseFloat(workflowLog.totalOutputCost)
: null,
},
totalTokens: workflowLog.totalTokens,
cost: workflowLog.cost || null,
},
}

View File

@@ -0,0 +1,102 @@
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console/logger'
import { db } from '@/db'
import { permissions, workflow, workflowExecutionLogs } from '@/db/schema'
const logger = createLogger('LogDetailsByIdAPI')
export const revalidate = 0
export async function GET(_request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const requestId = crypto.randomUUID().slice(0, 8)
try {
const session = await getSession()
if (!session?.user?.id) {
logger.warn(`[${requestId}] Unauthorized log details access attempt`)
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const userId = session.user.id
const { id } = await params
const rows = await db
.select({
id: workflowExecutionLogs.id,
workflowId: workflowExecutionLogs.workflowId,
executionId: workflowExecutionLogs.executionId,
stateSnapshotId: workflowExecutionLogs.stateSnapshotId,
level: workflowExecutionLogs.level,
trigger: workflowExecutionLogs.trigger,
startedAt: workflowExecutionLogs.startedAt,
endedAt: workflowExecutionLogs.endedAt,
totalDurationMs: workflowExecutionLogs.totalDurationMs,
executionData: workflowExecutionLogs.executionData,
cost: workflowExecutionLogs.cost,
files: workflowExecutionLogs.files,
createdAt: workflowExecutionLogs.createdAt,
workflowName: workflow.name,
workflowDescription: workflow.description,
workflowColor: workflow.color,
workflowFolderId: workflow.folderId,
workflowUserId: workflow.userId,
workflowWorkspaceId: workflow.workspaceId,
workflowCreatedAt: workflow.createdAt,
workflowUpdatedAt: workflow.updatedAt,
})
.from(workflowExecutionLogs)
.innerJoin(workflow, eq(workflowExecutionLogs.workflowId, workflow.id))
.innerJoin(
permissions,
and(
eq(permissions.entityType, 'workspace'),
eq(permissions.entityId, workflow.workspaceId),
eq(permissions.userId, userId)
)
)
.where(eq(workflowExecutionLogs.id, id))
.limit(1)
const log = rows[0]
if (!log) {
return NextResponse.json({ error: 'Not found' }, { status: 404 })
}
const workflowSummary = {
id: log.workflowId,
name: log.workflowName,
description: log.workflowDescription,
color: log.workflowColor,
folderId: log.workflowFolderId,
userId: log.workflowUserId,
workspaceId: log.workflowWorkspaceId,
createdAt: log.workflowCreatedAt,
updatedAt: log.workflowUpdatedAt,
}
const response = {
id: log.id,
workflowId: log.workflowId,
executionId: log.executionId,
level: log.level,
duration: log.totalDurationMs ? `${log.totalDurationMs}ms` : null,
trigger: log.trigger,
createdAt: log.startedAt.toISOString(),
files: log.files || undefined,
workflow: workflowSummary,
executionData: {
totalDuration: log.totalDurationMs,
...(log.executionData as any),
enhanced: true,
},
cost: log.cost as any,
}
return NextResponse.json({ data: response })
} catch (error: any) {
logger.error(`[${requestId}] log details fetch error`, error)
return NextResponse.json({ error: error.message }, { status: 500 })
}
}

View File

@@ -99,21 +99,13 @@ export async function GET(request: NextRequest) {
executionId: workflowExecutionLogs.executionId,
stateSnapshotId: workflowExecutionLogs.stateSnapshotId,
level: workflowExecutionLogs.level,
message: workflowExecutionLogs.message,
trigger: workflowExecutionLogs.trigger,
startedAt: workflowExecutionLogs.startedAt,
endedAt: workflowExecutionLogs.endedAt,
totalDurationMs: workflowExecutionLogs.totalDurationMs,
blockCount: workflowExecutionLogs.blockCount,
successCount: workflowExecutionLogs.successCount,
errorCount: workflowExecutionLogs.errorCount,
skippedCount: workflowExecutionLogs.skippedCount,
totalCost: workflowExecutionLogs.totalCost,
totalInputCost: workflowExecutionLogs.totalInputCost,
totalOutputCost: workflowExecutionLogs.totalOutputCost,
totalTokens: workflowExecutionLogs.totalTokens,
executionData: workflowExecutionLogs.executionData,
cost: workflowExecutionLogs.cost,
files: workflowExecutionLogs.files,
metadata: workflowExecutionLogs.metadata,
createdAt: workflowExecutionLogs.createdAt,
})
.from(workflowExecutionLogs)

View File

@@ -1,4 +1,4 @@
import { and, desc, eq, gte, inArray, lte, or, type SQL, sql } from 'drizzle-orm'
import { and, desc, eq, gte, inArray, lte, type SQL, sql } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
@@ -44,8 +44,7 @@ function extractBlockExecutionsFromTraceSpans(traceSpans: any[]): any[] {
export const revalidate = 0
const QueryParamsSchema = z.object({
includeWorkflow: z.coerce.boolean().optional().default(false),
includeBlocks: z.coerce.boolean().optional().default(false),
details: z.enum(['basic', 'full']).optional().default('basic'),
limit: z.coerce.number().optional().default(100),
offset: z.coerce.number().optional().default(0),
level: z.string().optional(),
@@ -81,20 +80,12 @@ export async function GET(request: NextRequest) {
executionId: workflowExecutionLogs.executionId,
stateSnapshotId: workflowExecutionLogs.stateSnapshotId,
level: workflowExecutionLogs.level,
message: workflowExecutionLogs.message,
trigger: workflowExecutionLogs.trigger,
startedAt: workflowExecutionLogs.startedAt,
endedAt: workflowExecutionLogs.endedAt,
totalDurationMs: workflowExecutionLogs.totalDurationMs,
blockCount: workflowExecutionLogs.blockCount,
successCount: workflowExecutionLogs.successCount,
errorCount: workflowExecutionLogs.errorCount,
skippedCount: workflowExecutionLogs.skippedCount,
totalCost: workflowExecutionLogs.totalCost,
totalInputCost: workflowExecutionLogs.totalInputCost,
totalOutputCost: workflowExecutionLogs.totalOutputCost,
totalTokens: workflowExecutionLogs.totalTokens,
metadata: workflowExecutionLogs.metadata,
executionData: workflowExecutionLogs.executionData,
cost: workflowExecutionLogs.cost,
files: workflowExecutionLogs.files,
createdAt: workflowExecutionLogs.createdAt,
workflowName: workflow.name,
@@ -163,13 +154,8 @@ export async function GET(request: NextRequest) {
// Filter by search query
if (params.search) {
const searchTerm = `%${params.search}%`
conditions = and(
conditions,
or(
sql`${workflowExecutionLogs.message} ILIKE ${searchTerm}`,
sql`${workflowExecutionLogs.executionId} ILIKE ${searchTerm}`
)
)
// With message removed, restrict search to executionId only
conditions = and(conditions, sql`${workflowExecutionLogs.executionId} ILIKE ${searchTerm}`)
}
// Execute the query using the optimized join
@@ -290,31 +276,20 @@ export async function GET(request: NextRequest) {
const enhancedLogs = logs.map((log) => {
const blockExecutions = blockExecutionsByExecution[log.executionId] || []
// Use stored trace spans from metadata if available, otherwise create from block executions
const storedTraceSpans = (log.metadata as any)?.traceSpans
// Use stored trace spans if available, otherwise create from block executions
const storedTraceSpans = (log.executionData as any)?.traceSpans
const traceSpans =
storedTraceSpans && Array.isArray(storedTraceSpans) && storedTraceSpans.length > 0
? storedTraceSpans
: createTraceSpans(blockExecutions)
// Use extracted cost summary if available, otherwise use stored values
// Prefer stored cost JSON; otherwise synthesize from blocks
const costSummary =
blockExecutions.length > 0
? extractCostSummary(blockExecutions)
: {
input: Number(log.totalInputCost) || 0,
output: Number(log.totalOutputCost) || 0,
total: Number(log.totalCost) || 0,
tokens: {
total: log.totalTokens || 0,
prompt: (log.metadata as any)?.tokenBreakdown?.prompt || 0,
completion: (log.metadata as any)?.tokenBreakdown?.completion || 0,
},
models: (log.metadata as any)?.models || {},
}
log.cost && Object.keys(log.cost as any).length > 0
? (log.cost as any)
: extractCostSummary(blockExecutions)
// Build workflow object from joined data
const workflow = {
const workflowSummary = {
id: log.workflowId,
name: log.workflowName,
description: log.workflowDescription,
@@ -329,67 +304,28 @@ export async function GET(request: NextRequest) {
return {
id: log.id,
workflowId: log.workflowId,
executionId: log.executionId,
executionId: params.details === 'full' ? log.executionId : undefined,
level: log.level,
message: log.message,
duration: log.totalDurationMs ? `${log.totalDurationMs}ms` : null,
trigger: log.trigger,
createdAt: log.startedAt.toISOString(),
files: log.files || undefined,
workflow: params.includeWorkflow ? workflow : undefined,
metadata: {
totalDuration: log.totalDurationMs,
cost: costSummary,
blockStats: {
total: log.blockCount,
success: log.successCount,
error: log.errorCount,
skipped: log.skippedCount,
},
traceSpans,
blockExecutions,
enhanced: true,
},
files: params.details === 'full' ? log.files || undefined : undefined,
workflow: workflowSummary,
executionData:
params.details === 'full'
? {
totalDuration: log.totalDurationMs,
traceSpans,
blockExecutions,
enhanced: true,
}
: undefined,
cost:
params.details === 'full'
? (costSummary as any)
: { total: (costSummary as any)?.total || 0 },
}
})
// Include block execution data if requested
if (params.includeBlocks) {
// Block executions are now extracted from stored trace spans in metadata
const blockLogsByExecution: Record<string, any[]> = {}
logs.forEach((log) => {
const storedTraceSpans = (log.metadata as any)?.traceSpans
if (storedTraceSpans && Array.isArray(storedTraceSpans)) {
blockLogsByExecution[log.executionId] =
extractBlockExecutionsFromTraceSpans(storedTraceSpans)
} else {
blockLogsByExecution[log.executionId] = []
}
})
// Add block logs to metadata
const logsWithBlocks = enhancedLogs.map((log) => ({
...log,
metadata: {
...log.metadata,
blockExecutions: blockLogsByExecution[log.executionId] || [],
},
}))
return NextResponse.json(
{
data: logsWithBlocks,
total: Number(count),
page: Math.floor(params.offset / params.limit) + 1,
pageSize: params.limit,
totalPages: Math.ceil(Number(count) / params.limit),
},
{ status: 200 }
)
}
// Return basic logs
return NextResponse.json(
{
data: enhancedLogs,

View File

@@ -80,7 +80,6 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
workspaceId: workspaceId,
name: `${templateData.name} (copy)`,
description: templateData.description,
state: templateData.state,
color: templateData.color,
userId: session.user.id,
createdAt: now,
@@ -158,9 +157,6 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
}))
}
// Update the workflow with the corrected state
await tx.update(workflow).set({ state: updatedState }).where(eq(workflow.id, newWorkflowId))
// Insert blocks and edges
if (blockEntries.length > 0) {
await tx.insert(workflowBlocks).values(blockEntries)

View File

@@ -17,12 +17,6 @@ export const dynamic = 'force-dynamic'
const logger = createLogger('AutoLayoutAPI')
// Check API key configuration at module level
const SIM_AGENT_API_KEY = process.env.SIM_AGENT_API_KEY
if (!SIM_AGENT_API_KEY) {
logger.warn('SIM_AGENT_API_KEY not configured - autolayout requests will fail')
}
const AutoLayoutRequestSchema = z.object({
strategy: z
.enum(['smart', 'hierarchical', 'layered', 'force-directed'])
@@ -125,15 +119,6 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
return NextResponse.json({ error: 'Could not load workflow data' }, { status: 500 })
}
// Apply autolayout
logger.info(
`[${requestId}] Applying autolayout to ${Object.keys(currentWorkflowData.blocks).length} blocks`,
{
hasApiKey: !!SIM_AGENT_API_KEY,
simAgentUrl: process.env.SIM_AGENT_API_URL || 'http://localhost:8000',
}
)
// Create workflow state for autolayout
const workflowState = {
blocks: currentWorkflowData.blocks,
@@ -184,7 +169,6 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
resolveOutputType: resolveOutputType.toString(),
},
},
apiKey: SIM_AGENT_API_KEY,
})
// Log the full response for debugging

View File

@@ -7,7 +7,7 @@ import { createLogger } from '@/lib/logs/console/logger'
import { getUserEntityPermissions } from '@/lib/permissions/utils'
import { db } from '@/db'
import { workflow, workflowBlocks, workflowEdges, workflowSubflows } from '@/db/schema'
import type { LoopConfig, ParallelConfig, WorkflowState } from '@/stores/workflows/workflow/types'
import type { LoopConfig, ParallelConfig } from '@/stores/workflows/workflow/types'
const logger = createLogger('WorkflowDuplicateAPI')
@@ -90,7 +90,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
folderId: folderId || source.folderId,
name,
description: description || source.description,
state: source.state, // We'll update this later with new block IDs
color: color || source.color,
lastSynced: now,
createdAt: now,
@@ -112,9 +111,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
// Create a mapping from old block IDs to new block IDs
const blockIdMapping = new Map<string, string>()
// Initialize state for updating with new block IDs
let updatedState: WorkflowState = source.state as WorkflowState
if (sourceBlocks.length > 0) {
// First pass: Create all block ID mappings
sourceBlocks.forEach((block) => {
@@ -265,86 +261,10 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
)
}
// Update the JSON state to use new block IDs
if (updatedState && typeof updatedState === 'object') {
updatedState = JSON.parse(JSON.stringify(updatedState)) as WorkflowState
// Update blocks object keys
if (updatedState.blocks && typeof updatedState.blocks === 'object') {
const newBlocks = {} as Record<string, (typeof updatedState.blocks)[string]>
for (const [oldId, blockData] of Object.entries(updatedState.blocks)) {
const newId = blockIdMapping.get(oldId) || oldId
newBlocks[newId] = {
...blockData,
id: newId,
// Update data.parentId and extent in the JSON state as well
data: (() => {
const block = blockData as any
if (block.data && typeof block.data === 'object' && block.data.parentId) {
return {
...block.data,
parentId: blockIdMapping.get(block.data.parentId) || block.data.parentId,
extent: 'parent', // Ensure extent is set for child blocks
}
}
return block.data
})(),
}
}
updatedState.blocks = newBlocks
}
// Update edges array
if (updatedState.edges && Array.isArray(updatedState.edges)) {
updatedState.edges = updatedState.edges.map((edge) => ({
...edge,
id: crypto.randomUUID(),
source: blockIdMapping.get(edge.source) || edge.source,
target: blockIdMapping.get(edge.target) || edge.target,
}))
}
// Update loops and parallels if they exist
if (updatedState.loops && typeof updatedState.loops === 'object') {
const newLoops = {} as Record<string, (typeof updatedState.loops)[string]>
for (const [oldId, loopData] of Object.entries(updatedState.loops)) {
const newId = blockIdMapping.get(oldId) || oldId
const loopConfig = loopData as any
newLoops[newId] = {
...loopConfig,
id: newId,
// Update node references in loop config
nodes: loopConfig.nodes
? loopConfig.nodes.map((nodeId: string) => blockIdMapping.get(nodeId) || nodeId)
: [],
}
}
updatedState.loops = newLoops
}
if (updatedState.parallels && typeof updatedState.parallels === 'object') {
const newParallels = {} as Record<string, (typeof updatedState.parallels)[string]>
for (const [oldId, parallelData] of Object.entries(updatedState.parallels)) {
const newId = blockIdMapping.get(oldId) || oldId
const parallelConfig = parallelData as any
newParallels[newId] = {
...parallelConfig,
id: newId,
// Update node references in parallel config
nodes: parallelConfig.nodes
? parallelConfig.nodes.map((nodeId: string) => blockIdMapping.get(nodeId) || nodeId)
: [],
}
}
updatedState.parallels = newParallels
}
}
// Update the workflow state with the new block IDs
// Update the workflow timestamp
await tx
.update(workflow)
.set({
state: updatedState,
updatedAt: now,
})
.where(eq(workflow.id, newWorkflowId))

View File

@@ -89,7 +89,14 @@ describe('Workflow By ID API Route', () => {
userId: 'user-123',
name: 'Test Workflow',
workspaceId: null,
state: { blocks: {}, edges: [] },
}
const mockNormalizedData = {
blocks: {},
edges: [],
loops: {},
parallels: {},
isFromNormalizedTables: true,
}
vi.doMock('@/lib/auth', () => ({
@@ -110,6 +117,10 @@ describe('Workflow By ID API Route', () => {
},
}))
vi.doMock('@/lib/workflows/db-helpers', () => ({
loadWorkflowFromNormalizedTables: vi.fn().mockResolvedValue(mockNormalizedData),
}))
const req = new NextRequest('http://localhost:3000/api/workflows/workflow-123')
const params = Promise.resolve({ id: 'workflow-123' })
@@ -127,7 +138,14 @@ describe('Workflow By ID API Route', () => {
userId: 'other-user',
name: 'Test Workflow',
workspaceId: 'workspace-456',
state: { blocks: {}, edges: [] },
}
const mockNormalizedData = {
blocks: {},
edges: [],
loops: {},
parallels: {},
isFromNormalizedTables: true,
}
vi.doMock('@/lib/auth', () => ({
@@ -148,6 +166,10 @@ describe('Workflow By ID API Route', () => {
},
}))
vi.doMock('@/lib/workflows/db-helpers', () => ({
loadWorkflowFromNormalizedTables: vi.fn().mockResolvedValue(mockNormalizedData),
}))
vi.doMock('@/lib/permissions/utils', () => ({
getUserEntityPermissions: vi.fn().mockResolvedValue('read'),
hasAdminPermission: vi.fn().mockResolvedValue(false),
@@ -170,7 +192,6 @@ describe('Workflow By ID API Route', () => {
userId: 'other-user',
name: 'Test Workflow',
workspaceId: 'workspace-456',
state: { blocks: {}, edges: [] },
}
vi.doMock('@/lib/auth', () => ({
@@ -213,7 +234,6 @@ describe('Workflow By ID API Route', () => {
userId: 'user-123',
name: 'Test Workflow',
workspaceId: null,
state: { blocks: {}, edges: [] },
}
const mockNormalizedData = {

View File

@@ -120,8 +120,6 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
logger.debug(`[${requestId}] Attempting to load workflow ${workflowId} from normalized tables`)
const normalizedData = await loadWorkflowFromNormalizedTables(workflowId)
const finalWorkflowData = { ...workflowData }
if (normalizedData) {
logger.debug(`[${requestId}] Found normalized data for workflow ${workflowId}:`, {
blocksCount: Object.keys(normalizedData.blocks).length,
@@ -131,38 +129,31 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
loops: normalizedData.loops,
})
// Use normalized table data - reconstruct complete state object
// First get any existing state properties, then override with normalized data
const existingState =
workflowData.state && typeof workflowData.state === 'object' ? workflowData.state : {}
finalWorkflowData.state = {
// Default values for expected properties
deploymentStatuses: {},
hasActiveWebhook: false,
// Preserve any existing state properties
...existingState,
// Override with normalized data (this takes precedence)
blocks: normalizedData.blocks,
edges: normalizedData.edges,
loops: normalizedData.loops,
parallels: normalizedData.parallels,
lastSaved: Date.now(),
isDeployed: workflowData.isDeployed || false,
deployedAt: workflowData.deployedAt,
// Construct response object with workflow data and state from normalized tables
const finalWorkflowData = {
...workflowData,
state: {
// Default values for expected properties
deploymentStatuses: {},
hasActiveWebhook: false,
// Data from normalized tables
blocks: normalizedData.blocks,
edges: normalizedData.edges,
loops: normalizedData.loops,
parallels: normalizedData.parallels,
lastSaved: Date.now(),
isDeployed: workflowData.isDeployed || false,
deployedAt: workflowData.deployedAt,
},
}
logger.info(`[${requestId}] Loaded workflow ${workflowId} from normalized tables`)
} else {
// Fallback to JSON blob
logger.info(
`[${requestId}] Using JSON blob for workflow ${workflowId} - no normalized data found`
)
const elapsed = Date.now() - startTime
logger.info(`[${requestId}] Successfully fetched workflow ${workflowId} in ${elapsed}ms`)
return NextResponse.json({ data: finalWorkflowData }, { status: 200 })
}
const elapsed = Date.now() - startTime
logger.info(`[${requestId}] Successfully fetched workflow ${workflowId} in ${elapsed}ms`)
return NextResponse.json({ data: finalWorkflowData }, { status: 200 })
return NextResponse.json({ error: 'Workflow has no normalized data' }, { status: 400 })
} catch (error: any) {
const elapsed = Date.now() - startTime
logger.error(`[${requestId}] Error fetching workflow ${workflowId} after ${elapsed}ms`, error)

View File

@@ -220,7 +220,6 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
.set({
lastSynced: new Date(),
updatedAt: new Date(),
state: saveResult.jsonBlob, // Also update JSON blob for backward compatibility
})
.where(eq(workflow.id, workflowId))

View File

@@ -1,9 +1,10 @@
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { getUserEntityPermissions } from '@/lib/permissions/utils'
import { simAgentClient } from '@/lib/sim-agent'
import { SIM_AGENT_API_URL_DEFAULT, simAgentClient } from '@/lib/sim-agent'
import {
loadWorkflowFromNormalizedTables,
saveWorkflowToNormalizedTables,
@@ -17,15 +18,12 @@ import { db } from '@/db'
import { workflowCheckpoints, workflow as workflowTable } from '@/db/schema'
import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/workflow/utils'
// Sim Agent API configuration
const SIM_AGENT_API_URL = process.env.SIM_AGENT_API_URL || 'http://localhost:8000'
const SIM_AGENT_API_KEY = process.env.SIM_AGENT_API_KEY
const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT
export const dynamic = 'force-dynamic'
const logger = createLogger('WorkflowYamlAPI')
// Request schema for YAML workflow operations
const YamlWorkflowRequestSchema = z.object({
yamlContent: z.string().min(1, 'YAML content is required'),
description: z.string().optional(),
@@ -74,7 +72,6 @@ async function createWorkflowCheckpoint(
method: 'POST',
headers: {
'Content-Type': 'application/json',
...(SIM_AGENT_API_KEY && { 'x-api-key': SIM_AGENT_API_KEY }),
},
body: JSON.stringify({
workflowState: currentWorkflowData,
@@ -288,7 +285,6 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
method: 'POST',
headers: {
'Content-Type': 'application/json',
...(SIM_AGENT_API_KEY && { 'x-api-key': SIM_AGENT_API_KEY }),
},
body: JSON.stringify({
yamlContent,
@@ -649,14 +645,13 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
.set({
lastSynced: new Date(),
updatedAt: new Date(),
state: saveResult.jsonBlob,
})
.where(eq(workflowTable.id, workflowId))
// Notify socket server for real-time collaboration (for copilot and editor)
if (source === 'copilot' || source === 'editor') {
try {
const socketUrl = process.env.SOCKET_URL || 'http://localhost:3002'
const socketUrl = env.SOCKET_SERVER_URL || 'http://localhost:3002'
await fetch(`${socketUrl}/api/copilot-workflow-edit`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },

View File

@@ -151,7 +151,6 @@ export async function POST(req: NextRequest) {
folderId: folderId || null,
name,
description,
state: initialState,
color,
lastSynced: now,
createdAt: now,

View File

@@ -8,9 +8,6 @@ import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/w
const logger = createLogger('WorkflowYamlAPI')
// Get API key at module level like working routes
const SIM_AGENT_API_KEY = process.env.SIM_AGENT_API_KEY
export async function POST(request: NextRequest) {
const requestId = crypto.randomUUID().slice(0, 8)
@@ -55,7 +52,6 @@ export async function POST(request: NextRequest) {
resolveOutputType: resolveOutputType.toString(),
},
},
apiKey: SIM_AGENT_API_KEY,
})
if (!result.success || !result.data?.yaml) {

View File

@@ -14,9 +14,6 @@ import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/w
const logger = createLogger('WorkflowYamlExportAPI')
// Get API key at module level like working routes
const SIM_AGENT_API_KEY = process.env.SIM_AGENT_API_KEY
export async function GET(request: NextRequest) {
const requestId = crypto.randomUUID().slice(0, 8)
const url = new URL(request.url)
@@ -88,14 +85,10 @@ export async function GET(request: NextRequest) {
edgesCount: normalizedData.edges.length,
})
// Use normalized table data - reconstruct complete state object
const existingState =
workflowData.state && typeof workflowData.state === 'object' ? workflowData.state : {}
// Use normalized table data - construct state from normalized tables
workflowState = {
deploymentStatuses: {},
hasActiveWebhook: false,
...existingState,
blocks: normalizedData.blocks,
edges: normalizedData.edges,
loops: normalizedData.loops,
@@ -119,33 +112,10 @@ export async function GET(request: NextRequest) {
logger.info(`[${requestId}] Loaded workflow ${workflowId} from normalized tables`)
} else {
// Fallback to JSON blob
logger.info(
`[${requestId}] Using JSON blob for workflow ${workflowId} - no normalized data found`
return NextResponse.json(
{ success: false, error: 'Workflow has no normalized data' },
{ status: 400 }
)
if (!workflowData.state || typeof workflowData.state !== 'object') {
return NextResponse.json(
{ success: false, error: 'Workflow has no valid state data' },
{ status: 400 }
)
}
workflowState = workflowData.state as any
// Extract subblock values from JSON blob state
if (workflowState.blocks) {
Object.entries(workflowState.blocks).forEach(([blockId, block]: [string, any]) => {
subBlockValues[blockId] = {}
if (block.subBlocks) {
Object.entries(block.subBlocks).forEach(([subBlockId, subBlock]: [string, any]) => {
if (subBlock && typeof subBlock === 'object' && 'value' in subBlock) {
subBlockValues[blockId][subBlockId] = subBlock.value
}
})
}
})
}
}
// Gather block registry and utilities for sim-agent
@@ -176,7 +146,6 @@ export async function GET(request: NextRequest) {
resolveOutputType: resolveOutputType.toString(),
},
},
apiKey: SIM_AGENT_API_KEY,
})
if (!result.success || !result.data?.yaml) {

View File

@@ -113,64 +113,6 @@ async function createWorkspace(userId: string, name: string) {
// Create initial workflow for the workspace with start block
const starterId = crypto.randomUUID()
const initialState = {
blocks: {
[starterId]: {
id: starterId,
type: 'starter',
name: 'Start',
position: { x: 100, y: 100 },
subBlocks: {
startWorkflow: {
id: 'startWorkflow',
type: 'dropdown',
value: 'manual',
},
webhookPath: {
id: 'webhookPath',
type: 'short-input',
value: '',
},
webhookSecret: {
id: 'webhookSecret',
type: 'short-input',
value: '',
},
scheduleType: {
id: 'scheduleType',
type: 'dropdown',
value: 'daily',
},
minutesInterval: {
id: 'minutesInterval',
type: 'short-input',
value: '',
},
minutesStartingAt: {
id: 'minutesStartingAt',
type: 'short-input',
value: '',
},
},
outputs: {
response: { type: { input: 'any' } },
},
enabled: true,
horizontalHandles: true,
isWide: false,
advancedMode: false,
height: 95,
},
},
edges: [],
subflows: {},
variables: {},
metadata: {
version: '1.0.0',
createdAt: now.toISOString(),
updatedAt: now.toISOString(),
},
}
// Create the workflow
await tx.insert(workflow).values({
@@ -180,7 +122,6 @@ async function createWorkspace(userId: string, name: string) {
folderId: null,
name: 'default-agent',
description: 'Your first workflow - start building here!',
state: initialState,
color: '#3972F6',
lastSynced: now,
createdAt: now,

View File

@@ -1,6 +1,8 @@
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { SIM_AGENT_API_URL_DEFAULT } from '@/lib/sim-agent'
import { getAllBlocks } from '@/blocks/registry'
import type { BlockConfig } from '@/blocks/types'
import { resolveOutputType } from '@/blocks/utils'
@@ -16,8 +18,7 @@ import {
const logger = createLogger('YamlAutoLayoutAPI')
// Sim Agent API configuration
const SIM_AGENT_API_URL = process.env.SIM_AGENT_API_URL || 'http://localhost:8000'
const SIM_AGENT_API_KEY = process.env.SIM_AGENT_API_KEY
const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT
const AutoLayoutRequestSchema = z.object({
workflowState: z.object({
@@ -58,7 +59,6 @@ export async function POST(request: NextRequest) {
logger.info(`[${requestId}] Applying auto layout`, {
blockCount: Object.keys(workflowState.blocks).length,
edgeCount: workflowState.edges.length,
hasApiKey: !!SIM_AGENT_API_KEY,
strategy: options?.strategy || 'smart',
simAgentUrl: SIM_AGENT_API_URL,
})
@@ -102,7 +102,6 @@ export async function POST(request: NextRequest) {
method: 'POST',
headers: {
'Content-Type': 'application/json',
...(SIM_AGENT_API_KEY && { 'x-api-key': SIM_AGENT_API_KEY }),
},
body: JSON.stringify({
workflowState: {

View File

@@ -1,6 +1,8 @@
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { SIM_AGENT_API_URL_DEFAULT } from '@/lib/sim-agent'
import { getAllBlocks } from '@/blocks/registry'
import type { BlockConfig } from '@/blocks/types'
import { resolveOutputType } from '@/blocks/utils'
@@ -16,8 +18,7 @@ import {
const logger = createLogger('YamlDiffCreateAPI')
// Sim Agent API configuration
const SIM_AGENT_API_URL = process.env.SIM_AGENT_API_URL || 'http://localhost:8000'
const SIM_AGENT_API_KEY = process.env.SIM_AGENT_API_KEY
const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT
const CreateDiffRequestSchema = z.object({
yamlContent: z.string().min(1),
@@ -89,7 +90,6 @@ export async function POST(request: NextRequest) {
hasDiffAnalysis: !!diffAnalysis,
hasOptions: !!options,
options: options,
hasApiKey: !!SIM_AGENT_API_KEY,
hasCurrentWorkflowState: !!currentWorkflowState,
currentBlockCount: currentWorkflowState
? Object.keys(currentWorkflowState.blocks || {}).length
@@ -117,7 +117,6 @@ export async function POST(request: NextRequest) {
method: 'POST',
headers: {
'Content-Type': 'application/json',
...(SIM_AGENT_API_KEY && { 'x-api-key': SIM_AGENT_API_KEY }),
},
body: JSON.stringify({
yamlContent,

View File

@@ -1,6 +1,8 @@
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { SIM_AGENT_API_URL_DEFAULT } from '@/lib/sim-agent'
import { getAllBlocks } from '@/blocks/registry'
import type { BlockConfig } from '@/blocks/types'
import { resolveOutputType } from '@/blocks/utils'
@@ -16,8 +18,7 @@ import {
const logger = createLogger('YamlDiffMergeAPI')
// Sim Agent API configuration
const SIM_AGENT_API_URL = process.env.SIM_AGENT_API_URL || 'http://localhost:8000'
const SIM_AGENT_API_KEY = process.env.SIM_AGENT_API_KEY
const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT
const MergeDiffRequestSchema = z.object({
existingDiff: z.object({
@@ -64,7 +65,6 @@ export async function POST(request: NextRequest) {
hasDiffAnalysis: !!diffAnalysis,
hasOptions: !!options,
options: options,
hasApiKey: !!SIM_AGENT_API_KEY,
})
// Gather block registry
@@ -88,7 +88,6 @@ export async function POST(request: NextRequest) {
method: 'POST',
headers: {
'Content-Type': 'application/json',
...(SIM_AGENT_API_KEY && { 'x-api-key': SIM_AGENT_API_KEY }),
},
body: JSON.stringify({
existingDiff,

View File

@@ -1,6 +1,8 @@
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { SIM_AGENT_API_URL_DEFAULT } from '@/lib/sim-agent'
import { getAllBlocks } from '@/blocks/registry'
import type { BlockConfig } from '@/blocks/types'
import { resolveOutputType } from '@/blocks/utils'
@@ -9,8 +11,7 @@ import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/w
const logger = createLogger('YamlGenerateAPI')
// Sim Agent API configuration
const SIM_AGENT_API_URL = process.env.SIM_AGENT_API_URL || 'http://localhost:8000'
const SIM_AGENT_API_KEY = process.env.SIM_AGENT_API_KEY
const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT
const GenerateRequestSchema = z.object({
workflowState: z.any(), // Let the yaml service handle validation
@@ -27,7 +28,6 @@ export async function POST(request: NextRequest) {
logger.info(`[${requestId}] Generating YAML from workflow`, {
blocksCount: workflowState.blocks ? Object.keys(workflowState.blocks).length : 0,
edgesCount: workflowState.edges ? workflowState.edges.length : 0,
hasApiKey: !!SIM_AGENT_API_KEY,
})
// Gather block registry and utilities
@@ -51,7 +51,6 @@ export async function POST(request: NextRequest) {
method: 'POST',
headers: {
'Content-Type': 'application/json',
...(SIM_AGENT_API_KEY && { 'x-api-key': SIM_AGENT_API_KEY }),
},
body: JSON.stringify({
workflowState,

View File

@@ -1,26 +1,24 @@
import { NextResponse } from 'next/server'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { SIM_AGENT_API_URL_DEFAULT } from '@/lib/sim-agent'
const logger = createLogger('YamlHealthAPI')
// Sim Agent API configuration
const SIM_AGENT_API_URL = process.env.SIM_AGENT_API_URL || 'http://localhost:8000'
const SIM_AGENT_API_KEY = process.env.SIM_AGENT_API_KEY
const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT
export async function GET() {
const requestId = crypto.randomUUID().slice(0, 8)
try {
logger.info(`[${requestId}] Checking YAML service health`, {
hasApiKey: !!SIM_AGENT_API_KEY,
})
logger.info(`[${requestId}] Checking YAML service health`)
// Check sim-agent health
const response = await fetch(`${SIM_AGENT_API_URL}/health`, {
method: 'GET',
headers: {
'Content-Type': 'application/json',
...(SIM_AGENT_API_KEY && { 'x-api-key': SIM_AGENT_API_KEY }),
},
})

View File

@@ -1,6 +1,8 @@
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { SIM_AGENT_API_URL_DEFAULT } from '@/lib/sim-agent'
import { getAllBlocks } from '@/blocks/registry'
import type { BlockConfig } from '@/blocks/types'
import { resolveOutputType } from '@/blocks/utils'
@@ -9,11 +11,10 @@ import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/w
const logger = createLogger('YamlParseAPI')
// Sim Agent API configuration
const SIM_AGENT_API_URL = process.env.SIM_AGENT_API_URL || 'http://localhost:8000'
const SIM_AGENT_API_KEY = process.env.SIM_AGENT_API_KEY
const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT
const ParseRequestSchema = z.object({
yamlContent: z.string().min(1),
yamlContent: z.string(),
})
export async function POST(request: NextRequest) {
@@ -25,7 +26,6 @@ export async function POST(request: NextRequest) {
logger.info(`[${requestId}] Parsing YAML`, {
contentLength: yamlContent.length,
hasApiKey: !!SIM_AGENT_API_KEY,
})
// Gather block registry and utilities
@@ -49,7 +49,6 @@ export async function POST(request: NextRequest) {
method: 'POST',
headers: {
'Content-Type': 'application/json',
...(SIM_AGENT_API_KEY && { 'x-api-key': SIM_AGENT_API_KEY }),
},
body: JSON.stringify({
yamlContent,

View File

@@ -1,6 +1,8 @@
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { SIM_AGENT_API_URL_DEFAULT } from '@/lib/sim-agent'
import { getAllBlocks } from '@/blocks/registry'
import type { BlockConfig } from '@/blocks/types'
import { resolveOutputType } from '@/blocks/utils'
@@ -9,8 +11,7 @@ import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/w
const logger = createLogger('YamlToWorkflowAPI')
// Sim Agent API configuration
const SIM_AGENT_API_URL = process.env.SIM_AGENT_API_URL || 'http://localhost:8000'
const SIM_AGENT_API_KEY = process.env.SIM_AGENT_API_KEY
const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT
const ConvertRequestSchema = z.object({
yamlContent: z.string().min(1),
@@ -33,7 +34,6 @@ export async function POST(request: NextRequest) {
logger.info(`[${requestId}] Converting YAML to workflow`, {
contentLength: yamlContent.length,
hasOptions: !!options,
hasApiKey: !!SIM_AGENT_API_KEY,
})
// Gather block registry and utilities
@@ -57,7 +57,6 @@ export async function POST(request: NextRequest) {
method: 'POST',
headers: {
'Content-Type': 'application/json',
...(SIM_AGENT_API_KEY && { 'x-api-key': SIM_AGENT_API_KEY }),
},
body: JSON.stringify({
yamlContent,

View File

@@ -14,7 +14,8 @@
}
.workflow-container .react-flow__node-loopNode,
.workflow-container .react-flow__node-parallelNode {
.workflow-container .react-flow__node-parallelNode,
.workflow-container .react-flow__node-subflowNode {
z-index: -1 !important;
}

View File

@@ -86,7 +86,7 @@ const getStatusDisplay = (doc: DocumentData) => {
</>
),
className:
'inline-flex items-center rounded-md bg-[var(--brand-primary-hex)]/10 px-2 py-1 text-xs font-medium text-[var(--brand-primary-hex)] dark:bg-[var(--brand-primary-hex)]/20 dark:text-[var(--brand-primary-hex)]',
'inline-flex items-center rounded-md bg-purple-100 px-2 py-1 text-xs font-medium text-[var(--brand-primary-hex)] dark:bg-purple-900/30 dark:text-[var(--brand-primary-hex)]',
}
case 'failed':
return {

View File

@@ -7,6 +7,7 @@ import { Dialog, DialogContent, DialogHeader, DialogTitle } from '@/components/u
import { Label } from '@/components/ui/label'
import { Progress } from '@/components/ui/progress'
import { createLogger } from '@/lib/logs/console/logger'
import { getDocumentIcon } from '@/app/workspace/[workspaceId]/knowledge/components'
import { useKnowledgeUpload } from '@/app/workspace/[workspaceId]/knowledge/hooks/use-knowledge-upload'
const logger = createLogger('UploadModal')
@@ -152,6 +153,19 @@ export function UploadModal({
}
}
const getFileIcon = (mimeType: string, filename: string) => {
const IconComponent = getDocumentIcon(mimeType, filename)
return <IconComponent className='h-10 w-8' />
}
const formatFileSize = (bytes: number): string => {
if (bytes === 0) return '0 B'
const k = 1024
const sizes = ['B', 'KB', 'MB', 'GB']
const i = Math.floor(Math.log(bytes) / Math.log(k))
return `${Number.parseFloat((bytes / k ** i).toFixed(1))} ${sizes[i]}`
}
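// Worked example for formatFileSize above (illustrative values, not from the diff):
// formatFileSize(0) -> '0 B', formatFileSize(1536) -> '1.5 KB' (1536 / 1024 = 1.5),
// and formatFileSize(5 * 1024 ** 2) -> '5 MB'.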
// Calculate progress percentage
const progressPercentage =
uploadProgress.totalFiles > 0
@@ -221,11 +235,11 @@ export function UploadModal({
multiple
/>
<p className='text-sm'>
{isDragging ? 'Drop more files here!' : 'Add more files'}
{isDragging ? 'Drop more files here!' : 'Drop more files or click to browse'}
</p>
</div>
<div className='max-h-60 space-y-1.5 overflow-auto'>
<div className='max-h-60 space-y-2 overflow-auto'>
{files.map((file, index) => {
const fileStatus = uploadProgress.fileStatuses?.[index]
const isCurrentlyUploading = fileStatus?.status === 'uploading'
@@ -233,26 +247,31 @@ export function UploadModal({
const isFailed = fileStatus?.status === 'failed'
return (
<div key={index} className='space-y-1.5 rounded-md border p-2'>
<div className='flex items-center justify-between'>
<div key={index} className='rounded-md border p-3'>
<div className='flex items-center gap-3'>
{getFileIcon(file.type, file.name)}
<div className='min-w-0 flex-1'>
<div className='flex items-center gap-2'>
{isCurrentlyUploading && (
<Loader2 className='h-4 w-4 animate-spin text-blue-500' />
<Loader2 className='h-4 w-4 animate-spin text-[var(--brand-primary-hex)]' />
)}
{isCompleted && <Check className='h-4 w-4 text-green-500' />}
{isFailed && <X className='h-4 w-4 text-red-500' />}
{!isCurrentlyUploading && !isCompleted && !isFailed && (
<div className='h-4 w-4' />
)}
<p className='truncate text-sm'>
<span className='font-medium'>{file.name}</span>
<span className='text-muted-foreground'>
{' '}
{(file.size / 1024 / 1024).toFixed(2)} MB
</span>
</p>
<p className='truncate font-medium text-sm'>{file.name}</p>
</div>
<div className='flex items-center gap-2'>
<p className='text-muted-foreground text-xs'>
{formatFileSize(file.size)}
</p>
{isCurrentlyUploading && (
<div className='min-w-0 max-w-32 flex-1'>
<Progress value={fileStatus?.progress || 0} className='h-1' />
</div>
)}
</div>
{isFailed && fileStatus?.error && (
<p className='mt-1 text-red-500 text-xs'>{fileStatus.error}</p>
)}
</div>
<Button
type='button'
@@ -260,17 +279,11 @@ export function UploadModal({
size='sm'
onClick={() => removeFile(index)}
disabled={isUploading}
className='h-8 w-8 p-0'
className='h-8 w-8 p-0 text-muted-foreground hover:text-destructive'
>
<X className='h-4 w-4' />
</Button>
</div>
{isCurrentlyUploading && (
<Progress value={fileStatus?.progress || 0} className='h-1' />
)}
{isFailed && fileStatus?.error && (
<p className='text-red-500 text-xs'>{fileStatus.error}</p>
)}
</div>
)
})}
@@ -287,7 +300,11 @@ export function UploadModal({
<Button variant='outline' onClick={handleClose} disabled={isUploading}>
Cancel
</Button>
<Button onClick={handleUpload} disabled={files.length === 0 || isUploading}>
<Button
onClick={handleUpload}
disabled={files.length === 0 || isUploading}
className='bg-[var(--brand-primary-hex)] font-[480] text-primary-foreground shadow-[0_0_0_0_var(--brand-primary-hex)] transition-all duration-200 hover:bg-[var(--brand-primary-hover-hex)] hover:shadow-[0_0_0_4px_rgba(127,47,255,0.15)]'
>
{isUploading
? uploadProgress.stage === 'uploading'
? `Uploading ${uploadProgress.filesCompleted + 1}/${uploadProgress.totalFiles}...`

View File

@@ -2,7 +2,7 @@
import { useEffect, useRef, useState } from 'react'
import { zodResolver } from '@hookform/resolvers/zod'
import { AlertCircle, CheckCircle2, X } from 'lucide-react'
import { AlertCircle, X } from 'lucide-react'
import { useParams } from 'next/navigation'
import { useForm } from 'react-hook-form'
import { z } from 'zod'
@@ -109,6 +109,7 @@ export function CreateModal({ open, onOpenChange, onKnowledgeBaseCreated }: Crea
register,
handleSubmit,
reset,
watch,
formState: { errors },
} = useForm<FormValues>({
resolver: zodResolver(FormSchema),
@@ -119,9 +120,32 @@ export function CreateModal({ open, onOpenChange, onKnowledgeBaseCreated }: Crea
maxChunkSize: 1024,
overlapSize: 200,
},
mode: 'onChange',
mode: 'onSubmit',
})
// Watch the name field to enable/disable the submit button
const nameValue = watch('name')
// Reset state when modal opens/closes
useEffect(() => {
if (open) {
// Reset states when modal opens
setSubmitStatus(null)
setFileError(null)
setFiles([])
setIsDragging(false)
setDragCounter(0)
// Reset form to default values
reset({
name: '',
description: '',
minChunkSize: 1,
maxChunkSize: 1024,
overlapSize: 200,
})
}
}, [open, reset])
const processFiles = async (fileList: FileList | File[]) => {
setFileError(null)
@@ -292,18 +316,6 @@ export function CreateModal({ open, onOpenChange, onKnowledgeBaseCreated }: Crea
logger.info(`Started processing ${uploadedFiles.length} documents in the background`)
}
setSubmitStatus({
type: 'success',
message: 'Your knowledge base has been created successfully!',
})
reset({
name: '',
description: '',
minChunkSize: 1,
maxChunkSize: 1024,
overlapSize: 200,
})
// Clean up file previews
files.forEach((file) => URL.revokeObjectURL(file.preview))
setFiles([])
@@ -313,10 +325,8 @@ export function CreateModal({ open, onOpenChange, onKnowledgeBaseCreated }: Crea
onKnowledgeBaseCreated(newKnowledgeBase)
}
// Close modal after a short delay to show success message
setTimeout(() => {
onOpenChange(false)
}, 1500)
// Close modal immediately - no need for success message
onOpenChange(false)
} catch (error) {
logger.error('Error creating knowledge base:', error)
setSubmitStatus({
@@ -357,31 +367,13 @@ export function CreateModal({ open, onOpenChange, onKnowledgeBaseCreated }: Crea
className='scrollbar-thin scrollbar-thumb-muted-foreground/20 hover:scrollbar-thumb-muted-foreground/25 scrollbar-track-transparent min-h-0 flex-1 overflow-y-auto px-6'
>
<div className='flex min-h-full flex-col py-4'>
{submitStatus && submitStatus.type === 'success' ? (
<Alert className='mb-6 border-border border-green-200 bg-green-50 dark:border-green-900 dark:bg-green-950/30'>
<div className='flex items-start gap-4 py-1'>
<div className='mt-[-1.5px] flex-shrink-0'>
<CheckCircle2 className='h-4 w-4 text-green-600 dark:text-green-400' />
</div>
<div className='mr-4 flex-1 space-y-2'>
<AlertTitle className='-mt-0.5 flex items-center justify-between'>
<span className='font-medium text-green-600 dark:text-green-400'>
Success
</span>
</AlertTitle>
<AlertDescription className='text-green-600 dark:text-green-400'>
{submitStatus.message}
</AlertDescription>
</div>
</div>
</Alert>
) : submitStatus && submitStatus.type === 'error' ? (
{submitStatus && submitStatus.type === 'error' && (
<Alert variant='destructive' className='mb-6'>
<AlertCircle className='h-4 w-4' />
<AlertTitle>Error</AlertTitle>
<AlertDescription>{submitStatus.message}</AlertDescription>
</Alert>
) : null}
)}
{/* Form Fields Section - Fixed at top */}
<div className='flex-shrink-0 space-y-4'>
@@ -611,8 +603,8 @@ export function CreateModal({ open, onOpenChange, onKnowledgeBaseCreated }: Crea
</Button>
<Button
type='submit'
disabled={isSubmitting}
className='bg-[var(--brand-primary-hex)] font-[480] text-primary-foreground shadow-[0_0_0_0_var(--brand-primary-hex)] transition-all duration-200 hover:bg-[var(--brand-primary-hover-hex)] hover:shadow-[0_0_0_4px_rgba(127,47,255,0.15)]'
disabled={isSubmitting || !nameValue?.trim()}
className='bg-[var(--brand-primary-hex)] font-[480] text-primary-foreground shadow-[0_0_0_0_var(--brand-primary-hex)] transition-all duration-200 hover:bg-[var(--brand-primary-hover-hex)] hover:shadow-[0_0_0_4px_rgba(127,47,255,0.15)] disabled:opacity-50 disabled:hover:shadow-none'
>
{isSubmitting ? 'Creating...' : 'Create Knowledge Base'}
</Button>

View File

@@ -1,7 +1,7 @@
'use client'
import { useEffect, useMemo, useRef, useState } from 'react'
import { ChevronDown, ChevronUp, Eye, X } from 'lucide-react'
import { ChevronDown, ChevronUp, Eye, Loader2, X } from 'lucide-react'
import { Button } from '@/components/ui/button'
import { CopyButton } from '@/components/ui/copy-button'
import { ScrollArea } from '@/components/ui/scroll-area'
@@ -209,29 +209,30 @@ export function Sidebar({
}
}, [log?.id])
const isLoadingDetails = useMemo(() => {
if (!log) return false
// Only show while we expect details to arrive (has executionId)
if (!log.executionId) return false
const hasEnhanced = !!log.executionData?.enhanced
const hasAnyDetails = hasEnhanced || !!log.cost || Array.isArray(log.executionData?.traceSpans)
return !hasAnyDetails
}, [log])
const formattedContent = useMemo(() => {
if (!log) return null
let blockInput: Record<string, any> | undefined
if (log.metadata?.blockInput) {
blockInput = log.metadata.blockInput
} else if (log.metadata?.traceSpans) {
const blockIdMatch = log.message.match(/Block .+?(\d+)/i)
const blockId = blockIdMatch ? blockIdMatch[1] : null
if (blockId) {
const matchingSpan = log.metadata.traceSpans.find(
(span) => span.blockId === blockId || span.name.includes(`Block ${blockId}`)
)
if (matchingSpan?.input) {
blockInput = matchingSpan.input
}
if (log.executionData?.blockInput) {
blockInput = log.executionData.blockInput
} else if (log.executionData?.traceSpans) {
const firstSpanWithInput = log.executionData.traceSpans.find((s) => s.input)
if (firstSpanWithInput?.input) {
blockInput = firstSpanWithInput.input as any
}
}
return formatJsonContent(log.message, blockInput)
return null
}, [log])
useEffect(() => {
@@ -243,22 +244,16 @@ export function Sidebar({
// Determine if this is a workflow execution log
const isWorkflowExecutionLog = useMemo(() => {
if (!log) return false
// Check if message contains workflow execution phrases (success or failure)
return (
log.message.toLowerCase().includes('workflow executed') ||
log.message.toLowerCase().includes('execution completed') ||
log.message.toLowerCase().includes('workflow execution failed') ||
log.message.toLowerCase().includes('execution failed') ||
(log.trigger === 'manual' && log.duration) ||
// Also check if we have enhanced logging metadata with trace spans
(log.metadata?.enhanced && log.metadata?.traceSpans)
(log.trigger === 'manual' && !!log.duration) ||
(log.executionData?.enhanced && log.executionData?.traceSpans)
)
}, [log])
// Helper to determine if we have cost information to display
// All workflow executions now have cost info (base charge + any model costs)
const hasCostInfo = useMemo(() => {
return isWorkflowExecutionLog && log?.metadata?.cost
return isWorkflowExecutionLog && log?.cost
}, [log, isWorkflowExecutionLog])
const isWorkflowWithCost = useMemo(() => {
@@ -490,6 +485,14 @@ export function Sidebar({
</div>
)}
{/* Suspense while details load (positioned after summary fields) */}
{isLoadingDetails && (
<div className='flex w-full items-center justify-start gap-2 py-2 text-muted-foreground'>
<Loader2 className='h-4 w-4 animate-spin' />
<span className='text-sm'>Loading details</span>
</div>
)}
{/* Files */}
{log.files && log.files.length > 0 && (
<div>
@@ -541,19 +544,15 @@ export function Sidebar({
</div>
)}
{/* Message Content */}
<div className='w-full pb-2'>
<h3 className='mb-1 font-medium text-muted-foreground text-xs'>Message</h3>
<div className='w-full'>{formattedContent}</div>
</div>
{/* end suspense */}
{/* Trace Spans (if available and this is a workflow execution log) */}
{isWorkflowExecutionLog && log.metadata?.traceSpans && (
{isWorkflowExecutionLog && log.executionData?.traceSpans && (
<div className='w-full'>
<div className='w-full overflow-x-hidden'>
<TraceSpansDisplay
traceSpans={log.metadata.traceSpans}
totalDuration={log.metadata.totalDuration}
traceSpans={log.executionData.traceSpans}
totalDuration={log.executionData.totalDuration}
onExpansionChange={handleTraceSpanToggle}
/>
</div>
@@ -561,11 +560,11 @@ export function Sidebar({
)}
{/* Tool Calls (if available) */}
{log.metadata?.toolCalls && log.metadata.toolCalls.length > 0 && (
{log.executionData?.toolCalls && log.executionData.toolCalls.length > 0 && (
<div className='w-full'>
<h3 className='mb-1 font-medium text-muted-foreground text-xs'>Tool Calls</h3>
<div className='w-full overflow-x-hidden rounded-md bg-secondary/30 p-3'>
<ToolCallsDisplay metadata={log.metadata} />
<ToolCallsDisplay metadata={log.executionData} />
</div>
</div>
)}
@@ -584,86 +583,80 @@ export function Sidebar({
</div>
<div className='flex items-center justify-between'>
<span className='text-muted-foreground text-sm'>Model Input:</span>
<span className='text-sm'>
{formatCost(log.metadata?.cost?.input || 0)}
</span>
<span className='text-sm'>{formatCost(log.cost?.input || 0)}</span>
</div>
<div className='flex items-center justify-between'>
<span className='text-muted-foreground text-sm'>Model Output:</span>
<span className='text-sm'>
{formatCost(log.metadata?.cost?.output || 0)}
</span>
<span className='text-sm'>{formatCost(log.cost?.output || 0)}</span>
</div>
<div className='mt-1 flex items-center justify-between border-t pt-2'>
<span className='text-muted-foreground text-sm'>Total:</span>
<span className='text-foreground text-sm'>
{formatCost(log.metadata?.cost?.total || 0)}
{formatCost(log.cost?.total || 0)}
</span>
</div>
<div className='flex items-center justify-between'>
<span className='text-muted-foreground text-xs'>Tokens:</span>
<span className='text-muted-foreground text-xs'>
{log.metadata?.cost?.tokens?.prompt || 0} in /{' '}
{log.metadata?.cost?.tokens?.completion || 0} out
{log.cost?.tokens?.prompt || 0} in / {log.cost?.tokens?.completion || 0}{' '}
out
</span>
</div>
</div>
{/* Models Breakdown */}
{log.metadata?.cost?.models &&
Object.keys(log.metadata?.cost?.models).length > 0 && (
<div className='border-t'>
<button
onClick={() => setIsModelsExpanded(!isModelsExpanded)}
className='flex w-full items-center justify-between p-3 text-left transition-colors hover:bg-muted/50'
>
<span className='font-medium text-muted-foreground text-xs'>
Model Breakdown (
{Object.keys(log.metadata?.cost?.models || {}).length})
</span>
{isModelsExpanded ? (
<ChevronUp className='h-3 w-3 text-muted-foreground' />
) : (
<ChevronDown className='h-3 w-3 text-muted-foreground' />
)}
</button>
{log.cost?.models && Object.keys(log.cost?.models).length > 0 && (
<div className='border-t'>
<button
onClick={() => setIsModelsExpanded(!isModelsExpanded)}
className='flex w-full items-center justify-between p-3 text-left transition-colors hover:bg-muted/50'
>
<span className='font-medium text-muted-foreground text-xs'>
Model Breakdown ({Object.keys(log.cost?.models || {}).length})
</span>
{isModelsExpanded ? (
<ChevronUp className='h-3 w-3 text-muted-foreground' />
) : (
<ChevronDown className='h-3 w-3 text-muted-foreground' />
)}
</button>
{isModelsExpanded && (
<div className='space-y-3 border-t bg-muted/30 p-3'>
{Object.entries(log.metadata?.cost?.models || {}).map(
([model, cost]: [string, any]) => (
<div key={model} className='space-y-1'>
<div className='font-medium font-mono text-xs'>{model}</div>
<div className='space-y-1 text-xs'>
<div className='flex justify-between'>
<span className='text-muted-foreground'>Input:</span>
<span>{formatCost(cost.input || 0)}</span>
</div>
<div className='flex justify-between'>
<span className='text-muted-foreground'>Output:</span>
<span>{formatCost(cost.output || 0)}</span>
</div>
<div className='flex justify-between border-t pt-1'>
<span className='text-muted-foreground'>Total:</span>
<span className='font-medium'>
{formatCost(cost.total || 0)}
</span>
</div>
<div className='flex justify-between'>
<span className='text-muted-foreground'>Tokens:</span>
<span>
{cost.tokens?.prompt || 0} in /{' '}
{cost.tokens?.completion || 0} out
</span>
</div>
{isModelsExpanded && (
<div className='space-y-3 border-t bg-muted/30 p-3'>
{Object.entries(log.cost?.models || {}).map(
([model, cost]: [string, any]) => (
<div key={model} className='space-y-1'>
<div className='font-medium font-mono text-xs'>{model}</div>
<div className='space-y-1 text-xs'>
<div className='flex justify-between'>
<span className='text-muted-foreground'>Input:</span>
<span>{formatCost(cost.input || 0)}</span>
</div>
<div className='flex justify-between'>
<span className='text-muted-foreground'>Output:</span>
<span>{formatCost(cost.output || 0)}</span>
</div>
<div className='flex justify-between border-t pt-1'>
<span className='text-muted-foreground'>Total:</span>
<span className='font-medium'>
{formatCost(cost.total || 0)}
</span>
</div>
<div className='flex justify-between'>
<span className='text-muted-foreground'>Tokens:</span>
<span>
{cost.tokens?.prompt || 0} in /{' '}
{cost.tokens?.completion || 0} out
</span>
</div>
</div>
)
)}
</div>
)}
</div>
)}
</div>
)
)}
</div>
)}
</div>
)}
{isWorkflowWithCost && (
<div className='border-t bg-muted p-3 text-muted-foreground text-xs'>
@@ -688,7 +681,7 @@ export function Sidebar({
executionId={log.executionId}
workflowName={log.workflow?.name}
trigger={log.trigger || undefined}
traceSpans={log.metadata?.traceSpans}
traceSpans={log.executionData?.traceSpans}
isOpen={isFrozenCanvasOpen}
onClose={() => setIsFrozenCanvasOpen(false)}
/>

View File

@@ -85,6 +85,10 @@ export default function Logs() {
const [selectedLog, setSelectedLog] = useState<WorkflowLog | null>(null)
const [selectedLogIndex, setSelectedLogIndex] = useState<number>(-1)
const [isSidebarOpen, setIsSidebarOpen] = useState(false)
const [isDetailsLoading, setIsDetailsLoading] = useState(false)
const detailsCacheRef = useRef<Map<string, any>>(new Map())
const detailsAbortRef = useRef<AbortController | null>(null)
const currentDetailsIdRef = useRef<string | null>(null)
const selectedRowRef = useRef<HTMLTableRowElement | null>(null)
const loaderRef = useRef<HTMLDivElement>(null)
const scrollContainerRef = useRef<HTMLDivElement>(null)
@@ -116,13 +120,122 @@ export default function Logs() {
const index = logs.findIndex((l) => l.id === log.id)
setSelectedLogIndex(index)
setIsSidebarOpen(true)
setIsDetailsLoading(true)
// Fetch details for current, previous, and next concurrently with cache
const currentId = log.id
const prevId = index > 0 ? logs[index - 1]?.id : undefined
const nextId = index < logs.length - 1 ? logs[index + 1]?.id : undefined
// Abort any previous details fetch batch
if (detailsAbortRef.current) {
try {
detailsAbortRef.current.abort()
} catch {
/* no-op */
}
}
const controller = new AbortController()
detailsAbortRef.current = controller
currentDetailsIdRef.current = currentId
const idsToFetch: Array<{ id: string; merge: boolean }> = []
const cachedCurrent = currentId ? detailsCacheRef.current.get(currentId) : undefined
if (currentId && !cachedCurrent) idsToFetch.push({ id: currentId, merge: true })
if (prevId && !detailsCacheRef.current.has(prevId))
idsToFetch.push({ id: prevId, merge: false })
if (nextId && !detailsCacheRef.current.has(nextId))
idsToFetch.push({ id: nextId, merge: false })
// Merge cached current immediately
if (cachedCurrent) {
setSelectedLog((prev) =>
prev && prev.id === currentId
? ({ ...(prev as any), ...(cachedCurrent as any) } as any)
: prev
)
setIsDetailsLoading(false)
}
if (idsToFetch.length === 0) return
Promise.all(
idsToFetch.map(async ({ id, merge }) => {
try {
const res = await fetch(`/api/logs/by-id/${id}`, { signal: controller.signal })
if (!res.ok) return
const body = await res.json()
const detailed = body?.data
if (detailed) {
detailsCacheRef.current.set(id, detailed)
if (merge && id === currentId) {
setSelectedLog((prev) =>
prev && prev.id === id ? ({ ...(prev as any), ...(detailed as any) } as any) : prev
)
if (currentDetailsIdRef.current === id) setIsDetailsLoading(false)
}
}
} catch (e: any) {
if (e?.name === 'AbortError') return
}
})
).catch(() => {})
}
const handleNavigateNext = useCallback(() => {
if (selectedLogIndex < logs.length - 1) {
const nextIndex = selectedLogIndex + 1
setSelectedLogIndex(nextIndex)
setSelectedLog(logs[nextIndex])
const nextLog = logs[nextIndex]
setSelectedLog(nextLog)
// Abort any previous details fetch batch
if (detailsAbortRef.current) {
try {
detailsAbortRef.current.abort()
} catch {
/* no-op */
}
}
const controller = new AbortController()
detailsAbortRef.current = controller
const cached = detailsCacheRef.current.get(nextLog.id)
if (cached) {
setSelectedLog((prev) =>
prev && prev.id === nextLog.id ? ({ ...(prev as any), ...(cached as any) } as any) : prev
)
} else {
const prevId = nextIndex > 0 ? logs[nextIndex - 1]?.id : undefined
const afterId = nextIndex < logs.length - 1 ? logs[nextIndex + 1]?.id : undefined
const idsToFetch: Array<{ id: string; merge: boolean }> = []
if (nextLog.id && !detailsCacheRef.current.has(nextLog.id))
idsToFetch.push({ id: nextLog.id, merge: true })
if (prevId && !detailsCacheRef.current.has(prevId))
idsToFetch.push({ id: prevId, merge: false })
if (afterId && !detailsCacheRef.current.has(afterId))
idsToFetch.push({ id: afterId, merge: false })
Promise.all(
idsToFetch.map(async ({ id, merge }) => {
try {
const res = await fetch(`/api/logs/by-id/${id}`, { signal: controller.signal })
if (!res.ok) return
const body = await res.json()
const detailed = body?.data
if (detailed) {
detailsCacheRef.current.set(id, detailed)
if (merge && id === nextLog.id) {
setSelectedLog((prev) =>
prev && prev.id === id
? ({ ...(prev as any), ...(detailed as any) } as any)
: prev
)
}
}
} catch (e: any) {
if (e?.name === 'AbortError') return
}
})
).catch(() => {})
}
}
}, [selectedLogIndex, logs])
@@ -130,7 +243,57 @@ export default function Logs() {
if (selectedLogIndex > 0) {
const prevIndex = selectedLogIndex - 1
setSelectedLogIndex(prevIndex)
setSelectedLog(logs[prevIndex])
const prevLog = logs[prevIndex]
setSelectedLog(prevLog)
// Abort any previous details fetch batch
if (detailsAbortRef.current) {
try {
detailsAbortRef.current.abort()
} catch {
/* no-op */
}
}
const controller = new AbortController()
detailsAbortRef.current = controller
const cached = detailsCacheRef.current.get(prevLog.id)
if (cached) {
setSelectedLog((prev) =>
prev && prev.id === prevLog.id ? ({ ...(prev as any), ...(cached as any) } as any) : prev
)
} else {
const beforeId = prevIndex > 0 ? logs[prevIndex - 1]?.id : undefined
const afterId = prevIndex < logs.length - 1 ? logs[prevIndex + 1]?.id : undefined
const idsToFetch: Array<{ id: string; merge: boolean }> = []
if (prevLog.id && !detailsCacheRef.current.has(prevLog.id))
idsToFetch.push({ id: prevLog.id, merge: true })
if (beforeId && !detailsCacheRef.current.has(beforeId))
idsToFetch.push({ id: beforeId, merge: false })
if (afterId && !detailsCacheRef.current.has(afterId))
idsToFetch.push({ id: afterId, merge: false })
Promise.all(
idsToFetch.map(async ({ id, merge }) => {
try {
const res = await fetch(`/api/logs/by-id/${id}`, { signal: controller.signal })
if (!res.ok) return
const body = await res.json()
const detailed = body?.data
if (detailed) {
detailsCacheRef.current.set(id, detailed)
if (merge && id === prevLog.id) {
setSelectedLog((prev) =>
prev && prev.id === id
? ({ ...(prev as any), ...(detailed as any) } as any)
: prev
)
}
}
} catch (e: any) {
if (e?.name === 'AbortError') return
}
})
).catch(() => {})
}
}
}, [selectedLogIndex, logs])
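The click, next, and previous handlers above repeat the same abort-and-prefetch logic against /api/logs/by-id/:id with a shared cache. A common helper is one way to keep the three copies in sync; the sketch below is illustrative only and reuses the cache and abort-controller shapes from the code above rather than anything in this commit:

// Hypothetical consolidation of the repeated details-fetch logic (not part of this commit).
async function prefetchLogDetails(
  ids: Array<{ id: string; merge: boolean }>,
  controller: AbortController,
  cache: Map<string, any>,
  mergeInto: (id: string, detailed: any) => void
) {
  await Promise.all(
    ids.map(async ({ id, merge }) => {
      try {
        const res = await fetch(`/api/logs/by-id/${id}`, { signal: controller.signal })
        if (!res.ok) return
        const detailed = (await res.json())?.data
        if (!detailed) return
        cache.set(id, detailed)
        if (merge) mergeInto(id, detailed)
      } catch (e: any) {
        // Aborted or failed fetches are swallowed, matching the existing handlers.
        if (e?.name === 'AbortError') return
      }
    })
  )
}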
@@ -160,7 +323,7 @@ export default function Logs() {
// Get fresh query params by calling buildQueryParams from store
const { buildQueryParams: getCurrentQueryParams } = useFilterStore.getState()
const queryParams = getCurrentQueryParams(pageNum, LOGS_PER_PAGE)
const response = await fetch(`/api/logs?${queryParams}`)
const response = await fetch(`/api/logs?${queryParams}&details=basic`)
if (!response.ok) {
throw new Error(`Error fetching logs: ${response.statusText}`)
@@ -262,7 +425,7 @@ export default function Logs() {
// Build query params inline to avoid dependency issues
const params = new URLSearchParams()
params.set('includeWorkflow', 'true')
params.set('details', 'basic')
params.set('limit', LOGS_PER_PAGE.toString())
params.set('offset', '0') // Always start from page 1
params.set('workspaceId', workspaceId)
@@ -482,7 +645,7 @@ export default function Logs() {
{/* Header */}
<div>
<div className='border-border border-b'>
<div className='grid min-w-[600px] grid-cols-[120px_80px_120px_80px_1fr] gap-2 px-2 pb-3 md:grid-cols-[140px_90px_140px_90px_1fr] md:gap-3 lg:min-w-0 lg:grid-cols-[160px_100px_160px_100px_1fr] lg:gap-4 xl:grid-cols-[160px_100px_160px_100px_100px_1fr_100px]'>
<div className='grid min-w-[600px] grid-cols-[120px_80px_120px_120px] gap-2 px-2 pb-3 md:grid-cols-[140px_90px_140px_120px] md:gap-3 lg:min-w-0 lg:grid-cols-[160px_100px_160px_120px] lg:gap-4 xl:grid-cols-[160px_100px_160px_120px_120px_100px]'>
<div className='font-[480] font-sans text-[13px] text-muted-foreground leading-normal'>
Time
</div>
@@ -493,14 +656,12 @@ export default function Logs() {
Workflow
</div>
<div className='font-[480] font-sans text-[13px] text-muted-foreground leading-normal'>
ID
Cost
</div>
<div className='hidden font-[480] font-sans text-[13px] text-muted-foreground leading-normal xl:block'>
Trigger
</div>
<div className='font-[480] font-sans text-[13px] text-muted-foreground leading-normal'>
Message
</div>
<div className='hidden font-[480] font-sans text-[13px] text-muted-foreground leading-normal xl:block'>
Duration
</div>
@@ -547,7 +708,7 @@ export default function Logs() {
}`}
onClick={() => handleLogClick(log)}
>
<div className='grid min-w-[600px] grid-cols-[120px_80px_120px_80px_1fr] items-center gap-2 px-2 py-4 md:grid-cols-[140px_90px_140px_90px_1fr] md:gap-3 lg:min-w-0 lg:grid-cols-[160px_100px_160px_100px_1fr] lg:gap-4 xl:grid-cols-[160px_100px_160px_100px_100px_1fr_100px]'>
<div className='grid min-w-[600px] grid-cols-[120px_80px_120px_120px] items-center gap-2 px-2 py-4 md:grid-cols-[140px_90px_140px_120px] md:gap-3 lg:min-w-0 lg:grid-cols-[160px_100px_160px_120px] lg:gap-4 xl:grid-cols-[160px_100px_160px_120px_120px_100px]'>
{/* Time */}
<div>
<div className='text-[13px]'>
@@ -584,10 +745,12 @@ export default function Logs() {
</div>
</div>
{/* ID */}
{/* Cost */}
<div>
<div className='font-medium text-muted-foreground text-xs'>
#{log.id.slice(-4)}
{typeof (log as any)?.cost?.total === 'number'
? `$${((log as any).cost.total as number).toFixed(4)}`
: '—'}
</div>
</div>
@@ -614,11 +777,6 @@ export default function Logs() {
)}
</div>
{/* Message */}
<div className='min-w-0'>
<div className='truncate font-[420] text-[13px]'>{log.message}</div>
</div>
{/* Duration */}
<div className='hidden xl:block'>
<div className='text-muted-foreground text-xs'>

View File

@@ -32,7 +32,6 @@ import {
TooltipTrigger,
} from '@/components/ui'
import { useSession } from '@/lib/auth-client'
import { isDev } from '@/lib/environment'
import { createLogger } from '@/lib/logs/console/logger'
import { cn } from '@/lib/utils'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
@@ -341,10 +340,11 @@ export function ControlBar({ hasValidationErrors = false }: ControlBarProps) {
* Handle deleting the current workflow
*/
const handleDeleteWorkflow = () => {
if (!activeWorkflowId || !userPermissions.canEdit) return
const currentWorkflowId = params.workflowId as string
if (!currentWorkflowId || !userPermissions.canEdit) return
const sidebarWorkflows = getSidebarOrderedWorkflows()
const currentIndex = sidebarWorkflows.findIndex((w) => w.id === activeWorkflowId)
const currentIndex = sidebarWorkflows.findIndex((w) => w.id === currentWorkflowId)
// Find next workflow: try next, then previous
let nextWorkflowId: string | null = null
@@ -363,8 +363,8 @@ export function ControlBar({ hasValidationErrors = false }: ControlBarProps) {
router.push(`/workspace/${workspaceId}`)
}
// Remove the workflow from the registry
useWorkflowRegistry.getState().removeWorkflow(activeWorkflowId)
// Remove the workflow from the registry using the URL parameter
useWorkflowRegistry.getState().removeWorkflow(currentWorkflowId)
}
// Helper function to open subscription settings
@@ -413,7 +413,7 @@ export function ControlBar({ hasValidationErrors = false }: ControlBarProps) {
<Tooltip>
<TooltipTrigger asChild>
<div className='inline-flex h-12 w-12 cursor-not-allowed items-center justify-center rounded-[11px] border bg-card text-card-foreground opacity-50 shadow-xs transition-colors'>
<Trash2 className='h-5 w-5' />
<Trash2 className='h-4 w-4' />
</div>
</TooltipTrigger>
<TooltipContent>{getTooltipText()}</TooltipContent>
@@ -498,7 +498,7 @@ export function ControlBar({ hasValidationErrors = false }: ControlBarProps) {
<TooltipTrigger asChild>
{isDisabled ? (
<div className='inline-flex h-12 w-12 cursor-not-allowed items-center justify-center rounded-[11px] border bg-card text-card-foreground opacity-50 shadow-xs transition-colors'>
<Copy className='h-5 w-5' />
<Copy className='h-4 w-4' />
</div>
) : (
<Button
@@ -563,9 +563,9 @@ export function ControlBar({ hasValidationErrors = false }: ControlBarProps) {
{isDisabled ? (
<div className='inline-flex h-12 w-12 cursor-not-allowed items-center justify-center rounded-[11px] border bg-card text-card-foreground opacity-50 shadow-xs transition-colors'>
{isAutoLayouting ? (
<RefreshCw className='h-5 w-5 animate-spin' />
<RefreshCw className='h-4 w-4 animate-spin' />
) : (
<Layers className='h-5 w-5' />
<Layers className='h-4 w-4' />
)}
</div>
) : (
@@ -721,7 +721,7 @@ export function ControlBar({ hasValidationErrors = false }: ControlBarProps) {
<TooltipTrigger asChild>
{isDisabled ? (
<div className='inline-flex h-12 w-12 cursor-not-allowed items-center justify-center rounded-[11px] border bg-card text-card-foreground opacity-50 shadow-xs transition-colors'>
<Store className='h-5 w-5' />
<Store className='h-4 w-4' />
</div>
) : (
<Button
@@ -775,7 +775,7 @@ export function ControlBar({ hasValidationErrors = false }: ControlBarProps) {
isDebugging && 'text-amber-500'
)}
>
<Bug className='h-5 w-5' />
<Bug className='h-4 w-4' />
</div>
) : (
<Button variant='outline' onClick={handleDebugToggle} className={buttonClass}>
@@ -999,14 +999,13 @@ export function ControlBar({ hasValidationErrors = false }: ControlBarProps) {
return (
<div className='fixed top-4 right-4 z-20 flex items-center gap-1'>
{renderDisconnectionNotice()}
{!isDev && renderToggleButton()}
{isExpanded && !isDev && <ExportControls />}
{isExpanded && !isDev && renderAutoLayoutButton()}
{!isDev && isExpanded && renderDuplicateButton()}
{isDev && renderDuplicateButton()}
{renderToggleButton()}
{isExpanded && <ExportControls />}
{isExpanded && renderAutoLayoutButton()}
{renderDuplicateButton()}
{renderDeleteButton()}
{!isDebugging && renderDebugModeToggle()}
{renderPublishButton()}
{isExpanded && renderPublishButton()}
{renderDeployButton()}
{isDebugging ? renderDebugControlsBar() : renderRunButton()}

View File

@@ -196,22 +196,17 @@ export function DiffControls() {
logger.warn('Failed to clear preview YAML:', error)
})
// Accept changes with automatic backup and rollback on failure
await acceptChanges()
// Accept changes without blocking the UI; errors will be logged by the store handler
acceptChanges().catch((error) => {
logger.error('Failed to accept changes (background):', error)
})
logger.info('Successfully accepted and saved workflow changes')
// Show success feedback if needed
logger.info('Accept triggered; UI will update optimistically')
} catch (error) {
logger.error('Failed to accept changes:', error)
// Show error notification to user
// Note: The acceptChanges function has already rolled back the state
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
// You could add toast notification here
console.error('Workflow update failed:', errorMessage)
// Optionally show user-facing error dialog
alert(`Failed to save workflow changes: ${errorMessage}`)
}
}

View File

@@ -4,6 +4,8 @@ import { Component, type ReactNode, useEffect } from 'react'
import { BotIcon } from 'lucide-react'
import { Card } from '@/components/ui/card'
import { createLogger } from '@/lib/logs/console/logger'
import { ControlBar } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/control-bar/control-bar'
import { Panel } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/panel'
const logger = createLogger('ErrorBoundary')
@@ -22,18 +24,32 @@ export function ErrorUI({
fullScreen = false,
}: ErrorUIProps) {
const containerClass = fullScreen
? 'flex items-center justify-center w-full h-screen bg-muted/40'
: 'flex items-center justify-center w-full h-full bg-muted/40'
? 'flex flex-col w-full h-screen bg-muted/40'
: 'flex flex-col w-full h-full bg-muted/40'
return (
<div className={containerClass}>
<Card className='max-w-md space-y-4 p-6 text-center'>
<div className='flex justify-center'>
<BotIcon className='h-16 w-16 text-muted-foreground' />
{/* Control bar */}
<ControlBar hasValidationErrors={false} />
{/* Main content area */}
<div className='relative flex flex-1'>
{/* Error message */}
<div className='flex flex-1 items-center justify-center'>
<Card className='max-w-md space-y-4 p-6 text-center'>
<div className='flex justify-center'>
<BotIcon className='h-16 w-16 text-muted-foreground' />
</div>
<h3 className='font-semibold text-lg'>{title}</h3>
<p className='text-muted-foreground'>{message}</p>
</Card>
</div>
<h3 className='font-semibold text-lg'>{title}</h3>
<p className='text-muted-foreground'>{message}</p>
</Card>
{/* Console panel */}
<div className='fixed top-0 right-0 z-10'>
<Panel />
</div>
</div>
</div>
)
}

View File

@@ -2,8 +2,7 @@ export { ControlBar } from './control-bar/control-bar'
export { ErrorBoundary } from './error/index'
export { Panel } from './panel/panel'
export { SkeletonLoading } from './skeleton-loading/skeleton-loading'
export { LoopNodeComponent } from './subflows/loop/loop-node'
export { ParallelNodeComponent } from './subflows/parallel/parallel-node'
export { SubflowNodeComponent } from './subflows/subflow-node'
export { WandPromptBar } from './wand-prompt-bar/wand-prompt-bar'
export { WorkflowBlock } from './workflow-block/workflow-block'
export { WorkflowEdge } from './workflow-edge/workflow-edge'

View File

@@ -10,12 +10,12 @@ import {
} from 'react'
import {
ArrowUp,
Boxes,
Brain,
BrainCircuit,
BrainCog,
Check,
FileText,
Image,
Infinity as InfinityIcon,
Loader2,
MessageCircle,
Package,
@@ -435,14 +435,14 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
}
const getDepthLabel = () => {
if (agentDepth === 0) return 'Lite'
if (agentDepth === 0) return 'Fast'
if (agentDepth === 1) return 'Auto'
if (agentDepth === 2) return 'Pro'
return 'Max'
}
const getDepthLabelFor = (value: 0 | 1 | 2 | 3) => {
if (value === 0) return 'Lite'
if (value === 0) return 'Fast'
if (value === 1) return 'Auto'
if (value === 2) return 'Pro'
return 'Max'
@@ -459,9 +459,9 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
const getDepthIconFor = (value: 0 | 1 | 2 | 3) => {
if (value === 0) return <Zap className='h-3 w-3 text-muted-foreground' />
if (value === 1) return <Boxes className='h-3 w-3 text-muted-foreground' />
if (value === 2) return <BrainCircuit className='h-3 w-3 text-muted-foreground' />
return <BrainCog className='h-3 w-3 text-muted-foreground' />
if (value === 1) return <InfinityIcon className='h-3 w-3 text-muted-foreground' />
if (value === 2) return <Brain className='h-3 w-3 text-muted-foreground' />
return <BrainCircuit className='h-3 w-3 text-muted-foreground' />
}
const getDepthIcon = () => getDepthIconFor(agentDepth)
@@ -654,7 +654,7 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
)}
>
<span className='flex items-center gap-1.5'>
<Boxes className='h-3 w-3 text-muted-foreground' />
<InfinityIcon className='h-3 w-3 text-muted-foreground' />
Auto
</span>
{agentDepth === 1 && (
@@ -682,7 +682,7 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
>
<span className='flex items-center gap-1.5'>
<Zap className='h-3 w-3 text-muted-foreground' />
Lite
Fast
</span>
{agentDepth === 0 && (
<Check className='h-3 w-3 text-muted-foreground' />
@@ -709,7 +709,7 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
)}
>
<span className='flex items-center gap-1.5'>
<BrainCircuit className='h-3 w-3 text-muted-foreground' />
<Brain className='h-3 w-3 text-muted-foreground' />
Pro
</span>
{agentDepth === 2 && (
@@ -737,7 +737,7 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
)}
>
<span className='flex items-center gap-1.5'>
<BrainCog className='h-3 w-3 text-muted-foreground' />
<BrainCircuit className='h-3 w-3 text-muted-foreground' />
Max
</span>
{agentDepth === 3 && (

View File

@@ -9,7 +9,6 @@ import {
} from '@/components/ui/dropdown-menu'
import { ScrollArea } from '@/components/ui/scroll-area'
import { Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui/tooltip'
import { isDev } from '@/lib/environment'
import { useCopilotStore } from '@/stores/copilot/store'
import { useChatStore } from '@/stores/panel/chat/store'
import { useConsoleStore } from '@/stores/panel/console/store'
@@ -305,16 +304,14 @@ export function Panel() {
>
Console
</button>
{!isDev && (
<button
onClick={() => handleTabClick('copilot')}
className={`panel-tab-base inline-flex flex-1 cursor-pointer items-center justify-center rounded-[10px] border border-transparent py-1 font-[450] text-sm outline-none transition-colors duration-200 ${
isOpen && activeTab === 'copilot' ? 'panel-tab-active' : 'panel-tab-inactive'
}`}
>
Copilot
</button>
)}
<button
onClick={() => handleTabClick('copilot')}
className={`panel-tab-base inline-flex flex-1 cursor-pointer items-center justify-center rounded-[10px] border border-transparent py-1 font-[450] text-sm outline-none transition-colors duration-200 ${
isOpen && activeTab === 'copilot' ? 'panel-tab-active' : 'panel-tab-inactive'
}`}
>
Copilot
</button>
<button
onClick={() => handleTabClick('variables')}
className={`panel-tab-base inline-flex flex-1 cursor-pointer items-center justify-center rounded-[10px] border border-transparent py-1 font-[450] text-sm outline-none transition-colors duration-200 ${

View File

@@ -0,0 +1,388 @@
import { beforeEach, describe, expect, it, vi } from 'vitest'
// Mock hooks
const mockCollaborativeUpdates = {
collaborativeUpdateLoopType: vi.fn(),
collaborativeUpdateParallelType: vi.fn(),
collaborativeUpdateIterationCount: vi.fn(),
collaborativeUpdateIterationCollection: vi.fn(),
}
const mockStoreData = {
loops: {},
parallels: {},
}
vi.mock('@/hooks/use-collaborative-workflow', () => ({
useCollaborativeWorkflow: () => mockCollaborativeUpdates,
}))
vi.mock('@/stores/workflows/workflow/store', () => ({
useWorkflowStore: () => mockStoreData,
}))
vi.mock('@/components/ui/badge', () => ({
Badge: ({ children, ...props }: any) => (
<div data-testid='badge' {...props}>
{children}
</div>
),
}))
vi.mock('@/components/ui/input', () => ({
Input: (props: any) => <input data-testid='input' {...props} />,
}))
vi.mock('@/components/ui/popover', () => ({
Popover: ({ children }: any) => <div data-testid='popover'>{children}</div>,
PopoverContent: ({ children }: any) => <div data-testid='popover-content'>{children}</div>,
PopoverTrigger: ({ children }: any) => <div data-testid='popover-trigger'>{children}</div>,
}))
vi.mock('@/components/ui/tag-dropdown', () => ({
checkTagTrigger: vi.fn(() => ({ show: false })),
TagDropdown: ({ children }: any) => <div data-testid='tag-dropdown'>{children}</div>,
}))
vi.mock('react-simple-code-editor', () => ({
default: (props: any) => <textarea data-testid='code-editor' {...props} />,
}))
describe('IterationBadges', () => {
const defaultProps = {
nodeId: 'test-node-1',
data: {
width: 500,
height: 300,
isPreview: false,
},
iterationType: 'loop' as const,
}
beforeEach(() => {
vi.clearAllMocks()
mockStoreData.loops = {}
mockStoreData.parallels = {}
})
describe('Component Interface', () => {
it.concurrent('should accept required props', () => {
expect(defaultProps.nodeId).toBeDefined()
expect(defaultProps.data).toBeDefined()
expect(defaultProps.iterationType).toBeDefined()
})
it.concurrent('should handle loop iteration type prop', () => {
const loopProps = { ...defaultProps, iterationType: 'loop' as const }
expect(loopProps.iterationType).toBe('loop')
})
it.concurrent('should handle parallel iteration type prop', () => {
const parallelProps = { ...defaultProps, iterationType: 'parallel' as const }
expect(parallelProps.iterationType).toBe('parallel')
})
})
describe('Configuration System', () => {
it.concurrent('should use correct config for loop type', () => {
const CONFIG = {
loop: {
typeLabels: { for: 'For Loop', forEach: 'For Each' },
typeKey: 'loopType' as const,
storeKey: 'loops' as const,
maxIterations: 100,
configKeys: {
iterations: 'iterations' as const,
items: 'forEachItems' as const,
},
},
}
expect(CONFIG.loop.typeLabels.for).toBe('For Loop')
expect(CONFIG.loop.typeLabels.forEach).toBe('For Each')
expect(CONFIG.loop.maxIterations).toBe(100)
expect(CONFIG.loop.storeKey).toBe('loops')
})
it.concurrent('should use correct config for parallel type', () => {
const CONFIG = {
parallel: {
typeLabels: { count: 'Parallel Count', collection: 'Parallel Each' },
typeKey: 'parallelType' as const,
storeKey: 'parallels' as const,
maxIterations: 20,
configKeys: {
iterations: 'count' as const,
items: 'distribution' as const,
},
},
}
expect(CONFIG.parallel.typeLabels.count).toBe('Parallel Count')
expect(CONFIG.parallel.typeLabels.collection).toBe('Parallel Each')
expect(CONFIG.parallel.maxIterations).toBe(20)
expect(CONFIG.parallel.storeKey).toBe('parallels')
})
})
describe('Type Determination Logic', () => {
it.concurrent('should default to "for" for loop type', () => {
type IterationType = 'loop' | 'parallel'
const determineDefaultType = (iterationType: IterationType) => {
return iterationType === 'loop' ? 'for' : 'count'
}
const currentType = determineDefaultType('loop')
expect(currentType).toBe('for')
})
it.concurrent('should default to "count" for parallel type', () => {
type IterationType = 'loop' | 'parallel'
const determineDefaultType = (iterationType: IterationType) => {
return iterationType === 'loop' ? 'for' : 'count'
}
const currentType = determineDefaultType('parallel')
expect(currentType).toBe('count')
})
it.concurrent('should use explicit loopType when provided', () => {
type IterationType = 'loop' | 'parallel'
const determineType = (explicitType: string | undefined, iterationType: IterationType) => {
return explicitType || (iterationType === 'loop' ? 'for' : 'count')
}
const currentType = determineType('forEach', 'loop')
expect(currentType).toBe('forEach')
})
it.concurrent('should use explicit parallelType when provided', () => {
type IterationType = 'loop' | 'parallel'
const determineType = (explicitType: string | undefined, iterationType: IterationType) => {
return explicitType || (iterationType === 'loop' ? 'for' : 'count')
}
const currentType = determineType('collection', 'parallel')
expect(currentType).toBe('collection')
})
})
describe('Count Mode Detection', () => {
it.concurrent('should be in count mode for loop + for combination', () => {
type IterationType = 'loop' | 'parallel'
type LoopType = 'for' | 'forEach'
type ParallelType = 'count' | 'collection'
const iterationType: IterationType = 'loop'
const currentType: LoopType = 'for'
const isCountMode = iterationType === 'loop' && currentType === 'for'
expect(isCountMode).toBe(true)
})
it.concurrent('should be in count mode for parallel + count combination', () => {
type IterationType = 'loop' | 'parallel'
type ParallelType = 'count' | 'collection'
const iterationType: IterationType = 'parallel'
const currentType: ParallelType = 'count'
const isCountMode = iterationType === 'parallel' && currentType === 'count'
expect(isCountMode).toBe(true)
})
it.concurrent('should not be in count mode for loop + forEach combination', () => {
type IterationType = 'loop' | 'parallel'
const testCountMode = (iterationType: IterationType, currentType: string) => {
return iterationType === 'loop' && currentType === 'for'
}
const isCountMode = testCountMode('loop', 'forEach')
expect(isCountMode).toBe(false)
})
it.concurrent('should not be in count mode for parallel + collection combination', () => {
type IterationType = 'loop' | 'parallel'
const testCountMode = (iterationType: IterationType, currentType: string) => {
return iterationType === 'parallel' && currentType === 'count'
}
const isCountMode = testCountMode('parallel', 'collection')
expect(isCountMode).toBe(false)
})
})
describe('Configuration Values', () => {
it.concurrent('should handle default iteration count', () => {
const data = { count: undefined }
const configIterations = data.count ?? 5
expect(configIterations).toBe(5)
})
it.concurrent('should use provided iteration count', () => {
const data = { count: 10 }
const configIterations = data.count ?? 5
expect(configIterations).toBe(10)
})
it.concurrent('should handle string collection', () => {
const collection = '[1, 2, 3, 4, 5]'
const collectionString =
typeof collection === 'string' ? collection : JSON.stringify(collection) || ''
expect(collectionString).toBe('[1, 2, 3, 4, 5]')
})
it.concurrent('should handle object collection', () => {
const collection = { items: [1, 2, 3] }
const collectionString =
typeof collection === 'string' ? collection : JSON.stringify(collection) || ''
expect(collectionString).toBe('{"items":[1,2,3]}')
})
it.concurrent('should handle array collection', () => {
const collection = [1, 2, 3, 4, 5]
const collectionString =
typeof collection === 'string' ? collection : JSON.stringify(collection) || ''
expect(collectionString).toBe('[1,2,3,4,5]')
})
})
describe('Preview Mode Handling', () => {
it.concurrent('should handle preview mode for loops', () => {
const previewProps = {
...defaultProps,
data: { ...defaultProps.data, isPreview: true },
iterationType: 'loop' as const,
}
expect(previewProps.data.isPreview).toBe(true)
// In preview mode, collaborative functions shouldn't be called
expect(mockCollaborativeUpdates.collaborativeUpdateLoopType).not.toHaveBeenCalled()
})
it.concurrent('should handle preview mode for parallels', () => {
const previewProps = {
...defaultProps,
data: { ...defaultProps.data, isPreview: true },
iterationType: 'parallel' as const,
}
expect(previewProps.data.isPreview).toBe(true)
// In preview mode, collaborative functions shouldn't be called
expect(mockCollaborativeUpdates.collaborativeUpdateParallelType).not.toHaveBeenCalled()
})
})
describe('Store Integration', () => {
it.concurrent('should access loops store for loop iteration type', () => {
const nodeId = 'loop-node-1'
;(mockStoreData.loops as any)[nodeId] = { iterations: 10 }
const nodeConfig = (mockStoreData.loops as any)[nodeId]
expect(nodeConfig).toBeDefined()
expect(nodeConfig.iterations).toBe(10)
})
it.concurrent('should access parallels store for parallel iteration type', () => {
const nodeId = 'parallel-node-1'
;(mockStoreData.parallels as any)[nodeId] = { count: 5 }
const nodeConfig = (mockStoreData.parallels as any)[nodeId]
expect(nodeConfig).toBeDefined()
expect(nodeConfig.count).toBe(5)
})
it.concurrent('should handle missing node configuration gracefully', () => {
const nodeId = 'missing-node'
const nodeConfig = (mockStoreData.loops as any)[nodeId]
expect(nodeConfig).toBeUndefined()
})
})
describe('Max Iterations Limits', () => {
it.concurrent('should enforce max iterations for loops (100)', () => {
const maxIterations = 100
const testValue = 150
const clampedValue = Math.min(maxIterations, testValue)
expect(clampedValue).toBe(100)
})
it.concurrent('should enforce max iterations for parallels (20)', () => {
const maxIterations = 20
const testValue = 50
const clampedValue = Math.min(maxIterations, testValue)
expect(clampedValue).toBe(20)
})
it.concurrent('should allow values within limits', () => {
const loopMaxIterations = 100
const parallelMaxIterations = 20
expect(Math.min(loopMaxIterations, 50)).toBe(50)
expect(Math.min(parallelMaxIterations, 10)).toBe(10)
})
})
describe('Collaborative Update Functions', () => {
it.concurrent('should have the correct collaborative functions available', () => {
expect(mockCollaborativeUpdates.collaborativeUpdateLoopType).toBeDefined()
expect(mockCollaborativeUpdates.collaborativeUpdateParallelType).toBeDefined()
expect(mockCollaborativeUpdates.collaborativeUpdateIterationCount).toBeDefined()
expect(mockCollaborativeUpdates.collaborativeUpdateIterationCollection).toBeDefined()
})
it.concurrent('should call correct function for loop type updates', () => {
const handleTypeChange = (newType: string, iterationType: string, nodeId: string) => {
if (iterationType === 'loop') {
mockCollaborativeUpdates.collaborativeUpdateLoopType(nodeId, newType)
} else {
mockCollaborativeUpdates.collaborativeUpdateParallelType(nodeId, newType)
}
}
handleTypeChange('forEach', 'loop', 'test-node')
expect(mockCollaborativeUpdates.collaborativeUpdateLoopType).toHaveBeenCalledWith(
'test-node',
'forEach'
)
})
it.concurrent('should call correct function for parallel type updates', () => {
const handleTypeChange = (newType: string, iterationType: string, nodeId: string) => {
if (iterationType === 'loop') {
mockCollaborativeUpdates.collaborativeUpdateLoopType(nodeId, newType)
} else {
mockCollaborativeUpdates.collaborativeUpdateParallelType(nodeId, newType)
}
}
handleTypeChange('collection', 'parallel', 'test-node')
expect(mockCollaborativeUpdates.collaborativeUpdateParallelType).toHaveBeenCalledWith(
'test-node',
'collection'
)
})
})
describe('Input Sanitization', () => {
it.concurrent('should sanitize numeric input by removing non-digits', () => {
const testInput = 'abc123def456'
const sanitized = testInput.replace(/[^0-9]/g, '')
expect(sanitized).toBe('123456')
})
it.concurrent('should handle empty input', () => {
const testInput = ''
const sanitized = testInput.replace(/[^0-9]/g, '')
expect(sanitized).toBe('')
})
it.concurrent('should preserve valid numeric input', () => {
const testInput = '42'
const sanitized = testInput.replace(/[^0-9]/g, '')
expect(sanitized).toBe('42')
})
})
})
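Taken together, the sanitization and clamping tests above describe a small input handler. A minimal sketch, assuming a hypothetical helper name (the 100/20 limits are the ones asserted in the tests):

type IterationType = 'loop' | 'parallel'

// Limits asserted in the tests above: 100 iterations for loops, 20 for parallels.
const MAX_ITERATIONS: Record<IterationType, number> = { loop: 100, parallel: 20 }

// Hypothetical helper combining the sanitize-then-clamp behavior the tests exercise.
function parseIterationInput(raw: string, iterationType: IterationType): number | undefined {
  const sanitized = raw.replace(/[^0-9]/g, '') // strip non-digit characters
  if (sanitized === '') return undefined
  return Math.min(MAX_ITERATIONS[iterationType], Number.parseInt(sanitized, 10))
}

// parseIterationInput('abc150def', 'loop')  -> 100
// parseIterationInput('50', 'parallel')     -> 20
// parseIterationInput('', 'loop')           -> undefined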


@@ -1,452 +0,0 @@
import { beforeEach, describe, expect, it, vi } from 'vitest'
import { LoopNodeComponent } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/loop/loop-node'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
vi.mock('@/stores/workflows/workflow/store', () => ({
useWorkflowStore: vi.fn(),
}))
vi.mock('@/lib/logs/logger', () => ({
createLogger: vi.fn(() => ({
debug: vi.fn(),
info: vi.fn(),
warn: vi.fn(),
error: vi.fn(),
})),
}))
vi.mock('reactflow', () => ({
Handle: ({ id, type, position }: any) => ({ id, type, position }),
Position: {
Top: 'top',
Bottom: 'bottom',
Left: 'left',
Right: 'right',
},
useReactFlow: () => ({
getNodes: vi.fn(() => []),
}),
NodeResizer: ({ isVisible }: any) => ({ isVisible }),
memo: (component: any) => component,
}))
vi.mock('react', async () => {
const actual = await vi.importActual('react')
return {
...actual,
memo: (component: any) => component,
useMemo: (fn: any) => fn(),
useRef: () => ({ current: null }),
}
})
vi.mock('@/components/ui/button', () => ({
Button: ({ children, onClick, ...props }: any) => ({ children, onClick, ...props }),
}))
vi.mock('@/components/ui/card', () => ({
Card: ({ children, ...props }: any) => ({ children, ...props }),
}))
vi.mock('@/components/icons', async (importOriginal) => {
const actual = (await importOriginal()) as any
return {
...actual,
// Override specific icons if needed for testing
StartIcon: ({ className }: any) => ({ className }),
}
})
vi.mock('@/lib/utils', () => ({
cn: (...classes: any[]) => classes.filter(Boolean).join(' '),
}))
vi.mock('@/app/workspace/[workspaceId]/w/[workflowId]/components/loop-badges', () => ({
LoopBadges: ({ loopId }: any) => ({ loopId }),
}))
describe('LoopNodeComponent', () => {
const mockRemoveBlock = vi.fn()
const mockGetNodes = vi.fn()
const defaultProps = {
id: 'loop-1',
type: 'loopNode',
data: {
width: 500,
height: 300,
state: 'valid',
},
selected: false,
zIndex: 1,
isConnectable: true,
xPos: 0,
yPos: 0,
dragging: false,
}
beforeEach(() => {
vi.clearAllMocks()
;(useWorkflowStore as any).mockImplementation((selector: any) => {
const state = {
removeBlock: mockRemoveBlock,
}
return selector(state)
})
mockGetNodes.mockReturnValue([])
})
describe('Component Definition and Structure', () => {
it('should be defined as a function component', () => {
expect(LoopNodeComponent).toBeDefined()
expect(typeof LoopNodeComponent).toBe('function')
})
it('should have correct display name', () => {
expect(LoopNodeComponent.displayName).toBe('LoopNodeComponent')
})
it('should be a memoized component', () => {
expect(LoopNodeComponent).toBeDefined()
})
})
describe('Props Validation and Type Safety', () => {
it('should accept NodeProps interface', () => {
const validProps = {
id: 'test-id',
type: 'loopNode' as const,
data: {
width: 400,
height: 300,
state: 'valid' as const,
},
selected: false,
zIndex: 1,
isConnectable: true,
xPos: 0,
yPos: 0,
dragging: false,
}
expect(() => {
const _component: typeof LoopNodeComponent = LoopNodeComponent
expect(_component).toBeDefined()
}).not.toThrow()
})
it('should handle different data configurations', () => {
const configurations = [
{ width: 500, height: 300, state: 'valid' },
{ width: 800, height: 600, state: 'invalid' },
{ width: 0, height: 0, state: 'pending' },
{},
]
configurations.forEach((data) => {
const props = { ...defaultProps, data }
expect(() => {
const _component: typeof LoopNodeComponent = LoopNodeComponent
expect(_component).toBeDefined()
}).not.toThrow()
})
})
})
describe('Store Integration', () => {
it('should integrate with workflow store', () => {
expect(useWorkflowStore).toBeDefined()
const mockState = { removeBlock: mockRemoveBlock }
const selector = vi.fn((state) => state.removeBlock)
expect(() => {
selector(mockState)
}).not.toThrow()
expect(selector(mockState)).toBe(mockRemoveBlock)
})
it('should handle removeBlock function', () => {
expect(mockRemoveBlock).toBeDefined()
expect(typeof mockRemoveBlock).toBe('function')
mockRemoveBlock('test-id')
expect(mockRemoveBlock).toHaveBeenCalledWith('test-id')
})
})
describe('Component Logic Tests', () => {
it('should handle nesting level calculation logic', () => {
const testCases = [
{ nodes: [], parentId: undefined, expectedLevel: 0 },
{ nodes: [{ id: 'parent', data: {} }], parentId: 'parent', expectedLevel: 1 },
{
nodes: [
{ id: 'parent', data: { parentId: 'grandparent' } },
{ id: 'grandparent', data: {} },
],
parentId: 'parent',
expectedLevel: 2,
},
]
testCases.forEach(({ nodes, parentId, expectedLevel }) => {
mockGetNodes.mockReturnValue(nodes)
// Simulate the nesting level calculation logic
let level = 0
let currentParentId = parentId
while (currentParentId) {
level++
const parentNode = nodes.find((n) => n.id === currentParentId)
if (!parentNode) break
currentParentId = parentNode.data?.parentId
}
expect(level).toBe(expectedLevel)
})
})
it('should handle nested styles generation', () => {
// Test the nested styles logic
const testCases = [
{ nestingLevel: 0, state: 'valid', expectedBg: 'rgba(34,197,94,0.05)' },
{ nestingLevel: 0, state: 'invalid', expectedBg: 'transparent' },
{ nestingLevel: 1, state: 'valid', expectedBg: '#e2e8f030' },
{ nestingLevel: 2, state: 'valid', expectedBg: '#cbd5e130' },
]
testCases.forEach(({ nestingLevel, state, expectedBg }) => {
// Simulate the getNestedStyles logic
const styles: Record<string, string> = {
backgroundColor: state === 'valid' ? 'rgba(34,197,94,0.05)' : 'transparent',
}
if (nestingLevel > 0) {
const colors = ['#e2e8f0', '#cbd5e1', '#94a3b8', '#64748b', '#475569']
const colorIndex = (nestingLevel - 1) % colors.length
styles.backgroundColor = `${colors[colorIndex]}30`
}
expect(styles.backgroundColor).toBe(expectedBg)
})
})
})
describe('Component Configuration', () => {
it('should handle different dimensions', () => {
const dimensionTests = [
{ width: 500, height: 300 },
{ width: 800, height: 600 },
{ width: 0, height: 0 },
{ width: 10000, height: 10000 },
]
dimensionTests.forEach(({ width, height }) => {
const data = { width, height, state: 'valid' }
expect(data.width).toBe(width)
expect(data.height).toBe(height)
})
})
it('should handle different states', () => {
const stateTests = ['valid', 'invalid', 'pending', 'executing']
stateTests.forEach((state) => {
const data = { width: 500, height: 300, state }
expect(data.state).toBe(state)
})
})
})
describe('Event Handling Logic', () => {
it('should handle delete button click logic', () => {
const mockEvent = {
stopPropagation: vi.fn(),
}
// Simulate the delete button click handler
const handleDelete = (e: any, nodeId: string) => {
e.stopPropagation()
mockRemoveBlock(nodeId)
}
handleDelete(mockEvent, 'test-id')
expect(mockEvent.stopPropagation).toHaveBeenCalled()
expect(mockRemoveBlock).toHaveBeenCalledWith('test-id')
})
it('should handle event propagation prevention', () => {
const mockEvent = {
stopPropagation: vi.fn(),
}
// Test that stopPropagation is called
mockEvent.stopPropagation()
expect(mockEvent.stopPropagation).toHaveBeenCalled()
})
})
describe('Component Data Handling', () => {
it('should handle missing data properties gracefully', () => {
const testCases = [
undefined,
{},
{ width: 500 },
{ height: 300 },
{ state: 'valid' },
{ width: 500, height: 300 },
]
testCases.forEach((data) => {
const props = { ...defaultProps, data }
// Test default values logic
const width = Math.max(0, data?.width || 500)
const height = Math.max(0, data?.height || 300)
expect(width).toBeGreaterThanOrEqual(0)
expect(height).toBeGreaterThanOrEqual(0)
})
})
it('should handle parent ID relationships', () => {
const testCases = [
{ parentId: undefined, hasParent: false },
{ parentId: 'parent-1', hasParent: true },
{ parentId: '', hasParent: false },
]
testCases.forEach(({ parentId, hasParent }) => {
const data = { ...defaultProps.data, parentId }
expect(Boolean(data.parentId)).toBe(hasParent)
})
})
})
describe('Edge Cases and Error Handling', () => {
it('should handle circular parent references', () => {
// Test circular reference prevention
const nodes = [
{ id: 'node1', data: { parentId: 'node2' } },
{ id: 'node2', data: { parentId: 'node1' } },
]
mockGetNodes.mockReturnValue(nodes)
// Test the actual component's nesting level calculation logic
// This simulates the real useMemo logic from the component
let level = 0
let currentParentId = 'node1'
const visited = new Set<string>()
// This is the actual logic pattern used in the component
while (currentParentId) {
// If we've seen this parent before, we have a cycle - break immediately
if (visited.has(currentParentId)) {
break
}
visited.add(currentParentId)
level++
const parentNode = nodes.find((n) => n.id === currentParentId)
if (!parentNode) break
currentParentId = parentNode.data?.parentId
}
// With proper circular reference detection, we should stop at level 2
// (node1 -> node2, then detect cycle when trying to go back to node1)
expect(level).toBe(2)
expect(visited.has('node1')).toBe(true)
expect(visited.has('node2')).toBe(true)
})
it('should handle complex circular reference chains', () => {
// Test more complex circular reference scenarios
const nodes = [
{ id: 'node1', data: { parentId: 'node2' } },
{ id: 'node2', data: { parentId: 'node3' } },
{ id: 'node3', data: { parentId: 'node1' } }, // Creates a 3-node cycle
]
mockGetNodes.mockReturnValue(nodes)
let level = 0
let currentParentId = 'node1'
const visited = new Set<string>()
while (currentParentId) {
if (visited.has(currentParentId)) {
break // Cycle detected
}
visited.add(currentParentId)
level++
const parentNode = nodes.find((n) => n.id === currentParentId)
if (!parentNode) break
currentParentId = parentNode.data?.parentId
}
// Should traverse node1 -> node2 -> node3, then detect cycle
expect(level).toBe(3)
expect(visited.size).toBe(3)
})
it('should handle self-referencing nodes', () => {
// Test node that references itself
const nodes = [
{ id: 'node1', data: { parentId: 'node1' } }, // Self-reference
]
mockGetNodes.mockReturnValue(nodes)
let level = 0
let currentParentId = 'node1'
const visited = new Set<string>()
while (currentParentId) {
if (visited.has(currentParentId)) {
break // Cycle detected immediately
}
visited.add(currentParentId)
level++
const parentNode = nodes.find((n) => n.id === currentParentId)
if (!parentNode) break
currentParentId = parentNode.data?.parentId
}
// Should detect self-reference immediately after first iteration
expect(level).toBe(1)
expect(visited.has('node1')).toBe(true)
})
it('should handle extreme values', () => {
const extremeValues = [
{ width: Number.MAX_SAFE_INTEGER, height: Number.MAX_SAFE_INTEGER },
{ width: -1, height: -1 },
{ width: 0, height: 0 },
{ width: null, height: null },
]
extremeValues.forEach((data) => {
expect(() => {
const width = data.width || 500
const height = data.height || 300
expect(typeof width).toBe('number')
expect(typeof height).toBe('number')
}).not.toThrow()
})
})
})
})


@@ -1,585 +0,0 @@
import { beforeEach, describe, expect, it, vi } from 'vitest'
import { ParallelNodeComponent } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/parallel/parallel-node'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
vi.mock('@/stores/workflows/workflow/store', () => ({
useWorkflowStore: vi.fn(),
}))
vi.mock('@/lib/logs/logger', () => ({
createLogger: vi.fn(() => ({
debug: vi.fn(),
info: vi.fn(),
warn: vi.fn(),
error: vi.fn(),
})),
}))
vi.mock('reactflow', () => ({
Handle: ({ id, type, position }: any) => ({ id, type, position }),
Position: {
Top: 'top',
Bottom: 'bottom',
Left: 'left',
Right: 'right',
},
useReactFlow: () => ({
getNodes: vi.fn(() => []),
}),
NodeResizer: ({ isVisible }: any) => ({ isVisible }),
memo: (component: any) => component,
}))
vi.mock('react', async () => {
const actual = await vi.importActual('react')
return {
...actual,
memo: (component: any) => component,
useMemo: (fn: any) => fn(),
useRef: () => ({ current: null }),
}
})
vi.mock('@/components/ui/button', () => ({
Button: ({ children, onClick, ...props }: any) => ({ children, onClick, ...props }),
}))
vi.mock('@/components/ui/card', () => ({
Card: ({ children, ...props }: any) => ({ children, ...props }),
}))
vi.mock('@/blocks/registry', () => ({
getBlock: vi.fn(() => ({
name: 'Mock Block',
description: 'Mock block description',
icon: () => null,
subBlocks: [],
outputs: {},
})),
getAllBlocks: vi.fn(() => ({})),
}))
vi.mock('@/lib/utils', () => ({
cn: (...classes: any[]) => classes.filter(Boolean).join(' '),
}))
vi.mock(
'@/app/workspace/[workspaceId]/w/[workflowId]/components/parallel-node/components/parallel-badges',
() => ({
ParallelBadges: ({ parallelId }: any) => ({ parallelId }),
})
)
describe('ParallelNodeComponent', () => {
const mockRemoveBlock = vi.fn()
const mockGetNodes = vi.fn()
const defaultProps = {
id: 'parallel-1',
type: 'parallelNode',
data: {
width: 500,
height: 300,
state: 'valid',
},
selected: false,
zIndex: 1,
isConnectable: true,
xPos: 0,
yPos: 0,
dragging: false,
}
beforeEach(() => {
vi.clearAllMocks()
;(useWorkflowStore as any).mockImplementation((selector: any) => {
const state = {
removeBlock: mockRemoveBlock,
}
return selector(state)
})
mockGetNodes.mockReturnValue([])
})
describe('Component Definition and Structure', () => {
it.concurrent('should be defined as a function component', () => {
expect(ParallelNodeComponent).toBeDefined()
expect(typeof ParallelNodeComponent).toBe('function')
})
it.concurrent('should have correct display name', () => {
expect(ParallelNodeComponent.displayName).toBe('ParallelNodeComponent')
})
it.concurrent('should be a memoized component', () => {
expect(ParallelNodeComponent).toBeDefined()
})
})
describe('Props Validation and Type Safety', () => {
it.concurrent('should accept NodeProps interface', () => {
expect(() => {
const _component: typeof ParallelNodeComponent = ParallelNodeComponent
expect(_component).toBeDefined()
}).not.toThrow()
})
it.concurrent('should handle different data configurations', () => {
const configurations = [
{ width: 500, height: 300, state: 'valid' },
{ width: 800, height: 600, state: 'invalid' },
{ width: 0, height: 0, state: 'pending' },
{},
]
configurations.forEach((data) => {
const props = { ...defaultProps, data }
expect(() => {
const _component: typeof ParallelNodeComponent = ParallelNodeComponent
expect(_component).toBeDefined()
}).not.toThrow()
})
})
})
describe('Store Integration', () => {
it.concurrent('should integrate with workflow store', () => {
expect(useWorkflowStore).toBeDefined()
const mockState = { removeBlock: mockRemoveBlock }
const selector = vi.fn((state) => state.removeBlock)
expect(() => {
selector(mockState)
}).not.toThrow()
expect(selector(mockState)).toBe(mockRemoveBlock)
})
it.concurrent('should handle removeBlock function', () => {
expect(mockRemoveBlock).toBeDefined()
expect(typeof mockRemoveBlock).toBe('function')
mockRemoveBlock('test-id')
expect(mockRemoveBlock).toHaveBeenCalledWith('test-id')
})
})
describe('Component Logic Tests', () => {
it.concurrent('should handle nesting level calculation logic', () => {
const testCases = [
{ nodes: [], parentId: undefined, expectedLevel: 0 },
{ nodes: [{ id: 'parent', data: {} }], parentId: 'parent', expectedLevel: 1 },
{
nodes: [
{ id: 'parent', data: { parentId: 'grandparent' } },
{ id: 'grandparent', data: {} },
],
parentId: 'parent',
expectedLevel: 2,
},
]
testCases.forEach(({ nodes, parentId, expectedLevel }) => {
mockGetNodes.mockReturnValue(nodes)
let level = 0
let currentParentId = parentId
while (currentParentId) {
level++
const parentNode = nodes.find((n) => n.id === currentParentId)
if (!parentNode) break
currentParentId = parentNode.data?.parentId
}
expect(level).toBe(expectedLevel)
})
})
it.concurrent('should handle nested styles generation for parallel nodes', () => {
const testCases = [
{ nestingLevel: 0, state: 'valid', expectedBg: 'rgba(254,225,43,0.05)' },
{ nestingLevel: 0, state: 'invalid', expectedBg: 'transparent' },
{ nestingLevel: 1, state: 'valid', expectedBg: '#e2e8f030' },
{ nestingLevel: 2, state: 'valid', expectedBg: '#cbd5e130' },
]
testCases.forEach(({ nestingLevel, state, expectedBg }) => {
const styles: Record<string, string> = {
backgroundColor: state === 'valid' ? 'rgba(254,225,43,0.05)' : 'transparent',
}
if (nestingLevel > 0) {
const colors = ['#e2e8f0', '#cbd5e1', '#94a3b8', '#64748b', '#475569']
const colorIndex = (nestingLevel - 1) % colors.length
styles.backgroundColor = `${colors[colorIndex]}30`
}
expect(styles.backgroundColor).toBe(expectedBg)
})
})
})
describe('Parallel-Specific Features', () => {
it.concurrent('should handle parallel execution states', () => {
const parallelStates = ['valid', 'invalid', 'executing', 'completed', 'pending']
parallelStates.forEach((state) => {
const data = { width: 500, height: 300, state }
expect(data.state).toBe(state)
const isExecuting = state === 'executing'
const isCompleted = state === 'completed'
expect(typeof isExecuting).toBe('boolean')
expect(typeof isCompleted).toBe('boolean')
})
})
it.concurrent('should handle parallel node color scheme', () => {
const parallelColors = {
background: 'rgba(254,225,43,0.05)',
ring: '#FEE12B',
startIcon: '#FEE12B',
}
expect(parallelColors.background).toContain('254,225,43')
expect(parallelColors.ring).toBe('#FEE12B')
expect(parallelColors.startIcon).toBe('#FEE12B')
})
it.concurrent('should differentiate from loop node styling', () => {
const loopColors = {
background: 'rgba(34,197,94,0.05)',
ring: '#2FB3FF',
startIcon: '#2FB3FF',
}
const parallelColors = {
background: 'rgba(254,225,43,0.05)',
ring: '#FEE12B',
startIcon: '#FEE12B',
}
expect(parallelColors.background).not.toBe(loopColors.background)
expect(parallelColors.ring).not.toBe(loopColors.ring)
expect(parallelColors.startIcon).not.toBe(loopColors.startIcon)
})
})
describe('Component Configuration', () => {
it.concurrent('should handle different dimensions', () => {
const dimensionTests = [
{ width: 500, height: 300 },
{ width: 800, height: 600 },
{ width: 0, height: 0 },
{ width: 10000, height: 10000 },
]
dimensionTests.forEach(({ width, height }) => {
const data = { width, height, state: 'valid' }
expect(data.width).toBe(width)
expect(data.height).toBe(height)
})
})
it.concurrent('should handle different states', () => {
const stateTests = ['valid', 'invalid', 'pending', 'executing', 'completed']
stateTests.forEach((state) => {
const data = { width: 500, height: 300, state }
expect(data.state).toBe(state)
})
})
})
describe('Event Handling Logic', () => {
it.concurrent('should handle delete button click logic', () => {
const mockEvent = {
stopPropagation: vi.fn(),
}
const handleDelete = (e: any, nodeId: string) => {
e.stopPropagation()
mockRemoveBlock(nodeId)
}
handleDelete(mockEvent, 'test-id')
expect(mockEvent.stopPropagation).toHaveBeenCalled()
expect(mockRemoveBlock).toHaveBeenCalledWith('test-id')
})
it.concurrent('should handle event propagation prevention', () => {
const mockEvent = {
stopPropagation: vi.fn(),
}
mockEvent.stopPropagation()
expect(mockEvent.stopPropagation).toHaveBeenCalled()
})
})
describe('Component Data Handling', () => {
it.concurrent('should handle missing data properties gracefully', () => {
const testCases = [
undefined,
{},
{ width: 500 },
{ height: 300 },
{ state: 'valid' },
{ width: 500, height: 300 },
]
testCases.forEach((data) => {
const props = { ...defaultProps, data }
// Test default values logic
const width = data?.width || 500
const height = data?.height || 300
expect(width).toBeGreaterThanOrEqual(0)
expect(height).toBeGreaterThanOrEqual(0)
})
})
it.concurrent('should handle parent ID relationships', () => {
const testCases = [
{ parentId: undefined, hasParent: false },
{ parentId: 'parent-1', hasParent: true },
{ parentId: '', hasParent: false },
]
testCases.forEach(({ parentId, hasParent }) => {
const data = { ...defaultProps.data, parentId }
expect(Boolean(data.parentId)).toBe(hasParent)
})
})
})
describe('Handle Configuration', () => {
it.concurrent('should have correct handle IDs for parallel nodes', () => {
const handleIds = {
startSource: 'parallel-start-source',
endSource: 'parallel-end-source',
}
expect(handleIds.startSource).toContain('parallel')
expect(handleIds.endSource).toContain('parallel')
expect(handleIds.startSource).not.toContain('loop')
expect(handleIds.endSource).not.toContain('loop')
})
it.concurrent('should handle different handle positions', () => {
const positions = {
left: 'left',
right: 'right',
top: 'top',
bottom: 'bottom',
}
Object.values(positions).forEach((position) => {
expect(typeof position).toBe('string')
expect(position.length).toBeGreaterThan(0)
})
})
})
describe('Edge Cases and Error Handling', () => {
it.concurrent('should handle circular parent references', () => {
// Test circular reference prevention
const nodes = [
{ id: 'node1', data: { parentId: 'node2' } },
{ id: 'node2', data: { parentId: 'node1' } },
]
mockGetNodes.mockReturnValue(nodes)
// Test the actual component's nesting level calculation logic
// This simulates the real useMemo logic from the component
let level = 0
let currentParentId = 'node1'
const visited = new Set<string>()
// This is the actual logic pattern used in the component
while (currentParentId) {
// If we've seen this parent before, we have a cycle - break immediately
if (visited.has(currentParentId)) {
break
}
visited.add(currentParentId)
level++
const parentNode = nodes.find((n) => n.id === currentParentId)
if (!parentNode) break
currentParentId = parentNode.data?.parentId
}
// With proper circular reference detection, we should stop at level 2
// (node1 -> node2, then detect cycle when trying to go back to node1)
expect(level).toBe(2)
expect(visited.has('node1')).toBe(true)
expect(visited.has('node2')).toBe(true)
})
it.concurrent('should handle complex circular reference chains', () => {
// Test more complex circular reference scenarios
const nodes = [
{ id: 'node1', data: { parentId: 'node2' } },
{ id: 'node2', data: { parentId: 'node3' } },
{ id: 'node3', data: { parentId: 'node1' } }, // Creates a 3-node cycle
]
mockGetNodes.mockReturnValue(nodes)
let level = 0
let currentParentId = 'node1'
const visited = new Set<string>()
while (currentParentId) {
if (visited.has(currentParentId)) {
break // Cycle detected
}
visited.add(currentParentId)
level++
const parentNode = nodes.find((n) => n.id === currentParentId)
if (!parentNode) break
currentParentId = parentNode.data?.parentId
}
// Should traverse node1 -> node2 -> node3, then detect cycle
expect(level).toBe(3)
expect(visited.size).toBe(3)
})
it.concurrent('should handle self-referencing nodes', () => {
// Test node that references itself
const nodes = [
{ id: 'node1', data: { parentId: 'node1' } }, // Self-reference
]
mockGetNodes.mockReturnValue(nodes)
let level = 0
let currentParentId = 'node1'
const visited = new Set<string>()
while (currentParentId) {
if (visited.has(currentParentId)) {
break // Cycle detected immediately
}
visited.add(currentParentId)
level++
const parentNode = nodes.find((n) => n.id === currentParentId)
if (!parentNode) break
currentParentId = parentNode.data?.parentId
}
// Should detect self-reference immediately after first iteration
expect(level).toBe(1)
expect(visited.has('node1')).toBe(true)
})
it.concurrent('should handle extreme values', () => {
const extremeValues = [
{ width: Number.MAX_SAFE_INTEGER, height: Number.MAX_SAFE_INTEGER },
{ width: -1, height: -1 },
{ width: 0, height: 0 },
{ width: null, height: null },
]
extremeValues.forEach((data) => {
expect(() => {
const width = data.width || 500
const height = data.height || 300
expect(typeof width).toBe('number')
expect(typeof height).toBe('number')
}).not.toThrow()
})
})
it.concurrent('should handle negative position values', () => {
const positions = [
{ xPos: -100, yPos: -200 },
{ xPos: 0, yPos: 0 },
{ xPos: 1000, yPos: 2000 },
]
positions.forEach(({ xPos, yPos }) => {
const props = { ...defaultProps, xPos, yPos }
expect(props.xPos).toBe(xPos)
expect(props.yPos).toBe(yPos)
expect(typeof props.xPos).toBe('number')
expect(typeof props.yPos).toBe('number')
})
})
})
describe('Component Comparison with Loop Node', () => {
it.concurrent('should have similar structure to loop node but different type', () => {
expect(defaultProps.type).toBe('parallelNode')
expect(defaultProps.id).toContain('parallel')
// Should not be a loop node
expect(defaultProps.type).not.toBe('loopNode')
expect(defaultProps.id).not.toContain('loop')
})
it.concurrent('should handle the same prop structure as loop node', () => {
// Test that parallel node accepts the same prop structure as loop node
const sharedPropStructure = {
id: 'test-parallel',
type: 'parallelNode' as const,
data: {
width: 400,
height: 300,
state: 'valid' as const,
},
selected: false,
zIndex: 1,
isConnectable: true,
xPos: 0,
yPos: 0,
dragging: false,
}
expect(() => {
const _component: typeof ParallelNodeComponent = ParallelNodeComponent
expect(_component).toBeDefined()
}).not.toThrow()
// Verify the structure
expect(sharedPropStructure.type).toBe('parallelNode')
expect(sharedPropStructure.data.width).toBe(400)
expect(sharedPropStructure.data.height).toBe(300)
})
it.concurrent('should maintain consistency with loop node interface', () => {
const baseProps = [
'id',
'type',
'data',
'selected',
'zIndex',
'isConnectable',
'xPos',
'yPos',
'dragging',
]
baseProps.forEach((prop) => {
expect(defaultProps).toHaveProperty(prop)
})
})
})
})


@@ -1,273 +0,0 @@
import type React from 'react'
import { memo, useMemo, useRef } from 'react'
import { Trash2 } from 'lucide-react'
import { Handle, type NodeProps, Position, useReactFlow } from 'reactflow'
import { StartIcon } from '@/components/icons'
import { Button } from '@/components/ui/button'
import { Card } from '@/components/ui/card'
import { cn } from '@/lib/utils'
import { IterationBadges } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/components/iteration-badges/iteration-badges'
import { useCurrentWorkflow } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks'
import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'
const ParallelNodeStyles: React.FC = () => {
return (
<style jsx global>{`
@keyframes parallel-node-pulse {
0% {
box-shadow: 0 0 0 0 rgba(139, 195, 74, 0.3);
}
70% {
box-shadow: 0 0 0 6px rgba(139, 195, 74, 0);
}
100% {
box-shadow: 0 0 0 0 rgba(139, 195, 74, 0);
}
}
.parallel-node-drag-over {
animation: parallel-node-pulse 1.2s cubic-bezier(0.4, 0, 0.6, 1)
infinite;
border-style: solid !important;
background-color: rgba(139, 195, 74, 0.08) !important;
box-shadow: 0 0 0 8px rgba(139, 195, 74, 0.1);
}
/* Make resizer handles more visible */
.react-flow__resize-control {
z-index: 10;
pointer-events: all !important;
}
/* Ensure parent borders are visible when hovering over resize controls */
.react-flow__node-group:hover,
.hover-highlight {
border-color: #1e293b !important;
}
/* Ensure hover effects work well */
.group-node-container:hover .react-flow__resize-control.bottom-right {
opacity: 1 !important;
visibility: visible !important;
}
/* React Flow position transitions within parallel blocks */
.react-flow__node[data-parent-node-id] {
transition: transform 0.05s ease;
pointer-events: all;
}
/* Prevent jumpy drag behavior */
.parallel-drop-container .react-flow__node {
transform-origin: center;
position: absolute;
}
/* Remove default border from React Flow group nodes */
.react-flow__node-group {
border: none;
background-color: transparent;
outline: none;
box-shadow: none;
}
/* Ensure child nodes stay within parent bounds */
.react-flow__node[data-parent-node-id] .react-flow__handle {
z-index: 30;
}
/* Enhanced drag detection */
.react-flow__node-group.dragging-over {
background-color: rgba(139, 195, 74, 0.05);
transition: all 0.2s ease-in-out;
}
`}</style>
)
}
export const ParallelNodeComponent = memo(({ data, selected, id }: NodeProps) => {
const { getNodes } = useReactFlow()
const { collaborativeRemoveBlock } = useCollaborativeWorkflow()
const blockRef = useRef<HTMLDivElement>(null)
// Use the clean abstraction for current workflow state
const currentWorkflow = useCurrentWorkflow()
const currentBlock = currentWorkflow.getBlockById(id)
const diffStatus =
currentWorkflow.isDiffMode && currentBlock ? (currentBlock as any).is_diff : undefined
// Check if this is preview mode
const isPreview = data?.isPreview || false
// Determine nesting level by counting parents
const nestingLevel = useMemo(() => {
const maxDepth = 100 // Prevent infinite loops
let level = 0
let currentParentId = data?.parentId
while (currentParentId && level < maxDepth) {
level++
const parentNode = getNodes().find((n) => n.id === currentParentId)
if (!parentNode) break
currentParentId = parentNode.data?.parentId
}
return level
}, [id, data?.parentId, getNodes])
// Generate different background styles based on nesting level
const getNestedStyles = () => {
// Base styles
const styles: Record<string, string> = {
backgroundColor: 'rgba(0, 0, 0, 0.02)',
}
// Apply nested styles
if (nestingLevel > 0) {
// Each nesting level gets a different color
const colors = ['#e2e8f0', '#cbd5e1', '#94a3b8', '#64748b', '#475569']
const colorIndex = (nestingLevel - 1) % colors.length
styles.backgroundColor = `${colors[colorIndex]}30` // Slightly more visible background
}
return styles
}
const nestedStyles = getNestedStyles()
return (
<>
<ParallelNodeStyles />
<div className='group relative'>
<Card
ref={blockRef}
className={cn(
'relative cursor-default select-none',
'transition-block-bg transition-ring',
'z-[20]',
data?.state === 'valid',
nestingLevel > 0 &&
`border border-[0.5px] ${nestingLevel % 2 === 0 ? 'border-slate-300/60' : 'border-slate-400/60'}`,
data?.hasNestedError && 'border-2 border-red-500 bg-red-50/50',
// Diff highlighting
diffStatus === 'new' && 'bg-green-50/50 ring-2 ring-green-500 dark:bg-green-900/10',
diffStatus === 'edited' &&
'bg-orange-50/50 ring-2 ring-orange-500 dark:bg-orange-900/10'
)}
style={{
width: data.width || 500,
height: data.height || 300,
position: 'relative',
overflow: 'visible',
...nestedStyles,
pointerEvents: isPreview ? 'none' : 'all',
}}
data-node-id={id}
data-type='parallelNode'
data-nesting-level={nestingLevel}
>
{/* Critical drag handle that controls only the parallel node movement */}
{!isPreview && (
<div
className='workflow-drag-handle absolute top-0 right-0 left-0 z-10 h-10 cursor-move'
style={{ pointerEvents: 'auto' }}
/>
)}
{/* Custom visible resize handle */}
{!isPreview && (
<div
className='absolute right-2 bottom-2 z-20 flex h-8 w-8 cursor-se-resize items-center justify-center text-muted-foreground'
style={{ pointerEvents: 'auto' }}
/>
)}
{/* Child nodes container - Set pointerEvents to allow dragging of children */}
<div
className='h-[calc(100%-10px)] p-4'
data-dragarea='true'
style={{
position: 'relative',
minHeight: '100%',
pointerEvents: isPreview ? 'none' : 'auto',
}}
>
{/* Delete button - styled like in action-bar.tsx */}
{!isPreview && (
<Button
variant='ghost'
size='sm'
onClick={(e) => {
e.stopPropagation()
collaborativeRemoveBlock(id)
}}
className='absolute top-2 right-2 z-20 text-gray-500 opacity-0 transition-opacity duration-200 hover:text-red-600 group-hover:opacity-100'
style={{ pointerEvents: 'auto' }}
>
<Trash2 className='h-4 w-4' />
</Button>
)}
{/* Parallel Start Block */}
<div
className='-translate-y-1/2 absolute top-1/2 left-8 flex h-10 w-10 transform items-center justify-center rounded-md bg-[#FEE12B] p-2'
style={{ pointerEvents: isPreview ? 'none' : 'auto' }}
data-parent-id={id}
data-node-role='parallel-start'
data-extent='parent'
>
<StartIcon className='h-6 w-6 text-white' />
<Handle
type='source'
position={Position.Right}
id='parallel-start-source'
className='!w-[6px] !h-4 !bg-slate-300 dark:!bg-slate-500 !rounded-[2px] !border-none !z-[30] hover:!w-[10px] hover:!right-[-10px] hover:!rounded-r-full hover:!rounded-l-none !cursor-crosshair transition-[colors] duration-150'
style={{
right: '-6px',
top: '50%',
transform: 'translateY(-50%)',
pointerEvents: 'auto',
}}
data-parent-id={id}
/>
</div>
</div>
{/* Input handle on left middle */}
<Handle
type='target'
position={Position.Left}
className='!w-[7px] !h-5 !bg-slate-300 dark:!bg-slate-500 !rounded-[2px] !border-none !z-[30] hover:!w-[10px] hover:!left-[-10px] hover:!rounded-l-full hover:!rounded-r-none !cursor-crosshair transition-[colors] duration-150'
style={{
left: '-7px',
top: '50%',
transform: 'translateY(-50%)',
pointerEvents: 'auto',
}}
/>
{/* Output handle on right middle */}
<Handle
type='source'
position={Position.Right}
className='!w-[7px] !h-5 !bg-slate-300 dark:!bg-slate-500 !rounded-[2px] !border-none !z-[30] hover:!w-[10px] hover:!right-[-10px] hover:!rounded-r-full hover:!rounded-l-none !cursor-crosshair transition-[colors] duration-150'
style={{
right: '-7px',
top: '50%',
transform: 'translateY(-50%)',
pointerEvents: 'auto',
}}
id='parallel-end-source'
/>
{/* Parallel Configuration Badges */}
<IterationBadges nodeId={id} data={data} iterationType='parallel' />
</Card>
</div>
</>
)
})
ParallelNodeComponent.displayName = 'ParallelNodeComponent'


@@ -0,0 +1,579 @@
import { beforeEach, describe, expect, it, vi } from 'vitest'
import { SubflowNodeComponent } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/subflow-node'
// Shared spies used across mocks
const mockRemoveBlock = vi.fn()
const mockGetNodes = vi.fn()
// Mocks
vi.mock('@/hooks/use-collaborative-workflow', () => ({
useCollaborativeWorkflow: vi.fn(() => ({
collaborativeRemoveBlock: mockRemoveBlock,
})),
}))
vi.mock('@/lib/logs/console/logger', () => ({
createLogger: vi.fn(() => ({
debug: vi.fn(),
info: vi.fn(),
warn: vi.fn(),
error: vi.fn(),
})),
}))
vi.mock('reactflow', () => ({
Handle: ({ id, type, position }: any) => ({ id, type, position }),
Position: {
Top: 'top',
Bottom: 'bottom',
Left: 'left',
Right: 'right',
},
useReactFlow: () => ({
getNodes: mockGetNodes,
}),
memo: (component: any) => component,
}))
vi.mock('react', async () => {
const actual = await vi.importActual<any>('react')
return {
...actual,
memo: (component: any) => component,
useMemo: (fn: any) => fn(),
useRef: () => ({ current: null }),
}
})
vi.mock('@/components/ui/button', () => ({
Button: ({ children, onClick, ...props }: any) => ({ children, onClick, ...props }),
}))
vi.mock('@/components/ui/card', () => ({
Card: ({ children, ...props }: any) => ({ children, ...props }),
}))
vi.mock('@/components/icons', async (importOriginal) => {
const actual = (await importOriginal()) as any
return {
...actual,
StartIcon: ({ className }: any) => ({ className }),
}
})
vi.mock('@/lib/utils', () => ({
cn: (...classes: any[]) => classes.filter(Boolean).join(' '),
}))
vi.mock(
'@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/components/iteration-badges/iteration-badges',
() => ({
IterationBadges: ({ nodeId, iterationType }: any) => ({ nodeId, iterationType }),
})
)
describe('SubflowNodeComponent', () => {
const defaultProps = {
id: 'subflow-1',
type: 'subflowNode',
data: {
width: 500,
height: 300,
isPreview: false,
kind: 'loop' as const,
},
selected: false,
zIndex: 1,
isConnectable: true,
xPos: 0,
yPos: 0,
dragging: false,
}
beforeEach(() => {
vi.clearAllMocks()
mockGetNodes.mockReturnValue([])
})
describe('Component Definition and Structure', () => {
it.concurrent('should be defined as a function component', () => {
expect(SubflowNodeComponent).toBeDefined()
expect(typeof SubflowNodeComponent).toBe('function')
})
it.concurrent('should have correct display name', () => {
expect(SubflowNodeComponent.displayName).toBe('SubflowNodeComponent')
})
it.concurrent('should be a memoized component', () => {
expect(SubflowNodeComponent).toBeDefined()
})
})
describe('Props Validation and Type Safety', () => {
it.concurrent('should accept NodeProps interface', () => {
const validProps = {
id: 'test-id',
type: 'subflowNode' as const,
data: {
width: 400,
height: 300,
isPreview: true,
kind: 'parallel' as const,
},
selected: false,
zIndex: 1,
isConnectable: true,
xPos: 0,
yPos: 0,
dragging: false,
}
expect(() => {
const _component: typeof SubflowNodeComponent = SubflowNodeComponent
expect(_component).toBeDefined()
expect(validProps.type).toBe('subflowNode')
}).not.toThrow()
})
it.concurrent('should handle different data configurations', () => {
const configurations = [
{ width: 500, height: 300, isPreview: false, kind: 'loop' as const },
{ width: 800, height: 600, isPreview: true, kind: 'parallel' as const },
{ width: 0, height: 0, isPreview: false, kind: 'loop' as const },
{ kind: 'loop' as const },
]
configurations.forEach((data) => {
const props = { ...defaultProps, data }
expect(() => {
const _component: typeof SubflowNodeComponent = SubflowNodeComponent
expect(_component).toBeDefined()
expect(props.data).toBeDefined()
}).not.toThrow()
})
})
})
describe('Hook Integration', () => {
it.concurrent('should provide collaborativeRemoveBlock', () => {
expect(mockRemoveBlock).toBeDefined()
expect(typeof mockRemoveBlock).toBe('function')
mockRemoveBlock('test-id')
expect(mockRemoveBlock).toHaveBeenCalledWith('test-id')
})
})
describe('Component Logic Tests', () => {
it.concurrent('should handle nesting level calculation logic', () => {
const testCases = [
{ nodes: [], parentId: undefined, expectedLevel: 0 },
{ nodes: [{ id: 'parent', data: {} }], parentId: 'parent', expectedLevel: 1 },
{
nodes: [
{ id: 'parent', data: { parentId: 'grandparent' } },
{ id: 'grandparent', data: {} },
],
parentId: 'parent',
expectedLevel: 2,
},
]
testCases.forEach(({ nodes, parentId, expectedLevel }) => {
mockGetNodes.mockReturnValue(nodes)
// Simulate the nesting level calculation logic
let level = 0
let currentParentId = parentId
while (currentParentId) {
level++
const parentNode = nodes.find((n) => n.id === currentParentId)
if (!parentNode) break
currentParentId = parentNode.data?.parentId
}
expect(level).toBe(expectedLevel)
})
})
it.concurrent('should handle nested styles generation', () => {
// Test the nested styles logic
const testCases = [
{ nestingLevel: 0, expectedBg: 'rgba(34,197,94,0.05)' },
{ nestingLevel: 1, expectedBg: '#e2e8f030' },
{ nestingLevel: 2, expectedBg: '#cbd5e130' },
]
testCases.forEach(({ nestingLevel, expectedBg }) => {
// Simulate the getNestedStyles logic
const styles: Record<string, string> = {
backgroundColor: 'rgba(34,197,94,0.05)',
}
if (nestingLevel > 0) {
const colors = ['#e2e8f0', '#cbd5e1', '#94a3b8', '#64748b', '#475569']
const colorIndex = (nestingLevel - 1) % colors.length
styles.backgroundColor = `${colors[colorIndex]}30`
}
expect(styles.backgroundColor).toBe(expectedBg)
})
})
})
describe('Component Configuration', () => {
it.concurrent('should handle different dimensions', () => {
const dimensionTests = [
{ width: 500, height: 300 },
{ width: 800, height: 600 },
{ width: 0, height: 0 },
{ width: 10000, height: 10000 },
]
dimensionTests.forEach(({ width, height }) => {
const data = { width, height }
expect(data.width).toBe(width)
expect(data.height).toBe(height)
})
})
})
describe('Event Handling Logic', () => {
it.concurrent('should handle delete button click logic (simulated)', () => {
const mockEvent = { stopPropagation: vi.fn() }
const handleDelete = (e: any, nodeId: string) => {
e.stopPropagation()
mockRemoveBlock(nodeId)
}
handleDelete(mockEvent, 'test-id')
expect(mockEvent.stopPropagation).toHaveBeenCalled()
expect(mockRemoveBlock).toHaveBeenCalledWith('test-id')
})
it.concurrent('should handle event propagation prevention', () => {
const mockEvent = { stopPropagation: vi.fn() }
mockEvent.stopPropagation()
expect(mockEvent.stopPropagation).toHaveBeenCalled()
})
})
describe('Component Data Handling', () => {
it.concurrent('should handle missing data properties gracefully', () => {
const testCases = [
undefined,
{},
{ width: 500 },
{ height: 300 },
{ width: 500, height: 300 },
]
testCases.forEach((data: any) => {
const props = { ...defaultProps, data }
const width = Math.max(0, data?.width || 500)
const height = Math.max(0, data?.height || 300)
expect(width).toBeGreaterThanOrEqual(0)
expect(height).toBeGreaterThanOrEqual(0)
expect(props.type).toBe('subflowNode')
})
})
it.concurrent('should handle parent ID relationships', () => {
const testCases = [
{ parentId: undefined, hasParent: false },
{ parentId: 'parent-1', hasParent: true },
{ parentId: '', hasParent: false },
]
testCases.forEach(({ parentId, hasParent }) => {
const data = { ...defaultProps.data, parentId }
expect(Boolean(data.parentId)).toBe(hasParent)
})
})
})
describe('Loop vs Parallel Kind Specific Tests', () => {
it.concurrent('should generate correct handle IDs for loop kind', () => {
const loopData = { ...defaultProps.data, kind: 'loop' as const }
const startHandleId = loopData.kind === 'loop' ? 'loop-start-source' : 'parallel-start-source'
const endHandleId = loopData.kind === 'loop' ? 'loop-end-source' : 'parallel-end-source'
expect(startHandleId).toBe('loop-start-source')
expect(endHandleId).toBe('loop-end-source')
})
it.concurrent('should generate correct handle IDs for parallel kind', () => {
type SubflowKind = 'loop' | 'parallel'
const testHandleGeneration = (kind: SubflowKind) => {
const startHandleId = kind === 'loop' ? 'loop-start-source' : 'parallel-start-source'
const endHandleId = kind === 'loop' ? 'loop-end-source' : 'parallel-end-source'
return { startHandleId, endHandleId }
}
const result = testHandleGeneration('parallel')
expect(result.startHandleId).toBe('parallel-start-source')
expect(result.endHandleId).toBe('parallel-end-source')
})
it.concurrent('should generate correct background colors for loop kind', () => {
const loopData = { ...defaultProps.data, kind: 'loop' as const }
const startBg = loopData.kind === 'loop' ? '#2FB3FF' : '#FEE12B'
expect(startBg).toBe('#2FB3FF')
})
it.concurrent('should generate correct background colors for parallel kind', () => {
type SubflowKind = 'loop' | 'parallel'
const testBgGeneration = (kind: SubflowKind) => {
return kind === 'loop' ? '#2FB3FF' : '#FEE12B'
}
const startBg = testBgGeneration('parallel')
expect(startBg).toBe('#FEE12B')
})
it.concurrent('should demonstrate handle ID generation for any kind', () => {
type SubflowKind = 'loop' | 'parallel'
const testKind = (kind: SubflowKind) => {
const data = { kind }
const startHandleId = data.kind === 'loop' ? 'loop-start-source' : 'parallel-start-source'
const endHandleId = data.kind === 'loop' ? 'loop-end-source' : 'parallel-end-source'
return { startHandleId, endHandleId }
}
const loopResult = testKind('loop')
expect(loopResult.startHandleId).toBe('loop-start-source')
expect(loopResult.endHandleId).toBe('loop-end-source')
const parallelResult = testKind('parallel')
expect(parallelResult.startHandleId).toBe('parallel-start-source')
expect(parallelResult.endHandleId).toBe('parallel-end-source')
})
it.concurrent('should pass correct iterationType to IterationBadges for loop', () => {
const loopProps = { ...defaultProps, data: { ...defaultProps.data, kind: 'loop' as const } }
// Mock IterationBadges should receive the kind as iterationType
expect(loopProps.data.kind).toBe('loop')
})
it.concurrent('should pass correct iterationType to IterationBadges for parallel', () => {
const parallelProps = {
...defaultProps,
data: { ...defaultProps.data, kind: 'parallel' as const },
}
// Mock IterationBadges should receive the kind as iterationType
expect(parallelProps.data.kind).toBe('parallel')
})
it.concurrent('should handle both kinds in configuration arrays', () => {
const bothKinds = ['loop', 'parallel'] as const
bothKinds.forEach((kind) => {
const data = { ...defaultProps.data, kind }
expect(['loop', 'parallel']).toContain(data.kind)
// Test handle ID generation for both kinds
const startHandleId = data.kind === 'loop' ? 'loop-start-source' : 'parallel-start-source'
const endHandleId = data.kind === 'loop' ? 'loop-end-source' : 'parallel-end-source'
const startBg = data.kind === 'loop' ? '#2FB3FF' : '#FEE12B'
if (kind === 'loop') {
expect(startHandleId).toBe('loop-start-source')
expect(endHandleId).toBe('loop-end-source')
expect(startBg).toBe('#2FB3FF')
} else {
expect(startHandleId).toBe('parallel-start-source')
expect(endHandleId).toBe('parallel-end-source')
expect(startBg).toBe('#FEE12B')
}
})
})
it.concurrent('should maintain consistent styling behavior across both kinds', () => {
const loopProps = { ...defaultProps, data: { ...defaultProps.data, kind: 'loop' as const } }
const parallelProps = {
...defaultProps,
data: { ...defaultProps.data, kind: 'parallel' as const },
}
// Both should have same base properties except kind-specific ones
expect(loopProps.data.width).toBe(parallelProps.data.width)
expect(loopProps.data.height).toBe(parallelProps.data.height)
expect(loopProps.data.isPreview).toBe(parallelProps.data.isPreview)
// But different kinds
expect(loopProps.data.kind).toBe('loop')
expect(parallelProps.data.kind).toBe('parallel')
})
})
describe('Integration with IterationBadges', () => {
it.concurrent('should pass nodeId to IterationBadges', () => {
const testId = 'test-subflow-123'
const props = { ...defaultProps, id: testId }
// Verify the props would be passed correctly
expect(props.id).toBe(testId)
})
it.concurrent('should pass data object to IterationBadges', () => {
const testData = { ...defaultProps.data, customProperty: 'test' }
const props = { ...defaultProps, data: testData }
// Verify the data object structure
expect(props.data).toEqual(testData)
expect(props.data.kind).toBeDefined()
})
it.concurrent('should pass iterationType matching the kind', () => {
const loopProps = { ...defaultProps, data: { ...defaultProps.data, kind: 'loop' as const } }
const parallelProps = {
...defaultProps,
data: { ...defaultProps.data, kind: 'parallel' as const },
}
// The iterationType should match the kind
expect(loopProps.data.kind).toBe('loop')
expect(parallelProps.data.kind).toBe('parallel')
})
})
describe('CSS Class Generation', () => {
it.concurrent('should generate proper CSS classes for nested loops', () => {
const nestingLevel = 2
const expectedBorderClass =
nestingLevel > 0 &&
`border border-[0.5px] ${nestingLevel % 2 === 0 ? 'border-slate-300/60' : 'border-slate-400/60'}`
expect(expectedBorderClass).toBeTruthy()
expect(expectedBorderClass).toContain('border-slate-300/60') // even nesting level
})
it.concurrent('should generate proper CSS classes for odd nested levels', () => {
const nestingLevel = 3
const expectedBorderClass =
nestingLevel > 0 &&
`border border-[0.5px] ${nestingLevel % 2 === 0 ? 'border-slate-300/60' : 'border-slate-400/60'}`
expect(expectedBorderClass).toBeTruthy()
expect(expectedBorderClass).toContain('border-slate-400/60') // odd nesting level
})
it.concurrent('should handle error state styling', () => {
const hasNestedError = true
const errorClasses = hasNestedError && 'border-2 border-red-500 bg-red-50/50'
expect(errorClasses).toBe('border-2 border-red-500 bg-red-50/50')
})
it.concurrent('should handle diff status styling', () => {
const diffStatuses = ['new', 'edited'] as const
diffStatuses.forEach((status) => {
let diffClass = ''
if (status === 'new') {
diffClass = 'bg-green-50/50 ring-2 ring-green-500 dark:bg-green-900/10'
} else if (status === 'edited') {
diffClass = 'bg-orange-50/50 ring-2 ring-orange-500 dark:bg-orange-900/10'
}
expect(diffClass).toBeTruthy()
if (status === 'new') {
expect(diffClass).toContain('ring-green-500')
} else {
expect(diffClass).toContain('ring-orange-500')
}
})
})
})
describe('Edge Cases and Error Handling', () => {
it.concurrent('should handle circular parent references', () => {
const nodes = [
{ id: 'node1', data: { parentId: 'node2' } },
{ id: 'node2', data: { parentId: 'node1' } },
]
mockGetNodes.mockReturnValue(nodes)
let level = 0
let currentParentId = 'node1'
const visited = new Set<string>()
while (currentParentId) {
if (visited.has(currentParentId)) {
break
}
visited.add(currentParentId)
level++
const parentNode = nodes.find((n) => n.id === currentParentId)
if (!parentNode) break
currentParentId = parentNode.data?.parentId
}
expect(level).toBe(2)
expect(visited.has('node1')).toBe(true)
expect(visited.has('node2')).toBe(true)
})
it.concurrent('should handle complex circular reference chains', () => {
const nodes = [
{ id: 'node1', data: { parentId: 'node2' } },
{ id: 'node2', data: { parentId: 'node3' } },
{ id: 'node3', data: { parentId: 'node1' } },
]
mockGetNodes.mockReturnValue(nodes)
let level = 0
let currentParentId = 'node1'
const visited = new Set<string>()
while (currentParentId) {
if (visited.has(currentParentId)) {
break
}
visited.add(currentParentId)
level++
const parentNode = nodes.find((n) => n.id === currentParentId)
if (!parentNode) break
currentParentId = parentNode.data?.parentId
}
expect(level).toBe(3)
expect(visited.size).toBe(3)
})
it.concurrent('should handle self-referencing nodes', () => {
const nodes = [{ id: 'node1', data: { parentId: 'node1' } }]
mockGetNodes.mockReturnValue(nodes)
let level = 0
let currentParentId = 'node1'
const visited = new Set<string>()
while (currentParentId) {
if (visited.has(currentParentId)) {
break
}
visited.add(currentParentId)
level++
const parentNode = nodes.find((n) => n.id === currentParentId)
if (!parentNode) break
currentParentId = parentNode.data?.parentId
}
expect(level).toBe(1)
expect(visited.has('node1')).toBe(true)
})
})
})
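The three circular-reference cases above all simulate the same guarded parent walk. As a standalone sketch (hypothetical helper name; the component itself inlines this walk in a useMemo, and the deleted node components capped it with a maxDepth of 100 instead of a visited set):

interface FlowNode {
  id: string
  data?: { parentId?: string }
}

// Walk up the parent chain, counting levels; stop on a missing parent or a detected cycle.
function computeNestingLevel(nodes: FlowNode[], startParentId?: string): number {
  let level = 0
  let currentParentId = startParentId
  const visited = new Set<string>()
  while (currentParentId) {
    if (visited.has(currentParentId)) break // cycle detected (self-reference or longer chain)
    visited.add(currentParentId)
    level++
    const parentNode = nodes.find((n) => n.id === currentParentId)
    if (!parentNode) break
    currentParentId = parentNode.data?.parentId
  }
  return level
}

// computeNestingLevel([{ id: 'node1', data: { parentId: 'node1' } }], 'node1') -> 1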


@@ -6,60 +6,54 @@ import { StartIcon } from '@/components/icons'
import { Button } from '@/components/ui/button'
import { Card } from '@/components/ui/card'
import { cn } from '@/lib/utils'
import { type DiffStatus, hasDiffStatus } from '@/lib/workflows/diff/types'
import { IterationBadges } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/components/iteration-badges/iteration-badges'
import { useCurrentWorkflow } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks'
import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'
// Add these styles to your existing global CSS file or create a separate CSS module
const LoopNodeStyles: React.FC = () => {
const SubflowNodeStyles: React.FC = () => {
return (
<style jsx global>{`
@keyframes loop-node-pulse {
0% { box-shadow: 0 0 0 0 rgba(64, 224, 208, 0.3); }
70% { box-shadow: 0 0 0 6px rgba(64, 224, 208, 0); }
100% { box-shadow: 0 0 0 0 rgba(64, 224, 208, 0); }
0% { box-shadow: 0 0 0 0 rgba(47, 179, 255, 0.3); }
70% { box-shadow: 0 0 0 6px rgba(47, 179, 255, 0); }
100% { box-shadow: 0 0 0 0 rgba(47, 179, 255, 0); }
}
@keyframes parallel-node-pulse {
0% { box-shadow: 0 0 0 0 rgba(139, 195, 74, 0.3); }
70% { box-shadow: 0 0 0 6px rgba(139, 195, 74, 0); }
100% { box-shadow: 0 0 0 0 rgba(139, 195, 74, 0); }
}
.loop-node-drag-over {
animation: loop-node-pulse 1.2s cubic-bezier(0.4, 0, 0.6, 1) infinite;
border-style: solid !important;
background-color: rgba(47, 179, 255, 0.08) !important;
box-shadow: 0 0 0 8px rgba(47, 179, 255, 0.1);
}
/* Ensure parent borders are visible when hovering over resize controls */
.parallel-node-drag-over {
animation: parallel-node-pulse 1.2s cubic-bezier(0.4, 0, 0.6, 1) infinite;
border-style: solid !important;
background-color: rgba(139, 195, 74, 0.08) !important;
box-shadow: 0 0 0 8px rgba(139, 195, 74, 0.1);
}
.react-flow__node-group:hover,
.hover-highlight {
border-color: #1e293b !important;
}
/* Ensure hover effects work well */
.group-node-container:hover .react-flow__resize-control.bottom-right {
opacity: 1 !important;
visibility: visible !important;
}
/* Prevent jumpy drag behavior */
.loop-drop-container .react-flow__node {
transform-origin: center;
position: absolute;
}
/* Remove default border from React Flow group nodes */
.react-flow__node-group {
border: none;
background-color: transparent;
outline: none;
box-shadow: none;
}
/* Ensure child nodes stay within parent bounds */
.react-flow__node[data-parent-node-id] .react-flow__handle {
z-index: 30;
}
/* Enhanced drag detection */
.react-flow__node-group.dragging-over {
background-color: rgba(34,197,94,0.05);
transition: all 0.2s ease-in-out;
@@ -68,21 +62,30 @@ const LoopNodeStyles: React.FC = () => {
)
}
export const LoopNodeComponent = memo(({ data, selected, id }: NodeProps) => {
export interface SubflowNodeData {
width?: number
height?: number
parentId?: string
extent?: 'parent'
hasNestedError?: boolean
isPreview?: boolean
kind: 'loop' | 'parallel'
}
export const SubflowNodeComponent = memo(({ data, id }: NodeProps<SubflowNodeData>) => {
const { getNodes } = useReactFlow()
const { collaborativeRemoveBlock } = useCollaborativeWorkflow()
const blockRef = useRef<HTMLDivElement>(null)
// Use the clean abstraction for current workflow state
const currentWorkflow = useCurrentWorkflow()
const currentBlock = currentWorkflow.getBlockById(id)
const diffStatus =
currentWorkflow.isDiffMode && currentBlock ? (currentBlock as any).is_diff : undefined
const diffStatus: DiffStatus =
currentWorkflow.isDiffMode && currentBlock && hasDiffStatus(currentBlock)
? currentBlock.is_diff
: undefined
// Check if this is preview mode
const isPreview = data?.isPreview || false
// Determine nesting level by counting parents
const nestingLevel = useMemo(() => {
let level = 0
let currentParentId = data?.parentId
@@ -97,42 +100,37 @@ export const LoopNodeComponent = memo(({ data, selected, id }: NodeProps) => {
return level
}, [id, data?.parentId, getNodes])
// Generate different background styles based on nesting level
const getNestedStyles = () => {
// Base styles
const styles: Record<string, string> = {
backgroundColor: 'rgba(0, 0, 0, 0.02)',
}
// Apply nested styles
if (nestingLevel > 0) {
// Each nesting level gets a different color
const colors = ['#e2e8f0', '#cbd5e1', '#94a3b8', '#64748b', '#475569']
const colorIndex = (nestingLevel - 1) % colors.length
styles.backgroundColor = `${colors[colorIndex]}30` // Slightly more visible background
styles.backgroundColor = `${colors[colorIndex]}30`
}
return styles
}
const nestedStyles = getNestedStyles()
const startHandleId = data.kind === 'loop' ? 'loop-start-source' : 'parallel-start-source'
const endHandleId = data.kind === 'loop' ? 'loop-end-source' : 'parallel-end-source'
const startBg = data.kind === 'loop' ? '#2FB3FF' : '#FEE12B'
return (
<>
<LoopNodeStyles />
<SubflowNodeStyles />
<div className='group relative'>
<Card
ref={blockRef}
className={cn(
' relative cursor-default select-none',
'relative cursor-default select-none',
'transition-block-bg transition-ring',
'z-[20]',
data?.state === 'valid',
nestingLevel > 0 &&
`border border-[0.5px] ${nestingLevel % 2 === 0 ? 'border-slate-300/60' : 'border-slate-400/60'}`,
data?.hasNestedError && 'border-2 border-red-500 bg-red-50/50',
// Diff highlighting
diffStatus === 'new' && 'bg-green-50/50 ring-2 ring-green-500 dark:bg-green-900/10',
diffStatus === 'edited' &&
'bg-orange-50/50 ring-2 ring-orange-500 dark:bg-orange-900/10'
@@ -146,10 +144,9 @@ export const LoopNodeComponent = memo(({ data, selected, id }: NodeProps) => {
pointerEvents: isPreview ? 'none' : 'all',
}}
data-node-id={id}
data-type='loopNode'
data-type='subflowNode'
data-nesting-level={nestingLevel}
>
{/* Critical drag handle that controls only the loop node movement */}
{!isPreview && (
<div
className='workflow-drag-handle absolute top-0 right-0 left-0 z-10 h-10 cursor-move'
@@ -157,7 +154,6 @@ export const LoopNodeComponent = memo(({ data, selected, id }: NodeProps) => {
/>
)}
{/* Custom visible resize handle */}
{!isPreview && (
<div
className='absolute right-2 bottom-2 z-20 flex h-8 w-8 cursor-se-resize items-center justify-center text-muted-foreground'
@@ -165,7 +161,6 @@ export const LoopNodeComponent = memo(({ data, selected, id }: NodeProps) => {
/>
)}
{/* Child nodes container - Enable pointer events to allow dragging of children */}
<div
className='h-[calc(100%-10px)] p-4'
data-dragarea='true'
@@ -175,7 +170,6 @@ export const LoopNodeComponent = memo(({ data, selected, id }: NodeProps) => {
pointerEvents: isPreview ? 'none' : 'auto',
}}
>
{/* Delete button - styled like in action-bar.tsx */}
{!isPreview && (
<Button
variant='ghost'
@@ -191,12 +185,12 @@ export const LoopNodeComponent = memo(({ data, selected, id }: NodeProps) => {
</Button>
)}
{/* Loop Start Block */}
{/* Subflow Start */}
<div
className='-translate-y-1/2 absolute top-1/2 left-8 flex h-10 w-10 transform items-center justify-center rounded-md bg-[#2FB3FF] p-2'
style={{ pointerEvents: isPreview ? 'none' : 'auto' }}
className='-translate-y-1/2 absolute top-1/2 left-8 flex h-10 w-10 transform items-center justify-center rounded-md p-2'
style={{ pointerEvents: isPreview ? 'none' : 'auto', backgroundColor: startBg }}
data-parent-id={id}
data-node-role='loop-start'
data-node-role={`${data.kind}-start`}
data-extent='parent'
>
<StartIcon className='h-6 w-6 text-white' />
@@ -204,7 +198,7 @@ export const LoopNodeComponent = memo(({ data, selected, id }: NodeProps) => {
<Handle
type='source'
position={Position.Right}
id='loop-start-source'
id={startHandleId}
className='!w-[6px] !h-4 !bg-slate-300 dark:!bg-slate-500 !rounded-[2px] !border-none !z-[30] hover:!w-[10px] hover:!right-[-10px] hover:!rounded-r-full hover:!rounded-l-none !cursor-crosshair transition-[colors] duration-150'
style={{
right: '-6px',
@@ -241,15 +235,14 @@ export const LoopNodeComponent = memo(({ data, selected, id }: NodeProps) => {
transform: 'translateY(-50%)',
pointerEvents: 'auto',
}}
id='loop-end-source'
id={endHandleId}
/>
{/* Loop Configuration Badges */}
<IterationBadges nodeId={id} data={data} iterationType='loop' />
<IterationBadges nodeId={id} data={data} iterationType={data.kind} />
</Card>
</div>
</>
)
})
LoopNodeComponent.displayName = 'LoopNodeComponent'
SubflowNodeComponent.displayName = 'SubflowNodeComponent'
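
The loop and parallel node components are consolidated above into a single SubflowNodeComponent keyed by data.kind, which drives both the start handle id and the start-block color. A minimal TypeScript sketch of that derivation; startHandleFor is an illustrative helper, not part of the diff, and simply mirrors the startHandleId/startBg logic shown above.

// Illustrative only: mirrors the kind-based derivation in SubflowNodeComponent.
type SubflowKind = 'loop' | 'parallel'

function startHandleFor(kind: SubflowKind) {
  return kind === 'loop'
    ? { id: 'loop-start-source', color: '#2FB3FF' }     // loop subflows
    : { id: 'parallel-start-source', color: '#FEE12B' } // parallel subflows
}

// e.g. startHandleFor('parallel').id === 'parallel-start-source'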

View File

@@ -1,4 +1,4 @@
import { ArrowLeftRight, ArrowUpDown, Circle, CircleOff, Copy, Trash2 } from 'lucide-react'
import { ArrowLeftRight, ArrowUpDown, Circle, CircleOff, Copy, LogOut, Trash2 } from 'lucide-react'
import { Button } from '@/components/ui/button'
import { Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui/tooltip'
import { cn } from '@/lib/utils'
@@ -23,6 +23,10 @@ export function ActionBar({ blockId, blockType, disabled = false }: ActionBarPro
const horizontalHandles = useWorkflowStore(
(state) => state.blocks[blockId]?.horizontalHandles ?? false
)
const parentId = useWorkflowStore((state) => state.blocks[blockId]?.data?.parentId)
const parentType = useWorkflowStore((state) =>
parentId ? state.blocks[parentId]?.type : undefined
)
const userPermissions = useUserPermissionsContext()
const isStarterBlock = blockType === 'starter'
@@ -102,6 +106,33 @@ export function ActionBar({ blockId, blockType, disabled = false }: ActionBarPro
</Tooltip>
)}
{/* Remove from subflow - only show when inside loop/parallel */}
{!isStarterBlock && parentId && (parentType === 'loop' || parentType === 'parallel') && (
<Tooltip>
<TooltipTrigger asChild>
<Button
variant='ghost'
size='sm'
onClick={() => {
if (!disabled && userPermissions.canEdit) {
window.dispatchEvent(
new CustomEvent('remove-from-subflow', { detail: { blockId } })
)
}
}}
className={cn(
'text-gray-500',
(disabled || !userPermissions.canEdit) && 'cursor-not-allowed opacity-50'
)}
disabled={disabled || !userPermissions.canEdit}
>
<LogOut className='h-4 w-4' />
</Button>
</TooltipTrigger>
<TooltipContent side='right'>{getTooltipMessage('Remove From Subflow')}</TooltipContent>
</Tooltip>
)}
<Tooltip>
<TooltipTrigger asChild>
<Button

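The new ActionBar button above talks to the canvas through a window-level CustomEvent rather than a store action; the matching listener is registered in workflow.tsx further down in this compare view. A hedged sketch of the contract, using only the event name and detail shape from the diff (the blockId value is hypothetical):

// Producer (ActionBar above): announce which block should leave its subflow.
const blockId = 'block-123' // hypothetical id, for illustration only
window.dispatchEvent(new CustomEvent('remove-from-subflow', { detail: { blockId } }))

// Consumer (listener added in workflow.tsx later in this diff): the real handler
// calls updateNodeParent(blockId, null) and removes every edge touching the block.
window.addEventListener('remove-from-subflow', (event: Event) => {
  const detail = (event as CustomEvent<{ blockId: string }>).detail
  console.log('remove block from subflow:', detail?.blockId)
})
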
View File

@@ -34,6 +34,7 @@ interface FileSelectorInputProps {
disabled: boolean
isPreview?: boolean
previewValue?: any | null
previewContextValues?: Record<string, any>
}
export function FileSelectorInput({
@@ -42,6 +43,7 @@ export function FileSelectorInput({
disabled,
isPreview = false,
previewValue,
previewContextValues,
}: FileSelectorInputProps) {
const { getValue } = useSubBlockStore()
const { collaborativeSetSubblockValue } = useCollaborativeWorkflow()
@@ -49,6 +51,23 @@ export function FileSelectorInput({
const params = useParams()
const workflowIdFromUrl = (params?.workflowId as string) || activeWorkflowId || ''
// Helper to coerce various preview value shapes into a string ID
const coerceToIdString = (val: unknown): string => {
if (!val) return ''
if (typeof val === 'string') return val
if (typeof val === 'number') return String(val)
if (typeof val === 'object') {
const obj = val as Record<string, any>
return (obj.id ||
obj.fileId ||
obj.value ||
obj.documentId ||
obj.spreadsheetId ||
'') as string
}
return ''
}
// Use the proper hook to get the current value and setter
const [storeValue, setStoreValue] = useSubBlockValue(blockId, subBlock.id)
const [selectedFileId, setSelectedFileId] = useState<string>('')
@@ -108,19 +127,37 @@ export function FileSelectorInput({
const isMicrosoftSharePoint = provider === 'microsoft' && subBlock.serviceId === 'sharepoint'
const isMicrosoftPlanner = provider === 'microsoft-planner'
// For Confluence and Jira, we need the domain and credentials
const domain = isConfluence || isJira ? (getValue(blockId, 'domain') as string) || '' : ''
const jiraCredential = isJira ? (getValue(blockId, 'credential') as string) || '' : ''
const domain =
isConfluence || isJira
? (isPreview && previewContextValues?.domain?.value) ||
(getValue(blockId, 'domain') as string) ||
''
: ''
const jiraCredential = isJira
? (isPreview && previewContextValues?.credential?.value) ||
(getValue(blockId, 'credential') as string) ||
''
: ''
// For Discord, we need the bot token and server ID
const botToken = isDiscord ? (getValue(blockId, 'botToken') as string) || '' : ''
const serverId = isDiscord ? (getValue(blockId, 'serverId') as string) || '' : ''
const botToken = isDiscord
? (isPreview && previewContextValues?.botToken?.value) ||
(getValue(blockId, 'botToken') as string) ||
''
: ''
const serverId = isDiscord
? (isPreview && previewContextValues?.serverId?.value) ||
(getValue(blockId, 'serverId') as string) ||
''
: ''
// Use preview value when in preview mode, otherwise use store value
const value = isPreview ? previewValue : storeValue
// Keep local selection in sync with store value (and preview)
useEffect(() => {
const effective = isPreview && previewValue !== undefined ? previewValue : storeValue
if (typeof effective === 'string' && effective !== '') {
const raw = isPreview && previewValue !== undefined ? previewValue : storeValue
const effective = coerceToIdString(raw)
if (effective) {
if (isJira) {
setSelectedIssueId(effective)
} else if (isDiscord) {
@@ -385,7 +422,7 @@ export function FileSelectorInput({
<TooltipTrigger asChild>
<div className='w-full'>
<MicrosoftFileSelector
value={selectedFileId}
value={coerceToIdString(selectedFileId)}
onChange={handleFileChange}
provider='microsoft-excel'
requiredScopes={subBlock.requiredScopes || []}
@@ -418,7 +455,7 @@ export function FileSelectorInput({
<TooltipTrigger asChild>
<div className='w-full'>
<MicrosoftFileSelector
value={selectedFileId}
value={coerceToIdString(selectedFileId)}
onChange={handleFileChange}
provider='microsoft-word'
requiredScopes={subBlock.requiredScopes || []}
@@ -450,7 +487,7 @@ export function FileSelectorInput({
<TooltipTrigger asChild>
<div className='w-full'>
<MicrosoftFileSelector
value={selectedFileId}
value={coerceToIdString(selectedFileId)}
onChange={handleFileChange}
provider='microsoft'
requiredScopes={subBlock.requiredScopes || []}
@@ -482,7 +519,7 @@ export function FileSelectorInput({
<TooltipTrigger asChild>
<div className='w-full'>
<MicrosoftFileSelector
value={selectedFileId}
value={coerceToIdString(selectedFileId)}
onChange={handleFileChange}
provider='microsoft'
requiredScopes={subBlock.requiredScopes || []}
@@ -662,11 +699,9 @@ export function FileSelectorInput({
// Default to Google Drive picker
return (
<GoogleDrivePicker
value={
(isPreview && previewValue !== undefined
? (previewValue as string)
: (storeValue as string)) || ''
}
value={coerceToIdString(
(isPreview && previewValue !== undefined ? previewValue : storeValue) as any
)}
onChange={(val, info) => {
setSelectedFileId(val)
setFileInfo(info || null)
@@ -682,7 +717,11 @@ export function FileSelectorInput({
onFileInfoChange={setFileInfo}
clientId={clientId}
apiKey={apiKey}
credentialId={(getValue(blockId, 'credential') as string) || ''}
credentialId={
((isPreview && previewContextValues?.credential?.value) ||
(getValue(blockId, 'credential') as string) ||
'') as string
}
workflowId={workflowIdFromUrl}
/>
)
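
The selector now funnels preview values of several shapes through the coerceToIdString helper introduced above before handing them to the pickers. Expected behavior, with illustrative inputs (the helper is local to this file, so this is a worked example rather than runnable code):

// input                          → returned id string
// 'doc-1'                        → 'doc-1'    strings pass through unchanged
// 42                             → '42'       numbers are stringified
// { spreadsheetId: 'sheet-1' }   → 'sheet-1'  first id-like field wins (id, fileId, value, documentId, spreadsheetId)
// null / undefined / ''          → ''         empty or missing selection collapses to ''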

View File

@@ -389,6 +389,8 @@ export function LongInput({
fontFamily: 'inherit',
lineHeight: 'inherit',
height: `${height}px`,
wordBreak: 'break-word',
whiteSpace: 'pre-wrap',
}}
/>
<div
@@ -397,7 +399,7 @@ export function LongInput({
style={{
fontFamily: 'inherit',
lineHeight: 'inherit',
width: textareaRef.current ? `${textareaRef.current.clientWidth}px` : '100%',
width: '100%',
height: `${height}px`,
overflow: 'hidden',
}}

View File

@@ -55,6 +55,7 @@ interface ToolInputProps {
isPreview?: boolean
previewValue?: any
disabled?: boolean
allowExpandInPreview?: boolean
}
interface StoredTool {
@@ -105,6 +106,7 @@ function FileSelectorSyncWrapper({
onChange,
uiComponent,
disabled,
previewContextValues,
}: {
blockId: string
paramId: string
@@ -112,6 +114,7 @@ function FileSelectorSyncWrapper({
onChange: (value: string) => void
uiComponent: any
disabled: boolean
previewContextValues?: Record<string, any>
}) {
return (
<GenericSyncWrapper blockId={blockId} paramId={paramId} value={value} onChange={onChange}>
@@ -128,6 +131,7 @@ function FileSelectorSyncWrapper({
placeholder: uiComponent.placeholder,
}}
disabled={disabled}
previewContextValues={previewContextValues}
/>
</GenericSyncWrapper>
)
@@ -398,6 +402,7 @@ export function ToolInput({
isPreview = false,
previewValue,
disabled = false,
allowExpandInPreview,
}: ToolInputProps) {
const [storeValue, setStoreValue] = useSubBlockValue(blockId, subBlockId)
const [open, setOpen] = useState(false)
@@ -775,8 +780,19 @@ export function ToolInput({
)
}
// Local expansion overrides for preview/diff mode
const [previewExpanded, setPreviewExpanded] = useState<Record<number, boolean>>({})
const toggleToolExpansion = (toolIndex: number) => {
if (isPreview || disabled) return
if ((isPreview && !allowExpandInPreview) || disabled) return
if (isPreview) {
setPreviewExpanded((prev) => ({
...prev,
[toolIndex]: !(prev[toolIndex] ?? !!selectedTools[toolIndex]?.isExpanded),
}))
return
}
setStoreValue(
selectedTools.map((tool, index) =>
@@ -929,7 +945,8 @@ export function ToolInput({
param: ToolParameterConfig,
value: string,
onChange: (value: string) => void,
toolIndex?: number
toolIndex?: number,
currentToolParams?: Record<string, string>
) => {
// Create unique blockId for tool parameters to avoid conflicts with main block
const uniqueBlockId = toolIndex !== undefined ? `${blockId}-tool-${toolIndex}` : blockId
@@ -1076,6 +1093,7 @@ export function ToolInput({
onChange={onChange}
uiComponent={uiComponent}
disabled={disabled}
previewContextValues={currentToolParams as any}
/>
)
@@ -1363,6 +1381,9 @@ export function ToolInput({
const oauthConfig = !isCustomTool ? getToolOAuthConfig(currentToolId) : null
// Tools are always expandable so users can access the interface
const isExpandedForDisplay = isPreview
? (previewExpanded[toolIndex] ?? !!tool.isExpanded)
: !!tool.isExpanded
return (
<div
@@ -1458,29 +1479,27 @@ export function ToolInput({
</span>
<span
className={`font-medium text-xs ${
tool.usageControl === 'force'
? 'block text-muted-foreground'
: 'hidden'
tool.usageControl === 'force' ? 'block' : 'hidden'
}`}
>
Force
</span>
<span
className={`font-medium text-xs ${
tool.usageControl === 'none'
? 'block text-muted-foreground'
: 'hidden'
tool.usageControl === 'none' ? 'block' : 'hidden'
}`}
>
Deny
None
</span>
</Toggle>
</TooltipTrigger>
<TooltipContent side='bottom' className='max-w-[240px] p-2'>
<p className='text-xs'>
<TooltipContent className='max-w-[280px] p-2' side='top'>
<p className='text-muted-foreground text-xs'>
Control how the model uses this tool in its response.
{tool.usageControl === 'auto' && (
<span>
<span className='font-medium'>Auto:</span> Let the agent decide
{' '}
<span className='font-medium'>Auto:</span> Let the model decide
when to use the tool
</span>
)}
@@ -1511,7 +1530,7 @@ export function ToolInput({
</div>
</div>
{!isCustomTool && tool.isExpanded && (
{!isCustomTool && isExpandedForDisplay && (
<div className='space-y-3 overflow-visible p-3'>
{/* Operation dropdown for tools with multiple operations */}
{(() => {
@@ -1660,7 +1679,8 @@ export function ToolInput({
param,
tool.params[param.id] || '',
(value) => handleParamChange(toolIndex, param.id, value),
toolIndex
toolIndex,
tool.params
)
) : (
<ShortInput

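In preview/diff mode, tool cards can now be expanded without writing to the sub-block store: a local previewExpanded map keyed by tool index overrides the stored isExpanded flag, and the store is left untouched. A standalone sketch of that resolution, illustrative rather than the component code itself:

// Standalone restatement of the preview-expansion override shown above.
let previewExpanded: Record<number, boolean> = {}

function isExpandedForDisplay(isPreview: boolean, toolIndex: number, stored: boolean): boolean {
  return isPreview ? (previewExpanded[toolIndex] ?? stored) : stored
}

function togglePreviewExpansion(toolIndex: number, stored: boolean): void {
  previewExpanded = { ...previewExpanded, [toolIndex]: !(previewExpanded[toolIndex] ?? stored) }
}
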
View File

@@ -3,6 +3,7 @@ import { useEffect, useState } from 'react'
import { AlertTriangle, Info } from 'lucide-react'
import { Label, Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui'
import { cn } from '@/lib/utils'
import type { FieldDiffStatus } from '@/lib/workflows/diff/types'
import {
ChannelSelectorInput,
CheckboxList,
@@ -43,7 +44,8 @@ interface SubBlockProps {
isPreview?: boolean
subBlockValues?: Record<string, any>
disabled?: boolean
fieldDiffStatus?: 'changed' | 'unchanged'
fieldDiffStatus?: FieldDiffStatus
allowExpandInPreview?: boolean
}
export function SubBlock({
@@ -54,6 +56,7 @@ export function SubBlock({
subBlockValues,
disabled = false,
fieldDiffStatus,
allowExpandInPreview,
}: SubBlockProps) {
const [isValidJson, setIsValidJson] = useState(true)
@@ -211,7 +214,8 @@ export function SubBlock({
subBlockId={config.id}
isPreview={isPreview}
previewValue={previewValue}
disabled={isDisabled}
disabled={allowExpandInPreview ? false : isDisabled}
allowExpandInPreview={allowExpandInPreview}
/>
)
case 'checkbox-list':
@@ -355,6 +359,7 @@ export function SubBlock({
disabled={isDisabled}
isPreview={isPreview}
previewValue={previewValue}
previewContextValues={subBlockValues}
/>
)
case 'project-selector':

View File

@@ -8,6 +8,7 @@ import { Card } from '@/components/ui/card'
import { Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui/tooltip'
import { parseCronToHumanReadable } from '@/lib/schedules/utils'
import { cn, validateName } from '@/lib/utils'
import { type DiffStatus, hasDiffStatus } from '@/lib/workflows/diff/types'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
import type { BlockConfig, SubBlockConfig, SubBlockType } from '@/blocks/types'
import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'
@@ -76,12 +77,16 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
: (currentBlock?.enabled ?? true)
// Get diff status from the block itself (set by diff engine)
const diffStatus =
currentWorkflow.isDiffMode && currentBlock ? (currentBlock as any).is_diff : undefined
const diffStatus: DiffStatus =
currentWorkflow.isDiffMode && currentBlock && hasDiffStatus(currentBlock)
? currentBlock.is_diff
: undefined
// Get field-level diff information
// Get field-level diff information for this specific block
const fieldDiff =
currentWorkflow.isDiffMode && currentBlock ? (currentBlock as any).field_diffs : undefined
currentWorkflow.isDiffMode && currentBlock && hasDiffStatus(currentBlock)
? currentBlock.field_diffs?.[id]
: undefined
// Debug: Log diff status for this block
useEffect(() => {
@@ -151,6 +156,24 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
const blockAdvancedMode = useWorkflowStore((state) => state.blocks[id]?.advancedMode ?? false)
const blockTriggerMode = useWorkflowStore((state) => state.blocks[id]?.triggerMode ?? false)
// Local UI state for diff mode controls
const [diffIsWide, setDiffIsWide] = useState<boolean>(isWide)
const [diffAdvancedMode, setDiffAdvancedMode] = useState<boolean>(blockAdvancedMode)
const [diffTriggerMode, setDiffTriggerMode] = useState<boolean>(blockTriggerMode)
useEffect(() => {
if (currentWorkflow.isDiffMode) {
setDiffIsWide(isWide)
setDiffAdvancedMode(blockAdvancedMode)
setDiffTriggerMode(blockTriggerMode)
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [currentWorkflow.isDiffMode, id])
const displayIsWide = currentWorkflow.isDiffMode ? diffIsWide : isWide
const displayAdvancedMode = currentWorkflow.isDiffMode ? diffAdvancedMode : blockAdvancedMode
const displayTriggerMode = currentWorkflow.isDiffMode ? diffTriggerMode : blockTriggerMode
// Collaborative workflow actions
const {
collaborativeUpdateBlockName,
@@ -414,6 +437,8 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
const isAdvancedMode = useWorkflowStore.getState().blocks[blockId]?.advancedMode ?? false
const isTriggerMode = useWorkflowStore.getState().blocks[blockId]?.triggerMode ?? false
const effectiveAdvanced = currentWorkflow.isDiffMode ? displayAdvancedMode : isAdvancedMode
const effectiveTrigger = currentWorkflow.isDiffMode ? displayTriggerMode : isTriggerMode
// Filter visible blocks and those that meet their conditions
const visibleSubBlocks = subBlocks.filter((block) => {
@@ -423,18 +448,18 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
if (block.type === ('trigger-config' as SubBlockType)) {
// Show trigger-config blocks when in trigger mode OR for pure trigger blocks
const isPureTriggerBlock = config?.triggers?.enabled && config.category === 'triggers'
return isTriggerMode || isPureTriggerBlock
return effectiveTrigger || isPureTriggerBlock
}
if (isTriggerMode && block.type !== ('trigger-config' as SubBlockType)) {
if (effectiveTrigger && block.type !== ('trigger-config' as SubBlockType)) {
// In trigger mode, hide all non-trigger-config blocks
return false
}
// Filter by mode if specified
if (block.mode) {
if (block.mode === 'basic' && isAdvancedMode) return false
if (block.mode === 'advanced' && !isAdvancedMode) return false
if (block.mode === 'basic' && effectiveAdvanced) return false
if (block.mode === 'advanced' && !effectiveAdvanced) return false
}
// If there's no condition, the block should be shown
@@ -562,7 +587,7 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
className={cn(
'relative cursor-default select-none shadow-md',
'transition-block-bg transition-ring',
isWide ? 'w-[480px]' : 'w-[320px]',
displayIsWide ? 'w-[480px]' : 'w-[320px]',
!isEnabled && 'shadow-sm',
isActive && 'animate-pulse-ring ring-2 ring-blue-500',
isPending && 'ring-2 ring-amber-500',
@@ -658,7 +683,7 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
onClick={handleNameClick}
title={name}
style={{
maxWidth: !isEnabled ? (isWide ? '200px' : '140px') : '180px',
maxWidth: !isEnabled ? (displayIsWide ? '200px' : '140px') : '180px',
}}
>
{name}
@@ -758,26 +783,30 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
variant='ghost'
size='sm'
onClick={() => {
if (userPermissions.canEdit) {
if (currentWorkflow.isDiffMode) {
setDiffAdvancedMode((prev) => !prev)
} else if (userPermissions.canEdit) {
collaborativeToggleBlockAdvancedMode(id)
}
}}
className={cn(
'h-7 p-1 text-gray-500',
blockAdvancedMode && 'text-[var(--brand-primary-hex)]',
!userPermissions.canEdit && 'cursor-not-allowed opacity-50'
displayAdvancedMode && 'text-[var(--brand-primary-hex)]',
!userPermissions.canEdit &&
!currentWorkflow.isDiffMode &&
'cursor-not-allowed opacity-50'
)}
disabled={!userPermissions.canEdit}
disabled={!userPermissions.canEdit && !currentWorkflow.isDiffMode}
>
<Code className='h-5 w-5' />
</Button>
</TooltipTrigger>
<TooltipContent side='top'>
{!userPermissions.canEdit
{!userPermissions.canEdit && !currentWorkflow.isDiffMode
? userPermissions.isOfflineMode
? 'Connection lost - please refresh'
: 'Read-only mode'
: blockAdvancedMode
: displayAdvancedMode
? 'Switch to Basic Mode'
: 'Switch to Advanced Mode'}
</TooltipContent>
@@ -791,27 +820,31 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
variant='ghost'
size='sm'
onClick={() => {
if (userPermissions.canEdit) {
if (currentWorkflow.isDiffMode) {
setDiffTriggerMode((prev) => !prev)
} else if (userPermissions.canEdit) {
// Toggle trigger mode using collaborative function
collaborativeToggleBlockTriggerMode(id)
}
}}
className={cn(
'h-7 p-1 text-gray-500',
blockTriggerMode && 'text-[#22C55E]',
!userPermissions.canEdit && 'cursor-not-allowed opacity-50'
displayTriggerMode && 'text-[#22C55E]',
!userPermissions.canEdit &&
!currentWorkflow.isDiffMode &&
'cursor-not-allowed opacity-50'
)}
disabled={!userPermissions.canEdit}
disabled={!userPermissions.canEdit && !currentWorkflow.isDiffMode}
>
<Zap className='h-5 w-5' />
</Button>
</TooltipTrigger>
<TooltipContent side='top'>
{!userPermissions.canEdit
{!userPermissions.canEdit && !currentWorkflow.isDiffMode
? userPermissions.isOfflineMode
? 'Connection lost - please refresh'
: 'Read-only mode'
: blockTriggerMode
: displayTriggerMode
? 'Switch to Action Mode'
: 'Switch to Trigger Mode'}
</TooltipContent>
@@ -892,17 +925,21 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
variant='ghost'
size='sm'
onClick={() => {
if (userPermissions.canEdit) {
if (currentWorkflow.isDiffMode) {
setDiffIsWide((prev) => !prev)
} else if (userPermissions.canEdit) {
collaborativeToggleBlockWide(id)
}
}}
className={cn(
'h-7 p-1 text-gray-500',
!userPermissions.canEdit && 'cursor-not-allowed opacity-50'
!userPermissions.canEdit &&
!currentWorkflow.isDiffMode &&
'cursor-not-allowed opacity-50'
)}
disabled={!userPermissions.canEdit}
disabled={!userPermissions.canEdit && !currentWorkflow.isDiffMode}
>
{isWide ? (
{displayIsWide ? (
<RectangleHorizontal className='h-5 w-5' />
) : (
<RectangleVertical className='h-5 w-5' />
@@ -910,11 +947,11 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
</Button>
</TooltipTrigger>
<TooltipContent side='top'>
{!userPermissions.canEdit
{!userPermissions.canEdit && !currentWorkflow.isDiffMode
? userPermissions.isOfflineMode
? 'Connection lost - please refresh'
: 'Read-only mode'
: isWide
: displayIsWide
? 'Narrow Block'
: 'Expand Block'}
</TooltipContent>
@@ -942,8 +979,13 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
blockId={id}
config={subBlock}
isConnecting={isConnecting}
isPreview={data.isPreview}
subBlockValues={data.subBlockValues}
isPreview={data.isPreview || currentWorkflow.isDiffMode}
subBlockValues={
data.subBlockValues ||
(currentWorkflow.isDiffMode && currentBlock
? (currentBlock as any).subBlocks
: undefined)
}
disabled={!userPermissions.canEdit}
fieldDiffStatus={
fieldDiff?.changed_fields?.includes(subBlock.id)
@@ -952,6 +994,7 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
? 'unchanged'
: undefined
}
allowExpandInPreview={currentWorkflow.isDiffMode}
/>
</div>
))}
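
Diff mode gets its own local copies of the wide/advanced/trigger toggles so a reviewer can flip them while inspecting a proposed block without touching the collaborative store; the display* values above pick the local copy in diff mode and the store value otherwise. A tiny sketch of that selection rule (illustrative helper, not in the diff):

// displayValue(currentWorkflow.isDiffMode, diffIsWide, isWide) corresponds to displayIsWide, etc.
function displayValue(isDiffMode: boolean, localDiffValue: boolean, storeValue: boolean): boolean {
  return isDiffMode ? localDiffValue : storeValue
}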

View File

@@ -1,6 +1,7 @@
import { useEffect } from 'react'
import { X } from 'lucide-react'
import { BaseEdge, EdgeLabelRenderer, type EdgeProps, getSmoothStepPath } from 'reactflow'
import type { EdgeDiffStatus } from '@/lib/workflows/diff/types'
import { useWorkflowDiffStore } from '@/stores/workflow-diff'
import { useCurrentWorkflow } from '../../hooks'
@@ -114,7 +115,7 @@ export const WorkflowEdge = ({
}, [diffAnalysis, id, currentWorkflow.blocks, currentWorkflow.edges, isShowingDiff])
// Determine edge diff status
let edgeDiffStatus: 'new' | 'deleted' | 'unchanged' | null = null
let edgeDiffStatus: EdgeDiffStatus = null
// Only attempt to determine diff status if all required data is available
if (diffAnalysis?.edge_diff && edgeIdentifier && isDiffReady) {

View File

@@ -14,7 +14,8 @@ const isContainerType = (blockType: string): boolean => {
blockType === 'loop' ||
blockType === 'parallel' ||
blockType === 'loopNode' ||
blockType === 'parallelNode'
blockType === 'parallelNode' ||
blockType === 'subflowNode'
)
}
@@ -325,7 +326,10 @@ export const updateNodeParent = (
} else if (currentParentId) {
const absolutePosition = getNodeAbsolutePosition(nodeId, getNodes)
// First set the absolute position so the node visually stays in place
updateBlockPosition(nodeId, absolutePosition)
// Then clear the parent relationship in the store (empty string removes parentId/extent)
updateParentId(nodeId, '', 'parent')
}
resizeLoopNodes()

View File

@@ -18,8 +18,7 @@ import { ControlBar } from '@/app/workspace/[workspaceId]/w/[workflowId]/compone
import { DiffControls } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/diff-controls'
import { ErrorBoundary } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/error/index'
import { Panel } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/panel'
import { LoopNodeComponent } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/loop/loop-node'
import { ParallelNodeComponent } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/parallel/parallel-node'
import { SubflowNodeComponent } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/subflow-node'
import { WorkflowBlock } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/workflow-block'
import { WorkflowEdge } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-edge/workflow-edge'
import { useCurrentWorkflow } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks'
@@ -48,8 +47,7 @@ const logger = createLogger('Workflow')
// Define custom node and edge types
const nodeTypes: NodeTypes = {
workflowBlock: WorkflowBlock,
loopNode: LoopNodeComponent,
parallelNode: ParallelNodeComponent,
subflowNode: SubflowNodeComponent,
}
const edgeTypes: EdgeTypes = { workflowEdge: WorkflowEdge }
@@ -343,6 +341,35 @@ const WorkflowContent = React.memo(() => {
}
}, [debouncedAutoLayout])
// Listen for explicit remove-from-subflow actions from ActionBar
useEffect(() => {
const handleRemoveFromSubflow = (event: Event) => {
const customEvent = event as CustomEvent<{ blockId: string }>
const { blockId } = customEvent.detail || ({} as any)
if (!blockId) return
try {
// Remove parent-child relationship while preserving absolute position
updateNodeParent(blockId, null)
// Remove all edges connected to this block
const connectedEdges = edgesForDisplay.filter(
(e) => e.source === blockId || e.target === blockId
)
connectedEdges.forEach((edge) => {
removeEdge(edge.id)
})
} catch (err) {
logger.error('Failed to remove from subflow', { err })
}
}
window.addEventListener('remove-from-subflow', handleRemoveFromSubflow as EventListener)
return () =>
window.removeEventListener('remove-from-subflow', handleRemoveFromSubflow as EventListener)
}, [getNodes, updateNodeParent, removeEdge, edgesForDisplay])
// Handle drops
const findClosestOutput = useCallback(
(newNodePosition: { x: number; y: number }): BlockData | null => {
@@ -451,7 +478,7 @@ const WorkflowContent = React.memo(() => {
{
width: 500,
height: 300,
type: type === 'loop' ? 'loopNode' : 'parallelNode',
type: 'subflowNode',
},
undefined,
undefined,
@@ -571,7 +598,7 @@ const WorkflowContent = React.memo(() => {
addBlock(id, data.type, name, relativePosition, {
width: 500,
height: 300,
type: data.type === 'loop' ? 'loopNode' : 'parallelNode',
type: 'subflowNode',
parentId: containerInfo.loopId,
extent: 'parent',
})
@@ -607,7 +634,7 @@ const WorkflowContent = React.memo(() => {
{
width: 500,
height: 300,
type: data.type === 'loop' ? 'loopNode' : 'parallelNode',
type: 'subflowNode',
},
undefined,
undefined,
@@ -657,10 +684,12 @@ const WorkflowContent = React.memo(() => {
const containerNode = getNodes().find((n) => n.id === containerInfo.loopId)
const containerType = containerNode?.type
if (containerType === 'loopNode' || containerType === 'parallelNode') {
if (containerType === 'subflowNode') {
// Connect from the container's start node to the new block
const startSourceHandle =
containerType === 'loopNode' ? 'loop-start-source' : 'parallel-start-source'
(containerNode?.data as any)?.kind === 'loop'
? 'loop-start-source'
: 'parallel-start-source'
addEdge({
id: crypto.randomUUID(),
@@ -781,9 +810,15 @@ const WorkflowContent = React.memo(() => {
if (containerElement) {
// Determine the type of container node for appropriate styling
const containerNode = getNodes().find((n) => n.id === containerInfo.loopId)
if (containerNode?.type === 'loopNode') {
if (
containerNode?.type === 'subflowNode' &&
(containerNode.data as any)?.kind === 'loop'
) {
containerElement.classList.add('loop-node-drag-over')
} else if (containerNode?.type === 'parallelNode') {
} else if (
containerNode?.type === 'subflowNode' &&
(containerNode.data as any)?.kind === 'parallel'
) {
containerElement.classList.add('parallel-node-drag-over')
}
document.body.style.cursor = 'copy'
@@ -918,31 +953,11 @@ const WorkflowContent = React.memo(() => {
}
// Handle container nodes differently
if (block.type === 'loop') {
if (block.type === 'loop' || block.type === 'parallel') {
const hasNestedError = nestedSubflowErrors.has(block.id)
nodeArray.push({
id: block.id,
type: 'loopNode',
position: block.position,
parentId: block.data?.parentId,
extent: block.data?.extent || undefined,
dragHandle: '.workflow-drag-handle',
data: {
...block.data,
width: block.data?.width || 500,
height: block.data?.height || 300,
hasNestedError,
},
})
return
}
// Handle parallel nodes
if (block.type === 'parallel') {
const hasNestedError = nestedSubflowErrors.has(block.id)
nodeArray.push({
id: block.id,
type: 'parallelNode',
type: 'subflowNode',
position: block.position,
parentId: block.data?.parentId,
extent: block.data?.extent || undefined,
@@ -952,6 +967,7 @@ const WorkflowContent = React.memo(() => {
width: block.data?.width || 500,
height: block.data?.height || 300,
hasNestedError,
kind: block.type === 'loop' ? 'loop' : 'parallel',
},
})
return
@@ -1191,13 +1207,13 @@ const WorkflowContent = React.memo(() => {
const intersectingNodes = getNodes()
.filter((n) => {
// Only consider container nodes that aren't the dragged node
if ((n.type !== 'loopNode' && n.type !== 'parallelNode') || n.id === node.id) return false
if (n.type !== 'subflowNode' || n.id === node.id) return false
// Skip if this container is already the parent of the node being dragged
if (n.id === currentParentId) return false
// Skip self-nesting: prevent a container from becoming its own descendant
if (node.type === 'loopNode' || node.type === 'parallelNode') {
if (node.type === 'subflowNode') {
// Get the full hierarchy of the potential parent
const hierarchy = getNodeHierarchyWrapper(n.id)
@@ -1212,14 +1228,14 @@ const WorkflowContent = React.memo(() => {
// Get dimensions based on node type
const nodeWidth =
node.type === 'loopNode' || node.type === 'parallelNode'
node.type === 'subflowNode'
? node.data?.width || 500
: node.type === 'condition'
? 250
: 350
const nodeHeight =
node.type === 'loopNode' || node.type === 'parallelNode'
node.type === 'subflowNode'
? node.data?.height || 300
: node.type === 'condition'
? 150
@@ -1286,9 +1302,15 @@ const WorkflowContent = React.memo(() => {
)
if (containerElement) {
// Apply appropriate class based on container type
if (bestContainerMatch.container.type === 'loopNode') {
if (
bestContainerMatch.container.type === 'subflowNode' &&
(bestContainerMatch.container.data as any)?.kind === 'loop'
) {
containerElement.classList.add('loop-node-drag-over')
} else if (bestContainerMatch.container.type === 'parallelNode') {
} else if (
bestContainerMatch.container.type === 'subflowNode' &&
(bestContainerMatch.container.data as any)?.kind === 'parallel'
) {
containerElement.classList.add('parallel-node-drag-over')
}
document.body.style.cursor = 'copy'
@@ -1356,7 +1378,7 @@ const WorkflowContent = React.memo(() => {
}
// If we're dragging a container node, do additional checks to prevent circular references
if ((node.type === 'loopNode' || node.type === 'parallelNode') && potentialParentId) {
if (node.type === 'subflowNode' && potentialParentId) {
// Get the hierarchy of the potential parent container
const parentHierarchy = getNodeHierarchyWrapper(potentialParentId)

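With the node types unified, workflow.tsx now maps both 'loop' and 'parallel' store blocks to a single 'subflowNode', carrying the distinction in data.kind. A condensed sketch of that mapping; parentId, extent and hasNestedError are omitted here for brevity, and the field names follow the diff above:

interface ContainerBlock {
  id: string
  type: 'loop' | 'parallel'
  position: { x: number; y: number }
  data?: { width?: number; height?: number; [key: string]: unknown }
}

function toSubflowNode(block: ContainerBlock) {
  return {
    id: block.id,
    type: 'subflowNode' as const,
    position: block.position,
    dragHandle: '.workflow-drag-handle',
    data: {
      ...block.data,
      width: block.data?.width || 500,
      height: block.data?.height || 300,
      kind: block.type, // 'loop' | 'parallel' — replaces the old loopNode/parallelNode split
    },
  }
}
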
View File

@@ -29,9 +29,7 @@ import { createLogger } from '@/lib/logs/console/logger'
const logger = createLogger('HelpModal')
// Define form schema
const formSchema = z.object({
email: z.string().email('Please enter a valid email address'),
subject: z.string().min(1, 'Subject is required'),
message: z.string().min(1, 'Message is required'),
type: z.enum(['bug', 'feedback', 'feature_request', 'other'], {
@@ -77,17 +75,35 @@ export function HelpModal({ open, onOpenChange }: HelpModalProps) {
} = useForm<FormValues>({
resolver: zodResolver(formSchema),
defaultValues: {
email: '',
subject: '',
message: '',
type: 'bug', // Set default value to 'bug'
},
mode: 'onChange',
mode: 'onSubmit',
})
// Reset state when modal opens/closes
useEffect(() => {
if (open) {
// Reset states when modal opens
setSubmitStatus(null)
setErrorMessage('')
setImageError(null)
setImages([])
setIsDragging(false)
setIsProcessing(false)
// Reset form to default values
reset({
subject: '',
message: '',
type: 'bug',
})
}
}, [open, reset])
// Listen for the custom event to open the help modal
useEffect(() => {
const handleOpenHelp = (event: CustomEvent) => {
const handleOpenHelp = () => {
onOpenChange(true)
}
@@ -268,8 +284,7 @@ export function HelpModal({ open, onOpenChange }: HelpModalProps) {
// Create FormData to handle file uploads
const formData = new FormData()
// Add form fields
formData.append('email', data.email)
// Add form fields (email will be retrieved server-side from session)
formData.append('subject', data.subject)
formData.append('message', data.message)
formData.append('type', data.type)
@@ -377,19 +392,6 @@ export function HelpModal({ open, onOpenChange }: HelpModalProps) {
)}
</div>
<div className='space-y-2'>
<Label htmlFor='email'>Email</Label>
<Input
id='email'
placeholder='your.email@example.com'
{...register('email')}
className={`h-9 rounded-[8px] ${errors.email ? 'border-red-500' : ''}`}
/>
{errors.email && (
<p className='mt-1 text-red-500 text-sm'>{errors.email.message}</p>
)}
</div>
<div className='space-y-2'>
<Label htmlFor='subject'>Subject</Label>
<Input
@@ -408,7 +410,7 @@ export function HelpModal({ open, onOpenChange }: HelpModalProps) {
<Textarea
id='message'
placeholder='Please provide details about your request...'
rows={5}
rows={6}
{...register('message')}
className={`rounded-[8px] ${errors.message ? 'border-red-500' : ''}`}
/>
@@ -426,9 +428,10 @@ export function HelpModal({ open, onOpenChange }: HelpModalProps) {
onDragOver={handleDragOver}
onDragLeave={handleDragLeave}
onDrop={handleDrop}
className={`flex items-center gap-4 ${
isDragging ? 'rounded-md bg-primary/5 p-2' : ''
className={`cursor-pointer rounded-lg border-2 border-muted-foreground/25 border-dashed p-6 text-center transition-colors hover:bg-muted/50 ${
isDragging ? 'border-primary bg-primary/5' : ''
}`}
onClick={() => fileInputRef.current?.click()}
>
<input
ref={fileInputRef}
@@ -438,17 +441,12 @@ export function HelpModal({ open, onOpenChange }: HelpModalProps) {
className='hidden'
multiple
/>
<Button
type='button'
variant='outline'
onClick={() => fileInputRef.current?.click()}
className='flex h-9 items-center justify-center gap-2 rounded-[8px]'
>
<Upload className='h-4 w-4' />
Upload Images
</Button>
<p className='text-muted-foreground text-xs'>
Drop images here or click to upload. Max 20MB per image.
<Upload className='mx-auto mb-2 h-8 w-8 text-muted-foreground' />
<p className='text-sm'>
{isDragging ? 'Drop images here!' : 'Drop images here or click to browse'}
</p>
<p className='mt-1 text-muted-foreground text-xs'>
JPEG, PNG, WebP, GIF (max 20MB each)
</p>
</div>
{imageError && <p className='mt-1 text-red-500 text-sm'>{imageError}</p>}
@@ -494,18 +492,13 @@ export function HelpModal({ open, onOpenChange }: HelpModalProps) {
{/* Overlay Footer */}
<div className='absolute inset-x-0 bottom-0 bg-background'>
<div className='flex w-full items-center justify-between px-6 py-4'>
<Button
variant='outline'
onClick={handleClose}
type='button'
className='h-9 rounded-[8px]'
>
<Button variant='outline' onClick={handleClose} type='button'>
Cancel
</Button>
<Button
type='submit'
disabled={isSubmitting || isProcessing}
className='h-9 rounded-[8px]'
className='bg-[var(--brand-primary-hex)] font-[480] text-primary-foreground shadow-[0_0_0_0_var(--brand-primary-hex)] transition-all duration-200 hover:bg-[var(--brand-primary-hover-hex)] hover:shadow-[0_0_0_4px_rgba(127,47,255,0.15)] disabled:opacity-50 disabled:hover:shadow-none'
>
{isSubmitting ? 'Submitting...' : 'Submit'}
</Button>

View File

@@ -0,0 +1,364 @@
import { useCallback, useEffect, useState } from 'react'
import { Check, Copy, Eye, EyeOff, KeySquare, Plus, Trash2 } from 'lucide-react'
import {
AlertDialog,
AlertDialogAction,
AlertDialogCancel,
AlertDialogContent,
AlertDialogDescription,
AlertDialogFooter,
AlertDialogHeader,
AlertDialogTitle,
Button,
Card,
Dialog,
DialogContent,
DialogDescription,
DialogFooter,
DialogHeader,
DialogTitle,
Input,
Label,
Skeleton,
Tooltip,
TooltipContent,
TooltipProvider,
TooltipTrigger,
} from '@/components/ui'
import { createLogger } from '@/lib/logs/console/logger'
const logger = createLogger('CopilotSettings')
interface CopilotKey {
id: string
apiKey: string
}
export function Copilot() {
const [keys, setKeys] = useState<CopilotKey[]>([])
const [isLoading, setIsLoading] = useState(false)
const [visible, setVisible] = useState<Record<string, boolean>>({})
// Create flow state
const [showNewKeyDialog, setShowNewKeyDialog] = useState(false)
const [newKey, setNewKey] = useState<CopilotKey | null>(null)
const [copiedKeyIds, setCopiedKeyIds] = useState<Record<string, boolean>>({})
const [newKeyCopySuccess, setNewKeyCopySuccess] = useState(false)
// Delete flow state
const [deleteKey, setDeleteKey] = useState<CopilotKey | null>(null)
const [showDeleteDialog, setShowDeleteDialog] = useState(false)
const hasKeys = keys.length > 0
const maskedValue = useCallback((value: string, show: boolean) => {
if (show) return value
if (!value) return ''
const last6 = value.slice(-6)
return `••••••••••${last6}`
}, [])
const fetchKeys = useCallback(async () => {
try {
setIsLoading(true)
const res = await fetch('/api/copilot/api-keys')
if (!res.ok) throw new Error(`Failed to fetch: ${res.status}`)
const data = await res.json()
setKeys(Array.isArray(data.keys) ? data.keys : [])
} catch (error) {
logger.error('Failed to fetch copilot keys', { error })
setKeys([])
} finally {
setIsLoading(false)
}
}, [])
useEffect(() => {
fetchKeys()
}, [fetchKeys])
const onGenerate = async () => {
try {
setIsLoading(true)
const res = await fetch('/api/copilot/api-keys/generate', { method: 'POST' })
if (!res.ok) {
const body = await res.json().catch(() => ({}))
throw new Error(body.error || 'Failed to generate API key')
}
const data = await res.json()
// Show the new key dialog with the API key (only shown once)
if (data?.key) {
setNewKey(data.key)
setShowNewKeyDialog(true)
}
await fetchKeys()
} catch (error) {
logger.error('Failed to generate copilot API key', { error })
} finally {
setIsLoading(false)
}
}
const onDelete = async (id: string) => {
try {
setIsLoading(true)
const res = await fetch(`/api/copilot/api-keys?id=${encodeURIComponent(id)}`, {
method: 'DELETE',
})
if (!res.ok) {
const body = await res.json().catch(() => ({}))
throw new Error(body.error || 'Failed to delete API key')
}
await fetchKeys()
} catch (error) {
logger.error('Failed to delete copilot API key', { error })
} finally {
setIsLoading(false)
}
}
const onCopy = async (value: string, keyId?: string) => {
try {
await navigator.clipboard.writeText(value)
if (keyId) {
setCopiedKeyIds((prev) => ({ ...prev, [keyId]: true }))
setTimeout(() => {
setCopiedKeyIds((prev) => ({ ...prev, [keyId]: false }))
}, 1500)
} else {
setNewKeyCopySuccess(true)
setTimeout(() => setNewKeyCopySuccess(false), 1500)
}
} catch (error) {
logger.error('Copy failed', { error })
}
}
// UI helpers
const isFetching = isLoading && keys.length === 0
return (
<div className='space-y-6 p-6'>
<h2 className='font-semibold text-xl'>Copilot API Keys</h2>
<p className='text-muted-foreground text-sm leading-relaxed'>
Copilot API keys let you authenticate requests to the Copilot endpoints. Keep keys secret
and rotate them regularly.
</p>
<p className='text-muted-foreground text-xs italic'>
For external deployments, set the <span className='font-mono'>COPILOT_API_KEY</span>{' '}
environment variable on that instance to one of the keys generated here.
</p>
{isFetching ? (
<div className='mt-6 space-y-3'>
<Card className='p-4'>
<div className='flex items-center justify-between'>
<div>
<Skeleton className='mb-2 h-5 w-32' />
<Skeleton className='h-4 w-48' />
</div>
<Skeleton className='h-8 w-8 rounded-md' />
</div>
</Card>
<Card className='p-4'>
<div className='flex items-center justify-between'>
<div>
<Skeleton className='mb-2 h-5 w-28' />
<Skeleton className='h-4 w-40' />
</div>
<Skeleton className='h-8 w-8 rounded-md' />
</div>
</Card>
</div>
) : !hasKeys ? (
<div className='mt-6 rounded-md border border-dashed p-8'>
<div className='flex flex-col items-center justify-center text-center'>
<div className='flex h-12 w-12 items-center justify-center rounded-full bg-muted'>
<KeySquare className='h-6 w-6 text-primary' />
</div>
<h3 className='mt-4 font-medium text-lg'>No Copilot keys yet</h3>
<p className='mt-2 max-w-sm text-muted-foreground text-sm'>
Generate a Copilot API key to authenticate requests to the Copilot SDK and methods.
</p>
<Button
variant='default'
className='mt-4'
onClick={onGenerate}
size='sm'
disabled={isLoading}
>
<Plus className='mr-1.5 h-4 w-4' /> Generate Key
</Button>
</div>
</div>
) : (
<div className='mt-6 space-y-4'>
{keys.map((k) => {
const isVisible = !!visible[k.id]
const value = maskedValue(k.apiKey, isVisible)
return (
<Card key={k.id} className='p-4 transition-shadow hover:shadow-sm'>
<div className='flex items-center justify-between gap-4'>
<div className='min-w-0 flex-1'>
<div className='rounded bg-muted/50 px-2 py-1 font-mono text-sm'>{value}</div>
</div>
<div className='flex items-center gap-2'>
<TooltipProvider>
<Tooltip>
<TooltipTrigger asChild>
<Button
variant='secondary'
size='icon'
onClick={() => setVisible((v) => ({ ...v, [k.id]: !isVisible }))}
className='h-8 w-8'
>
{isVisible ? (
<EyeOff className='h-4 w-4' />
) : (
<Eye className='h-4 w-4' />
)}
</Button>
</TooltipTrigger>
<TooltipContent>{isVisible ? 'Hide' : 'Reveal'}</TooltipContent>
</Tooltip>
</TooltipProvider>
<TooltipProvider>
<Tooltip>
<TooltipTrigger asChild>
<Button
variant='secondary'
size='icon'
onClick={() => onCopy(k.apiKey, k.id)}
className='h-8 w-8'
>
{copiedKeyIds[k.id] ? (
<Check className='h-4 w-4 text-green-500' />
) : (
<Copy className='h-4 w-4' />
)}
</Button>
</TooltipTrigger>
<TooltipContent>Copy</TooltipContent>
</Tooltip>
</TooltipProvider>
<TooltipProvider>
<Tooltip>
<TooltipTrigger asChild>
<Button
variant='ghost'
size='icon'
onClick={() => {
setDeleteKey(k)
setShowDeleteDialog(true)
}}
className='h-8 w-8 text-destructive hover:bg-destructive/10'
>
<Trash2 className='h-4 w-4' />
</Button>
</TooltipTrigger>
<TooltipContent>Delete</TooltipContent>
</Tooltip>
</TooltipProvider>
</div>
</div>
</Card>
)
})}
</div>
)}
{/* New Key Dialog */}
<Dialog
open={showNewKeyDialog}
onOpenChange={(open) => {
setShowNewKeyDialog(open)
if (!open) setNewKey(null)
}}
>
<DialogContent className='sm:max-w-md'>
<DialogHeader>
<DialogTitle>Your Copilot API key has been created</DialogTitle>
<DialogDescription>
This is the only time you will see the full key. Copy it now and store it securely.
</DialogDescription>
</DialogHeader>
{newKey && (
<div className='space-y-4 py-3'>
<div className='space-y-2'>
<Label>API Key</Label>
<div className='relative'>
<Input
readOnly
value={newKey.apiKey}
className='border-slate-300 bg-muted/50 pr-10 font-mono text-sm'
/>
<Button
variant='ghost'
size='sm'
className='-translate-y-1/2 absolute top-1/2 right-1 h-7 w-7'
onClick={() => onCopy(newKey.apiKey)}
>
{newKeyCopySuccess ? (
<Check className='h-4 w-4 text-green-500' />
) : (
<Copy className='h-4 w-4' />
)}
<span className='sr-only'>Copy to clipboard</span>
</Button>
</div>
<p className='mt-1 text-muted-foreground text-xs'>
For security, we don't store the complete key. You won't be able to view it again.
</p>
</div>
</div>
)}
<DialogFooter className='sm:justify-end'>
<Button
onClick={() => {
setShowNewKeyDialog(false)
setNewKey(null)
}}
>
Close
</Button>
</DialogFooter>
</DialogContent>
</Dialog>
{/* Delete Confirmation Dialog */}
<AlertDialog open={showDeleteDialog} onOpenChange={setShowDeleteDialog}>
<AlertDialogContent className='sm:max-w-md'>
<AlertDialogHeader>
<AlertDialogTitle>Delete Copilot API Key</AlertDialogTitle>
<AlertDialogDescription>
{deleteKey && (
<>
Are you sure you want to delete this Copilot API key? This action cannot be
undone.
</>
)}
</AlertDialogDescription>
</AlertDialogHeader>
<AlertDialogFooter className='gap-2 sm:justify-end'>
<AlertDialogCancel onClick={() => setDeleteKey(null)}>Cancel</AlertDialogCancel>
<AlertDialogAction
onClick={() => {
if (deleteKey) {
onDelete(deleteKey.id)
}
setShowDeleteDialog(false)
setDeleteKey(null)
}}
className='bg-destructive text-destructive-foreground hover:bg-destructive/90'
>
Delete
</AlertDialogAction>
</AlertDialogFooter>
</AlertDialogContent>
</AlertDialog>
</div>
)
}
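
The new Copilot settings panel above drives three endpoints: GET /api/copilot/api-keys to list keys, POST /api/copilot/api-keys/generate to mint a key, and DELETE /api/copilot/api-keys?id=... to revoke one. A standalone sketch of those calls, assuming only the response fields the component actually reads (keys, key, error):

interface CopilotKey { id: string; apiKey: string } // as declared in the component above

async function listCopilotKeys(): Promise<CopilotKey[]> {
  const res = await fetch('/api/copilot/api-keys')
  if (!res.ok) throw new Error(`Failed to fetch: ${res.status}`)
  const data = await res.json()
  return Array.isArray(data.keys) ? data.keys : []
}

async function generateCopilotKey(): Promise<CopilotKey | null> {
  const res = await fetch('/api/copilot/api-keys/generate', { method: 'POST' })
  if (!res.ok) throw new Error('Failed to generate API key')
  const data = await res.json()
  return data?.key ?? null // the full key is only returned at creation time, per the dialog copy above
}

async function deleteCopilotKey(id: string): Promise<void> {
  const res = await fetch(`/api/copilot/api-keys?id=${encodeURIComponent(id)}`, { method: 'DELETE' })
  if (!res.ok) throw new Error('Failed to delete API key')
}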

View File

@@ -1,5 +1,6 @@
export { Account } from './account/account'
export { ApiKeys } from './api-keys/api-keys'
export { Copilot } from './copilot/copilot'
export { Credentials } from './credentials/credentials'
export { EnvironmentVariables } from './environment/environment'
export { General } from './general/general'

View File

@@ -1,4 +1,5 @@
import {
Bot,
CreditCard,
KeyRound,
KeySquare,
@@ -8,10 +9,13 @@ import {
UserCircle,
Users,
} from 'lucide-react'
import { isBillingEnabled } from '@/lib/environment'
import { getEnv, isTruthy } from '@/lib/env'
import { isHosted } from '@/lib/environment'
import { cn } from '@/lib/utils'
import { useSubscriptionStore } from '@/stores/subscription/store'
const isBillingEnabled = isTruthy(getEnv('NEXT_PUBLIC_BILLING_ENABLED'))
interface SettingsNavigationProps {
activeSection: string
onSectionChange: (
@@ -24,6 +28,7 @@ interface SettingsNavigationProps {
| 'subscription'
| 'team'
| 'privacy'
| 'copilot'
) => void
hasOrganization: boolean
}
@@ -37,6 +42,7 @@ type NavigationItem = {
| 'apikeys'
| 'subscription'
| 'team'
| 'copilot'
| 'privacy'
label: string
icon: React.ComponentType<{ className?: string }>
@@ -70,6 +76,11 @@ const allNavigationItems: NavigationItem[] = [
label: 'API Keys',
icon: KeySquare,
},
{
id: 'copilot',
label: 'Copilot',
icon: Bot,
},
{
id: 'privacy',
label: 'Privacy',
@@ -99,6 +110,9 @@ export function SettingsNavigation({
const subscription = getSubscriptionStatus()
const navigationItems = allNavigationItems.filter((item) => {
if (item.id === 'copilot' && !isHosted) {
return false
}
if (item.hideWhenBillingDisabled && !isBillingEnabled) {
return false
}

View File

@@ -3,12 +3,14 @@
import { useEffect, useRef, useState } from 'react'
import { X } from 'lucide-react'
import { Button, Dialog, DialogContent, DialogHeader, DialogTitle } from '@/components/ui'
import { isBillingEnabled } from '@/lib/environment'
import { getEnv, isTruthy } from '@/lib/env'
import { isHosted } from '@/lib/environment'
import { createLogger } from '@/lib/logs/console/logger'
import { cn } from '@/lib/utils'
import {
Account,
ApiKeys,
Copilot,
Credentials,
EnvironmentVariables,
General,
@@ -22,6 +24,8 @@ import { useGeneralStore } from '@/stores/settings/general/store'
const logger = createLogger('SettingsModal')
const isBillingEnabled = isTruthy(getEnv('NEXT_PUBLIC_BILLING_ENABLED'))
interface SettingsModalProps {
open: boolean
onOpenChange: (open: boolean) => void
@@ -36,6 +40,7 @@ type SettingsSection =
| 'subscription'
| 'team'
| 'privacy'
| 'copilot'
export function SettingsModal({ open, onOpenChange }: SettingsModalProps) {
const [activeSection, setActiveSection] = useState<SettingsSection>('general')
@@ -146,6 +151,11 @@ export function SettingsModal({ open, onOpenChange }: SettingsModalProps) {
<TeamManagement />
</div>
)}
{isHosted && (
<div className={cn('h-full', activeSection === 'copilot' ? 'block' : 'hidden')}>
<Copilot />
</div>
)}
<div className={cn('h-full', activeSection === 'privacy' ? 'block' : 'hidden')}>
<Privacy />
</div>

View File

@@ -5,7 +5,7 @@ import { HelpCircle, LibraryBig, ScrollText, Search, Settings, Shapes } from 'lu
import { useParams, usePathname, useRouter } from 'next/navigation'
import { Button, ScrollArea, Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui'
import { useSession } from '@/lib/auth-client'
import { isBillingEnabled } from '@/lib/environment'
import { getEnv, isTruthy } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { generateWorkspaceName } from '@/lib/naming'
import { cn } from '@/lib/utils'
@@ -38,6 +38,8 @@ const logger = createLogger('Sidebar')
const SIDEBAR_GAP = 12 // 12px gap between components - easily editable
const isBillingEnabled = isTruthy(getEnv('NEXT_PUBLIC_BILLING_ENABLED'))
/**
* Optimized auto-scroll hook for smooth drag operations
* Extracted outside component for better performance

View File

@@ -15,8 +15,7 @@ import 'reactflow/dist/style.css'
import { createLogger } from '@/lib/logs/console/logger'
import { cn } from '@/lib/utils'
import { LoopNodeComponent } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/loop/loop-node'
import { ParallelNodeComponent } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/parallel/parallel-node'
import { SubflowNodeComponent } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/subflow-node'
import { WorkflowBlock } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/workflow-block'
import { WorkflowEdge } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-edge/workflow-edge'
import { getBlock } from '@/blocks'
@@ -39,8 +38,7 @@ interface WorkflowPreviewProps {
// Define node types - the components now handle preview mode internally
const nodeTypes: NodeTypes = {
workflowBlock: WorkflowBlock,
loopNode: LoopNodeComponent,
parallelNode: ParallelNodeComponent,
subflowNode: SubflowNodeComponent,
}
// Define edge types
@@ -131,7 +129,7 @@ export function WorkflowPreview({
if (block.type === 'loop') {
nodeArray.push({
id: block.id,
type: 'loopNode',
type: 'subflowNode',
position: absolutePosition,
parentId: block.data?.parentId,
extent: block.data?.extent || undefined,
@@ -142,6 +140,7 @@ export function WorkflowPreview({
height: block.data?.height || 300,
state: 'valid',
isPreview: true,
kind: 'loop',
},
})
return
@@ -150,7 +149,7 @@ export function WorkflowPreview({
if (block.type === 'parallel') {
nodeArray.push({
id: block.id,
type: 'parallelNode',
type: 'subflowNode',
position: absolutePosition,
parentId: block.data?.parentId,
extent: block.data?.extent || undefined,
@@ -161,6 +160,7 @@ export function WorkflowPreview({
height: block.data?.height || 300,
state: 'valid',
isPreview: true,
kind: 'parallel',
},
})
return

View File

@@ -5,12 +5,14 @@ import { BlockPathCalculator } from '@/lib/block-path-calculator'
import { extractFieldsFromSchema, parseResponseFormatSafely } from '@/lib/response-format'
import { cn } from '@/lib/utils'
import { getBlock } from '@/blocks'
import type { BlockConfig } from '@/blocks/types'
import { Serializer } from '@/serializer'
import { useVariablesStore } from '@/stores/panel/variables/store'
import type { Variable } from '@/stores/panel/variables/types'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
import type { BlockState } from '@/stores/workflows/workflow/types'
import { getTool } from '@/tools/utils'
import { getTriggersByProvider } from '@/triggers'
@@ -51,8 +53,8 @@ export const checkTagTrigger = (text: string, cursorPosition: number): { show: b
const BLOCK_COLORS = {
VARIABLE: '#2F8BFF',
DEFAULT: '#2F55FF',
LOOP: '#8857E6',
PARALLEL: '#FF5757',
LOOP: '#2FB3FF',
PARALLEL: '#FEE12B',
} as const
const TAG_PREFIXES = {
@@ -73,11 +75,11 @@ const getSubBlockValue = (blockId: string, property: string): any => {
const createTagEventHandlers = (
tag: string,
group: any,
group: BlockTagGroup | undefined,
tagIndex: number,
handleTagSelect: (tag: string, group?: any) => void,
handleTagSelect: (tag: string, group?: BlockTagGroup) => void,
setSelectedIndex: (index: number) => void,
setHoveredNested: (value: any) => void
setHoveredNested: (value: { tag: string; index: number } | null) => void
) => ({
onMouseEnter: () => {
setSelectedIndex(tagIndex >= 0 ? tagIndex : 0)
@@ -96,8 +98,8 @@ const createTagEventHandlers = (
})
const getOutputTypeForPath = (
block: any,
blockConfig: any,
block: BlockState,
blockConfig: BlockConfig | null,
blockId: string,
outputPath: string
): string => {
@@ -137,7 +139,9 @@ const getOutputTypeForPath = (
// For API mode, check inputFormat for custom field types
const inputFormatValue = getSubBlockValue(blockId, 'inputFormat')
if (inputFormatValue && Array.isArray(inputFormatValue)) {
const field = inputFormatValue.find((f: any) => f.name === outputPath)
const field = inputFormatValue.find(
(f: { name?: string; type?: string }) => f.name === outputPath
)
if (field?.type) {
return field.type
}
@@ -224,7 +228,7 @@ const generateOutputPathsWithTypes = (
return paths
}
const generateToolOutputPaths = (blockConfig: any, operation: string): string[] => {
const generateToolOutputPaths = (blockConfig: BlockConfig, operation: string): string[] => {
if (!blockConfig?.tools?.config?.tool) return []
try {
@@ -244,7 +248,7 @@ const generateToolOutputPaths = (blockConfig: any, operation: string): string[]
}
}
const getToolOutputType = (blockConfig: any, operation: string, path: string): string => {
const getToolOutputType = (blockConfig: BlockConfig, operation: string, path: string): string => {
if (!blockConfig?.tools?.config?.tool) return 'any'
try {
@@ -366,9 +370,9 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
const metricsValue = getSubBlockValue(activeSourceBlockId, 'metrics')
if (metricsValue && Array.isArray(metricsValue) && metricsValue.length > 0) {
const validMetrics = metricsValue.filter((metric: any) => metric?.name)
const validMetrics = metricsValue.filter((metric: { name?: string }) => metric?.name)
blockTags = validMetrics.map(
(metric: any) => `${normalizedBlockName}.${metric.name.toLowerCase()}`
(metric: { name: string }) => `${normalizedBlockName}.${metric.name.toLowerCase()}`
)
} else {
const outputPaths = generateOutputPaths(blockConfig.outputs)
@@ -402,8 +406,8 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
inputFormatValue.length > 0
) {
blockTags = inputFormatValue
.filter((field: any) => field.name && field.name.trim() !== '')
.map((field: any) => `${normalizedBlockName}.${field.name}`)
.filter((field: { name?: string }) => field.name && field.name.trim() !== '')
.map((field: { name: string }) => `${normalizedBlockName}.${field.name}`)
} else {
blockTags = [normalizedBlockName]
}
@@ -556,9 +560,14 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
)
let containingParallelBlockId: string | null = null
if (containingParallel) {
const [parallelId] = containingParallel
const [parallelId, parallel] = containingParallel
containingParallelBlockId = parallelId
const contextualTags: string[] = ['index', 'currentItem', 'items']
const parallelType = parallel.parallelType || 'count'
const contextualTags: string[] = ['index']
if (parallelType === 'collection') {
contextualTags.push('currentItem')
contextualTags.push('items')
}
const containingParallelBlock = blocks[parallelId]
if (containingParallelBlock) {
@@ -629,9 +638,9 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
const metricsValue = getSubBlockValue(accessibleBlockId, 'metrics')
if (metricsValue && Array.isArray(metricsValue) && metricsValue.length > 0) {
const validMetrics = metricsValue.filter((metric: any) => metric?.name)
const validMetrics = metricsValue.filter((metric: { name?: string }) => metric?.name)
blockTags = validMetrics.map(
(metric: any) => `${normalizedBlockName}.${metric.name.toLowerCase()}`
(metric: { name: string }) => `${normalizedBlockName}.${metric.name.toLowerCase()}`
)
} else {
const outputPaths = generateOutputPaths(blockConfig.outputs)
@@ -665,8 +674,8 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
inputFormatValue.length > 0
) {
blockTags = inputFormatValue
.filter((field: any) => field.name && field.name.trim() !== '')
.map((field: any) => `${normalizedBlockName}.${field.name}`)
.filter((field: { name?: string }) => field.name && field.name.trim() !== '')
.map((field: { name: string }) => `${normalizedBlockName}.${field.name}`)
} else {
blockTags = [normalizedBlockName]
}
@@ -880,8 +889,8 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
liveCursor = activeEl.selectionStart ?? cursorPosition
// Prefer the active element value if present. This ensures we include the most
// recently typed character(s) that might not yet be reflected in React state.
if (typeof (activeEl as any).value === 'string') {
liveValue = (activeEl as any).value
if ('value' in activeEl && typeof activeEl.value === 'string') {
liveValue = activeEl.value
}
}
}
@@ -1289,7 +1298,7 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
tagDescription = getOutputTypeForPath(
block,
blockConfig,
blockConfig || null,
group.blockId,
outputPath
)
@@ -1429,7 +1438,7 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
childType = getOutputTypeForPath(
block,
blockConfig,
blockConfig || null,
group.blockId,
childOutputPath
)
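The hunk above narrows the contextual tags offered inside a parallel subflow: count-based parallels now expose only index, while collection-based parallels keep currentItem and items. A minimal sketch of that selection logic, assuming a parallelType field shaped like the one read in the diff:

function contextualTagsForParallel(parallelType: 'count' | 'collection' | undefined): string[] {
  // 'count' parallels have no backing collection, so only the index is meaningful
  const tags = ['index']
  if ((parallelType ?? 'count') === 'collection') {
    tags.push('currentItem', 'items')
  }
  return tags
}

// contextualTagsForParallel('count')      -> ['index']
// contextualTagsForParallel('collection') -> ['index', 'currentItem', 'items']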

View File

@@ -0,0 +1,13 @@
CREATE TABLE "copilot_api_keys" (
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
"user_id" text NOT NULL,
"api_key_encrypted" text NOT NULL,
"api_key_lookup" text NOT NULL
);
--> statement-breakpoint
ALTER TABLE "user_stats" ADD COLUMN "total_copilot_cost" numeric DEFAULT '0' NOT NULL;--> statement-breakpoint
ALTER TABLE "user_stats" ADD COLUMN "total_copilot_tokens" integer DEFAULT 0 NOT NULL;--> statement-breakpoint
ALTER TABLE "user_stats" ADD COLUMN "total_copilot_calls" integer DEFAULT 0 NOT NULL;--> statement-breakpoint
ALTER TABLE "copilot_api_keys" ADD CONSTRAINT "copilot_api_keys_user_id_user_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."user"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
CREATE INDEX "copilot_api_keys_api_key_encrypted_hash_idx" ON "copilot_api_keys" USING hash ("api_key_encrypted");--> statement-breakpoint
CREATE INDEX "copilot_api_keys_lookup_hash_idx" ON "copilot_api_keys" USING hash ("api_key_lookup");

View File

@@ -0,0 +1 @@
ALTER TABLE "workflow" DROP COLUMN "state";

View File

@@ -0,0 +1,167 @@
-- One-shot data migration to create/populate execution_data & cost, then drop legacy columns
-- Safe on reruns and across differing prior schemas
-- Note: Depending on runner timeouts, this may need to be run manually
-- 1) Ensure execution_data exists (prefer rename if only metadata exists)
DO $$
BEGIN
IF EXISTS (
SELECT 1 FROM information_schema.columns
WHERE table_schema = 'public' AND table_name = 'workflow_execution_logs' AND column_name = 'metadata'
) AND NOT EXISTS (
SELECT 1 FROM information_schema.columns
WHERE table_schema = 'public' AND table_name = 'workflow_execution_logs' AND column_name = 'execution_data'
) THEN
EXECUTE 'ALTER TABLE workflow_execution_logs RENAME COLUMN metadata TO execution_data';
END IF;
END $$;--> statement-breakpoint
ALTER TABLE "workflow_execution_logs"
ADD COLUMN IF NOT EXISTS "execution_data" jsonb NOT NULL DEFAULT '{}'::jsonb,
ADD COLUMN IF NOT EXISTS "cost" jsonb;--> statement-breakpoint
-- 2) Process the backfill in batches to avoid large temporary files on big datasets
DO $$
DECLARE
v_batch_size integer := 500; -- keep batches small to avoid timeouts/spills
v_rows_updated integer := 0;
v_rows_selected integer := 0;
v_last_id text := '';
v_last_created_at timestamp := '1970-01-01 00:00:00';
BEGIN
-- modest per-statement timeout; adjust based on observed per-batch runtime
PERFORM set_config('statement_timeout', '180s', true);
LOOP
CREATE TEMP TABLE IF NOT EXISTS _tmp_candidate_ids(id text, created_at timestamp) ON COMMIT DROP;
TRUNCATE _tmp_candidate_ids;
INSERT INTO _tmp_candidate_ids(id, created_at)
SELECT id, created_at
FROM workflow_execution_logs
WHERE (created_at, id) > (v_last_created_at, v_last_id) AND cost IS NULL
ORDER BY created_at, id
LIMIT v_batch_size;
SELECT COUNT(*) INTO v_rows_selected FROM _tmp_candidate_ids;
EXIT WHEN v_rows_selected = 0;
SELECT created_at, id
INTO v_last_created_at, v_last_id
FROM _tmp_candidate_ids
ORDER BY created_at DESC, id DESC
LIMIT 1;
WITH RECURSIVE
spans AS (
SELECT l.id, s.span
FROM workflow_execution_logs l
JOIN _tmp_candidate_ids c ON c.id = l.id
LEFT JOIN LATERAL jsonb_array_elements(
COALESCE(
CASE
WHEN jsonb_typeof(l.execution_data->'traceSpans') = 'array' THEN l.execution_data->'traceSpans'
ELSE '[]'::jsonb
END
)
) s(span) ON true
UNION ALL
SELECT spans.id, c.span
FROM spans
JOIN LATERAL jsonb_array_elements(COALESCE(spans.span->'children','[]'::jsonb)) c(span) ON true
),
agg AS (
SELECT id,
SUM(COALESCE((span->'cost'->>'input')::numeric,0)) AS agg_input,
SUM(COALESCE((span->'cost'->>'output')::numeric,0)) AS agg_output,
SUM(COALESCE((span->'cost'->>'total')::numeric,0)) AS agg_total,
SUM(COALESCE((span->'cost'->'tokens'->>'prompt')::numeric, COALESCE((span->'tokens'->>'prompt')::numeric,0))) AS agg_tokens_prompt,
SUM(COALESCE((span->'cost'->'tokens'->>'completion')::numeric, COALESCE((span->'tokens'->>'completion')::numeric,0))) AS agg_tokens_completion,
SUM(COALESCE((span->'cost'->'tokens'->>'total')::numeric, COALESCE((span->'tokens'->>'total')::numeric,0))) AS agg_tokens_total
FROM spans
GROUP BY id
),
model_rows AS (
SELECT id,
(span->'cost'->>'model') AS model,
COALESCE((span->'cost'->>'input')::numeric,0) AS input,
COALESCE((span->'cost'->>'output')::numeric,0) AS output,
COALESCE((span->'cost'->>'total')::numeric,0) AS total,
COALESCE((span->'cost'->'tokens'->>'prompt')::numeric,0) AS tokens_prompt,
COALESCE((span->'cost'->'tokens'->>'completion')::numeric,0) AS tokens_completion,
COALESCE((span->'cost'->'tokens'->>'total')::numeric,0) AS tokens_total
FROM spans
WHERE span ? 'cost' AND (span->'cost'->>'model') IS NOT NULL
),
model_sums AS (
SELECT id,
model,
SUM(input) AS input,
SUM(output) AS output,
SUM(total) AS total,
SUM(tokens_prompt) AS tokens_prompt,
SUM(tokens_completion) AS tokens_completion,
SUM(tokens_total) AS tokens_total
FROM model_rows
GROUP BY id, model
),
models AS (
SELECT id,
jsonb_object_agg(model, jsonb_build_object(
'input', input,
'output', output,
'total', total,
'tokens', jsonb_build_object(
'prompt', tokens_prompt,
'completion', tokens_completion,
'total', tokens_total
)
)) AS models
FROM model_sums
GROUP BY id
),
tb AS (
SELECT l.id,
NULLIF((l.execution_data->'tokenBreakdown'->>'prompt')::numeric, 0) AS prompt,
NULLIF((l.execution_data->'tokenBreakdown'->>'completion')::numeric, 0) AS completion
FROM workflow_execution_logs l
JOIN _tmp_candidate_ids c ON c.id = l.id
)
UPDATE workflow_execution_logs AS l
SET cost = jsonb_strip_nulls(
jsonb_build_object(
'total', COALESCE((to_jsonb(l)->>'total_cost')::numeric, NULLIF(agg.agg_total,0)),
'input', COALESCE((to_jsonb(l)->>'total_input_cost')::numeric, NULLIF(agg.agg_input,0)),
'output', COALESCE((to_jsonb(l)->>'total_output_cost')::numeric, NULLIF(agg.agg_output,0)),
'tokens', CASE
WHEN (to_jsonb(l) ? 'total_tokens') OR tb.prompt IS NOT NULL OR tb.completion IS NOT NULL OR NULLIF(agg.agg_tokens_total,0) IS NOT NULL THEN
jsonb_strip_nulls(
jsonb_build_object(
'total', COALESCE((to_jsonb(l)->>'total_tokens')::numeric, NULLIF(agg.agg_tokens_total,0)),
'prompt', COALESCE(tb.prompt, NULLIF(agg.agg_tokens_prompt,0)),
'completion', COALESCE(tb.completion, NULLIF(agg.agg_tokens_completion,0))
)
)
ELSE NULL
END,
'models', models.models
)
)
FROM agg
LEFT JOIN models ON models.id = agg.id
LEFT JOIN tb ON tb.id = agg.id
WHERE l.id = agg.id;
GET DIAGNOSTICS v_rows_updated = ROW_COUNT;
-- continue advancing by id until no more rows are selected
END LOOP;
END $$;--> statement-breakpoint
-- 3) Drop legacy columns now that backfill is complete
ALTER TABLE "workflow_execution_logs"
DROP COLUMN IF EXISTS "message",
DROP COLUMN IF EXISTS "block_count",
DROP COLUMN IF EXISTS "success_count",
DROP COLUMN IF EXISTS "error_count",
DROP COLUMN IF EXISTS "skipped_count",
DROP COLUMN IF EXISTS "total_cost",
DROP COLUMN IF EXISTS "total_input_cost",
DROP COLUMN IF EXISTS "total_output_cost",
DROP COLUMN IF EXISTS "total_tokens",
DROP COLUMN IF EXISTS "metadata";

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -512,6 +512,27 @@
"when": 1755286337930,
"tag": "0073_hot_champions",
"breakpoints": true
},
{
"idx": 74,
"version": "7",
"when": 1755304368539,
"tag": "0074_abnormal_dreadnoughts",
"breakpoints": true
},
{
"idx": 75,
"version": "7",
"when": 1755319635487,
"tag": "0075_lush_moonstone",
"breakpoints": true
},
{
"idx": 76,
"version": "7",
"when": 1755375658161,
"tag": "0076_damp_vector",
"breakpoints": true
}
]
}

View File

@@ -121,8 +121,6 @@ export const workflow = pgTable(
folderId: text('folder_id').references(() => workflowFolder.id, { onDelete: 'set null' }),
name: text('name').notNull(),
description: text('description'),
// DEPRECATED: Use normalized tables (workflow_blocks, workflow_edges, workflow_subflows) instead
state: json('state').notNull(),
color: text('color').notNull().default('#3972F6'),
lastSynced: timestamp('last_synced').notNull(),
createdAt: timestamp('created_at').notNull(),
@@ -130,7 +128,6 @@ export const workflow = pgTable(
isDeployed: boolean('is_deployed').notNull().default(false),
deployedState: json('deployed_state'),
deployedAt: timestamp('deployed_at'),
// When set, only this API key is authorized for execution
pinnedApiKey: text('pinned_api_key'),
collaborators: json('collaborators').notNull().default('[]'),
runCount: integer('run_count').notNull().default(0),
@@ -285,24 +282,14 @@ export const workflowExecutionLogs = pgTable(
.references(() => workflowExecutionSnapshots.id),
level: text('level').notNull(), // 'info', 'error'
message: text('message').notNull(),
trigger: text('trigger').notNull(), // 'api', 'webhook', 'schedule', 'manual', 'chat'
startedAt: timestamp('started_at').notNull(),
endedAt: timestamp('ended_at'),
totalDurationMs: integer('total_duration_ms'),
blockCount: integer('block_count').notNull().default(0),
successCount: integer('success_count').notNull().default(0),
errorCount: integer('error_count').notNull().default(0),
skippedCount: integer('skipped_count').notNull().default(0),
totalCost: decimal('total_cost', { precision: 10, scale: 6 }),
totalInputCost: decimal('total_input_cost', { precision: 10, scale: 6 }),
totalOutputCost: decimal('total_output_cost', { precision: 10, scale: 6 }),
totalTokens: integer('total_tokens'),
metadata: jsonb('metadata').notNull().default('{}'),
executionData: jsonb('execution_data').notNull().default('{}'),
cost: jsonb('cost'),
files: jsonb('files'), // File metadata for execution files
createdAt: timestamp('created_at').notNull().defaultNow(),
},
@@ -465,6 +452,10 @@ export const userStats = pgTable('user_stats', {
billingPeriodStart: timestamp('billing_period_start').defaultNow(), // When current billing period started
billingPeriodEnd: timestamp('billing_period_end'), // When current billing period ends
lastPeriodCost: decimal('last_period_cost').default('0'), // Usage from previous billing period
// Copilot usage tracking
totalCopilotCost: decimal('total_copilot_cost').notNull().default('0'),
totalCopilotTokens: integer('total_copilot_tokens').notNull().default(0),
totalCopilotCalls: integer('total_copilot_calls').notNull().default(0),
lastActive: timestamp('last_active').notNull().defaultNow(),
})
@@ -1178,3 +1169,25 @@ export const copilotFeedback = pgTable(
createdAtIdx: index('copilot_feedback_created_at_idx').on(table.createdAt),
})
)
export const copilotApiKeys = pgTable(
'copilot_api_keys',
{
id: uuid('id').primaryKey().defaultRandom(),
userId: text('user_id')
.notNull()
.references(() => user.id, { onDelete: 'cascade' }),
apiKeyEncrypted: text('api_key_encrypted').notNull(),
apiKeyLookup: text('api_key_lookup').notNull(),
},
(table) => ({
apiKeyEncryptedHashIdx: index('copilot_api_keys_api_key_encrypted_hash_idx').using(
'hash',
table.apiKeyEncrypted
),
apiKeyLookupHashIdx: index('copilot_api_keys_lookup_hash_idx').using(
'hash',
table.apiKeyLookup
),
})
)
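Both columns get hash indexes, which only accelerate equality predicates; that suggests the table is only ever queried by an exact lookup value, never by range. A hypothetical lookup sketch built on the table definition above (findCopilotKey is not a function in this repo):

import { eq } from 'drizzle-orm'
import { db } from '@/db'
import { copilotApiKeys } from '@/db/schema'

async function findCopilotKey(lookup: string) {
  // Equality predicate on api_key_lookup, served by copilot_api_keys_lookup_hash_idx
  const [row] = await db
    .select()
    .from(copilotApiKeys)
    .where(eq(copilotApiKeys.apiKeyLookup, lookup))
    .limit(1)
  return row ?? null
}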

View File

@@ -317,11 +317,13 @@ export class AgentBlockHandler implements BlockHandler {
}
private addUserPrompt(messages: Message[], userPrompt: any) {
let content = userPrompt
let content: string
if (typeof userPrompt === 'object' && userPrompt.input) {
content = userPrompt.input
content = String(userPrompt.input)
} else if (typeof userPrompt === 'object') {
content = JSON.stringify(userPrompt)
} else {
content = String(userPrompt)
}
messages.push({ role: 'user', content })
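The rewrite above guarantees content is always a string before it is pushed as the user message. A few illustrative mappings of the coercion (inputs are made up):

// { input: 'hello' }  -> 'hello'
// { input: 42 }       -> '42'            (String(42))
// { input: 0 }        -> '{"input":0}'   (falsy input falls through to JSON.stringify)
// { foo: 'bar' }      -> '{"foo":"bar"}'
// 'plain text'        -> 'plain text'
// 123                 -> '123'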

View File

@@ -1167,9 +1167,9 @@ export const auth = betterAuth({
stripeClient,
stripeWebhookSecret: env.STRIPE_WEBHOOK_SECRET || '',
createCustomerOnSignUp: true,
onCustomerCreate: async ({ customer, stripeCustomer, user }, request) => {
onCustomerCreate: async ({ stripeCustomer, user }, request) => {
logger.info('Stripe customer created', {
customerId: customer.id,
customerId: stripeCustomer.id,
userId: user.id,
})

View File

@@ -71,6 +71,7 @@ export interface SendMessageRequest {
export interface ApiResponse {
success: boolean
error?: string
status?: number
}
/**
@@ -86,7 +87,7 @@ export interface StreamingResponse extends ApiResponse {
async function handleApiError(response: Response, defaultMessage: string): Promise<string> {
try {
const data = await response.json()
return data.error || defaultMessage
return (data && (data.error || data.message)) || defaultMessage
} catch {
return `${defaultMessage} (${response.status})`
}
@@ -111,11 +112,19 @@ export async function sendStreamingMessage(
if (!response.ok) {
const errorMessage = await handleApiError(response, 'Failed to send streaming message')
throw new Error(errorMessage)
return {
success: false,
error: errorMessage,
status: response.status,
}
}
if (!response.body) {
throw new Error('No response body received')
return {
success: false,
error: 'No response body received',
status: 500,
}
}
return {
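With this change, sendStreamingMessage reports failures through the ApiResponse shape (success, error, and the new status field) instead of throwing, so callers can branch on the upstream HTTP status. A hedged consumer sketch; the import path is an assumption, only the StreamingResponse shape comes from this file:

import type { StreamingResponse } from '@/lib/copilot/api' // path assumed for illustration

function classifyFailure(result: StreamingResponse): 'auth' | 'usage' | 'other' | null {
  if (result.success) return null
  if (result.status === 401) return 'auth' // e.g. prompt the user to configure a copilot API key
  if (result.status === 402) return 'usage' // e.g. prompt the user to upgrade or top up credits
  return 'other'
}

The copilot store later in this diff does this kind of branching to render 401/402 as assistant messages rather than global errors.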

View File

@@ -402,11 +402,11 @@ export const SERVER_TOOL_METADATA: Record<ServerToolId, ToolMetadata> = {
id: SERVER_TOOL_IDS.GET_OAUTH_CREDENTIALS,
displayConfig: {
states: {
executing: { displayName: 'Retrieving OAuth credentials', icon: 'spinner' },
success: { displayName: 'Retrieved OAuth credentials', icon: 'key' },
rejected: { displayName: 'Skipped retrieving OAuth credentials', icon: 'skip' },
errored: { displayName: 'Failed to retrieve OAuth credentials', icon: 'error' },
aborted: { displayName: 'Retrieving OAuth credentials aborted', icon: 'x' },
executing: { displayName: 'Retrieving login IDs', icon: 'spinner' },
success: { displayName: 'Retrieved login IDs', icon: 'key' },
rejected: { displayName: 'Skipped retrieving login IDs', icon: 'skip' },
errored: { displayName: 'Failed to retrieve login IDs', icon: 'error' },
aborted: { displayName: 'Retrieving login IDs aborted', icon: 'x' },
},
},
schema: {

View File

@@ -1,6 +1,7 @@
import { eq } from 'drizzle-orm'
import { jwtDecode } from 'jwt-decode'
import { createLogger } from '@/lib/logs/console/logger'
import { refreshTokenIfNeeded } from '@/app/api/auth/oauth/utils'
import { db } from '@/db'
import { account, user } from '@/db/schema'
import { BaseCopilotTool } from '../base'
@@ -15,6 +16,7 @@ interface OAuthCredentialItem {
provider: string
lastUsed: string
isDefault: boolean
accessToken: string | null
}
interface GetOAuthCredentialsResult {
@@ -55,6 +57,9 @@ class GetOAuthCredentialsTool extends BaseCopilotTool<
const credentials: OAuthCredentialItem[] = []
// Short request id for log correlation
const requestId = crypto.randomUUID().slice(0, 8)
for (const acc of accounts) {
const providerId = acc.providerId
const [baseProvider, featureType = 'default'] = providerId.split('-')
@@ -90,12 +95,26 @@ class GetOAuthCredentialsTool extends BaseCopilotTool<
displayName = `${acc.accountId} (${baseProvider})`
}
// Ensure we return a valid access token, refreshing if needed
let accessToken: string | null = acc.accessToken ?? null
try {
const { accessToken: refreshedToken } = await refreshTokenIfNeeded(
requestId,
acc as any,
acc.id
)
accessToken = refreshedToken || accessToken
} catch (_error) {
// If refresh fails, we still return whatever we had (may be null)
}
credentials.push({
id: acc.id,
name: displayName,
provider: providerId,
lastUsed: acc.updatedAt.toISOString(),
isDefault: featureType === 'default',
accessToken,
})
}

View File

@@ -1,5 +1,6 @@
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { SIM_AGENT_API_URL_DEFAULT } from '@/lib/sim-agent'
import { getAllBlocks } from '@/blocks/registry'
import type { BlockConfig } from '@/blocks/types'
import { resolveOutputType } from '@/blocks/utils'
@@ -7,8 +8,7 @@ import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/w
import { BaseCopilotTool } from '../base'
// Sim Agent API configuration
const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || 'http://localhost:8000'
const SIM_AGENT_API_KEY = env.SIM_AGENT_API_KEY
const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT
interface BuildWorkflowParams {
yamlContent: string
@@ -71,7 +71,6 @@ async function buildWorkflow(params: BuildWorkflowParams): Promise<BuildWorkflow
method: 'POST',
headers: {
'Content-Type': 'application/json',
...(SIM_AGENT_API_KEY && { 'x-api-key': SIM_AGENT_API_KEY }),
},
body: JSON.stringify({
yamlContent,

View File

@@ -1,5 +1,6 @@
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { SIM_AGENT_API_URL_DEFAULT } from '@/lib/sim-agent'
import { getAllBlocks } from '@/blocks/registry'
import type { BlockConfig } from '@/blocks/types'
import { resolveOutputType } from '@/blocks/utils'
@@ -8,8 +9,7 @@ import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/w
const logger = createLogger('EditWorkflowAPI')
// Sim Agent API configuration
const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || 'http://localhost:8000'
const SIM_AGENT_API_KEY = env.SIM_AGENT_API_KEY
const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT
// Types for operations
interface EditWorkflowOperation {
@@ -46,7 +46,6 @@ async function applyOperationsToYaml(
method: 'POST',
headers: {
'Content-Type': 'application/json',
...(SIM_AGENT_API_KEY && { 'x-api-key': SIM_AGENT_API_KEY }),
},
body: JSON.stringify({
yamlContent: currentYaml,
@@ -353,24 +352,8 @@ async function getUserWorkflow(workflowId: string): Promise<string> {
}
})
})
} else if (workflowRecord.state) {
// Fallback to JSON blob
const jsonState = workflowRecord.state as any
workflowState = {
blocks: jsonState.blocks || {},
edges: jsonState.edges || [],
loops: jsonState.loops || {},
parallels: jsonState.parallels || {},
}
// For JSON blob, subblock values are embedded in the block state
Object.entries((workflowState.blocks as any) || {}).forEach(([blockId, block]) => {
subBlockValues[blockId] = {}
Object.entries((block as any).subBlocks || {}).forEach(([subBlockId, subBlock]) => {
if ((subBlock as any).value !== undefined) {
subBlockValues[blockId][subBlockId] = (subBlock as any).value
}
})
})
} else {
throw new Error('Workflow has no normalized data')
}
if (!workflowState || !workflowState.blocks) {
@@ -481,7 +464,6 @@ async function editWorkflow(params: EditWorkflowParams): Promise<EditWorkflowRes
method: 'POST',
headers: {
'Content-Type': 'application/json',
...(SIM_AGENT_API_KEY && { 'x-api-key': SIM_AGENT_API_KEY }),
},
body: JSON.stringify({
workflowState,

View File

@@ -39,15 +39,10 @@ interface ExecutionEntry {
id: string
executionId: string
level: string
message: string
trigger: string
startedAt: string
endedAt: string | null
durationMs: number | null
blockCount: number
successCount: number
errorCount: number
skippedCount: number
totalCost: number | null
totalTokens: number | null
blockExecutions: BlockExecution[]
@@ -124,18 +119,12 @@ async function getWorkflowConsole(
id: workflowExecutionLogs.id,
executionId: workflowExecutionLogs.executionId,
level: workflowExecutionLogs.level,
message: workflowExecutionLogs.message,
trigger: workflowExecutionLogs.trigger,
startedAt: workflowExecutionLogs.startedAt,
endedAt: workflowExecutionLogs.endedAt,
totalDurationMs: workflowExecutionLogs.totalDurationMs,
blockCount: workflowExecutionLogs.blockCount,
successCount: workflowExecutionLogs.successCount,
errorCount: workflowExecutionLogs.errorCount,
skippedCount: workflowExecutionLogs.skippedCount,
totalCost: workflowExecutionLogs.totalCost,
totalTokens: workflowExecutionLogs.totalTokens,
metadata: workflowExecutionLogs.metadata,
executionData: workflowExecutionLogs.executionData,
cost: workflowExecutionLogs.cost,
})
.from(workflowExecutionLogs)
.where(eq(workflowExecutionLogs.workflowId, workflowId))
@@ -144,9 +133,8 @@ async function getWorkflowConsole(
// Format the response with detailed block execution data
const formattedEntries: ExecutionEntry[] = executionLogs.map((log) => {
// Extract trace spans from metadata
const metadata = log.metadata as any
const traceSpans = metadata?.traceSpans || []
// Extract trace spans from execution data
const traceSpans = (log.executionData as any)?.traceSpans || []
const blockExecutions = extractBlockExecutionsFromTraceSpans(traceSpans)
// Try to find the final output from the last executed block
@@ -172,17 +160,12 @@ async function getWorkflowConsole(
id: log.id,
executionId: log.executionId,
level: log.level,
message: log.message,
trigger: log.trigger,
startedAt: log.startedAt.toISOString(),
endedAt: log.endedAt?.toISOString() || null,
durationMs: log.totalDurationMs,
blockCount: log.blockCount,
successCount: log.successCount,
errorCount: log.errorCount,
skippedCount: log.skippedCount || 0,
totalCost: log.totalCost ? Number.parseFloat(log.totalCost.toString()) : null,
totalTokens: log.totalTokens,
totalCost: (log.cost as any)?.total ?? null,
totalTokens: (log.cost as any)?.tokens?.total ?? null,
blockExecutions: includeDetails ? blockExecutions : [],
output: finalOutput,
}
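The console route now derives totals from the consolidated cost jsonb rather than the dropped columns. A small typed read that avoids the as any casts; the local type below mirrors the cost shape added to WorkflowExecutionLog later in this diff and is written inline only to keep the sketch self-contained:

interface ExecutionCost {
  total?: number
  input?: number
  output?: number
  tokens?: { prompt?: number; completion?: number; total?: number }
}

function readCostTotals(log: { cost: unknown }) {
  const cost = (log.cost ?? null) as ExecutionCost | null
  return {
    totalCost: cost?.total ?? null,
    totalTokens: cost?.tokens?.total ?? null,
  }
}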

View File

@@ -26,7 +26,6 @@ describe('Email Validation', () => {
it.concurrent('should accept legitimate business emails', async () => {
const legitimateEmails = [
'test@gmail.com',
'noreply@gmail.com',
'no-reply@yahoo.com',
'user12345@outlook.com',
'longusernamehere@gmail.com',

View File

@@ -24,8 +24,10 @@ export const env = createEnv({
ALLOWED_LOGIN_DOMAINS: z.string().optional(), // Comma-separated list of allowed email domains for login
ENCRYPTION_KEY: z.string().min(32), // Key for encrypting sensitive data
INTERNAL_API_SECRET: z.string().min(32), // Secret for internal API authentication
SIM_AGENT_API_KEY: z.string().min(1).optional(), // Secret for internal sim agent API authentication
COPILOT_API_KEY: z.string().min(1).optional(), // Secret for copilot API authentication
SIM_AGENT_API_URL: z.string().url().optional(), // URL for internal sim agent API
AGENT_API_DB_ENCRYPTION_KEY: z.string().min(32).optional(), // Key for encrypting sensitive data for sim agent
AGENT_API_NETWORK_ENCRYPTION_KEY: z.string().min(32).optional(), // Key for encrypting sensitive data for sim agent
// Database & Storage
POSTGRES_URL: z.string().url().optional(), // Alternative PostgreSQL connection string
@@ -69,7 +71,6 @@ export const env = createEnv({
// Monitoring & Analytics
TELEMETRY_ENDPOINT: z.string().url().optional(), // Custom telemetry/analytics endpoint
COST_MULTIPLIER: z.number().optional(), // Multiplier for cost calculations
COPILOT_COST_MULTIPLIER: z.number().optional(), // Multiplier for copilot cost calculations
SENTRY_ORG: z.string().optional(), // Sentry organization for error tracking
SENTRY_PROJECT: z.string().optional(), // Sentry project for error tracking
SENTRY_AUTH_TOKEN: z.string().optional(), // Sentry authentication token

View File

@@ -1,7 +1,7 @@
/**
* Environment utility functions for consistent environment detection across the application
*/
import { env, getEnv, isTruthy } from './env'
import { env, isTruthy } from './env'
/**
* Is the application running in production mode
@@ -21,13 +21,14 @@ export const isTest = env.NODE_ENV === 'test'
/**
* Is this the hosted version of the application
*/
export const isHosted = env.NEXT_PUBLIC_APP_URL === 'https://www.sim.ai'
export const isHosted =
env.NEXT_PUBLIC_APP_URL === 'https://www.sim.ai' ||
env.NEXT_PUBLIC_APP_URL === 'https://www.staging.sim.ai'
/**
* Is billing enforcement enabled
*/
export const isBillingEnabled =
isTruthy(getEnv('NEXT_PUBLIC_BILLING_ENABLED')) || isTruthy(env.BILLING_ENABLED)
export const isBillingEnabled = isTruthy(env.BILLING_ENABLED)
/**
* Get cost multiplier based on environment

View File

@@ -59,20 +59,11 @@ export class ExecutionLogger implements IExecutionLoggerService {
executionId,
stateSnapshotId: snapshotResult.snapshot.id,
level: 'info',
message: `${this.getTriggerPrefix(trigger.type)} execution started`,
trigger: trigger.type,
startedAt: startTime,
endedAt: null,
totalDurationMs: null,
blockCount: 0,
successCount: 0,
errorCount: 0,
skippedCount: 0,
totalCost: null,
totalInputCost: null,
totalOutputCost: null,
totalTokens: null,
metadata: {
executionData: {
environment,
trigger,
},
@@ -88,20 +79,11 @@ export class ExecutionLogger implements IExecutionLoggerService {
executionId: workflowLog.executionId,
stateSnapshotId: workflowLog.stateSnapshotId,
level: workflowLog.level as 'info' | 'error',
message: workflowLog.message,
trigger: workflowLog.trigger as ExecutionTrigger['type'],
startedAt: workflowLog.startedAt.toISOString(),
endedAt: workflowLog.endedAt?.toISOString() || workflowLog.startedAt.toISOString(),
totalDurationMs: workflowLog.totalDurationMs || 0,
blockCount: workflowLog.blockCount,
successCount: workflowLog.successCount,
errorCount: workflowLog.errorCount,
skippedCount: workflowLog.skippedCount,
totalCost: Number(workflowLog.totalCost) || 0,
totalInputCost: Number(workflowLog.totalInputCost) || 0,
totalOutputCost: Number(workflowLog.totalOutputCost) || 0,
totalTokens: workflowLog.totalTokens || 0,
metadata: workflowLog.metadata as WorkflowExecutionLog['metadata'],
executionData: workflowLog.executionData as WorkflowExecutionLog['executionData'],
createdAt: workflowLog.createdAt.toISOString(),
},
snapshot: snapshotResult.snapshot,
@@ -151,7 +133,6 @@ export class ExecutionLogger implements IExecutionLoggerService {
})
const level = hasErrors ? 'error' : 'info'
const message = hasErrors ? 'Workflow execution failed' : 'Workflow execution completed'
// Extract files from trace spans and final output
const executionFiles = this.extractFilesFromExecution(traceSpans, finalOutput)
@@ -160,19 +141,10 @@ export class ExecutionLogger implements IExecutionLoggerService {
.update(workflowExecutionLogs)
.set({
level,
message,
endedAt: new Date(endedAt),
totalDurationMs,
blockCount: 0,
successCount: 0,
errorCount: 0,
skippedCount: 0,
totalCost: costSummary.totalCost.toString(),
totalInputCost: costSummary.totalInputCost.toString(),
totalOutputCost: costSummary.totalOutputCost.toString(),
totalTokens: costSummary.totalTokens,
files: executionFiles.length > 0 ? executionFiles : null,
metadata: {
executionData: {
traceSpans,
finalOutput,
tokenBreakdown: {
@@ -182,6 +154,17 @@ export class ExecutionLogger implements IExecutionLoggerService {
},
models: costSummary.models,
},
cost: {
total: costSummary.totalCost,
input: costSummary.totalInputCost,
output: costSummary.totalOutputCost,
tokens: {
prompt: costSummary.totalPromptTokens,
completion: costSummary.totalCompletionTokens,
total: costSummary.totalTokens,
},
models: costSummary.models,
},
})
.where(eq(workflowExecutionLogs.executionId, executionId))
.returning()
@@ -205,20 +188,12 @@ export class ExecutionLogger implements IExecutionLoggerService {
executionId: updatedLog.executionId,
stateSnapshotId: updatedLog.stateSnapshotId,
level: updatedLog.level as 'info' | 'error',
message: updatedLog.message,
trigger: updatedLog.trigger as ExecutionTrigger['type'],
startedAt: updatedLog.startedAt.toISOString(),
endedAt: updatedLog.endedAt?.toISOString() || endedAt,
totalDurationMs: updatedLog.totalDurationMs || totalDurationMs,
blockCount: updatedLog.blockCount,
successCount: updatedLog.successCount,
errorCount: updatedLog.errorCount,
skippedCount: updatedLog.skippedCount,
totalCost: Number(updatedLog.totalCost) || 0,
totalInputCost: Number(updatedLog.totalInputCost) || 0,
totalOutputCost: Number(updatedLog.totalOutputCost) || 0,
totalTokens: updatedLog.totalTokens || 0,
metadata: updatedLog.metadata as WorkflowExecutionLog['metadata'],
executionData: updatedLog.executionData as WorkflowExecutionLog['executionData'],
cost: updatedLog.cost as any,
createdAt: updatedLog.createdAt.toISOString(),
}
}
@@ -238,20 +213,12 @@ export class ExecutionLogger implements IExecutionLoggerService {
executionId: workflowLog.executionId,
stateSnapshotId: workflowLog.stateSnapshotId,
level: workflowLog.level as 'info' | 'error',
message: workflowLog.message,
trigger: workflowLog.trigger as ExecutionTrigger['type'],
startedAt: workflowLog.startedAt.toISOString(),
endedAt: workflowLog.endedAt?.toISOString() || workflowLog.startedAt.toISOString(),
totalDurationMs: workflowLog.totalDurationMs || 0,
blockCount: workflowLog.blockCount,
successCount: workflowLog.successCount,
errorCount: workflowLog.errorCount,
skippedCount: workflowLog.skippedCount,
totalCost: Number(workflowLog.totalCost) || 0,
totalInputCost: Number(workflowLog.totalInputCost) || 0,
totalOutputCost: Number(workflowLog.totalOutputCost) || 0,
totalTokens: workflowLog.totalTokens || 0,
metadata: workflowLog.metadata as WorkflowExecutionLog['metadata'],
executionData: workflowLog.executionData as WorkflowExecutionLog['executionData'],
cost: workflowLog.cost as any,
createdAt: workflowLog.createdAt.toISOString(),
}
}

View File

@@ -82,19 +82,10 @@ export interface WorkflowExecutionLog {
executionId: string
stateSnapshotId: string
level: 'info' | 'error'
message: string
trigger: ExecutionTrigger['type']
startedAt: string
endedAt: string
totalDurationMs: number
blockCount: number
successCount: number
errorCount: number
skippedCount: number
totalCost: number
totalInputCost: number
totalOutputCost: number
totalTokens: number
files?: Array<{
id: string
name: string
@@ -107,9 +98,10 @@ export interface WorkflowExecutionLog {
storageProvider?: 's3' | 'blob' | 'local'
bucketName?: string
}>
metadata: {
environment: ExecutionEnvironment
trigger: ExecutionTrigger
// Execution details
executionData: {
environment?: ExecutionEnvironment
trigger?: ExecutionTrigger
traceSpans?: TraceSpan[]
errorDetails?: {
blockId: string
@@ -118,6 +110,22 @@ export interface WorkflowExecutionLog {
stackTrace?: string
}
}
// Top-level cost information
cost?: {
input?: number
output?: number
total?: number
tokens?: { prompt?: number; completion?: number; total?: number }
models?: Record<
string,
{
input?: number
output?: number
total?: number
tokens?: { prompt?: number; completion?: number; total?: number }
}
>
}
duration?: string
createdAt: string
}
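An illustrative value for the optional cost field above, assuming WorkflowExecutionLog is imported from this module (all numbers are made up):

const exampleCost: WorkflowExecutionLog['cost'] = {
  total: 0.0042,
  input: 0.0012,
  output: 0.003,
  tokens: { prompt: 812, completion: 245, total: 1057 },
  models: {
    'gpt-4o': { input: 0.0012, output: 0.003, total: 0.0042, tokens: { prompt: 812, completion: 245, total: 1057 } },
  },
}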

View File

@@ -1,12 +1,11 @@
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { SIM_AGENT_API_URL_DEFAULT } from '@/lib/sim-agent'
const logger = createLogger('SimAgentClient')
// Base URL for the sim-agent service
const SIM_AGENT_BASE_URL =
process.env.NODE_ENV === 'development'
? 'http://localhost:8000'
: process.env.NEXT_PUBLIC_SIM_AGENT_URL || 'https://sim-agent.vercel.app'
const SIM_AGENT_BASE_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT
export interface SimAgentRequest {
workflowId: string
@@ -28,31 +27,6 @@ class SimAgentClient {
this.baseUrl = SIM_AGENT_BASE_URL
}
/**
* Get the API key lazily to ensure environment variables are loaded
*/
private getApiKey(): string {
// Only try server-side env var (never expose to client)
let apiKey = process.env.SIM_AGENT_API_KEY || ''
// If not found, try importing env library as fallback
if (!apiKey) {
try {
const { env } = require('@/lib/env')
apiKey = env.SIM_AGENT_API_KEY || ''
} catch (e) {
// env library not available or failed to load
}
}
if (!apiKey && typeof window === 'undefined') {
// Only warn on server-side where API key should be available
logger.warn('SIM_AGENT_API_KEY not configured')
}
return apiKey
}
/**
* Make a request to the sim-agent service
*/
@@ -66,23 +40,20 @@ class SimAgentClient {
} = {}
): Promise<SimAgentResponse<T>> {
const requestId = crypto.randomUUID().slice(0, 8)
const { method = 'POST', body, headers = {}, apiKey: providedApiKey } = options
const { method = 'POST', body, headers = {} } = options
try {
const url = `${this.baseUrl}${endpoint}`
// Use provided API key or try to get it from environment
const apiKey = providedApiKey || this.getApiKey()
const requestHeaders: Record<string, string> = {
'Content-Type': 'application/json',
...(apiKey && { 'x-api-key': apiKey }),
...headers,
}
logger.info(`[${requestId}] Making request to sim-agent`, {
url,
method,
hasApiKey: !!apiKey,
hasBody: !!body,
})
@@ -157,7 +128,6 @@ class SimAgentClient {
getConfig() {
return {
baseUrl: this.baseUrl,
hasApiKey: !!this.getApiKey(),
environment: process.env.NODE_ENV,
}
}

View File

@@ -0,0 +1 @@
export const SIM_AGENT_API_URL_DEFAULT = 'https://agent.sim.ai'

View File

@@ -2,6 +2,7 @@
export type { SimAgentRequest, SimAgentResponse } from './client'
export { SimAgentClient, simAgentClient } from './client'
export { SIM_AGENT_API_URL_DEFAULT } from './constants'
// Import for default export
import { simAgentClient } from './client'

View File

@@ -1,5 +1,6 @@
import { createLogger } from '@/lib/logs/console/logger'
import type { BlockState, WorkflowState } from '@/stores/workflows/workflow/types'
import type { BlockWithDiff } from './types'
const logger = createLogger('WorkflowDiffEngine')
@@ -334,10 +335,10 @@ export class WorkflowDiffEngine {
for (const [blockId, block] of Object.entries(state.blocks)) {
const cleanBlock: BlockState = { ...block }
// Remove diff markers using bracket notation to avoid TypeScript errors
;(cleanBlock as any).is_diff = undefined
;(cleanBlock as any).field_diff = undefined
// Remove diff markers using proper typing
const blockWithDiff = cleanBlock as BlockState & BlockWithDiff
blockWithDiff.is_diff = undefined
blockWithDiff.field_diffs = undefined
// Ensure outputs is never null/undefined
if (cleanBlock.outputs === undefined || cleanBlock.outputs === null) {

View File

@@ -0,0 +1,18 @@
/**
* Type definitions for workflow diff functionality
*/
export type DiffStatus = 'new' | 'edited' | undefined
export type FieldDiffStatus = 'changed' | 'unchanged'
export type EdgeDiffStatus = 'new' | 'deleted' | 'unchanged' | null
export interface BlockWithDiff {
is_diff?: DiffStatus
field_diffs?: Record<string, { changed_fields: string[]; unchanged_fields: string[] }>
}
export function hasDiffStatus(block: any): block is BlockWithDiff {
return block && typeof block === 'object' && ('is_diff' in block || 'field_diffs' in block)
}

View File

@@ -208,13 +208,13 @@ const nextConfig: NextConfig = {
source: '/((?!api|_next|_vercel|favicon|static|.*\\..*).*)',
destination: 'https://www.sim.ai/$1',
permanent: true,
has: [{ type: 'host', key: 'host', value: 'simstudio.ai' }],
has: [{ type: 'host' as const, value: 'simstudio.ai' }],
},
{
source: '/((?!api|_next|_vercel|favicon|static|.*\\..*).*)',
destination: 'https://www.sim.ai/$1',
permanent: true,
has: [{ type: 'host', key: 'host', value: 'www.simstudio.ai' }],
has: [{ type: 'host' as const, value: 'www.simstudio.ai' }],
}
)
}

View File

@@ -1,171 +0,0 @@
#!/usr/bin/env bun
import { db } from '@/db'
import { user, workflow, workspace } from '@/db/schema'
const testWorkflowState = {
blocks: {
'start-block-123': {
id: 'start-block-123',
type: 'starter',
name: 'Start',
position: {
x: 100,
y: 100,
},
subBlocks: {
startWorkflow: {
id: 'startWorkflow',
type: 'dropdown',
value: 'manual',
},
},
outputs: {
response: {
input: 'any',
},
},
enabled: true,
horizontalHandles: true,
isWide: false,
advancedMode: false,
height: 90,
},
'loop-block-456': {
id: 'loop-block-456',
type: 'loop',
name: 'For Loop',
position: {
x: 400,
y: 100,
},
subBlocks: {},
outputs: {},
enabled: true,
horizontalHandles: true,
isWide: false,
advancedMode: false,
height: 0,
data: {
width: 400,
height: 200,
type: 'loopNode',
},
},
'function-block-789': {
id: 'function-block-789',
type: 'function',
name: 'Return X',
position: {
x: 50,
y: 50,
},
subBlocks: {
code: {
id: 'code',
type: 'code',
value: "return 'X'",
},
},
outputs: {
response: {
result: 'any',
stdout: 'string',
},
},
enabled: true,
horizontalHandles: true,
isWide: false,
advancedMode: false,
height: 144,
data: {
parentId: 'loop-block-456',
extent: 'parent',
},
},
},
edges: [
{
id: 'edge-start-to-loop',
source: 'start-block-123',
target: 'loop-block-456',
sourceHandle: 'source',
targetHandle: 'target',
},
{
id: 'edge-loop-to-function',
source: 'loop-block-456',
target: 'function-block-789',
sourceHandle: 'loop-start-source',
targetHandle: 'target',
},
],
loops: {
'loop-block-456': {
id: 'loop-block-456',
nodes: ['function-block-789'],
iterations: 3,
loopType: 'for',
forEachItems: '',
},
},
parallels: {},
lastSaved: Date.now(),
isDeployed: false,
}
async function insertTestWorkflow() {
try {
console.log('🔍 Finding first workspace and user...')
// Get the first workspace
const workspaces = await db.select().from(workspace).limit(1)
if (workspaces.length === 0) {
throw new Error('No workspaces found. Please create a workspace first.')
}
// Get the first user
const users = await db.select().from(user).limit(1)
if (users.length === 0) {
throw new Error('No users found. Please create a user first.')
}
const workspaceId = workspaces[0].id
const userId = users[0].id
console.log(`✅ Using workspace: ${workspaceId}`)
console.log(`✅ Using user: ${userId}`)
// Insert workflow with old JSON state format
const testWorkflowId = `test-migration-workflow-${Date.now()}`
const now = new Date()
await db.insert(workflow).values({
id: testWorkflowId,
name: 'Test Migration Workflow (Old JSON Format)',
workspaceId: workspaceId,
userId: userId,
state: testWorkflowState, // This is the old JSON format
lastSynced: now,
createdAt: now,
updatedAt: now,
isDeployed: false,
isPublished: false,
})
console.log(`✅ Inserted test workflow with old JSON format: ${testWorkflowId}`)
console.log(`🌐 Access it at: http://localhost:3000/w/${testWorkflowId}`)
console.log('')
console.log('📋 Test steps:')
console.log('1. Open the workflow in your browser')
console.log('2. Verify it renders correctly with all blocks and connections')
console.log('3. Try editing some subblock values')
console.log('4. Run the migration script')
console.log('5. Verify it still works after migration')
} catch (error) {
console.error('❌ Error inserting test workflow:', error)
process.exit(1)
}
}
insertTestWorkflow()

View File

@@ -1,306 +0,0 @@
#!/usr/bin/env bun
import { readFileSync } from 'fs'
import { and, eq, inArray, isNotNull } from 'drizzle-orm'
import { nanoid } from 'nanoid'
import { db } from '@/db'
import { workflow, workflowBlocks, workflowEdges, workflowSubflows } from '@/db/schema'
interface WorkflowState {
blocks: Record<string, any>
edges: any[]
loops?: Record<string, any>
parallels?: Record<string, any>
lastSaved?: number
isDeployed?: boolean
}
async function migrateWorkflowStates(specificWorkflowIds?: string[] | null) {
try {
if (specificWorkflowIds) {
console.log(`🔍 Finding ${specificWorkflowIds.length} specific workflows...`)
} else {
console.log('🔍 Finding workflows with old JSON state format...')
}
// Build the where condition based on whether we have specific IDs
const whereCondition = specificWorkflowIds
? and(
isNotNull(workflow.state), // Has JSON state
inArray(workflow.id, specificWorkflowIds) // Only specific IDs
)
: and(
isNotNull(workflow.state) // Has JSON state
// We'll check for normalized data existence per workflow
)
// Find workflows that have state but no normalized table entries
const workflowsToMigrate = await db
.select({
id: workflow.id,
name: workflow.name,
state: workflow.state,
})
.from(workflow)
.where(whereCondition)
console.log(`📊 Found ${workflowsToMigrate.length} workflows with JSON state`)
if (specificWorkflowIds) {
const foundIds = workflowsToMigrate.map((w) => w.id)
const missingIds = specificWorkflowIds.filter((id) => !foundIds.includes(id))
if (missingIds.length > 0) {
console.log(`⚠️ Warning: ${missingIds.length} specified workflow IDs not found:`)
missingIds.forEach((id) => console.log(` - ${id}`))
}
console.log('')
}
let migratedCount = 0
let skippedCount = 0
let errorCount = 0
for (const wf of workflowsToMigrate) {
try {
// Check if this workflow already has normalized data
const existingBlocks = await db
.select({ id: workflowBlocks.id })
.from(workflowBlocks)
.where(eq(workflowBlocks.workflowId, wf.id))
.limit(1)
if (existingBlocks.length > 0) {
console.log(`⏭️ Skipping ${wf.name} (${wf.id}) - already has normalized data`)
skippedCount++
continue
}
console.log(`🔄 Migrating ${wf.name} (${wf.id})...`)
const state = wf.state as WorkflowState
if (!state || !state.blocks) {
console.log(`⚠️ Skipping ${wf.name} - invalid state format`)
skippedCount++
continue
}
// Clean up invalid blocks (those without an id field) before migration
const originalBlockCount = Object.keys(state.blocks).length
const validBlocks: Record<string, any> = {}
let removedBlockCount = 0
for (const [blockKey, block] of Object.entries(state.blocks)) {
if (block && typeof block === 'object' && block.id) {
// Valid block - has an id field
validBlocks[blockKey] = block
} else {
// Invalid block - missing id field
console.log(` 🗑️ Removing invalid block ${blockKey} (no id field)`)
removedBlockCount++
}
}
if (removedBlockCount > 0) {
console.log(
` 🧹 Cleaned up ${removedBlockCount} invalid blocks (${originalBlockCount} → ${Object.keys(validBlocks).length})`
)
state.blocks = validBlocks
}
await db.transaction(async (tx) => {
// Migrate blocks - generate new IDs and create mapping
const blocks = Object.values(state.blocks)
console.log(` 📦 Migrating ${blocks.length} blocks...`)
// Create mapping from old block IDs to new block IDs
const blockIdMapping: Record<string, string> = {}
for (const block of blocks) {
const newBlockId = nanoid()
blockIdMapping[block.id] = newBlockId
await tx.insert(workflowBlocks).values({
id: newBlockId,
workflowId: wf.id,
type: block.type,
name: block.name,
positionX: String(block.position?.x || 0),
positionY: String(block.position?.y || 0),
enabled: block.enabled ?? true,
horizontalHandles: block.horizontalHandles ?? true,
isWide: block.isWide ?? false,
advancedMode: block.advancedMode ?? false,
triggerMode: block.triggerMode ?? false,
height: String(block.height || 0),
subBlocks: block.subBlocks || {},
outputs: block.outputs || {},
data: block.data || {},
parentId: block.data?.parentId ? blockIdMapping[block.data.parentId] || null : null,
})
}
// Migrate edges - use new block IDs
const edges = state.edges || []
console.log(` 🔗 Migrating ${edges.length} edges...`)
for (const edge of edges) {
const newSourceId = blockIdMapping[edge.source]
const newTargetId = blockIdMapping[edge.target]
// Skip edges that reference blocks that don't exist in our mapping
if (!newSourceId || !newTargetId) {
console.log(` ⚠️ Skipping edge ${edge.id} - references missing blocks`)
continue
}
await tx.insert(workflowEdges).values({
id: nanoid(),
workflowId: wf.id,
sourceBlockId: newSourceId,
targetBlockId: newTargetId,
sourceHandle: edge.sourceHandle || null,
targetHandle: edge.targetHandle || null,
})
}
// Migrate loops - update node IDs to use new block IDs
const loops = state.loops || {}
const loopIds = Object.keys(loops)
console.log(` 🔄 Migrating ${loopIds.length} loops...`)
for (const loopId of loopIds) {
const loop = loops[loopId]
// Map old node IDs to new block IDs
const updatedNodes = (loop.nodes || [])
.map((nodeId: string) => blockIdMapping[nodeId])
.filter(Boolean)
await tx.insert(workflowSubflows).values({
id: nanoid(),
workflowId: wf.id,
type: 'loop',
config: {
id: loop.id,
nodes: updatedNodes,
iterationCount: loop.iterations || 5,
iterationType: loop.loopType || 'for',
collection: loop.forEachItems || '',
},
})
}
// Migrate parallels - update node IDs to use new block IDs
const parallels = state.parallels || {}
const parallelIds = Object.keys(parallels)
console.log(` ⚡ Migrating ${parallelIds.length} parallels...`)
for (const parallelId of parallelIds) {
const parallel = parallels[parallelId]
// Map old node IDs to new block IDs
const updatedNodes = (parallel.nodes || [])
.map((nodeId: string) => blockIdMapping[nodeId])
.filter(Boolean)
await tx.insert(workflowSubflows).values({
id: nanoid(),
workflowId: wf.id,
type: 'parallel',
config: {
id: parallel.id,
nodes: updatedNodes,
parallelCount: 2, // Default parallel count
collection: parallel.distribution || '',
},
})
}
})
console.log(`✅ Successfully migrated ${wf.name}`)
migratedCount++
} catch (error) {
console.error(`❌ Error migrating ${wf.name} (${wf.id}):`, error)
errorCount++
}
}
console.log('')
console.log('📊 Migration Summary:')
console.log(`✅ Migrated: ${migratedCount} workflows`)
console.log(`⏭️ Skipped: ${skippedCount} workflows`)
console.log(`❌ Errors: ${errorCount} workflows`)
console.log('')
if (migratedCount > 0) {
console.log('🎉 Migration completed successfully!')
console.log('')
console.log('📋 Next steps:')
console.log('1. Test the migrated workflows in your browser')
console.log('2. Verify all blocks, edges, and subflows work correctly')
console.log('3. Check that editing and collaboration still work')
console.log('4. Once confirmed, the workflow.state JSON field can be deprecated')
}
} catch (error) {
console.error('❌ Migration failed:', error)
process.exit(1)
}
}
// Add command line argument parsing
const args = process.argv.slice(2)
const dryRun = args.includes('--dry-run')
const showHelp = args.includes('--help') || args.includes('-h')
if (showHelp) {
console.log('🔄 Workflow State Migration Script')
console.log('')
console.log('Usage:')
console.log(' bun run scripts/migrate-workflow-states.ts [options]')
console.log('')
console.log('Options:')
console.log(' --dry-run Show what would be migrated without making changes')
console.log(' --file <path> Migrate only workflow IDs listed in file (comma-separated)')
console.log(' --help, -h Show this help message')
console.log('')
console.log('Examples:')
console.log(' bun run scripts/migrate-workflow-states.ts')
console.log(' bun run scripts/migrate-workflow-states.ts --dry-run')
console.log(' bun run scripts/migrate-workflow-states.ts --file workflow-ids.txt')
console.log(' bun run scripts/migrate-workflow-states.ts --dry-run --file workflow-ids.txt')
console.log('')
console.log('File format (workflow-ids.txt):')
console.log(' abc-123,def-456,ghi-789')
console.log('')
process.exit(0)
}
// Parse --file flag for workflow IDs
let specificWorkflowIds: string[] | null = null
const fileIndex = args.findIndex((arg) => arg === '--file')
if (fileIndex !== -1 && args[fileIndex + 1]) {
const filePath = args[fileIndex + 1]
try {
console.log(`📁 Reading workflow IDs from file: ${filePath}`)
const fileContent = readFileSync(filePath, 'utf-8')
specificWorkflowIds = fileContent
.split(',')
.map((id) => id.trim())
.filter((id) => id.length > 0)
console.log(`📋 Found ${specificWorkflowIds.length} workflow IDs in file`)
console.log('')
} catch (error) {
console.error(`❌ Error reading file ${filePath}:`, error)
process.exit(1)
}
}
if (dryRun) {
console.log('🔍 DRY RUN MODE - No changes will be made')
console.log('')
}
if (specificWorkflowIds) {
console.log('🎯 TARGETED MIGRATION - Only migrating specified workflow IDs')
console.log('')
}
migrateWorkflowStates(specificWorkflowIds)

View File

@@ -125,15 +125,11 @@ export async function getWorkflowState(workflowId: string) {
if (normalizedData) {
// Use normalized data as source of truth
const existingState = workflowData[0].state || {}
const finalState = {
// Default values for expected properties
deploymentStatuses: {},
hasActiveWebhook: false,
// Preserve any existing state properties
...existingState,
// Override with normalized data (this takes precedence)
// Data from normalized tables
blocks: normalizedData.blocks,
edges: normalizedData.edges,
loops: normalizedData.loops,
@@ -494,11 +490,25 @@ async function handleBlockOperationTx(
throw new Error('Missing block ID for update parent operation')
}
// Fetch current parent to update subflow node list when detaching or reparenting
const [existing] = await tx
.select({
id: workflowBlocks.id,
parentId: workflowBlocks.parentId,
})
.from(workflowBlocks)
.where(and(eq(workflowBlocks.id, payload.id), eq(workflowBlocks.workflowId, workflowId)))
.limit(1)
const isRemovingFromParent = !payload.parentId
const updateResult = await tx
.update(workflowBlocks)
.set({
parentId: payload.parentId || null,
extent: payload.extent || null,
parentId: isRemovingFromParent ? null : payload.parentId || null,
extent: isRemovingFromParent ? null : payload.extent || null,
// When removing from a subflow, also clear data JSON entirely
...(isRemovingFromParent ? { data: {} } : {}),
updatedAt: new Date(),
})
.where(and(eq(workflowBlocks.id, payload.id), eq(workflowBlocks.workflowId, workflowId)))
@@ -508,13 +518,19 @@ async function handleBlockOperationTx(
throw new Error(`Block ${payload.id} not found in workflow ${workflowId}`)
}
// If the block now has a parent, update the parent's subflow node list
// If the block now has a parent, update the new parent's subflow node list
if (payload.parentId) {
await updateSubflowNodeList(tx, workflowId, payload.parentId)
}
// If the block had a previous parent, update that parent's node list as well
if (existing?.parentId && existing.parentId !== payload.parentId) {
await updateSubflowNodeList(tx, workflowId, existing.parentId)
}
logger.debug(
`Updated block parent: ${payload.id} -> parent: ${payload.parentId}, extent: ${payload.extent}`
`Updated block parent: ${payload.id} -> parent: ${payload.parentId || 'null'}, extent: ${payload.extent || 'null'}${
isRemovingFromParent ? ' (cleared data JSON)' : ''
}`
)
break
}
@@ -811,7 +827,7 @@ async function handleSubflowOperationTx(
collection: payload.config.forEachItems,
width: 500,
height: 300,
type: 'loopNode',
type: 'subflowNode',
},
updatedAt: new Date(),
})
@@ -822,7 +838,7 @@ async function handleSubflowOperationTx(
...payload.config,
width: 500,
height: 300,
type: 'parallelNode',
type: 'subflowNode',
}
// Include count if provided
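The update-parent branch above now distinguishes attach from detach: a falsy parentId clears parentId, extent, and the block's data JSON, and both the old and new parents' subflow node lists are refreshed. Two hypothetical payloads for that operation (the block and loop IDs are made up; the field names come from the diff):

const attachToSubflow = { id: 'block-abc', parentId: 'loop-123', extent: 'parent' }
const detachFromSubflow = { id: 'block-abc', parentId: null } // also clears extent and data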

View File

@@ -238,6 +238,13 @@ function createErrorMessage(messageId: string, content: string): CopilotMessage
role: 'assistant',
content,
timestamp: new Date().toISOString(),
contentBlocks: [
{
type: 'text',
content,
timestamp: Date.now(),
},
],
}
}
@@ -1803,6 +1810,12 @@ async function* parseSSEStream(
}
}
// Auth/usage assistant response messages for Copilot
const COPILOT_AUTH_REQUIRED_MESSAGE =
'*Authorization failed. An API key must be configured in order to use the copilot. You can configure an API key at [sim.ai](https://sim.ai).*'
const COPILOT_USAGE_EXCEEDED_MESSAGE =
'*Usage limit exceeded, please upgrade your plan or top up credits at [sim.ai](https://sim.ai) to continue using the copilot*'
/**
* Copilot store using the new unified API
*/
@@ -2249,7 +2262,28 @@ export const useCopilotStore = create<CopilotStore>()(
logger.info('Message sending was aborted by user')
return // Don't throw or update state, abort handler already did
}
throw new Error(result.error || 'Failed to send message')
// Handle specific upstream statuses
let displayError = result.error || 'Failed to send message'
if (result.status === 401) {
displayError = COPILOT_AUTH_REQUIRED_MESSAGE
} else if (result.status === 402) {
displayError = COPILOT_USAGE_EXCEEDED_MESSAGE
}
const errorMessage = createErrorMessage(streamingMessage.id, displayError)
// Show as a normal assistant response without global error for auth/usage cases
const isAuthOrUsage = result.status === 401 || result.status === 402
set((state) => ({
messages: state.messages.map((msg) =>
msg.id === streamingMessage.id ? errorMessage : msg
),
error: isAuthOrUsage ? null : displayError,
isSendingMessage: false,
abortController: null,
}))
}
} catch (error) {
// Check if this was an abort
@@ -2504,7 +2538,25 @@ export const useCopilotStore = create<CopilotStore>()(
logger.info('Implicit feedback sending was aborted by user')
return
}
throw new Error(result.error || 'Failed to send implicit feedback')
// Handle specific upstream statuses as normal assistant responses
let displayError = result.error || 'Failed to send implicit feedback'
if (result.status === 401) {
displayError = COPILOT_AUTH_REQUIRED_MESSAGE
} else if (result.status === 402) {
displayError = COPILOT_USAGE_EXCEEDED_MESSAGE
}
const errorMessage = createErrorMessage(newAssistantMessage.id, displayError)
const isAuthOrUsage = result.status === 401 || result.status === 402
set((state) => ({
messages: state.messages.map((msg) =>
msg.id === newAssistantMessage.id ? errorMessage : msg
),
error: isAuthOrUsage ? null : displayError,
isSendingMessage: false,
abortController: null,
}))
}
} catch (error) {
if (error instanceof Error && error.name === 'AbortError') {

View File

@@ -261,8 +261,6 @@ export const useFilterStore = create<FilterState>((set, get) => ({
buildQueryParams: (page: number, limit: number) => {
const { workspaceId, timeRange, level, workflowIds, folderIds, searchQuery, triggers } = get()
const params = new URLSearchParams()
params.set('includeWorkflow', 'true')
params.set('limit', limit.toString())
params.set('offset', ((page - 1) * limit).toString())

View File

@@ -71,9 +71,8 @@ export interface TraceSpan {
export interface WorkflowLog {
id: string
workflowId: string
executionId: string | null
executionId?: string | null
level: string
message: string
duration: string | null
trigger: string | null
createdAt: string
@@ -90,10 +89,10 @@ export interface WorkflowLog {
storageProvider?: 's3' | 'blob' | 'local'
bucketName?: string
}>
metadata?: ToolCallMetadata & {
cost?: CostMetadata
executionData?: ToolCallMetadata & {
traceSpans?: TraceSpan[]
totalDuration?: number
cost?: CostMetadata
blockInput?: Record<string, any>
enhanced?: boolean

Some files were not shown because too many files have changed in this diff