Compare commits

..

9 Commits

Author SHA1 Message Date
Vikhyath Mondreti
66483c2fd5 improvement(langsmith): add wand for batch ingestion schemas 2026-01-14 19:47:48 -08:00
Waleed
3db9ad2d95 improvement(emails): update email footer links to link to sim.ai/provider instead of direct provider links (#2826) 2026-01-14 18:53:11 -08:00
Waleed
4195cfe1ff fix(otp): send welcome email even when user signs up via email/pass along with oauth providers (#2825) 2026-01-14 18:46:18 -08:00
Waleed
212933746e improvement(langsmith): upgraded langsmith to use tool names directly in dropdown (#2824) 2026-01-14 18:46:06 -08:00
Waleed
5af72ea22f feat(dashboard): added stats endpoint to compute stats on server side and avoid limit (#2823)
* feat(dashboard): added stats endpoint to compute stats on server side and avoid limit

* updated query
2026-01-14 18:41:46 -08:00
Waleed
4899c28421 fix(notifications): consolidate notification utils, update email styling (#2822)
* fix(notifications): consolidate notification utils, update email styling

* fixed duplicate types
2026-01-14 18:35:51 -08:00
Vikhyath Mondreti
2cee30ff15 feat(langsmith): add langsmith tools for logging, output selector use tool-aware listing (#2821)
* feat(langsmith): add langsmith tools for logging, output selector use tool-aware listing

* fix

* fix docs

* fix positioning of outputs

* fix docs script
2026-01-14 16:14:24 -08:00
Waleed
41f9374b5c fix(agent-tools): added special handling for workflow tool in agent tool input, added react grab and feature flag (#2820)
* fix(agent-tools): added special handling for workflow tool in agent tool input, added react grab

* FF react grab

* ack comments

* updated to account for workflow input tool on top of just workflow as well
2026-01-14 15:53:42 -08:00
Vikhyath Mondreti
6c8c3d6368 feat(reorder): allow workflow/folder reordering (#2818)
* feat(reorder): allow workflow/folder reordering

* progress

* fix edge cases

* add migration

* fix bun lock

* updated to use brand tertiary color, allow workflows to be dropped above/below folders at the same level

* changed color, removed flicker on folder container

* optimized

* ack pr comments

* removed empty placeholder images for drag, removed redundant local sanitization helper

---------

Co-authored-by: waleed <walif6@gmail.com>
2026-01-14 15:52:27 -08:00
53 changed files with 12363 additions and 934 deletions

View File

@@ -351,14 +351,16 @@ Enables AI-assisted field generation.
## Tools Configuration
### Simple Tool Selector
**Preferred:** Use tool names directly as dropdown option IDs to avoid switch cases:
```typescript
tools: {
access: ['service_create', 'service_read', 'service_update'],
config: {
tool: (params) => `service_${params.operation}`,
},
}
// Dropdown options use tool IDs directly
options: [
{ label: 'Create', id: 'service_create' },
{ label: 'Read', id: 'service_read' },
]
// Tool selector just returns the operation value
tool: (params) => params.operation,
```
### With Parameter Transformation

View File

@@ -15,6 +15,7 @@ import {
renderPlanWelcomeEmail,
renderUsageThresholdEmail,
renderWelcomeEmail,
renderWorkflowNotificationEmail,
renderWorkspaceInvitationEmail,
} from '@/components/emails'
@@ -108,6 +109,51 @@ const emailTemplates = {
message:
'I have 10 years of experience building scalable distributed systems. Most recently, I led a team at a Series B startup where we scaled from 100K to 10M users.',
}),
// Notification emails
'workflow-notification-success': () =>
renderWorkflowNotificationEmail({
workflowName: 'Customer Onboarding Flow',
status: 'success',
trigger: 'api',
duration: '2.3s',
cost: '$0.0042',
logUrl: 'https://sim.ai/workspace/ws_123/logs?search=exec_abc123',
}),
'workflow-notification-error': () =>
renderWorkflowNotificationEmail({
workflowName: 'Customer Onboarding Flow',
status: 'error',
trigger: 'webhook',
duration: '1.1s',
cost: '$0.0021',
logUrl: 'https://sim.ai/workspace/ws_123/logs?search=exec_abc123',
}),
'workflow-notification-alert': () =>
renderWorkflowNotificationEmail({
workflowName: 'Customer Onboarding Flow',
status: 'error',
trigger: 'schedule',
duration: '45.2s',
cost: '$0.0156',
logUrl: 'https://sim.ai/workspace/ws_123/logs?search=exec_abc123',
alertReason: '3 consecutive failures detected',
}),
'workflow-notification-full': () =>
renderWorkflowNotificationEmail({
workflowName: 'Data Processing Pipeline',
status: 'success',
trigger: 'api',
duration: '12.5s',
cost: '$0.0234',
logUrl: 'https://sim.ai/workspace/ws_123/logs?search=exec_abc123',
finalOutput: { processed: 150, skipped: 3, status: 'completed' },
rateLimits: {
sync: { requestsPerMinute: 60, remaining: 45 },
async: { requestsPerMinute: 120, remaining: 98 },
},
usageData: { currentPeriodCost: 12.45, limit: 50, percentUsed: 24.9 },
}),
} as const
type EmailTemplate = keyof typeof emailTemplates
@@ -131,6 +177,12 @@ export async function GET(request: NextRequest) {
'payment-failed',
],
Careers: ['careers-confirmation', 'careers-submission'],
Notifications: [
'workflow-notification-success',
'workflow-notification-error',
'workflow-notification-alert',
'workflow-notification-full',
],
}
const categoryHtml = Object.entries(categories)

View File

@@ -14,6 +14,7 @@ const updateFolderSchema = z.object({
color: z.string().optional(),
isExpanded: z.boolean().optional(),
parentId: z.string().nullable().optional(),
sortOrder: z.number().int().min(0).optional(),
})
// PUT - Update a folder
@@ -38,7 +39,7 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
return NextResponse.json({ error: `Validation failed: ${errorMessages}` }, { status: 400 })
}
const { name, color, isExpanded, parentId } = validationResult.data
const { name, color, isExpanded, parentId, sortOrder } = validationResult.data
// Verify the folder exists
const existingFolder = await db
@@ -81,12 +82,12 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
}
}
// Update the folder
const updates: any = { updatedAt: new Date() }
const updates: Record<string, unknown> = { updatedAt: new Date() }
if (name !== undefined) updates.name = name.trim()
if (color !== undefined) updates.color = color
if (isExpanded !== undefined) updates.isExpanded = isExpanded
if (parentId !== undefined) updates.parentId = parentId || null
if (sortOrder !== undefined) updates.sortOrder = sortOrder
const [updatedFolder] = await db
.update(workflowFolder)

View File

@@ -0,0 +1,91 @@
import { db } from '@sim/db'
import { workflowFolder } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq, inArray } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { generateRequestId } from '@/lib/core/utils/request'
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
const logger = createLogger('FolderReorderAPI')
const ReorderSchema = z.object({
workspaceId: z.string(),
updates: z.array(
z.object({
id: z.string(),
sortOrder: z.number().int().min(0),
parentId: z.string().nullable().optional(),
})
),
})
export async function PUT(req: NextRequest) {
const requestId = generateRequestId()
const session = await getSession()
if (!session?.user?.id) {
logger.warn(`[${requestId}] Unauthorized folder reorder attempt`)
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
try {
const body = await req.json()
const { workspaceId, updates } = ReorderSchema.parse(body)
const permission = await getUserEntityPermissions(session.user.id, 'workspace', workspaceId)
if (!permission || permission === 'read') {
logger.warn(
`[${requestId}] User ${session.user.id} lacks write permission for workspace ${workspaceId}`
)
return NextResponse.json({ error: 'Write access required' }, { status: 403 })
}
const folderIds = updates.map((u) => u.id)
const existingFolders = await db
.select({ id: workflowFolder.id, workspaceId: workflowFolder.workspaceId })
.from(workflowFolder)
.where(inArray(workflowFolder.id, folderIds))
const validIds = new Set(
existingFolders.filter((f) => f.workspaceId === workspaceId).map((f) => f.id)
)
const validUpdates = updates.filter((u) => validIds.has(u.id))
if (validUpdates.length === 0) {
return NextResponse.json({ error: 'No valid folders to update' }, { status: 400 })
}
await db.transaction(async (tx) => {
for (const update of validUpdates) {
const updateData: Record<string, unknown> = {
sortOrder: update.sortOrder,
updatedAt: new Date(),
}
if (update.parentId !== undefined) {
updateData.parentId = update.parentId
}
await tx.update(workflowFolder).set(updateData).where(eq(workflowFolder.id, update.id))
}
})
logger.info(
`[${requestId}] Reordered ${validUpdates.length} folders in workspace ${workspaceId}`
)
return NextResponse.json({ success: true, updated: validUpdates.length })
} catch (error) {
if (error instanceof z.ZodError) {
logger.warn(`[${requestId}] Invalid folder reorder data`, { errors: error.errors })
return NextResponse.json(
{ error: 'Invalid request data', details: error.errors },
{ status: 400 }
)
}
logger.error(`[${requestId}] Error reordering folders`, error)
return NextResponse.json({ error: 'Failed to reorder folders' }, { status: 500 })
}
}
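
A minimal client-side sketch of calling the new folder reorder endpoint after a drag-and-drop. The route path (`/api/folders/reorder`) is an assumption, since the diff does not show the file's location; the request body follows `ReorderSchema` above and the response follows the handler's `{ success, updated }` shape.

```typescript
// Hypothetical usage sketch: persist a new folder ordering.
// Endpoint path is assumed; the body shape matches ReorderSchema above.
interface FolderReorderUpdate {
  id: string
  sortOrder: number
  parentId?: string | null
}

async function reorderFolders(workspaceId: string, updates: FolderReorderUpdate[]) {
  const res = await fetch('/api/folders/reorder', {
    method: 'PUT',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ workspaceId, updates }),
  })
  if (!res.ok) throw new Error(`Reorder failed: ${res.status}`)
  // Server responds with { success: true, updated: <count> } on success
  return (await res.json()) as { success: boolean; updated: number }
}
```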

View File

@@ -58,7 +58,7 @@ export async function POST(request: NextRequest) {
}
const body = await request.json()
const { name, workspaceId, parentId, color } = body
const { name, workspaceId, parentId, color, sortOrder: providedSortOrder } = body
if (!name || !workspaceId) {
return NextResponse.json({ error: 'Name and workspace ID are required' }, { status: 400 })
@@ -81,25 +81,26 @@ export async function POST(request: NextRequest) {
// Generate a new ID
const id = crypto.randomUUID()
// Use transaction to ensure sortOrder consistency
const newFolder = await db.transaction(async (tx) => {
// Get the next sort order for the parent (or root level)
// Consider all folders in the workspace, not just those created by current user
const existingFolders = await tx
.select({ sortOrder: workflowFolder.sortOrder })
.from(workflowFolder)
.where(
and(
eq(workflowFolder.workspaceId, workspaceId),
parentId ? eq(workflowFolder.parentId, parentId) : isNull(workflowFolder.parentId)
let sortOrder: number
if (providedSortOrder !== undefined) {
sortOrder = providedSortOrder
} else {
const existingFolders = await tx
.select({ sortOrder: workflowFolder.sortOrder })
.from(workflowFolder)
.where(
and(
eq(workflowFolder.workspaceId, workspaceId),
parentId ? eq(workflowFolder.parentId, parentId) : isNull(workflowFolder.parentId)
)
)
)
.orderBy(desc(workflowFolder.sortOrder))
.limit(1)
.orderBy(desc(workflowFolder.sortOrder))
.limit(1)
const nextSortOrder = existingFolders.length > 0 ? existingFolders[0].sortOrder + 1 : 0
sortOrder = existingFolders.length > 0 ? existingFolders[0].sortOrder + 1 : 0
}
// Insert the new folder within the same transaction
const [folder] = await tx
.insert(workflowFolder)
.values({
@@ -109,7 +110,7 @@ export async function POST(request: NextRequest) {
workspaceId,
parentId: parentId || null,
color: color || '#6B7280',
sortOrder: nextSortOrder,
sortOrder,
})
.returning()

View File

@@ -0,0 +1,297 @@
import { db } from '@sim/db'
import { permissions, workflow, workflowExecutionLogs } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, sql } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { generateRequestId } from '@/lib/core/utils/request'
import { buildFilterConditions, LogFilterParamsSchema } from '@/lib/logs/filters'
const logger = createLogger('LogsStatsAPI')
export const revalidate = 0
const StatsQueryParamsSchema = LogFilterParamsSchema.extend({
segmentCount: z.coerce.number().optional().default(72),
})
export interface SegmentStats {
timestamp: string
totalExecutions: number
successfulExecutions: number
avgDurationMs: number
}
export interface WorkflowStats {
workflowId: string
workflowName: string
segments: SegmentStats[]
overallSuccessRate: number
totalExecutions: number
totalSuccessful: number
}
export interface DashboardStatsResponse {
workflows: WorkflowStats[]
aggregateSegments: SegmentStats[]
totalRuns: number
totalErrors: number
avgLatency: number
timeBounds: {
start: string
end: string
}
segmentMs: number
}
export async function GET(request: NextRequest) {
const requestId = generateRequestId()
try {
const session = await getSession()
if (!session?.user?.id) {
logger.warn(`[${requestId}] Unauthorized logs stats access attempt`)
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const userId = session.user.id
try {
const { searchParams } = new URL(request.url)
const params = StatsQueryParamsSchema.parse(Object.fromEntries(searchParams.entries()))
const workspaceFilter = eq(workflowExecutionLogs.workspaceId, params.workspaceId)
const commonFilters = buildFilterConditions(params, { useSimpleLevelFilter: true })
const whereCondition = commonFilters ? and(workspaceFilter, commonFilters) : workspaceFilter
const boundsQuery = await db
.select({
minTime: sql<string>`MIN(${workflowExecutionLogs.startedAt})`,
maxTime: sql<string>`MAX(${workflowExecutionLogs.startedAt})`,
})
.from(workflowExecutionLogs)
.innerJoin(workflow, eq(workflowExecutionLogs.workflowId, workflow.id))
.innerJoin(
permissions,
and(
eq(permissions.entityType, 'workspace'),
eq(permissions.entityId, workflowExecutionLogs.workspaceId),
eq(permissions.userId, userId)
)
)
.where(whereCondition)
const bounds = boundsQuery[0]
const now = new Date()
let startTime: Date
let endTime: Date
if (!bounds?.minTime || !bounds?.maxTime) {
endTime = now
startTime = new Date(now.getTime() - 24 * 60 * 60 * 1000)
} else {
startTime = new Date(bounds.minTime)
endTime = new Date(Math.max(new Date(bounds.maxTime).getTime(), now.getTime()))
}
const totalMs = Math.max(1, endTime.getTime() - startTime.getTime())
const segmentMs = Math.max(60000, Math.floor(totalMs / params.segmentCount))
const statsQuery = await db
.select({
workflowId: workflowExecutionLogs.workflowId,
workflowName: workflow.name,
segmentIndex:
sql<number>`FLOOR(EXTRACT(EPOCH FROM (${workflowExecutionLogs.startedAt} - ${startTime}::timestamp)) * 1000 / ${segmentMs})`.as(
'segment_index'
),
totalExecutions: sql<number>`COUNT(*)`.as('total_executions'),
successfulExecutions:
sql<number>`COUNT(*) FILTER (WHERE ${workflowExecutionLogs.level} != 'error')`.as(
'successful_executions'
),
avgDurationMs:
sql<number>`COALESCE(AVG(${workflowExecutionLogs.totalDurationMs}) FILTER (WHERE ${workflowExecutionLogs.totalDurationMs} > 0), 0)`.as(
'avg_duration_ms'
),
})
.from(workflowExecutionLogs)
.innerJoin(workflow, eq(workflowExecutionLogs.workflowId, workflow.id))
.innerJoin(
permissions,
and(
eq(permissions.entityType, 'workspace'),
eq(permissions.entityId, workflowExecutionLogs.workspaceId),
eq(permissions.userId, userId)
)
)
.where(whereCondition)
.groupBy(
workflowExecutionLogs.workflowId,
workflow.name,
sql`FLOOR(EXTRACT(EPOCH FROM (${workflowExecutionLogs.startedAt} - ${startTime}::timestamp)) * 1000 / ${segmentMs})`
)
.orderBy(workflowExecutionLogs.workflowId, sql`segment_index`)
const workflowMap = new Map<
string,
{
workflowId: string
workflowName: string
segments: Map<number, SegmentStats>
totalExecutions: number
totalSuccessful: number
}
>()
for (const row of statsQuery) {
const segmentIndex = Math.min(
params.segmentCount - 1,
Math.max(0, Math.floor(Number(row.segmentIndex)))
)
if (!workflowMap.has(row.workflowId)) {
workflowMap.set(row.workflowId, {
workflowId: row.workflowId,
workflowName: row.workflowName,
segments: new Map(),
totalExecutions: 0,
totalSuccessful: 0,
})
}
const wf = workflowMap.get(row.workflowId)!
wf.totalExecutions += Number(row.totalExecutions)
wf.totalSuccessful += Number(row.successfulExecutions)
const existing = wf.segments.get(segmentIndex)
if (existing) {
const oldTotal = existing.totalExecutions
const newTotal = oldTotal + Number(row.totalExecutions)
existing.totalExecutions = newTotal
existing.successfulExecutions += Number(row.successfulExecutions)
existing.avgDurationMs =
newTotal > 0
? (existing.avgDurationMs * oldTotal +
Number(row.avgDurationMs || 0) * Number(row.totalExecutions)) /
newTotal
: 0
} else {
wf.segments.set(segmentIndex, {
timestamp: new Date(startTime.getTime() + segmentIndex * segmentMs).toISOString(),
totalExecutions: Number(row.totalExecutions),
successfulExecutions: Number(row.successfulExecutions),
avgDurationMs: Number(row.avgDurationMs || 0),
})
}
}
const workflows: WorkflowStats[] = []
for (const wf of workflowMap.values()) {
const segments: SegmentStats[] = []
for (let i = 0; i < params.segmentCount; i++) {
const existing = wf.segments.get(i)
if (existing) {
segments.push(existing)
} else {
segments.push({
timestamp: new Date(startTime.getTime() + i * segmentMs).toISOString(),
totalExecutions: 0,
successfulExecutions: 0,
avgDurationMs: 0,
})
}
}
workflows.push({
workflowId: wf.workflowId,
workflowName: wf.workflowName,
segments,
totalExecutions: wf.totalExecutions,
totalSuccessful: wf.totalSuccessful,
overallSuccessRate:
wf.totalExecutions > 0 ? (wf.totalSuccessful / wf.totalExecutions) * 100 : 100,
})
}
workflows.sort((a, b) => {
const errA = a.overallSuccessRate < 100 ? 1 - a.overallSuccessRate / 100 : 0
const errB = b.overallSuccessRate < 100 ? 1 - b.overallSuccessRate / 100 : 0
if (errA !== errB) return errB - errA
return a.workflowName.localeCompare(b.workflowName)
})
const aggregateSegments: SegmentStats[] = []
let totalRuns = 0
let totalErrors = 0
let weightedLatencySum = 0
let latencyCount = 0
for (let i = 0; i < params.segmentCount; i++) {
let segTotal = 0
let segSuccess = 0
let segWeightedLatency = 0
let segLatencyCount = 0
for (const wf of workflows) {
const seg = wf.segments[i]
segTotal += seg.totalExecutions
segSuccess += seg.successfulExecutions
if (seg.avgDurationMs > 0 && seg.totalExecutions > 0) {
segWeightedLatency += seg.avgDurationMs * seg.totalExecutions
segLatencyCount += seg.totalExecutions
}
}
totalRuns += segTotal
totalErrors += segTotal - segSuccess
weightedLatencySum += segWeightedLatency
latencyCount += segLatencyCount
aggregateSegments.push({
timestamp: new Date(startTime.getTime() + i * segmentMs).toISOString(),
totalExecutions: segTotal,
successfulExecutions: segSuccess,
avgDurationMs: segLatencyCount > 0 ? segWeightedLatency / segLatencyCount : 0,
})
}
const avgLatency = latencyCount > 0 ? weightedLatencySum / latencyCount : 0
const response: DashboardStatsResponse = {
workflows,
aggregateSegments,
totalRuns,
totalErrors,
avgLatency,
timeBounds: {
start: startTime.toISOString(),
end: endTime.toISOString(),
},
segmentMs,
}
return NextResponse.json(response, { status: 200 })
} catch (validationError) {
if (validationError instanceof z.ZodError) {
logger.warn(`[${requestId}] Invalid logs stats request parameters`, {
errors: validationError.errors,
})
return NextResponse.json(
{
error: 'Invalid request parameters',
details: validationError.errors,
},
{ status: 400 }
)
}
throw validationError
}
} catch (error: any) {
logger.error(`[${requestId}] logs stats fetch error`, error)
return NextResponse.json({ error: error.message }, { status: 500 })
}
}
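
A sketch of consuming the stats endpoint from the client. The route path is an assumption (the diff shows only the handler, not its file path); `workspaceId` comes from `LogFilterParamsSchema`, `segmentCount` defaults to 72 server-side, and the response is typed with the exported `DashboardStatsResponse`, imported here from `@/hooks/queries/logs` as the dashboard component does.

```typescript
import type { DashboardStatsResponse } from '@/hooks/queries/logs'

// Hypothetical usage sketch: fetch pre-aggregated dashboard stats instead of raw logs.
// The endpoint path is assumed; additional filter params from LogFilterParamsSchema
// (level, triggers, date range, etc.) can be appended the same way.
async function fetchDashboardStats(
  workspaceId: string,
  segmentCount = 72
): Promise<DashboardStatsResponse> {
  const params = new URLSearchParams({ workspaceId, segmentCount: String(segmentCount) })
  const res = await fetch(`/api/logs/stats?${params}`)
  if (!res.ok) throw new Error(`Stats fetch failed: ${res.status}`)
  return (await res.json()) as DashboardStatsResponse
}

// Example: overall error rate across the returned time window.
// const stats = await fetchDashboardStats('ws_123')
// const errorRate = stats.totalRuns > 0 ? stats.totalErrors / stats.totalRuns : 0
```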

View File

@@ -20,6 +20,7 @@ const UpdateWorkflowSchema = z.object({
description: z.string().optional(),
color: z.string().optional(),
folderId: z.string().nullable().optional(),
sortOrder: z.number().int().min(0).optional(),
})
/**
@@ -438,12 +439,12 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
return NextResponse.json({ error: 'Access denied' }, { status: 403 })
}
// Build update object
const updateData: any = { updatedAt: new Date() }
const updateData: Record<string, unknown> = { updatedAt: new Date() }
if (updates.name !== undefined) updateData.name = updates.name
if (updates.description !== undefined) updateData.description = updates.description
if (updates.color !== undefined) updateData.color = updates.color
if (updates.folderId !== undefined) updateData.folderId = updates.folderId
if (updates.sortOrder !== undefined) updateData.sortOrder = updates.sortOrder
// Update the workflow
const [updatedWorkflow] = await db

View File

@@ -0,0 +1,91 @@
import { db } from '@sim/db'
import { workflow } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq, inArray } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { generateRequestId } from '@/lib/core/utils/request'
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
const logger = createLogger('WorkflowReorderAPI')
const ReorderSchema = z.object({
workspaceId: z.string(),
updates: z.array(
z.object({
id: z.string(),
sortOrder: z.number().int().min(0),
folderId: z.string().nullable().optional(),
})
),
})
export async function PUT(req: NextRequest) {
const requestId = generateRequestId()
const session = await getSession()
if (!session?.user?.id) {
logger.warn(`[${requestId}] Unauthorized reorder attempt`)
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
try {
const body = await req.json()
const { workspaceId, updates } = ReorderSchema.parse(body)
const permission = await getUserEntityPermissions(session.user.id, 'workspace', workspaceId)
if (!permission || permission === 'read') {
logger.warn(
`[${requestId}] User ${session.user.id} lacks write permission for workspace ${workspaceId}`
)
return NextResponse.json({ error: 'Write access required' }, { status: 403 })
}
const workflowIds = updates.map((u) => u.id)
const existingWorkflows = await db
.select({ id: workflow.id, workspaceId: workflow.workspaceId })
.from(workflow)
.where(inArray(workflow.id, workflowIds))
const validIds = new Set(
existingWorkflows.filter((w) => w.workspaceId === workspaceId).map((w) => w.id)
)
const validUpdates = updates.filter((u) => validIds.has(u.id))
if (validUpdates.length === 0) {
return NextResponse.json({ error: 'No valid workflows to update' }, { status: 400 })
}
await db.transaction(async (tx) => {
for (const update of validUpdates) {
const updateData: Record<string, unknown> = {
sortOrder: update.sortOrder,
updatedAt: new Date(),
}
if (update.folderId !== undefined) {
updateData.folderId = update.folderId
}
await tx.update(workflow).set(updateData).where(eq(workflow.id, update.id))
}
})
logger.info(
`[${requestId}] Reordered ${validUpdates.length} workflows in workspace ${workspaceId}`
)
return NextResponse.json({ success: true, updated: validUpdates.length })
} catch (error) {
if (error instanceof z.ZodError) {
logger.warn(`[${requestId}] Invalid reorder data`, { errors: error.errors })
return NextResponse.json(
{ error: 'Invalid request data', details: error.errors },
{ status: 400 }
)
}
logger.error(`[${requestId}] Error reordering workflows`, error)
return NextResponse.json({ error: 'Failed to reorder workflows' }, { status: 500 })
}
}

View File

@@ -1,7 +1,7 @@
import { db } from '@sim/db'
import { workflow } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { and, eq, isNull, max } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
@@ -17,6 +17,7 @@ const CreateWorkflowSchema = z.object({
color: z.string().optional().default('#3972F6'),
workspaceId: z.string().optional(),
folderId: z.string().nullable().optional(),
sortOrder: z.number().int().optional(),
})
// GET /api/workflows - Get workflows for user (optionally filtered by workspaceId)
@@ -89,7 +90,14 @@ export async function POST(req: NextRequest) {
try {
const body = await req.json()
const { name, description, color, workspaceId, folderId } = CreateWorkflowSchema.parse(body)
const {
name,
description,
color,
workspaceId,
folderId,
sortOrder: providedSortOrder,
} = CreateWorkflowSchema.parse(body)
if (workspaceId) {
const workspacePermission = await getUserEntityPermissions(
@@ -127,11 +135,28 @@ export async function POST(req: NextRequest) {
// Silently fail
})
let sortOrder: number
if (providedSortOrder !== undefined) {
sortOrder = providedSortOrder
} else {
const folderCondition = folderId ? eq(workflow.folderId, folderId) : isNull(workflow.folderId)
const [maxResult] = await db
.select({ maxOrder: max(workflow.sortOrder) })
.from(workflow)
.where(
workspaceId
? and(eq(workflow.workspaceId, workspaceId), folderCondition)
: and(eq(workflow.userId, session.user.id), folderCondition)
)
sortOrder = (maxResult?.maxOrder ?? -1) + 1
}
await db.insert(workflow).values({
id: workflowId,
userId: session.user.id,
workspaceId: workspaceId || null,
folderId: folderId || null,
sortOrder,
name,
description,
color,
@@ -152,6 +177,7 @@ export async function POST(req: NextRequest) {
color,
workspaceId,
folderId,
sortOrder,
createdAt: now,
updatedAt: now,
})
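
A short sketch of passing an explicit `sortOrder` when creating a workflow, for example to insert it at a specific position after a drag. The `/api/workflows` path matches the route comment above; when `sortOrder` is omitted the server falls back to `max(sortOrder) + 1` within the target folder (or root).

```typescript
// Sketch: create a workflow at an explicit position in its folder.
// Fields follow CreateWorkflowSchema; only `name` is required.
async function createWorkflowAtPosition(input: {
  name: string
  description?: string
  workspaceId?: string
  folderId?: string | null
  sortOrder?: number
}) {
  const res = await fetch('/api/workflows', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(input),
  })
  if (!res.ok) throw new Error(`Workflow creation failed: ${res.status}`)
  return res.json()
}

// Place the new workflow first at the root level of a workspace:
// await createWorkflowAtPosition({ name: 'My Flow', workspaceId: 'ws_123', folderId: null, sortOrder: 0 })
```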

View File

@@ -80,7 +80,6 @@ const updateNotificationSchema = z
levelFilter: levelFilterSchema.optional(),
triggerFilter: triggerFilterSchema.optional(),
includeFinalOutput: z.boolean().optional(),
includeTraceSpans: z.boolean().optional(),
includeRateLimits: z.boolean().optional(),
includeUsageData: z.boolean().optional(),
alertConfig: alertConfigSchema.optional(),
@@ -147,7 +146,6 @@ export async function GET(request: NextRequest, { params }: RouteParams) {
levelFilter: subscription.levelFilter,
triggerFilter: subscription.triggerFilter,
includeFinalOutput: subscription.includeFinalOutput,
includeTraceSpans: subscription.includeTraceSpans,
includeRateLimits: subscription.includeRateLimits,
includeUsageData: subscription.includeUsageData,
webhookConfig: subscription.webhookConfig,
@@ -222,7 +220,6 @@ export async function PUT(request: NextRequest, { params }: RouteParams) {
if (data.triggerFilter !== undefined) updateData.triggerFilter = data.triggerFilter
if (data.includeFinalOutput !== undefined)
updateData.includeFinalOutput = data.includeFinalOutput
if (data.includeTraceSpans !== undefined) updateData.includeTraceSpans = data.includeTraceSpans
if (data.includeRateLimits !== undefined) updateData.includeRateLimits = data.includeRateLimits
if (data.includeUsageData !== undefined) updateData.includeUsageData = data.includeUsageData
if (data.alertConfig !== undefined) updateData.alertConfig = data.alertConfig
@@ -260,7 +257,6 @@ export async function PUT(request: NextRequest, { params }: RouteParams) {
levelFilter: subscription.levelFilter,
triggerFilter: subscription.triggerFilter,
includeFinalOutput: subscription.includeFinalOutput,
includeTraceSpans: subscription.includeTraceSpans,
includeRateLimits: subscription.includeRateLimits,
includeUsageData: subscription.includeUsageData,
webhookConfig: subscription.webhookConfig,

View File

@@ -5,8 +5,14 @@ import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { v4 as uuidv4 } from 'uuid'
import {
type EmailRateLimitsData,
type EmailUsageData,
renderWorkflowNotificationEmail,
} from '@/components/emails'
import { getSession } from '@/lib/auth'
import { decryptSecret } from '@/lib/core/security/encryption'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { sendEmail } from '@/lib/messaging/email/mailer'
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
@@ -67,29 +73,23 @@ function buildTestPayload(subscription: typeof workspaceNotificationSubscription
data.finalOutput = { message: 'This is a test notification', test: true }
}
if (subscription.includeTraceSpans) {
data.traceSpans = [
{
id: 'span_test_1',
name: 'Test Block',
type: 'block',
status: 'success',
startTime: new Date(timestamp - 5000).toISOString(),
endTime: new Date(timestamp).toISOString(),
duration: 5000,
},
]
}
if (subscription.includeRateLimits) {
data.rateLimits = {
sync: { limit: 150, remaining: 45, resetAt: new Date(timestamp + 60000).toISOString() },
async: { limit: 1000, remaining: 50, resetAt: new Date(timestamp + 60000).toISOString() },
sync: {
requestsPerMinute: 150,
remaining: 45,
resetAt: new Date(timestamp + 60000).toISOString(),
},
async: {
requestsPerMinute: 1000,
remaining: 50,
resetAt: new Date(timestamp + 60000).toISOString(),
},
}
}
if (subscription.includeUsageData) {
data.usage = { currentPeriodCost: 2.45, limit: 20, plan: 'pro', isExceeded: false }
data.usage = { currentPeriodCost: 2.45, limit: 20, percentUsed: 12.25, isExceeded: false }
}
return { payload, timestamp }
@@ -157,23 +157,26 @@ async function testEmail(subscription: typeof workspaceNotificationSubscription.
const { payload } = buildTestPayload(subscription)
const data = (payload as Record<string, unknown>).data as Record<string, unknown>
const baseUrl = getBaseUrl()
const logUrl = `${baseUrl}/workspace/${subscription.workspaceId}/logs`
const html = await renderWorkflowNotificationEmail({
workflowName: data.workflowName as string,
status: data.status as 'success' | 'error',
trigger: data.trigger as string,
duration: `${data.totalDurationMs}ms`,
cost: `$${(((data.cost as Record<string, unknown>)?.total as number) || 0).toFixed(4)}`,
logUrl,
finalOutput: data.finalOutput,
rateLimits: data.rateLimits as EmailRateLimitsData | undefined,
usageData: data.usage as EmailUsageData | undefined,
})
const result = await sendEmail({
to: subscription.emailRecipients,
subject: `[Test] Workflow Execution: ${data.workflowName}`,
text: `This is a test notification from Sim Studio.\n\nWorkflow: ${data.workflowName}\nStatus: ${data.status}\nDuration: ${data.totalDurationMs}ms\n\nThis notification is configured for workspace notifications.`,
html: `
<div style="font-family: sans-serif; max-width: 600px; margin: 0 auto;">
<h2 style="color: #7F2FFF;">Test Notification</h2>
<p>This is a test notification from Sim Studio.</p>
<table style="width: 100%; border-collapse: collapse; margin: 20px 0;">
<tr><td style="padding: 8px; border: 1px solid #eee;"><strong>Workflow</strong></td><td style="padding: 8px; border: 1px solid #eee;">${data.workflowName}</td></tr>
<tr><td style="padding: 8px; border: 1px solid #eee;"><strong>Status</strong></td><td style="padding: 8px; border: 1px solid #eee;">${data.status}</td></tr>
<tr><td style="padding: 8px; border: 1px solid #eee;"><strong>Duration</strong></td><td style="padding: 8px; border: 1px solid #eee;">${data.totalDurationMs}ms</td></tr>
</table>
<p style="color: #666; font-size: 12px;">This notification is configured for workspace notifications.</p>
</div>
`,
html,
text: `This is a test notification from Sim.\n\nWorkflow: ${data.workflowName}\nStatus: ${data.status}\nDuration: ${data.totalDurationMs}ms\n\nView Log: ${logUrl}\n\nThis notification is configured for workspace notifications.`,
emailType: 'notifications',
})
@@ -227,7 +230,7 @@ async function testSlack(
elements: [
{
type: 'mrkdwn',
text: 'This is a test notification from Sim Studio workspace notifications.',
text: 'This is a test notification from Sim workspace notifications.',
},
],
},

View File

@@ -83,7 +83,6 @@ const createNotificationSchema = z
levelFilter: levelFilterSchema.default(['info', 'error']),
triggerFilter: triggerFilterSchema.default([...CORE_TRIGGER_TYPES]),
includeFinalOutput: z.boolean().default(false),
includeTraceSpans: z.boolean().default(false),
includeRateLimits: z.boolean().default(false),
includeUsageData: z.boolean().default(false),
alertConfig: alertConfigSchema.optional(),
@@ -138,7 +137,6 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
levelFilter: workspaceNotificationSubscription.levelFilter,
triggerFilter: workspaceNotificationSubscription.triggerFilter,
includeFinalOutput: workspaceNotificationSubscription.includeFinalOutput,
includeTraceSpans: workspaceNotificationSubscription.includeTraceSpans,
includeRateLimits: workspaceNotificationSubscription.includeRateLimits,
includeUsageData: workspaceNotificationSubscription.includeUsageData,
webhookConfig: workspaceNotificationSubscription.webhookConfig,
@@ -240,7 +238,6 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
levelFilter: data.levelFilter,
triggerFilter: data.triggerFilter,
includeFinalOutput: data.includeFinalOutput,
includeTraceSpans: data.includeTraceSpans,
includeRateLimits: data.includeRateLimits,
includeUsageData: data.includeUsageData,
alertConfig: data.alertConfig || null,
@@ -266,7 +263,6 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
levelFilter: subscription.levelFilter,
triggerFilter: subscription.triggerFilter,
includeFinalOutput: subscription.includeFinalOutput,
includeTraceSpans: subscription.includeTraceSpans,
includeRateLimits: subscription.includeRateLimits,
includeUsageData: subscription.includeUsageData,
webhookConfig: subscription.webhookConfig,

View File

@@ -13,6 +13,7 @@ const logger = createLogger('Workspaces')
const createWorkspaceSchema = z.object({
name: z.string().trim().min(1, 'Name is required'),
skipDefaultWorkflow: z.boolean().optional().default(false),
})
// Get all workspaces for the current user
@@ -63,9 +64,9 @@ export async function POST(req: Request) {
}
try {
const { name } = createWorkspaceSchema.parse(await req.json())
const { name, skipDefaultWorkflow } = createWorkspaceSchema.parse(await req.json())
const newWorkspace = await createWorkspace(session.user.id, name)
const newWorkspace = await createWorkspace(session.user.id, name, skipDefaultWorkflow)
return NextResponse.json({ workspace: newWorkspace })
} catch (error) {
@@ -80,7 +81,7 @@ async function createDefaultWorkspace(userId: string, userName?: string | null)
return createWorkspace(userId, workspaceName)
}
async function createWorkspace(userId: string, name: string) {
async function createWorkspace(userId: string, name: string, skipDefaultWorkflow = false) {
const workspaceId = crypto.randomUUID()
const workflowId = crypto.randomUUID()
const now = new Date()
@@ -97,7 +98,6 @@ async function createWorkspace(userId: string, name: string) {
updatedAt: now,
})
// Create admin permissions for the workspace owner
await tx.insert(permissions).values({
id: crypto.randomUUID(),
entityType: 'workspace' as const,
@@ -108,37 +108,41 @@ async function createWorkspace(userId: string, name: string) {
updatedAt: now,
})
// Create initial workflow for the workspace (empty canvas)
// Create the workflow
await tx.insert(workflow).values({
id: workflowId,
userId,
workspaceId,
folderId: null,
name: 'default-agent',
description: 'Your first workflow - start building here!',
color: '#3972F6',
lastSynced: now,
createdAt: now,
updatedAt: now,
isDeployed: false,
runCount: 0,
variables: {},
})
if (!skipDefaultWorkflow) {
await tx.insert(workflow).values({
id: workflowId,
userId,
workspaceId,
folderId: null,
name: 'default-agent',
description: 'Your first workflow - start building here!',
color: '#3972F6',
lastSynced: now,
createdAt: now,
updatedAt: now,
isDeployed: false,
runCount: 0,
variables: {},
})
}
logger.info(
`Created workspace ${workspaceId} with initial workflow ${workflowId} for user ${userId}`
skipDefaultWorkflow
? `Created workspace ${workspaceId} for user ${userId}`
: `Created workspace ${workspaceId} with initial workflow ${workflowId} for user ${userId}`
)
})
const { workflowState } = buildDefaultWorkflowArtifacts()
const seedResult = await saveWorkflowToNormalizedTables(workflowId, workflowState)
if (!skipDefaultWorkflow) {
const { workflowState } = buildDefaultWorkflowArtifacts()
const seedResult = await saveWorkflowToNormalizedTables(workflowId, workflowState)
if (!seedResult.success) {
throw new Error(seedResult.error || 'Failed to seed default workflow state')
if (!seedResult.success) {
throw new Error(seedResult.error || 'Failed to seed default workflow state')
}
}
} catch (error) {
logger.error(`Failed to create workspace ${workspaceId} with initial workflow:`, error)
logger.error(`Failed to create workspace ${workspaceId}:`, error)
throw error
}
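
A minimal sketch of opting out of the default seeded workflow when creating a workspace. The `/api/workspaces` path and the response typing are assumptions; the body shape follows `createWorkspaceSchema`, where `skipDefaultWorkflow` defaults to `false`.

```typescript
// Hypothetical usage sketch: create a workspace without the default-agent workflow,
// e.g. when the workspace will be populated by an import flow instead.
async function createEmptyWorkspace(name: string, skipDefaultWorkflow = true) {
  const res = await fetch('/api/workspaces', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ name, skipDefaultWorkflow }),
  })
  if (!res.ok) throw new Error(`Workspace creation failed: ${res.status}`)
  // Server responds with { workspace: <new workspace> }
  return res.json()
}
```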

View File

@@ -1,12 +1,13 @@
import type { Metadata, Viewport } from 'next'
import Script from 'next/script'
import { PublicEnvScript } from 'next-runtime-env'
import { BrandedLayout } from '@/components/branded-layout'
import { generateThemeCSS } from '@/lib/branding/inject-theme'
import { generateBrandedMetadata, generateStructuredData } from '@/lib/branding/metadata'
import { PostHogProvider } from '@/app/_shell/providers/posthog-provider'
import '@/app/_styles/globals.css'
import { OneDollarStats } from '@/components/analytics/onedollarstats'
import { isReactGrabEnabled } from '@/lib/core/config/feature-flags'
import { HydrationErrorHandler } from '@/app/_shell/hydration-error-handler'
import { QueryProvider } from '@/app/_shell/providers/query-provider'
import { SessionProvider } from '@/app/_shell/providers/session-provider'
@@ -33,6 +34,19 @@ export default function RootLayout({ children }: { children: React.ReactNode })
return (
<html lang='en' suppressHydrationWarning>
<head>
{isReactGrabEnabled && (
<Script
src='https://unpkg.com/react-grab/dist/index.global.js'
crossOrigin='anonymous'
strategy='beforeInteractive'
/>
)}
{isReactGrabEnabled && (
<Script
src='https://unpkg.com/@react-grab/cursor/dist/client.global.js'
strategy='lazyOnload'
/>
)}
{/* Structured Data for SEO */}
<script
type='application/ld+json'

View File

@@ -3,9 +3,9 @@
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { Loader2 } from 'lucide-react'
import { Skeleton } from '@/components/ui/skeleton'
import { formatLatency, parseDuration } from '@/app/workspace/[workspaceId]/logs/utils'
import { formatLatency } from '@/app/workspace/[workspaceId]/logs/utils'
import type { DashboardStatsResponse, WorkflowStats } from '@/hooks/queries/logs'
import { useFilterStore } from '@/stores/logs/filters/store'
import type { WorkflowLog } from '@/stores/logs/filters/types'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { LineChart, WorkflowsList } from './components'
@@ -26,10 +26,6 @@ interface WorkflowExecution {
overallSuccessRate: number
}
const DEFAULT_SEGMENTS = 72
const MIN_SEGMENT_PX = 10
const MIN_SEGMENT_MS = 60000
const SKELETON_BAR_HEIGHTS = [
45, 72, 38, 85, 52, 68, 30, 90, 55, 42, 78, 35, 88, 48, 65, 28, 82, 58, 40, 75, 32, 95, 50, 70,
]
@@ -120,13 +116,32 @@ function DashboardSkeleton() {
}
interface DashboardProps {
logs: WorkflowLog[]
stats?: DashboardStatsResponse
isLoading: boolean
error?: Error | null
}
export default function Dashboard({ logs, isLoading, error }: DashboardProps) {
const [segmentCount, setSegmentCount] = useState<number>(DEFAULT_SEGMENTS)
/**
* Converts server WorkflowStats to the internal WorkflowExecution format.
*/
function toWorkflowExecution(wf: WorkflowStats): WorkflowExecution {
return {
workflowId: wf.workflowId,
workflowName: wf.workflowName,
overallSuccessRate: wf.overallSuccessRate,
segments: wf.segments.map((seg) => ({
timestamp: seg.timestamp,
totalExecutions: seg.totalExecutions,
successfulExecutions: seg.successfulExecutions,
hasExecutions: seg.totalExecutions > 0,
successRate:
seg.totalExecutions > 0 ? (seg.successfulExecutions / seg.totalExecutions) * 100 : 100,
avgDurationMs: seg.avgDurationMs,
})),
}
}
export default function Dashboard({ stats, isLoading, error }: DashboardProps) {
const [selectedSegments, setSelectedSegments] = useState<Record<string, number[]>>({})
const [lastAnchorIndices, setLastAnchorIndices] = useState<Record<string, number>>({})
const barsAreaRef = useRef<HTMLDivElement | null>(null)
@@ -137,182 +152,32 @@ export default function Dashboard({ logs, isLoading, error }: DashboardProps) {
const expandedWorkflowId = workflowIds.length === 1 ? workflowIds[0] : null
const lastExecutionByWorkflow = useMemo(() => {
const map = new Map<string, number>()
for (const log of logs) {
const wfId = log.workflowId
if (!wfId) continue
const ts = new Date(log.createdAt).getTime()
const existing = map.get(wfId)
if (!existing || ts > existing) {
map.set(wfId, ts)
}
}
return map
}, [logs])
const timeBounds = useMemo(() => {
if (logs.length === 0) {
const now = new Date()
return { start: now, end: now }
}
let minTime = Number.POSITIVE_INFINITY
let maxTime = Number.NEGATIVE_INFINITY
for (const log of logs) {
const ts = new Date(log.createdAt).getTime()
if (ts < minTime) minTime = ts
if (ts > maxTime) maxTime = ts
}
const end = new Date(Math.max(maxTime, Date.now()))
const start = new Date(minTime)
return { start, end }
}, [logs])
const { executions, aggregateSegments, segmentMs } = useMemo(() => {
const allWorkflowsList = Object.values(allWorkflows)
if (allWorkflowsList.length === 0) {
if (!stats) {
return { executions: [], aggregateSegments: [], segmentMs: 0 }
}
const { start, end } =
logs.length > 0
? timeBounds
: { start: new Date(Date.now() - 24 * 60 * 60 * 1000), end: new Date() }
const totalMs = Math.max(1, end.getTime() - start.getTime())
const calculatedSegmentMs = Math.max(
MIN_SEGMENT_MS,
Math.floor(totalMs / Math.max(1, segmentCount))
)
const logsByWorkflow = new Map<string, WorkflowLog[]>()
for (const log of logs) {
const wfId = log.workflowId
if (!logsByWorkflow.has(wfId)) {
logsByWorkflow.set(wfId, [])
}
logsByWorkflow.get(wfId)!.push(log)
}
const workflowExecutions: WorkflowExecution[] = []
for (const workflow of allWorkflowsList) {
const workflowLogs = logsByWorkflow.get(workflow.id) || []
const segments: WorkflowExecution['segments'] = Array.from(
{ length: segmentCount },
(_, i) => ({
timestamp: new Date(start.getTime() + i * calculatedSegmentMs).toISOString(),
hasExecutions: false,
totalExecutions: 0,
successfulExecutions: 0,
successRate: 100,
avgDurationMs: 0,
})
)
const durations: number[][] = Array.from({ length: segmentCount }, () => [])
for (const log of workflowLogs) {
const logTime = new Date(log.createdAt).getTime()
const idx = Math.min(
segmentCount - 1,
Math.max(0, Math.floor((logTime - start.getTime()) / calculatedSegmentMs))
)
segments[idx].totalExecutions += 1
segments[idx].hasExecutions = true
if (log.level !== 'error') {
segments[idx].successfulExecutions += 1
}
const duration = parseDuration({ duration: log.duration ?? undefined })
if (duration !== null && duration > 0) {
durations[idx].push(duration)
}
}
let totalExecs = 0
let totalSuccess = 0
for (let i = 0; i < segmentCount; i++) {
const seg = segments[i]
totalExecs += seg.totalExecutions
totalSuccess += seg.successfulExecutions
if (seg.totalExecutions > 0) {
seg.successRate = (seg.successfulExecutions / seg.totalExecutions) * 100
}
if (durations[i].length > 0) {
seg.avgDurationMs = Math.round(
durations[i].reduce((sum, d) => sum + d, 0) / durations[i].length
)
}
}
const overallSuccessRate = totalExecs > 0 ? (totalSuccess / totalExecs) * 100 : 100
workflowExecutions.push({
workflowId: workflow.id,
workflowName: workflow.name,
segments,
overallSuccessRate,
})
}
workflowExecutions.sort((a, b) => {
const errA = a.overallSuccessRate < 100 ? 1 - a.overallSuccessRate / 100 : 0
const errB = b.overallSuccessRate < 100 ? 1 - b.overallSuccessRate / 100 : 0
if (errA !== errB) return errB - errA
return a.workflowName.localeCompare(b.workflowName)
})
const aggSegments: {
timestamp: string
totalExecutions: number
successfulExecutions: number
avgDurationMs: number
}[] = Array.from({ length: segmentCount }, (_, i) => ({
timestamp: new Date(start.getTime() + i * calculatedSegmentMs).toISOString(),
totalExecutions: 0,
successfulExecutions: 0,
avgDurationMs: 0,
}))
const weightedDurationSums: number[] = Array(segmentCount).fill(0)
const executionCounts: number[] = Array(segmentCount).fill(0)
for (const wf of workflowExecutions) {
wf.segments.forEach((s, i) => {
aggSegments[i].totalExecutions += s.totalExecutions
aggSegments[i].successfulExecutions += s.successfulExecutions
if (s.avgDurationMs && s.avgDurationMs > 0 && s.totalExecutions > 0) {
weightedDurationSums[i] += s.avgDurationMs * s.totalExecutions
executionCounts[i] += s.totalExecutions
}
})
}
aggSegments.forEach((seg, i) => {
if (executionCounts[i] > 0) {
seg.avgDurationMs = weightedDurationSums[i] / executionCounts[i]
}
})
const workflowExecutions = stats.workflows.map(toWorkflowExecution)
return {
executions: workflowExecutions,
aggregateSegments: aggSegments,
segmentMs: calculatedSegmentMs,
aggregateSegments: stats.aggregateSegments,
segmentMs: stats.segmentMs,
}
}, [logs, timeBounds, segmentCount, allWorkflows])
}, [stats])
const lastExecutionByWorkflow = useMemo(() => {
const map = new Map<string, number>()
for (const wf of executions) {
for (let i = wf.segments.length - 1; i >= 0; i--) {
if (wf.segments[i].totalExecutions > 0) {
map.set(wf.workflowId, new Date(wf.segments[i].timestamp).getTime())
break
}
}
}
return map
}, [executions])
const filteredExecutions = useMemo(() => {
let filtered = executions
@@ -511,37 +376,12 @@ export default function Dashboard({ logs, isLoading, error }: DashboardProps) {
useEffect(() => {
setSelectedSegments({})
setLastAnchorIndices({})
}, [logs, timeRange, workflowIds, searchQuery])
useEffect(() => {
if (!barsAreaRef.current) return
const el = barsAreaRef.current
let debounceId: ReturnType<typeof setTimeout> | null = null
const ro = new ResizeObserver(([entry]) => {
const w = entry?.contentRect?.width || 720
const n = Math.max(36, Math.min(96, Math.floor(w / MIN_SEGMENT_PX)))
if (debounceId) clearTimeout(debounceId)
debounceId = setTimeout(() => {
setSegmentCount(n)
}, 150)
})
ro.observe(el)
const rect = el.getBoundingClientRect()
if (rect?.width) {
const n = Math.max(36, Math.min(96, Math.floor(rect.width / MIN_SEGMENT_PX)))
setSegmentCount(n)
}
return () => {
if (debounceId) clearTimeout(debounceId)
ro.disconnect()
}
}, [])
}, [stats, timeRange, workflowIds, searchQuery])
if (isLoading) {
return <DashboardSkeleton />
}
// Show error state
if (error) {
return (
<div className='mt-[24px] flex flex-1 items-center justify-center'>

View File

@@ -136,7 +136,6 @@ export function NotificationSettings({
levelFilter: ['info', 'error'] as LogLevel[],
triggerFilter: [...CORE_TRIGGER_TYPES] as CoreTriggerType[],
includeFinalOutput: false,
includeTraceSpans: false,
includeRateLimits: false,
includeUsageData: false,
webhookUrl: '',
@@ -203,7 +202,6 @@ export function NotificationSettings({
levelFilter: ['info', 'error'],
triggerFilter: [...CORE_TRIGGER_TYPES],
includeFinalOutput: false,
includeTraceSpans: false,
includeRateLimits: false,
includeUsageData: false,
webhookUrl: '',
@@ -422,7 +420,6 @@ export function NotificationSettings({
levelFilter: formData.levelFilter,
triggerFilter: formData.triggerFilter,
includeFinalOutput: formData.includeFinalOutput,
includeTraceSpans: formData.includeTraceSpans,
includeRateLimits: formData.includeRateLimits,
includeUsageData: formData.includeUsageData,
alertConfig,
@@ -474,7 +471,6 @@ export function NotificationSettings({
levelFilter: subscription.levelFilter as LogLevel[],
triggerFilter: subscription.triggerFilter as CoreTriggerType[],
includeFinalOutput: subscription.includeFinalOutput,
includeTraceSpans: subscription.includeTraceSpans,
includeRateLimits: subscription.includeRateLimits,
includeUsageData: subscription.includeUsageData,
webhookUrl: subscription.webhookConfig?.url || '',
@@ -830,7 +826,6 @@ export function NotificationSettings({
<Combobox
options={[
{ label: 'Final Output', value: 'includeFinalOutput' },
{ label: 'Trace Spans', value: 'includeTraceSpans' },
{ label: 'Rate Limits', value: 'includeRateLimits' },
{ label: 'Usage Data', value: 'includeUsageData' },
]}
@@ -838,7 +833,6 @@ export function NotificationSettings({
multiSelectValues={
[
formData.includeFinalOutput && 'includeFinalOutput',
formData.includeTraceSpans && 'includeTraceSpans',
formData.includeRateLimits && 'includeRateLimits',
formData.includeUsageData && 'includeUsageData',
].filter(Boolean) as string[]
@@ -847,7 +841,6 @@ export function NotificationSettings({
setFormData({
...formData,
includeFinalOutput: values.includes('includeFinalOutput'),
includeTraceSpans: values.includes('includeTraceSpans'),
includeRateLimits: values.includes('includeRateLimits'),
includeUsageData: values.includes('includeUsageData'),
})
@@ -856,13 +849,11 @@ export function NotificationSettings({
overlayContent={(() => {
const labels: Record<string, string> = {
includeFinalOutput: 'Final Output',
includeTraceSpans: 'Trace Spans',
includeRateLimits: 'Rate Limits',
includeUsageData: 'Usage Data',
}
const selected = [
formData.includeFinalOutput && 'includeFinalOutput',
formData.includeTraceSpans && 'includeTraceSpans',
formData.includeRateLimits && 'includeRateLimits',
formData.includeUsageData && 'includeUsageData',
].filter(Boolean) as string[]

View File

@@ -11,7 +11,7 @@ import {
} from '@/lib/logs/filters'
import { parseQuery, queryToApiParams } from '@/lib/logs/query-parser'
import { useFolders } from '@/hooks/queries/folders'
import { useDashboardLogs, useLogDetail, useLogsList } from '@/hooks/queries/logs'
import { useDashboardStats, useLogDetail, useLogsList } from '@/hooks/queries/logs'
import { useDebounce } from '@/hooks/use-debounce'
import { useFilterStore } from '@/stores/logs/filters/store'
import type { WorkflowLog } from '@/stores/logs/filters/types'
@@ -130,7 +130,7 @@ export default function Logs() {
[timeRange, startDate, endDate, level, workflowIds, folderIds, triggers, debouncedSearchQuery]
)
const dashboardLogsQuery = useDashboardLogs(workspaceId, dashboardFilters, {
const dashboardStatsQuery = useDashboardStats(workspaceId, dashboardFilters, {
enabled: Boolean(workspaceId) && isInitialized.current,
refetchInterval: isLive ? 5000 : false,
})
@@ -417,9 +417,9 @@ export default function Logs() {
className={cn('flex min-h-0 flex-1 flex-col pr-[24px]', !isDashboardView && 'hidden')}
>
<Dashboard
logs={dashboardLogsQuery.data ?? []}
isLoading={!dashboardLogsQuery.data}
error={dashboardLogsQuery.error}
stats={dashboardStatsQuery.data}
isLoading={dashboardStatsQuery.isLoading}
error={dashboardStatsQuery.error}
/>
</div>

View File

@@ -1036,6 +1036,7 @@ export function ToolInput({
block.type === 'api' ||
block.type === 'webhook_request' ||
block.type === 'workflow' ||
block.type === 'workflow_input' ||
block.type === 'knowledge' ||
block.type === 'function') &&
block.type !== 'evaluator' &&
@@ -1761,7 +1762,7 @@ export function ToolInput({
iconElement: createToolIcon('#6366F1', WorkflowIcon),
onSelect: () => {
const newTool: StoredTool = {
type: 'workflow',
type: 'workflow_input',
title: 'Workflow',
toolId: 'workflow_executor',
params: {
@@ -2195,9 +2196,10 @@ export function ToolInput({
{/* Selected Tools List */}
{selectedTools.length > 0 &&
selectedTools.map((tool, toolIndex) => {
// Handle custom tools and MCP tools differently
// Handle custom tools, MCP tools, and workflow tools differently
const isCustomTool = tool.type === 'custom-tool'
const isMcpTool = tool.type === 'mcp'
const isWorkflowTool = tool.type === 'workflow'
const toolBlock =
!isCustomTool && !isMcpTool
? toolBlocks.find((block) => block.type === tool.type)
@@ -2323,13 +2325,17 @@ export function ToolInput({
? '#3B82F6'
: isMcpTool
? mcpTool?.bgColor || '#6366F1'
: toolBlock?.bgColor,
: isWorkflowTool
? '#6366F1'
: toolBlock?.bgColor,
}}
>
{isCustomTool ? (
<WrenchIcon className='h-[10px] w-[10px] text-white' />
) : isMcpTool ? (
<IconComponent icon={McpIcon} className='h-[10px] w-[10px] text-white' />
) : isWorkflowTool ? (
<IconComponent icon={WorkflowIcon} className='h-[10px] w-[10px] text-white' />
) : (
<IconComponent
icon={toolBlock?.icon}
@@ -2369,9 +2375,10 @@ export function ToolInput({
</Tooltip.Root>
)
})()}
{tool.type === 'workflow' && tool.params?.workflowId && (
<WorkflowToolDeployBadge workflowId={tool.params.workflowId} />
)}
{(tool.type === 'workflow' || tool.type === 'workflow_input') &&
tool.params?.workflowId && (
<WorkflowToolDeployBadge workflowId={tool.params.workflowId} />
)}
</div>
<div className='flex flex-shrink-0 items-center gap-[8px]'>
{supportsToolControl && !(isMcpTool && isMcpToolUnavailable(tool)) && (

View File

@@ -36,6 +36,8 @@ interface FolderItemProps {
onDragEnter?: (e: React.DragEvent<HTMLElement>) => void
onDragLeave?: (e: React.DragEvent<HTMLElement>) => void
}
onDragStart?: () => void
onDragEnd?: () => void
}
/**
@@ -46,7 +48,13 @@ interface FolderItemProps {
* @param props - Component props
* @returns Folder item with drag and expand support
*/
export function FolderItem({ folder, level, hoverHandlers }: FolderItemProps) {
export function FolderItem({
folder,
level,
hoverHandlers,
onDragStart: onDragStartProp,
onDragEnd: onDragEndProp,
}: FolderItemProps) {
const params = useParams()
const router = useRouter()
const workspaceId = params.workspaceId as string
@@ -135,11 +143,6 @@ export function FolderItem({ folder, level, hoverHandlers }: FolderItemProps) {
}
}, [createFolderMutation, workspaceId, folder.id, expandFolder])
/**
* Drag start handler - sets folder data for drag operation
*
* @param e - React drag event
*/
const onDragStart = useCallback(
(e: React.DragEvent) => {
if (isEditing) {
@@ -149,14 +152,25 @@ export function FolderItem({ folder, level, hoverHandlers }: FolderItemProps) {
e.dataTransfer.setData('folder-id', folder.id)
e.dataTransfer.effectAllowed = 'move'
onDragStartProp?.()
},
[folder.id]
[folder.id, onDragStartProp]
)
const { isDragging, shouldPreventClickRef, handleDragStart, handleDragEnd } = useItemDrag({
const {
isDragging,
shouldPreventClickRef,
handleDragStart,
handleDragEnd: handleDragEndBase,
} = useItemDrag({
onDragStart,
})
const handleDragEnd = useCallback(() => {
handleDragEndBase()
onDragEndProp?.()
}, [handleDragEndBase, onDragEndProp])
const {
isOpen: isContextMenuOpen,
position,

View File

@@ -29,6 +29,8 @@ interface WorkflowItemProps {
active: boolean
level: number
onWorkflowClick: (workflowId: string, shiftKey: boolean, metaKey: boolean) => void
onDragStart?: () => void
onDragEnd?: () => void
}
/**
@@ -38,7 +40,14 @@ interface WorkflowItemProps {
* @param props - Component props
* @returns Workflow item with drag and selection support
*/
export function WorkflowItem({ workflow, active, level, onWorkflowClick }: WorkflowItemProps) {
export function WorkflowItem({
workflow,
active,
level,
onWorkflowClick,
onDragStart: onDragStartProp,
onDragEnd: onDragEndProp,
}: WorkflowItemProps) {
const params = useParams()
const workspaceId = params.workspaceId as string
const { selectedWorkflows } = useFolderStore()
@@ -104,30 +113,7 @@ export function WorkflowItem({ workflow, active, level, onWorkflowClick }: Workf
[workflow.id, updateWorkflow]
)
/**
* Drag start handler - handles workflow dragging with multi-selection support
*
* @param e - React drag event
*/
const onDragStart = useCallback(
(e: React.DragEvent) => {
if (isEditing) {
e.preventDefault()
return
}
const workflowIds =
isSelected && selectedWorkflows.size > 1 ? Array.from(selectedWorkflows) : [workflow.id]
e.dataTransfer.setData('workflow-ids', JSON.stringify(workflowIds))
e.dataTransfer.effectAllowed = 'move'
},
[isSelected, selectedWorkflows, workflow.id]
)
const { isDragging, shouldPreventClickRef, handleDragStart, handleDragEnd } = useItemDrag({
onDragStart,
})
const isEditingRef = useRef(false)
const {
isOpen: isContextMenuOpen,
@@ -232,6 +218,43 @@ export function WorkflowItem({ workflow, active, level, onWorkflowClick }: Workf
itemId: workflow.id,
})
isEditingRef.current = isEditing
const onDragStart = useCallback(
(e: React.DragEvent) => {
if (isEditingRef.current) {
e.preventDefault()
return
}
const currentSelection = useFolderStore.getState().selectedWorkflows
const isCurrentlySelected = currentSelection.has(workflow.id)
const workflowIds =
isCurrentlySelected && currentSelection.size > 1
? Array.from(currentSelection)
: [workflow.id]
e.dataTransfer.setData('workflow-ids', JSON.stringify(workflowIds))
e.dataTransfer.effectAllowed = 'move'
onDragStartProp?.()
},
[workflow.id, onDragStartProp]
)
const {
isDragging,
shouldPreventClickRef,
handleDragStart,
handleDragEnd: handleDragEndBase,
} = useItemDrag({
onDragStart,
})
const handleDragEnd = useCallback(() => {
handleDragEndBase()
onDragEndProp?.()
}, [handleDragEndBase, onDragEndProp])
/**
* Handle double-click on workflow name to enter rename mode
*/

View File

@@ -1,6 +1,6 @@
'use client'
import { useCallback, useEffect, useMemo } from 'react'
import { memo, useCallback, useEffect, useMemo } from 'react'
import clsx from 'clsx'
import { useParams, usePathname } from 'next/navigation'
import { FolderItem } from '@/app/workspace/[workspaceId]/w/components/sidebar/components/workflow-list/components/folder-item/folder-item'
@@ -14,9 +14,6 @@ import { useFolderStore } from '@/stores/folders/store'
import type { FolderTreeNode } from '@/stores/folders/types'
import type { WorkflowMetadata } from '@/stores/workflows/registry/types'
/**
* Constants for tree layout and styling
*/
const TREE_SPACING = {
INDENT_PER_LEVEL: 20,
} as const
@@ -29,12 +26,24 @@ interface WorkflowListProps {
scrollContainerRef: React.RefObject<HTMLDivElement | null>
}
/**
* WorkflowList component displays workflows organized by folders with drag-and-drop support.
*
* @param props - Component props
* @returns Workflow list with folders and drag-drop support
*/
const DropIndicatorLine = memo(function DropIndicatorLine({
show,
level = 0,
}: {
show: boolean
level?: number
}) {
if (!show) return null
return (
<div
className='pointer-events-none absolute right-0 left-0 z-20 flex items-center'
style={{ paddingLeft: `${level * TREE_SPACING.INDENT_PER_LEVEL}px` }}
>
<div className='h-[2px] flex-1 rounded-full bg-[#33b4ff]/70' />
</div>
)
})
export function WorkflowList({
regularWorkflows,
isLoading = false,
@@ -48,20 +57,21 @@ export function WorkflowList({
const workflowId = params.workflowId as string
const { isLoading: foldersLoading } = useFolders(workspaceId)
const { getFolderTree, expandedFolders, getFolderPath, setExpanded } = useFolderStore()
const {
dropTargetId,
dropIndicator,
isDragging,
setScrollContainer,
createWorkflowDragHandlers,
createFolderDragHandlers,
createItemDragHandlers,
createRootDragHandlers,
createFolderHeaderHoverHandlers,
createEmptyFolderDropZone,
createFolderContentDropZone,
createRootDropZone,
handleDragStart,
handleDragEnd,
} = useDragDrop()
// Set scroll container when ref changes
useEffect(() => {
if (scrollContainerRef.current) {
setScrollContainer(scrollContainerRef.current)
@@ -76,23 +86,22 @@ export function WorkflowList({
return activeWorkflow?.folderId || null
}, [workflowId, regularWorkflows, isLoading, foldersLoading])
const workflowsByFolder = useMemo(
() =>
regularWorkflows.reduce(
(acc, workflow) => {
const folderId = workflow.folderId || 'root'
if (!acc[folderId]) acc[folderId] = []
acc[folderId].push(workflow)
return acc
},
{} as Record<string, WorkflowMetadata[]>
),
[regularWorkflows]
)
const workflowsByFolder = useMemo(() => {
const grouped = regularWorkflows.reduce(
(acc, workflow) => {
const folderId = workflow.folderId || 'root'
if (!acc[folderId]) acc[folderId] = []
acc[folderId].push(workflow)
return acc
},
{} as Record<string, WorkflowMetadata[]>
)
for (const folderId of Object.keys(grouped)) {
grouped[folderId].sort((a, b) => a.sortOrder - b.sortOrder)
}
return grouped
}, [regularWorkflows])
/**
* Build a flat list of all workflow IDs in display order for range selection
*/
const orderedWorkflowIds = useMemo(() => {
const ids: string[] = []
@@ -106,12 +115,10 @@ export function WorkflowList({
}
}
// Collect from folders first
for (const folder of folderTree) {
collectWorkflowIds(folder)
}
// Then collect root workflows
const rootWorkflows = workflowsByFolder.root || []
for (const workflow of rootWorkflows) {
ids.push(workflow.id)
@@ -120,30 +127,24 @@ export function WorkflowList({
return ids
}, [folderTree, workflowsByFolder])
// Workflow selection hook - uses active workflow ID as anchor for range selection
const { handleWorkflowClick } = useWorkflowSelection({
workflowIds: orderedWorkflowIds,
activeWorkflowId: workflowId,
})
const isWorkflowActive = useCallback(
(workflowId: string) => pathname === `/workspace/${workspaceId}/w/${workflowId}`,
(wfId: string) => pathname === `/workspace/${workspaceId}/w/${wfId}`,
[pathname, workspaceId]
)
/**
* Auto-expand folders and select active workflow.
*/
useEffect(() => {
if (!workflowId || isLoading || foldersLoading) return
// Expand folder path to reveal workflow
if (activeWorkflowFolderId) {
const folderPath = getFolderPath(activeWorkflowFolderId)
folderPath.forEach((folder) => setExpanded(folder.id, true))
}
// Select workflow if not already selected
const { selectedWorkflows, selectOnly } = useFolderStore.getState()
if (!selectedWorkflows.has(workflowId)) {
selectOnly(workflowId)
@@ -151,23 +152,40 @@ export function WorkflowList({
}, [workflowId, activeWorkflowFolderId, isLoading, foldersLoading, getFolderPath, setExpanded])
const renderWorkflowItem = useCallback(
(workflow: WorkflowMetadata, level: number, parentFolderId: string | null = null) => (
<div key={workflow.id} className='relative' {...createItemDragHandlers(parentFolderId)}>
<div
style={{
paddingLeft: `${level * TREE_SPACING.INDENT_PER_LEVEL}px`,
}}
>
<WorkflowItem
workflow={workflow}
active={isWorkflowActive(workflow.id)}
level={level}
onWorkflowClick={handleWorkflowClick}
/>
(workflow: WorkflowMetadata, level: number, folderId: string | null = null) => {
const showBefore =
dropIndicator?.targetId === workflow.id && dropIndicator?.position === 'before'
const showAfter =
dropIndicator?.targetId === workflow.id && dropIndicator?.position === 'after'
return (
<div key={workflow.id} className='relative'>
<DropIndicatorLine show={showBefore} level={level} />
<div
style={{ paddingLeft: `${level * TREE_SPACING.INDENT_PER_LEVEL}px` }}
{...createWorkflowDragHandlers(workflow.id, folderId)}
>
<WorkflowItem
workflow={workflow}
active={isWorkflowActive(workflow.id)}
level={level}
onWorkflowClick={handleWorkflowClick}
onDragStart={() => handleDragStart('workflow', folderId)}
onDragEnd={handleDragEnd}
/>
</div>
<DropIndicatorLine show={showAfter} level={level} />
</div>
</div>
),
[isWorkflowActive, createItemDragHandlers, handleWorkflowClick]
)
},
[
dropIndicator,
isWorkflowActive,
createWorkflowDragHandlers,
handleWorkflowClick,
handleDragStart,
handleDragEnd,
]
)
const renderFolderSection = useCallback(
@@ -179,45 +197,75 @@ export function WorkflowList({
const workflowsInFolder = workflowsByFolder[folder.id] || []
const isExpanded = expandedFolders.has(folder.id)
const hasChildren = workflowsInFolder.length > 0 || folder.children.length > 0
const isDropTarget = dropTargetId === folder.id
const showBefore =
dropIndicator?.targetId === folder.id && dropIndicator?.position === 'before'
const showAfter = dropIndicator?.targetId === folder.id && dropIndicator?.position === 'after'
const showInside =
dropIndicator?.targetId === folder.id && dropIndicator?.position === 'inside'
const childItems: Array<{
type: 'folder' | 'workflow'
id: string
sortOrder: number
data: FolderTreeNode | WorkflowMetadata
}> = []
for (const childFolder of folder.children) {
childItems.push({
type: 'folder',
id: childFolder.id,
sortOrder: childFolder.sortOrder,
data: childFolder,
})
}
for (const workflow of workflowsInFolder) {
childItems.push({
type: 'workflow',
id: workflow.id,
sortOrder: workflow.sortOrder,
data: workflow,
})
}
childItems.sort((a, b) => a.sortOrder - b.sortOrder)
return (
<div key={folder.id} className='relative' {...createFolderDragHandlers(folder.id)}>
{/* Drop target highlight overlay - always rendered for stable DOM */}
<div key={folder.id} className='relative'>
<DropIndicatorLine show={showBefore} level={level} />
{/* Drop target highlight overlay - covers entire folder section */}
<div
className={clsx(
'pointer-events-none absolute inset-0 z-10 rounded-[4px] transition-opacity duration-75',
isDropTarget && isDragging ? 'bg-gray-400/20 opacity-100' : 'opacity-0'
showInside && isDragging ? 'bg-[#33b4ff1a] opacity-100' : 'opacity-0'
)}
/>
<div
style={{ paddingLeft: `${level * TREE_SPACING.INDENT_PER_LEVEL}px` }}
{...createItemDragHandlers(folder.id)}
{...createFolderDragHandlers(folder.id, parentFolderId)}
>
<FolderItem
folder={folder}
level={level}
hoverHandlers={createFolderHeaderHoverHandlers(folder.id)}
onDragStart={() => handleDragStart('folder', parentFolderId)}
onDragEnd={handleDragEnd}
/>
</div>
<DropIndicatorLine show={showAfter} level={level} />
{isExpanded && hasChildren && (
<div className='relative' {...createItemDragHandlers(folder.id)}>
{/* Vertical line - positioned to align under folder chevron */}
{isExpanded && (hasChildren || isDragging) && (
<div className='relative' {...createFolderContentDropZone(folder.id)}>
<div
className='pointer-events-none absolute top-0 bottom-0 w-px bg-[var(--border)]'
style={{ left: `${level * TREE_SPACING.INDENT_PER_LEVEL + 12}px` }}
/>
<div className='mt-[2px] space-y-[2px] pl-[2px]'>
{workflowsInFolder.map((workflow: WorkflowMetadata) =>
renderWorkflowItem(workflow, level + 1, folder.id)
{childItems.map((item) =>
item.type === 'folder'
? renderFolderSection(item.data as FolderTreeNode, level + 1, folder.id)
: renderWorkflowItem(item.data as WorkflowMetadata, level + 1, folder.id)
)}
{!hasChildren && isDragging && (
<div className='h-[24px]' {...createEmptyFolderDropZone(folder.id)} />
)}
{folder.children.map((childFolder) => (
<div key={childFolder.id} className='relative'>
{renderFolderSection(childFolder, level + 1, folder.id)}
</div>
))}
</div>
</div>
)}
@@ -227,29 +275,47 @@ export function WorkflowList({
[
workflowsByFolder,
expandedFolders,
dropTargetId,
dropIndicator,
isDragging,
createFolderDragHandlers,
createItemDragHandlers,
createFolderHeaderHoverHandlers,
createEmptyFolderDropZone,
createFolderContentDropZone,
handleDragStart,
handleDragEnd,
renderWorkflowItem,
]
)
const handleRootDragEvents = createRootDragHandlers()
const rootDropZoneHandlers = createRootDropZone()
const rootWorkflows = workflowsByFolder.root || []
const isRootDropTarget = dropTargetId === 'root'
const hasRootWorkflows = rootWorkflows.length > 0
const hasFolders = folderTree.length > 0
/**
* Handle click on empty space to revert to active workflow selection
*/
const rootItems = useMemo(() => {
const items: Array<{
type: 'folder' | 'workflow'
id: string
sortOrder: number
data: FolderTreeNode | WorkflowMetadata
}> = []
for (const folder of folderTree) {
items.push({ type: 'folder', id: folder.id, sortOrder: folder.sortOrder, data: folder })
}
for (const workflow of rootWorkflows) {
items.push({
type: 'workflow',
id: workflow.id,
sortOrder: workflow.sortOrder,
data: workflow,
})
}
return items.sort((a, b) => a.sortOrder - b.sortOrder)
}, [folderTree, rootWorkflows])
const hasRootItems = rootItems.length > 0
const showRootInside = dropIndicator?.targetId === 'root' && dropIndicator?.position === 'inside'
const handleContainerClick = useCallback(
(e: React.MouseEvent<HTMLDivElement>) => {
// Only handle clicks directly on the container (empty space)
if (e.target !== e.currentTarget) return
const { selectOnly, clearSelection } = useFolderStore.getState()
workflowId ? selectOnly(workflowId) : clearSelection()
},
@@ -258,36 +324,23 @@ export function WorkflowList({
return (
<div className='flex min-h-full flex-col pb-[8px]' onClick={handleContainerClick}>
{/* Folders Section */}
{hasFolders && (
<div className='mb-[2px] space-y-[2px]'>
{folderTree.map((folder) => renderFolderSection(folder, 0))}
</div>
)}
{/* Root Workflows Section - Expands to fill remaining space */}
<div
className={clsx('relative flex-1', !hasRootWorkflows && 'min-h-[26px]')}
{...handleRootDragEvents}
className={clsx('relative flex-1 rounded-[4px]', !hasRootItems && 'min-h-[26px]')}
{...rootDropZoneHandlers}
>
{/* Root drop target highlight overlay - always rendered for stable DOM */}
{/* Root drop target highlight overlay */}
<div
className={clsx(
'pointer-events-none absolute inset-0 z-10 rounded-[4px] transition-opacity duration-75',
isRootDropTarget && isDragging ? 'bg-gray-400/20 opacity-100' : 'opacity-0'
showRootInside && isDragging ? 'bg-[#33b4ff1a] opacity-100' : 'opacity-0'
)}
/>
<div className='space-y-[2px]'>
{rootWorkflows.map((workflow: WorkflowMetadata) => (
<WorkflowItem
key={workflow.id}
workflow={workflow}
active={isWorkflowActive(workflow.id)}
level={0}
onWorkflowClick={handleWorkflowClick}
/>
))}
{rootItems.map((item) =>
item.type === 'folder'
? renderFolderSection(item.data as FolderTreeNode, 0, null)
: renderWorkflowItem(item.data as WorkflowMetadata, 0, null)
)}
</div>
</div>
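
Note that `rootItems` and the per-folder `childItems` place folders and workflows into a single `sortOrder` space, so a workflow can sit above or below a folder at the same level. A small worked example of that merge (sample IDs are made up):

```typescript
// Illustrative only: folders and workflows at the same level are merged into
// one list and ordered by a shared sortOrder, mirroring childItems/rootItems.
type Item = { type: 'folder' | 'workflow'; id: string; sortOrder: number }

const folders: Item[] = [{ type: 'folder', id: 'archive', sortOrder: 2 }]
const workflows: Item[] = [
  { type: 'workflow', id: 'onboarding', sortOrder: 0 },
  { type: 'workflow', id: 'billing-sync', sortOrder: 1 },
]

const rootItems = [...folders, ...workflows].sort((a, b) => a.sortOrder - b.sortOrder)
console.log(rootItems.map((i) => i.id)) // ['onboarding', 'billing-sync', 'archive']
```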

View File

@@ -1,6 +1,6 @@
export { useAutoScroll } from './use-auto-scroll'
export { useContextMenu } from './use-context-menu'
export { useDragDrop } from './use-drag-drop'
export { type DropIndicator, useDragDrop } from './use-drag-drop'
export { useFolderExpand } from './use-folder-expand'
export { useFolderOperations } from './use-folder-operations'
export { useItemDrag } from './use-item-drag'

View File

@@ -1,47 +1,40 @@
import { useCallback, useEffect, useRef, useState } from 'react'
import { createLogger } from '@sim/logger'
import { useParams } from 'next/navigation'
import { useUpdateFolder } from '@/hooks/queries/folders'
import { useReorderFolders } from '@/hooks/queries/folders'
import { useReorderWorkflows } from '@/hooks/queries/workflows'
import { useFolderStore } from '@/stores/folders/store'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
const logger = createLogger('WorkflowList:DragDrop')
/**
* Constants for auto-scroll behavior
*/
const SCROLL_THRESHOLD = 60 // Distance from edge to trigger scroll
const SCROLL_SPEED = 8 // Pixels per frame
const SCROLL_THRESHOLD = 60
const SCROLL_SPEED = 8
const HOVER_EXPAND_DELAY = 400
/**
* Constants for folder auto-expand on hover during drag
*/
const HOVER_EXPAND_DELAY = 400 // Milliseconds to wait before expanding folder
export interface DropIndicator {
targetId: string
position: 'before' | 'after' | 'inside'
folderId: string | null
}
/**
* Custom hook for handling drag and drop operations for workflows and folders.
* Includes auto-scrolling, drop target highlighting, and hover-to-expand.
*
* @returns Drag and drop state and event handlers
*/
export function useDragDrop() {
const [dropTargetId, setDropTargetId] = useState<string | null>(null)
const [dropIndicator, setDropIndicator] = useState<DropIndicator | null>(null)
const [isDragging, setIsDragging] = useState(false)
const [hoverFolderId, setHoverFolderId] = useState<string | null>(null)
const scrollContainerRef = useRef<HTMLDivElement | null>(null)
const scrollIntervalRef = useRef<number | null>(null)
const hoverExpandTimerRef = useRef<number | null>(null)
const lastDragYRef = useRef<number>(0)
const draggedTypeRef = useRef<'workflow' | 'folder' | null>(null)
const draggedSourceFolderRef = useRef<string | null>(null)
const params = useParams()
const workspaceId = params.workspaceId as string | undefined
const updateFolderMutation = useUpdateFolder()
const reorderWorkflowsMutation = useReorderWorkflows()
const reorderFoldersMutation = useReorderFolders()
const { setExpanded, expandedFolders } = useFolderStore()
const { updateWorkflow } = useWorkflowRegistry()
/**
* Auto-scroll handler - scrolls container when dragging near edges
*/
const handleAutoScroll = useCallback(() => {
if (!scrollContainerRef.current || !isDragging) return
@@ -49,22 +42,17 @@ export function useDragDrop() {
const rect = container.getBoundingClientRect()
const mouseY = lastDragYRef.current
// Only scroll if mouse is within container bounds
if (mouseY < rect.top || mouseY > rect.bottom) return
// Calculate distance from top and bottom edges
const distanceFromTop = mouseY - rect.top
const distanceFromBottom = rect.bottom - mouseY
let scrollDelta = 0
// Scroll up if near top and not at scroll top
if (distanceFromTop < SCROLL_THRESHOLD && container.scrollTop > 0) {
const intensity = Math.max(0, Math.min(1, 1 - distanceFromTop / SCROLL_THRESHOLD))
scrollDelta = -SCROLL_SPEED * intensity
}
// Scroll down if near bottom and not at scroll bottom
else if (distanceFromBottom < SCROLL_THRESHOLD) {
} else if (distanceFromBottom < SCROLL_THRESHOLD) {
const maxScroll = container.scrollHeight - container.clientHeight
if (container.scrollTop < maxScroll) {
const intensity = Math.max(0, Math.min(1, 1 - distanceFromBottom / SCROLL_THRESHOLD))
@@ -77,12 +65,9 @@ export function useDragDrop() {
}
}, [isDragging])
/**
* Start auto-scroll animation loop
*/
useEffect(() => {
if (isDragging) {
scrollIntervalRef.current = window.setInterval(handleAutoScroll, 10) // ~100fps for smoother response
scrollIntervalRef.current = window.setInterval(handleAutoScroll, 10)
} else {
if (scrollIntervalRef.current) {
clearInterval(scrollIntervalRef.current)
@@ -97,30 +82,17 @@ export function useDragDrop() {
}
}, [isDragging, handleAutoScroll])
/**
* Handle hover folder changes - start/clear expand timer
*/
useEffect(() => {
// Clear existing timer when hover folder changes
if (hoverExpandTimerRef.current) {
clearTimeout(hoverExpandTimerRef.current)
hoverExpandTimerRef.current = null
}
// Don't start timer if not dragging or no folder is hovered
if (!isDragging || !hoverFolderId) {
return
}
if (!isDragging || !hoverFolderId) return
if (expandedFolders.has(hoverFolderId)) return
// Don't expand if folder is already expanded
if (expandedFolders.has(hoverFolderId)) {
return
}
// Start timer to expand folder after delay
hoverExpandTimerRef.current = window.setTimeout(() => {
setExpanded(hoverFolderId, true)
logger.info(`Auto-expanded folder ${hoverFolderId} during drag`)
}, HOVER_EXPAND_DELAY)
return () => {
@@ -131,249 +103,471 @@ export function useDragDrop() {
}
}, [hoverFolderId, isDragging, expandedFolders, setExpanded])
/**
* Cleanup hover state when dragging stops
*/
useEffect(() => {
if (!isDragging) {
setHoverFolderId(null)
setDropIndicator(null)
draggedTypeRef.current = null
}
}, [isDragging])
/**
* Moves one or more workflows to a target folder
*
* @param workflowIds - Array of workflow IDs to move
* @param targetFolderId - Target folder ID or null for root
*/
const handleWorkflowDrop = useCallback(
async (workflowIds: string[], targetFolderId: string | null) => {
if (!workflowIds.length) {
logger.warn('No workflows to move')
return
}
try {
await Promise.all(
workflowIds.map((workflowId) => updateWorkflow(workflowId, { folderId: targetFolderId }))
)
logger.info(`Moved ${workflowIds.length} workflow(s)`)
} catch (error) {
logger.error('Failed to move workflows:', error)
}
const calculateDropPosition = useCallback(
(e: React.DragEvent, element: HTMLElement): 'before' | 'after' => {
const rect = element.getBoundingClientRect()
const midY = rect.top + rect.height / 2
return e.clientY < midY ? 'before' : 'after'
},
[updateWorkflow]
[]
)
/**
* Moves a folder to a new parent folder, with validation
*
* @param draggedFolderId - ID of the folder being moved
* @param targetFolderId - Target folder ID or null for root
*/
const handleFolderMove = useCallback(
async (draggedFolderId: string, targetFolderId: string | null) => {
if (!draggedFolderId) {
logger.warn('No folder to move')
return
const calculateFolderDropPosition = useCallback(
(e: React.DragEvent, element: HTMLElement): 'before' | 'inside' | 'after' => {
const rect = element.getBoundingClientRect()
const relativeY = e.clientY - rect.top
const height = rect.height
// Top 25% = before, middle 50% = inside, bottom 25% = after
if (relativeY < height * 0.25) return 'before'
if (relativeY > height * 0.75) return 'after'
return 'inside'
},
[]
)
type SiblingItem = { type: 'folder' | 'workflow'; id: string; sortOrder: number }
const getDestinationFolderId = useCallback((indicator: DropIndicator): string | null => {
return indicator.position === 'inside'
? indicator.targetId === 'root'
? null
: indicator.targetId
: indicator.folderId
}, [])
const calculateInsertIndex = useCallback(
(remaining: SiblingItem[], indicator: DropIndicator): number => {
return indicator.position === 'inside'
? remaining.length
: remaining.findIndex((item) => item.id === indicator.targetId) +
(indicator.position === 'after' ? 1 : 0)
},
[]
)
const buildAndSubmitUpdates = useCallback(
async (newOrder: SiblingItem[], destinationFolderId: string | null) => {
const indexed = newOrder.map((item, i) => ({ ...item, sortOrder: i }))
const folderUpdates = indexed
.filter((item) => item.type === 'folder')
.map((item) => ({ id: item.id, sortOrder: item.sortOrder, parentId: destinationFolderId }))
const workflowUpdates = indexed
.filter((item) => item.type === 'workflow')
.map((item) => ({ id: item.id, sortOrder: item.sortOrder, folderId: destinationFolderId }))
await Promise.all(
[
folderUpdates.length > 0 &&
reorderFoldersMutation.mutateAsync({
workspaceId: workspaceId!,
updates: folderUpdates,
}),
workflowUpdates.length > 0 &&
reorderWorkflowsMutation.mutateAsync({
workspaceId: workspaceId!,
updates: workflowUpdates,
}),
].filter(Boolean)
)
},
[workspaceId, reorderFoldersMutation, reorderWorkflowsMutation]
)
const isLeavingElement = useCallback((e: React.DragEvent<HTMLElement>): boolean => {
const relatedTarget = e.relatedTarget as HTMLElement | null
const currentTarget = e.currentTarget as HTMLElement
return !relatedTarget || !currentTarget.contains(relatedTarget)
}, [])
const initDragOver = useCallback((e: React.DragEvent<HTMLElement>, stopPropagation = true) => {
e.preventDefault()
if (stopPropagation) e.stopPropagation()
lastDragYRef.current = e.clientY
setIsDragging(true)
}, [])
const getSiblingItems = useCallback((folderId: string | null): SiblingItem[] => {
const currentFolders = useFolderStore.getState().folders
const currentWorkflows = useWorkflowRegistry.getState().workflows
return [
...Object.values(currentFolders)
.filter((f) => f.parentId === folderId)
.map((f) => ({ type: 'folder' as const, id: f.id, sortOrder: f.sortOrder })),
...Object.values(currentWorkflows)
.filter((w) => w.folderId === folderId)
.map((w) => ({ type: 'workflow' as const, id: w.id, sortOrder: w.sortOrder })),
].sort((a, b) => a.sortOrder - b.sortOrder)
}, [])
const setNormalizedDropIndicator = useCallback(
(indicator: DropIndicator | null) => {
setDropIndicator((prev) => {
let next: DropIndicator | null = indicator
if (indicator && indicator.position === 'after' && indicator.targetId !== 'root') {
const siblings = getSiblingItems(indicator.folderId)
const currentIdx = siblings.findIndex((s) => s.id === indicator.targetId)
const nextSibling = siblings[currentIdx + 1]
if (nextSibling) {
next = {
targetId: nextSibling.id,
position: 'before',
folderId: indicator.folderId,
}
}
}
if (
prev?.targetId === next?.targetId &&
prev?.position === next?.position &&
prev?.folderId === next?.folderId
) {
return prev
}
return next
})
},
[getSiblingItems]
)
const isNoOpMove = useCallback(
(
indicator: DropIndicator,
draggedIds: string[],
draggedType: 'folder' | 'workflow',
destinationFolderId: string | null,
currentFolderId: string | null | undefined
): boolean => {
if (indicator.position !== 'inside' && draggedIds.includes(indicator.targetId)) {
return true
}
if (currentFolderId !== destinationFolderId) {
return false
}
const siblingItems = getSiblingItems(destinationFolderId)
const remaining = siblingItems.filter(
(item) => !(item.type === draggedType && draggedIds.includes(item.id))
)
const insertAt = calculateInsertIndex(remaining, indicator)
const originalIdx = siblingItems.findIndex(
(item) => item.type === draggedType && item.id === draggedIds[0]
)
return insertAt === originalIdx
},
[getSiblingItems, calculateInsertIndex]
)
const handleWorkflowDrop = useCallback(
async (workflowIds: string[], indicator: DropIndicator) => {
if (!workflowIds.length || !workspaceId) return
try {
const folderStore = useFolderStore.getState()
const draggedFolderPath = folderStore.getFolderPath(draggedFolderId)
const destinationFolderId = getDestinationFolderId(indicator)
const currentWorkflows = useWorkflowRegistry.getState().workflows
const firstWorkflow = currentWorkflows[workflowIds[0]]
// Prevent moving folder into its own descendant
if (
targetFolderId &&
draggedFolderPath.some((ancestor) => ancestor.id === targetFolderId)
isNoOpMove(
indicator,
workflowIds,
'workflow',
destinationFolderId,
firstWorkflow?.folderId
)
) {
logger.info('Cannot move folder into its own descendant')
return
}
// Prevent moving folder into itself
if (draggedFolderId === targetFolderId) {
const siblingItems = getSiblingItems(destinationFolderId)
const movingSet = new Set(workflowIds)
const remaining = siblingItems.filter(
(item) => !(item.type === 'workflow' && movingSet.has(item.id))
)
const moving = workflowIds
.map((id) => ({
type: 'workflow' as const,
id,
sortOrder: currentWorkflows[id]?.sortOrder ?? 0,
}))
.sort((a, b) => a.sortOrder - b.sortOrder)
const insertAt = calculateInsertIndex(remaining, indicator)
const newOrder: SiblingItem[] = [
...remaining.slice(0, insertAt),
...moving,
...remaining.slice(insertAt),
]
await buildAndSubmitUpdates(newOrder, destinationFolderId)
} catch (error) {
logger.error('Failed to reorder workflows:', error)
}
},
[
getDestinationFolderId,
getSiblingItems,
calculateInsertIndex,
isNoOpMove,
buildAndSubmitUpdates,
]
)
const handleFolderDrop = useCallback(
async (draggedFolderId: string, indicator: DropIndicator) => {
if (!draggedFolderId || !workspaceId) return
try {
const folderStore = useFolderStore.getState()
const currentFolders = folderStore.folders
const targetParentId = getDestinationFolderId(indicator)
if (draggedFolderId === targetParentId) {
logger.info('Cannot move folder into itself')
return
}
if (!workspaceId) {
logger.warn('No workspaceId available for folder move')
if (targetParentId) {
const targetPath = folderStore.getFolderPath(targetParentId)
if (targetPath.some((f) => f.id === draggedFolderId)) {
logger.info('Cannot move folder into its own descendant')
return
}
}
const draggedFolder = currentFolders[draggedFolderId]
if (
isNoOpMove(
indicator,
[draggedFolderId],
'folder',
targetParentId,
draggedFolder?.parentId
)
) {
return
}
await updateFolderMutation.mutateAsync({
workspaceId,
id: draggedFolderId,
updates: { parentId: targetFolderId },
})
logger.info(`Moved folder to ${targetFolderId ? `folder ${targetFolderId}` : 'root'}`)
const siblingItems = getSiblingItems(targetParentId)
const remaining = siblingItems.filter(
(item) => !(item.type === 'folder' && item.id === draggedFolderId)
)
const insertAt = calculateInsertIndex(remaining, indicator)
const newOrder: SiblingItem[] = [
...remaining.slice(0, insertAt),
{ type: 'folder', id: draggedFolderId, sortOrder: 0 },
...remaining.slice(insertAt),
]
await buildAndSubmitUpdates(newOrder, targetParentId)
} catch (error) {
logger.error('Failed to move folder:', error)
logger.error('Failed to reorder folder:', error)
}
},
[updateFolderMutation, workspaceId]
[
workspaceId,
getDestinationFolderId,
getSiblingItems,
calculateInsertIndex,
isNoOpMove,
buildAndSubmitUpdates,
]
)
/**
* Handles drop events for both workflows and folders
*
* @param e - React drag event
* @param targetFolderId - Target folder ID or null for root
*/
const handleFolderDrop = useCallback(
async (e: React.DragEvent, targetFolderId: string | null) => {
const handleDrop = useCallback(
async (e: React.DragEvent) => {
e.preventDefault()
e.stopPropagation()
setDropTargetId(null)
const indicator = dropIndicator
setDropIndicator(null)
setIsDragging(false)
if (!indicator) return
try {
// Check if dropping workflows
const workflowIdsData = e.dataTransfer.getData('workflow-ids')
if (workflowIdsData) {
const workflowIds = JSON.parse(workflowIdsData) as string[]
await handleWorkflowDrop(workflowIds, targetFolderId)
await handleWorkflowDrop(workflowIds, indicator)
return
}
// Check if dropping a folder
const folderIdData = e.dataTransfer.getData('folder-id')
if (folderIdData && targetFolderId !== folderIdData) {
await handleFolderMove(folderIdData, targetFolderId)
if (folderIdData) {
await handleFolderDrop(folderIdData, indicator)
}
} catch (error) {
logger.error('Failed to handle drop:', error)
}
},
[handleWorkflowDrop, handleFolderMove]
[dropIndicator, handleWorkflowDrop, handleFolderDrop]
)
/**
* Creates drag event handlers for a specific folder section
* These handlers are attached to the entire folder section container
*
* @param folderId - Folder ID to create handlers for
* @returns Object containing drag event handlers
*/
const createFolderDragHandlers = useCallback(
(folderId: string) => ({
onDragEnter: (e: React.DragEvent<HTMLElement>) => {
e.preventDefault()
setIsDragging(true)
},
const createWorkflowDragHandlers = useCallback(
(workflowId: string, folderId: string | null) => ({
onDragOver: (e: React.DragEvent<HTMLElement>) => {
e.preventDefault()
lastDragYRef.current = e.clientY
setDropTargetId(folderId)
setIsDragging(true)
},
onDragLeave: (e: React.DragEvent<HTMLElement>) => {
e.preventDefault()
const relatedTarget = e.relatedTarget as HTMLElement | null
const currentTarget = e.currentTarget as HTMLElement
// Only clear if we're leaving the folder section completely
if (!relatedTarget || !currentTarget.contains(relatedTarget)) {
setDropTargetId(null)
initDragOver(e)
const isSameFolder = draggedSourceFolderRef.current === folderId
if (isSameFolder) {
const position = calculateDropPosition(e, e.currentTarget)
setNormalizedDropIndicator({ targetId: workflowId, position, folderId })
} else {
setNormalizedDropIndicator({
targetId: folderId || 'root',
position: 'inside',
folderId: null,
})
}
},
onDrop: (e: React.DragEvent<HTMLElement>) => handleFolderDrop(e, folderId),
onDrop: handleDrop,
}),
[handleFolderDrop]
[initDragOver, calculateDropPosition, setNormalizedDropIndicator, handleDrop]
)
/**
* Creates drag event handlers for items (workflows/folders) that belong to a parent folder
* When dragging over an item, highlights the parent folder section
*
* @param parentFolderId - Parent folder ID or null for root
* @returns Object containing drag event handlers
*/
const createItemDragHandlers = useCallback(
(parentFolderId: string | null) => ({
const createFolderDragHandlers = useCallback(
(folderId: string, parentFolderId: string | null) => ({
onDragOver: (e: React.DragEvent<HTMLElement>) => {
initDragOver(e)
if (draggedTypeRef.current === 'folder') {
const isSameParent = draggedSourceFolderRef.current === parentFolderId
if (isSameParent) {
const position = calculateDropPosition(e, e.currentTarget)
setNormalizedDropIndicator({ targetId: folderId, position, folderId: parentFolderId })
} else {
setNormalizedDropIndicator({
targetId: folderId,
position: 'inside',
folderId: parentFolderId,
})
setHoverFolderId(folderId)
}
} else {
// Workflow being dragged over a folder
const isSameParent = draggedSourceFolderRef.current === parentFolderId
if (isSameParent) {
// Same level - use three zones: top=before, middle=inside, bottom=after
const position = calculateFolderDropPosition(e, e.currentTarget)
setNormalizedDropIndicator({ targetId: folderId, position, folderId: parentFolderId })
if (position === 'inside') {
setHoverFolderId(folderId)
} else {
setHoverFolderId(null)
}
} else {
// Different container - drop into folder
setNormalizedDropIndicator({
targetId: folderId,
position: 'inside',
folderId: parentFolderId,
})
setHoverFolderId(folderId)
}
}
},
onDragLeave: (e: React.DragEvent<HTMLElement>) => {
if (isLeavingElement(e)) setHoverFolderId(null)
},
onDrop: handleDrop,
}),
[
initDragOver,
calculateDropPosition,
calculateFolderDropPosition,
setNormalizedDropIndicator,
isLeavingElement,
handleDrop,
]
)
const createEmptyFolderDropZone = useCallback(
(folderId: string) => ({
onDragOver: (e: React.DragEvent<HTMLElement>) => {
initDragOver(e)
setNormalizedDropIndicator({ targetId: folderId, position: 'inside', folderId })
},
onDrop: handleDrop,
}),
[initDragOver, setNormalizedDropIndicator, handleDrop]
)
const createFolderContentDropZone = useCallback(
(folderId: string) => ({
onDragOver: (e: React.DragEvent<HTMLElement>) => {
e.preventDefault()
e.stopPropagation()
lastDragYRef.current = e.clientY
setDropTargetId(parentFolderId || 'root')
setIsDragging(true)
if (e.target === e.currentTarget && draggedSourceFolderRef.current !== folderId) {
setNormalizedDropIndicator({ targetId: folderId, position: 'inside', folderId: null })
}
},
onDrop: handleDrop,
}),
[setNormalizedDropIndicator, handleDrop]
)
const createRootDropZone = useCallback(
() => ({
onDragOver: (e: React.DragEvent<HTMLElement>) => {
initDragOver(e, false)
if (e.target === e.currentTarget) {
setNormalizedDropIndicator({ targetId: 'root', position: 'inside', folderId: null })
}
},
onDragLeave: (e: React.DragEvent<HTMLElement>) => {
if (isLeavingElement(e)) setNormalizedDropIndicator(null)
},
onDrop: handleDrop,
}),
[initDragOver, setNormalizedDropIndicator, isLeavingElement, handleDrop]
)
const handleDragStart = useCallback(
(type: 'workflow' | 'folder', sourceFolderId: string | null) => {
draggedTypeRef.current = type
draggedSourceFolderRef.current = sourceFolderId
setIsDragging(true)
},
[]
)
/**
* Creates drag event handlers for the root drop zone
*
* @returns Object containing drag event handlers for root
*/
const createRootDragHandlers = useCallback(
() => ({
onDragEnter: (e: React.DragEvent<HTMLElement>) => {
e.preventDefault()
setIsDragging(true)
},
onDragOver: (e: React.DragEvent<HTMLElement>) => {
e.preventDefault()
lastDragYRef.current = e.clientY
setDropTargetId('root')
setIsDragging(true)
},
onDragLeave: (e: React.DragEvent<HTMLElement>) => {
e.preventDefault()
const relatedTarget = e.relatedTarget as HTMLElement | null
const currentTarget = e.currentTarget as HTMLElement
// Only clear if we're leaving the root completely
if (!relatedTarget || !currentTarget.contains(relatedTarget)) {
setDropTargetId(null)
}
},
onDrop: (e: React.DragEvent<HTMLElement>) => handleFolderDrop(e, null),
}),
[handleFolderDrop]
)
const handleDragEnd = useCallback(() => {
setIsDragging(false)
setDropIndicator(null)
draggedTypeRef.current = null
draggedSourceFolderRef.current = null
setHoverFolderId(null)
}, [])
/**
* Creates drag event handlers for folder header (the clickable part)
* These handlers trigger folder expansion on hover during drag
*
* @param folderId - Folder ID to handle hover for
* @returns Object containing drag event handlers for folder header
*/
const createFolderHeaderHoverHandlers = useCallback(
(folderId: string) => ({
onDragEnter: (e: React.DragEvent<HTMLElement>) => {
if (isDragging) {
setHoverFolderId(folderId)
}
},
onDragLeave: (e: React.DragEvent<HTMLElement>) => {
const relatedTarget = e.relatedTarget as HTMLElement | null
const currentTarget = e.currentTarget as HTMLElement
// Only clear if we're leaving the folder header completely
if (!relatedTarget || !currentTarget.contains(relatedTarget)) {
setHoverFolderId(null)
}
},
}),
[isDragging]
)
/**
* Set the scroll container ref for auto-scrolling
*
* @param element - Scrollable container element
*/
const setScrollContainer = useCallback((element: HTMLDivElement | null) => {
scrollContainerRef.current = element
}, [])
return {
dropTargetId,
dropIndicator,
isDragging,
setScrollContainer,
createWorkflowDragHandlers,
createFolderDragHandlers,
createItemDragHandlers,
createRootDragHandlers,
createFolderHeaderHoverHandlers,
createEmptyFolderDropZone,
createFolderContentDropZone,
createRootDropZone,
handleDragStart,
handleDragEnd,
}
}
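
The hook keeps the reorder math in three small pieces: `getSiblingItems` returns the folders and workflows at the destination sorted by `sortOrder`, `calculateInsertIndex` maps the drop indicator to a splice position, and `buildAndSubmitUpdates` re-indexes the resulting order 0..n-1 before calling the reorder mutations. `setNormalizedDropIndicator` additionally rewrites an 'after' indicator into 'before the next sibling' when one exists, so each gap has a single canonical representation. A simplified, self-contained walk-through of that math (sample IDs invented, store access omitted):

```typescript
// Illustrative only: simplified versions of the reorder math used above.
type Sibling = { type: 'folder' | 'workflow'; id: string; sortOrder: number }
type Indicator = { targetId: string; position: 'before' | 'after' | 'inside' }

// 'inside' appends to the destination; otherwise insert before/after the target.
function insertIndex(remaining: Sibling[], ind: Indicator): number {
  if (ind.position === 'inside') return remaining.length
  return remaining.findIndex((s) => s.id === ind.targetId) + (ind.position === 'after' ? 1 : 0)
}

// Siblings at the destination, already sorted by sortOrder.
const siblings: Sibling[] = [
  { type: 'folder', id: 'archive', sortOrder: 0 },
  { type: 'workflow', id: 'w1', sortOrder: 1 },
  { type: 'workflow', id: 'w2', sortOrder: 2 },
]

// Drag 'w2' and drop it before 'w1'.
const dragged = siblings.filter((s) => s.id === 'w2')
const remaining = siblings.filter((s) => s.id !== 'w2')
const at = insertIndex(remaining, { targetId: 'w1', position: 'before' }) // 1
const newOrder = [...remaining.slice(0, at), ...dragged, ...remaining.slice(at)]

// Every sibling is re-indexed 0..n-1 before being sent to the reorder mutations.
console.log(newOrder.map((s, i) => ({ id: s.id, sortOrder: i })))
// [{ id: 'archive', sortOrder: 0 }, { id: 'w2', sortOrder: 1 }, { id: 'w1', sortOrder: 2 }]
```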

View File

@@ -64,6 +64,7 @@ export function useExportWorkspace({ onSuccess }: UseExportWorkspaceProps = {})
id: folder.id,
name: folder.name,
parentId: folder.parentId,
sortOrder: folder.sortOrder,
})
)

View File

@@ -7,6 +7,7 @@ import {
extractWorkflowsFromFiles,
extractWorkflowsFromZip,
parseWorkflowJson,
sanitizePathSegment,
} from '@/lib/workflows/operations/import-export'
import { folderKeys, useCreateFolder } from '@/hooks/queries/folders'
import { useCreateWorkflow, workflowKeys } from '@/hooks/queries/workflows'
@@ -40,7 +41,7 @@ export function useImportWorkflow({ workspaceId }: UseImportWorkflowProps) {
* Import a single workflow
*/
const importSingleWorkflow = useCallback(
async (content: string, filename: string, folderId?: string) => {
async (content: string, filename: string, folderId?: string, sortOrder?: number) => {
const { data: workflowData, errors: parseErrors } = parseWorkflowJson(content)
if (!workflowData || parseErrors.length > 0) {
@@ -60,6 +61,7 @@ export function useImportWorkflow({ workspaceId }: UseImportWorkflowProps) {
description: workflowData.metadata?.description || 'Imported from JSON',
workspaceId,
folderId: folderId || undefined,
sortOrder,
})
const newWorkflowId = result.id
@@ -140,6 +142,55 @@ export function useImportWorkflow({ workspaceId }: UseImportWorkflowProps) {
})
const folderMap = new Map<string, string>()
if (metadata?.folders && metadata.folders.length > 0) {
type ExportedFolder = {
id: string
name: string
parentId: string | null
sortOrder?: number
}
const foldersById = new Map<string, ExportedFolder>(
metadata.folders.map((f) => [f.id, f])
)
const oldIdToNewId = new Map<string, string>()
const buildPath = (folderId: string): string => {
const pathParts: string[] = []
let currentId: string | null = folderId
while (currentId && foldersById.has(currentId)) {
const folder: ExportedFolder = foldersById.get(currentId)!
pathParts.unshift(sanitizePathSegment(folder.name))
currentId = folder.parentId
}
return pathParts.join('/')
}
const createFolderRecursive = async (folder: ExportedFolder): Promise<string> => {
if (oldIdToNewId.has(folder.id)) {
return oldIdToNewId.get(folder.id)!
}
let parentId = importFolder.id
if (folder.parentId && foldersById.has(folder.parentId)) {
parentId = await createFolderRecursive(foldersById.get(folder.parentId)!)
}
const newFolder = await createFolderMutation.mutateAsync({
name: folder.name,
workspaceId,
parentId,
sortOrder: folder.sortOrder,
})
oldIdToNewId.set(folder.id, newFolder.id)
folderMap.set(buildPath(folder.id), newFolder.id)
return newFolder.id
}
for (const folder of metadata.folders) {
await createFolderRecursive(folder)
}
}
for (const workflow of extractedWorkflows) {
try {
let targetFolderId = importFolder.id
@@ -147,15 +198,17 @@ export function useImportWorkflow({ workspaceId }: UseImportWorkflowProps) {
if (workflow.folderPath.length > 0) {
const folderPathKey = workflow.folderPath.join('/')
if (!folderMap.has(folderPathKey)) {
if (folderMap.has(folderPathKey)) {
targetFolderId = folderMap.get(folderPathKey)!
} else {
let parentId = importFolder.id
for (let i = 0; i < workflow.folderPath.length; i++) {
const pathSegment = workflow.folderPath.slice(0, i + 1).join('/')
const folderNameForSegment = workflow.folderPath[i]
if (!folderMap.has(pathSegment)) {
const subFolder = await createFolderMutation.mutateAsync({
name: workflow.folderPath[i],
name: folderNameForSegment,
workspaceId,
parentId,
})
@@ -165,15 +218,15 @@ export function useImportWorkflow({ workspaceId }: UseImportWorkflowProps) {
parentId = folderMap.get(pathSegment)!
}
}
targetFolderId = folderMap.get(folderPathKey)!
}
targetFolderId = folderMap.get(folderPathKey)!
}
const workflowId = await importSingleWorkflow(
workflow.content,
workflow.name,
targetFolderId
targetFolderId,
workflow.sortOrder
)
if (workflowId) importedWorkflowIds.push(workflowId)
} catch (error) {
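
Both import hooks rebuild exported folders with the same pattern: create each folder's parent first, memoise old-ID → new-ID so shared ancestors are created only once, and register the new folder in `folderMap` under a `sanitizePathSegment`-joined path so workflows can resolve their `folderPath`. A minimal sketch of the recursion, where the `create` callback stands in for `createFolderMutation.mutateAsync`:

```typescript
// Illustrative only: parent-before-child folder recreation with memoisation.
type ExportedFolder = { id: string; name: string; parentId: string | null }

async function rebuildFolders(
  exported: ExportedFolder[],
  create: (name: string, parentId?: string) => Promise<string>
): Promise<Map<string, string>> {
  const byId = new Map(exported.map((f) => [f.id, f]))
  const oldToNew = new Map<string, string>()

  const ensure = async (folder: ExportedFolder): Promise<string> => {
    const existing = oldToNew.get(folder.id)
    if (existing) return existing // each exported folder is created at most once
    const parent = folder.parentId ? byId.get(folder.parentId) : undefined
    const parentNewId = parent ? await ensure(parent) : undefined
    const newId = await create(folder.name, parentNewId)
    oldToNew.set(folder.id, newId)
    return newId
  }

  for (const folder of exported) await ensure(folder)
  return oldToNew
}
```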

View File

@@ -5,6 +5,7 @@ import {
extractWorkflowName,
extractWorkflowsFromZip,
parseWorkflowJson,
sanitizePathSegment,
} from '@/lib/workflows/operations/import-export'
import { useCreateFolder } from '@/hooks/queries/folders'
import { useWorkflowDiffStore } from '@/stores/workflow-diff/store'
@@ -59,7 +60,7 @@ export function useImportWorkspace({ onSuccess }: UseImportWorkspaceProps = {})
const createResponse = await fetch('/api/workspaces', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ name: workspaceName }),
body: JSON.stringify({ name: workspaceName, skipDefaultWorkflow: true }),
})
if (!createResponse.ok) {
@@ -71,6 +72,55 @@ export function useImportWorkspace({ onSuccess }: UseImportWorkspaceProps = {})
const folderMap = new Map<string, string>()
if (metadata?.folders && metadata.folders.length > 0) {
type ExportedFolder = {
id: string
name: string
parentId: string | null
sortOrder?: number
}
const foldersById = new Map<string, ExportedFolder>(
metadata.folders.map((f) => [f.id, f])
)
const oldIdToNewId = new Map<string, string>()
const buildPath = (folderId: string): string => {
const pathParts: string[] = []
let currentId: string | null = folderId
while (currentId && foldersById.has(currentId)) {
const folder: ExportedFolder = foldersById.get(currentId)!
pathParts.unshift(sanitizePathSegment(folder.name))
currentId = folder.parentId
}
return pathParts.join('/')
}
const createFolderRecursive = async (folder: ExportedFolder): Promise<string> => {
if (oldIdToNewId.has(folder.id)) {
return oldIdToNewId.get(folder.id)!
}
let parentId: string | undefined
if (folder.parentId && foldersById.has(folder.parentId)) {
parentId = await createFolderRecursive(foldersById.get(folder.parentId)!)
}
const newFolder = await createFolderMutation.mutateAsync({
name: folder.name,
workspaceId: newWorkspace.id,
parentId,
sortOrder: folder.sortOrder,
})
oldIdToNewId.set(folder.id, newFolder.id)
folderMap.set(buildPath(folder.id), newFolder.id)
return newFolder.id
}
for (const folder of metadata.folders) {
await createFolderRecursive(folder)
}
}
for (const workflow of extractedWorkflows) {
try {
const { data: workflowData, errors: parseErrors } = parseWorkflowJson(workflow.content)
@@ -84,9 +134,10 @@ export function useImportWorkspace({ onSuccess }: UseImportWorkspaceProps = {})
if (workflow.folderPath.length > 0) {
const folderPathKey = workflow.folderPath.join('/')
if (!folderMap.has(folderPathKey)) {
let parentId: string | null = null
if (folderMap.has(folderPathKey)) {
targetFolderId = folderMap.get(folderPathKey)!
} else {
let parentId: string | undefined
for (let i = 0; i < workflow.folderPath.length; i++) {
const pathSegment = workflow.folderPath.slice(0, i + 1).join('/')
@@ -94,7 +145,7 @@ export function useImportWorkspace({ onSuccess }: UseImportWorkspaceProps = {})
const subFolder = await createFolderMutation.mutateAsync({
name: workflow.folderPath[i],
workspaceId: newWorkspace.id,
parentId: parentId || undefined,
parentId,
})
folderMap.set(pathSegment, subFolder.id)
parentId = subFolder.id
@@ -102,9 +153,8 @@ export function useImportWorkspace({ onSuccess }: UseImportWorkspaceProps = {})
parentId = folderMap.get(pathSegment)!
}
}
targetFolderId = folderMap.get(folderPathKey) || null
}
targetFolderId = folderMap.get(folderPathKey) || null
}
const workflowName = extractWorkflowName(workflow.content, workflow.name)

View File

@@ -10,12 +10,17 @@ import { createLogger } from '@sim/logger'
import { task } from '@trigger.dev/sdk'
import { and, eq, isNull, lte, or, sql } from 'drizzle-orm'
import { v4 as uuidv4 } from 'uuid'
import {
type EmailRateLimitsData,
type EmailUsageData,
renderWorkflowNotificationEmail,
} from '@/components/emails'
import { checkUsageStatus } from '@/lib/billing/calculations/usage-monitor'
import { getHighestPrioritySubscription } from '@/lib/billing/core/subscription'
import { RateLimiter } from '@/lib/core/rate-limiter'
import { decryptSecret } from '@/lib/core/security/encryption'
import { getBaseUrl } from '@/lib/core/utils/urls'
import type { TraceSpan, WorkflowExecutionLog } from '@/lib/logs/types'
import type { WorkflowExecutionLog } from '@/lib/logs/types'
import { sendEmail } from '@/lib/messaging/email/mailer'
import type { AlertConfig } from '@/lib/notifications/alert-rules'
@@ -45,9 +50,8 @@ interface NotificationPayload {
totalDurationMs: number
cost?: Record<string, unknown>
finalOutput?: unknown
traceSpans?: unknown[]
rateLimits?: Record<string, unknown>
usage?: Record<string, unknown>
rateLimits?: EmailRateLimitsData
usage?: EmailUsageData
}
}
@@ -94,10 +98,6 @@ async function buildPayload(
payload.data.finalOutput = executionData.finalOutput
}
if (subscription.includeTraceSpans && executionData.traceSpans) {
payload.data.traceSpans = executionData.traceSpans as unknown[]
}
if (subscription.includeRateLimits && userId) {
try {
const userSubscription = await getHighestPrioritySubscription(userId)
@@ -251,18 +251,6 @@ function formatAlertReason(alertConfig: AlertConfig): string {
}
}
function formatJsonForEmail(data: unknown, label: string): string {
if (!data) return ''
const json = JSON.stringify(data, null, 2)
const escapedJson = json.replace(/</g, '&lt;').replace(/>/g, '&gt;')
return `
<div style="margin-top: 20px;">
<h3 style="color: #1a1a1a; font-size: 14px; margin-bottom: 8px;">${label}</h3>
<pre style="background: #f5f5f5; padding: 12px; border-radius: 6px; overflow-x: auto; font-size: 12px; color: #333; white-space: pre-wrap; word-wrap: break-word;">${escapedJson}</pre>
</div>
`
}
async function deliverEmail(
subscription: typeof workspaceNotificationSubscription.$inferSelect,
payload: NotificationPayload,
@@ -275,8 +263,7 @@ async function deliverEmail(
const isError = payload.data.status !== 'success'
const statusText = isError ? 'Error' : 'Success'
const logUrl = buildLogUrl(subscription.workspaceId, payload.data.executionId)
const baseUrl = getBaseUrl()
const alertReason = alertConfig ? formatAlertReason(alertConfig) : null
const alertReason = alertConfig ? formatAlertReason(alertConfig) : undefined
// Build subject line
const subject = alertReason
@@ -285,113 +272,36 @@ async function deliverEmail(
? `Error Alert: ${payload.data.workflowName}`
: `Workflow Completed: ${payload.data.workflowName}`
let includedDataHtml = ''
// Build plain text for fallback
let includedDataText = ''
if (payload.data.finalOutput) {
includedDataHtml += formatJsonForEmail(payload.data.finalOutput, 'Final Output')
includedDataText += `\n\nFinal Output:\n${JSON.stringify(payload.data.finalOutput, null, 2)}`
}
if (
payload.data.traceSpans &&
Array.isArray(payload.data.traceSpans) &&
payload.data.traceSpans.length > 0
) {
includedDataHtml += formatJsonForEmail(payload.data.traceSpans, 'Trace Spans')
includedDataText += `\n\nTrace Spans:\n${JSON.stringify(payload.data.traceSpans, null, 2)}`
}
if (payload.data.rateLimits) {
includedDataHtml += formatJsonForEmail(payload.data.rateLimits, 'Rate Limits')
includedDataText += `\n\nRate Limits:\n${JSON.stringify(payload.data.rateLimits, null, 2)}`
}
if (payload.data.usage) {
includedDataHtml += formatJsonForEmail(payload.data.usage, 'Usage Data')
includedDataText += `\n\nUsage Data:\n${JSON.stringify(payload.data.usage, null, 2)}`
}
// Render the email using the shared template
const html = await renderWorkflowNotificationEmail({
workflowName: payload.data.workflowName || 'Unknown Workflow',
status: payload.data.status,
trigger: payload.data.trigger,
duration: formatDuration(payload.data.totalDurationMs),
cost: formatCost(payload.data.cost),
logUrl,
alertReason,
finalOutput: payload.data.finalOutput,
rateLimits: payload.data.rateLimits,
usageData: payload.data.usage,
})
const result = await sendEmail({
to: subscription.emailRecipients,
subject,
html: `
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
</head>
<body style="background-color: #f5f5f7; font-family: HelveticaNeue, Helvetica, Arial, sans-serif; margin: 0; padding: 0;">
<div style="max-width: 580px; margin: 30px auto; background-color: #ffffff; border-radius: 5px; overflow: hidden;">
<!-- Header with Logo -->
<div style="padding: 30px 0; text-align: center;">
<img src="${baseUrl}/logo/reverse/text/medium.png" width="114" alt="Sim Studio" style="margin: 0 auto;" />
</div>
<!-- Section Border -->
<div style="display: flex; width: 100%;">
<div style="border-bottom: 1px solid #eeeeee; width: 249px;"></div>
<div style="border-bottom: 1px solid #6F3DFA; width: 102px;"></div>
<div style="border-bottom: 1px solid #eeeeee; width: 249px;"></div>
</div>
<!-- Content -->
<div style="padding: 5px 30px 20px 30px;">
<h2 style="font-size: 20px; color: #333333; margin: 20px 0;">
${alertReason ? 'Alert Triggered' : isError ? 'Workflow Execution Failed' : 'Workflow Execution Completed'}
</h2>
${alertReason ? `<p style="color: #d97706; background: #fef3c7; padding: 12px; border-radius: 6px; margin-bottom: 20px; font-size: 14px;"><strong>Reason:</strong> ${alertReason}</p>` : ''}
<table style="width: 100%; border-collapse: collapse; margin-bottom: 20px;">
<tr style="border-bottom: 1px solid #eee;">
<td style="padding: 12px 0; color: #666; width: 140px;">Workflow</td>
<td style="padding: 12px 0; color: #333; font-weight: 500;">${payload.data.workflowName}</td>
</tr>
<tr style="border-bottom: 1px solid #eee;">
<td style="padding: 12px 0; color: #666;">Status</td>
<td style="padding: 12px 0; color: ${isError ? '#ef4444' : '#22c55e'}; font-weight: 500;">${statusText}</td>
</tr>
<tr style="border-bottom: 1px solid #eee;">
<td style="padding: 12px 0; color: #666;">Trigger</td>
<td style="padding: 12px 0; color: #333;">${payload.data.trigger}</td>
</tr>
<tr style="border-bottom: 1px solid #eee;">
<td style="padding: 12px 0; color: #666;">Duration</td>
<td style="padding: 12px 0; color: #333;">${formatDuration(payload.data.totalDurationMs)}</td>
</tr>
<tr style="border-bottom: 1px solid #eee;">
<td style="padding: 12px 0; color: #666;">Cost</td>
<td style="padding: 12px 0; color: #333;">${formatCost(payload.data.cost)}</td>
</tr>
</table>
<a href="${logUrl}" style="display: inline-block; background-color: #6F3DFA; color: #ffffff; font-weight: bold; font-size: 16px; padding: 12px 30px; border-radius: 5px; text-decoration: none; text-align: center; margin: 20px 0;">
View Execution Log →
</a>
${includedDataHtml}
<p style="font-size: 16px; line-height: 1.5; color: #333333; margin-top: 30px;">
Best regards,<br />
The Sim Team
</p>
</div>
</div>
<!-- Footer -->
<div style="max-width: 580px; margin: 0 auto; padding: 20px 0; text-align: center;">
<p style="font-size: 12px; color: #706a7b; margin: 8px 0 0 0;">
© ${new Date().getFullYear()} Sim Studio, All Rights Reserved
</p>
<p style="font-size: 12px; color: #706a7b; margin: 8px 0 0 0;">
<a href="${baseUrl}/privacy" style="color: #706a7b; text-decoration: underline;">Privacy Policy</a> •
<a href="${baseUrl}/terms" style="color: #706a7b; text-decoration: underline;">Terms of Service</a>
</p>
</div>
</body>
</html>
`,
html,
text: `${subject}\n${alertReason ? `\nReason: ${alertReason}\n` : ''}\nWorkflow: ${payload.data.workflowName}\nStatus: ${statusText}\nTrigger: ${payload.data.trigger}\nDuration: ${formatDuration(payload.data.totalDurationMs)}\nCost: ${formatCost(payload.data.cost)}\n\nView Log: ${logUrl}${includedDataText}`,
emailType: 'notifications',
})
@@ -479,26 +389,6 @@ async function deliverSlack(
})
}
if (
payload.data.traceSpans &&
Array.isArray(payload.data.traceSpans) &&
payload.data.traceSpans.length > 0
) {
const spansSummary = (payload.data.traceSpans as TraceSpan[])
.map((span) => {
const status = span.status === 'success' ? '✓' : '✗'
return `${status} ${span.name || 'Unknown'} (${formatDuration(span.duration || 0)})`
})
.join('\n')
blocks.push({
type: 'section',
text: {
type: 'mrkdwn',
text: `*Trace Spans:*\n\`\`\`${spansSummary}\`\`\``,
},
})
}
if (payload.data.rateLimits) {
const limitsStr = JSON.stringify(payload.data.rateLimits, null, 2)
blocks.push({

View File

@@ -19,10 +19,10 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
title: 'Operation',
type: 'dropdown',
options: [
{ label: 'Create Run', id: 'create_run' },
{ label: 'Create Runs Batch', id: 'create_runs_batch' },
{ label: 'Create Run', id: 'langsmith_create_run' },
{ label: 'Create Runs Batch', id: 'langsmith_create_runs_batch' },
],
value: () => 'create_run',
value: () => 'langsmith_create_run',
},
{
id: 'apiKey',
@@ -37,15 +37,15 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
title: 'Run ID',
type: 'short-input',
placeholder: 'Auto-generated if blank',
condition: { field: 'operation', value: 'create_run' },
condition: { field: 'operation', value: 'langsmith_create_run' },
},
{
id: 'name',
title: 'Name',
type: 'short-input',
placeholder: 'Run name',
required: { field: 'operation', value: 'create_run' },
condition: { field: 'operation', value: 'create_run' },
required: { field: 'operation', value: 'langsmith_create_run' },
condition: { field: 'operation', value: 'langsmith_create_run' },
},
{
id: 'run_type',
@@ -61,23 +61,22 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
{ label: 'Parser', id: 'parser' },
],
value: () => 'chain',
required: { field: 'operation', value: 'create_run' },
condition: { field: 'operation', value: 'create_run' },
required: { field: 'operation', value: 'langsmith_create_run' },
condition: { field: 'operation', value: 'langsmith_create_run' },
},
{
id: 'start_time',
title: 'Start Time',
type: 'short-input',
placeholder: '2025-01-01T12:00:00Z',
condition: { field: 'operation', value: 'create_run' },
value: () => new Date().toISOString(),
placeholder: 'e.g. 2025-01-01T12:00:00Z (defaults to now)',
condition: { field: 'operation', value: 'langsmith_create_run' },
},
{
id: 'end_time',
title: 'End Time',
type: 'short-input',
placeholder: '2025-01-01T12:00:30Z',
condition: { field: 'operation', value: 'create_run' },
condition: { field: 'operation', value: 'langsmith_create_run' },
mode: 'advanced',
},
{
@@ -85,7 +84,7 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
title: 'Inputs',
type: 'code',
placeholder: '{"input":"value"}',
condition: { field: 'operation', value: 'create_run' },
condition: { field: 'operation', value: 'langsmith_create_run' },
mode: 'advanced',
},
{
@@ -93,7 +92,7 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
title: 'Outputs',
type: 'code',
placeholder: '{"output":"value"}',
condition: { field: 'operation', value: 'create_run' },
condition: { field: 'operation', value: 'langsmith_create_run' },
mode: 'advanced',
},
{
@@ -101,7 +100,7 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
title: 'Metadata',
type: 'code',
placeholder: '{"ls_model":"gpt-4"}',
condition: { field: 'operation', value: 'create_run' },
condition: { field: 'operation', value: 'langsmith_create_run' },
mode: 'advanced',
},
{
@@ -109,7 +108,7 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
title: 'Tags',
type: 'code',
placeholder: '["production","workflow"]',
condition: { field: 'operation', value: 'create_run' },
condition: { field: 'operation', value: 'langsmith_create_run' },
mode: 'advanced',
},
{
@@ -117,7 +116,7 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
title: 'Parent Run ID',
type: 'short-input',
placeholder: 'Parent run identifier',
condition: { field: 'operation', value: 'create_run' },
condition: { field: 'operation', value: 'langsmith_create_run' },
mode: 'advanced',
},
{
@@ -125,7 +124,7 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
title: 'Trace ID',
type: 'short-input',
placeholder: 'Auto-generated if blank',
condition: { field: 'operation', value: 'create_run' },
condition: { field: 'operation', value: 'langsmith_create_run' },
mode: 'advanced',
},
{
@@ -133,7 +132,7 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
title: 'Session ID',
type: 'short-input',
placeholder: 'Session identifier',
condition: { field: 'operation', value: 'create_run' },
condition: { field: 'operation', value: 'langsmith_create_run' },
mode: 'advanced',
},
{
@@ -141,7 +140,7 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
title: 'Session Name',
type: 'short-input',
placeholder: 'Session name',
condition: { field: 'operation', value: 'create_run' },
condition: { field: 'operation', value: 'langsmith_create_run' },
mode: 'advanced',
},
{
@@ -149,7 +148,7 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
title: 'Status',
type: 'short-input',
placeholder: 'success',
condition: { field: 'operation', value: 'create_run' },
condition: { field: 'operation', value: 'langsmith_create_run' },
mode: 'advanced',
},
{
@@ -157,7 +156,7 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
title: 'Error',
type: 'long-input',
placeholder: 'Error message',
condition: { field: 'operation', value: 'create_run' },
condition: { field: 'operation', value: 'langsmith_create_run' },
mode: 'advanced',
},
{
@@ -165,7 +164,7 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
title: 'Dotted Order',
type: 'short-input',
placeholder: 'Defaults to <YYYYMMDDTHHMMSSffffff>Z<id>',
condition: { field: 'operation', value: 'create_run' },
condition: { field: 'operation', value: 'langsmith_create_run' },
mode: 'advanced',
},
{
@@ -173,7 +172,7 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
title: 'Events',
type: 'code',
placeholder: '[{"event":"token","value":1}]',
condition: { field: 'operation', value: 'create_run' },
condition: { field: 'operation', value: 'langsmith_create_run' },
mode: 'advanced',
},
{
@@ -181,29 +180,36 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
title: 'Post Runs',
type: 'code',
placeholder: '[{"id":"...","name":"...","run_type":"chain","start_time":"..."}]',
condition: { field: 'operation', value: 'create_runs_batch' },
condition: { field: 'operation', value: 'langsmith_create_runs_batch' },
wandConfig: {
enabled: true,
generationType: 'json-object',
prompt: `Output ONLY a JSON array with a single LangSmith run object. No explanation.
Required: name (string), run_type ("tool"|"chain"|"llm"|"retriever"|"embedding"|"prompt"|"parser")
Optional: inputs, outputs, tags, extra, session_name, end_time
Fields id, trace_id, dotted_order, start_time are auto-generated if omitted.`,
},
},
{
id: 'patch',
title: 'Patch Runs',
type: 'code',
placeholder: '[{"id":"...","name":"...","run_type":"chain","start_time":"..."}]',
condition: { field: 'operation', value: 'create_runs_batch' },
condition: { field: 'operation', value: 'langsmith_create_runs_batch' },
mode: 'advanced',
wandConfig: {
enabled: true,
generationType: 'json-object',
prompt: `Output ONLY a JSON array with a single LangSmith run object to update. No explanation.
Required: id (existing run UUID), name, run_type ("tool"|"chain"|"llm"|"retriever"|"embedding"|"prompt"|"parser")
Common patch fields: outputs, end_time, status, error`,
},
},
],
tools: {
access: ['langsmith_create_run', 'langsmith_create_runs_batch'],
config: {
tool: (params) => {
switch (params.operation) {
case 'create_runs_batch':
return 'langsmith_create_runs_batch'
case 'create_run':
default:
return 'langsmith_create_run'
}
},
tool: (params) => params.operation,
params: (params) => {
const parseJsonValue = (value: unknown, label: string) => {
if (value === undefined || value === null || value === '') {
@@ -221,7 +227,7 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
return value
}
if (params.operation === 'create_runs_batch') {
if (params.operation === 'langsmith_create_runs_batch') {
const post = parseJsonValue(params.post, 'post runs')
const patch = parseJsonValue(params.patch, 'patch runs')

View File

@@ -173,6 +173,17 @@ export const baseStyles = {
margin: 0,
},
/** Code block text (for JSON/code display) */
codeBlock: {
fontSize: typography.fontSize.caption,
lineHeight: typography.lineHeight.caption,
color: colors.textSecondary,
fontFamily: 'monospace',
whiteSpace: 'pre-wrap' as const,
wordWrap: 'break-word' as const,
margin: 0,
},
/** Highlighted info box (e.g., "What you get with Pro") */
infoBox: {
backgroundColor: colors.bgOuter,

View File

@@ -61,7 +61,7 @@ export function EmailFooter({ baseUrl = getBaseUrl(), unsubscribe, messageId }:
<tbody>
<tr>
<td align='left' style={{ padding: '0 8px 0 0' }}>
<Link href='https://x.com/simdotai' rel='noopener noreferrer'>
<Link href={`${baseUrl}/x`} rel='noopener noreferrer'>
<Img
src={`${baseUrl}/static/x-icon.png`}
width='20'
@@ -71,7 +71,7 @@ export function EmailFooter({ baseUrl = getBaseUrl(), unsubscribe, messageId }:
</Link>
</td>
<td align='left' style={{ padding: '0 8px' }}>
<Link href='https://discord.gg/Hr4UWYEcTT' rel='noopener noreferrer'>
<Link href={`${baseUrl}/discord`} rel='noopener noreferrer'>
<Img
src={`${baseUrl}/static/discord-icon.png`}
width='20'
@@ -81,7 +81,7 @@ export function EmailFooter({ baseUrl = getBaseUrl(), unsubscribe, messageId }:
</Link>
</td>
<td align='left' style={{ padding: '0 8px' }}>
<Link href='https://github.com/simstudioai/sim' rel='noopener noreferrer'>
<Link href={`${baseUrl}/github`} rel='noopener noreferrer'>
<Img
src={`${baseUrl}/static/github-icon.png`}
width='20'

View File

@@ -10,6 +10,8 @@ export * from './careers'
export * from './components'
// Invitation emails
export * from './invitations'
// Notification emails
export * from './notifications'
// Render functions and subjects
export * from './render'
export * from './subjects'

View File

@@ -0,0 +1,7 @@
export type {
EmailRateLimitStatus,
EmailRateLimitsData,
EmailUsageData,
WorkflowNotificationEmailProps,
} from './workflow-notification-email'
export { WorkflowNotificationEmail } from './workflow-notification-email'

View File

@@ -0,0 +1,161 @@
import { Link, Section, Text } from '@react-email/components'
import { baseStyles } from '@/components/emails/_styles'
import { EmailLayout } from '@/components/emails/components'
import { getBrandConfig } from '@/lib/branding/branding'
/**
* Serialized rate limit status for email payloads.
* Note: This differs from the canonical RateLimitStatus in @/lib/core/rate-limiter
* which uses Date for resetAt. This version uses string for JSON serialization.
*/
export interface EmailRateLimitStatus {
requestsPerMinute: number
remaining: number
maxBurst?: number
resetAt?: string
}
export interface EmailRateLimitsData {
sync?: EmailRateLimitStatus
async?: EmailRateLimitStatus
}
export interface EmailUsageData {
currentPeriodCost: number
limit: number
percentUsed: number
isExceeded?: boolean
}
export interface WorkflowNotificationEmailProps {
workflowName: string
status: 'success' | 'error'
trigger: string
duration: string
cost: string
logUrl: string
alertReason?: string
finalOutput?: unknown
rateLimits?: EmailRateLimitsData
usageData?: EmailUsageData
}
function formatJsonForEmail(data: unknown): string {
return JSON.stringify(data, null, 2)
}
export function WorkflowNotificationEmail({
workflowName,
status,
trigger,
duration,
cost,
logUrl,
alertReason,
finalOutput,
rateLimits,
usageData,
}: WorkflowNotificationEmailProps) {
const brand = getBrandConfig()
const isError = status === 'error'
const statusText = isError ? 'Error' : 'Success'
const previewText = alertReason
? `${brand.name}: Alert - ${workflowName}`
: isError
? `${brand.name}: Workflow Failed - ${workflowName}`
: `${brand.name}: Workflow Completed - ${workflowName}`
const message = alertReason
? 'An alert was triggered for your workflow.'
: isError
? 'Your workflow execution failed.'
: 'Your workflow completed successfully.'
return (
<EmailLayout preview={previewText}>
<Text style={{ ...baseStyles.paragraph, marginTop: 0 }}>Hello,</Text>
<Text style={baseStyles.paragraph}>{message}</Text>
<Section style={baseStyles.infoBox}>
{alertReason && (
<Text style={baseStyles.infoBoxList}>
<strong>Reason:</strong> {alertReason}
</Text>
)}
<Text style={{ ...baseStyles.infoBoxList, marginTop: alertReason ? '4px' : 0 }}>
<strong>Workflow:</strong> {workflowName}
</Text>
<Text style={{ ...baseStyles.infoBoxList, marginTop: '4px' }}>
<strong>Status:</strong> {statusText}
</Text>
<Text style={{ ...baseStyles.infoBoxList, marginTop: '4px' }}>
<strong>Trigger:</strong> {trigger}
</Text>
<Text style={{ ...baseStyles.infoBoxList, marginTop: '4px' }}>
<strong>Duration:</strong> {duration}
</Text>
<Text style={{ ...baseStyles.infoBoxList, marginTop: '4px' }}>
<strong>Cost:</strong> {cost}
</Text>
</Section>
<Link href={logUrl} style={{ textDecoration: 'none' }}>
<Text style={baseStyles.button}>View Execution Log</Text>
</Link>
{rateLimits && (rateLimits.sync || rateLimits.async) ? (
<>
<div style={baseStyles.divider} />
<Section style={baseStyles.infoBox}>
<Text style={baseStyles.infoBoxTitle}>Rate Limits</Text>
{rateLimits.sync && (
<Text style={baseStyles.infoBoxList}>
Sync: {rateLimits.sync.remaining} of {rateLimits.sync.requestsPerMinute} remaining
</Text>
)}
{rateLimits.async && (
<Text style={{ ...baseStyles.infoBoxList, marginTop: rateLimits.sync ? '4px' : 0 }}>
Async: {rateLimits.async.remaining} of {rateLimits.async.requestsPerMinute}{' '}
remaining
</Text>
)}
</Section>
</>
) : null}
{usageData ? (
<>
<div style={baseStyles.divider} />
<Section style={baseStyles.infoBox}>
<Text style={baseStyles.infoBoxTitle}>Usage</Text>
<Text style={baseStyles.infoBoxList}>
${usageData.currentPeriodCost.toFixed(2)} of ${usageData.limit.toFixed(2)} used (
{usageData.percentUsed.toFixed(1)}%)
</Text>
</Section>
</>
) : null}
{finalOutput ? (
<>
<div style={baseStyles.divider} />
<Section style={baseStyles.infoBox}>
<Text style={baseStyles.infoBoxTitle}>Final Output</Text>
<Text style={{ ...baseStyles.codeBlock, marginTop: '8px' }}>
{formatJsonForEmail(finalOutput)}
</Text>
</Section>
</>
) : null}
<div style={baseStyles.divider} />
<Text style={{ ...baseStyles.footerText, textAlign: 'left' }}>
You're receiving this because you subscribed to workflow notifications.
</Text>
</EmailLayout>
)
}
export default WorkflowNotificationEmail

View File

@@ -15,6 +15,10 @@ import {
PollingGroupInvitationEmail,
WorkspaceInvitationEmail,
} from '@/components/emails/invitations'
import {
WorkflowNotificationEmail,
type WorkflowNotificationEmailProps,
} from '@/components/emails/notifications'
import { HelpConfirmationEmail } from '@/components/emails/support'
import { getBaseUrl } from '@/lib/core/utils/urls'
@@ -258,3 +262,9 @@ export async function renderCareersSubmissionEmail(params: {
})
)
}
export async function renderWorkflowNotificationEmail(
params: WorkflowNotificationEmailProps
): Promise<string> {
return await render(WorkflowNotificationEmail(params))
}

View File

@@ -256,6 +256,7 @@ const Combobox = forwardRef<HTMLDivElement, ComboboxProps>(
customOnSelect()
setOpen(false)
setHighlightedIndex(-1)
setSearchQuery('')
return
}
@@ -269,6 +270,7 @@ const Combobox = forwardRef<HTMLDivElement, ComboboxProps>(
onChange?.(selectedValue)
setOpen(false)
setHighlightedIndex(-1)
setSearchQuery('')
if (editable && inputRef.current) {
inputRef.current.blur()
}
@@ -312,6 +314,7 @@ const Combobox = forwardRef<HTMLDivElement, ComboboxProps>(
if (!activeElement || (!isInContainer && !isInDropdown && !isSearchInput)) {
setOpen(false)
setHighlightedIndex(-1)
setSearchQuery('')
}
}, 150)
}, [])
@@ -326,6 +329,7 @@ const Combobox = forwardRef<HTMLDivElement, ComboboxProps>(
if (e.key === 'Escape') {
setOpen(false)
setHighlightedIndex(-1)
setSearchQuery('')
if (editable && inputRef.current) {
inputRef.current.blur()
}

View File

@@ -68,6 +68,7 @@ interface CreateFolderVariables {
name: string
parentId?: string
color?: string
sortOrder?: number
}
interface UpdateFolderVariables {
@@ -160,18 +161,20 @@ export function useCreateFolder() {
parentId: variables.parentId || null,
color: variables.color || '#808080',
isExpanded: false,
sortOrder: getNextSortOrder(previousFolders, variables.workspaceId, variables.parentId),
sortOrder:
variables.sortOrder ??
getNextSortOrder(previousFolders, variables.workspaceId, variables.parentId),
createdAt: new Date(),
updatedAt: new Date(),
})
)
return useMutation({
mutationFn: async ({ workspaceId, ...payload }: CreateFolderVariables) => {
mutationFn: async ({ workspaceId, sortOrder, ...payload }: CreateFolderVariables) => {
const response = await fetch('/api/folders', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ ...payload, workspaceId }),
body: JSON.stringify({ ...payload, workspaceId, sortOrder }),
})
if (!response.ok) {
@@ -285,9 +288,66 @@ export function useDuplicateFolderMutation() {
},
...handlers,
onSettled: (_data, _error, variables) => {
// Invalidate both folders and workflows (duplicated folder may contain workflows)
queryClient.invalidateQueries({ queryKey: folderKeys.list(variables.workspaceId) })
queryClient.invalidateQueries({ queryKey: workflowKeys.list(variables.workspaceId) })
},
})
}
interface ReorderFoldersVariables {
workspaceId: string
updates: Array<{
id: string
sortOrder: number
parentId?: string | null
}>
}
export function useReorderFolders() {
const queryClient = useQueryClient()
return useMutation({
mutationFn: async (variables: ReorderFoldersVariables): Promise<void> => {
const response = await fetch('/api/folders/reorder', {
method: 'PUT',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(variables),
})
if (!response.ok) {
const error = await response.json().catch(() => ({}))
throw new Error(error.error || 'Failed to reorder folders')
}
},
onMutate: async (variables) => {
await queryClient.cancelQueries({ queryKey: folderKeys.list(variables.workspaceId) })
const snapshot = { ...useFolderStore.getState().folders }
useFolderStore.setState((state) => {
const updated = { ...state.folders }
for (const update of variables.updates) {
if (updated[update.id]) {
updated[update.id] = {
...updated[update.id],
sortOrder: update.sortOrder,
parentId:
update.parentId !== undefined ? update.parentId : updated[update.id].parentId,
}
}
}
return { folders: updated }
})
return { snapshot }
},
onError: (_error, _variables, context) => {
if (context?.snapshot) {
useFolderStore.setState({ folders: context.snapshot })
}
},
onSettled: (_data, _error, variables) => {
queryClient.invalidateQueries({ queryKey: folderKeys.list(variables.workspaceId) })
},
})
}
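A minimal usage sketch for the new mutation, assuming a drag-and-drop handler inside a React component that already knows the final order (the handler and variable names below are hypothetical, not part of this change):

const reorderFolders = useReorderFolders()

// Hypothetical drop handler: persist the new order of folders under one parent.
function handleFolderDrop(workspaceId: string, orderedFolderIds: string[], parentId: string | null) {
  reorderFolders.mutate({
    workspaceId,
    updates: orderedFolderIds.map((id, index) => ({
      id,
      sortOrder: index,
      parentId,
    })),
  })
}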

View File

@@ -1,8 +1,15 @@
import { keepPreviousData, useInfiniteQuery, useQuery } from '@tanstack/react-query'
import { getEndDateFromTimeRange, getStartDateFromTimeRange } from '@/lib/logs/filters'
import { parseQuery, queryToApiParams } from '@/lib/logs/query-parser'
import type {
DashboardStatsResponse,
SegmentStats,
WorkflowStats,
} from '@/app/api/logs/stats/route'
import type { LogsResponse, TimeRange, WorkflowLog } from '@/stores/logs/filters/types'
export type { DashboardStatsResponse, SegmentStats, WorkflowStats }
export const logKeys = {
all: ['logs'] as const,
lists: () => [...logKeys.all, 'list'] as const,
@@ -10,8 +17,8 @@ export const logKeys = {
[...logKeys.lists(), workspaceId ?? '', filters] as const,
details: () => [...logKeys.all, 'detail'] as const,
detail: (logId: string | undefined) => [...logKeys.details(), logId ?? ''] as const,
dashboard: (workspaceId: string | undefined, filters: Record<string, unknown>) =>
[...logKeys.all, 'dashboard', workspaceId ?? '', filters] as const,
stats: (workspaceId: string | undefined, filters: object) =>
[...logKeys.all, 'stats', workspaceId ?? '', filters] as const,
executionSnapshots: () => [...logKeys.all, 'executionSnapshot'] as const,
executionSnapshot: (executionId: string | undefined) =>
[...logKeys.executionSnapshots(), executionId ?? ''] as const,
@@ -147,52 +154,96 @@ export function useLogDetail(logId: string | undefined) {
})
}
const DASHBOARD_LOGS_LIMIT = 10000
interface DashboardFilters {
timeRange: TimeRange
startDate?: string
endDate?: string
level: string
workflowIds: string[]
folderIds: string[]
triggers: string[]
searchQuery: string
segmentCount?: number
}
/**
* Fetches all logs for dashboard metrics (non-paginated).
* Uses same filters as the logs list but with a high limit to get all data.
* Fetches aggregated dashboard statistics from the server.
* Uses SQL aggregation for efficient computation without row limits.
*/
async function fetchAllLogs(
async function fetchDashboardStats(
workspaceId: string,
filters: Omit<LogFilters, 'limit'>
): Promise<WorkflowLog[]> {
filters: DashboardFilters
): Promise<DashboardStatsResponse> {
const params = new URLSearchParams()
params.set('workspaceId', workspaceId)
params.set('limit', DASHBOARD_LOGS_LIMIT.toString())
params.set('offset', '0')
applyFilterParams(params, filters)
const response = await fetch(`/api/logs?${params.toString()}`)
if (!response.ok) {
throw new Error('Failed to fetch logs for dashboard')
if (filters.segmentCount) {
params.set('segmentCount', filters.segmentCount.toString())
}
const apiData: LogsResponse = await response.json()
return apiData.data || []
if (filters.level !== 'all') {
params.set('level', filters.level)
}
if (filters.triggers.length > 0) {
params.set('triggers', filters.triggers.join(','))
}
if (filters.workflowIds.length > 0) {
params.set('workflowIds', filters.workflowIds.join(','))
}
if (filters.folderIds.length > 0) {
params.set('folderIds', filters.folderIds.join(','))
}
const startDate = getStartDateFromTimeRange(filters.timeRange, filters.startDate)
if (startDate) {
params.set('startDate', startDate.toISOString())
}
const endDate = getEndDateFromTimeRange(filters.timeRange, filters.endDate)
if (endDate) {
params.set('endDate', endDate.toISOString())
}
if (filters.searchQuery.trim()) {
const parsedQuery = parseQuery(filters.searchQuery.trim())
const searchParams = queryToApiParams(parsedQuery)
for (const [key, value] of Object.entries(searchParams)) {
params.set(key, value)
}
}
const response = await fetch(`/api/logs/stats?${params.toString()}`)
if (!response.ok) {
throw new Error('Failed to fetch dashboard stats')
}
return response.json()
}
interface UseDashboardLogsOptions {
interface UseDashboardStatsOptions {
enabled?: boolean
refetchInterval?: number | false
}
/**
* Hook for fetching all logs for dashboard metrics.
* Unlike useLogsList, this fetches all logs in a single request
* to ensure dashboard metrics are computed from complete data.
* Hook for fetching aggregated dashboard statistics.
* Uses server-side SQL aggregation for efficient computation
* without any row limits - all matching logs are included in the stats.
*/
export function useDashboardLogs(
export function useDashboardStats(
workspaceId: string | undefined,
filters: Omit<LogFilters, 'limit'>,
options?: UseDashboardLogsOptions
filters: DashboardFilters,
options?: UseDashboardStatsOptions
) {
return useQuery({
queryKey: logKeys.dashboard(workspaceId, filters),
queryFn: () => fetchAllLogs(workspaceId as string, filters),
queryKey: logKeys.stats(workspaceId, filters),
queryFn: () => fetchDashboardStats(workspaceId as string, filters),
enabled: Boolean(workspaceId) && (options?.enabled ?? true),
refetchInterval: options?.refetchInterval ?? false,
staleTime: 0,
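// Usage sketch (not part of this diff): inside a dashboard component, the hook takes the
// same filters the logs page already holds; the field values below are placeholders.
// const { data: stats, isLoading } = useDashboardStats(workspaceId, {
//   timeRange,            // TimeRange value from the dashboard filters
//   level: 'all',
//   workflowIds: [],
//   folderIds: [],
//   triggers: [],
//   searchQuery: '',
//   segmentCount: 30,     // optional number of time segments for charts
// })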

View File

@@ -61,7 +61,6 @@ export interface NotificationSubscription {
levelFilter: LogLevel[]
triggerFilter: TriggerType[]
includeFinalOutput: boolean
includeTraceSpans: boolean
includeRateLimits: boolean
includeUsageData: boolean
webhookConfig?: WebhookConfig | null
@@ -106,7 +105,6 @@ interface CreateNotificationParams {
levelFilter: LogLevel[]
triggerFilter: TriggerType[]
includeFinalOutput: boolean
includeTraceSpans: boolean
includeRateLimits: boolean
includeUsageData: boolean
alertConfig?: AlertConfig | null

View File

@@ -32,6 +32,7 @@ function mapWorkflow(workflow: any): WorkflowMetadata {
color: workflow.color,
workspaceId: workflow.workspaceId,
folderId: workflow.folderId,
sortOrder: workflow.sortOrder ?? 0,
createdAt: new Date(workflow.createdAt),
lastModified: new Date(workflow.updatedAt || workflow.createdAt),
}
@@ -91,6 +92,7 @@ interface CreateWorkflowVariables {
description?: string
color?: string
folderId?: string | null
sortOrder?: number
}
interface CreateWorkflowResult {
@@ -100,6 +102,7 @@ interface CreateWorkflowResult {
color: string
workspaceId: string
folderId?: string | null
sortOrder: number
}
interface DuplicateWorkflowVariables {
@@ -118,6 +121,7 @@ interface DuplicateWorkflowResult {
color: string
workspaceId: string
folderId?: string | null
sortOrder: number
blocksCount: number
edgesCount: number
subflowsCount: number
@@ -161,6 +165,7 @@ function createWorkflowMutationHandlers<TVariables extends { workspaceId: string
color: data.color,
workspaceId: data.workspaceId,
folderId: data.folderId,
sortOrder: 'sortOrder' in data ? data.sortOrder : 0,
},
},
error: null,
@@ -179,21 +184,36 @@ export function useCreateWorkflow() {
const handlers = createWorkflowMutationHandlers<CreateWorkflowVariables>(
queryClient,
'CreateWorkflow',
(variables, tempId) => ({
id: tempId,
name: variables.name || generateCreativeWorkflowName(),
lastModified: new Date(),
createdAt: new Date(),
description: variables.description || 'New workflow',
color: variables.color || getNextWorkflowColor(),
workspaceId: variables.workspaceId,
folderId: variables.folderId || null,
})
(variables, tempId) => {
let sortOrder: number
if (variables.sortOrder !== undefined) {
sortOrder = variables.sortOrder
} else {
const currentWorkflows = useWorkflowRegistry.getState().workflows
const targetFolderId = variables.folderId || null
const workflowsInFolder = Object.values(currentWorkflows).filter(
(w) => w.folderId === targetFolderId
)
sortOrder = workflowsInFolder.reduce((max, w) => Math.max(max, w.sortOrder ?? 0), -1) + 1
}
return {
id: tempId,
name: variables.name || generateCreativeWorkflowName(),
lastModified: new Date(),
createdAt: new Date(),
description: variables.description || 'New workflow',
color: variables.color || getNextWorkflowColor(),
workspaceId: variables.workspaceId,
folderId: variables.folderId || null,
sortOrder,
}
}
)
return useMutation({
mutationFn: async (variables: CreateWorkflowVariables): Promise<CreateWorkflowResult> => {
const { workspaceId, name, description, color, folderId } = variables
const { workspaceId, name, description, color, folderId, sortOrder } = variables
logger.info(`Creating new workflow in workspace: ${workspaceId}`)
@@ -206,6 +226,7 @@ export function useCreateWorkflow() {
color: color || getNextWorkflowColor(),
workspaceId,
folderId: folderId || null,
sortOrder,
}),
})
@@ -243,13 +264,13 @@ export function useCreateWorkflow() {
color: createdWorkflow.color,
workspaceId,
folderId: createdWorkflow.folderId,
sortOrder: createdWorkflow.sortOrder ?? 0,
}
},
...handlers,
onSuccess: (data, variables, context) => {
handlers.onSuccess(data, variables, context)
// Initialize subblock values for new workflow
const { subBlockValues } = buildDefaultWorkflowArtifacts()
useSubBlockStore.setState((state) => ({
workflowValues: {
@@ -267,16 +288,26 @@ export function useDuplicateWorkflowMutation() {
const handlers = createWorkflowMutationHandlers<DuplicateWorkflowVariables>(
queryClient,
'DuplicateWorkflow',
(variables, tempId) => ({
id: tempId,
name: variables.name,
lastModified: new Date(),
createdAt: new Date(),
description: variables.description,
color: variables.color,
workspaceId: variables.workspaceId,
folderId: variables.folderId || null,
})
(variables, tempId) => {
const currentWorkflows = useWorkflowRegistry.getState().workflows
const targetFolderId = variables.folderId || null
const workflowsInFolder = Object.values(currentWorkflows).filter(
(w) => w.folderId === targetFolderId
)
const maxSortOrder = workflowsInFolder.reduce((max, w) => Math.max(max, w.sortOrder ?? 0), -1)
return {
id: tempId,
name: variables.name,
lastModified: new Date(),
createdAt: new Date(),
description: variables.description,
color: variables.color,
workspaceId: variables.workspaceId,
folderId: targetFolderId,
sortOrder: maxSortOrder + 1,
}
}
)
return useMutation({
@@ -317,6 +348,7 @@ export function useDuplicateWorkflowMutation() {
color: duplicatedWorkflow.color || color,
workspaceId,
folderId: duplicatedWorkflow.folderId ?? folderId,
sortOrder: duplicatedWorkflow.sortOrder ?? 0,
blocksCount: duplicatedWorkflow.blocksCount || 0,
edgesCount: duplicatedWorkflow.edgesCount || 0,
subflowsCount: duplicatedWorkflow.subflowsCount || 0,
@@ -398,3 +430,61 @@ export function useRevertToVersion() {
},
})
}
interface ReorderWorkflowsVariables {
workspaceId: string
updates: Array<{
id: string
sortOrder: number
folderId?: string | null
}>
}
export function useReorderWorkflows() {
const queryClient = useQueryClient()
return useMutation({
mutationFn: async (variables: ReorderWorkflowsVariables): Promise<void> => {
const response = await fetch('/api/workflows/reorder', {
method: 'PUT',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(variables),
})
if (!response.ok) {
const error = await response.json().catch(() => ({}))
throw new Error(error.error || 'Failed to reorder workflows')
}
},
onMutate: async (variables) => {
await queryClient.cancelQueries({ queryKey: workflowKeys.list(variables.workspaceId) })
const snapshot = { ...useWorkflowRegistry.getState().workflows }
useWorkflowRegistry.setState((state) => {
const updated = { ...state.workflows }
for (const update of variables.updates) {
if (updated[update.id]) {
updated[update.id] = {
...updated[update.id],
sortOrder: update.sortOrder,
folderId:
update.folderId !== undefined ? update.folderId : updated[update.id].folderId,
}
}
}
return { workflows: updated }
})
return { snapshot }
},
onError: (_error, _variables, context) => {
if (context?.snapshot) {
useWorkflowRegistry.setState({ workflows: context.snapshot })
}
},
onSettled: (_data, _error, variables) => {
queryClient.invalidateQueries({ queryKey: workflowKeys.list(variables.workspaceId) })
},
})
}
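The workflow variant mirrors the folder one; because each update may also carry a folderId, the same call can move a workflow into another folder while fixing its position. A sketch, with placeholder IDs:

const reorderWorkflows = useReorderWorkflows()

// Hypothetical: drop a workflow at the top of a different folder.
reorderWorkflows.mutate({
  workspaceId,
  updates: [{ id: draggedWorkflowId, sortOrder: 0, folderId: targetFolderId }],
})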

View File

@@ -592,6 +592,7 @@ export const auth = betterAuth({
sendVerificationOnSignUp: false,
otpLength: 6, // Explicitly set the OTP length
expiresIn: 15 * 60, // 15 minutes in seconds
overrideDefaultEmailVerification: true,
}),
genericOAuth({
config: [

View File

@@ -260,6 +260,9 @@ export const env = createEnv({
// Invitations - for self-hosted deployments
DISABLE_INVITATIONS: z.boolean().optional(), // Disable workspace invitations globally (for self-hosted deployments)
// Development Tools
REACT_GRAB_ENABLED: z.boolean().optional(), // Enable React Grab for UI element debugging in Cursor/AI agents (dev only)
// SSO Configuration (for script-based registration)
SSO_ENABLED: z.boolean().optional(), // Enable SSO functionality
SSO_PROVIDER_TYPE: z.enum(['oidc', 'saml']).optional(), // [REQUIRED] SSO provider type

View File

@@ -111,6 +111,12 @@ export const isE2bEnabled = isTruthy(env.E2B_ENABLED)
*/
export const isInvitationsDisabled = isTruthy(env.DISABLE_INVITATIONS)
/**
* Is React Grab enabled for UI element debugging
* When true and in development mode, enables React Grab for copying UI element context to clipboard
*/
export const isReactGrabEnabled = isDev && isTruthy(env.REACT_GRAB_ENABLED)
/**
* Get cost multiplier based on environment
*/

View File

@@ -25,7 +25,6 @@ function prepareLogData(
log: WorkflowExecutionLog,
subscription: {
includeFinalOutput: boolean
includeTraceSpans: boolean
}
) {
const preparedLog = { ...log, executionData: {} as Record<string, unknown> }
@@ -38,10 +37,6 @@ function prepareLogData(
webhookData.finalOutput = data.finalOutput
}
if (subscription.includeTraceSpans && data.traceSpans) {
webhookData.traceSpans = data.traceSpans
}
preparedLog.executionData = webhookData
}

View File

@@ -16,6 +16,7 @@ export interface WorkflowExportData {
description?: string
color?: string
folderId?: string | null
sortOrder?: number
}
state: WorkflowState
variables?: Record<string, Variable>
@@ -25,6 +26,7 @@ export interface FolderExportData {
id: string
name: string
parentId: string | null
sortOrder?: number
}
export interface WorkspaceExportStructure {
@@ -186,7 +188,12 @@ export async function exportWorkspaceToZip(
name: workspaceName,
exportedAt: new Date().toISOString(),
},
folders: folders.map((f) => ({ id: f.id, name: f.name, parentId: f.parentId })),
folders: folders.map((f) => ({
id: f.id,
name: f.name,
parentId: f.parentId,
sortOrder: f.sortOrder,
})),
}
zip.file('_workspace.json', JSON.stringify(metadata, null, 2))
@@ -199,6 +206,7 @@ export async function exportWorkspaceToZip(
name: workflow.workflow.name,
description: workflow.workflow.description,
color: workflow.workflow.color,
sortOrder: workflow.workflow.sortOrder,
exportedAt: new Date().toISOString(),
},
variables: workflow.variables,
@@ -279,11 +287,27 @@ export interface ImportedWorkflow {
content: string
name: string
folderPath: string[]
sortOrder?: number
}
export interface WorkspaceImportMetadata {
workspaceName: string
exportedAt?: string
folders?: Array<{
id: string
name: string
parentId: string | null
sortOrder?: number
}>
}
function extractSortOrder(content: string): number | undefined {
try {
const parsed = JSON.parse(content)
return parsed.state?.metadata?.sortOrder ?? parsed.metadata?.sortOrder
} catch {
return undefined
}
}
export async function extractWorkflowsFromZip(
@@ -303,6 +327,7 @@ export async function extractWorkflowsFromZip(
metadata = {
workspaceName: parsed.workspace?.name || 'Imported Workspace',
exportedAt: parsed.workspace?.exportedAt,
folders: parsed.folders,
}
} catch (error) {
logger.error('Failed to parse workspace metadata:', error)
@@ -321,6 +346,7 @@ export async function extractWorkflowsFromZip(
content,
name: filename,
folderPath: pathParts,
sortOrder: extractSortOrder(content),
})
} catch (error) {
logger.error(`Failed to extract ${path}:`, error)
@@ -338,10 +364,12 @@ export async function extractWorkflowsFromFiles(files: File[]): Promise<Imported
try {
const content = await file.text()
workflows.push({
content,
name: file.name,
folderPath: [],
sortOrder: extractSortOrder(content),
})
} catch (error) {
logger.error(`Failed to read ${file.name}:`, error)

View File

@@ -53,6 +53,8 @@ export interface ExportWorkflowState {
metadata?: {
name?: string
description?: string
color?: string
sortOrder?: number
exportedAt?: string
}
variables?: Array<{

View File

@@ -253,6 +253,25 @@ const nextConfig: NextConfig = {
async redirects() {
const redirects = []
// Social link redirects (used in emails to avoid spam filter issues)
redirects.push(
{
source: '/discord',
destination: 'https://discord.gg/Hr4UWYEcTT',
permanent: false,
},
{
source: '/x',
destination: 'https://x.com/simdotai',
permanent: false,
},
{
source: '/github',
destination: 'https://github.com/simstudioai/sim',
permanent: false,
}
)
// Redirect /building and /blog to /studio (legacy URL support)
redirects.push(
{

View File

@@ -476,7 +476,6 @@ export const useWorkflowRegistry = create<WorkflowRegistry>()(
// Use the server-generated ID
const id = duplicatedWorkflow.id
// Generate new workflow metadata using the server-generated ID
const newWorkflow: WorkflowMetadata = {
id,
name: `${sourceWorkflow.name} (Copy)`,
@@ -484,8 +483,9 @@ export const useWorkflowRegistry = create<WorkflowRegistry>()(
createdAt: new Date(),
description: sourceWorkflow.description,
color: getNextWorkflowColor(),
workspaceId, // Include the workspaceId in the new workflow
folderId: sourceWorkflow.folderId, // Include the folderId from source workflow
workspaceId,
folderId: sourceWorkflow.folderId,
sortOrder: duplicatedWorkflow.sortOrder ?? 0,
}
// Get the current workflow state to copy from

View File

@@ -26,6 +26,7 @@ export interface WorkflowMetadata {
color: string
workspaceId?: string
folderId?: string | null
sortOrder: number
}
export type HydrationPhase =

View File

@@ -1,6 +1,5 @@
{
"lockfileVersion": 1,
"configVersion": 0,
"workspaces": {
"": {
"name": "simstudio",
@@ -3154,6 +3153,10 @@
"seq-queue": ["seq-queue@0.0.5", "", {}, "sha512-hr3Wtp/GZIc/6DAGPDcV4/9WoZhjrkXsi5B/07QgX8tsdc6ilr7BFM6PM6rbdAX1kFSDYeZGLipIZZKyQP0O5Q=="],
"seroval": ["seroval@1.3.2", "", {}, "sha512-RbcPH1n5cfwKrru7v7+zrZvjLurgHhGyso3HTyGtRivGWgYjbOmGuivCQaORNELjNONoK35nj28EoWul9sb1zQ=="],
"seroval-plugins": ["seroval-plugins@1.3.3", "", { "peerDependencies": { "seroval": "^1.0" } }, "sha512-16OL3NnUBw8JG1jBLUoZJsLnQq0n5Ua6aHalhJK4fMQkz1lqR7Osz1sA30trBtd9VUDc2NgkuRCn8+/pBwqZ+w=="],
"serve-static": ["serve-static@2.2.1", "", { "dependencies": { "encodeurl": "^2.0.0", "escape-html": "^1.0.3", "parseurl": "^1.3.3", "send": "^1.2.0" } }, "sha512-xRXBn0pPqQTVQiC8wyQrKs2MOlX24zQ0POGaj0kultvoOCstBQM5yvOhAVSUwOMjQtTvsPWoNCHfPGwaaQJhTw=="],
"set-cookie-parser": ["set-cookie-parser@2.7.2", "", {}, "sha512-oeM1lpU/UvhTxw+g3cIfxXHyJRc/uidd3yK1P242gzHds0udQBYzs3y8j4gCCW+ZJ7ad0yctld8RYO+bdurlvw=="],
@@ -3220,6 +3223,8 @@
"socks-proxy-agent": ["socks-proxy-agent@8.0.5", "", { "dependencies": { "agent-base": "^7.1.2", "debug": "^4.3.4", "socks": "^2.8.3" } }, "sha512-HehCEsotFqbPW9sJ8WVYB6UbmIMv7kUUORIF2Nncq4VQvBfNBLibW9YZR5dlYCSUhwcD628pRllm7n+E+YTzJw=="],
"solid-js": ["solid-js@1.9.10", "", { "dependencies": { "csstype": "^3.1.0", "seroval": "~1.3.0", "seroval-plugins": "~1.3.0" } }, "sha512-Coz956cos/EPDlhs6+jsdTxKuJDPT7B5SVIWgABwROyxjY7Xbr8wkzD68Et+NxnV7DLJ3nJdAC2r9InuV/4Jew=="],
"sonic-boom": ["sonic-boom@4.2.0", "", { "dependencies": { "atomic-sleep": "^1.0.0" } }, "sha512-INb7TM37/mAcsGmc9hyyI6+QR3rR1zVRu36B0NeGXKnOOLiZOfER5SA+N7X7k3yUYRzLWafduTDvJAfDswwEww=="],
"source-map": ["source-map@0.7.6", "", {}, "sha512-i5uvt8C3ikiWeNZSVZNWcfZPItFQOsYTUAOkcUPGd8DqDy1uOUikjt5dG+uRlwyvR108Fb9DOd4GvXfT0N2/uQ=="],

View File

@@ -0,0 +1,2 @@
ALTER TABLE "workflow" ADD COLUMN "sort_order" integer DEFAULT 0 NOT NULL;--> statement-breakpoint
CREATE INDEX "workflow_folder_sort_idx" ON "workflow" USING btree ("folder_id","sort_order");

File diff suppressed because it is too large

View File

@@ -981,6 +981,13 @@
"when": 1768366574848,
"tag": "0140_fuzzy_the_twelve",
"breakpoints": true
},
{
"idx": 141,
"version": "7",
"when": 1768421319400,
"tag": "0141_daffy_marten_broadcloak",
"breakpoints": true
}
]
}

View File

@@ -149,6 +149,7 @@ export const workflow = pgTable(
.references(() => user.id, { onDelete: 'cascade' }),
workspaceId: text('workspace_id').references(() => workspace.id, { onDelete: 'cascade' }),
folderId: text('folder_id').references(() => workflowFolder.id, { onDelete: 'set null' }),
sortOrder: integer('sort_order').notNull().default(0),
name: text('name').notNull(),
description: text('description'),
color: text('color').notNull().default('#3972F6'),
@@ -165,6 +166,7 @@ export const workflow = pgTable(
userIdIdx: index('workflow_user_id_idx').on(table.userId),
workspaceIdIdx: index('workflow_workspace_id_idx').on(table.workspaceId),
userWorkspaceIdx: index('workflow_user_workspace_idx').on(table.userId, table.workspaceId),
folderSortIdx: index('workflow_folder_sort_idx').on(table.folderId, table.sortOrder),
})
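For context, a sketch of the query this column and composite index serve when listing a folder's workflows in sidebar order (a Drizzle query; `db` and `folderId` are assumed to come from the surrounding application code):

import { asc, eq } from 'drizzle-orm'

// The workflow_folder_sort_idx index on (folder_id, sort_order) covers this lookup.
const workflowsInFolder = await db
  .select()
  .from(workflow)
  .where(eq(workflow.folderId, folderId))
  .orderBy(asc(workflow.sortOrder))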
)