Compare commits

...

11 Commits

Author SHA1 Message Date
Vikhyath Mondreti
d4c171c6d7 fix(sortOrder): initial ordering must be deterministic (#2833)
* fix(sortOrder): initial ordering must be deterministic

* fix initial ordering issue

* add created at to child item
2026-01-14 22:21:49 -08:00
Waleed
26d0799d22 fix(popover): fix frozen workspace popover (#2832) 2026-01-14 22:08:10 -08:00
Waleed
45bd1e8cd7 feat(starter): in start block input format, don't prevent deletion if only one field remaining, just clear form (#2830) 2026-01-14 21:24:02 -08:00
Waleed
85d6e3e3bd fix(misc): added trace spans back to notifications for webhooks, updated verification code for users signing in with email, updated welcome email (#2828)
* added back trace spans to notifications

* fixed double verification code

* fix dashboard

* updated welcome email

* added link to cal for team

* update dashboard stats route

* added react grab URL to CSP if FF is enabled, removed dead db hook

* fix failing test

* ensure MCP add server tool is centered

* updated A2A copy button and MCP location, and default description matching

* updated button on chat page

* added vite version override

* fix
2026-01-14 21:17:20 -08:00
Siddharth Ganesan
ccf268595e improvement(copilot): update copilot to match copilot repo (#2829)
* Ux

* Fix lint

* Clean up model options

* Codex
2026-01-14 20:52:49 -08:00
Vikhyath Mondreti
5eca660c5c improvement(langsmith): add wand for batch ingestion schemas (#2827) 2026-01-14 19:50:35 -08:00
Waleed
3db9ad2d95 improvement(emails): update email footer links to link to sim.ai/provider instead of direct provider links (#2826) 2026-01-14 18:53:11 -08:00
Waleed
4195cfe1ff fix(otp): send welcome email even when user signs up via email/pass along with oauth providers (#2825) 2026-01-14 18:46:18 -08:00
Waleed
212933746e improvement(langsmith): upgraded langsmith to use tool names directly in dropdown (#2824) 2026-01-14 18:46:06 -08:00
Waleed
5af72ea22f feat(dashboard): added stats endpoint to compute stats on server side and avoid limit (#2823)
* feat(dashboard): added stats endpoint to compute stats on server side and avoid limit

* updated query
2026-01-14 18:41:46 -08:00
Waleed
4899c28421 fix(notifications): consolidate notification utils, update email styling (#2822)
* fix(notifications): consolidate notification utils, update email styling

* fixed duplicate types
2026-01-14 18:35:51 -08:00
50 changed files with 1286 additions and 874 deletions

View File

@@ -351,14 +351,16 @@ Enables AI-assisted field generation.
## Tools Configuration
### Simple Tool Selector
**Preferred:** Use tool names directly as dropdown option IDs to avoid switch cases:
```typescript
tools: {
access: ['service_create', 'service_read', 'service_update'],
config: {
tool: (params) => `service_${params.operation}`,
},
}
// Dropdown options use tool IDs directly
options: [
{ label: 'Create', id: 'service_create' },
{ label: 'Read', id: 'service_read' },
]
// Tool selector just returns the operation value
tool: (params) => params.operation,
```
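For reference, a minimal self-contained sketch of the preferred pattern above. The block and tool names (`service_*`) are placeholders and the config object is reduced to only the fields relevant to this pattern:
```typescript
// Illustrative sketch only; names are hypothetical and the block config
// shape is trimmed to the dropdown + tool selector interaction.
interface ToolParams {
  operation: string
}

const serviceBlockSketch = {
  subBlocks: [
    {
      id: 'operation',
      title: 'Operation',
      type: 'dropdown',
      // Option ids are the tool ids themselves — no mapping table or switch needed.
      options: [
        { label: 'Create', id: 'service_create' },
        { label: 'Read', id: 'service_read' },
        { label: 'Update', id: 'service_update' },
      ],
      value: () => 'service_create',
    },
  ],
  tools: {
    access: ['service_create', 'service_read', 'service_update'],
    config: {
      // The selector simply forwards the selected option id.
      tool: (params: ToolParams) => params.operation,
    },
  },
}
```
The Langsmith block diff later in this compare applies the same idea: `langsmith_create_run` and `langsmith_create_runs_batch` serve as both the dropdown option ids and the tool ids.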
### With Parameter Transformation

View File

@@ -359,15 +359,6 @@ function SignupFormContent({
}
}
try {
await client.emailOtp.sendVerificationOtp({
email: emailValue,
type: 'sign-in',
})
} catch (otpErr) {
logger.warn('Failed to send sign-in OTP after signup; user can press Resend', otpErr)
}
router.push('/verify?fromSignup=true')
} catch (error) {
logger.error('Signup error:', error)

View File

@@ -93,7 +93,7 @@ export function useVerification({
try {
const normalizedEmail = email.trim().toLowerCase()
const response = await client.signIn.emailOtp({
const response = await client.emailOtp.verifyEmail({
email: normalizedEmail,
otp,
})
@@ -169,7 +169,7 @@ export function useVerification({
client.emailOtp
.sendVerificationOtp({
email: normalizedEmail,
type: 'sign-in',
type: 'email-verification',
})
.then(() => {})
.catch(() => {
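A condensed sketch of the corrected flow, using only the two `client.emailOtp` calls visible in this hunk. The structural client type and helper names below are illustrative, not the app's actual exports:
```typescript
// Minimal structural type for the parts of the auth client used here;
// the real client type comes from the app's better-auth setup.
interface EmailOtpClient {
  emailOtp: {
    verifyEmail: (args: { email: string; otp: string }) => Promise<unknown>
    sendVerificationOtp: (args: {
      email: string
      type: 'email-verification' | 'sign-in'
    }) => Promise<unknown>
  }
}

async function verifyEmailOtp(client: EmailOtpClient, email: string, otp: string) {
  const normalizedEmail = email.trim().toLowerCase()
  // Verification now targets the email-verification flow, not sign-in.
  return client.emailOtp.verifyEmail({ email: normalizedEmail, otp })
}

async function resendVerificationOtp(client: EmailOtpClient, email: string) {
  const normalizedEmail = email.trim().toLowerCase()
  // Resend uses the 'email-verification' type to match the verify call above.
  return client.emailOtp.sendVerificationOtp({
    email: normalizedEmail,
    type: 'email-verification',
  })
}
```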

View File

@@ -52,6 +52,9 @@ const ChatMessageSchema = z.object({
'gpt-5.1-high',
'gpt-5-codex',
'gpt-5.1-codex',
'gpt-5.2',
'gpt-5.2-codex',
'gpt-5.2-pro',
'gpt-4o',
'gpt-4.1',
'o3',

View File

@@ -15,11 +15,14 @@ const DEFAULT_ENABLED_MODELS: Record<string, boolean> = {
'gpt-5-medium': false,
'gpt-5-high': false,
'gpt-5.1-fast': false,
'gpt-5.1': true,
'gpt-5.1-medium': true,
'gpt-5.1': false,
'gpt-5.1-medium': false,
'gpt-5.1-high': false,
'gpt-5-codex': false,
'gpt-5.1-codex': true,
'gpt-5.1-codex': false,
'gpt-5.2': false,
'gpt-5.2-codex': true,
'gpt-5.2-pro': true,
o3: true,
'claude-4-sonnet': false,
'claude-4.5-haiku': true,

View File

@@ -15,6 +15,7 @@ import {
renderPlanWelcomeEmail,
renderUsageThresholdEmail,
renderWelcomeEmail,
renderWorkflowNotificationEmail,
renderWorkspaceInvitationEmail,
} from '@/components/emails'
@@ -108,6 +109,51 @@ const emailTemplates = {
message:
'I have 10 years of experience building scalable distributed systems. Most recently, I led a team at a Series B startup where we scaled from 100K to 10M users.',
}),
// Notification emails
'workflow-notification-success': () =>
renderWorkflowNotificationEmail({
workflowName: 'Customer Onboarding Flow',
status: 'success',
trigger: 'api',
duration: '2.3s',
cost: '$0.0042',
logUrl: 'https://sim.ai/workspace/ws_123/logs?search=exec_abc123',
}),
'workflow-notification-error': () =>
renderWorkflowNotificationEmail({
workflowName: 'Customer Onboarding Flow',
status: 'error',
trigger: 'webhook',
duration: '1.1s',
cost: '$0.0021',
logUrl: 'https://sim.ai/workspace/ws_123/logs?search=exec_abc123',
}),
'workflow-notification-alert': () =>
renderWorkflowNotificationEmail({
workflowName: 'Customer Onboarding Flow',
status: 'error',
trigger: 'schedule',
duration: '45.2s',
cost: '$0.0156',
logUrl: 'https://sim.ai/workspace/ws_123/logs?search=exec_abc123',
alertReason: '3 consecutive failures detected',
}),
'workflow-notification-full': () =>
renderWorkflowNotificationEmail({
workflowName: 'Data Processing Pipeline',
status: 'success',
trigger: 'api',
duration: '12.5s',
cost: '$0.0234',
logUrl: 'https://sim.ai/workspace/ws_123/logs?search=exec_abc123',
finalOutput: { processed: 150, skipped: 3, status: 'completed' },
rateLimits: {
sync: { requestsPerMinute: 60, remaining: 45 },
async: { requestsPerMinute: 120, remaining: 98 },
},
usageData: { currentPeriodCost: 12.45, limit: 50, percentUsed: 24.9 },
}),
} as const
type EmailTemplate = keyof typeof emailTemplates
@@ -131,6 +177,12 @@ export async function GET(request: NextRequest) {
'payment-failed',
],
Careers: ['careers-confirmation', 'careers-submission'],
Notifications: [
'workflow-notification-success',
'workflow-notification-error',
'workflow-notification-alert',
'workflow-notification-full',
],
}
const categoryHtml = Object.entries(categories)

View File

@@ -0,0 +1,293 @@
import { db } from '@sim/db'
import { permissions, workflow, workflowExecutionLogs } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, sql } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { generateRequestId } from '@/lib/core/utils/request'
import { buildFilterConditions, LogFilterParamsSchema } from '@/lib/logs/filters'
const logger = createLogger('LogsStatsAPI')
export const revalidate = 0
const StatsQueryParamsSchema = LogFilterParamsSchema.extend({
segmentCount: z.coerce.number().optional().default(72),
})
export interface SegmentStats {
timestamp: string
totalExecutions: number
successfulExecutions: number
avgDurationMs: number
}
export interface WorkflowStats {
workflowId: string
workflowName: string
segments: SegmentStats[]
overallSuccessRate: number
totalExecutions: number
totalSuccessful: number
}
export interface DashboardStatsResponse {
workflows: WorkflowStats[]
aggregateSegments: SegmentStats[]
totalRuns: number
totalErrors: number
avgLatency: number
timeBounds: {
start: string
end: string
}
segmentMs: number
}
export async function GET(request: NextRequest) {
const requestId = generateRequestId()
try {
const session = await getSession()
if (!session?.user?.id) {
logger.warn(`[${requestId}] Unauthorized logs stats access attempt`)
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const userId = session.user.id
try {
const { searchParams } = new URL(request.url)
const params = StatsQueryParamsSchema.parse(Object.fromEntries(searchParams.entries()))
const workspaceFilter = eq(workflowExecutionLogs.workspaceId, params.workspaceId)
const commonFilters = buildFilterConditions(params, { useSimpleLevelFilter: true })
const whereCondition = commonFilters ? and(workspaceFilter, commonFilters) : workspaceFilter
const boundsQuery = await db
.select({
minTime: sql<string>`MIN(${workflowExecutionLogs.startedAt})`,
maxTime: sql<string>`MAX(${workflowExecutionLogs.startedAt})`,
})
.from(workflowExecutionLogs)
.innerJoin(workflow, eq(workflowExecutionLogs.workflowId, workflow.id))
.innerJoin(
permissions,
and(
eq(permissions.entityType, 'workspace'),
eq(permissions.entityId, workflowExecutionLogs.workspaceId),
eq(permissions.userId, userId)
)
)
.where(whereCondition)
const bounds = boundsQuery[0]
const now = new Date()
let startTime: Date
let endTime: Date
if (!bounds?.minTime || !bounds?.maxTime) {
endTime = now
startTime = new Date(now.getTime() - 24 * 60 * 60 * 1000)
} else {
startTime = new Date(bounds.minTime)
endTime = new Date(Math.max(new Date(bounds.maxTime).getTime(), now.getTime()))
}
const totalMs = Math.max(1, endTime.getTime() - startTime.getTime())
const segmentMs = Math.max(60000, Math.floor(totalMs / params.segmentCount))
const startTimeIso = startTime.toISOString()
const statsQuery = await db
.select({
workflowId: workflowExecutionLogs.workflowId,
workflowName: workflow.name,
segmentIndex:
sql<number>`FLOOR(EXTRACT(EPOCH FROM (${workflowExecutionLogs.startedAt} - ${startTimeIso}::timestamp)) * 1000 / ${segmentMs})`.as(
'segment_index'
),
totalExecutions: sql<number>`COUNT(*)`.as('total_executions'),
successfulExecutions:
sql<number>`COUNT(*) FILTER (WHERE ${workflowExecutionLogs.level} != 'error')`.as(
'successful_executions'
),
avgDurationMs:
sql<number>`COALESCE(AVG(${workflowExecutionLogs.totalDurationMs}) FILTER (WHERE ${workflowExecutionLogs.totalDurationMs} > 0), 0)`.as(
'avg_duration_ms'
),
})
.from(workflowExecutionLogs)
.innerJoin(workflow, eq(workflowExecutionLogs.workflowId, workflow.id))
.innerJoin(
permissions,
and(
eq(permissions.entityType, 'workspace'),
eq(permissions.entityId, workflowExecutionLogs.workspaceId),
eq(permissions.userId, userId)
)
)
.where(whereCondition)
.groupBy(workflowExecutionLogs.workflowId, workflow.name, sql`segment_index`)
const workflowMap = new Map<
string,
{
workflowId: string
workflowName: string
segments: Map<number, SegmentStats>
totalExecutions: number
totalSuccessful: number
}
>()
for (const row of statsQuery) {
const segmentIndex = Math.min(
params.segmentCount - 1,
Math.max(0, Math.floor(Number(row.segmentIndex)))
)
if (!workflowMap.has(row.workflowId)) {
workflowMap.set(row.workflowId, {
workflowId: row.workflowId,
workflowName: row.workflowName,
segments: new Map(),
totalExecutions: 0,
totalSuccessful: 0,
})
}
const wf = workflowMap.get(row.workflowId)!
wf.totalExecutions += Number(row.totalExecutions)
wf.totalSuccessful += Number(row.successfulExecutions)
const existing = wf.segments.get(segmentIndex)
if (existing) {
const oldTotal = existing.totalExecutions
const newTotal = oldTotal + Number(row.totalExecutions)
existing.totalExecutions = newTotal
existing.successfulExecutions += Number(row.successfulExecutions)
existing.avgDurationMs =
newTotal > 0
? (existing.avgDurationMs * oldTotal +
Number(row.avgDurationMs || 0) * Number(row.totalExecutions)) /
newTotal
: 0
} else {
wf.segments.set(segmentIndex, {
timestamp: new Date(startTime.getTime() + segmentIndex * segmentMs).toISOString(),
totalExecutions: Number(row.totalExecutions),
successfulExecutions: Number(row.successfulExecutions),
avgDurationMs: Number(row.avgDurationMs || 0),
})
}
}
const workflows: WorkflowStats[] = []
for (const wf of workflowMap.values()) {
const segments: SegmentStats[] = []
for (let i = 0; i < params.segmentCount; i++) {
const existing = wf.segments.get(i)
if (existing) {
segments.push(existing)
} else {
segments.push({
timestamp: new Date(startTime.getTime() + i * segmentMs).toISOString(),
totalExecutions: 0,
successfulExecutions: 0,
avgDurationMs: 0,
})
}
}
workflows.push({
workflowId: wf.workflowId,
workflowName: wf.workflowName,
segments,
totalExecutions: wf.totalExecutions,
totalSuccessful: wf.totalSuccessful,
overallSuccessRate:
wf.totalExecutions > 0 ? (wf.totalSuccessful / wf.totalExecutions) * 100 : 100,
})
}
workflows.sort((a, b) => {
const errA = a.overallSuccessRate < 100 ? 1 - a.overallSuccessRate / 100 : 0
const errB = b.overallSuccessRate < 100 ? 1 - b.overallSuccessRate / 100 : 0
if (errA !== errB) return errB - errA
return a.workflowName.localeCompare(b.workflowName)
})
const aggregateSegments: SegmentStats[] = []
let totalRuns = 0
let totalErrors = 0
let weightedLatencySum = 0
let latencyCount = 0
for (let i = 0; i < params.segmentCount; i++) {
let segTotal = 0
let segSuccess = 0
let segWeightedLatency = 0
let segLatencyCount = 0
for (const wf of workflows) {
const seg = wf.segments[i]
segTotal += seg.totalExecutions
segSuccess += seg.successfulExecutions
if (seg.avgDurationMs > 0 && seg.totalExecutions > 0) {
segWeightedLatency += seg.avgDurationMs * seg.totalExecutions
segLatencyCount += seg.totalExecutions
}
}
totalRuns += segTotal
totalErrors += segTotal - segSuccess
weightedLatencySum += segWeightedLatency
latencyCount += segLatencyCount
aggregateSegments.push({
timestamp: new Date(startTime.getTime() + i * segmentMs).toISOString(),
totalExecutions: segTotal,
successfulExecutions: segSuccess,
avgDurationMs: segLatencyCount > 0 ? segWeightedLatency / segLatencyCount : 0,
})
}
const avgLatency = latencyCount > 0 ? weightedLatencySum / latencyCount : 0
const response: DashboardStatsResponse = {
workflows,
aggregateSegments,
totalRuns,
totalErrors,
avgLatency,
timeBounds: {
start: startTime.toISOString(),
end: endTime.toISOString(),
},
segmentMs,
}
return NextResponse.json(response, { status: 200 })
} catch (validationError) {
if (validationError instanceof z.ZodError) {
logger.warn(`[${requestId}] Invalid logs stats request parameters`, {
errors: validationError.errors,
})
return NextResponse.json(
{
error: 'Invalid request parameters',
details: validationError.errors,
},
{ status: 400 }
)
}
throw validationError
}
} catch (error: any) {
logger.error(`[${requestId}] logs stats fetch error`, error)
return NextResponse.json({ error: error.message }, { status: 500 })
}
}
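A small consumer sketch of the response contract above. The types are re-declared locally so the snippet stands alone (they mirror a subset of the exported interfaces), and the summary fields chosen here are illustrative:
```typescript
// Mirrors a subset of SegmentStats / DashboardStatsResponse from the route above.
interface SegmentStatsLike {
  timestamp: string
  totalExecutions: number
  successfulExecutions: number
  avgDurationMs: number
}

interface DashboardStatsLike {
  aggregateSegments: SegmentStatsLike[]
  totalRuns: number
  totalErrors: number
  avgLatency: number
}

// Derives headline numbers client-side from the pre-aggregated response,
// rather than recomputing them from raw logs.
function summarizeStats(stats: DashboardStatsLike) {
  const successRate =
    stats.totalRuns > 0
      ? ((stats.totalRuns - stats.totalErrors) / stats.totalRuns) * 100
      : 100

  let busiestSegmentAt: string | undefined
  let busiestCount = 0
  for (const seg of stats.aggregateSegments) {
    if (seg.totalExecutions > busiestCount) {
      busiestCount = seg.totalExecutions
      busiestSegmentAt = seg.timestamp
    }
  }

  return {
    successRate: `${successRate.toFixed(1)}%`,
    avgLatencyMs: Math.round(stats.avgLatency),
    busiestSegmentAt,
  }
}
```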

View File

@@ -1,7 +1,7 @@
import { db } from '@sim/db'
import { workflow } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, isNull, max } from 'drizzle-orm'
import { and, asc, eq, isNull, min } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
@@ -64,10 +64,20 @@ export async function GET(request: Request) {
let workflows
const orderByClause = [asc(workflow.sortOrder), asc(workflow.createdAt), asc(workflow.id)]
if (workspaceId) {
workflows = await db.select().from(workflow).where(eq(workflow.workspaceId, workspaceId))
workflows = await db
.select()
.from(workflow)
.where(eq(workflow.workspaceId, workspaceId))
.orderBy(...orderByClause)
} else {
workflows = await db.select().from(workflow).where(eq(workflow.userId, userId))
workflows = await db
.select()
.from(workflow)
.where(eq(workflow.userId, userId))
.orderBy(...orderByClause)
}
return NextResponse.json({ data: workflows }, { status: 200 })
@@ -140,15 +150,15 @@ export async function POST(req: NextRequest) {
sortOrder = providedSortOrder
} else {
const folderCondition = folderId ? eq(workflow.folderId, folderId) : isNull(workflow.folderId)
const [maxResult] = await db
.select({ maxOrder: max(workflow.sortOrder) })
const [minResult] = await db
.select({ minOrder: min(workflow.sortOrder) })
.from(workflow)
.where(
workspaceId
? and(eq(workflow.workspaceId, workspaceId), folderCondition)
: and(eq(workflow.userId, session.user.id), folderCondition)
)
sortOrder = (maxResult?.maxOrder ?? -1) + 1
sortOrder = (minResult?.minOrder ?? 1) - 1
}
await db.insert(workflow).values({
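Taken together with the comparator changes later in this compare (`compareByOrder` / `compareSiblingItems`), the intent is: new items take `min(sortOrder) - 1` so they surface at the top, and ties resolve deterministically by `createdAt`, then `id`. A simplified sketch of that invariant, with the item shape reduced to the relevant fields:
```typescript
// Simplified item shape; the real code orders workflows and folders together.
interface OrderedItem {
  id: string
  sortOrder: number
  createdAt: Date
}

// New items go above the current minimum, mirroring the
// `(minResult?.minOrder ?? 1) - 1` logic in the POST handler above.
function nextTopSortOrder(items: OrderedItem[]): number {
  if (items.length === 0) return 0
  return Math.min(...items.map((i) => i.sortOrder)) - 1
}

// Deterministic comparator: sortOrder, then createdAt, then id — the same
// tie-breaking used by the sidebar helpers added later in this compare.
function compareByOrder(a: OrderedItem, b: OrderedItem): number {
  if (a.sortOrder !== b.sortOrder) return a.sortOrder - b.sortOrder
  const timeA = a.createdAt.getTime()
  const timeB = b.createdAt.getTime()
  if (timeA !== timeB) return timeA - timeB
  return a.id.localeCompare(b.id)
}
```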

View File

@@ -5,8 +5,14 @@ import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { v4 as uuidv4 } from 'uuid'
import {
type EmailRateLimitsData,
type EmailUsageData,
renderWorkflowNotificationEmail,
} from '@/components/emails'
import { getSession } from '@/lib/auth'
import { decryptSecret } from '@/lib/core/security/encryption'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { sendEmail } from '@/lib/messaging/email/mailer'
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
@@ -67,29 +73,39 @@ function buildTestPayload(subscription: typeof workspaceNotificationSubscription
data.finalOutput = { message: 'This is a test notification', test: true }
}
if (subscription.includeTraceSpans) {
data.traceSpans = [
{
id: 'span_test_1',
name: 'Test Block',
type: 'block',
status: 'success',
startTime: new Date(timestamp - 5000).toISOString(),
endTime: new Date(timestamp).toISOString(),
duration: 5000,
},
]
}
if (subscription.includeRateLimits) {
data.rateLimits = {
sync: { limit: 150, remaining: 45, resetAt: new Date(timestamp + 60000).toISOString() },
async: { limit: 1000, remaining: 50, resetAt: new Date(timestamp + 60000).toISOString() },
sync: {
requestsPerMinute: 150,
remaining: 45,
resetAt: new Date(timestamp + 60000).toISOString(),
},
async: {
requestsPerMinute: 1000,
remaining: 50,
resetAt: new Date(timestamp + 60000).toISOString(),
},
}
}
if (subscription.includeUsageData) {
data.usage = { currentPeriodCost: 2.45, limit: 20, plan: 'pro', isExceeded: false }
data.usage = { currentPeriodCost: 2.45, limit: 20, percentUsed: 12.25, isExceeded: false }
}
if (subscription.includeTraceSpans && subscription.notificationType === 'webhook') {
data.traceSpans = [
{
name: 'test-block',
startTime: timestamp,
endTime: timestamp + 150,
duration: 150,
status: 'success',
blockId: 'block_test_1',
blockType: 'agent',
blockName: 'Test Agent',
children: [],
},
]
}
return { payload, timestamp }
@@ -157,23 +173,26 @@ async function testEmail(subscription: typeof workspaceNotificationSubscription.
const { payload } = buildTestPayload(subscription)
const data = (payload as Record<string, unknown>).data as Record<string, unknown>
const baseUrl = getBaseUrl()
const logUrl = `${baseUrl}/workspace/${subscription.workspaceId}/logs`
const html = await renderWorkflowNotificationEmail({
workflowName: data.workflowName as string,
status: data.status as 'success' | 'error',
trigger: data.trigger as string,
duration: `${data.totalDurationMs}ms`,
cost: `$${(((data.cost as Record<string, unknown>)?.total as number) || 0).toFixed(4)}`,
logUrl,
finalOutput: data.finalOutput,
rateLimits: data.rateLimits as EmailRateLimitsData | undefined,
usageData: data.usage as EmailUsageData | undefined,
})
const result = await sendEmail({
to: subscription.emailRecipients,
subject: `[Test] Workflow Execution: ${data.workflowName}`,
text: `This is a test notification from Sim Studio.\n\nWorkflow: ${data.workflowName}\nStatus: ${data.status}\nDuration: ${data.totalDurationMs}ms\n\nThis notification is configured for workspace notifications.`,
html: `
<div style="font-family: sans-serif; max-width: 600px; margin: 0 auto;">
<h2 style="color: #7F2FFF;">Test Notification</h2>
<p>This is a test notification from Sim Studio.</p>
<table style="width: 100%; border-collapse: collapse; margin: 20px 0;">
<tr><td style="padding: 8px; border: 1px solid #eee;"><strong>Workflow</strong></td><td style="padding: 8px; border: 1px solid #eee;">${data.workflowName}</td></tr>
<tr><td style="padding: 8px; border: 1px solid #eee;"><strong>Status</strong></td><td style="padding: 8px; border: 1px solid #eee;">${data.status}</td></tr>
<tr><td style="padding: 8px; border: 1px solid #eee;"><strong>Duration</strong></td><td style="padding: 8px; border: 1px solid #eee;">${data.totalDurationMs}ms</td></tr>
</table>
<p style="color: #666; font-size: 12px;">This notification is configured for workspace notifications.</p>
</div>
`,
html,
text: `This is a test notification from Sim.\n\nWorkflow: ${data.workflowName}\nStatus: ${data.status}\nDuration: ${data.totalDurationMs}ms\n\nView Log: ${logUrl}\n\nThis notification is configured for workspace notifications.`,
emailType: 'notifications',
})
@@ -227,7 +246,7 @@ async function testSlack(
elements: [
{
type: 'mrkdwn',
text: 'This is a test notification from Sim Studio workspace notifications.',
text: 'This is a test notification from Sim workspace notifications.',
},
],
},

View File

@@ -222,7 +222,6 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
}
}
// Encrypt webhook secret if provided
let webhookConfig = data.webhookConfig || null
if (webhookConfig?.secret) {
const { encrypted } = await encryptSecret(webhookConfig.secret)

View File

@@ -3,9 +3,9 @@
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { Loader2 } from 'lucide-react'
import { Skeleton } from '@/components/ui/skeleton'
import { formatLatency, parseDuration } from '@/app/workspace/[workspaceId]/logs/utils'
import { formatLatency } from '@/app/workspace/[workspaceId]/logs/utils'
import type { DashboardStatsResponse, WorkflowStats } from '@/hooks/queries/logs'
import { useFilterStore } from '@/stores/logs/filters/store'
import type { WorkflowLog } from '@/stores/logs/filters/types'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { LineChart, WorkflowsList } from './components'
@@ -26,10 +26,6 @@ interface WorkflowExecution {
overallSuccessRate: number
}
const DEFAULT_SEGMENTS = 72
const MIN_SEGMENT_PX = 10
const MIN_SEGMENT_MS = 60000
const SKELETON_BAR_HEIGHTS = [
45, 72, 38, 85, 52, 68, 30, 90, 55, 42, 78, 35, 88, 48, 65, 28, 82, 58, 40, 75, 32, 95, 50, 70,
]
@@ -120,13 +116,32 @@ function DashboardSkeleton() {
}
interface DashboardProps {
logs: WorkflowLog[]
stats?: DashboardStatsResponse
isLoading: boolean
error?: Error | null
}
export default function Dashboard({ logs, isLoading, error }: DashboardProps) {
const [segmentCount, setSegmentCount] = useState<number>(DEFAULT_SEGMENTS)
/**
* Converts server WorkflowStats to the internal WorkflowExecution format.
*/
function toWorkflowExecution(wf: WorkflowStats): WorkflowExecution {
return {
workflowId: wf.workflowId,
workflowName: wf.workflowName,
overallSuccessRate: wf.overallSuccessRate,
segments: wf.segments.map((seg) => ({
timestamp: seg.timestamp,
totalExecutions: seg.totalExecutions,
successfulExecutions: seg.successfulExecutions,
hasExecutions: seg.totalExecutions > 0,
successRate:
seg.totalExecutions > 0 ? (seg.successfulExecutions / seg.totalExecutions) * 100 : 100,
avgDurationMs: seg.avgDurationMs,
})),
}
}
export default function Dashboard({ stats, isLoading, error }: DashboardProps) {
const [selectedSegments, setSelectedSegments] = useState<Record<string, number[]>>({})
const [lastAnchorIndices, setLastAnchorIndices] = useState<Record<string, number>>({})
const barsAreaRef = useRef<HTMLDivElement | null>(null)
@@ -137,182 +152,32 @@ export default function Dashboard({ logs, isLoading, error }: DashboardProps) {
const expandedWorkflowId = workflowIds.length === 1 ? workflowIds[0] : null
const lastExecutionByWorkflow = useMemo(() => {
const map = new Map<string, number>()
for (const log of logs) {
const wfId = log.workflowId
if (!wfId) continue
const ts = new Date(log.createdAt).getTime()
const existing = map.get(wfId)
if (!existing || ts > existing) {
map.set(wfId, ts)
}
}
return map
}, [logs])
const timeBounds = useMemo(() => {
if (logs.length === 0) {
const now = new Date()
return { start: now, end: now }
}
let minTime = Number.POSITIVE_INFINITY
let maxTime = Number.NEGATIVE_INFINITY
for (const log of logs) {
const ts = new Date(log.createdAt).getTime()
if (ts < minTime) minTime = ts
if (ts > maxTime) maxTime = ts
}
const end = new Date(Math.max(maxTime, Date.now()))
const start = new Date(minTime)
return { start, end }
}, [logs])
const { executions, aggregateSegments, segmentMs } = useMemo(() => {
const allWorkflowsList = Object.values(allWorkflows)
if (allWorkflowsList.length === 0) {
if (!stats) {
return { executions: [], aggregateSegments: [], segmentMs: 0 }
}
const { start, end } =
logs.length > 0
? timeBounds
: { start: new Date(Date.now() - 24 * 60 * 60 * 1000), end: new Date() }
const totalMs = Math.max(1, end.getTime() - start.getTime())
const calculatedSegmentMs = Math.max(
MIN_SEGMENT_MS,
Math.floor(totalMs / Math.max(1, segmentCount))
)
const logsByWorkflow = new Map<string, WorkflowLog[]>()
for (const log of logs) {
const wfId = log.workflowId
if (!logsByWorkflow.has(wfId)) {
logsByWorkflow.set(wfId, [])
}
logsByWorkflow.get(wfId)!.push(log)
}
const workflowExecutions: WorkflowExecution[] = []
for (const workflow of allWorkflowsList) {
const workflowLogs = logsByWorkflow.get(workflow.id) || []
const segments: WorkflowExecution['segments'] = Array.from(
{ length: segmentCount },
(_, i) => ({
timestamp: new Date(start.getTime() + i * calculatedSegmentMs).toISOString(),
hasExecutions: false,
totalExecutions: 0,
successfulExecutions: 0,
successRate: 100,
avgDurationMs: 0,
})
)
const durations: number[][] = Array.from({ length: segmentCount }, () => [])
for (const log of workflowLogs) {
const logTime = new Date(log.createdAt).getTime()
const idx = Math.min(
segmentCount - 1,
Math.max(0, Math.floor((logTime - start.getTime()) / calculatedSegmentMs))
)
segments[idx].totalExecutions += 1
segments[idx].hasExecutions = true
if (log.level !== 'error') {
segments[idx].successfulExecutions += 1
}
const duration = parseDuration({ duration: log.duration ?? undefined })
if (duration !== null && duration > 0) {
durations[idx].push(duration)
}
}
let totalExecs = 0
let totalSuccess = 0
for (let i = 0; i < segmentCount; i++) {
const seg = segments[i]
totalExecs += seg.totalExecutions
totalSuccess += seg.successfulExecutions
if (seg.totalExecutions > 0) {
seg.successRate = (seg.successfulExecutions / seg.totalExecutions) * 100
}
if (durations[i].length > 0) {
seg.avgDurationMs = Math.round(
durations[i].reduce((sum, d) => sum + d, 0) / durations[i].length
)
}
}
const overallSuccessRate = totalExecs > 0 ? (totalSuccess / totalExecs) * 100 : 100
workflowExecutions.push({
workflowId: workflow.id,
workflowName: workflow.name,
segments,
overallSuccessRate,
})
}
workflowExecutions.sort((a, b) => {
const errA = a.overallSuccessRate < 100 ? 1 - a.overallSuccessRate / 100 : 0
const errB = b.overallSuccessRate < 100 ? 1 - b.overallSuccessRate / 100 : 0
if (errA !== errB) return errB - errA
return a.workflowName.localeCompare(b.workflowName)
})
const aggSegments: {
timestamp: string
totalExecutions: number
successfulExecutions: number
avgDurationMs: number
}[] = Array.from({ length: segmentCount }, (_, i) => ({
timestamp: new Date(start.getTime() + i * calculatedSegmentMs).toISOString(),
totalExecutions: 0,
successfulExecutions: 0,
avgDurationMs: 0,
}))
const weightedDurationSums: number[] = Array(segmentCount).fill(0)
const executionCounts: number[] = Array(segmentCount).fill(0)
for (const wf of workflowExecutions) {
wf.segments.forEach((s, i) => {
aggSegments[i].totalExecutions += s.totalExecutions
aggSegments[i].successfulExecutions += s.successfulExecutions
if (s.avgDurationMs && s.avgDurationMs > 0 && s.totalExecutions > 0) {
weightedDurationSums[i] += s.avgDurationMs * s.totalExecutions
executionCounts[i] += s.totalExecutions
}
})
}
aggSegments.forEach((seg, i) => {
if (executionCounts[i] > 0) {
seg.avgDurationMs = weightedDurationSums[i] / executionCounts[i]
}
})
const workflowExecutions = stats.workflows.map(toWorkflowExecution)
return {
executions: workflowExecutions,
aggregateSegments: aggSegments,
segmentMs: calculatedSegmentMs,
aggregateSegments: stats.aggregateSegments,
segmentMs: stats.segmentMs,
}
}, [logs, timeBounds, segmentCount, allWorkflows])
}, [stats])
const lastExecutionByWorkflow = useMemo(() => {
const map = new Map<string, number>()
for (const wf of executions) {
for (let i = wf.segments.length - 1; i >= 0; i--) {
if (wf.segments[i].totalExecutions > 0) {
map.set(wf.workflowId, new Date(wf.segments[i].timestamp).getTime())
break
}
}
}
return map
}, [executions])
const filteredExecutions = useMemo(() => {
let filtered = executions
@@ -511,37 +376,12 @@ export default function Dashboard({ logs, isLoading, error }: DashboardProps) {
useEffect(() => {
setSelectedSegments({})
setLastAnchorIndices({})
}, [logs, timeRange, workflowIds, searchQuery])
useEffect(() => {
if (!barsAreaRef.current) return
const el = barsAreaRef.current
let debounceId: ReturnType<typeof setTimeout> | null = null
const ro = new ResizeObserver(([entry]) => {
const w = entry?.contentRect?.width || 720
const n = Math.max(36, Math.min(96, Math.floor(w / MIN_SEGMENT_PX)))
if (debounceId) clearTimeout(debounceId)
debounceId = setTimeout(() => {
setSegmentCount(n)
}, 150)
})
ro.observe(el)
const rect = el.getBoundingClientRect()
if (rect?.width) {
const n = Math.max(36, Math.min(96, Math.floor(rect.width / MIN_SEGMENT_PX)))
setSegmentCount(n)
}
return () => {
if (debounceId) clearTimeout(debounceId)
ro.disconnect()
}
}, [])
}, [stats, timeRange, workflowIds, searchQuery])
if (isLoading) {
return <DashboardSkeleton />
}
// Show error state
if (error) {
return (
<div className='mt-[24px] flex flex-1 items-center justify-center'>

View File

@@ -422,7 +422,8 @@ export function NotificationSettings({
levelFilter: formData.levelFilter,
triggerFilter: formData.triggerFilter,
includeFinalOutput: formData.includeFinalOutput,
includeTraceSpans: formData.includeTraceSpans,
// Trace spans only available for webhooks (too large for email/Slack)
includeTraceSpans: activeTab === 'webhook' ? formData.includeTraceSpans : false,
includeRateLimits: formData.includeRateLimits,
includeUsageData: formData.includeUsageData,
alertConfig,
@@ -830,7 +831,10 @@ export function NotificationSettings({
<Combobox
options={[
{ label: 'Final Output', value: 'includeFinalOutput' },
{ label: 'Trace Spans', value: 'includeTraceSpans' },
// Trace spans only available for webhooks (too large for email/Slack)
...(activeTab === 'webhook'
? [{ label: 'Trace Spans', value: 'includeTraceSpans' }]
: []),
{ label: 'Rate Limits', value: 'includeRateLimits' },
{ label: 'Usage Data', value: 'includeUsageData' },
]}
@@ -838,7 +842,7 @@ export function NotificationSettings({
multiSelectValues={
[
formData.includeFinalOutput && 'includeFinalOutput',
formData.includeTraceSpans && 'includeTraceSpans',
formData.includeTraceSpans && activeTab === 'webhook' && 'includeTraceSpans',
formData.includeRateLimits && 'includeRateLimits',
formData.includeUsageData && 'includeUsageData',
].filter(Boolean) as string[]
@@ -862,7 +866,7 @@ export function NotificationSettings({
}
const selected = [
formData.includeFinalOutput && 'includeFinalOutput',
formData.includeTraceSpans && 'includeTraceSpans',
formData.includeTraceSpans && activeTab === 'webhook' && 'includeTraceSpans',
formData.includeRateLimits && 'includeRateLimits',
formData.includeUsageData && 'includeUsageData',
].filter(Boolean) as string[]

View File

@@ -11,7 +11,7 @@ import {
} from '@/lib/logs/filters'
import { parseQuery, queryToApiParams } from '@/lib/logs/query-parser'
import { useFolders } from '@/hooks/queries/folders'
import { useDashboardLogs, useLogDetail, useLogsList } from '@/hooks/queries/logs'
import { useDashboardStats, useLogDetail, useLogsList } from '@/hooks/queries/logs'
import { useDebounce } from '@/hooks/use-debounce'
import { useFilterStore } from '@/stores/logs/filters/store'
import type { WorkflowLog } from '@/stores/logs/filters/types'
@@ -130,7 +130,7 @@ export default function Logs() {
[timeRange, startDate, endDate, level, workflowIds, folderIds, triggers, debouncedSearchQuery]
)
const dashboardLogsQuery = useDashboardLogs(workspaceId, dashboardFilters, {
const dashboardStatsQuery = useDashboardStats(workspaceId, dashboardFilters, {
enabled: Boolean(workspaceId) && isInitialized.current,
refetchInterval: isLive ? 5000 : false,
})
@@ -417,9 +417,9 @@ export default function Logs() {
className={cn('flex min-h-0 flex-1 flex-col pr-[24px]', !isDashboardView && 'hidden')}
>
<Dashboard
logs={dashboardLogsQuery.data ?? []}
isLoading={!dashboardLogsQuery.data}
error={dashboardLogsQuery.error}
stats={dashboardStatsQuery.data}
isLoading={dashboardStatsQuery.isLoading}
error={dashboardStatsQuery.error}
/>
</div>

View File

@@ -2,29 +2,9 @@ import { memo, useEffect, useRef, useState } from 'react'
import CopilotMarkdownRenderer from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/markdown-renderer'
/**
* Minimum delay between characters (fast catch-up mode)
* Character animation delay in milliseconds
*/
const MIN_DELAY = 1
/**
* Maximum delay between characters (when waiting for content)
*/
const MAX_DELAY = 12
/**
* Default delay when streaming normally
*/
const DEFAULT_DELAY = 4
/**
* How far behind (in characters) before we speed up
*/
const CATCH_UP_THRESHOLD = 20
/**
* How close to content before we slow down
*/
const SLOW_DOWN_THRESHOLD = 5
const CHARACTER_DELAY = 3
/**
* StreamingIndicator shows animated dots during message streaming
@@ -54,50 +34,21 @@ interface SmoothStreamingTextProps {
isStreaming: boolean
}
/**
* Calculates adaptive delay based on how far behind animation is from actual content
*
* @param displayedLength - Current displayed content length
* @param totalLength - Total available content length
* @returns Delay in milliseconds
*/
function calculateAdaptiveDelay(displayedLength: number, totalLength: number): number {
const charsRemaining = totalLength - displayedLength
if (charsRemaining > CATCH_UP_THRESHOLD) {
// Far behind - speed up to catch up
// Scale from MIN_DELAY to DEFAULT_DELAY based on how far behind
const catchUpFactor = Math.min(1, (charsRemaining - CATCH_UP_THRESHOLD) / 50)
return MIN_DELAY + (DEFAULT_DELAY - MIN_DELAY) * (1 - catchUpFactor)
}
if (charsRemaining <= SLOW_DOWN_THRESHOLD) {
// Close to content edge - slow down to feel natural
// The closer we are, the slower we go (up to MAX_DELAY)
const slowFactor = 1 - charsRemaining / SLOW_DOWN_THRESHOLD
return DEFAULT_DELAY + (MAX_DELAY - DEFAULT_DELAY) * slowFactor
}
// Normal streaming speed
return DEFAULT_DELAY
}
/**
* SmoothStreamingText component displays text with character-by-character animation
* Creates a smooth streaming effect for AI responses with adaptive speed
*
* Uses adaptive pacing: speeds up when catching up, slows down near content edge
* Creates a smooth streaming effect for AI responses
*
* @param props - Component props
* @returns Streaming text with smooth animation
*/
export const SmoothStreamingText = memo(
({ content, isStreaming }: SmoothStreamingTextProps) => {
const [displayedContent, setDisplayedContent] = useState('')
// Initialize with full content when not streaming to avoid flash on page load
const [displayedContent, setDisplayedContent] = useState(() => (isStreaming ? '' : content))
const contentRef = useRef(content)
const rafRef = useRef<number | null>(null)
const indexRef = useRef(0)
const lastFrameTimeRef = useRef<number>(0)
const timeoutRef = useRef<NodeJS.Timeout | null>(null)
// Initialize index based on streaming state
const indexRef = useRef(isStreaming ? 0 : content.length)
const isAnimatingRef = useRef(false)
useEffect(() => {
@@ -110,42 +61,33 @@ export const SmoothStreamingText = memo(
}
if (isStreaming) {
if (indexRef.current < content.length && !isAnimatingRef.current) {
isAnimatingRef.current = true
lastFrameTimeRef.current = performance.now()
const animateText = (timestamp: number) => {
if (indexRef.current < content.length) {
const animateText = () => {
const currentContent = contentRef.current
const currentIndex = indexRef.current
const elapsed = timestamp - lastFrameTimeRef.current
// Calculate adaptive delay based on how far behind we are
const delay = calculateAdaptiveDelay(currentIndex, currentContent.length)
if (elapsed >= delay) {
if (currentIndex < currentContent.length) {
const newDisplayed = currentContent.slice(0, currentIndex + 1)
setDisplayedContent(newDisplayed)
indexRef.current = currentIndex + 1
lastFrameTimeRef.current = timestamp
}
}
if (indexRef.current < currentContent.length) {
rafRef.current = requestAnimationFrame(animateText)
if (currentIndex < currentContent.length) {
const newDisplayed = currentContent.slice(0, currentIndex + 1)
setDisplayedContent(newDisplayed)
indexRef.current = currentIndex + 1
timeoutRef.current = setTimeout(animateText, CHARACTER_DELAY)
} else {
isAnimatingRef.current = false
}
}
rafRef.current = requestAnimationFrame(animateText)
} else if (indexRef.current < content.length && isAnimatingRef.current) {
// Animation already running, it will pick up new content automatically
if (!isAnimatingRef.current) {
if (timeoutRef.current) {
clearTimeout(timeoutRef.current)
}
isAnimatingRef.current = true
animateText()
}
}
} else {
// Streaming ended - show full content immediately
if (rafRef.current) {
cancelAnimationFrame(rafRef.current)
if (timeoutRef.current) {
clearTimeout(timeoutRef.current)
}
setDisplayedContent(content)
indexRef.current = content.length
@@ -153,8 +95,8 @@ export const SmoothStreamingText = memo(
}
return () => {
if (rafRef.current) {
cancelAnimationFrame(rafRef.current)
if (timeoutRef.current) {
clearTimeout(timeoutRef.current)
}
isAnimatingRef.current = false
}

View File

@@ -46,12 +46,14 @@ interface SmoothThinkingTextProps {
*/
const SmoothThinkingText = memo(
({ content, isStreaming }: SmoothThinkingTextProps) => {
const [displayedContent, setDisplayedContent] = useState('')
// Initialize with full content when not streaming to avoid flash on page load
const [displayedContent, setDisplayedContent] = useState(() => (isStreaming ? '' : content))
const [showGradient, setShowGradient] = useState(false)
const contentRef = useRef(content)
const textRef = useRef<HTMLDivElement>(null)
const rafRef = useRef<number | null>(null)
const indexRef = useRef(0)
// Initialize index based on streaming state
const indexRef = useRef(isStreaming ? 0 : content.length)
const lastFrameTimeRef = useRef<number>(0)
const isAnimatingRef = useRef(false)

View File

@@ -1952,7 +1952,12 @@ export function ToolCall({ toolCall: toolCallProp, toolCallId, onStateChange }:
}, [params])
// Skip rendering some internal tools
if (toolCall.name === 'checkoff_todo' || toolCall.name === 'mark_todo_in_progress') return null
if (
toolCall.name === 'checkoff_todo' ||
toolCall.name === 'mark_todo_in_progress' ||
toolCall.name === 'tool_search_tool_regex'
)
return null
// Special rendering for subagent tools - show as thinking text with tool calls at top level
const SUBAGENT_TOOLS = [

View File

@@ -32,13 +32,6 @@ function getModelIconComponent(modelValue: string) {
return <IconComponent className='h-3.5 w-3.5' />
}
/**
* Checks if a model should display the MAX badge
*/
function isMaxModel(modelValue: string): boolean {
return modelValue === 'claude-4.5-sonnet' || modelValue === 'claude-4.5-opus'
}
/**
* Model selector dropdown for choosing AI model.
* Displays model icon and label.
@@ -139,11 +132,6 @@ export function ModelSelector({ selectedModel, isNearTop, onModelSelect }: Model
>
{getModelIconComponent(option.value)}
<span>{option.label}</span>
{isMaxModel(option.value) && (
<Badge size='sm' className='ml-auto'>
MAX
</Badge>
)}
</PopoverItem>
))}
</PopoverScrollArea>

View File

@@ -238,8 +238,8 @@ export const MODEL_OPTIONS = [
{ value: 'claude-4.5-opus', label: 'Claude 4.5 Opus' },
{ value: 'claude-4.5-sonnet', label: 'Claude 4.5 Sonnet' },
{ value: 'claude-4.5-haiku', label: 'Claude 4.5 Haiku' },
{ value: 'gpt-5.1-codex', label: 'GPT 5.1 Codex' },
{ value: 'gpt-5.1-medium', label: 'GPT 5.1 Medium' },
{ value: 'gpt-5.2-codex', label: 'GPT 5.2 Codex' },
{ value: 'gpt-5.2-pro', label: 'GPT 5.2 Pro' },
{ value: 'gemini-3-pro', label: 'Gemini 3 Pro' },
] as const

View File

@@ -52,7 +52,10 @@ function isDefaultDescription(desc: string | null | undefined, workflowName: str
if (!desc) return true
const normalized = desc.toLowerCase().trim()
return (
normalized === '' || normalized === 'new workflow' || normalized === workflowName.toLowerCase()
normalized === '' ||
normalized === 'new workflow' ||
normalized === 'your first workflow - start building here!' ||
normalized === workflowName.toLowerCase()
)
}
@@ -685,9 +688,31 @@ console.log(data);`
{/* Endpoint URL (shown when agent exists) */}
{existingAgent && endpoint && (
<div>
<Label className='mb-[6.5px] block pl-[2px] font-medium text-[13px] text-[var(--text-primary)]'>
URL
</Label>
<div className='mb-[6.5px] flex items-center justify-between'>
<Label className='block pl-[2px] font-medium text-[13px] text-[var(--text-primary)]'>
URL
</Label>
<Tooltip.Root>
<Tooltip.Trigger asChild>
<Button
type='button'
variant='ghost'
onClick={() => {
navigator.clipboard.writeText(endpoint)
setUrlCopied(true)
setTimeout(() => setUrlCopied(false), 2000)
}}
aria-label='Copy URL'
className='!p-1.5 -my-1.5'
>
{urlCopied ? <Check className='h-3 w-3' /> : <Clipboard className='h-3 w-3' />}
</Button>
</Tooltip.Trigger>
<Tooltip.Content>
<span>{urlCopied ? 'Copied' : 'Copy'}</span>
</Tooltip.Content>
</Tooltip.Root>
</div>
<div className='relative flex items-stretch overflow-hidden rounded-[4px] border border-[var(--border-1)]'>
<div className='flex items-center whitespace-nowrap bg-[var(--surface-5)] pr-[6px] pl-[8px] font-medium text-[var(--text-secondary)] text-sm dark:bg-[var(--surface-5)]'>
{baseUrl.replace(/^https?:\/\//, '')}/api/a2a/serve/
@@ -696,30 +721,8 @@ console.log(data);`
<Input
value={existingAgent.id}
readOnly
className='rounded-none border-0 pr-[32px] pl-0 text-[var(--text-tertiary)] shadow-none'
className='rounded-none border-0 pl-0 text-[var(--text-tertiary)] shadow-none'
/>
<Tooltip.Root>
<Tooltip.Trigger asChild>
<button
type='button'
onClick={() => {
navigator.clipboard.writeText(endpoint)
setUrlCopied(true)
setTimeout(() => setUrlCopied(false), 2000)
}}
className='-translate-y-1/2 absolute top-1/2 right-2'
>
{urlCopied ? (
<Check className='h-3 w-3 text-[var(--brand-tertiary-2)]' />
) : (
<Clipboard className='h-3 w-3 text-[var(--text-tertiary)]' />
)}
</button>
</Tooltip.Trigger>
<Tooltip.Content>
<span>{urlCopied ? 'Copied' : 'Copy'}</span>
</Tooltip.Content>
</Tooltip.Root>
</div>
</div>
<p className='mt-[6.5px] text-[11px] text-[var(--text-secondary)]'>

View File

@@ -415,7 +415,7 @@ export function ChatDeploy({
>
Cancel
</Button>
<Button variant='destructive' onClick={handleDelete} disabled={isDeleting}>
<Button variant='default' onClick={handleDelete} disabled={isDeleting}>
{isDeleting ? 'Deleting...' : 'Delete'}
</Button>
</ModalFooter>
@@ -532,7 +532,8 @@ function IdentifierInput({
</div>
) : (
isValid &&
value && (
value &&
value !== originalIdentifier && (
<Tooltip.Root>
<Tooltip.Trigger asChild>
<div className='-translate-y-1/2 absolute top-1/2 right-2'>

View File

@@ -138,10 +138,12 @@ export function McpDeploy({
const [toolName, setToolName] = useState(() => sanitizeToolName(workflowName))
const [toolDescription, setToolDescription] = useState(() => {
const normalizedDesc = workflowDescription?.toLowerCase().trim()
const isDefaultDescription =
!workflowDescription ||
workflowDescription === workflowName ||
workflowDescription.toLowerCase() === 'new workflow'
normalizedDesc === 'new workflow' ||
normalizedDesc === 'your first workflow - start building here!'
return isDefaultDescription ? '' : workflowDescription
})
@@ -193,10 +195,12 @@ export function McpDeploy({
setToolName(toolInfo.tool.toolName)
const loadedDescription = toolInfo.tool.toolDescription || ''
const normalizedLoadedDesc = loadedDescription.toLowerCase().trim()
const isDefaultDescription =
!loadedDescription ||
loadedDescription === workflowName ||
loadedDescription.toLowerCase() === 'new workflow'
normalizedLoadedDesc === 'new workflow' ||
normalizedLoadedDesc === 'your first workflow - start building here!'
setToolDescription(isDefaultDescription ? '' : loadedDescription)
const schema = toolInfo.tool.parameterSchema as Record<string, unknown> | undefined

View File

@@ -734,7 +734,7 @@ export function DeployModal({
)}
</ModalTabsContent> */}
<ModalTabsContent value='mcp'>
<ModalTabsContent value='mcp' className='h-full'>
{workflowId && (
<McpDeploy
workflowId={workflowId}
@@ -800,7 +800,7 @@ export function DeployModal({
{chatExists && (
<Button
type='button'
variant='destructive'
variant='default'
onClick={handleChatDelete}
disabled={chatSubmitting}
>

View File

@@ -125,10 +125,16 @@ export function FieldFormat({
}
/**
* Removes a field by ID, preventing removal of the last field
* Removes a field by ID, or clears it if it's the last field
*/
const removeField = (id: string) => {
if (isReadOnly || fields.length === 1) return
if (isReadOnly) return
if (fields.length === 1) {
setStoreValue([createDefaultField()])
return
}
setStoreValue(fields.filter((field) => field.id !== id))
}
@@ -273,7 +279,7 @@ export function FieldFormat({
<Button
variant='ghost'
onClick={() => removeField(field.id)}
disabled={isReadOnly || fields.length === 1}
disabled={isReadOnly}
className='h-auto p-0 text-[var(--text-error)] hover:text-[var(--text-error)]'
>
<Trash className='h-[14px] w-[14px]' />

View File

@@ -414,7 +414,7 @@ export function ApiKeys({ onOpenChange, registerCloseHandler }: ApiKeysProps) {
Cancel
</Button>
<Button
variant='ghost'
variant='destructive'
onClick={handleDeleteKey}
disabled={deleteApiKeyMutation.isPending}
>

View File

@@ -268,14 +268,7 @@ export function ContextMenu({
height: '1px',
}}
/>
<PopoverContent
ref={menuRef}
align='start'
side='bottom'
sideOffset={4}
onPointerDownOutside={(e) => e.preventDefault()}
onInteractOutside={(e) => e.preventDefault()}
>
<PopoverContent ref={menuRef} align='start' side='bottom' sideOffset={4}>
{/* Back button - shown only when in a folder */}
<PopoverBackButton />

View File

@@ -18,6 +18,17 @@ const TREE_SPACING = {
INDENT_PER_LEVEL: 20,
} as const
function compareByOrder<T extends { sortOrder: number; createdAt?: Date; id: string }>(
a: T,
b: T
): number {
if (a.sortOrder !== b.sortOrder) return a.sortOrder - b.sortOrder
const timeA = a.createdAt?.getTime() ?? 0
const timeB = b.createdAt?.getTime() ?? 0
if (timeA !== timeB) return timeA - timeB
return a.id.localeCompare(b.id)
}
interface WorkflowListProps {
regularWorkflows: WorkflowMetadata[]
isLoading?: boolean
@@ -97,7 +108,7 @@ export function WorkflowList({
{} as Record<string, WorkflowMetadata[]>
)
for (const folderId of Object.keys(grouped)) {
grouped[folderId].sort((a, b) => a.sortOrder - b.sortOrder)
grouped[folderId].sort(compareByOrder)
}
return grouped
}, [regularWorkflows])
@@ -208,6 +219,7 @@ export function WorkflowList({
type: 'folder' | 'workflow'
id: string
sortOrder: number
createdAt?: Date
data: FolderTreeNode | WorkflowMetadata
}> = []
for (const childFolder of folder.children) {
@@ -215,6 +227,7 @@ export function WorkflowList({
type: 'folder',
id: childFolder.id,
sortOrder: childFolder.sortOrder,
createdAt: childFolder.createdAt,
data: childFolder,
})
}
@@ -223,10 +236,11 @@ export function WorkflowList({
type: 'workflow',
id: workflow.id,
sortOrder: workflow.sortOrder,
createdAt: workflow.createdAt,
data: workflow,
})
}
childItems.sort((a, b) => a.sortOrder - b.sortOrder)
childItems.sort(compareByOrder)
return (
<div key={folder.id} className='relative'>
@@ -294,20 +308,28 @@ export function WorkflowList({
type: 'folder' | 'workflow'
id: string
sortOrder: number
createdAt?: Date
data: FolderTreeNode | WorkflowMetadata
}> = []
for (const folder of folderTree) {
items.push({ type: 'folder', id: folder.id, sortOrder: folder.sortOrder, data: folder })
items.push({
type: 'folder',
id: folder.id,
sortOrder: folder.sortOrder,
createdAt: folder.createdAt,
data: folder,
})
}
for (const workflow of rootWorkflows) {
items.push({
type: 'workflow',
id: workflow.id,
sortOrder: workflow.sortOrder,
createdAt: workflow.createdAt,
data: workflow,
})
}
return items.sort((a, b) => a.sortOrder - b.sortOrder)
return items.sort(compareByOrder)
}, [folderTree, rootWorkflows])
const hasRootItems = rootItems.length > 0

View File

@@ -211,10 +211,11 @@ export function WorkspaceHeader({
}
/**
* Close context menu
* Close context menu and the workspace dropdown
*/
const closeContextMenu = () => {
setIsContextMenuOpen(false)
setIsWorkspaceMenuOpen(false)
}
/**

View File

@@ -133,7 +133,20 @@ export function useDragDrop() {
[]
)
type SiblingItem = { type: 'folder' | 'workflow'; id: string; sortOrder: number }
type SiblingItem = {
type: 'folder' | 'workflow'
id: string
sortOrder: number
createdAt: Date
}
const compareSiblingItems = (a: SiblingItem, b: SiblingItem): number => {
if (a.sortOrder !== b.sortOrder) return a.sortOrder - b.sortOrder
const timeA = a.createdAt.getTime()
const timeB = b.createdAt.getTime()
if (timeA !== timeB) return timeA - timeB
return a.id.localeCompare(b.id)
}
const getDestinationFolderId = useCallback((indicator: DropIndicator): string | null => {
return indicator.position === 'inside'
@@ -202,11 +215,21 @@ export function useDragDrop() {
return [
...Object.values(currentFolders)
.filter((f) => f.parentId === folderId)
.map((f) => ({ type: 'folder' as const, id: f.id, sortOrder: f.sortOrder })),
.map((f) => ({
type: 'folder' as const,
id: f.id,
sortOrder: f.sortOrder,
createdAt: f.createdAt,
})),
...Object.values(currentWorkflows)
.filter((w) => w.folderId === folderId)
.map((w) => ({ type: 'workflow' as const, id: w.id, sortOrder: w.sortOrder })),
].sort((a, b) => a.sortOrder - b.sortOrder)
.map((w) => ({
type: 'workflow' as const,
id: w.id,
sortOrder: w.sortOrder,
createdAt: w.createdAt,
})),
].sort(compareSiblingItems)
}, [])
const setNormalizedDropIndicator = useCallback(
@@ -299,8 +322,9 @@ export function useDragDrop() {
type: 'workflow' as const,
id,
sortOrder: currentWorkflows[id]?.sortOrder ?? 0,
createdAt: currentWorkflows[id]?.createdAt ?? new Date(),
}))
.sort((a, b) => a.sortOrder - b.sortOrder)
.sort(compareSiblingItems)
const insertAt = calculateInsertIndex(remaining, indicator)
@@ -369,7 +393,12 @@ export function useDragDrop() {
const newOrder: SiblingItem[] = [
...remaining.slice(0, insertAt),
{ type: 'folder', id: draggedFolderId, sortOrder: 0 },
{
type: 'folder',
id: draggedFolderId,
sortOrder: 0,
createdAt: draggedFolder?.createdAt ?? new Date(),
},
...remaining.slice(insertAt),
]

View File

@@ -10,6 +10,11 @@ import { createLogger } from '@sim/logger'
import { task } from '@trigger.dev/sdk'
import { and, eq, isNull, lte, or, sql } from 'drizzle-orm'
import { v4 as uuidv4 } from 'uuid'
import {
type EmailRateLimitsData,
type EmailUsageData,
renderWorkflowNotificationEmail,
} from '@/components/emails'
import { checkUsageStatus } from '@/lib/billing/calculations/usage-monitor'
import { getHighestPrioritySubscription } from '@/lib/billing/core/subscription'
import { RateLimiter } from '@/lib/core/rate-limiter'
@@ -45,9 +50,9 @@ interface NotificationPayload {
totalDurationMs: number
cost?: Record<string, unknown>
finalOutput?: unknown
traceSpans?: unknown[]
rateLimits?: Record<string, unknown>
usage?: Record<string, unknown>
traceSpans?: TraceSpan[]
rateLimits?: EmailRateLimitsData
usage?: EmailUsageData
}
}
@@ -94,8 +99,13 @@ async function buildPayload(
payload.data.finalOutput = executionData.finalOutput
}
if (subscription.includeTraceSpans && executionData.traceSpans) {
payload.data.traceSpans = executionData.traceSpans as unknown[]
// Trace spans only included for webhooks (too large for email/Slack)
if (
subscription.includeTraceSpans &&
subscription.notificationType === 'webhook' &&
executionData.traceSpans
) {
payload.data.traceSpans = executionData.traceSpans as TraceSpan[]
}
if (subscription.includeRateLimits && userId) {
@@ -251,18 +261,6 @@ function formatAlertReason(alertConfig: AlertConfig): string {
}
}
function formatJsonForEmail(data: unknown, label: string): string {
if (!data) return ''
const json = JSON.stringify(data, null, 2)
const escapedJson = json.replace(/</g, '&lt;').replace(/>/g, '&gt;')
return `
<div style="margin-top: 20px;">
<h3 style="color: #1a1a1a; font-size: 14px; margin-bottom: 8px;">${label}</h3>
<pre style="background: #f5f5f5; padding: 12px; border-radius: 6px; overflow-x: auto; font-size: 12px; color: #333; white-space: pre-wrap; word-wrap: break-word;">${escapedJson}</pre>
</div>
`
}
async function deliverEmail(
subscription: typeof workspaceNotificationSubscription.$inferSelect,
payload: NotificationPayload,
@@ -275,8 +273,7 @@ async function deliverEmail(
const isError = payload.data.status !== 'success'
const statusText = isError ? 'Error' : 'Success'
const logUrl = buildLogUrl(subscription.workspaceId, payload.data.executionId)
const baseUrl = getBaseUrl()
const alertReason = alertConfig ? formatAlertReason(alertConfig) : null
const alertReason = alertConfig ? formatAlertReason(alertConfig) : undefined
// Build subject line
const subject = alertReason
@@ -285,113 +282,36 @@ async function deliverEmail(
? `Error Alert: ${payload.data.workflowName}`
: `Workflow Completed: ${payload.data.workflowName}`
let includedDataHtml = ''
// Build plain text for fallback
let includedDataText = ''
if (payload.data.finalOutput) {
includedDataHtml += formatJsonForEmail(payload.data.finalOutput, 'Final Output')
includedDataText += `\n\nFinal Output:\n${JSON.stringify(payload.data.finalOutput, null, 2)}`
}
if (
payload.data.traceSpans &&
Array.isArray(payload.data.traceSpans) &&
payload.data.traceSpans.length > 0
) {
includedDataHtml += formatJsonForEmail(payload.data.traceSpans, 'Trace Spans')
includedDataText += `\n\nTrace Spans:\n${JSON.stringify(payload.data.traceSpans, null, 2)}`
}
if (payload.data.rateLimits) {
includedDataHtml += formatJsonForEmail(payload.data.rateLimits, 'Rate Limits')
includedDataText += `\n\nRate Limits:\n${JSON.stringify(payload.data.rateLimits, null, 2)}`
}
if (payload.data.usage) {
includedDataHtml += formatJsonForEmail(payload.data.usage, 'Usage Data')
includedDataText += `\n\nUsage Data:\n${JSON.stringify(payload.data.usage, null, 2)}`
}
// Render the email using the shared template
const html = await renderWorkflowNotificationEmail({
workflowName: payload.data.workflowName || 'Unknown Workflow',
status: payload.data.status,
trigger: payload.data.trigger,
duration: formatDuration(payload.data.totalDurationMs),
cost: formatCost(payload.data.cost),
logUrl,
alertReason,
finalOutput: payload.data.finalOutput,
rateLimits: payload.data.rateLimits,
usageData: payload.data.usage,
})
const result = await sendEmail({
to: subscription.emailRecipients,
subject,
html: `
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
</head>
<body style="background-color: #f5f5f7; font-family: HelveticaNeue, Helvetica, Arial, sans-serif; margin: 0; padding: 0;">
<div style="max-width: 580px; margin: 30px auto; background-color: #ffffff; border-radius: 5px; overflow: hidden;">
<!-- Header with Logo -->
<div style="padding: 30px 0; text-align: center;">
<img src="${baseUrl}/logo/reverse/text/medium.png" width="114" alt="Sim Studio" style="margin: 0 auto;" />
</div>
<!-- Section Border -->
<div style="display: flex; width: 100%;">
<div style="border-bottom: 1px solid #eeeeee; width: 249px;"></div>
<div style="border-bottom: 1px solid #6F3DFA; width: 102px;"></div>
<div style="border-bottom: 1px solid #eeeeee; width: 249px;"></div>
</div>
<!-- Content -->
<div style="padding: 5px 30px 20px 30px;">
<h2 style="font-size: 20px; color: #333333; margin: 20px 0;">
${alertReason ? 'Alert Triggered' : isError ? 'Workflow Execution Failed' : 'Workflow Execution Completed'}
</h2>
${alertReason ? `<p style="color: #d97706; background: #fef3c7; padding: 12px; border-radius: 6px; margin-bottom: 20px; font-size: 14px;"><strong>Reason:</strong> ${alertReason}</p>` : ''}
<table style="width: 100%; border-collapse: collapse; margin-bottom: 20px;">
<tr style="border-bottom: 1px solid #eee;">
<td style="padding: 12px 0; color: #666; width: 140px;">Workflow</td>
<td style="padding: 12px 0; color: #333; font-weight: 500;">${payload.data.workflowName}</td>
</tr>
<tr style="border-bottom: 1px solid #eee;">
<td style="padding: 12px 0; color: #666;">Status</td>
<td style="padding: 12px 0; color: ${isError ? '#ef4444' : '#22c55e'}; font-weight: 500;">${statusText}</td>
</tr>
<tr style="border-bottom: 1px solid #eee;">
<td style="padding: 12px 0; color: #666;">Trigger</td>
<td style="padding: 12px 0; color: #333;">${payload.data.trigger}</td>
</tr>
<tr style="border-bottom: 1px solid #eee;">
<td style="padding: 12px 0; color: #666;">Duration</td>
<td style="padding: 12px 0; color: #333;">${formatDuration(payload.data.totalDurationMs)}</td>
</tr>
<tr style="border-bottom: 1px solid #eee;">
<td style="padding: 12px 0; color: #666;">Cost</td>
<td style="padding: 12px 0; color: #333;">${formatCost(payload.data.cost)}</td>
</tr>
</table>
<a href="${logUrl}" style="display: inline-block; background-color: #6F3DFA; color: #ffffff; font-weight: bold; font-size: 16px; padding: 12px 30px; border-radius: 5px; text-decoration: none; text-align: center; margin: 20px 0;">
View Execution Log →
</a>
${includedDataHtml}
<p style="font-size: 16px; line-height: 1.5; color: #333333; margin-top: 30px;">
Best regards,<br />
The Sim Team
</p>
</div>
</div>
<!-- Footer -->
<div style="max-width: 580px; margin: 0 auto; padding: 20px 0; text-align: center;">
<p style="font-size: 12px; color: #706a7b; margin: 8px 0 0 0;">
© ${new Date().getFullYear()} Sim Studio, All Rights Reserved
</p>
<p style="font-size: 12px; color: #706a7b; margin: 8px 0 0 0;">
<a href="${baseUrl}/privacy" style="color: #706a7b; text-decoration: underline;">Privacy Policy</a> •
<a href="${baseUrl}/terms" style="color: #706a7b; text-decoration: underline;">Terms of Service</a>
</p>
</div>
</body>
</html>
`,
html,
text: `${subject}\n${alertReason ? `\nReason: ${alertReason}\n` : ''}\nWorkflow: ${payload.data.workflowName}\nStatus: ${statusText}\nTrigger: ${payload.data.trigger}\nDuration: ${formatDuration(payload.data.totalDurationMs)}\nCost: ${formatCost(payload.data.cost)}\n\nView Log: ${logUrl}${includedDataText}`,
emailType: 'notifications',
})
@@ -479,26 +399,6 @@ async function deliverSlack(
})
}
if (
payload.data.traceSpans &&
Array.isArray(payload.data.traceSpans) &&
payload.data.traceSpans.length > 0
) {
const spansSummary = (payload.data.traceSpans as TraceSpan[])
.map((span) => {
const status = span.status === 'success' ? '✓' : '✗'
return `${status} ${span.name || 'Unknown'} (${formatDuration(span.duration || 0)})`
})
.join('\n')
blocks.push({
type: 'section',
text: {
type: 'mrkdwn',
text: `*Trace Spans:*\n\`\`\`${spansSummary}\`\`\``,
},
})
}
if (payload.data.rateLimits) {
const limitsStr = JSON.stringify(payload.data.rateLimits, null, 2)
blocks.push({

View File

@@ -19,10 +19,10 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
title: 'Operation',
type: 'dropdown',
options: [
{ label: 'Create Run', id: 'create_run' },
{ label: 'Create Runs Batch', id: 'create_runs_batch' },
{ label: 'Create Run', id: 'langsmith_create_run' },
{ label: 'Create Runs Batch', id: 'langsmith_create_runs_batch' },
],
value: () => 'create_run',
value: () => 'langsmith_create_run',
},
{
id: 'apiKey',
@@ -37,15 +37,15 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
title: 'Run ID',
type: 'short-input',
placeholder: 'Auto-generated if blank',
condition: { field: 'operation', value: 'create_run' },
condition: { field: 'operation', value: 'langsmith_create_run' },
},
{
id: 'name',
title: 'Name',
type: 'short-input',
placeholder: 'Run name',
required: { field: 'operation', value: 'create_run' },
condition: { field: 'operation', value: 'create_run' },
required: { field: 'operation', value: 'langsmith_create_run' },
condition: { field: 'operation', value: 'langsmith_create_run' },
},
{
id: 'run_type',
@@ -61,23 +61,22 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
{ label: 'Parser', id: 'parser' },
],
value: () => 'chain',
required: { field: 'operation', value: 'create_run' },
condition: { field: 'operation', value: 'create_run' },
required: { field: 'operation', value: 'langsmith_create_run' },
condition: { field: 'operation', value: 'langsmith_create_run' },
},
{
id: 'start_time',
title: 'Start Time',
type: 'short-input',
placeholder: '2025-01-01T12:00:00Z',
condition: { field: 'operation', value: 'create_run' },
value: () => new Date().toISOString(),
placeholder: 'e.g. 2025-01-01T12:00:00Z (defaults to now)',
condition: { field: 'operation', value: 'langsmith_create_run' },
},
{
id: 'end_time',
title: 'End Time',
type: 'short-input',
placeholder: '2025-01-01T12:00:30Z',
condition: { field: 'operation', value: 'create_run' },
condition: { field: 'operation', value: 'langsmith_create_run' },
mode: 'advanced',
},
{
@@ -85,7 +84,7 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
title: 'Inputs',
type: 'code',
placeholder: '{"input":"value"}',
condition: { field: 'operation', value: 'create_run' },
condition: { field: 'operation', value: 'langsmith_create_run' },
mode: 'advanced',
},
{
@@ -93,7 +92,7 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
title: 'Outputs',
type: 'code',
placeholder: '{"output":"value"}',
condition: { field: 'operation', value: 'create_run' },
condition: { field: 'operation', value: 'langsmith_create_run' },
mode: 'advanced',
},
{
@@ -101,7 +100,7 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
title: 'Metadata',
type: 'code',
placeholder: '{"ls_model":"gpt-4"}',
condition: { field: 'operation', value: 'create_run' },
condition: { field: 'operation', value: 'langsmith_create_run' },
mode: 'advanced',
},
{
@@ -109,7 +108,7 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
title: 'Tags',
type: 'code',
placeholder: '["production","workflow"]',
condition: { field: 'operation', value: 'create_run' },
condition: { field: 'operation', value: 'langsmith_create_run' },
mode: 'advanced',
},
{
@@ -117,7 +116,7 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
title: 'Parent Run ID',
type: 'short-input',
placeholder: 'Parent run identifier',
condition: { field: 'operation', value: 'create_run' },
condition: { field: 'operation', value: 'langsmith_create_run' },
mode: 'advanced',
},
{
@@ -125,7 +124,7 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
title: 'Trace ID',
type: 'short-input',
placeholder: 'Auto-generated if blank',
condition: { field: 'operation', value: 'create_run' },
condition: { field: 'operation', value: 'langsmith_create_run' },
mode: 'advanced',
},
{
@@ -133,7 +132,7 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
title: 'Session ID',
type: 'short-input',
placeholder: 'Session identifier',
condition: { field: 'operation', value: 'create_run' },
condition: { field: 'operation', value: 'langsmith_create_run' },
mode: 'advanced',
},
{
@@ -141,7 +140,7 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
title: 'Session Name',
type: 'short-input',
placeholder: 'Session name',
condition: { field: 'operation', value: 'create_run' },
condition: { field: 'operation', value: 'langsmith_create_run' },
mode: 'advanced',
},
{
@@ -149,7 +148,7 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
title: 'Status',
type: 'short-input',
placeholder: 'success',
condition: { field: 'operation', value: 'create_run' },
condition: { field: 'operation', value: 'langsmith_create_run' },
mode: 'advanced',
},
{
@@ -157,7 +156,7 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
title: 'Error',
type: 'long-input',
placeholder: 'Error message',
condition: { field: 'operation', value: 'create_run' },
condition: { field: 'operation', value: 'langsmith_create_run' },
mode: 'advanced',
},
{
@@ -165,7 +164,7 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
title: 'Dotted Order',
type: 'short-input',
placeholder: 'Defaults to <YYYYMMDDTHHMMSSffffff>Z<id>',
condition: { field: 'operation', value: 'create_run' },
condition: { field: 'operation', value: 'langsmith_create_run' },
mode: 'advanced',
},
{
@@ -173,7 +172,7 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
title: 'Events',
type: 'code',
placeholder: '[{"event":"token","value":1}]',
condition: { field: 'operation', value: 'create_run' },
condition: { field: 'operation', value: 'langsmith_create_run' },
mode: 'advanced',
},
{
@@ -181,29 +180,36 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
title: 'Post Runs',
type: 'code',
placeholder: '[{"id":"...","name":"...","run_type":"chain","start_time":"..."}]',
condition: { field: 'operation', value: 'create_runs_batch' },
condition: { field: 'operation', value: 'langsmith_create_runs_batch' },
wandConfig: {
enabled: true,
generationType: 'json-object',
prompt: `Output ONLY a JSON array with a single LangSmith run object. No explanation.
Required: name (string), run_type ("tool"|"chain"|"llm"|"retriever"|"embedding"|"prompt"|"parser")
Optional: inputs, outputs, tags, extra, session_name, end_time
Fields id, trace_id, dotted_order, start_time are auto-generated if omitted.`,
},
},
{
id: 'patch',
title: 'Patch Runs',
type: 'code',
placeholder: '[{"id":"...","name":"...","run_type":"chain","start_time":"..."}]',
condition: { field: 'operation', value: 'create_runs_batch' },
condition: { field: 'operation', value: 'langsmith_create_runs_batch' },
mode: 'advanced',
wandConfig: {
enabled: true,
generationType: 'json-object',
prompt: `Output ONLY a JSON array with a single LangSmith run object to update. No explanation.
Required: id (existing run UUID), name, run_type ("tool"|"chain"|"llm"|"retriever"|"embedding"|"prompt"|"parser")
Common patch fields: outputs, end_time, status, error`,
},
},
],
tools: {
access: ['langsmith_create_run', 'langsmith_create_runs_batch'],
config: {
tool: (params) => {
switch (params.operation) {
case 'create_runs_batch':
return 'langsmith_create_runs_batch'
case 'create_run':
default:
return 'langsmith_create_run'
}
},
tool: (params) => params.operation,
params: (params) => {
const parseJsonValue = (value: unknown, label: string) => {
if (value === undefined || value === null || value === '') {
@@ -221,7 +227,7 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
return value
}
if (params.operation === 'create_runs_batch') {
if (params.operation === 'langsmith_create_runs_batch') {
const post = parseJsonValue(params.post, 'post runs')
const patch = parseJsonValue(params.patch, 'patch runs')

View File

@@ -173,6 +173,17 @@ export const baseStyles = {
margin: 0,
},
/** Code block text (for JSON/code display) */
codeBlock: {
fontSize: typography.fontSize.caption,
lineHeight: typography.lineHeight.caption,
color: colors.textSecondary,
fontFamily: 'monospace',
whiteSpace: 'pre-wrap' as const,
wordWrap: 'break-word' as const,
margin: 0,
},
/** Highlighted info box (e.g., "What you get with Pro") */
infoBox: {
backgroundColor: colors.bgOuter,

View File

@@ -22,7 +22,7 @@ export function WelcomeEmail({ userName }: WelcomeEmailProps) {
workflows in minutes.
</Text>
<Link href={`${baseUrl}/w`} style={{ textDecoration: 'none' }}>
<Link href={`${baseUrl}/login`} style={{ textDecoration: 'none' }}>
<Text style={baseStyles.button}>Get Started</Text>
</Link>
@@ -30,13 +30,21 @@ export function WelcomeEmail({ userName }: WelcomeEmailProps) {
If you have any questions or feedback, just reply to this email. I read every message!
</Text>
<Text style={baseStyles.paragraph}>
Want to chat?{' '}
<Link href={`${baseUrl}/team`} style={baseStyles.link}>
Schedule a call
</Link>{' '}
with our team.
</Text>
<Text style={baseStyles.paragraph}>- Emir, co-founder of {brand.name}</Text>
{/* Divider */}
<div style={baseStyles.divider} />
<Text style={{ ...baseStyles.footerText, textAlign: 'left' }}>
You're on the free plan with $10 in credits to get started.
You're on the free plan with $20 in credits to get started.
</Text>
</EmailLayout>
)

View File

@@ -33,7 +33,7 @@ export function PlanWelcomeEmail({ planName, userName, loginLink }: PlanWelcomeE
<Text style={baseStyles.paragraph}>
Want help getting started?{' '}
<Link href='https://cal.com/emirkarabeg/sim-team' style={baseStyles.link}>
<Link href={`${baseUrl}/team`} style={baseStyles.link}>
Schedule a call
</Link>{' '}
with our team.

View File

@@ -61,7 +61,7 @@ export function EmailFooter({ baseUrl = getBaseUrl(), unsubscribe, messageId }:
<tbody>
<tr>
<td align='left' style={{ padding: '0 8px 0 0' }}>
<Link href='https://x.com/simdotai' rel='noopener noreferrer'>
<Link href={`${baseUrl}/x`} rel='noopener noreferrer'>
<Img
src={`${baseUrl}/static/x-icon.png`}
width='20'
@@ -71,7 +71,7 @@ export function EmailFooter({ baseUrl = getBaseUrl(), unsubscribe, messageId }:
</Link>
</td>
<td align='left' style={{ padding: '0 8px' }}>
<Link href='https://discord.gg/Hr4UWYEcTT' rel='noopener noreferrer'>
<Link href={`${baseUrl}/discord`} rel='noopener noreferrer'>
<Img
src={`${baseUrl}/static/discord-icon.png`}
width='20'
@@ -81,7 +81,7 @@ export function EmailFooter({ baseUrl = getBaseUrl(), unsubscribe, messageId }:
</Link>
</td>
<td align='left' style={{ padding: '0 8px' }}>
<Link href='https://github.com/simstudioai/sim' rel='noopener noreferrer'>
<Link href={`${baseUrl}/github`} rel='noopener noreferrer'>
<Img
src={`${baseUrl}/static/github-icon.png`}
width='20'

View File

@@ -10,6 +10,8 @@ export * from './careers'
export * from './components'
// Invitation emails
export * from './invitations'
// Notification emails
export * from './notifications'
// Render functions and subjects
export * from './render'
export * from './subjects'

View File

@@ -0,0 +1,7 @@
export type {
EmailRateLimitStatus,
EmailRateLimitsData,
EmailUsageData,
WorkflowNotificationEmailProps,
} from './workflow-notification-email'
export { WorkflowNotificationEmail } from './workflow-notification-email'

View File

@@ -0,0 +1,161 @@
import { Link, Section, Text } from '@react-email/components'
import { baseStyles } from '@/components/emails/_styles'
import { EmailLayout } from '@/components/emails/components'
import { getBrandConfig } from '@/lib/branding/branding'
/**
* Serialized rate limit status for email payloads.
* Note: This differs from the canonical RateLimitStatus in @/lib/core/rate-limiter
* which uses Date for resetAt. This version uses string for JSON serialization.
*/
export interface EmailRateLimitStatus {
requestsPerMinute: number
remaining: number
maxBurst?: number
resetAt?: string
}
export interface EmailRateLimitsData {
sync?: EmailRateLimitStatus
async?: EmailRateLimitStatus
}
export interface EmailUsageData {
currentPeriodCost: number
limit: number
percentUsed: number
isExceeded?: boolean
}
export interface WorkflowNotificationEmailProps {
workflowName: string
status: 'success' | 'error'
trigger: string
duration: string
cost: string
logUrl: string
alertReason?: string
finalOutput?: unknown
rateLimits?: EmailRateLimitsData
usageData?: EmailUsageData
}
function formatJsonForEmail(data: unknown): string {
return JSON.stringify(data, null, 2)
}
export function WorkflowNotificationEmail({
workflowName,
status,
trigger,
duration,
cost,
logUrl,
alertReason,
finalOutput,
rateLimits,
usageData,
}: WorkflowNotificationEmailProps) {
const brand = getBrandConfig()
const isError = status === 'error'
const statusText = isError ? 'Error' : 'Success'
const previewText = alertReason
? `${brand.name}: Alert - ${workflowName}`
: isError
? `${brand.name}: Workflow Failed - ${workflowName}`
: `${brand.name}: Workflow Completed - ${workflowName}`
const message = alertReason
? 'An alert was triggered for your workflow.'
: isError
? 'Your workflow execution failed.'
: 'Your workflow completed successfully.'
return (
<EmailLayout preview={previewText}>
<Text style={{ ...baseStyles.paragraph, marginTop: 0 }}>Hello,</Text>
<Text style={baseStyles.paragraph}>{message}</Text>
<Section style={baseStyles.infoBox}>
{alertReason && (
<Text style={baseStyles.infoBoxList}>
<strong>Reason:</strong> {alertReason}
</Text>
)}
<Text style={{ ...baseStyles.infoBoxList, marginTop: alertReason ? '4px' : 0 }}>
<strong>Workflow:</strong> {workflowName}
</Text>
<Text style={{ ...baseStyles.infoBoxList, marginTop: '4px' }}>
<strong>Status:</strong> {statusText}
</Text>
<Text style={{ ...baseStyles.infoBoxList, marginTop: '4px' }}>
<strong>Trigger:</strong> {trigger}
</Text>
<Text style={{ ...baseStyles.infoBoxList, marginTop: '4px' }}>
<strong>Duration:</strong> {duration}
</Text>
<Text style={{ ...baseStyles.infoBoxList, marginTop: '4px' }}>
<strong>Cost:</strong> {cost}
</Text>
</Section>
<Link href={logUrl} style={{ textDecoration: 'none' }}>
<Text style={baseStyles.button}>View Execution Log</Text>
</Link>
{rateLimits && (rateLimits.sync || rateLimits.async) ? (
<>
<div style={baseStyles.divider} />
<Section style={baseStyles.infoBox}>
<Text style={baseStyles.infoBoxTitle}>Rate Limits</Text>
{rateLimits.sync && (
<Text style={baseStyles.infoBoxList}>
Sync: {rateLimits.sync.remaining} of {rateLimits.sync.requestsPerMinute} remaining
</Text>
)}
{rateLimits.async && (
<Text style={{ ...baseStyles.infoBoxList, marginTop: rateLimits.sync ? '4px' : 0 }}>
Async: {rateLimits.async.remaining} of {rateLimits.async.requestsPerMinute}{' '}
remaining
</Text>
)}
</Section>
</>
) : null}
{usageData ? (
<>
<div style={baseStyles.divider} />
<Section style={baseStyles.infoBox}>
<Text style={baseStyles.infoBoxTitle}>Usage</Text>
<Text style={baseStyles.infoBoxList}>
${usageData.currentPeriodCost.toFixed(2)} of ${usageData.limit.toFixed(2)} used (
{usageData.percentUsed.toFixed(1)}%)
</Text>
</Section>
</>
) : null}
{finalOutput ? (
<>
<div style={baseStyles.divider} />
<Section style={baseStyles.infoBox}>
<Text style={baseStyles.infoBoxTitle}>Final Output</Text>
<Text style={{ ...baseStyles.codeBlock, marginTop: '8px' }}>
{formatJsonForEmail(finalOutput)}
</Text>
</Section>
</>
) : null}
<div style={baseStyles.divider} />
<Text style={{ ...baseStyles.footerText, textAlign: 'left' }}>
You're receiving this because you subscribed to workflow notifications.
</Text>
</EmailLayout>
)
}
export default WorkflowNotificationEmail

View File

@@ -15,6 +15,10 @@ import {
PollingGroupInvitationEmail,
WorkspaceInvitationEmail,
} from '@/components/emails/invitations'
import {
WorkflowNotificationEmail,
type WorkflowNotificationEmailProps,
} from '@/components/emails/notifications'
import { HelpConfirmationEmail } from '@/components/emails/support'
import { getBaseUrl } from '@/lib/core/utils/urls'
@@ -258,3 +262,9 @@ export async function renderCareersSubmissionEmail(params: {
})
)
}
export async function renderWorkflowNotificationEmail(
params: WorkflowNotificationEmailProps
): Promise<string> {
return await render(WorkflowNotificationEmail(params))
}
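
For reference, a minimal invocation of the new render helper. This is a sketch: the import path and every prop value below are invented for illustration, while the prop names follow the WorkflowNotificationEmailProps interface added above.

```typescript
// Sketch only: the import path and all values below are hypothetical.
import { renderWorkflowNotificationEmail } from '@/components/emails/render'

const html = await renderWorkflowNotificationEmail({
  workflowName: 'Nightly Sync',
  status: 'error',
  trigger: 'schedule',
  duration: '12.4s',
  cost: '$0.03',
  logUrl: 'https://example.com/workspace/abc/logs/123',
  // Optional props from the interface:
  alertReason: 'Consecutive failure threshold reached',
  usageData: { currentPeriodCost: 4.2, limit: 20, percentUsed: 21 },
})
// html is a rendered string, ready to pass to sendEmail({ html, text, ... }).
```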

View File

@@ -1,8 +1,15 @@
import { keepPreviousData, useInfiniteQuery, useQuery } from '@tanstack/react-query'
import { getEndDateFromTimeRange, getStartDateFromTimeRange } from '@/lib/logs/filters'
import { parseQuery, queryToApiParams } from '@/lib/logs/query-parser'
import type {
DashboardStatsResponse,
SegmentStats,
WorkflowStats,
} from '@/app/api/logs/stats/route'
import type { LogsResponse, TimeRange, WorkflowLog } from '@/stores/logs/filters/types'
export type { DashboardStatsResponse, SegmentStats, WorkflowStats }
export const logKeys = {
all: ['logs'] as const,
lists: () => [...logKeys.all, 'list'] as const,
@@ -10,8 +17,8 @@ export const logKeys = {
[...logKeys.lists(), workspaceId ?? '', filters] as const,
details: () => [...logKeys.all, 'detail'] as const,
detail: (logId: string | undefined) => [...logKeys.details(), logId ?? ''] as const,
dashboard: (workspaceId: string | undefined, filters: Record<string, unknown>) =>
[...logKeys.all, 'dashboard', workspaceId ?? '', filters] as const,
stats: (workspaceId: string | undefined, filters: object) =>
[...logKeys.all, 'stats', workspaceId ?? '', filters] as const,
executionSnapshots: () => [...logKeys.all, 'executionSnapshot'] as const,
executionSnapshot: (executionId: string | undefined) =>
[...logKeys.executionSnapshots(), executionId ?? ''] as const,
@@ -147,52 +154,45 @@ export function useLogDetail(logId: string | undefined) {
})
}
const DASHBOARD_LOGS_LIMIT = 10000
/**
* Fetches all logs for dashboard metrics (non-paginated).
* Uses same filters as the logs list but with a high limit to get all data.
* Fetches dashboard stats from the server-side aggregation endpoint.
* Uses SQL aggregation for efficient computation without arbitrary limits.
*/
async function fetchAllLogs(
async function fetchDashboardStats(
workspaceId: string,
filters: Omit<LogFilters, 'limit'>
): Promise<WorkflowLog[]> {
): Promise<DashboardStatsResponse> {
const params = new URLSearchParams()
params.set('workspaceId', workspaceId)
params.set('limit', DASHBOARD_LOGS_LIMIT.toString())
params.set('offset', '0')
applyFilterParams(params, filters)
const response = await fetch(`/api/logs?${params.toString()}`)
const response = await fetch(`/api/logs/stats?${params.toString()}`)
if (!response.ok) {
throw new Error('Failed to fetch logs for dashboard')
throw new Error('Failed to fetch dashboard stats')
}
const apiData: LogsResponse = await response.json()
return apiData.data || []
return response.json()
}
interface UseDashboardLogsOptions {
interface UseDashboardStatsOptions {
enabled?: boolean
refetchInterval?: number | false
}
/**
* Hook for fetching all logs for dashboard metrics.
* Unlike useLogsList, this fetches all logs in a single request
* to ensure dashboard metrics are computed from complete data.
* Hook for fetching dashboard stats using server-side aggregation.
* No arbitrary limits - uses SQL aggregation for accurate metrics.
*/
export function useDashboardLogs(
export function useDashboardStats(
workspaceId: string | undefined,
filters: Omit<LogFilters, 'limit'>,
options?: UseDashboardLogsOptions
options?: UseDashboardStatsOptions
) {
return useQuery({
queryKey: logKeys.dashboard(workspaceId, filters),
queryFn: () => fetchAllLogs(workspaceId as string, filters),
queryKey: logKeys.stats(workspaceId, filters),
queryFn: () => fetchDashboardStats(workspaceId as string, filters),
enabled: Boolean(workspaceId) && (options?.enabled ?? true),
refetchInterval: options?.refetchInterval ?? false,
staleTime: 0,
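
A usage sketch of the new hook; the component name, import path, and filters value are assumptions, and only the hook signature and options shape come from the hunk above.

```tsx
import { useDashboardStats } from '@/hooks/queries/logs' // path assumed

function DashboardSummary({ workspaceId }: { workspaceId: string }) {
  // Empty filters for illustration; the real parameter type is Omit<LogFilters, 'limit'>.
  const filters = {} as Parameters<typeof useDashboardStats>[1]
  const { data, isLoading } = useDashboardStats(workspaceId, filters, {
    refetchInterval: 30_000, // optional polling, per UseDashboardStatsOptions
  })
  if (isLoading || !data) return null
  // Stats are aggregated in SQL on the server, so totals stay exact even past the
  // old 10,000-log client-side fetch limit.
  return <pre>{JSON.stringify(data, null, 2)}</pre>
}
```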

View File

@@ -194,7 +194,7 @@ export function useCreateWorkflow() {
const workflowsInFolder = Object.values(currentWorkflows).filter(
(w) => w.folderId === targetFolderId
)
sortOrder = workflowsInFolder.reduce((max, w) => Math.max(max, w.sortOrder ?? 0), -1) + 1
sortOrder = workflowsInFolder.reduce((min, w) => Math.min(min, w.sortOrder ?? 0), 1) - 1
}
return {
@@ -294,7 +294,7 @@ export function useDuplicateWorkflowMutation() {
const workflowsInFolder = Object.values(currentWorkflows).filter(
(w) => w.folderId === targetFolderId
)
const maxSortOrder = workflowsInFolder.reduce((max, w) => Math.max(max, w.sortOrder ?? 0), -1)
const minSortOrder = workflowsInFolder.reduce((min, w) => Math.min(min, w.sortOrder ?? 0), 1)
return {
id: tempId,
@@ -305,7 +305,7 @@ export function useDuplicateWorkflowMutation() {
color: variables.color,
workspaceId: variables.workspaceId,
folderId: targetFolderId,
sortOrder: maxSortOrder + 1,
sortOrder: minSortOrder - 1,
}
}
)
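
Both hunks above switch from max-plus-one to min-minus-one. A standalone sketch of that rule, with simplified types and a hypothetical helper name:

```typescript
// Sketch of the deterministic "prepend" ordering used above (types simplified).
interface WorkflowLike {
  folderId: string | null
  sortOrder?: number
}

// New or duplicated workflows take min(sortOrder) - 1 within their target folder,
// so they land at the top of the list and the initial order no longer depends on
// insertion timing.
function initialSortOrder(workflows: WorkflowLike[], folderId: string | null): number {
  const inFolder = workflows.filter((w) => w.folderId === folderId)
  const minSortOrder = inFolder.reduce((min, w) => Math.min(min, w.sortOrder ?? 0), 1)
  return minSortOrder - 1
}
```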

View File

@@ -426,7 +426,8 @@ export const auth = betterAuth({
},
emailVerification: {
autoSignInAfterVerification: true,
afterEmailVerification: async (user) => {
// onEmailVerification is called by the emailOTP plugin when email is verified via OTP
onEmailVerification: async (user) => {
if (isHosted && user.email) {
try {
const html = await renderWelcomeEmail(user.name || undefined)
@@ -441,11 +442,11 @@ export const auth = betterAuth({
emailType: 'transactional',
})
logger.info('[emailVerification.afterEmailVerification] Welcome email sent', {
logger.info('[emailVerification.onEmailVerification] Welcome email sent', {
userId: user.id,
})
} catch (error) {
logger.error('[emailVerification.afterEmailVerification] Failed to send welcome email', {
logger.error('[emailVerification.onEmailVerification] Failed to send welcome email', {
userId: user.id,
error,
})
@@ -456,7 +457,7 @@ export const auth = betterAuth({
emailAndPassword: {
enabled: true,
requireEmailVerification: isEmailVerificationEnabled,
sendVerificationOnSignUp: false,
sendVerificationOnSignUp: isEmailVerificationEnabled, // Auto-send verification OTP on signup when verification is required
throwOnMissingCredentials: true,
throwOnInvalidCredentials: true,
sendResetPassword: async ({ user, url, token }, request) => {
@@ -592,6 +593,7 @@ export const auth = betterAuth({
sendVerificationOnSignUp: false,
otpLength: 6, // Explicitly set the OTP length
expiresIn: 15 * 60, // 15 minutes in seconds
overrideDefaultEmailVerification: true,
}),
genericOAuth({
config: [

View File

@@ -77,6 +77,9 @@ export interface SendMessageRequest {
| 'gpt-5.1-high'
| 'gpt-5-codex'
| 'gpt-5.1-codex'
| 'gpt-5.2'
| 'gpt-5.2-codex'
| 'gpt-5.2-pro'
| 'gpt-4o'
| 'gpt-4.1'
| 'o3'

View File

@@ -19,6 +19,7 @@ vi.mock('@/lib/core/config/env', () =>
vi.mock('@/lib/core/config/feature-flags', () => ({
isDev: false,
isReactGrabEnabled: false,
}))
import {

View File

@@ -1,5 +1,5 @@
import { env, getEnv } from '../config/env'
import { isDev } from '../config/feature-flags'
import { isDev, isReactGrabEnabled } from '../config/feature-flags'
/**
* Content Security Policy (CSP) configuration builder
@@ -40,6 +40,7 @@ export const buildTimeCSPDirectives: CSPDirectives = {
'https://*.google.com',
'https://apis.google.com',
'https://assets.onedollarstats.com',
...(isReactGrabEnabled ? ['https://unpkg.com'] : []),
],
'style-src': ["'self'", "'unsafe-inline'", 'https://fonts.googleapis.com'],
@@ -166,10 +167,11 @@ export function generateRuntimeCSP(): string {
const dynamicDomainsStr = uniqueDynamicDomains.join(' ')
const brandLogoDomain = brandLogoDomains[0] || ''
const brandFaviconDomain = brandFaviconDomains[0] || ''
const reactGrabScript = isReactGrabEnabled ? 'https://unpkg.com' : ''
return `
default-src 'self';
script-src 'self' 'unsafe-inline' 'unsafe-eval' https://*.google.com https://apis.google.com https://assets.onedollarstats.com;
script-src 'self' 'unsafe-inline' 'unsafe-eval' https://*.google.com https://apis.google.com https://assets.onedollarstats.com ${reactGrabScript};
style-src 'self' 'unsafe-inline' https://fonts.googleapis.com;
img-src 'self' data: blob: https://*.googleusercontent.com https://*.google.com https://*.atlassian.com https://cdn.discordapp.com https://*.githubusercontent.com https://*.s3.amazonaws.com https://s3.amazonaws.com https://*.amazonaws.com https://*.blob.core.windows.net https://github.com/* https://collector.onedollarstats.com ${brandLogoDomain} ${brandFaviconDomain};
media-src 'self' blob:;

View File

@@ -1,7 +1,7 @@
import { db } from '@sim/db'
import { workflow, workflowBlocks, workflowEdges, workflowSubflows } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { and, eq, isNull, min } from 'drizzle-orm'
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
import type { Variable } from '@/stores/panel/variables/types'
import type { LoopConfig, ParallelConfig } from '@/stores/workflows/workflow/types'
@@ -26,6 +26,7 @@ interface DuplicateWorkflowResult {
color: string
workspaceId: string
folderId: string | null
sortOrder: number
blocksCount: number
edgesCount: number
subflowsCount: number
@@ -88,12 +89,29 @@ export async function duplicateWorkflow(
throw new Error('Source workflow not found or access denied')
}
const targetWorkspaceId = workspaceId || source.workspaceId
const targetFolderId = folderId !== undefined ? folderId : source.folderId
const folderCondition = targetFolderId
? eq(workflow.folderId, targetFolderId)
: isNull(workflow.folderId)
const [minResult] = await tx
.select({ minOrder: min(workflow.sortOrder) })
.from(workflow)
.where(
targetWorkspaceId
? and(eq(workflow.workspaceId, targetWorkspaceId), folderCondition)
: and(eq(workflow.userId, userId), folderCondition)
)
const sortOrder = (minResult?.minOrder ?? 1) - 1
// Create the new workflow first (required for foreign key constraints)
await tx.insert(workflow).values({
id: newWorkflowId,
userId,
workspaceId: workspaceId || source.workspaceId,
folderId: folderId !== undefined ? folderId : source.folderId,
workspaceId: targetWorkspaceId,
folderId: targetFolderId,
sortOrder,
name,
description: description || source.description,
color: color || source.color,
@@ -286,7 +304,8 @@ export async function duplicateWorkflow(
description: description || source.description,
color: color || source.color,
workspaceId: finalWorkspaceId,
folderId: folderId !== undefined ? folderId : source.folderId,
folderId: targetFolderId,
sortOrder,
blocksCount: sourceBlocks.length,
edgesCount: sourceEdges.length,
subflowsCount: sourceSubflows.length,

View File

@@ -253,6 +253,30 @@ const nextConfig: NextConfig = {
async redirects() {
const redirects = []
// Social link redirects (used in emails to avoid spam filter issues)
redirects.push(
{
source: '/discord',
destination: 'https://discord.gg/Hr4UWYEcTT',
permanent: false,
},
{
source: '/x',
destination: 'https://x.com/simdotai',
permanent: false,
},
{
source: '/github',
destination: 'https://github.com/simstudioai/sim',
permanent: false,
},
{
source: '/team',
destination: 'https://cal.com/emirkarabeg/sim-team',
permanent: false,
}
)
// Redirect /building and /blog to /studio (legacy URL support)
redirects.push(
{

View File

@@ -422,7 +422,8 @@ function abortAllInProgressTools(set: any, get: () => CopilotStore) {
* Loads messages from DB for UI rendering.
* Messages are stored exactly as they render, so we just need to:
* 1. Register client tool instances for any tool calls
* 2. Return the messages as-is
* 2. Clear any streaming flags (messages loaded from DB are never actively streaming)
* 3. Return the messages
*/
function normalizeMessagesForUI(messages: CopilotMessage[]): CopilotMessage[] {
try {
@@ -438,23 +439,54 @@ function normalizeMessagesForUI(messages: CopilotMessage[]): CopilotMessage[] {
}
}
// Register client tool instances for all tool calls so they can be looked up
// Register client tool instances and clear streaming flags for all tool calls
for (const message of messages) {
if (message.contentBlocks) {
for (const block of message.contentBlocks as any[]) {
if (block?.type === 'tool_call' && block.toolCall) {
registerToolCallInstances(block.toolCall)
clearStreamingFlags(block.toolCall)
}
}
}
// Also clear from toolCalls array (legacy format)
if (message.toolCalls) {
for (const toolCall of message.toolCalls) {
clearStreamingFlags(toolCall)
}
}
}
// Return messages as-is - they're already in the correct format for rendering
return messages
} catch {
return messages
}
}
/**
* Recursively clears streaming flags from a tool call and its nested subagent tool calls.
* This ensures messages loaded from DB don't appear to be streaming.
*/
function clearStreamingFlags(toolCall: any): void {
if (!toolCall) return
// Always set subAgentStreaming to false - messages loaded from DB are never streaming
toolCall.subAgentStreaming = false
// Clear nested subagent tool calls
if (Array.isArray(toolCall.subAgentBlocks)) {
for (const block of toolCall.subAgentBlocks) {
if (block?.type === 'subagent_tool_call' && block.toolCall) {
clearStreamingFlags(block.toolCall)
}
}
}
if (Array.isArray(toolCall.subAgentToolCalls)) {
for (const subTc of toolCall.subAgentToolCalls) {
clearStreamingFlags(subTc)
}
}
}
/**
* Recursively registers client tool instances for a tool call and its nested subagent tool calls.
*/

View File

@@ -106,6 +106,9 @@ export interface CopilotState {
| 'gpt-5.1-high'
| 'gpt-5-codex'
| 'gpt-5.1-codex'
| 'gpt-5.2'
| 'gpt-5.2-codex'
| 'gpt-5.2-pro'
| 'gpt-4o'
| 'gpt-4.1'
| 'o3'

View File

@@ -32,5 +32,5 @@
"trigger.config.ts",
".next/dev/types/**/*.ts"
],
"exclude": ["node_modules"]
"exclude": ["node_modules", "vitest.config.ts", "vitest.setup.ts"]
}

bun.lock (473 changed lines)

File diff suppressed because it is too large.