mirror of https://github.com/simstudioai/sim.git (synced 2026-01-15 01:47:59 -05:00)

Compare commits: fix/webhoo ... improvemen (6 commits)

| SHA1 |
|---|
| 66483c2fd5 |
| 3db9ad2d95 |
| 4195cfe1ff |
| 212933746e |
| 5af72ea22f |
| 4899c28421 |
@@ -351,14 +351,16 @@ Enables AI-assisted field generation.

## Tools Configuration

### Simple Tool Selector

**Preferred:** Use tool names directly as dropdown option IDs to avoid switch cases:

```typescript
tools: {
  access: ['service_create', 'service_read', 'service_update'],
  config: {
    tool: (params) => `service_${params.operation}`,
  },
}

// Dropdown options use tool IDs directly
options: [
  { label: 'Create', id: 'service_create' },
  { label: 'Read', id: 'service_read' },
]

// Tool selector just returns the operation value
tool: (params) => params.operation,
```
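
The Langsmith block updated later in this compare applies the same pattern: its dropdown option IDs are the tool names, so the selector collapses to a one-liner instead of a switch statement.

```typescript
// Excerpt from the Langsmith block change in this compare.
options: [
  { label: 'Create Run', id: 'langsmith_create_run' },
  { label: 'Create Runs Batch', id: 'langsmith_create_runs_batch' },
],

tools: {
  access: ['langsmith_create_run', 'langsmith_create_runs_batch'],
  config: {
    tool: (params) => params.operation,
  },
}
```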

### With Parameter Transformation

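A minimal sketch of the parameter-transformation pattern, reusing the hypothetical `service_*` tools from above; the `params` hook shape mirrors the one the Langsmith block uses in this compare, and the JSON-parsing logic here is illustrative only:

```typescript
tools: {
  access: ['service_create', 'service_read', 'service_update'],
  config: {
    tool: (params) => params.operation,
    // Reshape raw block inputs before they reach the selected tool.
    params: (params) => ({
      ...params,
      // Code fields arrive as strings; parse them into objects when present.
      metadata:
        typeof params.metadata === 'string' && params.metadata.trim() !== ''
          ? JSON.parse(params.metadata)
          : params.metadata,
    }),
  },
}
```
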
@@ -15,6 +15,7 @@ import {
  renderPlanWelcomeEmail,
  renderUsageThresholdEmail,
  renderWelcomeEmail,
  renderWorkflowNotificationEmail,
  renderWorkspaceInvitationEmail,
} from '@/components/emails'

@@ -108,6 +109,51 @@ const emailTemplates = {
      message:
        'I have 10 years of experience building scalable distributed systems. Most recently, I led a team at a Series B startup where we scaled from 100K to 10M users.',
    }),

  // Notification emails
  'workflow-notification-success': () =>
    renderWorkflowNotificationEmail({
      workflowName: 'Customer Onboarding Flow',
      status: 'success',
      trigger: 'api',
      duration: '2.3s',
      cost: '$0.0042',
      logUrl: 'https://sim.ai/workspace/ws_123/logs?search=exec_abc123',
    }),
  'workflow-notification-error': () =>
    renderWorkflowNotificationEmail({
      workflowName: 'Customer Onboarding Flow',
      status: 'error',
      trigger: 'webhook',
      duration: '1.1s',
      cost: '$0.0021',
      logUrl: 'https://sim.ai/workspace/ws_123/logs?search=exec_abc123',
    }),
  'workflow-notification-alert': () =>
    renderWorkflowNotificationEmail({
      workflowName: 'Customer Onboarding Flow',
      status: 'error',
      trigger: 'schedule',
      duration: '45.2s',
      cost: '$0.0156',
      logUrl: 'https://sim.ai/workspace/ws_123/logs?search=exec_abc123',
      alertReason: '3 consecutive failures detected',
    }),
  'workflow-notification-full': () =>
    renderWorkflowNotificationEmail({
      workflowName: 'Data Processing Pipeline',
      status: 'success',
      trigger: 'api',
      duration: '12.5s',
      cost: '$0.0234',
      logUrl: 'https://sim.ai/workspace/ws_123/logs?search=exec_abc123',
      finalOutput: { processed: 150, skipped: 3, status: 'completed' },
      rateLimits: {
        sync: { requestsPerMinute: 60, remaining: 45 },
        async: { requestsPerMinute: 120, remaining: 98 },
      },
      usageData: { currentPeriodCost: 12.45, limit: 50, percentUsed: 24.9 },
    }),
} as const

type EmailTemplate = keyof typeof emailTemplates
@@ -131,6 +177,12 @@ export async function GET(request: NextRequest) {
      'payment-failed',
    ],
    Careers: ['careers-confirmation', 'careers-submission'],
    Notifications: [
      'workflow-notification-success',
      'workflow-notification-error',
      'workflow-notification-alert',
      'workflow-notification-full',
    ],
  }

  const categoryHtml = Object.entries(categories)

apps/sim/app/api/logs/stats/route.ts (new file, 297 lines)
@@ -0,0 +1,297 @@
import { db } from '@sim/db'
import { permissions, workflow, workflowExecutionLogs } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, sql } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { generateRequestId } from '@/lib/core/utils/request'
import { buildFilterConditions, LogFilterParamsSchema } from '@/lib/logs/filters'

const logger = createLogger('LogsStatsAPI')

export const revalidate = 0

const StatsQueryParamsSchema = LogFilterParamsSchema.extend({
  segmentCount: z.coerce.number().optional().default(72),
})

export interface SegmentStats {
  timestamp: string
  totalExecutions: number
  successfulExecutions: number
  avgDurationMs: number
}

export interface WorkflowStats {
  workflowId: string
  workflowName: string
  segments: SegmentStats[]
  overallSuccessRate: number
  totalExecutions: number
  totalSuccessful: number
}

export interface DashboardStatsResponse {
  workflows: WorkflowStats[]
  aggregateSegments: SegmentStats[]
  totalRuns: number
  totalErrors: number
  avgLatency: number
  timeBounds: {
    start: string
    end: string
  }
  segmentMs: number
}

export async function GET(request: NextRequest) {
  const requestId = generateRequestId()

  try {
    const session = await getSession()
    if (!session?.user?.id) {
      logger.warn(`[${requestId}] Unauthorized logs stats access attempt`)
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const userId = session.user.id

    try {
      const { searchParams } = new URL(request.url)
      const params = StatsQueryParamsSchema.parse(Object.fromEntries(searchParams.entries()))

      const workspaceFilter = eq(workflowExecutionLogs.workspaceId, params.workspaceId)

      const commonFilters = buildFilterConditions(params, { useSimpleLevelFilter: true })
      const whereCondition = commonFilters ? and(workspaceFilter, commonFilters) : workspaceFilter

      const boundsQuery = await db
        .select({
          minTime: sql<string>`MIN(${workflowExecutionLogs.startedAt})`,
          maxTime: sql<string>`MAX(${workflowExecutionLogs.startedAt})`,
        })
        .from(workflowExecutionLogs)
        .innerJoin(workflow, eq(workflowExecutionLogs.workflowId, workflow.id))
        .innerJoin(
          permissions,
          and(
            eq(permissions.entityType, 'workspace'),
            eq(permissions.entityId, workflowExecutionLogs.workspaceId),
            eq(permissions.userId, userId)
          )
        )
        .where(whereCondition)

      const bounds = boundsQuery[0]
      const now = new Date()

      let startTime: Date
      let endTime: Date

      if (!bounds?.minTime || !bounds?.maxTime) {
        endTime = now
        startTime = new Date(now.getTime() - 24 * 60 * 60 * 1000)
      } else {
        startTime = new Date(bounds.minTime)
        endTime = new Date(Math.max(new Date(bounds.maxTime).getTime(), now.getTime()))
      }

      const totalMs = Math.max(1, endTime.getTime() - startTime.getTime())
      const segmentMs = Math.max(60000, Math.floor(totalMs / params.segmentCount))

      const statsQuery = await db
        .select({
          workflowId: workflowExecutionLogs.workflowId,
          workflowName: workflow.name,
          segmentIndex:
            sql<number>`FLOOR(EXTRACT(EPOCH FROM (${workflowExecutionLogs.startedAt} - ${startTime}::timestamp)) * 1000 / ${segmentMs})`.as(
              'segment_index'
            ),
          totalExecutions: sql<number>`COUNT(*)`.as('total_executions'),
          successfulExecutions:
            sql<number>`COUNT(*) FILTER (WHERE ${workflowExecutionLogs.level} != 'error')`.as(
              'successful_executions'
            ),
          avgDurationMs:
            sql<number>`COALESCE(AVG(${workflowExecutionLogs.totalDurationMs}) FILTER (WHERE ${workflowExecutionLogs.totalDurationMs} > 0), 0)`.as(
              'avg_duration_ms'
            ),
        })
        .from(workflowExecutionLogs)
        .innerJoin(workflow, eq(workflowExecutionLogs.workflowId, workflow.id))
        .innerJoin(
          permissions,
          and(
            eq(permissions.entityType, 'workspace'),
            eq(permissions.entityId, workflowExecutionLogs.workspaceId),
            eq(permissions.userId, userId)
          )
        )
        .where(whereCondition)
        .groupBy(
          workflowExecutionLogs.workflowId,
          workflow.name,
          sql`FLOOR(EXTRACT(EPOCH FROM (${workflowExecutionLogs.startedAt} - ${startTime}::timestamp)) * 1000 / ${segmentMs})`
        )
        .orderBy(workflowExecutionLogs.workflowId, sql`segment_index`)

      const workflowMap = new Map<
        string,
        {
          workflowId: string
          workflowName: string
          segments: Map<number, SegmentStats>
          totalExecutions: number
          totalSuccessful: number
        }
      >()

      for (const row of statsQuery) {
        const segmentIndex = Math.min(
          params.segmentCount - 1,
          Math.max(0, Math.floor(Number(row.segmentIndex)))
        )

        if (!workflowMap.has(row.workflowId)) {
          workflowMap.set(row.workflowId, {
            workflowId: row.workflowId,
            workflowName: row.workflowName,
            segments: new Map(),
            totalExecutions: 0,
            totalSuccessful: 0,
          })
        }

        const wf = workflowMap.get(row.workflowId)!
        wf.totalExecutions += Number(row.totalExecutions)
        wf.totalSuccessful += Number(row.successfulExecutions)

        const existing = wf.segments.get(segmentIndex)
        if (existing) {
          const oldTotal = existing.totalExecutions
          const newTotal = oldTotal + Number(row.totalExecutions)
          existing.totalExecutions = newTotal
          existing.successfulExecutions += Number(row.successfulExecutions)
          existing.avgDurationMs =
            newTotal > 0
              ? (existing.avgDurationMs * oldTotal +
                  Number(row.avgDurationMs || 0) * Number(row.totalExecutions)) /
                newTotal
              : 0
        } else {
          wf.segments.set(segmentIndex, {
            timestamp: new Date(startTime.getTime() + segmentIndex * segmentMs).toISOString(),
            totalExecutions: Number(row.totalExecutions),
            successfulExecutions: Number(row.successfulExecutions),
            avgDurationMs: Number(row.avgDurationMs || 0),
          })
        }
      }

      const workflows: WorkflowStats[] = []
      for (const wf of workflowMap.values()) {
        const segments: SegmentStats[] = []
        for (let i = 0; i < params.segmentCount; i++) {
          const existing = wf.segments.get(i)
          if (existing) {
            segments.push(existing)
          } else {
            segments.push({
              timestamp: new Date(startTime.getTime() + i * segmentMs).toISOString(),
              totalExecutions: 0,
              successfulExecutions: 0,
              avgDurationMs: 0,
            })
          }
        }

        workflows.push({
          workflowId: wf.workflowId,
          workflowName: wf.workflowName,
          segments,
          totalExecutions: wf.totalExecutions,
          totalSuccessful: wf.totalSuccessful,
          overallSuccessRate:
            wf.totalExecutions > 0 ? (wf.totalSuccessful / wf.totalExecutions) * 100 : 100,
        })
      }

      workflows.sort((a, b) => {
        const errA = a.overallSuccessRate < 100 ? 1 - a.overallSuccessRate / 100 : 0
        const errB = b.overallSuccessRate < 100 ? 1 - b.overallSuccessRate / 100 : 0
        if (errA !== errB) return errB - errA
        return a.workflowName.localeCompare(b.workflowName)
      })

      const aggregateSegments: SegmentStats[] = []
      let totalRuns = 0
      let totalErrors = 0
      let weightedLatencySum = 0
      let latencyCount = 0

      for (let i = 0; i < params.segmentCount; i++) {
        let segTotal = 0
        let segSuccess = 0
        let segWeightedLatency = 0
        let segLatencyCount = 0

        for (const wf of workflows) {
          const seg = wf.segments[i]
          segTotal += seg.totalExecutions
          segSuccess += seg.successfulExecutions
          if (seg.avgDurationMs > 0 && seg.totalExecutions > 0) {
            segWeightedLatency += seg.avgDurationMs * seg.totalExecutions
            segLatencyCount += seg.totalExecutions
          }
        }

        totalRuns += segTotal
        totalErrors += segTotal - segSuccess
        weightedLatencySum += segWeightedLatency
        latencyCount += segLatencyCount

        aggregateSegments.push({
          timestamp: new Date(startTime.getTime() + i * segmentMs).toISOString(),
          totalExecutions: segTotal,
          successfulExecutions: segSuccess,
          avgDurationMs: segLatencyCount > 0 ? segWeightedLatency / segLatencyCount : 0,
        })
      }

      const avgLatency = latencyCount > 0 ? weightedLatencySum / latencyCount : 0

      const response: DashboardStatsResponse = {
        workflows,
        aggregateSegments,
        totalRuns,
        totalErrors,
        avgLatency,
        timeBounds: {
          start: startTime.toISOString(),
          end: endTime.toISOString(),
        },
        segmentMs,
      }

      return NextResponse.json(response, { status: 200 })
    } catch (validationError) {
      if (validationError instanceof z.ZodError) {
        logger.warn(`[${requestId}] Invalid logs stats request parameters`, {
          errors: validationError.errors,
        })
        return NextResponse.json(
          {
            error: 'Invalid request parameters',
            details: validationError.errors,
          },
          { status: 400 }
        )
      }
      throw validationError
    }
  } catch (error: any) {
    logger.error(`[${requestId}] logs stats fetch error`, error)
    return NextResponse.json({ error: error.message }, { status: 500 })
  }
}

@@ -80,7 +80,6 @@ const updateNotificationSchema = z
|
||||
levelFilter: levelFilterSchema.optional(),
|
||||
triggerFilter: triggerFilterSchema.optional(),
|
||||
includeFinalOutput: z.boolean().optional(),
|
||||
includeTraceSpans: z.boolean().optional(),
|
||||
includeRateLimits: z.boolean().optional(),
|
||||
includeUsageData: z.boolean().optional(),
|
||||
alertConfig: alertConfigSchema.optional(),
|
||||
@@ -147,7 +146,6 @@ export async function GET(request: NextRequest, { params }: RouteParams) {
|
||||
levelFilter: subscription.levelFilter,
|
||||
triggerFilter: subscription.triggerFilter,
|
||||
includeFinalOutput: subscription.includeFinalOutput,
|
||||
includeTraceSpans: subscription.includeTraceSpans,
|
||||
includeRateLimits: subscription.includeRateLimits,
|
||||
includeUsageData: subscription.includeUsageData,
|
||||
webhookConfig: subscription.webhookConfig,
|
||||
@@ -222,7 +220,6 @@ export async function PUT(request: NextRequest, { params }: RouteParams) {
|
||||
if (data.triggerFilter !== undefined) updateData.triggerFilter = data.triggerFilter
|
||||
if (data.includeFinalOutput !== undefined)
|
||||
updateData.includeFinalOutput = data.includeFinalOutput
|
||||
if (data.includeTraceSpans !== undefined) updateData.includeTraceSpans = data.includeTraceSpans
|
||||
if (data.includeRateLimits !== undefined) updateData.includeRateLimits = data.includeRateLimits
|
||||
if (data.includeUsageData !== undefined) updateData.includeUsageData = data.includeUsageData
|
||||
if (data.alertConfig !== undefined) updateData.alertConfig = data.alertConfig
|
||||
@@ -260,7 +257,6 @@ export async function PUT(request: NextRequest, { params }: RouteParams) {
|
||||
levelFilter: subscription.levelFilter,
|
||||
triggerFilter: subscription.triggerFilter,
|
||||
includeFinalOutput: subscription.includeFinalOutput,
|
||||
includeTraceSpans: subscription.includeTraceSpans,
|
||||
includeRateLimits: subscription.includeRateLimits,
|
||||
includeUsageData: subscription.includeUsageData,
|
||||
webhookConfig: subscription.webhookConfig,
|
||||
|
||||
@@ -5,8 +5,14 @@ import { createLogger } from '@sim/logger'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { v4 as uuidv4 } from 'uuid'
|
||||
import {
|
||||
type EmailRateLimitsData,
|
||||
type EmailUsageData,
|
||||
renderWorkflowNotificationEmail,
|
||||
} from '@/components/emails'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { decryptSecret } from '@/lib/core/security/encryption'
|
||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||
import { sendEmail } from '@/lib/messaging/email/mailer'
|
||||
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
|
||||
|
||||
@@ -67,29 +73,23 @@ function buildTestPayload(subscription: typeof workspaceNotificationSubscription
|
||||
data.finalOutput = { message: 'This is a test notification', test: true }
|
||||
}
|
||||
|
||||
if (subscription.includeTraceSpans) {
|
||||
data.traceSpans = [
|
||||
{
|
||||
id: 'span_test_1',
|
||||
name: 'Test Block',
|
||||
type: 'block',
|
||||
status: 'success',
|
||||
startTime: new Date(timestamp - 5000).toISOString(),
|
||||
endTime: new Date(timestamp).toISOString(),
|
||||
duration: 5000,
|
||||
},
|
||||
]
|
||||
}
|
||||
|
||||
if (subscription.includeRateLimits) {
|
||||
data.rateLimits = {
|
||||
sync: { limit: 150, remaining: 45, resetAt: new Date(timestamp + 60000).toISOString() },
|
||||
async: { limit: 1000, remaining: 50, resetAt: new Date(timestamp + 60000).toISOString() },
|
||||
sync: {
|
||||
requestsPerMinute: 150,
|
||||
remaining: 45,
|
||||
resetAt: new Date(timestamp + 60000).toISOString(),
|
||||
},
|
||||
async: {
|
||||
requestsPerMinute: 1000,
|
||||
remaining: 50,
|
||||
resetAt: new Date(timestamp + 60000).toISOString(),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
if (subscription.includeUsageData) {
|
||||
data.usage = { currentPeriodCost: 2.45, limit: 20, plan: 'pro', isExceeded: false }
|
||||
data.usage = { currentPeriodCost: 2.45, limit: 20, percentUsed: 12.25, isExceeded: false }
|
||||
}
|
||||
|
||||
return { payload, timestamp }
|
||||
@@ -157,23 +157,26 @@ async function testEmail(subscription: typeof workspaceNotificationSubscription.
|
||||
|
||||
const { payload } = buildTestPayload(subscription)
|
||||
const data = (payload as Record<string, unknown>).data as Record<string, unknown>
|
||||
const baseUrl = getBaseUrl()
|
||||
const logUrl = `${baseUrl}/workspace/${subscription.workspaceId}/logs`
|
||||
|
||||
const html = await renderWorkflowNotificationEmail({
|
||||
workflowName: data.workflowName as string,
|
||||
status: data.status as 'success' | 'error',
|
||||
trigger: data.trigger as string,
|
||||
duration: `${data.totalDurationMs}ms`,
|
||||
cost: `$${(((data.cost as Record<string, unknown>)?.total as number) || 0).toFixed(4)}`,
|
||||
logUrl,
|
||||
finalOutput: data.finalOutput,
|
||||
rateLimits: data.rateLimits as EmailRateLimitsData | undefined,
|
||||
usageData: data.usage as EmailUsageData | undefined,
|
||||
})
|
||||
|
||||
const result = await sendEmail({
|
||||
to: subscription.emailRecipients,
|
||||
subject: `[Test] Workflow Execution: ${data.workflowName}`,
|
||||
text: `This is a test notification from Sim Studio.\n\nWorkflow: ${data.workflowName}\nStatus: ${data.status}\nDuration: ${data.totalDurationMs}ms\n\nThis notification is configured for workspace notifications.`,
|
||||
html: `
|
||||
<div style="font-family: sans-serif; max-width: 600px; margin: 0 auto;">
|
||||
<h2 style="color: #7F2FFF;">Test Notification</h2>
|
||||
<p>This is a test notification from Sim Studio.</p>
|
||||
<table style="width: 100%; border-collapse: collapse; margin: 20px 0;">
|
||||
<tr><td style="padding: 8px; border: 1px solid #eee;"><strong>Workflow</strong></td><td style="padding: 8px; border: 1px solid #eee;">${data.workflowName}</td></tr>
|
||||
<tr><td style="padding: 8px; border: 1px solid #eee;"><strong>Status</strong></td><td style="padding: 8px; border: 1px solid #eee;">${data.status}</td></tr>
|
||||
<tr><td style="padding: 8px; border: 1px solid #eee;"><strong>Duration</strong></td><td style="padding: 8px; border: 1px solid #eee;">${data.totalDurationMs}ms</td></tr>
|
||||
</table>
|
||||
<p style="color: #666; font-size: 12px;">This notification is configured for workspace notifications.</p>
|
||||
</div>
|
||||
`,
|
||||
html,
|
||||
text: `This is a test notification from Sim.\n\nWorkflow: ${data.workflowName}\nStatus: ${data.status}\nDuration: ${data.totalDurationMs}ms\n\nView Log: ${logUrl}\n\nThis notification is configured for workspace notifications.`,
|
||||
emailType: 'notifications',
|
||||
})
|
||||
|
||||
@@ -227,7 +230,7 @@ async function testSlack(
|
||||
elements: [
|
||||
{
|
||||
type: 'mrkdwn',
|
||||
text: 'This is a test notification from Sim Studio workspace notifications.',
|
||||
text: 'This is a test notification from Sim workspace notifications.',
|
||||
},
|
||||
],
|
||||
},
|
||||
|
||||
@@ -83,7 +83,6 @@ const createNotificationSchema = z
|
||||
levelFilter: levelFilterSchema.default(['info', 'error']),
|
||||
triggerFilter: triggerFilterSchema.default([...CORE_TRIGGER_TYPES]),
|
||||
includeFinalOutput: z.boolean().default(false),
|
||||
includeTraceSpans: z.boolean().default(false),
|
||||
includeRateLimits: z.boolean().default(false),
|
||||
includeUsageData: z.boolean().default(false),
|
||||
alertConfig: alertConfigSchema.optional(),
|
||||
@@ -138,7 +137,6 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
|
||||
levelFilter: workspaceNotificationSubscription.levelFilter,
|
||||
triggerFilter: workspaceNotificationSubscription.triggerFilter,
|
||||
includeFinalOutput: workspaceNotificationSubscription.includeFinalOutput,
|
||||
includeTraceSpans: workspaceNotificationSubscription.includeTraceSpans,
|
||||
includeRateLimits: workspaceNotificationSubscription.includeRateLimits,
|
||||
includeUsageData: workspaceNotificationSubscription.includeUsageData,
|
||||
webhookConfig: workspaceNotificationSubscription.webhookConfig,
|
||||
@@ -240,7 +238,6 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
|
||||
levelFilter: data.levelFilter,
|
||||
triggerFilter: data.triggerFilter,
|
||||
includeFinalOutput: data.includeFinalOutput,
|
||||
includeTraceSpans: data.includeTraceSpans,
|
||||
includeRateLimits: data.includeRateLimits,
|
||||
includeUsageData: data.includeUsageData,
|
||||
alertConfig: data.alertConfig || null,
|
||||
@@ -266,7 +263,6 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
|
||||
levelFilter: subscription.levelFilter,
|
||||
triggerFilter: subscription.triggerFilter,
|
||||
includeFinalOutput: subscription.includeFinalOutput,
|
||||
includeTraceSpans: subscription.includeTraceSpans,
|
||||
includeRateLimits: subscription.includeRateLimits,
|
||||
includeUsageData: subscription.includeUsageData,
|
||||
webhookConfig: subscription.webhookConfig,
|
||||
|
||||
@@ -3,9 +3,9 @@
|
||||
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
|
||||
import { Loader2 } from 'lucide-react'
|
||||
import { Skeleton } from '@/components/ui/skeleton'
|
||||
import { formatLatency, parseDuration } from '@/app/workspace/[workspaceId]/logs/utils'
|
||||
import { formatLatency } from '@/app/workspace/[workspaceId]/logs/utils'
|
||||
import type { DashboardStatsResponse, WorkflowStats } from '@/hooks/queries/logs'
|
||||
import { useFilterStore } from '@/stores/logs/filters/store'
|
||||
import type { WorkflowLog } from '@/stores/logs/filters/types'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
import { LineChart, WorkflowsList } from './components'
|
||||
|
||||
@@ -26,10 +26,6 @@ interface WorkflowExecution {
|
||||
overallSuccessRate: number
|
||||
}
|
||||
|
||||
const DEFAULT_SEGMENTS = 72
|
||||
const MIN_SEGMENT_PX = 10
|
||||
const MIN_SEGMENT_MS = 60000
|
||||
|
||||
const SKELETON_BAR_HEIGHTS = [
|
||||
45, 72, 38, 85, 52, 68, 30, 90, 55, 42, 78, 35, 88, 48, 65, 28, 82, 58, 40, 75, 32, 95, 50, 70,
|
||||
]
|
||||
@@ -120,13 +116,32 @@ function DashboardSkeleton() {
|
||||
}
|
||||
|
||||
interface DashboardProps {
|
||||
logs: WorkflowLog[]
|
||||
stats?: DashboardStatsResponse
|
||||
isLoading: boolean
|
||||
error?: Error | null
|
||||
}
|
||||
|
||||
export default function Dashboard({ logs, isLoading, error }: DashboardProps) {
|
||||
const [segmentCount, setSegmentCount] = useState<number>(DEFAULT_SEGMENTS)
|
||||
/**
|
||||
* Converts server WorkflowStats to the internal WorkflowExecution format.
|
||||
*/
|
||||
function toWorkflowExecution(wf: WorkflowStats): WorkflowExecution {
|
||||
return {
|
||||
workflowId: wf.workflowId,
|
||||
workflowName: wf.workflowName,
|
||||
overallSuccessRate: wf.overallSuccessRate,
|
||||
segments: wf.segments.map((seg) => ({
|
||||
timestamp: seg.timestamp,
|
||||
totalExecutions: seg.totalExecutions,
|
||||
successfulExecutions: seg.successfulExecutions,
|
||||
hasExecutions: seg.totalExecutions > 0,
|
||||
successRate:
|
||||
seg.totalExecutions > 0 ? (seg.successfulExecutions / seg.totalExecutions) * 100 : 100,
|
||||
avgDurationMs: seg.avgDurationMs,
|
||||
})),
|
||||
}
|
||||
}
|
||||
|
||||
export default function Dashboard({ stats, isLoading, error }: DashboardProps) {
|
||||
const [selectedSegments, setSelectedSegments] = useState<Record<string, number[]>>({})
|
||||
const [lastAnchorIndices, setLastAnchorIndices] = useState<Record<string, number>>({})
|
||||
const barsAreaRef = useRef<HTMLDivElement | null>(null)
|
||||
@@ -137,182 +152,32 @@ export default function Dashboard({ logs, isLoading, error }: DashboardProps) {
|
||||
|
||||
const expandedWorkflowId = workflowIds.length === 1 ? workflowIds[0] : null
|
||||
|
||||
const lastExecutionByWorkflow = useMemo(() => {
|
||||
const map = new Map<string, number>()
|
||||
for (const log of logs) {
|
||||
const wfId = log.workflowId
|
||||
if (!wfId) continue
|
||||
const ts = new Date(log.createdAt).getTime()
|
||||
const existing = map.get(wfId)
|
||||
if (!existing || ts > existing) {
|
||||
map.set(wfId, ts)
|
||||
}
|
||||
}
|
||||
return map
|
||||
}, [logs])
|
||||
|
||||
const timeBounds = useMemo(() => {
|
||||
if (logs.length === 0) {
|
||||
const now = new Date()
|
||||
return { start: now, end: now }
|
||||
}
|
||||
|
||||
let minTime = Number.POSITIVE_INFINITY
|
||||
let maxTime = Number.NEGATIVE_INFINITY
|
||||
|
||||
for (const log of logs) {
|
||||
const ts = new Date(log.createdAt).getTime()
|
||||
if (ts < minTime) minTime = ts
|
||||
if (ts > maxTime) maxTime = ts
|
||||
}
|
||||
|
||||
const end = new Date(Math.max(maxTime, Date.now()))
|
||||
const start = new Date(minTime)
|
||||
|
||||
return { start, end }
|
||||
}, [logs])
|
||||
|
||||
const { executions, aggregateSegments, segmentMs } = useMemo(() => {
|
||||
const allWorkflowsList = Object.values(allWorkflows)
|
||||
|
||||
if (allWorkflowsList.length === 0) {
|
||||
if (!stats) {
|
||||
return { executions: [], aggregateSegments: [], segmentMs: 0 }
|
||||
}
|
||||
|
||||
const { start, end } =
|
||||
logs.length > 0
|
||||
? timeBounds
|
||||
: { start: new Date(Date.now() - 24 * 60 * 60 * 1000), end: new Date() }
|
||||
|
||||
const totalMs = Math.max(1, end.getTime() - start.getTime())
|
||||
const calculatedSegmentMs = Math.max(
|
||||
MIN_SEGMENT_MS,
|
||||
Math.floor(totalMs / Math.max(1, segmentCount))
|
||||
)
|
||||
|
||||
const logsByWorkflow = new Map<string, WorkflowLog[]>()
|
||||
for (const log of logs) {
|
||||
const wfId = log.workflowId
|
||||
if (!logsByWorkflow.has(wfId)) {
|
||||
logsByWorkflow.set(wfId, [])
|
||||
}
|
||||
logsByWorkflow.get(wfId)!.push(log)
|
||||
}
|
||||
|
||||
const workflowExecutions: WorkflowExecution[] = []
|
||||
|
||||
for (const workflow of allWorkflowsList) {
|
||||
const workflowLogs = logsByWorkflow.get(workflow.id) || []
|
||||
|
||||
const segments: WorkflowExecution['segments'] = Array.from(
|
||||
{ length: segmentCount },
|
||||
(_, i) => ({
|
||||
timestamp: new Date(start.getTime() + i * calculatedSegmentMs).toISOString(),
|
||||
hasExecutions: false,
|
||||
totalExecutions: 0,
|
||||
successfulExecutions: 0,
|
||||
successRate: 100,
|
||||
avgDurationMs: 0,
|
||||
})
|
||||
)
|
||||
|
||||
const durations: number[][] = Array.from({ length: segmentCount }, () => [])
|
||||
|
||||
for (const log of workflowLogs) {
|
||||
const logTime = new Date(log.createdAt).getTime()
|
||||
const idx = Math.min(
|
||||
segmentCount - 1,
|
||||
Math.max(0, Math.floor((logTime - start.getTime()) / calculatedSegmentMs))
|
||||
)
|
||||
|
||||
segments[idx].totalExecutions += 1
|
||||
segments[idx].hasExecutions = true
|
||||
|
||||
if (log.level !== 'error') {
|
||||
segments[idx].successfulExecutions += 1
|
||||
}
|
||||
|
||||
const duration = parseDuration({ duration: log.duration ?? undefined })
|
||||
if (duration !== null && duration > 0) {
|
||||
durations[idx].push(duration)
|
||||
}
|
||||
}
|
||||
|
||||
let totalExecs = 0
|
||||
let totalSuccess = 0
|
||||
|
||||
for (let i = 0; i < segmentCount; i++) {
|
||||
const seg = segments[i]
|
||||
totalExecs += seg.totalExecutions
|
||||
totalSuccess += seg.successfulExecutions
|
||||
|
||||
if (seg.totalExecutions > 0) {
|
||||
seg.successRate = (seg.successfulExecutions / seg.totalExecutions) * 100
|
||||
}
|
||||
|
||||
if (durations[i].length > 0) {
|
||||
seg.avgDurationMs = Math.round(
|
||||
durations[i].reduce((sum, d) => sum + d, 0) / durations[i].length
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
const overallSuccessRate = totalExecs > 0 ? (totalSuccess / totalExecs) * 100 : 100
|
||||
|
||||
workflowExecutions.push({
|
||||
workflowId: workflow.id,
|
||||
workflowName: workflow.name,
|
||||
segments,
|
||||
overallSuccessRate,
|
||||
})
|
||||
}
|
||||
|
||||
workflowExecutions.sort((a, b) => {
|
||||
const errA = a.overallSuccessRate < 100 ? 1 - a.overallSuccessRate / 100 : 0
|
||||
const errB = b.overallSuccessRate < 100 ? 1 - b.overallSuccessRate / 100 : 0
|
||||
if (errA !== errB) return errB - errA
|
||||
return a.workflowName.localeCompare(b.workflowName)
|
||||
})
|
||||
|
||||
const aggSegments: {
|
||||
timestamp: string
|
||||
totalExecutions: number
|
||||
successfulExecutions: number
|
||||
avgDurationMs: number
|
||||
}[] = Array.from({ length: segmentCount }, (_, i) => ({
|
||||
timestamp: new Date(start.getTime() + i * calculatedSegmentMs).toISOString(),
|
||||
totalExecutions: 0,
|
||||
successfulExecutions: 0,
|
||||
avgDurationMs: 0,
|
||||
}))
|
||||
|
||||
const weightedDurationSums: number[] = Array(segmentCount).fill(0)
|
||||
const executionCounts: number[] = Array(segmentCount).fill(0)
|
||||
|
||||
for (const wf of workflowExecutions) {
|
||||
wf.segments.forEach((s, i) => {
|
||||
aggSegments[i].totalExecutions += s.totalExecutions
|
||||
aggSegments[i].successfulExecutions += s.successfulExecutions
|
||||
|
||||
if (s.avgDurationMs && s.avgDurationMs > 0 && s.totalExecutions > 0) {
|
||||
weightedDurationSums[i] += s.avgDurationMs * s.totalExecutions
|
||||
executionCounts[i] += s.totalExecutions
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
aggSegments.forEach((seg, i) => {
|
||||
if (executionCounts[i] > 0) {
|
||||
seg.avgDurationMs = weightedDurationSums[i] / executionCounts[i]
|
||||
}
|
||||
})
|
||||
const workflowExecutions = stats.workflows.map(toWorkflowExecution)
|
||||
|
||||
return {
|
||||
executions: workflowExecutions,
|
||||
aggregateSegments: aggSegments,
|
||||
segmentMs: calculatedSegmentMs,
|
||||
aggregateSegments: stats.aggregateSegments,
|
||||
segmentMs: stats.segmentMs,
|
||||
}
|
||||
}, [logs, timeBounds, segmentCount, allWorkflows])
|
||||
}, [stats])
|
||||
|
||||
const lastExecutionByWorkflow = useMemo(() => {
|
||||
const map = new Map<string, number>()
|
||||
for (const wf of executions) {
|
||||
for (let i = wf.segments.length - 1; i >= 0; i--) {
|
||||
if (wf.segments[i].totalExecutions > 0) {
|
||||
map.set(wf.workflowId, new Date(wf.segments[i].timestamp).getTime())
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
return map
|
||||
}, [executions])
|
||||
|
||||
const filteredExecutions = useMemo(() => {
|
||||
let filtered = executions
|
||||
@@ -511,37 +376,12 @@ export default function Dashboard({ logs, isLoading, error }: DashboardProps) {
|
||||
useEffect(() => {
|
||||
setSelectedSegments({})
|
||||
setLastAnchorIndices({})
|
||||
}, [logs, timeRange, workflowIds, searchQuery])
|
||||
|
||||
useEffect(() => {
|
||||
if (!barsAreaRef.current) return
|
||||
const el = barsAreaRef.current
|
||||
let debounceId: ReturnType<typeof setTimeout> | null = null
|
||||
const ro = new ResizeObserver(([entry]) => {
|
||||
const w = entry?.contentRect?.width || 720
|
||||
const n = Math.max(36, Math.min(96, Math.floor(w / MIN_SEGMENT_PX)))
|
||||
if (debounceId) clearTimeout(debounceId)
|
||||
debounceId = setTimeout(() => {
|
||||
setSegmentCount(n)
|
||||
}, 150)
|
||||
})
|
||||
ro.observe(el)
|
||||
const rect = el.getBoundingClientRect()
|
||||
if (rect?.width) {
|
||||
const n = Math.max(36, Math.min(96, Math.floor(rect.width / MIN_SEGMENT_PX)))
|
||||
setSegmentCount(n)
|
||||
}
|
||||
return () => {
|
||||
if (debounceId) clearTimeout(debounceId)
|
||||
ro.disconnect()
|
||||
}
|
||||
}, [])
|
||||
}, [stats, timeRange, workflowIds, searchQuery])
|
||||
|
||||
if (isLoading) {
|
||||
return <DashboardSkeleton />
|
||||
}
|
||||
|
||||
// Show error state
|
||||
if (error) {
|
||||
return (
|
||||
<div className='mt-[24px] flex flex-1 items-center justify-center'>
|
||||
|
||||
@@ -136,7 +136,6 @@ export function NotificationSettings({
|
||||
levelFilter: ['info', 'error'] as LogLevel[],
|
||||
triggerFilter: [...CORE_TRIGGER_TYPES] as CoreTriggerType[],
|
||||
includeFinalOutput: false,
|
||||
includeTraceSpans: false,
|
||||
includeRateLimits: false,
|
||||
includeUsageData: false,
|
||||
webhookUrl: '',
|
||||
@@ -203,7 +202,6 @@ export function NotificationSettings({
|
||||
levelFilter: ['info', 'error'],
|
||||
triggerFilter: [...CORE_TRIGGER_TYPES],
|
||||
includeFinalOutput: false,
|
||||
includeTraceSpans: false,
|
||||
includeRateLimits: false,
|
||||
includeUsageData: false,
|
||||
webhookUrl: '',
|
||||
@@ -422,7 +420,6 @@ export function NotificationSettings({
|
||||
levelFilter: formData.levelFilter,
|
||||
triggerFilter: formData.triggerFilter,
|
||||
includeFinalOutput: formData.includeFinalOutput,
|
||||
includeTraceSpans: formData.includeTraceSpans,
|
||||
includeRateLimits: formData.includeRateLimits,
|
||||
includeUsageData: formData.includeUsageData,
|
||||
alertConfig,
|
||||
@@ -474,7 +471,6 @@ export function NotificationSettings({
|
||||
levelFilter: subscription.levelFilter as LogLevel[],
|
||||
triggerFilter: subscription.triggerFilter as CoreTriggerType[],
|
||||
includeFinalOutput: subscription.includeFinalOutput,
|
||||
includeTraceSpans: subscription.includeTraceSpans,
|
||||
includeRateLimits: subscription.includeRateLimits,
|
||||
includeUsageData: subscription.includeUsageData,
|
||||
webhookUrl: subscription.webhookConfig?.url || '',
|
||||
@@ -830,7 +826,6 @@ export function NotificationSettings({
|
||||
<Combobox
|
||||
options={[
|
||||
{ label: 'Final Output', value: 'includeFinalOutput' },
|
||||
{ label: 'Trace Spans', value: 'includeTraceSpans' },
|
||||
{ label: 'Rate Limits', value: 'includeRateLimits' },
|
||||
{ label: 'Usage Data', value: 'includeUsageData' },
|
||||
]}
|
||||
@@ -838,7 +833,6 @@ export function NotificationSettings({
|
||||
multiSelectValues={
|
||||
[
|
||||
formData.includeFinalOutput && 'includeFinalOutput',
|
||||
formData.includeTraceSpans && 'includeTraceSpans',
|
||||
formData.includeRateLimits && 'includeRateLimits',
|
||||
formData.includeUsageData && 'includeUsageData',
|
||||
].filter(Boolean) as string[]
|
||||
@@ -847,7 +841,6 @@ export function NotificationSettings({
|
||||
setFormData({
|
||||
...formData,
|
||||
includeFinalOutput: values.includes('includeFinalOutput'),
|
||||
includeTraceSpans: values.includes('includeTraceSpans'),
|
||||
includeRateLimits: values.includes('includeRateLimits'),
|
||||
includeUsageData: values.includes('includeUsageData'),
|
||||
})
|
||||
@@ -856,13 +849,11 @@ export function NotificationSettings({
|
||||
overlayContent={(() => {
|
||||
const labels: Record<string, string> = {
|
||||
includeFinalOutput: 'Final Output',
|
||||
includeTraceSpans: 'Trace Spans',
|
||||
includeRateLimits: 'Rate Limits',
|
||||
includeUsageData: 'Usage Data',
|
||||
}
|
||||
const selected = [
|
||||
formData.includeFinalOutput && 'includeFinalOutput',
|
||||
formData.includeTraceSpans && 'includeTraceSpans',
|
||||
formData.includeRateLimits && 'includeRateLimits',
|
||||
formData.includeUsageData && 'includeUsageData',
|
||||
].filter(Boolean) as string[]
|
||||
|
||||
@@ -11,7 +11,7 @@ import {
|
||||
} from '@/lib/logs/filters'
|
||||
import { parseQuery, queryToApiParams } from '@/lib/logs/query-parser'
|
||||
import { useFolders } from '@/hooks/queries/folders'
|
||||
import { useDashboardLogs, useLogDetail, useLogsList } from '@/hooks/queries/logs'
|
||||
import { useDashboardStats, useLogDetail, useLogsList } from '@/hooks/queries/logs'
|
||||
import { useDebounce } from '@/hooks/use-debounce'
|
||||
import { useFilterStore } from '@/stores/logs/filters/store'
|
||||
import type { WorkflowLog } from '@/stores/logs/filters/types'
|
||||
@@ -130,7 +130,7 @@ export default function Logs() {
|
||||
[timeRange, startDate, endDate, level, workflowIds, folderIds, triggers, debouncedSearchQuery]
|
||||
)
|
||||
|
||||
const dashboardLogsQuery = useDashboardLogs(workspaceId, dashboardFilters, {
|
||||
const dashboardStatsQuery = useDashboardStats(workspaceId, dashboardFilters, {
|
||||
enabled: Boolean(workspaceId) && isInitialized.current,
|
||||
refetchInterval: isLive ? 5000 : false,
|
||||
})
|
||||
@@ -417,9 +417,9 @@ export default function Logs() {
|
||||
className={cn('flex min-h-0 flex-1 flex-col pr-[24px]', !isDashboardView && 'hidden')}
|
||||
>
|
||||
<Dashboard
|
||||
logs={dashboardLogsQuery.data ?? []}
|
||||
isLoading={!dashboardLogsQuery.data}
|
||||
error={dashboardLogsQuery.error}
|
||||
stats={dashboardStatsQuery.data}
|
||||
isLoading={dashboardStatsQuery.isLoading}
|
||||
error={dashboardStatsQuery.error}
|
||||
/>
|
||||
</div>
|
||||
|
||||
|
||||
@@ -10,12 +10,17 @@ import { createLogger } from '@sim/logger'
|
||||
import { task } from '@trigger.dev/sdk'
|
||||
import { and, eq, isNull, lte, or, sql } from 'drizzle-orm'
|
||||
import { v4 as uuidv4 } from 'uuid'
|
||||
import {
|
||||
type EmailRateLimitsData,
|
||||
type EmailUsageData,
|
||||
renderWorkflowNotificationEmail,
|
||||
} from '@/components/emails'
|
||||
import { checkUsageStatus } from '@/lib/billing/calculations/usage-monitor'
|
||||
import { getHighestPrioritySubscription } from '@/lib/billing/core/subscription'
|
||||
import { RateLimiter } from '@/lib/core/rate-limiter'
|
||||
import { decryptSecret } from '@/lib/core/security/encryption'
|
||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||
import type { TraceSpan, WorkflowExecutionLog } from '@/lib/logs/types'
|
||||
import type { WorkflowExecutionLog } from '@/lib/logs/types'
|
||||
import { sendEmail } from '@/lib/messaging/email/mailer'
|
||||
import type { AlertConfig } from '@/lib/notifications/alert-rules'
|
||||
|
||||
@@ -45,9 +50,8 @@ interface NotificationPayload {
|
||||
totalDurationMs: number
|
||||
cost?: Record<string, unknown>
|
||||
finalOutput?: unknown
|
||||
traceSpans?: unknown[]
|
||||
rateLimits?: Record<string, unknown>
|
||||
usage?: Record<string, unknown>
|
||||
rateLimits?: EmailRateLimitsData
|
||||
usage?: EmailUsageData
|
||||
}
|
||||
}
|
||||
|
||||
@@ -94,10 +98,6 @@ async function buildPayload(
|
||||
payload.data.finalOutput = executionData.finalOutput
|
||||
}
|
||||
|
||||
if (subscription.includeTraceSpans && executionData.traceSpans) {
|
||||
payload.data.traceSpans = executionData.traceSpans as unknown[]
|
||||
}
|
||||
|
||||
if (subscription.includeRateLimits && userId) {
|
||||
try {
|
||||
const userSubscription = await getHighestPrioritySubscription(userId)
|
||||
@@ -251,18 +251,6 @@ function formatAlertReason(alertConfig: AlertConfig): string {
|
||||
}
|
||||
}
|
||||
|
||||
function formatJsonForEmail(data: unknown, label: string): string {
|
||||
if (!data) return ''
|
||||
const json = JSON.stringify(data, null, 2)
|
||||
const escapedJson = json.replace(/</g, '<').replace(/>/g, '>')
|
||||
return `
|
||||
<div style="margin-top: 20px;">
|
||||
<h3 style="color: #1a1a1a; font-size: 14px; margin-bottom: 8px;">${label}</h3>
|
||||
<pre style="background: #f5f5f5; padding: 12px; border-radius: 6px; overflow-x: auto; font-size: 12px; color: #333; white-space: pre-wrap; word-wrap: break-word;">${escapedJson}</pre>
|
||||
</div>
|
||||
`
|
||||
}
|
||||
|
||||
async function deliverEmail(
|
||||
subscription: typeof workspaceNotificationSubscription.$inferSelect,
|
||||
payload: NotificationPayload,
|
||||
@@ -275,8 +263,7 @@ async function deliverEmail(
|
||||
const isError = payload.data.status !== 'success'
|
||||
const statusText = isError ? 'Error' : 'Success'
|
||||
const logUrl = buildLogUrl(subscription.workspaceId, payload.data.executionId)
|
||||
const baseUrl = getBaseUrl()
|
||||
const alertReason = alertConfig ? formatAlertReason(alertConfig) : null
|
||||
const alertReason = alertConfig ? formatAlertReason(alertConfig) : undefined
|
||||
|
||||
// Build subject line
|
||||
const subject = alertReason
|
||||
@@ -285,113 +272,36 @@ async function deliverEmail(
|
||||
? `Error Alert: ${payload.data.workflowName}`
|
||||
: `Workflow Completed: ${payload.data.workflowName}`
|
||||
|
||||
let includedDataHtml = ''
|
||||
// Build plain text for fallback
|
||||
let includedDataText = ''
|
||||
|
||||
if (payload.data.finalOutput) {
|
||||
includedDataHtml += formatJsonForEmail(payload.data.finalOutput, 'Final Output')
|
||||
includedDataText += `\n\nFinal Output:\n${JSON.stringify(payload.data.finalOutput, null, 2)}`
|
||||
}
|
||||
|
||||
if (
|
||||
payload.data.traceSpans &&
|
||||
Array.isArray(payload.data.traceSpans) &&
|
||||
payload.data.traceSpans.length > 0
|
||||
) {
|
||||
includedDataHtml += formatJsonForEmail(payload.data.traceSpans, 'Trace Spans')
|
||||
includedDataText += `\n\nTrace Spans:\n${JSON.stringify(payload.data.traceSpans, null, 2)}`
|
||||
}
|
||||
|
||||
if (payload.data.rateLimits) {
|
||||
includedDataHtml += formatJsonForEmail(payload.data.rateLimits, 'Rate Limits')
|
||||
includedDataText += `\n\nRate Limits:\n${JSON.stringify(payload.data.rateLimits, null, 2)}`
|
||||
}
|
||||
|
||||
if (payload.data.usage) {
|
||||
includedDataHtml += formatJsonForEmail(payload.data.usage, 'Usage Data')
|
||||
includedDataText += `\n\nUsage Data:\n${JSON.stringify(payload.data.usage, null, 2)}`
|
||||
}
|
||||
|
||||
// Render the email using the shared template
|
||||
const html = await renderWorkflowNotificationEmail({
|
||||
workflowName: payload.data.workflowName || 'Unknown Workflow',
|
||||
status: payload.data.status,
|
||||
trigger: payload.data.trigger,
|
||||
duration: formatDuration(payload.data.totalDurationMs),
|
||||
cost: formatCost(payload.data.cost),
|
||||
logUrl,
|
||||
alertReason,
|
||||
finalOutput: payload.data.finalOutput,
|
||||
rateLimits: payload.data.rateLimits,
|
||||
usageData: payload.data.usage,
|
||||
})
|
||||
|
||||
const result = await sendEmail({
|
||||
to: subscription.emailRecipients,
|
||||
subject,
|
||||
html: `
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
</head>
|
||||
<body style="background-color: #f5f5f7; font-family: HelveticaNeue, Helvetica, Arial, sans-serif; margin: 0; padding: 0;">
|
||||
<div style="max-width: 580px; margin: 30px auto; background-color: #ffffff; border-radius: 5px; overflow: hidden;">
|
||||
<!-- Header with Logo -->
|
||||
<div style="padding: 30px 0; text-align: center;">
|
||||
<img src="${baseUrl}/logo/reverse/text/medium.png" width="114" alt="Sim Studio" style="margin: 0 auto;" />
|
||||
</div>
|
||||
|
||||
<!-- Section Border -->
|
||||
<div style="display: flex; width: 100%;">
|
||||
<div style="border-bottom: 1px solid #eeeeee; width: 249px;"></div>
|
||||
<div style="border-bottom: 1px solid #6F3DFA; width: 102px;"></div>
|
||||
<div style="border-bottom: 1px solid #eeeeee; width: 249px;"></div>
|
||||
</div>
|
||||
|
||||
<!-- Content -->
|
||||
<div style="padding: 5px 30px 20px 30px;">
|
||||
<h2 style="font-size: 20px; color: #333333; margin: 20px 0;">
|
||||
${alertReason ? 'Alert Triggered' : isError ? 'Workflow Execution Failed' : 'Workflow Execution Completed'}
|
||||
</h2>
|
||||
${alertReason ? `<p style="color: #d97706; background: #fef3c7; padding: 12px; border-radius: 6px; margin-bottom: 20px; font-size: 14px;"><strong>Reason:</strong> ${alertReason}</p>` : ''}
|
||||
|
||||
<table style="width: 100%; border-collapse: collapse; margin-bottom: 20px;">
|
||||
<tr style="border-bottom: 1px solid #eee;">
|
||||
<td style="padding: 12px 0; color: #666; width: 140px;">Workflow</td>
|
||||
<td style="padding: 12px 0; color: #333; font-weight: 500;">${payload.data.workflowName}</td>
|
||||
</tr>
|
||||
<tr style="border-bottom: 1px solid #eee;">
|
||||
<td style="padding: 12px 0; color: #666;">Status</td>
|
||||
<td style="padding: 12px 0; color: ${isError ? '#ef4444' : '#22c55e'}; font-weight: 500;">${statusText}</td>
|
||||
</tr>
|
||||
<tr style="border-bottom: 1px solid #eee;">
|
||||
<td style="padding: 12px 0; color: #666;">Trigger</td>
|
||||
<td style="padding: 12px 0; color: #333;">${payload.data.trigger}</td>
|
||||
</tr>
|
||||
<tr style="border-bottom: 1px solid #eee;">
|
||||
<td style="padding: 12px 0; color: #666;">Duration</td>
|
||||
<td style="padding: 12px 0; color: #333;">${formatDuration(payload.data.totalDurationMs)}</td>
|
||||
</tr>
|
||||
<tr style="border-bottom: 1px solid #eee;">
|
||||
<td style="padding: 12px 0; color: #666;">Cost</td>
|
||||
<td style="padding: 12px 0; color: #333;">${formatCost(payload.data.cost)}</td>
|
||||
</tr>
|
||||
</table>
|
||||
|
||||
<a href="${logUrl}" style="display: inline-block; background-color: #6F3DFA; color: #ffffff; font-weight: bold; font-size: 16px; padding: 12px 30px; border-radius: 5px; text-decoration: none; text-align: center; margin: 20px 0;">
|
||||
View Execution Log →
|
||||
</a>
|
||||
|
||||
${includedDataHtml}
|
||||
|
||||
<p style="font-size: 16px; line-height: 1.5; color: #333333; margin-top: 30px;">
|
||||
Best regards,<br />
|
||||
The Sim Team
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Footer -->
|
||||
<div style="max-width: 580px; margin: 0 auto; padding: 20px 0; text-align: center;">
|
||||
<p style="font-size: 12px; color: #706a7b; margin: 8px 0 0 0;">
|
||||
© ${new Date().getFullYear()} Sim Studio, All Rights Reserved
|
||||
</p>
|
||||
<p style="font-size: 12px; color: #706a7b; margin: 8px 0 0 0;">
|
||||
<a href="${baseUrl}/privacy" style="color: #706a7b; text-decoration: underline;">Privacy Policy</a> •
|
||||
<a href="${baseUrl}/terms" style="color: #706a7b; text-decoration: underline;">Terms of Service</a>
|
||||
</p>
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
`,
|
||||
html,
|
||||
text: `${subject}\n${alertReason ? `\nReason: ${alertReason}\n` : ''}\nWorkflow: ${payload.data.workflowName}\nStatus: ${statusText}\nTrigger: ${payload.data.trigger}\nDuration: ${formatDuration(payload.data.totalDurationMs)}\nCost: ${formatCost(payload.data.cost)}\n\nView Log: ${logUrl}${includedDataText}`,
|
||||
emailType: 'notifications',
|
||||
})
|
||||
@@ -479,26 +389,6 @@ async function deliverSlack(
|
||||
})
|
||||
}
|
||||
|
||||
if (
|
||||
payload.data.traceSpans &&
|
||||
Array.isArray(payload.data.traceSpans) &&
|
||||
payload.data.traceSpans.length > 0
|
||||
) {
|
||||
const spansSummary = (payload.data.traceSpans as TraceSpan[])
|
||||
.map((span) => {
|
||||
const status = span.status === 'success' ? '✓' : '✗'
|
||||
return `${status} ${span.name || 'Unknown'} (${formatDuration(span.duration || 0)})`
|
||||
})
|
||||
.join('\n')
|
||||
blocks.push({
|
||||
type: 'section',
|
||||
text: {
|
||||
type: 'mrkdwn',
|
||||
text: `*Trace Spans:*\n\`\`\`${spansSummary}\`\`\``,
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
if (payload.data.rateLimits) {
|
||||
const limitsStr = JSON.stringify(payload.data.rateLimits, null, 2)
|
||||
blocks.push({
|
||||
|
||||
@@ -19,10 +19,10 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
|
||||
title: 'Operation',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'Create Run', id: 'create_run' },
|
||||
{ label: 'Create Runs Batch', id: 'create_runs_batch' },
|
||||
{ label: 'Create Run', id: 'langsmith_create_run' },
|
||||
{ label: 'Create Runs Batch', id: 'langsmith_create_runs_batch' },
|
||||
],
|
||||
value: () => 'create_run',
|
||||
value: () => 'langsmith_create_run',
|
||||
},
|
||||
{
|
||||
id: 'apiKey',
|
||||
@@ -37,15 +37,15 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
|
||||
title: 'Run ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Auto-generated if blank',
|
||||
condition: { field: 'operation', value: 'create_run' },
|
||||
condition: { field: 'operation', value: 'langsmith_create_run' },
|
||||
},
|
||||
{
|
||||
id: 'name',
|
||||
title: 'Name',
|
||||
type: 'short-input',
|
||||
placeholder: 'Run name',
|
||||
required: { field: 'operation', value: 'create_run' },
|
||||
condition: { field: 'operation', value: 'create_run' },
|
||||
required: { field: 'operation', value: 'langsmith_create_run' },
|
||||
condition: { field: 'operation', value: 'langsmith_create_run' },
|
||||
},
|
||||
{
|
||||
id: 'run_type',
|
||||
@@ -61,23 +61,22 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
|
||||
{ label: 'Parser', id: 'parser' },
|
||||
],
|
||||
value: () => 'chain',
|
||||
required: { field: 'operation', value: 'create_run' },
|
||||
condition: { field: 'operation', value: 'create_run' },
|
||||
required: { field: 'operation', value: 'langsmith_create_run' },
|
||||
condition: { field: 'operation', value: 'langsmith_create_run' },
|
||||
},
|
||||
{
|
||||
id: 'start_time',
|
||||
title: 'Start Time',
|
||||
type: 'short-input',
|
||||
placeholder: '2025-01-01T12:00:00Z',
|
||||
condition: { field: 'operation', value: 'create_run' },
|
||||
value: () => new Date().toISOString(),
|
||||
placeholder: 'e.g. 2025-01-01T12:00:00Z (defaults to now)',
|
||||
condition: { field: 'operation', value: 'langsmith_create_run' },
|
||||
},
|
||||
{
|
||||
id: 'end_time',
|
||||
title: 'End Time',
|
||||
type: 'short-input',
|
||||
placeholder: '2025-01-01T12:00:30Z',
|
||||
condition: { field: 'operation', value: 'create_run' },
|
||||
condition: { field: 'operation', value: 'langsmith_create_run' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
@@ -85,7 +84,7 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
|
||||
title: 'Inputs',
|
||||
type: 'code',
|
||||
placeholder: '{"input":"value"}',
|
||||
condition: { field: 'operation', value: 'create_run' },
|
||||
condition: { field: 'operation', value: 'langsmith_create_run' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
@@ -93,7 +92,7 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
|
||||
title: 'Outputs',
|
||||
type: 'code',
|
||||
placeholder: '{"output":"value"}',
|
||||
condition: { field: 'operation', value: 'create_run' },
|
||||
condition: { field: 'operation', value: 'langsmith_create_run' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
@@ -101,7 +100,7 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
|
||||
title: 'Metadata',
|
||||
type: 'code',
|
||||
placeholder: '{"ls_model":"gpt-4"}',
|
||||
condition: { field: 'operation', value: 'create_run' },
|
||||
condition: { field: 'operation', value: 'langsmith_create_run' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
@@ -109,7 +108,7 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
|
||||
title: 'Tags',
|
||||
type: 'code',
|
||||
placeholder: '["production","workflow"]',
|
||||
condition: { field: 'operation', value: 'create_run' },
|
||||
condition: { field: 'operation', value: 'langsmith_create_run' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
@@ -117,7 +116,7 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
|
||||
title: 'Parent Run ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Parent run identifier',
|
||||
condition: { field: 'operation', value: 'create_run' },
|
||||
condition: { field: 'operation', value: 'langsmith_create_run' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
@@ -125,7 +124,7 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
|
||||
title: 'Trace ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Auto-generated if blank',
|
||||
condition: { field: 'operation', value: 'create_run' },
|
||||
condition: { field: 'operation', value: 'langsmith_create_run' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
@@ -133,7 +132,7 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
      title: 'Session ID',
      type: 'short-input',
      placeholder: 'Session identifier',
      condition: { field: 'operation', value: 'create_run' },
      condition: { field: 'operation', value: 'langsmith_create_run' },
      mode: 'advanced',
    },
    {
@@ -141,7 +140,7 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
      title: 'Session Name',
      type: 'short-input',
      placeholder: 'Session name',
      condition: { field: 'operation', value: 'create_run' },
      condition: { field: 'operation', value: 'langsmith_create_run' },
      mode: 'advanced',
    },
    {
@@ -149,7 +148,7 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
      title: 'Status',
      type: 'short-input',
      placeholder: 'success',
      condition: { field: 'operation', value: 'create_run' },
      condition: { field: 'operation', value: 'langsmith_create_run' },
      mode: 'advanced',
    },
    {
@@ -157,7 +156,7 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
      title: 'Error',
      type: 'long-input',
      placeholder: 'Error message',
      condition: { field: 'operation', value: 'create_run' },
      condition: { field: 'operation', value: 'langsmith_create_run' },
      mode: 'advanced',
    },
    {
@@ -165,7 +164,7 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
      title: 'Dotted Order',
      type: 'short-input',
      placeholder: 'Defaults to <YYYYMMDDTHHMMSSffffff>Z<id>',
      condition: { field: 'operation', value: 'create_run' },
      condition: { field: 'operation', value: 'langsmith_create_run' },
      mode: 'advanced',
    },
    {
@@ -173,7 +172,7 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
      title: 'Events',
      type: 'code',
      placeholder: '[{"event":"token","value":1}]',
      condition: { field: 'operation', value: 'create_run' },
      condition: { field: 'operation', value: 'langsmith_create_run' },
      mode: 'advanced',
    },
    {
@@ -181,29 +180,36 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
      title: 'Post Runs',
      type: 'code',
      placeholder: '[{"id":"...","name":"...","run_type":"chain","start_time":"..."}]',
      condition: { field: 'operation', value: 'create_runs_batch' },
      condition: { field: 'operation', value: 'langsmith_create_runs_batch' },
      wandConfig: {
        enabled: true,
        generationType: 'json-object',
        prompt: `Output ONLY a JSON array with a single LangSmith run object. No explanation.
Required: name (string), run_type ("tool"|"chain"|"llm"|"retriever"|"embedding"|"prompt"|"parser")
Optional: inputs, outputs, tags, extra, session_name, end_time
Fields id, trace_id, dotted_order, start_time are auto-generated if omitted.`,
      },
    },
    {
      id: 'patch',
      title: 'Patch Runs',
      type: 'code',
      placeholder: '[{"id":"...","name":"...","run_type":"chain","start_time":"..."}]',
      condition: { field: 'operation', value: 'create_runs_batch' },
      condition: { field: 'operation', value: 'langsmith_create_runs_batch' },
      mode: 'advanced',
      wandConfig: {
        enabled: true,
        generationType: 'json-object',
        prompt: `Output ONLY a JSON array with a single LangSmith run object to update. No explanation.
Required: id (existing run UUID), name, run_type ("tool"|"chain"|"llm"|"retriever"|"embedding"|"prompt"|"parser")
Common patch fields: outputs, end_time, status, error`,
      },
    },
  ],
  tools: {
    access: ['langsmith_create_run', 'langsmith_create_runs_batch'],
    config: {
      tool: (params) => {
        switch (params.operation) {
          case 'create_runs_batch':
            return 'langsmith_create_runs_batch'
          case 'create_run':
          default:
            return 'langsmith_create_run'
        }
      },
      tool: (params) => params.operation,
      params: (params) => {
        const parseJsonValue = (value: unknown, label: string) => {
          if (value === undefined || value === null || value === '') {
@@ -221,7 +227,7 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
          return value
        }

        if (params.operation === 'create_runs_batch') {
        if (params.operation === 'langsmith_create_runs_batch') {
          const post = parseJsonValue(params.post, 'post runs')
          const patch = parseJsonValue(params.patch, 'patch runs')
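With the switch/case selector replaced by `tool: (params) => params.operation`, the operation dropdown's option IDs have to be the tool IDs themselves. A rough sketch of what that sub-block could look like for this block; the `dropdown` sub-block shape and the labels are assumptions for illustration, only the two IDs come from the diff above:

```typescript
// Sketch only: operation option IDs double as tool IDs, so no switch/case mapping is needed.
{
  id: 'operation',
  title: 'Operation',
  type: 'dropdown', // assumed sub-block type
  options: [
    { label: 'Create Run', id: 'langsmith_create_run' },
    { label: 'Create Runs (Batch)', id: 'langsmith_create_runs_batch' },
  ],
},
```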
@@ -173,6 +173,17 @@ export const baseStyles = {
    margin: 0,
  },

  /** Code block text (for JSON/code display) */
  codeBlock: {
    fontSize: typography.fontSize.caption,
    lineHeight: typography.lineHeight.caption,
    color: colors.textSecondary,
    fontFamily: 'monospace',
    whiteSpace: 'pre-wrap' as const,
    wordWrap: 'break-word' as const,
    margin: 0,
  },

  /** Highlighted info box (e.g., "What you get with Pro") */
  infoBox: {
    backgroundColor: colors.bgOuter,
@@ -61,7 +61,7 @@ export function EmailFooter({ baseUrl = getBaseUrl(), unsubscribe, messageId }:
        <tbody>
          <tr>
            <td align='left' style={{ padding: '0 8px 0 0' }}>
              <Link href='https://x.com/simdotai' rel='noopener noreferrer'>
              <Link href={`${baseUrl}/x`} rel='noopener noreferrer'>
                <Img
                  src={`${baseUrl}/static/x-icon.png`}
                  width='20'
@@ -71,7 +71,7 @@ export function EmailFooter({ baseUrl = getBaseUrl(), unsubscribe, messageId }:
              </Link>
            </td>
            <td align='left' style={{ padding: '0 8px' }}>
              <Link href='https://discord.gg/Hr4UWYEcTT' rel='noopener noreferrer'>
              <Link href={`${baseUrl}/discord`} rel='noopener noreferrer'>
                <Img
                  src={`${baseUrl}/static/discord-icon.png`}
                  width='20'
@@ -81,7 +81,7 @@ export function EmailFooter({ baseUrl = getBaseUrl(), unsubscribe, messageId }:
              </Link>
            </td>
            <td align='left' style={{ padding: '0 8px' }}>
              <Link href='https://github.com/simstudioai/sim' rel='noopener noreferrer'>
              <Link href={`${baseUrl}/github`} rel='noopener noreferrer'>
                <Img
                  src={`${baseUrl}/static/github-icon.png`}
                  width='20'
@@ -10,6 +10,8 @@ export * from './careers'
export * from './components'
// Invitation emails
export * from './invitations'
// Notification emails
export * from './notifications'
// Render functions and subjects
export * from './render'
export * from './subjects'
apps/sim/components/emails/notifications/index.ts (new file, 7 lines)
@@ -0,0 +1,7 @@
export type {
  EmailRateLimitStatus,
  EmailRateLimitsData,
  EmailUsageData,
  WorkflowNotificationEmailProps,
} from './workflow-notification-email'
export { WorkflowNotificationEmail } from './workflow-notification-email'
@@ -0,0 +1,161 @@
import { Link, Section, Text } from '@react-email/components'
import { baseStyles } from '@/components/emails/_styles'
import { EmailLayout } from '@/components/emails/components'
import { getBrandConfig } from '@/lib/branding/branding'

/**
 * Serialized rate limit status for email payloads.
 * Note: This differs from the canonical RateLimitStatus in @/lib/core/rate-limiter
 * which uses Date for resetAt. This version uses string for JSON serialization.
 */
export interface EmailRateLimitStatus {
  requestsPerMinute: number
  remaining: number
  maxBurst?: number
  resetAt?: string
}

export interface EmailRateLimitsData {
  sync?: EmailRateLimitStatus
  async?: EmailRateLimitStatus
}

export interface EmailUsageData {
  currentPeriodCost: number
  limit: number
  percentUsed: number
  isExceeded?: boolean
}

export interface WorkflowNotificationEmailProps {
  workflowName: string
  status: 'success' | 'error'
  trigger: string
  duration: string
  cost: string
  logUrl: string
  alertReason?: string
  finalOutput?: unknown
  rateLimits?: EmailRateLimitsData
  usageData?: EmailUsageData
}

function formatJsonForEmail(data: unknown): string {
  return JSON.stringify(data, null, 2)
}

export function WorkflowNotificationEmail({
  workflowName,
  status,
  trigger,
  duration,
  cost,
  logUrl,
  alertReason,
  finalOutput,
  rateLimits,
  usageData,
}: WorkflowNotificationEmailProps) {
  const brand = getBrandConfig()
  const isError = status === 'error'
  const statusText = isError ? 'Error' : 'Success'

  const previewText = alertReason
    ? `${brand.name}: Alert - ${workflowName}`
    : isError
      ? `${brand.name}: Workflow Failed - ${workflowName}`
      : `${brand.name}: Workflow Completed - ${workflowName}`

  const message = alertReason
    ? 'An alert was triggered for your workflow.'
    : isError
      ? 'Your workflow execution failed.'
      : 'Your workflow completed successfully.'

  return (
    <EmailLayout preview={previewText}>
      <Text style={{ ...baseStyles.paragraph, marginTop: 0 }}>Hello,</Text>
      <Text style={baseStyles.paragraph}>{message}</Text>

      <Section style={baseStyles.infoBox}>
        {alertReason && (
          <Text style={baseStyles.infoBoxList}>
            <strong>Reason:</strong> {alertReason}
          </Text>
        )}
        <Text style={{ ...baseStyles.infoBoxList, marginTop: alertReason ? '4px' : 0 }}>
          <strong>Workflow:</strong> {workflowName}
        </Text>
        <Text style={{ ...baseStyles.infoBoxList, marginTop: '4px' }}>
          <strong>Status:</strong> {statusText}
        </Text>
        <Text style={{ ...baseStyles.infoBoxList, marginTop: '4px' }}>
          <strong>Trigger:</strong> {trigger}
        </Text>
        <Text style={{ ...baseStyles.infoBoxList, marginTop: '4px' }}>
          <strong>Duration:</strong> {duration}
        </Text>
        <Text style={{ ...baseStyles.infoBoxList, marginTop: '4px' }}>
          <strong>Cost:</strong> {cost}
        </Text>
      </Section>

      <Link href={logUrl} style={{ textDecoration: 'none' }}>
        <Text style={baseStyles.button}>View Execution Log</Text>
      </Link>

      {rateLimits && (rateLimits.sync || rateLimits.async) ? (
        <>
          <div style={baseStyles.divider} />
          <Section style={baseStyles.infoBox}>
            <Text style={baseStyles.infoBoxTitle}>Rate Limits</Text>
            {rateLimits.sync && (
              <Text style={baseStyles.infoBoxList}>
                Sync: {rateLimits.sync.remaining} of {rateLimits.sync.requestsPerMinute} remaining
              </Text>
            )}
            {rateLimits.async && (
              <Text style={{ ...baseStyles.infoBoxList, marginTop: rateLimits.sync ? '4px' : 0 }}>
                Async: {rateLimits.async.remaining} of {rateLimits.async.requestsPerMinute}{' '}
                remaining
              </Text>
            )}
          </Section>
        </>
      ) : null}

      {usageData ? (
        <>
          <div style={baseStyles.divider} />
          <Section style={baseStyles.infoBox}>
            <Text style={baseStyles.infoBoxTitle}>Usage</Text>
            <Text style={baseStyles.infoBoxList}>
              ${usageData.currentPeriodCost.toFixed(2)} of ${usageData.limit.toFixed(2)} used (
              {usageData.percentUsed.toFixed(1)}%)
            </Text>
          </Section>
        </>
      ) : null}

      {finalOutput ? (
        <>
          <div style={baseStyles.divider} />
          <Section style={baseStyles.infoBox}>
            <Text style={baseStyles.infoBoxTitle}>Final Output</Text>
            <Text style={{ ...baseStyles.codeBlock, marginTop: '8px' }}>
              {formatJsonForEmail(finalOutput)}
            </Text>
          </Section>
        </>
      ) : null}

      <div style={baseStyles.divider} />

      <Text style={{ ...baseStyles.footerText, textAlign: 'left' }}>
        You're receiving this because you subscribed to workflow notifications.
      </Text>
    </EmailLayout>
  )
}

export default WorkflowNotificationEmail
@@ -15,6 +15,10 @@ import {
  PollingGroupInvitationEmail,
  WorkspaceInvitationEmail,
} from '@/components/emails/invitations'
import {
  WorkflowNotificationEmail,
  type WorkflowNotificationEmailProps,
} from '@/components/emails/notifications'
import { HelpConfirmationEmail } from '@/components/emails/support'
import { getBaseUrl } from '@/lib/core/utils/urls'

@@ -258,3 +262,9 @@ export async function renderCareersSubmissionEmail(params: {
    })
  )
}

export async function renderWorkflowNotificationEmail(
  params: WorkflowNotificationEmailProps
): Promise<string> {
  return await render(WorkflowNotificationEmail(params))
}
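For orientation, a minimal sketch of how this render helper might be consumed by the notification pipeline; the caller, the sample prop values, and where the HTML ultimately gets sent are assumptions, only the function signature comes from the diff:

```typescript
import { renderWorkflowNotificationEmail } from '@/components/emails'

// Hypothetical caller: build the HTML for a failed run and hand it to whatever
// mailer the notification pipeline uses (the mailer itself is not part of this diff).
export async function buildFailureEmailHtml(): Promise<string> {
  return renderWorkflowNotificationEmail({
    workflowName: 'Nightly Sync', // illustrative values throughout
    status: 'error',
    trigger: 'schedule',
    duration: '3.2s',
    cost: '$0.0010',
    logUrl: 'https://example.com/workspace/ws_1/logs?search=exec_1',
  })
}
```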
@@ -1,8 +1,15 @@
import { keepPreviousData, useInfiniteQuery, useQuery } from '@tanstack/react-query'
import { getEndDateFromTimeRange, getStartDateFromTimeRange } from '@/lib/logs/filters'
import { parseQuery, queryToApiParams } from '@/lib/logs/query-parser'
import type {
  DashboardStatsResponse,
  SegmentStats,
  WorkflowStats,
} from '@/app/api/logs/stats/route'
import type { LogsResponse, TimeRange, WorkflowLog } from '@/stores/logs/filters/types'

export type { DashboardStatsResponse, SegmentStats, WorkflowStats }

export const logKeys = {
  all: ['logs'] as const,
  lists: () => [...logKeys.all, 'list'] as const,
@@ -10,8 +17,8 @@ export const logKeys = {
    [...logKeys.lists(), workspaceId ?? '', filters] as const,
  details: () => [...logKeys.all, 'detail'] as const,
  detail: (logId: string | undefined) => [...logKeys.details(), logId ?? ''] as const,
  dashboard: (workspaceId: string | undefined, filters: Record<string, unknown>) =>
    [...logKeys.all, 'dashboard', workspaceId ?? '', filters] as const,
  stats: (workspaceId: string | undefined, filters: object) =>
    [...logKeys.all, 'stats', workspaceId ?? '', filters] as const,
  executionSnapshots: () => [...logKeys.all, 'executionSnapshot'] as const,
  executionSnapshot: (executionId: string | undefined) =>
    [...logKeys.executionSnapshots(), executionId ?? ''] as const,
@@ -147,52 +154,96 @@ export function useLogDetail(logId: string | undefined) {
  })
}

const DASHBOARD_LOGS_LIMIT = 10000
interface DashboardFilters {
  timeRange: TimeRange
  startDate?: string
  endDate?: string
  level: string
  workflowIds: string[]
  folderIds: string[]
  triggers: string[]
  searchQuery: string
  segmentCount?: number
}

/**
 * Fetches all logs for dashboard metrics (non-paginated).
 * Uses same filters as the logs list but with a high limit to get all data.
 * Fetches aggregated dashboard statistics from the server.
 * Uses SQL aggregation for efficient computation without row limits.
 */
async function fetchAllLogs(
async function fetchDashboardStats(
  workspaceId: string,
  filters: Omit<LogFilters, 'limit'>
): Promise<WorkflowLog[]> {
  filters: DashboardFilters
): Promise<DashboardStatsResponse> {
  const params = new URLSearchParams()

  params.set('workspaceId', workspaceId)
  params.set('limit', DASHBOARD_LOGS_LIMIT.toString())
  params.set('offset', '0')

  applyFilterParams(params, filters)

  const response = await fetch(`/api/logs?${params.toString()}`)

  if (!response.ok) {
    throw new Error('Failed to fetch logs for dashboard')
  if (filters.segmentCount) {
    params.set('segmentCount', filters.segmentCount.toString())
  }

  const apiData: LogsResponse = await response.json()
  return apiData.data || []
  if (filters.level !== 'all') {
    params.set('level', filters.level)
  }

  if (filters.triggers.length > 0) {
    params.set('triggers', filters.triggers.join(','))
  }

  if (filters.workflowIds.length > 0) {
    params.set('workflowIds', filters.workflowIds.join(','))
  }

  if (filters.folderIds.length > 0) {
    params.set('folderIds', filters.folderIds.join(','))
  }

  const startDate = getStartDateFromTimeRange(filters.timeRange, filters.startDate)
  if (startDate) {
    params.set('startDate', startDate.toISOString())
  }

  const endDate = getEndDateFromTimeRange(filters.timeRange, filters.endDate)
  if (endDate) {
    params.set('endDate', endDate.toISOString())
  }

  if (filters.searchQuery.trim()) {
    const parsedQuery = parseQuery(filters.searchQuery.trim())
    const searchParams = queryToApiParams(parsedQuery)

    for (const [key, value] of Object.entries(searchParams)) {
      params.set(key, value)
    }
  }

  const response = await fetch(`/api/logs/stats?${params.toString()}`)

  if (!response.ok) {
    throw new Error('Failed to fetch dashboard stats')
  }

  return response.json()
}

interface UseDashboardLogsOptions {
interface UseDashboardStatsOptions {
  enabled?: boolean
  refetchInterval?: number | false
}

/**
 * Hook for fetching all logs for dashboard metrics.
 * Unlike useLogsList, this fetches all logs in a single request
 * to ensure dashboard metrics are computed from complete data.
 * Hook for fetching aggregated dashboard statistics.
 * Uses server-side SQL aggregation for efficient computation
 * without any row limits - all matching logs are included in the stats.
 */
export function useDashboardLogs(
export function useDashboardStats(
  workspaceId: string | undefined,
  filters: Omit<LogFilters, 'limit'>,
  options?: UseDashboardLogsOptions
  filters: DashboardFilters,
  options?: UseDashboardStatsOptions
) {
  return useQuery({
    queryKey: logKeys.dashboard(workspaceId, filters),
    queryFn: () => fetchAllLogs(workspaceId as string, filters),
    queryKey: logKeys.stats(workspaceId, filters),
    queryFn: () => fetchDashboardStats(workspaceId as string, filters),
    enabled: Boolean(workspaceId) && (options?.enabled ?? true),
    refetchInterval: options?.refetchInterval ?? false,
    staleTime: 0,
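A minimal sketch of how a dashboard view might consume the renamed hook; the wrapper, the import path, and the filter values are illustrative assumptions, while the hook signature and the `DashboardFilters` shape come from the diff above:

```typescript
import type { TimeRange } from '@/stores/logs/filters/types'
// Import path for the hook is assumed for illustration.
import { useDashboardStats } from '@/hooks/use-logs'

// Illustrative consumer: poll aggregated stats for the selected time range.
export function useWorkspaceDashboard(workspaceId: string | undefined, timeRange: TimeRange) {
  return useDashboardStats(
    workspaceId,
    {
      timeRange,
      level: 'all', // no level filter; matches the `level !== 'all'` check above
      workflowIds: [],
      folderIds: [],
      triggers: [],
      searchQuery: '',
      segmentCount: 24, // e.g. one segment per hour over a day
    },
    { refetchInterval: 30_000 } // refresh every 30 seconds
  )
}
```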
@@ -61,7 +61,6 @@ export interface NotificationSubscription {
  levelFilter: LogLevel[]
  triggerFilter: TriggerType[]
  includeFinalOutput: boolean
  includeTraceSpans: boolean
  includeRateLimits: boolean
  includeUsageData: boolean
  webhookConfig?: WebhookConfig | null
@@ -106,7 +105,6 @@ interface CreateNotificationParams {
  levelFilter: LogLevel[]
  triggerFilter: TriggerType[]
  includeFinalOutput: boolean
  includeTraceSpans: boolean
  includeRateLimits: boolean
  includeUsageData: boolean
  alertConfig?: AlertConfig | null
@@ -592,6 +592,7 @@ export const auth = betterAuth({
      sendVerificationOnSignUp: false,
      otpLength: 6, // Explicitly set the OTP length
      expiresIn: 15 * 60, // 15 minutes in seconds
      overrideDefaultEmailVerification: true,
    }),
    genericOAuth({
      config: [
@@ -25,7 +25,6 @@ function prepareLogData(
  log: WorkflowExecutionLog,
  subscription: {
    includeFinalOutput: boolean
    includeTraceSpans: boolean
  }
) {
  const preparedLog = { ...log, executionData: {} as Record<string, unknown> }
@@ -38,10 +37,6 @@ function prepareLogData(
      webhookData.finalOutput = data.finalOutput
    }

    if (subscription.includeTraceSpans && data.traceSpans) {
      webhookData.traceSpans = data.traceSpans
    }

    preparedLog.executionData = webhookData
  }
@@ -253,6 +253,25 @@ const nextConfig: NextConfig = {
  async redirects() {
    const redirects = []

    // Social link redirects (used in emails to avoid spam filter issues)
    redirects.push(
      {
        source: '/discord',
        destination: 'https://discord.gg/Hr4UWYEcTT',
        permanent: false,
      },
      {
        source: '/x',
        destination: 'https://x.com/simdotai',
        permanent: false,
      },
      {
        source: '/github',
        destination: 'https://github.com/simstudioai/sim',
        permanent: false,
      }
    )

    // Redirect /building and /blog to /studio (legacy URL support)
    redirects.push(
      {
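A quick way to sanity-check the new email redirects against a local dev server (run with Node); the port is an assumption, and `permanent: false` should surface as a 307 in Next.js:

```typescript
// Smoke test for the social redirects, assuming a dev server on localhost:3000.
const targets: Record<string, string> = {
  '/discord': 'https://discord.gg/Hr4UWYEcTT',
  '/x': 'https://x.com/simdotai',
  '/github': 'https://github.com/simstudioai/sim',
}

for (const [path, expected] of Object.entries(targets)) {
  const res = await fetch(`http://localhost:3000${path}`, { redirect: 'manual' })
  const ok = res.status === 307 && res.headers.get('location') === expected
  console.log(`${path}: ${ok ? 'ok' : `unexpected ${res.status} -> ${res.headers.get('location')}`}`)
}
```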