Mirror of https://github.com/simstudioai/sim.git (synced 2026-01-15 01:47:59 -05:00)

Compare commits: fix/webhoo...staging (11 commits)

| SHA1 |
|---|
| d4c171c6d7 |
| 26d0799d22 |
| 45bd1e8cd7 |
| 85d6e3e3bd |
| ccf268595e |
| 5eca660c5c |
| 3db9ad2d95 |
| 4195cfe1ff |
| 212933746e |
| 5af72ea22f |
| 4899c28421 |

@@ -351,14 +351,16 @@ Enables AI-assisted field generation.

## Tools Configuration

### Simple Tool Selector

**Preferred:** Use tool names directly as dropdown option IDs to avoid switch cases:

```typescript
tools: {
  access: ['service_create', 'service_read', 'service_update'],
  config: {
    tool: (params) => `service_${params.operation}`,
  },
}

// Dropdown options use tool IDs directly
options: [
  { label: 'Create', id: 'service_create' },
  { label: 'Read', id: 'service_read' },
]

// Tool selector just returns the operation value
tool: (params) => params.operation,
```
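
For contrast, here is a minimal sketch of the switch-based selector that this convention avoids; the `service_*` tool names and operation values are the same illustrative placeholders used above, not a real integration:

```typescript
// Illustrative only: mapping dropdown options to tools by hand.
// Every new operation requires another case here, which the ID-as-tool-name
// convention above makes unnecessary.
function selectTool(params: { operation: 'create' | 'read' | 'update' }): string {
  switch (params.operation) {
    case 'create':
      return 'service_create'
    case 'read':
      return 'service_read'
    case 'update':
      return 'service_update'
    default:
      throw new Error(`Unknown operation: ${params.operation}`)
  }
}
```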

### With Parameter Transformation

@@ -359,15 +359,6 @@ function SignupFormContent({
        }
      }

      try {
        await client.emailOtp.sendVerificationOtp({
          email: emailValue,
          type: 'sign-in',
        })
      } catch (otpErr) {
        logger.warn('Failed to send sign-in OTP after signup; user can press Resend', otpErr)
      }

      router.push('/verify?fromSignup=true')
    } catch (error) {
      logger.error('Signup error:', error)

@@ -93,7 +93,7 @@ export function useVerification({

    try {
      const normalizedEmail = email.trim().toLowerCase()
      const response = await client.signIn.emailOtp({
      const response = await client.emailOtp.verifyEmail({
        email: normalizedEmail,
        otp,
      })

@@ -169,7 +169,7 @@ export function useVerification({
    client.emailOtp
      .sendVerificationOtp({
        email: normalizedEmail,
        type: 'sign-in',
        type: 'email-verification',
      })
      .then(() => {})
      .catch(() => {

@@ -52,6 +52,9 @@ const ChatMessageSchema = z.object({
      'gpt-5.1-high',
      'gpt-5-codex',
      'gpt-5.1-codex',
      'gpt-5.2',
      'gpt-5.2-codex',
      'gpt-5.2-pro',
      'gpt-4o',
      'gpt-4.1',
      'o3',

@@ -15,11 +15,14 @@ const DEFAULT_ENABLED_MODELS: Record<string, boolean> = {
  'gpt-5-medium': false,
  'gpt-5-high': false,
  'gpt-5.1-fast': false,
  'gpt-5.1': true,
  'gpt-5.1-medium': true,
  'gpt-5.1': false,
  'gpt-5.1-medium': false,
  'gpt-5.1-high': false,
  'gpt-5-codex': false,
  'gpt-5.1-codex': true,
  'gpt-5.1-codex': false,
  'gpt-5.2': false,
  'gpt-5.2-codex': true,
  'gpt-5.2-pro': true,
  o3: true,
  'claude-4-sonnet': false,
  'claude-4.5-haiku': true,

@@ -15,6 +15,7 @@ import {
  renderPlanWelcomeEmail,
  renderUsageThresholdEmail,
  renderWelcomeEmail,
  renderWorkflowNotificationEmail,
  renderWorkspaceInvitationEmail,
} from '@/components/emails'

@@ -108,6 +109,51 @@ const emailTemplates = {
    message:
      'I have 10 years of experience building scalable distributed systems. Most recently, I led a team at a Series B startup where we scaled from 100K to 10M users.',
  }),

  // Notification emails
  'workflow-notification-success': () =>
    renderWorkflowNotificationEmail({
      workflowName: 'Customer Onboarding Flow',
      status: 'success',
      trigger: 'api',
      duration: '2.3s',
      cost: '$0.0042',
      logUrl: 'https://sim.ai/workspace/ws_123/logs?search=exec_abc123',
    }),
  'workflow-notification-error': () =>
    renderWorkflowNotificationEmail({
      workflowName: 'Customer Onboarding Flow',
      status: 'error',
      trigger: 'webhook',
      duration: '1.1s',
      cost: '$0.0021',
      logUrl: 'https://sim.ai/workspace/ws_123/logs?search=exec_abc123',
    }),
  'workflow-notification-alert': () =>
    renderWorkflowNotificationEmail({
      workflowName: 'Customer Onboarding Flow',
      status: 'error',
      trigger: 'schedule',
      duration: '45.2s',
      cost: '$0.0156',
      logUrl: 'https://sim.ai/workspace/ws_123/logs?search=exec_abc123',
      alertReason: '3 consecutive failures detected',
    }),
  'workflow-notification-full': () =>
    renderWorkflowNotificationEmail({
      workflowName: 'Data Processing Pipeline',
      status: 'success',
      trigger: 'api',
      duration: '12.5s',
      cost: '$0.0234',
      logUrl: 'https://sim.ai/workspace/ws_123/logs?search=exec_abc123',
      finalOutput: { processed: 150, skipped: 3, status: 'completed' },
      rateLimits: {
        sync: { requestsPerMinute: 60, remaining: 45 },
        async: { requestsPerMinute: 120, remaining: 98 },
      },
      usageData: { currentPeriodCost: 12.45, limit: 50, percentUsed: 24.9 },
    }),
} as const

type EmailTemplate = keyof typeof emailTemplates

@@ -131,6 +177,12 @@ export async function GET(request: NextRequest) {
      'payment-failed',
    ],
    Careers: ['careers-confirmation', 'careers-submission'],
    Notifications: [
      'workflow-notification-success',
      'workflow-notification-error',
      'workflow-notification-alert',
      'workflow-notification-full',
    ],
  }

  const categoryHtml = Object.entries(categories)

apps/sim/app/api/logs/stats/route.ts (new file, 293 lines)
@@ -0,0 +1,293 @@
import { db } from '@sim/db'
import { permissions, workflow, workflowExecutionLogs } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, sql } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { generateRequestId } from '@/lib/core/utils/request'
import { buildFilterConditions, LogFilterParamsSchema } from '@/lib/logs/filters'

const logger = createLogger('LogsStatsAPI')

export const revalidate = 0

const StatsQueryParamsSchema = LogFilterParamsSchema.extend({
  segmentCount: z.coerce.number().optional().default(72),
})

export interface SegmentStats {
  timestamp: string
  totalExecutions: number
  successfulExecutions: number
  avgDurationMs: number
}

export interface WorkflowStats {
  workflowId: string
  workflowName: string
  segments: SegmentStats[]
  overallSuccessRate: number
  totalExecutions: number
  totalSuccessful: number
}

export interface DashboardStatsResponse {
  workflows: WorkflowStats[]
  aggregateSegments: SegmentStats[]
  totalRuns: number
  totalErrors: number
  avgLatency: number
  timeBounds: {
    start: string
    end: string
  }
  segmentMs: number
}

export async function GET(request: NextRequest) {
  const requestId = generateRequestId()

  try {
    const session = await getSession()
    if (!session?.user?.id) {
      logger.warn(`[${requestId}] Unauthorized logs stats access attempt`)
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const userId = session.user.id

    try {
      const { searchParams } = new URL(request.url)
      const params = StatsQueryParamsSchema.parse(Object.fromEntries(searchParams.entries()))

      const workspaceFilter = eq(workflowExecutionLogs.workspaceId, params.workspaceId)

      const commonFilters = buildFilterConditions(params, { useSimpleLevelFilter: true })
      const whereCondition = commonFilters ? and(workspaceFilter, commonFilters) : workspaceFilter

      const boundsQuery = await db
        .select({
          minTime: sql<string>`MIN(${workflowExecutionLogs.startedAt})`,
          maxTime: sql<string>`MAX(${workflowExecutionLogs.startedAt})`,
        })
        .from(workflowExecutionLogs)
        .innerJoin(workflow, eq(workflowExecutionLogs.workflowId, workflow.id))
        .innerJoin(
          permissions,
          and(
            eq(permissions.entityType, 'workspace'),
            eq(permissions.entityId, workflowExecutionLogs.workspaceId),
            eq(permissions.userId, userId)
          )
        )
        .where(whereCondition)

      const bounds = boundsQuery[0]
      const now = new Date()

      let startTime: Date
      let endTime: Date

      if (!bounds?.minTime || !bounds?.maxTime) {
        endTime = now
        startTime = new Date(now.getTime() - 24 * 60 * 60 * 1000)
      } else {
        startTime = new Date(bounds.minTime)
        endTime = new Date(Math.max(new Date(bounds.maxTime).getTime(), now.getTime()))
      }

      const totalMs = Math.max(1, endTime.getTime() - startTime.getTime())
      const segmentMs = Math.max(60000, Math.floor(totalMs / params.segmentCount))
      const startTimeIso = startTime.toISOString()

      const statsQuery = await db
        .select({
          workflowId: workflowExecutionLogs.workflowId,
          workflowName: workflow.name,
          segmentIndex:
            sql<number>`FLOOR(EXTRACT(EPOCH FROM (${workflowExecutionLogs.startedAt} - ${startTimeIso}::timestamp)) * 1000 / ${segmentMs})`.as(
              'segment_index'
            ),
          totalExecutions: sql<number>`COUNT(*)`.as('total_executions'),
          successfulExecutions:
            sql<number>`COUNT(*) FILTER (WHERE ${workflowExecutionLogs.level} != 'error')`.as(
              'successful_executions'
            ),
          avgDurationMs:
            sql<number>`COALESCE(AVG(${workflowExecutionLogs.totalDurationMs}) FILTER (WHERE ${workflowExecutionLogs.totalDurationMs} > 0), 0)`.as(
              'avg_duration_ms'
            ),
        })
        .from(workflowExecutionLogs)
        .innerJoin(workflow, eq(workflowExecutionLogs.workflowId, workflow.id))
        .innerJoin(
          permissions,
          and(
            eq(permissions.entityType, 'workspace'),
            eq(permissions.entityId, workflowExecutionLogs.workspaceId),
            eq(permissions.userId, userId)
          )
        )
        .where(whereCondition)
        .groupBy(workflowExecutionLogs.workflowId, workflow.name, sql`segment_index`)

      const workflowMap = new Map<
        string,
        {
          workflowId: string
          workflowName: string
          segments: Map<number, SegmentStats>
          totalExecutions: number
          totalSuccessful: number
        }
      >()

      for (const row of statsQuery) {
        const segmentIndex = Math.min(
          params.segmentCount - 1,
          Math.max(0, Math.floor(Number(row.segmentIndex)))
        )

        if (!workflowMap.has(row.workflowId)) {
          workflowMap.set(row.workflowId, {
            workflowId: row.workflowId,
            workflowName: row.workflowName,
            segments: new Map(),
            totalExecutions: 0,
            totalSuccessful: 0,
          })
        }

        const wf = workflowMap.get(row.workflowId)!
        wf.totalExecutions += Number(row.totalExecutions)
        wf.totalSuccessful += Number(row.successfulExecutions)

        const existing = wf.segments.get(segmentIndex)
        if (existing) {
          const oldTotal = existing.totalExecutions
          const newTotal = oldTotal + Number(row.totalExecutions)
          existing.totalExecutions = newTotal
          existing.successfulExecutions += Number(row.successfulExecutions)
          existing.avgDurationMs =
            newTotal > 0
              ? (existing.avgDurationMs * oldTotal +
                  Number(row.avgDurationMs || 0) * Number(row.totalExecutions)) /
                newTotal
              : 0
        } else {
          wf.segments.set(segmentIndex, {
            timestamp: new Date(startTime.getTime() + segmentIndex * segmentMs).toISOString(),
            totalExecutions: Number(row.totalExecutions),
            successfulExecutions: Number(row.successfulExecutions),
            avgDurationMs: Number(row.avgDurationMs || 0),
          })
        }
      }

      const workflows: WorkflowStats[] = []
      for (const wf of workflowMap.values()) {
        const segments: SegmentStats[] = []
        for (let i = 0; i < params.segmentCount; i++) {
          const existing = wf.segments.get(i)
          if (existing) {
            segments.push(existing)
          } else {
            segments.push({
              timestamp: new Date(startTime.getTime() + i * segmentMs).toISOString(),
              totalExecutions: 0,
              successfulExecutions: 0,
              avgDurationMs: 0,
            })
          }
        }

        workflows.push({
          workflowId: wf.workflowId,
          workflowName: wf.workflowName,
          segments,
          totalExecutions: wf.totalExecutions,
          totalSuccessful: wf.totalSuccessful,
          overallSuccessRate:
            wf.totalExecutions > 0 ? (wf.totalSuccessful / wf.totalExecutions) * 100 : 100,
        })
      }

      workflows.sort((a, b) => {
        const errA = a.overallSuccessRate < 100 ? 1 - a.overallSuccessRate / 100 : 0
        const errB = b.overallSuccessRate < 100 ? 1 - b.overallSuccessRate / 100 : 0
        if (errA !== errB) return errB - errA
        return a.workflowName.localeCompare(b.workflowName)
      })

      const aggregateSegments: SegmentStats[] = []
      let totalRuns = 0
      let totalErrors = 0
      let weightedLatencySum = 0
      let latencyCount = 0

      for (let i = 0; i < params.segmentCount; i++) {
        let segTotal = 0
        let segSuccess = 0
        let segWeightedLatency = 0
        let segLatencyCount = 0

        for (const wf of workflows) {
          const seg = wf.segments[i]
          segTotal += seg.totalExecutions
          segSuccess += seg.successfulExecutions
          if (seg.avgDurationMs > 0 && seg.totalExecutions > 0) {
            segWeightedLatency += seg.avgDurationMs * seg.totalExecutions
            segLatencyCount += seg.totalExecutions
          }
        }

        totalRuns += segTotal
        totalErrors += segTotal - segSuccess
        weightedLatencySum += segWeightedLatency
        latencyCount += segLatencyCount

        aggregateSegments.push({
          timestamp: new Date(startTime.getTime() + i * segmentMs).toISOString(),
          totalExecutions: segTotal,
          successfulExecutions: segSuccess,
          avgDurationMs: segLatencyCount > 0 ? segWeightedLatency / segLatencyCount : 0,
        })
      }

      const avgLatency = latencyCount > 0 ? weightedLatencySum / latencyCount : 0

      const response: DashboardStatsResponse = {
        workflows,
        aggregateSegments,
        totalRuns,
        totalErrors,
        avgLatency,
        timeBounds: {
          start: startTime.toISOString(),
          end: endTime.toISOString(),
        },
        segmentMs,
      }

      return NextResponse.json(response, { status: 200 })
    } catch (validationError) {
      if (validationError instanceof z.ZodError) {
        logger.warn(`[${requestId}] Invalid logs stats request parameters`, {
          errors: validationError.errors,
        })
        return NextResponse.json(
          {
            error: 'Invalid request parameters',
            details: validationError.errors,
          },
          { status: 400 }
        )
      }
      throw validationError
    }
  } catch (error: any) {
    logger.error(`[${requestId}] logs stats fetch error`, error)
    return NextResponse.json({ error: error.message }, { status: 500 })
  }
}
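
As a hedged sketch of how a dashboard might consume this new endpoint: the `/api/logs/stats` URL and the `@/app/api/logs/stats/route` import path are inferred from the file location above, and `workspaceId` / `segmentCount` are the only query parameters visible in the schema (other filter fields come from `LogFilterParamsSchema` and are not shown in this diff):

```typescript
// Illustrative client-side helper, not part of the change set above.
import type { DashboardStatsResponse } from '@/app/api/logs/stats/route'

export async function fetchDashboardStats(
  workspaceId: string,
  segmentCount = 72
): Promise<DashboardStatsResponse> {
  // The route validates these with StatsQueryParamsSchema and returns 400 on bad input.
  const query = new URLSearchParams({ workspaceId, segmentCount: String(segmentCount) })
  const res = await fetch(`/api/logs/stats?${query.toString()}`)
  if (!res.ok) {
    throw new Error(`Stats request failed with status ${res.status}`)
  }
  return (await res.json()) as DashboardStatsResponse
}

// Example: overall error rate across the returned window.
// const stats = await fetchDashboardStats('ws_123')
// const errorRate = stats.totalRuns > 0 ? stats.totalErrors / stats.totalRuns : 0
```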

@@ -7,11 +7,6 @@ import { getSession } from '@/lib/auth'
import { validateInteger } from '@/lib/core/security/input-validation'
import { PlatformEvents } from '@/lib/core/telemetry'
import { generateRequestId } from '@/lib/core/utils/request'
import {
  cleanupExternalWebhook,
  createExternalWebhookSubscription,
  shouldRecreateExternalWebhookSubscription,
} from '@/lib/webhooks/provider-subscriptions'
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'

const logger = createLogger('WebhookAPI')

@@ -182,46 +177,6 @@ export async function PATCH(request: NextRequest, { params }: { params: Promise<
      return NextResponse.json({ error: 'Access denied' }, { status: 403 })
    }

    const existingProviderConfig =
      (webhookData.webhook.providerConfig as Record<string, unknown>) || {}
    let nextProviderConfig =
      providerConfig !== undefined &&
      resolvedProviderConfig &&
      typeof resolvedProviderConfig === 'object'
        ? (resolvedProviderConfig as Record<string, unknown>)
        : existingProviderConfig
    const nextProvider = (provider ?? webhookData.webhook.provider) as string

    if (
      providerConfig !== undefined &&
      shouldRecreateExternalWebhookSubscription({
        previousProvider: webhookData.webhook.provider as string,
        nextProvider,
        previousConfig: existingProviderConfig,
        nextConfig: nextProviderConfig,
      })
    ) {
      await cleanupExternalWebhook(
        { ...webhookData.webhook, providerConfig: existingProviderConfig },
        webhookData.workflow,
        requestId
      )

      const result = await createExternalWebhookSubscription(
        request,
        {
          ...webhookData.webhook,
          provider: nextProvider,
          providerConfig: nextProviderConfig,
        },
        webhookData.workflow,
        session.user.id,
        requestId
      )

      nextProviderConfig = result.updatedProviderConfig as Record<string, unknown>
    }

    logger.debug(`[${requestId}] Updating webhook properties`, {
      hasPathUpdate: path !== undefined,
      hasProviderUpdate: provider !== undefined,

@@ -233,16 +188,16 @@ export async function PATCH(request: NextRequest, { params }: { params: Promise<
    // Merge providerConfig to preserve credential-related fields
    let finalProviderConfig = webhooks[0].webhook.providerConfig
    if (providerConfig !== undefined) {
      const existingConfig = existingProviderConfig
      const existingConfig = (webhooks[0].webhook.providerConfig as Record<string, unknown>) || {}
      finalProviderConfig = {
        ...nextProviderConfig,
        ...resolvedProviderConfig,
        credentialId: existingConfig.credentialId,
        credentialSetId: existingConfig.credentialSetId,
        userId: existingConfig.userId,
        historyId: existingConfig.historyId,
        lastCheckedTimestamp: existingConfig.lastCheckedTimestamp,
        setupCompleted: existingConfig.setupCompleted,
        externalId: nextProviderConfig.externalId ?? existingConfig.externalId,
        externalId: existingConfig.externalId,
      }
    }
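
The merge above relies on spread precedence: the incoming provider config is spread first and the credential-related keys are pinned afterwards, so a PATCH cannot clobber them. A minimal standalone sketch of that precedence (the variable names here are illustrative, not the route's actual identifiers):

```typescript
// Later keys win in an object spread, so pinning credential fields after the
// spread guarantees they survive any incoming providerConfig update.
const existing = { credentialId: 'cred_1', historyId: '42', label: 'old' }
const incoming = { label: 'new', credentialId: 'incoming-value' }

const merged = {
  ...incoming,
  credentialId: existing.credentialId, // always preserved
  historyId: existing.historyId,
}
// merged = { label: 'new', credentialId: 'cred_1', historyId: '42' }
```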

@@ -7,8 +7,9 @@ import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { PlatformEvents } from '@/lib/core/telemetry'
import { generateRequestId } from '@/lib/core/utils/request'
import { createExternalWebhookSubscription } from '@/lib/webhooks/provider-subscriptions'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
import { getOAuthToken } from '@/app/api/auth/oauth/utils'

const logger = createLogger('WebhooksAPI')

@@ -256,7 +257,7 @@ export async function POST(request: NextRequest) {
    const finalProviderConfig = providerConfig || {}

    const { resolveEnvVarsInObject } = await import('@/lib/webhooks/env-resolver')
    let resolvedProviderConfig = await resolveEnvVarsInObject(
    const resolvedProviderConfig = await resolveEnvVarsInObject(
      finalProviderConfig,
      userId,
      workflowRecord.workspaceId || undefined
@@ -413,33 +414,149 @@ export async function POST(request: NextRequest) {
|
||||
}
|
||||
// --- End Credential Set Handling ---
|
||||
|
||||
// Create external subscriptions before saving to DB to prevent orphaned records
|
||||
let externalSubscriptionId: string | undefined
|
||||
let externalSubscriptionCreated = false
|
||||
const createTempWebhookData = (providerConfigOverride = resolvedProviderConfig) => ({
|
||||
|
||||
const createTempWebhookData = () => ({
|
||||
id: targetWebhookId || nanoid(),
|
||||
path: finalPath,
|
||||
provider,
|
||||
providerConfig: providerConfigOverride,
|
||||
providerConfig: resolvedProviderConfig,
|
||||
})
|
||||
|
||||
try {
|
||||
const result = await createExternalWebhookSubscription(
|
||||
request,
|
||||
createTempWebhookData(),
|
||||
workflowRecord,
|
||||
userId,
|
||||
requestId
|
||||
)
|
||||
resolvedProviderConfig = result.updatedProviderConfig as Record<string, unknown>
|
||||
externalSubscriptionCreated = result.externalSubscriptionCreated
|
||||
} catch (err) {
|
||||
logger.error(`[${requestId}] Error creating external webhook subscription`, err)
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: 'Failed to create external webhook subscription',
|
||||
details: err instanceof Error ? err.message : 'Unknown error',
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
if (provider === 'airtable') {
|
||||
logger.info(`[${requestId}] Creating Airtable subscription before saving to database`)
|
||||
try {
|
||||
externalSubscriptionId = await createAirtableWebhookSubscription(
|
||||
request,
|
||||
userId,
|
||||
createTempWebhookData(),
|
||||
requestId
|
||||
)
|
||||
if (externalSubscriptionId) {
|
||||
resolvedProviderConfig.externalId = externalSubscriptionId
|
||||
externalSubscriptionCreated = true
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error(`[${requestId}] Error creating Airtable webhook subscription`, err)
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: 'Failed to create webhook in Airtable',
|
||||
details: err instanceof Error ? err.message : 'Unknown error',
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
if (provider === 'calendly') {
|
||||
logger.info(`[${requestId}] Creating Calendly subscription before saving to database`)
|
||||
try {
|
||||
externalSubscriptionId = await createCalendlyWebhookSubscription(
|
||||
request,
|
||||
userId,
|
||||
createTempWebhookData(),
|
||||
requestId
|
||||
)
|
||||
if (externalSubscriptionId) {
|
||||
resolvedProviderConfig.externalId = externalSubscriptionId
|
||||
externalSubscriptionCreated = true
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error(`[${requestId}] Error creating Calendly webhook subscription`, err)
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: 'Failed to create webhook in Calendly',
|
||||
details: err instanceof Error ? err.message : 'Unknown error',
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
if (provider === 'microsoft-teams') {
|
||||
const { createTeamsSubscription } = await import('@/lib/webhooks/provider-subscriptions')
|
||||
logger.info(`[${requestId}] Creating Teams subscription before saving to database`)
|
||||
try {
|
||||
await createTeamsSubscription(request, createTempWebhookData(), workflowRecord, requestId)
|
||||
externalSubscriptionCreated = true
|
||||
} catch (err) {
|
||||
logger.error(`[${requestId}] Error creating Teams subscription`, err)
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: 'Failed to create Teams subscription',
|
||||
details: err instanceof Error ? err.message : 'Unknown error',
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
if (provider === 'telegram') {
|
||||
const { createTelegramWebhook } = await import('@/lib/webhooks/provider-subscriptions')
|
||||
logger.info(`[${requestId}] Creating Telegram webhook before saving to database`)
|
||||
try {
|
||||
await createTelegramWebhook(request, createTempWebhookData(), requestId)
|
||||
externalSubscriptionCreated = true
|
||||
} catch (err) {
|
||||
logger.error(`[${requestId}] Error creating Telegram webhook`, err)
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: 'Failed to create Telegram webhook',
|
||||
details: err instanceof Error ? err.message : 'Unknown error',
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
if (provider === 'webflow') {
|
||||
logger.info(`[${requestId}] Creating Webflow subscription before saving to database`)
|
||||
try {
|
||||
externalSubscriptionId = await createWebflowWebhookSubscription(
|
||||
request,
|
||||
userId,
|
||||
createTempWebhookData(),
|
||||
requestId
|
||||
)
|
||||
if (externalSubscriptionId) {
|
||||
resolvedProviderConfig.externalId = externalSubscriptionId
|
||||
externalSubscriptionCreated = true
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error(`[${requestId}] Error creating Webflow webhook subscription`, err)
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: 'Failed to create webhook in Webflow',
|
||||
details: err instanceof Error ? err.message : 'Unknown error',
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
if (provider === 'typeform') {
|
||||
const { createTypeformWebhook } = await import('@/lib/webhooks/provider-subscriptions')
|
||||
logger.info(`[${requestId}] Creating Typeform webhook before saving to database`)
|
||||
try {
|
||||
const usedTag = await createTypeformWebhook(request, createTempWebhookData(), requestId)
|
||||
|
||||
if (!resolvedProviderConfig.webhookTag) {
|
||||
resolvedProviderConfig.webhookTag = usedTag
|
||||
logger.info(`[${requestId}] Stored auto-generated webhook tag: ${usedTag}`)
|
||||
}
|
||||
|
||||
externalSubscriptionCreated = true
|
||||
} catch (err) {
|
||||
logger.error(`[${requestId}] Error creating Typeform webhook`, err)
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: 'Failed to create webhook in Typeform',
|
||||
details: err instanceof Error ? err.message : 'Unknown error',
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
// Now save to database (only if subscription succeeded or provider doesn't need external subscription)
|
||||
@@ -500,11 +617,7 @@ export async function POST(request: NextRequest) {
|
||||
logger.error(`[${requestId}] DB save failed, cleaning up external subscription`, dbError)
|
||||
try {
|
||||
const { cleanupExternalWebhook } = await import('@/lib/webhooks/provider-subscriptions')
|
||||
await cleanupExternalWebhook(
|
||||
createTempWebhookData(resolvedProviderConfig),
|
||||
workflowRecord,
|
||||
requestId
|
||||
)
|
||||
await cleanupExternalWebhook(createTempWebhookData(), workflowRecord, requestId)
|
||||
} catch (cleanupError) {
|
||||
logger.error(
|
||||
`[${requestId}] Failed to cleanup external subscription after DB save failure`,
|
||||
@@ -628,6 +741,110 @@ export async function POST(request: NextRequest) {
|
||||
}
|
||||
// --- End RSS specific logic ---
|
||||
|
||||
if (savedWebhook && provider === 'grain') {
|
||||
logger.info(`[${requestId}] Grain provider detected. Creating Grain webhook subscription.`)
|
||||
try {
|
||||
const grainResult = await createGrainWebhookSubscription(
|
||||
request,
|
||||
{
|
||||
id: savedWebhook.id,
|
||||
path: savedWebhook.path,
|
||||
providerConfig: savedWebhook.providerConfig,
|
||||
},
|
||||
requestId
|
||||
)
|
||||
|
||||
if (grainResult) {
|
||||
// Update the webhook record with the external Grain hook ID and event types for filtering
|
||||
const updatedConfig = {
|
||||
...(savedWebhook.providerConfig as Record<string, any>),
|
||||
externalId: grainResult.id,
|
||||
eventTypes: grainResult.eventTypes,
|
||||
}
|
||||
await db
|
||||
.update(webhook)
|
||||
.set({
|
||||
providerConfig: updatedConfig,
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where(eq(webhook.id, savedWebhook.id))
|
||||
|
||||
savedWebhook.providerConfig = updatedConfig
|
||||
logger.info(`[${requestId}] Successfully created Grain webhook`, {
|
||||
grainHookId: grainResult.id,
|
||||
eventTypes: grainResult.eventTypes,
|
||||
webhookId: savedWebhook.id,
|
||||
})
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error(
|
||||
`[${requestId}] Error creating Grain webhook subscription, rolling back webhook`,
|
||||
err
|
||||
)
|
||||
await db.delete(webhook).where(eq(webhook.id, savedWebhook.id))
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: 'Failed to create webhook in Grain',
|
||||
details: err instanceof Error ? err.message : 'Unknown error',
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
// --- End Grain specific logic ---
|
||||
|
||||
// --- Lemlist specific logic ---
|
||||
if (savedWebhook && provider === 'lemlist') {
|
||||
logger.info(
|
||||
`[${requestId}] Lemlist provider detected. Creating Lemlist webhook subscription.`
|
||||
)
|
||||
try {
|
||||
const lemlistResult = await createLemlistWebhookSubscription(
|
||||
{
|
||||
id: savedWebhook.id,
|
||||
path: savedWebhook.path,
|
||||
providerConfig: savedWebhook.providerConfig,
|
||||
},
|
||||
requestId
|
||||
)
|
||||
|
||||
if (lemlistResult) {
|
||||
// Update the webhook record with the external Lemlist hook ID
|
||||
const updatedConfig = {
|
||||
...(savedWebhook.providerConfig as Record<string, any>),
|
||||
externalId: lemlistResult.id,
|
||||
}
|
||||
await db
|
||||
.update(webhook)
|
||||
.set({
|
||||
providerConfig: updatedConfig,
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where(eq(webhook.id, savedWebhook.id))
|
||||
|
||||
savedWebhook.providerConfig = updatedConfig
|
||||
logger.info(`[${requestId}] Successfully created Lemlist webhook`, {
|
||||
lemlistHookId: lemlistResult.id,
|
||||
webhookId: savedWebhook.id,
|
||||
})
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error(
|
||||
`[${requestId}] Error creating Lemlist webhook subscription, rolling back webhook`,
|
||||
err
|
||||
)
|
||||
await db.delete(webhook).where(eq(webhook.id, savedWebhook.id))
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: 'Failed to create webhook in Lemlist',
|
||||
details: err instanceof Error ? err.message : 'Unknown error',
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
// --- End Lemlist specific logic ---
|
||||
|
||||
if (!targetWebhookId && savedWebhook) {
|
||||
try {
|
||||
PlatformEvents.webhookCreated({
|
||||
@@ -651,3 +868,616 @@ export async function POST(request: NextRequest) {
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
|
||||
// Helper function to create the webhook subscription in Airtable
|
||||
async function createAirtableWebhookSubscription(
|
||||
request: NextRequest,
|
||||
userId: string,
|
||||
webhookData: any,
|
||||
requestId: string
|
||||
): Promise<string | undefined> {
|
||||
try {
|
||||
const { path, providerConfig } = webhookData
|
||||
const { baseId, tableId, includeCellValuesInFieldIds } = providerConfig || {}
|
||||
|
||||
if (!baseId || !tableId) {
|
||||
logger.warn(`[${requestId}] Missing baseId or tableId for Airtable webhook creation.`, {
|
||||
webhookId: webhookData.id,
|
||||
})
|
||||
throw new Error(
|
||||
'Base ID and Table ID are required to create Airtable webhook. Please provide valid Airtable base and table IDs.'
|
||||
)
|
||||
}
|
||||
|
||||
const accessToken = await getOAuthToken(userId, 'airtable')
|
||||
if (!accessToken) {
|
||||
logger.warn(
|
||||
`[${requestId}] Could not retrieve Airtable access token for user ${userId}. Cannot create webhook in Airtable.`
|
||||
)
|
||||
throw new Error(
|
||||
'Airtable account connection required. Please connect your Airtable account in the trigger configuration and try again.'
|
||||
)
|
||||
}
|
||||
|
||||
const notificationUrl = `${getBaseUrl()}/api/webhooks/trigger/${path}`
|
||||
|
||||
const airtableApiUrl = `https://api.airtable.com/v0/bases/${baseId}/webhooks`
|
||||
|
||||
const specification: any = {
|
||||
options: {
|
||||
filters: {
|
||||
dataTypes: ['tableData'], // Watch table data changes
|
||||
recordChangeScope: tableId, // Watch only the specified table
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
// Conditionally add the 'includes' field based on the config
|
||||
if (includeCellValuesInFieldIds === 'all') {
|
||||
specification.options.includes = {
|
||||
includeCellValuesInFieldIds: 'all',
|
||||
}
|
||||
}
|
||||
|
||||
const requestBody: any = {
|
||||
notificationUrl: notificationUrl,
|
||||
specification: specification,
|
||||
}
|
||||
|
||||
const airtableResponse = await fetch(airtableApiUrl, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify(requestBody),
|
||||
})
|
||||
|
||||
// Airtable often returns 200 OK even for errors in the body, check payload
|
||||
const responseBody = await airtableResponse.json()
|
||||
|
||||
if (!airtableResponse.ok || responseBody.error) {
|
||||
const errorMessage =
|
||||
responseBody.error?.message || responseBody.error || 'Unknown Airtable API error'
|
||||
const errorType = responseBody.error?.type
|
||||
logger.error(
|
||||
`[${requestId}] Failed to create webhook in Airtable for webhook ${webhookData.id}. Status: ${airtableResponse.status}`,
|
||||
{ type: errorType, message: errorMessage, response: responseBody }
|
||||
)
|
||||
|
||||
let userFriendlyMessage = 'Failed to create webhook subscription in Airtable'
|
||||
if (airtableResponse.status === 404) {
|
||||
userFriendlyMessage =
|
||||
'Airtable base or table not found. Please verify that the Base ID and Table ID are correct and that you have access to them.'
|
||||
} else if (errorMessage && errorMessage !== 'Unknown Airtable API error') {
|
||||
userFriendlyMessage = `Airtable error: ${errorMessage}`
|
||||
}
|
||||
|
||||
throw new Error(userFriendlyMessage)
|
||||
}
|
||||
logger.info(
|
||||
`[${requestId}] Successfully created webhook in Airtable for webhook ${webhookData.id}.`,
|
||||
{
|
||||
airtableWebhookId: responseBody.id,
|
||||
}
|
||||
)
|
||||
return responseBody.id
|
||||
} catch (error: any) {
|
||||
logger.error(
|
||||
`[${requestId}] Exception during Airtable webhook creation for webhook ${webhookData.id}.`,
|
||||
{
|
||||
message: error.message,
|
||||
stack: error.stack,
|
||||
}
|
||||
)
|
||||
// Re-throw the error so it can be caught by the outer try-catch
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
// Helper function to create the webhook subscription in Calendly
|
||||
async function createCalendlyWebhookSubscription(
|
||||
request: NextRequest,
|
||||
userId: string,
|
||||
webhookData: any,
|
||||
requestId: string
|
||||
): Promise<string | undefined> {
|
||||
try {
|
||||
const { path, providerConfig } = webhookData
|
||||
const { apiKey, organization, triggerId } = providerConfig || {}
|
||||
|
||||
if (!apiKey) {
|
||||
logger.warn(`[${requestId}] Missing apiKey for Calendly webhook creation.`, {
|
||||
webhookId: webhookData.id,
|
||||
})
|
||||
throw new Error(
|
||||
'Personal Access Token is required to create Calendly webhook. Please provide your Calendly Personal Access Token.'
|
||||
)
|
||||
}
|
||||
|
||||
if (!organization) {
|
||||
logger.warn(`[${requestId}] Missing organization URI for Calendly webhook creation.`, {
|
||||
webhookId: webhookData.id,
|
||||
})
|
||||
throw new Error(
|
||||
'Organization URI is required to create Calendly webhook. Please provide your Organization URI from the "Get Current User" operation.'
|
||||
)
|
||||
}
|
||||
|
||||
if (!triggerId) {
|
||||
logger.warn(`[${requestId}] Missing triggerId for Calendly webhook creation.`, {
|
||||
webhookId: webhookData.id,
|
||||
})
|
||||
throw new Error('Trigger ID is required to create Calendly webhook')
|
||||
}
|
||||
|
||||
const notificationUrl = `${getBaseUrl()}/api/webhooks/trigger/${path}`
|
||||
|
||||
// Map trigger IDs to Calendly event types
|
||||
const eventTypeMap: Record<string, string[]> = {
|
||||
calendly_invitee_created: ['invitee.created'],
|
||||
calendly_invitee_canceled: ['invitee.canceled'],
|
||||
calendly_routing_form_submitted: ['routing_form_submission.created'],
|
||||
calendly_webhook: ['invitee.created', 'invitee.canceled', 'routing_form_submission.created'],
|
||||
}
|
||||
|
||||
const events = eventTypeMap[triggerId] || ['invitee.created']
|
||||
|
||||
const calendlyApiUrl = 'https://api.calendly.com/webhook_subscriptions'
|
||||
|
||||
const requestBody = {
|
||||
url: notificationUrl,
|
||||
events,
|
||||
organization,
|
||||
scope: 'organization',
|
||||
}
|
||||
|
||||
const calendlyResponse = await fetch(calendlyApiUrl, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
Authorization: `Bearer ${apiKey}`,
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify(requestBody),
|
||||
})
|
||||
|
||||
if (!calendlyResponse.ok) {
|
||||
const errorBody = await calendlyResponse.json().catch(() => ({}))
|
||||
const errorMessage = errorBody.message || errorBody.title || 'Unknown Calendly API error'
|
||||
logger.error(
|
||||
`[${requestId}] Failed to create webhook in Calendly for webhook ${webhookData.id}. Status: ${calendlyResponse.status}`,
|
||||
{ response: errorBody }
|
||||
)
|
||||
|
||||
let userFriendlyMessage = 'Failed to create webhook subscription in Calendly'
|
||||
if (calendlyResponse.status === 401) {
|
||||
userFriendlyMessage =
|
||||
'Calendly authentication failed. Please verify your Personal Access Token is correct.'
|
||||
} else if (calendlyResponse.status === 403) {
|
||||
userFriendlyMessage =
|
||||
'Calendly access denied. Please ensure you have appropriate permissions and a paid Calendly subscription.'
|
||||
} else if (calendlyResponse.status === 404) {
|
||||
userFriendlyMessage =
|
||||
'Calendly organization not found. Please verify the Organization URI is correct.'
|
||||
} else if (errorMessage && errorMessage !== 'Unknown Calendly API error') {
|
||||
userFriendlyMessage = `Calendly error: ${errorMessage}`
|
||||
}
|
||||
|
||||
throw new Error(userFriendlyMessage)
|
||||
}
|
||||
|
||||
const responseBody = await calendlyResponse.json()
|
||||
const webhookUri = responseBody.resource?.uri
|
||||
|
||||
if (!webhookUri) {
|
||||
logger.error(
|
||||
`[${requestId}] Calendly webhook created but no webhook URI returned for webhook ${webhookData.id}`,
|
||||
{ response: responseBody }
|
||||
)
|
||||
throw new Error('Calendly webhook creation succeeded but no webhook URI was returned')
|
||||
}
|
||||
|
||||
// Extract the webhook ID from the URI (e.g., https://api.calendly.com/webhook_subscriptions/WEBHOOK_ID)
|
||||
const webhookId = webhookUri.split('/').pop()
|
||||
|
||||
if (!webhookId) {
|
||||
logger.error(`[${requestId}] Could not extract webhook ID from Calendly URI: ${webhookUri}`, {
|
||||
response: responseBody,
|
||||
})
|
||||
throw new Error('Failed to extract webhook ID from Calendly response')
|
||||
}
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Successfully created webhook in Calendly for webhook ${webhookData.id}.`,
|
||||
{
|
||||
calendlyWebhookUri: webhookUri,
|
||||
calendlyWebhookId: webhookId,
|
||||
}
|
||||
)
|
||||
return webhookId
|
||||
} catch (error: any) {
|
||||
logger.error(
|
||||
`[${requestId}] Exception during Calendly webhook creation for webhook ${webhookData.id}.`,
|
||||
{
|
||||
message: error.message,
|
||||
stack: error.stack,
|
||||
}
|
||||
)
|
||||
// Re-throw the error so it can be caught by the outer try-catch
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
// Helper function to create the webhook subscription in Webflow
|
||||
async function createWebflowWebhookSubscription(
|
||||
request: NextRequest,
|
||||
userId: string,
|
||||
webhookData: any,
|
||||
requestId: string
|
||||
): Promise<string | undefined> {
|
||||
try {
|
||||
const { path, providerConfig } = webhookData
|
||||
const { siteId, triggerId, collectionId, formId } = providerConfig || {}
|
||||
|
||||
if (!siteId) {
|
||||
logger.warn(`[${requestId}] Missing siteId for Webflow webhook creation.`, {
|
||||
webhookId: webhookData.id,
|
||||
})
|
||||
throw new Error('Site ID is required to create Webflow webhook')
|
||||
}
|
||||
|
||||
if (!triggerId) {
|
||||
logger.warn(`[${requestId}] Missing triggerId for Webflow webhook creation.`, {
|
||||
webhookId: webhookData.id,
|
||||
})
|
||||
throw new Error('Trigger type is required to create Webflow webhook')
|
||||
}
|
||||
|
||||
const accessToken = await getOAuthToken(userId, 'webflow')
|
||||
if (!accessToken) {
|
||||
logger.warn(
|
||||
`[${requestId}] Could not retrieve Webflow access token for user ${userId}. Cannot create webhook in Webflow.`
|
||||
)
|
||||
throw new Error(
|
||||
'Webflow account connection required. Please connect your Webflow account in the trigger configuration and try again.'
|
||||
)
|
||||
}
|
||||
|
||||
const notificationUrl = `${getBaseUrl()}/api/webhooks/trigger/${path}`
|
||||
|
||||
// Map trigger IDs to Webflow trigger types
|
||||
const triggerTypeMap: Record<string, string> = {
|
||||
webflow_collection_item_created: 'collection_item_created',
|
||||
webflow_collection_item_changed: 'collection_item_changed',
|
||||
webflow_collection_item_deleted: 'collection_item_deleted',
|
||||
webflow_form_submission: 'form_submission',
|
||||
}
|
||||
|
||||
const webflowTriggerType = triggerTypeMap[triggerId]
|
||||
if (!webflowTriggerType) {
|
||||
logger.warn(`[${requestId}] Invalid triggerId for Webflow: ${triggerId}`, {
|
||||
webhookId: webhookData.id,
|
||||
})
|
||||
throw new Error(`Invalid Webflow trigger type: ${triggerId}`)
|
||||
}
|
||||
|
||||
const webflowApiUrl = `https://api.webflow.com/v2/sites/${siteId}/webhooks`
|
||||
|
||||
const requestBody: any = {
|
||||
triggerType: webflowTriggerType,
|
||||
url: notificationUrl,
|
||||
}
|
||||
|
||||
// Add filter for collection-based triggers
|
||||
if (collectionId && webflowTriggerType.startsWith('collection_item_')) {
|
||||
requestBody.filter = {
|
||||
resource_type: 'collection',
|
||||
resource_id: collectionId,
|
||||
}
|
||||
}
|
||||
|
||||
// Add filter for form submissions
|
||||
if (formId && webflowTriggerType === 'form_submission') {
|
||||
requestBody.filter = {
|
||||
resource_type: 'form',
|
||||
resource_id: formId,
|
||||
}
|
||||
}
|
||||
|
||||
const webflowResponse = await fetch(webflowApiUrl, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
'Content-Type': 'application/json',
|
||||
accept: 'application/json',
|
||||
},
|
||||
body: JSON.stringify(requestBody),
|
||||
})
|
||||
|
||||
const responseBody = await webflowResponse.json()
|
||||
|
||||
if (!webflowResponse.ok || responseBody.error) {
|
||||
const errorMessage = responseBody.message || responseBody.error || 'Unknown Webflow API error'
|
||||
logger.error(
|
||||
`[${requestId}] Failed to create webhook in Webflow for webhook ${webhookData.id}. Status: ${webflowResponse.status}`,
|
||||
{ message: errorMessage, response: responseBody }
|
||||
)
|
||||
throw new Error(errorMessage)
|
||||
}
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Successfully created webhook in Webflow for webhook ${webhookData.id}.`,
|
||||
{
|
||||
webflowWebhookId: responseBody.id || responseBody._id,
|
||||
}
|
||||
)
|
||||
|
||||
return responseBody.id || responseBody._id
|
||||
} catch (error: any) {
|
||||
logger.error(
|
||||
`[${requestId}] Exception during Webflow webhook creation for webhook ${webhookData.id}.`,
|
||||
{
|
||||
message: error.message,
|
||||
stack: error.stack,
|
||||
}
|
||||
)
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
// Helper function to create the webhook subscription in Grain
|
||||
async function createGrainWebhookSubscription(
|
||||
request: NextRequest,
|
||||
webhookData: any,
|
||||
requestId: string
|
||||
): Promise<{ id: string; eventTypes: string[] } | undefined> {
|
||||
try {
|
||||
const { path, providerConfig } = webhookData
|
||||
const { apiKey, triggerId, includeHighlights, includeParticipants, includeAiSummary } =
|
||||
providerConfig || {}
|
||||
|
||||
if (!apiKey) {
|
||||
logger.warn(`[${requestId}] Missing apiKey for Grain webhook creation.`, {
|
||||
webhookId: webhookData.id,
|
||||
})
|
||||
throw new Error(
|
||||
'Grain API Key is required. Please provide your Grain Personal Access Token in the trigger configuration.'
|
||||
)
|
||||
}
|
||||
|
||||
// Map trigger IDs to Grain API hook_type (only 2 options: recording_added, upload_status)
|
||||
const hookTypeMap: Record<string, string> = {
|
||||
grain_webhook: 'recording_added',
|
||||
grain_recording_created: 'recording_added',
|
||||
grain_recording_updated: 'recording_added',
|
||||
grain_highlight_created: 'recording_added',
|
||||
grain_highlight_updated: 'recording_added',
|
||||
grain_story_created: 'recording_added',
|
||||
grain_upload_status: 'upload_status',
|
||||
}
|
||||
|
||||
const eventTypeMap: Record<string, string[]> = {
|
||||
grain_webhook: [],
|
||||
grain_recording_created: ['recording_added'],
|
||||
grain_recording_updated: ['recording_updated'],
|
||||
grain_highlight_created: ['highlight_created'],
|
||||
grain_highlight_updated: ['highlight_updated'],
|
||||
grain_story_created: ['story_created'],
|
||||
grain_upload_status: ['upload_status'],
|
||||
}
|
||||
|
||||
const hookType = hookTypeMap[triggerId] ?? 'recording_added'
|
||||
const eventTypes = eventTypeMap[triggerId] ?? []
|
||||
|
||||
if (!hookTypeMap[triggerId]) {
|
||||
logger.warn(
|
||||
`[${requestId}] Unknown triggerId for Grain: ${triggerId}, defaulting to recording_added`,
|
||||
{
|
||||
webhookId: webhookData.id,
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Creating Grain webhook`, {
|
||||
triggerId,
|
||||
hookType,
|
||||
eventTypes,
|
||||
webhookId: webhookData.id,
|
||||
})
|
||||
|
||||
const notificationUrl = `${getBaseUrl()}/api/webhooks/trigger/${path}`
|
||||
|
||||
const grainApiUrl = 'https://api.grain.com/_/public-api/v2/hooks/create'
|
||||
|
||||
const requestBody: Record<string, any> = {
|
||||
hook_url: notificationUrl,
|
||||
hook_type: hookType,
|
||||
}
|
||||
|
||||
// Build include object based on configuration
|
||||
const include: Record<string, boolean> = {}
|
||||
if (includeHighlights) {
|
||||
include.highlights = true
|
||||
}
|
||||
if (includeParticipants) {
|
||||
include.participants = true
|
||||
}
|
||||
if (includeAiSummary) {
|
||||
include.ai_summary = true
|
||||
}
|
||||
if (Object.keys(include).length > 0) {
|
||||
requestBody.include = include
|
||||
}
|
||||
|
||||
const grainResponse = await fetch(grainApiUrl, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
Authorization: `Bearer ${apiKey}`,
|
||||
'Content-Type': 'application/json',
|
||||
'Public-Api-Version': '2025-10-31',
|
||||
},
|
||||
body: JSON.stringify(requestBody),
|
||||
})
|
||||
|
||||
const responseBody = await grainResponse.json()
|
||||
|
||||
if (!grainResponse.ok || responseBody.error || responseBody.errors) {
|
||||
logger.warn('[App] Grain response body:', responseBody)
|
||||
const errorMessage =
|
||||
responseBody.errors?.detail ||
|
||||
responseBody.error?.message ||
|
||||
responseBody.error ||
|
||||
responseBody.message ||
|
||||
'Unknown Grain API error'
|
||||
logger.error(
|
||||
`[${requestId}] Failed to create webhook in Grain for webhook ${webhookData.id}. Status: ${grainResponse.status}`,
|
||||
{ message: errorMessage, response: responseBody }
|
||||
)
|
||||
|
||||
let userFriendlyMessage = 'Failed to create webhook subscription in Grain'
|
||||
if (grainResponse.status === 401) {
|
||||
userFriendlyMessage =
|
||||
'Invalid Grain API Key. Please verify your Personal Access Token is correct.'
|
||||
} else if (grainResponse.status === 403) {
|
||||
userFriendlyMessage =
|
||||
'Access denied. Please ensure your Grain API Key has appropriate permissions.'
|
||||
} else if (errorMessage && errorMessage !== 'Unknown Grain API error') {
|
||||
userFriendlyMessage = `Grain error: ${errorMessage}`
|
||||
}
|
||||
|
||||
throw new Error(userFriendlyMessage)
|
||||
}
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Successfully created webhook in Grain for webhook ${webhookData.id}.`,
|
||||
{
|
||||
grainWebhookId: responseBody.id,
|
||||
eventTypes,
|
||||
}
|
||||
)
|
||||
|
||||
return { id: responseBody.id, eventTypes }
|
||||
} catch (error: any) {
|
||||
logger.error(
|
||||
`[${requestId}] Exception during Grain webhook creation for webhook ${webhookData.id}.`,
|
||||
{
|
||||
message: error.message,
|
||||
stack: error.stack,
|
||||
}
|
||||
)
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
// Helper function to create the webhook subscription in Lemlist
|
||||
async function createLemlistWebhookSubscription(
|
||||
webhookData: any,
|
||||
requestId: string
|
||||
): Promise<{ id: string } | undefined> {
|
||||
try {
|
||||
const { path, providerConfig } = webhookData
|
||||
const { apiKey, triggerId, campaignId } = providerConfig || {}
|
||||
|
||||
if (!apiKey) {
|
||||
logger.warn(`[${requestId}] Missing apiKey for Lemlist webhook creation.`, {
|
||||
webhookId: webhookData.id,
|
||||
})
|
||||
throw new Error(
|
||||
'Lemlist API Key is required. Please provide your Lemlist API Key in the trigger configuration.'
|
||||
)
|
||||
}
|
||||
|
||||
// Map trigger IDs to Lemlist event types
|
||||
const eventTypeMap: Record<string, string | undefined> = {
|
||||
lemlist_email_replied: 'emailsReplied',
|
||||
lemlist_linkedin_replied: 'linkedinReplied',
|
||||
lemlist_interested: 'interested',
|
||||
lemlist_not_interested: 'notInterested',
|
||||
lemlist_email_opened: 'emailsOpened',
|
||||
lemlist_email_clicked: 'emailsClicked',
|
||||
lemlist_email_bounced: 'emailsBounced',
|
||||
lemlist_email_sent: 'emailsSent',
|
||||
lemlist_webhook: undefined, // Generic webhook - no type filter
|
||||
}
|
||||
|
||||
const eventType = eventTypeMap[triggerId]
|
||||
|
||||
logger.info(`[${requestId}] Creating Lemlist webhook`, {
|
||||
triggerId,
|
||||
eventType,
|
||||
hasCampaignId: !!campaignId,
|
||||
webhookId: webhookData.id,
|
||||
})
|
||||
|
||||
const notificationUrl = `${getBaseUrl()}/api/webhooks/trigger/${path}`
|
||||
|
||||
const lemlistApiUrl = 'https://api.lemlist.com/api/hooks'
|
||||
|
||||
// Build request body
|
||||
const requestBody: Record<string, any> = {
|
||||
targetUrl: notificationUrl,
|
||||
}
|
||||
|
||||
// Add event type if specified (omit for generic webhook to receive all events)
|
||||
if (eventType) {
|
||||
requestBody.type = eventType
|
||||
}
|
||||
|
||||
// Add campaign filter if specified
|
||||
if (campaignId) {
|
||||
requestBody.campaignId = campaignId
|
||||
}
|
||||
|
||||
// Lemlist uses Basic Auth with empty username and API key as password
|
||||
const authString = Buffer.from(`:${apiKey}`).toString('base64')
|
||||
|
||||
const lemlistResponse = await fetch(lemlistApiUrl, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
Authorization: `Basic ${authString}`,
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify(requestBody),
|
||||
})
|
||||
|
||||
const responseBody = await lemlistResponse.json()
|
||||
|
||||
if (!lemlistResponse.ok || responseBody.error) {
|
||||
const errorMessage = responseBody.message || responseBody.error || 'Unknown Lemlist API error'
|
||||
logger.error(
|
||||
`[${requestId}] Failed to create webhook in Lemlist for webhook ${webhookData.id}. Status: ${lemlistResponse.status}`,
|
||||
{ message: errorMessage, response: responseBody }
|
||||
)
|
||||
|
||||
let userFriendlyMessage = 'Failed to create webhook subscription in Lemlist'
|
||||
if (lemlistResponse.status === 401) {
|
||||
userFriendlyMessage = 'Invalid Lemlist API Key. Please verify your API Key is correct.'
|
||||
} else if (lemlistResponse.status === 403) {
|
||||
userFriendlyMessage =
|
||||
'Access denied. Please ensure your Lemlist API Key has appropriate permissions.'
|
||||
} else if (errorMessage && errorMessage !== 'Unknown Lemlist API error') {
|
||||
userFriendlyMessage = `Lemlist error: ${errorMessage}`
|
||||
}
|
||||
|
||||
throw new Error(userFriendlyMessage)
|
||||
}
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Successfully created webhook in Lemlist for webhook ${webhookData.id}.`,
|
||||
{
|
||||
lemlistWebhookId: responseBody._id,
|
||||
}
|
||||
)
|
||||
|
||||
return { id: responseBody._id }
|
||||
} catch (error: any) {
|
||||
logger.error(
|
||||
`[${requestId}] Exception during Lemlist webhook creation for webhook ${webhookData.id}.`,
|
||||
{
|
||||
message: error.message,
|
||||
stack: error.stack,
|
||||
}
|
||||
)
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||

@@ -4,7 +4,6 @@ import { and, desc, eq } from 'drizzle-orm'
import type { NextRequest } from 'next/server'
import { generateRequestId } from '@/lib/core/utils/request'
import { removeMcpToolsForWorkflow, syncMcpToolsForWorkflow } from '@/lib/mcp/workflow-mcp-sync'
import { saveTriggerWebhooksForDeploy } from '@/lib/webhooks/deploy'
import {
  deployWorkflow,
  loadWorkflowFromNormalizedTables,

@@ -131,22 +130,6 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
    return createErrorResponse(`Invalid schedule configuration: ${scheduleValidation.error}`, 400)
  }

  const triggerSaveResult = await saveTriggerWebhooksForDeploy({
    request,
    workflowId: id,
    workflow: workflowData,
    userId: actorUserId,
    blocks: normalizedData.blocks,
    requestId,
  })

  if (!triggerSaveResult.success) {
    return createErrorResponse(
      triggerSaveResult.error?.message || 'Failed to save trigger configuration',
      triggerSaveResult.error?.status || 500
    )
  }

  const deployResult = await deployWorkflow({
    workflowId: id,
    deployedBy: actorUserId,

@@ -1,19 +1,19 @@
import { db } from '@sim/db'
import { webhook, workflow, workflowBlocks } from '@sim/db/schema'
import { webhook, workflow } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq, inArray } from 'drizzle-orm'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { env } from '@/lib/core/config/env'
import { generateRequestId } from '@/lib/core/utils/request'
import { cleanupExternalWebhook } from '@/lib/webhooks/provider-subscriptions'
import { extractAndPersistCustomTools } from '@/lib/workflows/persistence/custom-tools-persistence'
import { saveWorkflowToNormalizedTables } from '@/lib/workflows/persistence/utils'
import { sanitizeAgentToolsInBlocks } from '@/lib/workflows/sanitization/validation'
import { getWorkflowAccessContext } from '@/lib/workflows/utils'
import type { BlockState } from '@/stores/workflows/workflow/types'
import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/workflow/utils'
import { getTrigger } from '@/triggers'

const logger = createLogger('WorkflowStateAPI')

@@ -193,59 +193,6 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
      deployedAt: state.deployedAt,
    }

    // Find blocks that were deleted or edited
    const currentBlockIds = new Set(Object.keys(filteredBlocks))
    const previousBlocks = await db
      .select({ id: workflowBlocks.id, data: workflowBlocks.data })
      .from(workflowBlocks)
      .where(eq(workflowBlocks.workflowId, workflowId))

    const blocksToCleanup: string[] = []
    for (const prevBlock of previousBlocks) {
      if (!currentBlockIds.has(prevBlock.id)) {
        // Block was deleted
        blocksToCleanup.push(prevBlock.id)
      } else {
        // Block still exists - check if it was edited
        const newBlock = filteredBlocks[prevBlock.id]
        const prevData = prevBlock.data as Record<string, unknown> | null
        if (prevData && JSON.stringify(prevData) !== JSON.stringify(newBlock)) {
          blocksToCleanup.push(prevBlock.id)
        }
      }
    }

    if (blocksToCleanup.length > 0) {
      const webhooksToCleanup = await db
        .select()
        .from(webhook)
        .where(inArray(webhook.blockId, blocksToCleanup))

      if (webhooksToCleanup.length > 0) {
        logger.info(`[${requestId}] Cleaning up ${webhooksToCleanup.length} webhook(s)`, {
          workflowId,
          blocksEdited: blocksToCleanup.length,
        })

        const webhookIdsToDelete: string[] = []
        for (const wh of webhooksToCleanup) {
          try {
            await cleanupExternalWebhook(wh, workflowData, requestId)
          } catch (cleanupError) {
            logger.warn(
              `[${requestId}] Failed to cleanup external webhook ${wh.id} during workflow save`,
              cleanupError
            )
          }
          webhookIdsToDelete.push(wh.id)
        }

        if (webhookIdsToDelete.length > 0) {
          await db.delete(webhook).where(inArray(webhook.id, webhookIdsToDelete))
        }
      }
    }

    const saveResult = await saveWorkflowToNormalizedTables(workflowId, workflowState as any)

    if (!saveResult.success) {

@@ -256,6 +203,8 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
      )
    }

    await syncWorkflowWebhooks(workflowId, workflowState.blocks)

    // Extract and persist custom tools to database
    try {
      const workspaceId = workflowData.workspaceId

@@ -341,3 +290,213 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}

function getSubBlockValue<T = unknown>(block: BlockState, subBlockId: string): T | undefined {
|
||||
const value = block.subBlocks?.[subBlockId]?.value
|
||||
if (value === undefined || value === null) {
|
||||
return undefined
|
||||
}
|
||||
return value as T
|
||||
}
|
||||
|
||||
async function syncWorkflowWebhooks(
|
||||
workflowId: string,
|
||||
blocks: Record<string, any>
|
||||
): Promise<void> {
|
||||
await syncBlockResources(workflowId, blocks, {
|
||||
resourceName: 'webhook',
|
||||
subBlockId: 'webhookId',
|
||||
buildMetadata: buildWebhookMetadata,
|
||||
applyMetadata: upsertWebhookRecord,
|
||||
})
|
||||
}
|
||||
|
||||
interface WebhookMetadata {
|
||||
triggerPath: string
|
||||
provider: string | null
|
||||
providerConfig: Record<string, any>
|
||||
}
|
||||
|
||||
const CREDENTIAL_SET_PREFIX = 'credentialSet:'
|
||||
|
||||
function buildWebhookMetadata(block: BlockState): WebhookMetadata | null {
|
||||
const triggerId =
|
||||
getSubBlockValue<string>(block, 'triggerId') ||
|
||||
getSubBlockValue<string>(block, 'selectedTriggerId')
|
||||
const triggerConfig = getSubBlockValue<Record<string, any>>(block, 'triggerConfig') || {}
|
||||
const triggerCredentials = getSubBlockValue<string>(block, 'triggerCredentials')
|
||||
const triggerPath = getSubBlockValue<string>(block, 'triggerPath') || block.id
|
||||
|
||||
const triggerDef = triggerId ? getTrigger(triggerId) : undefined
|
||||
const provider = triggerDef?.provider || null
|
||||
|
||||
// Handle credential sets vs individual credentials
|
||||
const isCredentialSet = triggerCredentials?.startsWith(CREDENTIAL_SET_PREFIX)
|
||||
const credentialSetId = isCredentialSet
|
||||
? triggerCredentials!.slice(CREDENTIAL_SET_PREFIX.length)
|
||||
: undefined
|
||||
const credentialId = isCredentialSet ? undefined : triggerCredentials
|
||||
|
||||
const providerConfig = {
|
||||
...(typeof triggerConfig === 'object' ? triggerConfig : {}),
|
||||
...(credentialId ? { credentialId } : {}),
|
||||
...(credentialSetId ? { credentialSetId } : {}),
|
||||
...(triggerId ? { triggerId } : {}),
|
||||
}
|
||||
|
||||
return {
|
||||
triggerPath,
|
||||
provider,
|
||||
providerConfig,
|
||||
}
|
||||
}
|
||||
|
||||
async function upsertWebhookRecord(
|
||||
workflowId: string,
|
||||
block: BlockState,
|
||||
webhookId: string,
|
||||
metadata: WebhookMetadata
|
||||
): Promise<void> {
|
||||
const providerConfig = metadata.providerConfig as Record<string, unknown>
|
||||
const credentialSetId = providerConfig?.credentialSetId as string | undefined
|
||||
|
||||
// For credential sets, delegate to the sync function which handles fan-out
|
||||
if (credentialSetId && metadata.provider) {
|
||||
const { syncWebhooksForCredentialSet } = await import('@/lib/webhooks/utils.server')
|
||||
const { getProviderIdFromServiceId } = await import('@/lib/oauth')
|
||||
|
||||
const oauthProviderId = getProviderIdFromServiceId(metadata.provider)
|
||||
const requestId = crypto.randomUUID().slice(0, 8)
|
||||
|
||||
// Extract base config (without credential-specific fields)
|
||||
const {
|
||||
credentialId: _cId,
|
||||
credentialSetId: _csId,
|
||||
userId: _uId,
|
||||
...baseConfig
|
||||
} = providerConfig
|
||||
|
||||
try {
|
||||
await syncWebhooksForCredentialSet({
|
||||
workflowId,
|
||||
blockId: block.id,
|
||||
provider: metadata.provider,
|
||||
basePath: metadata.triggerPath,
|
||||
credentialSetId,
|
||||
oauthProviderId,
|
||||
providerConfig: baseConfig as Record<string, any>,
|
||||
requestId,
|
||||
})
|
||||
|
||||
logger.info('Synced credential set webhooks during workflow save', {
|
||||
workflowId,
|
||||
blockId: block.id,
|
||||
credentialSetId,
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Failed to sync credential set webhooks during workflow save', {
|
||||
workflowId,
|
||||
blockId: block.id,
|
||||
credentialSetId,
|
||||
error,
|
||||
})
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// For individual credentials, use the existing single webhook logic
|
||||
const [existing] = await db.select().from(webhook).where(eq(webhook.id, webhookId)).limit(1)
|
||||
|
||||
if (existing) {
|
||||
const needsUpdate =
|
||||
existing.blockId !== block.id ||
|
||||
existing.workflowId !== workflowId ||
|
||||
existing.path !== metadata.triggerPath
|
||||
|
||||
if (needsUpdate) {
|
||||
await db
|
||||
.update(webhook)
|
||||
.set({
|
||||
workflowId,
|
||||
blockId: block.id,
|
||||
path: metadata.triggerPath,
|
||||
provider: metadata.provider || existing.provider,
|
||||
providerConfig: Object.keys(metadata.providerConfig).length
|
||||
? metadata.providerConfig
|
||||
: existing.providerConfig,
|
||||
isActive: true,
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where(eq(webhook.id, webhookId))
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
await db.insert(webhook).values({
|
||||
id: webhookId,
|
||||
workflowId,
|
||||
blockId: block.id,
|
||||
path: metadata.triggerPath,
|
||||
provider: metadata.provider,
|
||||
providerConfig: metadata.providerConfig,
|
||||
credentialSetId: null,
|
||||
isActive: true,
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
|
||||
logger.info('Recreated missing webhook after workflow save', {
|
||||
workflowId,
|
||||
blockId: block.id,
|
||||
webhookId,
|
||||
})
|
||||
}
|
||||
|
||||
interface BlockResourceSyncConfig<T> {
|
||||
resourceName: string
|
||||
subBlockId: string
|
||||
buildMetadata: (block: BlockState, resourceId: string) => T | null
|
||||
applyMetadata: (
|
||||
workflowId: string,
|
||||
block: BlockState,
|
||||
resourceId: string,
|
||||
metadata: T
|
||||
) => Promise<void>
|
||||
}
|
||||
|
||||
async function syncBlockResources<T>(
|
||||
workflowId: string,
|
||||
blocks: Record<string, any>,
|
||||
config: BlockResourceSyncConfig<T>
|
||||
): Promise<void> {
|
||||
const blockEntries = Object.values(blocks || {}).filter(Boolean) as BlockState[]
|
||||
if (blockEntries.length === 0) return
|
||||
|
||||
for (const block of blockEntries) {
|
||||
const resourceId = getSubBlockValue<string>(block, config.subBlockId)
|
||||
if (!resourceId) continue
|
||||
|
||||
const metadata = config.buildMetadata(block, resourceId)
|
||||
if (!metadata) {
|
||||
logger.warn(`Skipping ${config.resourceName} sync due to invalid configuration`, {
|
||||
workflowId,
|
||||
blockId: block.id,
|
||||
resourceId,
|
||||
resourceName: config.resourceName,
|
||||
})
|
||||
continue
|
||||
}
|
||||
|
||||
try {
|
||||
await config.applyMetadata(workflowId, block, resourceId, metadata)
|
||||
} catch (error) {
|
||||
logger.error(`Failed to sync ${config.resourceName}`, {
|
||||
workflowId,
|
||||
blockId: block.id,
|
||||
resourceId,
|
||||
resourceName: config.resourceName,
|
||||
error,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,7 +1,7 @@
import { db } from '@sim/db'
import { workflow } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, isNull, max } from 'drizzle-orm'
import { and, asc, eq, isNull, min } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
@@ -64,10 +64,20 @@ export async function GET(request: Request) {

let workflows

const orderByClause = [asc(workflow.sortOrder), asc(workflow.createdAt), asc(workflow.id)]

if (workspaceId) {
workflows = await db.select().from(workflow).where(eq(workflow.workspaceId, workspaceId))
workflows = await db
.select()
.from(workflow)
.where(eq(workflow.workspaceId, workspaceId))
.orderBy(...orderByClause)
} else {
workflows = await db.select().from(workflow).where(eq(workflow.userId, userId))
workflows = await db
.select()
.from(workflow)
.where(eq(workflow.userId, userId))
.orderBy(...orderByClause)
}

return NextResponse.json({ data: workflows }, { status: 200 })
@@ -140,15 +150,15 @@ export async function POST(req: NextRequest) {
sortOrder = providedSortOrder
} else {
const folderCondition = folderId ? eq(workflow.folderId, folderId) : isNull(workflow.folderId)
const [maxResult] = await db
.select({ maxOrder: max(workflow.sortOrder) })
const [minResult] = await db
.select({ minOrder: min(workflow.sortOrder) })
.from(workflow)
.where(
workspaceId
? and(eq(workflow.workspaceId, workspaceId), folderCondition)
: and(eq(workflow.userId, session.user.id), folderCondition)
)
sortOrder = (maxResult?.maxOrder ?? -1) + 1
sortOrder = (minResult?.minOrder ?? 1) - 1
}

await db.insert(workflow).values({
@@ -5,8 +5,14 @@ import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { v4 as uuidv4 } from 'uuid'
import {
type EmailRateLimitsData,
type EmailUsageData,
renderWorkflowNotificationEmail,
} from '@/components/emails'
import { getSession } from '@/lib/auth'
import { decryptSecret } from '@/lib/core/security/encryption'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { sendEmail } from '@/lib/messaging/email/mailer'
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'

@@ -67,29 +73,39 @@ function buildTestPayload(subscription: typeof workspaceNotificationSubscription
data.finalOutput = { message: 'This is a test notification', test: true }
}

if (subscription.includeTraceSpans) {
data.traceSpans = [
{
id: 'span_test_1',
name: 'Test Block',
type: 'block',
status: 'success',
startTime: new Date(timestamp - 5000).toISOString(),
endTime: new Date(timestamp).toISOString(),
duration: 5000,
},
]
}

if (subscription.includeRateLimits) {
data.rateLimits = {
sync: { limit: 150, remaining: 45, resetAt: new Date(timestamp + 60000).toISOString() },
async: { limit: 1000, remaining: 50, resetAt: new Date(timestamp + 60000).toISOString() },
sync: {
requestsPerMinute: 150,
remaining: 45,
resetAt: new Date(timestamp + 60000).toISOString(),
},
async: {
requestsPerMinute: 1000,
remaining: 50,
resetAt: new Date(timestamp + 60000).toISOString(),
},
}
}

if (subscription.includeUsageData) {
data.usage = { currentPeriodCost: 2.45, limit: 20, plan: 'pro', isExceeded: false }
data.usage = { currentPeriodCost: 2.45, limit: 20, percentUsed: 12.25, isExceeded: false }
}

if (subscription.includeTraceSpans && subscription.notificationType === 'webhook') {
data.traceSpans = [
{
name: 'test-block',
startTime: timestamp,
endTime: timestamp + 150,
duration: 150,
status: 'success',
blockId: 'block_test_1',
blockType: 'agent',
blockName: 'Test Agent',
children: [],
},
]
}

return { payload, timestamp }
@@ -157,23 +173,26 @@ async function testEmail(subscription: typeof workspaceNotificationSubscription.

const { payload } = buildTestPayload(subscription)
const data = (payload as Record<string, unknown>).data as Record<string, unknown>
const baseUrl = getBaseUrl()
const logUrl = `${baseUrl}/workspace/${subscription.workspaceId}/logs`

const html = await renderWorkflowNotificationEmail({
workflowName: data.workflowName as string,
status: data.status as 'success' | 'error',
trigger: data.trigger as string,
duration: `${data.totalDurationMs}ms`,
cost: `$${(((data.cost as Record<string, unknown>)?.total as number) || 0).toFixed(4)}`,
logUrl,
finalOutput: data.finalOutput,
rateLimits: data.rateLimits as EmailRateLimitsData | undefined,
usageData: data.usage as EmailUsageData | undefined,
})

const result = await sendEmail({
to: subscription.emailRecipients,
subject: `[Test] Workflow Execution: ${data.workflowName}`,
text: `This is a test notification from Sim Studio.\n\nWorkflow: ${data.workflowName}\nStatus: ${data.status}\nDuration: ${data.totalDurationMs}ms\n\nThis notification is configured for workspace notifications.`,
html: `
<div style="font-family: sans-serif; max-width: 600px; margin: 0 auto;">
<h2 style="color: #7F2FFF;">Test Notification</h2>
<p>This is a test notification from Sim Studio.</p>
<table style="width: 100%; border-collapse: collapse; margin: 20px 0;">
<tr><td style="padding: 8px; border: 1px solid #eee;"><strong>Workflow</strong></td><td style="padding: 8px; border: 1px solid #eee;">${data.workflowName}</td></tr>
<tr><td style="padding: 8px; border: 1px solid #eee;"><strong>Status</strong></td><td style="padding: 8px; border: 1px solid #eee;">${data.status}</td></tr>
<tr><td style="padding: 8px; border: 1px solid #eee;"><strong>Duration</strong></td><td style="padding: 8px; border: 1px solid #eee;">${data.totalDurationMs}ms</td></tr>
</table>
<p style="color: #666; font-size: 12px;">This notification is configured for workspace notifications.</p>
</div>
`,
html,
text: `This is a test notification from Sim.\n\nWorkflow: ${data.workflowName}\nStatus: ${data.status}\nDuration: ${data.totalDurationMs}ms\n\nView Log: ${logUrl}\n\nThis notification is configured for workspace notifications.`,
emailType: 'notifications',
})

@@ -227,7 +246,7 @@ async function testSlack(
elements: [
{
type: 'mrkdwn',
text: 'This is a test notification from Sim Studio workspace notifications.',
text: 'This is a test notification from Sim workspace notifications.',
},
],
},

@@ -222,7 +222,6 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
}
}

// Encrypt webhook secret if provided
let webhookConfig = data.webhookConfig || null
if (webhookConfig?.secret) {
const { encrypted } = await encryptSecret(webhookConfig.secret)
@@ -3,9 +3,9 @@
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { Loader2 } from 'lucide-react'
import { Skeleton } from '@/components/ui/skeleton'
import { formatLatency, parseDuration } from '@/app/workspace/[workspaceId]/logs/utils'
import { formatLatency } from '@/app/workspace/[workspaceId]/logs/utils'
import type { DashboardStatsResponse, WorkflowStats } from '@/hooks/queries/logs'
import { useFilterStore } from '@/stores/logs/filters/store'
import type { WorkflowLog } from '@/stores/logs/filters/types'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { LineChart, WorkflowsList } from './components'

@@ -26,10 +26,6 @@ interface WorkflowExecution {
overallSuccessRate: number
}

const DEFAULT_SEGMENTS = 72
const MIN_SEGMENT_PX = 10
const MIN_SEGMENT_MS = 60000

const SKELETON_BAR_HEIGHTS = [
45, 72, 38, 85, 52, 68, 30, 90, 55, 42, 78, 35, 88, 48, 65, 28, 82, 58, 40, 75, 32, 95, 50, 70,
]
@@ -120,13 +116,32 @@ function DashboardSkeleton() {
}

interface DashboardProps {
logs: WorkflowLog[]
stats?: DashboardStatsResponse
isLoading: boolean
error?: Error | null
}

export default function Dashboard({ logs, isLoading, error }: DashboardProps) {
const [segmentCount, setSegmentCount] = useState<number>(DEFAULT_SEGMENTS)
/**
 * Converts server WorkflowStats to the internal WorkflowExecution format.
 */
function toWorkflowExecution(wf: WorkflowStats): WorkflowExecution {
return {
workflowId: wf.workflowId,
workflowName: wf.workflowName,
overallSuccessRate: wf.overallSuccessRate,
segments: wf.segments.map((seg) => ({
timestamp: seg.timestamp,
totalExecutions: seg.totalExecutions,
successfulExecutions: seg.successfulExecutions,
hasExecutions: seg.totalExecutions > 0,
successRate:
seg.totalExecutions > 0 ? (seg.successfulExecutions / seg.totalExecutions) * 100 : 100,
avgDurationMs: seg.avgDurationMs,
})),
}
}

export default function Dashboard({ stats, isLoading, error }: DashboardProps) {
const [selectedSegments, setSelectedSegments] = useState<Record<string, number[]>>({})
const [lastAnchorIndices, setLastAnchorIndices] = useState<Record<string, number>>({})
const barsAreaRef = useRef<HTMLDivElement | null>(null)
@@ -137,182 +152,32 @@ export default function Dashboard({ logs, isLoading, error }: DashboardProps) {

const expandedWorkflowId = workflowIds.length === 1 ? workflowIds[0] : null

const lastExecutionByWorkflow = useMemo(() => {
const map = new Map<string, number>()
for (const log of logs) {
const wfId = log.workflowId
if (!wfId) continue
const ts = new Date(log.createdAt).getTime()
const existing = map.get(wfId)
if (!existing || ts > existing) {
map.set(wfId, ts)
}
}
return map
}, [logs])

const timeBounds = useMemo(() => {
if (logs.length === 0) {
const now = new Date()
return { start: now, end: now }
}

let minTime = Number.POSITIVE_INFINITY
let maxTime = Number.NEGATIVE_INFINITY

for (const log of logs) {
const ts = new Date(log.createdAt).getTime()
if (ts < minTime) minTime = ts
if (ts > maxTime) maxTime = ts
}

const end = new Date(Math.max(maxTime, Date.now()))
const start = new Date(minTime)

return { start, end }
}, [logs])

const { executions, aggregateSegments, segmentMs } = useMemo(() => {
const allWorkflowsList = Object.values(allWorkflows)

if (allWorkflowsList.length === 0) {
if (!stats) {
return { executions: [], aggregateSegments: [], segmentMs: 0 }
}

const { start, end } =
logs.length > 0
? timeBounds
: { start: new Date(Date.now() - 24 * 60 * 60 * 1000), end: new Date() }

const totalMs = Math.max(1, end.getTime() - start.getTime())
const calculatedSegmentMs = Math.max(
MIN_SEGMENT_MS,
Math.floor(totalMs / Math.max(1, segmentCount))
)

const logsByWorkflow = new Map<string, WorkflowLog[]>()
for (const log of logs) {
const wfId = log.workflowId
if (!logsByWorkflow.has(wfId)) {
logsByWorkflow.set(wfId, [])
}
logsByWorkflow.get(wfId)!.push(log)
}

const workflowExecutions: WorkflowExecution[] = []

for (const workflow of allWorkflowsList) {
const workflowLogs = logsByWorkflow.get(workflow.id) || []

const segments: WorkflowExecution['segments'] = Array.from(
{ length: segmentCount },
(_, i) => ({
timestamp: new Date(start.getTime() + i * calculatedSegmentMs).toISOString(),
hasExecutions: false,
totalExecutions: 0,
successfulExecutions: 0,
successRate: 100,
avgDurationMs: 0,
})
)

const durations: number[][] = Array.from({ length: segmentCount }, () => [])

for (const log of workflowLogs) {
const logTime = new Date(log.createdAt).getTime()
const idx = Math.min(
segmentCount - 1,
Math.max(0, Math.floor((logTime - start.getTime()) / calculatedSegmentMs))
)

segments[idx].totalExecutions += 1
segments[idx].hasExecutions = true

if (log.level !== 'error') {
segments[idx].successfulExecutions += 1
}

const duration = parseDuration({ duration: log.duration ?? undefined })
if (duration !== null && duration > 0) {
durations[idx].push(duration)
}
}

let totalExecs = 0
let totalSuccess = 0

for (let i = 0; i < segmentCount; i++) {
const seg = segments[i]
totalExecs += seg.totalExecutions
totalSuccess += seg.successfulExecutions

if (seg.totalExecutions > 0) {
seg.successRate = (seg.successfulExecutions / seg.totalExecutions) * 100
}

if (durations[i].length > 0) {
seg.avgDurationMs = Math.round(
durations[i].reduce((sum, d) => sum + d, 0) / durations[i].length
)
}
}

const overallSuccessRate = totalExecs > 0 ? (totalSuccess / totalExecs) * 100 : 100

workflowExecutions.push({
workflowId: workflow.id,
workflowName: workflow.name,
segments,
overallSuccessRate,
})
}

workflowExecutions.sort((a, b) => {
const errA = a.overallSuccessRate < 100 ? 1 - a.overallSuccessRate / 100 : 0
const errB = b.overallSuccessRate < 100 ? 1 - b.overallSuccessRate / 100 : 0
if (errA !== errB) return errB - errA
return a.workflowName.localeCompare(b.workflowName)
})

const aggSegments: {
timestamp: string
totalExecutions: number
successfulExecutions: number
avgDurationMs: number
}[] = Array.from({ length: segmentCount }, (_, i) => ({
timestamp: new Date(start.getTime() + i * calculatedSegmentMs).toISOString(),
totalExecutions: 0,
successfulExecutions: 0,
avgDurationMs: 0,
}))

const weightedDurationSums: number[] = Array(segmentCount).fill(0)
const executionCounts: number[] = Array(segmentCount).fill(0)

for (const wf of workflowExecutions) {
wf.segments.forEach((s, i) => {
aggSegments[i].totalExecutions += s.totalExecutions
aggSegments[i].successfulExecutions += s.successfulExecutions

if (s.avgDurationMs && s.avgDurationMs > 0 && s.totalExecutions > 0) {
weightedDurationSums[i] += s.avgDurationMs * s.totalExecutions
executionCounts[i] += s.totalExecutions
}
})
}

aggSegments.forEach((seg, i) => {
if (executionCounts[i] > 0) {
seg.avgDurationMs = weightedDurationSums[i] / executionCounts[i]
}
})
const workflowExecutions = stats.workflows.map(toWorkflowExecution)

return {
executions: workflowExecutions,
aggregateSegments: aggSegments,
segmentMs: calculatedSegmentMs,
aggregateSegments: stats.aggregateSegments,
segmentMs: stats.segmentMs,
}
}, [logs, timeBounds, segmentCount, allWorkflows])
}, [stats])

const lastExecutionByWorkflow = useMemo(() => {
const map = new Map<string, number>()
for (const wf of executions) {
for (let i = wf.segments.length - 1; i >= 0; i--) {
if (wf.segments[i].totalExecutions > 0) {
map.set(wf.workflowId, new Date(wf.segments[i].timestamp).getTime())
break
}
}
}
return map
}, [executions])

const filteredExecutions = useMemo(() => {
let filtered = executions
@@ -511,37 +376,12 @@ export default function Dashboard({ logs, isLoading, error }: DashboardProps) {
useEffect(() => {
setSelectedSegments({})
setLastAnchorIndices({})
}, [logs, timeRange, workflowIds, searchQuery])

useEffect(() => {
if (!barsAreaRef.current) return
const el = barsAreaRef.current
let debounceId: ReturnType<typeof setTimeout> | null = null
const ro = new ResizeObserver(([entry]) => {
const w = entry?.contentRect?.width || 720
const n = Math.max(36, Math.min(96, Math.floor(w / MIN_SEGMENT_PX)))
if (debounceId) clearTimeout(debounceId)
debounceId = setTimeout(() => {
setSegmentCount(n)
}, 150)
})
ro.observe(el)
const rect = el.getBoundingClientRect()
if (rect?.width) {
const n = Math.max(36, Math.min(96, Math.floor(rect.width / MIN_SEGMENT_PX)))
setSegmentCount(n)
}
return () => {
if (debounceId) clearTimeout(debounceId)
ro.disconnect()
}
}, [])
}, [stats, timeRange, workflowIds, searchQuery])

if (isLoading) {
return <DashboardSkeleton />
}

// Show error state
if (error) {
return (
<div className='mt-[24px] flex flex-1 items-center justify-center'>
@@ -422,7 +422,8 @@ export function NotificationSettings({
levelFilter: formData.levelFilter,
triggerFilter: formData.triggerFilter,
includeFinalOutput: formData.includeFinalOutput,
includeTraceSpans: formData.includeTraceSpans,
// Trace spans only available for webhooks (too large for email/Slack)
includeTraceSpans: activeTab === 'webhook' ? formData.includeTraceSpans : false,
includeRateLimits: formData.includeRateLimits,
includeUsageData: formData.includeUsageData,
alertConfig,
@@ -830,7 +831,10 @@ export function NotificationSettings({
<Combobox
options={[
{ label: 'Final Output', value: 'includeFinalOutput' },
{ label: 'Trace Spans', value: 'includeTraceSpans' },
// Trace spans only available for webhooks (too large for email/Slack)
...(activeTab === 'webhook'
? [{ label: 'Trace Spans', value: 'includeTraceSpans' }]
: []),
{ label: 'Rate Limits', value: 'includeRateLimits' },
{ label: 'Usage Data', value: 'includeUsageData' },
]}
@@ -838,7 +842,7 @@ export function NotificationSettings({
multiSelectValues={
[
formData.includeFinalOutput && 'includeFinalOutput',
formData.includeTraceSpans && 'includeTraceSpans',
formData.includeTraceSpans && activeTab === 'webhook' && 'includeTraceSpans',
formData.includeRateLimits && 'includeRateLimits',
formData.includeUsageData && 'includeUsageData',
].filter(Boolean) as string[]
@@ -862,7 +866,7 @@ export function NotificationSettings({
}
const selected = [
formData.includeFinalOutput && 'includeFinalOutput',
formData.includeTraceSpans && 'includeTraceSpans',
formData.includeTraceSpans && activeTab === 'webhook' && 'includeTraceSpans',
formData.includeRateLimits && 'includeRateLimits',
formData.includeUsageData && 'includeUsageData',
].filter(Boolean) as string[]

@@ -11,7 +11,7 @@ import {
} from '@/lib/logs/filters'
import { parseQuery, queryToApiParams } from '@/lib/logs/query-parser'
import { useFolders } from '@/hooks/queries/folders'
import { useDashboardLogs, useLogDetail, useLogsList } from '@/hooks/queries/logs'
import { useDashboardStats, useLogDetail, useLogsList } from '@/hooks/queries/logs'
import { useDebounce } from '@/hooks/use-debounce'
import { useFilterStore } from '@/stores/logs/filters/store'
import type { WorkflowLog } from '@/stores/logs/filters/types'
@@ -130,7 +130,7 @@ export default function Logs() {
[timeRange, startDate, endDate, level, workflowIds, folderIds, triggers, debouncedSearchQuery]
)

const dashboardLogsQuery = useDashboardLogs(workspaceId, dashboardFilters, {
const dashboardStatsQuery = useDashboardStats(workspaceId, dashboardFilters, {
enabled: Boolean(workspaceId) && isInitialized.current,
refetchInterval: isLive ? 5000 : false,
})
@@ -417,9 +417,9 @@ export default function Logs() {
className={cn('flex min-h-0 flex-1 flex-col pr-[24px]', !isDashboardView && 'hidden')}
>
<Dashboard
logs={dashboardLogsQuery.data ?? []}
isLoading={!dashboardLogsQuery.data}
error={dashboardLogsQuery.error}
stats={dashboardStatsQuery.data}
isLoading={dashboardStatsQuery.isLoading}
error={dashboardStatsQuery.error}
/>
</div>

@@ -2,29 +2,9 @@ import { memo, useEffect, useRef, useState } from 'react'
import CopilotMarkdownRenderer from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/markdown-renderer'

/**
 * Minimum delay between characters (fast catch-up mode)
 * Character animation delay in milliseconds
 */
const MIN_DELAY = 1

/**
 * Maximum delay between characters (when waiting for content)
 */
const MAX_DELAY = 12

/**
 * Default delay when streaming normally
 */
const DEFAULT_DELAY = 4

/**
 * How far behind (in characters) before we speed up
 */
const CATCH_UP_THRESHOLD = 20

/**
 * How close to content before we slow down
 */
const SLOW_DOWN_THRESHOLD = 5
const CHARACTER_DELAY = 3

/**
 * StreamingIndicator shows animated dots during message streaming
@@ -54,50 +34,21 @@ interface SmoothStreamingTextProps {
isStreaming: boolean
}

/**
 * Calculates adaptive delay based on how far behind animation is from actual content
 *
 * @param displayedLength - Current displayed content length
 * @param totalLength - Total available content length
 * @returns Delay in milliseconds
 */
function calculateAdaptiveDelay(displayedLength: number, totalLength: number): number {
const charsRemaining = totalLength - displayedLength

if (charsRemaining > CATCH_UP_THRESHOLD) {
// Far behind - speed up to catch up
// Scale from MIN_DELAY to DEFAULT_DELAY based on how far behind
const catchUpFactor = Math.min(1, (charsRemaining - CATCH_UP_THRESHOLD) / 50)
return MIN_DELAY + (DEFAULT_DELAY - MIN_DELAY) * (1 - catchUpFactor)
}

if (charsRemaining <= SLOW_DOWN_THRESHOLD) {
// Close to content edge - slow down to feel natural
// The closer we are, the slower we go (up to MAX_DELAY)
const slowFactor = 1 - charsRemaining / SLOW_DOWN_THRESHOLD
return DEFAULT_DELAY + (MAX_DELAY - DEFAULT_DELAY) * slowFactor
}

// Normal streaming speed
return DEFAULT_DELAY
}

/**
 * SmoothStreamingText component displays text with character-by-character animation
 * Creates a smooth streaming effect for AI responses with adaptive speed
 *
 * Uses adaptive pacing: speeds up when catching up, slows down near content edge
 * Creates a smooth streaming effect for AI responses
 *
 * @param props - Component props
 * @returns Streaming text with smooth animation
 */
export const SmoothStreamingText = memo(
({ content, isStreaming }: SmoothStreamingTextProps) => {
const [displayedContent, setDisplayedContent] = useState('')
// Initialize with full content when not streaming to avoid flash on page load
const [displayedContent, setDisplayedContent] = useState(() => (isStreaming ? '' : content))
const contentRef = useRef(content)
const rafRef = useRef<number | null>(null)
const indexRef = useRef(0)
const lastFrameTimeRef = useRef<number>(0)
const timeoutRef = useRef<NodeJS.Timeout | null>(null)
// Initialize index based on streaming state
const indexRef = useRef(isStreaming ? 0 : content.length)
const isAnimatingRef = useRef(false)

useEffect(() => {
@@ -110,42 +61,33 @@ export const SmoothStreamingText = memo(
}

if (isStreaming) {
if (indexRef.current < content.length && !isAnimatingRef.current) {
isAnimatingRef.current = true
lastFrameTimeRef.current = performance.now()

const animateText = (timestamp: number) => {
if (indexRef.current < content.length) {
const animateText = () => {
const currentContent = contentRef.current
const currentIndex = indexRef.current
const elapsed = timestamp - lastFrameTimeRef.current

// Calculate adaptive delay based on how far behind we are
const delay = calculateAdaptiveDelay(currentIndex, currentContent.length)

if (elapsed >= delay) {
if (currentIndex < currentContent.length) {
const newDisplayed = currentContent.slice(0, currentIndex + 1)
setDisplayedContent(newDisplayed)
indexRef.current = currentIndex + 1
lastFrameTimeRef.current = timestamp
}
}

if (indexRef.current < currentContent.length) {
rafRef.current = requestAnimationFrame(animateText)
if (currentIndex < currentContent.length) {
const newDisplayed = currentContent.slice(0, currentIndex + 1)
setDisplayedContent(newDisplayed)
indexRef.current = currentIndex + 1
timeoutRef.current = setTimeout(animateText, CHARACTER_DELAY)
} else {
isAnimatingRef.current = false
}
}

rafRef.current = requestAnimationFrame(animateText)
} else if (indexRef.current < content.length && isAnimatingRef.current) {
// Animation already running, it will pick up new content automatically
if (!isAnimatingRef.current) {
if (timeoutRef.current) {
clearTimeout(timeoutRef.current)
}
isAnimatingRef.current = true
animateText()
}
}
} else {
// Streaming ended - show full content immediately
if (rafRef.current) {
cancelAnimationFrame(rafRef.current)
if (timeoutRef.current) {
clearTimeout(timeoutRef.current)
}
setDisplayedContent(content)
indexRef.current = content.length
@@ -153,8 +95,8 @@
}

return () => {
if (rafRef.current) {
cancelAnimationFrame(rafRef.current)
if (timeoutRef.current) {
clearTimeout(timeoutRef.current)
}
isAnimatingRef.current = false
}

@@ -46,12 +46,14 @@ interface SmoothThinkingTextProps {
 */
const SmoothThinkingText = memo(
({ content, isStreaming }: SmoothThinkingTextProps) => {
const [displayedContent, setDisplayedContent] = useState('')
// Initialize with full content when not streaming to avoid flash on page load
const [displayedContent, setDisplayedContent] = useState(() => (isStreaming ? '' : content))
const [showGradient, setShowGradient] = useState(false)
const contentRef = useRef(content)
const textRef = useRef<HTMLDivElement>(null)
const rafRef = useRef<number | null>(null)
const indexRef = useRef(0)
// Initialize index based on streaming state
const indexRef = useRef(isStreaming ? 0 : content.length)
const lastFrameTimeRef = useRef<number>(0)
const isAnimatingRef = useRef(false)

@@ -1952,7 +1952,12 @@ export function ToolCall({ toolCall: toolCallProp, toolCallId, onStateChange }:
}, [params])

// Skip rendering some internal tools
if (toolCall.name === 'checkoff_todo' || toolCall.name === 'mark_todo_in_progress') return null
if (
toolCall.name === 'checkoff_todo' ||
toolCall.name === 'mark_todo_in_progress' ||
toolCall.name === 'tool_search_tool_regex'
)
return null

// Special rendering for subagent tools - show as thinking text with tool calls at top level
const SUBAGENT_TOOLS = [

@@ -32,13 +32,6 @@ function getModelIconComponent(modelValue: string) {
return <IconComponent className='h-3.5 w-3.5' />
}

/**
 * Checks if a model should display the MAX badge
 */
function isMaxModel(modelValue: string): boolean {
return modelValue === 'claude-4.5-sonnet' || modelValue === 'claude-4.5-opus'
}

/**
 * Model selector dropdown for choosing AI model.
 * Displays model icon and label.
@@ -139,11 +132,6 @@ export function ModelSelector({ selectedModel, isNearTop, onModelSelect }: Model
>
{getModelIconComponent(option.value)}
<span>{option.label}</span>
{isMaxModel(option.value) && (
<Badge size='sm' className='ml-auto'>
MAX
</Badge>
)}
</PopoverItem>
))}
</PopoverScrollArea>

@@ -238,8 +238,8 @@ export const MODEL_OPTIONS = [
{ value: 'claude-4.5-opus', label: 'Claude 4.5 Opus' },
{ value: 'claude-4.5-sonnet', label: 'Claude 4.5 Sonnet' },
{ value: 'claude-4.5-haiku', label: 'Claude 4.5 Haiku' },
{ value: 'gpt-5.1-codex', label: 'GPT 5.1 Codex' },
{ value: 'gpt-5.1-medium', label: 'GPT 5.1 Medium' },
{ value: 'gpt-5.2-codex', label: 'GPT 5.2 Codex' },
{ value: 'gpt-5.2-pro', label: 'GPT 5.2 Pro' },
{ value: 'gemini-3-pro', label: 'Gemini 3 Pro' },
] as const

@@ -52,7 +52,10 @@ function isDefaultDescription(desc: string | null | undefined, workflowName: str
if (!desc) return true
const normalized = desc.toLowerCase().trim()
return (
normalized === '' || normalized === 'new workflow' || normalized === workflowName.toLowerCase()
normalized === '' ||
normalized === 'new workflow' ||
normalized === 'your first workflow - start building here!' ||
normalized === workflowName.toLowerCase()
)
}

@@ -685,9 +688,31 @@ console.log(data);`
{/* Endpoint URL (shown when agent exists) */}
{existingAgent && endpoint && (
<div>
<Label className='mb-[6.5px] block pl-[2px] font-medium text-[13px] text-[var(--text-primary)]'>
URL
</Label>
<div className='mb-[6.5px] flex items-center justify-between'>
<Label className='block pl-[2px] font-medium text-[13px] text-[var(--text-primary)]'>
URL
</Label>
<Tooltip.Root>
<Tooltip.Trigger asChild>
<Button
type='button'
variant='ghost'
onClick={() => {
navigator.clipboard.writeText(endpoint)
setUrlCopied(true)
setTimeout(() => setUrlCopied(false), 2000)
}}
aria-label='Copy URL'
className='!p-1.5 -my-1.5'
>
{urlCopied ? <Check className='h-3 w-3' /> : <Clipboard className='h-3 w-3' />}
</Button>
</Tooltip.Trigger>
<Tooltip.Content>
<span>{urlCopied ? 'Copied' : 'Copy'}</span>
</Tooltip.Content>
</Tooltip.Root>
</div>
<div className='relative flex items-stretch overflow-hidden rounded-[4px] border border-[var(--border-1)]'>
<div className='flex items-center whitespace-nowrap bg-[var(--surface-5)] pr-[6px] pl-[8px] font-medium text-[var(--text-secondary)] text-sm dark:bg-[var(--surface-5)]'>
{baseUrl.replace(/^https?:\/\//, '')}/api/a2a/serve/
@@ -696,30 +721,8 @@ console.log(data);`
<Input
value={existingAgent.id}
readOnly
className='rounded-none border-0 pr-[32px] pl-0 text-[var(--text-tertiary)] shadow-none'
className='rounded-none border-0 pl-0 text-[var(--text-tertiary)] shadow-none'
/>
<Tooltip.Root>
<Tooltip.Trigger asChild>
<button
type='button'
onClick={() => {
navigator.clipboard.writeText(endpoint)
setUrlCopied(true)
setTimeout(() => setUrlCopied(false), 2000)
}}
className='-translate-y-1/2 absolute top-1/2 right-2'
>
{urlCopied ? (
<Check className='h-3 w-3 text-[var(--brand-tertiary-2)]' />
) : (
<Clipboard className='h-3 w-3 text-[var(--text-tertiary)]' />
)}
</button>
</Tooltip.Trigger>
<Tooltip.Content>
<span>{urlCopied ? 'Copied' : 'Copy'}</span>
</Tooltip.Content>
</Tooltip.Root>
</div>
</div>
<p className='mt-[6.5px] text-[11px] text-[var(--text-secondary)]'>

@@ -415,7 +415,7 @@ export function ChatDeploy({
>
Cancel
</Button>
<Button variant='destructive' onClick={handleDelete} disabled={isDeleting}>
<Button variant='default' onClick={handleDelete} disabled={isDeleting}>
{isDeleting ? 'Deleting...' : 'Delete'}
</Button>
</ModalFooter>
@@ -532,7 +532,8 @@ function IdentifierInput({
</div>
) : (
isValid &&
value && (
value &&
value !== originalIdentifier && (
<Tooltip.Root>
<Tooltip.Trigger asChild>
<div className='-translate-y-1/2 absolute top-1/2 right-2'>

@@ -138,10 +138,12 @@ export function McpDeploy({

const [toolName, setToolName] = useState(() => sanitizeToolName(workflowName))
const [toolDescription, setToolDescription] = useState(() => {
const normalizedDesc = workflowDescription?.toLowerCase().trim()
const isDefaultDescription =
!workflowDescription ||
workflowDescription === workflowName ||
workflowDescription.toLowerCase() === 'new workflow'
normalizedDesc === 'new workflow' ||
normalizedDesc === 'your first workflow - start building here!'

return isDefaultDescription ? '' : workflowDescription
})
@@ -193,10 +195,12 @@ export function McpDeploy({
setToolName(toolInfo.tool.toolName)

const loadedDescription = toolInfo.tool.toolDescription || ''
const normalizedLoadedDesc = loadedDescription.toLowerCase().trim()
const isDefaultDescription =
!loadedDescription ||
loadedDescription === workflowName ||
loadedDescription.toLowerCase() === 'new workflow'
normalizedLoadedDesc === 'new workflow' ||
normalizedLoadedDesc === 'your first workflow - start building here!'
setToolDescription(isDefaultDescription ? '' : loadedDescription)

const schema = toolInfo.tool.parameterSchema as Record<string, unknown> | undefined

@@ -663,12 +663,6 @@ export function DeployModal({
</ModalTabsList>

<ModalBody className='min-h-0 flex-1'>
{apiDeployError && (
<div className='mb-3 rounded-[4px] border border-destructive/30 bg-destructive/10 p-3 text-destructive text-sm'>
<div className='font-semibold'>Deployment Error</div>
<div>{apiDeployError}</div>
</div>
)}
<ModalTabsContent value='general'>
<GeneralDeploy
workflowId={workflowId}
@@ -740,7 +734,7 @@ export function DeployModal({
)}
</ModalTabsContent> */}

<ModalTabsContent value='mcp'>
<ModalTabsContent value='mcp' className='h-full'>
{workflowId && (
<McpDeploy
workflowId={workflowId}
@@ -806,7 +800,7 @@ export function DeployModal({
{chatExists && (
<Button
type='button'
variant='destructive'
variant='default'
onClick={handleChatDelete}
disabled={chatSubmitting}
>

@@ -32,4 +32,5 @@ export { Table } from './table/table'
export { Text } from './text/text'
export { TimeInput } from './time-input/time-input'
export { ToolInput } from './tool-input/tool-input'
export { TriggerSave } from './trigger-save/trigger-save'
export { VariablesInput } from './variables-input/variables-input'

@@ -125,10 +125,16 @@ export function FieldFormat({
}

/**
 * Removes a field by ID, preventing removal of the last field
 * Removes a field by ID, or clears it if it's the last field
 */
const removeField = (id: string) => {
if (isReadOnly || fields.length === 1) return
if (isReadOnly) return

if (fields.length === 1) {
setStoreValue([createDefaultField()])
return
}

setStoreValue(fields.filter((field) => field.id !== id))
}

@@ -273,7 +279,7 @@ export function FieldFormat({
<Button
variant='ghost'
onClick={() => removeField(field.id)}
disabled={isReadOnly || fields.length === 1}
disabled={isReadOnly}
className='h-auto p-0 text-[var(--text-error)] hover:text-[var(--text-error)]'
>
<Trash className='h-[14px] w-[14px]' />

@@ -0,0 +1,348 @@
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { createLogger } from '@sim/logger'
import {
Button,
Modal,
ModalBody,
ModalContent,
ModalFooter,
ModalHeader,
} from '@/components/emcn/components'
import { Trash } from '@/components/emcn/icons/trash'
import { cn } from '@/lib/core/utils/cn'
import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'
import { useTriggerConfigAggregation } from '@/hooks/use-trigger-config-aggregation'
import { useWebhookManagement } from '@/hooks/use-webhook-management'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { getTrigger, isTriggerValid } from '@/triggers'
import { SYSTEM_SUBBLOCK_IDS } from '@/triggers/constants'

const logger = createLogger('TriggerSave')

interface TriggerSaveProps {
blockId: string
subBlockId: string
triggerId?: string
isPreview?: boolean
disabled?: boolean
}

type SaveStatus = 'idle' | 'saving' | 'saved' | 'error'

export function TriggerSave({
blockId,
subBlockId,
triggerId,
isPreview = false,
disabled = false,
}: TriggerSaveProps) {
const [saveStatus, setSaveStatus] = useState<SaveStatus>('idle')
const [errorMessage, setErrorMessage] = useState<string | null>(null)
const [deleteStatus, setDeleteStatus] = useState<'idle' | 'deleting'>('idle')
const [showDeleteDialog, setShowDeleteDialog] = useState(false)

const effectiveTriggerId = useMemo(() => {
if (triggerId && isTriggerValid(triggerId)) {
return triggerId
}
const selectedTriggerId = useSubBlockStore.getState().getValue(blockId, 'selectedTriggerId')
if (typeof selectedTriggerId === 'string' && isTriggerValid(selectedTriggerId)) {
return selectedTriggerId
}
return triggerId
}, [blockId, triggerId])

const { collaborativeSetSubblockValue } = useCollaborativeWorkflow()

const { webhookId, saveConfig, deleteConfig, isLoading } = useWebhookManagement({
blockId,
triggerId: effectiveTriggerId,
isPreview,
useWebhookUrl: true, // to store the webhook url in the store
})

const triggerConfig = useSubBlockStore((state) => state.getValue(blockId, 'triggerConfig'))
const triggerCredentials = useSubBlockStore((state) =>
state.getValue(blockId, 'triggerCredentials')
)

const triggerDef =
effectiveTriggerId && isTriggerValid(effectiveTriggerId) ? getTrigger(effectiveTriggerId) : null

const validateRequiredFields = useCallback(
(
configToCheck: Record<string, any> | null | undefined
): { valid: boolean; missingFields: string[] } => {
if (!triggerDef) {
return { valid: true, missingFields: [] }
}

const missingFields: string[] = []

triggerDef.subBlocks
.filter(
(sb) => sb.required && sb.mode === 'trigger' && !SYSTEM_SUBBLOCK_IDS.includes(sb.id)
)
.forEach((subBlock) => {
if (subBlock.id === 'triggerCredentials') {
if (!triggerCredentials) {
missingFields.push(subBlock.title || 'Credentials')
}
} else {
const value = configToCheck?.[subBlock.id]
if (value === undefined || value === null || value === '') {
missingFields.push(subBlock.title || subBlock.id)
}
}
})

return {
valid: missingFields.length === 0,
missingFields,
}
},
[triggerDef, triggerCredentials]
)

const requiredSubBlockIds = useMemo(() => {
if (!triggerDef) return []
return triggerDef.subBlocks
.filter((sb) => sb.required && sb.mode === 'trigger' && !SYSTEM_SUBBLOCK_IDS.includes(sb.id))
.map((sb) => sb.id)
}, [triggerDef])

const subscribedSubBlockValues = useSubBlockStore(
useCallback(
(state) => {
if (!triggerDef) return {}
const values: Record<string, any> = {}
requiredSubBlockIds.forEach((subBlockId) => {
const value = state.getValue(blockId, subBlockId)
if (value !== null && value !== undefined && value !== '') {
values[subBlockId] = value
}
})
return values
},
[blockId, triggerDef, requiredSubBlockIds]
)
)

const previousValuesRef = useRef<Record<string, any>>({})
const validationTimeoutRef = useRef<NodeJS.Timeout | null>(null)

useEffect(() => {
if (saveStatus !== 'error' || !triggerDef) {
previousValuesRef.current = subscribedSubBlockValues
return
}

const hasChanges = Object.keys(subscribedSubBlockValues).some(
(key) =>
previousValuesRef.current[key] !== (subscribedSubBlockValues as Record<string, any>)[key]
)

if (!hasChanges) {
return
}

if (validationTimeoutRef.current) {
clearTimeout(validationTimeoutRef.current)
}

validationTimeoutRef.current = setTimeout(() => {
const aggregatedConfig = useTriggerConfigAggregation(blockId, effectiveTriggerId)

if (aggregatedConfig) {
useSubBlockStore.getState().setValue(blockId, 'triggerConfig', aggregatedConfig)
}

const validation = validateRequiredFields(aggregatedConfig)

if (validation.valid) {
setErrorMessage(null)
setSaveStatus('idle')
logger.debug('Error cleared after validation passed', {
blockId,
triggerId: effectiveTriggerId,
})
} else {
setErrorMessage(`Missing required fields: ${validation.missingFields.join(', ')}`)
logger.debug('Error message updated', {
blockId,
triggerId: effectiveTriggerId,
missingFields: validation.missingFields,
})
}

previousValuesRef.current = subscribedSubBlockValues
}, 300)

return () => {
if (validationTimeoutRef.current) {
clearTimeout(validationTimeoutRef.current)
}
}
}, [
blockId,
effectiveTriggerId,
triggerDef,
subscribedSubBlockValues,
saveStatus,
validateRequiredFields,
])

const handleSave = async () => {
if (isPreview || disabled) return

setSaveStatus('saving')
setErrorMessage(null)

try {
const aggregatedConfig = useTriggerConfigAggregation(blockId, effectiveTriggerId)

if (aggregatedConfig) {
useSubBlockStore.getState().setValue(blockId, 'triggerConfig', aggregatedConfig)
logger.debug('Stored aggregated trigger config', {
blockId,
triggerId: effectiveTriggerId,
aggregatedConfig,
})
}

const validation = validateRequiredFields(aggregatedConfig)
if (!validation.valid) {
setErrorMessage(`Missing required fields: ${validation.missingFields.join(', ')}`)
setSaveStatus('error')
return
}

const success = await saveConfig()
if (!success) {
throw new Error('Save config returned false')
}

setSaveStatus('saved')
setErrorMessage(null)

const savedWebhookId = useSubBlockStore.getState().getValue(blockId, 'webhookId')
const savedTriggerPath = useSubBlockStore.getState().getValue(blockId, 'triggerPath')
const savedTriggerId = useSubBlockStore.getState().getValue(blockId, 'triggerId')
const savedTriggerConfig = useSubBlockStore.getState().getValue(blockId, 'triggerConfig')

collaborativeSetSubblockValue(blockId, 'webhookId', savedWebhookId)
collaborativeSetSubblockValue(blockId, 'triggerPath', savedTriggerPath)
collaborativeSetSubblockValue(blockId, 'triggerId', savedTriggerId)
collaborativeSetSubblockValue(blockId, 'triggerConfig', savedTriggerConfig)

setTimeout(() => {
setSaveStatus('idle')
}, 2000)

logger.info('Trigger configuration saved successfully', {
blockId,
triggerId: effectiveTriggerId,
hasWebhookId: !!webhookId,
})
} catch (error: any) {
setSaveStatus('error')
setErrorMessage(error.message || 'An error occurred while saving.')
logger.error('Error saving trigger configuration', { error })
}
}

const handleDeleteClick = () => {
if (isPreview || disabled || !webhookId) return
setShowDeleteDialog(true)
}

const handleDeleteConfirm = async () => {
setShowDeleteDialog(false)
setDeleteStatus('deleting')
setErrorMessage(null)

try {
const success = await deleteConfig()

if (success) {
setDeleteStatus('idle')
setSaveStatus('idle')
setErrorMessage(null)

collaborativeSetSubblockValue(blockId, 'triggerPath', '')
collaborativeSetSubblockValue(blockId, 'webhookId', null)
collaborativeSetSubblockValue(blockId, 'triggerConfig', null)

logger.info('Trigger configuration deleted successfully', {
blockId,
triggerId: effectiveTriggerId,
})
} else {
setDeleteStatus('idle')
setErrorMessage('Failed to delete trigger configuration.')
logger.error('Failed to delete trigger configuration')
}
} catch (error: any) {
setDeleteStatus('idle')
setErrorMessage(error.message || 'An error occurred while deleting.')
logger.error('Error deleting trigger configuration', { error })
}
}

if (isPreview) {
return null
}

const isProcessing = saveStatus === 'saving' || deleteStatus === 'deleting' || isLoading

return (
<div id={`${blockId}-${subBlockId}`}>
<div className='flex gap-2'>
<Button
variant='default'
onClick={handleSave}
disabled={disabled || isProcessing}
className={cn(
'flex-1',
saveStatus === 'saved' && '!bg-green-600 !text-white hover:!bg-green-700',
saveStatus === 'error' && '!bg-red-600 !text-white hover:!bg-red-700'
)}
>
{saveStatus === 'saving' && 'Saving...'}
{saveStatus === 'saved' && 'Saved'}
{saveStatus === 'error' && 'Error'}
{saveStatus === 'idle' && (webhookId ? 'Update Configuration' : 'Save Configuration')}
</Button>

{webhookId && (
<Button variant='default' onClick={handleDeleteClick} disabled={disabled || isProcessing}>
<Trash className='h-[14px] w-[14px]' />
</Button>
)}
</div>

{errorMessage && <p className='mt-2 text-[12px] text-[var(--text-error)]'>{errorMessage}</p>}

<Modal open={showDeleteDialog} onOpenChange={setShowDeleteDialog}>
<ModalContent size='sm'>
<ModalHeader>Delete Trigger</ModalHeader>
<ModalBody>
<p className='text-[12px] text-[var(--text-secondary)]'>
Are you sure you want to delete this trigger configuration? This will remove the
webhook and stop all incoming triggers.{' '}
<span className='text-[var(--text-error)]'>This action cannot be undone.</span>
</p>
</ModalBody>
<ModalFooter>
<Button variant='active' onClick={() => setShowDeleteDialog(false)}>
Cancel
</Button>
<Button variant='destructive' onClick={handleDeleteConfirm}>
Delete
</Button>
</ModalFooter>
</ModalContent>
</Modal>
</div>
)
}
@@ -38,6 +38,7 @@ import {
Text,
TimeInput,
ToolInput,
TriggerSave,
VariablesInput,
} from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components'
import { useDependsOnGate } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-depends-on-gate'
@@ -853,6 +854,17 @@ function SubBlockComponent({
}
/>
)
case 'trigger-save':
return (
<TriggerSave
blockId={blockId}
subBlockId={config.id}
triggerId={config.triggerId}
isPreview={isPreview}
disabled={disabled}
/>
)

case 'messages-input':
return (
<MessagesInput

@@ -414,7 +414,7 @@ export function ApiKeys({ onOpenChange, registerCloseHandler }: ApiKeysProps) {
Cancel
</Button>
<Button
variant='ghost'
variant='destructive'
onClick={handleDeleteKey}
disabled={deleteApiKeyMutation.isPending}
>

@@ -268,14 +268,7 @@ export function ContextMenu({
height: '1px',
}}
/>
<PopoverContent
ref={menuRef}
align='start'
side='bottom'
sideOffset={4}
onPointerDownOutside={(e) => e.preventDefault()}
onInteractOutside={(e) => e.preventDefault()}
>
<PopoverContent ref={menuRef} align='start' side='bottom' sideOffset={4}>
{/* Back button - shown only when in a folder */}
<PopoverBackButton />

@@ -18,6 +18,17 @@ const TREE_SPACING = {
INDENT_PER_LEVEL: 20,
} as const

function compareByOrder<T extends { sortOrder: number; createdAt?: Date; id: string }>(
a: T,
b: T
): number {
if (a.sortOrder !== b.sortOrder) return a.sortOrder - b.sortOrder
const timeA = a.createdAt?.getTime() ?? 0
const timeB = b.createdAt?.getTime() ?? 0
if (timeA !== timeB) return timeA - timeB
return a.id.localeCompare(b.id)
}

interface WorkflowListProps {
regularWorkflows: WorkflowMetadata[]
isLoading?: boolean
@@ -97,7 +108,7 @@ export function WorkflowList({
{} as Record<string, WorkflowMetadata[]>
)
for (const folderId of Object.keys(grouped)) {
grouped[folderId].sort((a, b) => a.sortOrder - b.sortOrder)
grouped[folderId].sort(compareByOrder)
}
return grouped
}, [regularWorkflows])
@@ -208,6 +219,7 @@ export function WorkflowList({
type: 'folder' | 'workflow'
id: string
sortOrder: number
createdAt?: Date
data: FolderTreeNode | WorkflowMetadata
}> = []
for (const childFolder of folder.children) {
@@ -215,6 +227,7 @@ export function WorkflowList({
type: 'folder',
id: childFolder.id,
sortOrder: childFolder.sortOrder,
createdAt: childFolder.createdAt,
data: childFolder,
})
}
@@ -223,10 +236,11 @@ export function WorkflowList({
type: 'workflow',
id: workflow.id,
sortOrder: workflow.sortOrder,
createdAt: workflow.createdAt,
data: workflow,
})
}
childItems.sort((a, b) => a.sortOrder - b.sortOrder)
childItems.sort(compareByOrder)

return (
<div key={folder.id} className='relative'>
@@ -294,20 +308,28 @@ export function WorkflowList({
type: 'folder' | 'workflow'
id: string
sortOrder: number
createdAt?: Date
data: FolderTreeNode | WorkflowMetadata
}> = []
for (const folder of folderTree) {
items.push({ type: 'folder', id: folder.id, sortOrder: folder.sortOrder, data: folder })
items.push({
type: 'folder',
id: folder.id,
sortOrder: folder.sortOrder,
createdAt: folder.createdAt,
data: folder,
})
}
for (const workflow of rootWorkflows) {
items.push({
type: 'workflow',
id: workflow.id,
sortOrder: workflow.sortOrder,
createdAt: workflow.createdAt,
data: workflow,
})
}
return items.sort((a, b) => a.sortOrder - b.sortOrder)
return items.sort(compareByOrder)
}, [folderTree, rootWorkflows])

const hasRootItems = rootItems.length > 0

@@ -211,10 +211,11 @@ export function WorkspaceHeader({
}

/**
* Close context menu
* Close context menu and the workspace dropdown
*/
const closeContextMenu = () => {
setIsContextMenuOpen(false)
setIsWorkspaceMenuOpen(false)
}

/**

@@ -133,7 +133,20 @@ export function useDragDrop() {
[]
)

type SiblingItem = { type: 'folder' | 'workflow'; id: string; sortOrder: number }
type SiblingItem = {
type: 'folder' | 'workflow'
id: string
sortOrder: number
createdAt: Date
}

const compareSiblingItems = (a: SiblingItem, b: SiblingItem): number => {
if (a.sortOrder !== b.sortOrder) return a.sortOrder - b.sortOrder
const timeA = a.createdAt.getTime()
const timeB = b.createdAt.getTime()
if (timeA !== timeB) return timeA - timeB
return a.id.localeCompare(b.id)
}

const getDestinationFolderId = useCallback((indicator: DropIndicator): string | null => {
return indicator.position === 'inside'
@@ -202,11 +215,21 @@ export function useDragDrop() {
return [
...Object.values(currentFolders)
.filter((f) => f.parentId === folderId)
.map((f) => ({ type: 'folder' as const, id: f.id, sortOrder: f.sortOrder })),
.map((f) => ({
type: 'folder' as const,
id: f.id,
sortOrder: f.sortOrder,
createdAt: f.createdAt,
})),
...Object.values(currentWorkflows)
.filter((w) => w.folderId === folderId)
.map((w) => ({ type: 'workflow' as const, id: w.id, sortOrder: w.sortOrder })),
].sort((a, b) => a.sortOrder - b.sortOrder)
.map((w) => ({
type: 'workflow' as const,
id: w.id,
sortOrder: w.sortOrder,
createdAt: w.createdAt,
})),
].sort(compareSiblingItems)
}, [])

const setNormalizedDropIndicator = useCallback(
@@ -299,8 +322,9 @@ export function useDragDrop() {
type: 'workflow' as const,
id,
sortOrder: currentWorkflows[id]?.sortOrder ?? 0,
createdAt: currentWorkflows[id]?.createdAt ?? new Date(),
}))
.sort((a, b) => a.sortOrder - b.sortOrder)
.sort(compareSiblingItems)

const insertAt = calculateInsertIndex(remaining, indicator)

@@ -369,7 +393,12 @@ export function useDragDrop() {

const newOrder: SiblingItem[] = [
...remaining.slice(0, insertAt),
{ type: 'folder', id: draggedFolderId, sortOrder: 0 },
{
type: 'folder',
id: draggedFolderId,
sortOrder: 0,
createdAt: draggedFolder?.createdAt ?? new Date(),
},
...remaining.slice(insertAt),
]
|
||||
|
||||
@@ -10,6 +10,11 @@ import { createLogger } from '@sim/logger'
|
||||
import { task } from '@trigger.dev/sdk'
|
||||
import { and, eq, isNull, lte, or, sql } from 'drizzle-orm'
|
||||
import { v4 as uuidv4 } from 'uuid'
|
||||
import {
|
||||
type EmailRateLimitsData,
|
||||
type EmailUsageData,
|
||||
renderWorkflowNotificationEmail,
|
||||
} from '@/components/emails'
|
||||
import { checkUsageStatus } from '@/lib/billing/calculations/usage-monitor'
|
||||
import { getHighestPrioritySubscription } from '@/lib/billing/core/subscription'
|
||||
import { RateLimiter } from '@/lib/core/rate-limiter'
|
||||
@@ -45,9 +50,9 @@ interface NotificationPayload {
|
||||
totalDurationMs: number
|
||||
cost?: Record<string, unknown>
|
||||
finalOutput?: unknown
|
||||
traceSpans?: unknown[]
|
||||
rateLimits?: Record<string, unknown>
|
||||
usage?: Record<string, unknown>
|
||||
traceSpans?: TraceSpan[]
|
||||
rateLimits?: EmailRateLimitsData
|
||||
usage?: EmailUsageData
|
||||
}
|
||||
}
|
||||
|
||||
@@ -94,8 +99,13 @@ async function buildPayload(
|
||||
payload.data.finalOutput = executionData.finalOutput
|
||||
}
|
||||
|
||||
if (subscription.includeTraceSpans && executionData.traceSpans) {
|
||||
payload.data.traceSpans = executionData.traceSpans as unknown[]
|
||||
// Trace spans only included for webhooks (too large for email/Slack)
|
||||
if (
|
||||
subscription.includeTraceSpans &&
|
||||
subscription.notificationType === 'webhook' &&
|
||||
executionData.traceSpans
|
||||
) {
|
||||
payload.data.traceSpans = executionData.traceSpans as TraceSpan[]
|
||||
}
|
||||
|
||||
if (subscription.includeRateLimits && userId) {
|
||||
@@ -251,18 +261,6 @@ function formatAlertReason(alertConfig: AlertConfig): string {
|
||||
}
|
||||
}
|
||||
|
||||
function formatJsonForEmail(data: unknown, label: string): string {
|
||||
if (!data) return ''
|
||||
const json = JSON.stringify(data, null, 2)
|
||||
const escapedJson = json.replace(/</g, '&lt;').replace(/>/g, '&gt;')
|
||||
return `
|
||||
<div style="margin-top: 20px;">
|
||||
<h3 style="color: #1a1a1a; font-size: 14px; margin-bottom: 8px;">${label}</h3>
|
||||
<pre style="background: #f5f5f5; padding: 12px; border-radius: 6px; overflow-x: auto; font-size: 12px; color: #333; white-space: pre-wrap; word-wrap: break-word;">${escapedJson}</pre>
|
||||
</div>
|
||||
`
|
||||
}
|
||||
|
||||
async function deliverEmail(
|
||||
subscription: typeof workspaceNotificationSubscription.$inferSelect,
|
||||
payload: NotificationPayload,
|
||||
@@ -275,8 +273,7 @@ async function deliverEmail(
|
||||
const isError = payload.data.status !== 'success'
|
||||
const statusText = isError ? 'Error' : 'Success'
|
||||
const logUrl = buildLogUrl(subscription.workspaceId, payload.data.executionId)
|
||||
const baseUrl = getBaseUrl()
|
||||
const alertReason = alertConfig ? formatAlertReason(alertConfig) : null
|
||||
const alertReason = alertConfig ? formatAlertReason(alertConfig) : undefined
|
||||
|
||||
// Build subject line
|
||||
const subject = alertReason
|
||||
@@ -285,113 +282,36 @@ async function deliverEmail(
|
||||
? `Error Alert: ${payload.data.workflowName}`
|
||||
: `Workflow Completed: ${payload.data.workflowName}`
|
||||
|
||||
let includedDataHtml = ''
|
||||
// Build plain text for fallback
|
||||
let includedDataText = ''
|
||||
|
||||
if (payload.data.finalOutput) {
|
||||
includedDataHtml += formatJsonForEmail(payload.data.finalOutput, 'Final Output')
|
||||
includedDataText += `\n\nFinal Output:\n${JSON.stringify(payload.data.finalOutput, null, 2)}`
|
||||
}
|
||||
|
||||
if (
|
||||
payload.data.traceSpans &&
|
||||
Array.isArray(payload.data.traceSpans) &&
|
||||
payload.data.traceSpans.length > 0
|
||||
) {
|
||||
includedDataHtml += formatJsonForEmail(payload.data.traceSpans, 'Trace Spans')
|
||||
includedDataText += `\n\nTrace Spans:\n${JSON.stringify(payload.data.traceSpans, null, 2)}`
|
||||
}
|
||||
|
||||
if (payload.data.rateLimits) {
|
||||
includedDataHtml += formatJsonForEmail(payload.data.rateLimits, 'Rate Limits')
|
||||
includedDataText += `\n\nRate Limits:\n${JSON.stringify(payload.data.rateLimits, null, 2)}`
|
||||
}
|
||||
|
||||
if (payload.data.usage) {
|
||||
includedDataHtml += formatJsonForEmail(payload.data.usage, 'Usage Data')
|
||||
includedDataText += `\n\nUsage Data:\n${JSON.stringify(payload.data.usage, null, 2)}`
|
||||
}
|
||||
|
||||
// Render the email using the shared template
|
||||
const html = await renderWorkflowNotificationEmail({
|
||||
workflowName: payload.data.workflowName || 'Unknown Workflow',
|
||||
status: payload.data.status,
|
||||
trigger: payload.data.trigger,
|
||||
duration: formatDuration(payload.data.totalDurationMs),
|
||||
cost: formatCost(payload.data.cost),
|
||||
logUrl,
|
||||
alertReason,
|
||||
finalOutput: payload.data.finalOutput,
|
||||
rateLimits: payload.data.rateLimits,
|
||||
usageData: payload.data.usage,
|
||||
})
|
||||
|
||||
const result = await sendEmail({
|
||||
to: subscription.emailRecipients,
|
||||
subject,
|
||||
html: `
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
</head>
|
||||
<body style="background-color: #f5f5f7; font-family: HelveticaNeue, Helvetica, Arial, sans-serif; margin: 0; padding: 0;">
|
||||
<div style="max-width: 580px; margin: 30px auto; background-color: #ffffff; border-radius: 5px; overflow: hidden;">
|
||||
<!-- Header with Logo -->
|
||||
<div style="padding: 30px 0; text-align: center;">
|
||||
<img src="${baseUrl}/logo/reverse/text/medium.png" width="114" alt="Sim Studio" style="margin: 0 auto;" />
|
||||
</div>
|
||||
|
||||
<!-- Section Border -->
|
||||
<div style="display: flex; width: 100%;">
|
||||
<div style="border-bottom: 1px solid #eeeeee; width: 249px;"></div>
|
||||
<div style="border-bottom: 1px solid #6F3DFA; width: 102px;"></div>
|
||||
<div style="border-bottom: 1px solid #eeeeee; width: 249px;"></div>
|
||||
</div>
|
||||
|
||||
<!-- Content -->
|
||||
<div style="padding: 5px 30px 20px 30px;">
|
||||
<h2 style="font-size: 20px; color: #333333; margin: 20px 0;">
|
||||
${alertReason ? 'Alert Triggered' : isError ? 'Workflow Execution Failed' : 'Workflow Execution Completed'}
|
||||
</h2>
|
||||
${alertReason ? `<p style="color: #d97706; background: #fef3c7; padding: 12px; border-radius: 6px; margin-bottom: 20px; font-size: 14px;"><strong>Reason:</strong> ${alertReason}</p>` : ''}
|
||||
|
||||
<table style="width: 100%; border-collapse: collapse; margin-bottom: 20px;">
|
||||
<tr style="border-bottom: 1px solid #eee;">
|
||||
<td style="padding: 12px 0; color: #666; width: 140px;">Workflow</td>
|
||||
<td style="padding: 12px 0; color: #333; font-weight: 500;">${payload.data.workflowName}</td>
|
||||
</tr>
|
||||
<tr style="border-bottom: 1px solid #eee;">
|
||||
<td style="padding: 12px 0; color: #666;">Status</td>
|
||||
<td style="padding: 12px 0; color: ${isError ? '#ef4444' : '#22c55e'}; font-weight: 500;">${statusText}</td>
|
||||
</tr>
|
||||
<tr style="border-bottom: 1px solid #eee;">
|
||||
<td style="padding: 12px 0; color: #666;">Trigger</td>
|
||||
<td style="padding: 12px 0; color: #333;">${payload.data.trigger}</td>
|
||||
</tr>
|
||||
<tr style="border-bottom: 1px solid #eee;">
|
||||
<td style="padding: 12px 0; color: #666;">Duration</td>
|
||||
<td style="padding: 12px 0; color: #333;">${formatDuration(payload.data.totalDurationMs)}</td>
|
||||
</tr>
|
||||
<tr style="border-bottom: 1px solid #eee;">
|
||||
<td style="padding: 12px 0; color: #666;">Cost</td>
|
||||
<td style="padding: 12px 0; color: #333;">${formatCost(payload.data.cost)}</td>
|
||||
</tr>
|
||||
</table>
|
||||
|
||||
<a href="${logUrl}" style="display: inline-block; background-color: #6F3DFA; color: #ffffff; font-weight: bold; font-size: 16px; padding: 12px 30px; border-radius: 5px; text-decoration: none; text-align: center; margin: 20px 0;">
|
||||
View Execution Log →
|
||||
</a>
|
||||
|
||||
${includedDataHtml}
|
||||
|
||||
<p style="font-size: 16px; line-height: 1.5; color: #333333; margin-top: 30px;">
|
||||
Best regards,<br />
|
||||
The Sim Team
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Footer -->
|
||||
<div style="max-width: 580px; margin: 0 auto; padding: 20px 0; text-align: center;">
|
||||
<p style="font-size: 12px; color: #706a7b; margin: 8px 0 0 0;">
|
||||
© ${new Date().getFullYear()} Sim Studio, All Rights Reserved
|
||||
</p>
|
||||
<p style="font-size: 12px; color: #706a7b; margin: 8px 0 0 0;">
|
||||
<a href="${baseUrl}/privacy" style="color: #706a7b; text-decoration: underline;">Privacy Policy</a> •
|
||||
<a href="${baseUrl}/terms" style="color: #706a7b; text-decoration: underline;">Terms of Service</a>
|
||||
</p>
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
`,
|
||||
html,
|
||||
text: `${subject}\n${alertReason ? `\nReason: ${alertReason}\n` : ''}\nWorkflow: ${payload.data.workflowName}\nStatus: ${statusText}\nTrigger: ${payload.data.trigger}\nDuration: ${formatDuration(payload.data.totalDurationMs)}\nCost: ${formatCost(payload.data.cost)}\n\nView Log: ${logUrl}${includedDataText}`,
|
||||
emailType: 'notifications',
|
||||
})
|
||||
@@ -479,26 +399,6 @@ async function deliverSlack(
|
||||
})
|
||||
}
|
||||
|
||||
if (
|
||||
payload.data.traceSpans &&
|
||||
Array.isArray(payload.data.traceSpans) &&
|
||||
payload.data.traceSpans.length > 0
|
||||
) {
|
||||
const spansSummary = (payload.data.traceSpans as TraceSpan[])
|
||||
.map((span) => {
|
||||
const status = span.status === 'success' ? '✓' : '✗'
|
||||
return `${status} ${span.name || 'Unknown'} (${formatDuration(span.duration || 0)})`
|
||||
})
|
||||
.join('\n')
|
||||
blocks.push({
|
||||
type: 'section',
|
||||
text: {
|
||||
type: 'mrkdwn',
|
||||
text: `*Trace Spans:*\n\`\`\`${spansSummary}\`\`\``,
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
if (payload.data.rateLimits) {
|
||||
const limitsStr = JSON.stringify(payload.data.rateLimits, null, 2)
|
||||
blocks.push({
|
||||
|
||||
@@ -19,10 +19,10 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
|
||||
title: 'Operation',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'Create Run', id: 'create_run' },
|
||||
{ label: 'Create Runs Batch', id: 'create_runs_batch' },
|
||||
{ label: 'Create Run', id: 'langsmith_create_run' },
|
||||
{ label: 'Create Runs Batch', id: 'langsmith_create_runs_batch' },
|
||||
],
|
||||
value: () => 'create_run',
|
||||
value: () => 'langsmith_create_run',
|
||||
},
|
||||
{
|
||||
id: 'apiKey',
|
||||
@@ -37,15 +37,15 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
|
||||
title: 'Run ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Auto-generated if blank',
|
||||
condition: { field: 'operation', value: 'create_run' },
|
||||
condition: { field: 'operation', value: 'langsmith_create_run' },
|
||||
},
|
||||
{
|
||||
id: 'name',
|
||||
title: 'Name',
|
||||
type: 'short-input',
|
||||
placeholder: 'Run name',
|
||||
required: { field: 'operation', value: 'create_run' },
|
||||
condition: { field: 'operation', value: 'create_run' },
|
||||
required: { field: 'operation', value: 'langsmith_create_run' },
|
||||
condition: { field: 'operation', value: 'langsmith_create_run' },
|
||||
},
|
||||
{
|
||||
id: 'run_type',
|
||||
@@ -61,23 +61,22 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
|
||||
{ label: 'Parser', id: 'parser' },
|
||||
],
|
||||
value: () => 'chain',
|
||||
required: { field: 'operation', value: 'create_run' },
|
||||
condition: { field: 'operation', value: 'create_run' },
|
||||
required: { field: 'operation', value: 'langsmith_create_run' },
|
||||
condition: { field: 'operation', value: 'langsmith_create_run' },
|
||||
},
|
||||
{
|
||||
id: 'start_time',
|
||||
title: 'Start Time',
|
||||
type: 'short-input',
|
||||
placeholder: '2025-01-01T12:00:00Z',
|
||||
condition: { field: 'operation', value: 'create_run' },
|
||||
value: () => new Date().toISOString(),
|
||||
placeholder: 'e.g. 2025-01-01T12:00:00Z (defaults to now)',
|
||||
condition: { field: 'operation', value: 'langsmith_create_run' },
|
||||
},
|
||||
{
|
||||
id: 'end_time',
|
||||
title: 'End Time',
|
||||
type: 'short-input',
|
||||
placeholder: '2025-01-01T12:00:30Z',
|
||||
condition: { field: 'operation', value: 'create_run' },
|
||||
condition: { field: 'operation', value: 'langsmith_create_run' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
@@ -85,7 +84,7 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
|
||||
title: 'Inputs',
|
||||
type: 'code',
|
||||
placeholder: '{"input":"value"}',
|
||||
condition: { field: 'operation', value: 'create_run' },
|
||||
condition: { field: 'operation', value: 'langsmith_create_run' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
@@ -93,7 +92,7 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
|
||||
title: 'Outputs',
|
||||
type: 'code',
|
||||
placeholder: '{"output":"value"}',
|
||||
condition: { field: 'operation', value: 'create_run' },
|
||||
condition: { field: 'operation', value: 'langsmith_create_run' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
@@ -101,7 +100,7 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
|
||||
title: 'Metadata',
|
||||
type: 'code',
|
||||
placeholder: '{"ls_model":"gpt-4"}',
|
||||
condition: { field: 'operation', value: 'create_run' },
|
||||
condition: { field: 'operation', value: 'langsmith_create_run' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
@@ -109,7 +108,7 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
|
||||
title: 'Tags',
|
||||
type: 'code',
|
||||
placeholder: '["production","workflow"]',
|
||||
condition: { field: 'operation', value: 'create_run' },
|
||||
condition: { field: 'operation', value: 'langsmith_create_run' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
@@ -117,7 +116,7 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
|
||||
title: 'Parent Run ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Parent run identifier',
|
||||
condition: { field: 'operation', value: 'create_run' },
|
||||
condition: { field: 'operation', value: 'langsmith_create_run' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
@@ -125,7 +124,7 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
|
||||
title: 'Trace ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Auto-generated if blank',
|
||||
condition: { field: 'operation', value: 'create_run' },
|
||||
condition: { field: 'operation', value: 'langsmith_create_run' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
@@ -133,7 +132,7 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
|
||||
title: 'Session ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Session identifier',
|
||||
condition: { field: 'operation', value: 'create_run' },
|
||||
condition: { field: 'operation', value: 'langsmith_create_run' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
@@ -141,7 +140,7 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
|
||||
title: 'Session Name',
|
||||
type: 'short-input',
|
||||
placeholder: 'Session name',
|
||||
condition: { field: 'operation', value: 'create_run' },
|
||||
condition: { field: 'operation', value: 'langsmith_create_run' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
@@ -149,7 +148,7 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
|
||||
title: 'Status',
|
||||
type: 'short-input',
|
||||
placeholder: 'success',
|
||||
condition: { field: 'operation', value: 'create_run' },
|
||||
condition: { field: 'operation', value: 'langsmith_create_run' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
@@ -157,7 +156,7 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
|
||||
title: 'Error',
|
||||
type: 'long-input',
|
||||
placeholder: 'Error message',
|
||||
condition: { field: 'operation', value: 'create_run' },
|
||||
condition: { field: 'operation', value: 'langsmith_create_run' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
@@ -165,7 +164,7 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
|
||||
title: 'Dotted Order',
|
||||
type: 'short-input',
|
||||
placeholder: 'Defaults to <YYYYMMDDTHHMMSSffffff>Z<id>',
|
||||
condition: { field: 'operation', value: 'create_run' },
|
||||
condition: { field: 'operation', value: 'langsmith_create_run' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
@@ -173,7 +172,7 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
|
||||
title: 'Events',
|
||||
type: 'code',
|
||||
placeholder: '[{"event":"token","value":1}]',
|
||||
condition: { field: 'operation', value: 'create_run' },
|
||||
condition: { field: 'operation', value: 'langsmith_create_run' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
@@ -181,29 +180,36 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
|
||||
title: 'Post Runs',
|
||||
type: 'code',
|
||||
placeholder: '[{"id":"...","name":"...","run_type":"chain","start_time":"..."}]',
|
||||
condition: { field: 'operation', value: 'create_runs_batch' },
|
||||
condition: { field: 'operation', value: 'langsmith_create_runs_batch' },
|
||||
wandConfig: {
|
||||
enabled: true,
|
||||
generationType: 'json-object',
|
||||
prompt: `Output ONLY a JSON array with a single LangSmith run object. No explanation.
|
||||
Required: name (string), run_type ("tool"|"chain"|"llm"|"retriever"|"embedding"|"prompt"|"parser")
|
||||
Optional: inputs, outputs, tags, extra, session_name, end_time
|
||||
Fields id, trace_id, dotted_order, start_time are auto-generated if omitted.`,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'patch',
|
||||
title: 'Patch Runs',
|
||||
type: 'code',
|
||||
placeholder: '[{"id":"...","name":"...","run_type":"chain","start_time":"..."}]',
|
||||
condition: { field: 'operation', value: 'create_runs_batch' },
|
||||
condition: { field: 'operation', value: 'langsmith_create_runs_batch' },
|
||||
mode: 'advanced',
|
||||
wandConfig: {
|
||||
enabled: true,
|
||||
generationType: 'json-object',
|
||||
prompt: `Output ONLY a JSON array with a single LangSmith run object to update. No explanation.
|
||||
Required: id (existing run UUID), name, run_type ("tool"|"chain"|"llm"|"retriever"|"embedding"|"prompt"|"parser")
|
||||
Common patch fields: outputs, end_time, status, error`,
|
||||
},
|
||||
},
|
||||
],
|
||||
tools: {
|
||||
access: ['langsmith_create_run', 'langsmith_create_runs_batch'],
|
||||
config: {
|
||||
tool: (params) => {
|
||||
switch (params.operation) {
|
||||
case 'create_runs_batch':
|
||||
return 'langsmith_create_runs_batch'
|
||||
case 'create_run':
|
||||
default:
|
||||
return 'langsmith_create_run'
|
||||
}
|
||||
},
|
||||
tool: (params) => params.operation,
|
||||
params: (params) => {
|
||||
const parseJsonValue = (value: unknown, label: string) => {
|
||||
if (value === undefined || value === null || value === '') {
|
||||
@@ -221,7 +227,7 @@ export const LangsmithBlock: BlockConfig<LangsmithResponse> = {
|
||||
return value
|
||||
}
|
||||
|
||||
if (params.operation === 'create_runs_batch') {
|
||||
if (params.operation === 'langsmith_create_runs_batch') {
|
||||
const post = parseJsonValue(params.post, 'post runs')
|
||||
const patch = parseJsonValue(params.patch, 'patch runs')
|
||||
|
||||
|
||||
@@ -71,9 +71,6 @@ export type SubBlockType =
| 'mcp-dynamic-args' // MCP dynamic arguments based on tool schema
| 'input-format' // Input structure format
| 'response-format' // Response structure format
/**
* @deprecated Legacy trigger save subblock type.
*/
| 'trigger-save' // Trigger save button with validation
| 'file-upload' // File uploader
| 'input-mapping' // Map parent variables to child workflow input schema

@@ -173,6 +173,17 @@ export const baseStyles = {
margin: 0,
},

/** Code block text (for JSON/code display) */
codeBlock: {
fontSize: typography.fontSize.caption,
lineHeight: typography.lineHeight.caption,
color: colors.textSecondary,
fontFamily: 'monospace',
whiteSpace: 'pre-wrap' as const,
wordWrap: 'break-word' as const,
margin: 0,
},

/** Highlighted info box (e.g., "What you get with Pro") */
infoBox: {
backgroundColor: colors.bgOuter,

@@ -22,7 +22,7 @@ export function WelcomeEmail({ userName }: WelcomeEmailProps) {
workflows in minutes.
</Text>

<Link href={`${baseUrl}/w`} style={{ textDecoration: 'none' }}>
<Link href={`${baseUrl}/login`} style={{ textDecoration: 'none' }}>
<Text style={baseStyles.button}>Get Started</Text>
</Link>

@@ -30,13 +30,21 @@ export function WelcomeEmail({ userName }: WelcomeEmailProps) {
If you have any questions or feedback, just reply to this email. I read every message!
</Text>

<Text style={baseStyles.paragraph}>
Want to chat?{' '}
<Link href={`${baseUrl}/team`} style={baseStyles.link}>
Schedule a call
</Link>{' '}
with our team.
</Text>

<Text style={baseStyles.paragraph}>- Emir, co-founder of {brand.name}</Text>

{/* Divider */}
<div style={baseStyles.divider} />

<Text style={{ ...baseStyles.footerText, textAlign: 'left' }}>
You're on the free plan with $10 in credits to get started.
You're on the free plan with $20 in credits to get started.
</Text>
</EmailLayout>
)

@@ -33,7 +33,7 @@ export function PlanWelcomeEmail({ planName, userName, loginLink }: PlanWelcomeE

<Text style={baseStyles.paragraph}>
Want help getting started?{' '}
<Link href='https://cal.com/emirkarabeg/sim-team' style={baseStyles.link}>
<Link href={`${baseUrl}/team`} style={baseStyles.link}>
Schedule a call
</Link>{' '}
with our team.

@@ -61,7 +61,7 @@ export function EmailFooter({ baseUrl = getBaseUrl(), unsubscribe, messageId }:
<tbody>
<tr>
<td align='left' style={{ padding: '0 8px 0 0' }}>
<Link href='https://x.com/simdotai' rel='noopener noreferrer'>
<Link href={`${baseUrl}/x`} rel='noopener noreferrer'>
<Img
src={`${baseUrl}/static/x-icon.png`}
width='20'
@@ -71,7 +71,7 @@ export function EmailFooter({ baseUrl = getBaseUrl(), unsubscribe, messageId }:
</Link>
</td>
<td align='left' style={{ padding: '0 8px' }}>
<Link href='https://discord.gg/Hr4UWYEcTT' rel='noopener noreferrer'>
<Link href={`${baseUrl}/discord`} rel='noopener noreferrer'>
<Img
src={`${baseUrl}/static/discord-icon.png`}
width='20'
@@ -81,7 +81,7 @@ export function EmailFooter({ baseUrl = getBaseUrl(), unsubscribe, messageId }:
</Link>
</td>
<td align='left' style={{ padding: '0 8px' }}>
<Link href='https://github.com/simstudioai/sim' rel='noopener noreferrer'>
<Link href={`${baseUrl}/github`} rel='noopener noreferrer'>
<Img
src={`${baseUrl}/static/github-icon.png`}
width='20'

@@ -10,6 +10,8 @@ export * from './careers'
export * from './components'
// Invitation emails
export * from './invitations'
// Notification emails
export * from './notifications'
// Render functions and subjects
export * from './render'
export * from './subjects'

apps/sim/components/emails/notifications/index.ts (new file)
@@ -0,0 +1,7 @@
export type {
EmailRateLimitStatus,
EmailRateLimitsData,
EmailUsageData,
WorkflowNotificationEmailProps,
} from './workflow-notification-email'
export { WorkflowNotificationEmail } from './workflow-notification-email'

@@ -0,0 +1,161 @@
|
||||
import { Link, Section, Text } from '@react-email/components'
|
||||
import { baseStyles } from '@/components/emails/_styles'
|
||||
import { EmailLayout } from '@/components/emails/components'
|
||||
import { getBrandConfig } from '@/lib/branding/branding'
|
||||
|
||||
/**
|
||||
* Serialized rate limit status for email payloads.
|
||||
* Note: This differs from the canonical RateLimitStatus in @/lib/core/rate-limiter
|
||||
* which uses Date for resetAt. This version uses string for JSON serialization.
|
||||
*/
|
||||
export interface EmailRateLimitStatus {
|
||||
requestsPerMinute: number
|
||||
remaining: number
|
||||
maxBurst?: number
|
||||
resetAt?: string
|
||||
}
|
||||
|
||||
export interface EmailRateLimitsData {
|
||||
sync?: EmailRateLimitStatus
|
||||
async?: EmailRateLimitStatus
|
||||
}
|
||||
|
||||
export interface EmailUsageData {
|
||||
currentPeriodCost: number
|
||||
limit: number
|
||||
percentUsed: number
|
||||
isExceeded?: boolean
|
||||
}
|
||||
|
||||
export interface WorkflowNotificationEmailProps {
|
||||
workflowName: string
|
||||
status: 'success' | 'error'
|
||||
trigger: string
|
||||
duration: string
|
||||
cost: string
|
||||
logUrl: string
|
||||
alertReason?: string
|
||||
finalOutput?: unknown
|
||||
rateLimits?: EmailRateLimitsData
|
||||
usageData?: EmailUsageData
|
||||
}
|
||||
|
||||
function formatJsonForEmail(data: unknown): string {
|
||||
return JSON.stringify(data, null, 2)
|
||||
}
|
||||
|
||||
export function WorkflowNotificationEmail({
|
||||
workflowName,
|
||||
status,
|
||||
trigger,
|
||||
duration,
|
||||
cost,
|
||||
logUrl,
|
||||
alertReason,
|
||||
finalOutput,
|
||||
rateLimits,
|
||||
usageData,
|
||||
}: WorkflowNotificationEmailProps) {
|
||||
const brand = getBrandConfig()
|
||||
const isError = status === 'error'
|
||||
const statusText = isError ? 'Error' : 'Success'
|
||||
|
||||
const previewText = alertReason
|
||||
? `${brand.name}: Alert - ${workflowName}`
|
||||
: isError
|
||||
? `${brand.name}: Workflow Failed - ${workflowName}`
|
||||
: `${brand.name}: Workflow Completed - ${workflowName}`
|
||||
|
||||
const message = alertReason
|
||||
? 'An alert was triggered for your workflow.'
|
||||
: isError
|
||||
? 'Your workflow execution failed.'
|
||||
: 'Your workflow completed successfully.'
|
||||
|
||||
return (
|
||||
<EmailLayout preview={previewText}>
|
||||
<Text style={{ ...baseStyles.paragraph, marginTop: 0 }}>Hello,</Text>
|
||||
<Text style={baseStyles.paragraph}>{message}</Text>
|
||||
|
||||
<Section style={baseStyles.infoBox}>
|
||||
{alertReason && (
|
||||
<Text style={baseStyles.infoBoxList}>
|
||||
<strong>Reason:</strong> {alertReason}
|
||||
</Text>
|
||||
)}
|
||||
<Text style={{ ...baseStyles.infoBoxList, marginTop: alertReason ? '4px' : 0 }}>
|
||||
<strong>Workflow:</strong> {workflowName}
|
||||
</Text>
|
||||
<Text style={{ ...baseStyles.infoBoxList, marginTop: '4px' }}>
|
||||
<strong>Status:</strong> {statusText}
|
||||
</Text>
|
||||
<Text style={{ ...baseStyles.infoBoxList, marginTop: '4px' }}>
|
||||
<strong>Trigger:</strong> {trigger}
|
||||
</Text>
|
||||
<Text style={{ ...baseStyles.infoBoxList, marginTop: '4px' }}>
|
||||
<strong>Duration:</strong> {duration}
|
||||
</Text>
|
||||
<Text style={{ ...baseStyles.infoBoxList, marginTop: '4px' }}>
|
||||
<strong>Cost:</strong> {cost}
|
||||
</Text>
|
||||
</Section>
|
||||
|
||||
<Link href={logUrl} style={{ textDecoration: 'none' }}>
|
||||
<Text style={baseStyles.button}>View Execution Log</Text>
|
||||
</Link>
|
||||
|
||||
{rateLimits && (rateLimits.sync || rateLimits.async) ? (
|
||||
<>
|
||||
<div style={baseStyles.divider} />
|
||||
<Section style={baseStyles.infoBox}>
|
||||
<Text style={baseStyles.infoBoxTitle}>Rate Limits</Text>
|
||||
{rateLimits.sync && (
|
||||
<Text style={baseStyles.infoBoxList}>
|
||||
Sync: {rateLimits.sync.remaining} of {rateLimits.sync.requestsPerMinute} remaining
|
||||
</Text>
|
||||
)}
|
||||
{rateLimits.async && (
|
||||
<Text style={{ ...baseStyles.infoBoxList, marginTop: rateLimits.sync ? '4px' : 0 }}>
|
||||
Async: {rateLimits.async.remaining} of {rateLimits.async.requestsPerMinute}{' '}
|
||||
remaining
|
||||
</Text>
|
||||
)}
|
||||
</Section>
|
||||
</>
|
||||
) : null}
|
||||
|
||||
{usageData ? (
|
||||
<>
|
||||
<div style={baseStyles.divider} />
|
||||
<Section style={baseStyles.infoBox}>
|
||||
<Text style={baseStyles.infoBoxTitle}>Usage</Text>
|
||||
<Text style={baseStyles.infoBoxList}>
|
||||
${usageData.currentPeriodCost.toFixed(2)} of ${usageData.limit.toFixed(2)} used (
|
||||
{usageData.percentUsed.toFixed(1)}%)
|
||||
</Text>
|
||||
</Section>
|
||||
</>
|
||||
) : null}
|
||||
|
||||
{finalOutput ? (
|
||||
<>
|
||||
<div style={baseStyles.divider} />
|
||||
<Section style={baseStyles.infoBox}>
|
||||
<Text style={baseStyles.infoBoxTitle}>Final Output</Text>
|
||||
<Text style={{ ...baseStyles.codeBlock, marginTop: '8px' }}>
|
||||
{formatJsonForEmail(finalOutput)}
|
||||
</Text>
|
||||
</Section>
|
||||
</>
|
||||
) : null}
|
||||
|
||||
<div style={baseStyles.divider} />
|
||||
|
||||
<Text style={{ ...baseStyles.footerText, textAlign: 'left' }}>
|
||||
You're receiving this because you subscribed to workflow notifications.
|
||||
</Text>
|
||||
</EmailLayout>
|
||||
)
|
||||
}
|
||||
|
||||
export default WorkflowNotificationEmail
|
||||
@@ -15,6 +15,10 @@ import {
PollingGroupInvitationEmail,
WorkspaceInvitationEmail,
} from '@/components/emails/invitations'
import {
WorkflowNotificationEmail,
type WorkflowNotificationEmailProps,
} from '@/components/emails/notifications'
import { HelpConfirmationEmail } from '@/components/emails/support'
import { getBaseUrl } from '@/lib/core/utils/urls'

@@ -258,3 +262,9 @@ export async function renderCareersSubmissionEmail(params: {
})
)
}

export async function renderWorkflowNotificationEmail(
params: WorkflowNotificationEmailProps
): Promise<string> {
return await render(WorkflowNotificationEmail(params))
}

@@ -1,8 +1,15 @@
import { keepPreviousData, useInfiniteQuery, useQuery } from '@tanstack/react-query'
import { getEndDateFromTimeRange, getStartDateFromTimeRange } from '@/lib/logs/filters'
import { parseQuery, queryToApiParams } from '@/lib/logs/query-parser'
import type {
DashboardStatsResponse,
SegmentStats,
WorkflowStats,
} from '@/app/api/logs/stats/route'
import type { LogsResponse, TimeRange, WorkflowLog } from '@/stores/logs/filters/types'

export type { DashboardStatsResponse, SegmentStats, WorkflowStats }

export const logKeys = {
all: ['logs'] as const,
lists: () => [...logKeys.all, 'list'] as const,
@@ -10,8 +17,8 @@ export const logKeys = {
[...logKeys.lists(), workspaceId ?? '', filters] as const,
details: () => [...logKeys.all, 'detail'] as const,
detail: (logId: string | undefined) => [...logKeys.details(), logId ?? ''] as const,
dashboard: (workspaceId: string | undefined, filters: Record<string, unknown>) =>
[...logKeys.all, 'dashboard', workspaceId ?? '', filters] as const,
stats: (workspaceId: string | undefined, filters: object) =>
[...logKeys.all, 'stats', workspaceId ?? '', filters] as const,
executionSnapshots: () => [...logKeys.all, 'executionSnapshot'] as const,
executionSnapshot: (executionId: string | undefined) =>
[...logKeys.executionSnapshots(), executionId ?? ''] as const,
@@ -147,52 +154,45 @@ export function useLogDetail(logId: string | undefined) {
})
}

const DASHBOARD_LOGS_LIMIT = 10000

/**
* Fetches all logs for dashboard metrics (non-paginated).
* Uses same filters as the logs list but with a high limit to get all data.
* Fetches dashboard stats from the server-side aggregation endpoint.
* Uses SQL aggregation for efficient computation without arbitrary limits.
*/
async function fetchAllLogs(
async function fetchDashboardStats(
workspaceId: string,
filters: Omit<LogFilters, 'limit'>
): Promise<WorkflowLog[]> {
): Promise<DashboardStatsResponse> {
const params = new URLSearchParams()

params.set('workspaceId', workspaceId)
params.set('limit', DASHBOARD_LOGS_LIMIT.toString())
params.set('offset', '0')

applyFilterParams(params, filters)

const response = await fetch(`/api/logs?${params.toString()}`)
const response = await fetch(`/api/logs/stats?${params.toString()}`)

if (!response.ok) {
throw new Error('Failed to fetch logs for dashboard')
throw new Error('Failed to fetch dashboard stats')
}

const apiData: LogsResponse = await response.json()
return apiData.data || []
return response.json()
}

interface UseDashboardLogsOptions {
interface UseDashboardStatsOptions {
enabled?: boolean
refetchInterval?: number | false
}

/**
* Hook for fetching all logs for dashboard metrics.
* Unlike useLogsList, this fetches all logs in a single request
* to ensure dashboard metrics are computed from complete data.
* Hook for fetching dashboard stats using server-side aggregation.
* No arbitrary limits - uses SQL aggregation for accurate metrics.
*/
export function useDashboardLogs(
export function useDashboardStats(
workspaceId: string | undefined,
filters: Omit<LogFilters, 'limit'>,
options?: UseDashboardLogsOptions
options?: UseDashboardStatsOptions
) {
return useQuery({
queryKey: logKeys.dashboard(workspaceId, filters),
queryFn: () => fetchAllLogs(workspaceId as string, filters),
queryKey: logKeys.stats(workspaceId, filters),
queryFn: () => fetchDashboardStats(workspaceId as string, filters),
enabled: Boolean(workspaceId) && (options?.enabled ?? true),
refetchInterval: options?.refetchInterval ?? false,
staleTime: 0,

@@ -194,7 +194,7 @@ export function useCreateWorkflow() {
const workflowsInFolder = Object.values(currentWorkflows).filter(
(w) => w.folderId === targetFolderId
)
sortOrder = workflowsInFolder.reduce((max, w) => Math.max(max, w.sortOrder ?? 0), -1) + 1
sortOrder = workflowsInFolder.reduce((min, w) => Math.min(min, w.sortOrder ?? 0), 1) - 1
}

return {

@@ -294,7 +294,7 @@ export function useDuplicateWorkflowMutation() {
const workflowsInFolder = Object.values(currentWorkflows).filter(
(w) => w.folderId === targetFolderId
)
const maxSortOrder = workflowsInFolder.reduce((max, w) => Math.max(max, w.sortOrder ?? 0), -1)
const minSortOrder = workflowsInFolder.reduce((min, w) => Math.min(min, w.sortOrder ?? 0), 1)

return {
id: tempId,
@@ -305,7 +305,7 @@ export function useDuplicateWorkflowMutation() {
color: variables.color,
workspaceId: variables.workspaceId,
folderId: targetFolderId,
sortOrder: maxSortOrder + 1,
sortOrder: minSortOrder - 1,
}
}
)

@@ -426,7 +426,8 @@ export const auth = betterAuth({
},
emailVerification: {
autoSignInAfterVerification: true,
afterEmailVerification: async (user) => {
// onEmailVerification is called by the emailOTP plugin when email is verified via OTP
onEmailVerification: async (user) => {
if (isHosted && user.email) {
try {
const html = await renderWelcomeEmail(user.name || undefined)
@@ -441,11 +442,11 @@ export const auth = betterAuth({
emailType: 'transactional',
})

logger.info('[emailVerification.afterEmailVerification] Welcome email sent', {
logger.info('[emailVerification.onEmailVerification] Welcome email sent', {
userId: user.id,
})
} catch (error) {
logger.error('[emailVerification.afterEmailVerification] Failed to send welcome email', {
logger.error('[emailVerification.onEmailVerification] Failed to send welcome email', {
userId: user.id,
error,
})
@@ -456,7 +457,7 @@ export const auth = betterAuth({
emailAndPassword: {
enabled: true,
requireEmailVerification: isEmailVerificationEnabled,
sendVerificationOnSignUp: false,
sendVerificationOnSignUp: isEmailVerificationEnabled, // Auto-send verification OTP on signup when verification is required
throwOnMissingCredentials: true,
throwOnInvalidCredentials: true,
sendResetPassword: async ({ user, url, token }, request) => {
@@ -592,6 +593,7 @@ export const auth = betterAuth({
sendVerificationOnSignUp: false,
otpLength: 6, // Explicitly set the OTP length
expiresIn: 15 * 60, // 15 minutes in seconds
overrideDefaultEmailVerification: true,
}),
genericOAuth({
config: [

@@ -77,6 +77,9 @@ export interface SendMessageRequest {
| 'gpt-5.1-high'
| 'gpt-5-codex'
| 'gpt-5.1-codex'
| 'gpt-5.2'
| 'gpt-5.2-codex'
| 'gpt-5.2-pro'
| 'gpt-4o'
| 'gpt-4.1'
| 'o3'

@@ -19,6 +19,7 @@ vi.mock('@/lib/core/config/env', () =>

vi.mock('@/lib/core/config/feature-flags', () => ({
isDev: false,
isReactGrabEnabled: false,
}))

import {

@@ -1,5 +1,5 @@
import { env, getEnv } from '../config/env'
import { isDev } from '../config/feature-flags'
import { isDev, isReactGrabEnabled } from '../config/feature-flags'

/**
* Content Security Policy (CSP) configuration builder
@@ -40,6 +40,7 @@ export const buildTimeCSPDirectives: CSPDirectives = {
'https://*.google.com',
'https://apis.google.com',
'https://assets.onedollarstats.com',
...(isReactGrabEnabled ? ['https://unpkg.com'] : []),
],

'style-src': ["'self'", "'unsafe-inline'", 'https://fonts.googleapis.com'],
@@ -166,10 +167,11 @@ export function generateRuntimeCSP(): string {
const dynamicDomainsStr = uniqueDynamicDomains.join(' ')
const brandLogoDomain = brandLogoDomains[0] || ''
const brandFaviconDomain = brandFaviconDomains[0] || ''
const reactGrabScript = isReactGrabEnabled ? 'https://unpkg.com' : ''

return `
default-src 'self';
script-src 'self' 'unsafe-inline' 'unsafe-eval' https://*.google.com https://apis.google.com https://assets.onedollarstats.com;
script-src 'self' 'unsafe-inline' 'unsafe-eval' https://*.google.com https://apis.google.com https://assets.onedollarstats.com ${reactGrabScript};
style-src 'self' 'unsafe-inline' https://fonts.googleapis.com;
img-src 'self' data: blob: https://*.googleusercontent.com https://*.google.com https://*.atlassian.com https://cdn.discordapp.com https://*.githubusercontent.com https://*.s3.amazonaws.com https://s3.amazonaws.com https://*.amazonaws.com https://*.blob.core.windows.net https://github.com/* https://collector.onedollarstats.com ${brandLogoDomain} ${brandFaviconDomain};
media-src 'self' blob:;
@@ -1,525 +0,0 @@
|
||||
import { db } from '@sim/db'
|
||||
import { webhook } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import { nanoid } from 'nanoid'
|
||||
import type { NextRequest } from 'next/server'
|
||||
import { getProviderIdFromServiceId } from '@/lib/oauth'
|
||||
import {
|
||||
cleanupExternalWebhook,
|
||||
createExternalWebhookSubscription,
|
||||
shouldRecreateExternalWebhookSubscription,
|
||||
} from '@/lib/webhooks/provider-subscriptions'
|
||||
import {
|
||||
configureGmailPolling,
|
||||
configureOutlookPolling,
|
||||
syncWebhooksForCredentialSet,
|
||||
} from '@/lib/webhooks/utils.server'
|
||||
import { getBlock } from '@/blocks'
|
||||
import type { SubBlockConfig } from '@/blocks/types'
|
||||
import type { BlockState } from '@/stores/workflows/workflow/types'
|
||||
import { getTrigger, isTriggerValid } from '@/triggers'
|
||||
import { SYSTEM_SUBBLOCK_IDS } from '@/triggers/constants'
|
||||
|
||||
const logger = createLogger('DeployWebhookSync')
|
||||
const CREDENTIAL_SET_PREFIX = 'credentialSet:'
|
||||
|
||||
interface TriggerSaveError {
|
||||
message: string
|
||||
status: number
|
||||
}
|
||||
|
||||
interface TriggerSaveResult {
|
||||
success: boolean
|
||||
error?: TriggerSaveError
|
||||
}
|
||||
|
||||
interface SaveTriggerWebhooksInput {
|
||||
request: NextRequest
|
||||
workflowId: string
|
||||
workflow: Record<string, unknown>
|
||||
userId: string
|
||||
blocks: Record<string, BlockState>
|
||||
requestId: string
|
||||
}
|
||||
|
||||
function getSubBlockValue(block: BlockState, subBlockId: string): unknown {
|
||||
return block.subBlocks?.[subBlockId]?.value
|
||||
}
|
||||
|
||||
function isFieldRequired(
|
||||
config: SubBlockConfig,
|
||||
subBlockValues: Record<string, { value?: unknown }>
|
||||
): boolean {
|
||||
if (!config.required) return false
|
||||
if (typeof config.required === 'boolean') return config.required
|
||||
|
||||
const evalCond = (
|
||||
cond: {
|
||||
field: string
|
||||
value: string | number | boolean | Array<string | number | boolean>
|
||||
not?: boolean
|
||||
and?: {
|
||||
field: string
|
||||
value: string | number | boolean | Array<string | number | boolean> | undefined
|
||||
not?: boolean
|
||||
}
|
||||
},
|
||||
values: Record<string, { value?: unknown }>
|
||||
): boolean => {
|
||||
const fieldValue = values[cond.field]?.value
|
||||
const condValue = cond.value
|
||||
|
||||
let match = Array.isArray(condValue)
|
||||
? condValue.includes(fieldValue as string | number | boolean)
|
||||
: fieldValue === condValue
|
||||
|
||||
if (cond.not) match = !match
|
||||
|
||||
if (cond.and) {
|
||||
const andFieldValue = values[cond.and.field]?.value
|
||||
const andCondValue = cond.and.value
|
||||
let andMatch = Array.isArray(andCondValue)
|
||||
? (andCondValue || []).includes(andFieldValue as string | number | boolean)
|
||||
: andFieldValue === andCondValue
|
||||
if (cond.and.not) andMatch = !andMatch
|
||||
match = match && andMatch
|
||||
}
|
||||
|
||||
return match
|
||||
}
|
||||
|
||||
const condition = typeof config.required === 'function' ? config.required() : config.required
|
||||
return evalCond(condition, subBlockValues)
|
||||
}
|
||||
|
||||
function resolveTriggerId(block: BlockState): string | undefined {
|
||||
const selectedTriggerId = getSubBlockValue(block, 'selectedTriggerId')
|
||||
if (typeof selectedTriggerId === 'string' && isTriggerValid(selectedTriggerId)) {
|
||||
return selectedTriggerId
|
||||
}
|
||||
|
||||
const storedTriggerId = getSubBlockValue(block, 'triggerId')
|
||||
if (typeof storedTriggerId === 'string' && isTriggerValid(storedTriggerId)) {
|
||||
return storedTriggerId
|
||||
}
|
||||
|
||||
const blockConfig = getBlock(block.type)
|
||||
if (blockConfig?.category === 'triggers' && isTriggerValid(block.type)) {
|
||||
return block.type
|
||||
}
|
||||
|
||||
if (block.triggerMode && blockConfig?.triggers?.enabled) {
|
||||
const configuredTriggerId =
|
||||
typeof selectedTriggerId === 'string' ? selectedTriggerId : undefined
|
||||
if (configuredTriggerId && isTriggerValid(configuredTriggerId)) {
|
||||
return configuredTriggerId
|
||||
}
|
||||
|
||||
const available = blockConfig.triggers?.available?.[0]
|
||||
if (available && isTriggerValid(available)) {
|
||||
return available
|
||||
}
|
||||
}
|
||||
|
||||
return undefined
|
||||
}
|
||||
|
||||
function getConfigValue(block: BlockState, subBlock: SubBlockConfig): unknown {
|
||||
const fieldValue = getSubBlockValue(block, subBlock.id)
|
||||
|
||||
if (
|
||||
(fieldValue === null || fieldValue === undefined || fieldValue === '') &&
|
||||
Boolean(subBlock.required) &&
|
||||
subBlock.defaultValue !== undefined
|
||||
) {
|
||||
return subBlock.defaultValue
|
||||
}
|
||||
|
||||
return fieldValue
|
||||
}
|
||||
|
||||
function buildProviderConfig(
|
||||
block: BlockState,
|
||||
triggerId: string,
|
||||
triggerDef: { subBlocks: SubBlockConfig[] }
|
||||
): {
|
||||
providerConfig: Record<string, unknown>
|
||||
missingFields: string[]
|
||||
credentialId?: string
|
||||
credentialSetId?: string
|
||||
triggerPath: string
|
||||
} {
|
||||
const triggerConfigValue = getSubBlockValue(block, 'triggerConfig')
|
||||
const baseConfig =
|
||||
triggerConfigValue && typeof triggerConfigValue === 'object'
|
||||
? (triggerConfigValue as Record<string, unknown>)
|
||||
: {}
|
||||
|
||||
const providerConfig: Record<string, unknown> = { ...baseConfig }
|
||||
const missingFields: string[] = []
|
||||
const subBlockValues = Object.fromEntries(
|
||||
Object.entries(block.subBlocks || {}).map(([key, value]) => [key, { value: value.value }])
|
||||
)
|
||||
|
||||
triggerDef.subBlocks
|
||||
.filter((subBlock) => subBlock.mode === 'trigger' && !SYSTEM_SUBBLOCK_IDS.includes(subBlock.id))
|
||||
.forEach((subBlock) => {
|
||||
const valueToUse = getConfigValue(block, subBlock)
|
||||
if (valueToUse !== null && valueToUse !== undefined && valueToUse !== '') {
|
||||
providerConfig[subBlock.id] = valueToUse
|
||||
} else if (isFieldRequired(subBlock, subBlockValues)) {
|
||||
missingFields.push(subBlock.title || subBlock.id)
|
||||
}
|
||||
})
|
||||
|
||||
const credentialConfig = triggerDef.subBlocks.find(
|
||||
(subBlock) => subBlock.id === 'triggerCredentials'
|
||||
)
|
||||
const triggerCredentials = getSubBlockValue(block, 'triggerCredentials')
|
||||
if (
|
||||
credentialConfig &&
|
||||
isFieldRequired(credentialConfig, subBlockValues) &&
|
||||
!triggerCredentials
|
||||
) {
|
||||
missingFields.push(credentialConfig.title || 'Credentials')
|
||||
}
|
||||
|
||||
let credentialId: string | undefined
|
||||
let credentialSetId: string | undefined
|
||||
if (typeof triggerCredentials === 'string' && triggerCredentials.length > 0) {
|
||||
if (triggerCredentials.startsWith(CREDENTIAL_SET_PREFIX)) {
|
||||
credentialSetId = triggerCredentials.slice(CREDENTIAL_SET_PREFIX.length)
|
||||
providerConfig.credentialSetId = credentialSetId
|
||||
} else {
|
||||
credentialId = triggerCredentials
|
||||
providerConfig.credentialId = credentialId
|
||||
}
|
||||
}
|
||||
|
||||
providerConfig.triggerId = triggerId
|
||||
|
||||
const triggerPathValue = getSubBlockValue(block, 'triggerPath')
|
||||
const triggerPath =
|
||||
typeof triggerPathValue === 'string' && triggerPathValue.length > 0
|
||||
? triggerPathValue
|
||||
: block.id
|
||||
|
||||
return { providerConfig, missingFields, credentialId, credentialSetId, triggerPath }
|
||||
}

async function configurePollingIfNeeded(
  provider: string,
  savedWebhook: any,
  requestId: string
): Promise<TriggerSaveError | null> {
  if (provider === 'gmail') {
    const success = await configureGmailPolling(savedWebhook, requestId)
    if (!success) {
      await db.delete(webhook).where(eq(webhook.id, savedWebhook.id))
      return {
        message: 'Failed to configure Gmail polling. Please check your Gmail account permissions.',
        status: 500,
      }
    }
  }

  if (provider === 'outlook') {
    const success = await configureOutlookPolling(savedWebhook, requestId)
    if (!success) {
      await db.delete(webhook).where(eq(webhook.id, savedWebhook.id))
      return {
        message:
          'Failed to configure Outlook polling. Please check your Outlook account permissions.',
        status: 500,
      }
    }
  }

  return null
}

async function syncCredentialSetWebhooks(params: {
  workflowId: string
  blockId: string
  provider: string
  triggerPath: string
  providerConfig: Record<string, unknown>
  requestId: string
}): Promise<TriggerSaveError | null> {
  const { workflowId, blockId, provider, triggerPath, providerConfig, requestId } = params

  const credentialSetId = providerConfig.credentialSetId as string | undefined
  if (!credentialSetId) {
    return null
  }

  const oauthProviderId = getProviderIdFromServiceId(provider)

  const { credentialId: _cId, credentialSetId: _csId, userId: _uId, ...baseConfig } = providerConfig

  const syncResult = await syncWebhooksForCredentialSet({
    workflowId,
    blockId,
    provider,
    basePath: triggerPath,
    credentialSetId,
    oauthProviderId,
    providerConfig: baseConfig as Record<string, any>,
    requestId,
  })

  if (syncResult.webhooks.length === 0) {
    return {
      message: `No valid credentials found in credential set for ${provider}. Please connect accounts and try again.`,
      status: 400,
    }
  }

  if (provider === 'gmail' || provider === 'outlook') {
    const configureFunc = provider === 'gmail' ? configureGmailPolling : configureOutlookPolling
    for (const wh of syncResult.webhooks) {
      if (wh.isNew) {
        const rows = await db.select().from(webhook).where(eq(webhook.id, wh.id)).limit(1)
        if (rows.length > 0) {
          const success = await configureFunc(rows[0], requestId)
          if (!success) {
            await db.delete(webhook).where(eq(webhook.id, wh.id))
            return {
              message: `Failed to configure ${provider} polling. Please check account permissions.`,
              status: 500,
            }
          }
        }
      }
    }
  }

  return null
}

async function upsertSingleWebhook(params: {
  request: NextRequest
  workflowId: string
  workflow: Record<string, unknown>
  userId: string
  block: BlockState
  provider: string
  providerConfig: Record<string, unknown>
  triggerPath: string
  requestId: string
}): Promise<TriggerSaveError | null> {
  const {
    request,
    workflowId,
    workflow,
    userId,
    block,
    provider,
    providerConfig,
    triggerPath,
    requestId,
  } = params

  const existingWebhooks = await db
    .select()
    .from(webhook)
    .where(and(eq(webhook.workflowId, workflowId), eq(webhook.blockId, block.id)))
    .limit(1)

  const existing = existingWebhooks[0]
  if (existing) {
    const existingConfig = (existing.providerConfig as Record<string, unknown>) || {}
    let nextProviderConfig = providerConfig

    if (
      shouldRecreateExternalWebhookSubscription({
        previousProvider: existing.provider as string,
        nextProvider: provider,
        previousConfig: existingConfig,
        nextConfig: nextProviderConfig,
      })
    ) {
      await cleanupExternalWebhook(existing, workflow, requestId)
      const result = await createExternalWebhookSubscription(
        request,
        {
          ...existing,
          provider,
          path: triggerPath,
          providerConfig: nextProviderConfig,
        },
        workflow,
        userId,
        requestId
      )
      nextProviderConfig = result.updatedProviderConfig as Record<string, unknown>
    }

    const finalProviderConfig = {
      ...nextProviderConfig,
      credentialId: existingConfig.credentialId,
      credentialSetId: existingConfig.credentialSetId,
      userId: existingConfig.userId,
      historyId: existingConfig.historyId,
      lastCheckedTimestamp: existingConfig.lastCheckedTimestamp,
      setupCompleted: existingConfig.setupCompleted,
      externalId: nextProviderConfig.externalId ?? existingConfig.externalId,
    }

    await db
      .update(webhook)
      .set({
        path: triggerPath,
        provider,
        providerConfig: finalProviderConfig,
        isActive: true,
        updatedAt: new Date(),
      })
      .where(eq(webhook.id, existing.id))

    return null
  }

  const webhookId = nanoid()
  const createPayload = {
    id: webhookId,
    path: triggerPath,
    provider,
    providerConfig,
  }

  const result = await createExternalWebhookSubscription(
    request,
    createPayload,
    workflow,
    userId,
    requestId
  )

  const updatedProviderConfig = result.updatedProviderConfig as Record<string, unknown>
  let savedWebhook: any

  try {
    const createdRows = await db
      .insert(webhook)
      .values({
        id: webhookId,
        workflowId,
        blockId: block.id,
        path: triggerPath,
        provider,
        providerConfig: updatedProviderConfig,
        credentialSetId: (updatedProviderConfig.credentialSetId as string | undefined) || null,
        isActive: true,
        createdAt: new Date(),
        updatedAt: new Date(),
      })
      .returning()
    savedWebhook = createdRows[0]
  } catch (error) {
    if (result.externalSubscriptionCreated) {
      await cleanupExternalWebhook(createPayload, workflow, requestId)
    }
    throw error
  }

  const pollingError = await configurePollingIfNeeded(provider, savedWebhook, requestId)
  if (pollingError) {
    return pollingError
  }

  return null
}

/**
 * Saves trigger webhook configurations as part of workflow deployment.
 */
export async function saveTriggerWebhooksForDeploy({
  request,
  workflowId,
  workflow,
  userId,
  blocks,
  requestId,
}: SaveTriggerWebhooksInput): Promise<TriggerSaveResult> {
  const triggerBlocks = Object.values(blocks || {}).filter(Boolean)

  if (triggerBlocks.length === 0) {
    return { success: true }
  }

  for (const block of triggerBlocks) {
    const triggerId = resolveTriggerId(block)
    if (!triggerId) continue

    if (!isTriggerValid(triggerId)) {
      continue
    }

    const triggerDef = getTrigger(triggerId)
    const provider = triggerDef.provider

    const { providerConfig, missingFields, triggerPath } = buildProviderConfig(
      block,
      triggerId,
      triggerDef
    )

    if (missingFields.length > 0) {
      return {
        success: false,
        error: {
          message: `Missing required fields for ${triggerDef.name || triggerId}: ${missingFields.join(', ')}`,
          status: 400,
        },
      }
    }

    try {
      const credentialSetError = await syncCredentialSetWebhooks({
        workflowId,
        blockId: block.id,
        provider,
        triggerPath,
        providerConfig,
        requestId,
      })

      if (credentialSetError) {
        return { success: false, error: credentialSetError }
      }

      if (providerConfig.credentialSetId) {
        continue
      }

      const upsertError = await upsertSingleWebhook({
        request,
        workflowId,
        workflow,
        userId,
        block,
        provider,
        providerConfig,
        triggerPath,
        requestId,
      })

      if (upsertError) {
        return { success: false, error: upsertError }
      }
    } catch (error: any) {
      logger.error(`[${requestId}] Failed to save trigger config for ${block.id}`, error)
      return {
        success: false,
        error: {
          message: error?.message || 'Failed to save trigger configuration',
          status: 500,
        },
      }
    }
  }

  return { success: true }
}
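
For orientation, here is a minimal sketch of how a caller such as the deploy route might consume `saveTriggerWebhooksForDeploy`. The surrounding names (`session`, `workflowRecord`, `deployedState`) and the response handling are assumptions for illustration, not part of this diff:

```typescript
// Hypothetical caller sketch; only the helper's input/output shape is taken from the diff.
const result = await saveTriggerWebhooksForDeploy({
  request,
  workflowId,
  workflow: workflowRecord, // assumed: the workflow row loaded earlier in the route
  userId: session.user.id, // assumed: authenticated user id
  blocks: deployedState.blocks, // assumed: trigger blocks from the state being deployed
  requestId,
})

if (!result.success && result.error) {
  // Propagate the per-trigger validation or configuration failure to the client.
  return NextResponse.json({ error: result.error.message }, { status: result.error.status })
}
```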
@@ -10,7 +10,6 @@ const typeformLogger = createLogger('TypeformWebhook')
const calendlyLogger = createLogger('CalendlyWebhook')
const grainLogger = createLogger('GrainWebhook')
const lemlistLogger = createLogger('LemlistWebhook')
const webflowLogger = createLogger('WebflowWebhook')

function getProviderConfig(webhook: any): Record<string, any> {
  return (webhook.providerConfig as Record<string, any>) || {}
@@ -761,775 +760,6 @@ export async function deleteLemlistWebhook(webhook: any, requestId: string): Pro
  }
}

export async function deleteWebflowWebhook(
  webhook: any,
  workflow: any,
  requestId: string
): Promise<void> {
  try {
    const config = getProviderConfig(webhook)
    const siteId = config.siteId as string | undefined
    const externalId = config.externalId as string | undefined

    if (!siteId) {
      webflowLogger.warn(
        `[${requestId}] Missing siteId for Webflow webhook deletion ${webhook.id}, skipping cleanup`
      )
      return
    }

    if (!externalId) {
      webflowLogger.warn(
        `[${requestId}] Missing externalId for Webflow webhook deletion ${webhook.id}, skipping cleanup`
      )
      return
    }

    const accessToken = await getOAuthToken(workflow.userId, 'webflow')
    if (!accessToken) {
      webflowLogger.warn(
        `[${requestId}] Could not retrieve Webflow access token for user ${workflow.userId}. Cannot delete webhook.`,
        { webhookId: webhook.id }
      )
      return
    }

    const webflowApiUrl = `https://api.webflow.com/v2/sites/${siteId}/webhooks/${externalId}`

    const webflowResponse = await fetch(webflowApiUrl, {
      method: 'DELETE',
      headers: {
        Authorization: `Bearer ${accessToken}`,
        accept: 'application/json',
      },
    })

    if (!webflowResponse.ok && webflowResponse.status !== 404) {
      const responseBody = await webflowResponse.json().catch(() => ({}))
      webflowLogger.warn(
        `[${requestId}] Failed to delete Webflow webhook (non-fatal): ${webflowResponse.status}`,
        { response: responseBody }
      )
    } else {
      webflowLogger.info(`[${requestId}] Successfully deleted Webflow webhook ${externalId}`)
    }
  } catch (error) {
    webflowLogger.warn(`[${requestId}] Error deleting Webflow webhook (non-fatal)`, error)
  }
}

export async function createGrainWebhookSubscription(
  _request: NextRequest,
  webhookData: any,
  requestId: string
): Promise<{ id: string; eventTypes: string[] } | undefined> {
  try {
    const { path, providerConfig } = webhookData
    const { apiKey, triggerId, includeHighlights, includeParticipants, includeAiSummary } =
      providerConfig || {}

    if (!apiKey) {
      grainLogger.warn(`[${requestId}] Missing apiKey for Grain webhook creation.`, {
        webhookId: webhookData.id,
      })
      throw new Error(
        'Grain API Key is required. Please provide your Grain Personal Access Token in the trigger configuration.'
      )
    }

    const hookTypeMap: Record<string, string> = {
      grain_webhook: 'recording_added',
      grain_recording_created: 'recording_added',
      grain_recording_updated: 'recording_added',
      grain_highlight_created: 'recording_added',
      grain_highlight_updated: 'recording_added',
      grain_story_created: 'recording_added',
      grain_upload_status: 'upload_status',
    }

    const eventTypeMap: Record<string, string[]> = {
      grain_webhook: [],
      grain_recording_created: ['recording_added'],
      grain_recording_updated: ['recording_updated'],
      grain_highlight_created: ['highlight_created'],
      grain_highlight_updated: ['highlight_updated'],
      grain_story_created: ['story_created'],
      grain_upload_status: ['upload_status'],
    }

    const hookType = hookTypeMap[triggerId] ?? 'recording_added'
    const eventTypes = eventTypeMap[triggerId] ?? []

    if (!hookTypeMap[triggerId]) {
      grainLogger.warn(
        `[${requestId}] Unknown triggerId for Grain: ${triggerId}, defaulting to recording_added`,
        {
          webhookId: webhookData.id,
        }
      )
    }

    grainLogger.info(`[${requestId}] Creating Grain webhook`, {
      triggerId,
      hookType,
      eventTypes,
      webhookId: webhookData.id,
    })

    const notificationUrl = `${getBaseUrl()}/api/webhooks/trigger/${path}`

    const grainApiUrl = 'https://api.grain.com/_/public-api/v2/hooks/create'

    const requestBody: Record<string, any> = {
      hook_url: notificationUrl,
      hook_type: hookType,
    }

    const include: Record<string, boolean> = {}
    if (includeHighlights) {
      include.highlights = true
    }
    if (includeParticipants) {
      include.participants = true
    }
    if (includeAiSummary) {
      include.ai_summary = true
    }
    if (Object.keys(include).length > 0) {
      requestBody.include = include
    }

    const grainResponse = await fetch(grainApiUrl, {
      method: 'POST',
      headers: {
        Authorization: `Bearer ${apiKey}`,
        'Content-Type': 'application/json',
        'Public-Api-Version': '2025-10-31',
      },
      body: JSON.stringify(requestBody),
    })

    const responseBody = await grainResponse.json()

    if (!grainResponse.ok || responseBody.error || responseBody.errors) {
      const errorMessage =
        responseBody.errors?.detail ||
        responseBody.error?.message ||
        responseBody.error ||
        responseBody.message ||
        'Unknown Grain API error'
      grainLogger.error(
        `[${requestId}] Failed to create webhook in Grain for webhook ${webhookData.id}. Status: ${grainResponse.status}`,
        { message: errorMessage, response: responseBody }
      )

      let userFriendlyMessage = 'Failed to create webhook subscription in Grain'
      if (grainResponse.status === 401) {
        userFriendlyMessage =
          'Invalid Grain API Key. Please verify your Personal Access Token is correct.'
      } else if (grainResponse.status === 403) {
        userFriendlyMessage =
          'Access denied. Please ensure your Grain API Key has appropriate permissions.'
      } else if (errorMessage && errorMessage !== 'Unknown Grain API error') {
        userFriendlyMessage = `Grain error: ${errorMessage}`
      }

      throw new Error(userFriendlyMessage)
    }

    grainLogger.info(
      `[${requestId}] Successfully created webhook in Grain for webhook ${webhookData.id}.`,
      {
        grainWebhookId: responseBody.id,
        eventTypes,
      }
    )

    return { id: responseBody.id, eventTypes }
  } catch (error: any) {
    grainLogger.error(
      `[${requestId}] Exception during Grain webhook creation for webhook ${webhookData.id}.`,
      {
        message: error.message,
        stack: error.stack,
      }
    )
    throw error
  }
}
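
For a `grain_recording_created` trigger with highlights enabled, the request body assembled above would look roughly like the following sketch; the URL is a placeholder that in practice comes from `getBaseUrl()` and the webhook path:

```typescript
// Illustrative payload only - mirrors the requestBody construction in createGrainWebhookSubscription.
const exampleGrainRequestBody = {
  hook_url: 'https://<your-sim-host>/api/webhooks/trigger/<webhook-path>', // placeholder URL
  hook_type: 'recording_added',
  include: { highlights: true },
}
```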

export async function createLemlistWebhookSubscription(
  webhookData: any,
  requestId: string
): Promise<{ id: string } | undefined> {
  try {
    const { path, providerConfig } = webhookData
    const { apiKey, triggerId, campaignId } = providerConfig || {}

    if (!apiKey) {
      lemlistLogger.warn(`[${requestId}] Missing apiKey for Lemlist webhook creation.`, {
        webhookId: webhookData.id,
      })
      throw new Error(
        'Lemlist API Key is required. Please provide your Lemlist API Key in the trigger configuration.'
      )
    }

    const eventTypeMap: Record<string, string | undefined> = {
      lemlist_email_replied: 'emailsReplied',
      lemlist_linkedin_replied: 'linkedinReplied',
      lemlist_interested: 'interested',
      lemlist_not_interested: 'notInterested',
      lemlist_email_opened: 'emailsOpened',
      lemlist_email_clicked: 'emailsClicked',
      lemlist_email_bounced: 'emailsBounced',
      lemlist_email_sent: 'emailsSent',
      lemlist_webhook: undefined,
    }

    const eventType = eventTypeMap[triggerId]

    lemlistLogger.info(`[${requestId}] Creating Lemlist webhook`, {
      triggerId,
      eventType,
      hasCampaignId: !!campaignId,
      webhookId: webhookData.id,
    })

    const notificationUrl = `${getBaseUrl()}/api/webhooks/trigger/${path}`

    const lemlistApiUrl = 'https://api.lemlist.com/api/hooks'

    const requestBody: Record<string, any> = {
      targetUrl: notificationUrl,
    }

    if (eventType) {
      requestBody.type = eventType
    }

    if (campaignId) {
      requestBody.campaignId = campaignId
    }

    const authString = Buffer.from(`:${apiKey}`).toString('base64')

    const lemlistResponse = await fetch(lemlistApiUrl, {
      method: 'POST',
      headers: {
        Authorization: `Basic ${authString}`,
        'Content-Type': 'application/json',
      },
      body: JSON.stringify(requestBody),
    })

    const responseBody = await lemlistResponse.json()

    if (!lemlistResponse.ok || responseBody.error) {
      const errorMessage = responseBody.message || responseBody.error || 'Unknown Lemlist API error'
      lemlistLogger.error(
        `[${requestId}] Failed to create webhook in Lemlist for webhook ${webhookData.id}. Status: ${lemlistResponse.status}`,
        { message: errorMessage, response: responseBody }
      )

      let userFriendlyMessage = 'Failed to create webhook subscription in Lemlist'
      if (lemlistResponse.status === 401) {
        userFriendlyMessage = 'Invalid Lemlist API Key. Please verify your API Key is correct.'
      } else if (lemlistResponse.status === 403) {
        userFriendlyMessage =
          'Access denied. Please ensure your Lemlist API Key has appropriate permissions.'
      } else if (errorMessage && errorMessage !== 'Unknown Lemlist API error') {
        userFriendlyMessage = `Lemlist error: ${errorMessage}`
      }

      throw new Error(userFriendlyMessage)
    }

    lemlistLogger.info(
      `[${requestId}] Successfully created webhook in Lemlist for webhook ${webhookData.id}.`,
      {
        lemlistWebhookId: responseBody._id,
      }
    )

    return { id: responseBody._id }
  } catch (error: any) {
    lemlistLogger.error(
      `[${requestId}] Exception during Lemlist webhook creation for webhook ${webhookData.id}.`,
      {
        message: error.message,
        stack: error.stack,
      }
    )
    throw error
  }
}

export async function createAirtableWebhookSubscription(
  userId: string,
  webhookData: any,
  requestId: string
): Promise<string | undefined> {
  try {
    const { path, providerConfig } = webhookData
    const { baseId, tableId, includeCellValuesInFieldIds } = providerConfig || {}

    if (!baseId || !tableId) {
      airtableLogger.warn(
        `[${requestId}] Missing baseId or tableId for Airtable webhook creation.`,
        {
          webhookId: webhookData.id,
        }
      )
      throw new Error(
        'Base ID and Table ID are required to create Airtable webhook. Please provide valid Airtable base and table IDs.'
      )
    }

    const accessToken = await getOAuthToken(userId, 'airtable')
    if (!accessToken) {
      airtableLogger.warn(
        `[${requestId}] Could not retrieve Airtable access token for user ${userId}. Cannot create webhook in Airtable.`
      )
      throw new Error(
        'Airtable account connection required. Please connect your Airtable account in the trigger configuration and try again.'
      )
    }

    const notificationUrl = `${getBaseUrl()}/api/webhooks/trigger/${path}`

    const airtableApiUrl = `https://api.airtable.com/v0/bases/${baseId}/webhooks`

    const specification: any = {
      options: {
        filters: {
          dataTypes: ['tableData'],
          recordChangeScope: tableId,
        },
      },
    }

    if (includeCellValuesInFieldIds === 'all') {
      specification.options.includes = {
        includeCellValuesInFieldIds: 'all',
      }
    }

    const requestBody: any = {
      notificationUrl: notificationUrl,
      specification: specification,
    }

    const airtableResponse = await fetch(airtableApiUrl, {
      method: 'POST',
      headers: {
        Authorization: `Bearer ${accessToken}`,
        'Content-Type': 'application/json',
      },
      body: JSON.stringify(requestBody),
    })

    const responseBody = await airtableResponse.json()

    if (!airtableResponse.ok || responseBody.error) {
      const errorMessage =
        responseBody.error?.message || responseBody.error || 'Unknown Airtable API error'
      const errorType = responseBody.error?.type
      airtableLogger.error(
        `[${requestId}] Failed to create webhook in Airtable for webhook ${webhookData.id}. Status: ${airtableResponse.status}`,
        { type: errorType, message: errorMessage, response: responseBody }
      )

      let userFriendlyMessage = 'Failed to create webhook subscription in Airtable'
      if (airtableResponse.status === 404) {
        userFriendlyMessage =
          'Airtable base or table not found. Please verify that the Base ID and Table ID are correct and that you have access to them.'
      } else if (errorMessage && errorMessage !== 'Unknown Airtable API error') {
        userFriendlyMessage = `Airtable error: ${errorMessage}`
      }

      throw new Error(userFriendlyMessage)
    }
    airtableLogger.info(
      `[${requestId}] Successfully created webhook in Airtable for webhook ${webhookData.id}.`,
      {
        airtableWebhookId: responseBody.id,
      }
    )
    return responseBody.id
  } catch (error: any) {
    airtableLogger.error(
      `[${requestId}] Exception during Airtable webhook creation for webhook ${webhookData.id}.`,
      {
        message: error.message,
        stack: error.stack,
      }
    )
    throw error
  }
}

export async function createCalendlyWebhookSubscription(
  webhookData: any,
  requestId: string
): Promise<string | undefined> {
  try {
    const { path, providerConfig } = webhookData
    const { apiKey, organization, triggerId } = providerConfig || {}

    if (!apiKey) {
      calendlyLogger.warn(`[${requestId}] Missing apiKey for Calendly webhook creation.`, {
        webhookId: webhookData.id,
      })
      throw new Error(
        'Personal Access Token is required to create Calendly webhook. Please provide your Calendly Personal Access Token.'
      )
    }

    if (!organization) {
      calendlyLogger.warn(
        `[${requestId}] Missing organization URI for Calendly webhook creation.`,
        {
          webhookId: webhookData.id,
        }
      )
      throw new Error(
        'Organization URI is required to create Calendly webhook. Please provide your Organization URI from the "Get Current User" operation.'
      )
    }

    if (!triggerId) {
      calendlyLogger.warn(`[${requestId}] Missing triggerId for Calendly webhook creation.`, {
        webhookId: webhookData.id,
      })
      throw new Error('Trigger ID is required to create Calendly webhook')
    }

    const notificationUrl = `${getBaseUrl()}/api/webhooks/trigger/${path}`

    const eventTypeMap: Record<string, string[]> = {
      calendly_invitee_created: ['invitee.created'],
      calendly_invitee_canceled: ['invitee.canceled'],
      calendly_routing_form_submitted: ['routing_form_submission.created'],
      calendly_webhook: ['invitee.created', 'invitee.canceled', 'routing_form_submission.created'],
    }

    const events = eventTypeMap[triggerId] || ['invitee.created']

    const calendlyApiUrl = 'https://api.calendly.com/webhook_subscriptions'

    const requestBody = {
      url: notificationUrl,
      events,
      organization,
      scope: 'organization',
    }

    const calendlyResponse = await fetch(calendlyApiUrl, {
      method: 'POST',
      headers: {
        Authorization: `Bearer ${apiKey}`,
        'Content-Type': 'application/json',
      },
      body: JSON.stringify(requestBody),
    })

    if (!calendlyResponse.ok) {
      const errorBody = await calendlyResponse.json().catch(() => ({}))
      const errorMessage = errorBody.message || errorBody.title || 'Unknown Calendly API error'
      calendlyLogger.error(
        `[${requestId}] Failed to create webhook in Calendly for webhook ${webhookData.id}. Status: ${calendlyResponse.status}`,
        { response: errorBody }
      )

      let userFriendlyMessage = 'Failed to create webhook subscription in Calendly'
      if (calendlyResponse.status === 401) {
        userFriendlyMessage =
          'Calendly authentication failed. Please verify your Personal Access Token is correct.'
      } else if (calendlyResponse.status === 403) {
        userFriendlyMessage =
          'Calendly access denied. Please ensure you have appropriate permissions and a paid Calendly subscription.'
      } else if (calendlyResponse.status === 404) {
        userFriendlyMessage =
          'Calendly organization not found. Please verify the Organization URI is correct.'
      } else if (errorMessage && errorMessage !== 'Unknown Calendly API error') {
        userFriendlyMessage = `Calendly error: ${errorMessage}`
      }

      throw new Error(userFriendlyMessage)
    }

    const responseBody = await calendlyResponse.json()
    const webhookUri = responseBody.resource?.uri

    if (!webhookUri) {
      calendlyLogger.error(
        `[${requestId}] Calendly webhook created but no webhook URI returned for webhook ${webhookData.id}`,
        { response: responseBody }
      )
      throw new Error('Calendly webhook creation succeeded but no webhook URI was returned')
    }

    const webhookId = webhookUri.split('/').pop()

    if (!webhookId) {
      calendlyLogger.error(
        `[${requestId}] Could not extract webhook ID from Calendly URI: ${webhookUri}`,
        {
          response: responseBody,
        }
      )
      throw new Error('Failed to extract webhook ID from Calendly response')
    }

    calendlyLogger.info(
      `[${requestId}] Successfully created webhook in Calendly for webhook ${webhookData.id}.`,
      {
        calendlyWebhookUri: webhookUri,
        calendlyWebhookId: webhookId,
      }
    )
    return webhookId
  } catch (error: any) {
    calendlyLogger.error(
      `[${requestId}] Exception during Calendly webhook creation for webhook ${webhookData.id}.`,
      {
        message: error.message,
        stack: error.stack,
      }
    )
    throw error
  }
}

export async function createWebflowWebhookSubscription(
  userId: string,
  webhookData: any,
  requestId: string
): Promise<string | undefined> {
  try {
    const { path, providerConfig } = webhookData
    const { siteId, triggerId, collectionId, formId } = providerConfig || {}

    if (!siteId) {
      webflowLogger.warn(`[${requestId}] Missing siteId for Webflow webhook creation.`, {
        webhookId: webhookData.id,
      })
      throw new Error('Site ID is required to create Webflow webhook')
    }

    if (!triggerId) {
      webflowLogger.warn(`[${requestId}] Missing triggerId for Webflow webhook creation.`, {
        webhookId: webhookData.id,
      })
      throw new Error('Trigger type is required to create Webflow webhook')
    }

    const accessToken = await getOAuthToken(userId, 'webflow')
    if (!accessToken) {
      webflowLogger.warn(
        `[${requestId}] Could not retrieve Webflow access token for user ${userId}. Cannot create webhook in Webflow.`
      )
      throw new Error(
        'Webflow account connection required. Please connect your Webflow account in the trigger configuration and try again.'
      )
    }

    const notificationUrl = `${getBaseUrl()}/api/webhooks/trigger/${path}`

    const triggerTypeMap: Record<string, string> = {
      webflow_collection_item_created: 'collection_item_created',
      webflow_collection_item_changed: 'collection_item_changed',
      webflow_collection_item_deleted: 'collection_item_deleted',
      webflow_form_submission: 'form_submission',
    }

    const webflowTriggerType = triggerTypeMap[triggerId]
    if (!webflowTriggerType) {
      webflowLogger.warn(`[${requestId}] Invalid triggerId for Webflow: ${triggerId}`, {
        webhookId: webhookData.id,
      })
      throw new Error(`Invalid Webflow trigger type: ${triggerId}`)
    }

    const webflowApiUrl = `https://api.webflow.com/v2/sites/${siteId}/webhooks`

    const requestBody: any = {
      triggerType: webflowTriggerType,
      url: notificationUrl,
    }

    if (collectionId && webflowTriggerType.startsWith('collection_item_')) {
      requestBody.filter = {
        resource_type: 'collection',
        resource_id: collectionId,
      }
    }

    if (formId && webflowTriggerType === 'form_submission') {
      requestBody.filter = {
        resource_type: 'form',
        resource_id: formId,
      }
    }

    const webflowResponse = await fetch(webflowApiUrl, {
      method: 'POST',
      headers: {
        Authorization: `Bearer ${accessToken}`,
        'Content-Type': 'application/json',
        accept: 'application/json',
      },
      body: JSON.stringify(requestBody),
    })

    const responseBody = await webflowResponse.json()

    if (!webflowResponse.ok || responseBody.error) {
      const errorMessage = responseBody.message || responseBody.error || 'Unknown Webflow API error'
      webflowLogger.error(
        `[${requestId}] Failed to create webhook in Webflow for webhook ${webhookData.id}. Status: ${webflowResponse.status}`,
        { message: errorMessage, response: responseBody }
      )
      throw new Error(errorMessage)
    }

    webflowLogger.info(
      `[${requestId}] Successfully created webhook in Webflow for webhook ${webhookData.id}.`,
      {
        webflowWebhookId: responseBody.id || responseBody._id,
      }
    )

    return responseBody.id || responseBody._id
  } catch (error: any) {
    webflowLogger.error(
      `[${requestId}] Exception during Webflow webhook creation for webhook ${webhookData.id}.`,
      {
        message: error.message,
        stack: error.stack,
      }
    )
    throw error
  }
}

type ExternalSubscriptionResult = {
  updatedProviderConfig: Record<string, unknown>
  externalSubscriptionCreated: boolean
}

type RecreateCheckInput = {
  previousProvider: string
  nextProvider: string
  previousConfig: Record<string, unknown>
  nextConfig: Record<string, unknown>
}

function areValuesEqual(a: unknown, b: unknown): boolean {
  if (a === b) return true
  if (Array.isArray(a) || Array.isArray(b) || typeof a === 'object' || typeof b === 'object') {
    return JSON.stringify(a ?? null) === JSON.stringify(b ?? null)
  }
  return false
}

export function shouldRecreateExternalWebhookSubscription({
  previousProvider,
  nextProvider,
  previousConfig,
  nextConfig,
}: RecreateCheckInput): boolean {
  const relevantKeysByProvider: Record<string, string[]> = {
    airtable: ['baseId', 'tableId', 'includeCellValues', 'includeCellValuesInFieldIds'],
    calendly: ['apiKey', 'organization', 'triggerId'],
    webflow: ['siteId', 'collectionId', 'formId', 'triggerId'],
    typeform: ['formId', 'apiKey', 'secret', 'webhookTag'],
    grain: ['apiKey', 'triggerId', 'includeHighlights', 'includeParticipants', 'includeAiSummary'],
    lemlist: ['apiKey', 'triggerId', 'campaignId'],
    telegram: ['botToken'],
    'microsoft-teams': ['triggerId', 'chatId', 'credentialId', 'credentialSetId'],
  }

  if (previousProvider !== nextProvider) {
    return (
      Boolean(relevantKeysByProvider[previousProvider]) ||
      Boolean(relevantKeysByProvider[nextProvider])
    )
  }

  const keys = relevantKeysByProvider[nextProvider]
  if (!keys) {
    return false
  }

  return keys.some((key) => !areValuesEqual(previousConfig[key], nextConfig[key]))
}
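
A small sketch of how this check behaves: only the keys listed for the provider are compared, so an unrelated config change never forces a recreate. The values below are assumptions chosen for illustration:

```typescript
// Changing a relevant key (siteId for webflow) forces the external subscription to be recreated.
shouldRecreateExternalWebhookSubscription({
  previousProvider: 'webflow',
  nextProvider: 'webflow',
  previousConfig: { siteId: 'site_a', triggerId: 'webflow_form_submission' },
  nextConfig: { siteId: 'site_b', triggerId: 'webflow_form_submission' },
}) // => true

// A provider with no entry in relevantKeysByProvider never triggers recreation.
shouldRecreateExternalWebhookSubscription({
  previousProvider: 'generic',
  nextProvider: 'generic',
  previousConfig: { token: 'a' },
  nextConfig: { token: 'b' },
}) // => false
```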

export async function createExternalWebhookSubscription(
  request: NextRequest,
  webhookData: any,
  workflow: any,
  userId: string,
  requestId: string
): Promise<ExternalSubscriptionResult> {
  const provider = webhookData.provider as string
  const providerConfig = (webhookData.providerConfig as Record<string, unknown>) || {}
  let updatedProviderConfig = providerConfig
  let externalSubscriptionCreated = false

  if (provider === 'airtable') {
    const externalId = await createAirtableWebhookSubscription(userId, webhookData, requestId)
    if (externalId) {
      updatedProviderConfig = { ...updatedProviderConfig, externalId }
      externalSubscriptionCreated = true
    }
  } else if (provider === 'calendly') {
    const externalId = await createCalendlyWebhookSubscription(webhookData, requestId)
    if (externalId) {
      updatedProviderConfig = { ...updatedProviderConfig, externalId }
      externalSubscriptionCreated = true
    }
  } else if (provider === 'microsoft-teams') {
    await createTeamsSubscription(request, webhookData, workflow, requestId)
    externalSubscriptionCreated =
      (providerConfig.triggerId as string | undefined) === 'microsoftteams_chat_subscription'
  } else if (provider === 'telegram') {
    await createTelegramWebhook(request, webhookData, requestId)
    externalSubscriptionCreated = true
  } else if (provider === 'webflow') {
    const externalId = await createWebflowWebhookSubscription(userId, webhookData, requestId)
    if (externalId) {
      updatedProviderConfig = { ...updatedProviderConfig, externalId }
      externalSubscriptionCreated = true
    }
  } else if (provider === 'typeform') {
    const usedTag = await createTypeformWebhook(request, webhookData, requestId)
    if (!updatedProviderConfig.webhookTag && usedTag) {
      updatedProviderConfig = { ...updatedProviderConfig, webhookTag: usedTag }
    }
    externalSubscriptionCreated = true
  } else if (provider === 'grain') {
    const result = await createGrainWebhookSubscription(request, webhookData, requestId)
    if (result) {
      updatedProviderConfig = {
        ...updatedProviderConfig,
        externalId: result.id,
        eventTypes: result.eventTypes,
      }
      externalSubscriptionCreated = true
    }
  } else if (provider === 'lemlist') {
    const result = await createLemlistWebhookSubscription(webhookData, requestId)
    if (result) {
      updatedProviderConfig = { ...updatedProviderConfig, externalId: result.id }
      externalSubscriptionCreated = true
    }
  }

  return { updatedProviderConfig, externalSubscriptionCreated }
}
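
As a usage sketch, the value returned here is what callers persist, so later cleanup can locate the remote subscription. `workflowRecord` and the config fields below are placeholders, not values from this diff:

```typescript
// Hypothetical call for a Calendly trigger; the returned config carries the remote id.
const { updatedProviderConfig, externalSubscriptionCreated } =
  await createExternalWebhookSubscription(
    request,
    {
      id: webhookId,
      path: triggerPath,
      provider: 'calendly',
      providerConfig: { apiKey, organization, triggerId },
    },
    workflowRecord, // assumed: the owning workflow row
    userId,
    requestId
  )
// updatedProviderConfig.externalId now identifies the Calendly webhook subscription, and
// externalSubscriptionCreated tells the caller whether rollback/cleanup is needed on failure.
```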

/**
 * Clean up external webhook subscriptions for a webhook
 * Handles Airtable, Teams, Telegram, Typeform, Calendly, Grain, and Lemlist cleanup
@@ -1550,8 +780,6 @@ export async function cleanupExternalWebhook(
    await deleteTypeformWebhook(webhook, requestId)
  } else if (webhook.provider === 'calendly') {
    await deleteCalendlyWebhook(webhook, requestId)
  } else if (webhook.provider === 'webflow') {
    await deleteWebflowWebhook(webhook, workflow, requestId)
  } else if (webhook.provider === 'grain') {
    await deleteGrainWebhook(webhook, requestId)
  } else if (webhook.provider === 'lemlist') {

@@ -1,7 +1,7 @@
import { db } from '@sim/db'
import { workflow, workflowBlocks, workflowEdges, workflowSubflows } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { and, eq, isNull, min } from 'drizzle-orm'
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
import type { Variable } from '@/stores/panel/variables/types'
import type { LoopConfig, ParallelConfig } from '@/stores/workflows/workflow/types'
@@ -26,6 +26,7 @@ interface DuplicateWorkflowResult {
  color: string
  workspaceId: string
  folderId: string | null
  sortOrder: number
  blocksCount: number
  edgesCount: number
  subflowsCount: number
@@ -88,12 +89,29 @@ export async function duplicateWorkflow(
    throw new Error('Source workflow not found or access denied')
  }

  const targetWorkspaceId = workspaceId || source.workspaceId
  const targetFolderId = folderId !== undefined ? folderId : source.folderId
  const folderCondition = targetFolderId
    ? eq(workflow.folderId, targetFolderId)
    : isNull(workflow.folderId)

  const [minResult] = await tx
    .select({ minOrder: min(workflow.sortOrder) })
    .from(workflow)
    .where(
      targetWorkspaceId
        ? and(eq(workflow.workspaceId, targetWorkspaceId), folderCondition)
        : and(eq(workflow.userId, userId), folderCondition)
    )
  const sortOrder = (minResult?.minOrder ?? 1) - 1

  // Create the new workflow first (required for foreign key constraints)
  await tx.insert(workflow).values({
    id: newWorkflowId,
    userId,
    workspaceId: workspaceId || source.workspaceId,
    folderId: folderId !== undefined ? folderId : source.folderId,
    workspaceId: targetWorkspaceId,
    folderId: targetFolderId,
    sortOrder,
    name,
    description: description || source.description,
    color: color || source.color,
@@ -286,7 +304,8 @@ export async function duplicateWorkflow(
    description: description || source.description,
    color: color || source.color,
    workspaceId: finalWorkspaceId,
    folderId: folderId !== undefined ? folderId : source.folderId,
    folderId: targetFolderId,
    sortOrder,
    blocksCount: sourceBlocks.length,
    edgesCount: sourceEdges.length,
    subflowsCount: sourceSubflows.length,
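
The ordering rule introduced in this hunk places the duplicate at the top of its target workspace/folder: the new `sortOrder` is one less than the current minimum, or 0 when no workflows exist there. A tiny worked example with illustrative values:

```typescript
// If existing workflows in the target folder have sortOrder values [-2, 0, 3],
// min(...) is -2, so the duplicate gets -3 and sorts first.
const minOrder: number | null = -2 // assumed query result
const sortOrderForCopy = (minOrder ?? 1) - 1 // => -3; an empty folder yields (1 - 1) = 0
```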

@@ -253,6 +253,30 @@ const nextConfig: NextConfig = {
  async redirects() {
    const redirects = []

    // Social link redirects (used in emails to avoid spam filter issues)
    redirects.push(
      {
        source: '/discord',
        destination: 'https://discord.gg/Hr4UWYEcTT',
        permanent: false,
      },
      {
        source: '/x',
        destination: 'https://x.com/simdotai',
        permanent: false,
      },
      {
        source: '/github',
        destination: 'https://github.com/simstudioai/sim',
        permanent: false,
      },
      {
        source: '/team',
        destination: 'https://cal.com/emirkarabeg/sim-team',
        permanent: false,
      }
    )

    // Redirect /building and /blog to /studio (legacy URL support)
    redirects.push(
      {

@@ -422,7 +422,8 @@ function abortAllInProgressTools(set: any, get: () => CopilotStore) {
 * Loads messages from DB for UI rendering.
 * Messages are stored exactly as they render, so we just need to:
 * 1. Register client tool instances for any tool calls
 * 2. Return the messages as-is
 * 2. Clear any streaming flags (messages loaded from DB are never actively streaming)
 * 3. Return the messages
 */
function normalizeMessagesForUI(messages: CopilotMessage[]): CopilotMessage[] {
  try {
@@ -438,23 +439,54 @@ function normalizeMessagesForUI(messages: CopilotMessage[]): CopilotMessage[] {
      }
    }

    // Register client tool instances for all tool calls so they can be looked up
    // Register client tool instances and clear streaming flags for all tool calls
    for (const message of messages) {
      if (message.contentBlocks) {
        for (const block of message.contentBlocks as any[]) {
          if (block?.type === 'tool_call' && block.toolCall) {
            registerToolCallInstances(block.toolCall)
            clearStreamingFlags(block.toolCall)
          }
        }
      }
      // Also clear from toolCalls array (legacy format)
      if (message.toolCalls) {
        for (const toolCall of message.toolCalls) {
          clearStreamingFlags(toolCall)
        }
      }
    }
    // Return messages as-is - they're already in the correct format for rendering
    return messages
  } catch {
    return messages
  }
}

/**
 * Recursively clears streaming flags from a tool call and its nested subagent tool calls.
 * This ensures messages loaded from DB don't appear to be streaming.
 */
function clearStreamingFlags(toolCall: any): void {
  if (!toolCall) return

  // Always set subAgentStreaming to false - messages loaded from DB are never streaming
  toolCall.subAgentStreaming = false

  // Clear nested subagent tool calls
  if (Array.isArray(toolCall.subAgentBlocks)) {
    for (const block of toolCall.subAgentBlocks) {
      if (block?.type === 'subagent_tool_call' && block.toolCall) {
        clearStreamingFlags(block.toolCall)
      }
    }
  }
  if (Array.isArray(toolCall.subAgentToolCalls)) {
    for (const subTc of toolCall.subAgentToolCalls) {
      clearStreamingFlags(subTc)
    }
  }
}
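
A short sketch of the flag-clearing behavior on a nested tool call; the object shape is trimmed to the fields the helper touches and is illustrative only:

```typescript
// After loading from the DB, neither the parent nor the nested subagent call appears to stream.
const toolCall: any = {
  subAgentStreaming: true,
  subAgentBlocks: [{ type: 'subagent_tool_call', toolCall: { subAgentStreaming: true } }],
}
clearStreamingFlags(toolCall)
// toolCall.subAgentStreaming === false
// toolCall.subAgentBlocks[0].toolCall.subAgentStreaming === false
```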

/**
 * Recursively registers client tool instances for a tool call and its nested subagent tool calls.
 */

@@ -106,6 +106,9 @@ export interface CopilotState {
  | 'gpt-5.1-high'
  | 'gpt-5-codex'
  | 'gpt-5.1-codex'
  | 'gpt-5.2'
  | 'gpt-5.2-codex'
  | 'gpt-5.2-pro'
  | 'gpt-4o'
  | 'gpt-4.1'
  | 'o3'

@@ -8,8 +8,24 @@ export const SYSTEM_SUBBLOCK_IDS: string[] = [
  'triggerCredentials', // OAuth credentials subblock
  'triggerInstructions', // Setup instructions text
  'webhookUrlDisplay', // Webhook URL display
  'triggerSave', // Save configuration button
  'samplePayload', // Example payload display
  'setupScript', // Setup script code (e.g., Apps Script)
  'triggerId', // Stored trigger ID
  'selectedTriggerId', // Selected trigger from dropdown (multi-trigger blocks)
]

/**
 * Trigger-related subblock IDs whose values should be persisted and
 * propagated when workflows are edited programmatically.
 */
export const TRIGGER_PERSISTED_SUBBLOCK_IDS: string[] = [
  'triggerConfig',
  'triggerCredentials',
  'triggerId',
  'selectedTriggerId',
  'webhookId',
  'triggerPath',
]

/**

@@ -19,6 +19,7 @@ export function grainSetupInstructions(eventType: string): string {
  const instructions = [
    'Enter your Grain API Key (Personal Access Token) above.',
    'You can find or create your API key in Grain at <strong>Settings > Integrations > API</strong>.',
    `Click <strong>"Save Configuration"</strong> to automatically create the webhook in Grain for <strong>${eventType}</strong> events.`,
    'The webhook will be automatically deleted when you remove this trigger.',
  ]

@@ -82,8 +82,9 @@ export function hubspotSetupInstructions(eventType: string, additionalNotes?: st
    '<strong>Step 3: Configure OAuth Settings</strong><br/>After creating your app via CLI, configure it to add the OAuth Redirect URL: <code>https://www.sim.ai/api/auth/oauth2/callback/hubspot</code>. Then retrieve your <strong>Client ID</strong> and <strong>Client Secret</strong> from your app configuration and enter them in the fields above.',
    "<strong>Step 4: Get App ID and Developer API Key</strong><br/>In your HubSpot developer account, find your <strong>App ID</strong> (shown below your app name) and your <strong>Developer API Key</strong> (in app settings). You'll need both for the next steps.",
    '<strong>Step 5: Set Required Scopes</strong><br/>Configure your app to include the required OAuth scope: <code>crm.objects.contacts.read</code>',
    '<strong>Step 6: Configure Webhook in HubSpot via API</strong><br/>After saving above, copy the <strong>Webhook URL</strong> and run the two curl commands below (replace <code>{YOUR_APP_ID}</code>, <code>{YOUR_DEVELOPER_API_KEY}</code>, and <code>{YOUR_WEBHOOK_URL_FROM_ABOVE}</code> with your actual values).',
    "<strong>Step 7: Test Your Webhook</strong><br/>Create or modify a contact in HubSpot to trigger the webhook. Check your workflow execution logs in Sim to verify it's working.",
    '<strong>Step 6: Save Configuration in Sim</strong><br/>Click the <strong>"Save Configuration"</strong> button above. This will generate your unique webhook URL.',
    '<strong>Step 7: Configure Webhook in HubSpot via API</strong><br/>After saving above, copy the <strong>Webhook URL</strong> and run the two curl commands below (replace <code>{YOUR_APP_ID}</code>, <code>{YOUR_DEVELOPER_API_KEY}</code>, and <code>{YOUR_WEBHOOK_URL_FROM_ABOVE}</code> with your actual values).',
    "<strong>Step 8: Test Your Webhook</strong><br/>Create or modify a contact in HubSpot to trigger the webhook. Check your workflow execution logs in Sim to verify it's working.",
  ]

  if (additionalNotes) {

@@ -14,9 +14,6 @@ export function getTrigger(triggerId: string): TriggerConfig {
  }

  const clonedTrigger = { ...trigger, subBlocks: [...trigger.subBlocks] }
  clonedTrigger.subBlocks = clonedTrigger.subBlocks.filter(
    (subBlock) => subBlock.id !== 'triggerSave' && subBlock.type !== 'trigger-save'
  )

  // Inject samplePayload for webhooks/pollers with condition
  if (trigger.webhook || trigger.id.includes('webhook') || trigger.id.includes('poller')) {
@@ -158,6 +155,16 @@ export function buildTriggerSubBlocks(options: BuildTriggerSubBlocksOptions): Su
  }

  // Save button
  blocks.push({
    id: 'triggerSave',
    title: '',
    type: 'trigger-save',
    hideFromPreview: true,
    mode: 'trigger',
    triggerId: triggerId,
    condition: { field: 'selectedTriggerId', value: triggerId },
  })

  // Setup instructions
  blocks.push({
    id: 'triggerInstructions',

@@ -23,6 +23,7 @@ export function lemlistSetupInstructions(eventType: string): string {
  const instructions = [
    'Enter your Lemlist API Key above.',
    'You can find your API key in Lemlist at <strong>Settings > Integrations > API</strong>.',
    `Click <strong>"Save Configuration"</strong> to automatically create the webhook in Lemlist for <strong>${eventType}</strong> events.`,
    'The webhook will be automatically deleted when you remove this trigger.',
  ]

@@ -129,6 +129,7 @@ Return ONLY the TwiML with square brackets - no explanations, no markdown, no ex
    'Scroll down to the "Voice Configuration" section.',
    'In the "A CALL COMES IN" field, select "Webhook" and paste the Webhook URL (from above).',
    'Ensure the HTTP method is set to POST.',
    'Click "Save configuration".',
    'How it works: When a call comes in, Twilio receives your TwiML response immediately and executes those instructions. Your workflow runs in the background with access to caller information, call status, and any recorded/transcribed data.',
  ]
    .map((instruction, index) => `${index + 1}. ${instruction}`)

@@ -32,5 +32,5 @@
    "trigger.config.ts",
    ".next/dev/types/**/*.ts"
  ],
  "exclude": ["node_modules"]
  "exclude": ["node_modules", "vitest.config.ts", "vitest.setup.ts"]
}