Compare commits


17 Commits

Author SHA1 Message Date
Waleed Latif
ad109712c0 move userstats record creation inside tx 2026-02-11 20:22:27 -08:00
Waleed Latif
720137414f lint 2026-02-11 20:16:57 -08:00
Waleed Latif
043f060c24 reran migrations 2026-02-11 20:14:34 -08:00
Waleed Latif
8abe8af289 update admin routes 2026-02-11 19:57:46 -08:00
Waleed
85284eb7c4 fix(terminal): reconnect to running executions after page refresh (#3200)
* fix(terminal): reconnect to running executions after page refresh

* fix(terminal): use ExecutionEvent type instead of any in reconnection stream

* fix(execution): type event buffer with ExecutionEvent instead of Record<string, unknown>

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>

* fix(execution): validate fromEventId query param in reconnection endpoint

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>

* Fix some bugs

* fix(variables): fix tag dropdown and cursor alignment in variables block (#3199)

* feat(confluence): added list space labels, delete label, delete page prop (#3201)

* updated route

* ack comments

* fix(execution): reset execution state in reconnection cleanup to unblock re-entry

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>

* fix(execution): restore running entries when reconnection is interrupted by navigation

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>

* done

* remove cast in ioredis types

* ack PR comments

---------

Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
Co-authored-by: Siddharth Ganesan <siddharthganesan@gmail.com>
2026-02-11 19:37:12 -08:00
Waleed
d068ed6d65 fix(change-detection): resolve false positive trigger block change detection (#3204) 2026-02-11 19:37:12 -08:00
Vikhyath Mondreti
bef43f3e84 fix build 2026-02-11 19:37:12 -08:00
Waleed
7453a2bd27 fix(confl): use recommended query param pattern for confluence route (#3202)
* fix(confl): use recommended query param pattern for confluence route

* use unused var
2026-02-11 19:37:12 -08:00
Waleed
bf41c78815 feat(confluence): added list space labels, delete label, delete page prop (#3201) 2026-02-11 19:37:12 -08:00
Waleed
ccdd42639f fix(variables): fix tag dropdown and cursor alignment in variables block (#3199) 2026-02-11 19:37:12 -08:00
Waleed
4a1dd261da fix(hotkeys): remove C, T, E tab-switching hotkeys (#3197) 2026-02-11 19:37:12 -08:00
Waleed
ae43131cfe improvement(oom): increase trigger machine size (#3196) 2026-02-11 19:37:12 -08:00
Waleed Latif
808dc4f8b4 remove duplicate index 2026-02-11 13:14:52 -08:00
Waleed Latif
27e03c44dc remove default 2026-02-11 13:07:42 -08:00
Waleed Latif
05d1c92e1a added zod 2026-02-11 12:29:37 -08:00
Waleed Latif
9228893c19 more 2026-02-11 12:25:19 -08:00
Waleed Latif
eedf67013c feat(creators): added referrers, code redemption, campaign tracking, etc 2026-02-11 11:54:50 -08:00
37 changed files with 13527 additions and 175 deletions

View File

@@ -0,0 +1,215 @@
/**
* POST /api/attribution
*
* Automatic UTM-based referral attribution for new signups.
*
* Reads the `sim_utm` cookie (set by proxy on auth pages), verifies the user
* account was created after the cookie was set, matches a campaign by UTM
* specificity, and atomically inserts an attribution record + applies bonus credits.
*
* Idempotent — the unique constraint on `userId` prevents double-attribution.
*/
import { db } from '@sim/db'
import { referralAttribution, referralCampaigns, user, userStats } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { nanoid } from 'nanoid'
import { cookies } from 'next/headers'
import { NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { applyBonusCredits } from '@/lib/billing/credits/bonus'
const logger = createLogger('AttributionAPI')
const COOKIE_NAME = 'sim_utm'
const CLOCK_DRIFT_TOLERANCE_MS = 60 * 1000
const UtmCookieSchema = z.object({
utm_source: z.string().optional(),
utm_medium: z.string().optional(),
utm_campaign: z.string().optional(),
utm_content: z.string().optional(),
referrer_url: z.string().optional(),
landing_page: z.string().optional(),
created_at: z.string().min(1),
})
/**
* Finds the most specific active campaign matching the given UTM params.
* Null fields on a campaign act as wildcards. Ties broken by newest campaign.
*/
async function findMatchingCampaign(utmData: z.infer<typeof UtmCookieSchema>) {
const campaigns = await db
.select()
.from(referralCampaigns)
.where(eq(referralCampaigns.isActive, true))
let bestMatch: (typeof campaigns)[number] | null = null
let bestScore = -1
for (const campaign of campaigns) {
let score = 0
let mismatch = false
const fields = [
{ campaignVal: campaign.utmSource, utmVal: utmData.utm_source },
{ campaignVal: campaign.utmMedium, utmVal: utmData.utm_medium },
{ campaignVal: campaign.utmCampaign, utmVal: utmData.utm_campaign },
{ campaignVal: campaign.utmContent, utmVal: utmData.utm_content },
] as const
for (const { campaignVal, utmVal } of fields) {
if (campaignVal === null) continue
if (campaignVal === utmVal) {
score++
} else {
mismatch = true
break
}
}
if (!mismatch && score > 0) {
if (
score > bestScore ||
(score === bestScore &&
bestMatch &&
campaign.createdAt.getTime() > bestMatch.createdAt.getTime())
) {
bestScore = score
bestMatch = campaign
}
}
}
return bestMatch
}
export async function POST() {
try {
const session = await getSession()
if (!session?.user?.id) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const cookieStore = await cookies()
const utmCookie = cookieStore.get(COOKIE_NAME)
if (!utmCookie?.value) {
return NextResponse.json({ attributed: false, reason: 'no_utm_cookie' })
}
let utmData: z.infer<typeof UtmCookieSchema>
try {
let decoded: string
try {
decoded = decodeURIComponent(utmCookie.value)
} catch {
decoded = utmCookie.value
}
utmData = UtmCookieSchema.parse(JSON.parse(decoded))
} catch {
logger.warn('Failed to parse UTM cookie', { userId: session.user.id })
cookieStore.delete(COOKIE_NAME)
return NextResponse.json({ attributed: false, reason: 'invalid_cookie' })
}
const cookieCreatedAt = Number(utmData.created_at)
if (!Number.isFinite(cookieCreatedAt)) {
logger.warn('UTM cookie has invalid created_at timestamp', { userId: session.user.id })
cookieStore.delete(COOKIE_NAME)
return NextResponse.json({ attributed: false, reason: 'invalid_cookie' })
}
const userRows = await db
.select({ createdAt: user.createdAt })
.from(user)
.where(eq(user.id, session.user.id))
.limit(1)
if (userRows.length === 0) {
return NextResponse.json({ error: 'User not found' }, { status: 404 })
}
const userCreatedAt = userRows[0].createdAt.getTime()
if (userCreatedAt < cookieCreatedAt - CLOCK_DRIFT_TOLERANCE_MS) {
logger.info('User account predates UTM cookie, skipping attribution', {
userId: session.user.id,
userCreatedAt: new Date(userCreatedAt).toISOString(),
cookieCreatedAt: new Date(cookieCreatedAt).toISOString(),
})
cookieStore.delete(COOKIE_NAME)
return NextResponse.json({ attributed: false, reason: 'account_predates_cookie' })
}
const matchedCampaign = await findMatchingCampaign(utmData)
if (!matchedCampaign) {
cookieStore.delete(COOKIE_NAME)
return NextResponse.json({ attributed: false, reason: 'no_matching_campaign' })
}
const bonusAmount = Number(matchedCampaign.bonusCreditAmount)
let attributed = false
await db.transaction(async (tx) => {
const [existingStats] = await tx
.select({ id: userStats.id })
.from(userStats)
.where(eq(userStats.userId, session.user.id))
.limit(1)
if (!existingStats) {
await tx.insert(userStats).values({
id: nanoid(),
userId: session.user.id,
})
}
const result = await tx
.insert(referralAttribution)
.values({
id: nanoid(),
userId: session.user.id,
campaignId: matchedCampaign.id,
utmSource: utmData.utm_source || null,
utmMedium: utmData.utm_medium || null,
utmCampaign: utmData.utm_campaign || null,
utmContent: utmData.utm_content || null,
referrerUrl: utmData.referrer_url || null,
landingPage: utmData.landing_page || null,
bonusCreditAmount: bonusAmount.toString(),
})
.onConflictDoNothing({ target: referralAttribution.userId })
.returning({ id: referralAttribution.id })
if (result.length > 0) {
await applyBonusCredits(session.user.id, bonusAmount, tx)
attributed = true
}
})
if (attributed) {
logger.info('Referral attribution created and bonus credits applied', {
userId: session.user.id,
campaignId: matchedCampaign.id,
campaignName: matchedCampaign.name,
utmSource: utmData.utm_source,
utmCampaign: utmData.utm_campaign,
utmContent: utmData.utm_content,
bonusAmount,
})
} else {
logger.info('User already attributed, skipping', { userId: session.user.id })
}
cookieStore.delete(COOKIE_NAME)
return NextResponse.json({
attributed,
bonusAmount: attributed ? bonusAmount : undefined,
})
} catch (error) {
logger.error('Attribution error', { error })
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}
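
For context, here is a minimal client-side sketch of how a frontend might call this endpoint right after signup. The endpoint path and the response shape come from the route above; the helper name and the point at which it is invoked are assumptions.

// Hypothetical post-signup helper: the browser sends the `sim_utm` cookie automatically.
async function claimSignupAttribution(): Promise<void> {
  const res = await fetch('/api/attribution', { method: 'POST' })
  if (!res.ok) return // 401/404/500: nothing to attribute
  const data: { attributed: boolean; reason?: string; bonusAmount?: number } = await res.json()
  if (data.attributed) {
    console.log(`Referral attribution recorded, bonus credits applied: ${data.bonusAmount}`)
  } else {
    console.log(`No attribution: ${data.reason}`)
  }
}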

View File

@@ -0,0 +1,170 @@
/**
* POST /api/referral-code/redeem
*
* Redeem a referral/promo code to receive bonus credits.
*
* Body:
* - code: string — The referral code to redeem
*
* Response: { redeemed: boolean, bonusAmount?: number, error?: string }
*
* Constraints:
* - Enterprise users cannot redeem codes
* - One redemption per user, ever (unique constraint on userId)
* - One redemption per organization for team users (partial unique on organizationId)
*/
import { db } from '@sim/db'
import { referralAttribution, referralCampaigns, userStats } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { nanoid } from 'nanoid'
import { NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { getHighestPrioritySubscription } from '@/lib/billing/core/subscription'
import { applyBonusCredits } from '@/lib/billing/credits/bonus'
const logger = createLogger('ReferralCodeRedemption')
const RedeemCodeSchema = z.object({
code: z.string().min(1, 'Code is required'),
})
export async function POST(request: Request) {
try {
const session = await getSession()
if (!session?.user?.id) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const body = await request.json()
const { code } = RedeemCodeSchema.parse(body)
const subscription = await getHighestPrioritySubscription(session.user.id)
if (subscription?.plan === 'enterprise') {
return NextResponse.json({
redeemed: false,
error: 'Enterprise accounts cannot redeem referral codes',
})
}
const isTeam = subscription?.plan === 'team'
const orgId = isTeam ? subscription.referenceId : null
const normalizedCode = code.trim().toUpperCase()
const [campaign] = await db
.select()
.from(referralCampaigns)
.where(and(eq(referralCampaigns.code, normalizedCode), eq(referralCampaigns.isActive, true)))
.limit(1)
if (!campaign) {
logger.info('Invalid code redemption attempt', {
userId: session.user.id,
code: normalizedCode,
})
return NextResponse.json({ error: 'Invalid or expired code' }, { status: 404 })
}
const [existingUserAttribution] = await db
.select({ id: referralAttribution.id })
.from(referralAttribution)
.where(eq(referralAttribution.userId, session.user.id))
.limit(1)
if (existingUserAttribution) {
return NextResponse.json({
redeemed: false,
error: 'You have already redeemed a code',
})
}
if (orgId) {
const [existingOrgAttribution] = await db
.select({ id: referralAttribution.id })
.from(referralAttribution)
.where(eq(referralAttribution.organizationId, orgId))
.limit(1)
if (existingOrgAttribution) {
return NextResponse.json({
redeemed: false,
error: 'A code has already been redeemed for your organization',
})
}
}
const bonusAmount = Number(campaign.bonusCreditAmount)
let redeemed = false
await db.transaction(async (tx) => {
const [existingStats] = await tx
.select({ id: userStats.id })
.from(userStats)
.where(eq(userStats.userId, session.user.id))
.limit(1)
if (!existingStats) {
await tx.insert(userStats).values({
id: nanoid(),
userId: session.user.id,
})
}
const result = await tx
.insert(referralAttribution)
.values({
id: nanoid(),
userId: session.user.id,
organizationId: orgId,
campaignId: campaign.id,
utmSource: null,
utmMedium: null,
utmCampaign: null,
utmContent: null,
referrerUrl: null,
landingPage: null,
bonusCreditAmount: bonusAmount.toString(),
})
.onConflictDoNothing()
.returning({ id: referralAttribution.id })
if (result.length > 0) {
await applyBonusCredits(session.user.id, bonusAmount, tx)
redeemed = true
}
})
if (redeemed) {
logger.info('Referral code redeemed', {
userId: session.user.id,
organizationId: orgId,
code: normalizedCode,
campaignId: campaign.id,
campaignName: campaign.name,
bonusAmount,
})
}
if (!redeemed) {
return NextResponse.json({
redeemed: false,
error: 'You have already redeemed a code',
})
}
return NextResponse.json({
redeemed: true,
bonusAmount,
})
} catch (error) {
if (error instanceof z.ZodError) {
return NextResponse.json({ error: error.errors[0].message }, { status: 400 })
}
logger.error('Referral code redemption error', { error })
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}
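
A minimal sketch of the corresponding client call, assuming a settings form where the user types a promo code. The request body and response fields mirror the handler above; everything else is illustrative.

// Hypothetical redemption helper for a settings form.
async function redeemReferralCode(code: string): Promise<string> {
  const res = await fetch('/api/referral-code/redeem', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ code }),
  })
  const data: { redeemed?: boolean; bonusAmount?: number; error?: string } = await res.json()
  if (!res.ok || !data.redeemed) return data.error ?? 'Redemption failed'
  return `Code accepted, ${data.bonusAmount} in bonus credits applied`
}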

View File

@@ -66,6 +66,12 @@
 * Credits:
 * POST /api/v1/admin/credits - Issue credits to user (by userId or email)
 *
+ * Referral Campaigns:
+ * GET /api/v1/admin/referral-campaigns - List campaigns (?active=true/false)
+ * POST /api/v1/admin/referral-campaigns - Create campaign
+ * GET /api/v1/admin/referral-campaigns/:id - Get campaign details
+ * PATCH /api/v1/admin/referral-campaigns/:id - Update campaign fields
+ *
 * Access Control (Permission Groups):
 * GET /api/v1/admin/access-control - List permission groups (?organizationId=X)
 * DELETE /api/v1/admin/access-control - Delete permission groups for org (?organizationId=X)
@@ -97,6 +103,7 @@ export type {
  AdminOrganization,
  AdminOrganizationBillingSummary,
  AdminOrganizationDetail,
+  AdminReferralCampaign,
  AdminSeatAnalytics,
  AdminSingleResponse,
  AdminSubscription,
@@ -111,6 +118,7 @@ export type {
  AdminWorkspaceMember,
  DbMember,
  DbOrganization,
+  DbReferralCampaign,
  DbSubscription,
  DbUser,
  DbUserStats,
@@ -139,6 +147,7 @@ export {
  parseWorkflowVariables,
  toAdminFolder,
  toAdminOrganization,
+  toAdminReferralCampaign,
  toAdminSubscription,
  toAdminUser,
  toAdminWorkflow,

View File

@@ -0,0 +1,142 @@
/**
* GET /api/v1/admin/referral-campaigns/:id
*
* Get a single referral campaign by ID.
*
* PATCH /api/v1/admin/referral-campaigns/:id
*
* Update campaign fields. All fields are optional.
*
* Body:
* - name: string (non-empty) - Campaign name
* - bonusCreditAmount: number (> 0) - Bonus credits in dollars
* - isActive: boolean - Enable/disable the campaign
* - code: string | null (min 6 chars, auto-uppercased, null to remove) - Redeemable code
* - utmSource: string | null - UTM source match (null = wildcard)
* - utmMedium: string | null - UTM medium match (null = wildcard)
* - utmCampaign: string | null - UTM campaign match (null = wildcard)
* - utmContent: string | null - UTM content match (null = wildcard)
*/
import { db } from '@sim/db'
import { referralCampaigns } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { withAdminAuthParams } from '@/app/api/v1/admin/middleware'
import {
badRequestResponse,
internalErrorResponse,
notFoundResponse,
singleResponse,
} from '@/app/api/v1/admin/responses'
import { toAdminReferralCampaign } from '@/app/api/v1/admin/types'
const logger = createLogger('AdminReferralCampaignDetailAPI')
interface RouteParams {
id: string
}
export const GET = withAdminAuthParams<RouteParams>(async (_, context) => {
try {
const { id: campaignId } = await context.params
const [campaign] = await db
.select()
.from(referralCampaigns)
.where(eq(referralCampaigns.id, campaignId))
.limit(1)
if (!campaign) {
return notFoundResponse('Campaign')
}
logger.info(`Admin API: Retrieved referral campaign ${campaignId}`)
return singleResponse(toAdminReferralCampaign(campaign, getBaseUrl()))
} catch (error) {
logger.error('Admin API: Failed to get referral campaign', { error })
return internalErrorResponse('Failed to get referral campaign')
}
})
export const PATCH = withAdminAuthParams<RouteParams>(async (request, context) => {
try {
const { id: campaignId } = await context.params
const body = await request.json()
const [existing] = await db
.select()
.from(referralCampaigns)
.where(eq(referralCampaigns.id, campaignId))
.limit(1)
if (!existing) {
return notFoundResponse('Campaign')
}
const updateData: Record<string, unknown> = { updatedAt: new Date() }
if (body.name !== undefined) {
if (typeof body.name !== 'string' || body.name.trim().length === 0) {
return badRequestResponse('name must be a non-empty string')
}
updateData.name = body.name.trim()
}
if (body.bonusCreditAmount !== undefined) {
if (
typeof body.bonusCreditAmount !== 'number' ||
!Number.isFinite(body.bonusCreditAmount) ||
body.bonusCreditAmount <= 0
) {
return badRequestResponse('bonusCreditAmount must be a positive number')
}
updateData.bonusCreditAmount = body.bonusCreditAmount.toString()
}
if (body.isActive !== undefined) {
if (typeof body.isActive !== 'boolean') {
return badRequestResponse('isActive must be a boolean')
}
updateData.isActive = body.isActive
}
if (body.code !== undefined) {
if (body.code !== null) {
if (typeof body.code !== 'string') {
return badRequestResponse('code must be a string or null')
}
if (body.code.trim().length < 6) {
return badRequestResponse('code must be at least 6 characters')
}
}
updateData.code = body.code ? body.code.trim().toUpperCase() : null
}
for (const field of ['utmSource', 'utmMedium', 'utmCampaign', 'utmContent'] as const) {
if (body[field] !== undefined) {
if (body[field] !== null && typeof body[field] !== 'string') {
return badRequestResponse(`${field} must be a string or null`)
}
updateData[field] = body[field] || null
}
}
const [updated] = await db
.update(referralCampaigns)
.set(updateData)
.where(eq(referralCampaigns.id, campaignId))
.returning()
logger.info(`Admin API: Updated referral campaign ${campaignId}`, {
fields: Object.keys(updateData).filter((k) => k !== 'updatedAt'),
})
return singleResponse(toAdminReferralCampaign(updated, getBaseUrl()))
} catch (error) {
logger.error('Admin API: Failed to update referral campaign', { error })
return internalErrorResponse('Failed to update referral campaign')
}
})
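
For reference, a sketch of an admin client deactivating a campaign through this PATCH handler. The body field matches the validation above; the auth header is an assumption, since the actual scheme is whatever withAdminAuthParams enforces.

// Hypothetical admin call; replace the Authorization header with the real admin auth scheme.
async function deactivateCampaign(baseUrl: string, campaignId: string, adminToken: string) {
  const res = await fetch(`${baseUrl}/api/v1/admin/referral-campaigns/${campaignId}`, {
    method: 'PATCH',
    headers: { 'Content-Type': 'application/json', Authorization: `Bearer ${adminToken}` },
    body: JSON.stringify({ isActive: false }),
  })
  if (!res.ok) throw new Error(`PATCH failed with status ${res.status}`)
  return res.json()
}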

View File

@@ -0,0 +1,140 @@
/**
* GET /api/v1/admin/referral-campaigns
*
* List referral campaigns with optional filtering and pagination.
*
* Query Parameters:
* - active: string (optional) - Filter by active status ('true' or 'false')
* - limit: number (default: 50, max: 250)
* - offset: number (default: 0)
*
* POST /api/v1/admin/referral-campaigns
*
* Create a new referral campaign.
*
* Body:
* - name: string (required) - Campaign name
* - bonusCreditAmount: number (required, > 0) - Bonus credits in dollars
* - code: string | null (optional, min 6 chars, auto-uppercased) - Redeemable code
* - utmSource: string | null (optional) - UTM source match (null = wildcard)
* - utmMedium: string | null (optional) - UTM medium match (null = wildcard)
* - utmCampaign: string | null (optional) - UTM campaign match (null = wildcard)
* - utmContent: string | null (optional) - UTM content match (null = wildcard)
*/
import { db } from '@sim/db'
import { referralCampaigns } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { count, eq, type SQL } from 'drizzle-orm'
import { nanoid } from 'nanoid'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { withAdminAuth } from '@/app/api/v1/admin/middleware'
import {
badRequestResponse,
internalErrorResponse,
listResponse,
singleResponse,
} from '@/app/api/v1/admin/responses'
import {
type AdminReferralCampaign,
createPaginationMeta,
parsePaginationParams,
toAdminReferralCampaign,
} from '@/app/api/v1/admin/types'
const logger = createLogger('AdminReferralCampaignsAPI')
export const GET = withAdminAuth(async (request) => {
const url = new URL(request.url)
const { limit, offset } = parsePaginationParams(url)
const activeFilter = url.searchParams.get('active')
try {
const conditions: SQL<unknown>[] = []
if (activeFilter === 'true') {
conditions.push(eq(referralCampaigns.isActive, true))
} else if (activeFilter === 'false') {
conditions.push(eq(referralCampaigns.isActive, false))
}
const whereClause = conditions.length > 0 ? conditions[0] : undefined
const baseUrl = getBaseUrl()
const [countResult, campaigns] = await Promise.all([
db.select({ total: count() }).from(referralCampaigns).where(whereClause),
db
.select()
.from(referralCampaigns)
.where(whereClause)
.orderBy(referralCampaigns.createdAt)
.limit(limit)
.offset(offset),
])
const total = countResult[0].total
const data: AdminReferralCampaign[] = campaigns.map((c) => toAdminReferralCampaign(c, baseUrl))
const pagination = createPaginationMeta(total, limit, offset)
logger.info(`Admin API: Listed ${data.length} referral campaigns (total: ${total})`)
return listResponse(data, pagination)
} catch (error) {
logger.error('Admin API: Failed to list referral campaigns', { error })
return internalErrorResponse('Failed to list referral campaigns')
}
})
export const POST = withAdminAuth(async (request) => {
try {
const body = await request.json()
const { name, code, utmSource, utmMedium, utmCampaign, utmContent, bonusCreditAmount } = body
if (!name || typeof name !== 'string') {
return badRequestResponse('name is required and must be a string')
}
if (
typeof bonusCreditAmount !== 'number' ||
!Number.isFinite(bonusCreditAmount) ||
bonusCreditAmount <= 0
) {
return badRequestResponse('bonusCreditAmount must be a positive number')
}
if (code !== undefined && code !== null) {
if (typeof code !== 'string') {
return badRequestResponse('code must be a string or null')
}
if (code.trim().length < 6) {
return badRequestResponse('code must be at least 6 characters')
}
}
const id = nanoid()
const [campaign] = await db
.insert(referralCampaigns)
.values({
id,
name,
code: code ? code.trim().toUpperCase() : null,
utmSource: utmSource || null,
utmMedium: utmMedium || null,
utmCampaign: utmCampaign || null,
utmContent: utmContent || null,
bonusCreditAmount: bonusCreditAmount.toString(),
})
.returning()
logger.info(`Admin API: Created referral campaign ${id}`, {
name,
code: campaign.code,
bonusCreditAmount,
})
return singleResponse(toAdminReferralCampaign(campaign, getBaseUrl()))
} catch (error) {
logger.error('Admin API: Failed to create referral campaign', { error })
return internalErrorResponse('Failed to create referral campaign')
}
})
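
A sketch of creating a UTM-matched campaign through this endpoint. Field names mirror the documented body; the values and the auth header are illustrative assumptions.

// Hypothetical admin call creating a campaign that matches utm_source + utm_campaign.
async function createNewsletterCampaign(baseUrl: string, adminToken: string) {
  const res = await fetch(`${baseUrl}/api/v1/admin/referral-campaigns`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json', Authorization: `Bearer ${adminToken}` },
    body: JSON.stringify({
      name: 'Newsletter Launch',
      bonusCreditAmount: 10, // dollars of bonus credit
      code: 'LAUNCH2026', // at least 6 chars, stored uppercased
      utmSource: 'newsletter',
      utmCampaign: 'launch',
      utmMedium: null, // null fields act as wildcards during matching
      utmContent: null,
    }),
  })
  if (!res.ok) throw new Error(`POST failed with status ${res.status}`)
  return res.json()
}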

View File

@@ -8,6 +8,7 @@
import type {
  member,
  organization,
+  referralCampaigns,
  subscription,
  user,
  userStats,
@@ -31,6 +32,7 @@ export type DbOrganization = InferSelectModel<typeof organization>
export type DbSubscription = InferSelectModel<typeof subscription>
export type DbMember = InferSelectModel<typeof member>
export type DbUserStats = InferSelectModel<typeof userStats>
+export type DbReferralCampaign = InferSelectModel<typeof referralCampaigns>

// =============================================================================
// Pagination
@@ -646,3 +648,49 @@ export interface AdminDeployResult {
export interface AdminUndeployResult {
  isDeployed: boolean
}
// =============================================================================
// Referral Campaign Types
// =============================================================================
export interface AdminReferralCampaign {
id: string
name: string
code: string | null
utmSource: string | null
utmMedium: string | null
utmCampaign: string | null
utmContent: string | null
bonusCreditAmount: string
isActive: boolean
signupUrl: string | null
createdAt: string
updatedAt: string
}
export function toAdminReferralCampaign(
dbCampaign: DbReferralCampaign,
baseUrl: string
): AdminReferralCampaign {
const utmParams = new URLSearchParams()
if (dbCampaign.utmSource) utmParams.set('utm_source', dbCampaign.utmSource)
if (dbCampaign.utmMedium) utmParams.set('utm_medium', dbCampaign.utmMedium)
if (dbCampaign.utmCampaign) utmParams.set('utm_campaign', dbCampaign.utmCampaign)
if (dbCampaign.utmContent) utmParams.set('utm_content', dbCampaign.utmContent)
const query = utmParams.toString()
return {
id: dbCampaign.id,
name: dbCampaign.name,
code: dbCampaign.code,
utmSource: dbCampaign.utmSource,
utmMedium: dbCampaign.utmMedium,
utmCampaign: dbCampaign.utmCampaign,
utmContent: dbCampaign.utmContent,
bonusCreditAmount: dbCampaign.bonusCreditAmount,
isActive: dbCampaign.isActive,
signupUrl: query ? `${baseUrl}/signup?${query}` : null,
createdAt: dbCampaign.createdAt.toISOString(),
updatedAt: dbCampaign.updatedAt.toISOString(),
}
}

View File

@@ -29,7 +29,7 @@ const patchBodySchema = z
    description: z
      .string()
      .trim()
-      .max(500, 'Description must be 500 characters or less')
+      .max(2000, 'Description must be 2000 characters or less')
      .nullable()
      .optional(),
    isActive: z.literal(true).optional(), // Set to true to activate this version

View File

@@ -12,7 +12,7 @@ import {
import { generateRequestId } from '@/lib/core/utils/request'
import { SSE_HEADERS } from '@/lib/core/utils/sse'
import { getBaseUrl } from '@/lib/core/utils/urls'
-import { markExecutionCancelled } from '@/lib/execution/cancellation'
+import { createExecutionEventWriter, setExecutionMeta } from '@/lib/execution/event-buffer'
import { processInputFileFields } from '@/lib/execution/files'
import { preprocessExecution } from '@/lib/execution/preprocessing'
import { LoggingSession } from '@/lib/logs/execution/logging-session'
@@ -700,15 +700,27 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
  const timeoutController = createTimeoutAbortController(preprocessResult.executionTimeout?.sync)
  let isStreamClosed = false
+  const eventWriter = createExecutionEventWriter(executionId)
+  setExecutionMeta(executionId, {
+    status: 'active',
+    userId: actorUserId,
+    workflowId,
+  }).catch(() => {})
  const stream = new ReadableStream<Uint8Array>({
    async start(controller) {
-      const sendEvent = (event: ExecutionEvent) => {
-        if (isStreamClosed) return
-        try {
-          controller.enqueue(encodeSSEEvent(event))
-        } catch {
-          isStreamClosed = true
-        }
-      }
+      let finalMetaStatus: 'complete' | 'error' | 'cancelled' | null = null
+      const sendEvent = (event: ExecutionEvent) => {
+        if (!isStreamClosed) {
+          try {
+            controller.enqueue(encodeSSEEvent(event))
+          } catch {
+            isStreamClosed = true
+          }
+        }
+        if (event.type !== 'stream:chunk' && event.type !== 'stream:done') {
+          eventWriter.write(event).catch(() => {})
+        }
+      }
@@ -829,14 +841,12 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
      const reader = streamingExec.stream.getReader()
      const decoder = new TextDecoder()
-      let chunkCount = 0
      try {
        while (true) {
          const { done, value } = await reader.read()
          if (done) break
-          chunkCount++
          const chunk = decoder.decode(value, { stream: true })
          sendEvent({
            type: 'stream:chunk',
@@ -951,6 +961,7 @@
              duration: result.metadata?.duration || 0,
            },
          })
+          finalMetaStatus = 'error'
        } else {
          logger.info(`[${requestId}] Workflow execution was cancelled`)
@@ -963,6 +974,7 @@
              duration: result.metadata?.duration || 0,
            },
          })
+          finalMetaStatus = 'cancelled'
        }
        return
      }
@@ -986,6 +998,7 @@
          endTime: result.metadata?.endTime || new Date().toISOString(),
        },
      })
+      finalMetaStatus = 'complete'
    } catch (error: unknown) {
      const isTimeout = isTimeoutError(error) || timeoutController.isTimedOut()
      const errorMessage = isTimeout
@@ -1017,7 +1030,18 @@
          duration: executionResult?.metadata?.duration || 0,
        },
      })
+      finalMetaStatus = 'error'
    } finally {
+      try {
+        await eventWriter.close()
+      } catch (closeError) {
+        logger.warn(`[${requestId}] Failed to close event writer`, {
+          error: closeError instanceof Error ? closeError.message : String(closeError),
+        })
+      }
+      if (finalMetaStatus) {
+        setExecutionMeta(executionId, { status: finalMetaStatus }).catch(() => {})
+      }
      timeoutController.cleanup()
      if (executionId) {
        await cleanupExecutionBase64Cache(executionId)
@@ -1032,10 +1056,7 @@
    },
    cancel() {
      isStreamClosed = true
-      timeoutController.cleanup()
-      logger.info(`[${requestId}] Client aborted SSE stream, signalling cancellation`)
-      timeoutController.abort()
-      markExecutionCancelled(executionId).catch(() => {})
+      logger.info(`[${requestId}] Client disconnected from SSE stream`)
    },
  })

View File

@@ -0,0 +1,170 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { SSE_HEADERS } from '@/lib/core/utils/sse'
import {
type ExecutionStreamStatus,
getExecutionMeta,
readExecutionEvents,
} from '@/lib/execution/event-buffer'
import { formatSSEEvent } from '@/lib/workflows/executor/execution-events'
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
const logger = createLogger('ExecutionStreamReconnectAPI')
const POLL_INTERVAL_MS = 500
const MAX_POLL_DURATION_MS = 10 * 60 * 1000 // 10 minutes
function isTerminalStatus(status: ExecutionStreamStatus): boolean {
return status === 'complete' || status === 'error' || status === 'cancelled'
}
export const runtime = 'nodejs'
export const dynamic = 'force-dynamic'
export async function GET(
req: NextRequest,
{ params }: { params: Promise<{ id: string; executionId: string }> }
) {
const { id: workflowId, executionId } = await params
try {
const auth = await checkHybridAuth(req, { requireWorkflowId: false })
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}
const workflowAuthorization = await authorizeWorkflowByWorkspacePermission({
workflowId,
userId: auth.userId,
action: 'read',
})
if (!workflowAuthorization.allowed) {
return NextResponse.json(
{ error: workflowAuthorization.message || 'Access denied' },
{ status: workflowAuthorization.status }
)
}
const meta = await getExecutionMeta(executionId)
if (!meta) {
return NextResponse.json({ error: 'Execution buffer not found or expired' }, { status: 404 })
}
if (meta.workflowId && meta.workflowId !== workflowId) {
return NextResponse.json(
{ error: 'Execution does not belong to this workflow' },
{ status: 403 }
)
}
const fromParam = req.nextUrl.searchParams.get('from')
const parsed = fromParam ? Number.parseInt(fromParam, 10) : 0
const fromEventId = Number.isFinite(parsed) && parsed >= 0 ? parsed : 0
logger.info('Reconnection stream requested', {
workflowId,
executionId,
fromEventId,
metaStatus: meta.status,
})
const encoder = new TextEncoder()
let closed = false
const stream = new ReadableStream<Uint8Array>({
async start(controller) {
let lastEventId = fromEventId
const pollDeadline = Date.now() + MAX_POLL_DURATION_MS
const enqueue = (text: string) => {
if (closed) return
try {
controller.enqueue(encoder.encode(text))
} catch {
closed = true
}
}
try {
const events = await readExecutionEvents(executionId, lastEventId)
for (const entry of events) {
if (closed) return
enqueue(formatSSEEvent(entry.event))
lastEventId = entry.eventId
}
const currentMeta = await getExecutionMeta(executionId)
if (!currentMeta || isTerminalStatus(currentMeta.status)) {
enqueue('data: [DONE]\n\n')
if (!closed) controller.close()
return
}
while (!closed && Date.now() < pollDeadline) {
await new Promise((resolve) => setTimeout(resolve, POLL_INTERVAL_MS))
if (closed) return
const newEvents = await readExecutionEvents(executionId, lastEventId)
for (const entry of newEvents) {
if (closed) return
enqueue(formatSSEEvent(entry.event))
lastEventId = entry.eventId
}
const polledMeta = await getExecutionMeta(executionId)
if (!polledMeta || isTerminalStatus(polledMeta.status)) {
const finalEvents = await readExecutionEvents(executionId, lastEventId)
for (const entry of finalEvents) {
if (closed) return
enqueue(formatSSEEvent(entry.event))
lastEventId = entry.eventId
}
enqueue('data: [DONE]\n\n')
if (!closed) controller.close()
return
}
}
if (!closed) {
logger.warn('Reconnection stream poll deadline reached', { executionId })
enqueue('data: [DONE]\n\n')
controller.close()
}
} catch (error) {
logger.error('Error in reconnection stream', {
executionId,
error: error instanceof Error ? error.message : String(error),
})
if (!closed) {
try {
controller.close()
} catch {}
}
}
},
cancel() {
closed = true
logger.info('Client disconnected from reconnection stream', { executionId })
},
})
return new NextResponse(stream, {
headers: {
...SSE_HEADERS,
'X-Execution-Id': executionId,
},
})
} catch (error: any) {
logger.error('Failed to start reconnection stream', {
workflowId,
executionId,
error: error.message,
})
return NextResponse.json(
{ error: error.message || 'Failed to start reconnection stream' },
{ status: 500 }
)
}
}
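
A minimal client-side sketch of consuming this reconnection stream after a page refresh. The `from` query parameter and the `data: [DONE]` sentinel come from the handler above; the URL is passed in as a placeholder because the route's path segment is not visible in this diff, and the SSE parsing is simplified (a real client would reuse the ExecutionEvent handlers from useWorkflowExecution).

// reconnectUrl stands in for this route's URL, built from workflowId + executionId.
async function replayExecutionEvents(reconnectUrl: string, fromEventId: number) {
  const res = await fetch(`${reconnectUrl}?from=${fromEventId}`)
  if (!res.ok || !res.body) throw new Error(`Reconnect failed: ${res.status}`)
  const reader = res.body.getReader()
  const decoder = new TextDecoder()
  let buffered = ''
  while (true) {
    const { done, value } = await reader.read()
    if (done) break
    buffered += decoder.decode(value, { stream: true })
    const frames = buffered.split('\n\n') // each SSE frame ends with a blank line
    buffered = frames.pop() ?? ''
    for (const frame of frames) {
      const payload = frame.replace(/^data: /, '')
      if (payload === '[DONE]') return
      const event = JSON.parse(payload) // buffered ExecutionEvent replayed in order
      console.log('replayed event', event.type)
    }
  }
}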

View File

@@ -113,7 +113,7 @@ export function VersionDescriptionModal({
            className='min-h-[120px] resize-none'
            value={description}
            onChange={(e) => setDescription(e.target.value)}
-            maxLength={500}
+            maxLength={2000}
            disabled={isGenerating}
          />
          <div className='flex items-center justify-between'>
@@ -123,7 +123,7 @@
              </p>
            )}
            {!updateMutation.error && !generateMutation.error && <div />}
-            <p className='text-[11px] text-[var(--text-tertiary)]'>{description.length}/500</p>
+            <p className='text-[11px] text-[var(--text-tertiary)]'>{description.length}/2000</p>
          </div>
        </ModalBody>
        <ModalFooter>

View File

@@ -57,6 +57,21 @@ export function useChangeDetection({
        }
      }
+
+      if (block.triggerMode) {
+        const triggerConfigValue = blockSubValues?.triggerConfig
+        if (
+          triggerConfigValue &&
+          typeof triggerConfigValue === 'object' &&
+          !subBlocks.triggerConfig
+        ) {
+          subBlocks.triggerConfig = {
+            id: 'triggerConfig',
+            type: 'short-input',
+            value: triggerConfigValue,
+          }
+        }
+      }
+
      blocksWithSubBlocks[blockId] = {
        ...block,
        subBlocks,

View File

@@ -1,4 +1,4 @@
-import { useCallback, useRef, useState } from 'react'
+import { useCallback, useEffect, useRef, useState } from 'react'
import { createLogger } from '@sim/logger'
import { useQueryClient } from '@tanstack/react-query'
import { v4 as uuidv4 } from 'uuid'
@@ -46,7 +46,13 @@ import { useWorkflowStore } from '@/stores/workflows/workflow/store'
const logger = createLogger('useWorkflowExecution')
-// Debug state validation result
+/**
+ * Module-level Set tracking which workflows have an active reconnection effect.
+ * Prevents multiple hook instances (from different components) from starting
+ * concurrent reconnection streams for the same workflow during the same mount cycle.
+ */
+const activeReconnections = new Set<string>()
interface DebugValidationResult {
  isValid: boolean
  error?: string
@@ -54,7 +60,7 @@ interface DebugValidationResult {
interface BlockEventHandlerConfig {
  workflowId?: string
-  executionId?: string
+  executionIdRef: { current: string }
  workflowEdges: Array<{ id: string; target: string; sourceHandle?: string | null }>
  activeBlocksSet: Set<string>
  accumulatedBlockLogs: BlockLog[]
@@ -108,12 +114,15 @@ export function useWorkflowExecution() {
  const queryClient = useQueryClient()
  const currentWorkflow = useCurrentWorkflow()
  const { activeWorkflowId, workflows } = useWorkflowRegistry()
-  const { toggleConsole, addConsole, updateConsole, cancelRunningEntries } =
+  const { toggleConsole, addConsole, updateConsole, cancelRunningEntries, clearExecutionEntries } =
    useTerminalConsoleStore()
+  const hasHydrated = useTerminalConsoleStore((s) => s._hasHydrated)
  const { getAllVariables } = useEnvironmentStore()
  const { getVariablesByWorkflowId, variables } = useVariablesStore()
  const { isExecuting, isDebugging, pendingBlocks, executor, debugContext } =
    useCurrentWorkflowExecution()
+  const setCurrentExecutionId = useExecutionStore((s) => s.setCurrentExecutionId)
+  const getCurrentExecutionId = useExecutionStore((s) => s.getCurrentExecutionId)
  const setIsExecuting = useExecutionStore((s) => s.setIsExecuting)
  const setIsDebugging = useExecutionStore((s) => s.setIsDebugging)
  const setPendingBlocks = useExecutionStore((s) => s.setPendingBlocks)
@@ -297,7 +306,7 @@ export function useWorkflowExecution() {
    (config: BlockEventHandlerConfig) => {
      const {
        workflowId,
-        executionId,
+        executionIdRef,
        workflowEdges,
        activeBlocksSet,
        accumulatedBlockLogs,
@@ -308,6 +317,14 @@
        onBlockCompleteCallback,
      } = config
+      /** Returns true if this execution was cancelled or superseded by another run. */
+      const isStaleExecution = () =>
+        !!(
+          workflowId &&
+          executionIdRef.current &&
+          useExecutionStore.getState().getCurrentExecutionId(workflowId) !== executionIdRef.current
+        )
      const updateActiveBlocks = (blockId: string, isActive: boolean) => {
        if (!workflowId) return
        if (isActive) {
@@ -360,7 +377,7 @@
          endedAt: data.endedAt,
          workflowId,
          blockId: data.blockId,
-          executionId,
+          executionId: executionIdRef.current,
          blockName: data.blockName || 'Unknown Block',
          blockType: data.blockType || 'unknown',
          iterationCurrent: data.iterationCurrent,
@@ -383,7 +400,7 @@
          endedAt: data.endedAt,
          workflowId,
          blockId: data.blockId,
-          executionId,
+          executionId: executionIdRef.current,
          blockName: data.blockName || 'Unknown Block',
          blockType: data.blockType || 'unknown',
          iterationCurrent: data.iterationCurrent,
@@ -410,7 +427,7 @@
            iterationType: data.iterationType,
            iterationContainerId: data.iterationContainerId,
          },
-          executionId
+          executionIdRef.current
        )
      }
@@ -432,11 +449,12 @@
            iterationType: data.iterationType,
            iterationContainerId: data.iterationContainerId,
          },
-          executionId
+          executionIdRef.current
        )
      }
      const onBlockStarted = (data: BlockStartedData) => {
+        if (isStaleExecution()) return
        updateActiveBlocks(data.blockId, true)
        markIncomingEdges(data.blockId)
@@ -453,7 +471,7 @@
          endedAt: undefined,
          workflowId,
          blockId: data.blockId,
-          executionId,
+          executionId: executionIdRef.current,
          blockName: data.blockName || 'Unknown Block',
          blockType: data.blockType || 'unknown',
          isRunning: true,
@@ -465,6 +483,7 @@
      }
      const onBlockCompleted = (data: BlockCompletedData) => {
+        if (isStaleExecution()) return
        updateActiveBlocks(data.blockId, false)
        if (workflowId) setBlockRunStatus(workflowId, data.blockId, 'success')
@@ -495,6 +514,7 @@
      }
      const onBlockError = (data: BlockErrorData) => {
+        if (isStaleExecution()) return
        updateActiveBlocks(data.blockId, false)
        if (workflowId) setBlockRunStatus(workflowId, data.blockId, 'error')
@@ -902,10 +922,6 @@
      // Update block logs with actual stream completion times
      if (result.logs && streamCompletionTimes.size > 0) {
-        const streamCompletionEndTime = new Date(
-          Math.max(...Array.from(streamCompletionTimes.values()))
-        ).toISOString()
        result.logs.forEach((log: BlockLog) => {
          if (streamCompletionTimes.has(log.blockId)) {
            const completionTime = streamCompletionTimes.get(log.blockId)!
@@ -987,7 +1003,6 @@
      return { success: true, stream }
    }
-    // For manual (non-chat) execution
    const manualExecutionId = uuidv4()
    try {
      const result = await executeWorkflow(
@@ -1002,29 +1017,10 @@
        if (result.metadata.pendingBlocks) {
          setPendingBlocks(activeWorkflowId, result.metadata.pendingBlocks)
        }
-      } else if (result && 'success' in result) {
-        setExecutionResult(result)
-        // Reset execution state after successful non-debug execution
-        setIsExecuting(activeWorkflowId, false)
-        setIsDebugging(activeWorkflowId, false)
-        setActiveBlocks(activeWorkflowId, new Set())
-        if (isChatExecution) {
-          if (!result.metadata) {
-            result.metadata = { duration: 0, startTime: new Date().toISOString() }
-          }
-          ;(result.metadata as any).source = 'chat'
-        }
-        // Invalidate subscription queries to update usage
-        setTimeout(() => {
-          queryClient.invalidateQueries({ queryKey: subscriptionKeys.all })
-        }, 1000)
      }
      return result
    } catch (error: any) {
      const errorResult = handleExecutionError(error, { executionId: manualExecutionId })
-      // Note: Error logs are already persisted server-side via execution-core.ts
      return errorResult
    }
  },
@@ -1275,7 +1271,7 @@
    if (activeWorkflowId) {
      logger.info('Using server-side executor')
-      const executionId = uuidv4()
+      const executionIdRef = { current: '' }
      let executionResult: ExecutionResult = {
        success: false,
@@ -1293,7 +1289,7 @@
      try {
        const blockHandlers = buildBlockEventHandlers({
          workflowId: activeWorkflowId,
-          executionId,
+          executionIdRef,
          workflowEdges,
          activeBlocksSet,
          accumulatedBlockLogs,
@@ -1326,6 +1322,10 @@
            loops: clientWorkflowState.loops,
            parallels: clientWorkflowState.parallels,
          },
+          onExecutionId: (id) => {
+            executionIdRef.current = id
+            setCurrentExecutionId(activeWorkflowId, id)
+          },
          callbacks: {
            onExecutionStarted: (data) => {
              logger.info('Server execution started:', data)
@@ -1368,6 +1368,18 @@
            },
            onExecutionCompleted: (data) => {
+              if (
+                activeWorkflowId &&
+                executionIdRef.current &&
+                useExecutionStore.getState().getCurrentExecutionId(activeWorkflowId) !==
+                  executionIdRef.current
+              )
+                return
+              if (activeWorkflowId) {
+                setCurrentExecutionId(activeWorkflowId, null)
+              }
              executionResult = {
                success: data.success,
                output: data.output,
@@ -1425,9 +1437,33 @@
                  })
                }
              }
+              const workflowExecState = activeWorkflowId
+                ? useExecutionStore.getState().getWorkflowExecution(activeWorkflowId)
+                : null
+              if (activeWorkflowId && !workflowExecState?.isDebugging) {
+                setExecutionResult(executionResult)
+                setIsExecuting(activeWorkflowId, false)
+                setActiveBlocks(activeWorkflowId, new Set())
+                setTimeout(() => {
+                  queryClient.invalidateQueries({ queryKey: subscriptionKeys.all })
+                }, 1000)
+              }
            },
            onExecutionError: (data) => {
+              if (
+                activeWorkflowId &&
+                executionIdRef.current &&
+                useExecutionStore.getState().getCurrentExecutionId(activeWorkflowId) !==
+                  executionIdRef.current
+              )
+                return
+              if (activeWorkflowId) {
+                setCurrentExecutionId(activeWorkflowId, null)
+              }
              executionResult = {
                success: false,
                output: {},
@@ -1441,43 +1477,53 @@
              const isPreExecutionError = accumulatedBlockLogs.length === 0
              handleExecutionErrorConsole({
                workflowId: activeWorkflowId,
-                executionId,
+                executionId: executionIdRef.current,
                error: data.error,
                durationMs: data.duration,
                blockLogs: accumulatedBlockLogs,
                isPreExecutionError,
              })
+              if (activeWorkflowId) {
+                setIsExecuting(activeWorkflowId, false)
+                setIsDebugging(activeWorkflowId, false)
+                setActiveBlocks(activeWorkflowId, new Set())
+              }
            },
            onExecutionCancelled: (data) => {
+              if (
+                activeWorkflowId &&
+                executionIdRef.current &&
+                useExecutionStore.getState().getCurrentExecutionId(activeWorkflowId) !==
+                  executionIdRef.current
+              )
+                return
+              if (activeWorkflowId) {
+                setCurrentExecutionId(activeWorkflowId, null)
+              }
              handleExecutionCancelledConsole({
                workflowId: activeWorkflowId,
-                executionId,
+                executionId: executionIdRef.current,
                durationMs: data?.duration,
              })
+              if (activeWorkflowId) {
+                setIsExecuting(activeWorkflowId, false)
+                setIsDebugging(activeWorkflowId, false)
+                setActiveBlocks(activeWorkflowId, new Set())
+              }
            },
          },
        })
        return executionResult
      } catch (error: any) {
-        // Don't log abort errors - they're intentional user actions
        if (error.name === 'AbortError' || error.message?.includes('aborted')) {
          logger.info('Execution aborted by user')
-
-          // Reset execution state
-          if (activeWorkflowId) {
-            setIsExecuting(activeWorkflowId, false)
-            setActiveBlocks(activeWorkflowId, new Set())
-          }
-
-          // Return gracefully without error
-          return {
-            success: false,
-            output: {},
-            metadata: { duration: 0 },
-            logs: [],
-          }
+          return executionResult
        }
        logger.error('Server-side execution failed:', error)
@@ -1485,7 +1531,6 @@
      }
    }
-    // Fallback: should never reach here
    throw new Error('Server-side execution is required')
  }
@@ -1717,25 +1762,28 @@
   * Handles cancelling the current workflow execution
   */
  const handleCancelExecution = useCallback(() => {
+    if (!activeWorkflowId) return
    logger.info('Workflow execution cancellation requested')
-    // Cancel the execution stream for this workflow (server-side)
-    executionStream.cancel(activeWorkflowId ?? undefined)
-
-    // Mark current chat execution as superseded so its cleanup won't affect new executions
-    currentChatExecutionIdRef.current = null
-
-    // Mark all running entries as canceled in the terminal
-    if (activeWorkflowId) {
-      cancelRunningEntries(activeWorkflowId)
-
-      // Reset execution state - this triggers chat stream cleanup via useEffect in chat.tsx
-      setIsExecuting(activeWorkflowId, false)
-      setIsDebugging(activeWorkflowId, false)
-      setActiveBlocks(activeWorkflowId, new Set())
-    }
-
-    // If in debug mode, also reset debug state
+    const storedExecutionId = getCurrentExecutionId(activeWorkflowId)
+    if (storedExecutionId) {
+      setCurrentExecutionId(activeWorkflowId, null)
+      fetch(`/api/workflows/${activeWorkflowId}/executions/${storedExecutionId}/cancel`, {
+        method: 'POST',
+      }).catch(() => {})
+      handleExecutionCancelledConsole({
+        workflowId: activeWorkflowId,
+        executionId: storedExecutionId,
+      })
+    }
+
+    executionStream.cancel(activeWorkflowId)
+    currentChatExecutionIdRef.current = null
+    setIsExecuting(activeWorkflowId, false)
+    setIsDebugging(activeWorkflowId, false)
+    setActiveBlocks(activeWorkflowId, new Set())
+
    if (isDebugging) {
      resetDebugState()
    }
@@ -1747,7 +1795,9 @@
    setIsDebugging,
    setActiveBlocks,
    activeWorkflowId,
-    cancelRunningEntries,
+    getCurrentExecutionId,
+    setCurrentExecutionId,
+    handleExecutionCancelledConsole,
  ])

  /**
@@ -1847,7 +1897,7 @@
      }
      setIsExecuting(workflowId, true)
-      const executionId = uuidv4()
+      const executionIdRef = { current: '' }
      const accumulatedBlockLogs: BlockLog[] = []
      const accumulatedBlockStates = new Map<string, BlockState>()
      const executedBlockIds = new Set<string>()
@@ -1856,7 +1906,7 @@
      try {
        const blockHandlers = buildBlockEventHandlers({
          workflowId,
-          executionId,
+          executionIdRef,
          workflowEdges,
          activeBlocksSet,
          accumulatedBlockLogs,
@@ -1871,6 +1921,10 @@
          startBlockId: blockId,
          sourceSnapshot: effectiveSnapshot,
          input: workflowInput,
+          onExecutionId: (id) => {
+            executionIdRef.current = id
+            setCurrentExecutionId(workflowId, id)
+          },
          callbacks: {
            onBlockStarted: blockHandlers.onBlockStarted,
            onBlockCompleted: blockHandlers.onBlockCompleted,
@@ -1878,7 +1932,6 @@
            onExecutionCompleted: (data) => {
              if (data.success) {
-                // Add the start block (trigger) to executed blocks
                executedBlockIds.add(blockId)
                const mergedBlockStates: Record<string, BlockState> = {
@@ -1902,6 +1955,10 @@
                }
                setLastExecutionSnapshot(workflowId, updatedSnapshot)
              }
+              setCurrentExecutionId(workflowId, null)
+              setIsExecuting(workflowId, false)
+              setActiveBlocks(workflowId, new Set())
            },
            onExecutionError: (data) => {
@@ -1921,19 +1978,27 @@
              handleExecutionErrorConsole({
                workflowId,
-                executionId,
+                executionId: executionIdRef.current,
                error: data.error,
                durationMs: data.duration,
                blockLogs: accumulatedBlockLogs,
              })
+              setCurrentExecutionId(workflowId, null)
+              setIsExecuting(workflowId, false)
+              setActiveBlocks(workflowId, new Set())
            },
            onExecutionCancelled: (data) => {
              handleExecutionCancelledConsole({
                workflowId,
-                executionId,
+                executionId: executionIdRef.current,
                durationMs: data?.duration,
              })
+              setCurrentExecutionId(workflowId, null)
+              setIsExecuting(workflowId, false)
+              setActiveBlocks(workflowId, new Set())
            },
          },
        })
@@ -1942,14 +2007,20 @@
        logger.error('Run-from-block failed:', error)
      }
    } finally {
-      setIsExecuting(workflowId, false)
-      setActiveBlocks(workflowId, new Set())
+      const currentId = getCurrentExecutionId(workflowId)
+      if (currentId === null || currentId === executionIdRef.current) {
+        setCurrentExecutionId(workflowId, null)
+        setIsExecuting(workflowId, false)
+        setActiveBlocks(workflowId, new Set())
+      }
    }
  },
  [
    getLastExecutionSnapshot,
    setLastExecutionSnapshot,
    clearLastExecutionSnapshot,
+    getCurrentExecutionId,
+    setCurrentExecutionId,
    setIsExecuting,
    setActiveBlocks,
    setBlockRunStatus,
@@ -1979,29 +2050,213 @@ export function useWorkflowExecution() {
const executionId = uuidv4() const executionId = uuidv4()
try { try {
const result = await executeWorkflow( await executeWorkflow(undefined, undefined, executionId, undefined, 'manual', blockId)
undefined,
undefined,
executionId,
undefined,
'manual',
blockId
)
if (result && 'success' in result) {
setExecutionResult(result)
}
} catch (error) { } catch (error) {
const errorResult = handleExecutionError(error, { executionId }) const errorResult = handleExecutionError(error, { executionId })
return errorResult return errorResult
} finally { } finally {
setCurrentExecutionId(workflowId, null)
setIsExecuting(workflowId, false) setIsExecuting(workflowId, false)
setIsDebugging(workflowId, false) setIsDebugging(workflowId, false)
setActiveBlocks(workflowId, new Set()) setActiveBlocks(workflowId, new Set())
} }
}, },
[activeWorkflowId, setExecutionResult, setIsExecuting, setIsDebugging, setActiveBlocks] [
activeWorkflowId,
setCurrentExecutionId,
setExecutionResult,
setIsExecuting,
setIsDebugging,
setActiveBlocks,
]
) )
useEffect(() => {
if (!activeWorkflowId || !hasHydrated) return
const entries = useTerminalConsoleStore.getState().entries
const runningEntries = entries.filter(
(e) => e.isRunning && e.workflowId === activeWorkflowId && e.executionId
)
if (runningEntries.length === 0) return
if (activeReconnections.has(activeWorkflowId)) return
activeReconnections.add(activeWorkflowId)
executionStream.cancel(activeWorkflowId)
const sorted = [...runningEntries].sort((a, b) => {
const aTime = a.startedAt ? new Date(a.startedAt).getTime() : 0
const bTime = b.startedAt ? new Date(b.startedAt).getTime() : 0
return bTime - aTime
})
const executionId = sorted[0].executionId!
const otherExecutionIds = new Set(
sorted.filter((e) => e.executionId !== executionId).map((e) => e.executionId!)
)
if (otherExecutionIds.size > 0) {
cancelRunningEntries(activeWorkflowId)
}
setCurrentExecutionId(activeWorkflowId, executionId)
setIsExecuting(activeWorkflowId, true)
const workflowEdges = useWorkflowStore.getState().edges
const activeBlocksSet = new Set<string>()
const accumulatedBlockLogs: BlockLog[] = []
const accumulatedBlockStates = new Map<string, BlockState>()
const executedBlockIds = new Set<string>()
const executionIdRef = { current: executionId }
const handlers = buildBlockEventHandlers({
workflowId: activeWorkflowId,
executionIdRef,
workflowEdges,
activeBlocksSet,
accumulatedBlockLogs,
accumulatedBlockStates,
executedBlockIds,
consoleMode: 'update',
includeStartConsoleEntry: true,
})
const originalEntries = entries
.filter((e) => e.executionId === executionId)
.map((e) => ({ ...e }))
let cleared = false
let reconnectionComplete = false
let cleanupRan = false
const clearOnce = () => {
if (!cleared) {
cleared = true
clearExecutionEntries(executionId)
}
}
const reconnectWorkflowId = activeWorkflowId
executionStream
.reconnect({
workflowId: reconnectWorkflowId,
executionId,
callbacks: {
onBlockStarted: (data) => {
clearOnce()
handlers.onBlockStarted(data)
},
onBlockCompleted: (data) => {
clearOnce()
handlers.onBlockCompleted(data)
},
onBlockError: (data) => {
clearOnce()
handlers.onBlockError(data)
},
onExecutionCompleted: () => {
const currentId = useExecutionStore
.getState()
.getCurrentExecutionId(reconnectWorkflowId)
if (currentId !== executionId) {
reconnectionComplete = true
activeReconnections.delete(reconnectWorkflowId)
return
}
clearOnce()
reconnectionComplete = true
activeReconnections.delete(reconnectWorkflowId)
setCurrentExecutionId(reconnectWorkflowId, null)
setIsExecuting(reconnectWorkflowId, false)
setActiveBlocks(reconnectWorkflowId, new Set())
},
onExecutionError: (data) => {
const currentId = useExecutionStore
.getState()
.getCurrentExecutionId(reconnectWorkflowId)
if (currentId !== executionId) {
reconnectionComplete = true
activeReconnections.delete(reconnectWorkflowId)
return
}
clearOnce()
reconnectionComplete = true
activeReconnections.delete(reconnectWorkflowId)
setCurrentExecutionId(reconnectWorkflowId, null)
setIsExecuting(reconnectWorkflowId, false)
setActiveBlocks(reconnectWorkflowId, new Set())
handleExecutionErrorConsole({
workflowId: reconnectWorkflowId,
executionId,
error: data.error,
blockLogs: accumulatedBlockLogs,
})
},
onExecutionCancelled: () => {
const currentId = useExecutionStore
.getState()
.getCurrentExecutionId(reconnectWorkflowId)
if (currentId !== executionId) {
reconnectionComplete = true
activeReconnections.delete(reconnectWorkflowId)
return
}
clearOnce()
reconnectionComplete = true
activeReconnections.delete(reconnectWorkflowId)
setCurrentExecutionId(reconnectWorkflowId, null)
setIsExecuting(reconnectWorkflowId, false)
setActiveBlocks(reconnectWorkflowId, new Set())
handleExecutionCancelledConsole({
workflowId: reconnectWorkflowId,
executionId,
})
},
},
})
.catch((error) => {
logger.warn('Execution reconnection failed', { executionId, error })
})
.finally(() => {
if (reconnectionComplete || cleanupRan) return
const currentId = useExecutionStore.getState().getCurrentExecutionId(reconnectWorkflowId)
if (currentId !== executionId) return
reconnectionComplete = true
activeReconnections.delete(reconnectWorkflowId)
clearExecutionEntries(executionId)
for (const entry of originalEntries) {
addConsole({
workflowId: entry.workflowId,
blockId: entry.blockId,
blockName: entry.blockName,
blockType: entry.blockType,
executionId: entry.executionId,
executionOrder: entry.executionOrder,
isRunning: false,
warning: 'Execution result unavailable — check the logs page',
})
}
setCurrentExecutionId(reconnectWorkflowId, null)
setIsExecuting(reconnectWorkflowId, false)
setActiveBlocks(reconnectWorkflowId, new Set())
})
return () => {
cleanupRan = true
executionStream.cancel(reconnectWorkflowId)
activeReconnections.delete(reconnectWorkflowId)
if (cleared && !reconnectionComplete) {
clearExecutionEntries(executionId)
for (const entry of originalEntries) {
addConsole(entry)
}
}
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [activeWorkflowId, hasHydrated])
  return {
    isExecuting,
    isDebugging,

View File

@@ -1,3 +1,4 @@
export { CancelSubscription } from './cancel-subscription'
export { CreditBalance } from './credit-balance'
export { PlanCard, type PlanCardProps, type PlanFeature } from './plan-card'
export { ReferralCode } from './referral-code'

View File

@@ -0,0 +1,101 @@
'use client'
import { useState } from 'react'
import { createLogger } from '@sim/logger'
import { Button, Input, Label } from '@/components/emcn'
const logger = createLogger('ReferralCode')
interface ReferralCodeProps {
onRedeemComplete?: () => void
}
/**
* Inline referral/promo code entry field with redeem button.
* One-time use per account — shows success or "already redeemed" state.
*/
export function ReferralCode({ onRedeemComplete }: ReferralCodeProps) {
const [code, setCode] = useState('')
const [isRedeeming, setIsRedeeming] = useState(false)
const [error, setError] = useState<string | null>(null)
const [success, setSuccess] = useState<{ bonusAmount: number } | null>(null)
const handleRedeem = async () => {
const trimmed = code.trim()
if (!trimmed || isRedeeming) return
setIsRedeeming(true)
setError(null)
try {
const response = await fetch('/api/referral-code/redeem', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ code: trimmed }),
})
const data = await response.json()
if (!response.ok) {
throw new Error(data.error || 'Failed to redeem code')
}
if (data.redeemed) {
setSuccess({ bonusAmount: data.bonusAmount })
setCode('')
onRedeemComplete?.()
} else {
setError(data.error || 'Code could not be redeemed')
}
} catch (err) {
logger.error('Referral code redemption failed', { error: err })
setError(err instanceof Error ? err.message : 'Failed to redeem code')
} finally {
setIsRedeeming(false)
}
}
if (success) {
return (
<div className='flex items-center justify-between'>
<Label>Referral Code</Label>
<span className='text-[12px] text-[var(--text-secondary)]'>
+${success.bonusAmount} credits applied
</span>
</div>
)
}
return (
<div className='flex items-center justify-between gap-[12px]'>
<Label className='shrink-0'>Referral Code</Label>
<div className='flex items-center gap-[8px]'>
<div className='flex flex-col'>
<Input
type='text'
value={code}
onChange={(e) => {
setCode(e.target.value)
setError(null)
}}
onKeyDown={(e) => {
if (e.key === 'Enter') handleRedeem()
}}
placeholder='Enter code'
className='h-[32px] w-[140px] text-[12px]'
disabled={isRedeeming}
/>
{error && <span className='mt-[4px] text-[11px] text-[var(--text-error)]'>{error}</span>}
</div>
<Button
variant='active'
className='h-[32px] shrink-0 rounded-[6px] text-[12px]'
onClick={handleRedeem}
disabled={isRedeeming || !code.trim()}
>
{isRedeeming ? 'Redeeming...' : 'Redeem'}
</Button>
</div>
</div>
)
}
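Note: the component above only inspects these fields of the POST /api/referral-code/redeem response; the shape below is a sketch inferred from handleRedeem, and the optionality is illustrative.
interface RedeemResponse {
  redeemed?: boolean
  bonusAmount?: number
  error?: string
}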

View File

@@ -17,6 +17,7 @@ import {
  CancelSubscription,
  CreditBalance,
  PlanCard,
  ReferralCode,
} from '@/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/components/subscription/components'
import {
  ENTERPRISE_PLAN_FEATURES,
@@ -549,6 +550,10 @@
          />
        )}
        {!subscription.isEnterprise && (
          <ReferralCode onRedeemComplete={() => refetchSubscription()} />
        )}
        {/* Next Billing Date - hidden from team members */}
        {subscription.isPaid &&
          subscriptionData?.data?.periodEnd &&

View File

@@ -4,12 +4,14 @@
import { useEffect } from 'react'
import { createLogger } from '@sim/logger'
import { useRouter } from 'next/navigation'
import { useSession } from '@/lib/auth/auth-client'
import { useReferralAttribution } from '@/hooks/use-referral-attribution'
const logger = createLogger('WorkspacePage')
export default function WorkspacePage() {
  const router = useRouter()
  const { data: session, isPending } = useSession()
  useReferralAttribution()
  useEffect(() => {
    const redirectToFirstWorkspace = async () => {
View File

@@ -423,7 +423,7 @@ interface GenerateVersionDescriptionVariables {
const VERSION_DESCRIPTION_SYSTEM_PROMPT = `You are writing deployment version descriptions for a workflow automation platform.
-Write a brief, factual description (1-3 sentences, under 400 characters) that states what changed between versions.
+Write a brief, factual description (1-3 sentences, under 2000 characters) that states what changed between versions.
Guidelines:
- Use the specific values provided (credential names, channel names, model names)

View File

@@ -1,4 +1,4 @@
-import { useCallback, useRef } from 'react'
+import { useCallback } from 'react'
import { createLogger } from '@sim/logger'
import type {
  BlockCompletedData,
@@ -16,6 +16,18 @@ import type { SerializableExecutionState } from '@/executor/execution/types'
const logger = createLogger('useExecutionStream')
/**
* Detects errors caused by the browser killing a fetch (page refresh, navigation, tab close).
* These should be treated as clean disconnects, not execution errors.
*/
function isClientDisconnectError(error: any): boolean {
if (error.name === 'AbortError') return true
const msg = (error.message ?? '').toLowerCase()
return (
msg.includes('network error') || msg.includes('failed to fetch') || msg.includes('load failed')
)
}
/**
 * Processes SSE events from a response body and invokes appropriate callbacks.
 */
@@ -121,6 +133,7 @@ export interface ExecuteStreamOptions {
    parallels?: Record<string, any>
  }
  stopAfterBlockId?: string
  onExecutionId?: (executionId: string) => void
  callbacks?: ExecutionStreamCallbacks
}
@@ -129,30 +142,40 @@ export interface ExecuteFromBlockOptions {
  startBlockId: string
  sourceSnapshot: SerializableExecutionState
  input?: any
  onExecutionId?: (executionId: string) => void
  callbacks?: ExecutionStreamCallbacks
}
export interface ReconnectStreamOptions {
workflowId: string
executionId: string
fromEventId?: number
callbacks?: ExecutionStreamCallbacks
}
/**
* Module-level map shared across all hook instances.
* Ensures ANY instance can cancel streams started by ANY other instance,
* which is critical for SPA navigation where the original hook instance unmounts
* but the SSE stream must be cancellable from the new instance.
*/
const sharedAbortControllers = new Map<string, AbortController>()
/**
 * Hook for executing workflows via server-side SSE streaming.
 * Supports concurrent executions via per-workflow AbortController maps.
 */
export function useExecutionStream() {
- const abortControllersRef = useRef<Map<string, AbortController>>(new Map())
- const currentExecutionsRef = useRef<Map<string, { workflowId: string; executionId: string }>>(
-   new Map()
- )
  const execute = useCallback(async (options: ExecuteStreamOptions) => {
-   const { workflowId, callbacks = {}, ...payload } = options
+   const { workflowId, callbacks = {}, onExecutionId, ...payload } = options
-   const existing = abortControllersRef.current.get(workflowId)
+   const existing = sharedAbortControllers.get(workflowId)
    if (existing) {
      existing.abort()
    }
    const abortController = new AbortController()
-   abortControllersRef.current.set(workflowId, abortController)
+   sharedAbortControllers.set(workflowId, abortController)
-   currentExecutionsRef.current.delete(workflowId)
    try {
      const response = await fetch(`/api/workflows/${workflowId}/execute`, {
@@ -177,42 +200,48 @@
        throw new Error('No response body')
      }
-     const executionId = response.headers.get('X-Execution-Id')
+     const serverExecutionId = response.headers.get('X-Execution-Id')
-     if (executionId) {
+     if (serverExecutionId) {
-       currentExecutionsRef.current.set(workflowId, { workflowId, executionId })
+       onExecutionId?.(serverExecutionId)
      }
      const reader = response.body.getReader()
      await processSSEStream(reader, callbacks, 'Execution')
    } catch (error: any) {
-     if (error.name === 'AbortError') {
+     if (isClientDisconnectError(error)) {
-       logger.info('Execution stream cancelled')
+       logger.info('Execution stream disconnected (page unload or abort)')
-       callbacks.onExecutionCancelled?.({ duration: 0 })
+       return
-     } else {
-       logger.error('Execution stream error:', error)
-       callbacks.onExecutionError?.({
-         error: error.message || 'Unknown error',
-         duration: 0,
-       })
      }
      logger.error('Execution stream error:', error)
      callbacks.onExecutionError?.({
        error: error.message || 'Unknown error',
        duration: 0,
      })
      throw error
    } finally {
-     abortControllersRef.current.delete(workflowId)
+     if (sharedAbortControllers.get(workflowId) === abortController) {
-     currentExecutionsRef.current.delete(workflowId)
+       sharedAbortControllers.delete(workflowId)
+     }
    }
  }, [])
  const executeFromBlock = useCallback(async (options: ExecuteFromBlockOptions) => {
-   const { workflowId, startBlockId, sourceSnapshot, input, callbacks = {} } = options
+   const {
+     workflowId,
+     startBlockId,
+     sourceSnapshot,
+     input,
+     onExecutionId,
+     callbacks = {},
+   } = options
-   const existing = abortControllersRef.current.get(workflowId)
+   const existing = sharedAbortControllers.get(workflowId)
    if (existing) {
      existing.abort()
    }
    const abortController = new AbortController()
-   abortControllersRef.current.set(workflowId, abortController)
+   sharedAbortControllers.set(workflowId, abortController)
-   currentExecutionsRef.current.delete(workflowId)
    try {
      const response = await fetch(`/api/workflows/${workflowId}/execute`, {
@@ -246,64 +275,80 @@
        throw new Error('No response body')
      }
-     const executionId = response.headers.get('X-Execution-Id')
+     const serverExecutionId = response.headers.get('X-Execution-Id')
-     if (executionId) {
+     if (serverExecutionId) {
-       currentExecutionsRef.current.set(workflowId, { workflowId, executionId })
+       onExecutionId?.(serverExecutionId)
      }
      const reader = response.body.getReader()
      await processSSEStream(reader, callbacks, 'Run-from-block')
    } catch (error: any) {
-     if (error.name === 'AbortError') {
+     if (isClientDisconnectError(error)) {
-       logger.info('Run-from-block execution cancelled')
+       logger.info('Run-from-block stream disconnected (page unload or abort)')
-       callbacks.onExecutionCancelled?.({ duration: 0 })
+       return
-     } else {
-       logger.error('Run-from-block execution error:', error)
-       callbacks.onExecutionError?.({
-         error: error.message || 'Unknown error',
-         duration: 0,
-       })
      }
      logger.error('Run-from-block execution error:', error)
      callbacks.onExecutionError?.({
        error: error.message || 'Unknown error',
        duration: 0,
      })
      throw error
    } finally {
-     abortControllersRef.current.delete(workflowId)
+     if (sharedAbortControllers.get(workflowId) === abortController) {
-     currentExecutionsRef.current.delete(workflowId)
+       sharedAbortControllers.delete(workflowId)
+     }
    }
  }, [])
const reconnect = useCallback(async (options: ReconnectStreamOptions) => {
const { workflowId, executionId, fromEventId = 0, callbacks = {} } = options
const existing = sharedAbortControllers.get(workflowId)
if (existing) {
existing.abort()
}
const abortController = new AbortController()
sharedAbortControllers.set(workflowId, abortController)
try {
const response = await fetch(
`/api/workflows/${workflowId}/executions/${executionId}/stream?from=${fromEventId}`,
{ signal: abortController.signal }
)
if (!response.ok) throw new Error(`Reconnect failed (${response.status})`)
if (!response.body) throw new Error('No response body')
await processSSEStream(response.body.getReader(), callbacks, 'Reconnect')
} catch (error: any) {
if (isClientDisconnectError(error)) return
logger.error('Reconnection stream error:', error)
throw error
} finally {
if (sharedAbortControllers.get(workflowId) === abortController) {
sharedAbortControllers.delete(workflowId)
}
} }
}, []) }, [])
  const cancel = useCallback((workflowId?: string) => {
    if (workflowId) {
-     const execution = currentExecutionsRef.current.get(workflowId)
+     const controller = sharedAbortControllers.get(workflowId)
-     if (execution) {
-       fetch(`/api/workflows/${execution.workflowId}/executions/${execution.executionId}/cancel`, {
-         method: 'POST',
-       }).catch(() => {})
-     }
-     const controller = abortControllersRef.current.get(workflowId)
      if (controller) {
        controller.abort()
-       abortControllersRef.current.delete(workflowId)
+       sharedAbortControllers.delete(workflowId)
      }
-     currentExecutionsRef.current.delete(workflowId)
    } else {
-     for (const [, execution] of currentExecutionsRef.current) {
+     for (const [, controller] of sharedAbortControllers) {
-       fetch(`/api/workflows/${execution.workflowId}/executions/${execution.executionId}/cancel`, {
-         method: 'POST',
-       }).catch(() => {})
-     }
-     for (const [, controller] of abortControllersRef.current) {
        controller.abort()
      }
-     abortControllersRef.current.clear()
+     sharedAbortControllers.clear()
-     currentExecutionsRef.current.clear()
    }
  }, [])
  return {
    execute,
    executeFromBlock,
    reconnect,
    cancel,
  }
}
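Note: a minimal sketch of how onExecutionId and reconnect are meant to fit together across a page refresh, assuming the wiring shown in useWorkflowExecution above; payload fields, callback bodies, and the persistence of executionId are elided and illustrative.
const stream = useExecutionStream()

async function runAndSurviveRefresh(workflowId: string) {
  // First run: capture the server-assigned execution ID so it can be persisted client-side.
  let executionId = ''
  await stream.execute({
    workflowId,
    onExecutionId: (id) => {
      executionId = id
    },
    callbacks: {},
  })

  // After a refresh, a surviving executionId lets the client re-attach and replay missed events.
  await stream.reconnect({ workflowId, executionId, fromEventId: 0, callbacks: {} })
}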

View File

@@ -0,0 +1,46 @@
'use client'
import { useEffect, useRef } from 'react'
import { createLogger } from '@sim/logger'
const logger = createLogger('ReferralAttribution')
const COOKIE_NAME = 'sim_utm'
const TERMINAL_REASONS = new Set([
'account_predates_cookie',
'invalid_cookie',
'no_utm_cookie',
'no_matching_campaign',
])
/**
* Fires a one-shot `POST /api/attribution` when a `sim_utm` cookie is present.
* Retries on transient failures; stops on terminal outcomes.
*/
export function useReferralAttribution() {
const calledRef = useRef(false)
useEffect(() => {
if (calledRef.current) return
if (!document.cookie.includes(COOKIE_NAME)) return
calledRef.current = true
fetch('/api/attribution', { method: 'POST' })
.then((res) => res.json())
.then((data) => {
if (data.attributed) {
logger.info('Referral attribution successful', { bonusAmount: data.bonusAmount })
} else if (data.error || TERMINAL_REASONS.has(data.reason)) {
logger.info('Referral attribution skipped', { reason: data.reason || data.error })
} else {
calledRef.current = false
}
})
.catch((err) => {
logger.warn('Referral attribution failed, will retry', { error: err })
calledRef.current = false
})
}, [])
}
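Note: the hook above only inspects these fields of the POST /api/attribution response; the shape below is a sketch inferred from those reads, with optionality and example reasons taken from the TERMINAL_REASONS set.
interface AttributionResponse {
  attributed?: boolean
  bonusAmount?: number
  reason?: string // e.g. 'no_utm_cookie' or 'no_matching_campaign'
  error?: string
}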

View File

@@ -0,0 +1,64 @@
import { db } from '@sim/db'
import { organization, userStats } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq, sql } from 'drizzle-orm'
import { getHighestPrioritySubscription } from '@/lib/billing/core/subscription'
import type { DbOrTx } from '@/lib/db/types'
const logger = createLogger('BonusCredits')
/**
* Apply bonus credits to a user (e.g. referral bonuses, promotional codes).
*
* Detects the user's current plan and routes credits accordingly:
* - Free/Pro: adds to `userStats.creditBalance` and increments `currentUsageLimit`
* - Team/Enterprise: adds to `organization.creditBalance` and increments `orgUsageLimit`
*
* Uses direct increment (not recalculation) so it works correctly for free-tier
* users where `setUsageLimitForCredits` would compute planBase=0 and skip the update.
*
* @param tx - Optional Drizzle transaction context. When provided, all DB writes
* participate in the caller's transaction for atomicity.
*/
export async function applyBonusCredits(
userId: string,
amount: number,
tx?: DbOrTx
): Promise<void> {
const dbCtx = tx ?? db
const subscription = await getHighestPrioritySubscription(userId)
const isTeamOrEnterprise = subscription?.plan === 'team' || subscription?.plan === 'enterprise'
if (isTeamOrEnterprise && subscription?.referenceId) {
const orgId = subscription.referenceId
await dbCtx
.update(organization)
.set({
creditBalance: sql`${organization.creditBalance} + ${amount}`,
orgUsageLimit: sql`COALESCE(${organization.orgUsageLimit}, '0')::decimal + ${amount}`,
})
.where(eq(organization.id, orgId))
logger.info('Applied bonus credits to organization', {
userId,
organizationId: orgId,
plan: subscription.plan,
amount,
})
} else {
await dbCtx
.update(userStats)
.set({
creditBalance: sql`${userStats.creditBalance} + ${amount}`,
currentUsageLimit: sql`COALESCE(${userStats.currentUsageLimit}, '0')::decimal + ${amount}`,
})
.where(eq(userStats.userId, userId))
logger.info('Applied bonus credits to user', {
userId,
plan: subscription?.plan || 'free',
amount,
})
}
}
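Note: a minimal sketch of the intended call pattern for the optional tx parameter, assuming a Drizzle transaction; the surrounding redemption write and the amount of 20 are illustrative, not part of this change.
await db.transaction(async (tx) => {
  // ...insert the redemption/attribution record using `tx`...
  await applyBonusCredits(userId, 20, tx) // credit grant commits or rolls back with the caller
})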

View File

@@ -20,6 +20,8 @@ export interface BuildPayloadParams {
  fileAttachments?: Array<{ id: string; key: string; size: number; [key: string]: unknown }>
  commands?: string[]
  chatId?: string
  conversationId?: string
  prefetch?: boolean
  implicitFeedback?: string
}
@@ -64,6 +66,10 @@
    fileAttachments,
    commands,
    chatId,
    conversationId,
    prefetch,
    conversationHistory,
    implicitFeedback,
  } = params
  const selectedModel = options.selectedModel
@@ -154,6 +160,12 @@
    version: SIM_AGENT_VERSION,
    ...(contexts && contexts.length > 0 ? { context: contexts } : {}),
    ...(chatId ? { chatId } : {}),
    ...(conversationId ? { conversationId } : {}),
    ...(Array.isArray(conversationHistory) && conversationHistory.length > 0
      ? { conversationHistory }
      : {}),
    ...(typeof prefetch === 'boolean' ? { prefetch } : {}),
    ...(implicitFeedback ? { implicitFeedback } : {}),
    ...(processedFileContents.length > 0 ? { fileAttachments: processedFileContents } : {}),
    ...(integrationTools.length > 0 ? { integrationTools } : {}),
    ...(credentials ? { credentials } : {}),

View File

@@ -0,0 +1,246 @@
import { createLogger } from '@sim/logger'
import { getRedisClient } from '@/lib/core/config/redis'
import type { ExecutionEvent } from '@/lib/workflows/executor/execution-events'
const logger = createLogger('ExecutionEventBuffer')
const REDIS_PREFIX = 'execution:stream:'
const TTL_SECONDS = 60 * 60 // 1 hour
const EVENT_LIMIT = 1000
const RESERVE_BATCH = 100
const FLUSH_INTERVAL_MS = 15
const FLUSH_MAX_BATCH = 200
function getEventsKey(executionId: string) {
return `${REDIS_PREFIX}${executionId}:events`
}
function getSeqKey(executionId: string) {
return `${REDIS_PREFIX}${executionId}:seq`
}
function getMetaKey(executionId: string) {
return `${REDIS_PREFIX}${executionId}:meta`
}
export type ExecutionStreamStatus = 'active' | 'complete' | 'error' | 'cancelled'
export interface ExecutionStreamMeta {
status: ExecutionStreamStatus
userId?: string
workflowId?: string
updatedAt?: string
}
export interface ExecutionEventEntry {
eventId: number
executionId: string
event: ExecutionEvent
}
export interface ExecutionEventWriter {
write: (event: ExecutionEvent) => Promise<ExecutionEventEntry>
flush: () => Promise<void>
close: () => Promise<void>
}
export async function setExecutionMeta(
executionId: string,
meta: Partial<ExecutionStreamMeta>
): Promise<void> {
const redis = getRedisClient()
if (!redis) {
logger.warn('setExecutionMeta: Redis client unavailable', { executionId })
return
}
try {
const key = getMetaKey(executionId)
const payload: Record<string, string> = {
updatedAt: new Date().toISOString(),
}
if (meta.status) payload.status = meta.status
if (meta.userId) payload.userId = meta.userId
if (meta.workflowId) payload.workflowId = meta.workflowId
await redis.hset(key, payload)
await redis.expire(key, TTL_SECONDS)
} catch (error) {
logger.warn('Failed to update execution meta', {
executionId,
error: error instanceof Error ? error.message : String(error),
})
}
}
export async function getExecutionMeta(executionId: string): Promise<ExecutionStreamMeta | null> {
const redis = getRedisClient()
if (!redis) {
logger.warn('getExecutionMeta: Redis client unavailable', { executionId })
return null
}
try {
const key = getMetaKey(executionId)
const meta = await redis.hgetall(key)
if (!meta || Object.keys(meta).length === 0) return null
return meta as unknown as ExecutionStreamMeta
} catch (error) {
logger.warn('Failed to read execution meta', {
executionId,
error: error instanceof Error ? error.message : String(error),
})
return null
}
}
export async function readExecutionEvents(
executionId: string,
afterEventId: number
): Promise<ExecutionEventEntry[]> {
const redis = getRedisClient()
if (!redis) return []
try {
const raw = await redis.zrangebyscore(getEventsKey(executionId), afterEventId + 1, '+inf')
return raw
.map((entry) => {
try {
return JSON.parse(entry) as ExecutionEventEntry
} catch {
return null
}
})
.filter((entry): entry is ExecutionEventEntry => Boolean(entry))
} catch (error) {
logger.warn('Failed to read execution events', {
executionId,
error: error instanceof Error ? error.message : String(error),
})
return []
}
}
export function createExecutionEventWriter(executionId: string): ExecutionEventWriter {
const redis = getRedisClient()
if (!redis) {
logger.warn(
'createExecutionEventWriter: Redis client unavailable, events will not be buffered',
{
executionId,
}
)
return {
write: async (event) => ({ eventId: 0, executionId, event }),
flush: async () => {},
close: async () => {},
}
}
let pending: ExecutionEventEntry[] = []
let nextEventId = 0
let maxReservedId = 0
let flushTimer: ReturnType<typeof setTimeout> | null = null
const scheduleFlush = () => {
if (flushTimer) return
flushTimer = setTimeout(() => {
flushTimer = null
void flush()
}, FLUSH_INTERVAL_MS)
}
const reserveIds = async (minCount: number) => {
const reserveCount = Math.max(RESERVE_BATCH, minCount)
const newMax = await redis.incrby(getSeqKey(executionId), reserveCount)
const startId = newMax - reserveCount + 1
if (nextEventId === 0 || nextEventId > maxReservedId) {
nextEventId = startId
maxReservedId = newMax
}
}
let flushPromise: Promise<void> | null = null
let closed = false
const inflightWrites = new Set<Promise<ExecutionEventEntry>>()
const doFlush = async () => {
if (pending.length === 0) return
const batch = pending
pending = []
try {
const key = getEventsKey(executionId)
const zaddArgs: (string | number)[] = []
for (const entry of batch) {
zaddArgs.push(entry.eventId, JSON.stringify(entry))
}
const pipeline = redis.pipeline()
pipeline.zadd(key, ...zaddArgs)
pipeline.expire(key, TTL_SECONDS)
pipeline.expire(getSeqKey(executionId), TTL_SECONDS)
pipeline.zremrangebyrank(key, 0, -EVENT_LIMIT - 1)
await pipeline.exec()
} catch (error) {
logger.warn('Failed to flush execution events', {
executionId,
batchSize: batch.length,
error: error instanceof Error ? error.message : String(error),
stack: error instanceof Error ? error.stack : undefined,
})
pending = batch.concat(pending)
}
}
const flush = async () => {
if (flushPromise) {
await flushPromise
return
}
flushPromise = doFlush()
try {
await flushPromise
} finally {
flushPromise = null
if (pending.length > 0) scheduleFlush()
}
}
const writeCore = async (event: ExecutionEvent): Promise<ExecutionEventEntry> => {
if (closed) return { eventId: 0, executionId, event }
if (nextEventId === 0 || nextEventId > maxReservedId) {
await reserveIds(1)
}
const eventId = nextEventId++
const entry: ExecutionEventEntry = { eventId, executionId, event }
pending.push(entry)
if (pending.length >= FLUSH_MAX_BATCH) {
await flush()
} else {
scheduleFlush()
}
return entry
}
const write = (event: ExecutionEvent): Promise<ExecutionEventEntry> => {
const p = writeCore(event)
inflightWrites.add(p)
const remove = () => inflightWrites.delete(p)
p.then(remove, remove)
return p
}
const close = async () => {
closed = true
if (flushTimer) {
clearTimeout(flushTimer)
flushTimer = null
}
if (inflightWrites.size > 0) {
await Promise.allSettled(inflightWrites)
}
if (flushPromise) {
await flushPromise
}
if (pending.length > 0) {
await doFlush()
}
}
return { write, flush, close }
}
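Note: a rough sketch of the producer/consumer pairing this module enables; the functions are the ones defined above, while the event value, lastSeenEventId, and the surrounding routes are illustrative.
// Producer (execution route): buffer events as they stream to the original client.
const writer = createExecutionEventWriter(executionId)
await setExecutionMeta(executionId, { status: 'active', workflowId, userId })
await writer.write(event) // `event` is an ExecutionEvent produced by the executor
await writer.close() // flush any pending batch before marking the stream complete

// Consumer (reconnection route): replay everything the client missed.
const missed = await readExecutionEvents(executionId, lastSeenEventId)
for (const entry of missed) {
  // send entry.event to the reconnected client, tagged with entry.eventId
}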

View File

@@ -2364,6 +2364,261 @@ describe('hasWorkflowChanged', () => {
  })
})
describe('Trigger Config Normalization (False Positive Prevention)', () => {
it.concurrent(
'should not detect change when deployed has null fields but current has values from triggerConfig',
() => {
// Core scenario: deployed state has null individual fields, current state has
// values populated from triggerConfig at runtime by populateTriggerFieldsFromConfig
const deployedState = createWorkflowState({
blocks: {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
botToken: { id: 'botToken', type: 'short-input', value: null },
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { signingSecret: 'secret123', botToken: 'token456' },
},
},
}),
},
})
const currentState = createWorkflowState({
blocks: {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
signingSecret: { id: 'signingSecret', type: 'short-input', value: 'secret123' },
botToken: { id: 'botToken', type: 'short-input', value: 'token456' },
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { signingSecret: 'secret123', botToken: 'token456' },
},
},
}),
},
})
expect(hasWorkflowChanged(currentState, deployedState)).toBe(false)
}
)
it.concurrent(
'should detect change when user edits a trigger field to a different value',
() => {
const deployedState = createWorkflowState({
blocks: {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { signingSecret: 'old-secret' },
},
},
}),
},
})
const currentState = createWorkflowState({
blocks: {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
signingSecret: { id: 'signingSecret', type: 'short-input', value: 'new-secret' },
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { signingSecret: 'old-secret' },
},
},
}),
},
})
expect(hasWorkflowChanged(currentState, deployedState)).toBe(true)
}
)
it.concurrent('should not detect change when both sides have no triggerConfig', () => {
const deployedState = createWorkflowState({
blocks: {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
},
}),
},
})
const currentState = createWorkflowState({
blocks: {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
},
}),
},
})
expect(hasWorkflowChanged(currentState, deployedState)).toBe(false)
})
it.concurrent(
'should not detect change when deployed has empty fields and triggerConfig populates them',
() => {
// Empty string is also treated as "empty" by normalizeTriggerConfigValues
const deployedState = createWorkflowState({
blocks: {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
signingSecret: { id: 'signingSecret', type: 'short-input', value: '' },
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { signingSecret: 'secret123' },
},
},
}),
},
})
const currentState = createWorkflowState({
blocks: {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
signingSecret: { id: 'signingSecret', type: 'short-input', value: 'secret123' },
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { signingSecret: 'secret123' },
},
},
}),
},
})
expect(hasWorkflowChanged(currentState, deployedState)).toBe(false)
}
)
it.concurrent('should not detect change when triggerId differs', () => {
const deployedState = createWorkflowState({
blocks: {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
model: { value: 'gpt-4' },
triggerId: { value: null },
},
}),
},
})
const currentState = createWorkflowState({
blocks: {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
model: { value: 'gpt-4' },
triggerId: { value: 'slack_webhook' },
},
}),
},
})
expect(hasWorkflowChanged(currentState, deployedState)).toBe(false)
})
it.concurrent(
'should not detect change for namespaced system subBlock IDs like samplePayload_slack_webhook',
() => {
const deployedState = createWorkflowState({
blocks: {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
model: { value: 'gpt-4' },
samplePayload_slack_webhook: { value: 'old payload' },
triggerInstructions_slack_webhook: { value: 'old instructions' },
},
}),
},
})
const currentState = createWorkflowState({
blocks: {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
model: { value: 'gpt-4' },
samplePayload_slack_webhook: { value: 'new payload' },
triggerInstructions_slack_webhook: { value: 'new instructions' },
},
}),
},
})
expect(hasWorkflowChanged(currentState, deployedState)).toBe(false)
}
)
it.concurrent(
'should handle mixed scenario: some fields from triggerConfig, some user-edited',
() => {
const deployedState = createWorkflowState({
blocks: {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
botToken: { id: 'botToken', type: 'short-input', value: null },
includeFiles: { id: 'includeFiles', type: 'switch', value: false },
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { signingSecret: 'secret123', botToken: 'token456' },
},
},
}),
},
})
const currentState = createWorkflowState({
blocks: {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
signingSecret: { id: 'signingSecret', type: 'short-input', value: 'secret123' },
botToken: { id: 'botToken', type: 'short-input', value: 'token456' },
includeFiles: { id: 'includeFiles', type: 'switch', value: true },
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { signingSecret: 'secret123', botToken: 'token456' },
},
},
}),
},
})
// includeFiles changed from false to true — this IS a real change
expect(hasWorkflowChanged(currentState, deployedState)).toBe(true)
}
)
})
describe('Trigger Runtime Metadata (Should Not Trigger Change)', () => {
  it.concurrent('should not detect change when webhookId differs', () => {
    const deployedState = createWorkflowState({

View File

@@ -9,6 +9,7 @@ import {
  normalizeLoop,
  normalizeParallel,
  normalizeSubBlockValue,
  normalizeTriggerConfigValues,
  normalizeValue,
  normalizeVariables,
  sanitizeVariable,
@@ -172,14 +173,18 @@
      }
    }
    // Normalize trigger config values for both states before comparison
    const normalizedCurrentSubs = normalizeTriggerConfigValues(currentSubBlocks)
    const normalizedPreviousSubs = normalizeTriggerConfigValues(previousSubBlocks)
    // Compare subBlocks using shared helper for filtering (single source of truth)
    const allSubBlockIds = filterSubBlockIds([
-     ...new Set([...Object.keys(currentSubBlocks), ...Object.keys(previousSubBlocks)]),
+     ...new Set([...Object.keys(normalizedCurrentSubs), ...Object.keys(normalizedPreviousSubs)]),
    ])
    for (const subId of allSubBlockIds) {
-     const currentSub = currentSubBlocks[subId] as Record<string, unknown> | undefined
+     const currentSub = normalizedCurrentSubs[subId] as Record<string, unknown> | undefined
-     const previousSub = previousSubBlocks[subId] as Record<string, unknown> | undefined
+     const previousSub = normalizedPreviousSubs[subId] as Record<string, unknown> | undefined
      if (!currentSub || !previousSub) {
        changes.push({

View File

@@ -4,10 +4,12 @@
import { describe, expect, it } from 'vitest'
import type { Loop, Parallel } from '@/stores/workflows/workflow/types'
import {
  filterSubBlockIds,
  normalizedStringify,
  normalizeEdge,
  normalizeLoop,
  normalizeParallel,
  normalizeTriggerConfigValues,
  normalizeValue,
  sanitizeInputFormat,
  sanitizeTools,
@@ -584,4 +586,214 @@
    expect(result2).toBe(result3)
  })
})
describe('filterSubBlockIds', () => {
it.concurrent('should exclude exact SYSTEM_SUBBLOCK_IDS', () => {
const ids = ['signingSecret', 'samplePayload', 'triggerInstructions', 'botToken']
const result = filterSubBlockIds(ids)
expect(result).toEqual(['botToken', 'signingSecret'])
})
it.concurrent('should exclude namespaced SYSTEM_SUBBLOCK_IDS (prefix matching)', () => {
const ids = [
'signingSecret',
'samplePayload_slack_webhook',
'triggerInstructions_slack_webhook',
'webhookUrlDisplay_slack_webhook',
'botToken',
]
const result = filterSubBlockIds(ids)
expect(result).toEqual(['botToken', 'signingSecret'])
})
it.concurrent('should exclude exact TRIGGER_RUNTIME_SUBBLOCK_IDS', () => {
const ids = ['webhookId', 'triggerPath', 'triggerConfig', 'triggerId', 'signingSecret']
const result = filterSubBlockIds(ids)
expect(result).toEqual(['signingSecret'])
})
it.concurrent('should not exclude IDs that merely contain a system ID substring', () => {
const ids = ['mySamplePayload', 'notSamplePayload']
const result = filterSubBlockIds(ids)
expect(result).toEqual(['mySamplePayload', 'notSamplePayload'])
})
it.concurrent('should return sorted results', () => {
const ids = ['zebra', 'alpha', 'middle']
const result = filterSubBlockIds(ids)
expect(result).toEqual(['alpha', 'middle', 'zebra'])
})
it.concurrent('should handle empty array', () => {
expect(filterSubBlockIds([])).toEqual([])
})
it.concurrent('should handle all IDs being excluded', () => {
const ids = ['webhookId', 'triggerPath', 'samplePayload', 'triggerConfig']
const result = filterSubBlockIds(ids)
expect(result).toEqual([])
})
it.concurrent('should exclude setupScript and scheduleInfo namespaced variants', () => {
const ids = ['setupScript_google_sheets_row', 'scheduleInfo_cron_trigger', 'realField']
const result = filterSubBlockIds(ids)
expect(result).toEqual(['realField'])
})
it.concurrent('should exclude triggerCredentials namespaced variants', () => {
const ids = ['triggerCredentials_slack_webhook', 'signingSecret']
const result = filterSubBlockIds(ids)
expect(result).toEqual(['signingSecret'])
})
})
describe('normalizeTriggerConfigValues', () => {
it.concurrent('should return subBlocks unchanged when no triggerConfig exists', () => {
const subBlocks = {
signingSecret: { id: 'signingSecret', type: 'short-input', value: 'secret123' },
botToken: { id: 'botToken', type: 'short-input', value: 'token456' },
}
const result = normalizeTriggerConfigValues(subBlocks)
expect(result).toEqual(subBlocks)
})
it.concurrent('should return subBlocks unchanged when triggerConfig value is null', () => {
const subBlocks = {
triggerConfig: { id: 'triggerConfig', type: 'short-input', value: null },
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
}
const result = normalizeTriggerConfigValues(subBlocks)
expect(result).toEqual(subBlocks)
})
it.concurrent(
'should return subBlocks unchanged when triggerConfig value is not an object',
() => {
const subBlocks = {
triggerConfig: { id: 'triggerConfig', type: 'short-input', value: 'string-value' },
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
}
const result = normalizeTriggerConfigValues(subBlocks)
expect(result).toEqual(subBlocks)
}
)
it.concurrent('should populate null individual fields from triggerConfig', () => {
const subBlocks = {
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { signingSecret: 'secret123', botToken: 'token456' },
},
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
botToken: { id: 'botToken', type: 'short-input', value: null },
}
const result = normalizeTriggerConfigValues(subBlocks)
expect((result.signingSecret as Record<string, unknown>).value).toBe('secret123')
expect((result.botToken as Record<string, unknown>).value).toBe('token456')
})
it.concurrent('should populate undefined individual fields from triggerConfig', () => {
const subBlocks = {
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { signingSecret: 'secret123' },
},
signingSecret: { id: 'signingSecret', type: 'short-input', value: undefined },
}
const result = normalizeTriggerConfigValues(subBlocks)
expect((result.signingSecret as Record<string, unknown>).value).toBe('secret123')
})
it.concurrent('should populate empty string individual fields from triggerConfig', () => {
const subBlocks = {
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { signingSecret: 'secret123' },
},
signingSecret: { id: 'signingSecret', type: 'short-input', value: '' },
}
const result = normalizeTriggerConfigValues(subBlocks)
expect((result.signingSecret as Record<string, unknown>).value).toBe('secret123')
})
it.concurrent('should NOT overwrite existing non-empty individual field values', () => {
const subBlocks = {
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { signingSecret: 'old-secret' },
},
signingSecret: { id: 'signingSecret', type: 'short-input', value: 'user-edited-secret' },
}
const result = normalizeTriggerConfigValues(subBlocks)
expect((result.signingSecret as Record<string, unknown>).value).toBe('user-edited-secret')
})
it.concurrent('should skip triggerConfig fields that are null/undefined', () => {
const subBlocks = {
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { signingSecret: null, botToken: undefined },
},
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
botToken: { id: 'botToken', type: 'short-input', value: null },
}
const result = normalizeTriggerConfigValues(subBlocks)
expect((result.signingSecret as Record<string, unknown>).value).toBe(null)
expect((result.botToken as Record<string, unknown>).value).toBe(null)
})
it.concurrent('should skip fields from triggerConfig that have no matching subBlock', () => {
const subBlocks = {
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { nonExistentField: 'value123' },
},
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
}
const result = normalizeTriggerConfigValues(subBlocks)
expect(result.nonExistentField).toBeUndefined()
expect((result.signingSecret as Record<string, unknown>).value).toBe(null)
})
it.concurrent('should not mutate the original subBlocks object', () => {
const original = {
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { signingSecret: 'secret123' },
},
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
}
normalizeTriggerConfigValues(original)
expect((original.signingSecret as Record<string, unknown>).value).toBe(null)
})
it.concurrent('should preserve other subBlock properties when populating value', () => {
const subBlocks = {
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { signingSecret: 'secret123' },
},
signingSecret: {
id: 'signingSecret',
type: 'short-input',
value: null,
placeholder: 'Enter signing secret',
},
}
const result = normalizeTriggerConfigValues(subBlocks)
const normalized = result.signingSecret as Record<string, unknown>
expect(normalized.value).toBe('secret123')
expect(normalized.id).toBe('signingSecret')
expect(normalized.type).toBe('short-input')
expect(normalized.placeholder).toBe('Enter signing secret')
})
})
})

View File

@@ -418,10 +418,48 @@ export function extractBlockFieldsForComparison(block: BlockState): ExtractedBlo
 */
export function filterSubBlockIds(subBlockIds: string[]): string[] {
  return subBlockIds
-   .filter((id) => !SYSTEM_SUBBLOCK_IDS.includes(id) && !TRIGGER_RUNTIME_SUBBLOCK_IDS.includes(id))
+   .filter((id) => {
+     if (TRIGGER_RUNTIME_SUBBLOCK_IDS.includes(id)) return false
+     if (SYSTEM_SUBBLOCK_IDS.some((sysId) => id === sysId || id.startsWith(`${sysId}_`)))
+       return false
+     return true
+   })
    .sort()
}
/**
* Normalizes trigger block subBlocks by populating null/empty individual fields
* from the triggerConfig aggregate subBlock. This compensates for the runtime
* population done by populateTriggerFieldsFromConfig, ensuring consistent
* comparison between client state (with populated values) and deployed state
* (with null values from DB).
*/
export function normalizeTriggerConfigValues(
subBlocks: Record<string, unknown>
): Record<string, unknown> {
const triggerConfigSub = subBlocks.triggerConfig as Record<string, unknown> | undefined
const triggerConfigValue = triggerConfigSub?.value
if (!triggerConfigValue || typeof triggerConfigValue !== 'object') {
return subBlocks
}
const result = { ...subBlocks }
for (const [fieldId, configValue] of Object.entries(
triggerConfigValue as Record<string, unknown>
)) {
if (configValue === null || configValue === undefined) continue
const existingSub = result[fieldId] as Record<string, unknown> | undefined
if (
existingSub &&
(existingSub.value === null || existingSub.value === undefined || existingSub.value === '')
) {
result[fieldId] = { ...existingSub, value: configValue }
}
}
return result
}
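Note: a small example of the backfill behaviour, mirroring the unit tests above.
const normalized = normalizeTriggerConfigValues({
  triggerConfig: { id: 'triggerConfig', type: 'short-input', value: { signingSecret: 's3cret' } },
  signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
})
// (normalized.signingSecret as Record<string, unknown>).value === 's3cret'
// a non-empty signingSecret value would have been left untouched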
/**
 * Normalizes a subBlock value with sanitization for specific subBlock types.
 * Sanitizes: tools (removes isExpanded), inputFormat (removes collapsed)

View File

@@ -137,6 +137,37 @@ function handleSecurityFiltering(request: NextRequest): NextResponse | null {
  return null
}
const UTM_KEYS = ['utm_source', 'utm_medium', 'utm_campaign', 'utm_content'] as const
const UTM_COOKIE_NAME = 'sim_utm'
const UTM_COOKIE_MAX_AGE = 3600
/**
* Sets a `sim_utm` cookie when UTM params are present on auth pages.
* Captures UTM values, the HTTP Referer, landing page, and a timestamp
* used by the attribution API to verify the user signed up after visiting the link.
*/
function setUtmCookie(request: NextRequest, response: NextResponse): void {
const { searchParams, pathname } = request.nextUrl
const hasUtm = UTM_KEYS.some((key) => searchParams.get(key))
if (!hasUtm) return
const utmData: Record<string, string> = {}
for (const key of UTM_KEYS) {
const value = searchParams.get(key)
if (value) utmData[key] = value
}
utmData.referrer_url = request.headers.get('referer') || ''
utmData.landing_page = pathname
utmData.created_at = Date.now().toString()
response.cookies.set(UTM_COOKIE_NAME, JSON.stringify(utmData), {
path: '/',
maxAge: UTM_COOKIE_MAX_AGE,
sameSite: 'lax',
httpOnly: false, // Client-side hook needs to detect cookie presence
})
}
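Note: an illustrative sim_utm payload as produced by setUtmCookie above; keys come from the code, values are made up, and utm_content is omitted because only present params are stored.
// JSON stored in the `sim_utm` cookie (before any encoding applied by the cookie API):
// {
//   "utm_source": "newsletter",
//   "utm_medium": "email",
//   "utm_campaign": "launch",
//   "referrer_url": "https://example.com/post",
//   "landing_page": "/signup",
//   "created_at": "1767225600000"
// }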
export async function proxy(request: NextRequest) {
  const url = request.nextUrl
@@ -152,6 +183,7 @@
  }
  const response = NextResponse.next()
  response.headers.set('Content-Security-Policy', generateRuntimeCSP())
  setUtmCookie(request, response)
  return response
}

View File

@@ -129,6 +129,18 @@ export const useExecutionStore = create<ExecutionState & ExecutionActions>()((se
    })
  },
setCurrentExecutionId: (workflowId, executionId) => {
set({
workflowExecutions: updatedMap(get().workflowExecutions, workflowId, {
currentExecutionId: executionId,
}),
})
},
getCurrentExecutionId: (workflowId) => {
return getOrCreate(get().workflowExecutions, workflowId).currentExecutionId
},
  clearRunPath: (workflowId) => {
    set({
      workflowExecutions: updatedMap(get().workflowExecutions, workflowId, {

View File

@@ -35,6 +35,8 @@ export interface WorkflowExecutionState {
  lastRunPath: Map<string, BlockRunStatus>
  /** Maps edge IDs to their run result from the last execution */
  lastRunEdges: Map<string, EdgeRunStatus>
  /** The execution ID of the currently running execution */
  currentExecutionId: string | null
}
/**
@@ -54,6 +56,7 @@
  debugContext: null,
  lastRunPath: new Map(),
  lastRunEdges: new Map(),
  currentExecutionId: null,
}
/**
@@ -96,6 +99,10 @@
  setEdgeRunStatus: (workflowId: string, edgeId: string, status: EdgeRunStatus) => void
  /** Clears the run path and run edges for a workflow */
  clearRunPath: (workflowId: string) => void
  /** Stores the current execution ID for a workflow */
  setCurrentExecutionId: (workflowId: string, executionId: string | null) => void
  /** Returns the current execution ID for a workflow */
  getCurrentExecutionId: (workflowId: string) => string | null
  /** Resets the entire store to its initial empty state */
  reset: () => void
  /** Stores a serializable execution snapshot for a workflow */

View File

@@ -224,7 +224,7 @@ export const useTerminalConsoleStore = create<ConsoleStore>()(
        const newEntry = get().entries[0]
-       if (newEntry?.error) {
+       if (newEntry?.error && newEntry.blockType !== 'cancelled') {
          notifyBlockError({
            error: newEntry.error,
            blockName: newEntry.blockName || 'Unknown Block',
@@ -243,6 +243,11 @@
        useExecutionStore.getState().clearRunPath(workflowId)
      },
      clearExecutionEntries: (executionId: string) =>
        set((state) => ({
          entries: state.entries.filter((e) => e.executionId !== executionId),
        })),
      exportConsoleCSV: (workflowId: string) => {
        const entries = get().entries.filter((entry) => entry.workflowId === workflowId)
@@ -470,12 +475,24 @@
      },
      merge: (persistedState, currentState) => {
        const persisted = persistedState as Partial<ConsoleStore> | undefined
-       const entries = (persisted?.entries ?? currentState.entries).map((entry, index) => {
+       const rawEntries = persisted?.entries ?? currentState.entries
+       const oneHourAgo = Date.now() - 60 * 60 * 1000
+       const entries = rawEntries.map((entry, index) => {
+         let updated = entry
          if (entry.executionOrder === undefined) {
-           return { ...entry, executionOrder: index + 1 }
+           updated = { ...updated, executionOrder: index + 1 }
          }
-         return entry
+         if (
+           entry.isRunning &&
+           entry.startedAt &&
+           new Date(entry.startedAt).getTime() < oneHourAgo
+         ) {
+           updated = { ...updated, isRunning: false }
+         }
+         return updated
        })
        return {
          ...currentState,
          entries,

View File

@@ -51,6 +51,7 @@ export interface ConsoleStore {
  isOpen: boolean
  addConsole: (entry: Omit<ConsoleEntry, 'id' | 'timestamp'>) => ConsoleEntry
  clearWorkflowConsole: (workflowId: string) => void
+ clearExecutionEntries: (executionId: string) => void
  exportConsoleCSV: (workflowId: string) => void
  getWorkflowEntries: (workflowId: string) => ConsoleEntry[]
  toggleConsole: () => void
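With the interface extended, consumers can target a single execution rather than wiping the whole workflow console. A hypothetical call site; the wrapper name and import path are assumptions:

```typescript
import { useTerminalConsoleStore } from '@/stores/terminal' // import path is an assumption

// Hypothetical usage: when one execution is cancelled or abandoned, drop only
// its console rows and leave entries from other executions intact.
export function discardExecutionOutput(executionId: string) {
  useTerminalConsoleStore.getState().clearExecutionEntries(executionId)
}
```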


@@ -23,7 +23,12 @@ export const SYSTEM_SUBBLOCK_IDS: string[] = [
 * with default values from the trigger definition on load, which aren't present in
 * the deployed state, causing false positive change detection.
 */
-export const TRIGGER_RUNTIME_SUBBLOCK_IDS: string[] = ['webhookId', 'triggerPath', 'triggerConfig']
+export const TRIGGER_RUNTIME_SUBBLOCK_IDS: string[] = [
+  'webhookId',
+  'triggerPath',
+  'triggerConfig',
+  'triggerId',
+]

/**
 * Maximum number of consecutive failures before a trigger (schedule/webhook) is auto-disabled.
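Adding `triggerId` to this list means one more runtime-populated field is ignored when comparing the editor state against the deployed state. A sketch of how such an exclusion list is typically applied; the helper names and the JSON-based comparison are assumptions, not the project's actual change-detection code:

```typescript
const TRIGGER_RUNTIME_SUBBLOCK_IDS = ['webhookId', 'triggerPath', 'triggerConfig', 'triggerId']

type SubBlockValues = Record<string, unknown>

// Strip runtime-only subblocks before diffing, so values that exist only at
// runtime cannot register as user edits.
function stripRuntimeSubBlocks(values: SubBlockValues): SubBlockValues {
  return Object.fromEntries(
    Object.entries(values).filter(([id]) => !TRIGGER_RUNTIME_SUBBLOCK_IDS.includes(id))
  )
}

function hasTriggerChanged(current: SubBlockValues, deployed: SubBlockValues): boolean {
  return (
    JSON.stringify(stripRuntimeSubBlocks(current)) !==
    JSON.stringify(stripRuntimeSubBlocks(deployed))
  )
}
```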


@@ -0,0 +1,41 @@
CREATE TABLE "referral_attribution" (
"id" text PRIMARY KEY NOT NULL,
"user_id" text NOT NULL,
"organization_id" text,
"campaign_id" text,
"utm_source" text,
"utm_medium" text,
"utm_campaign" text,
"utm_content" text,
"referrer_url" text,
"landing_page" text,
"bonus_credit_amount" numeric DEFAULT '0' NOT NULL,
"created_at" timestamp DEFAULT now() NOT NULL,
CONSTRAINT "referral_attribution_user_id_unique" UNIQUE("user_id")
);
--> statement-breakpoint
CREATE TABLE "referral_campaigns" (
"id" text PRIMARY KEY NOT NULL,
"name" text NOT NULL,
"code" text,
"utm_source" text,
"utm_medium" text,
"utm_campaign" text,
"utm_content" text,
"bonus_credit_amount" numeric NOT NULL,
"is_active" boolean DEFAULT true NOT NULL,
"created_at" timestamp DEFAULT now() NOT NULL,
"updated_at" timestamp DEFAULT now() NOT NULL,
CONSTRAINT "referral_campaigns_code_unique" UNIQUE("code")
);
--> statement-breakpoint
ALTER TABLE "referral_attribution" ADD CONSTRAINT "referral_attribution_user_id_user_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."user"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "referral_attribution" ADD CONSTRAINT "referral_attribution_organization_id_organization_id_fk" FOREIGN KEY ("organization_id") REFERENCES "public"."organization"("id") ON DELETE set null ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "referral_attribution" ADD CONSTRAINT "referral_attribution_campaign_id_referral_campaigns_id_fk" FOREIGN KEY ("campaign_id") REFERENCES "public"."referral_campaigns"("id") ON DELETE set null ON UPDATE no action;--> statement-breakpoint
CREATE INDEX "referral_attribution_user_id_idx" ON "referral_attribution" USING btree ("user_id");--> statement-breakpoint
CREATE UNIQUE INDEX "referral_attribution_org_unique_idx" ON "referral_attribution" USING btree ("organization_id") WHERE "referral_attribution"."organization_id" IS NOT NULL;--> statement-breakpoint
CREATE INDEX "referral_attribution_campaign_id_idx" ON "referral_attribution" USING btree ("campaign_id");--> statement-breakpoint
CREATE INDEX "referral_attribution_utm_campaign_idx" ON "referral_attribution" USING btree ("utm_campaign");--> statement-breakpoint
CREATE INDEX "referral_attribution_utm_content_idx" ON "referral_attribution" USING btree ("utm_content");--> statement-breakpoint
CREATE INDEX "referral_attribution_created_at_idx" ON "referral_attribution" USING btree ("created_at");--> statement-breakpoint
CREATE INDEX "referral_campaigns_active_idx" ON "referral_campaigns" USING btree ("is_active");

File diff suppressed because it is too large


@@ -1072,6 +1072,13 @@
"when": 1770410282842, "when": 1770410282842,
"tag": "0153_complete_arclight", "tag": "0153_complete_arclight",
"breakpoints": true "breakpoints": true
},
{
"idx": 154,
"version": "7",
"when": 1770869658697,
"tag": "0154_bumpy_living_mummy",
"breakpoints": true
} }
] ]
} }


@@ -726,6 +726,61 @@ export const userStats = pgTable('user_stats', {
  billingBlockedReason: billingBlockedReasonEnum('billing_blocked_reason'),
})
export const referralCampaigns = pgTable(
'referral_campaigns',
{
id: text('id').primaryKey(),
name: text('name').notNull(),
code: text('code').unique(),
utmSource: text('utm_source'),
utmMedium: text('utm_medium'),
utmCampaign: text('utm_campaign'),
utmContent: text('utm_content'),
bonusCreditAmount: decimal('bonus_credit_amount').notNull(),
isActive: boolean('is_active').notNull().default(true),
createdAt: timestamp('created_at').notNull().defaultNow(),
updatedAt: timestamp('updated_at').notNull().defaultNow(),
},
(table) => ({
activeIdx: index('referral_campaigns_active_idx').on(table.isActive),
})
)
export const referralAttribution = pgTable(
'referral_attribution',
{
id: text('id').primaryKey(),
userId: text('user_id')
.notNull()
.references(() => user.id, { onDelete: 'cascade' })
.unique(),
organizationId: text('organization_id').references(() => organization.id, {
onDelete: 'set null',
}),
campaignId: text('campaign_id').references(() => referralCampaigns.id, {
onDelete: 'set null',
}),
utmSource: text('utm_source'),
utmMedium: text('utm_medium'),
utmCampaign: text('utm_campaign'),
utmContent: text('utm_content'),
referrerUrl: text('referrer_url'),
landingPage: text('landing_page'),
bonusCreditAmount: decimal('bonus_credit_amount').notNull().default('0'),
createdAt: timestamp('created_at').notNull().defaultNow(),
},
(table) => ({
userIdIdx: index('referral_attribution_user_id_idx').on(table.userId),
orgUniqueIdx: uniqueIndex('referral_attribution_org_unique_idx')
.on(table.organizationId)
.where(sql`${table.organizationId} IS NOT NULL`),
campaignIdIdx: index('referral_attribution_campaign_id_idx').on(table.campaignId),
utmCampaignIdx: index('referral_attribution_utm_campaign_idx').on(table.utmCampaign),
utmContentIdx: index('referral_attribution_utm_content_idx').on(table.utmContent),
createdAtIdx: index('referral_attribution_created_at_idx').on(table.createdAt),
})
)
export const customTools = pgTable(
  'custom_tools',
  {
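These Drizzle definitions mirror the migration above, and the indexes on `campaign_id`, `utm_campaign`, and `utm_content` suggest the tables are meant to be queried for reporting. A sketch of one such read-side query; the reporting code is not part of this diff, so the function below is illustrative only:

```typescript
import { db } from '@sim/db'
import { referralAttribution, referralCampaigns } from '@sim/db/schema'
import { count, eq } from 'drizzle-orm'

// Signups attributed per active campaign, grouped for an admin dashboard.
async function signupsPerActiveCampaign() {
  return db
    .select({
      campaignId: referralCampaigns.id,
      campaignName: referralCampaigns.name,
      signups: count(referralAttribution.id),
    })
    .from(referralCampaigns)
    .leftJoin(referralAttribution, eq(referralAttribution.campaignId, referralCampaigns.id))
    .where(eq(referralCampaigns.isActive, true))
    .groupBy(referralCampaigns.id, referralCampaigns.name)
}
```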