Compare commits

...

15 Commits

Author SHA1 Message Date
Waleed Latif
043f060c24 reran migrations 2026-02-11 20:14:34 -08:00
Waleed Latif
8abe8af289 update admin routes 2026-02-11 19:57:46 -08:00
Waleed
85284eb7c4 fix(terminal): reconnect to running executions after page refresh (#3200)
* fix(terminal): reconnect to running executions after page refresh

* fix(terminal): use ExecutionEvent type instead of any in reconnection stream

* fix(execution): type event buffer with ExecutionEvent instead of Record<string, unknown>

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>

* fix(execution): validate fromEventId query param in reconnection endpoint

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>

* Fix some bugs

* fix(variables): fix tag dropdown and cursor alignment in variables block (#3199)

* feat(confluence): added list space labels, delete label, delete page prop (#3201)

* updated route

* ack comments

* fix(execution): reset execution state in reconnection cleanup to unblock re-entry

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>

* fix(execution): restore running entries when reconnection is interrupted by navigation

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>

* done

* remove cast in ioredis types

* ack PR comments

---------

Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
Co-authored-by: Siddharth Ganesan <siddharthganesan@gmail.com>
2026-02-11 19:37:12 -08:00
Waleed
d068ed6d65 fix(change-detection): resolve false positive trigger block change detection (#3204) 2026-02-11 19:37:12 -08:00
Vikhyath Mondreti
bef43f3e84 fix build 2026-02-11 19:37:12 -08:00
Waleed
7453a2bd27 fix(confl): use recommended query param pattern for confluence route (#3202)
* fix(confl): use recommended query param pattern for confluence route

* use unused var
2026-02-11 19:37:12 -08:00
Waleed
bf41c78815 feat(confluence): added list space labels, delete label, delete page prop (#3201) 2026-02-11 19:37:12 -08:00
Waleed
ccdd42639f fix(variables): fix tag dropdown and cursor alignment in variables block (#3199) 2026-02-11 19:37:12 -08:00
Waleed
4a1dd261da fix(hotkeys): remove C, T, E tab-switching hotkeys (#3197) 2026-02-11 19:37:12 -08:00
Waleed
ae43131cfe improvement(oom): increase trigger machine size (#3196) 2026-02-11 19:37:12 -08:00
Waleed Latif
808dc4f8b4 remove duplicate index 2026-02-11 13:14:52 -08:00
Waleed Latif
27e03c44dc remove default 2026-02-11 13:07:42 -08:00
Waleed Latif
05d1c92e1a added zod 2026-02-11 12:29:37 -08:00
Waleed Latif
9228893c19 more 2026-02-11 12:25:19 -08:00
Waleed Latif
eedf67013c feat(creators): added referrers, code redemption, campaign tracking, etc 2026-02-11 11:54:50 -08:00
62 changed files with 15400 additions and 268 deletions

View File

@@ -41,9 +41,6 @@ Diese Tastenkombinationen wechseln zwischen den Panel-Tabs auf der rechten Seite
| Tastenkombination | Aktion |
|----------|--------|
| `C` | Copilot-Tab fokussieren |
| `T` | Toolbar-Tab fokussieren |
| `E` | Editor-Tab fokussieren |
| `Mod` + `F` | Toolbar-Suche fokussieren |
## Globale Navigation

View File

@@ -43,9 +43,6 @@ These shortcuts switch between panel tabs on the right side of the canvas.
| Shortcut | Action |
|----------|--------|
| `C` | Focus Copilot tab |
| `T` | Focus Toolbar tab |
| `E` | Focus Editor tab |
| `Mod` + `F` | Focus Toolbar search |
## Global Navigation

View File

@@ -399,6 +399,28 @@ Create a new custom property (metadata) on a Confluence page.
| ↳ `authorId` | string | Account ID of the version author |
| ↳ `createdAt` | string | ISO 8601 timestamp of version creation |
### `confluence_delete_page_property`
Delete a content property from a Confluence page by its property ID.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `domain` | string | Yes | Your Confluence domain (e.g., yourcompany.atlassian.net) |
| `pageId` | string | Yes | The ID of the page containing the property |
| `propertyId` | string | Yes | The ID of the property to delete |
| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `ts` | string | ISO 8601 timestamp of the operation |
| `pageId` | string | ID of the page |
| `propertyId` | string | ID of the deleted property |
| `deleted` | boolean | Deletion status |
### `confluence_search`
Search for content across Confluence pages, blog posts, and other content.
@@ -872,6 +894,90 @@ Add a label to a Confluence page for organization and categorization.
| `labelName` | string | Name of the added label |
| `labelId` | string | ID of the added label |
### `confluence_delete_label`
Remove a label from a Confluence page.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `domain` | string | Yes | Your Confluence domain (e.g., yourcompany.atlassian.net) |
| `pageId` | string | Yes | Confluence page ID to remove the label from |
| `labelName` | string | Yes | Name of the label to remove |
| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `ts` | string | ISO 8601 timestamp of the operation |
| `pageId` | string | Page ID the label was removed from |
| `labelName` | string | Name of the removed label |
| `deleted` | boolean | Deletion status |
### `confluence_get_pages_by_label`
Retrieve all pages that have a specific label applied.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `domain` | string | Yes | Your Confluence domain (e.g., yourcompany.atlassian.net) |
| `labelId` | string | Yes | The ID of the label to get pages for |
| `limit` | number | No | Maximum number of pages to return (default: 50, max: 250) |
| `cursor` | string | No | Pagination cursor from previous response |
| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `ts` | string | ISO 8601 timestamp of the operation |
| `labelId` | string | ID of the label |
| `pages` | array | Array of pages with this label |
| ↳ `id` | string | Unique page identifier |
| ↳ `title` | string | Page title |
| ↳ `status` | string | Page status (e.g., current, archived, trashed, draft) |
| ↳ `spaceId` | string | ID of the space containing the page |
| ↳ `parentId` | string | ID of the parent page (null if top-level) |
| ↳ `authorId` | string | Account ID of the page author |
| ↳ `createdAt` | string | ISO 8601 timestamp when the page was created |
| ↳ `version` | object | Page version information |
| ↳ `number` | number | Version number |
| ↳ `message` | string | Version message |
| ↳ `minorEdit` | boolean | Whether this is a minor edit |
| ↳ `authorId` | string | Account ID of the version author |
| ↳ `createdAt` | string | ISO 8601 timestamp of version creation |
| `nextCursor` | string | Cursor for fetching the next page of results |
### `confluence_list_space_labels`
List all labels associated with a Confluence space.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `domain` | string | Yes | Your Confluence domain (e.g., yourcompany.atlassian.net) |
| `spaceId` | string | Yes | The ID of the Confluence space to list labels from |
| `limit` | number | No | Maximum number of labels to return (default: 25, max: 250) |
| `cursor` | string | No | Pagination cursor from previous response |
| `cloudId` | string | No | Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain. |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `ts` | string | ISO 8601 timestamp of the operation |
| `spaceId` | string | ID of the space |
| `labels` | array | Array of labels on the space |
| ↳ `id` | string | Unique label identifier |
| ↳ `name` | string | Label name |
| ↳ `prefix` | string | Label prefix/type (e.g., global, my, team) |
| `nextCursor` | string | Cursor for fetching the next page of results |
### `confluence_get_space`
Get details about a specific Confluence space.

View File

@@ -42,9 +42,6 @@ Estos atajos cambian entre las pestañas del panel en el lado derecho del lienzo
| Atajo | Acción |
|----------|--------|
| `C` | Enfocar pestaña Copilot |
| `T` | Enfocar pestaña Barra de herramientas |
| `E` | Enfocar pestaña Editor |
| `Mod` + `F` | Enfocar búsqueda de Barra de herramientas |
## Navegación global

View File

@@ -42,9 +42,6 @@ Ces raccourcis permettent de basculer entre les onglets du panneau sur le côté
| Raccourci | Action |
|----------|--------|
| `C` | Activer l'onglet Copilot |
| `T` | Activer l'onglet Barre d'outils |
| `E` | Activer l'onglet Éditeur |
| `Mod` + `F` | Activer la recherche dans la barre d'outils |
## Navigation globale

View File

@@ -41,9 +41,6 @@ import { Callout } from 'fumadocs-ui/components/callout'
| ショートカット | 操作 |
|----------|--------|
| `C` | Copilotタブにフォーカス |
| `T` | Toolbarタブにフォーカス |
| `E` | Editorタブにフォーカス |
| `Mod` + `F` | Toolbar検索にフォーカス |
## グローバルナビゲーション

View File

@@ -41,9 +41,6 @@ import { Callout } from 'fumadocs-ui/components/callout'
| 快捷键 | 操作 |
|----------|--------|
| `C` | 聚焦 Copilot 标签页 |
| `T` | 聚焦 Toolbar 标签页 |
| `E` | 聚焦 Editor 标签页 |
| `Mod` + `F` | 聚焦 Toolbar 搜索 |
## 全局导航

View File

@@ -0,0 +1,215 @@
/**
* POST /api/attribution
*
* Automatic UTM-based referral attribution for new signups.
*
* Reads the `sim_utm` cookie (set by proxy on auth pages), verifies the user
* account was created after the cookie was set, matches a campaign by UTM
* specificity, and atomically inserts an attribution record + applies bonus credits.
*
* Idempotent — the unique constraint on `userId` prevents double-attribution.
*/
import { db } from '@sim/db'
import { referralAttribution, referralCampaigns, user, userStats } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { nanoid } from 'nanoid'
import { cookies } from 'next/headers'
import { NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { applyBonusCredits } from '@/lib/billing/credits/bonus'
const logger = createLogger('AttributionAPI')
const COOKIE_NAME = 'sim_utm'
const CLOCK_DRIFT_TOLERANCE_MS = 60 * 1000
const UtmCookieSchema = z.object({
utm_source: z.string().optional(),
utm_medium: z.string().optional(),
utm_campaign: z.string().optional(),
utm_content: z.string().optional(),
referrer_url: z.string().optional(),
landing_page: z.string().optional(),
created_at: z.string().min(1),
})
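// Illustrative decoded `sim_utm` payload (hypothetical values): a JSON object
// matching UtmCookieSchema, e.g.
//   {"utm_source":"twitter","utm_campaign":"launch","landing_page":"/signup",
//    "created_at":"1739300000000"}
// Only `created_at` is required; it is an epoch-milliseconds timestamp encoded
// as a string, which the handler below parses with Number().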
/**
* Finds the most specific active campaign matching the given UTM params.
* Null fields on a campaign act as wildcards. Ties broken by newest campaign.
*/
async function findMatchingCampaign(utmData: z.infer<typeof UtmCookieSchema>) {
const campaigns = await db
.select()
.from(referralCampaigns)
.where(eq(referralCampaigns.isActive, true))
let bestMatch: (typeof campaigns)[number] | null = null
let bestScore = -1
for (const campaign of campaigns) {
let score = 0
let mismatch = false
const fields = [
{ campaignVal: campaign.utmSource, utmVal: utmData.utm_source },
{ campaignVal: campaign.utmMedium, utmVal: utmData.utm_medium },
{ campaignVal: campaign.utmCampaign, utmVal: utmData.utm_campaign },
{ campaignVal: campaign.utmContent, utmVal: utmData.utm_content },
] as const
for (const { campaignVal, utmVal } of fields) {
if (campaignVal === null) continue
if (campaignVal === utmVal) {
score++
} else {
mismatch = true
break
}
}
if (!mismatch && score > 0) {
if (
score > bestScore ||
(score === bestScore &&
bestMatch &&
campaign.createdAt.getTime() > bestMatch.createdAt.getTime())
) {
bestScore = score
bestMatch = campaign
}
}
}
return bestMatch
}
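// Worked example (hypothetical campaigns) for utm_source=twitter, utm_campaign=launch:
// a campaign matching only utmSource 'twitter' (other fields null) scores 1;
// a campaign matching utmSource 'twitter' and utmCampaign 'launch' scores 2 and wins;
// a campaign requiring utmSource 'google' mismatches and is skipped outright;
// a campaign with all four UTM fields null scores 0 and can never match.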
export async function POST() {
try {
const session = await getSession()
if (!session?.user?.id) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const cookieStore = await cookies()
const utmCookie = cookieStore.get(COOKIE_NAME)
if (!utmCookie?.value) {
return NextResponse.json({ attributed: false, reason: 'no_utm_cookie' })
}
let utmData: z.infer<typeof UtmCookieSchema>
try {
let decoded: string
try {
decoded = decodeURIComponent(utmCookie.value)
} catch {
decoded = utmCookie.value
}
utmData = UtmCookieSchema.parse(JSON.parse(decoded))
} catch {
logger.warn('Failed to parse UTM cookie', { userId: session.user.id })
cookieStore.delete(COOKIE_NAME)
return NextResponse.json({ attributed: false, reason: 'invalid_cookie' })
}
const cookieCreatedAt = Number(utmData.created_at)
if (!Number.isFinite(cookieCreatedAt)) {
logger.warn('UTM cookie has invalid created_at timestamp', { userId: session.user.id })
cookieStore.delete(COOKIE_NAME)
return NextResponse.json({ attributed: false, reason: 'invalid_cookie' })
}
const userRows = await db
.select({ createdAt: user.createdAt })
.from(user)
.where(eq(user.id, session.user.id))
.limit(1)
if (userRows.length === 0) {
return NextResponse.json({ error: 'User not found' }, { status: 404 })
}
const userCreatedAt = userRows[0].createdAt.getTime()
if (userCreatedAt < cookieCreatedAt - CLOCK_DRIFT_TOLERANCE_MS) {
logger.info('User account predates UTM cookie, skipping attribution', {
userId: session.user.id,
userCreatedAt: new Date(userCreatedAt).toISOString(),
cookieCreatedAt: new Date(cookieCreatedAt).toISOString(),
})
cookieStore.delete(COOKIE_NAME)
return NextResponse.json({ attributed: false, reason: 'account_predates_cookie' })
}
const [existingStats] = await db
.select({ id: userStats.id })
.from(userStats)
.where(eq(userStats.userId, session.user.id))
.limit(1)
if (!existingStats) {
await db.insert(userStats).values({
id: nanoid(),
userId: session.user.id,
})
}
const matchedCampaign = await findMatchingCampaign(utmData)
if (!matchedCampaign) {
cookieStore.delete(COOKIE_NAME)
return NextResponse.json({ attributed: false, reason: 'no_matching_campaign' })
}
const bonusAmount = Number(matchedCampaign.bonusCreditAmount)
let attributed = false
await db.transaction(async (tx) => {
const result = await tx
.insert(referralAttribution)
.values({
id: nanoid(),
userId: session.user.id,
campaignId: matchedCampaign.id,
utmSource: utmData.utm_source || null,
utmMedium: utmData.utm_medium || null,
utmCampaign: utmData.utm_campaign || null,
utmContent: utmData.utm_content || null,
referrerUrl: utmData.referrer_url || null,
landingPage: utmData.landing_page || null,
bonusCreditAmount: bonusAmount.toString(),
})
.onConflictDoNothing({ target: referralAttribution.userId })
.returning({ id: referralAttribution.id })
if (result.length > 0) {
await applyBonusCredits(session.user.id, bonusAmount, tx)
attributed = true
}
})
if (attributed) {
logger.info('Referral attribution created and bonus credits applied', {
userId: session.user.id,
campaignId: matchedCampaign.id,
campaignName: matchedCampaign.name,
utmSource: utmData.utm_source,
utmCampaign: utmData.utm_campaign,
utmContent: utmData.utm_content,
bonusAmount,
})
} else {
logger.info('User already attributed, skipping', { userId: session.user.id })
}
cookieStore.delete(COOKIE_NAME)
return NextResponse.json({
attributed,
bonusAmount: attributed ? bonusAmount : undefined,
})
} catch (error) {
logger.error('Attribution error', { error })
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}
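A minimal client-side sketch of how a signup flow might call this endpoint; the path comes from the docblock above, the response shape from the handler, and the helper name is hypothetical:

// Fire-and-forget attribution claim after signup (hypothetical helper).
async function claimReferralAttribution(): Promise<void> {
  const res = await fetch('/api/attribution', { method: 'POST' })
  if (!res.ok) return // 401/404/500: nothing actionable client-side
  const data: { attributed: boolean; bonusAmount?: number; reason?: string } = await res.json()
  if (data.attributed) {
    console.log(`Bonus credits applied: $${data.bonusAmount}`)
  }
}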

View File

@@ -113,6 +113,7 @@ const ChatMessageSchema = z.object({
workflowId: z.string().optional(),
knowledgeId: z.string().optional(),
blockId: z.string().optional(),
blockIds: z.array(z.string()).optional(),
templateId: z.string().optional(),
executionId: z.string().optional(),
// For workflow_block, provide both workflowId and blockId
@@ -159,6 +160,20 @@ export async function POST(req: NextRequest) {
commands,
} = ChatMessageSchema.parse(body)
const normalizedContexts = Array.isArray(contexts)
? contexts.map((ctx) => {
if (ctx.kind !== 'blocks') return ctx
if (Array.isArray(ctx.blockIds) && ctx.blockIds.length > 0) return ctx
if (ctx.blockId) {
return {
...ctx,
blockIds: [ctx.blockId],
}
}
return ctx
})
: contexts
// Resolve workflowId - if not provided, use first workflow or find by name
const resolved = await resolveWorkflowIdForUser(
authenticatedUserId,
@@ -176,10 +191,10 @@ export async function POST(req: NextRequest) {
const userMessageIdToUse = userMessageId || crypto.randomUUID()
try {
logger.info(`[${tracker.requestId}] Received chat POST`, {
hasContexts: Array.isArray(contexts),
contextsCount: Array.isArray(contexts) ? contexts.length : 0,
contextsPreview: Array.isArray(contexts)
? contexts.map((c: any) => ({
hasContexts: Array.isArray(normalizedContexts),
contextsCount: Array.isArray(normalizedContexts) ? normalizedContexts.length : 0,
contextsPreview: Array.isArray(normalizedContexts)
? normalizedContexts.map((c: any) => ({
kind: c?.kind,
chatId: c?.chatId,
workflowId: c?.workflowId,
@@ -191,17 +206,25 @@ export async function POST(req: NextRequest) {
} catch {}
// Preprocess contexts server-side
let agentContexts: Array<{ type: string; content: string }> = []
if (Array.isArray(contexts) && contexts.length > 0) {
if (Array.isArray(normalizedContexts) && normalizedContexts.length > 0) {
try {
const { processContextsServer } = await import('@/lib/copilot/process-contents')
const processed = await processContextsServer(contexts as any, authenticatedUserId, message)
const processed = await processContextsServer(
normalizedContexts as any,
authenticatedUserId,
message
)
agentContexts = processed
logger.info(`[${tracker.requestId}] Contexts processed for request`, {
processedCount: agentContexts.length,
kinds: agentContexts.map((c) => c.type),
lengthPreview: agentContexts.map((c) => c.content?.length ?? 0),
})
if (Array.isArray(contexts) && contexts.length > 0 && agentContexts.length === 0) {
if (
Array.isArray(normalizedContexts) &&
normalizedContexts.length > 0 &&
agentContexts.length === 0
) {
logger.warn(
`[${tracker.requestId}] Contexts provided but none processed. Check executionId for logs contexts.`
)
@@ -246,11 +269,13 @@ export async function POST(req: NextRequest) {
mode,
model: selectedModel,
provider,
conversationId: effectiveConversationId,
conversationHistory,
contexts: agentContexts,
fileAttachments,
commands,
chatId: actualChatId,
prefetch,
implicitFeedback,
},
{
@@ -432,10 +457,15 @@ export async function POST(req: NextRequest) {
content: message,
timestamp: new Date().toISOString(),
...(fileAttachments && fileAttachments.length > 0 && { fileAttachments }),
- ...(Array.isArray(contexts) && contexts.length > 0 && { contexts }),
- ...(Array.isArray(contexts) &&
-   contexts.length > 0 && {
-     contentBlocks: [{ type: 'contexts', contexts: contexts as any, timestamp: Date.now() }],
-   }),
+ ...(Array.isArray(normalizedContexts) &&
+   normalizedContexts.length > 0 && {
+     contexts: normalizedContexts,
+   }),
+ ...(Array.isArray(normalizedContexts) &&
+   normalizedContexts.length > 0 && {
+     contentBlocks: [
+       { type: 'contexts', contexts: normalizedContexts as any, timestamp: Date.now() },
+     ],
+   }),
}
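The normalization is backward-compatible; a sketch of the mapping it performs (shapes taken from the schema fields above, values hypothetical):

// Legacy client input:  { kind: 'blocks', blockId: 'abc' }
// Normalized output:    { kind: 'blocks', blockId: 'abc', blockIds: ['abc'] }
// Contexts that already carry a non-empty blockIds array pass through unchanged,
// as do contexts of any other kind.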

View File

@@ -0,0 +1,170 @@
/**
* POST /api/referral-code/redeem
*
* Redeem a referral/promo code to receive bonus credits.
*
* Body:
* - code: string — The referral code to redeem
*
* Response: { redeemed: boolean, bonusAmount?: number, error?: string }
*
* Constraints:
* - Enterprise users cannot redeem codes
* - One redemption per user, ever (unique constraint on userId)
* - One redemption per organization for team users (partial unique on organizationId)
*/
import { db } from '@sim/db'
import { referralAttribution, referralCampaigns, userStats } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { nanoid } from 'nanoid'
import { NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { getHighestPrioritySubscription } from '@/lib/billing/core/subscription'
import { applyBonusCredits } from '@/lib/billing/credits/bonus'
const logger = createLogger('ReferralCodeRedemption')
const RedeemCodeSchema = z.object({
code: z.string().min(1, 'Code is required'),
})
export async function POST(request: Request) {
try {
const session = await getSession()
if (!session?.user?.id) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const body = await request.json()
const { code } = RedeemCodeSchema.parse(body)
const subscription = await getHighestPrioritySubscription(session.user.id)
if (subscription?.plan === 'enterprise') {
return NextResponse.json({
redeemed: false,
error: 'Enterprise accounts cannot redeem referral codes',
})
}
const isTeam = subscription?.plan === 'team'
const orgId = isTeam ? subscription.referenceId : null
const normalizedCode = code.trim().toUpperCase()
const [campaign] = await db
.select()
.from(referralCampaigns)
.where(and(eq(referralCampaigns.code, normalizedCode), eq(referralCampaigns.isActive, true)))
.limit(1)
if (!campaign) {
logger.info('Invalid code redemption attempt', {
userId: session.user.id,
code: normalizedCode,
})
return NextResponse.json({ error: 'Invalid or expired code' }, { status: 404 })
}
const [existingUserAttribution] = await db
.select({ id: referralAttribution.id })
.from(referralAttribution)
.where(eq(referralAttribution.userId, session.user.id))
.limit(1)
if (existingUserAttribution) {
return NextResponse.json({
redeemed: false,
error: 'You have already redeemed a code',
})
}
if (orgId) {
const [existingOrgAttribution] = await db
.select({ id: referralAttribution.id })
.from(referralAttribution)
.where(eq(referralAttribution.organizationId, orgId))
.limit(1)
if (existingOrgAttribution) {
return NextResponse.json({
redeemed: false,
error: 'A code has already been redeemed for your organization',
})
}
}
const [existingStats] = await db
.select({ id: userStats.id })
.from(userStats)
.where(eq(userStats.userId, session.user.id))
.limit(1)
if (!existingStats) {
await db.insert(userStats).values({
id: nanoid(),
userId: session.user.id,
})
}
const bonusAmount = Number(campaign.bonusCreditAmount)
let redeemed = false
await db.transaction(async (tx) => {
const result = await tx
.insert(referralAttribution)
.values({
id: nanoid(),
userId: session.user.id,
organizationId: orgId,
campaignId: campaign.id,
utmSource: null,
utmMedium: null,
utmCampaign: null,
utmContent: null,
referrerUrl: null,
landingPage: null,
bonusCreditAmount: bonusAmount.toString(),
})
.onConflictDoNothing()
.returning({ id: referralAttribution.id })
if (result.length > 0) {
await applyBonusCredits(session.user.id, bonusAmount, tx)
redeemed = true
}
})
if (redeemed) {
logger.info('Referral code redeemed', {
userId: session.user.id,
organizationId: orgId,
code: normalizedCode,
campaignId: campaign.id,
campaignName: campaign.name,
bonusAmount,
})
}
if (!redeemed) {
return NextResponse.json({
redeemed: false,
error: 'You have already redeemed a code',
})
}
return NextResponse.json({
redeemed: true,
bonusAmount,
})
} catch (error) {
if (error instanceof z.ZodError) {
return NextResponse.json({ error: error.errors[0].message }, { status: 400 })
}
logger.error('Referral code redemption error', { error })
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}
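A hedged client-side sketch for the redemption endpoint; the path and response shape come from the docblock and handler above, and the helper name is made up:

async function redeemReferralCode(code: string) {
  const res = await fetch('/api/referral-code/redeem', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ code }),
  })
  // e.g. { redeemed: true, bonusAmount: 20 } on success (amount hypothetical),
  // or { redeemed: false, error: 'You have already redeemed a code' }
  return res.json()
}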

View File

@@ -191,3 +191,84 @@ export async function GET(request: NextRequest) {
)
}
}
// Delete a label from a page
export async function DELETE(request: NextRequest) {
try {
const auth = await checkSessionOrInternalAuth(request)
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}
const {
domain,
accessToken,
cloudId: providedCloudId,
pageId,
labelName,
} = await request.json()
if (!domain) {
return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
}
if (!accessToken) {
return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
}
if (!pageId) {
return NextResponse.json({ error: 'Page ID is required' }, { status: 400 })
}
if (!labelName) {
return NextResponse.json({ error: 'Label name is required' }, { status: 400 })
}
const pageIdValidation = validateAlphanumericId(pageId, 'pageId', 255)
if (!pageIdValidation.isValid) {
return NextResponse.json({ error: pageIdValidation.error }, { status: 400 })
}
const cloudId = providedCloudId || (await getConfluenceCloudId(domain, accessToken))
const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
if (!cloudIdValidation.isValid) {
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
}
const encodedLabel = encodeURIComponent(labelName.trim())
const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/rest/api/content/${pageId}/label?name=${encodedLabel}`
const response = await fetch(url, {
method: 'DELETE',
headers: {
Accept: 'application/json',
Authorization: `Bearer ${accessToken}`,
},
})
if (!response.ok) {
const errorData = await response.json().catch(() => null)
logger.error('Confluence API error response:', {
status: response.status,
statusText: response.statusText,
error: JSON.stringify(errorData, null, 2),
})
const errorMessage =
errorData?.message || `Failed to delete Confluence label (${response.status})`
return NextResponse.json({ error: errorMessage }, { status: response.status })
}
return NextResponse.json({
pageId,
labelName,
deleted: true,
})
} catch (error) {
logger.error('Error deleting Confluence label:', error)
return NextResponse.json(
{ error: (error as Error).message || 'Internal server error' },
{ status: 500 }
)
}
}

View File

@@ -0,0 +1,103 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { validateAlphanumericId, validateJiraCloudId } from '@/lib/core/security/input-validation'
import { getConfluenceCloudId } from '@/tools/confluence/utils'
const logger = createLogger('ConfluencePagesByLabelAPI')
export const dynamic = 'force-dynamic'
export async function GET(request: NextRequest) {
try {
const auth = await checkSessionOrInternalAuth(request)
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}
const { searchParams } = new URL(request.url)
const domain = searchParams.get('domain')
const accessToken = searchParams.get('accessToken')
const labelId = searchParams.get('labelId')
const providedCloudId = searchParams.get('cloudId')
const limit = searchParams.get('limit') || '50'
const cursor = searchParams.get('cursor')
if (!domain) {
return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
}
if (!accessToken) {
return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
}
if (!labelId) {
return NextResponse.json({ error: 'Label ID is required' }, { status: 400 })
}
const labelIdValidation = validateAlphanumericId(labelId, 'labelId', 255)
if (!labelIdValidation.isValid) {
return NextResponse.json({ error: labelIdValidation.error }, { status: 400 })
}
const cloudId = providedCloudId || (await getConfluenceCloudId(domain, accessToken))
const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
if (!cloudIdValidation.isValid) {
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
}
const queryParams = new URLSearchParams()
queryParams.append('limit', String(Math.min(Number(limit), 250)))
if (cursor) {
queryParams.append('cursor', cursor)
}
const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/labels/${labelId}/pages?${queryParams.toString()}`
const response = await fetch(url, {
method: 'GET',
headers: {
Accept: 'application/json',
Authorization: `Bearer ${accessToken}`,
},
})
if (!response.ok) {
const errorData = await response.json().catch(() => null)
logger.error('Confluence API error response:', {
status: response.status,
statusText: response.statusText,
error: JSON.stringify(errorData, null, 2),
})
const errorMessage = errorData?.message || `Failed to get pages by label (${response.status})`
return NextResponse.json({ error: errorMessage }, { status: response.status })
}
const data = await response.json()
const pages = (data.results || []).map((page: any) => ({
id: page.id,
title: page.title,
status: page.status ?? null,
spaceId: page.spaceId ?? null,
parentId: page.parentId ?? null,
authorId: page.authorId ?? null,
createdAt: page.createdAt ?? null,
version: page.version ?? null,
}))
return NextResponse.json({
pages,
labelId,
nextCursor: data._links?.next
? new URL(data._links.next, 'https://placeholder').searchParams.get('cursor')
: null,
})
} catch (error) {
logger.error('Error getting pages by label:', error)
return NextResponse.json(
{ error: (error as Error).message || 'Internal server error' },
{ status: 500 }
)
}
}

View File

@@ -0,0 +1,98 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { validateAlphanumericId, validateJiraCloudId } from '@/lib/core/security/input-validation'
import { getConfluenceCloudId } from '@/tools/confluence/utils'
const logger = createLogger('ConfluenceSpaceLabelsAPI')
export const dynamic = 'force-dynamic'
export async function GET(request: NextRequest) {
try {
const auth = await checkSessionOrInternalAuth(request)
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}
const { searchParams } = new URL(request.url)
const domain = searchParams.get('domain')
const accessToken = searchParams.get('accessToken')
const spaceId = searchParams.get('spaceId')
const providedCloudId = searchParams.get('cloudId')
const limit = searchParams.get('limit') || '25'
const cursor = searchParams.get('cursor')
if (!domain) {
return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
}
if (!accessToken) {
return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
}
if (!spaceId) {
return NextResponse.json({ error: 'Space ID is required' }, { status: 400 })
}
const spaceIdValidation = validateAlphanumericId(spaceId, 'spaceId', 255)
if (!spaceIdValidation.isValid) {
return NextResponse.json({ error: spaceIdValidation.error }, { status: 400 })
}
const cloudId = providedCloudId || (await getConfluenceCloudId(domain, accessToken))
const cloudIdValidation = validateJiraCloudId(cloudId, 'cloudId')
if (!cloudIdValidation.isValid) {
return NextResponse.json({ error: cloudIdValidation.error }, { status: 400 })
}
const queryParams = new URLSearchParams()
queryParams.append('limit', String(Math.min(Number(limit), 250)))
if (cursor) {
queryParams.append('cursor', cursor)
}
const url = `https://api.atlassian.com/ex/confluence/${cloudId}/wiki/api/v2/spaces/${spaceId}/labels?${queryParams.toString()}`
const response = await fetch(url, {
method: 'GET',
headers: {
Accept: 'application/json',
Authorization: `Bearer ${accessToken}`,
},
})
if (!response.ok) {
const errorData = await response.json().catch(() => null)
logger.error('Confluence API error response:', {
status: response.status,
statusText: response.statusText,
error: JSON.stringify(errorData, null, 2),
})
const errorMessage = errorData?.message || `Failed to list space labels (${response.status})`
return NextResponse.json({ error: errorMessage }, { status: response.status })
}
const data = await response.json()
const labels = (data.results || []).map((label: any) => ({
id: label.id,
name: label.name,
prefix: label.prefix || 'global',
}))
return NextResponse.json({
labels,
spaceId,
nextCursor: data._links?.next
? new URL(data._links.next, 'https://placeholder').searchParams.get('cursor')
: null,
})
} catch (error) {
logger.error('Error listing space labels:', error)
return NextResponse.json(
{ error: (error as Error).message || 'Internal server error' },
{ status: 500 }
)
}
}
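Both label routes return an opaque nextCursor; a sketch of draining every page from the space-labels route. The internal route path is an assumption (the compare view omits filenames), as is the query string the caller assembles:

async function fetchAllSpaceLabels(baseQuery: string) {
  const all: Array<{ id: string; name: string; prefix: string }> = []
  let cursor: string | null = null
  do {
    const qs = cursor ? `${baseQuery}&cursor=${encodeURIComponent(cursor)}` : baseQuery
    // Path assumed; baseQuery carries domain, accessToken, and spaceId.
    const res = await fetch(`/api/tools/confluence/space-labels?${qs}`)
    const data = await res.json()
    all.push(...data.labels)
    cursor = data.nextCursor
  } while (cursor)
  return all
}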

View File

@@ -66,6 +66,12 @@
* Credits:
* POST /api/v1/admin/credits - Issue credits to user (by userId or email)
*
* Referral Campaigns:
* GET /api/v1/admin/referral-campaigns - List campaigns (?active=true/false)
* POST /api/v1/admin/referral-campaigns - Create campaign
* GET /api/v1/admin/referral-campaigns/:id - Get campaign details
* PATCH /api/v1/admin/referral-campaigns/:id - Update campaign fields
*
* Access Control (Permission Groups):
* GET /api/v1/admin/access-control - List permission groups (?organizationId=X)
* DELETE /api/v1/admin/access-control - Delete permission groups for org (?organizationId=X)
@@ -97,6 +103,7 @@ export type {
AdminOrganization,
AdminOrganizationBillingSummary,
AdminOrganizationDetail,
AdminReferralCampaign,
AdminSeatAnalytics,
AdminSingleResponse,
AdminSubscription,
@@ -111,6 +118,7 @@ export type {
AdminWorkspaceMember,
DbMember,
DbOrganization,
DbReferralCampaign,
DbSubscription,
DbUser,
DbUserStats,
@@ -139,6 +147,7 @@ export {
parseWorkflowVariables,
toAdminFolder,
toAdminOrganization,
toAdminReferralCampaign,
toAdminSubscription,
toAdminUser,
toAdminWorkflow,

View File

@@ -0,0 +1,142 @@
/**
* GET /api/v1/admin/referral-campaigns/:id
*
* Get a single referral campaign by ID.
*
* PATCH /api/v1/admin/referral-campaigns/:id
*
* Update campaign fields. All fields are optional.
*
* Body:
* - name: string (non-empty) - Campaign name
* - bonusCreditAmount: number (> 0) - Bonus credits in dollars
* - isActive: boolean - Enable/disable the campaign
* - code: string | null (min 6 chars, auto-uppercased, null to remove) - Redeemable code
* - utmSource: string | null - UTM source match (null = wildcard)
* - utmMedium: string | null - UTM medium match (null = wildcard)
* - utmCampaign: string | null - UTM campaign match (null = wildcard)
* - utmContent: string | null - UTM content match (null = wildcard)
*/
import { db } from '@sim/db'
import { referralCampaigns } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { withAdminAuthParams } from '@/app/api/v1/admin/middleware'
import {
badRequestResponse,
internalErrorResponse,
notFoundResponse,
singleResponse,
} from '@/app/api/v1/admin/responses'
import { toAdminReferralCampaign } from '@/app/api/v1/admin/types'
const logger = createLogger('AdminReferralCampaignDetailAPI')
interface RouteParams {
id: string
}
export const GET = withAdminAuthParams<RouteParams>(async (_, context) => {
try {
const { id: campaignId } = await context.params
const [campaign] = await db
.select()
.from(referralCampaigns)
.where(eq(referralCampaigns.id, campaignId))
.limit(1)
if (!campaign) {
return notFoundResponse('Campaign')
}
logger.info(`Admin API: Retrieved referral campaign ${campaignId}`)
return singleResponse(toAdminReferralCampaign(campaign, getBaseUrl()))
} catch (error) {
logger.error('Admin API: Failed to get referral campaign', { error })
return internalErrorResponse('Failed to get referral campaign')
}
})
export const PATCH = withAdminAuthParams<RouteParams>(async (request, context) => {
try {
const { id: campaignId } = await context.params
const body = await request.json()
const [existing] = await db
.select()
.from(referralCampaigns)
.where(eq(referralCampaigns.id, campaignId))
.limit(1)
if (!existing) {
return notFoundResponse('Campaign')
}
const updateData: Record<string, unknown> = { updatedAt: new Date() }
if (body.name !== undefined) {
if (typeof body.name !== 'string' || body.name.trim().length === 0) {
return badRequestResponse('name must be a non-empty string')
}
updateData.name = body.name.trim()
}
if (body.bonusCreditAmount !== undefined) {
if (
typeof body.bonusCreditAmount !== 'number' ||
!Number.isFinite(body.bonusCreditAmount) ||
body.bonusCreditAmount <= 0
) {
return badRequestResponse('bonusCreditAmount must be a positive number')
}
updateData.bonusCreditAmount = body.bonusCreditAmount.toString()
}
if (body.isActive !== undefined) {
if (typeof body.isActive !== 'boolean') {
return badRequestResponse('isActive must be a boolean')
}
updateData.isActive = body.isActive
}
if (body.code !== undefined) {
if (body.code !== null) {
if (typeof body.code !== 'string') {
return badRequestResponse('code must be a string or null')
}
if (body.code.trim().length < 6) {
return badRequestResponse('code must be at least 6 characters')
}
}
updateData.code = body.code ? body.code.trim().toUpperCase() : null
}
for (const field of ['utmSource', 'utmMedium', 'utmCampaign', 'utmContent'] as const) {
if (body[field] !== undefined) {
if (body[field] !== null && typeof body[field] !== 'string') {
return badRequestResponse(`${field} must be a string or null`)
}
updateData[field] = body[field] || null
}
}
const [updated] = await db
.update(referralCampaigns)
.set(updateData)
.where(eq(referralCampaigns.id, campaignId))
.returning()
logger.info(`Admin API: Updated referral campaign ${campaignId}`, {
fields: Object.keys(updateData).filter((k) => k !== 'updatedAt'),
})
return singleResponse(toAdminReferralCampaign(updated, getBaseUrl()))
} catch (error) {
logger.error('Admin API: Failed to update referral campaign', { error })
return internalErrorResponse('Failed to update referral campaign')
}
})
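An illustrative PATCH body exercising the validations above (all values hypothetical):

// PATCH /api/v1/admin/referral-campaigns/:id
// {
//   "name": "Spring Launch",
//   "bonusCreditAmount": 25,
//   "isActive": true,
//   "code": "spring25",
//   "utmMedium": null
// }
// "spring25" is trimmed and stored as "SPRING25"; "utmMedium": null clears the
// field back to a wildcard match.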

View File

@@ -0,0 +1,140 @@
/**
* GET /api/v1/admin/referral-campaigns
*
* List referral campaigns with optional filtering and pagination.
*
* Query Parameters:
* - active: string (optional) - Filter by active status ('true' or 'false')
* - limit: number (default: 50, max: 250)
* - offset: number (default: 0)
*
* POST /api/v1/admin/referral-campaigns
*
* Create a new referral campaign.
*
* Body:
* - name: string (required) - Campaign name
* - bonusCreditAmount: number (required, > 0) - Bonus credits in dollars
* - code: string | null (optional, min 6 chars, auto-uppercased) - Redeemable code
* - utmSource: string | null (optional) - UTM source match (null = wildcard)
* - utmMedium: string | null (optional) - UTM medium match (null = wildcard)
* - utmCampaign: string | null (optional) - UTM campaign match (null = wildcard)
* - utmContent: string | null (optional) - UTM content match (null = wildcard)
*/
import { db } from '@sim/db'
import { referralCampaigns } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { count, eq, type SQL } from 'drizzle-orm'
import { nanoid } from 'nanoid'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { withAdminAuth } from '@/app/api/v1/admin/middleware'
import {
badRequestResponse,
internalErrorResponse,
listResponse,
singleResponse,
} from '@/app/api/v1/admin/responses'
import {
type AdminReferralCampaign,
createPaginationMeta,
parsePaginationParams,
toAdminReferralCampaign,
} from '@/app/api/v1/admin/types'
const logger = createLogger('AdminReferralCampaignsAPI')
export const GET = withAdminAuth(async (request) => {
const url = new URL(request.url)
const { limit, offset } = parsePaginationParams(url)
const activeFilter = url.searchParams.get('active')
try {
const conditions: SQL<unknown>[] = []
if (activeFilter === 'true') {
conditions.push(eq(referralCampaigns.isActive, true))
} else if (activeFilter === 'false') {
conditions.push(eq(referralCampaigns.isActive, false))
}
const whereClause = conditions.length > 0 ? conditions[0] : undefined
const baseUrl = getBaseUrl()
const [countResult, campaigns] = await Promise.all([
db.select({ total: count() }).from(referralCampaigns).where(whereClause),
db
.select()
.from(referralCampaigns)
.where(whereClause)
.orderBy(referralCampaigns.createdAt)
.limit(limit)
.offset(offset),
])
const total = countResult[0].total
const data: AdminReferralCampaign[] = campaigns.map((c) => toAdminReferralCampaign(c, baseUrl))
const pagination = createPaginationMeta(total, limit, offset)
logger.info(`Admin API: Listed ${data.length} referral campaigns (total: ${total})`)
return listResponse(data, pagination)
} catch (error) {
logger.error('Admin API: Failed to list referral campaigns', { error })
return internalErrorResponse('Failed to list referral campaigns')
}
})
export const POST = withAdminAuth(async (request) => {
try {
const body = await request.json()
const { name, code, utmSource, utmMedium, utmCampaign, utmContent, bonusCreditAmount } = body
if (!name || typeof name !== 'string') {
return badRequestResponse('name is required and must be a string')
}
if (
typeof bonusCreditAmount !== 'number' ||
!Number.isFinite(bonusCreditAmount) ||
bonusCreditAmount <= 0
) {
return badRequestResponse('bonusCreditAmount must be a positive number')
}
if (code !== undefined && code !== null) {
if (typeof code !== 'string') {
return badRequestResponse('code must be a string or null')
}
if (code.trim().length < 6) {
return badRequestResponse('code must be at least 6 characters')
}
}
const id = nanoid()
const [campaign] = await db
.insert(referralCampaigns)
.values({
id,
name,
code: code ? code.trim().toUpperCase() : null,
utmSource: utmSource || null,
utmMedium: utmMedium || null,
utmCampaign: utmCampaign || null,
utmContent: utmContent || null,
bonusCreditAmount: bonusCreditAmount.toString(),
})
.returning()
logger.info(`Admin API: Created referral campaign ${id}`, {
name,
code: campaign.code,
bonusCreditAmount,
})
return singleResponse(toAdminReferralCampaign(campaign, getBaseUrl()))
} catch (error) {
logger.error('Admin API: Failed to create referral campaign', { error })
return internalErrorResponse('Failed to create referral campaign')
}
})
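An illustrative creation request for the POST handler above (values hypothetical):

// POST /api/v1/admin/referral-campaigns
// {
//   "name": "Creator push",
//   "bonusCreditAmount": 10,
//   "code": "CREATOR10",
//   "utmSource": "youtube"
// }
// Only name and bonusCreditAmount are required; the response is the created
// campaign, including the signupUrl derived from its UTM fields.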

View File

@@ -8,6 +8,7 @@
import type {
member,
organization,
referralCampaigns,
subscription,
user,
userStats,
@@ -31,6 +32,7 @@ export type DbOrganization = InferSelectModel<typeof organization>
export type DbSubscription = InferSelectModel<typeof subscription>
export type DbMember = InferSelectModel<typeof member>
export type DbUserStats = InferSelectModel<typeof userStats>
export type DbReferralCampaign = InferSelectModel<typeof referralCampaigns>
// =============================================================================
// Pagination
@@ -646,3 +648,49 @@ export interface AdminDeployResult {
export interface AdminUndeployResult {
isDeployed: boolean
}
// =============================================================================
// Referral Campaign Types
// =============================================================================
export interface AdminReferralCampaign {
id: string
name: string
code: string | null
utmSource: string | null
utmMedium: string | null
utmCampaign: string | null
utmContent: string | null
bonusCreditAmount: string
isActive: boolean
signupUrl: string | null
createdAt: string
updatedAt: string
}
export function toAdminReferralCampaign(
dbCampaign: DbReferralCampaign,
baseUrl: string
): AdminReferralCampaign {
const utmParams = new URLSearchParams()
if (dbCampaign.utmSource) utmParams.set('utm_source', dbCampaign.utmSource)
if (dbCampaign.utmMedium) utmParams.set('utm_medium', dbCampaign.utmMedium)
if (dbCampaign.utmCampaign) utmParams.set('utm_campaign', dbCampaign.utmCampaign)
if (dbCampaign.utmContent) utmParams.set('utm_content', dbCampaign.utmContent)
const query = utmParams.toString()
return {
id: dbCampaign.id,
name: dbCampaign.name,
code: dbCampaign.code,
utmSource: dbCampaign.utmSource,
utmMedium: dbCampaign.utmMedium,
utmCampaign: dbCampaign.utmCampaign,
utmContent: dbCampaign.utmContent,
bonusCreditAmount: dbCampaign.bonusCreditAmount,
isActive: dbCampaign.isActive,
signupUrl: query ? `${baseUrl}/signup?${query}` : null,
createdAt: dbCampaign.createdAt.toISOString(),
updatedAt: dbCampaign.updatedAt.toISOString(),
}
}
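// Worked example (hypothetical campaign): utmSource 'youtube' and utmCampaign
// 'creator-push' with baseUrl 'https://sim.example.com' produce
//   signupUrl: 'https://sim.example.com/signup?utm_source=youtube&utm_campaign=creator-push'
// A code-only campaign (all four UTM fields null) produces signupUrl: null.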

View File

@@ -29,7 +29,7 @@ const patchBodySchema = z
description: z
.string()
.trim()
.max(500, 'Description must be 500 characters or less')
.max(2000, 'Description must be 2000 characters or less')
.nullable()
.optional(),
isActive: z.literal(true).optional(), // Set to true to activate this version

View File

@@ -12,7 +12,7 @@ import {
import { generateRequestId } from '@/lib/core/utils/request'
import { SSE_HEADERS } from '@/lib/core/utils/sse'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { markExecutionCancelled } from '@/lib/execution/cancellation'
import { createExecutionEventWriter, setExecutionMeta } from '@/lib/execution/event-buffer'
import { processInputFileFields } from '@/lib/execution/files'
import { preprocessExecution } from '@/lib/execution/preprocessing'
import { LoggingSession } from '@/lib/logs/execution/logging-session'
@@ -700,15 +700,27 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
const timeoutController = createTimeoutAbortController(preprocessResult.executionTimeout?.sync)
let isStreamClosed = false
const eventWriter = createExecutionEventWriter(executionId)
setExecutionMeta(executionId, {
status: 'active',
userId: actorUserId,
workflowId,
}).catch(() => {})
const stream = new ReadableStream<Uint8Array>({
async start(controller) {
- const sendEvent = (event: ExecutionEvent) => {
-   if (isStreamClosed) return
-   try {
-     controller.enqueue(encodeSSEEvent(event))
-   } catch {
-     isStreamClosed = true
-   }
- }
+ let finalMetaStatus: 'complete' | 'error' | 'cancelled' | null = null
+ const sendEvent = (event: ExecutionEvent) => {
+   if (!isStreamClosed) {
+     try {
+       controller.enqueue(encodeSSEEvent(event))
+     } catch {
+       isStreamClosed = true
+     }
+   }
+   if (event.type !== 'stream:chunk' && event.type !== 'stream:done') {
+     eventWriter.write(event).catch(() => {})
+   }
+ }
@@ -829,14 +841,12 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
const reader = streamingExec.stream.getReader()
const decoder = new TextDecoder()
let chunkCount = 0
try {
while (true) {
const { done, value } = await reader.read()
if (done) break
chunkCount++
const chunk = decoder.decode(value, { stream: true })
sendEvent({
type: 'stream:chunk',
@@ -951,6 +961,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
duration: result.metadata?.duration || 0,
},
})
finalMetaStatus = 'error'
} else {
logger.info(`[${requestId}] Workflow execution was cancelled`)
@@ -963,6 +974,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
duration: result.metadata?.duration || 0,
},
})
finalMetaStatus = 'cancelled'
}
return
}
@@ -986,6 +998,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
endTime: result.metadata?.endTime || new Date().toISOString(),
},
})
finalMetaStatus = 'complete'
} catch (error: unknown) {
const isTimeout = isTimeoutError(error) || timeoutController.isTimedOut()
const errorMessage = isTimeout
@@ -1017,7 +1030,18 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
duration: executionResult?.metadata?.duration || 0,
},
})
finalMetaStatus = 'error'
} finally {
try {
await eventWriter.close()
} catch (closeError) {
logger.warn(`[${requestId}] Failed to close event writer`, {
error: closeError instanceof Error ? closeError.message : String(closeError),
})
}
if (finalMetaStatus) {
setExecutionMeta(executionId, { status: finalMetaStatus }).catch(() => {})
}
timeoutController.cleanup()
if (executionId) {
await cleanupExecutionBase64Cache(executionId)
@@ -1032,10 +1056,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
},
cancel() {
isStreamClosed = true
timeoutController.cleanup()
- logger.info(`[${requestId}] Client aborted SSE stream, signalling cancellation`)
- timeoutController.abort()
- markExecutionCancelled(executionId).catch(() => {})
+ logger.info(`[${requestId}] Client disconnected from SSE stream`)
},
})
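A sketch of the event-buffer contract these changes rely on, inferred from the call sites here and in the reconnection route below; the names match the imports, but the exact shapes are assumptions:

type ExecutionStreamStatus = 'active' | 'complete' | 'error' | 'cancelled'

interface ExecutionMeta {
  status: ExecutionStreamStatus
  userId?: string
  workflowId?: string
}

// setExecutionMeta(executionId, meta): upserts metadata; called with 'active'
// at stream start and with the terminal status in the finally block.
// createExecutionEventWriter(executionId): returns { write(event), close() };
// every non-chunk event is persisted so a reconnecting client can replay it.
// getExecutionMeta / readExecutionEvents(executionId, fromEventId): the read
// side used by the reconnection endpoint to replay events in order.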

View File

@@ -0,0 +1,170 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { SSE_HEADERS } from '@/lib/core/utils/sse'
import {
type ExecutionStreamStatus,
getExecutionMeta,
readExecutionEvents,
} from '@/lib/execution/event-buffer'
import { formatSSEEvent } from '@/lib/workflows/executor/execution-events'
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
const logger = createLogger('ExecutionStreamReconnectAPI')
const POLL_INTERVAL_MS = 500
const MAX_POLL_DURATION_MS = 10 * 60 * 1000 // 10 minutes
function isTerminalStatus(status: ExecutionStreamStatus): boolean {
return status === 'complete' || status === 'error' || status === 'cancelled'
}
export const runtime = 'nodejs'
export const dynamic = 'force-dynamic'
export async function GET(
req: NextRequest,
{ params }: { params: Promise<{ id: string; executionId: string }> }
) {
const { id: workflowId, executionId } = await params
try {
const auth = await checkHybridAuth(req, { requireWorkflowId: false })
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}
const workflowAuthorization = await authorizeWorkflowByWorkspacePermission({
workflowId,
userId: auth.userId,
action: 'read',
})
if (!workflowAuthorization.allowed) {
return NextResponse.json(
{ error: workflowAuthorization.message || 'Access denied' },
{ status: workflowAuthorization.status }
)
}
const meta = await getExecutionMeta(executionId)
if (!meta) {
return NextResponse.json({ error: 'Execution buffer not found or expired' }, { status: 404 })
}
if (meta.workflowId && meta.workflowId !== workflowId) {
return NextResponse.json(
{ error: 'Execution does not belong to this workflow' },
{ status: 403 }
)
}
const fromParam = req.nextUrl.searchParams.get('from')
const parsed = fromParam ? Number.parseInt(fromParam, 10) : 0
const fromEventId = Number.isFinite(parsed) && parsed >= 0 ? parsed : 0
logger.info('Reconnection stream requested', {
workflowId,
executionId,
fromEventId,
metaStatus: meta.status,
})
const encoder = new TextEncoder()
let closed = false
const stream = new ReadableStream<Uint8Array>({
async start(controller) {
let lastEventId = fromEventId
const pollDeadline = Date.now() + MAX_POLL_DURATION_MS
const enqueue = (text: string) => {
if (closed) return
try {
controller.enqueue(encoder.encode(text))
} catch {
closed = true
}
}
try {
const events = await readExecutionEvents(executionId, lastEventId)
for (const entry of events) {
if (closed) return
enqueue(formatSSEEvent(entry.event))
lastEventId = entry.eventId
}
const currentMeta = await getExecutionMeta(executionId)
if (!currentMeta || isTerminalStatus(currentMeta.status)) {
enqueue('data: [DONE]\n\n')
if (!closed) controller.close()
return
}
while (!closed && Date.now() < pollDeadline) {
await new Promise((resolve) => setTimeout(resolve, POLL_INTERVAL_MS))
if (closed) return
const newEvents = await readExecutionEvents(executionId, lastEventId)
for (const entry of newEvents) {
if (closed) return
enqueue(formatSSEEvent(entry.event))
lastEventId = entry.eventId
}
const polledMeta = await getExecutionMeta(executionId)
if (!polledMeta || isTerminalStatus(polledMeta.status)) {
const finalEvents = await readExecutionEvents(executionId, lastEventId)
for (const entry of finalEvents) {
if (closed) return
enqueue(formatSSEEvent(entry.event))
lastEventId = entry.eventId
}
enqueue('data: [DONE]\n\n')
if (!closed) controller.close()
return
}
}
if (!closed) {
logger.warn('Reconnection stream poll deadline reached', { executionId })
enqueue('data: [DONE]\n\n')
controller.close()
}
} catch (error) {
logger.error('Error in reconnection stream', {
executionId,
error: error instanceof Error ? error.message : String(error),
})
if (!closed) {
try {
controller.close()
} catch {}
}
}
},
cancel() {
closed = true
logger.info('Client disconnected from reconnection stream', { executionId })
},
})
return new NextResponse(stream, {
headers: {
...SSE_HEADERS,
'X-Execution-Id': executionId,
},
})
} catch (error: any) {
logger.error('Failed to start reconnection stream', {
workflowId,
executionId,
error: error.message,
})
return NextResponse.json(
{ error: error.message || 'Failed to start reconnection stream' },
{ status: 500 }
)
}
}
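A minimal client sketch for consuming the reconnection stream. The route path is assumed from the { id, executionId } params (the compare view omits filenames), and the SSE framing is simplified to 'data: <json>' events terminated by 'data: [DONE]':

async function reconnectToExecution(workflowId: string, executionId: string, fromEventId = 0) {
  // Path is an assumption; `from` matches the query param validated above.
  const res = await fetch(
    `/api/workflows/${workflowId}/executions/${executionId}/reconnect?from=${fromEventId}`
  )
  if (!res.ok || !res.body) return
  const reader = res.body.getReader()
  const decoder = new TextDecoder()
  let buffer = ''
  while (true) {
    const { done, value } = await reader.read()
    if (done) break
    buffer += decoder.decode(value, { stream: true })
    const frames = buffer.split('\n\n') // minimal SSE framing: blank-line separated
    buffer = frames.pop() ?? ''
    for (const frame of frames) {
      const payload = frame.replace(/^data: /, '')
      if (payload === '[DONE]') return
      handleExecutionEvent(JSON.parse(payload))
    }
  }
}

function handleExecutionEvent(event: unknown): void {
  console.log('execution event', event) // stub: a real handler would update UI state
}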

View File

@@ -13,9 +13,6 @@ export type CommandId =
| 'goto-logs'
| 'open-search'
| 'run-workflow'
| 'focus-copilot-tab'
| 'focus-toolbar-tab'
| 'focus-editor-tab'
| 'clear-terminal-console'
| 'focus-toolbar-search'
| 'clear-notifications'
@@ -75,21 +72,6 @@ export const COMMAND_DEFINITIONS: Record<CommandId, CommandDefinition> = {
shortcut: 'Mod+Enter',
allowInEditable: false,
},
'focus-copilot-tab': {
id: 'focus-copilot-tab',
shortcut: 'C',
allowInEditable: false,
},
'focus-toolbar-tab': {
id: 'focus-toolbar-tab',
shortcut: 'T',
allowInEditable: false,
},
'focus-editor-tab': {
id: 'focus-editor-tab',
shortcut: 'E',
allowInEditable: false,
},
'clear-terminal-console': {
id: 'clear-terminal-console',
shortcut: 'Mod+D',

View File

@@ -113,7 +113,7 @@ export function VersionDescriptionModal({
className='min-h-[120px] resize-none'
value={description}
onChange={(e) => setDescription(e.target.value)}
maxLength={500}
maxLength={2000}
disabled={isGenerating}
/>
<div className='flex items-center justify-between'>
@@ -123,7 +123,7 @@ export function VersionDescriptionModal({
</p>
)}
{!updateMutation.error && !generateMutation.error && <div />}
<p className='text-[11px] text-[var(--text-tertiary)]'>{description.length}/500</p>
<p className='text-[11px] text-[var(--text-tertiary)]'>{description.length}/2000</p>
</div>
</ModalBody>
<ModalFooter>

View File

@@ -57,6 +57,21 @@ export function useChangeDetection({
}
}
if (block.triggerMode) {
const triggerConfigValue = blockSubValues?.triggerConfig
if (
triggerConfigValue &&
typeof triggerConfigValue === 'object' &&
!subBlocks.triggerConfig
) {
subBlocks.triggerConfig = {
id: 'triggerConfig',
type: 'short-input',
value: triggerConfigValue,
}
}
}
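// In effect: when the store holds a triggerConfig value that the block's
// definition lacks, a synthetic sub-block is backfilled so both sides of the
// comparison see the same key. Hypothetical shape:
//   blockSubValues.triggerConfig = { scheduleType: 'cron', cron: '0 * * * *' }
//   => subBlocks.triggerConfig = { id: 'triggerConfig', type: 'short-input', value: ... }
// so the missing key no longer registers as a spurious change.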
blocksWithSubBlocks[blockId] = {
...block,
subBlocks,

View File

@@ -340,13 +340,7 @@ export const Panel = memo(function Panel() {
* Register global keyboard shortcuts using the central commands registry.
*
* - Mod+Enter: Run / cancel workflow (matches the Run button behavior)
* - C: Focus Copilot tab
* - T: Focus Toolbar tab
* - E: Focus Editor tab
* - Mod+F: Focus Toolbar tab and search input
*
* The tab-switching commands are disabled inside editable elements so typing
* in inputs or textareas is not interrupted.
*/
useRegisterGlobalCommands(() =>
createCommands([
@@ -363,33 +357,6 @@ export const Panel = memo(function Panel() {
allowInEditable: false,
},
},
{
id: 'focus-copilot-tab',
handler: () => {
setActiveTab('copilot')
},
overrides: {
allowInEditable: false,
},
},
{
id: 'focus-toolbar-tab',
handler: () => {
setActiveTab('toolbar')
},
overrides: {
allowInEditable: false,
},
},
{
id: 'focus-editor-tab',
handler: () => {
setActiveTab('editor')
},
overrides: {
allowInEditable: false,
},
},
{
id: 'focus-toolbar-search',
handler: () => {

View File

@@ -1,4 +1,4 @@
import { useCallback, useRef, useState } from 'react'
import { useCallback, useEffect, useRef, useState } from 'react'
import { createLogger } from '@sim/logger'
import { useQueryClient } from '@tanstack/react-query'
import { v4 as uuidv4 } from 'uuid'
@@ -46,7 +46,13 @@ import { useWorkflowStore } from '@/stores/workflows/workflow/store'
const logger = createLogger('useWorkflowExecution')
// Debug state validation result
/**
* Module-level Set tracking which workflows have an active reconnection effect.
* Prevents multiple hook instances (from different components) from starting
* concurrent reconnection streams for the same workflow during the same mount cycle.
*/
const activeReconnections = new Set<string>()
interface DebugValidationResult {
isValid: boolean
error?: string
@@ -54,7 +60,7 @@ interface DebugValidationResult {
interface BlockEventHandlerConfig {
workflowId?: string
executionId?: string
executionIdRef: { current: string }
workflowEdges: Array<{ id: string; target: string; sourceHandle?: string | null }>
activeBlocksSet: Set<string>
accumulatedBlockLogs: BlockLog[]
@@ -108,12 +114,15 @@ export function useWorkflowExecution() {
const queryClient = useQueryClient()
const currentWorkflow = useCurrentWorkflow()
const { activeWorkflowId, workflows } = useWorkflowRegistry()
const { toggleConsole, addConsole, updateConsole, cancelRunningEntries } =
const { toggleConsole, addConsole, updateConsole, cancelRunningEntries, clearExecutionEntries } =
useTerminalConsoleStore()
const hasHydrated = useTerminalConsoleStore((s) => s._hasHydrated)
const { getAllVariables } = useEnvironmentStore()
const { getVariablesByWorkflowId, variables } = useVariablesStore()
const { isExecuting, isDebugging, pendingBlocks, executor, debugContext } =
useCurrentWorkflowExecution()
const setCurrentExecutionId = useExecutionStore((s) => s.setCurrentExecutionId)
const getCurrentExecutionId = useExecutionStore((s) => s.getCurrentExecutionId)
const setIsExecuting = useExecutionStore((s) => s.setIsExecuting)
const setIsDebugging = useExecutionStore((s) => s.setIsDebugging)
const setPendingBlocks = useExecutionStore((s) => s.setPendingBlocks)
@@ -297,7 +306,7 @@ export function useWorkflowExecution() {
(config: BlockEventHandlerConfig) => {
const {
workflowId,
executionId,
executionIdRef,
workflowEdges,
activeBlocksSet,
accumulatedBlockLogs,
@@ -308,6 +317,14 @@ export function useWorkflowExecution() {
onBlockCompleteCallback,
} = config
/** Returns true if this execution was cancelled or superseded by another run. */
const isStaleExecution = () =>
!!(
workflowId &&
executionIdRef.current &&
useExecutionStore.getState().getCurrentExecutionId(workflowId) !== executionIdRef.current
)
const updateActiveBlocks = (blockId: string, isActive: boolean) => {
if (!workflowId) return
if (isActive) {
@@ -360,7 +377,7 @@ export function useWorkflowExecution() {
endedAt: data.endedAt,
workflowId,
blockId: data.blockId,
executionId,
executionId: executionIdRef.current,
blockName: data.blockName || 'Unknown Block',
blockType: data.blockType || 'unknown',
iterationCurrent: data.iterationCurrent,
@@ -383,7 +400,7 @@ export function useWorkflowExecution() {
endedAt: data.endedAt,
workflowId,
blockId: data.blockId,
executionId,
executionId: executionIdRef.current,
blockName: data.blockName || 'Unknown Block',
blockType: data.blockType || 'unknown',
iterationCurrent: data.iterationCurrent,
@@ -410,7 +427,7 @@ export function useWorkflowExecution() {
iterationType: data.iterationType,
iterationContainerId: data.iterationContainerId,
},
executionId
executionIdRef.current
)
}
@@ -432,11 +449,12 @@ export function useWorkflowExecution() {
iterationType: data.iterationType,
iterationContainerId: data.iterationContainerId,
},
executionId
executionIdRef.current
)
}
const onBlockStarted = (data: BlockStartedData) => {
if (isStaleExecution()) return
updateActiveBlocks(data.blockId, true)
markIncomingEdges(data.blockId)
@@ -453,7 +471,7 @@ export function useWorkflowExecution() {
endedAt: undefined,
workflowId,
blockId: data.blockId,
executionId,
executionId: executionIdRef.current,
blockName: data.blockName || 'Unknown Block',
blockType: data.blockType || 'unknown',
isRunning: true,
@@ -465,6 +483,7 @@ export function useWorkflowExecution() {
}
const onBlockCompleted = (data: BlockCompletedData) => {
if (isStaleExecution()) return
updateActiveBlocks(data.blockId, false)
if (workflowId) setBlockRunStatus(workflowId, data.blockId, 'success')
@@ -495,6 +514,7 @@ export function useWorkflowExecution() {
}
const onBlockError = (data: BlockErrorData) => {
if (isStaleExecution()) return
updateActiveBlocks(data.blockId, false)
if (workflowId) setBlockRunStatus(workflowId, data.blockId, 'error')
@@ -902,10 +922,6 @@ export function useWorkflowExecution() {
// Update block logs with actual stream completion times
if (result.logs && streamCompletionTimes.size > 0) {
const streamCompletionEndTime = new Date(
Math.max(...Array.from(streamCompletionTimes.values()))
).toISOString()
result.logs.forEach((log: BlockLog) => {
if (streamCompletionTimes.has(log.blockId)) {
const completionTime = streamCompletionTimes.get(log.blockId)!
@@ -987,7 +1003,6 @@ export function useWorkflowExecution() {
return { success: true, stream }
}
// For manual (non-chat) execution
const manualExecutionId = uuidv4()
try {
const result = await executeWorkflow(
@@ -1002,29 +1017,10 @@ export function useWorkflowExecution() {
if (result.metadata.pendingBlocks) {
setPendingBlocks(activeWorkflowId, result.metadata.pendingBlocks)
}
} else if (result && 'success' in result) {
setExecutionResult(result)
// Reset execution state after successful non-debug execution
setIsExecuting(activeWorkflowId, false)
setIsDebugging(activeWorkflowId, false)
setActiveBlocks(activeWorkflowId, new Set())
if (isChatExecution) {
if (!result.metadata) {
result.metadata = { duration: 0, startTime: new Date().toISOString() }
}
;(result.metadata as any).source = 'chat'
}
// Invalidate subscription queries to update usage
setTimeout(() => {
queryClient.invalidateQueries({ queryKey: subscriptionKeys.all })
}, 1000)
}
return result
} catch (error: any) {
const errorResult = handleExecutionError(error, { executionId: manualExecutionId })
// Note: Error logs are already persisted server-side via execution-core.ts
return errorResult
}
},
@@ -1275,7 +1271,7 @@ export function useWorkflowExecution() {
if (activeWorkflowId) {
logger.info('Using server-side executor')
const executionId = uuidv4()
const executionIdRef = { current: '' }
let executionResult: ExecutionResult = {
success: false,
@@ -1293,7 +1289,7 @@ export function useWorkflowExecution() {
try {
const blockHandlers = buildBlockEventHandlers({
workflowId: activeWorkflowId,
executionId,
executionIdRef,
workflowEdges,
activeBlocksSet,
accumulatedBlockLogs,
@@ -1326,6 +1322,10 @@ export function useWorkflowExecution() {
loops: clientWorkflowState.loops,
parallels: clientWorkflowState.parallels,
},
onExecutionId: (id) => {
executionIdRef.current = id
setCurrentExecutionId(activeWorkflowId, id)
},
callbacks: {
onExecutionStarted: (data) => {
logger.info('Server execution started:', data)
@@ -1368,6 +1368,18 @@ export function useWorkflowExecution() {
},
onExecutionCompleted: (data) => {
if (
activeWorkflowId &&
executionIdRef.current &&
useExecutionStore.getState().getCurrentExecutionId(activeWorkflowId) !==
executionIdRef.current
)
return
if (activeWorkflowId) {
setCurrentExecutionId(activeWorkflowId, null)
}
executionResult = {
success: data.success,
output: data.output,
@@ -1425,9 +1437,33 @@ export function useWorkflowExecution() {
})
}
}
const workflowExecState = activeWorkflowId
? useExecutionStore.getState().getWorkflowExecution(activeWorkflowId)
: null
if (activeWorkflowId && !workflowExecState?.isDebugging) {
setExecutionResult(executionResult)
setIsExecuting(activeWorkflowId, false)
setActiveBlocks(activeWorkflowId, new Set())
setTimeout(() => {
queryClient.invalidateQueries({ queryKey: subscriptionKeys.all })
}, 1000)
}
},
onExecutionError: (data) => {
if (
activeWorkflowId &&
executionIdRef.current &&
useExecutionStore.getState().getCurrentExecutionId(activeWorkflowId) !==
executionIdRef.current
)
return
if (activeWorkflowId) {
setCurrentExecutionId(activeWorkflowId, null)
}
executionResult = {
success: false,
output: {},
@@ -1441,43 +1477,53 @@ export function useWorkflowExecution() {
const isPreExecutionError = accumulatedBlockLogs.length === 0
handleExecutionErrorConsole({
workflowId: activeWorkflowId,
executionId,
executionId: executionIdRef.current,
error: data.error,
durationMs: data.duration,
blockLogs: accumulatedBlockLogs,
isPreExecutionError,
})
if (activeWorkflowId) {
setIsExecuting(activeWorkflowId, false)
setIsDebugging(activeWorkflowId, false)
setActiveBlocks(activeWorkflowId, new Set())
}
},
onExecutionCancelled: (data) => {
if (
activeWorkflowId &&
executionIdRef.current &&
useExecutionStore.getState().getCurrentExecutionId(activeWorkflowId) !==
executionIdRef.current
)
return
if (activeWorkflowId) {
setCurrentExecutionId(activeWorkflowId, null)
}
handleExecutionCancelledConsole({
workflowId: activeWorkflowId,
executionId,
executionId: executionIdRef.current,
durationMs: data?.duration,
})
if (activeWorkflowId) {
setIsExecuting(activeWorkflowId, false)
setIsDebugging(activeWorkflowId, false)
setActiveBlocks(activeWorkflowId, new Set())
}
},
},
})
return executionResult
} catch (error: any) {
// Don't log abort errors - they're intentional user actions
if (error.name === 'AbortError' || error.message?.includes('aborted')) {
logger.info('Execution aborted by user')
// Reset execution state
if (activeWorkflowId) {
setIsExecuting(activeWorkflowId, false)
setActiveBlocks(activeWorkflowId, new Set())
}
// Return gracefully without error
return {
success: false,
output: {},
metadata: { duration: 0 },
logs: [],
}
return executionResult
}
logger.error('Server-side execution failed:', error)
@@ -1485,7 +1531,6 @@ export function useWorkflowExecution() {
}
}
// Fallback: should never reach here
throw new Error('Server-side execution is required')
}
@@ -1717,25 +1762,28 @@ export function useWorkflowExecution() {
* Handles cancelling the current workflow execution
*/
const handleCancelExecution = useCallback(() => {
if (!activeWorkflowId) return
logger.info('Workflow execution cancellation requested')
// Cancel the execution stream for this workflow (server-side)
executionStream.cancel(activeWorkflowId ?? undefined)
const storedExecutionId = getCurrentExecutionId(activeWorkflowId)
// Mark current chat execution as superseded so its cleanup won't affect new executions
currentChatExecutionIdRef.current = null
// Mark all running entries as canceled in the terminal
if (activeWorkflowId) {
cancelRunningEntries(activeWorkflowId)
// Reset execution state - this triggers chat stream cleanup via useEffect in chat.tsx
setIsExecuting(activeWorkflowId, false)
setIsDebugging(activeWorkflowId, false)
setActiveBlocks(activeWorkflowId, new Set())
if (storedExecutionId) {
setCurrentExecutionId(activeWorkflowId, null)
fetch(`/api/workflows/${activeWorkflowId}/executions/${storedExecutionId}/cancel`, {
method: 'POST',
}).catch(() => {})
handleExecutionCancelledConsole({
workflowId: activeWorkflowId,
executionId: storedExecutionId,
})
}
// If in debug mode, also reset debug state
executionStream.cancel(activeWorkflowId)
currentChatExecutionIdRef.current = null
setIsExecuting(activeWorkflowId, false)
setIsDebugging(activeWorkflowId, false)
setActiveBlocks(activeWorkflowId, new Set())
if (isDebugging) {
resetDebugState()
}
@@ -1747,7 +1795,9 @@ export function useWorkflowExecution() {
setIsDebugging,
setActiveBlocks,
activeWorkflowId,
cancelRunningEntries,
getCurrentExecutionId,
setCurrentExecutionId,
handleExecutionCancelledConsole,
])
/**
@@ -1847,7 +1897,7 @@ export function useWorkflowExecution() {
}
setIsExecuting(workflowId, true)
const executionId = uuidv4()
const executionIdRef = { current: '' }
const accumulatedBlockLogs: BlockLog[] = []
const accumulatedBlockStates = new Map<string, BlockState>()
const executedBlockIds = new Set<string>()
@@ -1856,7 +1906,7 @@ export function useWorkflowExecution() {
try {
const blockHandlers = buildBlockEventHandlers({
workflowId,
executionId,
executionIdRef,
workflowEdges,
activeBlocksSet,
accumulatedBlockLogs,
@@ -1871,6 +1921,10 @@ export function useWorkflowExecution() {
startBlockId: blockId,
sourceSnapshot: effectiveSnapshot,
input: workflowInput,
onExecutionId: (id) => {
executionIdRef.current = id
setCurrentExecutionId(workflowId, id)
},
callbacks: {
onBlockStarted: blockHandlers.onBlockStarted,
onBlockCompleted: blockHandlers.onBlockCompleted,
@@ -1878,7 +1932,6 @@ export function useWorkflowExecution() {
onExecutionCompleted: (data) => {
if (data.success) {
// Add the start block (trigger) to executed blocks
executedBlockIds.add(blockId)
const mergedBlockStates: Record<string, BlockState> = {
@@ -1902,6 +1955,10 @@ export function useWorkflowExecution() {
}
setLastExecutionSnapshot(workflowId, updatedSnapshot)
}
setCurrentExecutionId(workflowId, null)
setIsExecuting(workflowId, false)
setActiveBlocks(workflowId, new Set())
},
onExecutionError: (data) => {
@@ -1921,19 +1978,27 @@ export function useWorkflowExecution() {
handleExecutionErrorConsole({
workflowId,
executionId,
executionId: executionIdRef.current,
error: data.error,
durationMs: data.duration,
blockLogs: accumulatedBlockLogs,
})
setCurrentExecutionId(workflowId, null)
setIsExecuting(workflowId, false)
setActiveBlocks(workflowId, new Set())
},
onExecutionCancelled: (data) => {
handleExecutionCancelledConsole({
workflowId,
executionId,
executionId: executionIdRef.current,
durationMs: data?.duration,
})
setCurrentExecutionId(workflowId, null)
setIsExecuting(workflowId, false)
setActiveBlocks(workflowId, new Set())
},
},
})
@@ -1942,14 +2007,20 @@ export function useWorkflowExecution() {
logger.error('Run-from-block failed:', error)
}
} finally {
setIsExecuting(workflowId, false)
setActiveBlocks(workflowId, new Set())
const currentId = getCurrentExecutionId(workflowId)
if (currentId === null || currentId === executionIdRef.current) {
setCurrentExecutionId(workflowId, null)
setIsExecuting(workflowId, false)
setActiveBlocks(workflowId, new Set())
}
}
},
[
getLastExecutionSnapshot,
setLastExecutionSnapshot,
clearLastExecutionSnapshot,
getCurrentExecutionId,
setCurrentExecutionId,
setIsExecuting,
setActiveBlocks,
setBlockRunStatus,
@@ -1979,29 +2050,213 @@ export function useWorkflowExecution() {
const executionId = uuidv4()
try {
const result = await executeWorkflow(
undefined,
undefined,
executionId,
undefined,
'manual',
blockId
)
if (result && 'success' in result) {
setExecutionResult(result)
}
await executeWorkflow(undefined, undefined, executionId, undefined, 'manual', blockId)
} catch (error) {
const errorResult = handleExecutionError(error, { executionId })
return errorResult
} finally {
setCurrentExecutionId(workflowId, null)
setIsExecuting(workflowId, false)
setIsDebugging(workflowId, false)
setActiveBlocks(workflowId, new Set())
}
},
[activeWorkflowId, setExecutionResult, setIsExecuting, setIsDebugging, setActiveBlocks]
[
activeWorkflowId,
setCurrentExecutionId,
setExecutionResult,
setIsExecuting,
setIsDebugging,
setActiveBlocks,
]
)
useEffect(() => {
if (!activeWorkflowId || !hasHydrated) return
const entries = useTerminalConsoleStore.getState().entries
const runningEntries = entries.filter(
(e) => e.isRunning && e.workflowId === activeWorkflowId && e.executionId
)
if (runningEntries.length === 0) return
if (activeReconnections.has(activeWorkflowId)) return
activeReconnections.add(activeWorkflowId)
executionStream.cancel(activeWorkflowId)
const sorted = [...runningEntries].sort((a, b) => {
const aTime = a.startedAt ? new Date(a.startedAt).getTime() : 0
const bTime = b.startedAt ? new Date(b.startedAt).getTime() : 0
return bTime - aTime
})
const executionId = sorted[0].executionId!
const otherExecutionIds = new Set(
sorted.filter((e) => e.executionId !== executionId).map((e) => e.executionId!)
)
if (otherExecutionIds.size > 0) {
cancelRunningEntries(activeWorkflowId)
}
setCurrentExecutionId(activeWorkflowId, executionId)
setIsExecuting(activeWorkflowId, true)
const workflowEdges = useWorkflowStore.getState().edges
const activeBlocksSet = new Set<string>()
const accumulatedBlockLogs: BlockLog[] = []
const accumulatedBlockStates = new Map<string, BlockState>()
const executedBlockIds = new Set<string>()
const executionIdRef = { current: executionId }
const handlers = buildBlockEventHandlers({
workflowId: activeWorkflowId,
executionIdRef,
workflowEdges,
activeBlocksSet,
accumulatedBlockLogs,
accumulatedBlockStates,
executedBlockIds,
consoleMode: 'update',
includeStartConsoleEntry: true,
})
const originalEntries = entries
.filter((e) => e.executionId === executionId)
.map((e) => ({ ...e }))
let cleared = false
let reconnectionComplete = false
let cleanupRan = false
const clearOnce = () => {
if (!cleared) {
cleared = true
clearExecutionEntries(executionId)
}
}
const reconnectWorkflowId = activeWorkflowId
executionStream
.reconnect({
workflowId: reconnectWorkflowId,
executionId,
callbacks: {
onBlockStarted: (data) => {
clearOnce()
handlers.onBlockStarted(data)
},
onBlockCompleted: (data) => {
clearOnce()
handlers.onBlockCompleted(data)
},
onBlockError: (data) => {
clearOnce()
handlers.onBlockError(data)
},
onExecutionCompleted: () => {
const currentId = useExecutionStore
.getState()
.getCurrentExecutionId(reconnectWorkflowId)
if (currentId !== executionId) {
reconnectionComplete = true
activeReconnections.delete(reconnectWorkflowId)
return
}
clearOnce()
reconnectionComplete = true
activeReconnections.delete(reconnectWorkflowId)
setCurrentExecutionId(reconnectWorkflowId, null)
setIsExecuting(reconnectWorkflowId, false)
setActiveBlocks(reconnectWorkflowId, new Set())
},
onExecutionError: (data) => {
const currentId = useExecutionStore
.getState()
.getCurrentExecutionId(reconnectWorkflowId)
if (currentId !== executionId) {
reconnectionComplete = true
activeReconnections.delete(reconnectWorkflowId)
return
}
clearOnce()
reconnectionComplete = true
activeReconnections.delete(reconnectWorkflowId)
setCurrentExecutionId(reconnectWorkflowId, null)
setIsExecuting(reconnectWorkflowId, false)
setActiveBlocks(reconnectWorkflowId, new Set())
handleExecutionErrorConsole({
workflowId: reconnectWorkflowId,
executionId,
error: data.error,
blockLogs: accumulatedBlockLogs,
})
},
onExecutionCancelled: () => {
const currentId = useExecutionStore
.getState()
.getCurrentExecutionId(reconnectWorkflowId)
if (currentId !== executionId) {
reconnectionComplete = true
activeReconnections.delete(reconnectWorkflowId)
return
}
clearOnce()
reconnectionComplete = true
activeReconnections.delete(reconnectWorkflowId)
setCurrentExecutionId(reconnectWorkflowId, null)
setIsExecuting(reconnectWorkflowId, false)
setActiveBlocks(reconnectWorkflowId, new Set())
handleExecutionCancelledConsole({
workflowId: reconnectWorkflowId,
executionId,
})
},
},
})
.catch((error) => {
logger.warn('Execution reconnection failed', { executionId, error })
})
.finally(() => {
if (reconnectionComplete || cleanupRan) return
const currentId = useExecutionStore.getState().getCurrentExecutionId(reconnectWorkflowId)
if (currentId !== executionId) return
reconnectionComplete = true
activeReconnections.delete(reconnectWorkflowId)
clearExecutionEntries(executionId)
for (const entry of originalEntries) {
addConsole({
workflowId: entry.workflowId,
blockId: entry.blockId,
blockName: entry.blockName,
blockType: entry.blockType,
executionId: entry.executionId,
executionOrder: entry.executionOrder,
isRunning: false,
warning: 'Execution result unavailable — check the logs page',
})
}
setCurrentExecutionId(reconnectWorkflowId, null)
setIsExecuting(reconnectWorkflowId, false)
setActiveBlocks(reconnectWorkflowId, new Set())
})
return () => {
cleanupRan = true
executionStream.cancel(reconnectWorkflowId)
activeReconnections.delete(reconnectWorkflowId)
if (cleared && !reconnectionComplete) {
clearExecutionEntries(executionId)
for (const entry of originalEntries) {
addConsole(entry)
}
}
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [activeWorkflowId, hasHydrated])
return {
isExecuting,
isDebugging,

View File
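A minimal sketch of the stale-execution guard this file now relies on; `ExecutionStoreLike` and `makeStaleGuard` are illustrative names rather than exports from the diff, and the Map-backed store stands in for useExecutionStore:

type ExecutionIdRef = { current: string }

interface ExecutionStoreLike {
  getCurrentExecutionId(workflowId: string): string | null
}

// Returns a predicate that is true once the store points at a different run.
// The ref starts empty and is filled by the onExecutionId callback when the
// server assigns an id, so events arriving before then are never dropped.
function makeStaleGuard(store: ExecutionStoreLike, workflowId: string, ref: ExecutionIdRef) {
  return () => ref.current !== '' && store.getCurrentExecutionId(workflowId) !== ref.current
}

const currentIds = new Map<string, string | null>()
const store: ExecutionStoreLike = {
  getCurrentExecutionId: (wf) => currentIds.get(wf) ?? null,
}
const ref: ExecutionIdRef = { current: '' }
const isStale = makeStaleGuard(store, 'wf-1', ref)

ref.current = 'exec-1'
currentIds.set('wf-1', 'exec-1')
console.log(isStale()) // false: this run is still current
currentIds.set('wf-1', 'exec-2') // a newer run supersedes exec-1
console.log(isStale()) // true: exec-1 callbacks now no-op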

@@ -1,3 +1,4 @@
export { CancelSubscription } from './cancel-subscription'
export { CreditBalance } from './credit-balance'
export { PlanCard, type PlanCardProps, type PlanFeature } from './plan-card'
export { ReferralCode } from './referral-code'

View File

@@ -0,0 +1,101 @@
'use client'
import { useState } from 'react'
import { createLogger } from '@sim/logger'
import { Button, Input, Label } from '@/components/emcn'
const logger = createLogger('ReferralCode')
interface ReferralCodeProps {
onRedeemComplete?: () => void
}
/**
* Inline referral/promo code entry field with a redeem button.
* Codes are one-time use per account; the field shows a success state after
* redemption and surfaces server errors such as an already-redeemed code.
*/
export function ReferralCode({ onRedeemComplete }: ReferralCodeProps) {
const [code, setCode] = useState('')
const [isRedeeming, setIsRedeeming] = useState(false)
const [error, setError] = useState<string | null>(null)
const [success, setSuccess] = useState<{ bonusAmount: number } | null>(null)
const handleRedeem = async () => {
const trimmed = code.trim()
if (!trimmed || isRedeeming) return
setIsRedeeming(true)
setError(null)
try {
const response = await fetch('/api/referral-code/redeem', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ code: trimmed }),
})
const data = await response.json()
if (!response.ok) {
throw new Error(data.error || 'Failed to redeem code')
}
if (data.redeemed) {
setSuccess({ bonusAmount: data.bonusAmount })
setCode('')
onRedeemComplete?.()
} else {
setError(data.error || 'Code could not be redeemed')
}
} catch (err) {
logger.error('Referral code redemption failed', { error: err })
setError(err instanceof Error ? err.message : 'Failed to redeem code')
} finally {
setIsRedeeming(false)
}
}
if (success) {
return (
<div className='flex items-center justify-between'>
<Label>Referral Code</Label>
<span className='text-[12px] text-[var(--text-secondary)]'>
+${success.bonusAmount} credits applied
</span>
</div>
)
}
return (
<div className='flex items-center justify-between gap-[12px]'>
<Label className='shrink-0'>Referral Code</Label>
<div className='flex items-center gap-[8px]'>
<div className='flex flex-col'>
<Input
type='text'
value={code}
onChange={(e) => {
setCode(e.target.value)
setError(null)
}}
onKeyDown={(e) => {
if (e.key === 'Enter') handleRedeem()
}}
placeholder='Enter code'
className='h-[32px] w-[140px] text-[12px]'
disabled={isRedeeming}
/>
{error && <span className='mt-[4px] text-[11px] text-[var(--text-error)]'>{error}</span>}
</div>
<Button
variant='active'
className='h-[32px] shrink-0 rounded-[6px] text-[12px]'
onClick={handleRedeem}
disabled={isRedeeming || !code.trim()}
>
{isRedeeming ? 'Redeeming...' : 'Redeem'}
</Button>
</div>
</div>
)
}

View File

@@ -17,6 +17,7 @@ import {
CancelSubscription,
CreditBalance,
PlanCard,
ReferralCode,
} from '@/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/components/subscription/components'
import {
ENTERPRISE_PLAN_FEATURES,
@@ -549,6 +550,10 @@ export function Subscription() {
/>
)}
{!subscription.isEnterprise && (
<ReferralCode onRedeemComplete={() => refetchSubscription()} />
)}
{/* Next Billing Date - hidden from team members */}
{subscription.isPaid &&
subscriptionData?.data?.periodEnd &&

View File

@@ -4,12 +4,14 @@ import { useEffect } from 'react'
import { createLogger } from '@sim/logger'
import { useRouter } from 'next/navigation'
import { useSession } from '@/lib/auth/auth-client'
import { useReferralAttribution } from '@/hooks/use-referral-attribution'
const logger = createLogger('WorkspacePage')
export default function WorkspacePage() {
const router = useRouter()
const { data: session, isPending } = useSession()
useReferralAttribution()
useEffect(() => {
const redirectToFirstWorkspace = async () => {

View File

@@ -589,6 +589,7 @@ export async function executeScheduleJob(payload: ScheduleExecutionPayload) {
export const scheduleExecution = task({
id: 'schedule-execution',
machine: 'medium-1x',
retry: {
maxAttempts: 1,
},

View File

@@ -669,6 +669,7 @@ async function executeWebhookJobInternal(
export const webhookExecution = task({
id: 'webhook-execution',
machine: 'medium-1x',
retry: {
maxAttempts: 1,
},

View File

@@ -197,5 +197,6 @@ export async function executeWorkflowJob(payload: WorkflowExecutionPayload) {
export const workflowExecutionTask = task({
id: 'workflow-execution',
machine: 'medium-1x',
run: executeWorkflowJob,
})

View File

@@ -394,6 +394,7 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
// Page Property Operations
{ label: 'List Page Properties', id: 'list_page_properties' },
{ label: 'Create Page Property', id: 'create_page_property' },
{ label: 'Delete Page Property', id: 'delete_page_property' },
// Search Operations
{ label: 'Search Content', id: 'search' },
{ label: 'Search in Space', id: 'search_in_space' },
@@ -414,6 +415,9 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
// Label Operations
{ label: 'List Labels', id: 'list_labels' },
{ label: 'Add Label', id: 'add_label' },
{ label: 'Delete Label', id: 'delete_label' },
{ label: 'Get Pages by Label', id: 'get_pages_by_label' },
{ label: 'List Space Labels', id: 'list_space_labels' },
// Space Operations
{ label: 'Get Space', id: 'get_space' },
{ label: 'List Spaces', id: 'list_spaces' },
@@ -485,6 +489,8 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
'search_in_space',
'get_space',
'list_spaces',
'get_pages_by_label',
'list_space_labels',
],
not: true,
},
@@ -500,6 +506,8 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
'list_labels',
'upload_attachment',
'add_label',
'delete_label',
'delete_page_property',
'get_page_children',
'get_page_ancestors',
'list_page_versions',
@@ -527,6 +535,8 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
'search_in_space',
'get_space',
'list_spaces',
'get_pages_by_label',
'list_space_labels',
],
not: true,
},
@@ -542,6 +552,8 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
'list_labels',
'upload_attachment',
'add_label',
'delete_label',
'delete_page_property',
'get_page_children',
'get_page_ancestors',
'list_page_versions',
@@ -566,6 +578,7 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
'search_in_space',
'create_blogpost',
'list_blogposts_in_space',
'list_space_labels',
],
},
},
@@ -601,6 +614,14 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
required: true,
condition: { field: 'operation', value: 'create_page_property' },
},
{
id: 'propertyId',
title: 'Property ID',
type: 'short-input',
placeholder: 'Enter property ID to delete',
required: true,
condition: { field: 'operation', value: 'delete_page_property' },
},
{
id: 'title',
title: 'Title',
@@ -694,7 +715,7 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
type: 'short-input',
placeholder: 'Enter label name',
required: true,
condition: { field: 'operation', value: 'add_label' },
condition: { field: 'operation', value: ['add_label', 'delete_label'] },
},
{
id: 'labelPrefix',
@@ -709,6 +730,14 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
value: () => 'global',
condition: { field: 'operation', value: 'add_label' },
},
{
id: 'labelId',
title: 'Label ID',
type: 'short-input',
placeholder: 'Enter label ID',
required: true,
condition: { field: 'operation', value: 'get_pages_by_label' },
},
{
id: 'blogPostStatus',
title: 'Status',
@@ -759,6 +788,8 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
'list_page_versions',
'list_page_properties',
'list_labels',
'get_pages_by_label',
'list_space_labels',
],
},
},
@@ -780,6 +811,8 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
'list_page_versions',
'list_page_properties',
'list_labels',
'get_pages_by_label',
'list_space_labels',
],
},
},
@@ -800,6 +833,7 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
// Property Tools
'confluence_list_page_properties',
'confluence_create_page_property',
'confluence_delete_page_property',
// Search Tools
'confluence_search',
'confluence_search_in_space',
@@ -820,6 +854,9 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
// Label Tools
'confluence_list_labels',
'confluence_add_label',
'confluence_delete_label',
'confluence_get_pages_by_label',
'confluence_list_space_labels',
// Space Tools
'confluence_get_space',
'confluence_list_spaces',
@@ -852,6 +889,8 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
return 'confluence_list_page_properties'
case 'create_page_property':
return 'confluence_create_page_property'
case 'delete_page_property':
return 'confluence_delete_page_property'
// Search Operations
case 'search':
return 'confluence_search'
@@ -887,6 +926,12 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
return 'confluence_list_labels'
case 'add_label':
return 'confluence_add_label'
case 'delete_label':
return 'confluence_delete_label'
case 'get_pages_by_label':
return 'confluence_get_pages_by_label'
case 'list_space_labels':
return 'confluence_list_space_labels'
// Space Operations
case 'get_space':
return 'confluence_get_space'
@@ -908,7 +953,9 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
versionNumber,
propertyKey,
propertyValue,
propertyId,
labelPrefix,
labelId,
blogPostStatus,
purge,
bodyFormat,
@@ -959,7 +1006,9 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
}
}
// Operations that support cursor pagination
// Operations that support generic cursor pagination.
// get_pages_by_label and list_space_labels have dedicated handlers
// below that pass cursor along with their required params (labelId, spaceId).
const supportsCursor = [
'list_attachments',
'list_spaces',
@@ -996,6 +1045,35 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
}
}
if (operation === 'delete_page_property') {
return {
credential,
pageId: effectivePageId,
operation,
propertyId,
...rest,
}
}
if (operation === 'get_pages_by_label') {
return {
credential,
operation,
labelId,
cursor: cursor || undefined,
...rest,
}
}
if (operation === 'list_space_labels') {
return {
credential,
operation,
cursor: cursor || undefined,
...rest,
}
}
if (operation === 'upload_attachment') {
const normalizedFile = normalizeFileInput(attachmentFile, { single: true })
if (!normalizedFile) {
@@ -1044,7 +1122,9 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
attachmentFileName: { type: 'string', description: 'Custom file name for attachment' },
attachmentComment: { type: 'string', description: 'Comment for the attachment' },
labelName: { type: 'string', description: 'Label name' },
labelId: { type: 'string', description: 'Label identifier' },
labelPrefix: { type: 'string', description: 'Label prefix (global, my, team, system)' },
propertyId: { type: 'string', description: 'Property identifier' },
blogPostStatus: { type: 'string', description: 'Blog post status (current or draft)' },
purge: { type: 'boolean', description: 'Permanently delete instead of moving to trash' },
bodyFormat: { type: 'string', description: 'Body format for comments' },
@@ -1080,6 +1160,7 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
// Label Results
labels: { type: 'array', description: 'List of labels' },
labelName: { type: 'string', description: 'Label name' },
labelId: { type: 'string', description: 'Label identifier' },
// Space Results
spaces: { type: 'array', description: 'List of spaces' },
spaceId: { type: 'string', description: 'Space identifier' },

View File

@@ -423,7 +423,7 @@ interface GenerateVersionDescriptionVariables {
const VERSION_DESCRIPTION_SYSTEM_PROMPT = `You are writing deployment version descriptions for a workflow automation platform.
Write a brief, factual description (1-3 sentences, under 400 characters) that states what changed between versions.
Write a brief, factual description (1-3 sentences, under 2000 characters) that states what changed between versions.
Guidelines:
- Use the specific values provided (credential names, channel names, model names)

View File

@@ -1,4 +1,4 @@
import { useCallback, useRef } from 'react'
import { useCallback } from 'react'
import { createLogger } from '@sim/logger'
import type {
BlockCompletedData,
@@ -16,6 +16,18 @@ import type { SerializableExecutionState } from '@/executor/execution/types'
const logger = createLogger('useExecutionStream')
/**
* Detects errors caused by the browser killing a fetch (page refresh, navigation, tab close).
* These should be treated as clean disconnects, not execution errors.
*/
function isClientDisconnectError(error: any): boolean {
if (error?.name === 'AbortError') return true
const msg = (error?.message ?? '').toLowerCase()
return (
msg.includes('network error') || msg.includes('failed to fetch') || msg.includes('load failed')
)
}
/**
* Processes SSE events from a response body and invokes appropriate callbacks.
*/
@@ -121,6 +133,7 @@ export interface ExecuteStreamOptions {
parallels?: Record<string, any>
}
stopAfterBlockId?: string
onExecutionId?: (executionId: string) => void
callbacks?: ExecutionStreamCallbacks
}
@@ -129,30 +142,40 @@ export interface ExecuteFromBlockOptions {
startBlockId: string
sourceSnapshot: SerializableExecutionState
input?: any
onExecutionId?: (executionId: string) => void
callbacks?: ExecutionStreamCallbacks
}
export interface ReconnectStreamOptions {
workflowId: string
executionId: string
fromEventId?: number
callbacks?: ExecutionStreamCallbacks
}
/**
* Module-level map shared across all hook instances.
* Ensures ANY instance can cancel streams started by ANY other instance,
* which is critical for SPA navigation where the original hook instance unmounts
* but the SSE stream must be cancellable from the new instance.
*/
const sharedAbortControllers = new Map<string, AbortController>()
/**
* Hook for executing workflows via server-side SSE streaming.
* Supports concurrent executions via per-workflow AbortController maps.
*/
export function useExecutionStream() {
const abortControllersRef = useRef<Map<string, AbortController>>(new Map())
const currentExecutionsRef = useRef<Map<string, { workflowId: string; executionId: string }>>(
new Map()
)
const execute = useCallback(async (options: ExecuteStreamOptions) => {
const { workflowId, callbacks = {}, ...payload } = options
const { workflowId, callbacks = {}, onExecutionId, ...payload } = options
const existing = abortControllersRef.current.get(workflowId)
const existing = sharedAbortControllers.get(workflowId)
if (existing) {
existing.abort()
}
const abortController = new AbortController()
abortControllersRef.current.set(workflowId, abortController)
currentExecutionsRef.current.delete(workflowId)
sharedAbortControllers.set(workflowId, abortController)
try {
const response = await fetch(`/api/workflows/${workflowId}/execute`, {
@@ -177,42 +200,48 @@ export function useExecutionStream() {
throw new Error('No response body')
}
const executionId = response.headers.get('X-Execution-Id')
if (executionId) {
currentExecutionsRef.current.set(workflowId, { workflowId, executionId })
const serverExecutionId = response.headers.get('X-Execution-Id')
if (serverExecutionId) {
onExecutionId?.(serverExecutionId)
}
const reader = response.body.getReader()
await processSSEStream(reader, callbacks, 'Execution')
} catch (error: any) {
if (error.name === 'AbortError') {
logger.info('Execution stream cancelled')
callbacks.onExecutionCancelled?.({ duration: 0 })
} else {
logger.error('Execution stream error:', error)
callbacks.onExecutionError?.({
error: error.message || 'Unknown error',
duration: 0,
})
if (isClientDisconnectError(error)) {
logger.info('Execution stream disconnected (page unload or abort)')
return
}
logger.error('Execution stream error:', error)
callbacks.onExecutionError?.({
error: error.message || 'Unknown error',
duration: 0,
})
throw error
} finally {
abortControllersRef.current.delete(workflowId)
currentExecutionsRef.current.delete(workflowId)
if (sharedAbortControllers.get(workflowId) === abortController) {
sharedAbortControllers.delete(workflowId)
}
}
}, [])
const executeFromBlock = useCallback(async (options: ExecuteFromBlockOptions) => {
const { workflowId, startBlockId, sourceSnapshot, input, callbacks = {} } = options
const {
workflowId,
startBlockId,
sourceSnapshot,
input,
onExecutionId,
callbacks = {},
} = options
const existing = abortControllersRef.current.get(workflowId)
const existing = sharedAbortControllers.get(workflowId)
if (existing) {
existing.abort()
}
const abortController = new AbortController()
abortControllersRef.current.set(workflowId, abortController)
currentExecutionsRef.current.delete(workflowId)
sharedAbortControllers.set(workflowId, abortController)
try {
const response = await fetch(`/api/workflows/${workflowId}/execute`, {
@@ -246,64 +275,80 @@ export function useExecutionStream() {
throw new Error('No response body')
}
const executionId = response.headers.get('X-Execution-Id')
if (executionId) {
currentExecutionsRef.current.set(workflowId, { workflowId, executionId })
const serverExecutionId = response.headers.get('X-Execution-Id')
if (serverExecutionId) {
onExecutionId?.(serverExecutionId)
}
const reader = response.body.getReader()
await processSSEStream(reader, callbacks, 'Run-from-block')
} catch (error: any) {
if (error.name === 'AbortError') {
logger.info('Run-from-block execution cancelled')
callbacks.onExecutionCancelled?.({ duration: 0 })
} else {
logger.error('Run-from-block execution error:', error)
callbacks.onExecutionError?.({
error: error.message || 'Unknown error',
duration: 0,
})
if (isClientDisconnectError(error)) {
logger.info('Run-from-block stream disconnected (page unload or abort)')
return
}
logger.error('Run-from-block execution error:', error)
callbacks.onExecutionError?.({
error: error.message || 'Unknown error',
duration: 0,
})
throw error
} finally {
abortControllersRef.current.delete(workflowId)
currentExecutionsRef.current.delete(workflowId)
if (sharedAbortControllers.get(workflowId) === abortController) {
sharedAbortControllers.delete(workflowId)
}
}
}, [])
const reconnect = useCallback(async (options: ReconnectStreamOptions) => {
const { workflowId, executionId, fromEventId = 0, callbacks = {} } = options
const existing = sharedAbortControllers.get(workflowId)
if (existing) {
existing.abort()
}
const abortController = new AbortController()
sharedAbortControllers.set(workflowId, abortController)
try {
const response = await fetch(
`/api/workflows/${workflowId}/executions/${executionId}/stream?from=${fromEventId}`,
{ signal: abortController.signal }
)
if (!response.ok) throw new Error(`Reconnect failed (${response.status})`)
if (!response.body) throw new Error('No response body')
await processSSEStream(response.body.getReader(), callbacks, 'Reconnect')
} catch (error: any) {
if (isClientDisconnectError(error)) return
logger.error('Reconnection stream error:', error)
throw error
} finally {
if (sharedAbortControllers.get(workflowId) === abortController) {
sharedAbortControllers.delete(workflowId)
}
}
}, [])
const cancel = useCallback((workflowId?: string) => {
if (workflowId) {
const execution = currentExecutionsRef.current.get(workflowId)
if (execution) {
fetch(`/api/workflows/${execution.workflowId}/executions/${execution.executionId}/cancel`, {
method: 'POST',
}).catch(() => {})
}
const controller = abortControllersRef.current.get(workflowId)
const controller = sharedAbortControllers.get(workflowId)
if (controller) {
controller.abort()
abortControllersRef.current.delete(workflowId)
sharedAbortControllers.delete(workflowId)
}
currentExecutionsRef.current.delete(workflowId)
} else {
for (const [, execution] of currentExecutionsRef.current) {
fetch(`/api/workflows/${execution.workflowId}/executions/${execution.executionId}/cancel`, {
method: 'POST',
}).catch(() => {})
}
for (const [, controller] of abortControllersRef.current) {
for (const [, controller] of sharedAbortControllers) {
controller.abort()
}
abortControllersRef.current.clear()
currentExecutionsRef.current.clear()
sharedAbortControllers.clear()
}
}, [])
return {
execute,
executeFromBlock,
reconnect,
cancel,
}
}

View File
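A sketch of how a component might resume a stream with the new reconnect API after a page refresh; the ids and effect wiring are hypothetical, but the option and callback names match ReconnectStreamOptions above:

import { useEffect } from 'react'
import { useExecutionStream } from '@/hooks/use-execution-stream' // path assumed

function ReconnectOnMount({ workflowId, executionId }: { workflowId: string; executionId: string }) {
  const { reconnect, cancel } = useExecutionStream()

  useEffect(() => {
    // fromEventId: 0 replays the buffered history from the first event.
    reconnect({
      workflowId,
      executionId,
      fromEventId: 0,
      callbacks: {
        onBlockCompleted: (data) => console.log('block done', data.blockId),
        onExecutionCompleted: (data) => console.log('finished', data.success),
        onExecutionError: (data) => console.error(data.error),
      },
    }).catch(() => {}) // client disconnects are already swallowed internally
    // Any hook instance can cancel via the shared controller map.
    return () => cancel(workflowId)
  }, [workflowId, executionId])

  return null
}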

@@ -0,0 +1,46 @@
'use client'
import { useEffect, useRef } from 'react'
import { createLogger } from '@sim/logger'
const logger = createLogger('ReferralAttribution')
const COOKIE_NAME = 'sim_utm'
const TERMINAL_REASONS = new Set([
'account_predates_cookie',
'invalid_cookie',
'no_utm_cookie',
'no_matching_campaign',
])
/**
* Fires a one-shot `POST /api/attribution` when a `sim_utm` cookie is present.
* Retries on transient failures; stops on terminal outcomes.
*/
export function useReferralAttribution() {
const calledRef = useRef(false)
useEffect(() => {
if (calledRef.current) return
if (!document.cookie.includes(COOKIE_NAME)) return
calledRef.current = true
fetch('/api/attribution', { method: 'POST' })
.then((res) => res.json())
.then((data) => {
if (data.attributed) {
logger.info('Referral attribution successful', { bonusAmount: data.bonusAmount })
} else if (data.error || TERMINAL_REASONS.has(data.reason)) {
logger.info('Referral attribution skipped', { reason: data.reason || data.error })
} else {
calledRef.current = false
}
})
.catch((err) => {
logger.warn('Referral attribution failed, will retry', { error: err })
calledRef.current = false
})
}, [])
}

View File

@@ -0,0 +1,64 @@
import { db } from '@sim/db'
import { organization, userStats } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq, sql } from 'drizzle-orm'
import { getHighestPrioritySubscription } from '@/lib/billing/core/subscription'
import type { DbOrTx } from '@/lib/db/types'
const logger = createLogger('BonusCredits')
/**
* Apply bonus credits to a user (e.g. referral bonuses, promotional codes).
*
* Detects the user's current plan and routes credits accordingly:
* - Free/Pro: adds to `userStats.creditBalance` and increments `currentUsageLimit`
* - Team/Enterprise: adds to `organization.creditBalance` and increments `orgUsageLimit`
*
* Uses direct increment (not recalculation) so it works correctly for free-tier
* users where `setUsageLimitForCredits` would compute planBase=0 and skip the update.
*
* @param tx - Optional Drizzle transaction context. When provided, all DB writes
* participate in the caller's transaction for atomicity.
*/
export async function applyBonusCredits(
userId: string,
amount: number,
tx?: DbOrTx
): Promise<void> {
const dbCtx = tx ?? db
const subscription = await getHighestPrioritySubscription(userId)
const isTeamOrEnterprise = subscription?.plan === 'team' || subscription?.plan === 'enterprise'
if (isTeamOrEnterprise && subscription?.referenceId) {
const orgId = subscription.referenceId
await dbCtx
.update(organization)
.set({
creditBalance: sql`${organization.creditBalance} + ${amount}`,
orgUsageLimit: sql`COALESCE(${organization.orgUsageLimit}, '0')::decimal + ${amount}`,
})
.where(eq(organization.id, orgId))
logger.info('Applied bonus credits to organization', {
userId,
organizationId: orgId,
plan: subscription.plan,
amount,
})
} else {
await dbCtx
.update(userStats)
.set({
creditBalance: sql`${userStats.creditBalance} + ${amount}`,
currentUsageLimit: sql`COALESCE(${userStats.currentUsageLimit}, '0')::decimal + ${amount}`,
})
.where(eq(userStats.userId, userId))
logger.info('Applied bonus credits to user', {
userId,
plan: subscription?.plan || 'free',
amount,
})
}
}

View File
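A sketch of the transactional path the optional tx parameter enables, assuming Drizzle's standard db.transaction API; markCodeRedeemed is an invented helper that only illustrates why a caller would want both writes to commit or roll back together:

import { db } from '@sim/db'
import { applyBonusCredits } from '@/lib/billing/core/bonus-credits' // path assumed

declare function markCodeRedeemed(tx: unknown, userId: string, code: string): Promise<void> // illustrative

async function redeemWithBonus(userId: string, code: string) {
  await db.transaction(async (tx) => {
    // Hypothetical: persist the redemption first so a failed credit write
    // rolls the redemption back with it.
    await markCodeRedeemed(tx, userId, code)
    // Same transaction context, so the balance bump is atomic with the redeem.
    await applyBonusCredits(userId, 20, tx)
  })
}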

@@ -20,6 +20,8 @@ export interface BuildPayloadParams {
fileAttachments?: Array<{ id: string; key: string; size: number; [key: string]: unknown }>
commands?: string[]
chatId?: string
conversationId?: string
prefetch?: boolean
implicitFeedback?: string
}
@@ -64,6 +66,10 @@ export async function buildCopilotRequestPayload(
fileAttachments,
commands,
chatId,
conversationId,
prefetch,
conversationHistory,
implicitFeedback,
} = params
const selectedModel = options.selectedModel
@@ -154,6 +160,12 @@ export async function buildCopilotRequestPayload(
version: SIM_AGENT_VERSION,
...(contexts && contexts.length > 0 ? { context: contexts } : {}),
...(chatId ? { chatId } : {}),
...(conversationId ? { conversationId } : {}),
...(Array.isArray(conversationHistory) && conversationHistory.length > 0
? { conversationHistory }
: {}),
...(typeof prefetch === 'boolean' ? { prefetch } : {}),
...(implicitFeedback ? { implicitFeedback } : {}),
...(processedFileContents.length > 0 ? { fileAttachments: processedFileContents } : {}),
...(integrationTools.length > 0 ? { integrationTools } : {}),
...(credentials ? { credentials } : {}),

View File

@@ -1,7 +1,7 @@
import { db } from '@sim/db'
import { workflow } from '@sim/db/schema'
import { customTools, workflow } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { and, desc, eq, isNull, or } from 'drizzle-orm'
import { SIM_AGENT_API_URL } from '@/lib/copilot/constants'
import type {
ExecutionContext,
@@ -12,6 +12,7 @@ import { routeExecution } from '@/lib/copilot/tools/server/router'
import { env } from '@/lib/core/config/env'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { getEffectiveDecryptedEnv } from '@/lib/environment/utils'
import { upsertCustomTools } from '@/lib/workflows/custom-tools/operations'
import { getTool, resolveToolId } from '@/tools/utils'
import {
executeCheckDeploymentStatus,
@@ -76,6 +77,247 @@ import {
const logger = createLogger('CopilotToolExecutor')
type ManageCustomToolOperation = 'add' | 'edit' | 'delete' | 'list'
interface ManageCustomToolSchema {
type: 'function'
function: {
name: string
description?: string
parameters: Record<string, unknown>
}
}
interface ManageCustomToolParams {
operation?: string
toolId?: string
schema?: ManageCustomToolSchema
code?: string
title?: string
workspaceId?: string
}
async function executeManageCustomTool(
rawParams: Record<string, unknown>,
context: ExecutionContext
): Promise<ToolCallResult> {
const params = rawParams as ManageCustomToolParams
const operation = String(params.operation || '').toLowerCase() as ManageCustomToolOperation
const workspaceId = params.workspaceId || context.workspaceId
if (!operation) {
return { success: false, error: "Missing required 'operation' argument" }
}
try {
if (operation === 'list') {
const toolsForUser = workspaceId
? await db
.select()
.from(customTools)
.where(
or(
eq(customTools.workspaceId, workspaceId),
and(isNull(customTools.workspaceId), eq(customTools.userId, context.userId))
)
)
.orderBy(desc(customTools.createdAt))
: await db
.select()
.from(customTools)
.where(and(isNull(customTools.workspaceId), eq(customTools.userId, context.userId)))
.orderBy(desc(customTools.createdAt))
return {
success: true,
output: {
success: true,
operation,
tools: toolsForUser,
count: toolsForUser.length,
},
}
}
if (operation === 'add') {
if (!workspaceId) {
return {
success: false,
error: "workspaceId is required for operation 'add'",
}
}
if (!params.schema || !params.code) {
return {
success: false,
error: "Both 'schema' and 'code' are required for operation 'add'",
}
}
const title = params.title || params.schema.function?.name
if (!title) {
return { success: false, error: "Missing tool title or schema.function.name for 'add'" }
}
const resultTools = await upsertCustomTools({
tools: [
{
title,
schema: params.schema,
code: params.code,
},
],
workspaceId,
userId: context.userId,
})
const created = resultTools.find((tool) => tool.title === title)
return {
success: true,
output: {
success: true,
operation,
toolId: created?.id,
title,
message: `Created custom tool "${title}"`,
},
}
}
if (operation === 'edit') {
if (!workspaceId) {
return {
success: false,
error: "workspaceId is required for operation 'edit'",
}
}
if (!params.toolId) {
return { success: false, error: "'toolId' is required for operation 'edit'" }
}
if (!params.schema && !params.code) {
return {
success: false,
error: "At least one of 'schema' or 'code' is required for operation 'edit'",
}
}
const workspaceTool = await db
.select()
.from(customTools)
.where(and(eq(customTools.id, params.toolId), eq(customTools.workspaceId, workspaceId)))
.limit(1)
const legacyTool =
workspaceTool.length === 0
? await db
.select()
.from(customTools)
.where(
and(
eq(customTools.id, params.toolId),
isNull(customTools.workspaceId),
eq(customTools.userId, context.userId)
)
)
.limit(1)
: []
const existing = workspaceTool[0] || legacyTool[0]
if (!existing) {
return { success: false, error: `Custom tool not found: ${params.toolId}` }
}
const mergedSchema = params.schema || (existing.schema as ManageCustomToolSchema)
const mergedCode = params.code || existing.code
const title = params.title || mergedSchema.function?.name || existing.title
await upsertCustomTools({
tools: [
{
id: params.toolId,
title,
schema: mergedSchema,
code: mergedCode,
},
],
workspaceId,
userId: context.userId,
})
return {
success: true,
output: {
success: true,
operation,
toolId: params.toolId,
title,
message: `Updated custom tool "${title}"`,
},
}
}
if (operation === 'delete') {
if (!params.toolId) {
return { success: false, error: "'toolId' is required for operation 'delete'" }
}
const workspaceDelete =
workspaceId != null
? await db
.delete(customTools)
.where(
and(eq(customTools.id, params.toolId), eq(customTools.workspaceId, workspaceId))
)
.returning({ id: customTools.id })
: []
const legacyDelete =
workspaceDelete.length === 0
? await db
.delete(customTools)
.where(
and(
eq(customTools.id, params.toolId),
isNull(customTools.workspaceId),
eq(customTools.userId, context.userId)
)
)
.returning({ id: customTools.id })
: []
const deleted = workspaceDelete[0] || legacyDelete[0]
if (!deleted) {
return { success: false, error: `Custom tool not found: ${params.toolId}` }
}
return {
success: true,
output: {
success: true,
operation,
toolId: params.toolId,
message: 'Deleted custom tool',
},
}
}
return {
success: false,
error: `Unsupported operation for manage_custom_tool: ${operation}`,
}
} catch (error) {
logger.error('manage_custom_tool execution failed', {
operation,
workspaceId,
userId: context.userId,
error: error instanceof Error ? error.message : String(error),
})
return {
success: false,
error: error instanceof Error ? error.message : 'Failed to manage custom tool',
}
}
}
const SERVER_TOOLS = new Set<string>([
'get_blocks_and_tools',
'get_blocks_metadata',
@@ -161,6 +403,19 @@ const SIM_WORKFLOW_TOOL_HANDLERS: Record<
}
}
},
oauth_request_access: async (p, _c) => {
const providerName = (p.providerName || p.provider_name || 'the provider') as string
return {
success: true,
output: {
success: true,
status: 'requested',
providerName,
message: `Requested ${providerName} OAuth connection. The user should complete the OAuth modal in the UI, then retry credential-dependent actions.`,
},
}
},
manage_custom_tool: (p, c) => executeManageCustomTool(p, c),
}
/**

View File
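For reference, a plausible params payload for the 'add' branch of executeManageCustomTool above; the shape follows ManageCustomToolParams and the OpenAI-style function schema it embeds, and the concrete values are invented:

const addToolParams = {
  operation: 'add',
  workspaceId: 'ws_123', // falls back to context.workspaceId when omitted
  title: 'Slugify', // optional: defaults to schema.function.name
  schema: {
    type: 'function' as const,
    function: {
      name: 'slugify',
      description: 'Convert a string to a URL-safe slug',
      parameters: {
        type: 'object',
        properties: { text: { type: 'string' } },
        required: ['text'],
      },
    },
  },
  code: 'return text.toLowerCase().trim().replace(/\\s+/g, "-")',
}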

@@ -0,0 +1,246 @@
import { createLogger } from '@sim/logger'
import { getRedisClient } from '@/lib/core/config/redis'
import type { ExecutionEvent } from '@/lib/workflows/executor/execution-events'
const logger = createLogger('ExecutionEventBuffer')
const REDIS_PREFIX = 'execution:stream:'
const TTL_SECONDS = 60 * 60 // 1 hour
const EVENT_LIMIT = 1000
const RESERVE_BATCH = 100
const FLUSH_INTERVAL_MS = 15
const FLUSH_MAX_BATCH = 200
function getEventsKey(executionId: string) {
return `${REDIS_PREFIX}${executionId}:events`
}
function getSeqKey(executionId: string) {
return `${REDIS_PREFIX}${executionId}:seq`
}
function getMetaKey(executionId: string) {
return `${REDIS_PREFIX}${executionId}:meta`
}
export type ExecutionStreamStatus = 'active' | 'complete' | 'error' | 'cancelled'
export interface ExecutionStreamMeta {
status: ExecutionStreamStatus
userId?: string
workflowId?: string
updatedAt?: string
}
export interface ExecutionEventEntry {
eventId: number
executionId: string
event: ExecutionEvent
}
export interface ExecutionEventWriter {
write: (event: ExecutionEvent) => Promise<ExecutionEventEntry>
flush: () => Promise<void>
close: () => Promise<void>
}
export async function setExecutionMeta(
executionId: string,
meta: Partial<ExecutionStreamMeta>
): Promise<void> {
const redis = getRedisClient()
if (!redis) {
logger.warn('setExecutionMeta: Redis client unavailable', { executionId })
return
}
try {
const key = getMetaKey(executionId)
const payload: Record<string, string> = {
updatedAt: new Date().toISOString(),
}
if (meta.status) payload.status = meta.status
if (meta.userId) payload.userId = meta.userId
if (meta.workflowId) payload.workflowId = meta.workflowId
await redis.hset(key, payload)
await redis.expire(key, TTL_SECONDS)
} catch (error) {
logger.warn('Failed to update execution meta', {
executionId,
error: error instanceof Error ? error.message : String(error),
})
}
}
export async function getExecutionMeta(executionId: string): Promise<ExecutionStreamMeta | null> {
const redis = getRedisClient()
if (!redis) {
logger.warn('getExecutionMeta: Redis client unavailable', { executionId })
return null
}
try {
const key = getMetaKey(executionId)
const meta = await redis.hgetall(key)
if (!meta || Object.keys(meta).length === 0) return null
return meta as unknown as ExecutionStreamMeta
} catch (error) {
logger.warn('Failed to read execution meta', {
executionId,
error: error instanceof Error ? error.message : String(error),
})
return null
}
}
export async function readExecutionEvents(
executionId: string,
afterEventId: number
): Promise<ExecutionEventEntry[]> {
const redis = getRedisClient()
if (!redis) return []
try {
const raw = await redis.zrangebyscore(getEventsKey(executionId), afterEventId + 1, '+inf')
return raw
.map((entry) => {
try {
return JSON.parse(entry) as ExecutionEventEntry
} catch {
return null
}
})
.filter((entry): entry is ExecutionEventEntry => Boolean(entry))
} catch (error) {
logger.warn('Failed to read execution events', {
executionId,
error: error instanceof Error ? error.message : String(error),
})
return []
}
}
export function createExecutionEventWriter(executionId: string): ExecutionEventWriter {
const redis = getRedisClient()
if (!redis) {
logger.warn(
'createExecutionEventWriter: Redis client unavailable, events will not be buffered',
{
executionId,
}
)
return {
write: async (event) => ({ eventId: 0, executionId, event }),
flush: async () => {},
close: async () => {},
}
}
let pending: ExecutionEventEntry[] = []
let nextEventId = 0
let maxReservedId = 0
let flushTimer: ReturnType<typeof setTimeout> | null = null
const scheduleFlush = () => {
if (flushTimer) return
flushTimer = setTimeout(() => {
flushTimer = null
void flush()
}, FLUSH_INTERVAL_MS)
}
const reserveIds = async (minCount: number) => {
const reserveCount = Math.max(RESERVE_BATCH, minCount)
const newMax = await redis.incrby(getSeqKey(executionId), reserveCount)
const startId = newMax - reserveCount + 1
if (nextEventId === 0 || nextEventId > maxReservedId) {
nextEventId = startId
maxReservedId = newMax
}
}
let flushPromise: Promise<void> | null = null
let closed = false
const inflightWrites = new Set<Promise<ExecutionEventEntry>>()
const doFlush = async () => {
if (pending.length === 0) return
const batch = pending
pending = []
try {
const key = getEventsKey(executionId)
const zaddArgs: (string | number)[] = []
for (const entry of batch) {
zaddArgs.push(entry.eventId, JSON.stringify(entry))
}
const pipeline = redis.pipeline()
pipeline.zadd(key, ...zaddArgs)
pipeline.expire(key, TTL_SECONDS)
pipeline.expire(getSeqKey(executionId), TTL_SECONDS)
pipeline.zremrangebyrank(key, 0, -EVENT_LIMIT - 1)
await pipeline.exec()
} catch (error) {
logger.warn('Failed to flush execution events', {
executionId,
batchSize: batch.length,
error: error instanceof Error ? error.message : String(error),
stack: error instanceof Error ? error.stack : undefined,
})
pending = batch.concat(pending)
}
}
const flush = async () => {
if (flushPromise) {
await flushPromise
return
}
flushPromise = doFlush()
try {
await flushPromise
} finally {
flushPromise = null
if (pending.length > 0) scheduleFlush()
}
}
const writeCore = async (event: ExecutionEvent): Promise<ExecutionEventEntry> => {
if (closed) return { eventId: 0, executionId, event }
if (nextEventId === 0 || nextEventId > maxReservedId) {
await reserveIds(1)
}
const eventId = nextEventId++
const entry: ExecutionEventEntry = { eventId, executionId, event }
pending.push(entry)
if (pending.length >= FLUSH_MAX_BATCH) {
await flush()
} else {
scheduleFlush()
}
return entry
}
const write = (event: ExecutionEvent): Promise<ExecutionEventEntry> => {
const p = writeCore(event)
inflightWrites.add(p)
const remove = () => inflightWrites.delete(p)
p.then(remove, remove)
return p
}
const close = async () => {
closed = true
if (flushTimer) {
clearTimeout(flushTimer)
flushTimer = null
}
if (inflightWrites.size > 0) {
await Promise.allSettled(inflightWrites)
}
if (flushPromise) {
await flushPromise
}
if (pending.length > 0) {
await doFlush()
}
}
return { write, flush, close }
}

View File
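A sketch of the intended producer/consumer flow around this buffer; the event literal is illustrative (the real union lives in execution-events.ts) and the import path is assumed from the logger name:

import {
  createExecutionEventWriter,
  getExecutionMeta,
  readExecutionEvents,
  setExecutionMeta,
} from '@/lib/workflows/executor/execution-event-buffer' // path assumed
import type { ExecutionEvent } from '@/lib/workflows/executor/execution-events'

// Producer (executor): buffer events as the run progresses.
const writer = createExecutionEventWriter('exec-123')
await setExecutionMeta('exec-123', { status: 'active', workflowId: 'wf-1' })
await writer.write({ type: 'block.started', blockId: 'b1' } as unknown as ExecutionEvent) // shape illustrative
await writer.close() // drains in-flight writes and any pending batch
await setExecutionMeta('exec-123', { status: 'complete' })

// Consumer (reconnection endpoint): replay everything after the last id seen.
const meta = await getExecutionMeta('exec-123')
if (meta?.status === 'active' || meta?.status === 'complete') {
  for (const { eventId, event } of await readExecutionEvents('exec-123', 0)) {
    // Forward `event` over SSE tagged with eventId so the client can pass
    // it back as fromEventId on its next reconnect.
    console.log('replay', eventId, event)
  }
}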

@@ -2364,6 +2364,261 @@ describe('hasWorkflowChanged', () => {
})
})
describe('Trigger Config Normalization (False Positive Prevention)', () => {
it.concurrent(
'should not detect change when deployed has null fields but current has values from triggerConfig',
() => {
// Core scenario: deployed state has null individual fields, current state has
// values populated from triggerConfig at runtime by populateTriggerFieldsFromConfig
const deployedState = createWorkflowState({
blocks: {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
botToken: { id: 'botToken', type: 'short-input', value: null },
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { signingSecret: 'secret123', botToken: 'token456' },
},
},
}),
},
})
const currentState = createWorkflowState({
blocks: {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
signingSecret: { id: 'signingSecret', type: 'short-input', value: 'secret123' },
botToken: { id: 'botToken', type: 'short-input', value: 'token456' },
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { signingSecret: 'secret123', botToken: 'token456' },
},
},
}),
},
})
expect(hasWorkflowChanged(currentState, deployedState)).toBe(false)
}
)
it.concurrent(
'should detect change when user edits a trigger field to a different value',
() => {
const deployedState = createWorkflowState({
blocks: {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { signingSecret: 'old-secret' },
},
},
}),
},
})
const currentState = createWorkflowState({
blocks: {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
signingSecret: { id: 'signingSecret', type: 'short-input', value: 'new-secret' },
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { signingSecret: 'old-secret' },
},
},
}),
},
})
expect(hasWorkflowChanged(currentState, deployedState)).toBe(true)
}
)
it.concurrent('should not detect change when both sides have no triggerConfig', () => {
const deployedState = createWorkflowState({
blocks: {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
},
}),
},
})
const currentState = createWorkflowState({
blocks: {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
},
}),
},
})
expect(hasWorkflowChanged(currentState, deployedState)).toBe(false)
})
it.concurrent(
'should not detect change when deployed has empty fields and triggerConfig populates them',
() => {
// Empty string is also treated as "empty" by normalizeTriggerConfigValues
const deployedState = createWorkflowState({
blocks: {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
signingSecret: { id: 'signingSecret', type: 'short-input', value: '' },
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { signingSecret: 'secret123' },
},
},
}),
},
})
const currentState = createWorkflowState({
blocks: {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
signingSecret: { id: 'signingSecret', type: 'short-input', value: 'secret123' },
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { signingSecret: 'secret123' },
},
},
}),
},
})
expect(hasWorkflowChanged(currentState, deployedState)).toBe(false)
}
)
it.concurrent('should not detect change when triggerId differs', () => {
const deployedState = createWorkflowState({
blocks: {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
model: { value: 'gpt-4' },
triggerId: { value: null },
},
}),
},
})
const currentState = createWorkflowState({
blocks: {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
model: { value: 'gpt-4' },
triggerId: { value: 'slack_webhook' },
},
}),
},
})
expect(hasWorkflowChanged(currentState, deployedState)).toBe(false)
})
it.concurrent(
'should not detect change for namespaced system subBlock IDs like samplePayload_slack_webhook',
() => {
const deployedState = createWorkflowState({
blocks: {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
model: { value: 'gpt-4' },
samplePayload_slack_webhook: { value: 'old payload' },
triggerInstructions_slack_webhook: { value: 'old instructions' },
},
}),
},
})
const currentState = createWorkflowState({
blocks: {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
model: { value: 'gpt-4' },
samplePayload_slack_webhook: { value: 'new payload' },
triggerInstructions_slack_webhook: { value: 'new instructions' },
},
}),
},
})
expect(hasWorkflowChanged(currentState, deployedState)).toBe(false)
}
)
it.concurrent(
'should handle mixed scenario: some fields from triggerConfig, some user-edited',
() => {
const deployedState = createWorkflowState({
blocks: {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
botToken: { id: 'botToken', type: 'short-input', value: null },
includeFiles: { id: 'includeFiles', type: 'switch', value: false },
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { signingSecret: 'secret123', botToken: 'token456' },
},
},
}),
},
})
const currentState = createWorkflowState({
blocks: {
block1: createBlock('block1', {
type: 'starter',
subBlocks: {
signingSecret: { id: 'signingSecret', type: 'short-input', value: 'secret123' },
botToken: { id: 'botToken', type: 'short-input', value: 'token456' },
includeFiles: { id: 'includeFiles', type: 'switch', value: true },
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { signingSecret: 'secret123', botToken: 'token456' },
},
},
}),
},
})
// includeFiles changed from false to true — this IS a real change
expect(hasWorkflowChanged(currentState, deployedState)).toBe(true)
}
)
})
describe('Trigger Runtime Metadata (Should Not Trigger Change)', () => {
it.concurrent('should not detect change when webhookId differs', () => {
const deployedState = createWorkflowState({

View File

@@ -9,6 +9,7 @@ import {
normalizeLoop,
normalizeParallel,
normalizeSubBlockValue,
normalizeTriggerConfigValues,
normalizeValue,
normalizeVariables,
sanitizeVariable,
@@ -172,14 +173,18 @@ export function generateWorkflowDiffSummary(
}
}
// Normalize trigger config values for both states before comparison
const normalizedCurrentSubs = normalizeTriggerConfigValues(currentSubBlocks)
const normalizedPreviousSubs = normalizeTriggerConfigValues(previousSubBlocks)
// Compare subBlocks using shared helper for filtering (single source of truth)
const allSubBlockIds = filterSubBlockIds([
...new Set([...Object.keys(currentSubBlocks), ...Object.keys(previousSubBlocks)]),
...new Set([...Object.keys(normalizedCurrentSubs), ...Object.keys(normalizedPreviousSubs)]),
])
for (const subId of allSubBlockIds) {
const currentSub = currentSubBlocks[subId] as Record<string, unknown> | undefined
const previousSub = previousSubBlocks[subId] as Record<string, unknown> | undefined
const currentSub = normalizedCurrentSubs[subId] as Record<string, unknown> | undefined
const previousSub = normalizedPreviousSubs[subId] as Record<string, unknown> | undefined
if (!currentSub || !previousSub) {
changes.push({

View File

@@ -4,10 +4,12 @@
import { describe, expect, it } from 'vitest'
import type { Loop, Parallel } from '@/stores/workflows/workflow/types'
import {
filterSubBlockIds,
normalizedStringify,
normalizeEdge,
normalizeLoop,
normalizeParallel,
normalizeTriggerConfigValues,
normalizeValue,
sanitizeInputFormat,
sanitizeTools,
@@ -584,4 +586,214 @@ describe('Workflow Normalization Utilities', () => {
expect(result2).toBe(result3)
})
})
describe('filterSubBlockIds', () => {
it.concurrent('should exclude exact SYSTEM_SUBBLOCK_IDS', () => {
const ids = ['signingSecret', 'samplePayload', 'triggerInstructions', 'botToken']
const result = filterSubBlockIds(ids)
expect(result).toEqual(['botToken', 'signingSecret'])
})
it.concurrent('should exclude namespaced SYSTEM_SUBBLOCK_IDS (prefix matching)', () => {
const ids = [
'signingSecret',
'samplePayload_slack_webhook',
'triggerInstructions_slack_webhook',
'webhookUrlDisplay_slack_webhook',
'botToken',
]
const result = filterSubBlockIds(ids)
expect(result).toEqual(['botToken', 'signingSecret'])
})
it.concurrent('should exclude exact TRIGGER_RUNTIME_SUBBLOCK_IDS', () => {
const ids = ['webhookId', 'triggerPath', 'triggerConfig', 'triggerId', 'signingSecret']
const result = filterSubBlockIds(ids)
expect(result).toEqual(['signingSecret'])
})
it.concurrent('should not exclude IDs that merely contain a system ID substring', () => {
const ids = ['mySamplePayload', 'notSamplePayload']
const result = filterSubBlockIds(ids)
expect(result).toEqual(['mySamplePayload', 'notSamplePayload'])
})
it.concurrent('should return sorted results', () => {
const ids = ['zebra', 'alpha', 'middle']
const result = filterSubBlockIds(ids)
expect(result).toEqual(['alpha', 'middle', 'zebra'])
})
it.concurrent('should handle empty array', () => {
expect(filterSubBlockIds([])).toEqual([])
})
it.concurrent('should handle all IDs being excluded', () => {
const ids = ['webhookId', 'triggerPath', 'samplePayload', 'triggerConfig']
const result = filterSubBlockIds(ids)
expect(result).toEqual([])
})
it.concurrent('should exclude setupScript and scheduleInfo namespaced variants', () => {
const ids = ['setupScript_google_sheets_row', 'scheduleInfo_cron_trigger', 'realField']
const result = filterSubBlockIds(ids)
expect(result).toEqual(['realField'])
})
it.concurrent('should exclude triggerCredentials namespaced variants', () => {
const ids = ['triggerCredentials_slack_webhook', 'signingSecret']
const result = filterSubBlockIds(ids)
expect(result).toEqual(['signingSecret'])
})
})
describe('normalizeTriggerConfigValues', () => {
it.concurrent('should return subBlocks unchanged when no triggerConfig exists', () => {
const subBlocks = {
signingSecret: { id: 'signingSecret', type: 'short-input', value: 'secret123' },
botToken: { id: 'botToken', type: 'short-input', value: 'token456' },
}
const result = normalizeTriggerConfigValues(subBlocks)
expect(result).toEqual(subBlocks)
})
it.concurrent('should return subBlocks unchanged when triggerConfig value is null', () => {
const subBlocks = {
triggerConfig: { id: 'triggerConfig', type: 'short-input', value: null },
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
}
const result = normalizeTriggerConfigValues(subBlocks)
expect(result).toEqual(subBlocks)
})
it.concurrent(
'should return subBlocks unchanged when triggerConfig value is not an object',
() => {
const subBlocks = {
triggerConfig: { id: 'triggerConfig', type: 'short-input', value: 'string-value' },
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
}
const result = normalizeTriggerConfigValues(subBlocks)
expect(result).toEqual(subBlocks)
}
)
it.concurrent('should populate null individual fields from triggerConfig', () => {
const subBlocks = {
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { signingSecret: 'secret123', botToken: 'token456' },
},
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
botToken: { id: 'botToken', type: 'short-input', value: null },
}
const result = normalizeTriggerConfigValues(subBlocks)
expect((result.signingSecret as Record<string, unknown>).value).toBe('secret123')
expect((result.botToken as Record<string, unknown>).value).toBe('token456')
})
it.concurrent('should populate undefined individual fields from triggerConfig', () => {
const subBlocks = {
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { signingSecret: 'secret123' },
},
signingSecret: { id: 'signingSecret', type: 'short-input', value: undefined },
}
const result = normalizeTriggerConfigValues(subBlocks)
expect((result.signingSecret as Record<string, unknown>).value).toBe('secret123')
})
it.concurrent('should populate empty string individual fields from triggerConfig', () => {
const subBlocks = {
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { signingSecret: 'secret123' },
},
signingSecret: { id: 'signingSecret', type: 'short-input', value: '' },
}
const result = normalizeTriggerConfigValues(subBlocks)
expect((result.signingSecret as Record<string, unknown>).value).toBe('secret123')
})
it.concurrent('should NOT overwrite existing non-empty individual field values', () => {
const subBlocks = {
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { signingSecret: 'old-secret' },
},
signingSecret: { id: 'signingSecret', type: 'short-input', value: 'user-edited-secret' },
}
const result = normalizeTriggerConfigValues(subBlocks)
expect((result.signingSecret as Record<string, unknown>).value).toBe('user-edited-secret')
})
it.concurrent('should skip triggerConfig fields that are null/undefined', () => {
const subBlocks = {
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { signingSecret: null, botToken: undefined },
},
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
botToken: { id: 'botToken', type: 'short-input', value: null },
}
const result = normalizeTriggerConfigValues(subBlocks)
expect((result.signingSecret as Record<string, unknown>).value).toBe(null)
expect((result.botToken as Record<string, unknown>).value).toBe(null)
})
it.concurrent('should skip fields from triggerConfig that have no matching subBlock', () => {
const subBlocks = {
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { nonExistentField: 'value123' },
},
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
}
const result = normalizeTriggerConfigValues(subBlocks)
expect(result.nonExistentField).toBeUndefined()
expect((result.signingSecret as Record<string, unknown>).value).toBe(null)
})
it.concurrent('should not mutate the original subBlocks object', () => {
const original = {
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { signingSecret: 'secret123' },
},
signingSecret: { id: 'signingSecret', type: 'short-input', value: null },
}
normalizeTriggerConfigValues(original)
expect((original.signingSecret as Record<string, unknown>).value).toBe(null)
})
it.concurrent('should preserve other subBlock properties when populating value', () => {
const subBlocks = {
triggerConfig: {
id: 'triggerConfig',
type: 'short-input',
value: { signingSecret: 'secret123' },
},
signingSecret: {
id: 'signingSecret',
type: 'short-input',
value: null,
placeholder: 'Enter signing secret',
},
}
const result = normalizeTriggerConfigValues(subBlocks)
const normalized = result.signingSecret as Record<string, unknown>
expect(normalized.value).toBe('secret123')
expect(normalized.id).toBe('signingSecret')
expect(normalized.type).toBe('short-input')
expect(normalized.placeholder).toBe('Enter signing secret')
})
})
})

View File

@@ -418,10 +418,48 @@ export function extractBlockFieldsForComparison(block: BlockState): ExtractedBlo
*/
export function filterSubBlockIds(subBlockIds: string[]): string[] {
return subBlockIds
.filter((id) => !SYSTEM_SUBBLOCK_IDS.includes(id) && !TRIGGER_RUNTIME_SUBBLOCK_IDS.includes(id))
.filter((id) => {
if (TRIGGER_RUNTIME_SUBBLOCK_IDS.includes(id)) return false
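// Prefix matching catches namespaced variants like 'samplePayload_slack_webhook'
// without excluding IDs that merely contain a system ID (e.g. 'mySamplePayload').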
if (SYSTEM_SUBBLOCK_IDS.some((sysId) => id === sysId || id.startsWith(`${sysId}_`)))
return false
return true
})
.sort()
}
/**
* Normalizes trigger block subBlocks by populating null/empty individual fields
* from the triggerConfig aggregate subBlock. This compensates for the runtime
* population done by populateTriggerFieldsFromConfig, ensuring consistent
* comparison between client state (with populated values) and deployed state
* (with null values from DB).
*/
export function normalizeTriggerConfigValues(
subBlocks: Record<string, unknown>
): Record<string, unknown> {
const triggerConfigSub = subBlocks.triggerConfig as Record<string, unknown> | undefined
const triggerConfigValue = triggerConfigSub?.value
if (!triggerConfigValue || typeof triggerConfigValue !== 'object') {
return subBlocks
}
const result = { ...subBlocks }
for (const [fieldId, configValue] of Object.entries(
triggerConfigValue as Record<string, unknown>
)) {
if (configValue === null || configValue === undefined) continue
const existingSub = result[fieldId] as Record<string, unknown> | undefined
if (
existingSub &&
(existingSub.value === null || existingSub.value === undefined || existingSub.value === '')
) {
result[fieldId] = { ...existingSub, value: configValue }
}
}
return result
}
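// Worked example (values invented): given a deployed-state shape
//   { triggerConfig: { value: { signingSecret: 's1' } },
//     signingSecret: { value: null } }
// the result has signingSecret.value === 's1', while the input object itself is
// left untouched because populated entries are shallow-copied into a new record.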
/**
* Normalizes a subBlock value with sanitization for specific subBlock types.
* Sanitizes: tools (removes isExpanded), inputFormat (removes collapsed)

View File

@@ -137,6 +137,37 @@ function handleSecurityFiltering(request: NextRequest): NextResponse | null {
return null
}
const UTM_KEYS = ['utm_source', 'utm_medium', 'utm_campaign', 'utm_content'] as const
const UTM_COOKIE_NAME = 'sim_utm'
const UTM_COOKIE_MAX_AGE = 3600
/**
* Sets a `sim_utm` cookie when UTM params are present on auth pages.
* Captures UTM values, the HTTP Referer, landing page, and a timestamp
* used by the attribution API to verify the user signed up after visiting the link.
*/
function setUtmCookie(request: NextRequest, response: NextResponse): void {
const { searchParams, pathname } = request.nextUrl
const hasUtm = UTM_KEYS.some((key) => searchParams.get(key))
if (!hasUtm) return
const utmData: Record<string, string> = {}
for (const key of UTM_KEYS) {
const value = searchParams.get(key)
if (value) utmData[key] = value
}
utmData.referrer_url = request.headers.get('referer') || ''
utmData.landing_page = pathname
utmData.created_at = Date.now().toString()
response.cookies.set(UTM_COOKIE_NAME, JSON.stringify(utmData), {
path: '/',
maxAge: UTM_COOKIE_MAX_AGE,
sameSite: 'lax',
httpOnly: false, // Client-side hook needs to detect cookie presence
})
}
export async function proxy(request: NextRequest) {
const url = request.nextUrl
@@ -152,6 +183,7 @@ export async function proxy(request: NextRequest) {
}
const response = NextResponse.next()
response.headers.set('Content-Security-Policy', generateRuntimeCSP())
setUtmCookie(request, response)
return response
}
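
A hedged sketch of the consuming side: how the attribution API might read and validate the `sim_utm` cookie set above. The helper name and staleness check are assumptions; only the cookie name, JSON shape, and one-hour maxAge come from this diff.

interface UtmCookieData {
  utm_source?: string
  utm_medium?: string
  utm_campaign?: string
  utm_content?: string
  referrer_url?: string
  landing_page?: string
  created_at?: string
}

// Hypothetical helper: parse the raw cookie value and reject stale payloads.
function parseUtmCookie(raw: string | undefined): UtmCookieData | null {
  if (!raw) return null
  try {
    const data = JSON.parse(raw) as UtmCookieData
    const createdAt = Number(data.created_at)
    // created_at is Date.now().toString(); enforce the same 1h window as the cookie maxAge.
    if (!Number.isFinite(createdAt) || Date.now() - createdAt > 3600_000) return null
    return data
  } catch {
    return null
  }
}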

View File

@@ -129,6 +129,18 @@ export const useExecutionStore = create<ExecutionState & ExecutionActions>()((se
})
},
setCurrentExecutionId: (workflowId, executionId) => {
set({
workflowExecutions: updatedMap(get().workflowExecutions, workflowId, {
currentExecutionId: executionId,
}),
})
},
getCurrentExecutionId: (workflowId) => {
return getOrCreate(get().workflowExecutions, workflowId).currentExecutionId
},
clearRunPath: (workflowId) => {
set({
workflowExecutions: updatedMap(get().workflowExecutions, workflowId, {

View File

@@ -35,6 +35,8 @@ export interface WorkflowExecutionState {
lastRunPath: Map<string, BlockRunStatus>
/** Maps edge IDs to their run result from the last execution */
lastRunEdges: Map<string, EdgeRunStatus>
/** The execution ID of the currently running execution */
currentExecutionId: string | null
}
/**
@@ -54,6 +56,7 @@ export const defaultWorkflowExecutionState: WorkflowExecutionState = {
debugContext: null,
lastRunPath: new Map(),
lastRunEdges: new Map(),
currentExecutionId: null,
}
/**
@@ -96,6 +99,10 @@ export interface ExecutionActions {
setEdgeRunStatus: (workflowId: string, edgeId: string, status: EdgeRunStatus) => void
/** Clears the run path and run edges for a workflow */
clearRunPath: (workflowId: string) => void
/** Stores the current execution ID for a workflow */
setCurrentExecutionId: (workflowId: string, executionId: string | null) => void
/** Returns the current execution ID for a workflow */
getCurrentExecutionId: (workflowId: string) => string | null
/** Resets the entire store to its initial empty state */
reset: () => void
/** Stores a serializable execution snapshot for a workflow */
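
A sketch of how the reconnection flow might use these accessors; `useExecutionStore` appears elsewhere in this changeset, but the handler names and the minimal store typing below are assumptions.

// Minimal assumed typing for illustration only.
declare const useExecutionStore: {
  getState(): {
    setCurrentExecutionId(workflowId: string, executionId: string | null): void
    getCurrentExecutionId(workflowId: string): string | null
  }
}

// Record the running execution so a page refresh can find it again.
function onExecutionStart(workflowId: string, executionId: string) {
  useExecutionStore.getState().setCurrentExecutionId(workflowId, executionId)
}

// A non-null ID after reload means an execution may still be running,
// so the terminal can reconnect to its event stream.
function findReconnectTarget(workflowId: string): string | null {
  return useExecutionStore.getState().getCurrentExecutionId(workflowId)
}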

View File

@@ -310,6 +310,50 @@ function parseModelKey(compositeKey: string): { provider: string; modelId: strin
return { provider: compositeKey.slice(0, slashIdx), modelId: compositeKey.slice(slashIdx + 1) }
}
/**
* Convert legacy/variant Claude IDs into the canonical ID shape used by the model catalog.
*
* Examples:
* - claude-4.5-opus -> claude-opus-4-5
* - claude-opus-4.6 -> claude-opus-4-6
* - anthropic.claude-opus-4-5-20251101-v1:0 -> claude-opus-4-5 (match key only)
*/
function canonicalizeModelMatchKey(modelId: string): string {
if (!modelId) return modelId
const normalized = modelId.trim().toLowerCase()
const toCanonicalClaude = (tier: string, version: string): string => {
const normalizedVersion = version.replace(/\./g, '-')
return `claude-${tier}-${normalizedVersion}`
}
const tierFirstExact = normalized.match(/^claude-(opus|sonnet|haiku)-(\d+(?:[.-]\d+)?)$/)
if (tierFirstExact) {
const [, tier, version] = tierFirstExact
return toCanonicalClaude(tier, version)
}
const versionFirstExact = normalized.match(/^claude-(\d+(?:[.-]\d+)?)-(opus|sonnet|haiku)$/)
if (versionFirstExact) {
const [, version, tier] = versionFirstExact
return toCanonicalClaude(tier, version)
}
const tierFirstEmbedded = normalized.match(/claude-(opus|sonnet|haiku)-(\d+(?:[.-]\d+)?)/)
if (tierFirstEmbedded) {
const [, tier, version] = tierFirstEmbedded
return toCanonicalClaude(tier, version)
}
const versionFirstEmbedded = normalized.match(/claude-(\d+(?:[.-]\d+)?)-(opus|sonnet|haiku)/)
if (versionFirstEmbedded) {
const [, version, tier] = versionFirstEmbedded
return toCanonicalClaude(tier, version)
}
return normalized
}
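// Illustrative sanity checks for the canonicalizer above (inputs invented):
//   canonicalizeModelMatchKey('claude-4.5-opus')  -> 'claude-opus-4-5'
//   canonicalizeModelMatchKey('claude-opus-4.6')  -> 'claude-opus-4-6'
//   canonicalizeModelMatchKey('anthropic.claude-opus-4-5-20251101-v1:0') -> 'claude-opus-4-5'
//   canonicalizeModelMatchKey('gpt-4o')           -> 'gpt-4o' (non-Claude IDs pass through lowercased)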
const MODEL_PROVIDER_PRIORITY = [
'anthropic',
'bedrock',
@@ -350,12 +394,23 @@ function normalizeSelectedModelKey(selectedModel: string, models: AvailableModel
const { provider, modelId } = parseModelKey(selectedModel)
const targetModelId = modelId || selectedModel
const targetMatchKey = canonicalizeModelMatchKey(targetModelId)
const matches = models.filter((m) => m.id.endsWith(`/${targetModelId}`))
const matches = models.filter((m) => {
const candidateModelId = parseModelKey(m.id).modelId || m.id
const candidateMatchKey = canonicalizeModelMatchKey(candidateModelId)
return (
candidateModelId === targetModelId ||
m.id.endsWith(`/${targetModelId}`) ||
candidateMatchKey === targetMatchKey
)
})
if (matches.length === 0) return selectedModel
if (provider) {
const sameProvider = matches.find((m) => m.provider === provider)
const sameProvider = matches.find(
(m) => m.provider === provider || m.id.startsWith(`${provider}/`)
)
if (sameProvider) return sameProvider.id
}
@@ -1093,11 +1148,12 @@ export const useCopilotStore = create<CopilotStore>()(
const chatConfig = chat.config ?? {}
const chatMode = chatConfig.mode || get().mode
const chatModel = chatConfig.model || get().selectedModel
const normalizedChatModel = normalizeSelectedModelKey(chatModel, get().availableModels)
logger.debug('[Chat] Restoring chat config', {
chatId: chat.id,
mode: chatMode,
model: chatModel,
model: normalizedChatModel,
hasPlanArtifact: !!planArtifact,
})
@@ -1119,7 +1175,7 @@ export const useCopilotStore = create<CopilotStore>()(
showPlanTodos: false,
streamingPlanContent: planArtifact,
mode: chatMode,
selectedModel: chatModel as CopilotStore['selectedModel'],
selectedModel: normalizedChatModel as CopilotStore['selectedModel'],
suppressAutoSelect: false,
})
@@ -1292,6 +1348,10 @@ export const useCopilotStore = create<CopilotStore>()(
const refreshedConfig = updatedCurrentChat.config ?? {}
const refreshedMode = refreshedConfig.mode || get().mode
const refreshedModel = refreshedConfig.model || get().selectedModel
const normalizedRefreshedModel = normalizeSelectedModelKey(
refreshedModel,
get().availableModels
)
const toolCallsById = buildToolCallsById(normalizedMessages)
set({
@@ -1300,7 +1360,7 @@ export const useCopilotStore = create<CopilotStore>()(
toolCallsById,
streamingPlanContent: refreshedPlanArtifact,
mode: refreshedMode,
selectedModel: refreshedModel as CopilotStore['selectedModel'],
selectedModel: normalizedRefreshedModel as CopilotStore['selectedModel'],
})
}
try {
@@ -1320,11 +1380,15 @@ export const useCopilotStore = create<CopilotStore>()(
const chatConfig = mostRecentChat.config ?? {}
const chatMode = chatConfig.mode || get().mode
const chatModel = chatConfig.model || get().selectedModel
const normalizedChatModel = normalizeSelectedModelKey(
chatModel,
get().availableModels
)
logger.info('[Chat] Auto-selecting most recent chat with config', {
chatId: mostRecentChat.id,
mode: chatMode,
model: chatModel,
model: normalizedChatModel,
hasPlanArtifact: !!planArtifact,
})
@@ -1336,7 +1400,7 @@ export const useCopilotStore = create<CopilotStore>()(
toolCallsById,
streamingPlanContent: planArtifact,
mode: chatMode,
selectedModel: chatModel as CopilotStore['selectedModel'],
selectedModel: normalizedChatModel as CopilotStore['selectedModel'],
})
try {
await get().loadMessageCheckpoints(mostRecentChat.id)
@@ -2268,7 +2332,8 @@ export const useCopilotStore = create<CopilotStore>()(
},
setSelectedModel: async (model) => {
set({ selectedModel: model })
const normalizedModel = normalizeSelectedModelKey(model, get().availableModels)
set({ selectedModel: normalizedModel as CopilotStore['selectedModel'] })
},
setAgentPrefetch: (prefetch) => set({ agentPrefetch: prefetch }),
loadAvailableModels: async () => {

View File

@@ -224,7 +224,7 @@ export const useTerminalConsoleStore = create<ConsoleStore>()(
const newEntry = get().entries[0]
if (newEntry?.error) {
if (newEntry?.error && newEntry.blockType !== 'cancelled') {
notifyBlockError({
error: newEntry.error,
blockName: newEntry.blockName || 'Unknown Block',
@@ -243,6 +243,11 @@ export const useTerminalConsoleStore = create<ConsoleStore>()(
useExecutionStore.getState().clearRunPath(workflowId)
},
clearExecutionEntries: (executionId: string) =>
set((state) => ({
entries: state.entries.filter((e) => e.executionId !== executionId),
})),
exportConsoleCSV: (workflowId: string) => {
const entries = get().entries.filter((entry) => entry.workflowId === workflowId)
@@ -470,12 +475,24 @@ export const useTerminalConsoleStore = create<ConsoleStore>()(
},
merge: (persistedState, currentState) => {
const persisted = persistedState as Partial<ConsoleStore> | undefined
const entries = (persisted?.entries ?? currentState.entries).map((entry, index) => {
const rawEntries = persisted?.entries ?? currentState.entries
const oneHourAgo = Date.now() - 60 * 60 * 1000
const entries = rawEntries.map((entry, index) => {
let updated = entry
if (entry.executionOrder === undefined) {
return { ...entry, executionOrder: index + 1 }
updated = { ...updated, executionOrder: index + 1 }
}
return entry
if (
entry.isRunning &&
entry.startedAt &&
new Date(entry.startedAt).getTime() < oneHourAgo
) {
updated = { ...updated, isRunning: false }
}
return updated
})
return {
...currentState,
entries,

View File

@@ -51,6 +51,7 @@ export interface ConsoleStore {
isOpen: boolean
addConsole: (entry: Omit<ConsoleEntry, 'id' | 'timestamp'>) => ConsoleEntry
clearWorkflowConsole: (workflowId: string) => void
clearExecutionEntries: (executionId: string) => void
exportConsoleCSV: (workflowId: string) => void
getWorkflowEntries: (workflowId: string) => ConsoleEntry[]
toggleConsole: () => void

View File

@@ -0,0 +1,114 @@
import { TIMESTAMP_OUTPUT } from '@/tools/confluence/types'
import type { ToolConfig } from '@/tools/types'
export interface ConfluenceDeleteLabelParams {
accessToken: string
domain: string
pageId: string
labelName: string
cloudId?: string
}
export interface ConfluenceDeleteLabelResponse {
success: boolean
output: {
ts: string
pageId: string
labelName: string
deleted: boolean
}
}
export const confluenceDeleteLabelTool: ToolConfig<
ConfluenceDeleteLabelParams,
ConfluenceDeleteLabelResponse
> = {
id: 'confluence_delete_label',
name: 'Confluence Delete Label',
description: 'Remove a label from a Confluence page.',
version: '1.0.0',
oauth: {
required: true,
provider: 'confluence',
},
params: {
accessToken: {
type: 'string',
required: true,
visibility: 'hidden',
description: 'OAuth access token for Confluence',
},
domain: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'Your Confluence domain (e.g., yourcompany.atlassian.net)',
},
pageId: {
type: 'string',
required: true,
visibility: 'user-or-llm',
description: 'Confluence page ID to remove the label from',
},
labelName: {
type: 'string',
required: true,
visibility: 'user-or-llm',
description: 'Name of the label to remove',
},
cloudId: {
type: 'string',
required: false,
visibility: 'user-only',
description:
'Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain.',
},
},
request: {
url: () => '/api/tools/confluence/labels',
method: 'DELETE',
headers: (params: ConfluenceDeleteLabelParams) => ({
Accept: 'application/json',
Authorization: `Bearer ${params.accessToken}`,
}),
body: (params: ConfluenceDeleteLabelParams) => ({
domain: params.domain,
accessToken: params.accessToken,
pageId: params.pageId?.trim(),
labelName: params.labelName?.trim(),
cloudId: params.cloudId,
}),
},
transformResponse: async (response: Response) => {
const data = await response.json()
return {
success: true,
output: {
ts: new Date().toISOString(),
pageId: data.pageId ?? '',
labelName: data.labelName ?? '',
deleted: true,
},
}
},
outputs: {
ts: TIMESTAMP_OUTPUT,
pageId: {
type: 'string',
description: 'Page ID the label was removed from',
},
labelName: {
type: 'string',
description: 'Name of the removed label',
},
deleted: {
type: 'boolean',
description: 'Deletion status',
},
},
}
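
For orientation, the request this config materializes; the parameter values below are placeholders, and only the URL, method, and body shape come from the definition above.

const exampleParams: ConfluenceDeleteLabelParams = {
  accessToken: 'oauth-token', // placeholder
  domain: 'yourcompany.atlassian.net',
  pageId: '12345',
  labelName: 'obsolete',
}
// confluenceDeleteLabelTool.request.url()  -> '/api/tools/confluence/labels' (DELETE)
// confluenceDeleteLabelTool.request.body(exampleParams)
//   -> { domain, accessToken, pageId: '12345', labelName: 'obsolete', cloudId: undefined }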

View File

@@ -0,0 +1,105 @@
import { TIMESTAMP_OUTPUT } from '@/tools/confluence/types'
import type { ToolConfig } from '@/tools/types'
export interface ConfluenceDeletePagePropertyParams {
accessToken: string
domain: string
pageId: string
propertyId: string
cloudId?: string
}
export interface ConfluenceDeletePagePropertyResponse {
success: boolean
output: {
ts: string
pageId: string
propertyId: string
deleted: boolean
}
}
export const confluenceDeletePagePropertyTool: ToolConfig<
ConfluenceDeletePagePropertyParams,
ConfluenceDeletePagePropertyResponse
> = {
id: 'confluence_delete_page_property',
name: 'Confluence Delete Page Property',
description: 'Delete a content property from a Confluence page by its property ID.',
version: '1.0.0',
oauth: {
required: true,
provider: 'confluence',
},
params: {
accessToken: {
type: 'string',
required: true,
visibility: 'hidden',
description: 'OAuth access token for Confluence',
},
domain: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'Your Confluence domain (e.g., yourcompany.atlassian.net)',
},
pageId: {
type: 'string',
required: true,
visibility: 'user-or-llm',
description: 'The ID of the page containing the property',
},
propertyId: {
type: 'string',
required: true,
visibility: 'user-or-llm',
description: 'The ID of the property to delete',
},
cloudId: {
type: 'string',
required: false,
visibility: 'user-only',
description:
'Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain.',
},
},
request: {
url: () => '/api/tools/confluence/page-properties',
method: 'DELETE',
headers: (params: ConfluenceDeletePagePropertyParams) => ({
Accept: 'application/json',
Authorization: `Bearer ${params.accessToken}`,
}),
body: (params: ConfluenceDeletePagePropertyParams) => ({
domain: params.domain,
accessToken: params.accessToken,
pageId: params.pageId?.trim(),
propertyId: params.propertyId?.trim(),
cloudId: params.cloudId,
}),
},
transformResponse: async (response: Response) => {
const data = await response.json()
return {
success: true,
output: {
ts: new Date().toISOString(),
pageId: data.pageId ?? '',
propertyId: data.propertyId ?? '',
deleted: true,
},
}
},
outputs: {
ts: TIMESTAMP_OUTPUT,
pageId: { type: 'string', description: 'ID of the page' },
propertyId: { type: 'string', description: 'ID of the deleted property' },
deleted: { type: 'boolean', description: 'Deletion status' },
},
}

View File

@@ -0,0 +1,143 @@
import { PAGE_ITEM_PROPERTIES, TIMESTAMP_OUTPUT } from '@/tools/confluence/types'
import type { ToolConfig } from '@/tools/types'
export interface ConfluenceGetPagesByLabelParams {
accessToken: string
domain: string
labelId: string
limit?: number
cursor?: string
cloudId?: string
}
export interface ConfluenceGetPagesByLabelResponse {
success: boolean
output: {
ts: string
labelId: string
pages: Array<{
id: string
title: string
status: string | null
spaceId: string | null
parentId: string | null
authorId: string | null
createdAt: string | null
version: {
number: number
message?: string
createdAt?: string
} | null
}>
nextCursor: string | null
}
}
export const confluenceGetPagesByLabelTool: ToolConfig<
ConfluenceGetPagesByLabelParams,
ConfluenceGetPagesByLabelResponse
> = {
id: 'confluence_get_pages_by_label',
name: 'Confluence Get Pages by Label',
description: 'Retrieve all pages that have a specific label applied.',
version: '1.0.0',
oauth: {
required: true,
provider: 'confluence',
},
params: {
accessToken: {
type: 'string',
required: true,
visibility: 'hidden',
description: 'OAuth access token for Confluence',
},
domain: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'Your Confluence domain (e.g., yourcompany.atlassian.net)',
},
labelId: {
type: 'string',
required: true,
visibility: 'user-or-llm',
description: 'The ID of the label to get pages for',
},
limit: {
type: 'number',
required: false,
visibility: 'user-or-llm',
description: 'Maximum number of pages to return (default: 50, max: 250)',
},
cursor: {
type: 'string',
required: false,
visibility: 'user-or-llm',
description: 'Pagination cursor from previous response',
},
cloudId: {
type: 'string',
required: false,
visibility: 'user-only',
description:
'Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain.',
},
},
request: {
url: (params: ConfluenceGetPagesByLabelParams) => {
const query = new URLSearchParams({
domain: params.domain,
accessToken: params.accessToken,
labelId: params.labelId,
limit: String(params.limit || 50),
})
if (params.cursor) {
query.set('cursor', params.cursor)
}
if (params.cloudId) {
query.set('cloudId', params.cloudId)
}
return `/api/tools/confluence/pages-by-label?${query.toString()}`
},
method: 'GET',
headers: (params: ConfluenceGetPagesByLabelParams) => ({
Accept: 'application/json',
Authorization: `Bearer ${params.accessToken}`,
}),
},
transformResponse: async (response: Response) => {
const data = await response.json()
return {
success: true,
output: {
ts: new Date().toISOString(),
labelId: data.labelId ?? '',
pages: data.pages ?? [],
nextCursor: data.nextCursor ?? null,
},
}
},
outputs: {
ts: TIMESTAMP_OUTPUT,
labelId: { type: 'string', description: 'ID of the label' },
pages: {
type: 'array',
description: 'Array of pages with this label',
items: {
type: 'object',
properties: PAGE_ITEM_PROPERTIES,
},
},
nextCursor: {
type: 'string',
description: 'Cursor for fetching the next page of results',
optional: true,
},
},
}
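
Because results are cursor-paginated, a caller loops until nextCursor is null. A minimal sketch, assuming request.url/headers can be invoked directly as the callables defined above and that transformResponse is present; fetchAllPagesByLabel itself is hypothetical.

async function fetchAllPagesByLabel(base: Omit<ConfluenceGetPagesByLabelParams, 'cursor'>) {
  const pages: ConfluenceGetPagesByLabelResponse['output']['pages'] = []
  let cursor: string | undefined
  do {
    const params = { ...base, cursor }
    const res = await fetch(confluenceGetPagesByLabelTool.request.url(params), {
      headers: confluenceGetPagesByLabelTool.request.headers(params),
    })
    // transformResponse is defined above; the non-null assertion is for the sketch.
    const { output } = await confluenceGetPagesByLabelTool.transformResponse!(res)
    pages.push(...output.pages)
    cursor = output.nextCursor ?? undefined
  } while (cursor)
  return pages
}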

View File

@@ -5,11 +5,14 @@ import { confluenceCreatePageTool } from '@/tools/confluence/create_page'
import { confluenceCreatePagePropertyTool } from '@/tools/confluence/create_page_property'
import { confluenceDeleteAttachmentTool } from '@/tools/confluence/delete_attachment'
import { confluenceDeleteCommentTool } from '@/tools/confluence/delete_comment'
import { confluenceDeleteLabelTool } from '@/tools/confluence/delete_label'
import { confluenceDeletePageTool } from '@/tools/confluence/delete_page'
import { confluenceDeletePagePropertyTool } from '@/tools/confluence/delete_page_property'
import { confluenceGetBlogPostTool } from '@/tools/confluence/get_blogpost'
import { confluenceGetPageAncestorsTool } from '@/tools/confluence/get_page_ancestors'
import { confluenceGetPageChildrenTool } from '@/tools/confluence/get_page_children'
import { confluenceGetPageVersionTool } from '@/tools/confluence/get_page_version'
import { confluenceGetPagesByLabelTool } from '@/tools/confluence/get_pages_by_label'
import { confluenceGetSpaceTool } from '@/tools/confluence/get_space'
import { confluenceListAttachmentsTool } from '@/tools/confluence/list_attachments'
import { confluenceListBlogPostsTool } from '@/tools/confluence/list_blogposts'
@@ -19,6 +22,7 @@ import { confluenceListLabelsTool } from '@/tools/confluence/list_labels'
import { confluenceListPagePropertiesTool } from '@/tools/confluence/list_page_properties'
import { confluenceListPageVersionsTool } from '@/tools/confluence/list_page_versions'
import { confluenceListPagesInSpaceTool } from '@/tools/confluence/list_pages_in_space'
import { confluenceListSpaceLabelsTool } from '@/tools/confluence/list_space_labels'
import { confluenceListSpacesTool } from '@/tools/confluence/list_spaces'
import { confluenceRetrieveTool } from '@/tools/confluence/retrieve'
import { confluenceSearchTool } from '@/tools/confluence/search'
@@ -78,6 +82,7 @@ export {
// Page Properties Tools
confluenceListPagePropertiesTool,
confluenceCreatePagePropertyTool,
confluenceDeletePagePropertyTool,
// Blog Post Tools
confluenceListBlogPostsTool,
confluenceGetBlogPostTool,
@@ -98,6 +103,9 @@ export {
// Label Tools
confluenceListLabelsTool,
confluenceAddLabelTool,
confluenceDeleteLabelTool,
confluenceGetPagesByLabelTool,
confluenceListSpaceLabelsTool,
// Space Tools
confluenceGetSpaceTool,
confluenceListSpacesTool,

View File

@@ -0,0 +1,134 @@
import { LABEL_ITEM_PROPERTIES, TIMESTAMP_OUTPUT } from '@/tools/confluence/types'
import type { ToolConfig } from '@/tools/types'
export interface ConfluenceListSpaceLabelsParams {
accessToken: string
domain: string
spaceId: string
limit?: number
cursor?: string
cloudId?: string
}
export interface ConfluenceListSpaceLabelsResponse {
success: boolean
output: {
ts: string
spaceId: string
labels: Array<{
id: string
name: string
prefix: string
}>
nextCursor: string | null
}
}
export const confluenceListSpaceLabelsTool: ToolConfig<
ConfluenceListSpaceLabelsParams,
ConfluenceListSpaceLabelsResponse
> = {
id: 'confluence_list_space_labels',
name: 'Confluence List Space Labels',
description: 'List all labels associated with a Confluence space.',
version: '1.0.0',
oauth: {
required: true,
provider: 'confluence',
},
params: {
accessToken: {
type: 'string',
required: true,
visibility: 'hidden',
description: 'OAuth access token for Confluence',
},
domain: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'Your Confluence domain (e.g., yourcompany.atlassian.net)',
},
spaceId: {
type: 'string',
required: true,
visibility: 'user-or-llm',
description: 'The ID of the Confluence space to list labels from',
},
limit: {
type: 'number',
required: false,
visibility: 'user-or-llm',
description: 'Maximum number of labels to return (default: 25, max: 250)',
},
cursor: {
type: 'string',
required: false,
visibility: 'user-or-llm',
description: 'Pagination cursor from previous response',
},
cloudId: {
type: 'string',
required: false,
visibility: 'user-only',
description:
'Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain.',
},
},
request: {
url: (params: ConfluenceListSpaceLabelsParams) => {
const query = new URLSearchParams({
domain: params.domain,
accessToken: params.accessToken,
spaceId: params.spaceId,
limit: String(params.limit || 25),
})
if (params.cursor) {
query.set('cursor', params.cursor)
}
if (params.cloudId) {
query.set('cloudId', params.cloudId)
}
return `/api/tools/confluence/space-labels?${query.toString()}`
},
method: 'GET',
headers: (params: ConfluenceListSpaceLabelsParams) => ({
Accept: 'application/json',
Authorization: `Bearer ${params.accessToken}`,
}),
},
transformResponse: async (response: Response) => {
const data = await response.json()
return {
success: true,
output: {
ts: new Date().toISOString(),
spaceId: data.spaceId ?? '',
labels: data.labels ?? [],
nextCursor: data.nextCursor ?? null,
},
}
},
outputs: {
ts: TIMESTAMP_OUTPUT,
spaceId: { type: 'string', description: 'ID of the space' },
labels: {
type: 'array',
description: 'Array of labels on the space',
items: {
type: 'object',
properties: LABEL_ITEM_PROPERTIES,
},
},
nextCursor: {
type: 'string',
description: 'Cursor for fetching the next page of results',
optional: true,
},
},
}

View File

@@ -118,10 +118,13 @@ import {
confluenceCreatePageTool,
confluenceDeleteAttachmentTool,
confluenceDeleteCommentTool,
confluenceDeleteLabelTool,
confluenceDeletePagePropertyTool,
confluenceDeletePageTool,
confluenceGetBlogPostTool,
confluenceGetPageAncestorsTool,
confluenceGetPageChildrenTool,
confluenceGetPagesByLabelTool,
confluenceGetPageVersionTool,
confluenceGetSpaceTool,
confluenceListAttachmentsTool,
@@ -132,6 +135,7 @@ import {
confluenceListPagePropertiesTool,
confluenceListPagesInSpaceTool,
confluenceListPageVersionsTool,
confluenceListSpaceLabelsTool,
confluenceListSpacesTool,
confluenceRetrieveTool,
confluenceSearchInSpaceTool,
@@ -2667,6 +2671,10 @@ export const tools: Record<string, ToolConfig> = {
confluence_delete_attachment: confluenceDeleteAttachmentTool,
confluence_list_labels: confluenceListLabelsTool,
confluence_add_label: confluenceAddLabelTool,
confluence_get_pages_by_label: confluenceGetPagesByLabelTool,
confluence_list_space_labels: confluenceListSpaceLabelsTool,
confluence_delete_label: confluenceDeleteLabelTool,
confluence_delete_page_property: confluenceDeletePagePropertyTool,
confluence_get_space: confluenceGetSpaceTool,
confluence_list_spaces: confluenceListSpacesTool,
cursor_list_agents: cursorListAgentsTool,

View File

@@ -23,7 +23,12 @@ export const SYSTEM_SUBBLOCK_IDS: string[] = [
* with default values from the trigger definition on load, which aren't present in
* the deployed state, causing false positive change detection.
*/
export const TRIGGER_RUNTIME_SUBBLOCK_IDS: string[] = ['webhookId', 'triggerPath', 'triggerConfig']
export const TRIGGER_RUNTIME_SUBBLOCK_IDS: string[] = [
'webhookId',
'triggerPath',
'triggerConfig',
'triggerId',
]
/**
* Maximum number of consecutive failures before a trigger (schedule/webhook) is auto-disabled.

View File

@@ -0,0 +1,41 @@
CREATE TABLE "referral_attribution" (
"id" text PRIMARY KEY NOT NULL,
"user_id" text NOT NULL,
"organization_id" text,
"campaign_id" text,
"utm_source" text,
"utm_medium" text,
"utm_campaign" text,
"utm_content" text,
"referrer_url" text,
"landing_page" text,
"bonus_credit_amount" numeric DEFAULT '0' NOT NULL,
"created_at" timestamp DEFAULT now() NOT NULL,
CONSTRAINT "referral_attribution_user_id_unique" UNIQUE("user_id")
);
--> statement-breakpoint
CREATE TABLE "referral_campaigns" (
"id" text PRIMARY KEY NOT NULL,
"name" text NOT NULL,
"code" text,
"utm_source" text,
"utm_medium" text,
"utm_campaign" text,
"utm_content" text,
"bonus_credit_amount" numeric NOT NULL,
"is_active" boolean DEFAULT true NOT NULL,
"created_at" timestamp DEFAULT now() NOT NULL,
"updated_at" timestamp DEFAULT now() NOT NULL,
CONSTRAINT "referral_campaigns_code_unique" UNIQUE("code")
);
--> statement-breakpoint
ALTER TABLE "referral_attribution" ADD CONSTRAINT "referral_attribution_user_id_user_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."user"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "referral_attribution" ADD CONSTRAINT "referral_attribution_organization_id_organization_id_fk" FOREIGN KEY ("organization_id") REFERENCES "public"."organization"("id") ON DELETE set null ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "referral_attribution" ADD CONSTRAINT "referral_attribution_campaign_id_referral_campaigns_id_fk" FOREIGN KEY ("campaign_id") REFERENCES "public"."referral_campaigns"("id") ON DELETE set null ON UPDATE no action;--> statement-breakpoint
CREATE INDEX "referral_attribution_user_id_idx" ON "referral_attribution" USING btree ("user_id");--> statement-breakpoint
CREATE UNIQUE INDEX "referral_attribution_org_unique_idx" ON "referral_attribution" USING btree ("organization_id") WHERE "referral_attribution"."organization_id" IS NOT NULL;--> statement-breakpoint
CREATE INDEX "referral_attribution_campaign_id_idx" ON "referral_attribution" USING btree ("campaign_id");--> statement-breakpoint
CREATE INDEX "referral_attribution_utm_campaign_idx" ON "referral_attribution" USING btree ("utm_campaign");--> statement-breakpoint
CREATE INDEX "referral_attribution_utm_content_idx" ON "referral_attribution" USING btree ("utm_content");--> statement-breakpoint
CREATE INDEX "referral_attribution_created_at_idx" ON "referral_attribution" USING btree ("created_at");--> statement-breakpoint
CREATE INDEX "referral_campaigns_active_idx" ON "referral_campaigns" USING btree ("is_active");

File diff suppressed because it is too large


View File

@@ -1072,6 +1072,13 @@
"when": 1770410282842,
"tag": "0153_complete_arclight",
"breakpoints": true
},
{
"idx": 154,
"version": "7",
"when": 1770869658697,
"tag": "0154_bumpy_living_mummy",
"breakpoints": true
}
]
}
}

View File

@@ -726,6 +726,61 @@ export const userStats = pgTable('user_stats', {
billingBlockedReason: billingBlockedReasonEnum('billing_blocked_reason'),
})
export const referralCampaigns = pgTable(
'referral_campaigns',
{
id: text('id').primaryKey(),
name: text('name').notNull(),
code: text('code').unique(),
utmSource: text('utm_source'),
utmMedium: text('utm_medium'),
utmCampaign: text('utm_campaign'),
utmContent: text('utm_content'),
bonusCreditAmount: decimal('bonus_credit_amount').notNull(),
isActive: boolean('is_active').notNull().default(true),
createdAt: timestamp('created_at').notNull().defaultNow(),
updatedAt: timestamp('updated_at').notNull().defaultNow(),
},
(table) => ({
activeIdx: index('referral_campaigns_active_idx').on(table.isActive),
})
)
export const referralAttribution = pgTable(
'referral_attribution',
{
id: text('id').primaryKey(),
userId: text('user_id')
.notNull()
.references(() => user.id, { onDelete: 'cascade' })
.unique(),
organizationId: text('organization_id').references(() => organization.id, {
onDelete: 'set null',
}),
campaignId: text('campaign_id').references(() => referralCampaigns.id, {
onDelete: 'set null',
}),
utmSource: text('utm_source'),
utmMedium: text('utm_medium'),
utmCampaign: text('utm_campaign'),
utmContent: text('utm_content'),
referrerUrl: text('referrer_url'),
landingPage: text('landing_page'),
bonusCreditAmount: decimal('bonus_credit_amount').notNull().default('0'),
createdAt: timestamp('created_at').notNull().defaultNow(),
},
(table) => ({
userIdIdx: index('referral_attribution_user_id_idx').on(table.userId),
orgUniqueIdx: uniqueIndex('referral_attribution_org_unique_idx')
.on(table.organizationId)
.where(sql`${table.organizationId} IS NOT NULL`),
campaignIdIdx: index('referral_attribution_campaign_id_idx').on(table.campaignId),
utmCampaignIdx: index('referral_attribution_utm_campaign_idx').on(table.utmCampaign),
utmContentIdx: index('referral_attribution_utm_content_idx').on(table.utmContent),
createdAtIdx: index('referral_attribution_created_at_idx').on(table.createdAt),
})
)
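// (Sketch, not part of this schema file.) An illustrative attribution report,
// assuming `count`, `eq`, and `sum` are imported from 'drizzle-orm' and `db`
// is a configured drizzle instance; the report shape is invented.
async function campaignSummary(db: any) {
  // Signups and total bonus credit granted per active campaign.
  return db
    .select({
      campaign: referralCampaigns.name,
      signups: count(referralAttribution.id),
      totalBonus: sum(referralAttribution.bonusCreditAmount),
    })
    .from(referralAttribution)
    .innerJoin(referralCampaigns, eq(referralAttribution.campaignId, referralCampaigns.id))
    .where(eq(referralCampaigns.isActive, true))
    .groupBy(referralCampaigns.name)
}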
export const customTools = pgTable(
'custom_tools',
{