Compare commits

...

22 Commits

Author SHA1 Message Date
Waleed Latif
4846f6c60d v0.3.37: azure OCR api key, wand SSE, CRON helm 2025-08-22 14:54:36 -07:00
Vikhyath Mondreti
be810013c7 feat(native-bg-tasks): support webhooks and async workflow executions without trigger.dev (#1106)
* feat(native-bg-tasks): support webhooks and async workflow executions without trigger

* fix tests

* fix env var defaults and revert async workflow execution to always use trigger

* fix UI for hiding async

* hide entire toggle
2025-08-22 14:43:21 -07:00
Waleed Latif
1ee4263e60 feat(helm): added CRON jobs to helm charts (#1107) 2025-08-22 14:29:44 -07:00
Waleed Latif
60c4668682 fix(naming): prevent identical normalized block names (#1105) 2025-08-22 13:20:45 -07:00
Emir Karabeg
a268fb7c04 fix(chat-deploy): dark mode ui (#1101) 2025-08-22 12:23:11 -07:00
Waleed Latif
6c606750f5 improvement(signup): modify signup and login pages to not show social sign in when not configured, increase logo size (#1103) 2025-08-22 12:15:59 -07:00
Waleed Latif
e13adab14f improvement(wand): upgrade wand to use SSE (#1100)
* improvement(wand): upgrade wand to use SSE

* fix(ocr-azure): added OCR_AZURE_API_KEY envvar (#1102)

* make wand identical to chat panel
2025-08-22 12:01:16 -07:00
Waleed Latif
44bc12b474 fix(ocr-azure): added OCR_AZURE_API_KEY envvar (#1102) 2025-08-22 11:49:56 -07:00
Waleed Latif
991f0442e9 v0.3.36: workflow block logs, whitelabeling configurability, session provider 2025-08-21 21:44:28 -07:00
Waleed Latif
2ebfb576ae fix(day-picker): remove unused react-day-picker (#1094) 2025-08-21 21:29:20 -07:00
Vikhyath Mondreti
11a7be54f2 fix circular dependsOn for Jira manualIssueKey 2025-08-21 21:21:19 -07:00
Vikhyath Mondreti
f5219d03c3 fix(ms-oauth): oauth edge cases (#1093) 2025-08-21 21:19:11 -07:00
Waleed Latif
f0643e01b4 fix(logs): make child workflow span errors the same as root level workflow errors (#1092) 2025-08-21 21:17:09 -07:00
Adam Gough
77b0c5b9ed Fix(excel-range): fixed excel range (#1088)
* added auto range

* lint

* removed any

* utils file

---------

Co-authored-by: Adam Gough <adamgough@Mac.attlocal.net>
2025-08-21 20:04:20 -07:00
Adam Gough
9dbd44e555 fix(webhook-payloads): fixed the variable resolution in webhooks (#1019)
* telegram webhook fix

* changed payloads

* fix github dropdown

* reverted github changes

* fixed github var

* bun run lint

* edited airtable payload and webhook deletion

* Revert bun.lock and package.json to upstream/staging

* cleaned up

* resolving more comments

* resolved comments, updated trigger

* cleaned up, resolved comments

* lint

---------

Co-authored-by: Adam Gough <adamgough@Mac.attlocal.net>
2025-08-21 20:03:04 -07:00
Waleed Latif
9ea9f2d52e improvement(log-level): make log level configurable via envvar (#1091) 2025-08-21 19:40:47 -07:00
Waleed Latif
4cd707fadb improvement(emails): fixed email subjects to use provided brand name (#1090)
* improvement(emails): fixed email subjects to use provided brand name

* update manifest to use dynamic background & theme color
2025-08-21 19:34:05 -07:00
Waleed Latif
f0b07428bc feat(theme): added custom envvars for themes (#1089)
* feat(theme): added custom envvars for themes

* add regex
2025-08-21 19:27:56 -07:00
Vikhyath Mondreti
8c9e182e10 fix(infinite-get-session): pass session once per tree using session provider + multiple fixes (#1085)
* fix(infinite-get-session): pass session using session provider

* prevent auto refetch

* fix typing:

* fix types

* fix

* fix oauth token for microsoft file selector

* fix start block required error
2025-08-21 18:45:15 -07:00
Waleed Latif
33dd59f7a7 fix(db-consts): make the migrations image fully standalone by adding db consts (#1087) 2025-08-21 17:25:35 -07:00
Waleed Latif
53ee9f99db fix(templates): added option to delete/keep templates when deleting workspace, updated template modal, sidebar code cleanup (#1086)
* feat(templates): added in the ability to keep/remove templates when deleting workspace

* code cleanup in sidebar

* add the ability to edit existing templates

* updated template modal

* fix build

* revert bun.lock

* add template logic to workflow deletion as well

* add ability to delete templates

* add owner/admin enforcement to modify or delete templates
2025-08-21 17:11:22 -07:00
Vikhyath Mondreti
0f2a125eae improvement(block-error-logs): workflow in workflow (#1084)
* improvement(add-block-logs): workflow in workflow

* fix lint
2025-08-21 15:01:30 -07:00
86 changed files with 9545 additions and 1839 deletions

View File

@@ -3,7 +3,6 @@
import { useEffect, useState } from 'react'
import { GithubIcon, GoogleIcon } from '@/components/icons'
import { Button } from '@/components/ui/button'
import { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger } from '@/components/ui/tooltip'
import { client } from '@/lib/auth-client'
interface SocialLoginButtonsProps {
@@ -114,58 +113,16 @@ export function SocialLoginButtons({
</Button>
)
const renderGithubButton = () => {
if (githubAvailable) return githubButton
const hasAnyOAuthProvider = githubAvailable || googleAvailable
return (
<TooltipProvider>
<Tooltip>
<TooltipTrigger asChild>
<div>{githubButton}</div>
</TooltipTrigger>
<TooltipContent className='border-neutral-700 bg-neutral-800 text-white'>
<p>
GitHub login requires OAuth credentials to be configured. Add the following
environment variables:
</p>
<ul className='mt-2 space-y-1 text-neutral-300 text-xs'>
<li> GITHUB_CLIENT_ID</li>
<li> GITHUB_CLIENT_SECRET</li>
</ul>
</TooltipContent>
</Tooltip>
</TooltipProvider>
)
}
const renderGoogleButton = () => {
if (googleAvailable) return googleButton
return (
<TooltipProvider>
<Tooltip>
<TooltipTrigger asChild>
<div>{googleButton}</div>
</TooltipTrigger>
<TooltipContent className='border-neutral-700 bg-neutral-800 text-white'>
<p>
Google login requires OAuth credentials to be configured. Add the following
environment variables:
</p>
<ul className='mt-2 space-y-1 text-neutral-300 text-xs'>
<li> GOOGLE_CLIENT_ID</li>
<li> GOOGLE_CLIENT_SECRET</li>
</ul>
</TooltipContent>
</Tooltip>
</TooltipProvider>
)
if (!hasAnyOAuthProvider) {
return null
}
return (
<div className='grid gap-3'>
{renderGithubButton()}
{renderGoogleButton()}
{githubAvailable && githubButton}
{googleAvailable && googleButton}
</div>
)
}

View File

@@ -28,12 +28,12 @@ export default function AuthLayout({ children }: { children: React.ReactNode })
<img
src={brand.logoUrl}
alt={`${brand.name} Logo`}
width={42}
height={42}
className='h-[42px] w-[42px] object-contain'
width={56}
height={56}
className='h-[56px] w-[56px] object-contain'
/>
) : (
<Image src='/sim.svg' alt={`${brand.name} Logo`} width={42} height={42} />
<Image src='/sim.svg' alt={`${brand.name} Logo`} width={56} height={56} />
)}
</Link>
</div>

View File

@@ -366,11 +366,13 @@ export default function LoginPage({
callbackURL={callbackUrl}
/>
<div className='relative mt-2 py-4'>
<div className='absolute inset-0 flex items-center'>
<div className='w-full border-neutral-700/50 border-t' />
{(githubAvailable || googleAvailable) && (
<div className='relative mt-2 py-4'>
<div className='absolute inset-0 flex items-center'>
<div className='w-full border-neutral-700/50 border-t' />
</div>
</div>
</div>
)}
<form onSubmit={onSubmit} className='space-y-5'>
<div className='space-y-4'>

View File

@@ -381,11 +381,13 @@ function SignupFormContent({
isProduction={isProduction}
/>
<div className='relative mt-2 py-4'>
<div className='absolute inset-0 flex items-center'>
<div className='w-full border-neutral-700/50 border-t' />
{(githubAvailable || googleAvailable) && (
<div className='relative mt-2 py-4'>
<div className='absolute inset-0 flex items-center'>
<div className='w-full border-neutral-700/50 border-t' />
</div>
</div>
</div>
)}
<form onSubmit={onSubmit} className='space-y-5'>
<div className='space-y-4'>

View File

@@ -354,6 +354,18 @@ export function mockExecutionDependencies() {
}))
}
/**
* Mock Trigger.dev SDK (tasks.trigger and task factory) for tests that import background modules
*/
export function mockTriggerDevSdk() {
vi.mock('@trigger.dev/sdk', () => ({
tasks: {
trigger: vi.fn().mockResolvedValue({ id: 'mock-task-id' }),
},
task: vi.fn().mockReturnValue({}),
}))
}
export function mockWorkflowAccessValidation(shouldSucceed = true) {
if (shouldSucceed) {
vi.mock('@/app/api/workflows/middleware', () => ({

View File

@@ -4,8 +4,9 @@ import { auth } from '@/lib/auth'
export async function POST() {
try {
const hdrs = await headers()
const response = await auth.api.generateOneTimeToken({
headers: await headers(),
headers: hdrs,
})
if (!response) {
@@ -14,7 +15,6 @@ export async function POST() {
return NextResponse.json({ token: response.token })
} catch (error) {
console.error('Error generating one-time token:', error)
return NextResponse.json({ error: 'Failed to generate token' }, { status: 500 })
}
}

View File

@@ -1,9 +1,11 @@
import { eq, sql } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console/logger'
import { hasAdminPermission } from '@/lib/permissions/utils'
import { db } from '@/db'
import { templates } from '@/db/schema'
import { templates, workflow } from '@/db/schema'
const logger = createLogger('TemplateByIdAPI')
@@ -62,3 +64,153 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}
const updateTemplateSchema = z.object({
name: z.string().min(1).max(100),
description: z.string().min(1).max(500),
author: z.string().min(1).max(100),
category: z.string().min(1),
icon: z.string().min(1),
color: z.string().regex(/^#[0-9A-F]{6}$/i),
state: z.any().optional(), // Workflow state
})
// PUT /api/templates/[id] - Update a template
export async function PUT(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const requestId = crypto.randomUUID().slice(0, 8)
const { id } = await params
try {
const session = await getSession()
if (!session?.user?.id) {
logger.warn(`[${requestId}] Unauthorized template update attempt for ID: ${id}`)
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const body = await request.json()
const validationResult = updateTemplateSchema.safeParse(body)
if (!validationResult.success) {
logger.warn(`[${requestId}] Invalid template data for update: ${id}`, validationResult.error)
return NextResponse.json(
{ error: 'Invalid template data', details: validationResult.error.errors },
{ status: 400 }
)
}
const { name, description, author, category, icon, color, state } = validationResult.data
// Check if template exists
const existingTemplate = await db.select().from(templates).where(eq(templates.id, id)).limit(1)
if (existingTemplate.length === 0) {
logger.warn(`[${requestId}] Template not found for update: ${id}`)
return NextResponse.json({ error: 'Template not found' }, { status: 404 })
}
// Permission: template owner OR admin of the workflow's workspace (if any)
let canUpdate = existingTemplate[0].userId === session.user.id
if (!canUpdate && existingTemplate[0].workflowId) {
const wfRows = await db
.select({ workspaceId: workflow.workspaceId })
.from(workflow)
.where(eq(workflow.id, existingTemplate[0].workflowId))
.limit(1)
const workspaceId = wfRows[0]?.workspaceId as string | null | undefined
if (workspaceId) {
const hasAdmin = await hasAdminPermission(session.user.id, workspaceId)
if (hasAdmin) canUpdate = true
}
}
if (!canUpdate) {
logger.warn(`[${requestId}] User denied permission to update template ${id}`)
return NextResponse.json({ error: 'Access denied' }, { status: 403 })
}
// Update the template
const updatedTemplate = await db
.update(templates)
.set({
name,
description,
author,
category,
icon,
color,
...(state && { state }),
updatedAt: new Date(),
})
.where(eq(templates.id, id))
.returning()
logger.info(`[${requestId}] Successfully updated template: ${id}`)
return NextResponse.json({
data: updatedTemplate[0],
message: 'Template updated successfully',
})
} catch (error: any) {
logger.error(`[${requestId}] Error updating template: ${id}`, error)
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}
// DELETE /api/templates/[id] - Delete a template
export async function DELETE(
request: NextRequest,
{ params }: { params: Promise<{ id: string }> }
) {
const requestId = crypto.randomUUID().slice(0, 8)
const { id } = await params
try {
const session = await getSession()
if (!session?.user?.id) {
logger.warn(`[${requestId}] Unauthorized template delete attempt for ID: ${id}`)
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
// Fetch template
const existing = await db.select().from(templates).where(eq(templates.id, id)).limit(1)
if (existing.length === 0) {
logger.warn(`[${requestId}] Template not found for delete: ${id}`)
return NextResponse.json({ error: 'Template not found' }, { status: 404 })
}
const template = existing[0]
// Permission: owner or admin of the workflow's workspace (if any)
let canDelete = template.userId === session.user.id
if (!canDelete && template.workflowId) {
// Look up workflow to get workspaceId
const wfRows = await db
.select({ workspaceId: workflow.workspaceId })
.from(workflow)
.where(eq(workflow.id, template.workflowId))
.limit(1)
const workspaceId = wfRows[0]?.workspaceId as string | null | undefined
if (workspaceId) {
const hasAdmin = await hasAdminPermission(session.user.id, workspaceId)
if (hasAdmin) canDelete = true
}
}
if (!canDelete) {
logger.warn(`[${requestId}] User denied permission to delete template ${id}`)
return NextResponse.json({ error: 'Access denied' }, { status: 403 })
}
await db.delete(templates).where(eq(templates.id, id))
logger.info(`[${requestId}] Deleted template: ${id}`)
return NextResponse.json({ success: true })
} catch (error: any) {
logger.error(`[${requestId}] Error deleting template: ${id}`, error)
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}
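
Example usage (illustrative sketch, not part of the diff): the new PUT and DELETE handlers can be called with plain fetch. The field values below are placeholders; the endpoint paths and response shapes match the handlers above.

// Sketch: update, then delete, a published template via the new endpoints.
async function updateTemplate(id: string) {
  const res = await fetch(`/api/templates/${id}`, {
    method: 'PUT',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      name: 'Lead enrichment', // placeholder values
      description: 'Enriches inbound leads with firmographic data',
      author: 'Acme',
      category: 'marketing',
      icon: 'FileText',
      color: '#3972F6',
    }),
  })
  if (!res.ok) throw new Error((await res.json()).error)
  return res.json() // { data, message: 'Template updated successfully' }
}

async function deleteTemplate(id: string) {
  const res = await fetch(`/api/templates/${id}`, { method: 'DELETE' })
  if (!res.ok) throw new Error((await res.json()).error)
  return res.json() // { success: true }
}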

View File

@@ -77,6 +77,7 @@ const QueryParamsSchema = z.object({
limit: z.coerce.number().optional().default(50),
offset: z.coerce.number().optional().default(0),
search: z.string().optional(),
workflowId: z.string().optional(),
})
// GET /api/templates - Retrieve templates
@@ -111,6 +112,11 @@ export async function GET(request: NextRequest) {
)
}
// Apply workflow filter if provided (for getting template by workflow)
if (params.workflowId) {
conditions.push(eq(templates.workflowId, params.workflowId))
}
// Combine conditions
const whereCondition = conditions.length > 0 ? and(...conditions) : undefined

View File

@@ -95,12 +95,19 @@ export async function POST(req: NextRequest) {
{
stream,
historyLength: history.length,
endpoint: useWandAzure ? azureEndpoint : 'api.openai.com',
model: useWandAzure ? wandModelName : 'gpt-4o',
apiVersion: useWandAzure ? azureApiVersion : 'N/A',
}
)
// For streaming responses
if (stream) {
try {
logger.debug(
`[${requestId}] Starting streaming request to ${useWandAzure ? 'Azure OpenAI' : 'OpenAI'}`
)
const streamCompletion = await client.chat.completions.create({
model: useWandAzure ? wandModelName : 'gpt-4o',
messages: messages,
@@ -109,6 +116,8 @@ export async function POST(req: NextRequest) {
stream: true,
})
logger.debug(`[${requestId}] Stream connection established successfully`)
return new Response(
new ReadableStream({
async start(controller) {
@@ -118,21 +127,23 @@ export async function POST(req: NextRequest) {
for await (const chunk of streamCompletion) {
const content = chunk.choices[0]?.delta?.content || ''
if (content) {
// Use the same format as codegen API for consistency
// Use SSE format identical to chat streaming
controller.enqueue(
encoder.encode(`${JSON.stringify({ chunk: content, done: false })}\n`)
encoder.encode(`data: ${JSON.stringify({ chunk: content })}\n\n`)
)
}
}
// Send completion signal
controller.enqueue(encoder.encode(`${JSON.stringify({ chunk: '', done: true })}\n`))
// Send completion signal in SSE format
controller.enqueue(encoder.encode(`data: ${JSON.stringify({ done: true })}\n\n`))
controller.close()
logger.info(`[${requestId}] Wand generation streaming completed`)
} catch (streamError: any) {
logger.error(`[${requestId}] Streaming error`, { error: streamError.message })
controller.enqueue(
encoder.encode(`${JSON.stringify({ error: 'Streaming failed', done: true })}\n`)
encoder.encode(
`data: ${JSON.stringify({ error: 'Streaming failed', done: true })}\n\n`
)
)
controller.close()
}
@@ -140,9 +151,10 @@ export async function POST(req: NextRequest) {
}),
{
headers: {
'Content-Type': 'text/plain',
'Cache-Control': 'no-cache, no-transform',
'Content-Type': 'text/event-stream',
'Cache-Control': 'no-cache',
Connection: 'keep-alive',
'X-Accel-Buffering': 'no',
},
}
)
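
For reference, a client can consume the new wand stream with a sketch like the one below. The endpoint path and request body fields are assumptions for illustration; only the data: {...} framing and the chunk/done/error payloads are taken from the diff above.

// Minimal sketch of reading the wand SSE stream in the browser.
async function streamWand(prompt: string, onChunk: (text: string) => void) {
  const res = await fetch('/api/wand', { // path assumed for illustration
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ prompt, stream: true }),
  })
  const reader = res.body!.getReader()
  const decoder = new TextDecoder()
  let buffer = ''
  while (true) {
    const { done, value } = await reader.read()
    if (done) break
    buffer += decoder.decode(value, { stream: true })
    const events = buffer.split('\n\n') // SSE events are blank-line separated
    buffer = events.pop() ?? ''
    for (const event of events) {
      const line = event.split('\n').find((l) => l.startsWith('data: '))
      if (!line) continue
      const payload = JSON.parse(line.slice('data: '.length))
      if (payload.chunk) onChunk(payload.chunk)
      if (payload.done || payload.error) return
    }
  }
}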

View File

@@ -1,8 +1,10 @@
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { getUserEntityPermissions } from '@/lib/permissions/utils'
import { getOAuthToken } from '@/app/api/auth/oauth/utils'
import { db } from '@/db'
import { webhook, workflow } from '@/db/schema'
@@ -242,6 +244,167 @@ export async function DELETE(
const foundWebhook = webhookData.webhook
// If it's an Airtable webhook, delete it from Airtable first
if (foundWebhook.provider === 'airtable') {
try {
const { baseId, externalId } = (foundWebhook.providerConfig || {}) as {
baseId?: string
externalId?: string
}
if (!baseId) {
logger.warn(`[${requestId}] Missing baseId for Airtable webhook deletion.`, {
webhookId: id,
})
return NextResponse.json(
{ error: 'Missing baseId for Airtable webhook deletion' },
{ status: 400 }
)
}
// Get access token for the workflow owner
const userIdForToken = webhookData.workflow.userId
const accessToken = await getOAuthToken(userIdForToken, 'airtable')
if (!accessToken) {
logger.warn(
`[${requestId}] Could not retrieve Airtable access token for user ${userIdForToken}. Cannot delete webhook in Airtable.`,
{ webhookId: id }
)
return NextResponse.json(
{ error: 'Airtable access token not found for webhook deletion' },
{ status: 401 }
)
}
// Resolve externalId if missing by listing webhooks and matching our notificationUrl
let resolvedExternalId: string | undefined = externalId
if (!resolvedExternalId) {
try {
const requestOrigin = new URL(request.url).origin
const effectiveOrigin = requestOrigin.includes('localhost')
? env.NEXT_PUBLIC_APP_URL || requestOrigin
: requestOrigin
const expectedNotificationUrl = `${effectiveOrigin}/api/webhooks/trigger/${foundWebhook.path}`
const listUrl = `https://api.airtable.com/v0/bases/${baseId}/webhooks`
const listResp = await fetch(listUrl, {
headers: {
Authorization: `Bearer ${accessToken}`,
},
})
const listBody = await listResp.json().catch(() => null)
if (listResp.ok && listBody && Array.isArray(listBody.webhooks)) {
const match = listBody.webhooks.find((w: any) => {
const url: string | undefined = w?.notificationUrl
if (!url) return false
// Prefer exact match; fallback to suffix match to handle origin/host remaps
return (
url === expectedNotificationUrl ||
url.endsWith(`/api/webhooks/trigger/${foundWebhook.path}`)
)
})
if (match?.id) {
resolvedExternalId = match.id as string
// Persist resolved externalId for future operations
try {
await db
.update(webhook)
.set({
providerConfig: {
...(foundWebhook.providerConfig || {}),
externalId: resolvedExternalId,
},
updatedAt: new Date(),
})
.where(eq(webhook.id, id))
} catch {
// non-fatal persistence error
}
logger.info(`[${requestId}] Resolved Airtable externalId by listing webhooks`, {
baseId,
externalId: resolvedExternalId,
})
} else {
logger.warn(`[${requestId}] Could not resolve Airtable externalId from list`, {
baseId,
expectedNotificationUrl,
})
}
} else {
logger.warn(`[${requestId}] Failed to list Airtable webhooks to resolve externalId`, {
baseId,
status: listResp.status,
body: listBody,
})
}
} catch (e: any) {
logger.warn(`[${requestId}] Error attempting to resolve Airtable externalId`, {
error: e?.message,
})
}
}
// If still not resolvable, skip remote deletion but proceed with local delete
if (!resolvedExternalId) {
logger.info(
`[${requestId}] Airtable externalId not found; skipping remote deletion and proceeding to remove local record`,
{ baseId }
)
}
if (resolvedExternalId) {
const airtableDeleteUrl = `https://api.airtable.com/v0/bases/${baseId}/webhooks/${resolvedExternalId}`
const airtableResponse = await fetch(airtableDeleteUrl, {
method: 'DELETE',
headers: {
Authorization: `Bearer ${accessToken}`,
},
})
// Attempt to parse error body for better diagnostics
if (!airtableResponse.ok) {
let responseBody: any = null
try {
responseBody = await airtableResponse.json()
} catch {
// ignore parse errors
}
logger.error(
`[${requestId}] Failed to delete Airtable webhook in Airtable. Status: ${airtableResponse.status}`,
{ baseId, externalId: resolvedExternalId, response: responseBody }
)
return NextResponse.json(
{
error: 'Failed to delete webhook from Airtable',
details:
(responseBody && (responseBody.error?.message || responseBody.error)) ||
`Status ${airtableResponse.status}`,
},
{ status: 500 }
)
}
logger.info(`[${requestId}] Successfully deleted Airtable webhook in Airtable`, {
baseId,
externalId: resolvedExternalId,
})
}
} catch (error: any) {
logger.error(`[${requestId}] Error deleting Airtable webhook`, {
webhookId: id,
error: error.message,
stack: error.stack,
})
return NextResponse.json(
{ error: 'Failed to delete webhook from Airtable', details: error.message },
{ status: 500 }
)
}
}
// If it's a Telegram webhook, delete it from Telegram first
if (foundWebhook.provider === 'telegram') {
try {

View File

@@ -5,7 +5,22 @@ import { NextRequest } from 'next/server'
* @vitest-environment node
*/
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import { createMockRequest, mockExecutionDependencies } from '@/app/api/__test-utils__/utils'
import {
createMockRequest,
mockExecutionDependencies,
mockTriggerDevSdk,
} from '@/app/api/__test-utils__/utils'
// Prefer mocking the background module to avoid loading Trigger.dev at all during tests
vi.mock('@/background/webhook-execution', () => ({
executeWebhookJob: vi.fn().mockResolvedValue({
success: true,
workflowId: 'test-workflow-id',
executionId: 'test-exec-id',
output: {},
executedAt: new Date().toISOString(),
}),
}))
const hasProcessedMessageMock = vi.fn().mockResolvedValue(false)
const markMessageAsProcessedMock = vi.fn().mockResolvedValue(true)
@@ -111,6 +126,7 @@ describe('Webhook Trigger API Route', () => {
vi.resetAllMocks()
mockExecutionDependencies()
mockTriggerDevSdk()
vi.doMock('@/services/queue', () => ({
RateLimiter: vi.fn().mockImplementation(() => ({
@@ -309,11 +325,7 @@ describe('Webhook Trigger API Route', () => {
const req = createMockRequest('POST', { event: 'test', id: 'test-123' })
const params = Promise.resolve({ path: 'test-path' })
vi.doMock('@trigger.dev/sdk', () => ({
tasks: {
trigger: vi.fn().mockResolvedValue({ id: 'mock-task-id' }),
},
}))
mockTriggerDevSdk()
const { POST } = await import('@/app/api/webhooks/trigger/[path]/route')
const response = await POST(req, { params })
@@ -339,11 +351,7 @@ describe('Webhook Trigger API Route', () => {
const req = createMockRequest('POST', { event: 'bearer.test' }, headers)
const params = Promise.resolve({ path: 'test-path' })
vi.doMock('@trigger.dev/sdk', () => ({
tasks: {
trigger: vi.fn().mockResolvedValue({ id: 'mock-task-id' }),
},
}))
mockTriggerDevSdk()
const { POST } = await import('@/app/api/webhooks/trigger/[path]/route')
const response = await POST(req, { params })
@@ -369,11 +377,7 @@ describe('Webhook Trigger API Route', () => {
const req = createMockRequest('POST', { event: 'custom.header.test' }, headers)
const params = Promise.resolve({ path: 'test-path' })
vi.doMock('@trigger.dev/sdk', () => ({
tasks: {
trigger: vi.fn().mockResolvedValue({ id: 'mock-task-id' }),
},
}))
mockTriggerDevSdk()
const { POST } = await import('@/app/api/webhooks/trigger/[path]/route')
const response = await POST(req, { params })

View File

@@ -2,12 +2,14 @@ import { tasks } from '@trigger.dev/sdk'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { checkServerSideUsageLimits } from '@/lib/billing'
import { env, isTruthy } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import {
handleSlackChallenge,
handleWhatsAppVerification,
validateMicrosoftTeamsSignature,
} from '@/lib/webhooks/utils'
import { executeWebhookJob } from '@/background/webhook-execution'
import { db } from '@/db'
import { subscription, webhook, workflow } from '@/db/schema'
import { RateLimiter } from '@/services/queue'
@@ -17,6 +19,7 @@ const logger = createLogger('WebhookTriggerAPI')
export const dynamic = 'force-dynamic'
export const maxDuration = 300
export const runtime = 'nodejs'
/**
* Webhook Verification Handler (GET)
@@ -330,10 +333,9 @@ export async function POST(
// Continue processing - better to risk usage limit bypass than fail webhook
}
// --- PHASE 5: Queue webhook execution via trigger.dev ---
// --- PHASE 5: Queue webhook execution (trigger.dev or direct based on env) ---
try {
// Queue the webhook execution task
const handle = await tasks.trigger('webhook-execution', {
const payload = {
webhookId: foundWebhook.id,
workflowId: foundWorkflow.id,
userId: foundWorkflow.userId,
@@ -342,11 +344,24 @@ export async function POST(
headers: Object.fromEntries(request.headers.entries()),
path,
blockId: foundWebhook.blockId,
})
}
logger.info(
`[${requestId}] Queued webhook execution task ${handle.id} for ${foundWebhook.provider} webhook`
)
const useTrigger = isTruthy(env.TRIGGER_DEV_ENABLED)
if (useTrigger) {
const handle = await tasks.trigger('webhook-execution', payload)
logger.info(
`[${requestId}] Queued webhook execution task ${handle.id} for ${foundWebhook.provider} webhook`
)
} else {
// Fire-and-forget direct execution to avoid blocking webhook response
void executeWebhookJob(payload).catch((error) => {
logger.error(`[${requestId}] Direct webhook execution failed`, error)
})
logger.info(
`[${requestId}] Queued direct webhook execution for ${foundWebhook.provider} webhook (Trigger.dev disabled)`
)
}
// Return immediate acknowledgment with provider-specific format
if (foundWebhook.provider === 'microsoftteams') {
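
Aside: the Trigger.dev-or-direct dispatch above boils down to the pattern sketched below. The helper name is illustrative; the imports, task name, and TRIGGER_DEV_ENABLED variable come from this diff, and the client-side counterpart NEXT_PUBLIC_TRIGGER_DEV_ENABLED gates the async UI in the deploy modal further down.

import { tasks } from '@trigger.dev/sdk'
import { env, isTruthy } from '@/lib/env'
import { executeWebhookJob } from '@/background/webhook-execution'

// Sketch: queue on Trigger.dev when enabled, otherwise run the job
// in-process, fire-and-forget, so the webhook response is not blocked.
export async function dispatchWebhookExecution(payload: Parameters<typeof executeWebhookJob>[0]) {
  if (isTruthy(env.TRIGGER_DEV_ENABLED)) {
    const handle = await tasks.trigger('webhook-execution', payload)
    return handle.id
  }
  void executeWebhookJob(payload).catch((error) => {
    console.error('Direct webhook execution failed', error)
  })
  return null
}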

View File

@@ -540,7 +540,7 @@ export async function POST(
)
}
// Rate limit passed - trigger the task
// Rate limit passed - always use Trigger.dev for async executions
const handle = await tasks.trigger('workflow-execution', {
workflowId,
userId: authenticatedUserId,

View File

@@ -8,7 +8,7 @@ import { createLogger } from '@/lib/logs/console/logger'
import { getUserEntityPermissions, hasAdminPermission } from '@/lib/permissions/utils'
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/db-helpers'
import { db } from '@/db'
import { apiKey as apiKeyTable, workflow } from '@/db/schema'
import { apiKey as apiKeyTable, templates, workflow } from '@/db/schema'
const logger = createLogger('WorkflowByIdAPI')
@@ -218,6 +218,48 @@ export async function DELETE(
return NextResponse.json({ error: 'Access denied' }, { status: 403 })
}
// Check if workflow has published templates before deletion
const { searchParams } = new URL(request.url)
const checkTemplates = searchParams.get('check-templates') === 'true'
const deleteTemplatesParam = searchParams.get('deleteTemplates')
if (checkTemplates) {
// Return template information for frontend to handle
const publishedTemplates = await db
.select()
.from(templates)
.where(eq(templates.workflowId, workflowId))
return NextResponse.json({
hasPublishedTemplates: publishedTemplates.length > 0,
count: publishedTemplates.length,
publishedTemplates: publishedTemplates.map((t) => ({
id: t.id,
name: t.name,
views: t.views,
stars: t.stars,
})),
})
}
// Handle template deletion based on user choice
if (deleteTemplatesParam !== null) {
const deleteTemplates = deleteTemplatesParam === 'delete'
if (deleteTemplates) {
// Delete all templates associated with this workflow
await db.delete(templates).where(eq(templates.workflowId, workflowId))
logger.info(`[${requestId}] Deleted templates for workflow ${workflowId}`)
} else {
// Orphan the templates (set workflowId to null)
await db
.update(templates)
.set({ workflowId: null })
.where(eq(templates.workflowId, workflowId))
logger.info(`[${requestId}] Orphaned templates for workflow ${workflowId}`)
}
}
await db.delete(workflow).where(eq(workflow.id, workflowId))
const elapsed = Date.now() - startTime
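
The deletion flow this enables, as a sketch (route path assumed to be /api/workflows/[id]; workflowId and removeTemplates are placeholders): one DELETE call with check-templates=true to inspect published templates without deleting anything, then a second DELETE with the user's choice.

// Sketch: two-step workflow deletion that respects published templates.
async function deleteWorkflowWithTemplateCheck(workflowId: string, removeTemplates: boolean) {
  const check = await fetch(`/api/workflows/${workflowId}?check-templates=true`, { method: 'DELETE' })
  const { hasPublishedTemplates, publishedTemplates } = await check.json()
  // Surface publishedTemplates to the user here when hasPublishedTemplates is true.

  // 'delete' removes the templates; any other value (e.g. 'keep') orphans them
  // by nulling their workflowId, so they stay in the marketplace.
  await fetch(`/api/workflows/${workflowId}?deleteTemplates=${removeTemplates ? 'delete' : 'keep'}`, {
    method: 'DELETE',
  })
}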

View File

@@ -1,4 +1,4 @@
import { and, eq } from 'drizzle-orm'
import { and, eq, inArray } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console/logger'
@@ -8,7 +8,7 @@ const logger = createLogger('WorkspaceByIdAPI')
import { getUserEntityPermissions } from '@/lib/permissions/utils'
import { db } from '@/db'
import { knowledgeBase, permissions, workspace } from '@/db/schema'
import { knowledgeBase, permissions, templates, workspace } from '@/db/schema'
export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const { id } = await params
@@ -19,6 +19,8 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
}
const workspaceId = id
const url = new URL(request.url)
const checkTemplates = url.searchParams.get('check-templates') === 'true'
// Check if user has any access to this workspace
const userPermission = await getUserEntityPermissions(session.user.id, 'workspace', workspaceId)
@@ -26,6 +28,42 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
return NextResponse.json({ error: 'Workspace not found or access denied' }, { status: 404 })
}
// If checking for published templates before deletion
if (checkTemplates) {
try {
// Get all workflows in this workspace
const workspaceWorkflows = await db
.select({ id: workflow.id })
.from(workflow)
.where(eq(workflow.workspaceId, workspaceId))
if (workspaceWorkflows.length === 0) {
return NextResponse.json({ hasPublishedTemplates: false, publishedTemplates: [] })
}
const workflowIds = workspaceWorkflows.map((w) => w.id)
// Check for published templates that reference these workflows
const publishedTemplates = await db
.select({
id: templates.id,
name: templates.name,
workflowId: templates.workflowId,
})
.from(templates)
.where(inArray(templates.workflowId, workflowIds))
return NextResponse.json({
hasPublishedTemplates: publishedTemplates.length > 0,
publishedTemplates,
count: publishedTemplates.length,
})
} catch (error) {
logger.error(`Error checking published templates for workspace ${workspaceId}:`, error)
return NextResponse.json({ error: 'Failed to check published templates' }, { status: 500 })
}
}
// Get workspace details
const workspaceDetails = await db
.select()
@@ -108,6 +146,8 @@ export async function DELETE(
}
const workspaceId = id
const body = await request.json().catch(() => ({}))
const { deleteTemplates = false } = body // User's choice: false = keep templates (recommended), true = delete templates
// Check if user has admin permissions to delete workspace
const userPermission = await getUserEntityPermissions(session.user.id, 'workspace', workspaceId)
@@ -116,10 +156,39 @@ export async function DELETE(
}
try {
logger.info(`Deleting workspace ${workspaceId} for user ${session.user.id}`)
logger.info(
`Deleting workspace ${workspaceId} for user ${session.user.id}, deleteTemplates: ${deleteTemplates}`
)
// Delete workspace and all related data in a transaction
await db.transaction(async (tx) => {
// Get all workflows in this workspace before deletion
const workspaceWorkflows = await tx
.select({ id: workflow.id })
.from(workflow)
.where(eq(workflow.workspaceId, workspaceId))
if (workspaceWorkflows.length > 0) {
const workflowIds = workspaceWorkflows.map((w) => w.id)
// Handle templates based on user choice
if (deleteTemplates) {
// Delete published templates that reference these workflows
await tx.delete(templates).where(inArray(templates.workflowId, workflowIds))
logger.info(`Deleted templates for workflows in workspace ${workspaceId}`)
} else {
// Set workflowId to null for templates to create "orphaned" templates
// This allows templates to remain in marketplace but without source workflows
await tx
.update(templates)
.set({ workflowId: null })
.where(inArray(templates.workflowId, workflowIds))
logger.info(
`Updated templates to orphaned status for workflows in workspace ${workspaceId}`
)
}
}
// Delete all workflows in the workspace - database cascade will handle all workflow-related data
// The database cascade will handle deleting related workflow_blocks, workflow_edges, workflow_subflows,
// workflow_logs, workflow_execution_snapshots, workflow_execution_logs, workflow_execution_trace_spans,
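
Workspace deletion follows the same pattern one level up (path assumed to be /api/workspaces/[id]; workspaceId is a placeholder): GET with check-templates=true lists templates published from any workflow in the workspace, and DELETE takes the user's choice in the request body.

// Sketch: delete a workspace but keep (orphan) any published templates.
async function deleteWorkspaceKeepingTemplates(workspaceId: string) {
  const res = await fetch(`/api/workspaces/${workspaceId}?check-templates=true`)
  const { hasPublishedTemplates } = await res.json()
  // Optionally prompt the user when hasPublishedTemplates is true.

  await fetch(`/api/workspaces/${workspaceId}`, {
    method: 'DELETE',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ deleteTemplates: false }), // true would delete the templates instead
  })
}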

View File

@@ -0,0 +1,167 @@
/* Force light mode for chat subdomain by overriding dark mode utilities */
/* This file uses CSS variables from globals.css light mode theme */
/* When inside the chat layout, force all light mode CSS variables */
.chat-light-wrapper {
/* Core Colors - from globals.css light mode */
--background: 0 0% 100%;
--foreground: 0 0% 3.9%;
/* Card Colors */
--card: 0 0% 99.2%;
--card-foreground: 0 0% 3.9%;
/* Popover Colors */
--popover: 0 0% 100%;
--popover-foreground: 0 0% 3.9%;
/* Primary Colors */
--primary: 0 0% 11.2%;
--primary-foreground: 0 0% 98%;
/* Secondary Colors */
--secondary: 0 0% 96.1%;
--secondary-foreground: 0 0% 11.2%;
/* Muted Colors */
--muted: 0 0% 96.1%;
--muted-foreground: 0 0% 46.9%;
/* Accent Colors */
--accent: 0 0% 92.5%;
--accent-foreground: 0 0% 11.2%;
/* Destructive Colors */
--destructive: 0 84.2% 60.2%;
--destructive-foreground: 0 0% 98%;
/* Border & Input Colors */
--border: 0 0% 89.8%;
--input: 0 0% 89.8%;
--ring: 0 0% 3.9%;
/* Border Radius */
--radius: 0.5rem;
/* Scrollbar Properties */
--scrollbar-track: 0 0% 85%;
--scrollbar-thumb: 0 0% 65%;
--scrollbar-thumb-hover: 0 0% 55%;
--scrollbar-size: 8px;
/* Workflow Properties */
--workflow-background: 0 0% 100%;
--workflow-dots: 0 0% 94.5%;
--card-background: 0 0% 99.2%;
--card-border: 0 0% 89.8%;
--card-text: 0 0% 3.9%;
--card-hover: 0 0% 96.1%;
/* Base Component Properties */
--base-muted-foreground: #737373;
/* Gradient Colors */
--gradient-primary: 263 85% 70%;
--gradient-secondary: 336 95% 65%;
/* Brand Colors */
--brand-primary-hex: #701ffc;
--brand-primary-hover-hex: #802fff;
--brand-secondary-hex: #6518e6;
--brand-accent-hex: #9d54ff;
--brand-accent-hover-hex: #a66fff;
--brand-background-hex: #0c0c0c;
/* UI Surface Colors */
--surface-elevated: #202020;
}
/* Override dark mode utility classes using CSS variables */
.chat-light-wrapper :is(.dark\:bg-black) {
background-color: hsl(var(--secondary));
}
.chat-light-wrapper :is(.dark\:bg-gray-900) {
background-color: hsl(var(--background));
}
.chat-light-wrapper :is(.dark\:bg-gray-800) {
background-color: hsl(var(--secondary));
}
.chat-light-wrapper :is(.dark\:bg-gray-700) {
background-color: hsl(var(--accent));
}
.chat-light-wrapper :is(.dark\:bg-gray-600) {
background-color: hsl(var(--muted));
}
.chat-light-wrapper :is(.dark\:bg-gray-300) {
background-color: hsl(var(--primary));
}
/* Text color overrides using CSS variables */
.chat-light-wrapper :is(.dark\:text-gray-100) {
color: hsl(var(--primary));
}
.chat-light-wrapper :is(.dark\:text-gray-200) {
color: hsl(var(--foreground));
}
.chat-light-wrapper :is(.dark\:text-gray-300) {
color: hsl(var(--muted-foreground));
}
.chat-light-wrapper :is(.dark\:text-gray-400) {
color: hsl(var(--muted-foreground));
}
.chat-light-wrapper :is(.dark\:text-neutral-600) {
color: hsl(var(--muted-foreground));
}
.chat-light-wrapper :is(.dark\:text-blue-400) {
color: var(--brand-accent-hex);
}
/* Border color overrides using CSS variables */
.chat-light-wrapper :is(.dark\:border-gray-700) {
border-color: hsl(var(--border));
}
.chat-light-wrapper :is(.dark\:border-gray-800) {
border-color: hsl(var(--border));
}
.chat-light-wrapper :is(.dark\:border-gray-600) {
border-color: hsl(var(--border));
}
.chat-light-wrapper :is(.dark\:divide-gray-700) > * + * {
border-color: hsl(var(--border));
}
/* Hover state overrides */
.chat-light-wrapper :is(.dark\:hover\:bg-gray-800\/60:hover) {
background-color: hsl(var(--card-hover));
}
/* Code blocks specific overrides using CSS variables */
.chat-light-wrapper pre:is(.dark\:bg-black) {
background-color: hsl(var(--workflow-dots));
}
.chat-light-wrapper code:is(.dark\:bg-gray-700) {
background-color: hsl(var(--accent));
}
.chat-light-wrapper code:is(.dark\:text-gray-200) {
color: hsl(var(--foreground));
}
/* Force color scheme */
.chat-light-wrapper {
color-scheme: light !important;
}

View File

@@ -481,7 +481,7 @@ export default function ChatClient({ subdomain }: { subdomain: string }) {
// Standard text-based chat interface
return (
<div className='fixed inset-0 z-[100] flex flex-col bg-background'>
<div className='fixed inset-0 z-[100] flex flex-col bg-background text-foreground'>
{/* Header component */}
<ChatHeader chatConfig={chatConfig} starCount={starCount} />

View File

@@ -22,53 +22,14 @@ export function ChatHeader({ chatConfig, starCount }: ChatHeaderProps) {
return (
<div className='flex items-center justify-between bg-background/95 px-6 py-4 pt-6 backdrop-blur supports-[backdrop-filter]:bg-background/60 md:px-8 md:pt-4'>
<div className='flex items-center gap-4'>
{customImage ? (
{customImage && (
<img
src={customImage}
alt={`${chatConfig?.title || 'Chat'} logo`}
className='h-12 w-12 rounded-md object-cover'
className='h-8 w-8 rounded-md object-cover'
/>
) : (
// Default Sim Studio logo when no custom image is provided
<div
className='flex h-12 w-12 items-center justify-center rounded-md'
style={{ backgroundColor: primaryColor }}
>
<svg
width='20'
height='20'
viewBox='0 0 50 50'
fill='none'
xmlns='http://www.w3.org/2000/svg'
>
<path
d='M34.1455 20.0728H16.0364C12.7026 20.0728 10 22.7753 10 26.1091V35.1637C10 38.4975 12.7026 41.2 16.0364 41.2H34.1455C37.4792 41.2 40.1818 38.4975 40.1818 35.1637V26.1091C40.1818 22.7753 37.4792 20.0728 34.1455 20.0728Z'
fill={primaryColor}
stroke='white'
strokeWidth='3.5'
strokeLinecap='round'
strokeLinejoin='round'
/>
<path
d='M25.0919 14.0364C26.7588 14.0364 28.1101 12.6851 28.1101 11.0182C28.1101 9.35129 26.7588 8 25.0919 8C23.425 8 22.0737 9.35129 22.0737 11.0182C22.0737 12.6851 23.425 14.0364 25.0919 14.0364Z'
fill={primaryColor}
stroke='white'
strokeWidth='4'
strokeLinecap='round'
strokeLinejoin='round'
/>
<path
d='M25.0915 14.856V19.0277M20.5645 32.1398V29.1216M29.619 29.1216V32.1398'
stroke='white'
strokeWidth='4'
strokeLinecap='round'
strokeLinejoin='round'
/>
<circle cx='25' cy='11' r='2' fill={primaryColor} />
</svg>
</div>
)}
<h2 className='font-medium text-lg'>
<h2 className='font-medium text-foreground text-lg'>
{chatConfig?.customizations?.headerText || chatConfig?.title || 'Chat'}
</h2>
</div>

View File

@@ -2,10 +2,10 @@
export function ChatLoadingState() {
return (
<div className='flex min-h-screen items-center justify-center bg-gray-50'>
<div className='flex min-h-screen items-center justify-center bg-background text-foreground'>
<div className='animate-pulse text-center'>
<div className='mx-auto mb-4 h-8 w-48 rounded bg-gray-200' />
<div className='mx-auto h-4 w-64 rounded bg-gray-200' />
<div className='mx-auto mb-4 h-8 w-48 rounded bg-muted' />
<div className='mx-auto h-4 w-64 rounded bg-muted' />
</div>
</div>
)

View File

@@ -0,0 +1,19 @@
'use client'
import { ThemeProvider } from 'next-themes'
import './chat-client.css'
export default function ChatLayout({ children }: { children: React.ReactNode }) {
return (
<ThemeProvider
attribute='class'
forcedTheme='light'
enableSystem={false}
disableTransitionOnChange
>
<div className='light chat-light-wrapper' style={{ colorScheme: 'light' }}>
{children}
</div>
</ThemeProvider>
)
}

View File

@@ -3,6 +3,7 @@ import { SpeedInsights } from '@vercel/speed-insights/next'
import type { Metadata, Viewport } from 'next'
import { PublicEnvScript } from 'next-runtime-env'
import { BrandedLayout } from '@/components/branded-layout'
import { generateThemeCSS } from '@/lib/branding/inject-theme'
import { generateBrandedMetadata, generateStructuredData } from '@/lib/branding/metadata'
import { env } from '@/lib/env'
import { isHosted } from '@/lib/environment'
@@ -10,6 +11,7 @@ import { createLogger } from '@/lib/logs/console/logger'
import { getAssetUrl } from '@/lib/utils'
import '@/app/globals.css'
import { SessionProvider } from '@/lib/session-context'
import { ThemeProvider } from '@/app/theme-provider'
import { ZoomPrevention } from '@/app/zoom-prevention'
@@ -61,6 +63,7 @@ export const metadata: Metadata = generateBrandedMetadata()
export default function RootLayout({ children }: { children: React.ReactNode }) {
const structuredData = generateStructuredData()
const themeCSS = generateThemeCSS()
return (
<html lang='en' suppressHydrationWarning>
@@ -73,6 +76,16 @@ export default function RootLayout({ children }: { children: React.ReactNode })
}}
/>
{/* Theme CSS Override */}
{themeCSS && (
<style
id='theme-override'
dangerouslySetInnerHTML={{
__html: themeCSS,
}}
/>
)}
{/* Meta tags for better SEO */}
<meta name='color-scheme' content='light dark' />
<meta name='format-detection' content='telephone=no' />
@@ -111,16 +124,18 @@ export default function RootLayout({ children }: { children: React.ReactNode })
</head>
<body suppressHydrationWarning>
<ThemeProvider>
<BrandedLayout>
<ZoomPrevention />
{children}
{isHosted && (
<>
<SpeedInsights />
<Analytics />
</>
)}
</BrandedLayout>
<SessionProvider>
<BrandedLayout>
<ZoomPrevention />
{children}
{isHosted && (
<>
<SpeedInsights />
<Analytics />
</>
)}
</BrandedLayout>
</SessionProvider>
</ThemeProvider>
</body>
</html>

View File

@@ -11,8 +11,8 @@ export default function manifest(): MetadataRoute.Manifest {
'Build and deploy AI agents using our Figma-like canvas. Build, write evals, and deploy AI agent workflows that automate workflows and streamline your business processes.',
start_url: '/',
display: 'standalone',
background_color: '#701FFC', // Default Sim brand primary color
theme_color: '#701FFC', // Default Sim brand primary color
background_color: brand.theme?.backgroundColor || '#701FFC',
theme_color: brand.theme?.primaryColor || '#701FFC',
icons: [
{
src: '/favicon/android-chrome-192x192.png',

View File

@@ -29,7 +29,7 @@ export type CategoryValue = (typeof categories)[number]['value']
// Template data structure
export interface Template {
id: string
workflowId: string
workflowId: string | null
userId: string
name: string
description: string | null

View File

@@ -11,6 +11,7 @@ import {
DropdownMenuTrigger,
} from '@/components/ui/dropdown-menu'
import { Label } from '@/components/ui/label'
import { getEnv, isTruthy } from '@/lib/env'
interface ExampleCommandProps {
command: string
@@ -32,6 +33,7 @@ export function ExampleCommand({
}: ExampleCommandProps) {
const [mode, setMode] = useState<ExampleMode>('sync')
const [exampleType, setExampleType] = useState<ExampleType>('execute')
const isAsyncEnabled = isTruthy(getEnv('NEXT_PUBLIC_TRIGGER_DEV_ENABLED'))
// Format the curl command to use a placeholder for the API key
const formatCurlCommand = (command: string, apiKey: string) => {
@@ -146,62 +148,67 @@ export function ExampleCommand({
<div className='space-y-1.5'>
<div className='flex items-center justify-between'>
{showLabel && <Label className='font-medium text-sm'>Example</Label>}
<div className='flex items-center gap-1'>
<Button
variant='outline'
size='sm'
onClick={() => setMode('sync')}
className={`h-6 min-w-[50px] px-2 py-1 text-xs transition-none ${
mode === 'sync'
? 'border-primary bg-primary text-primary-foreground hover:border-primary hover:bg-primary hover:text-primary-foreground'
: ''
}`}
>
Sync
</Button>
<Button
variant='outline'
size='sm'
onClick={() => setMode('async')}
className={`h-6 min-w-[50px] px-2 py-1 text-xs transition-none ${
mode === 'async'
? 'border-primary bg-primary text-primary-foreground hover:border-primary hover:bg-primary hover:text-primary-foreground'
: ''
}`}
>
Async
</Button>
<DropdownMenu>
<DropdownMenuTrigger asChild>
<Button
variant='outline'
size='sm'
className='h-6 min-w-[140px] justify-between px-2 py-1 text-xs'
disabled={mode === 'sync'}
>
<span className='truncate'>{getExampleTitle()}</span>
<ChevronDown className='ml-1 h-3 w-3 flex-shrink-0' />
</Button>
</DropdownMenuTrigger>
<DropdownMenuContent align='end'>
<DropdownMenuItem
className='cursor-pointer'
onClick={() => setExampleType('execute')}
>
Async Execution
</DropdownMenuItem>
<DropdownMenuItem className='cursor-pointer' onClick={() => setExampleType('status')}>
Check Job Status
</DropdownMenuItem>
<DropdownMenuItem
className='cursor-pointer'
onClick={() => setExampleType('rate-limits')}
>
Rate Limits & Usage
</DropdownMenuItem>
</DropdownMenuContent>
</DropdownMenu>
</div>
{isAsyncEnabled && (
<div className='flex items-center gap-1'>
<Button
variant='outline'
size='sm'
onClick={() => setMode('sync')}
className={`h-6 min-w-[50px] px-2 py-1 text-xs transition-none ${
mode === 'sync'
? 'border-primary bg-primary text-primary-foreground hover:border-primary hover:bg-primary hover:text-primary-foreground'
: ''
}`}
>
Sync
</Button>
<Button
variant='outline'
size='sm'
onClick={() => setMode('async')}
className={`h-6 min-w-[50px] px-2 py-1 text-xs transition-none ${
mode === 'async'
? 'border-primary bg-primary text-primary-foreground hover:border-primary hover:bg-primary hover:text-primary-foreground'
: ''
}`}
>
Async
</Button>
<DropdownMenu>
<DropdownMenuTrigger asChild>
<Button
variant='outline'
size='sm'
className='h-6 min-w-[140px] justify-between px-2 py-1 text-xs'
disabled={mode === 'sync'}
>
<span className='truncate'>{getExampleTitle()}</span>
<ChevronDown className='ml-1 h-3 w-3 flex-shrink-0' />
</Button>
</DropdownMenuTrigger>
<DropdownMenuContent align='end'>
<DropdownMenuItem
className='cursor-pointer'
onClick={() => setExampleType('execute')}
>
Async Execution
</DropdownMenuItem>
<DropdownMenuItem
className='cursor-pointer'
onClick={() => setExampleType('status')}
>
Check Job Status
</DropdownMenuItem>
<DropdownMenuItem
className='cursor-pointer'
onClick={() => setExampleType('rate-limits')}
>
Rate Limits & Usage
</DropdownMenuItem>
</DropdownMenuContent>
</DropdownMenu>
</div>
)}
</div>
<div className='group relative h-[120px] rounded-md border bg-background transition-colors hover:bg-muted/50'>

View File

@@ -1,6 +1,6 @@
'use client'
import { useState } from 'react'
import { useEffect, useState } from 'react'
import { zodResolver } from '@hookform/resolvers/zod'
import {
Award,
@@ -18,6 +18,7 @@ import {
Database,
DollarSign,
Edit,
Eye,
FileText,
Folder,
Globe,
@@ -48,6 +49,16 @@ import {
} from 'lucide-react'
import { useForm } from 'react-hook-form'
import { z } from 'zod'
import {
AlertDialog,
AlertDialogAction,
AlertDialogCancel,
AlertDialogContent,
AlertDialogDescription,
AlertDialogFooter,
AlertDialogHeader,
AlertDialogTitle,
} from '@/components/ui/alert-dialog'
import { Button } from '@/components/ui/button'
import { ColorPicker } from '@/components/ui/color-picker'
import { Dialog, DialogContent, DialogHeader, DialogTitle } from '@/components/ui/dialog'
@@ -68,6 +79,7 @@ import {
SelectTrigger,
SelectValue,
} from '@/components/ui/select'
import { Skeleton } from '@/components/ui/skeleton'
import { Textarea } from '@/components/ui/textarea'
import { useSession } from '@/lib/auth-client'
import { createLogger } from '@/lib/logs/console/logger'
@@ -100,7 +112,6 @@ interface TemplateModalProps {
workflowId: string
}
// Enhanced icon selection with category-relevant icons
const icons = [
// Content & Documentation
{ value: 'FileText', label: 'File Text', component: FileText },
@@ -165,6 +176,10 @@ export function TemplateModal({ open, onOpenChange, workflowId }: TemplateModalP
const { data: session } = useSession()
const [isSubmitting, setIsSubmitting] = useState(false)
const [iconPopoverOpen, setIconPopoverOpen] = useState(false)
const [existingTemplate, setExistingTemplate] = useState<any>(null)
const [isLoadingTemplate, setIsLoadingTemplate] = useState(false)
const [showDeleteDialog, setShowDeleteDialog] = useState(false)
const [isDeleting, setIsDeleting] = useState(false)
const form = useForm<TemplateFormData>({
resolver: zodResolver(templateSchema),
@@ -178,6 +193,63 @@ export function TemplateModal({ open, onOpenChange, workflowId }: TemplateModalP
},
})
// Watch form state to determine if all required fields are valid
const formValues = form.watch()
const isFormValid =
form.formState.isValid &&
formValues.name?.trim() &&
formValues.description?.trim() &&
formValues.author?.trim() &&
formValues.category
// Check for existing template when modal opens
useEffect(() => {
if (open && workflowId) {
checkExistingTemplate()
}
}, [open, workflowId])
const checkExistingTemplate = async () => {
setIsLoadingTemplate(true)
try {
const response = await fetch(`/api/templates?workflowId=${workflowId}&limit=1`)
if (response.ok) {
const result = await response.json()
const template = result.data?.[0] || null
setExistingTemplate(template)
// Pre-fill form with existing template data
if (template) {
form.reset({
name: template.name,
description: template.description,
author: template.author,
category: template.category,
icon: template.icon,
color: template.color,
})
} else {
// No existing template found
setExistingTemplate(null)
// Reset form to defaults
form.reset({
name: '',
description: '',
author: session?.user?.name || session?.user?.email || '',
category: '',
icon: 'FileText',
color: '#3972F6',
})
}
}
} catch (error) {
logger.error('Error checking existing template:', error)
setExistingTemplate(null)
} finally {
setIsLoadingTemplate(false)
}
}
const onSubmit = async (data: TemplateFormData) => {
if (!session?.user) {
logger.error('User not authenticated')
@@ -201,21 +273,36 @@ export function TemplateModal({ open, onOpenChange, workflowId }: TemplateModalP
state: templateState,
}
const response = await fetch('/api/templates', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify(templateData),
})
let response
if (existingTemplate) {
// Update existing template
response = await fetch(`/api/templates/${existingTemplate.id}`, {
method: 'PUT',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify(templateData),
})
} else {
// Create new template
response = await fetch('/api/templates', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify(templateData),
})
}
if (!response.ok) {
const errorData = await response.json()
throw new Error(errorData.error || 'Failed to create template')
throw new Error(
errorData.error || `Failed to ${existingTemplate ? 'update' : 'create'} template`
)
}
const result = await response.json()
logger.info('Template created successfully:', result)
logger.info(`Template ${existingTemplate ? 'updated' : 'created'} successfully:`, result)
// Reset form and close modal
form.reset()
@@ -241,7 +328,35 @@ export function TemplateModal({ open, onOpenChange, workflowId }: TemplateModalP
>
<DialogHeader className='flex-shrink-0 border-b px-6 py-4'>
<div className='flex items-center justify-between'>
<DialogTitle className='font-medium text-lg'>Publish Template</DialogTitle>
<div className='flex items-center gap-3'>
<DialogTitle className='font-medium text-lg'>
{isLoadingTemplate
? 'Loading...'
: existingTemplate
? 'Update Template'
: 'Publish Template'}
</DialogTitle>
{existingTemplate && (
<div className='flex items-center gap-2'>
{existingTemplate.stars > 0 && (
<div className='flex items-center gap-1 rounded-full bg-yellow-50 px-2 py-1 dark:bg-yellow-900/20'>
<Star className='h-3 w-3 fill-yellow-400 text-yellow-400' />
<span className='font-medium text-xs text-yellow-700 dark:text-yellow-300'>
{existingTemplate.stars}
</span>
</div>
)}
{existingTemplate.views > 0 && (
<div className='flex items-center gap-1 rounded-full bg-blue-50 px-2 py-1 dark:bg-blue-900/20'>
<Eye className='h-3 w-3 text-blue-500' />
<span className='font-medium text-blue-700 text-xs dark:text-blue-300'>
{existingTemplate.views}
</span>
</div>
)}
</div>
)}
</div>
<Button
variant='ghost'
size='icon'
@@ -259,65 +374,189 @@ export function TemplateModal({ open, onOpenChange, workflowId }: TemplateModalP
onSubmit={form.handleSubmit(onSubmit)}
className='flex flex-1 flex-col overflow-hidden'
>
<div className='flex-1 overflow-y-auto px-6 py-4'>
<div className='space-y-6'>
<div className='flex gap-3'>
<div className='flex-1 overflow-y-auto px-6 py-6'>
{isLoadingTemplate ? (
<div className='space-y-6'>
{/* Icon and Color row */}
<div className='flex gap-3'>
<div className='w-20'>
<Skeleton className='mb-2 h-4 w-8' /> {/* Label */}
<Skeleton className='h-10 w-20' /> {/* Icon picker */}
</div>
<div className='w-20'>
<Skeleton className='mb-2 h-4 w-10' /> {/* Label */}
<Skeleton className='h-10 w-20' /> {/* Color picker */}
</div>
</div>
{/* Name field */}
<div>
<Skeleton className='mb-2 h-4 w-12' /> {/* Label */}
<Skeleton className='h-10 w-full' /> {/* Input */}
</div>
{/* Author and Category row */}
<div className='grid grid-cols-2 gap-4'>
<div>
<Skeleton className='mb-2 h-4 w-14' /> {/* Label */}
<Skeleton className='h-10 w-full' /> {/* Input */}
</div>
<div>
<Skeleton className='mb-2 h-4 w-16' /> {/* Label */}
<Skeleton className='h-10 w-full' /> {/* Select */}
</div>
</div>
{/* Description field */}
<div>
<Skeleton className='mb-2 h-4 w-20' /> {/* Label */}
<Skeleton className='h-20 w-full' /> {/* Textarea */}
</div>
</div>
) : (
<div className='space-y-6'>
<div className='flex gap-3'>
<FormField
control={form.control}
name='icon'
render={({ field }) => (
<FormItem className='w-20'>
<FormLabel className='!text-foreground font-medium text-sm'>
Icon
</FormLabel>
<Popover open={iconPopoverOpen} onOpenChange={setIconPopoverOpen}>
<PopoverTrigger asChild>
<Button variant='outline' role='combobox' className='h-10 w-20 p-0'>
<SelectedIconComponent className='h-4 w-4' />
</Button>
</PopoverTrigger>
<PopoverContent className='z-50 w-84 p-0' align='start'>
<div className='p-3'>
<div className='grid max-h-80 grid-cols-8 gap-2 overflow-y-auto'>
{icons.map((icon) => {
const IconComponent = icon.component
return (
<button
key={icon.value}
type='button'
onClick={() => {
field.onChange(icon.value)
setIconPopoverOpen(false)
}}
className={cn(
'flex h-8 w-8 items-center justify-center rounded-md border transition-colors hover:bg-muted',
field.value === icon.value &&
'bg-primary text-primary-foreground'
)}
>
<IconComponent className='h-4 w-4' />
</button>
)
})}
</div>
</div>
</PopoverContent>
</Popover>
<FormMessage />
</FormItem>
)}
/>
<FormField
control={form.control}
name='color'
render={({ field }) => (
<FormItem className='w-20'>
<FormLabel className='!text-foreground font-medium text-sm'>
Color
</FormLabel>
<FormControl>
<ColorPicker
value={field.value}
onChange={field.onChange}
onBlur={field.onBlur}
className='h-10 w-20'
/>
</FormControl>
<FormMessage />
</FormItem>
)}
/>
</div>
<FormField
control={form.control}
name='icon'
name='name'
render={({ field }) => (
<FormItem className='w-20'>
<FormLabel>Icon</FormLabel>
<Popover open={iconPopoverOpen} onOpenChange={setIconPopoverOpen}>
<PopoverTrigger asChild>
<Button variant='outline' role='combobox' className='h-10 w-20 p-0'>
<SelectedIconComponent className='h-4 w-4' />
</Button>
</PopoverTrigger>
<PopoverContent className='z-50 w-84 p-0' align='start'>
<div className='p-3'>
<div className='grid max-h-80 grid-cols-8 gap-2 overflow-y-auto'>
{icons.map((icon) => {
const IconComponent = icon.component
return (
<button
key={icon.value}
type='button'
onClick={() => {
field.onChange(icon.value)
setIconPopoverOpen(false)
}}
className={cn(
'flex h-8 w-8 items-center justify-center rounded-md border transition-colors hover:bg-muted',
field.value === icon.value &&
'bg-primary text-primary-foreground'
)}
>
<IconComponent className='h-4 w-4' />
</button>
)
})}
</div>
</div>
</PopoverContent>
</Popover>
<FormItem>
<FormLabel className='!text-foreground font-medium text-sm'>Name</FormLabel>
<FormControl>
<Input placeholder='Enter template name' {...field} />
</FormControl>
<FormMessage />
</FormItem>
)}
/>
<div className='grid grid-cols-2 gap-4'>
<FormField
control={form.control}
name='author'
render={({ field }) => (
<FormItem>
<FormLabel className='!text-foreground font-medium text-sm'>
Author
</FormLabel>
<FormControl>
<Input placeholder='Enter author name' {...field} />
</FormControl>
<FormMessage />
</FormItem>
)}
/>
<FormField
control={form.control}
name='category'
render={({ field }) => (
<FormItem>
<FormLabel className='!text-foreground font-medium text-sm'>
Category
</FormLabel>
<Select onValueChange={field.onChange} defaultValue={field.value}>
<FormControl>
<SelectTrigger>
<SelectValue placeholder='Select a category' />
</SelectTrigger>
</FormControl>
<SelectContent>
{categories.map((category) => (
<SelectItem key={category.value} value={category.value}>
{category.label}
</SelectItem>
))}
</SelectContent>
</Select>
<FormMessage />
</FormItem>
)}
/>
</div>
<FormField
control={form.control}
name='description'
render={({ field }) => (
<FormItem>
<FormLabel className='!text-foreground font-medium text-sm'>
Description
</FormLabel>
<FormControl>
<Textarea
placeholder='Describe what this template does...'
className='resize-none'
rows={3}
{...field}
/>
</FormControl>
<FormMessage />
</FormItem>
)}
/>
</div>
@@ -325,91 +564,28 @@ export function TemplateModal({ open, onOpenChange, workflowId }: TemplateModalP
<FormField
control={form.control}
name='name'
render={({ field }) => (
<FormItem>
<FormLabel>Name</FormLabel>
<FormControl>
<Input placeholder='Enter template name' {...field} />
</FormControl>
<FormMessage />
</FormItem>
)}
/>
<div className='grid grid-cols-2 gap-4'>
<FormField
control={form.control}
name='author'
render={({ field }) => (
<FormItem>
<FormLabel>Author</FormLabel>
<FormControl>
<Input placeholder='Enter author name' {...field} />
</FormControl>
<FormMessage />
</FormItem>
)}
/>
<FormField
control={form.control}
name='category'
render={({ field }) => (
<FormItem>
<FormLabel>Category</FormLabel>
<Select onValueChange={field.onChange} defaultValue={field.value}>
<FormControl>
<SelectTrigger>
<SelectValue placeholder='Select a category' />
</SelectTrigger>
</FormControl>
<SelectContent>
{categories.map((category) => (
<SelectItem key={category.value} value={category.value}>
{category.label}
</SelectItem>
))}
</SelectContent>
</Select>
<FormMessage />
</FormItem>
)}
/>
</div>
<FormField
control={form.control}
name='description'
render={({ field }) => (
<FormItem>
<FormLabel>Description</FormLabel>
<FormControl>
<Textarea
placeholder='Describe what this template does...'
className='resize-none'
rows={3}
{...field}
/>
</FormControl>
<FormMessage />
</FormItem>
)}
/>
</div>
)}
</div>
{/* Fixed Footer */}
<div className='mt-auto border-t px-6 pt-4 pb-6'>
<div className='flex justify-end'>
<div className='flex items-center'>
{existingTemplate && (
<Button
type='button'
variant='destructive'
onClick={() => setShowDeleteDialog(true)}
disabled={isSubmitting || isLoadingTemplate}
className='h-10 rounded-md px-4 py-2'
>
Delete
</Button>
)}
<Button
type='submit'
disabled={isSubmitting || !isFormValid || isLoadingTemplate}
className={cn(
'ml-auto font-medium',
'bg-[var(--brand-primary-hex)] hover:bg-[var(--brand-primary-hover-hex)]',
'shadow-[0_0_0_0_var(--brand-primary-hex)] hover:shadow-[0_0_0_4px_rgba(127,47,255,0.15)]',
'text-white transition-all duration-200',
@@ -420,16 +596,59 @@ export function TemplateModal({ open, onOpenChange, workflowId }: TemplateModalP
{isSubmitting ? (
<>
<Loader2 className='mr-2 h-4 w-4 animate-spin' />
{existingTemplate ? 'Updating...' : 'Publishing...'}
</>
) : existingTemplate ? (
'Update Template'
) : (
'Publish Template'
)}
</Button>
</div>
</div>
</form>
</Form>
{existingTemplate && (
<AlertDialog open={showDeleteDialog} onOpenChange={setShowDeleteDialog}>
<AlertDialogContent>
<AlertDialogHeader>
<AlertDialogTitle>Delete Template?</AlertDialogTitle>
<AlertDialogDescription>
Deleting this template will remove it from the gallery. This action cannot be
undone.
</AlertDialogDescription>
</AlertDialogHeader>
<AlertDialogFooter>
<AlertDialogCancel disabled={isDeleting}>Cancel</AlertDialogCancel>
<AlertDialogAction
className='bg-destructive text-destructive-foreground hover:bg-destructive/90'
disabled={isDeleting}
onClick={async () => {
if (!existingTemplate) return
setIsDeleting(true)
try {
const resp = await fetch(`/api/templates/${existingTemplate.id}`, {
method: 'DELETE',
})
if (!resp.ok) {
const err = await resp.json().catch(() => ({}))
throw new Error(err.error || 'Failed to delete template')
}
setShowDeleteDialog(false)
onOpenChange(false)
} catch (err) {
logger.error('Failed to delete template', err)
} finally {
setIsDeleting(false)
}
}}
>
{isDeleting ? 'Deleting...' : 'Delete'}
</AlertDialogAction>
</AlertDialogFooter>
</AlertDialogContent>
</AlertDialog>
)}
</DialogContent>
</Dialog>
)

View File

@@ -18,7 +18,6 @@ import {
import { useParams, useRouter } from 'next/navigation'
import {
AlertDialog,
AlertDialogAction,
AlertDialogCancel,
AlertDialogContent,
AlertDialogDescription,
@@ -113,6 +112,15 @@ export function ControlBar({ hasValidationErrors = false }: ControlBarProps) {
const [isTemplateModalOpen, setIsTemplateModalOpen] = useState(false)
const [isAutoLayouting, setIsAutoLayouting] = useState(false)
// Delete workflow state - grouped for better organization
const [deleteState, setDeleteState] = useState({
showDialog: false,
isDeleting: false,
hasPublishedTemplates: false,
publishedTemplates: [] as any[],
showTemplateChoice: false,
})
// Deployed state management
const [deployedState, setDeployedState] = useState<WorkflowState | null>(null)
const [isLoadingDeployedState, setIsLoadingDeployedState] = useState<boolean>(false)
@@ -337,35 +345,170 @@ export function ControlBar({ hasValidationErrors = false }: ControlBarProps) {
}
/**
* Reset delete state
*/
const resetDeleteState = useCallback(() => {
setDeleteState({
showDialog: false,
isDeleting: false,
hasPublishedTemplates: false,
publishedTemplates: [],
showTemplateChoice: false,
})
}, [])
/**
* Navigate to next workflow after deletion
*/
const navigateAfterDeletion = useCallback(
(currentWorkflowId: string) => {
const sidebarWorkflows = getSidebarOrderedWorkflows()
const currentIndex = sidebarWorkflows.findIndex((w) => w.id === currentWorkflowId)
// Find next workflow: try next, then previous
let nextWorkflowId: string | null = null
if (sidebarWorkflows.length > 1) {
if (currentIndex < sidebarWorkflows.length - 1) {
nextWorkflowId = sidebarWorkflows[currentIndex + 1].id
} else if (currentIndex > 0) {
nextWorkflowId = sidebarWorkflows[currentIndex - 1].id
}
}
// Navigate to next workflow or workspace home
if (nextWorkflowId) {
router.push(`/workspace/${workspaceId}/w/${nextWorkflowId}`)
} else {
router.push(`/workspace/${workspaceId}`)
}
},
[workspaceId, router]
)
/**
* Check if workflow has published templates
*/
const checkPublishedTemplates = useCallback(async (workflowId: string) => {
const checkResponse = await fetch(`/api/workflows/${workflowId}?check-templates=true`, {
method: 'DELETE',
})
if (!checkResponse.ok) {
throw new Error(`Failed to check templates: ${checkResponse.statusText}`)
}
return await checkResponse.json()
}, [])
/**
* Delete workflow with optional template handling
*/
const deleteWorkflowWithTemplates = useCallback(
async (workflowId: string, templateAction?: 'keep' | 'delete') => {
const endpoint = templateAction
? `/api/workflows/${workflowId}?deleteTemplates=${templateAction}`
: null
if (endpoint) {
// Use custom endpoint for template handling
const response = await fetch(endpoint, { method: 'DELETE' })
if (!response.ok) {
throw new Error(`Failed to delete workflow: ${response.statusText}`)
}
// Manual registry cleanup since we used custom API
useWorkflowRegistry.setState((state) => {
const newWorkflows = { ...state.workflows }
delete newWorkflows[workflowId]
return {
...state,
workflows: newWorkflows,
activeWorkflowId: state.activeWorkflowId === workflowId ? null : state.activeWorkflowId,
}
})
} else {
// Use registry's built-in deletion (handles database + state)
await useWorkflowRegistry.getState().removeWorkflow(workflowId)
}
},
[]
)
/**
* Handle deleting the current workflow - called after user confirms
*/
const handleDeleteWorkflow = useCallback(async () => {
const currentWorkflowId = params.workflowId as string
if (!currentWorkflowId || !userPermissions.canEdit) return
setDeleteState((prev) => ({ ...prev, isDeleting: true }))
try {
// Check if workflow has published templates
const checkData = await checkPublishedTemplates(currentWorkflowId)
if (checkData.hasPublishedTemplates) {
setDeleteState((prev) => ({
...prev,
hasPublishedTemplates: true,
publishedTemplates: checkData.publishedTemplates || [],
showTemplateChoice: true,
isDeleting: false, // Stop showing "Deleting..." and show template choice
}))
return
}
// No templates, proceed with standard deletion
navigateAfterDeletion(currentWorkflowId)
await deleteWorkflowWithTemplates(currentWorkflowId)
resetDeleteState()
} catch (error) {
logger.error('Error deleting workflow:', error)
setDeleteState((prev) => ({ ...prev, isDeleting: false }))
}
}, [
params.workflowId,
userPermissions.canEdit,
checkPublishedTemplates,
navigateAfterDeletion,
deleteWorkflowWithTemplates,
resetDeleteState,
])
/**
* Handle template action selection
*/
const handleTemplateAction = useCallback(
async (action: 'keep' | 'delete') => {
const currentWorkflowId = params.workflowId as string
if (!currentWorkflowId || !userPermissions.canEdit) return
setDeleteState((prev) => ({ ...prev, isDeleting: true }))
try {
logger.info(`Deleting workflow ${currentWorkflowId} with template action: ${action}`)
navigateAfterDeletion(currentWorkflowId)
await deleteWorkflowWithTemplates(currentWorkflowId, action)
logger.info(
`Successfully deleted workflow ${currentWorkflowId} with template action: ${action}`
)
resetDeleteState()
} catch (error) {
logger.error('Error deleting workflow:', error)
setDeleteState((prev) => ({ ...prev, isDeleting: false }))
}
},
[
params.workflowId,
userPermissions.canEdit,
navigateAfterDeletion,
deleteWorkflowWithTemplates,
resetDeleteState,
]
)
// Helper function to open subscription settings
const openSubscriptionSettings = () => {
@@ -422,7 +565,23 @@ export function ControlBar({ hasValidationErrors = false }: ControlBarProps) {
}
return (
<AlertDialog
open={deleteState.showDialog}
onOpenChange={(open) => {
if (open) {
// Reset all state when opening dialog to ensure clean start
setDeleteState({
showDialog: true,
isDeleting: false,
hasPublishedTemplates: false,
publishedTemplates: [],
showTemplateChoice: false,
})
} else {
resetDeleteState()
}
}}
>
<Tooltip>
<TooltipTrigger asChild>
<AlertDialogTrigger asChild>
@@ -444,21 +603,71 @@ export function ControlBar({ hasValidationErrors = false }: ControlBarProps) {
<AlertDialogContent>
<AlertDialogHeader>
<AlertDialogTitle>Delete workflow?</AlertDialogTitle>
<AlertDialogDescription>
Deleting this workflow will permanently remove all associated blocks, executions, and
configuration.{' '}
<span className='text-red-500 dark:text-red-500'>This action cannot be undone.</span>
</AlertDialogDescription>
<AlertDialogTitle>
{deleteState.showTemplateChoice ? 'Published Templates Found' : 'Delete workflow?'}
</AlertDialogTitle>
{deleteState.showTemplateChoice ? (
<div className='space-y-3'>
<AlertDialogDescription>
This workflow has {deleteState.publishedTemplates.length} published template
{deleteState.publishedTemplates.length > 1 ? 's' : ''}:
</AlertDialogDescription>
{deleteState.publishedTemplates.length > 0 && (
<ul className='list-disc space-y-1 pl-6'>
{deleteState.publishedTemplates.map((template) => (
<li key={template.id}>{template.name}</li>
))}
</ul>
)}
<AlertDialogDescription>
What would you like to do with the published template
{deleteState.publishedTemplates.length > 1 ? 's' : ''}?
</AlertDialogDescription>
</div>
) : (
<AlertDialogDescription>
Deleting this workflow will permanently remove all associated blocks, executions,
and configuration.{' '}
<span className='text-red-500 dark:text-red-500'>
This action cannot be undone.
</span>
</AlertDialogDescription>
)}
</AlertDialogHeader>
<AlertDialogFooter className='flex'>
<AlertDialogCancel className='h-9 w-full rounded-[8px]'>Cancel</AlertDialogCancel>
<AlertDialogAction
onClick={handleDeleteWorkflow}
className='h-9 w-full rounded-[8px] bg-red-500 text-white transition-all duration-200 hover:bg-red-600 dark:bg-red-500 dark:hover:bg-red-600'
>
Delete
</AlertDialogAction>
{deleteState.showTemplateChoice ? (
<div className='flex w-full gap-2'>
<Button
variant='outline'
onClick={() => handleTemplateAction('keep')}
disabled={deleteState.isDeleting}
className='h-9 flex-1 rounded-[8px]'
>
Keep templates
</Button>
<Button
onClick={() => handleTemplateAction('delete')}
disabled={deleteState.isDeleting}
className='h-9 flex-1 rounded-[8px] bg-red-500 text-white transition-all duration-200 hover:bg-red-600 dark:bg-red-500 dark:hover:bg-red-600'
>
{deleteState.isDeleting ? 'Deleting...' : 'Delete templates'}
</Button>
</div>
) : (
<>
<AlertDialogCancel className='h-9 w-full rounded-[8px]'>Cancel</AlertDialogCancel>
<Button
onClick={(e) => {
e.preventDefault()
handleDeleteWorkflow()
}}
disabled={deleteState.isDeleting}
className='h-9 w-full rounded-[8px] bg-red-500 text-white transition-all duration-200 hover:bg-red-600 dark:bg-red-500 dark:hover:bg-red-600'
>
{deleteState.isDeleting ? 'Deleting...' : 'Delete'}
</Button>
</>
)}
</AlertDialogFooter>
</AlertDialogContent>
</AlertDialog>
@@ -1002,10 +1211,10 @@ export function ControlBar({ hasValidationErrors = false }: ControlBarProps) {
{renderToggleButton()}
{isExpanded && <ExportControls />}
{isExpanded && renderAutoLayoutButton()}
{renderDuplicateButton()}
{renderDeleteButton()}
{!isDebugging && renderDebugModeToggle()}
{isExpanded && renderPublishButton()}
{renderDeleteButton()}
{renderDuplicateButton()}
{!isDebugging && renderDebugModeToggle()}
{renderDeployButton()}
{isDebugging ? renderDebugControlsBar() : renderRunButton()}
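For reference, a minimal sketch of the two-step delete flow the control bar now performs. The check-templates and deleteTemplates query parameters and the response shape are taken from the hunk above; the standalone helpers themselves are illustrative, not part of the diff (the real control bar falls back to the registry's removeWorkflow when no templates exist).
interface TemplateCheckResult {
  hasPublishedTemplates: boolean
  publishedTemplates?: Array<{ id: string; name: string }>
}
// Step 1: probe for published templates without committing to a full delete.
async function checkWorkflowTemplates(workflowId: string): Promise<TemplateCheckResult> {
  const resp = await fetch(`/api/workflows/${workflowId}?check-templates=true`, { method: 'DELETE' })
  if (!resp.ok) throw new Error(`Failed to check templates: ${resp.statusText}`)
  return resp.json()
}
// Step 2: delete the workflow, telling the API what to do with its templates.
async function deleteWorkflowAndTemplates(workflowId: string, action: 'keep' | 'delete'): Promise<void> {
  const resp = await fetch(`/api/workflows/${workflowId}?deleteTemplates=${action}`, { method: 'DELETE' })
  if (!resp.ok) throw new Error(`Failed to delete workflow: ${resp.statusText}`)
}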

View File

@@ -1,73 +0,0 @@
'use client'
import * as React from 'react'
import { format } from 'date-fns'
import { Calendar as CalendarIcon } from 'lucide-react'
import { Button } from '@/components/ui/button'
import { Calendar } from '@/components/ui/calendar'
import { Popover, PopoverContent, PopoverTrigger } from '@/components/ui/popover'
import { cn } from '@/lib/utils'
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/components/sub-block/hooks/use-sub-block-value'
interface DateInputProps {
blockId: string
subBlockId: string
placeholder?: string
isPreview?: boolean
previewValue?: string | null
disabled?: boolean
}
export function DateInput({
blockId,
subBlockId,
placeholder,
isPreview = false,
previewValue,
disabled = false,
}: DateInputProps) {
const [storeValue, setStoreValue] = useSubBlockValue(blockId, subBlockId)
// Use preview value when in preview mode, otherwise use store value
const value = isPreview ? previewValue : storeValue
const date = value ? new Date(value) : undefined
const isPastDate = React.useMemo(() => {
if (!date) return false
const today = new Date()
today.setHours(0, 0, 0, 0)
return date < today
}, [date])
const handleDateSelect = (selectedDate: Date | undefined) => {
if (isPreview || disabled) return
if (selectedDate) {
const today = new Date()
today.setHours(0, 0, 0, 0)
}
setStoreValue(selectedDate?.toISOString() || '')
}
return (
<Popover>
<PopoverTrigger asChild>
<Button
variant='outline'
disabled={isPreview || disabled}
className={cn(
'w-full justify-start text-left font-normal',
!date && 'text-muted-foreground',
isPastDate && 'border-red-500'
)}
>
<CalendarIcon className='mr-1 h-4 w-4' />
{date ? format(date, 'MMM d, yy') : <span>{placeholder || 'Pick a date'}</span>}
</Button>
</PopoverTrigger>
<PopoverContent className='w-auto p-0'>
<Calendar mode='single' selected={date} onSelect={handleDateSelect} initialFocus />
</PopoverContent>
</Popover>
)
}

View File

@@ -88,6 +88,8 @@ export function MicrosoftFileSelector({
const [showOAuthModal, setShowOAuthModal] = useState(false)
const [credentialsLoaded, setCredentialsLoaded] = useState(false)
const initialFetchRef = useRef(false)
// Track the last (credentialId, fileId) we attempted to resolve to avoid tight retry loops
const lastMetaAttemptRef = useRef<string>('')
// Handle Microsoft Planner task selection
const [plannerTasks, setPlannerTasks] = useState<PlannerTask[]>([])
@@ -496,11 +498,15 @@ export function MicrosoftFileSelector({
setSelectedFileId('')
onChange('')
}
// Reset memo when credential is cleared
lastMetaAttemptRef.current = ''
} else if (prevCredentialId && prevCredentialId !== selectedCredentialId) {
// Credentials changed (not initial load) - clear file info to force refetch
if (selectedFile) {
setSelectedFile(null)
}
// Reset memo when switching credentials
lastMetaAttemptRef.current = ''
}
}, [selectedCredentialId, selectedFile, onChange])
@@ -514,10 +520,17 @@ export function MicrosoftFileSelector({
(!selectedFile || selectedFile.id !== value) &&
!isLoadingSelectedFile
) {
// Avoid tight retry loops by memoizing the last attempt tuple
const attemptKey = `${selectedCredentialId}::${value}`
if (lastMetaAttemptRef.current === attemptKey) {
return
}
lastMetaAttemptRef.current = attemptKey
if (serviceId === 'microsoft-planner') {
void fetchPlannerTaskById(value)
} else {
void fetchFileById(value)
}
}
}, [
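The lastMetaAttemptRef guard above prevents refetching metadata for the same credential/file pair on every render. A standalone sketch of the same pattern, with a hypothetical fetchMetadata callback and illustrative parameter names:
import { useEffect, useRef } from 'react'

// Sketch of the "remember the last attempt" guard; names are illustrative.
export function useFetchOnce(
  credentialId: string,
  fileId: string,
  fetchMetadata: (fileId: string) => Promise<void>
) {
  const lastAttemptRef = useRef<string>('')

  useEffect(() => {
    if (!credentialId || !fileId) return
    const attemptKey = `${credentialId}::${fileId}`
    // Skip if we already tried this exact pair; prevents tight retry loops
    // when the fetch fails or resolves to nothing.
    if (lastAttemptRef.current === attemptKey) return
    lastAttemptRef.current = attemptKey
    void fetchMetadata(fileId)
  }, [credentialId, fileId, fetchMetadata])
}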

View File

@@ -3,6 +3,7 @@
import { useParams } from 'next/navigation'
import { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger } from '@/components/ui/tooltip'
import { getEnv } from '@/lib/env'
import { getProviderIdFromServiceId } from '@/lib/oauth'
import {
ConfluenceFileSelector,
DiscordChannelSelector,
@@ -73,8 +74,12 @@ export function FileSelectorInput({
const [botTokenValue] = useSubBlockValue(blockId, 'botToken')
// Determine if the persisted credential belongs to the current viewer
// Use service providerId where available (e.g., onedrive/sharepoint) instead of base provider ("microsoft")
const foreignCheckProvider = subBlock.serviceId
? getProviderIdFromServiceId(subBlock.serviceId)
: (subBlock.provider as string) || ''
const { isForeignCredential } = useForeignCredential(
subBlock.provider || subBlock.serviceId || '',
foreignCheckProvider,
(connectedCredential as string) || ''
)
@@ -224,12 +229,6 @@ export function FileSelectorInput({
}
onChange={(issueKey) => {
collaborativeSetSubblockValue(blockId, subBlock.id, issueKey)
// Clear related fields when a new issue is selected
collaborativeSetSubblockValue(blockId, 'summary', '')
collaborativeSetSubblockValue(blockId, 'description', '')
if (!issueKey) {
collaborativeSetSubblockValue(blockId, 'manualIssueKey', '')
}
}}
domain={domain}
provider='jira'
@@ -353,7 +352,7 @@ export function FileSelectorInput({
requiredScopes={subBlock.requiredScopes || []}
serviceId={subBlock.serviceId}
label={subBlock.placeholder || 'Select SharePoint site'}
disabled={finalDisabled}
showPreview={true}
workflowId={activeWorkflowId || ''}
credentialId={credential}
@@ -389,7 +388,7 @@ export function FileSelectorInput({
requiredScopes={subBlock.requiredScopes || []}
serviceId='microsoft-planner'
label={subBlock.placeholder || 'Select task'}
disabled={finalDisabled}
showPreview={true}
planId={planId}
workflowId={activeWorkflowId || ''}
@@ -447,7 +446,7 @@ export function FileSelectorInput({
requiredScopes={subBlock.requiredScopes || []}
serviceId={subBlock.serviceId}
label={subBlock.placeholder || 'Select Teams message location'}
disabled={finalDisabled}
showPreview={true}
credential={credential}
selectionType={selectionType}
@@ -490,7 +489,7 @@ export function FileSelectorInput({
requiredScopes={subBlock.requiredScopes || []}
serviceId={subBlock.serviceId}
label={subBlock.placeholder || `Select ${itemType}`}
disabled={finalDisabled}
showPreview={true}
credentialId={credential}
itemType={itemType}
@@ -531,7 +530,7 @@ export function FileSelectorInput({
provider={provider}
requiredScopes={subBlock.requiredScopes || []}
label={subBlock.placeholder || 'Select file'}
disabled={finalDisabled}
serviceId={subBlock.serviceId}
mimeTypeFilter={subBlock.mimeType}
showPreview={true}

View File

@@ -4,7 +4,6 @@ export { Code } from './code'
export { ComboBox } from './combobox'
export { ConditionInput } from './condition-input'
export { CredentialSelector } from './credential-selector/credential-selector'
export { DateInput } from './date-input'
export { DocumentSelector } from './document-selector/document-selector'
export { Dropdown } from './dropdown'
export { EvalInput } from './eval-input'

View File

@@ -20,7 +20,6 @@ import {
CheckboxList,
Code,
ComboBox,
DateInput,
FileSelectorInput,
FileUpload,
LongInput,
@@ -172,33 +171,6 @@ function TableSyncWrapper({
)
}
function DateInputSyncWrapper({
blockId,
paramId,
value,
onChange,
uiComponent,
disabled,
}: {
blockId: string
paramId: string
value: string
onChange: (value: string) => void
uiComponent: any
disabled: boolean
}) {
return (
<GenericSyncWrapper blockId={blockId} paramId={paramId} value={value} onChange={onChange}>
<DateInput
blockId={blockId}
subBlockId={paramId}
placeholder={uiComponent.placeholder}
disabled={disabled}
/>
</GenericSyncWrapper>
)
}
function TimeInputSyncWrapper({
blockId,
paramId,
@@ -1159,18 +1131,6 @@ export function ToolInput({
/>
)
case 'date-input':
return (
<DateInputSyncWrapper
blockId={blockId}
paramId={param.id}
value={value}
onChange={onChange}
uiComponent={uiComponent}
disabled={disabled}
/>
)
case 'time-input':
return (
<TimeInputSyncWrapper

View File

@@ -65,7 +65,8 @@ export function useSubBlockValue<T = any>(
const storeValue = useSubBlockStore(
useCallback(
(state) => {
// If the active workflow ID isn't available yet, return undefined so we can fall back to initialValue
if (!activeWorkflowId) return undefined
return state.workflowValues[activeWorkflowId]?.[blockId]?.[subBlockId] ?? null
},
[activeWorkflowId, blockId, subBlockId]
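Returning undefined here (instead of null) lets callers distinguish 'store not ready yet' from 'sub-block explicitly empty', so the fallback to the hook's initialValue only applies in the former case. A minimal sketch of that distinction (the helper below is illustrative, not part of the hook):
// Hypothetical consumer: fall back to initialValue only while the store is not ready.
function resolveValue<T>(storeValue: T | null | undefined, initialValue: T | null): T | null {
  // undefined => active workflow id not available yet, use the initial value
  // null      => store is ready but the sub-block has no value
  return storeValue === undefined ? initialValue : storeValue
}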

View File

@@ -11,7 +11,6 @@ import {
ComboBox,
ConditionInput,
CredentialSelector,
DateInput,
DocumentSelector,
Dropdown,
EvalInput,
@@ -256,17 +255,6 @@ export function SubBlock({
disabled={isDisabled}
/>
)
case 'date-input':
return (
<DateInput
blockId={blockId}
subBlockId={config.id}
placeholder={config.placeholder}
isPreview={isPreview}
previewValue={previewValue}
disabled={isDisabled}
/>
)
case 'time-input':
return (
<TimeInput

View File

@@ -198,35 +198,37 @@ export function useWand({
const { done, value } = await reader.read()
if (done) break
// Process incoming chunks using SSE format (identical to Chat panel)
const chunk = decoder.decode(value)
const lines = chunk.split('\n\n')
for (const line of lines) {
if (line.startsWith('data: ')) {
try {
const data = JSON.parse(line.substring(6))
// Check if there's an error
if (data.error) {
throw new Error(data.error)
}
// Process chunk
if (data.chunk) {
accumulatedContent += data.chunk
// Stream each chunk to the UI immediately
if (onStreamChunk) {
onStreamChunk(data.chunk)
}
}
// Check if streaming is complete
if (data.done) {
break
}
} catch (parseError) {
// Continue processing other lines
logger.debug('Failed to parse SSE line', { line, parseError })
}
}
}
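A self-contained sketch of the full SSE read loop the hook now uses; the /api/wand endpoint, request body, and function name are placeholders for this sketch, only the chunk/done/error fields come from the code above.
async function streamWand(prompt: string, onChunk: (text: string) => void): Promise<string> {
  // Placeholder endpoint and body; only the SSE parsing mirrors the hook.
  const response = await fetch('/api/wand', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ prompt }),
  })
  if (!response.ok || !response.body) throw new Error('Stream request failed')

  const reader = response.body.getReader()
  const decoder = new TextDecoder()
  let accumulated = ''

  while (true) {
    const { done, value } = await reader.read()
    if (done) break
    // Events are separated by a blank line; each carries a 'data: {...}' JSON payload.
    for (const event of decoder.decode(value).split('\n\n')) {
      if (!event.startsWith('data: ')) continue
      let data: { chunk?: string; done?: boolean; error?: string }
      try {
        data = JSON.parse(event.substring(6))
      } catch {
        continue // ignore partial frames split across network chunks
      }
      if (data.error) throw new Error(data.error)
      if (data.chunk) {
        accumulated += data.chunk
        onChunk(data.chunk)
      }
      if (data.done) return accumulated
    }
  }
  return accumulated
}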

View File

@@ -0,0 +1,67 @@
import { HelpCircle, LibraryBig, ScrollText, Settings, Shapes } from 'lucide-react'
import { NavigationItem } from '@/app/workspace/[workspaceId]/w/components/sidebar/components/navigation-item/navigation-item'
import { getKeyboardShortcutText } from '@/app/workspace/[workspaceId]/w/hooks/use-keyboard-shortcuts'
interface FloatingNavigationProps {
workspaceId: string
pathname: string
onShowSettings: () => void
onShowHelp: () => void
bottom: number
}
export const FloatingNavigation = ({
workspaceId,
pathname,
onShowSettings,
onShowHelp,
bottom,
}: FloatingNavigationProps) => {
// Navigation items with their respective actions
const navigationItems = [
{
id: 'settings',
icon: Settings,
onClick: onShowSettings,
tooltip: 'Settings',
},
{
id: 'help',
icon: HelpCircle,
onClick: onShowHelp,
tooltip: 'Help',
},
{
id: 'logs',
icon: ScrollText,
href: `/workspace/${workspaceId}/logs`,
tooltip: 'Logs',
shortcut: getKeyboardShortcutText('L', true, true),
active: pathname === `/workspace/${workspaceId}/logs`,
},
{
id: 'knowledge',
icon: LibraryBig,
href: `/workspace/${workspaceId}/knowledge`,
tooltip: 'Knowledge',
active: pathname === `/workspace/${workspaceId}/knowledge`,
},
{
id: 'templates',
icon: Shapes,
href: `/workspace/${workspaceId}/templates`,
tooltip: 'Templates',
active: pathname === `/workspace/${workspaceId}/templates`,
},
]
return (
<div className='pointer-events-auto fixed left-4 z-50 w-56' style={{ bottom: `${bottom}px` }}>
<div className='flex items-center gap-1'>
{navigationItems.map((item) => (
<NavigationItem key={item.id} item={item} />
))}
</div>
</div>
)
}

View File

@@ -1,9 +1,12 @@
export { CreateMenu } from './create-menu/create-menu'
export { FloatingNavigation } from './floating-navigation/floating-navigation'
export { FolderTree } from './folder-tree/folder-tree'
export { HelpModal } from './help-modal/help-modal'
export { KeyboardShortcut } from './keyboard-shortcut/keyboard-shortcut'
export { KnowledgeBaseTags } from './knowledge-base-tags/knowledge-base-tags'
export { KnowledgeTags } from './knowledge-tags/knowledge-tags'
export { LogsFilters } from './logs-filters/logs-filters'
export { NavigationItem } from './navigation-item/navigation-item'
export { SettingsModal } from './settings-modal/settings-modal'
export { SubscriptionModal } from './subscription-modal/subscription-modal'
export { Toolbar } from './toolbar/toolbar'

View File

@@ -0,0 +1,32 @@
import { cn } from '@/lib/utils'
interface KeyboardShortcutProps {
shortcut: string
className?: string
}
export const KeyboardShortcut = ({ shortcut, className }: KeyboardShortcutProps) => {
const parts = shortcut.split('+')
// Helper function to determine if a part is a symbol that should be larger
const isSymbol = (part: string) => {
return ['⌘', '⇧', '⌥', '⌃'].includes(part)
}
return (
<kbd
className={cn(
'flex h-6 w-8 items-center justify-center rounded-[5px] border border-border bg-background font-mono text-[#CDCDCD] text-xs dark:text-[#454545]',
className
)}
>
<span className='flex items-center justify-center gap-[1px] pt-[1px]'>
{parts.map((part, index) => (
<span key={index} className={cn(isSymbol(part) ? 'text-[17px]' : 'text-xs')}>
{part}
</span>
))}
</span>
</kbd>
)
}
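Example usage (illustrative): the shortcut string format matches what the component splits on '+', and the import comes from the sidebar components barrel shown below.
import { KeyboardShortcut } from '@/app/workspace/[workspaceId]/w/components/sidebar/components'

// Render the shortcut badge next to a menu label.
const LogsLabel = () => (
  <span className='flex items-center gap-2'>
    Logs
    <KeyboardShortcut shortcut='⌘+⇧+L' />
  </span>
)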

View File

@@ -0,0 +1,64 @@
import { Button, Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui'
import { cn } from '@/lib/utils'
interface NavigationItemProps {
item: {
id: string
icon: React.ElementType
onClick?: () => void
href?: string
tooltip: string
shortcut?: string
active?: boolean
disabled?: boolean
}
}
export const NavigationItem = ({ item }: NavigationItemProps) => {
// Settings and help buttons get gray hover, others get purple hover
const isGrayHover = item.id === 'settings' || item.id === 'help'
const content = item.disabled ? (
<div className='inline-flex h-[42px] w-[42px] cursor-not-allowed items-center justify-center gap-2 whitespace-nowrap rounded-[11px] border bg-card font-medium text-card-foreground text-sm opacity-50 ring-offset-background transition-colors [&_svg]:pointer-events-none [&_svg]:size-4 [&_svg]:shrink-0'>
<item.icon className='h-4 w-4' />
</div>
) : (
<Button
variant='outline'
onClick={item.onClick}
className={cn(
'h-[42px] w-[42px] rounded-[10px] border bg-background text-foreground shadow-xs transition-all duration-200',
isGrayHover && 'hover:bg-secondary',
!isGrayHover &&
'hover:border-[var(--brand-primary-hex)] hover:bg-[var(--brand-primary-hex)] hover:text-white',
item.active && 'border-[var(--brand-primary-hex)] bg-[var(--brand-primary-hex)] text-white'
)}
>
<item.icon className='h-4 w-4' />
</Button>
)
if (item.href && !item.disabled) {
return (
<Tooltip>
<TooltipTrigger asChild>
<a href={item.href} className='inline-block'>
{content}
</a>
</TooltipTrigger>
<TooltipContent side='top' command={item.shortcut}>
{item.tooltip}
</TooltipContent>
</Tooltip>
)
}
return (
<Tooltip>
<TooltipTrigger asChild>{content}</TooltipTrigger>
<TooltipContent side='top' command={item.shortcut}>
{item.tooltip}
</TooltipContent>
</Tooltip>
)
}

View File

@@ -44,7 +44,7 @@ interface WorkspaceSelectorProps {
onWorkspaceUpdate: () => Promise<void>
onSwitchWorkspace: (workspace: Workspace) => Promise<void>
onCreateWorkspace: () => Promise<void>
onDeleteWorkspace: (workspace: Workspace, templateAction?: 'keep' | 'delete') => Promise<void>
onLeaveWorkspace: (workspace: Workspace) => Promise<void>
updateWorkspaceName: (workspaceId: string, newName: string) => Promise<boolean>
isDeleting: boolean
@@ -76,6 +76,14 @@ export function WorkspaceSelector({
const [isRenaming, setIsRenaming] = useState(false)
const [deleteConfirmationName, setDeleteConfirmationName] = useState('')
const [leaveConfirmationName, setLeaveConfirmationName] = useState('')
const [isCheckingTemplates, setIsCheckingTemplates] = useState(false)
const [showTemplateChoice, setShowTemplateChoice] = useState(false)
const [templatesInfo, setTemplatesInfo] = useState<{
count: number
templates: Array<{ id: string; name: string }>
} | null>(null)
const [isDeleteDialogOpen, setIsDeleteDialogOpen] = useState(false)
const [workspaceToDelete, setWorkspaceToDelete] = useState<Workspace | null>(null)
// Refs
const scrollAreaRef = useRef<HTMLDivElement>(null)
@@ -206,15 +214,82 @@ export function WorkspaceSelector({
)
/**
* Reset delete dialog state
*/
const resetDeleteState = useCallback(() => {
setDeleteConfirmationName('')
setShowTemplateChoice(false)
setTemplatesInfo(null)
setIsCheckingTemplates(false)
setWorkspaceToDelete(null)
}, [])
/**
* Handle dialog close
*/
const handleDialogClose = useCallback(
(open: boolean) => {
if (!open) {
resetDeleteState()
}
setIsDeleteDialogOpen(open)
},
[resetDeleteState]
)
/**
* Handle template choice action
*/
const handleTemplateAction = useCallback(
async (action: 'keep' | 'delete') => {
if (!workspaceToDelete) return
setShowTemplateChoice(false)
setTemplatesInfo(null)
setDeleteConfirmationName('')
await onDeleteWorkspace(workspaceToDelete, action)
setWorkspaceToDelete(null)
setIsDeleteDialogOpen(false)
},
[workspaceToDelete, onDeleteWorkspace]
)
/**
* Check for templates and handle deletion
*/
const handleDeleteClick = useCallback(async () => {
if (!workspaceToDelete) return
setIsCheckingTemplates(true)
try {
const checkResponse = await fetch(
`/api/workspaces/${workspaceToDelete.id}?check-templates=true`
)
if (checkResponse.ok) {
const templateCheck = await checkResponse.json()
if (templateCheck.hasPublishedTemplates && templateCheck.count > 0) {
// Templates exist - show template choice
setTemplatesInfo({
count: templateCheck.count,
templates: templateCheck.publishedTemplates,
})
setShowTemplateChoice(true)
setIsCheckingTemplates(false)
return
}
}
} catch (error) {
logger.error('Error checking templates:', error)
}
// No templates or error - proceed with deletion
setIsCheckingTemplates(false)
setDeleteConfirmationName('')
await onDeleteWorkspace(workspaceToDelete)
setWorkspaceToDelete(null)
setIsDeleteDialogOpen(false)
}, [workspaceToDelete, onDeleteWorkspace])
/**
* Confirm leave workspace
*/
@@ -352,7 +427,7 @@ export function WorkspaceSelector({
<Input
value={leaveConfirmationName}
onChange={(e) => setLeaveConfirmationName(e.target.value)}
placeholder={workspace.name}
className='h-9'
/>
</div>
@@ -381,66 +456,21 @@ export function WorkspaceSelector({
{/* Delete Workspace - for admin users */}
{workspace.permissions === 'admin' && (
<AlertDialog>
<AlertDialogTrigger asChild>
<Button
variant='ghost'
size='icon'
onClick={(e) => e.stopPropagation()}
className={cn(
'h-4 w-4 p-0 text-muted-foreground transition-colors hover:bg-transparent hover:text-foreground',
!isEditing && isHovered ? 'opacity-100' : 'pointer-events-none opacity-0'
)}
>
<Trash2 className='!h-3.5 !w-3.5' />
</Button>
</AlertDialogTrigger>
<AlertDialogContent>
<AlertDialogHeader>
<AlertDialogTitle>Delete workspace?</AlertDialogTitle>
<AlertDialogDescription>
Deleting this workspace will permanently remove all associated workflows,
logs, and knowledge bases.{' '}
<span className='text-red-500 dark:text-red-500'>
This action cannot be undone.
</span>
</AlertDialogDescription>
</AlertDialogHeader>
<div className='py-2'>
<p className='mb-2 font-[360] text-sm'>
Enter the workspace name{' '}
<span className='font-semibold'>{workspace.name}</span> to confirm.
</p>
<Input
value={deleteConfirmationName}
onChange={(e) => setDeleteConfirmationName(e.target.value)}
placeholder='Placeholder'
className='h-9 rounded-[8px]'
/>
</div>
<AlertDialogFooter className='flex'>
<AlertDialogCancel
className='h-9 w-full rounded-[8px]'
onClick={() => setDeleteConfirmationName('')}
>
Cancel
</AlertDialogCancel>
<AlertDialogAction
onClick={() => {
confirmDeleteWorkspace(workspace)
setDeleteConfirmationName('')
}}
className='h-9 w-full rounded-[8px] bg-red-500 text-white transition-all duration-200 hover:bg-red-600 dark:bg-red-500 dark:hover:bg-red-600'
disabled={isDeleting || deleteConfirmationName !== workspace.name}
>
{isDeleting ? 'Deleting...' : 'Delete'}
</AlertDialogAction>
</AlertDialogFooter>
</AlertDialogContent>
</AlertDialog>
<Button
variant='ghost'
size='icon'
onClick={(e) => {
e.stopPropagation()
setWorkspaceToDelete(workspace)
setIsDeleteDialogOpen(true)
}}
className={cn(
'h-4 w-4 p-0 text-muted-foreground transition-colors hover:bg-transparent hover:text-foreground',
!isEditing && isHovered ? 'opacity-100' : 'pointer-events-none opacity-0'
)}
>
<Trash2 className='!h-3.5 !w-3.5' />
</Button>
)}
</div>
</div>
@@ -496,6 +526,106 @@ export function WorkspaceSelector({
</div>
</div>
{/* Centralized Delete Workspace Dialog */}
<AlertDialog open={isDeleteDialogOpen} onOpenChange={handleDialogClose}>
<AlertDialogContent>
<AlertDialogHeader>
<AlertDialogTitle>
{showTemplateChoice
? 'Delete workspace with published templates?'
: 'Delete workspace?'}
</AlertDialogTitle>
<AlertDialogDescription>
{showTemplateChoice ? (
<>
This workspace contains {templatesInfo?.count} published template
{templatesInfo?.count === 1 ? '' : 's'}:
<br />
<br />
{templatesInfo?.templates.map((template) => (
<span key={template.id} className='block'>
{template.name}
</span>
))}
<br />
What would you like to do with the published templates?
</>
) : (
<>
Deleting this workspace will permanently remove all associated workflows, logs,
and knowledge bases.{' '}
<span className='text-red-500 dark:text-red-500'>
This action cannot be undone.
</span>
</>
)}
</AlertDialogDescription>
</AlertDialogHeader>
{showTemplateChoice ? (
<div className='flex gap-2 py-2'>
<Button
onClick={() => handleTemplateAction('keep')}
className='h-9 flex-1 rounded-[8px]'
variant='outline'
disabled={isDeleting}
>
{isDeleting ? 'Deleting...' : 'Keep Templates'}
</Button>
<Button
onClick={() => handleTemplateAction('delete')}
className='h-9 flex-1 rounded-[8px] bg-red-500 text-white transition-all duration-200 hover:bg-red-600 dark:bg-red-500 dark:hover:bg-red-600'
disabled={isDeleting}
>
{isDeleting ? 'Deleting...' : 'Delete Templates'}
</Button>
</div>
) : (
<div className='py-2'>
<p className='mb-2 font-[360] text-sm'>
Enter the workspace name{' '}
<span className='font-semibold'>{workspaceToDelete?.name}</span> to confirm.
</p>
<Input
value={deleteConfirmationName}
onChange={(e) => setDeleteConfirmationName(e.target.value)}
placeholder={workspaceToDelete?.name}
className='h-9 rounded-[8px]'
/>
</div>
)}
{!showTemplateChoice && (
<AlertDialogFooter className='flex'>
<Button
variant='outline'
className='h-9 w-full rounded-[8px]'
onClick={() => {
resetDeleteState()
setIsDeleteDialogOpen(false)
}}
>
Cancel
</Button>
<Button
onClick={(e) => {
e.preventDefault()
handleDeleteClick()
}}
className='h-9 w-full rounded-[8px] bg-red-500 text-white transition-all duration-200 hover:bg-red-600 dark:bg-red-500 dark:hover:bg-red-600'
disabled={
isDeleting ||
deleteConfirmationName !== workspaceToDelete?.name ||
isCheckingTemplates
}
>
{isDeleting ? 'Deleting...' : isCheckingTemplates ? 'Deleting...' : 'Delete'}
</Button>
</AlertDialogFooter>
)}
</AlertDialogContent>
</AlertDialog>
{/* Invite Modal */}
<InviteModal
open={showInviteMembers}

View File

@@ -1,20 +1,21 @@
'use client'
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { HelpCircle, LibraryBig, ScrollText, Search, Settings, Shapes } from 'lucide-react'
import { Search } from 'lucide-react'
import { useParams, usePathname, useRouter } from 'next/navigation'
import { Button, ScrollArea, Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui'
import { ScrollArea } from '@/components/ui'
import { useSession } from '@/lib/auth-client'
import { getEnv, isTruthy } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { generateWorkspaceName } from '@/lib/naming'
import { cn } from '@/lib/utils'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
import { SearchModal } from '@/app/workspace/[workspaceId]/w/components/search-modal/search-modal'
import {
CreateMenu,
FloatingNavigation,
FolderTree,
HelpModal,
KeyboardShortcut,
KnowledgeBaseTags,
KnowledgeTags,
LogsFilters,
@@ -26,6 +27,7 @@ import {
WorkspaceSelector,
} from '@/app/workspace/[workspaceId]/w/components/sidebar/components'
import { InviteModal } from '@/app/workspace/[workspaceId]/w/components/sidebar/components/workspace-selector/components/invite-modal/invite-modal'
import { useAutoScroll } from '@/app/workspace/[workspaceId]/w/hooks/use-auto-scroll'
import {
getKeyboardShortcutText,
useGlobalShortcuts,
@@ -40,109 +42,6 @@ const SIDEBAR_GAP = 12 // 12px gap between components - easily editable
const isBillingEnabled = isTruthy(getEnv('NEXT_PUBLIC_BILLING_ENABLED'))
/**
* Optimized auto-scroll hook for smooth drag operations
* Extracted outside component for better performance
*/
const useAutoScroll = (containerRef: React.RefObject<HTMLDivElement | null>) => {
const animationRef = useRef<number | null>(null)
const speedRef = useRef<number>(0)
const lastUpdateRef = useRef<number>(0)
const animateScroll = useCallback(() => {
const scrollContainer = containerRef.current?.querySelector(
'[data-radix-scroll-area-viewport]'
) as HTMLElement
if (!scrollContainer || speedRef.current === 0) {
animationRef.current = null
return
}
const currentScrollTop = scrollContainer.scrollTop
const maxScrollTop = scrollContainer.scrollHeight - scrollContainer.clientHeight
// Check bounds and stop if needed
if (
(speedRef.current < 0 && currentScrollTop <= 0) ||
(speedRef.current > 0 && currentScrollTop >= maxScrollTop)
) {
speedRef.current = 0
animationRef.current = null
return
}
// Apply smooth scroll
scrollContainer.scrollTop = Math.max(
0,
Math.min(maxScrollTop, currentScrollTop + speedRef.current)
)
animationRef.current = requestAnimationFrame(animateScroll)
}, [containerRef])
const startScroll = useCallback(
(speed: number) => {
speedRef.current = speed
if (!animationRef.current) {
animationRef.current = requestAnimationFrame(animateScroll)
}
},
[animateScroll]
)
const stopScroll = useCallback(() => {
if (animationRef.current) {
cancelAnimationFrame(animationRef.current)
animationRef.current = null
}
speedRef.current = 0
}, [])
const handleDragOver = useCallback(
(e: DragEvent) => {
const now = performance.now()
// Throttle to ~16ms for 60fps
if (now - lastUpdateRef.current < 16) return
lastUpdateRef.current = now
const scrollContainer = containerRef.current
if (!scrollContainer) return
const rect = scrollContainer.getBoundingClientRect()
const mouseY = e.clientY
// Early exit if mouse is outside container
if (mouseY < rect.top || mouseY > rect.bottom) {
stopScroll()
return
}
const scrollZone = 50
const maxSpeed = 4
const distanceFromTop = mouseY - rect.top
const distanceFromBottom = rect.bottom - mouseY
let scrollSpeed = 0
if (distanceFromTop < scrollZone) {
const intensity = (scrollZone - distanceFromTop) / scrollZone
scrollSpeed = -maxSpeed * intensity ** 2
} else if (distanceFromBottom < scrollZone) {
const intensity = (scrollZone - distanceFromBottom) / scrollZone
scrollSpeed = maxSpeed * intensity ** 2
}
if (Math.abs(scrollSpeed) > 0.1) {
startScroll(scrollSpeed)
} else {
stopScroll()
}
},
[containerRef, startScroll, stopScroll]
)
return { handleDragOver, stopScroll }
}
// Heights for dynamic calculation (in px)
const SIDEBAR_HEIGHTS = {
CONTAINER_PADDING: 32, // p-4 = 16px top + 16px bottom (bottom provides control bar spacing match)
@@ -204,6 +103,7 @@ export function Sidebar() {
const [isCreatingWorkspace, setIsCreatingWorkspace] = useState(false)
// Add sidebar collapsed state
const [isSidebarCollapsed, setIsSidebarCollapsed] = useState(false)
const params = useParams()
const workspaceId = params.workspaceId as string
const workflowId = params.workflowId as string
@@ -509,16 +409,22 @@ export function Sidebar() {
}, [refreshWorkspaceList, switchWorkspace, isCreatingWorkspace])
/**
* Confirm delete workspace (called from regular deletion dialog)
*/
const confirmDeleteWorkspace = useCallback(
async (workspaceToDelete: Workspace, templateAction?: 'keep' | 'delete') => {
setIsDeleting(true)
try {
logger.info('Deleting workspace:', workspaceToDelete.id)
const deleteTemplates = templateAction === 'delete'
const response = await fetch(`/api/workspaces/${workspaceToDelete.id}`, {
method: 'DELETE',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({ deleteTemplates }),
})
if (!response.ok) {
@@ -961,44 +867,6 @@ export function Sidebar() {
}
}, [stopScroll])
// Navigation items with their respective actions
const navigationItems = [
{
id: 'settings',
icon: Settings,
onClick: () => setShowSettings(true),
tooltip: 'Settings',
},
{
id: 'help',
icon: HelpCircle,
onClick: () => setShowHelp(true),
tooltip: 'Help',
},
{
id: 'logs',
icon: ScrollText,
href: `/workspace/${workspaceId}/logs`,
tooltip: 'Logs',
shortcut: getKeyboardShortcutText('L', true, true),
active: pathname === `/workspace/${workspaceId}/logs`,
},
{
id: 'knowledge',
icon: LibraryBig,
href: `/workspace/${workspaceId}/knowledge`,
tooltip: 'Knowledge',
active: pathname === `/workspace/${workspaceId}/knowledge`,
},
{
id: 'templates',
icon: Shapes,
href: `/workspace/${workspaceId}/templates`,
tooltip: 'Templates',
active: pathname === `/workspace/${workspaceId}/templates`,
},
]
return (
<>
{/* Main Sidebar - Overlay */}
@@ -1155,16 +1023,13 @@ export function Sidebar() {
)}
{/* Floating Navigation - Always visible */}
<div
className='pointer-events-auto fixed left-4 z-50 w-56'
style={{ bottom: `${navigationBottom}px` }}
>
<div className='flex items-center gap-1'>
{navigationItems.map((item) => (
<NavigationItem key={item.id} item={item} />
))}
</div>
</div>
<FloatingNavigation
workspaceId={workspaceId}
pathname={pathname}
onShowSettings={() => setShowSettings(true)}
onShowHelp={() => setShowHelp(true)}
bottom={navigationBottom}
/>
{/* Modals */}
<SettingsModal open={showSettings} onOpenChange={setShowSettings} />
@@ -1183,98 +1048,3 @@ export function Sidebar() {
</>
)
}
// Keyboard Shortcut Component
interface KeyboardShortcutProps {
shortcut: string
className?: string
}
const KeyboardShortcut = ({ shortcut, className }: KeyboardShortcutProps) => {
const parts = shortcut.split('+')
// Helper function to determine if a part is a symbol that should be larger
const isSymbol = (part: string) => {
return ['⌘', '⇧', '⌥', '⌃'].includes(part)
}
return (
<kbd
className={cn(
'flex h-6 w-8 items-center justify-center rounded-[5px] border border-border bg-background font-mono text-[#CDCDCD] text-xs dark:text-[#454545]',
className
)}
>
<span className='flex items-center justify-center gap-[1px] pt-[1px]'>
{parts.map((part, index) => (
<span key={index} className={cn(isSymbol(part) ? 'text-[17px]' : 'text-xs')}>
{part}
</span>
))}
</span>
</kbd>
)
}
// Navigation Item Component
interface NavigationItemProps {
item: {
id: string
icon: React.ElementType
onClick?: () => void
href?: string
tooltip: string
shortcut?: string
active?: boolean
disabled?: boolean
}
}
const NavigationItem = ({ item }: NavigationItemProps) => {
// Settings and help buttons get gray hover, others get purple hover
const isGrayHover = item.id === 'settings' || item.id === 'help'
const content = item.disabled ? (
<div className='inline-flex h-[42px] w-[42px] cursor-not-allowed items-center justify-center gap-2 whitespace-nowrap rounded-[11px] border bg-card font-medium text-card-foreground text-sm opacity-50 ring-offset-background transition-colors [&_svg]:pointer-events-none [&_svg]:size-4 [&_svg]:shrink-0'>
<item.icon className='h-4 w-4' />
</div>
) : (
<Button
variant='outline'
onClick={item.onClick}
className={cn(
'h-[42px] w-[42px] rounded-[10px] border bg-background text-foreground shadow-xs transition-all duration-200',
isGrayHover && 'hover:bg-secondary',
!isGrayHover &&
'hover:border-[var(--brand-primary-hex)] hover:bg-[var(--brand-primary-hex)] hover:text-white',
item.active && 'border-[var(--brand-primary-hex)] bg-[var(--brand-primary-hex)] text-white'
)}
>
<item.icon className='h-4 w-4' />
</Button>
)
if (item.href && !item.disabled) {
return (
<Tooltip>
<TooltipTrigger asChild>
<a href={item.href} className='inline-block'>
{content}
</a>
</TooltipTrigger>
<TooltipContent side='top' command={item.shortcut}>
{item.tooltip}
</TooltipContent>
</Tooltip>
)
}
return (
<Tooltip>
<TooltipTrigger asChild>{content}</TooltipTrigger>
<TooltipContent side='top' command={item.shortcut}>
{item.tooltip}
</TooltipContent>
</Tooltip>
)
}

View File

@@ -0,0 +1,103 @@
import { useCallback, useRef } from 'react'
/**
* Optimized auto-scroll hook for smooth drag operations
*/
export const useAutoScroll = (containerRef: React.RefObject<HTMLDivElement | null>) => {
const animationRef = useRef<number | null>(null)
const speedRef = useRef<number>(0)
const lastUpdateRef = useRef<number>(0)
const animateScroll = useCallback(() => {
const scrollContainer = containerRef.current?.querySelector(
'[data-radix-scroll-area-viewport]'
) as HTMLElement
if (!scrollContainer || speedRef.current === 0) {
animationRef.current = null
return
}
const currentScrollTop = scrollContainer.scrollTop
const maxScrollTop = scrollContainer.scrollHeight - scrollContainer.clientHeight
// Check bounds and stop if needed
if (
(speedRef.current < 0 && currentScrollTop <= 0) ||
(speedRef.current > 0 && currentScrollTop >= maxScrollTop)
) {
speedRef.current = 0
animationRef.current = null
return
}
// Apply smooth scroll
scrollContainer.scrollTop = Math.max(
0,
Math.min(maxScrollTop, currentScrollTop + speedRef.current)
)
animationRef.current = requestAnimationFrame(animateScroll)
}, [containerRef])
const startScroll = useCallback(
(speed: number) => {
speedRef.current = speed
if (!animationRef.current) {
animationRef.current = requestAnimationFrame(animateScroll)
}
},
[animateScroll]
)
const stopScroll = useCallback(() => {
if (animationRef.current) {
cancelAnimationFrame(animationRef.current)
animationRef.current = null
}
speedRef.current = 0
}, [])
const handleDragOver = useCallback(
(e: DragEvent) => {
const now = performance.now()
// Throttle to ~16ms for 60fps
if (now - lastUpdateRef.current < 16) return
lastUpdateRef.current = now
const scrollContainer = containerRef.current
if (!scrollContainer) return
const rect = scrollContainer.getBoundingClientRect()
const mouseY = e.clientY
// Early exit if mouse is outside container
if (mouseY < rect.top || mouseY > rect.bottom) {
stopScroll()
return
}
const scrollZone = 50
const maxSpeed = 4
const distanceFromTop = mouseY - rect.top
const distanceFromBottom = rect.bottom - mouseY
let scrollSpeed = 0
if (distanceFromTop < scrollZone) {
const intensity = (scrollZone - distanceFromTop) / scrollZone
scrollSpeed = -maxSpeed * intensity ** 2
} else if (distanceFromBottom < scrollZone) {
const intensity = (scrollZone - distanceFromBottom) / scrollZone
scrollSpeed = maxSpeed * intensity ** 2
}
if (Math.abs(scrollSpeed) > 0.1) {
startScroll(scrollSpeed)
} else {
stopScroll()
}
},
[containerRef, startScroll, stopScroll]
)
return { handleDragOver, stopScroll }
}
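A sketch of how a consumer could wire this hook into native drag events; the component below is hypothetical, but note the ref must wrap a Radix ScrollArea, since animateScroll queries the [data-radix-scroll-area-viewport] element inside the container.
import { useEffect, useRef } from 'react'
import { ScrollArea } from '@/components/ui'
import { useAutoScroll } from '@/app/workspace/[workspaceId]/w/hooks/use-auto-scroll'

// Hypothetical consumer: auto-scroll a list while items are dragged near its edges.
export function DraggableList({ children }: { children: React.ReactNode }) {
  const containerRef = useRef<HTMLDivElement>(null)
  const { handleDragOver, stopScroll } = useAutoScroll(containerRef)

  useEffect(() => {
    const node = containerRef.current
    if (!node) return
    node.addEventListener('dragover', handleDragOver)
    // Stop any in-flight animation when the drag ends or the component unmounts.
    node.addEventListener('dragend', stopScroll)
    return () => {
      node.removeEventListener('dragover', handleDragOver)
      node.removeEventListener('dragend', stopScroll)
      stopScroll()
    }
  }, [handleDragOver, stopScroll])

  return (
    <div ref={containerRef}>
      <ScrollArea className='h-64'>{children}</ScrollArea>
    </div>
  )
}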

View File

@@ -13,7 +13,7 @@ export default function WorkspaceRootLayout({ children }: WorkspaceRootLayoutPro
const user = session.data?.user
? {
id: session.data.user.id,
name: session.data.user.name,
name: session.data.user.name ?? undefined,
email: session.data.user.email,
}
: undefined

View File

@@ -17,362 +17,363 @@ import { mergeSubblockState } from '@/stores/workflows/server-utils'
const logger = createLogger('TriggerWebhookExecution')
export type WebhookExecutionPayload = {
webhookId: string
workflowId: string
userId: string
provider: string
body: any
headers: Record<string, string>
path: string
blockId?: string
}
export async function executeWebhookJob(payload: WebhookExecutionPayload) {
const executionId = uuidv4()
const requestId = executionId.slice(0, 8)
logger.info(`[${requestId}] Starting webhook execution`, {
webhookId: payload.webhookId,
workflowId: payload.workflowId,
provider: payload.provider,
userId: payload.userId,
executionId,
})
// Initialize logging session outside try block so it's available in catch
const loggingSession = new LoggingSession(payload.workflowId, executionId, 'webhook', requestId)
try {
// Check usage limits first
const usageCheck = await checkServerSideUsageLimits(payload.userId)
if (usageCheck.isExceeded) {
logger.warn(
`[${requestId}] User ${payload.userId} has exceeded usage limits. Skipping webhook execution.`,
{
currentUsage: usageCheck.currentUsage,
limit: usageCheck.limit,
workflowId: payload.workflowId,
}
)
throw new Error(
usageCheck.message ||
'Usage limit exceeded. Please upgrade your plan to continue using webhooks.'
)
}
// Load workflow from normalized tables
const workflowData = await loadWorkflowFromNormalizedTables(payload.workflowId)
if (!workflowData) {
throw new Error(`Workflow not found: ${payload.workflowId}`)
}
const { blocks, edges, loops, parallels } = workflowData
// Get environment variables (matching workflow-execution pattern)
const [userEnv] = await db
.select()
.from(environmentTable)
.where(eq(environmentTable.userId, payload.userId))
.limit(1)
let decryptedEnvVars: Record<string, string> = {}
if (userEnv) {
const decryptionPromises = Object.entries((userEnv.variables as any) || {}).map(
async ([key, encryptedValue]) => {
try {
const { decrypted } = await decryptSecret(encryptedValue as string)
return [key, decrypted] as const
} catch (error: any) {
logger.error(`[${requestId}] Failed to decrypt environment variable "${key}":`, error)
throw new Error(`Failed to decrypt environment variable "${key}": ${error.message}`)
}
}
)
const decryptedPairs = await Promise.all(decryptionPromises)
decryptedEnvVars = Object.fromEntries(decryptedPairs)
}
// Start logging session
await loggingSession.safeStart({
userId: payload.userId,
workspaceId: '', // TODO: Get from workflow if needed
variables: decryptedEnvVars,
})
// Merge subblock states (matching workflow-execution pattern)
const mergedStates = mergeSubblockState(blocks, {})
// Process block states for execution
const processedBlockStates = Object.entries(mergedStates).reduce(
(acc, [blockId, blockState]) => {
acc[blockId] = Object.entries(blockState.subBlocks).reduce(
(subAcc, [key, subBlock]) => {
subAcc[key] = subBlock.value
return subAcc
},
{} as Record<string, any>
)
return acc
},
{} as Record<string, Record<string, any>>
)
// Handle workflow variables (for now, use empty object since we don't have workflow metadata)
const workflowVariables = {}
// Create serialized workflow
const serializer = new Serializer()
const serializedWorkflow = serializer.serializeWorkflow(
mergedStates,
edges,
loops || {},
parallels || {},
true // Enable validation during execution
)
// Handle special Airtable case
if (payload.provider === 'airtable') {
logger.info(`[${requestId}] Processing Airtable webhook via fetchAndProcessAirtablePayloads`)
// Load the actual webhook record from database to get providerConfig
const [webhookRecord] = await db
.select()
.from(webhook)
.where(eq(webhook.id, payload.webhookId))
.limit(1)
if (!webhookRecord) {
throw new Error(`Webhook record not found: ${payload.webhookId}`)
}
const webhookData = {
id: payload.webhookId,
provider: payload.provider,
providerConfig: webhookRecord.providerConfig,
}
// Create a mock workflow object for Airtable processing
const mockWorkflow = {
id: payload.workflowId,
userId: payload.userId,
}
// Get the processed Airtable input
const airtableInput = await fetchAndProcessAirtablePayloads(
webhookData,
mockWorkflow,
requestId
)
// If we got input (changes), execute the workflow like other providers
if (airtableInput) {
logger.info(`[${requestId}] Executing workflow with Airtable changes`)
// Create executor and execute (same as standard webhook flow)
const executor = new Executor({
workflow: serializedWorkflow,
currentBlockStates: processedBlockStates,
envVarValues: decryptedEnvVars,
workflowInput: airtableInput,
workflowVariables,
contextExtensions: {
executionId,
workspaceId: '',
},
})
// Set up logging on the executor
loggingSession.setupExecutor(executor)
// Execute the workflow
const result = await executor.execute(payload.workflowId, payload.blockId)
// Check if we got a StreamingExecution result
const executionResult =
'stream' in result && 'execution' in result ? result.execution : result
logger.info(`[${requestId}] Airtable webhook execution completed`, {
success: executionResult.success,
workflowId: payload.workflowId,
})
// Update workflow run counts on success
if (executionResult.success) {
await updateWorkflowRunCounts(payload.workflowId)
// Track execution in user stats
await db
.update(userStats)
.set({
totalWebhookTriggers: sql`total_webhook_triggers + 1`,
lastActive: sql`now()`,
})
.where(eq(userStats.userId, payload.userId))
}
// Build trace spans and complete logging session
const { traceSpans, totalDuration } = buildTraceSpans(executionResult)
await loggingSession.safeComplete({
endedAt: new Date().toISOString(),
totalDurationMs: totalDuration || 0,
finalOutput: executionResult.output || {},
traceSpans: traceSpans as any,
})
return {
success: executionResult.success,
workflowId: payload.workflowId,
executionId,
output: executionResult.output,
executedAt: new Date().toISOString(),
provider: payload.provider,
}
}
// No changes to process
logger.info(`[${requestId}] No Airtable changes to process`)
await loggingSession.safeComplete({
endedAt: new Date().toISOString(),
totalDurationMs: 0,
finalOutput: { message: 'No Airtable changes to process' },
traceSpans: [],
})
return {
success: true,
workflowId: payload.workflowId,
executionId,
output: { message: 'No Airtable changes to process' },
executedAt: new Date().toISOString(),
}
}
// Format input for standard webhooks
const mockWebhook = {
provider: payload.provider,
blockId: payload.blockId,
}
const mockWorkflow = {
id: payload.workflowId,
userId: payload.userId,
}
const mockRequest = {
headers: new Map(Object.entries(payload.headers)),
} as any
const input = formatWebhookInput(mockWebhook, mockWorkflow, payload.body, mockRequest)
if (!input && payload.provider === 'whatsapp') {
logger.info(`[${requestId}] No messages in WhatsApp payload, skipping execution`)
await loggingSession.safeComplete({
endedAt: new Date().toISOString(),
totalDurationMs: 0,
finalOutput: { message: 'No messages in WhatsApp payload' },
traceSpans: [],
})
return {
success: true,
workflowId: payload.workflowId,
executionId,
output: { message: 'No messages in WhatsApp payload' },
executedAt: new Date().toISOString(),
}
}
// Create executor and execute
const executor = new Executor({
workflow: serializedWorkflow,
currentBlockStates: processedBlockStates,
envVarValues: decryptedEnvVars,
workflowInput: input || {},
workflowVariables,
contextExtensions: {
executionId,
workspaceId: '', // TODO: Get from workflow if needed - see comment on line 103
},
})
// Set up logging on the executor
loggingSession.setupExecutor(executor)
logger.info(`[${requestId}] Executing workflow for ${payload.provider} webhook`)
// Execute the workflow
const result = await executor.execute(payload.workflowId, payload.blockId)
// Check if we got a StreamingExecution result
const executionResult = 'stream' in result && 'execution' in result ? result.execution : result
logger.info(`[${requestId}] Webhook execution completed`, {
success: executionResult.success,
workflowId: payload.workflowId,
provider: payload.provider,
})
// Update workflow run counts on success
if (executionResult.success) {
await updateWorkflowRunCounts(payload.workflowId)
// Track execution in user stats
await db
.update(userStats)
.set({
totalWebhookTriggers: sql`total_webhook_triggers + 1`,
lastActive: sql`now()`,
})
.where(eq(userStats.userId, payload.userId))
}
// Build trace spans and complete logging session
const { traceSpans, totalDuration } = buildTraceSpans(executionResult)
await loggingSession.safeComplete({
endedAt: new Date().toISOString(),
totalDurationMs: totalDuration || 0,
finalOutput: executionResult.output || {},
traceSpans: traceSpans as any,
})
return {
success: executionResult.success,
workflowId: payload.workflowId,
executionId,
output: executionResult.output,
executedAt: new Date().toISOString(),
provider: payload.provider,
}
} catch (error: any) {
logger.error(`[${requestId}] Webhook execution failed`, {
error: error.message,
stack: error.stack,
workflowId: payload.workflowId,
provider: payload.provider,
})
// Complete logging session with error (matching workflow-execution pattern)
try {
await loggingSession.safeCompleteWithError({
endedAt: new Date().toISOString(),
totalDurationMs: 0,
error: {
message: error.message || 'Webhook execution failed',
stackTrace: error.stack,
},
})
} catch (loggingError) {
logger.error(`[${requestId}] Failed to complete logging session`, loggingError)
}
throw error
}
}
export const webhookExecution = task({
id: 'webhook-execution',
retry: {
maxAttempts: 1,
},
run: async (payload: {
webhookId: string
workflowId: string
userId: string
provider: string
body: any
headers: Record<string, string>
path: string
blockId?: string
}) => {
const executionId = uuidv4()
const requestId = executionId.slice(0, 8)
logger.info(`[${requestId}] Starting webhook execution via trigger.dev`, {
webhookId: payload.webhookId,
workflowId: payload.workflowId,
provider: payload.provider,
userId: payload.userId,
executionId,
})
// Initialize logging session outside try block so it's available in catch
const loggingSession = new LoggingSession(payload.workflowId, executionId, 'webhook', requestId)
try {
// Check usage limits first
const usageCheck = await checkServerSideUsageLimits(payload.userId)
if (usageCheck.isExceeded) {
logger.warn(
`[${requestId}] User ${payload.userId} has exceeded usage limits. Skipping webhook execution.`,
{
currentUsage: usageCheck.currentUsage,
limit: usageCheck.limit,
workflowId: payload.workflowId,
}
)
throw new Error(
usageCheck.message ||
'Usage limit exceeded. Please upgrade your plan to continue using webhooks.'
)
}
// Load workflow from normalized tables
const workflowData = await loadWorkflowFromNormalizedTables(payload.workflowId)
if (!workflowData) {
throw new Error(`Workflow not found: ${payload.workflowId}`)
}
const { blocks, edges, loops, parallels } = workflowData
// Get environment variables (matching workflow-execution pattern)
const [userEnv] = await db
.select()
.from(environmentTable)
.where(eq(environmentTable.userId, payload.userId))
.limit(1)
let decryptedEnvVars: Record<string, string> = {}
if (userEnv) {
const decryptionPromises = Object.entries((userEnv.variables as any) || {}).map(
async ([key, encryptedValue]) => {
try {
const { decrypted } = await decryptSecret(encryptedValue as string)
return [key, decrypted] as const
} catch (error: any) {
logger.error(`[${requestId}] Failed to decrypt environment variable "${key}":`, error)
throw new Error(`Failed to decrypt environment variable "${key}": ${error.message}`)
}
}
)
const decryptedPairs = await Promise.all(decryptionPromises)
decryptedEnvVars = Object.fromEntries(decryptedPairs)
}
// Start logging session
await loggingSession.safeStart({
userId: payload.userId,
workspaceId: '', // TODO: Get from workflow if needed
variables: decryptedEnvVars,
})
// Merge subblock states (matching workflow-execution pattern)
const mergedStates = mergeSubblockState(blocks, {})
// Process block states for execution
const processedBlockStates = Object.entries(mergedStates).reduce(
(acc, [blockId, blockState]) => {
acc[blockId] = Object.entries(blockState.subBlocks).reduce(
(subAcc, [key, subBlock]) => {
subAcc[key] = subBlock.value
return subAcc
},
{} as Record<string, any>
)
return acc
},
{} as Record<string, Record<string, any>>
)
// Handle workflow variables (for now, use empty object since we don't have workflow metadata)
const workflowVariables = {}
// Create serialized workflow
const serializer = new Serializer()
const serializedWorkflow = serializer.serializeWorkflow(
mergedStates,
edges,
loops || {},
parallels || {},
true // Enable validation during execution
)
// Handle special Airtable case
if (payload.provider === 'airtable') {
logger.info(
`[${requestId}] Processing Airtable webhook via fetchAndProcessAirtablePayloads`
)
// Load the actual webhook record from database to get providerConfig
const [webhookRecord] = await db
.select()
.from(webhook)
.where(eq(webhook.id, payload.webhookId))
.limit(1)
if (!webhookRecord) {
throw new Error(`Webhook record not found: ${payload.webhookId}`)
}
const webhookData = {
id: payload.webhookId,
provider: payload.provider,
providerConfig: webhookRecord.providerConfig,
}
// Create a mock workflow object for Airtable processing
const mockWorkflow = {
id: payload.workflowId,
userId: payload.userId,
}
// Get the processed Airtable input
const airtableInput = await fetchAndProcessAirtablePayloads(
webhookData,
mockWorkflow,
requestId
)
// If we got input (changes), execute the workflow like other providers
if (airtableInput) {
logger.info(`[${requestId}] Executing workflow with Airtable changes`)
// Create executor and execute (same as standard webhook flow)
const executor = new Executor({
workflow: serializedWorkflow,
currentBlockStates: processedBlockStates,
envVarValues: decryptedEnvVars,
workflowInput: airtableInput,
workflowVariables,
contextExtensions: {
executionId,
workspaceId: '',
},
})
// Set up logging on the executor
loggingSession.setupExecutor(executor)
// Execute the workflow
const result = await executor.execute(payload.workflowId, payload.blockId)
// Check if we got a StreamingExecution result
const executionResult =
'stream' in result && 'execution' in result ? result.execution : result
logger.info(`[${requestId}] Airtable webhook execution completed`, {
success: executionResult.success,
workflowId: payload.workflowId,
})
// Update workflow run counts on success
if (executionResult.success) {
await updateWorkflowRunCounts(payload.workflowId)
// Track execution in user stats
await db
.update(userStats)
.set({
totalWebhookTriggers: sql`total_webhook_triggers + 1`,
lastActive: sql`now()`,
})
.where(eq(userStats.userId, payload.userId))
}
// Build trace spans and complete logging session
const { traceSpans, totalDuration } = buildTraceSpans(executionResult)
await loggingSession.safeComplete({
endedAt: new Date().toISOString(),
totalDurationMs: totalDuration || 0,
finalOutput: executionResult.output || {},
traceSpans: traceSpans as any,
})
return {
success: executionResult.success,
workflowId: payload.workflowId,
executionId,
output: executionResult.output,
executedAt: new Date().toISOString(),
provider: payload.provider,
}
}
// No changes to process
logger.info(`[${requestId}] No Airtable changes to process`)
await loggingSession.safeComplete({
endedAt: new Date().toISOString(),
totalDurationMs: 0,
finalOutput: { message: 'No Airtable changes to process' },
traceSpans: [],
})
return {
success: true,
workflowId: payload.workflowId,
executionId,
output: { message: 'No Airtable changes to process' },
executedAt: new Date().toISOString(),
}
}
// Format input for standard webhooks
const mockWebhook = {
provider: payload.provider,
blockId: payload.blockId,
}
const mockWorkflow = {
id: payload.workflowId,
userId: payload.userId,
}
const mockRequest = {
headers: new Map(Object.entries(payload.headers)),
} as any
const input = formatWebhookInput(mockWebhook, mockWorkflow, payload.body, mockRequest)
if (!input && payload.provider === 'whatsapp') {
logger.info(`[${requestId}] No messages in WhatsApp payload, skipping execution`)
await loggingSession.safeComplete({
endedAt: new Date().toISOString(),
totalDurationMs: 0,
finalOutput: { message: 'No messages in WhatsApp payload' },
traceSpans: [],
})
return {
success: true,
workflowId: payload.workflowId,
executionId,
output: { message: 'No messages in WhatsApp payload' },
executedAt: new Date().toISOString(),
}
}
// Create executor and execute
const executor = new Executor({
workflow: serializedWorkflow,
currentBlockStates: processedBlockStates,
envVarValues: decryptedEnvVars,
workflowInput: input || {},
workflowVariables,
contextExtensions: {
executionId,
workspaceId: '', // TODO: Get from workflow if needed - see comment on line 103
},
})
// Set up logging on the executor
loggingSession.setupExecutor(executor)
logger.info(`[${requestId}] Executing workflow for ${payload.provider} webhook`)
// Execute the workflow
const result = await executor.execute(payload.workflowId, payload.blockId)
// Check if we got a StreamingExecution result
const executionResult =
'stream' in result && 'execution' in result ? result.execution : result
logger.info(`[${requestId}] Webhook execution completed`, {
success: executionResult.success,
workflowId: payload.workflowId,
provider: payload.provider,
})
// Update workflow run counts on success
if (executionResult.success) {
await updateWorkflowRunCounts(payload.workflowId)
// Track execution in user stats
await db
.update(userStats)
.set({
totalWebhookTriggers: sql`total_webhook_triggers + 1`,
lastActive: sql`now()`,
})
.where(eq(userStats.userId, payload.userId))
}
// Build trace spans and complete logging session
const { traceSpans, totalDuration } = buildTraceSpans(executionResult)
await loggingSession.safeComplete({
endedAt: new Date().toISOString(),
totalDurationMs: totalDuration || 0,
finalOutput: executionResult.output || {},
traceSpans: traceSpans as any,
})
return {
success: executionResult.success,
workflowId: payload.workflowId,
executionId,
output: executionResult.output,
executedAt: new Date().toISOString(),
provider: payload.provider,
}
} catch (error: any) {
logger.error(`[${requestId}] Webhook execution failed`, {
error: error.message,
stack: error.stack,
workflowId: payload.workflowId,
provider: payload.provider,
})
// Complete logging session with error (matching workflow-execution pattern)
try {
await loggingSession.safeCompleteWithError({
endedAt: new Date().toISOString(),
totalDurationMs: 0,
error: {
message: error.message || 'Webhook execution failed',
stackTrace: error.stack,
},
})
} catch (loggingError) {
logger.error(`[${requestId}] Failed to complete logging session`, loggingError)
}
throw error // Let Trigger.dev handle retries
}
},
run: async (payload: WebhookExecutionPayload) => executeWebhookJob(payload),
})
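
The extraction above lets the same webhook logic run either inline (the native background-task path) or through the trigger.dev task, which now just delegates to `executeWebhookJob`. A minimal dispatch sketch follows; the import path, the `USE_TRIGGER_DEV` flag, and the helper itself are illustrative assumptions, not part of this change.

```ts
// Hypothetical dispatcher: names flagged below are assumptions, not lines from this diff.
import { executeWebhookJob, webhookExecution } from '@/background/webhook-execution' // assumed path

type WebhookJobPayload = Parameters<typeof executeWebhookJob>[0]

export async function dispatchWebhookExecution(payload: WebhookJobPayload) {
  // Assumption: an env flag decides whether trigger.dev handles background work.
  if (process.env.USE_TRIGGER_DEV === 'true') {
    // trigger.dev v3 tasks expose trigger(); the run is queued and retried per the task's config.
    const handle = await webhookExecution.trigger(payload)
    return { queued: true, runId: handle.id }
  }
  // Native path: run the job in-process and return its result directly.
  const result = await executeWebhookJob(payload)
  return { queued: false, success: result.success, executionId: result.executionId }
}
```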

View File

@@ -16,200 +16,202 @@ import { mergeSubblockState } from '@/stores/workflows/server-utils'
const logger = createLogger('TriggerWorkflowExecution')
export type WorkflowExecutionPayload = {
workflowId: string
userId: string
input?: any
triggerType?: 'api' | 'webhook' | 'schedule' | 'manual' | 'chat'
metadata?: Record<string, any>
}
export async function executeWorkflowJob(payload: WorkflowExecutionPayload) {
const workflowId = payload.workflowId
const executionId = uuidv4()
const requestId = executionId.slice(0, 8)
logger.info(`[${requestId}] Starting workflow execution: ${workflowId}`, {
userId: payload.userId,
triggerType: payload.triggerType,
executionId,
})
// Initialize logging session
const triggerType = payload.triggerType || 'api'
const loggingSession = new LoggingSession(workflowId, executionId, triggerType, requestId)
try {
const usageCheck = await checkServerSideUsageLimits(payload.userId)
if (usageCheck.isExceeded) {
logger.warn(
`[${requestId}] User ${payload.userId} has exceeded usage limits. Skipping workflow execution.`,
{
currentUsage: usageCheck.currentUsage,
limit: usageCheck.limit,
workflowId: payload.workflowId,
}
)
throw new Error(
usageCheck.message ||
'Usage limit exceeded. Please upgrade your plan to continue using workflows.'
)
}
// Load workflow data from deployed state (this task is only used for API executions right now)
const workflowData = await loadDeployedWorkflowState(workflowId)
const { blocks, edges, loops, parallels } = workflowData
// Merge subblock states (server-safe version doesn't need workflowId)
const mergedStates = mergeSubblockState(blocks, {})
// Process block states for execution
const processedBlockStates = Object.entries(mergedStates).reduce(
(acc, [blockId, blockState]) => {
acc[blockId] = Object.entries(blockState.subBlocks).reduce(
(subAcc, [key, subBlock]) => {
subAcc[key] = subBlock.value
return subAcc
},
{} as Record<string, any>
)
return acc
},
{} as Record<string, Record<string, any>>
)
// Get environment variables
const [userEnv] = await db
.select()
.from(environmentTable)
.where(eq(environmentTable.userId, payload.userId))
.limit(1)
let decryptedEnvVars: Record<string, string> = {}
if (userEnv) {
const decryptionPromises = Object.entries((userEnv.variables as any) || {}).map(
async ([key, encryptedValue]) => {
try {
const { decrypted } = await decryptSecret(encryptedValue as string)
return [key, decrypted] as const
} catch (error: any) {
logger.error(`[${requestId}] Failed to decrypt environment variable "${key}":`, error)
throw new Error(`Failed to decrypt environment variable "${key}": ${error.message}`)
}
}
)
const decryptedPairs = await Promise.all(decryptionPromises)
decryptedEnvVars = Object.fromEntries(decryptedPairs)
}
// Start logging session
await loggingSession.safeStart({
userId: payload.userId,
workspaceId: '', // TODO: Get from workflow if needed
variables: decryptedEnvVars,
})
// Create serialized workflow
const serializer = new Serializer()
const serializedWorkflow = serializer.serializeWorkflow(
mergedStates,
edges,
loops || {},
parallels || {},
true // Enable validation during execution
)
// Create executor and execute
const executor = new Executor({
workflow: serializedWorkflow,
currentBlockStates: processedBlockStates,
envVarValues: decryptedEnvVars,
workflowInput: payload.input || {},
workflowVariables: {},
contextExtensions: {
executionId,
workspaceId: '', // TODO: Get from workflow if needed - see comment on line 120
},
})
// Set up logging on the executor
loggingSession.setupExecutor(executor)
const result = await executor.execute(workflowId)
// Handle streaming vs regular result
const executionResult = 'stream' in result && 'execution' in result ? result.execution : result
logger.info(`[${requestId}] Workflow execution completed: ${workflowId}`, {
success: executionResult.success,
executionTime: executionResult.metadata?.duration,
executionId,
})
// Update workflow run counts on success
if (executionResult.success) {
await updateWorkflowRunCounts(workflowId)
// Track execution in user stats
const statsUpdate =
triggerType === 'api'
? { totalApiCalls: sql`total_api_calls + 1` }
: triggerType === 'webhook'
? { totalWebhookTriggers: sql`total_webhook_triggers + 1` }
: triggerType === 'schedule'
? { totalScheduledExecutions: sql`total_scheduled_executions + 1` }
: { totalManualExecutions: sql`total_manual_executions + 1` }
await db
.update(userStats)
.set({
...statsUpdate,
lastActive: sql`now()`,
})
.where(eq(userStats.userId, payload.userId))
}
// Build trace spans and complete logging session (for both success and failure)
const { traceSpans, totalDuration } = buildTraceSpans(executionResult)
await loggingSession.safeComplete({
endedAt: new Date().toISOString(),
totalDurationMs: totalDuration || 0,
finalOutput: executionResult.output || {},
traceSpans: traceSpans as any,
})
return {
success: executionResult.success,
workflowId: payload.workflowId,
executionId,
output: executionResult.output,
executedAt: new Date().toISOString(),
metadata: payload.metadata,
}
} catch (error: any) {
logger.error(`[${requestId}] Workflow execution failed: ${workflowId}`, {
error: error.message,
stack: error.stack,
})
await loggingSession.safeCompleteWithError({
endedAt: new Date().toISOString(),
totalDurationMs: 0,
error: {
message: error.message || 'Workflow execution failed',
stackTrace: error.stack,
},
})
throw error
}
}
export const workflowExecution = task({
id: 'workflow-execution',
retry: {
maxAttempts: 1,
},
run: async (payload: {
workflowId: string
userId: string
input?: any
triggerType?: string
metadata?: Record<string, any>
}) => {
const workflowId = payload.workflowId
const executionId = uuidv4()
const requestId = executionId.slice(0, 8)
logger.info(`[${requestId}] Starting Trigger.dev workflow execution: ${workflowId}`, {
userId: payload.userId,
triggerType: payload.triggerType,
executionId,
})
// Initialize logging session
const triggerType =
(payload.triggerType as 'api' | 'webhook' | 'schedule' | 'manual' | 'chat') || 'api'
const loggingSession = new LoggingSession(workflowId, executionId, triggerType, requestId)
try {
const usageCheck = await checkServerSideUsageLimits(payload.userId)
if (usageCheck.isExceeded) {
logger.warn(
`[${requestId}] User ${payload.userId} has exceeded usage limits. Skipping workflow execution.`,
{
currentUsage: usageCheck.currentUsage,
limit: usageCheck.limit,
workflowId: payload.workflowId,
}
)
throw new Error(
usageCheck.message ||
'Usage limit exceeded. Please upgrade your plan to continue using workflows.'
)
}
// Load workflow data from deployed state (this task is only used for API executions right now)
const workflowData = await loadDeployedWorkflowState(workflowId)
const { blocks, edges, loops, parallels } = workflowData
// Merge subblock states (server-safe version doesn't need workflowId)
const mergedStates = mergeSubblockState(blocks, {})
// Process block states for execution
const processedBlockStates = Object.entries(mergedStates).reduce(
(acc, [blockId, blockState]) => {
acc[blockId] = Object.entries(blockState.subBlocks).reduce(
(subAcc, [key, subBlock]) => {
subAcc[key] = subBlock.value
return subAcc
},
{} as Record<string, any>
)
return acc
},
{} as Record<string, Record<string, any>>
)
// Get environment variables
const [userEnv] = await db
.select()
.from(environmentTable)
.where(eq(environmentTable.userId, payload.userId))
.limit(1)
let decryptedEnvVars: Record<string, string> = {}
if (userEnv) {
const decryptionPromises = Object.entries((userEnv.variables as any) || {}).map(
async ([key, encryptedValue]) => {
try {
const { decrypted } = await decryptSecret(encryptedValue as string)
return [key, decrypted] as const
} catch (error: any) {
logger.error(`[${requestId}] Failed to decrypt environment variable "${key}":`, error)
throw new Error(`Failed to decrypt environment variable "${key}": ${error.message}`)
}
}
)
const decryptedPairs = await Promise.all(decryptionPromises)
decryptedEnvVars = Object.fromEntries(decryptedPairs)
}
// Start logging session
await loggingSession.safeStart({
userId: payload.userId,
workspaceId: '', // TODO: Get from workflow if needed
variables: decryptedEnvVars,
})
// Create serialized workflow
const serializer = new Serializer()
const serializedWorkflow = serializer.serializeWorkflow(
mergedStates,
edges,
loops || {},
parallels || {},
true // Enable validation during execution
)
// Create executor and execute
const executor = new Executor({
workflow: serializedWorkflow,
currentBlockStates: processedBlockStates,
envVarValues: decryptedEnvVars,
workflowInput: payload.input || {},
workflowVariables: {},
contextExtensions: {
executionId,
workspaceId: '', // TODO: Get from workflow if needed - see comment on line 120
},
})
// Set up logging on the executor
loggingSession.setupExecutor(executor)
const result = await executor.execute(workflowId)
// Handle streaming vs regular result
const executionResult =
'stream' in result && 'execution' in result ? result.execution : result
logger.info(`[${requestId}] Workflow execution completed: ${workflowId}`, {
success: executionResult.success,
executionTime: executionResult.metadata?.duration,
executionId,
})
// Update workflow run counts on success
if (executionResult.success) {
await updateWorkflowRunCounts(workflowId)
// Track execution in user stats
const statsUpdate =
triggerType === 'api'
? { totalApiCalls: sql`total_api_calls + 1` }
: triggerType === 'webhook'
? { totalWebhookTriggers: sql`total_webhook_triggers + 1` }
: triggerType === 'schedule'
? { totalScheduledExecutions: sql`total_scheduled_executions + 1` }
: { totalManualExecutions: sql`total_manual_executions + 1` }
await db
.update(userStats)
.set({
...statsUpdate,
lastActive: sql`now()`,
})
.where(eq(userStats.userId, payload.userId))
}
// Build trace spans and complete logging session (for both success and failure)
const { traceSpans, totalDuration } = buildTraceSpans(executionResult)
await loggingSession.safeComplete({
endedAt: new Date().toISOString(),
totalDurationMs: totalDuration || 0,
finalOutput: executionResult.output || {},
traceSpans: traceSpans as any,
})
return {
success: executionResult.success,
workflowId: payload.workflowId,
executionId,
output: executionResult.output,
executedAt: new Date().toISOString(),
metadata: payload.metadata,
}
} catch (error: any) {
logger.error(`[${requestId}] Workflow execution failed: ${workflowId}`, {
error: error.message,
stack: error.stack,
})
await loggingSession.safeCompleteWithError({
endedAt: new Date().toISOString(),
totalDurationMs: 0,
error: {
message: error.message || 'Workflow execution failed',
stackTrace: error.stack,
},
})
throw error // Let Trigger.dev handle retries
}
},
run: async (payload: WorkflowExecutionPayload) => executeWorkflowJob(payload),
})

View File

@@ -106,6 +106,7 @@ export const JiraBlock: BlockConfig<JiraResponse> = {
layout: 'full',
required: true,
placeholder: 'Enter new summary for the issue',
dependsOn: ['issueKey'],
condition: { field: 'operation', value: ['update', 'write'] },
},
{
@@ -114,6 +115,7 @@ export const JiraBlock: BlockConfig<JiraResponse> = {
type: 'long-input',
layout: 'full',
placeholder: 'Enter new description for the issue',
dependsOn: ['issueKey'],
condition: { field: 'operation', value: ['update', 'write'] },
},
],

View File

@@ -32,7 +32,6 @@ export type SubBlockType =
| 'checkbox-list' // Multiple selection
| 'condition-input' // Conditional logic
| 'eval-input' // Evaluation input
| 'date-input' // Date input
| 'time-input' // Time input
| 'oauth-input' // OAuth credential selector
| 'webhook-config' // Webhook configuration

View File

@@ -6,6 +6,7 @@ import {
OTPVerificationEmail,
ResetPasswordEmail,
} from '@/components/emails'
import { getBrandConfig } from '@/lib/branding/branding'
export async function renderOTPEmail(
otp: string,
@@ -91,22 +92,24 @@ export function getEmailSubject(
| 'batch-invitation'
| 'help-confirmation'
): string {
const brandName = getBrandConfig().name
switch (type) {
case 'sign-in':
return 'Sign in to Sim'
return `Sign in to ${brandName}`
case 'email-verification':
return 'Verify your email for Sim'
return `Verify your email for ${brandName}`
case 'forget-password':
return 'Reset your Sim password'
return `Reset your ${brandName} password`
case 'reset-password':
return 'Reset your Sim password'
return `Reset your ${brandName} password`
case 'invitation':
return "You've been invited to join a team on Sim"
return `You've been invited to join a team on ${brandName}`
case 'batch-invitation':
return "You've been invited to join a team and workspaces on Sim"
return `You've been invited to join a team and workspaces on ${brandName}`
case 'help-confirmation':
return 'Your request has been received'
default:
return 'Sim'
return brandName
}
}
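
Since the subjects are now derived from `getBrandConfig().name`, a whitelabeled deployment only needs to configure the brand name to rebrand every transactional email. A short usage sketch, assuming the function is exported from the email rendering module and the brand name resolves to "Acme":

```ts
import { getEmailSubject } from '@/components/emails/render-email' // assumed path

// With getBrandConfig().name === 'Acme' (illustrative value):
console.log(getEmailSubject('sign-in')) // "Sign in to Acme"
console.log(getEmailSubject('forget-password')) // "Reset your Acme password"
console.log(getEmailSubject('help-confirmation')) // "Your request has been received" (unbranded)
```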

View File

@@ -1,62 +0,0 @@
'use client'
import type * as React from 'react'
import { ChevronLeft, ChevronRight } from 'lucide-react'
import { DayPicker } from 'react-day-picker'
import { buttonVariants } from '@/components/ui/button'
import { cn } from '@/lib/utils'
export type CalendarProps = React.ComponentProps<typeof DayPicker>
function Calendar({ className, classNames, showOutsideDays = true, ...props }: CalendarProps) {
return (
<DayPicker
showOutsideDays={showOutsideDays}
className={cn('p-3', className)}
classNames={{
months: 'flex flex-col sm:flex-row space-y-4 sm:space-x-4 sm:space-y-0',
month: 'space-y-4',
caption: 'flex justify-center pt-1 relative items-center',
caption_label: 'text-sm font-medium',
nav: 'space-x-1 flex items-center',
nav_button: cn(
buttonVariants({ variant: 'outline' }),
'h-7 w-7 bg-transparent p-0 opacity-50 hover:opacity-100'
),
nav_button_previous: 'absolute left-1',
nav_button_next: 'absolute right-1',
table: 'w-full border-collapse space-y-1',
head_row: 'flex',
head_cell: 'text-muted-foreground rounded-md w-9 font-normal text-[0.8rem]',
row: 'flex w-full mt-2',
cell: 'h-9 w-9 text-center text-sm p-0 relative [&:has([aria-selected].day-range-end)]:rounded-r-md [&:has([aria-selected].day-outside)]:bg-accent/50 [&:has([aria-selected])]:bg-accent first:[&:has([aria-selected])]:rounded-l-md last:[&:has([aria-selected])]:rounded-r-md focus-within:relative focus-within:z-20',
day: cn(
buttonVariants({ variant: 'ghost' }),
'h-9 w-9 p-0 font-normal aria-selected:opacity-100'
),
day_range_end: 'day-range-end',
day_selected:
'bg-primary text-primary-foreground hover:bg-primary hover:text-primary-foreground focus:bg-primary focus:text-primary-foreground',
day_today: 'bg-accent text-accent-foreground',
day_outside:
'day-outside text-muted-foreground aria-selected:bg-accent/50 aria-selected:text-muted-foreground',
day_disabled: 'text-muted-foreground opacity-50',
day_range_middle: 'aria-selected:bg-accent aria-selected:text-accent-foreground',
day_hidden: 'invisible',
...classNames,
}}
components={{
IconLeft: ({ className, ...props }) => (
<ChevronLeft className={cn('h-4 w-4', className)} {...props} />
),
IconRight: ({ className, ...props }) => (
<ChevronRight className={cn('h-4 w-4', className)} {...props} />
),
}}
{...props}
/>
)
}
Calendar.displayName = 'Calendar'
export { Calendar }

View File

@@ -22,7 +22,6 @@ export {
BreadcrumbSeparator,
} from './breadcrumb'
export { Button, buttonVariants } from './button'
export { Calendar } from './calendar'
export { Card, CardContent, CardDescription, CardFooter, CardHeader, CardTitle } from './card'
export { Checkbox } from './checkbox'
export { CodeBlock } from './code-block'

View File

@@ -108,6 +108,7 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
const [isConnecting, setIsConnecting] = useState(false)
const [currentWorkflowId, setCurrentWorkflowId] = useState<string | null>(null)
const [presenceUsers, setPresenceUsers] = useState<PresenceUser[]>([])
const initializedRef = useRef(false)
// Get current workflow ID from URL params
const params = useParams()
@@ -131,16 +132,16 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
// Helper function to generate a fresh socket token
const generateSocketToken = async (): Promise<string> => {
const tokenResponse = await fetch('/api/auth/socket-token', {
// Avoid overlapping token requests
const res = await fetch('/api/auth/socket-token', {
method: 'POST',
credentials: 'include',
headers: { 'cache-control': 'no-store' },
})
if (!tokenResponse.ok) {
throw new Error('Failed to generate socket token')
}
const { token } = await tokenResponse.json()
if (!res.ok) throw new Error('Failed to generate socket token')
const body = await res.json().catch(() => ({}))
const token = body?.token
if (!token || typeof token !== 'string') throw new Error('Invalid socket token')
return token
}
@@ -149,12 +150,13 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
if (!user?.id) return
// Only initialize if we don't have a socket and aren't already connecting
if (socket || isConnecting) {
if (initializedRef.current || socket || isConnecting) {
logger.info('Socket already exists or is connecting, skipping initialization')
return
}
logger.info('Initializing socket connection for user:', user.id)
initializedRef.current = true
setIsConnecting(true)
const initializeSocket = async () => {
@@ -178,17 +180,14 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
reconnectionDelay: 1000, // Start with 1 second delay
reconnectionDelayMax: 30000, // Max 30 second delay
timeout: 10000, // Back to original timeout
auth: (cb) => {
// Generate a fresh token for each connection attempt (including reconnections)
generateSocketToken()
.then((freshToken) => {
logger.info('Generated fresh token for connection attempt')
cb({ token: freshToken })
})
.catch((error) => {
logger.error('Failed to generate fresh token for connection:', error)
cb({ token: null }) // This will cause authentication to fail gracefully
})
auth: async (cb) => {
try {
const freshToken = await generateSocketToken()
cb({ token: freshToken })
} catch (error) {
logger.error('Failed to generate fresh token for connection:', error)
cb({ token: null })
}
},
})

apps/sim/db/consts.ts (new file, 19 lines)
View File

@@ -0,0 +1,19 @@
/**
* Database-only constants used in schema definitions and migrations.
* These constants are independent of application logic to keep the migrations container lightweight.
*/
/**
* Default free credits (in dollars) for new users
*/
export const DEFAULT_FREE_CREDITS = 10
/**
* Tag slots available for knowledge base documents and embeddings
*/
export const TAG_SLOTS = ['tag1', 'tag2', 'tag3', 'tag4', 'tag5', 'tag6', 'tag7'] as const
/**
* Type for tag slot names
*/
export type TagSlot = (typeof TAG_SLOTS)[number]
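
These constants are duplicated out of the app libraries specifically so schema code (see the `import { DEFAULT_FREE_CREDITS, TAG_SLOTS } from './consts'` change below) carries no application dependencies. A small illustrative sketch of consuming the tuple-derived `TagSlot` type; the helper is hypothetical and not part of this change:

```ts
import { TAG_SLOTS, type TagSlot } from './consts'

// Illustrative helper: collect non-empty tag values into a slot-keyed record,
// using the fixed TAG_SLOTS tuple as the source of truth.
function collectTags(
  values: Partial<Record<TagSlot, string | null>>
): Partial<Record<TagSlot, string>> {
  const result: Partial<Record<TagSlot, string>> = {}
  for (const slot of TAG_SLOTS) {
    const value = values[slot]
    if (value) result[slot] = value
  }
  return result
}

// collectTags({ tag1: 'finance', tag3: '' }) -> { tag1: 'finance' }
```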

View File

@@ -0,0 +1 @@
ALTER TABLE "templates" ALTER COLUMN "workflow_id" DROP NOT NULL;

File diff suppressed because it is too large

View File

@@ -533,6 +533,13 @@
"when": 1755375658161,
"tag": "0076_damp_vector",
"breakpoints": true
},
{
"idx": 77,
"version": "7",
"when": 1755809024626,
"tag": "0077_rapid_chimera",
"breakpoints": true
}
]
}

View File

@@ -16,8 +16,7 @@ import {
uuid,
vector,
} from 'drizzle-orm/pg-core'
import { DEFAULT_FREE_CREDITS } from '@/lib/billing/constants'
import { TAG_SLOTS } from '@/lib/constants/knowledge'
import { DEFAULT_FREE_CREDITS, TAG_SLOTS } from './consts'
// Custom tsvector type for full-text search
export const tsvector = customType<{
@@ -1062,9 +1061,7 @@ export const templates = pgTable(
'templates',
{
id: text('id').primaryKey(),
workflowId: text('workflow_id')
.notNull()
.references(() => workflow.id),
workflowId: text('workflow_id').references(() => workflow.id),
userId: text('user_id')
.notNull()
.references(() => user.id, { onDelete: 'cascade' }),

View File

@@ -27,6 +27,13 @@ export class TriggerBlockHandler implements BlockHandler {
): Promise<any> {
logger.info(`Executing trigger block: ${block.id} (Type: ${block.metadata?.id})`)
// If this trigger block was initialized with a precomputed output in the execution context
// (e.g., webhook payload injected at init), return it as-is to preserve the raw shape.
const existingState = context.blockStates.get(block.id)
if (existingState?.output && Object.keys(existingState.output).length > 0) {
return existingState.output
}
// For trigger blocks, return the starter block's output which contains the workflow input
// This ensures webhook data like message, sender, chat, etc. are accessible
const starterBlock = context.workflow?.blocks?.find((b) => b.metadata?.id === 'starter')
@@ -36,9 +43,10 @@ export class TriggerBlockHandler implements BlockHandler {
const starterOutput = starterState.output
// Generic handling for webhook triggers - extract provider-specific data
// Check if this is a webhook execution with nested structure
// Check if this is a webhook execution
if (starterOutput.webhook?.data) {
const webhookData = starterOutput.webhook.data
const webhookData = starterOutput.webhook?.data || {}
const provider = webhookData.provider
logger.debug(`Processing webhook trigger for block ${block.id}`, {
@@ -46,7 +54,21 @@ export class TriggerBlockHandler implements BlockHandler {
blockType: block.metadata?.id,
})
// Extract the flattened properties that should be at root level
// Provider-specific early return for GitHub: expose raw payload at root
if (provider === 'github') {
const payloadSource = webhookData.payload || {}
return {
...payloadSource,
webhook: starterOutput.webhook,
}
}
// Provider-specific early return for Airtable: preserve raw shape entirely
if (provider === 'airtable') {
return starterOutput
}
// Extract the flattened properties that should be at root level (non-GitHub/Airtable)
const result: any = {
// Always keep the input at root level
input: starterOutput.input,
@@ -67,70 +89,17 @@ export class TriggerBlockHandler implements BlockHandler {
const providerData = starterOutput[provider]
for (const [key, value] of Object.entries(providerData)) {
// Special handling for GitHub provider - copy all properties
if (provider === 'github') {
// For GitHub, copy all properties (objects and primitives) to root level
// For other providers, keep existing logic (only copy objects)
if (typeof value === 'object' && value !== null) {
// Don't overwrite existing top-level properties
if (!result[key]) {
// Special handling for complex objects that might have enumeration issues
if (typeof value === 'object' && value !== null) {
try {
// Deep clone complex objects to avoid reference issues
result[key] = JSON.parse(JSON.stringify(value))
} catch (error) {
// If JSON serialization fails, try direct assignment
result[key] = value
}
} else {
result[key] = value
}
}
} else {
// For other providers, keep existing logic (only copy objects)
if (typeof value === 'object' && value !== null) {
// Don't overwrite existing top-level properties
if (!result[key]) {
result[key] = value
}
result[key] = value
}
}
}
// Keep nested structure for backwards compatibility
result[provider] = providerData
// Special handling for GitHub complex objects that might not be copied by the main loop
if (provider === 'github') {
// Comprehensive GitHub object extraction from multiple possible sources
const githubObjects = ['repository', 'sender', 'pusher', 'commits', 'head_commit']
for (const objName of githubObjects) {
// ALWAYS try to get the object, even if something exists (fix for conflicts)
let objectValue = null
// Source 1: Direct from provider data
if (providerData[objName]) {
objectValue = providerData[objName]
}
// Source 2: From webhook payload (raw GitHub webhook)
else if (starterOutput.webhook?.data?.payload?.[objName]) {
objectValue = starterOutput.webhook.data.payload[objName]
}
// Source 3: For commits, try parsing JSON string version if no object found
else if (objName === 'commits' && typeof result.commits === 'string') {
try {
objectValue = JSON.parse(result.commits)
} catch (e) {
// Keep as string if parsing fails
objectValue = result.commits
}
}
// FORCE the object to root level (removed the !result[objName] condition)
if (objectValue !== null && objectValue !== undefined) {
result[objName] = objectValue
}
}
}
}
// Pattern 2: Provider data directly in webhook.data (based on actual structure)

View File

@@ -118,7 +118,13 @@ export class WorkflowBlockHandler implements BlockHandler {
if ((mappedResult as any).success === false) {
const childError = (mappedResult as any).error || 'Unknown error'
throw new Error(`Error in child workflow "${childWorkflowName}": ${childError}`)
const errorWithSpans = new Error(
`Error in child workflow "${childWorkflowName}": ${childError}`
) as any
// Attach trace spans and name for higher-level logging to consume
errorWithSpans.childTraceSpans = childTraceSpans
errorWithSpans.childWorkflowName = childWorkflowName
throw errorWithSpans
}
return mappedResult
@@ -306,11 +312,16 @@ export class WorkflowBlockHandler implements BlockHandler {
const success = childResult.success !== false
if (!success) {
logger.warn(`Child workflow ${childWorkflowName} failed`)
return {
const failure: Record<string, any> = {
success: false,
childWorkflowName,
error: childResult.error || 'Child workflow execution failed',
} as Record<string, any>
}
// Only include spans when present to keep output stable for callers/tests
if (Array.isArray(childTraceSpans) && childTraceSpans.length > 0) {
failure.childTraceSpans = childTraceSpans
}
return failure as Record<string, any>
}
let result = childResult
if (childResult?.output) {

View File

@@ -455,6 +455,14 @@ export class Executor {
success: false,
output: finalOutput,
error: 'Workflow execution was cancelled',
metadata: {
duration: Date.now() - startTime.getTime(),
startTime: context.metadata.startTime!,
workflowConnections: this.actualWorkflow.connections.map((conn: any) => ({
source: conn.source,
target: conn.target,
})),
},
logs: context.blockLogs,
}
}
@@ -503,6 +511,14 @@ export class Executor {
success: false,
output: finalOutput,
error: this.extractErrorMessage(error),
metadata: {
duration: Date.now() - startTime.getTime(),
startTime: context.metadata.startTime!,
workflowConnections: this.actualWorkflow.connections.map((conn: any) => ({
source: conn.source,
target: conn.target,
})),
},
logs: context.blockLogs,
}
} finally {
@@ -530,6 +546,14 @@ export class Executor {
success: false,
output: finalOutput,
error: 'Workflow execution was cancelled',
metadata: {
duration: Date.now() - new Date(context.metadata.startTime!).getTime(),
startTime: context.metadata.startTime!,
workflowConnections: this.actualWorkflow.connections.map((conn: any) => ({
source: conn.source,
target: conn.target,
})),
},
logs: context.blockLogs,
}
}
@@ -596,6 +620,14 @@ export class Executor {
success: false,
output: finalOutput,
error: this.extractErrorMessage(error),
metadata: {
duration: Date.now() - new Date(context.metadata.startTime!).getTime(),
startTime: context.metadata.startTime!,
workflowConnections: this.actualWorkflow.connections.map((conn: any) => ({
source: conn.source,
target: conn.target,
})),
},
logs: context.blockLogs,
}
}
@@ -1742,6 +1774,11 @@ export class Executor {
blockLog.durationMs =
new Date(blockLog.endedAt).getTime() - new Date(blockLog.startedAt).getTime()
// If this error came from a child workflow execution, persist its trace spans on the log
if (block.metadata?.id === BlockType.WORKFLOW) {
this.attachChildWorkflowSpansToLog(blockLog, error)
}
// Log the error even if we'll continue execution through error path
context.blockLogs.push(blockLog)
@@ -1820,6 +1857,11 @@ export class Executor {
status: error.status || 500,
}
// Preserve child workflow spans on the block state so downstream logging can render them
if (block.metadata?.id === BlockType.WORKFLOW) {
this.attachChildWorkflowSpansToOutput(errorOutput, error)
}
// Set block state with error output
context.blockStates.set(blockId, {
output: errorOutput,
@@ -1864,6 +1906,39 @@ export class Executor {
}
}
/**
* Copies child workflow trace spans from an error object into a block log.
* Ensures consistent structure and avoids duplication of inline guards.
*/
private attachChildWorkflowSpansToLog(blockLog: BlockLog, error: unknown): void {
const spans = (
error as { childTraceSpans?: TraceSpan[]; childWorkflowName?: string } | null | undefined
)?.childTraceSpans
if (Array.isArray(spans) && spans.length > 0) {
blockLog.output = {
...(blockLog.output || {}),
childTraceSpans: spans,
childWorkflowName: (error as { childWorkflowName?: string } | null | undefined)
?.childWorkflowName,
}
}
}
/**
* Copies child workflow trace spans from an error object into a normalized output.
*/
private attachChildWorkflowSpansToOutput(output: NormalizedBlockOutput, error: unknown): void {
const spans = (
error as { childTraceSpans?: TraceSpan[]; childWorkflowName?: string } | null | undefined
)?.childTraceSpans
if (Array.isArray(spans) && spans.length > 0) {
output.childTraceSpans = spans
output.childWorkflowName = (
error as { childWorkflowName?: string } | null | undefined
)?.childWorkflowName
}
}
/**
* Activates error paths from a block that had an error.
* Checks for connections from the block's "error" handle and adds them to the active execution path.

View File

@@ -1,3 +1,4 @@
import type { TraceSpan } from '@/lib/logs/types'
import type { BlockOutput } from '@/blocks/types'
import type { SerializedBlock, SerializedWorkflow } from '@/serializer/types'
@@ -52,6 +53,9 @@ export interface NormalizedBlockOutput {
headers?: Record<string, string> // HTTP headers
// Error handling
error?: string // Error message if block execution failed
// Child workflow introspection (for workflow blocks)
childTraceSpans?: TraceSpan[]
childWorkflowName?: string
}
/**

View File

@@ -34,8 +34,8 @@ export function useUserPermissions(
const { data: session } = useSession()
const userPermissions = useMemo((): WorkspaceUserPermissions => {
// If still loading or no session, return safe defaults
if (permissionsLoading || !session?.user?.email) {
const sessionEmail = session?.user?.email
if (permissionsLoading || !sessionEmail) {
return {
canRead: false,
canEdit: false,
@@ -48,13 +48,13 @@ export function useUserPermissions(
// Find current user in workspace permissions (case-insensitive)
const currentUser = workspacePermissions?.users?.find(
(user) => user.email.toLowerCase() === session.user.email.toLowerCase()
(user) => user.email.toLowerCase() === sessionEmail.toLowerCase()
)
// If user not found in workspace, they have no permissions
if (!currentUser) {
logger.warn('User not found in workspace permissions', {
userEmail: session.user.email,
userEmail: sessionEmail,
hasPermissions: !!workspacePermissions,
userCount: workspacePermissions?.users?.length || 0,
})

View File

@@ -1,8 +1,16 @@
import { useContext } from 'react'
import { stripeClient } from '@better-auth/stripe/client'
import { emailOTPClient, genericOAuthClient, organizationClient } from 'better-auth/client/plugins'
import {
customSessionClient,
emailOTPClient,
genericOAuthClient,
organizationClient,
} from 'better-auth/client/plugins'
import { createAuthClient } from 'better-auth/react'
import type { auth } from '@/lib/auth'
import { env, getEnv } from '@/lib/env'
import { isDev, isProd } from '@/lib/environment'
import { SessionContext, type SessionHookResult } from '@/lib/session-context'
export function getBaseURL() {
let baseURL
@@ -25,6 +33,7 @@ export const client = createAuthClient({
plugins: [
emailOTPClient(),
genericOAuthClient(),
customSessionClient<typeof auth>(),
// Only include Stripe client in production
...(isProd
? [
@@ -37,7 +46,17 @@ export const client = createAuthClient({
],
})
export const { useSession, useActiveOrganization } = client
export function useSession(): SessionHookResult {
const ctx = useContext(SessionContext)
if (!ctx) {
throw new Error(
'SessionProvider is not mounted. Wrap your app with <SessionProvider> in app/layout.tsx.'
)
}
return ctx
}
export const { useActiveOrganization } = client
export const useSubscription = () => {
// In development, provide mock implementations

View File

@@ -4,6 +4,7 @@ import { drizzleAdapter } from 'better-auth/adapters/drizzle'
import { nextCookies } from 'better-auth/next-js'
import {
createAuthMiddleware,
customSession,
emailOTP,
genericOAuth,
oneTimeToken,
@@ -208,6 +209,10 @@ export const auth = betterAuth({
oneTimeToken({
expiresIn: 24 * 60 * 60, // 24 hours - Socket.IO handles connection persistence with heartbeats
}),
customSession(async ({ user, session }) => ({
user,
session,
})),
emailOTP({
sendVerificationOTP: async (data: {
email: string
@@ -1480,8 +1485,9 @@ export const auth = betterAuth({
// Server-side auth helpers
export async function getSession() {
const hdrs = await headers()
return await auth.api.getSession({
headers: await headers(),
headers: hdrs,
})
}

View File

@@ -1,5 +1,14 @@
import { getEnv } from '@/lib/env'
export interface ThemeColors {
primaryColor?: string
primaryHoverColor?: string
secondaryColor?: string
accentColor?: string
accentHoverColor?: string
backgroundColor?: string
}
export interface BrandConfig {
name: string
logoUrl?: string
@@ -9,6 +18,7 @@ export interface BrandConfig {
documentationUrl?: string
termsUrl?: string
privacyUrl?: string
theme?: ThemeColors
}
/**
@@ -23,6 +33,29 @@ const defaultConfig: BrandConfig = {
documentationUrl: undefined,
termsUrl: undefined,
privacyUrl: undefined,
theme: {
primaryColor: '#701ffc',
primaryHoverColor: '#802fff',
secondaryColor: '#6518e6',
accentColor: '#9d54ff',
accentHoverColor: '#a66fff',
backgroundColor: '#0c0c0c',
},
}
const getThemeColors = (): ThemeColors => {
return {
primaryColor: getEnv('NEXT_PUBLIC_BRAND_PRIMARY_COLOR') || defaultConfig.theme?.primaryColor,
primaryHoverColor:
getEnv('NEXT_PUBLIC_BRAND_PRIMARY_HOVER_COLOR') || defaultConfig.theme?.primaryHoverColor,
secondaryColor:
getEnv('NEXT_PUBLIC_BRAND_SECONDARY_COLOR') || defaultConfig.theme?.secondaryColor,
accentColor: getEnv('NEXT_PUBLIC_BRAND_ACCENT_COLOR') || defaultConfig.theme?.accentColor,
accentHoverColor:
getEnv('NEXT_PUBLIC_BRAND_ACCENT_HOVER_COLOR') || defaultConfig.theme?.accentHoverColor,
backgroundColor:
getEnv('NEXT_PUBLIC_BRAND_BACKGROUND_COLOR') || defaultConfig.theme?.backgroundColor,
}
}
/**
@@ -39,6 +72,7 @@ export const getBrandConfig = (): BrandConfig => {
documentationUrl: getEnv('NEXT_PUBLIC_DOCUMENTATION_URL') || defaultConfig.documentationUrl,
termsUrl: getEnv('NEXT_PUBLIC_TERMS_URL') || defaultConfig.termsUrl,
privacyUrl: getEnv('NEXT_PUBLIC_PRIVACY_URL') || defaultConfig.privacyUrl,
theme: getThemeColors(),
}
}

View File

@@ -0,0 +1,29 @@
export function generateThemeCSS(): string {
const cssVars: string[] = []
if (process.env.NEXT_PUBLIC_BRAND_PRIMARY_COLOR) {
cssVars.push(`--brand-primary-hex: ${process.env.NEXT_PUBLIC_BRAND_PRIMARY_COLOR};`)
}
if (process.env.NEXT_PUBLIC_BRAND_PRIMARY_HOVER_COLOR) {
cssVars.push(`--brand-primary-hover-hex: ${process.env.NEXT_PUBLIC_BRAND_PRIMARY_HOVER_COLOR};`)
}
if (process.env.NEXT_PUBLIC_BRAND_SECONDARY_COLOR) {
cssVars.push(`--brand-secondary-hex: ${process.env.NEXT_PUBLIC_BRAND_SECONDARY_COLOR};`)
}
if (process.env.NEXT_PUBLIC_BRAND_ACCENT_COLOR) {
cssVars.push(`--brand-accent-hex: ${process.env.NEXT_PUBLIC_BRAND_ACCENT_COLOR};`)
}
if (process.env.NEXT_PUBLIC_BRAND_ACCENT_HOVER_COLOR) {
cssVars.push(`--brand-accent-hover-hex: ${process.env.NEXT_PUBLIC_BRAND_ACCENT_HOVER_COLOR};`)
}
if (process.env.NEXT_PUBLIC_BRAND_BACKGROUND_COLOR) {
cssVars.push(`--brand-background-hex: ${process.env.NEXT_PUBLIC_BRAND_BACKGROUND_COLOR};`)
}
return cssVars.length > 0 ? `:root { ${cssVars.join(' ')} }` : ''
}
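
`generateThemeCSS()` only emits variables for the brand colors that are actually configured, so it can be injected unconditionally. A minimal sketch of wiring it into a root layout; the module path and the layout shape are assumptions:

```tsx
import type { ReactNode } from 'react'
import { generateThemeCSS } from '@/lib/branding/theme-css' // assumed path

export default function RootLayout({ children }: { children: ReactNode }) {
  const themeCSS = generateThemeCSS() // empty string when no brand colors are set
  return (
    <html lang='en'>
      <head>{themeCSS && <style dangerouslySetInnerHTML={{ __html: themeCSS }} />}</head>
      <body>{children}</body>
    </html>
  )
}
```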

View File

@@ -116,7 +116,7 @@ async function parseDocument(
}> {
const isPDF = mimeType === 'application/pdf'
const hasAzureMistralOCR =
env.AZURE_OPENAI_API_KEY && env.OCR_AZURE_ENDPOINT && env.OCR_AZURE_MODEL_NAME
env.OCR_AZURE_API_KEY && env.OCR_AZURE_ENDPOINT && env.OCR_AZURE_MODEL_NAME
const hasMistralOCR = env.MISTRAL_API_KEY
// Check Azure Mistral OCR configuration
@@ -288,7 +288,7 @@ async function makeOCRRequest(endpoint: string, headers: Record<string, string>,
async function parseWithAzureMistralOCR(fileUrl: string, filename: string, mimeType: string) {
validateOCRConfig(
env.AZURE_OPENAI_API_KEY,
env.OCR_AZURE_API_KEY,
env.OCR_AZURE_ENDPOINT,
env.OCR_AZURE_MODEL_NAME,
'Azure Mistral OCR'
@@ -306,7 +306,7 @@ async function parseWithAzureMistralOCR(fileUrl: string, filename: string, mimeT
env.OCR_AZURE_ENDPOINT!,
{
'Content-Type': 'application/json',
Authorization: `Bearer ${env.AZURE_OPENAI_API_KEY}`,
Authorization: `Bearer ${env.OCR_AZURE_API_KEY}`,
},
{
model: env.OCR_AZURE_MODEL_NAME,

View File

@@ -16,203 +16,217 @@ export const env = createEnv({
server: {
// Core Database & Authentication
DATABASE_URL: z.string().url(), // Primary database connection string
BETTER_AUTH_URL: z.string().url(), // Base URL for Better Auth service
BETTER_AUTH_SECRET: z.string().min(32), // Secret key for Better Auth JWT signing
DISABLE_REGISTRATION: z.boolean().optional(), // Flag to disable new user registration
ALLOWED_LOGIN_EMAILS: z.string().optional(), // Comma-separated list of allowed email addresses for login
ALLOWED_LOGIN_DOMAINS: z.string().optional(), // Comma-separated list of allowed email domains for login
ENCRYPTION_KEY: z.string().min(32), // Key for encrypting sensitive data
INTERNAL_API_SECRET: z.string().min(32), // Secret for internal API authentication
COPILOT_API_KEY: z.string().min(1).optional(), // Secret for internal sim agent API authentication
SIM_AGENT_API_URL: z.string().url().optional(), // URL for internal sim agent API
AGENT_API_DB_ENCRYPTION_KEY: z.string().min(32).optional(), // Key for encrypting sensitive data for sim agent
AGENT_API_NETWORK_ENCRYPTION_KEY: z.string().min(32).optional(), // Key for encrypting sensitive data for sim agent
DATABASE_URL: z.string().url(), // Primary database connection string
BETTER_AUTH_URL: z.string().url(), // Base URL for Better Auth service
BETTER_AUTH_SECRET: z.string().min(32), // Secret key for Better Auth JWT signing
DISABLE_REGISTRATION: z.boolean().optional(), // Flag to disable new user registration
ALLOWED_LOGIN_EMAILS: z.string().optional(), // Comma-separated list of allowed email addresses for login
ALLOWED_LOGIN_DOMAINS: z.string().optional(), // Comma-separated list of allowed email domains for login
ENCRYPTION_KEY: z.string().min(32), // Key for encrypting sensitive data
INTERNAL_API_SECRET: z.string().min(32), // Secret for internal API authentication
COPILOT_API_KEY: z.string().min(1).optional(), // Secret for internal sim agent API authentication
SIM_AGENT_API_URL: z.string().url().optional(), // URL for internal sim agent API
AGENT_API_DB_ENCRYPTION_KEY: z.string().min(32).optional(), // Key for encrypting sensitive data for sim agent
AGENT_API_NETWORK_ENCRYPTION_KEY: z.string().min(32).optional(), // Key for encrypting sensitive data for sim agent
// Database & Storage
POSTGRES_URL: z.string().url().optional(), // Alternative PostgreSQL connection string
REDIS_URL: z.string().url().optional(), // Redis connection string for caching/sessions
POSTGRES_URL: z.string().url().optional(), // Alternative PostgreSQL connection string
REDIS_URL: z.string().url().optional(), // Redis connection string for caching/sessions
// Payment & Billing
STRIPE_SECRET_KEY: z.string().min(1).optional(), // Stripe secret key for payment processing
STRIPE_BILLING_WEBHOOK_SECRET: z.string().min(1).optional(), // Webhook secret for billing events
STRIPE_WEBHOOK_SECRET: z.string().min(1).optional(), // General Stripe webhook secret
STRIPE_FREE_PRICE_ID: z.string().min(1).optional(), // Stripe price ID for free tier
FREE_TIER_COST_LIMIT: z.number().optional(), // Cost limit for free tier users
STRIPE_PRO_PRICE_ID: z.string().min(1).optional(), // Stripe price ID for pro tier
PRO_TIER_COST_LIMIT: z.number().optional(), // Cost limit for pro tier users
STRIPE_TEAM_PRICE_ID: z.string().min(1).optional(), // Stripe price ID for team tier
TEAM_TIER_COST_LIMIT: z.number().optional(), // Cost limit for team tier users
STRIPE_ENTERPRISE_PRICE_ID: z.string().min(1).optional(), // Stripe price ID for enterprise tier
ENTERPRISE_TIER_COST_LIMIT: z.number().optional(), // Cost limit for enterprise tier users
BILLING_ENABLED: z.boolean().optional(), // Enable billing enforcement and usage tracking
STRIPE_SECRET_KEY: z.string().min(1).optional(), // Stripe secret key for payment processing
STRIPE_BILLING_WEBHOOK_SECRET: z.string().min(1).optional(), // Webhook secret for billing events
STRIPE_WEBHOOK_SECRET: z.string().min(1).optional(), // General Stripe webhook secret
STRIPE_FREE_PRICE_ID: z.string().min(1).optional(), // Stripe price ID for free tier
FREE_TIER_COST_LIMIT: z.number().optional(), // Cost limit for free tier users
STRIPE_PRO_PRICE_ID: z.string().min(1).optional(), // Stripe price ID for pro tier
PRO_TIER_COST_LIMIT: z.number().optional(), // Cost limit for pro tier users
STRIPE_TEAM_PRICE_ID: z.string().min(1).optional(), // Stripe price ID for team tier
TEAM_TIER_COST_LIMIT: z.number().optional(), // Cost limit for team tier users
STRIPE_ENTERPRISE_PRICE_ID: z.string().min(1).optional(), // Stripe price ID for enterprise tier
ENTERPRISE_TIER_COST_LIMIT: z.number().optional(), // Cost limit for enterprise tier users
BILLING_ENABLED: z.boolean().optional(), // Enable billing enforcement and usage tracking
// Email & Communication
RESEND_API_KEY: z.string().min(1).optional(), // Resend API key for transactional emails
FROM_EMAIL_ADDRESS: z.string().min(1).optional(), // Complete from address (e.g., "Sim <noreply@domain.com>" or "noreply@domain.com")
EMAIL_DOMAIN: z.string().min(1).optional(), // Domain for sending emails (fallback when FROM_EMAIL_ADDRESS not set)
AZURE_ACS_CONNECTION_STRING: z.string().optional(), // Azure Communication Services connection string
RESEND_API_KEY: z.string().min(1).optional(), // Resend API key for transactional emails
FROM_EMAIL_ADDRESS: z.string().min(1).optional(), // Complete from address (e.g., "Sim <noreply@domain.com>" or "noreply@domain.com")
EMAIL_DOMAIN: z.string().min(1).optional(), // Domain for sending emails (fallback when FROM_EMAIL_ADDRESS not set)
AZURE_ACS_CONNECTION_STRING: z.string().optional(), // Azure Communication Services connection string
// AI/LLM Provider API Keys
OPENAI_API_KEY: z.string().min(1).optional(), // Primary OpenAI API key
OPENAI_API_KEY_1: z.string().min(1).optional(), // Additional OpenAI API key for load balancing
OPENAI_API_KEY_2: z.string().min(1).optional(), // Additional OpenAI API key for load balancing
OPENAI_API_KEY_3: z.string().min(1).optional(), // Additional OpenAI API key for load balancing
MISTRAL_API_KEY: z.string().min(1).optional(), // Mistral AI API key
ANTHROPIC_API_KEY_1: z.string().min(1).optional(), // Primary Anthropic Claude API key
ANTHROPIC_API_KEY_2: z.string().min(1).optional(), // Additional Anthropic API key for load balancing
ANTHROPIC_API_KEY_3: z.string().min(1).optional(), // Additional Anthropic API key for load balancing
OLLAMA_URL: z.string().url().optional(), // Ollama local LLM server URL
ELEVENLABS_API_KEY: z.string().min(1).optional(), // ElevenLabs API key for text-to-speech in deployed chat
SERPER_API_KEY: z.string().min(1).optional(), // Serper API key for online search
OPENAI_API_KEY: z.string().min(1).optional(), // Primary OpenAI API key
OPENAI_API_KEY_1: z.string().min(1).optional(), // Additional OpenAI API key for load balancing
OPENAI_API_KEY_2: z.string().min(1).optional(), // Additional OpenAI API key for load balancing
OPENAI_API_KEY_3: z.string().min(1).optional(), // Additional OpenAI API key for load balancing
MISTRAL_API_KEY: z.string().min(1).optional(), // Mistral AI API key
ANTHROPIC_API_KEY_1: z.string().min(1).optional(), // Primary Anthropic Claude API key
ANTHROPIC_API_KEY_2: z.string().min(1).optional(), // Additional Anthropic API key for load balancing
ANTHROPIC_API_KEY_3: z.string().min(1).optional(), // Additional Anthropic API key for load balancing
OLLAMA_URL: z.string().url().optional(), // Ollama local LLM server URL
ELEVENLABS_API_KEY: z.string().min(1).optional(), // ElevenLabs API key for text-to-speech in deployed chat
SERPER_API_KEY: z.string().min(1).optional(), // Serper API key for online search
// Azure Configuration - Shared credentials with feature-specific models
AZURE_OPENAI_ENDPOINT: z.string().url().optional(), // Shared Azure OpenAI service endpoint
AZURE_OPENAI_API_VERSION: z.string().optional(), // Shared Azure OpenAI API version
AZURE_OPENAI_API_KEY: z.string().min(1).optional(), // Shared Azure OpenAI API key
KB_OPENAI_MODEL_NAME: z.string().optional(), // Knowledge base OpenAI model name (works with both regular OpenAI and Azure OpenAI)
WAND_OPENAI_MODEL_NAME: z.string().optional(), // Wand generation OpenAI model name (works with both regular OpenAI and Azure OpenAI)
OCR_AZURE_ENDPOINT: z.string().url().optional(), // Azure Mistral OCR service endpoint
OCR_AZURE_MODEL_NAME: z.string().optional(), // Azure Mistral OCR model name for document processing
OCR_AZURE_API_KEY: z.string().min(1).optional(), // Azure Mistral OCR API key
// Monitoring & Analytics
TELEMETRY_ENDPOINT: z.string().url().optional(), // Custom telemetry/analytics endpoint
COST_MULTIPLIER: z.number().optional(), // Multiplier for cost calculations
SENTRY_ORG: z.string().optional(), // Sentry organization for error tracking
SENTRY_PROJECT: z.string().optional(), // Sentry project for error tracking
SENTRY_AUTH_TOKEN: z.string().optional(), // Sentry authentication token
LOG_LEVEL: z.enum(['DEBUG', 'INFO', 'WARN', 'ERROR']).optional(), // Minimum log level to display (defaults to ERROR in production, DEBUG in development)
// External Services
JWT_SECRET: z.string().min(1).optional(), // JWT signing secret for custom tokens
BROWSERBASE_API_KEY: z.string().min(1).optional(), // Browserbase API key for browser automation
BROWSERBASE_PROJECT_ID: z.string().min(1).optional(), // Browserbase project ID
GITHUB_TOKEN: z.string().optional(), // GitHub personal access token for API access
// Infrastructure & Deployment
NEXT_RUNTIME: z.string().optional(), // Next.js runtime environment
VERCEL_ENV: z.string().optional(), // Vercel deployment environment
DOCKER_BUILD: z.boolean().optional(), // Flag indicating Docker build environment
// Background Jobs & Scheduling
TRIGGER_SECRET_KEY: z.string().min(1).optional(), // Trigger.dev secret key for background jobs
TRIGGER_DEV_ENABLED: z.boolean().optional(), // Toggle to enable/disable Trigger.dev for async jobs
CRON_SECRET: z.string().optional(), // Secret for authenticating cron job requests
JOB_RETENTION_DAYS: z.string().optional().default('1'), // Days to retain job logs/data
// Cloud Storage - AWS S3
AWS_REGION: z.string().optional(), // AWS region for S3 buckets
AWS_ACCESS_KEY_ID: z.string().optional(), // AWS access key ID
AWS_SECRET_ACCESS_KEY: z.string().optional(), // AWS secret access key
S3_BUCKET_NAME: z.string().optional(), // S3 bucket for general file storage
S3_LOGS_BUCKET_NAME: z.string().optional(), // S3 bucket for storing logs
S3_KB_BUCKET_NAME: z.string().optional(), // S3 bucket for knowledge base files
S3_EXECUTION_FILES_BUCKET_NAME: z.string().optional(), // S3 bucket for workflow execution files
S3_CHAT_BUCKET_NAME: z.string().optional(), // S3 bucket for chat logos
S3_COPILOT_BUCKET_NAME: z.string().optional(), // S3 bucket for copilot files
// Cloud Storage - Azure Blob
AZURE_ACCOUNT_NAME: z.string().optional(), // Azure storage account name
AZURE_ACCOUNT_KEY: z.string().optional(), // Azure storage account key
AZURE_CONNECTION_STRING: z.string().optional(), // Azure storage connection string
AZURE_STORAGE_CONTAINER_NAME: z.string().optional(), // Azure container for general files
AZURE_STORAGE_KB_CONTAINER_NAME: z.string().optional(), // Azure container for knowledge base files
AZURE_STORAGE_EXECUTION_FILES_CONTAINER_NAME: z.string().optional(), // Azure container for workflow execution files
AZURE_STORAGE_CHAT_CONTAINER_NAME: z.string().optional(), // Azure container for chat logos
AZURE_STORAGE_COPILOT_CONTAINER_NAME: z.string().optional(), // Azure container for copilot files
// Data Retention
FREE_PLAN_LOG_RETENTION_DAYS: z.string().optional(), // Log retention days for free plan users
// Rate Limiting Configuration
RATE_LIMIT_WINDOW_MS: z.string().optional().default('60000'), // Rate limit window duration in milliseconds (default: 1 minute)
MANUAL_EXECUTION_LIMIT: z.string().optional().default('999999'), // Manual execution bypass value (effectively unlimited)
RATE_LIMIT_FREE_SYNC: z.string().optional().default('10'), // Free tier sync API executions per minute
RATE_LIMIT_FREE_ASYNC: z.string().optional().default('50'), // Free tier async API executions per minute
RATE_LIMIT_PRO_SYNC: z.string().optional().default('25'), // Pro tier sync API executions per minute
RATE_LIMIT_PRO_ASYNC: z.string().optional().default('200'), // Pro tier async API executions per minute
RATE_LIMIT_TEAM_SYNC: z.string().optional().default('75'), // Team tier sync API executions per minute
RATE_LIMIT_TEAM_ASYNC: z.string().optional().default('500'), // Team tier async API executions per minute
RATE_LIMIT_ENTERPRISE_SYNC: z.string().optional().default('150'), // Enterprise tier sync API executions per minute
RATE_LIMIT_ENTERPRISE_ASYNC: z.string().optional().default('1000'), // Enterprise tier async API executions per minute
// Real-time Communication
SOCKET_SERVER_URL: z.string().url().optional(), // WebSocket server URL for real-time features
SOCKET_PORT: z.number().optional(), // Port for WebSocket server
PORT: z.number().optional(), // Main application port
ALLOWED_ORIGINS: z.string().optional(), // CORS allowed origins
// OAuth Integration Credentials - All optional, enables third-party integrations
GOOGLE_CLIENT_ID: z.string().optional(), // Google OAuth client ID for Google services
GOOGLE_CLIENT_SECRET: z.string().optional(), // Google OAuth client secret
GITHUB_CLIENT_ID: z.string().optional(), // GitHub OAuth client ID for GitHub integration
GITHUB_CLIENT_SECRET: z.string().optional(), // GitHub OAuth client secret
GITHUB_REPO_CLIENT_ID: z.string().optional(), // GitHub OAuth client ID for repo access
GITHUB_REPO_CLIENT_SECRET: z.string().optional(), // GitHub OAuth client secret for repo access
X_CLIENT_ID: z.string().optional(), // X (Twitter) OAuth client ID
X_CLIENT_SECRET: z.string().optional(), // X (Twitter) OAuth client secret
CONFLUENCE_CLIENT_ID: z.string().optional(), // Atlassian Confluence OAuth client ID
CONFLUENCE_CLIENT_SECRET: z.string().optional(), // Atlassian Confluence OAuth client secret
JIRA_CLIENT_ID: z.string().optional(), // Atlassian Jira OAuth client ID
JIRA_CLIENT_SECRET: z.string().optional(), // Atlassian Jira OAuth client secret
AIRTABLE_CLIENT_ID: z.string().optional(), // Airtable OAuth client ID
AIRTABLE_CLIENT_SECRET: z.string().optional(), // Airtable OAuth client secret
SUPABASE_CLIENT_ID: z.string().optional(), // Supabase OAuth client ID
SUPABASE_CLIENT_SECRET: z.string().optional(), // Supabase OAuth client secret
NOTION_CLIENT_ID: z.string().optional(), // Notion OAuth client ID
NOTION_CLIENT_SECRET: z.string().optional(), // Notion OAuth client secret
DISCORD_CLIENT_ID: z.string().optional(), // Discord OAuth client ID
DISCORD_CLIENT_SECRET: z.string().optional(), // Discord OAuth client secret
MICROSOFT_CLIENT_ID: z.string().optional(), // Microsoft OAuth client ID for Office 365/Teams
MICROSOFT_CLIENT_SECRET: z.string().optional(), // Microsoft OAuth client secret
HUBSPOT_CLIENT_ID: z.string().optional(), // HubSpot OAuth client ID
HUBSPOT_CLIENT_SECRET: z.string().optional(), // HubSpot OAuth client secret
WEALTHBOX_CLIENT_ID: z.string().optional(), // WealthBox OAuth client ID
WEALTHBOX_CLIENT_SECRET: z.string().optional(), // WealthBox OAuth client secret
LINEAR_CLIENT_ID: z.string().optional(), // Linear OAuth client ID
LINEAR_CLIENT_SECRET: z.string().optional(), // Linear OAuth client secret
SLACK_CLIENT_ID: z.string().optional(), // Slack OAuth client ID
SLACK_CLIENT_SECRET: z.string().optional(), // Slack OAuth client secret
REDDIT_CLIENT_ID: z.string().optional(), // Reddit OAuth client ID
REDDIT_CLIENT_SECRET: z.string().optional(), // Reddit OAuth client secret
},
client: {
// Core Application URLs - Required for frontend functionality
NEXT_PUBLIC_APP_URL: z.string().url(), // Base URL of the application (e.g., https://app.sim.ai)
NEXT_PUBLIC_VERCEL_URL: z.string().optional(), // Vercel deployment URL for preview/production
// Client-side Services
NEXT_PUBLIC_SENTRY_DSN: z.string().url().optional(), // Sentry DSN for client-side error tracking
NEXT_PUBLIC_SOCKET_URL: z.string().url().optional(), // WebSocket server URL for real-time features
// Asset Storage
NEXT_PUBLIC_BLOB_BASE_URL: z.string().url().optional(), // Base URL for Vercel Blob storage (CDN assets)
// Billing
NEXT_PUBLIC_BILLING_ENABLED: z.boolean().optional(), // Enable billing enforcement and usage tracking (client-side)
// Google Services - For client-side Google integrations
NEXT_PUBLIC_GOOGLE_CLIENT_ID: z.string().optional(), // Google OAuth client ID for browser auth
// Analytics & Tracking
NEXT_PUBLIC_RB2B_KEY: z.string().optional(), // RB2B tracking key for B2B analytics
NEXT_PUBLIC_GOOGLE_API_KEY: z.string().optional(), // Google API key for client-side API calls
NEXT_PUBLIC_GOOGLE_PROJECT_NUMBER: z.string().optional(), // Google project number for Drive picker
// UI Branding & Whitelabeling
NEXT_PUBLIC_BRAND_NAME: z.string().optional(), // Custom brand name (defaults to "Sim")
NEXT_PUBLIC_BRAND_LOGO_URL: z.string().url().optional(), // Custom logo URL
NEXT_PUBLIC_BRAND_FAVICON_URL: z.string().url().optional(), // Custom favicon URL
NEXT_PUBLIC_CUSTOM_CSS_URL: z.string().url().optional(), // Custom CSS stylesheet URL
NEXT_PUBLIC_SUPPORT_EMAIL: z.string().email().optional(), // Custom support email
NEXT_PUBLIC_DOCUMENTATION_URL: z.string().url().optional(), // Custom documentation URL
NEXT_PUBLIC_TERMS_URL: z.string().url().optional(), // Custom terms of service URL
NEXT_PUBLIC_PRIVACY_URL: z.string().url().optional(), // Custom privacy policy URL
// Theme Customization
NEXT_PUBLIC_BRAND_PRIMARY_COLOR: z.string().regex(/^#[0-9A-Fa-f]{6}$/).optional(), // Primary brand color (hex format, e.g., "#701ffc")
NEXT_PUBLIC_BRAND_PRIMARY_HOVER_COLOR: z.string().regex(/^#[0-9A-Fa-f]{6}$/).optional(), // Primary brand hover state (hex format)
NEXT_PUBLIC_BRAND_SECONDARY_COLOR: z.string().regex(/^#[0-9A-Fa-f]{6}$/).optional(), // Secondary brand color (hex format)
NEXT_PUBLIC_BRAND_ACCENT_COLOR: z.string().regex(/^#[0-9A-Fa-f]{6}$/).optional(), // Accent brand color (hex format)
NEXT_PUBLIC_BRAND_ACCENT_HOVER_COLOR: z.string().regex(/^#[0-9A-Fa-f]{6}$/).optional(), // Accent brand hover state (hex format)
NEXT_PUBLIC_BRAND_BACKGROUND_COLOR: z.string().regex(/^#[0-9A-Fa-f]{6}$/).optional(), // Brand background color (hex format)
// Feature Flags
NEXT_PUBLIC_TRIGGER_DEV_ENABLED: z.boolean().optional(), // Client-side gate for async executions UI
},
// Variables available on both server and client
shared: {
NODE_ENV: z.enum(['development', 'test', 'production']).optional(), // Runtime environment
NEXT_TELEMETRY_DISABLED: z.string().optional(), // Disable Next.js telemetry collection
},
experimental__runtimeEnv: {
@@ -234,6 +248,13 @@ export const env = createEnv({
NEXT_PUBLIC_DOCUMENTATION_URL: process.env.NEXT_PUBLIC_DOCUMENTATION_URL,
NEXT_PUBLIC_TERMS_URL: process.env.NEXT_PUBLIC_TERMS_URL,
NEXT_PUBLIC_PRIVACY_URL: process.env.NEXT_PUBLIC_PRIVACY_URL,
NEXT_PUBLIC_BRAND_PRIMARY_COLOR: process.env.NEXT_PUBLIC_BRAND_PRIMARY_COLOR,
NEXT_PUBLIC_BRAND_PRIMARY_HOVER_COLOR: process.env.NEXT_PUBLIC_BRAND_PRIMARY_HOVER_COLOR,
NEXT_PUBLIC_BRAND_SECONDARY_COLOR: process.env.NEXT_PUBLIC_BRAND_SECONDARY_COLOR,
NEXT_PUBLIC_BRAND_ACCENT_COLOR: process.env.NEXT_PUBLIC_BRAND_ACCENT_COLOR,
NEXT_PUBLIC_BRAND_ACCENT_HOVER_COLOR: process.env.NEXT_PUBLIC_BRAND_ACCENT_HOVER_COLOR,
NEXT_PUBLIC_BRAND_BACKGROUND_COLOR: process.env.NEXT_PUBLIC_BRAND_BACKGROUND_COLOR,
NEXT_PUBLIC_TRIGGER_DEV_ENABLED: process.env.NEXT_PUBLIC_TRIGGER_DEV_ENABLED,
NODE_ENV: process.env.NODE_ENV,
NEXT_TELEMETRY_DISABLED: process.env.NEXT_TELEMETRY_DISABLED,
},
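The new schema entries above (log level, Azure OCR key, Trigger.dev toggle, and theme colors) are all optional and read from the environment. A minimal `.env` sketch follows; the values are placeholders for illustration, not defaults shipped by the project:

```bash
# Override the per-environment log level default (DEBUG in development, ERROR in production)
LOG_LEVEL=INFO

# Azure Mistral OCR API key (pairs with OCR_AZURE_ENDPOINT and OCR_AZURE_MODEL_NAME)
OCR_AZURE_API_KEY=your-ocr-api-key

# Disable Trigger.dev so webhooks and async executions run natively
TRIGGER_DEV_ENABLED=false
NEXT_PUBLIC_TRIGGER_DEV_ENABLED=false

# Whitelabel theme colors (six-digit hex, per the regex above; hover value is illustrative)
NEXT_PUBLIC_BRAND_PRIMARY_COLOR=#701ffc
NEXT_PUBLIC_BRAND_PRIMARY_HOVER_COLOR=#802fff
```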

View File

@@ -29,6 +29,30 @@ export enum LogLevel {
ERROR = 'ERROR',
}
/**
* Get the minimum log level from environment variable or use defaults
* - Development: DEBUG (show all logs)
* - Production: ERROR (only show errors, but can be overridden by LOG_LEVEL env var)
* - Test: ERROR (only show errors in tests)
*/
const getMinLogLevel = (): LogLevel => {
if (env.LOG_LEVEL) {
return env.LOG_LEVEL as LogLevel
}
const ENV = (env.NODE_ENV || 'development') as string
switch (ENV) {
case 'development':
return LogLevel.DEBUG
case 'production':
return LogLevel.ERROR
case 'test':
return LogLevel.ERROR
default:
return LogLevel.DEBUG
}
}
/**
* Configuration for different environments
*
@@ -40,17 +64,17 @@ export enum LogLevel {
const LOG_CONFIG = {
development: {
enabled: true,
minLevel: LogLevel.DEBUG, // Show all logs in development
minLevel: getMinLogLevel(),
colorize: true,
},
production: {
enabled: true, // Will be checked at runtime
minLevel: LogLevel.ERROR,
minLevel: getMinLogLevel(),
colorize: false,
},
test: {
enabled: false, // Disable logs in test environment
minLevel: LogLevel.ERROR,
minLevel: getMinLogLevel(),
colorize: false,
},
}

View File

@@ -56,9 +56,15 @@ export function buildTraceSpans(result: ExecutionResult): {
}
}
// Prefer human-friendly workflow block naming if provided by child execution mapping
const displayName =
log.blockType === 'workflow' && log.output?.childWorkflowName
? `${log.output.childWorkflowName} workflow`
: log.blockName || log.blockId
const span: TraceSpan = {
id: spanId,
name: log.blockName || log.blockId,
name: displayName,
type: log.blockType,
duration: duration,
startTime: log.startedAt,
@@ -113,7 +119,10 @@ export function buildTraceSpans(result: ExecutionResult): {
const flatChildSpans: TraceSpan[] = []
childTraceSpans.forEach((childSpan) => {
// Skip the synthetic workflow span wrapper - we only want the actual block executions
if (childSpan.type === 'workflow' && childSpan.name === 'Workflow Execution') {
if (
childSpan.type === 'workflow' &&
(childSpan.name === 'Workflow Execution' || childSpan.name.endsWith(' workflow'))
) {
// Add its children directly, skipping the synthetic wrapper
if (childSpan.children && Array.isArray(childSpan.children)) {
flatChildSpans.push(...childSpan.children)
@@ -395,7 +404,10 @@ function ensureNestedWorkflowsProcessed(span: TraceSpan): TraceSpan {
childTraceSpans.forEach((childSpan) => {
// Skip synthetic workflow wrappers and get the actual blocks
if (childSpan.type === 'workflow' && childSpan.name === 'Workflow Execution') {
if (
childSpan.type === 'workflow' &&
(childSpan.name === 'Workflow Execution' || childSpan.name.endsWith(' workflow'))
) {
if (childSpan.children && Array.isArray(childSpan.children)) {
// Recursively process each child to handle deeper nesting
childSpan.children.forEach((grandchildSpan) => {

View File

@@ -0,0 +1,61 @@
'use client'
import type React from 'react'
import { createContext, useCallback, useEffect, useMemo, useState } from 'react'
import { client } from '@/lib/auth-client'
export type AppSession = {
user: {
id: string
email: string
emailVerified?: boolean
name?: string | null
image?: string | null
createdAt?: Date
updatedAt?: Date
} | null
session?: {
id?: string
userId?: string
activeOrganizationId?: string
}
} | null
export type SessionHookResult = {
data: AppSession
isPending: boolean
error: Error | null
refetch: () => Promise<void>
}
export const SessionContext = createContext<SessionHookResult | null>(null)
export function SessionProvider({ children }: { children: React.ReactNode }) {
const [data, setData] = useState<AppSession>(null)
const [isPending, setIsPending] = useState(true)
const [error, setError] = useState<Error | null>(null)
const loadSession = useCallback(async () => {
try {
setIsPending(true)
setError(null)
const res = await client.getSession()
setData(res?.data ?? null)
} catch (e) {
setError(e instanceof Error ? e : new Error('Failed to fetch session'))
} finally {
setIsPending(false)
}
}, [])
useEffect(() => {
loadSession()
}, [loadSession])
const value = useMemo<SessionHookResult>(
() => ({ data, isPending, error, refetch: loadSession }),
[data, isPending, error, loadSession]
)
return <SessionContext.Provider value={value}>{children}</SessionContext.Provider>
}

View File

@@ -607,19 +607,9 @@ export function formatWebhookInput(
}
return {
input, // Primary workflow input
// Top-level properties for backward compatibility
...githubData,
// GitHub data structured for trigger handler to extract
github: {
// Processed convenience variables
...githubData,
// Raw GitHub webhook payload for direct field access
...body,
},
// Expose raw GitHub payload at the root
...body,
// Include webhook metadata alongside
webhook: {
data: {
provider: 'github',
@@ -835,6 +825,8 @@ export async function fetchAndProcessAirtablePayloads(
let apiCallCount = 0
// Use a Map to consolidate changes per record ID
const consolidatedChangesMap = new Map<string, AirtableChange>()
// Capture raw payloads from Airtable for exposure to workflows
const allPayloads = []
const localProviderConfig = {
...((webhookData.providerConfig as Record<string, any>) || {}),
} // Local copy
@@ -1031,6 +1023,10 @@ export async function fetchAndProcessAirtablePayloads(
// --- Process and Consolidate Changes ---
if (receivedPayloads.length > 0) {
payloadsFetched += receivedPayloads.length
// Keep the raw payloads for later exposure to the workflow
for (const p of receivedPayloads) {
allPayloads.push(p)
}
let changeCount = 0
for (const payload of receivedPayloads) {
if (payload.changedTablesById) {
@@ -1196,10 +1192,25 @@ export async function fetchAndProcessAirtablePayloads(
)
// --- Execute Workflow if we have changes (simplified - no lock check) ---
if (finalConsolidatedChanges.length > 0) {
if (finalConsolidatedChanges.length > 0 || allPayloads.length > 0) {
try {
// Format the input for the executor using the consolidated changes
const input = { airtableChanges: finalConsolidatedChanges } // Use the consolidated array
// Build input exposing raw payloads and consolidated changes
const latestPayload = allPayloads.length > 0 ? allPayloads[allPayloads.length - 1] : null
const input: any = {
// Raw Airtable payloads as received from the API
payloads: allPayloads,
latestPayload,
// Consolidated, simplified changes for convenience
airtableChanges: finalConsolidatedChanges,
// Include webhook metadata for resolver fallbacks
webhook: {
data: {
provider: 'airtable',
providerConfig: webhookData.providerConfig,
payload: latestPayload,
},
},
}
// CRITICAL EXECUTION TRACE POINT
logger.info(
@@ -1216,6 +1227,7 @@ export async function fetchAndProcessAirtablePayloads(
logger.info(`[${requestId}] CRITICAL_TRACE: Airtable changes processed, returning input`, {
workflowId: workflowData.id,
recordCount: finalConsolidatedChanges.length,
rawPayloadCount: allPayloads.length,
timestamp: new Date().toISOString(),
})

View File

@@ -105,7 +105,6 @@
"prismjs": "^1.30.0",
"react": "19.1.0",
"react-colorful": "5.6.1",
"react-day-picker": "8.10.1",
"react-dom": "19.1.0",
"react-google-drive-picker": "^1.2.2",
"react-hook-form": "^7.54.2",

View File

@@ -74,6 +74,15 @@ export class Serializer {
// Extract parameters from UI state
const params = this.extractParams(block)
try {
const isTriggerCategory = blockConfig.category === 'triggers'
if (block.triggerMode === true || isTriggerCategory) {
params.triggerMode = true
}
} catch (_) {
// no-op: conservative, avoid blocking serialization if blockConfig is unexpected
}
// Validate required fields that only users can provide (before execution starts)
if (validateRequired) {
this.validateRequiredFieldsBeforeExecution(block, blockConfig, params)
@@ -385,6 +394,10 @@ export class Serializer {
subBlocks,
outputs: serializedBlock.outputs,
enabled: true,
// Restore trigger mode from serialized params; treat trigger category as triggers as well
triggerMode:
serializedBlock.config?.params?.triggerMode === true ||
serializedBlock.metadata?.category === 'triggers',
}
}
}

View File

@@ -482,9 +482,3 @@ export const useSubscriptionStore = create<SubscriptionStore>()(
{ name: 'subscription-store' }
)
)
// Auto-load subscription data when store is first accessed
if (typeof window !== 'undefined') {
// Load data in parallel on store creation
useSubscriptionStore.getState().loadData()
}

View File

@@ -603,4 +603,133 @@ describe('workflow store', () => {
expect(childBlock.data?.extent).toBe('parent')
})
})
describe('updateBlockName', () => {
beforeEach(() => {
useWorkflowStore.setState({
blocks: {},
edges: [],
loops: {},
parallels: {},
})
const { addBlock } = useWorkflowStore.getState()
addBlock('block1', 'agent', 'Column AD', { x: 0, y: 0 })
addBlock('block2', 'function', 'Employee Length', { x: 100, y: 0 })
addBlock('block3', 'trigger', 'Start', { x: 200, y: 0 })
})
it('should have test blocks set up correctly', () => {
const state = useWorkflowStore.getState()
expect(state.blocks.block1).toBeDefined()
expect(state.blocks.block1.name).toBe('Column AD')
expect(state.blocks.block2).toBeDefined()
expect(state.blocks.block2.name).toBe('Employee Length')
expect(state.blocks.block3).toBeDefined()
expect(state.blocks.block3.name).toBe('Start')
})
it('should successfully rename a block when no conflicts exist', () => {
const { updateBlockName } = useWorkflowStore.getState()
const result = updateBlockName('block1', 'Data Processor')
expect(result).toBe(true)
const state = useWorkflowStore.getState()
expect(state.blocks.block1.name).toBe('Data Processor')
})
it('should allow renaming a block to a different case/spacing of its current name', () => {
const { updateBlockName } = useWorkflowStore.getState()
const result = updateBlockName('block1', 'column ad')
expect(result).toBe(true)
const state = useWorkflowStore.getState()
expect(state.blocks.block1.name).toBe('column ad')
})
it('should prevent renaming when another block has the same normalized name', () => {
const { updateBlockName } = useWorkflowStore.getState()
const result = updateBlockName('block2', 'Column AD')
expect(result).toBe(false)
const state = useWorkflowStore.getState()
expect(state.blocks.block2.name).toBe('Employee Length')
})
it('should prevent renaming when another block has a name that normalizes to the same value', () => {
const { updateBlockName } = useWorkflowStore.getState()
const result = updateBlockName('block2', 'columnad')
expect(result).toBe(false)
const state = useWorkflowStore.getState()
expect(state.blocks.block2.name).toBe('Employee Length')
})
it('should prevent renaming when another block has a similar name with different spacing', () => {
const { updateBlockName } = useWorkflowStore.getState()
const result = updateBlockName('block3', 'employee length')
expect(result).toBe(false)
const state = useWorkflowStore.getState()
expect(state.blocks.block3.name).toBe('Start')
})
it('should handle edge cases with empty or whitespace-only names', () => {
const { updateBlockName } = useWorkflowStore.getState()
const result1 = updateBlockName('block1', '')
expect(result1).toBe(true)
const result2 = updateBlockName('block2', ' ')
expect(result2).toBe(true)
const state = useWorkflowStore.getState()
expect(state.blocks.block1.name).toBe('')
expect(state.blocks.block2.name).toBe(' ')
})
it('should return false when trying to rename a non-existent block', () => {
const { updateBlockName } = useWorkflowStore.getState()
const result = updateBlockName('nonexistent', 'New Name')
expect(result).toBe(false)
})
it('should handle complex normalization cases correctly', () => {
const { updateBlockName } = useWorkflowStore.getState()
const conflictingNames = [
'column ad',
'COLUMN AD',
'Column AD',
'columnad',
'ColumnAD',
'COLUMNAD',
]
for (const name of conflictingNames) {
const result = updateBlockName('block2', name)
expect(result).toBe(false)
}
const result = updateBlockName('block2', 'Unique Name')
expect(result).toBe(true)
const state = useWorkflowStore.getState()
expect(state.blocks.block2.name).toBe('Unique Name')
})
})
})

View File

@@ -601,7 +601,33 @@ export const useWorkflowStore = create<WorkflowStoreWithHistory>()(
updateBlockName: (id: string, name: string) => {
const oldBlock = get().blocks[id]
if (!oldBlock) return
if (!oldBlock) return false
// Helper function to normalize block names (same as resolver)
const normalizeBlockName = (blockName: string): string => {
return blockName.toLowerCase().replace(/\s+/g, '')
}
// Check for normalized name collisions
const normalizedNewName = normalizeBlockName(name)
const currentBlocks = get().blocks
// Find any other block with the same normalized name
const conflictingBlock = Object.entries(currentBlocks).find(([blockId, block]) => {
return (
blockId !== id && // Different block
block.name && // Has a name
normalizeBlockName(block.name) === normalizedNewName // Same normalized name
)
})
if (conflictingBlock) {
// Don't allow the rename - another block already uses this normalized name
logger.error(
`Cannot rename block to "${name}" - another block "${conflictingBlock[1].name}" already uses the normalized name "${normalizedNewName}"`
)
return false
}
// Create a new state with the updated block name
const newState = {
@@ -696,6 +722,8 @@ export const useWorkflowStore = create<WorkflowStoreWithHistory>()(
pushHistory(set, get, newState, `${name} block name updated`)
get().updateLastSaved()
// Note: Socket.IO handles real-time sync automatically
return true
},
toggleBlockWide: (id: string) => {

View File

@@ -183,7 +183,7 @@ export interface WorkflowActions {
toggleBlockEnabled: (id: string) => void
duplicateBlock: (id: string) => void
toggleBlockHandles: (id: string) => void
updateBlockName: (id: string, name: string) => void
updateBlockName: (id: string, name: string) => boolean
toggleBlockWide: (id: string) => void
setBlockWide: (id: string, isWide: boolean) => void
setBlockAdvancedMode: (id: string, advancedMode: boolean) => void

View File

@@ -1,7 +1,9 @@
import type {
ExcelCellValue,
MicrosoftExcelReadResponse,
MicrosoftExcelToolParams,
} from '@/tools/microsoft_excel/types'
import { trimTrailingEmptyRowsAndColumns } from '@/tools/microsoft_excel/utils'
import type { ToolConfig } from '@/tools/types'
export const readTool: ToolConfig<MicrosoftExcelToolParams, MicrosoftExcelReadResponse> = {
@@ -75,8 +77,6 @@ export const readTool: ToolConfig<MicrosoftExcelToolParams, MicrosoftExcelReadRe
},
transformResponse: async (response: Response, params?: MicrosoftExcelToolParams) => {
const defaultAddress = 'A1:Z1000' // Match Google Sheets default logic
// If we came from the worksheets listing (no range provided), resolve first sheet name then fetch range
if (response.url.includes('/workbook/worksheets?')) {
const listData = await response.json()
@@ -92,9 +92,10 @@ export const readTool: ToolConfig<MicrosoftExcelToolParams, MicrosoftExcelReadRe
throw new Error('Access token is required to read Excel range')
}
// Use usedRange(valuesOnly=true) to fetch only populated cells, avoiding thousands of empty rows
const rangeUrl = `https://graph.microsoft.com/v1.0/me/drive/items/${encodeURIComponent(
spreadsheetIdFromUrl
)}/workbook/worksheets('${encodeURIComponent(firstSheetName)}')/range(address='${defaultAddress}')`
)}/workbook/worksheets('${encodeURIComponent(firstSheetName)}')/usedRange(valuesOnly=true)`
const rangeResp = await fetch(rangeUrl, {
headers: { Authorization: `Bearer ${accessToken}` },
@@ -109,6 +110,12 @@ export const readTool: ToolConfig<MicrosoftExcelToolParams, MicrosoftExcelReadRe
const data = await rangeResp.json()
// usedRange returns an address (A1 notation) and values matrix
const address: string = data.address || data.addressLocal || `${firstSheetName}!A1`
const rawValues: ExcelCellValue[][] = data.values || []
const values = trimTrailingEmptyRowsAndColumns(rawValues)
const metadata = {
spreadsheetId: spreadsheetIdFromUrl,
properties: {},
@@ -119,8 +126,8 @@ export const readTool: ToolConfig<MicrosoftExcelToolParams, MicrosoftExcelReadRe
success: true,
output: {
data: {
range: data.range || `${firstSheetName}!${defaultAddress}`,
values: data.values || [],
range: address,
values,
},
metadata: {
spreadsheetId: metadata.spreadsheetId,
@@ -144,12 +151,16 @@ export const readTool: ToolConfig<MicrosoftExcelToolParams, MicrosoftExcelReadRe
spreadsheetUrl: `https://graph.microsoft.com/v1.0/me/drive/items/${spreadsheetId}`,
}
const address: string = data.address || data.addressLocal || data.range || ''
const rawValues: ExcelCellValue[][] = data.values || []
const values = trimTrailingEmptyRowsAndColumns(rawValues)
const result: MicrosoftExcelReadResponse = {
success: true,
output: {
data: {
range: data.range || '',
values: data.values || [],
range: address,
values,
},
metadata: {
spreadsheetId: metadata.spreadsheetId,

View File

@@ -0,0 +1,34 @@
import type { ExcelCellValue } from '@/tools/microsoft_excel/types'
export function trimTrailingEmptyRowsAndColumns(matrix: ExcelCellValue[][]): ExcelCellValue[][] {
if (!Array.isArray(matrix) || matrix.length === 0) return []
const isEmptyValue = (v: ExcelCellValue) => v === null || v === ''
// Determine last non-empty row
let lastNonEmptyRowIndex = -1
for (let r = 0; r < matrix.length; r++) {
const row = matrix[r] || []
const hasData = row.some((cell: ExcelCellValue) => !isEmptyValue(cell))
if (hasData) lastNonEmptyRowIndex = r
}
if (lastNonEmptyRowIndex === -1) return []
const trimmedRows = matrix.slice(0, lastNonEmptyRowIndex + 1)
// Determine last non-empty column across trimmed rows
let lastNonEmptyColIndex = -1
for (let r = 0; r < trimmedRows.length; r++) {
const row = trimmedRows[r] || []
for (let c = 0; c < row.length; c++) {
if (!isEmptyValue(row[c])) {
if (c > lastNonEmptyColIndex) lastNonEmptyColIndex = c
}
}
}
if (lastNonEmptyColIndex === -1) return []
return trimmedRows.map((row) => (row || []).slice(0, lastNonEmptyColIndex + 1))
}

View File

@@ -38,37 +38,43 @@ export const airtableWebhookTrigger: TriggerConfig = {
},
outputs: {
event_type: {
type: 'string',
description: 'Type of Airtable event (e.g., record.created, record.updated, record.deleted)',
payloads: {
type: 'array',
description: 'The payloads of the Airtable changes',
},
base_id: {
type: 'string',
description: 'Airtable base identifier',
latestPayload: {
timestamp: {
type: 'string',
description: 'The timestamp of the Airtable change',
},
payloadFormat: {
type: 'object',
description: 'The format of the Airtable change',
},
actionMetadata: {
source: {
type: 'string',
description: 'The source of the Airtable change',
},
sourceMetadata: {
pageId: {
type: 'string',
description: 'The ID of the page that triggered the Airtable change',
},
},
changedTablesById: {
type: 'object',
description: 'The tables that were changed',
},
baseTransactionNumber: {
type: 'number',
description: 'The transaction number of the Airtable change',
},
},
},
table_id: {
type: 'string',
description: 'Airtable table identifier',
},
record_id: {
type: 'string',
description: 'Record identifier that was modified',
},
record_data: {
type: 'string',
description: 'Complete record data (when Include Full Record Data is enabled)',
},
changed_fields: {
type: 'string',
description: 'Fields that were changed in the record',
},
webhook_id: {
type: 'string',
description: 'Unique webhook identifier',
},
timestamp: {
type: 'string',
description: 'Event timestamp',
airtableChanges: {
type: 'array',
description: 'Changes made to the Airtable table',
},
},

View File

@@ -37,7 +37,7 @@ export const githubWebhookTrigger: TriggerConfig = {
},
outputs: {
// GitHub webhook payload structure - maps 1:1 to actual GitHub webhook body
// GitHub webhook payload structure - now at root for direct access
ref: {
type: 'string',
description: 'Git reference (e.g., refs/heads/fix/telegram-wh)',

View File

@@ -30,6 +30,10 @@ export const microsoftTeamsWebhookTrigger: TriggerConfig = {
type: 'string',
description: 'Unique message identifier',
},
input: {
type: 'string',
description: 'Input message',
},
timestamp: {
type: 'string',
description: 'Message timestamp',

View File

@@ -21,55 +21,73 @@ export const telegramWebhookTrigger: TriggerConfig = {
},
outputs: {
// Matches the formatted payload built in `formatWebhookInput` for provider "telegram"
// Supports tags like <telegram.message.text> and deep paths like <telegram.message.raw.chat.id>
message: {
update_id: {
id: {
type: 'number',
description: 'Unique identifier for the update',
},
message_id: {
type: 'number',
description: 'Unique message identifier',
},
from_id: {
type: 'number',
description: 'User ID who sent the message',
},
from_username: {
type: 'string',
description: 'Username of the sender',
},
from_first_name: {
type: 'string',
description: 'First name of the sender',
},
from_last_name: {
type: 'string',
description: 'Last name of the sender',
},
chat_id: {
type: 'number',
description: 'Unique identifier for the chat',
},
chat_type: {
type: 'string',
description: 'Type of chat (private, group, supergroup, channel)',
},
chat_title: {
type: 'string',
description: 'Title of the chat (for groups and channels)',
description: 'Telegram message ID',
},
text: {
type: 'string',
description: 'Message text content',
description: 'Message text content (if present)',
},
date: {
type: 'number',
description: 'Date the message was sent (Unix timestamp)',
},
entities: {
messageType: {
type: 'string',
description: 'Special entities in the message (mentions, hashtags, etc.) as JSON string',
description:
'Detected content type: text, photo, document, audio, video, voice, sticker, location, contact, poll',
},
raw: {
message_id: {
type: 'number',
description: 'Original Telegram message_id',
},
date: {
type: 'number',
description: 'Original Telegram message date (Unix timestamp)',
},
text: {
type: 'string',
description: 'Original Telegram text (if present)',
},
caption: {
type: 'string',
description: 'Original Telegram caption (if present)',
},
chat: {
id: { type: 'number', description: 'Chat identifier' },
username: { type: 'string', description: 'Chat username (if available)' },
first_name: { type: 'string', description: 'First name (for private chats)' },
last_name: { type: 'string', description: 'Last name (for private chats)' },
},
from: {
id: { type: 'number', description: 'Sender user ID' },
is_bot: { type: 'boolean', description: 'Whether the sender is a bot' },
first_name: { type: 'string', description: 'Sender first name' },
last_name: { type: 'string', description: 'Sender last name' },
language_code: { type: 'string', description: 'Sender language code (if available)' },
},
},
},
sender: {
id: { type: 'number', description: 'Sender user ID' },
firstName: { type: 'string', description: 'Sender first name' },
lastName: { type: 'string', description: 'Sender last name' },
languageCode: { type: 'string', description: 'Sender language code (if available)' },
isBot: { type: 'boolean', description: 'Whether the sender is a bot' },
},
updateId: {
type: 'number',
description: 'Update ID for this webhook delivery',
},
updateType: {
type: 'string',
description:
'Type of update: message, edited_message, channel_post, edited_channel_post, unknown',
},
},

View File

@@ -134,7 +134,6 @@
"prismjs": "^1.30.0",
"react": "19.1.0",
"react-colorful": "5.6.1",
"react-day-picker": "8.10.1",
"react-dom": "19.1.0",
"react-google-drive-picker": "^1.2.2",
"react-hook-form": "^7.54.2",
@@ -2688,8 +2687,6 @@
"react-colorful": ["react-colorful@5.6.1", "", { "peerDependencies": { "react": ">=16.8.0", "react-dom": ">=16.8.0" } }, "sha512-1exovf0uGTGyq5mXQT0zgQ80uvj2PCwvF8zY1RN9/vbJVSjSo3fsB/4L3ObbF7u70NduSiK4xu4Y6q1MHoUGEw=="],
"react-day-picker": ["react-day-picker@8.10.1", "", { "peerDependencies": { "date-fns": "^2.28.0 || ^3.0.0", "react": "^16.8.0 || ^17.0.0 || ^18.0.0" } }, "sha512-TMx7fNbhLk15eqcMt+7Z7S2KF7mfTId/XJDjKE8f+IUcFn0l08/kI4FiYTL/0yuOLmEcbR4Fwe3GJf/NiiMnPA=="],
"react-dom": ["react-dom@19.1.0", "", { "dependencies": { "scheduler": "^0.26.0" }, "peerDependencies": { "react": "^19.1.0" } }, "sha512-Xs1hdnE+DyKgeHJeJznQmYMIBG3TKIHJJT95Q58nHLSrElKlGQqDTR2HQ9fx5CN/Gk6Vh/kupBTDLU11/nDk/g=="],
"react-email": ["react-email@4.2.8", "", { "dependencies": { "@babel/parser": "^7.27.0", "@babel/traverse": "^7.27.0", "chalk": "^5.0.0", "chokidar": "^4.0.3", "commander": "^13.0.0", "debounce": "^2.0.0", "esbuild": "^0.25.0", "glob": "^11.0.0", "jiti": "2.4.2", "log-symbols": "^7.0.0", "mime-types": "^3.0.0", "normalize-path": "^3.0.0", "nypm": "0.6.0", "ora": "^8.0.0", "prompts": "2.4.2", "socket.io": "^4.8.1", "tsconfig-paths": "4.2.0" }, "bin": { "email": "dist/index.js" } }, "sha512-Eqzs/xZnS881oghPO/4CQ1cULyESuUhEjfYboXmYNOokXnJ6QP5GKKJZ6zjkg9SnKXxSrIxSo5PxzCI5jReJMA=="],

View File

@@ -314,6 +314,42 @@ The following table lists the configurable parameters and their default values.
| `migrations.podSecurityContext` | Migrations pod security context | `fsGroup: 1001` |
| `migrations.securityContext` | Migrations container security context | `runAsNonRoot: true, runAsUser: 1001` |
### CronJob Parameters
| Parameter | Description | Default |
|-----------|-------------|---------|
| `cronjobs.enabled` | Enable all scheduled cron jobs | `true` |
| `cronjobs.image.repository` | CronJob image repository for HTTP requests | `curlimages/curl` |
| `cronjobs.image.tag` | CronJob image tag | `8.5.0` |
| `cronjobs.image.pullPolicy` | CronJob image pull policy | `IfNotPresent` |
| `cronjobs.resources` | CronJob resource limits and requests | See values.yaml |
| `cronjobs.restartPolicy` | CronJob pod restart policy | `OnFailure` |
| `cronjobs.activeDeadlineSeconds` | CronJob active deadline in seconds | `300` |
| `cronjobs.startingDeadlineSeconds` | CronJob starting deadline in seconds | `60` |
| `cronjobs.podSecurityContext` | CronJob pod security context | `fsGroup: 1001` |
| `cronjobs.securityContext` | CronJob container security context | `runAsNonRoot: true, runAsUser: 1001` |
| `cronjobs.jobs.scheduleExecution.enabled` | Enable schedule execution cron job | `true` |
| `cronjobs.jobs.scheduleExecution.name` | Schedule execution job name | `schedule-execution` |
| `cronjobs.jobs.scheduleExecution.schedule` | Schedule execution cron schedule | `"*/1 * * * *"` |
| `cronjobs.jobs.scheduleExecution.path` | Schedule execution API path | `"/api/schedules/execute"` |
| `cronjobs.jobs.scheduleExecution.concurrencyPolicy` | Schedule execution concurrency policy | `Forbid` |
| `cronjobs.jobs.scheduleExecution.successfulJobsHistoryLimit` | Schedule execution successful jobs history | `3` |
| `cronjobs.jobs.scheduleExecution.failedJobsHistoryLimit` | Schedule execution failed jobs history | `1` |
| `cronjobs.jobs.gmailWebhookPoll.enabled` | Enable Gmail webhook polling cron job | `true` |
| `cronjobs.jobs.gmailWebhookPoll.name` | Gmail webhook polling job name | `gmail-webhook-poll` |
| `cronjobs.jobs.gmailWebhookPoll.schedule` | Gmail webhook polling cron schedule | `"*/1 * * * *"` |
| `cronjobs.jobs.gmailWebhookPoll.path` | Gmail webhook polling API path | `"/api/webhooks/poll/gmail"` |
| `cronjobs.jobs.gmailWebhookPoll.concurrencyPolicy` | Gmail webhook polling concurrency policy | `Forbid` |
| `cronjobs.jobs.gmailWebhookPoll.successfulJobsHistoryLimit` | Gmail webhook polling successful jobs history | `3` |
| `cronjobs.jobs.gmailWebhookPoll.failedJobsHistoryLimit` | Gmail webhook polling failed jobs history | `1` |
| `cronjobs.jobs.outlookWebhookPoll.enabled` | Enable Outlook webhook polling cron job | `true` |
| `cronjobs.jobs.outlookWebhookPoll.name` | Outlook webhook polling job name | `outlook-webhook-poll` |
| `cronjobs.jobs.outlookWebhookPoll.schedule` | Outlook webhook polling cron schedule | `"*/1 * * * *"` |
| `cronjobs.jobs.outlookWebhookPoll.path` | Outlook webhook polling API path | `"/api/webhooks/poll/outlook"` |
| `cronjobs.jobs.outlookWebhookPoll.concurrencyPolicy` | Outlook webhook polling concurrency policy | `Forbid` |
| `cronjobs.jobs.outlookWebhookPoll.successfulJobsHistoryLimit` | Outlook webhook polling successful jobs history | `3` |
| `cronjobs.jobs.outlookWebhookPoll.failedJobsHistoryLimit` | Outlook webhook polling failed jobs history | `1` |
### Shared Storage Parameters
| Parameter | Description | Default |
@@ -509,6 +545,46 @@ This creates network policies that:
- Permit DNS resolution and HTTPS egress
- Support custom ingress/egress rules
### CronJobs for Scheduled Tasks
Enable automated scheduled tasks functionality:
```yaml
cronjobs:
enabled: true
# Customize individual jobs
jobs:
scheduleExecution:
enabled: true
schedule: "*/1 * * * *" # Every minute
gmailWebhookPoll:
enabled: true
schedule: "*/1 * * * *" # Every minute
outlookWebhookPoll:
enabled: true
schedule: "*/1 * * * *" # Every minute
# Global job configuration
resources:
limits:
memory: "256Mi"
cpu: "200m"
requests:
memory: "128Mi"
cpu: "100m"
```
This creates Kubernetes CronJob resources that:
- Execute HTTP requests to your application's API endpoints
- Handle retries and error logging automatically
- Use minimal resources with curl-based containers
- Support individual enable/disable per job
- Follow Kubernetes security best practices
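After installing or upgrading the chart, the generated CronJobs can be inspected and exercised with standard kubectl commands. The sketch below assumes a release whose fullname resolves to `sim` deployed in a `sim` namespace; adjust both to match your installation:

```bash
# List the CronJobs created by the chart (names follow <release fullname>-<job name>)
kubectl get cronjobs -n sim

# Run the schedule-execution job once, outside its normal schedule
kubectl create job --from=cronjob/sim-schedule-execution manual-run -n sim

# Inspect the output of that manual run
kubectl logs job/manual-run -n sim
```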
### High Availability
Configure pod disruption budgets and anti-affinity:

View File

@@ -0,0 +1,90 @@
{{- if .Values.cronjobs.enabled }}
{{- range $jobKey, $jobConfig := .Values.cronjobs.jobs }}
{{- if $jobConfig.enabled }}
---
apiVersion: batch/v1
kind: CronJob
metadata:
name: {{ include "sim.fullname" $ }}-{{ $jobConfig.name }}
labels:
{{- include "sim.labels" $ | nindent 4 }}
app.kubernetes.io/component: cronjob-{{ $jobConfig.name }}
spec:
schedule: {{ $jobConfig.schedule | quote }}
concurrencyPolicy: {{ $jobConfig.concurrencyPolicy | default "Forbid" }}
successfulJobsHistoryLimit: {{ $jobConfig.successfulJobsHistoryLimit | default 3 }}
failedJobsHistoryLimit: {{ $jobConfig.failedJobsHistoryLimit | default 1 }}
{{- with $.Values.cronjobs.startingDeadlineSeconds }}
startingDeadlineSeconds: {{ . }}
{{- end }}
jobTemplate:
spec:
{{- with $.Values.cronjobs.activeDeadlineSeconds }}
activeDeadlineSeconds: {{ . }}
{{- end }}
template:
metadata:
labels:
{{- include "sim.selectorLabels" $ | nindent 12 }}
app.kubernetes.io/component: cronjob-{{ $jobConfig.name }}
spec:
restartPolicy: {{ $.Values.cronjobs.restartPolicy | default "OnFailure" }}
{{- with $.Values.cronjobs.podSecurityContext }}
securityContext:
{{- toYaml . | nindent 12 }}
{{- end }}
containers:
- name: {{ $jobConfig.name }}
image: "{{ $.Values.cronjobs.image.repository }}:{{ $.Values.cronjobs.image.tag }}"
imagePullPolicy: {{ $.Values.cronjobs.image.pullPolicy }}
{{- with $.Values.cronjobs.securityContext }}
securityContext:
{{- toYaml . | nindent 14 }}
{{- end }}
command:
- /bin/sh
- -c
args:
- |
echo "Starting cron job: {{ $jobConfig.name }}"
echo "Making HTTP request to {{ $jobConfig.path }}"
# Determine the service URL (use internal service regardless of ingress)
SERVICE_URL="http://{{ include "sim.fullname" $ }}-app:{{ $.Values.app.service.port }}"
# Make the HTTP request with timeout and retry logic
for i in $(seq 1 3); do
echo "Attempt $i/3"
if curl -f -s -S --max-time 60 --retry 2 --retry-delay 5 \
-H "Content-Type: application/json" \
-H "User-Agent: Kubernetes-CronJob/{{ $jobConfig.name }}" \
"$SERVICE_URL{{ $jobConfig.path }}"; then
echo "Success: HTTP request completed"
exit 0
fi
echo "Attempt $i failed, retrying..."
sleep 10
done
echo "Error: All attempts failed"
exit 1
resources:
{{- toYaml $.Values.cronjobs.resources | nindent 14 }}
{{- with $.Values.global.imagePullSecrets }}
imagePullSecrets:
{{- toYaml . | nindent 12 }}
{{- end }}
{{- with $.Values.app.nodeSelector }}
nodeSelector:
{{- toYaml . | nindent 12 }}
{{- end }}
{{- with $.Values.affinity }}
affinity:
{{- toYaml . | nindent 12 }}
{{- end }}
{{- with $.Values.tolerations }}
tolerations:
{{- toYaml . | nindent 12 }}
{{- end }}
{{- end }}
{{- end }}
{{- end }}

View File

@@ -582,6 +582,68 @@ affinity: {}
# Tolerations for scheduling on tainted nodes
tolerations: []
# CronJob configuration for scheduled tasks
cronjobs:
# Enable/disable all cron jobs
enabled: true
# Individual job configurations
jobs:
scheduleExecution:
enabled: true
name: schedule-execution
schedule: "*/1 * * * *"
path: "/api/schedules/execute"
concurrencyPolicy: Forbid
successfulJobsHistoryLimit: 3
failedJobsHistoryLimit: 1
gmailWebhookPoll:
enabled: true
name: gmail-webhook-poll
schedule: "*/1 * * * *"
path: "/api/webhooks/poll/gmail"
concurrencyPolicy: Forbid
successfulJobsHistoryLimit: 3
failedJobsHistoryLimit: 1
outlookWebhookPoll:
enabled: true
name: outlook-webhook-poll
schedule: "*/1 * * * *"
path: "/api/webhooks/poll/outlook"
concurrencyPolicy: Forbid
successfulJobsHistoryLimit: 3
failedJobsHistoryLimit: 1
# Global CronJob settings
image:
repository: curlimages/curl
tag: 8.5.0
pullPolicy: IfNotPresent
resources:
limits:
memory: "128Mi"
cpu: "100m"
requests:
memory: "64Mi"
cpu: "50m"
restartPolicy: OnFailure
activeDeadlineSeconds: 300
startingDeadlineSeconds: 60
# Pod security context
podSecurityContext:
fsGroup: 1001
# Container security context
securityContext:
runAsNonRoot: true
runAsUser: 1001
# Observability and telemetry configuration
telemetry:
# Enable/disable telemetry collection