Compare commits


12 Commits

Author SHA1 Message Date
Vikhyath Mondreti
04f109c1f4 v0.4.16: executions dashboard, UI fixes, zep tools, slack fixes 2025-10-15 19:39:10 -07:00
Waleed
48eab7e744 feat(dashboard): update UI for execution dashboard, fix next-runtime-env issues for SSO (#1649) 2025-10-15 19:29:10 -07:00
Vikhyath Mondreti
701bf2b510 improvement(response-copilot): prefer builder mode + fix builder/editor mode conversions (#1648)
* improvement(response-copilot): make it use builder mode over editor mode to prevent json formatting issues

* change placeholder text

* fix conversion between builder and editor mode
2025-10-15 19:14:56 -07:00
Vikhyath Mondreti
ba8acbba07 fix(connection-tags): drag and drop didn't render tag dropdown with input format fields (#1647) 2025-10-15 16:31:30 -07:00
Vikhyath Mondreti
56d04a9558 feat(zep): add tools from Zep (#1646)
* feat(zep): add tools from Zep

* correct icon

* address greptile comments
2025-10-15 15:49:18 -07:00
Waleed
2ca9044bc6 fix(layout): layout canvas onInit after node sizes are calculated, remove legacy history code and legacy marking of dirty workflows (#1645) 2025-10-15 15:35:32 -07:00
Vikhyath Mondreti
b2009fe467 fix build issue (#1644) 2025-10-15 14:26:06 -07:00
Vikhyath Mondreti
eb4821ff30 fix(chat-subs): always use getBaseUrl helper to fetch base url (#1643)
* fix(chat-subs): always use next public app url env

* use getBaseUrl everywhere

* move remaining uses

* fix test

* change auth.ts and make getBaseUrl() call not top level for emails

* change remaining uses

* revert csp

* cleanup

* fix
2025-10-15 14:13:23 -07:00
Waleed
4cceb22f21 fix(slack): update slack config to support refresh token rotation (#1642) 2025-10-15 11:54:33 -07:00
Vikhyath Mondreti
fd67fd220c improvement(functions): increase function block timeout to 3 min (#1641)
* improvement(functions): increase function block timeout to 3 min

* fix tests

* use shared constant

* remove comment
2025-10-15 11:52:02 -07:00
Vikhyath Mondreti
061c1dff4e fix(schedules): offload next run calculation to croner (#1640)
* fix(schedules): offload next run calculation to croner

* fix localstorage dependent tests

* address greptile comment
2025-10-15 11:37:42 -07:00
Siddharth Ganesan
1a05ef97d6 feat(test-framework): add executions logs for test framework (#1639)
* Starting logs page

* Execution history v0

* Execution dashboard

* Fix scroll

* Add open workflow button

* Lint

* Fix fetchExecutions in dependency array

---------

Co-authored-by: Waleed <walif6@gmail.com>
Co-authored-by: Vikhyath Mondreti <vikhyathvikku@gmail.com>
2025-10-15 10:36:02 -07:00
120 changed files with 5400 additions and 1649 deletions

View File

@@ -70,6 +70,7 @@
"whatsapp",
"wikipedia",
"x",
"youtube"
"youtube",
"zep"
]
}

View File

@@ -113,6 +113,7 @@ Read content from a Microsoft Teams chat
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `chatId` | string | Yes | The ID of the chat to read from |
| `includeAttachments` | boolean | No | Download and include message attachments \(hosted contents\) into storage |
#### Output
@@ -125,6 +126,7 @@ Read content from a Microsoft Teams chat
| `attachmentCount` | number | Total number of attachments found |
| `attachmentTypes` | array | Types of attachments found |
| `content` | string | Formatted content of chat messages |
| `attachments` | file[] | Uploaded attachments for convenience \(flattened\) |
### `microsoft_teams_write_chat`
@@ -158,6 +160,7 @@ Read content from a Microsoft Teams channel
| --------- | ---- | -------- | ----------- |
| `teamId` | string | Yes | The ID of the team to read from |
| `channelId` | string | Yes | The ID of the channel to read from |
| `includeAttachments` | boolean | No | Download and include message attachments \(hosted contents\) into storage |
#### Output
@@ -171,6 +174,7 @@ Read content from a Microsoft Teams channel
| `attachmentCount` | number | Total number of attachments found |
| `attachmentTypes` | array | Types of attachments found |
| `content` | string | Formatted content of channel messages |
| `attachments` | file[] | Uploaded attachments for convenience \(flattened\) |
### `microsoft_teams_write_channel`

View File

@@ -206,6 +206,7 @@ Read emails from Outlook
| --------- | ---- | -------- | ----------- |
| `folder` | string | No | Folder ID to read emails from \(default: Inbox\) |
| `maxResults` | number | No | Maximum number of emails to retrieve \(default: 1, max: 10\) |
| `includeAttachments` | boolean | No | Download and include email attachments |
#### Output
@@ -213,6 +214,7 @@ Read emails from Outlook
| --------- | ---- | ----------- |
| `message` | string | Success or status message |
| `results` | array | Array of email message objects |
| `attachments` | file[] | All email attachments flattened from all emails |
### `outlook_forward`

View File

@@ -0,0 +1,246 @@
---
title: Zep
description: Long-term memory for AI agents
---
import { BlockInfoCard } from "@/components/ui/block-info-card"
<BlockInfoCard
type="zep"
color="#4F46E5"
icon={true}
iconSvg={`<svg className="block-icon"
viewBox='0 0 24 24'
fill='none'
xmlns='http://www.w3.org/2000/svg'
>
<path
d='M12 2C6.48 2 2 6.48 2 12s4.48 10 10 10 10-4.48 10-10S17.52 2 12 2zm0 18c-4.41 0-8-3.59-8-8s3.59-8 8-8 8 3.59 8 8-3.59 8-8 8z'
fill='currentColor'
/>
<path
d='M12 6c-3.31 0-6 2.69-6 6s2.69 6 6 6 6-2.69 6-6-2.69-6-6-6zm0 10c-2.21 0-4-1.79-4-4s1.79-4 4-4 4 1.79 4 4-1.79 4-4 4z'
fill='currentColor'
/>
<circle cx='12' cy='12' r='2' fill='currentColor' />
<path
d='M8 8h8M8 16h8'
stroke='currentColor'
strokeWidth='1.5'
strokeLinecap='round'
/>
</svg>`}
/>
## Usage Instructions
Integrate Zep for long-term memory management. Create threads, add messages, and retrieve context with AI-powered summaries and fact extraction.
## Tools
### `zep_create_thread`
Start a new conversation thread in Zep
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `threadId` | string | Yes | Unique identifier for the thread |
| `userId` | string | Yes | User ID associated with the thread |
| `apiKey` | string | Yes | Your Zep API key |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `threadId` | string | The thread ID |
| `userId` | string | The user ID |
| `uuid` | string | Internal UUID |
| `createdAt` | string | Creation timestamp |
| `projectUuid` | string | Project UUID |
### `zep_get_threads`
List all conversation threads
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `pageSize` | number | No | Number of threads to retrieve per page |
| `pageNumber` | number | No | Page number for pagination |
| `orderBy` | string | No | Field to order results by \(created_at, updated_at, user_id, thread_id\) |
| `asc` | boolean | No | Order direction: true for ascending, false for descending |
| `apiKey` | string | Yes | Your Zep API key |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `threads` | array | Array of thread objects |
| `responseCount` | number | Number of threads in this response |
| `totalCount` | number | Total number of threads available |
### `zep_delete_thread`
Delete a conversation thread from Zep
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `threadId` | string | Yes | Thread ID to delete |
| `apiKey` | string | Yes | Your Zep API key |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `deleted` | boolean | Whether the thread was deleted |
### `zep_get_context`
Retrieve user context from a thread with summary or basic mode
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `threadId` | string | Yes | Thread ID to get context from |
| `mode` | string | No | Context mode: "summary" \(natural language\) or "basic" \(raw facts\) |
| `minRating` | number | No | Minimum rating by which to filter relevant facts |
| `apiKey` | string | Yes | Your Zep API key |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `context` | string | The context string \(summary or basic\) |
| `facts` | array | Extracted facts |
| `entities` | array | Extracted entities |
| `summary` | string | Conversation summary |
### `zep_get_messages`
Retrieve messages from a thread
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `threadId` | string | Yes | Thread ID to get messages from |
| `limit` | number | No | Maximum number of messages to return |
| `cursor` | string | No | Cursor for pagination |
| `lastn` | number | No | Number of most recent messages to return \(overrides limit and cursor\) |
| `apiKey` | string | Yes | Your Zep API key |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `messages` | array | Array of message objects |
| `rowCount` | number | Number of messages in this response |
| `totalCount` | number | Total number of messages in the thread |
### `zep_add_messages`
Add messages to an existing thread
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `threadId` | string | Yes | Thread ID to add messages to |
| `messages` | json | Yes | Array of message objects with role and content |
| `apiKey` | string | Yes | Your Zep API key |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `context` | string | Updated context after adding messages |
| `messageIds` | array | Array of added message UUIDs |
| `threadId` | string | The thread ID |
### `zep_add_user`
Create a new user in Zep
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `userId` | string | Yes | Unique identifier for the user |
| `email` | string | No | User email address |
| `firstName` | string | No | User first name |
| `lastName` | string | No | User last name |
| `metadata` | json | No | Additional metadata as JSON object |
| `apiKey` | string | Yes | Your Zep API key |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `userId` | string | The user ID |
| `email` | string | User email |
| `firstName` | string | User first name |
| `lastName` | string | User last name |
| `uuid` | string | Internal UUID |
| `createdAt` | string | Creation timestamp |
| `metadata` | object | User metadata |
### `zep_get_user`
Retrieve user information from Zep
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `userId` | string | Yes | User ID to retrieve |
| `apiKey` | string | Yes | Your Zep API key |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `userId` | string | The user ID |
| `email` | string | User email |
| `firstName` | string | User first name |
| `lastName` | string | User last name |
| `uuid` | string | Internal UUID |
| `createdAt` | string | Creation timestamp |
| `updatedAt` | string | Last update timestamp |
| `metadata` | object | User metadata |
### `zep_get_user_threads`
List all conversation threads for a specific user
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `userId` | string | Yes | User ID to get threads for |
| `limit` | number | No | Maximum number of threads to return |
| `apiKey` | string | Yes | Your Zep API key |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `threads` | array | Array of thread objects for this user |
| `userId` | string | The user ID |
## Notes
- Category: `tools`
- Type: `zep`
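For orientation, here is a minimal sketch of a `zep_add_messages` input object derived from the parameter table above. Only the field names and types come from the docs; the interface name and sample values are illustrative.

// Illustrative input for the zep_add_messages tool, shaped by the table above.
// The interface name and example values are assumptions; only the fields
// (threadId, messages, apiKey) come from the documentation.
interface ZepAddMessagesInput {
  threadId: string
  messages: Array<{ role: string; content: string }>
  apiKey: string
}

const example: ZepAddMessagesInput = {
  threadId: 'thread-support-42',
  messages: [
    { role: 'user', content: 'My order never arrived.' },
    { role: 'assistant', content: 'Sorry about that, checking the shipment now.' },
  ],
  apiKey: process.env.ZEP_API_KEY ?? '',
}

console.log(`Adding ${example.messages.length} messages to ${example.threadId}`)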

View File

@@ -2,7 +2,7 @@
import { useRouter } from 'next/navigation'
import { Button } from '@/components/ui/button'
import { env, isTruthy } from '@/lib/env'
import { getEnv, isTruthy } from '@/lib/env'
import { cn } from '@/lib/utils'
interface SSOLoginButtonProps {
@@ -24,7 +24,7 @@ export function SSOLoginButton({
}: SSOLoginButtonProps) {
const router = useRouter()
if (!isTruthy(env.NEXT_PUBLIC_SSO_ENABLED)) {
if (!isTruthy(getEnv('NEXT_PUBLIC_SSO_ENABLED'))) {
return null
}
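The `env` to `getEnv` swap above (and in the login/signup pages that follow) points at public env vars being read at request time rather than baked in at build, which is the next-runtime-env fix called out in #1649. A minimal sketch of what such a helper could look like, assuming next-runtime-env's `env()` accessor; the actual `@/lib/env` implementation is not shown in this compare view.

// Hypothetical runtime-aware accessor for NEXT_PUBLIC_* variables. On the
// client it defers to next-runtime-env, which reads values injected into the
// page at request time, so flags like NEXT_PUBLIC_SSO_ENABLED can change
// without a rebuild. The real @/lib/env helper may differ.
import { env as runtimeEnv } from 'next-runtime-env'

export function getEnv(key: string): string | undefined {
  if (typeof window !== 'undefined') {
    return runtimeEnv(key)
  }
  return process.env[key]
}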

View File

@@ -16,8 +16,9 @@ import { Input } from '@/components/ui/input'
import { Label } from '@/components/ui/label'
import { client } from '@/lib/auth-client'
import { quickValidateEmail } from '@/lib/email/validation'
import { env, isFalsy, isTruthy } from '@/lib/env'
import { getEnv, isFalsy, isTruthy } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { getBaseUrl } from '@/lib/urls/utils'
import { cn } from '@/lib/utils'
import { SocialLoginButtons } from '@/app/(auth)/components/social-login-buttons'
import { SSOLoginButton } from '@/app/(auth)/components/sso-login-button'
@@ -322,7 +323,7 @@ export default function LoginPage({
},
body: JSON.stringify({
email: forgotPasswordEmail,
redirectTo: `${window.location.origin}/reset-password`,
redirectTo: `${getBaseUrl()}/reset-password`,
}),
})
@@ -367,8 +368,8 @@ export default function LoginPage({
}
}
const ssoEnabled = isTruthy(env.NEXT_PUBLIC_SSO_ENABLED)
const emailEnabled = !isFalsy(env.NEXT_PUBLIC_EMAIL_PASSWORD_SIGNUP_ENABLED)
const ssoEnabled = isTruthy(getEnv('NEXT_PUBLIC_SSO_ENABLED'))
const emailEnabled = !isFalsy(getEnv('NEXT_PUBLIC_EMAIL_PASSWORD_SIGNUP_ENABLED'))
const hasSocial = githubAvailable || googleAvailable
const hasOnlySSO = ssoEnabled && !emailEnabled && !hasSocial
const showTopSSO = hasOnlySSO
@@ -398,7 +399,7 @@ export default function LoginPage({
)}
{/* Email/Password Form - show unless explicitly disabled */}
{!isFalsy(env.NEXT_PUBLIC_EMAIL_PASSWORD_SIGNUP_ENABLED) && (
{!isFalsy(getEnv('NEXT_PUBLIC_EMAIL_PASSWORD_SIGNUP_ENABLED')) && (
<form onSubmit={onSubmit} className={`${inter.className} mt-8 space-y-8`}>
<div className='space-y-6'>
<div className='space-y-2'>
@@ -521,7 +522,7 @@ export default function LoginPage({
)}
{/* Only show signup link if email/password signup is enabled */}
{!isFalsy(env.NEXT_PUBLIC_EMAIL_PASSWORD_SIGNUP_ENABLED) && (
{!isFalsy(getEnv('NEXT_PUBLIC_EMAIL_PASSWORD_SIGNUP_ENABLED')) && (
<div className={`${inter.className} pt-6 text-center font-light text-[14px]`}>
<span className='font-normal'>Don't have an account? </span>
<Link

View File

@@ -9,7 +9,7 @@ import { Input } from '@/components/ui/input'
import { Label } from '@/components/ui/label'
import { client, useSession } from '@/lib/auth-client'
import { quickValidateEmail } from '@/lib/email/validation'
import { env, isFalsy, isTruthy } from '@/lib/env'
import { getEnv, isFalsy, isTruthy } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { cn } from '@/lib/utils'
import { SocialLoginButtons } from '@/app/(auth)/components/social-login-buttons'
@@ -383,8 +383,8 @@ function SignupFormContent({
{/* SSO Login Button (primary top-only when it is the only method) */}
{(() => {
const ssoEnabled = isTruthy(env.NEXT_PUBLIC_SSO_ENABLED)
const emailEnabled = !isFalsy(env.NEXT_PUBLIC_EMAIL_PASSWORD_SIGNUP_ENABLED)
const ssoEnabled = isTruthy(getEnv('NEXT_PUBLIC_SSO_ENABLED'))
const emailEnabled = !isFalsy(getEnv('NEXT_PUBLIC_EMAIL_PASSWORD_SIGNUP_ENABLED'))
const hasSocial = githubAvailable || googleAvailable
const hasOnlySSO = ssoEnabled && !emailEnabled && !hasSocial
return hasOnlySSO
@@ -399,7 +399,7 @@ function SignupFormContent({
)}
{/* Email/Password Form - show unless explicitly disabled */}
{!isFalsy(env.NEXT_PUBLIC_EMAIL_PASSWORD_SIGNUP_ENABLED) && (
{!isFalsy(getEnv('NEXT_PUBLIC_EMAIL_PASSWORD_SIGNUP_ENABLED')) && (
<form onSubmit={onSubmit} className={`${inter.className} mt-8 space-y-8`}>
<div className='space-y-6'>
<div className='space-y-2'>
@@ -516,8 +516,8 @@ function SignupFormContent({
{/* Divider - show when we have multiple auth methods */}
{(() => {
const ssoEnabled = isTruthy(env.NEXT_PUBLIC_SSO_ENABLED)
const emailEnabled = !isFalsy(env.NEXT_PUBLIC_EMAIL_PASSWORD_SIGNUP_ENABLED)
const ssoEnabled = isTruthy(getEnv('NEXT_PUBLIC_SSO_ENABLED'))
const emailEnabled = !isFalsy(getEnv('NEXT_PUBLIC_EMAIL_PASSWORD_SIGNUP_ENABLED'))
const hasSocial = githubAvailable || googleAvailable
const hasOnlySSO = ssoEnabled && !emailEnabled && !hasSocial
const showBottomSection = hasSocial || (ssoEnabled && !hasOnlySSO)
@@ -535,8 +535,8 @@ function SignupFormContent({
)}
{(() => {
const ssoEnabled = isTruthy(env.NEXT_PUBLIC_SSO_ENABLED)
const emailEnabled = !isFalsy(env.NEXT_PUBLIC_EMAIL_PASSWORD_SIGNUP_ENABLED)
const ssoEnabled = isTruthy(getEnv('NEXT_PUBLIC_SSO_ENABLED'))
const emailEnabled = !isFalsy(getEnv('NEXT_PUBLIC_EMAIL_PASSWORD_SIGNUP_ENABLED'))
const hasSocial = githubAvailable || googleAvailable
const hasOnlySSO = ssoEnabled && !emailEnabled && !hasSocial
const showBottomSection = hasSocial || (ssoEnabled && !hasOnlySSO)
@@ -545,7 +545,7 @@ function SignupFormContent({
<div
className={cn(
inter.className,
isFalsy(env.NEXT_PUBLIC_EMAIL_PASSWORD_SIGNUP_ENABLED) ? 'mt-8' : undefined
isFalsy(getEnv('NEXT_PUBLIC_EMAIL_PASSWORD_SIGNUP_ENABLED')) ? 'mt-8' : undefined
)}
>
<SocialLoginButtons
@@ -554,7 +554,7 @@ function SignupFormContent({
callbackURL={redirectUrl || '/workspace'}
isProduction={isProduction}
>
{isTruthy(env.NEXT_PUBLIC_SSO_ENABLED) && (
{isTruthy(getEnv('NEXT_PUBLIC_SSO_ENABLED')) && (
<SSOLoginButton
callbackURL={redirectUrl || '/workspace'}
variant='outline'

View File

@@ -1,13 +1,11 @@
import { redirect } from 'next/navigation'
import { env, isTruthy } from '@/lib/env'
import { getEnv, isTruthy } from '@/lib/env'
import SSOForm from './sso-form'
// Force dynamic rendering to avoid prerender errors with search params
export const dynamic = 'force-dynamic'
export default async function SSOPage() {
// Redirect if SSO is not enabled
if (!isTruthy(env.NEXT_PUBLIC_SSO_ENABLED)) {
if (!isTruthy(getEnv('NEXT_PUBLIC_SSO_ENABLED'))) {
redirect('/login')
}

View File

@@ -90,6 +90,7 @@ const tools = [
'Wikipedia',
'X',
'YouTube',
'Zep',
]
interface FooterProps {

View File

@@ -38,7 +38,7 @@ export default function StructuredData() {
url: 'https://sim.ai',
name: 'Sim - AI Agent Workflow Builder',
description:
'Open-source AI agent workflow builder. 30,000+ developers build and deploy agentic workflows. SOC2 and HIPAA compliant.',
'Open-source AI agent workflow builder. 50,000+ developers build and deploy agentic workflows. SOC2 and HIPAA compliant.',
publisher: {
'@id': 'https://sim.ai/#organization',
},
@@ -98,7 +98,7 @@ export default function StructuredData() {
'@id': 'https://sim.ai/#software',
name: 'Sim - AI Agent Workflow Builder',
description:
'Open-source AI agent workflow builder used by 30,000+ developers. Build agentic workflows with visual drag-and-drop interface. SOC2 and HIPAA compliant. Integrate with 100+ apps.',
'Open-source AI agent workflow builder used by 50,000+ developers. Build agentic workflows with visual drag-and-drop interface. SOC2 and HIPAA compliant. Integrate with 100+ apps.',
applicationCategory: 'DeveloperApplication',
applicationSubCategory: 'AI Development Tools',
operatingSystem: 'Web, Windows, macOS, Linux',
@@ -173,7 +173,7 @@ export default function StructuredData() {
'Visual workflow builder',
'Drag-and-drop interface',
'100+ integrations',
'AI model support (OpenAI, Anthropic, Google)',
'AI model support (OpenAI, Anthropic, Google, xAI, Mistral, Perplexity)',
'Real-time collaboration',
'Version control',
'API access',
@@ -198,7 +198,7 @@ export default function StructuredData() {
name: 'What is Sim?',
acceptedAnswer: {
'@type': 'Answer',
text: 'Sim is an open-source AI agent workflow builder used by 30,000+ developers at trail-blazing startups to Fortune 500 companies. It provides a visual drag-and-drop interface for building and deploying agentic workflows. Sim is SOC2 and HIPAA compliant.',
text: 'Sim is an open-source AI agent workflow builder used by 50,000+ developers at trail-blazing startups to Fortune 500 companies. It provides a visual drag-and-drop interface for building and deploying agentic workflows. Sim is SOC2 and HIPAA compliant.',
},
},
{
@@ -206,7 +206,7 @@ export default function StructuredData() {
name: 'Which AI models does Sim support?',
acceptedAnswer: {
'@type': 'Answer',
text: 'Sim supports all major AI models including OpenAI (GPT-4, GPT-3.5), Anthropic (Claude), Google (Gemini), Mistral, Perplexity, and many more. You can also connect to open-source models via Ollama.',
text: 'Sim supports all major AI models including OpenAI (GPT-5, GPT-4o), Anthropic (Claude), Google (Gemini), xAI (Grok), Mistral, Perplexity, and many more. You can also connect to open-source models via Ollama.',
},
},
{

View File

@@ -278,7 +278,22 @@ export async function refreshTokenIfNeeded(
logger.info(`[${requestId}] Successfully refreshed access token`)
return { accessToken: refreshedToken, refreshed: true }
} catch (error) {
logger.error(`[${requestId}] Error refreshing token`, error)
logger.warn(
`[${requestId}] Refresh attempt failed, checking if another concurrent request succeeded`
)
const freshCredential = await getCredential(requestId, credentialId, credential.userId)
if (freshCredential?.accessToken) {
const freshExpiresAt = freshCredential.accessTokenExpiresAt
const stillValid = !freshExpiresAt || freshExpiresAt > new Date()
if (stillValid) {
logger.info(`[${requestId}] Found valid token from concurrent refresh, using it`)
return { accessToken: freshCredential.accessToken, refreshed: true }
}
}
logger.error(`[${requestId}] Refresh failed and no valid token found in DB`, error)
throw error
}
}
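The catch block above gives a failed refresh a second chance: before rethrowing, it re-reads the credential to see whether a concurrent request already stored a fresh token. A hedged usage sketch of how two parallel callers behave under that logic; the import path and exact signature of `refreshTokenIfNeeded` are not shown in this hunk, so both are assumptions.

// Two blocks refreshing the same credential concurrently: if the provider
// rejects the second refresh (e.g. single-use refresh tokens), the loser now
// picks up the access token the winner persisted instead of failing.
// The import path and argument order below are hypothetical.
import { refreshTokenIfNeeded } from '@/app/api/auth/oauth/utils'

async function demo() {
  const [a, b] = await Promise.all([
    refreshTokenIfNeeded('req-a', 'credential-123', 'user-1'),
    refreshTokenIfNeeded('req-b', 'credential-123', 'user-1'),
  ])
  // Both resolve with a usable token even though only one refresh hit the provider.
  console.log(a.refreshed, b.refreshed)
}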

View File

@@ -4,8 +4,8 @@ import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { requireStripeClient } from '@/lib/billing/stripe-client'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { getBaseUrl } from '@/lib/urls/utils'
const logger = createLogger('BillingPortal')
@@ -21,8 +21,7 @@ export async function POST(request: NextRequest) {
const context: 'user' | 'organization' =
body?.context === 'organization' ? 'organization' : 'user'
const organizationId: string | undefined = body?.organizationId || undefined
const returnUrl: string =
body?.returnUrl || `${env.NEXT_PUBLIC_APP_URL}/workspace?billing=updated`
const returnUrl: string = body?.returnUrl || `${getBaseUrl()}/workspace?billing=updated`
const stripe = requireStripeClient()
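Several hunks in this compare swap `env.NEXT_PUBLIC_APP_URL` (and in one place `window.location.origin`) for a single `getBaseUrl()` from `@/lib/urls/utils`, per commit eb4821ff30. The helper itself is not part of the diff; a plausible sketch of the behavior the call sites rely on, offered as an assumption:

// Hypothetical sketch of getBaseUrl(), consistent with how the call sites in
// this compare use it: browser origin on the client, configured app URL on the
// server, localhost as a last resort. The real @/lib/urls/utils may differ.
export function getBaseUrl(): string {
  if (typeof window !== 'undefined') {
    return window.location.origin
  }
  const configured = process.env.NEXT_PUBLIC_APP_URL
  if (configured) {
    return configured.replace(/\/$/, '') // drop a trailing slash so paths append cleanly
  }
  return 'http://localhost:3000'
}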

View File

@@ -5,9 +5,9 @@ import type { NextRequest } from 'next/server'
import { v4 as uuidv4 } from 'uuid'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { env } from '@/lib/env'
import { isDev } from '@/lib/environment'
import { createLogger } from '@/lib/logs/console/logger'
import { getBaseUrl } from '@/lib/urls/utils'
import { encryptSecret } from '@/lib/utils'
import { checkWorkflowAccessForChatCreation } from '@/app/api/chat/utils'
import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'
@@ -171,7 +171,7 @@ export async function POST(request: NextRequest) {
// Return successful response with chat URL
// Generate chat URL using path-based routing instead of subdomains
const baseUrl = env.NEXT_PUBLIC_APP_URL || 'http://localhost:3000'
const baseUrl = getBaseUrl()
let chatUrl: string
try {

View File

@@ -2,6 +2,7 @@ import { type NextRequest, NextResponse } from 'next/server'
import { createLogger } from '@/lib/logs/console/logger'
import { getPresignedUrl, getPresignedUrlWithConfig, isUsingCloudStorage } from '@/lib/uploads'
import { BLOB_EXECUTION_FILES_CONFIG, S3_EXECUTION_FILES_CONFIG } from '@/lib/uploads/setup'
import { getBaseUrl } from '@/lib/urls/utils'
import { createErrorResponse } from '@/app/api/files/utils'
const logger = createLogger('FileDownload')
@@ -81,7 +82,7 @@ export async function POST(request: NextRequest) {
}
} else {
// For local storage, return the direct path
const downloadUrl = `${process.env.NEXT_PUBLIC_APP_URL || 'http://localhost:3000'}/api/files/serve/${key}`
const downloadUrl = `${getBaseUrl()}/api/files/serve/${key}`
return NextResponse.json({
downloadUrl,

View File

@@ -8,7 +8,9 @@ import { validateProxyUrl } from '@/lib/security/input-validation'
import { generateRequestId } from '@/lib/utils'
export const dynamic = 'force-dynamic'
export const runtime = 'nodejs'
export const maxDuration = 60
// Segment config exports must be statically analyzable.
// Mirror MAX_EXECUTION_DURATION (210s) from '@/lib/execution/constants'.
export const maxDuration = 210
const logger = createLogger('FunctionExecuteAPI')
@@ -649,10 +651,12 @@ export async function POST(req: NextRequest) {
try {
const body = await req.json()
const { DEFAULT_EXECUTION_TIMEOUT_MS } = await import('@/lib/execution/constants')
const {
code,
params = {},
timeout = 5000,
timeout = DEFAULT_EXECUTION_TIMEOUT_MS,
language = DEFAULT_CODE_LANGUAGE,
useLocalVM = false,
envVars = {},
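The hunk above replaces the hard-coded `timeout = 5000` with `DEFAULT_EXECUTION_TIMEOUT_MS` from `@/lib/execution/constants`, while `maxDuration` stays a literal because Next.js segment config must be statically analyzable. A sketch of what that shared constants module might contain; the 210-second figure comes from the comment in the diff and the 3-minute default from commit fd67fd220c, everything else is assumed.

// Hypothetical contents of @/lib/execution/constants. Next.js cannot resolve
// route segment config (maxDuration) through an import, so the route keeps the
// literal 210 and merely mirrors this value.
export const MAX_EXECUTION_DURATION = 210 // seconds
export const DEFAULT_EXECUTION_TIMEOUT_MS = 3 * 60 * 1000 // 3 minutes, per the function block timeout bump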

View File

@@ -23,9 +23,9 @@ import {
} from '@/lib/billing/validation/seat-management'
import { sendEmail } from '@/lib/email/mailer'
import { quickValidateEmail } from '@/lib/email/validation'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { hasWorkspaceAdminAccess } from '@/lib/permissions/utils'
import { getBaseUrl } from '@/lib/urls/utils'
const logger = createLogger('OrganizationInvitations')
@@ -339,7 +339,7 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
organizationEntry[0]?.name || 'organization',
role,
workspaceInvitationsWithNames,
`${env.NEXT_PUBLIC_APP_URL}/invite/${orgInvitation.id}`
`${getBaseUrl()}/invite/${orgInvitation.id}`
)
emailResult = await sendEmail({
@@ -352,7 +352,7 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
const emailHtml = await renderInvitationEmail(
inviter[0]?.name || 'Someone',
organizationEntry[0]?.name || 'organization',
`${env.NEXT_PUBLIC_APP_URL}/invite/${orgInvitation.id}`,
`${getBaseUrl()}/invite/${orgInvitation.id}`,
email
)

View File

@@ -9,8 +9,8 @@ import { getUserUsageData } from '@/lib/billing/core/usage'
import { validateSeatAvailability } from '@/lib/billing/validation/seat-management'
import { sendEmail } from '@/lib/email/mailer'
import { quickValidateEmail } from '@/lib/email/validation'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { getBaseUrl } from '@/lib/urls/utils'
const logger = createLogger('OrganizationMembersAPI')
@@ -260,7 +260,7 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
const emailHtml = await renderInvitationEmail(
inviter[0]?.name || 'Someone',
organizationEntry[0]?.name || 'organization',
`${env.NEXT_PUBLIC_APP_URL}/invite/organization?id=${invitationId}`,
`${getBaseUrl()}/invite/organization?id=${invitationId}`,
normalizedEmail
)

View File

@@ -309,7 +309,8 @@ export async function POST(req: NextRequest) {
// Additional validation for custom cron expressions
if (defaultScheduleType === 'custom' && cronExpression) {
const validation = validateCronExpression(cronExpression)
// Validate with timezone for accurate validation
const validation = validateCronExpression(cronExpression, timezone)
if (!validation.isValid) {
logger.error(`[${requestId}] Invalid cron expression: ${validation.error}`)
return NextResponse.json(
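Commit 061c1dff4e offloads next-run calculation to croner, and the hunk above starts passing the schedule's timezone into `validateCronExpression`. A minimal sketch of computing the next run with croner; the cron pattern and timezone below are illustrative values, not taken from the codebase.

// Minimal croner usage: evaluate the expression in the schedule's timezone and
// let the library compute the next occurrence instead of hand-rolled date math.
import { Cron } from 'croner'

const schedule = new Cron('30 9 * * 1-5', { timezone: 'America/Los_Angeles' })
const nextRun = schedule.nextRun() // Date of the next weekday 09:30 in that timezone, or null
console.log(nextRun?.toISOString())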

View File

@@ -3,9 +3,9 @@ import { webhook, workflow } from '@sim/db/schema'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { getUserEntityPermissions } from '@/lib/permissions/utils'
import { getBaseUrl } from '@/lib/urls/utils'
import { generateRequestId } from '@/lib/utils'
import { getOAuthToken } from '@/app/api/auth/oauth/utils'
@@ -282,13 +282,7 @@ export async function DELETE(
if (!resolvedExternalId) {
try {
if (!env.NEXT_PUBLIC_APP_URL) {
logger.error(
`[${requestId}] NEXT_PUBLIC_APP_URL not configured, cannot match Airtable webhook`
)
throw new Error('NEXT_PUBLIC_APP_URL must be configured')
}
const expectedNotificationUrl = `${env.NEXT_PUBLIC_APP_URL}/api/webhooks/trigger/${foundWebhook.path}`
const expectedNotificationUrl = `${getBaseUrl()}/api/webhooks/trigger/${foundWebhook.path}`
const listUrl = `https://api.airtable.com/v0/bases/${baseId}/webhooks`
const listResp = await fetch(listUrl, {

View File

@@ -2,9 +2,9 @@ import { db, webhook, workflow } from '@sim/db'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { getUserEntityPermissions } from '@/lib/permissions/utils'
import { getBaseUrl } from '@/lib/urls/utils'
import { generateRequestId } from '@/lib/utils'
import { signTestWebhookToken } from '@/lib/webhooks/test-tokens'
@@ -64,13 +64,8 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
}
if (!env.NEXT_PUBLIC_APP_URL) {
logger.error(`[${requestId}] NEXT_PUBLIC_APP_URL not configured`)
return NextResponse.json({ error: 'Server configuration error' }, { status: 500 })
}
const token = await signTestWebhookToken(id, ttlSeconds)
const url = `${env.NEXT_PUBLIC_APP_URL}/api/webhooks/test/${id}?token=${encodeURIComponent(token)}`
const url = `${getBaseUrl()}/api/webhooks/test/${id}?token=${encodeURIComponent(token)}`
logger.info(`[${requestId}] Minted test URL for webhook ${id}`)
return NextResponse.json({

View File

@@ -4,9 +4,9 @@ import { and, desc, eq } from 'drizzle-orm'
import { nanoid } from 'nanoid'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { getUserEntityPermissions } from '@/lib/permissions/utils'
import { getBaseUrl } from '@/lib/urls/utils'
import { generateRequestId } from '@/lib/utils'
import { getOAuthToken } from '@/app/api/auth/oauth/utils'
@@ -467,14 +467,7 @@ async function createAirtableWebhookSubscription(
)
}
if (!env.NEXT_PUBLIC_APP_URL) {
logger.error(
`[${requestId}] NEXT_PUBLIC_APP_URL not configured, cannot register Airtable webhook`
)
throw new Error('NEXT_PUBLIC_APP_URL must be configured for Airtable webhook registration')
}
const notificationUrl = `${env.NEXT_PUBLIC_APP_URL}/api/webhooks/trigger/${path}`
const notificationUrl = `${getBaseUrl()}/api/webhooks/trigger/${path}`
const airtableApiUrl = `https://api.airtable.com/v0/bases/${baseId}/webhooks`

View File

@@ -2,8 +2,8 @@ import { db } from '@sim/db'
import { webhook } from '@sim/db/schema'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { getBaseUrl } from '@/lib/urls/utils'
import { generateRequestId } from '@/lib/utils'
const logger = createLogger('WebhookTestAPI')
@@ -35,15 +35,7 @@ export async function GET(request: NextRequest) {
const provider = foundWebhook.provider || 'generic'
const providerConfig = (foundWebhook.providerConfig as Record<string, any>) || {}
if (!env.NEXT_PUBLIC_APP_URL) {
logger.error(`[${requestId}] NEXT_PUBLIC_APP_URL not configured, cannot test webhook`)
return NextResponse.json(
{ success: false, error: 'NEXT_PUBLIC_APP_URL must be configured' },
{ status: 500 }
)
}
const baseUrl = env.NEXT_PUBLIC_APP_URL
const webhookUrl = `${baseUrl}/api/webhooks/trigger/${foundWebhook.path}`
const webhookUrl = `${getBaseUrl()}/api/webhooks/trigger/${foundWebhook.path}`
logger.info(`[${requestId}] Testing webhook for provider: ${provider}`, {
webhookId,

View File

@@ -0,0 +1,305 @@
import { db } from '@sim/db'
import { permissions, workflowExecutionLogs } from '@sim/db/schema'
import { and, desc, eq, gte, inArray } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console/logger'
import { generateRequestId } from '@/lib/utils'
const logger = createLogger('WorkflowExecutionDetailsAPI')
const QueryParamsSchema = z.object({
timeFilter: z.enum(['1h', '12h', '24h', '1w']).optional(),
startTime: z.string().optional(),
endTime: z.string().optional(),
triggers: z.string().optional(),
})
function getTimeRangeMs(filter: string): number {
switch (filter) {
case '1h':
return 60 * 60 * 1000
case '12h':
return 12 * 60 * 60 * 1000
case '24h':
return 24 * 60 * 60 * 1000
case '1w':
return 7 * 24 * 60 * 60 * 1000
default:
return 24 * 60 * 60 * 1000
}
}
export async function GET(
request: NextRequest,
{ params }: { params: Promise<{ id: string; workflowId: string }> }
) {
const requestId = generateRequestId()
try {
const session = await getSession()
if (!session?.user?.id) {
logger.warn(`[${requestId}] Unauthorized workflow details access attempt`)
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const userId = session.user.id
const { id: workspaceId, workflowId } = await params
const { searchParams } = new URL(request.url)
const queryParams = QueryParamsSchema.parse(Object.fromEntries(searchParams.entries()))
// Calculate time range - use custom times if provided, otherwise use timeFilter
let endTime: Date
let startTime: Date
if (queryParams.startTime && queryParams.endTime) {
startTime = new Date(queryParams.startTime)
endTime = new Date(queryParams.endTime)
} else {
endTime = new Date()
const timeRangeMs = getTimeRangeMs(queryParams.timeFilter || '24h')
startTime = new Date(endTime.getTime() - timeRangeMs)
}
const timeRangeMs = endTime.getTime() - startTime.getTime()
// Number of data points for the line charts
const dataPoints = 30
const segmentDurationMs = timeRangeMs / dataPoints
logger.debug(`[${requestId}] Fetching workflow details for ${workflowId}`)
// Check permissions
const [permission] = await db
.select()
.from(permissions)
.where(
and(
eq(permissions.entityType, 'workspace'),
eq(permissions.entityId, workspaceId),
eq(permissions.userId, userId)
)
)
.limit(1)
if (!permission) {
logger.warn(`[${requestId}] User ${userId} has no permission for workspace ${workspaceId}`)
return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
}
// Build conditions for log filtering
const logConditions = [
eq(workflowExecutionLogs.workflowId, workflowId),
gte(workflowExecutionLogs.startedAt, startTime),
]
// Add trigger filter if specified
if (queryParams.triggers) {
const triggerList = queryParams.triggers.split(',')
logConditions.push(inArray(workflowExecutionLogs.trigger, triggerList))
}
// Fetch all logs for this workflow in the time range
const logs = await db
.select({
id: workflowExecutionLogs.id,
executionId: workflowExecutionLogs.executionId,
level: workflowExecutionLogs.level,
trigger: workflowExecutionLogs.trigger,
startedAt: workflowExecutionLogs.startedAt,
totalDurationMs: workflowExecutionLogs.totalDurationMs,
executionData: workflowExecutionLogs.executionData,
cost: workflowExecutionLogs.cost,
})
.from(workflowExecutionLogs)
.where(and(...logConditions))
.orderBy(desc(workflowExecutionLogs.startedAt))
.limit(50)
// Calculate metrics for each time segment
const errorRates: { timestamp: string; value: number }[] = []
const durations: { timestamp: string; value: number }[] = []
const executionCounts: { timestamp: string; value: number }[] = []
for (let i = 0; i < dataPoints; i++) {
const segmentStart = new Date(startTime.getTime() + i * segmentDurationMs)
const segmentEnd = new Date(startTime.getTime() + (i + 1) * segmentDurationMs)
// Filter logs for this segment
const segmentLogs = logs.filter((log) => {
const logTime = log.startedAt.getTime()
return logTime >= segmentStart.getTime() && logTime < segmentEnd.getTime()
})
const totalExecutions = segmentLogs.length
const errorExecutions = segmentLogs.filter((log) => log.level === 'error').length
const errorRate = totalExecutions > 0 ? (errorExecutions / totalExecutions) * 100 : 0
// Calculate average duration for this segment
const durationsInSegment = segmentLogs
.filter((log) => log.totalDurationMs !== null)
.map((log) => log.totalDurationMs!)
const avgDuration =
durationsInSegment.length > 0
? durationsInSegment.reduce((sum, d) => sum + d, 0) / durationsInSegment.length
: 0
errorRates.push({
timestamp: segmentStart.toISOString(),
value: errorRate,
})
durations.push({
timestamp: segmentStart.toISOString(),
value: avgDuration,
})
executionCounts.push({
timestamp: segmentStart.toISOString(),
value: totalExecutions,
})
}
// Helper function to recursively search for error in trace spans
const findErrorInSpans = (spans: any[]): string | null => {
for (const span of spans) {
if (span.status === 'error' && span.output?.error) {
return span.output.error
}
if (span.children && Array.isArray(span.children)) {
const childError = findErrorInSpans(span.children)
if (childError) return childError
}
}
return null
}
// Helper function to get all blocks from trace spans (flattened)
const flattenTraceSpans = (spans: any[]): any[] => {
const flattened: any[] = []
for (const span of spans) {
if (span.type !== 'workflow') {
flattened.push(span)
}
if (span.children && Array.isArray(span.children)) {
flattened.push(...flattenTraceSpans(span.children))
}
}
return flattened
}
// Format logs for response
const formattedLogs = logs.map((log) => {
const executionData = log.executionData as any
const triggerData = executionData?.trigger || {}
const traceSpans = executionData?.traceSpans || []
// Extract error message from trace spans
let errorMessage = null
if (log.level === 'error') {
errorMessage = findErrorInSpans(traceSpans)
// Fallback to executionData.errorDetails
if (!errorMessage) {
errorMessage = executionData?.errorDetails?.error || null
}
}
// Extract outputs from the last block in trace spans
let outputs = null
let cost = null
if (traceSpans.length > 0) {
// Flatten all blocks from trace spans
const allBlocks = flattenTraceSpans(traceSpans)
// Find the last successful block execution
const successBlocks = allBlocks.filter(
(span: any) =>
span.status !== 'error' && span.output && Object.keys(span.output).length > 0
)
if (successBlocks.length > 0) {
const lastBlock = successBlocks[successBlocks.length - 1]
const blockOutput = lastBlock.output || {}
// Clean up the output to show meaningful data
// Priority: content > result > data > the whole output object
if (blockOutput.content) {
outputs = { content: blockOutput.content }
} else if (blockOutput.result !== undefined) {
outputs = { result: blockOutput.result }
} else if (blockOutput.data !== undefined) {
outputs = { data: blockOutput.data }
} else {
// Filter out internal/metadata fields for cleaner display
const cleanOutput: any = {}
for (const [key, value] of Object.entries(blockOutput)) {
if (
![
'executionTime',
'tokens',
'model',
'cost',
'childTraceSpans',
'error',
'stackTrace',
].includes(key)
) {
cleanOutput[key] = value
}
}
if (Object.keys(cleanOutput).length > 0) {
outputs = cleanOutput
}
}
// Extract cost from the block output
if (blockOutput.cost) {
cost = blockOutput.cost
}
}
}
// Use the cost stored at the top-level in workflowExecutionLogs table
// This is the same cost shown in the logs page
const logCost = log.cost as any
return {
id: log.id,
executionId: log.executionId,
startedAt: log.startedAt.toISOString(),
level: log.level,
trigger: log.trigger,
triggerUserId: triggerData.userId || null,
triggerInputs: triggerData.inputs || triggerData.data || null,
outputs,
errorMessage,
duration: log.totalDurationMs,
cost: logCost
? {
input: logCost.input || 0,
output: logCost.output || 0,
total: logCost.total || 0,
}
: null,
}
})
logger.debug(`[${requestId}] Successfully calculated workflow details`)
logger.debug(`[${requestId}] Returning ${formattedLogs.length} execution logs`)
return NextResponse.json({
errorRates,
durations,
executionCounts,
logs: formattedLogs,
startTime: startTime.toISOString(),
endTime: endTime.toISOString(),
})
} catch (error) {
logger.error(`[${requestId}] Error fetching workflow details:`, error)
return NextResponse.json({ error: 'Failed to fetch workflow details' }, { status: 500 })
}
}
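The new route above returns per-segment error rates, durations, and execution counts alongside formatted logs. A hedged client-side sketch of calling it; this compare view does not show file paths, so the URL below is an assumption and only the query parameters and response shape come from the code.

// Hypothetical fetch against the workflow execution details endpoint added
// above. The path segment layout is assumed; timeFilter/triggers and the
// response fields mirror the route's query schema and NextResponse.json call.
async function fetchWorkflowDetails(workspaceId: string, workflowId: string) {
  const params = new URLSearchParams({ timeFilter: '24h', triggers: 'api,schedule' })
  const res = await fetch(
    `/api/workspaces/${workspaceId}/executions/${workflowId}?${params}` // assumed path
  )
  if (!res.ok) throw new Error(`Request failed: ${res.status}`)
  return (await res.json()) as {
    errorRates: { timestamp: string; value: number }[]
    durations: { timestamp: string; value: number }[]
    executionCounts: { timestamp: string; value: number }[]
    logs: unknown[]
    startTime: string
    endTime: string
  }
}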

View File

@@ -0,0 +1,223 @@
import { db } from '@sim/db'
import { permissions, workflow, workflowExecutionLogs } from '@sim/db/schema'
import { and, eq, gte, inArray } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console/logger'
import { generateRequestId } from '@/lib/utils'
const logger = createLogger('ExecutionHistoryAPI')
const QueryParamsSchema = z.object({
timeFilter: z.enum(['1h', '12h', '24h', '1w']).optional(),
startTime: z.string().optional(),
endTime: z.string().optional(),
segments: z.coerce.number().min(1).max(200).default(120),
workflowIds: z.string().optional(),
folderIds: z.string().optional(),
triggers: z.string().optional(),
})
interface TimeSegment {
successRate: number
timestamp: string
hasExecutions: boolean
totalExecutions: number
successfulExecutions: number
}
interface WorkflowExecution {
workflowId: string
workflowName: string
segments: TimeSegment[]
overallSuccessRate: number
}
function getTimeRangeMs(filter: string): number {
switch (filter) {
case '1h':
return 60 * 60 * 1000
case '12h':
return 12 * 60 * 60 * 1000
case '24h':
return 24 * 60 * 60 * 1000
case '1w':
return 7 * 24 * 60 * 60 * 1000
default:
return 24 * 60 * 60 * 1000
}
}
export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const requestId = generateRequestId()
try {
const session = await getSession()
if (!session?.user?.id) {
logger.warn(`[${requestId}] Unauthorized execution history access attempt`)
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const userId = session.user.id
const { id: workspaceId } = await params
const { searchParams } = new URL(request.url)
const queryParams = QueryParamsSchema.parse(Object.fromEntries(searchParams.entries()))
// Calculate time range - use custom times if provided, otherwise use timeFilter
let endTime: Date
let startTime: Date
if (queryParams.startTime && queryParams.endTime) {
startTime = new Date(queryParams.startTime)
endTime = new Date(queryParams.endTime)
} else {
endTime = new Date()
const timeRangeMs = getTimeRangeMs(queryParams.timeFilter || '24h')
startTime = new Date(endTime.getTime() - timeRangeMs)
}
const timeRangeMs = endTime.getTime() - startTime.getTime()
const segmentDurationMs = timeRangeMs / queryParams.segments
logger.debug(`[${requestId}] Fetching execution history for workspace ${workspaceId}`)
logger.debug(
`[${requestId}] Time range: ${startTime.toISOString()} to ${endTime.toISOString()}`
)
logger.debug(
`[${requestId}] Segments: ${queryParams.segments}, duration: ${segmentDurationMs}ms`
)
// Check permissions
const [permission] = await db
.select()
.from(permissions)
.where(
and(
eq(permissions.entityType, 'workspace'),
eq(permissions.entityId, workspaceId),
eq(permissions.userId, userId)
)
)
.limit(1)
if (!permission) {
logger.warn(`[${requestId}] User ${userId} has no permission for workspace ${workspaceId}`)
return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
}
// Build workflow query conditions
const workflowConditions = [eq(workflow.workspaceId, workspaceId)]
// Apply workflow ID filter
if (queryParams.workflowIds) {
const workflowIdList = queryParams.workflowIds.split(',')
workflowConditions.push(inArray(workflow.id, workflowIdList))
}
// Apply folder ID filter
if (queryParams.folderIds) {
const folderIdList = queryParams.folderIds.split(',')
workflowConditions.push(inArray(workflow.folderId, folderIdList))
}
// Get all workflows in the workspace with optional filters
const workflows = await db
.select({
id: workflow.id,
name: workflow.name,
})
.from(workflow)
.where(and(...workflowConditions))
logger.debug(`[${requestId}] Found ${workflows.length} workflows`)
// Use Promise.all to fetch logs in parallel per workflow
// This is better than a single query when workflows have 10k+ logs each
const workflowExecutions: WorkflowExecution[] = await Promise.all(
workflows.map(async (wf) => {
// Build conditions for log filtering
const logConditions = [
eq(workflowExecutionLogs.workflowId, wf.id),
gte(workflowExecutionLogs.startedAt, startTime),
]
// Add trigger filter if specified
if (queryParams.triggers) {
const triggerList = queryParams.triggers.split(',')
logConditions.push(inArray(workflowExecutionLogs.trigger, triggerList))
}
// Fetch logs for this workflow - runs in parallel with others
const logs = await db
.select({
id: workflowExecutionLogs.id,
level: workflowExecutionLogs.level,
startedAt: workflowExecutionLogs.startedAt,
})
.from(workflowExecutionLogs)
.where(and(...logConditions))
// Initialize segments with timestamps
const segments: TimeSegment[] = []
let totalSuccess = 0
let totalExecutions = 0
for (let i = 0; i < queryParams.segments; i++) {
const segmentStart = new Date(startTime.getTime() + i * segmentDurationMs)
const segmentEnd = new Date(startTime.getTime() + (i + 1) * segmentDurationMs)
// Count executions in this segment
const segmentLogs = logs.filter((log) => {
const logTime = log.startedAt.getTime()
return logTime >= segmentStart.getTime() && logTime < segmentEnd.getTime()
})
const segmentTotal = segmentLogs.length
const segmentErrors = segmentLogs.filter((log) => log.level === 'error').length
const segmentSuccess = segmentTotal - segmentErrors
// Calculate success rate (default to 100% if no executions in this segment)
const hasExecutions = segmentTotal > 0
const successRate = hasExecutions ? (segmentSuccess / segmentTotal) * 100 : 100
segments.push({
successRate,
timestamp: segmentStart.toISOString(),
hasExecutions,
totalExecutions: segmentTotal,
successfulExecutions: segmentSuccess,
})
totalExecutions += segmentTotal
totalSuccess += segmentSuccess
}
// Calculate overall success rate (percentage of non-errored executions)
const overallSuccessRate =
totalExecutions > 0 ? (totalSuccess / totalExecutions) * 100 : 100
return {
workflowId: wf.id,
workflowName: wf.name,
segments,
overallSuccessRate,
}
})
)
logger.debug(
`[${requestId}] Successfully calculated execution history for ${workflowExecutions.length} workflows`
)
return NextResponse.json({
workflows: workflowExecutions,
segments: queryParams.segments,
startTime: startTime.toISOString(),
endTime: endTime.toISOString(),
})
} catch (error) {
logger.error(`[${requestId}] Error fetching execution history:`, error)
return NextResponse.json({ error: 'Failed to fetch execution history' }, { status: 500 })
}
}
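The history endpoint's response contract is fixed by the `TimeSegment` and `WorkflowExecution` interfaces and the final `NextResponse.json` call above. A small sketch of aggregating that response into workspace-level numbers of the kind the dashboard KPIs display; the interfaces are copied from the route, the aggregation itself is illustrative.

// Illustrative aggregation over the history endpoint's response. The shapes
// are copied from the route above; how a dashboard folds them together is an
// example, not the shipped implementation.
interface TimeSegment {
  successRate: number
  timestamp: string
  hasExecutions: boolean
  totalExecutions: number
  successfulExecutions: number
}

interface WorkflowExecution {
  workflowId: string
  workflowName: string
  segments: TimeSegment[]
  overallSuccessRate: number
}

function summarize(workflows: WorkflowExecution[]) {
  let total = 0
  let successful = 0
  for (const wf of workflows) {
    for (const seg of wf.segments) {
      total += seg.totalExecutions
      successful += seg.successfulExecutions
    }
  }
  return {
    totalExecutions: total,
    successfulExecutions: successful,
    failedExecutions: total - successful,
    successRate: total > 0 ? (successful / total) * 100 : 100,
  }
}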

View File

@@ -61,17 +61,21 @@ describe('Workspace Invitation [invitationId] API Route', () => {
hasWorkspaceAdminAccess: mockHasWorkspaceAdminAccess,
}))
vi.doMock('@/lib/env', () => ({
env: {
vi.doMock('@/lib/env', () => {
const mockEnv = {
NEXT_PUBLIC_APP_URL: 'https://test.sim.ai',
BILLING_ENABLED: false,
},
isTruthy: (value: string | boolean | number | undefined) =>
typeof value === 'string'
? value.toLowerCase() === 'true' || value === '1'
: Boolean(value),
getEnv: (variable: string) => process.env[variable],
}))
}
return {
env: mockEnv,
isTruthy: (value: string | boolean | number | undefined) =>
typeof value === 'string'
? value.toLowerCase() === 'true' || value === '1'
: Boolean(value),
getEnv: (variable: string) =>
mockEnv[variable as keyof typeof mockEnv] ?? process.env[variable],
}
})
mockTransaction = vi.fn()
const mockDbChain = {
@@ -384,17 +388,21 @@ describe('Workspace Invitation [invitationId] API Route', () => {
vi.doMock('@/lib/permissions/utils', () => ({
hasWorkspaceAdminAccess: vi.fn(),
}))
vi.doMock('@/lib/env', () => ({
env: {
vi.doMock('@/lib/env', () => {
const mockEnv = {
NEXT_PUBLIC_APP_URL: 'https://test.sim.ai',
BILLING_ENABLED: false,
},
isTruthy: (value: string | boolean | number | undefined) =>
typeof value === 'string'
? value.toLowerCase() === 'true' || value === '1'
: Boolean(value),
getEnv: (variable: string) => process.env[variable],
}))
}
return {
env: mockEnv,
isTruthy: (value: string | boolean | number | undefined) =>
typeof value === 'string'
? value.toLowerCase() === 'true' || value === '1'
: Boolean(value),
getEnv: (variable: string) =>
mockEnv[variable as keyof typeof mockEnv] ?? process.env[variable],
}
})
vi.doMock('@sim/db/schema', () => ({
workspaceInvitation: { id: 'id' },
}))

View File

@@ -14,8 +14,8 @@ import { WorkspaceInvitationEmail } from '@/components/emails/workspace-invitati
import { getSession } from '@/lib/auth'
import { sendEmail } from '@/lib/email/mailer'
import { getFromEmailAddress } from '@/lib/email/utils'
import { env } from '@/lib/env'
import { hasWorkspaceAdminAccess } from '@/lib/permissions/utils'
import { getBaseUrl } from '@/lib/urls/utils'
// GET /api/workspaces/invitations/[invitationId] - Get invitation details OR accept via token
export async function GET(
@@ -30,12 +30,7 @@ export async function GET(
if (!session?.user?.id) {
// For token-based acceptance flows, redirect to login
if (isAcceptFlow) {
return NextResponse.redirect(
new URL(
`/invite/${invitationId}?token=${token}`,
env.NEXT_PUBLIC_APP_URL || 'https://sim.ai'
)
)
return NextResponse.redirect(new URL(`/invite/${invitationId}?token=${token}`, getBaseUrl()))
}
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
@@ -54,10 +49,7 @@ export async function GET(
if (!invitation) {
if (isAcceptFlow) {
return NextResponse.redirect(
new URL(
`/invite/${invitationId}?error=invalid-token`,
env.NEXT_PUBLIC_APP_URL || 'https://sim.ai'
)
new URL(`/invite/${invitationId}?error=invalid-token`, getBaseUrl())
)
}
return NextResponse.json({ error: 'Invitation not found or has expired' }, { status: 404 })
@@ -66,10 +58,7 @@ export async function GET(
if (new Date() > new Date(invitation.expiresAt)) {
if (isAcceptFlow) {
return NextResponse.redirect(
new URL(
`/invite/${invitation.id}?error=expired`,
env.NEXT_PUBLIC_APP_URL || 'https://sim.ai'
)
new URL(`/invite/${invitation.id}?error=expired`, getBaseUrl())
)
}
return NextResponse.json({ error: 'Invitation has expired' }, { status: 400 })
@@ -84,10 +73,7 @@ export async function GET(
if (!workspaceDetails) {
if (isAcceptFlow) {
return NextResponse.redirect(
new URL(
`/invite/${invitation.id}?error=workspace-not-found`,
env.NEXT_PUBLIC_APP_URL || 'https://sim.ai'
)
new URL(`/invite/${invitation.id}?error=workspace-not-found`, getBaseUrl())
)
}
return NextResponse.json({ error: 'Workspace not found' }, { status: 404 })
@@ -96,10 +82,7 @@ export async function GET(
if (isAcceptFlow) {
if (invitation.status !== ('pending' as WorkspaceInvitationStatus)) {
return NextResponse.redirect(
new URL(
`/invite/${invitation.id}?error=already-processed`,
env.NEXT_PUBLIC_APP_URL || 'https://sim.ai'
)
new URL(`/invite/${invitation.id}?error=already-processed`, getBaseUrl())
)
}
@@ -114,10 +97,7 @@ export async function GET(
if (!userData) {
return NextResponse.redirect(
new URL(
`/invite/${invitation.id}?error=user-not-found`,
env.NEXT_PUBLIC_APP_URL || 'https://sim.ai'
)
new URL(`/invite/${invitation.id}?error=user-not-found`, getBaseUrl())
)
}
@@ -125,10 +105,7 @@ export async function GET(
if (!isValidMatch) {
return NextResponse.redirect(
new URL(
`/invite/${invitation.id}?error=email-mismatch`,
env.NEXT_PUBLIC_APP_URL || 'https://sim.ai'
)
new URL(`/invite/${invitation.id}?error=email-mismatch`, getBaseUrl())
)
}
@@ -154,10 +131,7 @@ export async function GET(
.where(eq(workspaceInvitation.id, invitation.id))
return NextResponse.redirect(
new URL(
`/workspace/${invitation.workspaceId}/w`,
env.NEXT_PUBLIC_APP_URL || 'https://sim.ai'
)
new URL(`/workspace/${invitation.workspaceId}/w`, getBaseUrl())
)
}
@@ -181,12 +155,7 @@ export async function GET(
.where(eq(workspaceInvitation.id, invitation.id))
})
return NextResponse.redirect(
new URL(
`/workspace/${invitation.workspaceId}/w`,
env.NEXT_PUBLIC_APP_URL || 'https://sim.ai'
)
)
return NextResponse.redirect(new URL(`/workspace/${invitation.workspaceId}/w`, getBaseUrl()))
}
return NextResponse.json({
@@ -298,7 +267,7 @@ export async function POST(
.set({ token: newToken, expiresAt: newExpiresAt, updatedAt: new Date() })
.where(eq(workspaceInvitation.id, invitationId))
const baseUrl = env.NEXT_PUBLIC_APP_URL || 'https://sim.ai'
const baseUrl = getBaseUrl()
const invitationLink = `${baseUrl}/invite/${invitationId}?token=${newToken}`
const emailHtml = await render(

View File

@@ -15,8 +15,8 @@ import { WorkspaceInvitationEmail } from '@/components/emails/workspace-invitati
import { getSession } from '@/lib/auth'
import { sendEmail } from '@/lib/email/mailer'
import { getFromEmailAddress } from '@/lib/email/utils'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { getBaseUrl } from '@/lib/urls/utils'
export const dynamic = 'force-dynamic'
@@ -232,7 +232,7 @@ async function sendInvitationEmail({
token: string
}) {
try {
const baseUrl = env.NEXT_PUBLIC_APP_URL || 'https://sim.ai'
const baseUrl = getBaseUrl()
// Use invitation ID in path, token in query parameter for security
const invitationLink = `${baseUrl}/invite/${invitationId}?token=${token}`

View File

@@ -48,7 +48,7 @@ export async function GET() {
<rss version="2.0">
<channel>
<title>Sim Changelog</title>
<link>https://sim.dev/changelog</link>
<link>https://sim.ai/changelog</link>
<description>Latest changes, fixes and updates in Sim.</description>
<language>en-us</language>
${items}

View File

@@ -38,7 +38,6 @@ export default function manifest(): MetadataRoute.Manifest {
short_name: 'New',
description: 'Create a new AI workflow',
url: '/workspace',
icons: [{ src: '/icons/new-workflow.png', sizes: '192x192' }],
},
],
lang: 'en-US',

View File

@@ -4,7 +4,7 @@ import Landing from '@/app/(landing)/landing'
export const metadata: Metadata = {
title: 'Sim - AI Agent Workflow Builder | Open Source Platform',
description:
'Open-source AI agent workflow builder used by 30,000+ developers. Build and deploy agentic workflows with visual drag-and-drop interface. Connect 100+ apps. SOC2 and HIPAA compliant. Used by startups to Fortune 500 companies.',
'Open-source AI agent workflow builder used by 50,000+ developers. Build and deploy agentic workflows with visual drag-and-drop interface. Connect 100+ apps. SOC2 and HIPAA compliant. Used by startups to Fortune 500 companies.',
keywords:
'AI agent workflow builder, agentic workflows, open source AI, visual workflow builder, AI automation, LLM workflows, AI agents, workflow automation, no-code AI, SOC2 compliant, HIPAA compliant, enterprise AI',
authors: [{ name: 'Sim Studio' }],
@@ -18,7 +18,7 @@ export const metadata: Metadata = {
openGraph: {
title: 'Sim - AI Agent Workflow Builder | Open Source',
description:
'Open-source platform used by 30,000+ developers. Build and deploy agentic workflows with drag-and-drop interface. SOC2 & HIPAA compliant. Connect 100+ apps.',
'Open-source platform used by 50,000+ developers. Build and deploy agentic workflows with drag-and-drop interface. SOC2 & HIPAA compliant. Connect 100+ apps.',
type: 'website',
url: 'https://sim.ai',
siteName: 'Sim',
@@ -45,7 +45,7 @@ export const metadata: Metadata = {
creator: '@simdotai',
title: 'Sim - AI Agent Workflow Builder | Open Source',
description:
'Open-source platform for agentic workflows. 30,000+ developers. Visual builder. 100+ integrations. SOC2 & HIPAA compliant.',
'Open-source platform for agentic workflows. 50,000+ developers. Visual builder. 100+ integrations. SOC2 & HIPAA compliant.',
images: {
url: '/social/twitter-image.png',
alt: 'Sim - Visual AI Workflow Builder',

View File

@@ -0,0 +1,185 @@
import type { ReactNode } from 'react'
import { Loader2, Play, RefreshCw, Search, Square } from 'lucide-react'
import { Button } from '@/components/ui/button'
import { Input } from '@/components/ui/input'
import { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger } from '@/components/ui/tooltip'
import { cn } from '@/lib/utils'
import Timeline from '@/app/workspace/[workspaceId]/logs/components/filters/components/timeline'
export function Controls({
searchQuery,
setSearchQuery,
isRefetching,
resetToNow,
live,
setLive,
viewMode,
setViewMode,
searchComponent,
showExport = true,
onExport,
}: {
searchQuery?: string
setSearchQuery?: (v: string) => void
isRefetching: boolean
resetToNow: () => void
live: boolean
setLive: (v: (prev: boolean) => boolean) => void
viewMode: string
setViewMode: (mode: 'logs' | 'dashboard') => void
searchComponent?: ReactNode
showExport?: boolean
onExport?: () => void
}) {
return (
<div className='mb-8 flex flex-col items-stretch justify-between gap-4 sm:flex-row sm:items-start'>
{searchComponent ? (
searchComponent
) : (
<div className='relative w-full max-w-md'>
<Search className='-translate-y-1/2 absolute top-1/2 left-3 h-[18px] w-[18px] text-muted-foreground' />
<Input
type='text'
placeholder='Search workflows...'
value={searchQuery}
onChange={(e) => setSearchQuery?.(e.target.value)}
className='h-9 w-full rounded-[11px] border-[#E5E5E5] bg-[#FFFFFF] pr-10 pl-9 dark:border-[#414141] dark:bg-[var(--surface-elevated)]'
/>
{searchQuery && (
<button
onClick={() => setSearchQuery?.('')}
className='-translate-y-1/2 absolute top-1/2 right-3 text-muted-foreground hover:text-foreground'
>
<svg
width='14'
height='14'
viewBox='0 0 16 16'
fill='none'
stroke='currentColor'
strokeWidth='2'
strokeLinecap='round'
>
<path d='M12 4L4 12M4 4l8 8' />
</svg>
</button>
)}
</div>
)}
<div className='ml-auto flex flex-shrink-0 items-center gap-3'>
<Tooltip>
<TooltipTrigger asChild>
<Button
variant='ghost'
size='icon'
onClick={resetToNow}
className='h-9 rounded-[11px] hover:bg-secondary'
disabled={isRefetching}
>
{isRefetching ? (
<Loader2 className='h-5 w-5 animate-spin' />
) : (
<RefreshCw className='h-5 w-5' />
)}
<span className='sr-only'>Refresh</span>
</Button>
</TooltipTrigger>
<TooltipContent>{isRefetching ? 'Refreshing...' : 'Refresh'}</TooltipContent>
</Tooltip>
{showExport && viewMode !== 'dashboard' && (
<Tooltip>
<TooltipTrigger asChild>
<Button
variant='ghost'
size='icon'
onClick={onExport}
className='h-9 rounded-[11px] hover:bg-secondary'
aria-label='Export CSV'
>
<svg
xmlns='http://www.w3.org/2000/svg'
viewBox='0 0 24 24'
fill='none'
stroke='currentColor'
strokeWidth='2'
className='h-5 w-5'
>
<path d='M21 15v4a2 2 0 0 1-2 2H5a2 2 0 0 1-2-2v-4' />
<polyline points='7 10 12 15 17 10' />
<line x1='12' y1='15' x2='12' y2='3' />
</svg>
<span className='sr-only'>Export CSV</span>
</Button>
</TooltipTrigger>
<TooltipContent>Export CSV</TooltipContent>
</Tooltip>
)}
<div className='inline-flex h-9 items-center rounded-[11px] border bg-card p-1 shadow-sm'>
<Button
variant='ghost'
size='sm'
onClick={() => setLive((v) => !v)}
className={cn(
'h-7 rounded-[8px] px-3 font-normal text-xs',
live ? 'bg-muted text-foreground' : 'text-muted-foreground hover:text-foreground'
)}
aria-pressed={live}
>
{live ? (
<>
<Square className='mr-1.5 h-3 w-3 fill-current' />
Live
</>
) : (
<>
<Play className='mr-1.5 h-3 w-3' />
Live
</>
)}
</Button>
</div>
<div className='inline-flex h-9 items-center rounded-[11px] border bg-card p-1 shadow-sm'>
<Button
variant='ghost'
size='sm'
onClick={() => setViewMode('logs')}
className={cn(
'h-7 rounded-[8px] px-3 font-normal text-xs',
(viewMode as string) !== 'dashboard'
? 'bg-muted text-foreground'
: 'text-muted-foreground hover:text-foreground'
)}
aria-pressed={(viewMode as string) !== 'dashboard'}
>
Logs
</Button>
<Button
variant='ghost'
size='sm'
onClick={() => setViewMode('dashboard')}
className={cn(
'h-7 rounded-[8px] px-3 font-normal text-xs',
(viewMode as string) === 'dashboard'
? 'bg-muted text-foreground'
: 'text-muted-foreground hover:text-foreground'
)}
aria-pressed={(viewMode as string) === 'dashboard'}
>
Dashboard
</Button>
</div>
</div>
<div className='sm:hidden'>
<TooltipProvider>
<Timeline />
</TooltipProvider>
</div>
</div>
)
}
export default Controls
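
For orientation, a minimal, hypothetical usage sketch of this Controls header follows; the parent state names and the no-op handlers are illustrative and not part of this diff:

import { useState } from 'react'
import Controls from '@/app/workspace/[workspaceId]/logs/components/dashboard/controls'

function ExampleHeader() {
  const [searchQuery, setSearchQuery] = useState('')
  const [live, setLive] = useState(false)
  const [viewMode, setViewMode] = useState<'logs' | 'dashboard'>('dashboard')
  const [isRefetching] = useState(false)

  return (
    <Controls
      searchQuery={searchQuery}
      setSearchQuery={setSearchQuery}
      isRefetching={isRefetching}
      resetToNow={() => {
        /* illustrative: the real caller refetches data and resets the window end */
      }}
      live={live}
      setLive={setLive}
      viewMode={viewMode}
      setViewMode={setViewMode}
      showExport={viewMode !== 'dashboard'}
      onExport={() => console.log('export CSV')}
    />
  )
}

export default ExampleHeader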

View File

@@ -0,0 +1,38 @@
export interface AggregateMetrics {
totalExecutions: number
successfulExecutions: number
failedExecutions: number
activeWorkflows: number
successRate: number
}
export function KPIs({ aggregate }: { aggregate: AggregateMetrics }) {
return (
<div className='mb-5 grid grid-cols-1 gap-3 sm:grid-cols-2 lg:grid-cols-4'>
<div className='rounded-[12px] border bg-card p-4 shadow-sm'>
<div className='text-muted-foreground text-xs'>Total executions</div>
<div className='mt-1 font-semibold text-[22px] leading-6'>
{aggregate.totalExecutions.toLocaleString()}
</div>
</div>
<div className='rounded-[12px] border bg-card p-4 shadow-sm'>
<div className='text-muted-foreground text-xs'>Success rate</div>
<div className='mt-1 font-semibold text-[22px] leading-6'>
{aggregate.successRate.toFixed(1)}%
</div>
</div>
<div className='rounded-[12px] border bg-card p-4 shadow-sm'>
<div className='text-muted-foreground text-xs'>Failed executions</div>
<div className='mt-1 font-semibold text-[22px] leading-6'>
{aggregate.failedExecutions.toLocaleString()}
</div>
</div>
<div className='rounded-[12px] border bg-card p-4 shadow-sm'>
<div className='text-muted-foreground text-xs'>Active workflows</div>
<div className='mt-1 font-semibold text-[22px] leading-6'>{aggregate.activeWorkflows}</div>
</div>
</div>
)
}
export default KPIs
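
A quick sketch of how the aggregate object for this card row can be assembled from raw counts; the numbers are made up, and in the dashboard the values are summed from the fetched workflow segments:

import KPIs, { type AggregateMetrics } from '@/app/workspace/[workspaceId]/logs/components/dashboard/kpis'

// Hypothetical counts; the dashboard derives these from its segment data.
const total = 1240
const successful = 1198

const aggregate: AggregateMetrics = {
  totalExecutions: total,
  successfulExecutions: successful,
  failedExecutions: Math.max(total - successful, 0),
  activeWorkflows: 7,
  successRate: total > 0 ? (successful / total) * 100 : 100,
}

export const ExampleKpis = () => <KPIs aggregate={aggregate} />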

View File

@@ -0,0 +1,308 @@
import { useEffect, useRef, useState } from 'react'
import { TooltipProvider } from '@/components/ui/tooltip'
export interface LineChartPoint {
timestamp: string
value: number
}
export function LineChart({
data,
label,
color,
unit,
}: {
data: LineChartPoint[]
label: string
color: string
unit?: string
}) {
// Responsive sizing: chart fills its container width
const containerRef = useRef<HTMLDivElement | null>(null)
const [containerWidth, setContainerWidth] = useState<number>(420)
const width = containerWidth
const height = 176
// Add a touch more space below the axis so curves never visually clip it
const padding = { top: 18, right: 18, bottom: 32, left: 42 }
// Observe container width for responsiveness
useEffect(() => {
if (!containerRef.current) return
const element = containerRef.current
const ro = new ResizeObserver((entries) => {
const entry = entries[0]
if (entry?.contentRect) {
const w = Math.max(280, Math.floor(entry.contentRect.width))
setContainerWidth(w)
}
})
ro.observe(element)
// Initialize once immediately
const rect = element.getBoundingClientRect()
if (rect?.width) setContainerWidth(Math.max(280, Math.floor(rect.width)))
return () => ro.disconnect()
}, [])
const chartWidth = width - padding.left - padding.right
const chartHeight = height - padding.top - padding.bottom
const [hoverIndex, setHoverIndex] = useState<number | null>(null)
const [isDark, setIsDark] = useState<boolean>(true)
useEffect(() => {
if (typeof window === 'undefined') return
const el = document.documentElement
const update = () => setIsDark(el.classList.contains('dark'))
update()
const observer = new MutationObserver(update)
observer.observe(el, { attributes: true, attributeFilter: ['class'] })
return () => observer.disconnect()
}, [])
if (data.length === 0) {
return (
<div
className='flex items-center justify-center rounded-lg border bg-card p-4'
style={{ width, height }}
>
<p className='text-muted-foreground text-sm'>No data</p>
</div>
)
}
// Ensure nice padding on the y-domain so the line never hugs the axes
const rawMax = Math.max(...data.map((d) => d.value), 1)
const rawMin = Math.min(...data.map((d) => d.value), 0)
const paddedMax = rawMax === 0 ? 1 : rawMax * 1.1
const paddedMin = Math.min(0, rawMin) // never below zero for our metrics
const maxValue = Math.ceil(paddedMax)
const minValue = Math.floor(paddedMin)
const valueRange = maxValue - minValue || 1
const yMin = padding.top + 3
const yMax = padding.top + chartHeight - 3
const scaledPoints = data.map((d, i) => {
const x = padding.left + (i / (data.length - 1 || 1)) * chartWidth
const rawY = padding.top + chartHeight - ((d.value - minValue) / valueRange) * chartHeight
// keep the line safely within the plotting area to avoid clipping behind the x-axis
const y = Math.max(yMin, Math.min(yMax, rawY))
return { x, y }
})
const pathD = (() => {
if (scaledPoints.length <= 1) return ''
const p = scaledPoints
const tension = 0.2
let d = `M ${p[0].x} ${p[0].y}`
for (let i = 0; i < p.length - 1; i++) {
const p0 = p[i - 1] || p[i]
const p1 = p[i]
const p2 = p[i + 1]
const p3 = p[i + 2] || p[i + 1]
const cp1x = p1.x + ((p2.x - p0.x) / 6) * tension
let cp1y = p1.y + ((p2.y - p0.y) / 6) * tension
const cp2x = p2.x - ((p3.x - p1.x) / 6) * tension
let cp2y = p2.y - ((p3.y - p1.y) / 6) * tension
// Clamp control points vertically to avoid bezier overshoot below the axis
cp1y = Math.max(yMin, Math.min(yMax, cp1y))
cp2y = Math.max(yMin, Math.min(yMax, cp2y))
d += ` C ${cp1x} ${cp1y}, ${cp2x} ${cp2y}, ${p2.x} ${p2.y}`
}
return d
})()
return (
<div ref={containerRef} className='w-full rounded-[11px] border bg-card p-4 shadow-sm'>
<h4 className='mb-3 font-medium text-foreground text-sm'>{label}</h4>
<TooltipProvider delayDuration={0}>
<div className='relative' style={{ width, height }}>
<svg
width={width}
height={height}
className='overflow-visible'
onMouseMove={(e) => {
if (scaledPoints.length === 0) return
const rect = (e.currentTarget as SVGSVGElement).getBoundingClientRect()
const x = e.clientX - rect.left
const clamped = Math.max(padding.left, Math.min(width - padding.right, x))
const ratio = (clamped - padding.left) / (chartWidth || 1)
const i = Math.round(ratio * (scaledPoints.length - 1))
setHoverIndex(i)
}}
onMouseLeave={() => setHoverIndex(null)}
>
<defs>
<linearGradient id={`area-${label.replace(/\s+/g, '-')}`} x1='0' x2='0' y1='0' y2='1'>
<stop offset='0%' stopColor={color} stopOpacity={isDark ? 0.25 : 0.45} />
<stop offset='100%' stopColor={color} stopOpacity={isDark ? 0.03 : 0.08} />
</linearGradient>
<clipPath id={`clip-${label.replace(/\s+/g, '-')}`}>
<rect
x={padding.left}
y={yMin}
width={chartWidth}
height={chartHeight - (yMin - padding.top) * 2}
rx='2'
/>
</clipPath>
</defs>
<line
x1={padding.left}
y1={padding.top}
x2={padding.left}
y2={height - padding.bottom}
stroke='hsl(var(--border))'
strokeWidth='1'
/>
{[0.25, 0.5, 0.75].map((p) => (
<line
key={p}
x1={padding.left}
y1={padding.top + chartHeight * p}
x2={width - padding.right}
y2={padding.top + chartHeight * p}
stroke='hsl(var(--muted))'
strokeOpacity='0.35'
strokeWidth='1'
/>
))}
{/* axis baseline is drawn last (after line) to visually mask any overshoot */}
{scaledPoints.length > 1 && (
<path
d={`${pathD} L ${scaledPoints[scaledPoints.length - 1].x} ${height - padding.bottom} L ${scaledPoints[0].x} ${height - padding.bottom} Z`}
fill={`url(#area-${label.replace(/\s+/g, '-')})`}
stroke='none'
clipPath={`url(#clip-${label.replace(/\s+/g, '-')})`}
/>
)}
{scaledPoints.length > 1 ? (
<path
d={pathD}
fill='none'
stroke={color}
strokeWidth={isDark ? 1.75 : 2.25}
strokeLinecap='round'
clipPath={`url(#clip-${label.replace(/\s+/g, '-')})`}
style={{ mixBlendMode: isDark ? 'screen' : 'normal' }}
/>
) : (
// Single-point series: show a dot so the value doesn't "disappear"
<circle cx={scaledPoints[0].x} cy={scaledPoints[0].y} r='3' fill={color} />
)}
{hoverIndex !== null && scaledPoints[hoverIndex] && scaledPoints.length > 1 && (
<g pointerEvents='none' clipPath={`url(#clip-${label.replace(/\s+/g, '-')})`}>
<line
x1={scaledPoints[hoverIndex].x}
y1={padding.top}
x2={scaledPoints[hoverIndex].x}
y2={height - padding.bottom}
stroke={color}
strokeOpacity='0.35'
strokeDasharray='3 3'
/>
<circle
cx={scaledPoints[hoverIndex].x}
cy={scaledPoints[hoverIndex].y}
r='3'
fill={color}
/>
</g>
)}
{(() => {
if (data.length < 2) return null
const idx = [0, Math.floor(data.length / 2), data.length - 1]
return idx.map((i) => {
const x = padding.left + (i / (data.length - 1 || 1)) * chartWidth
const tsSource = data[i]?.timestamp
if (!tsSource) return null
const ts = new Date(tsSource)
const labelStr = Number.isNaN(ts.getTime())
? ''
: ts.toLocaleString('en-US', { month: 'short', day: 'numeric' })
return (
<text
key={i}
x={x}
y={height - padding.bottom + 14}
fontSize='10'
textAnchor='middle'
fill='hsl(var(--muted-foreground))'
>
{labelStr}
</text>
)
})
})()}
<text
x={padding.left - 10}
y={padding.top}
textAnchor='end'
fontSize='10'
fill='hsl(var(--muted-foreground))'
>
{maxValue}
{unit}
</text>
<text
x={padding.left - 10}
y={height - padding.bottom}
textAnchor='end'
fontSize='10'
fill='hsl(var(--muted-foreground))'
>
{minValue}
{unit}
</text>
<line
x1={padding.left}
y1={height - padding.bottom}
x2={width - padding.right}
y2={height - padding.bottom}
stroke='hsl(var(--border))'
strokeWidth='1'
/>
</svg>
{hoverIndex !== null &&
scaledPoints[hoverIndex] &&
(() => {
const pt = scaledPoints[hoverIndex]
const val = data[hoverIndex]?.value
let formatted = ''
if (typeof val === 'number' && Number.isFinite(val)) {
const u = unit || ''
if (u.includes('%')) {
formatted = `${val.toFixed(1)}%`
} else if (u.toLowerCase().includes('ms')) {
formatted = `${Math.round(val)}ms`
} else if (u.toLowerCase().includes('exec')) {
formatted = `${Math.round(val)}${u}` // keep label like " execs"
} else {
formatted = `${Math.round(val)}${u}`
}
}
const left = Math.min(Math.max(pt.x + 8, padding.left), width - padding.right - 60)
const top = Math.min(Math.max(pt.y - 26, padding.top), height - padding.bottom - 18)
return (
<div
className='pointer-events-none absolute rounded-md bg-background/80 px-2 py-1 font-medium text-[11px] shadow-sm ring-1 ring-border backdrop-blur'
style={{ left, top }}
>
{formatted}
</div>
)
})()}
</div>
</TooltipProvider>
</div>
)
}
export default LineChart
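
The path builder above is a Catmull-Rom-to-cubic-Bezier conversion with a small tension factor and a vertical clamp on the control points. Isolated as a standalone helper (same math as the component; the function name is hypothetical), it looks like this:

interface Pt {
  x: number
  y: number
}

// For each segment p1 -> p2, derive cubic Bezier control points from the
// neighbours p0 and p3 (Catmull-Rom style), then clamp the control-point
// y values so the curve cannot overshoot the plotting area.
function cubicPath(points: Pt[], yMin: number, yMax: number, tension = 0.2): string {
  if (points.length <= 1) return ''
  const clamp = (v: number) => Math.max(yMin, Math.min(yMax, v))
  let d = `M ${points[0].x} ${points[0].y}`
  for (let i = 0; i < points.length - 1; i++) {
    const p0 = points[i - 1] || points[i]
    const p1 = points[i]
    const p2 = points[i + 1]
    const p3 = points[i + 2] || points[i + 1]
    const cp1x = p1.x + ((p2.x - p0.x) / 6) * tension
    const cp1y = clamp(p1.y + ((p2.y - p0.y) / 6) * tension)
    const cp2x = p2.x - ((p3.x - p1.x) / 6) * tension
    const cp2y = clamp(p2.y - ((p3.y - p1.y) / 6) * tension)
    d += ` C ${cp1x} ${cp1y}, ${cp2x} ${cp2y}, ${p2.x} ${p2.y}`
  }
  return d
}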

View File

@@ -0,0 +1,142 @@
import type React from 'react'
import { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger } from '@/components/ui/tooltip'
export interface StatusBarSegment {
successRate: number
hasExecutions: boolean
totalExecutions: number
successfulExecutions: number
timestamp: string
}
export function StatusBar({
segments,
selectedSegmentIndices,
onSegmentClick,
workflowId,
segmentDurationMs,
}: {
segments: StatusBarSegment[]
selectedSegmentIndices: number[] | null
onSegmentClick: (
workflowId: string,
index: number,
timestamp: string,
mode: 'single' | 'toggle' | 'range'
) => void
workflowId: string
segmentDurationMs: number
}) {
return (
<TooltipProvider delayDuration={0}>
<div className='flex select-none items-stretch gap-[2px]'>
{segments.map((segment, i) => {
let color: string
let tooltipContent: React.ReactNode
const isSelected = Array.isArray(selectedSegmentIndices)
? selectedSegmentIndices.includes(i)
: false
if (!segment.hasExecutions) {
color = 'bg-gray-300/60 dark:bg-gray-500/40'
} else {
if (segment.successRate === 100) {
color = 'bg-emerald-400/90'
} else if (segment.successRate >= 95) {
color = 'bg-amber-400/90'
} else {
color = 'bg-red-400/90'
}
const start = new Date(segment.timestamp)
const end = new Date(start.getTime() + (segmentDurationMs || 0))
const rangeLabel = Number.isNaN(start.getTime())
? ''
: `${start.toLocaleString('en-US', { month: 'short', day: 'numeric', hour: 'numeric' })} ${end.toLocaleString('en-US', { hour: 'numeric', minute: '2-digit' })}`
tooltipContent = (
<div className='text-center'>
<div className='font-semibold'>{segment.successRate.toFixed(1)}%</div>
<div className='mt-1 text-xs'>
{segment.successfulExecutions ?? 0}/{segment.totalExecutions ?? 0} succeeded
</div>
{rangeLabel && (
<div className='mt-1 text-[11px] text-muted-foreground'>{rangeLabel}</div>
)}
</div>
)
}
// For empty segments: show a minimal tooltip with just the time range
if (!segment.hasExecutions) {
const start = new Date(segment.timestamp)
const end = new Date(start.getTime() + (segmentDurationMs || 0))
const rangeLabel = Number.isNaN(start.getTime())
? ''
: `${start.toLocaleString('en-US', { month: 'short', day: 'numeric', hour: 'numeric' })} ${end.toLocaleString('en-US', { hour: 'numeric', minute: '2-digit' })}`
return (
<Tooltip key={i}>
<TooltipTrigger asChild>
<div
className={`h-6 flex-1 rounded-[3px] ${color} cursor-pointer transition-[opacity,transform] hover:opacity-90 ${
isSelected
? 'relative z-10 ring-2 ring-primary ring-offset-1'
: 'relative z-0'
}`}
aria-label={`Segment ${i + 1}`}
onClick={(e) => {
e.stopPropagation()
const mode = e.shiftKey
? 'range'
: e.metaKey || e.ctrlKey
? 'toggle'
: 'single'
onSegmentClick(workflowId, i, segment.timestamp, mode)
}}
onMouseDown={(e) => {
// Avoid selecting surrounding text when shift-clicking
e.preventDefault()
}}
/>
</TooltipTrigger>
<TooltipContent side='top' className='select-none px-3 py-2'>
{rangeLabel && (
<div className='text-[11px] text-muted-foreground'>{rangeLabel}</div>
)}
</TooltipContent>
</Tooltip>
)
}
return (
<Tooltip key={i}>
<TooltipTrigger asChild>
<div
className={`h-6 flex-1 rounded-[3px] ${color} cursor-pointer transition-[opacity,transform] hover:opacity-90 ${
isSelected ? 'relative z-10 ring-2 ring-primary ring-offset-1' : 'relative z-0'
}`}
aria-label={`Segment ${i + 1}`}
onMouseDown={(e) => {
// Avoid selecting surrounding text when shift-clicking
e.preventDefault()
}}
onClick={(e) => {
e.stopPropagation()
const mode = e.shiftKey ? 'range' : e.metaKey || e.ctrlKey ? 'toggle' : 'single'
onSegmentClick(workflowId, i, segment.timestamp, mode)
}}
/>
</TooltipTrigger>
<TooltipContent side='top' className='select-none px-3 py-2'>
{tooltipContent}
</TooltipContent>
</Tooltip>
)
})}
</div>
</TooltipProvider>
)
}
export default StatusBar
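
The click handler that consumes these single/toggle/range modes lives in the dashboard page later in this diff. A simplified reduction of that selection logic, with a hypothetical helper name, is:

type Mode = 'single' | 'toggle' | 'range'

// prev: currently selected segment indices; anchor: last clicked index (used for shift-range).
function nextSelection(prev: number[], index: number, anchor: number | null, mode: Mode): number[] {
  if (mode === 'single') {
    // Clicking a selected segment deselects it; otherwise select only this one.
    return prev.includes(index) ? prev.filter((i) => i !== index) : [index]
  }
  if (mode === 'toggle') {
    const next = prev.includes(index) ? prev.filter((i) => i !== index) : [...prev, index]
    return next.sort((a, b) => a - b)
  }
  // range: union the existing selection with every index between the anchor and the click.
  const a = anchor ?? index
  const [start, end] = a < index ? [a, index] : [index, a]
  const range = Array.from({ length: end - start + 1 }, (_, i) => start + i)
  return Array.from(new Set([...prev, ...range])).sort((a, b) => a - b)
}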

View File

@@ -0,0 +1,17 @@
export const getTriggerColor = (trigger: string | null | undefined): string => {
if (!trigger) return '#9ca3af'
switch (trigger.toLowerCase()) {
case 'manual':
return '#9ca3af'
case 'schedule':
return '#10b981'
case 'webhook':
return '#f97316'
case 'chat':
return '#8b5cf6'
case 'api':
return '#3b82f6'
default:
return '#9ca3af'
}
}

View File

@@ -0,0 +1,378 @@
import { useMemo, useState } from 'react'
import { Info, Loader2 } from 'lucide-react'
import { useRouter } from 'next/navigation'
import { cn } from '@/lib/utils'
import LineChart, {
type LineChartPoint,
} from '@/app/workspace/[workspaceId]/logs/components/dashboard/line-chart'
import { getTriggerColor } from '@/app/workspace/[workspaceId]/logs/components/dashboard/utils'
import { formatDate } from '@/app/workspace/[workspaceId]/logs/utils/format-date'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
export interface ExecutionLogItem {
id: string
executionId: string
startedAt: string
level: string
trigger: string
triggerUserId: string | null
triggerInputs: any
outputs: any
errorMessage: string | null
duration: number | null
cost: {
input: number
output: number
total: number
} | null
workflowName?: string
workflowColor?: string
}
export interface WorkflowDetailsData {
errorRates: LineChartPoint[]
durations?: LineChartPoint[]
executionCounts: LineChartPoint[]
logs: ExecutionLogItem[]
allLogs: ExecutionLogItem[]
}
export function WorkflowDetails({
workspaceId,
expandedWorkflowId,
workflowName,
overview,
details,
selectedSegmentIndex,
selectedSegment,
clearSegmentSelection,
formatCost,
}: {
workspaceId: string
expandedWorkflowId: string
workflowName: string
overview: { total: number; success: number; failures: number; rate: number }
details: WorkflowDetailsData | undefined
selectedSegmentIndex: number[] | null
selectedSegment: { timestamp: string; totalExecutions: number } | null
clearSegmentSelection: () => void
formatCost: (n: number) => string
}) {
const router = useRouter()
const { workflows } = useWorkflowRegistry()
const workflowColor = useMemo(
() => workflows[expandedWorkflowId]?.color || '#3972F6',
[workflows, expandedWorkflowId]
)
const [expandedRowId, setExpandedRowId] = useState<string | null>(null)
return (
<div className='mt-5 overflow-hidden rounded-[11px] border bg-card shadow-sm'>
<div className='border-b bg-muted/30 px-4 py-2.5'>
<div className='flex items-center justify-between'>
<div className='flex items-center gap-2'>
<button
onClick={() => router.push(`/workspace/${workspaceId}/w/${expandedWorkflowId}`)}
className='group inline-flex items-center gap-2 text-left'
>
<span
className='h-[14px] w-[14px] flex-shrink-0 rounded'
style={{ backgroundColor: workflowColor }}
/>
<span className='font-semibold text-sm tracking-tight group-hover:text-primary'>
{workflowName}
</span>
</button>
</div>
<div className='flex items-center gap-2'>
<div className='inline-flex h-7 items-center gap-2 rounded-[10px] border px-2.5'>
<span className='text-[11px] text-muted-foreground'>Executions</span>
<span className='font-semibold text-sm leading-none'>{overview.total}</span>
</div>
<div className='inline-flex h-7 items-center gap-2 rounded-[10px] border px-2.5'>
<span className='text-[11px] text-muted-foreground'>Success</span>
<span className='font-semibold text-sm leading-none'>
{overview.rate.toFixed(1)}%
</span>
</div>
<div className='inline-flex h-7 items-center gap-2 rounded-[10px] border px-2.5'>
<span className='text-[11px] text-muted-foreground'>Failures</span>
<span className='font-semibold text-sm leading-none'>{overview.failures}</span>
</div>
</div>
</div>
</div>
<div className='p-4'>
{details ? (
<>
{Array.isArray(selectedSegmentIndex) &&
selectedSegmentIndex.length > 0 &&
selectedSegment &&
(() => {
const tsObj = selectedSegment?.timestamp
? new Date(selectedSegment.timestamp)
: null
const tsLabel =
tsObj && !Number.isNaN(tsObj.getTime())
? tsObj.toLocaleString('en-US', {
month: 'short',
day: 'numeric',
hour: 'numeric',
minute: '2-digit',
hour12: true,
})
: 'Selected segment'
return (
<div className='mb-4 flex items-center justify-between rounded-lg border border-primary/30 bg-primary/10 px-4 py-2.5 text-foreground text-sm'>
<div className='flex items-center gap-2'>
<div className='h-2 w-2 animate-pulse rounded-full bg-primary ring-2 ring-primary/40' />
<span className='font-medium'>
Filtered to {tsLabel}
{selectedSegmentIndex.length > 1
? ` (+${selectedSegmentIndex.length - 1} more segment${selectedSegmentIndex.length - 1 > 1 ? 's' : ''})`
: ''}
{' — '}{selectedSegment.totalExecutions} execution
{selectedSegment.totalExecutions !== 1 ? 's' : ''}
</span>
</div>
<button
onClick={clearSegmentSelection}
className='rounded px-2 py-1 text-foreground text-xs hover:bg-primary/20 focus:outline-none focus:ring-2 focus:ring-primary/50'
>
Clear filter
</button>
</div>
)
})()}
{(() => {
const hasDuration = Array.isArray(details.durations) && details.durations.length > 0
const gridCols = hasDuration
? 'md:grid-cols-2 xl:grid-cols-4'
: 'md:grid-cols-2 xl:grid-cols-3'
return (
<div className={`mb-4 grid grid-cols-1 gap-4 ${gridCols}`}>
<LineChart
data={details.errorRates}
label='Error Rate'
color='#ef4444'
unit='%'
/>
{hasDuration && (
<LineChart
data={details.durations!}
label='Workflow Duration'
color='#3b82f6'
unit='ms'
/>
)}
<LineChart
data={details.executionCounts}
label='Usage'
color='#10b981'
unit=' execs'
/>
{(() => {
const failures = details.errorRates.map((e, i) => ({
timestamp: e.timestamp,
value: ((e.value || 0) / 100) * (details.executionCounts[i]?.value || 0),
}))
return <LineChart data={failures} label='Failures' color='#f59e0b' unit='' />
})()}
</div>
)
})()}
<div className='flex flex-1 flex-col overflow-hidden'>
<div className='w-full overflow-x-auto'>
<div>
<div className='border-border border-b'>
<div className='grid min-w-[980px] grid-cols-[140px_90px_90px_90px_180px_1fr_100px] gap-2 px-2 pb-3 md:gap-3 lg:min-w-0 lg:gap-4'>
<div className='font-[480] font-sans text-[13px] text-muted-foreground leading-normal'>
Time
</div>
<div className='font-[480] font-sans text-[13px] text-muted-foreground leading-normal'>
Status
</div>
<div className='font-[480] font-sans text-[13px] text-muted-foreground leading-normal'>
Trigger
</div>
<div className='font-[480] font-sans text-[13px] text-muted-foreground leading-normal'>
Cost
</div>
<div className='font-[480] font-sans text-[13px] text-muted-foreground leading-normal'>
Workflow
</div>
<div className='font-[480] font-sans text-[13px] text-muted-foreground leading-normal'>
Output
</div>
<div className='text-right font-[480] font-sans text-[13px] text-muted-foreground leading-normal'>
Duration
</div>
</div>
</div>
</div>
</div>
<div className='flex-1 overflow-auto' style={{ maxHeight: '400px' }}>
<div className='pb-4'>
{(() => {
const logsToDisplay = details.logs
if (logsToDisplay.length === 0) {
return (
<div className='flex h-full items-center justify-center py-8'>
<div className='flex items-center gap-2 text-muted-foreground'>
<Info className='h-5 w-5' />
<span className='text-sm'>
No executions found in this time segment
</span>
</div>
</div>
)
}
return logsToDisplay.map((log) => {
const logDate = log?.startedAt ? new Date(log.startedAt) : null
const formattedDate =
logDate && !Number.isNaN(logDate.getTime())
? formatDate(logDate.toISOString())
: ({ compactDate: '—', compactTime: '' } as any)
const outputsStr = log.outputs ? JSON.stringify(log.outputs) : '—'
const errorStr = log.errorMessage || ''
const isExpanded = expandedRowId === log.id
return (
<div
key={log.id}
className={cn(
'cursor-pointer border-border border-b transition-all duration-200',
isExpanded ? 'bg-accent/30' : 'hover:bg-accent/20'
)}
onClick={() =>
setExpandedRowId((prev) => (prev === log.id ? null : log.id))
}
>
<div className='grid min-w-[980px] grid-cols-[140px_90px_90px_90px_180px_1fr_100px] items-center gap-2 px-2 py-3 md:gap-3 lg:min-w-0 lg:gap-4'>
<div>
<div className='text-[13px]'>
<span className='font-sm text-muted-foreground'>
{formattedDate.compactDate}
</span>
<span
style={{ marginLeft: '8px' }}
className='hidden font-medium sm:inline'
>
{formattedDate.compactTime}
</span>
</div>
</div>
<div>
<div
className={cn(
'inline-flex items-center rounded-[8px] px-[6px] py-[2px] font-medium text-xs transition-all duration-200 lg:px-[8px]',
log.level === 'error'
? 'bg-red-500 text-white'
: 'bg-secondary text-card-foreground'
)}
>
{log.level}
</div>
</div>
<div>
{log.trigger ? (
<div
className={cn(
'inline-flex items-center rounded-[8px] px-[6px] py-[2px] font-medium text-xs transition-all duration-200 lg:px-[8px]',
log.trigger.toLowerCase() === 'manual'
? 'bg-secondary text-card-foreground'
: 'text-white'
)}
style={
log.trigger.toLowerCase() === 'manual'
? undefined
: { backgroundColor: getTriggerColor(log.trigger) }
}
>
{log.trigger}
</div>
) : (
<div className='text-muted-foreground text-xs'>—</div>
)}
</div>
<div>
<div className='font-medium text-muted-foreground text-xs'>
{log.cost && log.cost.total > 0 ? formatCost(log.cost.total) : '—'}
</div>
</div>
{/* Workflow cell */}
<div className='whitespace-nowrap'>
{log.workflowName ? (
<div className='inline-flex items-center gap-2'>
<span
className='h-3.5 w-3.5 rounded'
style={{ backgroundColor: log.workflowColor || '#64748b' }}
/>
<span
className='max-w-[150px] truncate text-muted-foreground text-xs'
title={log.workflowName}
>
{log.workflowName}
</span>
</div>
) : (
<span className='text-muted-foreground text-xs'>—</span>
)}
</div>
{/* Output cell */}
<div className='min-w-0 truncate whitespace-nowrap pr-2 text-[13px] text-muted-foreground'>
{log.level === 'error' && errorStr ? (
<span className='font-medium text-red-500 dark:text-red-400'>
{errorStr}
</span>
) : outputsStr.length > 220 ? (
`${outputsStr.slice(0, 217)}...`
) : (
outputsStr
)}
</div>
<div className='text-right'>
<div className='text-muted-foreground text-xs tabular-nums'>
{typeof log.duration === 'number' ? `${log.duration}ms` : '—'}
</div>
</div>
</div>
{isExpanded && (
<div className='px-2 pt-0 pb-4'>
<div className='rounded-md border bg-muted/30 p-2'>
<pre className='max-h-60 overflow-auto whitespace-pre-wrap break-words text-xs'>
{log.level === 'error' && errorStr ? errorStr : outputsStr}
</pre>
</div>
</div>
)}
</div>
)
})
})()}
</div>
</div>
</div>
</>
) : (
<div className='flex items-center justify-center py-12'>
<Loader2 className='h-6 w-6 animate-spin text-muted-foreground' />
</div>
)}
</div>
</div>
)
}
export default WorkflowDetails
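
One detail worth calling out: the "Failures" chart in this component is derived on the client rather than fetched; each point multiplies the error rate (a percentage) by the execution count of the same bin. A minimal sketch of that derivation, with a hypothetical helper name:

import type { LineChartPoint } from '@/app/workspace/[workspaceId]/logs/components/dashboard/line-chart'

function deriveFailures(
  errorRates: LineChartPoint[],
  executionCounts: LineChartPoint[]
): LineChartPoint[] {
  return errorRates.map((e, i) => ({
    timestamp: e.timestamp,
    // e.g. a bin with a 12.5% error rate and 40 executions yields 5 failures.
    value: ((e.value || 0) / 100) * (executionCounts[i]?.value || 0),
  }))
}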

View File

@@ -0,0 +1,166 @@
import { useMemo } from 'react'
import { ScrollArea } from '@/components/ui/scroll-area'
import StatusBar, {
type StatusBarSegment,
} from '@/app/workspace/[workspaceId]/logs/components/dashboard/status-bar'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
export interface WorkflowExecutionItem {
workflowId: string
workflowName: string
segments: StatusBarSegment[]
overallSuccessRate: number
}
export function WorkflowsList({
executions,
filteredExecutions,
expandedWorkflowId,
onToggleWorkflow,
selectedSegmentIndex,
onSegmentClick,
searchQuery,
segmentDurationMs,
}: {
executions: WorkflowExecutionItem[]
filteredExecutions: WorkflowExecutionItem[]
expandedWorkflowId: string | null
onToggleWorkflow: (workflowId: string) => void
selectedSegmentIndex: number[] | null
onSegmentClick: (
workflowId: string,
segmentIndex: number,
timestamp: string,
mode: 'single' | 'toggle' | 'range'
) => void
searchQuery: string
segmentDurationMs: number
}) {
const { workflows } = useWorkflowRegistry()
const segmentsCount = filteredExecutions[0]?.segments?.length || 120
const durationLabel = useMemo(() => {
const segMs = Math.max(1, Math.floor(segmentDurationMs || 0))
const days = Math.round(segMs / (24 * 60 * 60 * 1000))
if (days >= 1) return `${days} day${days !== 1 ? 's' : ''}`
const hours = Math.round(segMs / (60 * 60 * 1000))
if (hours >= 1) return `${hours} hour${hours !== 1 ? 's' : ''}`
const mins = Math.max(1, Math.round(segMs / (60 * 1000)))
return `${mins} minute${mins !== 1 ? 's' : ''}`
}, [segmentDurationMs])
const Axis = () => {
if (!filteredExecutions.length || !segmentsCount || !segmentDurationMs) return null
const firstTs = filteredExecutions[0]?.segments?.[0]?.timestamp
if (!firstTs) return null
const start = new Date(firstTs)
if (Number.isNaN(start.getTime())) return null
const totalMs = segmentsCount * segmentDurationMs
const end = new Date(start.getTime() + totalMs)
const midMs = start.getTime() + totalMs / 2
// Avoid duplicate labels by shifting mid tick slightly if it rounds identical to start/end
const mid = new Date(midMs + 60 * 1000)
const useDates = totalMs >= 24 * 60 * 60 * 1000
const fmt = (d: Date) => {
if (useDates) return d.toLocaleString('en-US', { month: 'short', day: 'numeric' })
return d.toLocaleString('en-US', { hour: 'numeric' })
}
return (
<div className='relative px-3 pt-2 pb-1'>
<div className='mr-[80px] ml-[224px]'>
<div className='relative h-4'>
<div className='-z-10 -translate-y-1/2 absolute inset-x-0 top-1/2 h-px bg-border' />
<div className='flex justify-between text-[10px] text-muted-foreground'>
<span>{fmt(start)}</span>
<span>{fmt(mid)}</span>
<span className='text-right'>{fmt(end)}</span>
</div>
</div>
</div>
</div>
)
}
function DynamicLegend() {
return (
<p className='mt-0.5 text-[11px] text-muted-foreground'>
Each cell covers {durationLabel} of the selected range. Click a cell to filter details.
</p>
)
}
return (
<div
className='overflow-hidden rounded-lg border bg-card shadow-sm'
style={{ maxHeight: '380px', display: 'flex', flexDirection: 'column' }}
>
<div className='flex-shrink-0 border-b bg-muted/30 px-4 py-2.5'>
<div className='flex items-center justify-between'>
<div>
<h3 className='font-medium text-sm'>Workflows</h3>
<DynamicLegend />
</div>
<span className='text-muted-foreground text-xs'>
{filteredExecutions.length} workflow
{filteredExecutions.length !== 1 ? 's' : ''}
{searchQuery && ` (filtered from ${executions.length})`}
</span>
</div>
</div>
<Axis />
<ScrollArea className='flex-1' style={{ height: 'calc(350px - 41px)' }}>
<div className='space-y-1 p-3'>
{filteredExecutions.length === 0 ? (
<div className='py-8 text-center text-muted-foreground text-sm'>
No workflows found matching "{searchQuery}"
</div>
) : (
filteredExecutions.map((workflow) => {
const isSelected = expandedWorkflowId === workflow.workflowId
return (
<div
key={workflow.workflowId}
className={`flex cursor-pointer items-center gap-4 rounded-lg px-2 py-1.5 transition-colors ${
isSelected ? 'bg-accent/40' : 'hover:bg-accent/20'
}`}
onClick={() => onToggleWorkflow(workflow.workflowId)}
>
<div className='w-52 min-w-0 flex-shrink-0'>
<div className='flex items-center gap-2'>
<div
className='h-[14px] w-[14px] flex-shrink-0 rounded'
style={{
backgroundColor: workflows[workflow.workflowId]?.color || '#64748b',
}}
/>
<h3 className='truncate font-medium text-sm'>{workflow.workflowName}</h3>
</div>
</div>
<div className='flex-1'>
<StatusBar
segments={workflow.segments}
selectedSegmentIndices={isSelected ? selectedSegmentIndex : null}
onSegmentClick={onSegmentClick as any}
workflowId={workflow.workflowId}
segmentDurationMs={segmentDurationMs}
/>
</div>
<div className='w-16 flex-shrink-0 text-right'>
<span className='font-medium text-muted-foreground text-sm'>
{workflow.overallSuccessRate.toFixed(1)}%
</span>
</div>
</div>
)
})
)}
</div>
</ScrollArea>
</div>
)
}
export default WorkflowsList
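
For reference, the per-cell duration label comes straight from the segment width. With the dashboard's 120 segments (see BAR_COUNT in the page component later in this diff), the arithmetic works out as in this sketch:

const BAR_COUNT = 120 // matches the dashboard page later in this diff

// 'Past 24 hours' -> 24h / 120 = 12 minutes per cell -> "12 minutes"
// 'Past 30 days'  -> 30d / 120 = 6 hours per cell    -> "6 hours"
const past24hCellMs = (24 * 60 * 60 * 1000) / BAR_COUNT // 720_000 ms
const past30dCellMs = (30 * 24 * 60 * 60 * 1000) / BAR_COUNT // 21_600_000 ms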

View File

@@ -12,7 +12,17 @@ import type { TimeRange } from '@/stores/logs/filters/types'
export default function Timeline() {
const { timeRange, setTimeRange } = useFilterStore()
const specificTimeRanges: TimeRange[] = ['Past 30 minutes', 'Past hour', 'Past 24 hours']
const specificTimeRanges: TimeRange[] = [
'Past 30 minutes',
'Past hour',
'Past 6 hours',
'Past 12 hours',
'Past 24 hours',
'Past 3 days',
'Past 7 days',
'Past 14 days',
'Past 30 days',
]
return (
<DropdownMenu>
@@ -27,13 +37,15 @@ export default function Timeline() {
</Button>
</DropdownMenuTrigger>
<DropdownMenuContent
align='start'
className='w-[180px] rounded-lg border-[#E5E5E5] bg-[#FFFFFF] shadow-xs dark:border-[#414141] dark:bg-[var(--surface-elevated)]'
side='bottom'
align='end'
sideOffset={6}
collisionPadding={8}
className='w-[220px] rounded-lg border-[#E5E5E5] bg-[#FFFFFF] shadow-xs dark:border-[#414141] dark:bg-[var(--surface-elevated)]'
>
<DropdownMenuItem
key='all'
onSelect={(e) => {
e.preventDefault()
onSelect={() => {
setTimeRange('All time')
}}
className='flex cursor-pointer items-center justify-between rounded-md px-3 py-2 font-[380] text-card-foreground text-sm hover:bg-secondary/50 focus:bg-secondary/50'
@@ -47,8 +59,7 @@ export default function Timeline() {
{specificTimeRanges.map((range) => (
<DropdownMenuItem
key={range}
onSelect={(e) => {
e.preventDefault()
onSelect={() => {
setTimeRange(range)
}}
className='flex cursor-pointer items-center justify-between rounded-md px-3 py-2 font-[380] text-card-foreground text-sm hover:bg-secondary/50 focus:bg-secondary/50'

View File

@@ -0,0 +1,896 @@
'use client'
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { Loader2 } from 'lucide-react'
import { useParams } from 'next/navigation'
import { soehne } from '@/app/fonts/soehne/soehne'
import Controls from '@/app/workspace/[workspaceId]/logs/components/dashboard/controls'
import KPIs from '@/app/workspace/[workspaceId]/logs/components/dashboard/kpis'
import WorkflowDetails from '@/app/workspace/[workspaceId]/logs/components/dashboard/workflow-details'
import WorkflowsList from '@/app/workspace/[workspaceId]/logs/components/dashboard/workflows-list'
import Timeline from '@/app/workspace/[workspaceId]/logs/components/filters/components/timeline'
import { formatCost } from '@/providers/utils'
import { useFilterStore } from '@/stores/logs/filters/store'
type TimeFilter = '30m' | '1h' | '6h' | '12h' | '24h' | '3d' | '7d' | '14d' | '30d'
interface WorkflowExecution {
workflowId: string
workflowName: string
segments: {
successRate: number // 0-100
timestamp: string
hasExecutions: boolean
totalExecutions: number
successfulExecutions: number
}[]
overallSuccessRate: number
}
const BAR_COUNT = 120
interface ExecutionLog {
id: string
executionId: string
startedAt: string
level: string
trigger: string
triggerUserId: string | null
triggerInputs: any
outputs: any
errorMessage: string | null
duration: number | null
cost: {
input: number
output: number
total: number
} | null
workflowName?: string
workflowColor?: string
}
interface WorkflowDetailsDataLocal {
errorRates: { timestamp: string; value: number }[]
durations: { timestamp: string; value: number }[]
executionCounts: { timestamp: string; value: number }[]
logs: ExecutionLog[]
allLogs: ExecutionLog[] // Unfiltered logs for time filtering
}
export default function ExecutionsDashboard() {
const params = useParams()
const workspaceId = params.workspaceId as string
const getTimeFilterFromRange = (range: string): TimeFilter => {
switch (range) {
case 'Past 30 minutes':
return '30m'
case 'Past hour':
return '1h'
case 'Past 6 hours':
return '6h'
case 'Past 12 hours':
return '12h'
case 'Past 24 hours':
return '24h'
case 'Past 3 days':
return '3d'
case 'Past 7 days':
return '7d'
case 'Past 14 days':
return '14d'
case 'Past 30 days':
return '30d'
default:
return '30d' // Treat "All time" as last 30 days to keep UI performant
}
}
const [endTime, setEndTime] = useState<Date>(new Date())
const [executions, setExecutions] = useState<WorkflowExecution[]>([])
const [loading, setLoading] = useState(true)
const [isRefetching, setIsRefetching] = useState(false)
const [error, setError] = useState<string | null>(null)
const [expandedWorkflowId, setExpandedWorkflowId] = useState<string | null>(null)
const [workflowDetails, setWorkflowDetails] = useState<Record<string, WorkflowDetailsDataLocal>>(
{}
)
const [globalDetails, setGlobalDetails] = useState<WorkflowDetailsDataLocal | null>(null)
const [aggregateSegments, setAggregateSegments] = useState<
{ timestamp: string; totalExecutions: number; successfulExecutions: number }[]
>([])
const [selectedSegmentIndices, setSelectedSegmentIndices] = useState<number[]>([])
const [lastAnchorIndex, setLastAnchorIndex] = useState<number | null>(null)
const [searchQuery, setSearchQuery] = useState('')
const {
workflowIds,
folderIds,
triggers,
viewMode,
setViewMode,
timeRange: sidebarTimeRange,
} = useFilterStore()
const timeFilter = getTimeFilterFromRange(sidebarTimeRange)
// Build lightweight chart series from a set of logs for a given window
const buildSeriesFromLogs = (
logs: ExecutionLog[],
start: Date,
end: Date,
bins = 10
): {
errorRates: { timestamp: string; value: number }[]
executionCounts: { timestamp: string; value: number }[]
durations: { timestamp: string; value: number }[]
} => {
const startMs = start.getTime()
const totalMs = Math.max(1, end.getTime() - startMs)
const binMs = Math.max(1, Math.floor(totalMs / Math.max(1, bins)))
const errorRates: { timestamp: string; value: number }[] = []
const executionCounts: { timestamp: string; value: number }[] = []
const durations: { timestamp: string; value: number }[] = []
for (let i = 0; i < bins; i++) {
const bStart = startMs + i * binMs
const bEnd = bStart + binMs
const binLogs = logs.filter((l) => {
const t = new Date(l.startedAt).getTime()
return t >= bStart && t < bEnd
})
const total = binLogs.length
const errors = binLogs.filter((l) => (l.level || '').toLowerCase() === 'error').length
const avgDuration =
total > 0
? Math.round(
binLogs.reduce((s, l) => s + (typeof l.duration === 'number' ? l.duration : 0), 0) /
total
)
: 0
const ts = new Date(bStart).toISOString()
errorRates.push({ timestamp: ts, value: total > 0 ? (errors / total) * 100 : 0 }) // error rate (%), matching the aggregate series below
executionCounts.push({ timestamp: ts, value: total })
durations.push({ timestamp: ts, value: avgDuration })
}
return { errorRates, executionCounts, durations }
}
// Filter executions based on search query
const filteredExecutions = searchQuery.trim()
? executions.filter((workflow) =>
workflow.workflowName.toLowerCase().includes(searchQuery.toLowerCase())
)
: executions
// Aggregate metrics across workflows for header KPIs
const aggregate = useMemo(() => {
let totalExecutions = 0
let successfulExecutions = 0
let activeWorkflows = 0
for (const wf of executions) {
let workflowHasExecutions = false
for (const seg of wf.segments) {
totalExecutions += seg.totalExecutions || 0
successfulExecutions += seg.successfulExecutions || 0
if (seg.hasExecutions) workflowHasExecutions = true
}
if (workflowHasExecutions) activeWorkflows += 1
}
const failedExecutions = Math.max(totalExecutions - successfulExecutions, 0)
const successRate = totalExecutions > 0 ? (successfulExecutions / totalExecutions) * 100 : 100
return {
totalExecutions,
successfulExecutions,
failedExecutions,
activeWorkflows,
successRate,
}
}, [executions])
const getStartTime = useCallback(() => {
const start = new Date(endTime)
switch (timeFilter) {
case '30m':
start.setMinutes(endTime.getMinutes() - 30)
break
case '1h':
start.setHours(endTime.getHours() - 1)
break
case '6h':
start.setHours(endTime.getHours() - 6)
break
case '12h':
start.setHours(endTime.getHours() - 12)
break
case '24h':
start.setHours(endTime.getHours() - 24)
break
case '3d':
start.setDate(endTime.getDate() - 3)
break
case '7d':
start.setDate(endTime.getDate() - 7)
break
case '14d':
start.setDate(endTime.getDate() - 14)
break
case '30d':
start.setDate(endTime.getDate() - 30)
break
default:
start.setHours(endTime.getHours() - 24)
}
return start
}, [endTime, timeFilter])
const fetchExecutions = useCallback(
async (isInitialLoad = false) => {
try {
if (isInitialLoad) {
setLoading(true)
} else {
setIsRefetching(true)
}
setError(null)
const startTime = getStartTime()
const params = new URLSearchParams({
segments: BAR_COUNT.toString(),
startTime: startTime.toISOString(),
endTime: endTime.toISOString(),
})
// Add workflow filters if any
if (workflowIds.length > 0) {
params.set('workflowIds', workflowIds.join(','))
}
// Add folder filters if any
if (folderIds.length > 0) {
params.set('folderIds', folderIds.join(','))
}
// Add trigger filters if any
if (triggers.length > 0) {
params.set('triggers', triggers.join(','))
}
const response = await fetch(
`/api/workspaces/${workspaceId}/execution-history?${params.toString()}`
)
if (!response.ok) {
throw new Error('Failed to fetch execution history')
}
const data = await response.json()
// Sort workflows by error rate (highest first)
const sortedWorkflows = [...data.workflows].sort((a, b) => {
const errorRateA = 100 - a.overallSuccessRate
const errorRateB = 100 - b.overallSuccessRate
return errorRateB - errorRateA
})
setExecutions(sortedWorkflows)
// Compute aggregate segments across all workflows
const segmentsCount: number = Number(params.get('segments') || BAR_COUNT)
const agg: { timestamp: string; totalExecutions: number; successfulExecutions: number }[] =
Array.from({ length: segmentsCount }, (_, i) => {
const base = startTime.getTime()
const span = endTime.getTime() - base
const tsNum = base + Math.floor((i * span) / segmentsCount)
const ts = new Date(tsNum)
return {
timestamp: Number.isNaN(ts.getTime()) ? new Date().toISOString() : ts.toISOString(),
totalExecutions: 0,
successfulExecutions: 0,
}
})
for (const wf of data.workflows as any[]) {
wf.segments.forEach((s: any, i: number) => {
const index = Math.min(i, segmentsCount - 1)
agg[index].totalExecutions += s.totalExecutions || 0
agg[index].successfulExecutions += s.successfulExecutions || 0
})
}
setAggregateSegments(agg)
// Build charts from aggregate
const errorRates = agg.map((s) => ({
timestamp: s.timestamp,
value: s.totalExecutions > 0 ? (1 - s.successfulExecutions / s.totalExecutions) * 100 : 0,
}))
const executionCounts = agg.map((s) => ({
timestamp: s.timestamp,
value: s.totalExecutions,
}))
// Fetch recent logs for the time window with current filters
const logsParams = new URLSearchParams({
limit: '50',
offset: '0',
workspaceId,
startDate: startTime.toISOString(),
endDate: endTime.toISOString(),
order: 'desc',
details: 'full',
})
if (workflowIds.length > 0) logsParams.set('workflowIds', workflowIds.join(','))
if (folderIds.length > 0) logsParams.set('folderIds', folderIds.join(','))
if (triggers.length > 0) logsParams.set('triggers', triggers.join(','))
const logsResponse = await fetch(`/api/logs?${logsParams.toString()}`)
let mappedLogs: ExecutionLog[] = []
if (logsResponse.ok) {
const logsData = await logsResponse.json()
mappedLogs = (logsData.data || []).map((l: any) => {
const started = l.startedAt
? new Date(l.startedAt)
: l.endedAt
? new Date(l.endedAt)
: null
const startedAt =
started && !Number.isNaN(started.getTime())
? started.toISOString()
: new Date().toISOString()
const durationCandidate =
typeof l.totalDurationMs === 'number'
? l.totalDurationMs
: typeof l.duration === 'number'
? l.duration
: typeof l.totalDurationMs === 'string'
? Number.parseInt(l.totalDurationMs.replace(/[^0-9]/g, ''), 10)
: typeof l.duration === 'string'
? Number.parseInt(l.duration.replace(/[^0-9]/g, ''), 10)
: null
// Extract a compact output for the table from executionData trace spans when available
let output: any = null
if (typeof l.output === 'string') {
output = l.output
} else if (l.executionData?.traceSpans && Array.isArray(l.executionData.traceSpans)) {
const spans: any[] = l.executionData.traceSpans
// Pick the last span that has an output or error-like payload
for (let i = spans.length - 1; i >= 0; i--) {
const s = spans[i]
if (s?.output && Object.keys(s.output).length > 0) {
output = s.output
break
}
if (s?.status === 'error' && (s?.output?.error || s?.error)) {
output = s.output?.error || s.error
break
}
}
if (!output && l.executionData?.output) {
output = l.executionData.output
}
}
if (!output) {
// Some executions store output under executionData.blockExecutions
const be = l.executionData?.blockExecutions
if (Array.isArray(be) && be.length > 0) {
const last = be[be.length - 1]
output = last?.outputData || last?.errorMessage || null
}
}
if (!output) output = l.message || null
return {
id: l.id,
executionId: l.executionId,
startedAt,
level: l.level || 'info',
trigger: l.trigger || 'manual',
triggerUserId: l.triggerUserId || null,
triggerInputs: undefined,
outputs: output || undefined,
errorMessage: l.error || null,
duration: Number.isFinite(durationCandidate as number)
? (durationCandidate as number)
: null,
cost: l.cost
? { input: l.cost.input || 0, output: l.cost.output || 0, total: l.cost.total || 0 }
: null,
workflowName: l.workflowName || l.workflow?.name,
workflowColor: l.workflowColor || l.workflow?.color,
} as ExecutionLog
})
}
setGlobalDetails({
errorRates,
durations: [],
executionCounts,
logs: mappedLogs,
allLogs: mappedLogs,
})
} catch (err) {
console.error('Error fetching executions:', err)
setError(err instanceof Error ? err.message : 'An error occurred')
} finally {
setLoading(false)
setIsRefetching(false)
}
},
[workspaceId, timeFilter, endTime, getStartTime, workflowIds, folderIds, triggers]
)
const fetchWorkflowDetails = useCallback(
async (workflowId: string, silent = false) => {
try {
const startTime = getStartTime()
const params = new URLSearchParams({
startTime: startTime.toISOString(),
endTime: endTime.toISOString(),
})
// Add trigger filters if any
if (triggers.length > 0) {
params.set('triggers', triggers.join(','))
}
const response = await fetch(
`/api/workspaces/${workspaceId}/execution-history/${workflowId}?${params.toString()}`
)
if (!response.ok) {
throw new Error('Failed to fetch workflow details')
}
const data = await response.json()
// Store both filtered and all logs - update smoothly without clearing
setWorkflowDetails((prev) => ({
...prev,
[workflowId]: {
...data,
allLogs: data.logs, // Keep a copy of all logs for filtering
},
}))
} catch (err) {
console.error('Error fetching workflow details:', err)
}
},
[workspaceId, endTime, getStartTime, triggers]
)
const toggleWorkflow = useCallback(
(workflowId: string) => {
if (expandedWorkflowId === workflowId) {
setExpandedWorkflowId(null)
setSelectedSegmentIndices([])
setLastAnchorIndex(null)
} else {
setExpandedWorkflowId(workflowId)
setSelectedSegmentIndices([])
setLastAnchorIndex(null)
if (!workflowDetails[workflowId]) {
fetchWorkflowDetails(workflowId)
}
}
},
[expandedWorkflowId, workflowDetails, fetchWorkflowDetails]
)
const handleSegmentClick = useCallback(
(
workflowId: string,
segmentIndex: number,
_timestamp: string,
mode: 'single' | 'toggle' | 'range'
) => {
// Open the workflow details if not already open
if (expandedWorkflowId !== workflowId) {
setExpandedWorkflowId(workflowId)
if (!workflowDetails[workflowId]) {
fetchWorkflowDetails(workflowId)
}
// Select the segment when opening a new workflow
setSelectedSegmentIndices([segmentIndex])
setLastAnchorIndex(segmentIndex)
} else {
setSelectedSegmentIndices((prev) => {
if (mode === 'single') {
setLastAnchorIndex(segmentIndex)
// If already selected, deselect it; otherwise select only this one
if (prev.includes(segmentIndex)) {
return prev.filter((i) => i !== segmentIndex)
}
return [segmentIndex]
}
if (mode === 'toggle') {
const exists = prev.includes(segmentIndex)
const next = exists ? prev.filter((i) => i !== segmentIndex) : [...prev, segmentIndex]
setLastAnchorIndex(segmentIndex)
return next.sort((a, b) => a - b)
}
// range mode
const anchor = lastAnchorIndex ?? segmentIndex
const [start, end] =
anchor < segmentIndex ? [anchor, segmentIndex] : [segmentIndex, anchor]
const range = Array.from({ length: end - start + 1 }, (_, i) => start + i)
const union = new Set([...(prev || []), ...range])
return Array.from(union).sort((a, b) => a - b)
})
}
},
[expandedWorkflowId, workflowDetails, fetchWorkflowDetails, lastAnchorIndex]
)
// Initial load and refetch on dependencies change
const isInitialMount = useRef(true)
useEffect(() => {
const isInitial = isInitialMount.current
if (isInitial) {
isInitialMount.current = false
}
fetchExecutions(isInitial)
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [workspaceId, timeFilter, endTime, workflowIds, folderIds, triggers])
// Refetch workflow details when time, filters, or expanded workflow changes
useEffect(() => {
if (expandedWorkflowId) {
fetchWorkflowDetails(expandedWorkflowId)
}
}, [expandedWorkflowId, timeFilter, endTime, workflowIds, folderIds, fetchWorkflowDetails])
// Clear segment selection when time or filters change
useEffect(() => {
setSelectedSegmentIndices([])
setLastAnchorIndex(null)
}, [timeFilter, endTime, workflowIds, folderIds, triggers])
const getShiftLabel = () => {
switch (sidebarTimeRange) {
case 'Past 30 minutes':
return '30 minutes'
case 'Past hour':
return 'hour'
case 'Past 12 hours':
return '12 hours'
case 'Past 24 hours':
return '24 hours'
default:
return 'period'
}
}
const getDateRange = () => {
const start = getStartTime()
return `${start.toLocaleDateString('en-US', { month: 'short', day: 'numeric', hour: 'numeric', minute: '2-digit' })} - ${endTime.toLocaleDateString('en-US', { month: 'short', day: 'numeric', hour: 'numeric', minute: '2-digit', year: 'numeric' })}`
}
const shiftTimeWindow = (direction: 'back' | 'forward') => {
let shift: number
switch (timeFilter) {
case '30m':
shift = 30 * 60 * 1000
break
case '1h':
shift = 60 * 60 * 1000
break
case '6h':
shift = 6 * 60 * 60 * 1000
break
case '12h':
shift = 12 * 60 * 60 * 1000
break
case '24h':
shift = 24 * 60 * 60 * 1000
break
case '3d':
shift = 3 * 24 * 60 * 60 * 1000
break
case '7d':
shift = 7 * 24 * 60 * 60 * 1000
break
case '14d':
shift = 14 * 24 * 60 * 60 * 1000
break
case '30d':
shift = 30 * 24 * 60 * 60 * 1000
break
default:
shift = 24 * 60 * 60 * 1000
}
setEndTime((prev) => new Date(prev.getTime() + (direction === 'forward' ? shift : -shift)))
}
const resetToNow = () => {
setEndTime(new Date())
}
const isLive = endTime.getTime() > Date.now() - 60000 // Within last minute
const [live, setLive] = useState(false)
useEffect(() => {
let interval: any
if (live) {
interval = setInterval(() => {
resetToNow()
}, 5000)
}
return () => {
if (interval) clearInterval(interval)
}
}, [live])
return (
<div className={`flex h-full min-w-0 flex-col pl-64 ${soehne.className}`}>
<div className='flex min-w-0 flex-1 overflow-hidden'>
<div
className='flex flex-1 flex-col overflow-y-scroll p-6'
style={{ scrollbarGutter: 'stable' }}
>
{/* Controls */}
<Controls
searchQuery={searchQuery}
setSearchQuery={setSearchQuery}
isRefetching={isRefetching}
resetToNow={resetToNow}
live={live}
setLive={setLive}
viewMode={viewMode as string}
setViewMode={setViewMode as (mode: 'logs' | 'dashboard') => void}
/>
{/* Content */}
{loading ? (
<div className='flex flex-1 items-center justify-center'>
<div className='flex items-center gap-2 text-muted-foreground'>
<Loader2 className='h-5 w-5 animate-spin' />
<span>Loading execution history...</span>
</div>
</div>
) : error ? (
<div className='flex flex-1 items-center justify-center'>
<div className='text-destructive'>
<p className='font-medium'>Error loading data</p>
<p className='text-sm'>{error}</p>
</div>
</div>
) : executions.length === 0 ? (
<div className='flex flex-1 items-center justify-center'>
<div className='text-center text-muted-foreground'>
<p className='font-medium'>No execution history</p>
<p className='mt-1 text-sm'>Execute some workflows to see their history here</p>
</div>
</div>
) : (
<>
{/* Time Range Display */}
<div className='mb-4 flex items-center justify-between'>
<div className='flex items-center gap-3'>
<span className='font-medium text-muted-foreground text-sm'>
{getDateRange()}
</span>
{/* Removed the "Historical" badge per design feedback */}
{(workflowIds.length > 0 || folderIds.length > 0 || triggers.length > 0) && (
<div className='flex items-center gap-2 text-muted-foreground text-xs'>
<span>Filters:</span>
{workflowIds.length > 0 && (
<span className='inline-flex items-center rounded-md bg-primary/10 px-2 py-0.5 text-primary text-xs'>
{workflowIds.length} workflow{workflowIds.length !== 1 ? 's' : ''}
</span>
)}
{folderIds.length > 0 && (
<span className='inline-flex items-center rounded-md bg-primary/10 px-2 py-0.5 text-primary text-xs'>
{folderIds.length} folder{folderIds.length !== 1 ? 's' : ''}
</span>
)}
{triggers.length > 0 && (
<span className='inline-flex items-center rounded-md bg-primary/10 px-2 py-0.5 text-primary text-xs'>
{triggers.length} trigger{triggers.length !== 1 ? 's' : ''}
</span>
)}
</div>
)}
</div>
{/* Time Controls */}
<div className='flex items-center gap-2'>
<div className='mr-2 hidden sm:block'>
<Timeline />
</div>
</div>
</div>
{/* KPIs */}
<KPIs aggregate={aggregate} />
<WorkflowsList
executions={executions as any}
filteredExecutions={filteredExecutions as any}
expandedWorkflowId={expandedWorkflowId}
onToggleWorkflow={toggleWorkflow}
selectedSegmentIndex={selectedSegmentIndices as any}
onSegmentClick={handleSegmentClick}
searchQuery={searchQuery}
segmentDurationMs={(endTime.getTime() - getStartTime().getTime()) / BAR_COUNT}
/>
{/* Details section below the entire bars component - always visible */}
{(() => {
if (expandedWorkflowId) {
const wf = executions.find((w) => w.workflowId === expandedWorkflowId)
if (!wf) return null
const total = wf.segments.reduce((s, x) => s + (x.totalExecutions || 0), 0)
const success = wf.segments.reduce((s, x) => s + (x.successfulExecutions || 0), 0)
const failures = Math.max(total - success, 0)
const rate = total > 0 ? (success / total) * 100 : 100
// Prepare filtered logs for details
const details = workflowDetails[expandedWorkflowId]
let logsToDisplay = details?.logs || []
if (details && selectedSegmentIndices.length > 0) {
const totalMs = endTime.getTime() - getStartTime().getTime()
const segMs = totalMs / BAR_COUNT
const windows = selectedSegmentIndices
.map((idx) => wf.segments[idx])
.filter(Boolean)
.map((s) => {
const start = new Date(s.timestamp).getTime()
const end = start + segMs
return { start, end }
})
const inAnyWindow = (t: number) =>
windows.some((w) => t >= w.start && t < w.end)
logsToDisplay = details.allLogs
.filter((log) => inAnyWindow(new Date(log.startedAt).getTime()))
.map((log) => ({
// Ensure workflow name is visible in the table for multi-select
...log,
workflowName: (log as any).workflowName || wf.workflowName,
}))
const minStart = new Date(Math.min(...windows.map((w) => w.start)))
const maxEnd = new Date(Math.max(...windows.map((w) => w.end)))
let filteredErrorRates = (details.errorRates || []).filter((p: any) =>
inAnyWindow(new Date(p.timestamp).getTime())
)
let filteredDurations = (
Array.isArray((details as any).durations) ? (details as any).durations : []
).filter((p: any) => inAnyWindow(new Date(p.timestamp).getTime()))
let filteredExecutionCounts = (details.executionCounts || []).filter((p: any) =>
inAnyWindow(new Date(p.timestamp).getTime())
)
if (filteredErrorRates.length === 0 || filteredExecutionCounts.length === 0) {
const series = buildSeriesFromLogs(logsToDisplay, minStart, maxEnd, 8)
filteredErrorRates = series.errorRates
filteredExecutionCounts = series.executionCounts
filteredDurations = series.durations
}
;(details as any).__filtered = {
errorRates: filteredErrorRates,
durations: filteredDurations,
executionCounts: filteredExecutionCounts,
}
}
const detailsWithFilteredLogs = details
? {
...details,
logs: logsToDisplay,
errorRates:
(details as any).__filtered?.errorRates ||
details.errorRates ||
buildSeriesFromLogs(
logsToDisplay,
new Date(
wf.segments[0]?.timestamp ||
logsToDisplay[0]?.startedAt ||
new Date().toISOString()
),
endTime,
8
).errorRates,
durations:
(details as any).__filtered?.durations ||
(details as any).durations ||
buildSeriesFromLogs(
logsToDisplay,
new Date(
wf.segments[0]?.timestamp ||
logsToDisplay[0]?.startedAt ||
new Date().toISOString()
),
endTime,
8
).durations,
executionCounts:
(details as any).__filtered?.executionCounts ||
details.executionCounts ||
buildSeriesFromLogs(
logsToDisplay,
new Date(
wf.segments[0]?.timestamp ||
logsToDisplay[0]?.startedAt ||
new Date().toISOString()
),
endTime,
8
).executionCounts,
}
: undefined
const selectedSegment =
selectedSegmentIndices.length === 1
? wf.segments[selectedSegmentIndices[0]]
: null
return (
<WorkflowDetails
workspaceId={workspaceId}
expandedWorkflowId={expandedWorkflowId}
workflowName={wf.workflowName}
overview={{ total, success, failures, rate }}
details={detailsWithFilteredLogs as any}
selectedSegmentIndex={selectedSegmentIndices}
selectedSegment={
selectedSegment
? {
timestamp: selectedSegment.timestamp,
totalExecutions: selectedSegment.totalExecutions,
}
: null
}
clearSegmentSelection={() => {
setSelectedSegmentIndices([])
setLastAnchorIndex(null)
}}
formatCost={formatCost}
/>
)
}
// Aggregate view for all workflows
if (!globalDetails) return null
const totals = aggregateSegments.reduce(
(acc, s) => {
acc.total += s.totalExecutions
acc.success += s.successfulExecutions
return acc
},
{ total: 0, success: 0 }
)
const failures = Math.max(totals.total - totals.success, 0)
const rate = totals.total > 0 ? (totals.success / totals.total) * 100 : 100
return (
<WorkflowDetails
workspaceId={workspaceId}
expandedWorkflowId={'all'}
workflowName={'All workflows'}
overview={{ total: totals.total, success: totals.success, failures, rate }}
details={globalDetails as any}
selectedSegmentIndex={[]}
selectedSegment={null}
clearSegmentSelection={() => {
setSelectedSegmentIndices([])
setLastAnchorIndex(null)
}}
formatCost={formatCost}
/>
)
})()}
</>
)}
</div>
</div>
</div>
)
}
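The segment filter above turns the selected bar indices into absolute [start, end) time windows and keeps only the logs whose start time falls inside one of them. A minimal sketch of that check, assuming segments carry ISO timestamp strings and the chart is split into BAR_COUNT equal buckets (types and names here are illustrative, not the component's exact ones):

interface Segment { timestamp: string }
interface LogRow { startedAt: string }
interface Window { start: number; end: number }

// Build [start, end) windows for the selected bar indices.
function buildWindows(
  segments: Segment[],
  selectedIndices: number[],
  rangeStartMs: number,
  rangeEndMs: number,
  barCount: number
): Window[] {
  const segMs = (rangeEndMs - rangeStartMs) / barCount
  return selectedIndices
    .map((idx) => segments[idx] as Segment | undefined)
    .filter((s): s is Segment => Boolean(s))
    .map((s) => {
      const start = new Date(s.timestamp).getTime()
      return { start, end: start + segMs }
    })
}

// Keep only logs whose start time falls inside any selected window.
function filterLogsToWindows(logs: LogRow[], windows: Window[]): LogRow[] {
  const inAnyWindow = (t: number) => windows.some((w) => t >= w.start && t < w.end)
  return logs.filter((log) => inAnyWindow(new Date(log.startedAt).getTime()))
}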

View File

@@ -1,15 +1,15 @@
'use client'
import { useCallback, useEffect, useRef, useState } from 'react'
import { AlertCircle, Info, Loader2, Play, RefreshCw, Square } from 'lucide-react'
import { AlertCircle, Info, Loader2 } from 'lucide-react'
import { useParams } from 'next/navigation'
import { Button } from '@/components/ui/button'
import { Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui/tooltip'
import { createLogger } from '@/lib/logs/console/logger'
import { parseQuery, queryToApiParams } from '@/lib/logs/query-parser'
import { cn } from '@/lib/utils'
import Controls from '@/app/workspace/[workspaceId]/logs/components/dashboard/controls'
import { AutocompleteSearch } from '@/app/workspace/[workspaceId]/logs/components/search/search'
import { Sidebar } from '@/app/workspace/[workspaceId]/logs/components/sidebar/sidebar'
import ExecutionsDashboard from '@/app/workspace/[workspaceId]/logs/executions-dashboard'
import { formatDate } from '@/app/workspace/[workspaceId]/logs/utils/format-date'
import { useDebounce } from '@/hooks/use-debounce'
import { useFolderStore } from '@/stores/folders/store'
@@ -76,6 +76,8 @@ export default function Logs() {
searchQuery: storeSearchQuery,
setSearchQuery: setStoreSearchQuery,
triggers,
viewMode,
setViewMode,
} = useFilterStore()
useEffect(() => {
@@ -661,8 +663,13 @@ export default function Logs() {
return () => window.removeEventListener('keydown', handleKeyDown)
}, [logs, selectedLogIndex, isSidebarOpen, selectedLog, handleNavigateNext, handleNavigatePrev])
// If in dashboard mode, show the dashboard
if (viewMode === 'dashboard') {
return <ExecutionsDashboard />
}
return (
<div className='flex h-[100vh] min-w-0 flex-col pl-64'>
<div className='flex h-full min-w-0 flex-col pl-64'>
{/* Add the animation styles */}
<style jsx global>
{selectedRowAnimation}
@@ -670,92 +677,28 @@ export default function Logs() {
<div className='flex min-w-0 flex-1 overflow-hidden'>
<div className='flex flex-1 flex-col overflow-auto p-6'>
{/* Header */}
<div className='mb-5'>
<h1 className='font-sans font-semibold text-3xl text-foreground tracking-[0.01em]'>
Logs
</h1>
</div>
{/* Search and Controls */}
<div className='mb-8 flex flex-col items-stretch justify-between gap-4 sm:flex-row sm:items-start'>
<AutocompleteSearch
value={searchQuery}
onChange={setSearchQuery}
placeholder='Search logs...'
availableWorkflows={availableWorkflows}
availableFolders={availableFolders}
onOpenChange={(open) => {
isSearchOpenRef.current = open
}}
/>
<div className='ml-auto flex flex-shrink-0 items-center gap-3'>
<Tooltip>
<TooltipTrigger asChild>
<Button
variant='ghost'
size='icon'
onClick={handleRefresh}
className='h-9 rounded-[11px] hover:bg-secondary'
disabled={isRefreshing}
>
{isRefreshing ? (
<Loader2 className='h-5 w-5 animate-spin' />
) : (
<RefreshCw className='h-5 w-5' />
)}
<span className='sr-only'>Refresh</span>
</Button>
</TooltipTrigger>
<TooltipContent>{isRefreshing ? 'Refreshing...' : 'Refresh'}</TooltipContent>
</Tooltip>
<Tooltip>
<TooltipTrigger asChild>
<Button
variant='ghost'
size='icon'
onClick={handleExport}
className='h-9 rounded-[11px] hover:bg-secondary'
aria-label='Export CSV'
>
{/* Download icon */}
<svg
xmlns='http://www.w3.org/2000/svg'
viewBox='0 0 24 24'
fill='none'
stroke='currentColor'
strokeWidth='2'
className='h-5 w-5'
>
<path d='M21 15v4a2 2 0 0 1-2 2H5a2 2 0 0 1-2-2v-4' />
<polyline points='7 10 12 15 17 10' />
<line x1='12' y1='15' x2='12' y2='3' />
</svg>
<span className='sr-only'>Export CSV</span>
</Button>
</TooltipTrigger>
<TooltipContent>Export CSV</TooltipContent>
</Tooltip>
<Button
className={`group h-9 gap-2 rounded-[11px] border bg-card text-card-foreground shadow-xs transition-all duration-200 hover:border-[var(--brand-primary-hex)] hover:bg-[var(--brand-primary-hex)] hover:text-white ${
isLive
? 'border-[var(--brand-primary-hex)] bg-[var(--brand-primary-hex)] text-white'
: 'border-border'
}`}
onClick={toggleLive}
>
{isLive ? (
<Square className='!h-3.5 !w-3.5 fill-current' />
) : (
<Play className='!h-3.5 !w-3.5 group-hover:fill-current' />
)}
<span>Live</span>
</Button>
</div>
</div>
<Controls
isRefetching={isRefreshing}
resetToNow={handleRefresh}
live={isLive}
setLive={(fn) => setIsLive(fn)}
viewMode={viewMode as string}
setViewMode={setViewMode as (mode: 'logs' | 'dashboard') => void}
searchComponent={
<AutocompleteSearch
value={searchQuery}
onChange={setSearchQuery}
placeholder='Search logs...'
availableWorkflows={availableWorkflows}
availableFolders={availableFolders}
onOpenChange={(open) => {
isSearchOpenRef.current = open
}}
/>
}
showExport={true}
onExport={handleExport}
/>
{/* Table container */}
<div className='flex flex-1 flex-col overflow-hidden'>
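The dashboard mode is driven entirely by a viewMode flag in the shared filter store: the page early-returns <ExecutionsDashboard /> when it is 'dashboard', and the Controls bar flips it back. A hedged sketch of that slice with zustand; the real store in @/stores/logs/filters/store holds many more filters, and this only shows the toggle:

import { create } from 'zustand'

type ViewMode = 'logs' | 'dashboard'

interface FilterSlice {
  viewMode: ViewMode
  setViewMode: (mode: ViewMode) => void
}

// Components read viewMode to pick the log table or the executions
// dashboard; the Controls bar calls setViewMode to switch between them.
export const useLogsViewStore = create<FilterSlice>((set) => ({
  viewMode: 'logs',
  setViewMode: (mode) => set({ viewMode: mode }),
}))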

View File

@@ -86,8 +86,7 @@ export function ControlBar({ hasValidationErrors = false }: ControlBarProps) {
const workspaceId = params.workspaceId as string
// Store hooks
const { history, revertToHistoryState, lastSaved, setNeedsRedeploymentFlag, blocks } =
useWorkflowStore()
const { lastSaved, setNeedsRedeploymentFlag, blocks } = useWorkflowStore()
const {
workflows,
updateWorkflow,

View File

@@ -19,6 +19,7 @@ interface ChannelSelectorInputProps {
onChannelSelect?: (channelId: string) => void
isPreview?: boolean
previewValue?: any | null
previewContextValues?: Record<string, any>
}
export function ChannelSelectorInput({
@@ -28,15 +29,18 @@ export function ChannelSelectorInput({
onChannelSelect,
isPreview = false,
previewValue,
previewContextValues,
}: ChannelSelectorInputProps) {
const params = useParams()
const workflowIdFromUrl = (params?.workflowId as string) || ''
// Use the proper hook to get the current value and setter (same as file-selector)
const [storeValue, setStoreValue] = useSubBlockValue(blockId, subBlock.id)
// Reactive upstream fields
const [authMethod] = useSubBlockValue(blockId, 'authMethod')
const [botToken] = useSubBlockValue(blockId, 'botToken')
const [connectedCredential] = useSubBlockValue(blockId, 'credential')
const effectiveAuthMethod = previewContextValues?.authMethod ?? authMethod
const effectiveBotToken = previewContextValues?.botToken ?? botToken
const effectiveCredential = previewContextValues?.credential ?? connectedCredential
const [selectedChannelId, setSelectedChannelId] = useState<string>('')
const [_channelInfo, setChannelInfo] = useState<SlackChannelInfo | null>(null)
@@ -49,16 +53,16 @@ export function ChannelSelectorInput({
isPreview,
})
// Choose credential strictly based on auth method
// Choose credential strictly based on auth method - use effective values
const credential: string =
(authMethod as string) === 'bot_token'
? (botToken as string) || ''
: (connectedCredential as string) || ''
(effectiveAuthMethod as string) === 'bot_token'
? (effectiveBotToken as string) || ''
: (effectiveCredential as string) || ''
// Determine if connected OAuth credential is foreign (not applicable for bot tokens)
const { isForeignCredential } = useForeignCredential(
'slack',
(authMethod as string) === 'bot_token' ? '' : (connectedCredential as string) || ''
(effectiveAuthMethod as string) === 'bot_token' ? '' : (effectiveCredential as string) || ''
)
// Get the current value from the store or prop value if in preview mode (same pattern as file-selector)
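The effective* values let tool previews inject upstream field values without writing to the store: each field prefers the entry in previewContextValues and falls back to the live sub-block value. The precedence rule, isolated (helper name is illustrative):

// Prefer an explicit preview value; otherwise use the live store value.
function effectiveValue<T>(
  previewContextValues: Record<string, unknown> | undefined,
  key: string,
  liveValue: T
): T {
  return (previewContextValues?.[key] ?? liveValue) as T
}

// Mirrors the component:
// const effectiveAuthMethod = effectiveValue(previewContextValues, 'authMethod', authMethod)
// const effectiveBotToken = effectiveValue(previewContextValues, 'botToken', botToken)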

View File

@@ -44,10 +44,20 @@ export function Dropdown({
const inputRef = useRef<HTMLInputElement>(null)
const dropdownRef = useRef<HTMLDivElement>(null)
const previousModeRef = useRef<string | null>(null)
// For response dataMode conversion - get builderData and data sub-blocks
const [builderData] = useSubBlockValue<any[]>(blockId, 'builderData')
const [, setData] = useSubBlockValue<string>(blockId, 'data')
const [builderData, setBuilderData] = useSubBlockValue<any[]>(blockId, 'builderData')
const [data, setData] = useSubBlockValue<string>(blockId, 'data')
// Keep refs with latest values to avoid stale closures
const builderDataRef = useRef(builderData)
const dataRef = useRef(data)
useEffect(() => {
builderDataRef.current = builderData
dataRef.current = data
}, [builderData, data])
// Use preview value when in preview mode, otherwise use store value or prop value
const value = isPreview ? previewValue : propValue !== undefined ? propValue : storeValue
@@ -103,23 +113,89 @@ export function Dropdown({
}
}, [storeInitialized, value, defaultOptionValue, setStoreValue])
// Helper function to normalize variable references in JSON strings
const normalizeVariableReferences = (jsonString: string): string => {
// Replace unquoted variable references with quoted ones
// Pattern: <variable.name> -> "<variable.name>"
return jsonString.replace(/(^|[^"])(<[^<>"]+>)/g, '$1"$2"')
}
// Helper function to convert JSON string to builder data format
const convertJsonToBuilderData = (jsonString: string): any[] => {
try {
// Always normalize variable references first
const normalizedJson = normalizeVariableReferences(jsonString)
const parsed = JSON.parse(normalizedJson)
if (typeof parsed === 'object' && parsed !== null && !Array.isArray(parsed)) {
return Object.entries(parsed).map(([key, value]) => {
const fieldType = inferType(value)
const fieldValue =
fieldType === 'object' || fieldType === 'array' ? JSON.stringify(value, null, 2) : value
return {
id: crypto.randomUUID(),
name: key,
type: fieldType,
value: fieldValue,
collapsed: false,
}
})
}
return []
} catch (error) {
return []
}
}
// Helper function to infer field type from value
const inferType = (value: any): 'string' | 'number' | 'boolean' | 'object' | 'array' => {
if (typeof value === 'boolean') return 'boolean'
if (typeof value === 'number') return 'number'
if (Array.isArray(value)) return 'array'
if (typeof value === 'object' && value !== null) return 'object'
return 'string'
}
// Handle data conversion when dataMode changes
useEffect(() => {
if (subBlockId !== 'dataMode' || isPreview || disabled) return
const currentMode = storeValue
const previousMode = previousModeRef.current
// Only convert if the mode actually changed
if (previousMode !== null && previousMode !== currentMode) {
// Builder to Editor mode (structured → json)
if (currentMode === 'json' && previousMode === 'structured') {
const currentBuilderData = builderDataRef.current
if (
currentBuilderData &&
Array.isArray(currentBuilderData) &&
currentBuilderData.length > 0
) {
const jsonString = ResponseBlockHandler.convertBuilderDataToJsonString(currentBuilderData)
setData(jsonString)
}
}
// Editor to Builder mode (json → structured)
else if (currentMode === 'structured' && previousMode === 'json') {
const currentData = dataRef.current
if (currentData && typeof currentData === 'string' && currentData.trim().length > 0) {
const builderArray = convertJsonToBuilderData(currentData)
setBuilderData(builderArray)
}
}
}
// Update the previous mode ref
previousModeRef.current = currentMode
}, [storeValue, subBlockId, isPreview, disabled, setData, setBuilderData])
// Event handlers
const handleSelect = (selectedValue: string) => {
if (!isPreview && !disabled) {
// Handle conversion when switching from Builder to Editor mode in response blocks
if (
subBlockId === 'dataMode' &&
storeValue === 'structured' &&
selectedValue === 'json' &&
builderData &&
Array.isArray(builderData) &&
builderData.length > 0
) {
// Convert builderData to JSON string for editor mode
const jsonString = ResponseBlockHandler.convertBuilderDataToJsonString(builderData)
setData(jsonString)
}
setStoreValue(selectedValue)
}
setOpen(false)
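The mode-change effect converts in both directions: builder rows are serialized to a JSON string when switching to the editor, and a JSON string is parsed back into rows when returning to the builder. A hedged sketch of the structured → json half, using the row shape from convertJsonToBuilderData above; ResponseBlockHandler.convertBuilderDataToJsonString is the project's own helper, and this stand-in only illustrates the idea:

interface BuilderField {
  id: string
  name: string
  type: 'string' | 'number' | 'boolean' | 'object' | 'array'
  value: unknown
  collapsed: boolean
}

// Illustrative stand-in: fold builder rows into a pretty-printed JSON object,
// re-parsing object/array values that the builder stores as strings.
function builderDataToJsonString(fields: BuilderField[]): string {
  const obj: Record<string, unknown> = {}
  for (const field of fields) {
    if ((field.type === 'object' || field.type === 'array') && typeof field.value === 'string') {
      try {
        obj[field.name] = JSON.parse(field.value)
      } catch {
        obj[field.name] = field.value
      }
    } else {
      obj[field.name] = field.value
    }
  }
  return JSON.stringify(obj, null, 2)
}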

View File

@@ -1,4 +1,4 @@
import { useEffect, useState } from 'react'
import { useCallback, useEffect, useState } from 'react'
import { Calendar, ExternalLink } from 'lucide-react'
import { useParams } from 'next/navigation'
import { Button } from '@/components/ui/button'
@@ -33,17 +33,23 @@ export function ScheduleConfig({
disabled = false,
}: ScheduleConfigProps) {
const [error, setError] = useState<string | null>(null)
const [scheduleId, setScheduleId] = useState<string | null>(null)
const [nextRunAt, setNextRunAt] = useState<string | null>(null)
const [lastRanAt, setLastRanAt] = useState<string | null>(null)
const [cronExpression, setCronExpression] = useState<string | null>(null)
const [timezone, setTimezone] = useState<string>('UTC')
const [scheduleData, setScheduleData] = useState<{
id: string | null
nextRunAt: string | null
lastRanAt: string | null
cronExpression: string | null
timezone: string
}>({
id: null,
nextRunAt: null,
lastRanAt: null,
cronExpression: null,
timezone: 'UTC',
})
const [isLoading, setIsLoading] = useState(false)
const [isSaving, setIsSaving] = useState(false)
const [isDeleting, setIsDeleting] = useState(false)
const [isModalOpen, setIsModalOpen] = useState(false)
// Track when we need to force a refresh of schedule data
const [refreshCounter, setRefreshCounter] = useState(0)
const params = useParams()
const workflowId = params.workflowId as string
@@ -61,79 +67,88 @@ export function ScheduleConfig({
const blockWithValues = getBlockWithValues(blockId)
const isScheduleTriggerBlock = blockWithValues?.type === 'schedule'
// Function to check if schedule exists in the database
const checkSchedule = async () => {
// Fetch schedule data from API
const fetchSchedule = useCallback(async () => {
if (!workflowId) return
setIsLoading(true)
try {
// Check if there's a schedule for this workflow, passing the mode parameter
// For schedule trigger blocks, include blockId to get the specific schedule
const url = new URL('/api/schedules', window.location.origin)
url.searchParams.set('workflowId', workflowId)
url.searchParams.set('mode', 'schedule')
const params = new URLSearchParams({
workflowId,
mode: 'schedule',
})
if (isScheduleTriggerBlock) {
url.searchParams.set('blockId', blockId)
params.set('blockId', blockId)
}
const response = await fetch(url.toString(), {
// Add cache: 'no-store' to prevent caching of this request
const response = await fetch(`/api/schedules?${params}`, {
cache: 'no-store',
headers: {
'Cache-Control': 'no-cache',
},
headers: { 'Cache-Control': 'no-cache' },
})
if (response.ok) {
const data = await response.json()
logger.debug('Schedule check response:', data)
if (data.schedule) {
setScheduleId(data.schedule.id)
setNextRunAt(data.schedule.nextRunAt)
setLastRanAt(data.schedule.lastRanAt)
setCronExpression(data.schedule.cronExpression)
setTimezone(data.schedule.timezone || 'UTC')
// Note: We no longer set global schedule status from individual components
// The global schedule status should be managed by a higher-level component
setScheduleData({
id: data.schedule.id,
nextRunAt: data.schedule.nextRunAt,
lastRanAt: data.schedule.lastRanAt,
cronExpression: data.schedule.cronExpression,
timezone: data.schedule.timezone || 'UTC',
})
} else {
setScheduleId(null)
setNextRunAt(null)
setLastRanAt(null)
setCronExpression(null)
// Note: We no longer set global schedule status from individual components
setScheduleData({
id: null,
nextRunAt: null,
lastRanAt: null,
cronExpression: null,
timezone: 'UTC',
})
}
}
} catch (error) {
logger.error('Error checking schedule:', { error })
setError('Failed to check schedule status')
logger.error('Error fetching schedule:', error)
} finally {
setIsLoading(false)
}
}
}, [workflowId, blockId, isScheduleTriggerBlock])
// Check for schedule on mount and when relevant dependencies change
// Fetch schedule data on mount and when dependencies change
useEffect(() => {
// Check for schedules when workflowId changes, modal opens, or on initial mount
if (workflowId) {
checkSchedule()
fetchSchedule()
}, [fetchSchedule])
// Separate effect for event listener to avoid removing/re-adding on every dependency change
useEffect(() => {
const handleScheduleUpdate = (event: CustomEvent) => {
if (event.detail?.workflowId === workflowId && event.detail?.blockId === blockId) {
logger.debug('Schedule update event received in schedule-config, refetching')
fetchSchedule()
}
}
// Cleanup function to reset loading state
window.addEventListener('schedule-updated', handleScheduleUpdate as EventListener)
return () => {
setIsLoading(false)
window.removeEventListener('schedule-updated', handleScheduleUpdate as EventListener)
}
}, [workflowId, isModalOpen, refreshCounter])
}, [workflowId, blockId, fetchSchedule])
// Refetch when modal opens to get latest data
useEffect(() => {
if (isModalOpen) {
fetchSchedule()
}
}, [isModalOpen, fetchSchedule])
// Format the schedule information for display
const getScheduleInfo = () => {
if (!scheduleId || !nextRunAt) return null
if (!scheduleData.id || !scheduleData.nextRunAt) return null
let scheduleTiming = 'Unknown schedule'
if (cronExpression) {
scheduleTiming = parseCronToHumanReadable(cronExpression)
if (scheduleData.cronExpression) {
scheduleTiming = parseCronToHumanReadable(scheduleData.cronExpression, scheduleData.timezone)
} else if (scheduleType) {
scheduleTiming = `${scheduleType.charAt(0).toUpperCase() + scheduleType.slice(1)}`
}
@@ -142,8 +157,14 @@ export function ScheduleConfig({
<>
<div className='truncate font-normal text-sm'>{scheduleTiming}</div>
<div className='text-muted-foreground text-xs'>
<div>Next run: {formatDateTime(new Date(nextRunAt), timezone)}</div>
{lastRanAt && <div>Last run: {formatDateTime(new Date(lastRanAt), timezone)}</div>}
<div>
Next run: {formatDateTime(new Date(scheduleData.nextRunAt), scheduleData.timezone)}
</div>
{scheduleData.lastRanAt && (
<div>
Last run: {formatDateTime(new Date(scheduleData.lastRanAt), scheduleData.timezone)}
</div>
)}
</div>
</>
)
@@ -154,16 +175,11 @@ export function ScheduleConfig({
setIsModalOpen(true)
}
const handleCloseModal = () => {
const handleCloseModal = useCallback(() => {
setIsModalOpen(false)
// Force a refresh when closing the modal
// Use a small timeout to ensure backend updates are complete
setTimeout(() => {
setRefreshCounter((prev) => prev + 1)
}, 500)
}
}, [])
const handleSaveSchedule = async (): Promise<boolean> => {
const handleSaveSchedule = useCallback(async (): Promise<boolean> => {
if (isPreview || disabled) return false
setIsSaving(true)
@@ -246,17 +262,24 @@ export function ScheduleConfig({
logger.debug('Schedule save response:', responseData)
// 5. Update our local state with the response data
if (responseData.cronExpression) {
setCronExpression(responseData.cronExpression)
if (responseData.cronExpression || responseData.nextRunAt) {
setScheduleData((prev) => ({
...prev,
cronExpression: responseData.cronExpression || prev.cronExpression,
nextRunAt:
typeof responseData.nextRunAt === 'string'
? responseData.nextRunAt
: responseData.nextRunAt?.toISOString?.() || prev.nextRunAt,
}))
}
if (responseData.nextRunAt) {
setNextRunAt(
typeof responseData.nextRunAt === 'string'
? responseData.nextRunAt
: responseData.nextRunAt.toISOString?.() || responseData.nextRunAt
)
}
// 6. Dispatch custom event to notify parent workflow-block component to refetch schedule info
// This ensures the badge updates immediately after saving
const event = new CustomEvent('schedule-updated', {
detail: { workflowId, blockId },
})
window.dispatchEvent(event)
logger.debug('Dispatched schedule-updated event', { workflowId, blockId })
// 6. Update the schedule status and trigger a workflow update
// Note: Global schedule status is managed at a higher level
@@ -266,15 +289,8 @@ export function ScheduleConfig({
workflowStore.updateLastSaved()
workflowStore.triggerUpdate()
// 8. Force a refresh to update the UI
// Use a timeout to ensure the API changes are completed
setTimeout(() => {
logger.debug('Refreshing schedule information after save')
setRefreshCounter((prev) => prev + 1)
// Make a separate API call to ensure we get the latest schedule info
checkSchedule()
}, 500)
// 8. Refetch the schedule to update local state
await fetchSchedule()
return true
} catch (error) {
@@ -284,10 +300,10 @@ export function ScheduleConfig({
} finally {
setIsSaving(false)
}
}
}, [workflowId, blockId, isScheduleTriggerBlock, setStartWorkflow, fetchSchedule])
const handleDeleteSchedule = async (): Promise<boolean> => {
if (isPreview || !scheduleId || disabled) return false
const handleDeleteSchedule = useCallback(async (): Promise<boolean> => {
if (isPreview || !scheduleData.id || disabled) return false
setIsDeleting(true)
try {
@@ -315,7 +331,7 @@ export function ScheduleConfig({
}
// 4. Make the DELETE API call to remove the schedule
const response = await fetch(`/api/schedules/${scheduleId}`, {
const response = await fetch(`/api/schedules/${scheduleData.id}`, {
method: 'DELETE',
})
@@ -326,14 +342,23 @@ export function ScheduleConfig({
}
// 5. Clear schedule state
setScheduleId(null)
setNextRunAt(null)
setLastRanAt(null)
setCronExpression(null)
setScheduleData({
id: null,
nextRunAt: null,
lastRanAt: null,
cronExpression: null,
timezone: 'UTC',
})
// 6. Update schedule status and refresh UI
// Note: Global schedule status is managed at a higher level
setRefreshCounter((prev) => prev + 1)
// 7. Dispatch custom event to notify parent workflow-block component
const event = new CustomEvent('schedule-updated', {
detail: { workflowId, blockId },
})
window.dispatchEvent(event)
logger.debug('Dispatched schedule-updated event after delete', { workflowId, blockId })
return true
} catch (error) {
@@ -343,10 +368,18 @@ export function ScheduleConfig({
} finally {
setIsDeleting(false)
}
}
}, [
scheduleData.id,
isPreview,
disabled,
isScheduleTriggerBlock,
setStartWorkflow,
workflowId,
blockId,
])
// Check if the schedule is active
const isScheduleActive = !!scheduleId && !!nextRunAt
const isScheduleActive = !!scheduleData.id && !!scheduleData.nextRunAt
return (
<div className='w-full' onClick={(e) => e.stopPropagation()}>
@@ -399,7 +432,7 @@ export function ScheduleConfig({
blockId={blockId}
onSave={handleSaveSchedule}
onDelete={handleDeleteSchedule}
scheduleId={scheduleId}
scheduleId={scheduleData.id}
/>
</Dialog>
</div>
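Schedule state now propagates through a window-level 'schedule-updated' CustomEvent instead of timed refresh counters: schedule-config dispatches it after a save or delete, and workflow-block listens and refetches. The handshake in isolation, with the same detail shape the diff dispatches:

interface ScheduleUpdatedDetail {
  workflowId: string
  blockId: string
}

// Publisher: fire after a save or delete completes.
function notifyScheduleUpdated(workflowId: string, blockId: string): void {
  window.dispatchEvent(
    new CustomEvent<ScheduleUpdatedDetail>('schedule-updated', { detail: { workflowId, blockId } })
  )
}

// Subscriber: refetch only when the event targets this workflow/block;
// returns an unsubscribe function for the effect cleanup.
function subscribeToScheduleUpdates(
  workflowId: string,
  blockId: string,
  refetch: () => void
): () => void {
  const handler = (event: Event) => {
    const detail = (event as CustomEvent<ScheduleUpdatedDetail>).detail
    if (detail?.workflowId === workflowId && detail?.blockId === blockId) refetch()
  }
  window.addEventListener('schedule-updated', handler)
  return () => window.removeEventListener('schedule-updated', handler)
}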

View File

@@ -83,9 +83,8 @@ export function FieldFormat({
const [activeSourceBlockId, setActiveSourceBlockId] = useState<string | null>(null)
const accessiblePrefixes = useAccessibleReferencePrefixes(blockId)
// Use preview value when in preview mode, otherwise use store value
const value = isPreview ? previewValue : storeValue
const fields: Field[] = value || []
const fields: Field[] = Array.isArray(value) ? value : []
useEffect(() => {
const initial: Record<string, string> = {}
@@ -547,7 +546,7 @@ export function ResponseFormat(
emptyMessage='No response fields defined'
showType={false}
showValue={true}
valuePlaceholder='Enter test value'
valuePlaceholder='Enter return value'
/>
)
}

View File

@@ -1,5 +1,6 @@
import React, { useCallback, useEffect, useState } from 'react'
import { AlertCircle, PlusIcon, Server, WrenchIcon, XIcon } from 'lucide-react'
import type React from 'react'
import { useCallback, useEffect, useState } from 'react'
import { PlusIcon, Server, WrenchIcon, XIcon } from 'lucide-react'
import { useParams } from 'next/navigation'
import { Button } from '@/components/ui/button'
import { Popover, PopoverContent, PopoverTrigger } from '@/components/ui/popover'
@@ -374,42 +375,40 @@ function FileUploadSyncWrapper({
)
}
// Error boundary component for tool input
class ToolInputErrorBoundary extends React.Component<
{ children: React.ReactNode; blockName?: string },
{ hasError: boolean; error?: Error }
> {
constructor(props: any) {
super(props)
this.state = { hasError: false }
}
static getDerivedStateFromError(error: Error) {
return { hasError: true, error }
}
componentDidCatch(error: Error, info: React.ErrorInfo) {
console.error('ToolInput error:', error, info)
}
render() {
if (this.state.hasError) {
return (
<div className='rounded-md bg-red-50 p-4 text-red-800 text-sm dark:bg-red-900/20 dark:text-red-200'>
<div className='flex items-center gap-2'>
<AlertCircle className='h-4 w-4' />
<span className='font-medium'>Tool Configuration Error</span>
</div>
<p className='mt-1 text-xs opacity-80'>
{this.props.blockName ? `Block "${this.props.blockName}": ` : ''}
Invalid tool reference. Please check the workflow configuration.
</p>
</div>
)
}
return this.props.children
}
function ChannelSelectorSyncWrapper({
blockId,
paramId,
value,
onChange,
uiComponent,
disabled,
previewContextValues,
}: {
blockId: string
paramId: string
value: string
onChange: (value: string) => void
uiComponent: any
disabled: boolean
previewContextValues?: Record<string, any>
}) {
return (
<GenericSyncWrapper blockId={blockId} paramId={paramId} value={value} onChange={onChange}>
<ChannelSelectorInput
blockId={blockId}
subBlock={{
id: paramId,
type: 'channel-selector' as const,
title: paramId,
provider: uiComponent.provider || 'slack',
placeholder: uiComponent.placeholder,
}}
onChannelSelect={onChange}
disabled={disabled}
previewContextValues={previewContextValues}
/>
</GenericSyncWrapper>
)
}
export function ToolInput({
@@ -1060,19 +1059,14 @@ export function ToolInput({
case 'channel-selector':
return (
<ChannelSelectorInput
<ChannelSelectorSyncWrapper
blockId={blockId}
subBlock={{
id: `tool-${toolIndex || 0}-${param.id}`,
type: 'channel-selector' as const,
title: param.id,
provider: uiComponent.provider || 'slack',
placeholder: uiComponent.placeholder,
}}
onChannelSelect={onChange}
paramId={param.id}
value={value}
onChange={onChange}
uiComponent={uiComponent}
disabled={disabled}
isPreview={true}
previewValue={value}
previewContextValues={currentToolParams as any}
/>
)

View File

@@ -20,6 +20,7 @@ import {
} from '@/components/ui/select'
import { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger } from '@/components/ui/tooltip'
import { createLogger } from '@/lib/logs/console/logger'
import { getBaseUrl } from '@/lib/urls/utils'
import { cn } from '@/lib/utils'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { getTrigger } from '@/triggers'
@@ -284,8 +285,7 @@ export function TriggerModal({
}
if (finalPath) {
const baseUrl = window.location.origin
setWebhookUrl(`${baseUrl}/api/webhooks/trigger/${finalPath}`)
setWebhookUrl(`${getBaseUrl()}/api/webhooks/trigger/${finalPath}`)
}
}, [
triggerPath,
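This change set repeatedly swaps window.location.origin and direct NEXT_PUBLIC_APP_URL reads for a shared getBaseUrl() helper from @/lib/urls/utils, so client components, server code, and email templates resolve the same origin. The project's actual implementation is not shown here; a hedged sketch of the general shape such a helper typically takes:

// Illustrative only: prefer the configured app URL, fall back to the
// browser origin on the client, and finally to a hosted default.
export function getBaseUrl(): string {
  const configured = process.env.NEXT_PUBLIC_APP_URL
  if (configured) return configured.replace(/\/$/, '')
  if (typeof window !== 'undefined') return window.location.origin
  return 'https://sim.ai'
}

// Usage mirroring the diff:
// setWebhookUrl(`${getBaseUrl()}/api/webhooks/trigger/${finalPath}`)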

View File

@@ -9,6 +9,7 @@ import {
DialogTitle,
} from '@/components/ui/dialog'
import { createLogger } from '@/lib/logs/console/logger'
import { getBaseUrl } from '@/lib/urls/utils'
import {
AirtableConfig,
DeleteConfirmDialog,
@@ -404,12 +405,7 @@ export function WebhookModal({
}, [webhookPath])
// Construct the full webhook URL
const baseUrl =
typeof window !== 'undefined'
? `${window.location.protocol}//${window.location.host}`
: 'https://your-domain.com'
const webhookUrl = `${baseUrl}/api/webhooks/trigger/${formattedPath}`
const webhookUrl = `${getBaseUrl()}/api/webhooks/trigger/${formattedPath}`
const generateTestUrl = async () => {
if (!webhookId) return

View File

@@ -280,83 +280,59 @@ export const WorkflowBlock = memo(
}
}
const fetchScheduleInfo = async (workflowId: string) => {
if (!workflowId) return
try {
setIsLoadingScheduleInfo(true)
// For schedule trigger blocks, always include the blockId parameter
const url = new URL('/api/schedules', window.location.origin)
url.searchParams.set('workflowId', workflowId)
url.searchParams.set('mode', 'schedule')
url.searchParams.set('blockId', id) // Always include blockId for schedule blocks
const response = await fetch(url.toString(), {
cache: 'no-store',
headers: {
'Cache-Control': 'no-cache',
},
})
if (!response.ok) {
setScheduleInfo(null)
return
}
const data = await response.json()
if (!data.schedule) {
setScheduleInfo(null)
return
}
let scheduleTiming = 'Unknown schedule'
if (data.schedule.cronExpression) {
scheduleTiming = parseCronToHumanReadable(data.schedule.cronExpression)
}
const baseInfo = {
scheduleTiming,
nextRunAt: data.schedule.nextRunAt as string | null,
lastRanAt: data.schedule.lastRanAt as string | null,
timezone: data.schedule.timezone || 'UTC',
status: data.schedule.status as string,
isDisabled: data.schedule.status === 'disabled',
id: data.schedule.id as string,
}
const fetchScheduleInfo = useCallback(
async (workflowId: string) => {
if (!workflowId) return
try {
const statusRes = await fetch(`/api/schedules/${baseInfo.id}/status`, {
setIsLoadingScheduleInfo(true)
const params = new URLSearchParams({
workflowId,
mode: 'schedule',
blockId: id,
})
const response = await fetch(`/api/schedules?${params}`, {
cache: 'no-store',
headers: { 'Cache-Control': 'no-cache' },
})
if (statusRes.ok) {
const statusData = await statusRes.json()
setScheduleInfo({
scheduleTiming: baseInfo.scheduleTiming,
nextRunAt: statusData.nextRunAt ?? baseInfo.nextRunAt,
lastRanAt: statusData.lastRanAt ?? baseInfo.lastRanAt,
timezone: baseInfo.timezone,
status: statusData.status ?? baseInfo.status,
isDisabled: statusData.isDisabled ?? baseInfo.isDisabled,
id: baseInfo.id,
})
if (!response.ok) {
setScheduleInfo(null)
return
}
} catch (err) {
logger.error('Error fetching schedule status:', err)
}
setScheduleInfo(baseInfo)
} catch (error) {
logger.error('Error fetching schedule info:', error)
setScheduleInfo(null)
} finally {
setIsLoadingScheduleInfo(false)
}
}
const data = await response.json()
if (!data.schedule) {
setScheduleInfo(null)
return
}
const schedule = data.schedule
const scheduleTimezone = schedule.timezone || 'UTC'
setScheduleInfo({
scheduleTiming: schedule.cronExpression
? parseCronToHumanReadable(schedule.cronExpression, scheduleTimezone)
: 'Unknown schedule',
nextRunAt: schedule.nextRunAt,
lastRanAt: schedule.lastRanAt,
timezone: scheduleTimezone,
status: schedule.status,
isDisabled: schedule.status === 'disabled',
id: schedule.id,
})
} catch (error) {
logger.error('Error fetching schedule info:', error)
setScheduleInfo(null)
} finally {
setIsLoadingScheduleInfo(false)
}
},
[id]
)
useEffect(() => {
if (type === 'schedule' && currentWorkflowId) {
@@ -366,11 +342,25 @@ export const WorkflowBlock = memo(
setIsLoadingScheduleInfo(false) // Reset loading state when not a schedule block
}
// Cleanup function to reset loading state when component unmounts or workflow changes
// Listen for schedule updates from the schedule-config component
const handleScheduleUpdate = (event: CustomEvent) => {
// Check if the update is for this workflow and block
if (event.detail?.workflowId === currentWorkflowId && event.detail?.blockId === id) {
logger.debug('Schedule update event received, refetching schedule info')
if (type === 'schedule') {
fetchScheduleInfo(currentWorkflowId)
}
}
}
window.addEventListener('schedule-updated', handleScheduleUpdate as EventListener)
// Cleanup function to reset loading state and remove listener
return () => {
setIsLoadingScheduleInfo(false)
window.removeEventListener('schedule-updated', handleScheduleUpdate as EventListener)
}
}, [isStarterBlock, isTriggerBlock, type, currentWorkflowId])
}, [type, currentWorkflowId, id, fetchScheduleInfo])
// Get webhook information for the tooltip
useEffect(() => {

View File

@@ -1,6 +1,8 @@
import { shallow } from 'zustand/shallow'
import { BlockPathCalculator } from '@/lib/block-path-calculator'
import { createLogger } from '@/lib/logs/console/logger'
import { getBlockOutputs } from '@/lib/workflows/block-outputs'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
@@ -92,6 +94,22 @@ export function useBlockConnections(blockId: string) {
shallow
)
const workflowId = useWorkflowRegistry((state) => state.activeWorkflowId)
const workflowSubBlockValues = useSubBlockStore((state) =>
workflowId ? (state.workflowValues[workflowId] ?? {}) : {}
)
// Helper function to merge block subBlocks with live values from subblock store
const getMergedSubBlocks = (sourceBlockId: string): Record<string, any> => {
const base = blocks[sourceBlockId]?.subBlocks || {}
const live = workflowSubBlockValues?.[sourceBlockId] || {}
const merged: Record<string, any> = { ...base }
for (const [subId, liveVal] of Object.entries(live)) {
merged[subId] = { ...(base[subId] || {}), value: liveVal }
}
return merged
}
// Find all blocks along paths leading to this block
const allPathNodeIds = BlockPathCalculator.findAllPathNodes(edges, blockId)
@@ -101,20 +119,37 @@ export function useBlockConnections(blockId: string) {
const sourceBlock = blocks[sourceId]
if (!sourceBlock) return null
// Get merged subblocks for this source block
const mergedSubBlocks = getMergedSubBlocks(sourceId)
// Get the response format from the subblock store
const responseFormatValue = useSubBlockStore.getState().getValue(sourceId, 'responseFormat')
// Safely parse response format with proper error handling
const responseFormat = parseResponseFormatSafely(responseFormatValue, sourceId)
// Get the default output type from the block's outputs
const defaultOutputs: Field[] = Object.entries(sourceBlock.outputs || {}).map(([key]) => ({
name: key,
type: 'string',
}))
// Use getBlockOutputs to properly handle dynamic outputs from inputFormat
const blockOutputs = getBlockOutputs(
sourceBlock.type,
mergedSubBlocks,
sourceBlock.triggerMode
)
// Extract fields from the response format using our helper function
const outputFields = responseFormat ? extractFieldsFromSchema(responseFormat) : defaultOutputs
// Extract fields from the response format if available, otherwise use block outputs
let outputFields: Field[]
if (responseFormat) {
outputFields = extractFieldsFromSchema(responseFormat)
} else {
// Convert block outputs to field format
outputFields = Object.entries(blockOutputs).map(([key, value]: [string, any]) => ({
name: key,
type: value && typeof value === 'object' && 'type' in value ? value.type : 'string',
description:
value && typeof value === 'object' && 'description' in value
? value.description
: undefined,
}))
}
return {
id: sourceBlock.id,
@@ -133,6 +168,9 @@ export function useBlockConnections(blockId: string) {
const sourceBlock = blocks[edge.source]
if (!sourceBlock) return null
// Get merged subblocks for this source block
const mergedSubBlocks = getMergedSubBlocks(edge.source)
// Get the response format from the subblock store instead
const responseFormatValue = useSubBlockStore
.getState()
@@ -141,14 +179,28 @@ export function useBlockConnections(blockId: string) {
// Safely parse response format with proper error handling
const responseFormat = parseResponseFormatSafely(responseFormatValue, edge.source)
// Get the default output type from the block's outputs
const defaultOutputs: Field[] = Object.entries(sourceBlock.outputs || {}).map(([key]) => ({
name: key,
type: 'string',
}))
// Use getBlockOutputs to properly handle dynamic outputs from inputFormat
const blockOutputs = getBlockOutputs(
sourceBlock.type,
mergedSubBlocks,
sourceBlock.triggerMode
)
// Extract fields from the response format using our helper function
const outputFields = responseFormat ? extractFieldsFromSchema(responseFormat) : defaultOutputs
// Extract fields from the response format if available, otherwise use block outputs
let outputFields: Field[]
if (responseFormat) {
outputFields = extractFieldsFromSchema(responseFormat)
} else {
// Convert block outputs to field format
outputFields = Object.entries(blockOutputs).map(([key, value]: [string, any]) => ({
name: key,
type: value && typeof value === 'object' && 'type' in value ? value.type : 'string',
description:
value && typeof value === 'object' && 'description' in value
? value.description
: undefined,
}))
}
return {
id: sourceBlock.id,
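The hook now overlays live sub-block values onto the stored block definition before asking getBlockOutputs for dynamic outputs, then flattens those outputs into the Field[] shape the connections UI expects. Both steps, sketched with simplified types (getBlockOutputs itself is the project's helper and is not reimplemented here):

interface SubBlockState { value?: unknown; [key: string]: unknown }
interface Field { name: string; type: string; description?: string }

// Overlay live values from the sub-block store onto the block's stored sub-blocks.
function mergeSubBlocks(
  base: Record<string, SubBlockState>,
  live: Record<string, unknown>
): Record<string, SubBlockState> {
  const merged: Record<string, SubBlockState> = { ...base }
  for (const [subId, liveVal] of Object.entries(live)) {
    merged[subId] = { ...(base[subId] || {}), value: liveVal }
  }
  return merged
}

// Flatten block outputs (plain keys or { type, description } objects) into Field entries.
function outputsToFields(outputs: Record<string, unknown>): Field[] {
  return Object.entries(outputs).map(([name, value]) => ({
    name,
    type:
      value && typeof value === 'object' && 'type' in value ? String((value as any).type) : 'string',
    description:
      value && typeof value === 'object' && 'description' in value
        ? String((value as any).description)
        : undefined,
  }))
}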

View File

@@ -92,7 +92,8 @@ const WorkflowContent = React.memo(() => {
const [draggedNodeId, setDraggedNodeId] = useState<string | null>(null)
const [potentialParentId, setPotentialParentId] = useState<string | null>(null)
// State for tracking validation errors
const [nestedSubflowErrors, setNestedSubflowErrors] = useState<Set<string>>(new Set())
// Use a function initializer to ensure the Set is only created once
const [nestedSubflowErrors, setNestedSubflowErrors] = useState<Set<string>>(() => new Set())
// Enhanced edge selection with parent context and unique identifier
const [selectedEdgeInfo, setSelectedEdgeInfo] = useState<SelectedEdgeInfo | null>(null)
@@ -1292,8 +1293,6 @@ const WorkflowContent = React.memo(() => {
// Include dynamic dimensions for container resizing calculations
width: block.isWide ? 450 : 350, // Standard width based on isWide state
height: Math.max(block.height || 100, 100), // Use actual height with minimum
// Explicitly set measured to prevent ReactFlow from recalculating
measured: { width: block.isWide ? 450 : 350, height: Math.max(block.height || 100, 100) },
})
})
@@ -1967,7 +1966,13 @@ const WorkflowContent = React.memo(() => {
edgeTypes={edgeTypes}
onDrop={effectivePermissions.canEdit ? onDrop : undefined}
onDragOver={effectivePermissions.canEdit ? onDragOver : undefined}
fitView
onInit={(instance) => {
requestAnimationFrame(() => {
requestAnimationFrame(() => {
instance.fitView({ padding: 0.3 })
})
})
}}
minZoom={0.1}
maxZoom={1.3}
panOnScroll
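Dropping the declarative fitView prop in favor of an onInit callback defers the fit until node dimensions have been measured: the nested requestAnimationFrame calls push the fitView past the frame in which nodes first render, so the viewport is fitted to real sizes rather than defaults. The deferral on its own, assuming any instance with a fitView method:

interface FitViewable {
  fitView: (options?: { padding?: number }) => void
}

// Wait two frames so node dimensions are measured before fitting the viewport.
function fitViewAfterLayout(instance: FitViewable, padding = 0.3): void {
  requestAnimationFrame(() => {
    requestAnimationFrame(() => {
      instance.fitView({ padding })
    })
  })
}

// Usage: <ReactFlow onInit={(instance) => fitViewAfterLayout(instance)} ... />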

View File

@@ -7,21 +7,27 @@ import Level from '@/app/workspace/[workspaceId]/logs/components/filters/compone
import Timeline from '@/app/workspace/[workspaceId]/logs/components/filters/components/timeline'
import Trigger from '@/app/workspace/[workspaceId]/logs/components/filters/components/trigger'
import Workflow from '@/app/workspace/[workspaceId]/logs/components/filters/components/workflow'
import { useFilterStore } from '@/stores/logs/filters/store'
export function LogsFilters() {
const viewMode = useFilterStore((state) => state.viewMode)
const sections = [
{ key: 'level', title: 'Level', component: <Level /> },
{ key: 'workflow', title: 'Workflow', component: <Workflow /> },
{ key: 'folder', title: 'Folder', component: <FolderFilter /> },
{ key: 'trigger', title: 'Trigger', component: <Trigger /> },
{ key: 'timeline', title: 'Timeline', component: <Timeline /> },
{ key: 'level', title: 'Level', component: <Level />, showInDashboard: false },
{ key: 'workflow', title: 'Workflow', component: <Workflow />, showInDashboard: true },
{ key: 'folder', title: 'Folder', component: <FolderFilter />, showInDashboard: true },
{ key: 'trigger', title: 'Trigger', component: <Trigger />, showInDashboard: true },
{ key: 'timeline', title: 'Timeline', component: <Timeline />, showInDashboard: true },
]
const filteredSections =
viewMode === 'dashboard' ? sections.filter((section) => section.showInDashboard) : sections
return (
<div className='h-full'>
<ScrollArea className='h-full' hideScrollbar={true}>
<div className='space-y-4 px-3 py-3'>
{sections.map((section) => (
{filteredSections.map((section) => (
<FilterSection key={section.key} title={section.title} content={section.component} />
))}
</div>

View File

@@ -12,6 +12,7 @@ import { Skeleton } from '@/components/ui/skeleton'
import { signOut, useSession } from '@/lib/auth-client'
import { useBrandConfig } from '@/lib/branding/branding'
import { createLogger } from '@/lib/logs/console/logger'
import { getBaseUrl } from '@/lib/urls/utils'
import { useProfilePictureUpload } from '@/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/components/account/hooks/use-profile-picture-upload'
import { clearUserData } from '@/stores'
@@ -208,7 +209,7 @@ export function Account(_props: AccountProps) {
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
email,
redirectTo: `${window.location.origin}/reset-password`,
redirectTo: `${getBaseUrl()}/reset-password`,
}),
})

View File

@@ -5,9 +5,9 @@ import { Check, ChevronDown, Copy, Eye, EyeOff } from 'lucide-react'
import { Alert, AlertDescription, Button, Input, Label } from '@/components/ui'
import { Skeleton } from '@/components/ui/skeleton'
import { useSession } from '@/lib/auth-client'
import { env } from '@/lib/env'
import { isBillingEnabled } from '@/lib/environment'
import { createLogger } from '@/lib/logs/console/logger'
import { getBaseUrl } from '@/lib/urls/utils'
import { cn } from '@/lib/utils'
import { useOrganizationStore } from '@/stores/organization'
@@ -441,7 +441,7 @@ export function SSO() {
})
}
const callbackUrl = `${env.NEXT_PUBLIC_APP_URL}/api/auth/sso/callback/${formData.providerId}`
const callbackUrl = `${getBaseUrl()}/api/auth/sso/callback/${formData.providerId}`
const copyCallback = async () => {
try {
@@ -551,14 +551,14 @@ export function SSO() {
<div className='relative mt-2'>
<Input
readOnly
value={`${env.NEXT_PUBLIC_APP_URL}/api/auth/sso/callback/${provider.providerId}`}
value={`${getBaseUrl()}/api/auth/sso/callback/${provider.providerId}`}
className='h-9 w-full cursor-text pr-10 font-mono text-xs focus-visible:ring-2 focus-visible:ring-primary/20'
onClick={(e) => (e.target as HTMLInputElement).select()}
/>
<button
type='button'
onClick={() => {
const url = `${env.NEXT_PUBLIC_APP_URL}/api/auth/sso/callback/${provider.providerId}`
const url = `${getBaseUrl()}/api/auth/sso/callback/${provider.providerId}`
navigator.clipboard.writeText(url)
setCopied(true)
setTimeout(() => setCopied(false), 1500)

View File

@@ -15,6 +15,7 @@ import { Button } from '@/components/ui/button'
import { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger } from '@/components/ui/tooltip'
import { useSession, useSubscription } from '@/lib/auth-client'
import { createLogger } from '@/lib/logs/console/logger'
import { getBaseUrl } from '@/lib/urls/utils'
import { cn } from '@/lib/utils'
import { useOrganizationStore } from '@/stores/organization'
import { useSubscriptionStore } from '@/stores/subscription/store'
@@ -89,7 +90,7 @@ export function CancelSubscription({ subscription, subscriptionData }: CancelSub
throw new Error('Subscription management not available')
}
const returnUrl = window.location.origin + window.location.pathname.split('/w/')[0]
const returnUrl = getBaseUrl() + window.location.pathname.split('/w/')[0]
const cancelParams: any = {
returnUrl,

View File

@@ -3,6 +3,7 @@ import { useCallback, useEffect, useRef, useState } from 'react'
import { Skeleton, Switch } from '@/components/ui'
import { useSession } from '@/lib/auth-client'
import { useSubscriptionUpgrade } from '@/lib/subscription/upgrade'
import { getBaseUrl } from '@/lib/urls/utils'
import { cn } from '@/lib/utils'
import { UsageHeader } from '@/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/components/shared/usage-header'
import {
@@ -391,7 +392,7 @@ export function Subscription({ onOpenChange }: SubscriptionProps) {
context:
subscription.isTeam || subscription.isEnterprise ? 'organization' : 'user',
organizationId: activeOrgId,
returnUrl: `${window.location.origin}/workspace?billing=updated`,
returnUrl: `${getBaseUrl()}/workspace?billing=updated`,
}),
})
const data = await res.json()

View File

@@ -4,6 +4,7 @@ import { Alert, AlertDescription, AlertTitle } from '@/components/ui/alert'
import { Skeleton } from '@/components/ui/skeleton'
import { useActiveOrganization } from '@/lib/auth-client'
import { createLogger } from '@/lib/logs/console/logger'
import { getBaseUrl } from '@/lib/urls/utils'
import { UsageHeader } from '@/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/components/shared/usage-header'
import {
UsageLimit,
@@ -122,7 +123,7 @@ export function TeamUsage({ hasAdminAccess }: TeamUsageProps) {
body: JSON.stringify({
context: 'organization',
organizationId: activeOrg?.id,
returnUrl: `${window.location.origin}/workspace?billing=updated`,
returnUrl: `${getBaseUrl()}/workspace?billing=updated`,
}),
})
const data = await res.json()

View File

@@ -49,8 +49,14 @@ function calculateNextRunTime(
const scheduleType = getSubBlockValue(scheduleBlock, 'scheduleType')
const scheduleValues = getScheduleTimeValues(scheduleBlock)
// Get timezone from schedule configuration (default to UTC)
const timezone = scheduleValues.timezone || 'UTC'
if (schedule.cronExpression) {
const cron = new Cron(schedule.cronExpression)
// Use Croner with timezone support for accurate scheduling
const cron = new Cron(schedule.cronExpression, {
timezone,
})
const nextDate = cron.nextRun()
if (!nextDate) throw new Error('Invalid cron expression or no future occurrences')
return nextDate
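Delegating next-run calculation to croner also moves timezone handling into the library: the cron expression is evaluated in the schedule's configured timezone instead of the server's local time. Cron and nextRun() are croner's real API; the expression and timezone below are example values:

import { Cron } from 'croner'

// Next occurrence of "every day at 09:00" evaluated in New York time.
function nextRunInTimezone(cronExpression: string, timezone: string): Date {
  const cron = new Cron(cronExpression, { timezone })
  const nextDate = cron.nextRun()
  if (!nextDate) throw new Error('Invalid cron expression or no future occurrences')
  return nextDate
}

// Example: nextRunInTimezone('0 9 * * *', 'America/New_York')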

View File

@@ -11,7 +11,7 @@ export const ResponseBlock: BlockConfig<ResponseBlockOutput> = {
docsLink: 'https://docs.sim.ai/blocks/response',
bestPractices: `
- Only use this if the trigger block is the API Trigger.
- Prefer the editor mode over the builder mode.
- Prefer the builder mode over the editor mode.
- This is usually used as the last block in the workflow.
`,
category: 'blocks',

View File

@@ -0,0 +1,329 @@
import { ZepIcon } from '@/components/icons'
import { AuthMode, type BlockConfig } from '@/blocks/types'
import type { ZepResponse } from '@/tools/zep/types'
export const ZepBlock: BlockConfig<ZepResponse> = {
type: 'zep',
name: 'Zep',
description: 'Long-term memory for AI agents',
authMode: AuthMode.ApiKey,
longDescription:
'Integrate Zep for long-term memory management. Create threads, add messages, and retrieve context with AI-powered summaries and fact extraction.',
bgColor: '#E8E8E8',
icon: ZepIcon,
category: 'tools',
docsLink: 'https://docs.sim.ai/tools/zep',
subBlocks: [
{
id: 'operation',
title: 'Operation',
type: 'dropdown',
layout: 'half',
options: [
{ label: 'Create Thread', id: 'create_thread' },
{ label: 'Add Messages', id: 'add_messages' },
{ label: 'Get Context', id: 'get_context' },
{ label: 'Get Messages', id: 'get_messages' },
{ label: 'Get Threads', id: 'get_threads' },
{ label: 'Delete Thread', id: 'delete_thread' },
{ label: 'Add User', id: 'add_user' },
{ label: 'Get User', id: 'get_user' },
{ label: 'Get User Threads', id: 'get_user_threads' },
],
placeholder: 'Select an operation',
value: () => 'create_thread',
},
{
id: 'threadId',
title: 'Thread ID',
type: 'short-input',
layout: 'full',
placeholder: 'Enter unique thread identifier',
condition: {
field: 'operation',
value: ['create_thread', 'add_messages', 'get_context', 'get_messages', 'delete_thread'],
},
required: true,
},
{
id: 'userId',
title: 'User ID',
type: 'short-input',
layout: 'full',
placeholder: 'Enter user identifier',
condition: {
field: 'operation',
value: ['create_thread', 'add_user', 'get_user', 'get_user_threads'],
},
required: true,
},
{
id: 'email',
title: 'Email',
type: 'short-input',
layout: 'half',
placeholder: 'user@example.com',
condition: {
field: 'operation',
value: 'add_user',
},
},
{
id: 'firstName',
title: 'First Name',
type: 'short-input',
layout: 'half',
placeholder: 'John',
condition: {
field: 'operation',
value: 'add_user',
},
},
{
id: 'lastName',
title: 'Last Name',
type: 'short-input',
layout: 'half',
placeholder: 'Doe',
condition: {
field: 'operation',
value: 'add_user',
},
},
{
id: 'metadata',
title: 'Metadata',
type: 'code',
layout: 'full',
placeholder: '{"key": "value"}',
language: 'json',
condition: {
field: 'operation',
value: 'add_user',
},
},
{
id: 'messages',
title: 'Messages',
type: 'code',
layout: 'full',
placeholder: '[{"role": "user", "content": "Hello!"}]',
language: 'json',
condition: {
field: 'operation',
value: 'add_messages',
},
required: true,
},
{
id: 'mode',
title: 'Context Mode',
type: 'dropdown',
layout: 'half',
options: [
{ label: 'Summary (Natural Language)', id: 'summary' },
{ label: 'Basic (Raw Facts)', id: 'basic' },
],
placeholder: 'Select context mode',
value: () => 'summary',
condition: {
field: 'operation',
value: 'get_context',
},
},
{
id: 'apiKey',
title: 'API Key',
type: 'short-input',
layout: 'full',
placeholder: 'Enter your Zep API key',
password: true,
required: true,
},
{
id: 'limit',
title: 'Result Limit',
type: 'slider',
layout: 'full',
min: 1,
max: 100,
step: 1,
integer: true,
condition: {
field: 'operation',
value: ['get_messages', 'get_threads'],
},
},
],
tools: {
access: [
'zep_create_thread',
'zep_get_threads',
'zep_delete_thread',
'zep_get_context',
'zep_get_messages',
'zep_add_messages',
'zep_add_user',
'zep_get_user',
'zep_get_user_threads',
],
config: {
tool: (params: Record<string, any>) => {
const operation = params.operation || 'create_thread'
switch (operation) {
case 'create_thread':
return 'zep_create_thread'
case 'add_messages':
return 'zep_add_messages'
case 'get_context':
return 'zep_get_context'
case 'get_messages':
return 'zep_get_messages'
case 'get_threads':
return 'zep_get_threads'
case 'delete_thread':
return 'zep_delete_thread'
case 'add_user':
return 'zep_add_user'
case 'get_user':
return 'zep_get_user'
case 'get_user_threads':
return 'zep_get_user_threads'
default:
return 'zep_create_thread'
}
},
params: (params: Record<string, any>) => {
const errors: string[] = []
// Validate required API key for all operations
if (!params.apiKey) {
errors.push('API Key is required')
}
const operation = params.operation || 'create_thread'
// Validate operation-specific required fields
if (
[
'create_thread',
'add_messages',
'get_context',
'get_messages',
'delete_thread',
].includes(operation)
) {
if (!params.threadId) {
errors.push('Thread ID is required')
}
}
if (operation === 'create_thread' || operation === 'add_user') {
if (!params.userId) {
errors.push('User ID is required')
}
}
if (operation === 'get_user' || operation === 'get_user_threads') {
if (!params.userId) {
errors.push('User ID is required')
}
}
if (operation === 'add_messages') {
if (!params.messages) {
errors.push('Messages are required')
} else {
try {
const messagesArray =
typeof params.messages === 'string' ? JSON.parse(params.messages) : params.messages
if (!Array.isArray(messagesArray) || messagesArray.length === 0) {
errors.push('Messages must be a non-empty array')
} else {
for (const msg of messagesArray) {
if (!msg.role || !msg.content) {
errors.push("Each message must have 'role' and 'content' properties")
break
}
}
}
} catch (_e: any) {
errors.push('Messages must be valid JSON')
}
}
}
// Throw error if any required fields are missing
if (errors.length > 0) {
throw new Error(`Zep Block Error: ${errors.join(', ')}`)
}
// Build the result params
const result: Record<string, any> = {
apiKey: params.apiKey,
}
if (params.threadId) result.threadId = params.threadId
if (params.userId) result.userId = params.userId
if (params.mode) result.mode = params.mode
if (params.limit) result.limit = Number(params.limit)
if (params.email) result.email = params.email
if (params.firstName) result.firstName = params.firstName
if (params.lastName) result.lastName = params.lastName
if (params.metadata) result.metadata = params.metadata
// Add messages for add operation
if (operation === 'add_messages') {
if (params.messages) {
try {
const messagesArray =
typeof params.messages === 'string' ? JSON.parse(params.messages) : params.messages
result.messages = messagesArray
} catch (e: any) {
throw new Error(`Zep Block Error: ${e.message || 'Messages must be valid JSON'}`)
}
}
}
return result
},
},
},
inputs: {
operation: { type: 'string', description: 'Operation to perform' },
apiKey: { type: 'string', description: 'Zep API key' },
threadId: { type: 'string', description: 'Thread identifier' },
userId: { type: 'string', description: 'User identifier' },
messages: { type: 'json', description: 'Message data array' },
mode: { type: 'string', description: 'Context mode (summary or basic)' },
limit: { type: 'number', description: 'Result limit' },
email: { type: 'string', description: 'User email' },
firstName: { type: 'string', description: 'User first name' },
lastName: { type: 'string', description: 'User last name' },
metadata: { type: 'json', description: 'User metadata' },
},
outputs: {
threadId: { type: 'string', description: 'Thread identifier' },
userId: { type: 'string', description: 'User identifier' },
uuid: { type: 'string', description: 'Internal UUID' },
createdAt: { type: 'string', description: 'Creation timestamp' },
updatedAt: { type: 'string', description: 'Update timestamp' },
threads: { type: 'json', description: 'Array of threads' },
deleted: { type: 'boolean', description: 'Deletion status' },
messages: { type: 'json', description: 'Message data' },
messageIds: { type: 'json', description: 'Message identifiers' },
context: { type: 'string', description: 'User context string' },
facts: { type: 'json', description: 'Extracted facts' },
entities: { type: 'json', description: 'Extracted entities' },
summary: { type: 'string', description: 'Conversation summary' },
batchId: { type: 'string', description: 'Batch operation ID' },
email: { type: 'string', description: 'User email' },
firstName: { type: 'string', description: 'User first name' },
lastName: { type: 'string', description: 'User last name' },
metadata: { type: 'json', description: 'User metadata' },
responseCount: { type: 'number', description: 'Number of items in response' },
totalCount: { type: 'number', description: 'Total number of items available' },
rowCount: { type: 'number', description: 'Number of rows in response' },
},
}
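The block's tools.config.tool maps the selected operation to a tool id, and tools.config.params validates the inputs and throws before any request is made. A hedged sketch of driving those functions directly for the add_messages path; in the app the workflow engine does this, and the values below are placeholders:

import { ZepBlock } from '@/blocks/blocks/zep'

const inputs = {
  operation: 'add_messages',
  apiKey: 'zep-api-key-example', // placeholder, not a real key
  threadId: 'thread-123',
  messages: JSON.stringify([{ role: 'user', content: 'Hello!' }]),
}

// Resolves to 'zep_add_messages' for this operation.
const toolId = ZepBlock.tools.config?.tool?.(inputs)

// Returns { apiKey, threadId, messages: [...] } with messages parsed to an array;
// omitting threadId or passing malformed messages JSON throws 'Zep Block Error: ...'.
const toolParams = ZepBlock.tools.config?.params?.(inputs)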

View File

@@ -87,6 +87,7 @@ import { WorkflowBlock } from '@/blocks/blocks/workflow'
import { WorkflowInputBlock } from '@/blocks/blocks/workflow_input'
import { XBlock } from '@/blocks/blocks/x'
import { YouTubeBlock } from '@/blocks/blocks/youtube'
import { ZepBlock } from '@/blocks/blocks/zep'
import type { BlockConfig } from '@/blocks/types'
// Registry of all available blocks, alphabetically sorted
@@ -127,6 +128,7 @@ export const registry: Record<string, BlockConfig> = {
linkup: LinkupBlock,
mcp: McpBlock,
mem0: Mem0Block,
zep: ZepBlock,
microsoft_excel: MicrosoftExcelBlock,
microsoft_planner: MicrosoftPlannerBlock,
microsoft_teams: MicrosoftTeamsBlock,

View File

@@ -12,7 +12,7 @@ import {
Text,
} from '@react-email/components'
import { getBrandConfig } from '@/lib/branding/branding'
import { env } from '@/lib/env'
import { getBaseUrl } from '@/lib/urls/utils'
import { baseStyles } from './base-styles'
import EmailFooter from './footer'
@@ -30,8 +30,6 @@ interface BatchInvitationEmailProps {
acceptUrl: string
}
const baseUrl = env.NEXT_PUBLIC_APP_URL || 'https://sim.ai'
const getPermissionLabel = (permission: string) => {
switch (permission) {
case 'admin':
@@ -64,6 +62,7 @@ export const BatchInvitationEmail = ({
acceptUrl,
}: BatchInvitationEmailProps) => {
const brand = getBrandConfig()
const baseUrl = getBaseUrl()
const hasWorkspaces = workspaceInvitations.length > 0
return (

View File

@@ -13,7 +13,7 @@ import {
} from '@react-email/components'
import { format } from 'date-fns'
import { getBrandConfig } from '@/lib/branding/branding'
import { getEnv } from '@/lib/env'
import { getBaseUrl } from '@/lib/urls/utils'
import { baseStyles } from './base-styles'
import EmailFooter from './footer'
@@ -24,15 +24,15 @@ interface EnterpriseSubscriptionEmailProps {
createdDate?: Date
}
const baseUrl = getEnv('NEXT_PUBLIC_APP_URL') || 'https://sim.ai'
export const EnterpriseSubscriptionEmail = ({
userName = 'Valued User',
userEmail = '',
loginLink = `${baseUrl}/login`,
loginLink,
createdDate = new Date(),
}: EnterpriseSubscriptionEmailProps) => {
const brand = getBrandConfig()
const baseUrl = getBaseUrl()
const effectiveLoginLink = loginLink || `${baseUrl}/login`
return (
<Html>
@@ -75,7 +75,7 @@ export const EnterpriseSubscriptionEmail = ({
in and start exploring your new Enterprise features:
</Text>
<Link href={loginLink} style={{ textDecoration: 'none' }}>
<Link href={effectiveLoginLink} style={{ textDecoration: 'none' }}>
<Text style={baseStyles.button}>Access Your Enterprise Account</Text>
</Link>

View File

@@ -1,7 +1,7 @@
import { Container, Img, Link, Section, Text } from '@react-email/components'
import { getBrandConfig } from '@/lib/branding/branding'
import { getEnv } from '@/lib/env'
import { isHosted } from '@/lib/environment'
import { getBaseUrl } from '@/lib/urls/utils'
interface UnsubscribeOptions {
unsubscribeToken?: string
@@ -13,10 +13,7 @@ interface EmailFooterProps {
unsubscribe?: UnsubscribeOptions
}
export const EmailFooter = ({
baseUrl = getEnv('NEXT_PUBLIC_APP_URL') || 'https://sim.ai',
unsubscribe,
}: EmailFooterProps) => {
export const EmailFooter = ({ baseUrl = getBaseUrl(), unsubscribe }: EmailFooterProps) => {
const brand = getBrandConfig()
return (

View File

@@ -12,7 +12,7 @@ import {
} from '@react-email/components'
import { format } from 'date-fns'
import { getBrandConfig } from '@/lib/branding/branding'
import { getEnv } from '@/lib/env'
import { getBaseUrl } from '@/lib/urls/utils'
import { baseStyles } from './base-styles'
import EmailFooter from './footer'
@@ -23,8 +23,6 @@ interface HelpConfirmationEmailProps {
submittedDate?: Date
}
const baseUrl = getEnv('NEXT_PUBLIC_APP_URL') || 'https://sim.ai'
const getTypeLabel = (type: string) => {
switch (type) {
case 'bug':
@@ -47,6 +45,7 @@ export const HelpConfirmationEmail = ({
submittedDate = new Date(),
}: HelpConfirmationEmailProps) => {
const brand = getBrandConfig()
const baseUrl = getBaseUrl()
const typeLabel = getTypeLabel(type)
return (

View File

@@ -13,8 +13,8 @@ import {
} from '@react-email/components'
import { format } from 'date-fns'
import { getBrandConfig } from '@/lib/branding/branding'
import { getEnv } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { getBaseUrl } from '@/lib/urls/utils'
import { baseStyles } from './base-styles'
import EmailFooter from './footer'
@@ -26,8 +26,6 @@ interface InvitationEmailProps {
updatedDate?: Date
}
const baseUrl = getEnv('NEXT_PUBLIC_APP_URL') || 'https://sim.ai'
const logger = createLogger('InvitationEmail')
export const InvitationEmail = ({
@@ -38,6 +36,7 @@ export const InvitationEmail = ({
updatedDate = new Date(),
}: InvitationEmailProps) => {
const brand = getBrandConfig()
const baseUrl = getBaseUrl()
// Extract invitation ID or token from inviteLink if present
let enhancedLink = inviteLink

View File

@@ -11,7 +11,7 @@ import {
Text,
} from '@react-email/components'
import { getBrandConfig } from '@/lib/branding/branding'
import { getEnv } from '@/lib/env'
import { getBaseUrl } from '@/lib/urls/utils'
import { baseStyles } from './base-styles'
import EmailFooter from './footer'
@@ -22,8 +22,6 @@ interface OTPVerificationEmailProps {
chatTitle?: string
}
const baseUrl = getEnv('NEXT_PUBLIC_APP_URL') || 'https://sim.ai'
const getSubjectByType = (type: string, brandName: string, chatTitle?: string) => {
switch (type) {
case 'sign-in':
@@ -46,6 +44,7 @@ export const OTPVerificationEmail = ({
chatTitle,
}: OTPVerificationEmailProps) => {
const brand = getBrandConfig()
const baseUrl = getBaseUrl()
// Get a message based on the type
const getMessage = () => {

View File

@@ -14,7 +14,7 @@ import {
} from '@react-email/components'
import EmailFooter from '@/components/emails/footer'
import { getBrandConfig } from '@/lib/branding/branding'
import { getEnv } from '@/lib/env'
import { getBaseUrl } from '@/lib/urls/utils'
import { baseStyles } from './base-styles'
interface PlanWelcomeEmailProps {
@@ -31,7 +31,7 @@ export function PlanWelcomeEmail({
createdDate = new Date(),
}: PlanWelcomeEmailProps) {
const brand = getBrandConfig()
const baseUrl = getEnv('NEXT_PUBLIC_APP_URL') || 'https://sim.ai'
const baseUrl = getBaseUrl()
const cta = loginLink || `${baseUrl}/login`
const previewText = `${brand.name}: Your ${planName} plan is active`

View File

@@ -10,6 +10,7 @@ import {
UsageThresholdEmail,
} from '@/components/emails'
import { getBrandConfig } from '@/lib/branding/branding'
import { getBaseUrl } from '@/lib/urls/utils'
export async function renderOTPEmail(
otp: string,
@@ -89,7 +90,7 @@ export async function renderEnterpriseSubscriptionEmail(
userName: string,
userEmail: string
): Promise<string> {
const baseUrl = process.env.NEXT_PUBLIC_APP_URL || 'https://sim.ai'
const baseUrl = getBaseUrl()
const loginLink = `${baseUrl}/login`
return await render(

View File

@@ -13,7 +13,7 @@ import {
} from '@react-email/components'
import { format } from 'date-fns'
import { getBrandConfig } from '@/lib/branding/branding'
import { getEnv } from '@/lib/env'
import { getBaseUrl } from '@/lib/urls/utils'
import { baseStyles } from './base-styles'
import EmailFooter from './footer'
@@ -23,14 +23,13 @@ interface ResetPasswordEmailProps {
updatedDate?: Date
}
const baseUrl = getEnv('NEXT_PUBLIC_APP_URL') || 'https://sim.ai'
export const ResetPasswordEmail = ({
username = '',
resetLink = '',
updatedDate = new Date(),
}: ResetPasswordEmailProps) => {
const brand = getBrandConfig()
const baseUrl = getBaseUrl()
return (
<Html>

View File

@@ -14,7 +14,7 @@ import {
} from '@react-email/components'
import EmailFooter from '@/components/emails/footer'
import { getBrandConfig } from '@/lib/branding/branding'
import { getEnv } from '@/lib/env'
import { getBaseUrl } from '@/lib/urls/utils'
import { baseStyles } from './base-styles'
interface UsageThresholdEmailProps {
@@ -37,7 +37,7 @@ export function UsageThresholdEmail({
updatedDate = new Date(),
}: UsageThresholdEmailProps) {
const brand = getBrandConfig()
const baseUrl = getEnv('NEXT_PUBLIC_APP_URL') || 'https://sim.ai'
const baseUrl = getBaseUrl()
const previewText = `${brand.name}: You're at ${percentUsed}% of your ${planName} monthly budget`

View File

@@ -12,8 +12,8 @@ import {
Text,
} from '@react-email/components'
import { getBrandConfig } from '@/lib/branding/branding'
import { getEnv } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { getBaseUrl } from '@/lib/urls/utils'
import { baseStyles } from './base-styles'
import EmailFooter from './footer'
@@ -25,14 +25,13 @@ interface WorkspaceInvitationEmailProps {
invitationLink?: string
}
const baseUrl = getEnv('NEXT_PUBLIC_APP_URL') || 'https://sim.ai'
export const WorkspaceInvitationEmail = ({
workspaceName = 'Workspace',
inviterName = 'Someone',
invitationLink = '',
}: WorkspaceInvitationEmailProps) => {
const brand = getBrandConfig()
const baseUrl = getBaseUrl()
// Extract token from the link to ensure we're using the correct format
let enhancedLink = invitationLink
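All of the email templates above follow the same migration: the module-level `const baseUrl = env.NEXT_PUBLIC_APP_URL || 'https://sim.ai'` is dropped and the URL is resolved inside the component via `getBaseUrl()`. A minimal sketch of the resulting shape (the component name and prop below are invented for illustration):

```tsx
import { Link, Text } from '@react-email/components'
import { getBaseUrl } from '@/lib/urls/utils'

interface ExampleEmailProps {
  invitationId?: string // hypothetical prop, for illustration only
}

export const ExampleEmail = ({ invitationId = '' }: ExampleEmailProps) => {
  // Resolved at render time rather than at module load, so the value reflects
  // the current environment (and can be mocked in tests).
  const baseUrl = getBaseUrl()
  const acceptUrl = `${baseUrl}/invite/${invitationId}`

  return (
    <Link href={acceptUrl} style={{ textDecoration: 'none' }}>
      <Text>Accept invitation</Text>
    </Link>
  )
}
```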

View File

@@ -3746,3 +3746,32 @@ export const GoogleVaultIcon = (props: SVGProps<SVGSVGElement>) => (
/>
</svg>
)
export function ZepIcon(props: SVGProps<SVGSVGElement>) {
return (
<svg
{...props}
xmlns='http://www.w3.org/2000/svg'
viewBox='0 0 233 196'
width='96px'
height='96px'
>
<path
d='m231.34,108.7l-1.48-1.55h-10.26l3.59-75.86-14.8-.45-2.77,49.31c-59.6-3.24-119.33-3.24-178.92-.02l-1.73-64.96-14.8.45,2.5,91.53H2.16l-1.41,1.47c-1.55,16.23-.66,32.68,2.26,48.89h10.83l.18,1.27c.67,19.34,16.1,34.68,35.9,34.68s44.86-.92,66.12-.92,46.56.92,65.95.92,35.19-15.29,35.9-34.61l.16-1.34h11.02c2.91-16.19,3.81-32.61,2.26-48.81Zm-158.23,58.01c-17.27,0-30.25-13.78-30.25-29.78s12.99-29.78,30.25-29.78,29.62,13.94,29.62,29.94-12.35,29.62-29.62,29.62Zm86.51,0c-17.27,0-30.25-13.78-30.25-29.78s12.99-29.78,30.25-29.78,29.62,13.94,29.62,29.94-12.35,29.62-29.62,29.62Z'
fill='#FF1493'
/>
<polygon
points='111.77 22.4 93.39 49.97 93.52 50.48 185.88 38.51 190.95 27.68 114.32 36.55 117.7 31.48 117.7 31.47 138.38 .49 138.25 0 47.67 11.6 42.85 22.27 118.34 12.61 111.77 22.4'
fill='#FF1493'
/>
<path
d='m72.97,121.47c-8.67,0-15.73,6.93-15.73,15.46s7.06,15.46,15.73,15.46,15.37-6.75,15.37-15.37-6.75-15.55-15.37-15.55Z'
fill='#FF1493'
/>
<path
d='m159.48,121.47c-8.67,0-15.73,6.93-15.73,15.46s7.06,15.46,15.73,15.46,15.37-6.75,15.37-15.37-6.75-15.55-15.37-15.55Z'
fill='#FF1493'
/>
</svg>
)
}

View File

@@ -453,6 +453,17 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
blockTags = [normalizedBlockName]
}
}
} else if (sourceBlock.type === 'api_trigger' || sourceBlock.type === 'input_trigger') {
// Handle API trigger and Input Form trigger with inputFormat
const inputFormatValue = mergedSubBlocks?.inputFormat?.value
if (inputFormatValue && Array.isArray(inputFormatValue) && inputFormatValue.length > 0) {
blockTags = inputFormatValue
.filter((field: { name?: string }) => field.name && field.name.trim() !== '')
.map((field: { name: string }) => `${normalizedBlockName}.${field.name}`)
} else {
blockTags = []
}
} else {
blockTags = [normalizedBlockName]
}
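To make the new branch concrete, a rough sketch of the tag-building logic for trigger blocks with an `inputFormat` (the type and sample values are invented; only the filtering and mapping mirror the code above):

```ts
interface InputFormatField {
  name?: string
}

// Mirrors the branch above: one tag per non-empty field name, prefixed with the block name.
function buildTriggerTags(normalizedBlockName: string, inputFormat: unknown): string[] {
  if (Array.isArray(inputFormat) && inputFormat.length > 0) {
    return (inputFormat as InputFormatField[])
      .filter((field) => field.name && field.name.trim() !== '')
      .map((field) => `${normalizedBlockName}.${field.name}`)
  }
  return []
}

buildTriggerTags('apitrigger1', [{ name: 'email' }, { name: 'userId' }, { name: ' ' }])
// => ['apitrigger1.email', 'apitrigger1.userId']
```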

View File

@@ -1,4 +1,5 @@
import { beforeEach, describe, expect, it, type Mock, vi } from 'vitest'
import { DEFAULT_EXECUTION_TIMEOUT_MS } from '@/lib/execution/constants'
import { BlockType } from '@/executor/consts'
import { FunctionBlockHandler } from '@/executor/handlers/function/function-handler'
import type { ExecutionContext } from '@/executor/types'
@@ -82,7 +83,7 @@ describe('FunctionBlockHandler', () => {
workflowVariables: {},
blockData: {},
blockNameMapping: {},
_context: { workflowId: mockContext.workflowId },
_context: { workflowId: mockContext.workflowId, workspaceId: mockContext.workspaceId },
}
const expectedOutput: any = { result: 'Success' }
@@ -116,7 +117,7 @@ describe('FunctionBlockHandler', () => {
workflowVariables: {},
blockData: {},
blockNameMapping: {},
_context: { workflowId: mockContext.workflowId },
_context: { workflowId: mockContext.workflowId, workspaceId: mockContext.workspaceId },
}
const expectedOutput: any = { result: 'Success' }
@@ -138,12 +139,12 @@ describe('FunctionBlockHandler', () => {
code: inputs.code,
language: 'javascript',
useLocalVM: true,
timeout: 5000, // Default timeout
timeout: DEFAULT_EXECUTION_TIMEOUT_MS,
envVars: {},
workflowVariables: {},
blockData: {},
blockNameMapping: {},
_context: { workflowId: mockContext.workflowId },
_context: { workflowId: mockContext.workflowId, workspaceId: mockContext.workspaceId },
}
await handler.execute(mockBlock, inputs, mockContext)

View File

@@ -1,3 +1,4 @@
import { DEFAULT_EXECUTION_TIMEOUT_MS } from '@/lib/execution/constants'
import { DEFAULT_CODE_LANGUAGE } from '@/lib/execution/languages'
import { createLogger } from '@/lib/logs/console/logger'
import { BlockType } from '@/executor/consts'
@@ -61,7 +62,7 @@ export class FunctionBlockHandler implements BlockHandler {
code: codeContent,
language: inputs.language || DEFAULT_CODE_LANGUAGE,
useLocalVM: !inputs.remoteExecution,
timeout: inputs.timeout || 5000,
timeout: inputs.timeout || DEFAULT_EXECUTION_TIMEOUT_MS,
envVars: context.environmentVariables || {},
workflowVariables: context.workflowVariables || {},
blockData: blockData, // Pass block data for variable resolution

View File

@@ -9,16 +9,13 @@ import {
} from 'better-auth/client/plugins'
import { createAuthClient } from 'better-auth/react'
import type { auth } from '@/lib/auth'
import { env, getEnv } from '@/lib/env'
import { env } from '@/lib/env'
import { isBillingEnabled } from '@/lib/environment'
import { SessionContext, type SessionHookResult } from '@/lib/session/session-context'
export function getBaseURL() {
return getEnv('NEXT_PUBLIC_APP_URL') || 'http://localhost:3000'
}
import { getBaseUrl } from '@/lib/urls/utils'
export const client = createAuthClient({
baseURL: getBaseURL(),
baseURL: getBaseUrl(),
plugins: [
emailOTPClient(),
genericOAuthClient(),

View File

@@ -22,7 +22,6 @@ import {
renderOTPEmail,
renderPasswordResetEmail,
} from '@/components/emails/render-email'
import { getBaseURL } from '@/lib/auth-client'
import { sendPlanWelcomeEmail } from '@/lib/billing'
import { authorizeSubscriptionReference } from '@/lib/billing/authorization'
import { handleNewUser } from '@/lib/billing/core/usage'
@@ -44,6 +43,7 @@ import { quickValidateEmail } from '@/lib/email/validation'
import { env, isTruthy } from '@/lib/env'
import { isBillingEnabled, isEmailVerificationEnabled } from '@/lib/environment'
import { createLogger } from '@/lib/logs/console/logger'
import { getBaseUrl } from '@/lib/urls/utils'
import { SSO_TRUSTED_PROVIDERS } from './sso/consts'
const logger = createLogger('Auth')
@@ -60,9 +60,9 @@ if (validStripeKey) {
}
export const auth = betterAuth({
baseURL: getBaseURL(),
baseURL: getBaseUrl(),
trustedOrigins: [
env.NEXT_PUBLIC_APP_URL,
getBaseUrl(),
...(env.NEXT_PUBLIC_SOCKET_URL ? [env.NEXT_PUBLIC_SOCKET_URL] : []),
].filter(Boolean),
database: drizzleAdapter(db, {
@@ -319,7 +319,7 @@ export const auth = betterAuth({
tokenUrl: 'https://github.com/login/oauth/access_token',
userInfoUrl: 'https://api.github.com/user',
scopes: ['user:email', 'repo', 'read:user', 'workflow'],
redirectURI: `${env.NEXT_PUBLIC_APP_URL}/api/auth/oauth2/callback/github-repo`,
redirectURI: `${getBaseUrl()}/api/auth/oauth2/callback/github-repo`,
getUserInfo: async (tokens) => {
try {
const profileResponse = await fetch('https://api.github.com/user', {
@@ -400,7 +400,7 @@ export const auth = betterAuth({
'https://www.googleapis.com/auth/gmail.labels',
],
prompt: 'consent',
redirectURI: `${env.NEXT_PUBLIC_APP_URL}/api/auth/oauth2/callback/google-email`,
redirectURI: `${getBaseUrl()}/api/auth/oauth2/callback/google-email`,
},
{
providerId: 'google-calendar',
@@ -414,7 +414,7 @@ export const auth = betterAuth({
'https://www.googleapis.com/auth/calendar',
],
prompt: 'consent',
redirectURI: `${env.NEXT_PUBLIC_APP_URL}/api/auth/oauth2/callback/google-calendar`,
redirectURI: `${getBaseUrl()}/api/auth/oauth2/callback/google-calendar`,
},
{
providerId: 'google-drive',
@@ -428,7 +428,7 @@ export const auth = betterAuth({
'https://www.googleapis.com/auth/drive.file',
],
prompt: 'consent',
redirectURI: `${env.NEXT_PUBLIC_APP_URL}/api/auth/oauth2/callback/google-drive`,
redirectURI: `${getBaseUrl()}/api/auth/oauth2/callback/google-drive`,
},
{
providerId: 'google-docs',
@@ -442,7 +442,7 @@ export const auth = betterAuth({
'https://www.googleapis.com/auth/drive.file',
],
prompt: 'consent',
redirectURI: `${env.NEXT_PUBLIC_APP_URL}/api/auth/oauth2/callback/google-docs`,
redirectURI: `${getBaseUrl()}/api/auth/oauth2/callback/google-docs`,
},
{
providerId: 'google-sheets',
@@ -456,7 +456,7 @@ export const auth = betterAuth({
'https://www.googleapis.com/auth/drive.file',
],
prompt: 'consent',
redirectURI: `${env.NEXT_PUBLIC_APP_URL}/api/auth/oauth2/callback/google-sheets`,
redirectURI: `${getBaseUrl()}/api/auth/oauth2/callback/google-sheets`,
},
{
@@ -471,7 +471,7 @@ export const auth = betterAuth({
'https://www.googleapis.com/auth/forms.responses.readonly',
],
prompt: 'consent',
redirectURI: `${env.NEXT_PUBLIC_APP_URL}/api/auth/oauth2/callback/google-forms`,
redirectURI: `${getBaseUrl()}/api/auth/oauth2/callback/google-forms`,
},
{
@@ -487,7 +487,7 @@ export const auth = betterAuth({
'https://www.googleapis.com/auth/devstorage.read_only',
],
prompt: 'consent',
redirectURI: `${env.NEXT_PUBLIC_APP_URL}/api/auth/oauth2/callback/google-vault`,
redirectURI: `${getBaseUrl()}/api/auth/oauth2/callback/google-vault`,
},
{
@@ -517,7 +517,7 @@ export const auth = betterAuth({
accessType: 'offline',
authentication: 'basic',
pkce: true,
redirectURI: `${env.NEXT_PUBLIC_APP_URL}/api/auth/oauth2/callback/microsoft-teams`,
redirectURI: `${getBaseUrl()}/api/auth/oauth2/callback/microsoft-teams`,
},
{
@@ -532,7 +532,7 @@ export const auth = betterAuth({
accessType: 'offline',
authentication: 'basic',
pkce: true,
redirectURI: `${env.NEXT_PUBLIC_APP_URL}/api/auth/oauth2/callback/microsoft-excel`,
redirectURI: `${getBaseUrl()}/api/auth/oauth2/callback/microsoft-excel`,
},
{
providerId: 'microsoft-planner',
@@ -554,7 +554,7 @@ export const auth = betterAuth({
accessType: 'offline',
authentication: 'basic',
pkce: true,
redirectURI: `${env.NEXT_PUBLIC_APP_URL}/api/auth/oauth2/callback/microsoft-planner`,
redirectURI: `${getBaseUrl()}/api/auth/oauth2/callback/microsoft-planner`,
},
{
@@ -578,7 +578,7 @@ export const auth = betterAuth({
accessType: 'offline',
authentication: 'basic',
pkce: true,
redirectURI: `${env.NEXT_PUBLIC_APP_URL}/api/auth/oauth2/callback/outlook`,
redirectURI: `${getBaseUrl()}/api/auth/oauth2/callback/outlook`,
},
{
@@ -593,7 +593,7 @@ export const auth = betterAuth({
accessType: 'offline',
authentication: 'basic',
pkce: true,
redirectURI: `${env.NEXT_PUBLIC_APP_URL}/api/auth/oauth2/callback/onedrive`,
redirectURI: `${getBaseUrl()}/api/auth/oauth2/callback/onedrive`,
},
{
@@ -616,7 +616,7 @@ export const auth = betterAuth({
accessType: 'offline',
authentication: 'basic',
pkce: true,
redirectURI: `${env.NEXT_PUBLIC_APP_URL}/api/auth/oauth2/callback/sharepoint`,
redirectURI: `${getBaseUrl()}/api/auth/oauth2/callback/sharepoint`,
},
{
@@ -628,7 +628,7 @@ export const auth = betterAuth({
userInfoUrl: 'https://dummy-not-used.wealthbox.com', // Dummy URL since no user info endpoint exists
scopes: ['login', 'data'],
responseType: 'code',
redirectURI: `${env.NEXT_PUBLIC_APP_URL}/api/auth/oauth2/callback/wealthbox`,
redirectURI: `${getBaseUrl()}/api/auth/oauth2/callback/wealthbox`,
getUserInfo: async (tokens) => {
try {
logger.info('Creating Wealthbox user profile from token data')
@@ -662,7 +662,7 @@ export const auth = betterAuth({
scopes: ['database.read', 'database.write', 'projects.read'],
responseType: 'code',
pkce: true,
redirectURI: `${env.NEXT_PUBLIC_APP_URL}/api/auth/oauth2/callback/supabase`,
redirectURI: `${getBaseUrl()}/api/auth/oauth2/callback/supabase`,
getUserInfo: async (tokens) => {
try {
logger.info('Creating Supabase user profile from token data')
@@ -715,7 +715,7 @@ export const auth = betterAuth({
responseType: 'code',
prompt: 'consent',
authentication: 'basic',
redirectURI: `${env.NEXT_PUBLIC_APP_URL}/api/auth/oauth2/callback/x`,
redirectURI: `${getBaseUrl()}/api/auth/oauth2/callback/x`,
getUserInfo: async (tokens) => {
try {
const response = await fetch(
@@ -774,7 +774,7 @@ export const auth = betterAuth({
accessType: 'offline',
authentication: 'basic',
prompt: 'consent',
redirectURI: `${env.NEXT_PUBLIC_APP_URL}/api/auth/oauth2/callback/confluence`,
redirectURI: `${getBaseUrl()}/api/auth/oauth2/callback/confluence`,
getUserInfo: async (tokens) => {
try {
const response = await fetch('https://api.atlassian.com/me', {
@@ -824,7 +824,7 @@ export const auth = betterAuth({
accessType: 'offline',
authentication: 'basic',
prompt: 'consent',
redirectURI: `${env.NEXT_PUBLIC_APP_URL}/api/auth/oauth2/callback/discord`,
redirectURI: `${getBaseUrl()}/api/auth/oauth2/callback/discord`,
getUserInfo: async (tokens) => {
try {
const response = await fetch('https://discord.com/api/users/@me', {
@@ -895,7 +895,7 @@ export const auth = betterAuth({
accessType: 'offline',
authentication: 'basic',
prompt: 'consent',
redirectURI: `${env.NEXT_PUBLIC_APP_URL}/api/auth/oauth2/callback/jira`,
redirectURI: `${getBaseUrl()}/api/auth/oauth2/callback/jira`,
getUserInfo: async (tokens) => {
try {
const response = await fetch('https://api.atlassian.com/me', {
@@ -946,7 +946,7 @@ export const auth = betterAuth({
accessType: 'offline',
authentication: 'basic',
prompt: 'consent',
redirectURI: `${env.NEXT_PUBLIC_APP_URL}/api/auth/oauth2/callback/airtable`,
redirectURI: `${getBaseUrl()}/api/auth/oauth2/callback/airtable`,
},
// Notion provider
@@ -963,7 +963,7 @@ export const auth = betterAuth({
accessType: 'offline',
authentication: 'basic',
prompt: 'consent',
redirectURI: `${env.NEXT_PUBLIC_APP_URL}/api/auth/oauth2/callback/notion`,
redirectURI: `${getBaseUrl()}/api/auth/oauth2/callback/notion`,
getUserInfo: async (tokens) => {
try {
const response = await fetch('https://api.notion.com/v1/users/me', {
@@ -1013,7 +1013,7 @@ export const auth = betterAuth({
accessType: 'offline',
authentication: 'basic',
prompt: 'consent',
redirectURI: `${env.NEXT_PUBLIC_APP_URL}/api/auth/oauth2/callback/reddit`,
redirectURI: `${getBaseUrl()}/api/auth/oauth2/callback/reddit`,
getUserInfo: async (tokens) => {
try {
const response = await fetch('https://oauth.reddit.com/api/v1/me', {
@@ -1058,7 +1058,7 @@ export const auth = betterAuth({
tokenUrl: 'https://api.linear.app/oauth/token',
scopes: ['read', 'write'],
responseType: 'code',
redirectURI: `${env.NEXT_PUBLIC_APP_URL}/api/auth/oauth2/callback/linear`,
redirectURI: `${getBaseUrl()}/api/auth/oauth2/callback/linear`,
pkce: true,
prompt: 'consent',
accessType: 'offline',
@@ -1145,7 +1145,7 @@ export const auth = betterAuth({
responseType: 'code',
accessType: 'offline',
prompt: 'consent',
redirectURI: `${env.NEXT_PUBLIC_APP_URL}/api/auth/oauth2/callback/slack`,
redirectURI: `${getBaseUrl()}/api/auth/oauth2/callback/slack`,
getUserInfo: async (tokens) => {
try {
logger.info('Creating Slack bot profile from token data')
@@ -1413,7 +1413,7 @@ export const auth = betterAuth({
try {
const { invitation, organization, inviter } = data
const inviteUrl = `${env.NEXT_PUBLIC_APP_URL}/invite/${invitation.id}`
const inviteUrl = `${getBaseUrl()}/invite/${invitation.id}`
const inviterName = inviter.user?.name || 'A team member'
const html = await renderInvitationEmail(

View File

@@ -9,9 +9,9 @@ import {
getPerUserMinimumLimit,
} from '@/lib/billing/subscriptions/utils'
import type { UserSubscriptionState } from '@/lib/billing/types'
import { env } from '@/lib/env'
import { isProd } from '@/lib/environment'
import { createLogger } from '@/lib/logs/console/logger'
import { getBaseUrl } from '@/lib/urls/utils'
const logger = createLogger('SubscriptionCore')
@@ -303,7 +303,7 @@ export async function sendPlanWelcomeEmail(subscription: any): Promise<void> {
)
const { sendEmail } = await import('@/lib/email/mailer')
const baseUrl = env.NEXT_PUBLIC_APP_URL || 'https://sim.ai'
const baseUrl = getBaseUrl()
const html = await renderPlanWelcomeEmail({
planName: subPlan === 'pro' ? 'Pro' : 'Team',
userName: users[0].name || undefined,

View File

@@ -13,6 +13,7 @@ import { sendEmail } from '@/lib/email/mailer'
import { getEmailPreferences } from '@/lib/email/unsubscribe'
import { isBillingEnabled } from '@/lib/environment'
import { createLogger } from '@/lib/logs/console/logger'
import { getBaseUrl } from '@/lib/urls/utils'
const logger = createLogger('UsageManagement')
@@ -617,7 +618,7 @@ export async function maybeSendUsageThresholdEmail(params: {
if (!(params.percentBefore < 80 && params.percentAfter >= 80)) return
if (params.limit <= 0 || params.currentUsageAfter <= 0) return
const baseUrl = process.env.NEXT_PUBLIC_APP_URL || 'https://sim.ai'
const baseUrl = getBaseUrl()
const ctaLink = `${baseUrl}/workspace?billing=usage`
const sendTo = async (email: string, name?: string) => {
const prefs = await getEmailPreferences(email)

View File

@@ -1,6 +1,6 @@
import type { Metadata } from 'next'
import { getBrandConfig } from '@/lib/branding/branding'
import { env } from '@/lib/env'
import { getBaseUrl } from '@/lib/urls/utils'
/**
* Generate dynamic metadata based on brand configuration
@@ -40,9 +40,7 @@ export function generateBrandedMetadata(override: Partial<Metadata> = {}): Metad
referrer: 'origin-when-cross-origin',
creator: brand.name,
publisher: brand.name,
metadataBase: env.NEXT_PUBLIC_APP_URL
? new URL(env.NEXT_PUBLIC_APP_URL)
: new URL('https://sim.ai'),
metadataBase: new URL(getBaseUrl()),
alternates: {
canonical: '/',
languages: {
@@ -63,7 +61,7 @@ export function generateBrandedMetadata(override: Partial<Metadata> = {}): Metad
openGraph: {
type: 'website',
locale: 'en_US',
url: env.NEXT_PUBLIC_APP_URL || 'https://sim.ai',
url: getBaseUrl(),
title: defaultTitle,
description: summaryFull,
siteName: brand.name,

View File

@@ -43,6 +43,7 @@ vi.mock('@/lib/env', () => ({
vi.mock('@/lib/urls/utils', () => ({
getEmailDomain: vi.fn().mockReturnValue('sim.ai'),
getBaseUrl: vi.fn().mockReturnValue('https://test.sim.ai'),
}))
import { type EmailType, sendBatchEmails, sendEmail } from '@/lib/email/mailer'

View File

@@ -4,6 +4,7 @@ import { generateUnsubscribeToken, isUnsubscribed } from '@/lib/email/unsubscrib
import { getFromEmailAddress } from '@/lib/email/utils'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { getBaseUrl } from '@/lib/urls/utils'
const logger = createLogger('Mailer')
@@ -167,7 +168,7 @@ async function processEmailData(options: EmailOptions): Promise<ProcessedEmailDa
// For arrays, use the first email for unsubscribe (batch emails typically go to similar recipients)
const primaryEmail = Array.isArray(to) ? to[0] : to
const unsubscribeToken = generateUnsubscribeToken(primaryEmail, emailType)
const baseUrl = env.NEXT_PUBLIC_APP_URL || 'https://sim.ai'
const baseUrl = getBaseUrl()
const unsubscribeUrl = `${baseUrl}/unsubscribe?token=${unsubscribeToken}&email=${encodeURIComponent(primaryEmail)}`
headers['List-Unsubscribe'] = `<${unsubscribeUrl}>`

View File

@@ -0,0 +1,10 @@
/**
* Execution timeout constants
*
* These constants define the timeout values for code execution.
* - DEFAULT_EXECUTION_TIMEOUT_MS: The default timeout for executing user code (3 minutes)
* - MAX_EXECUTION_DURATION: The maximum duration for the API route (adds 30s buffer for overhead)
*/
export const DEFAULT_EXECUTION_TIMEOUT_MS = 180000 // 3 minutes (180 seconds)
export const MAX_EXECUTION_DURATION = 210 // 3.5 minutes (210 seconds) - includes buffer for sandbox creation
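A short sketch of how these constants are consumed, based on the handler change earlier in this diff; the `maxDuration` route export is an assumption about Next.js route segment config rather than something shown here:

```ts
import { DEFAULT_EXECUTION_TIMEOUT_MS, MAX_EXECUTION_DURATION } from '@/lib/execution/constants'

// Route-level cap in seconds: the 180s code timeout plus ~30s of sandbox overhead.
export const maxDuration = MAX_EXECUTION_DURATION

// Per-block timeout in milliseconds: fall back to the shared 3-minute default
// when the function block does not set its own timeout.
function resolveTimeoutMs(requestedTimeoutMs?: number): number {
  return requestedTimeoutMs || DEFAULT_EXECUTION_TIMEOUT_MS
}
```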

View File

@@ -1,5 +1,5 @@
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { getBaseUrl } from '@/lib/urls/utils'
import { executeProviderRequest } from '@/providers'
import { getApiKey, getProviderFromModel } from '@/providers/utils'
@@ -41,7 +41,7 @@ async function queryKnowledgeBase(
})
// Call the knowledge base search API directly
const searchUrl = `${env.NEXT_PUBLIC_APP_URL || 'http://localhost:3000'}/api/knowledge/search`
const searchUrl = `${getBaseUrl()}/api/knowledge/search`
const response = await fetch(searchUrl, {
method: 'POST',

View File

@@ -858,6 +858,7 @@ function getProviderAuthConfig(provider: string): ProviderAuthConfig {
clientId,
clientSecret,
useBasicAuth: false,
supportsRefreshTokenRotation: true,
}
}
case 'reddit': {

View File

@@ -223,7 +223,7 @@ describe('Schedule Utilities', () => {
vi.useRealTimers()
})
it.concurrent('should calculate next run for minutes schedule', () => {
it.concurrent('should calculate next run for minutes schedule using Croner', () => {
const scheduleValues = {
scheduleTime: '',
scheduleStartAt: '',
@@ -244,14 +244,14 @@ describe('Schedule Utilities', () => {
expect(nextRun instanceof Date).toBe(true)
expect(nextRun > new Date()).toBe(true)
// Check minute is a multiple of the interval
expect(nextRun.getMinutes() % 15).toBe(0)
// Croner will calculate based on the cron expression */15 * * * *
// The exact minute depends on Croner's calculation
})
it.concurrent('should respect scheduleTime for minutes schedule', () => {
it.concurrent('should handle scheduleStartAt with scheduleTime', () => {
const scheduleValues = {
scheduleTime: '14:30', // Specific start time
scheduleStartAt: '',
scheduleStartAt: '2025-04-15', // Future date
timezone: 'UTC',
minutesInterval: 15,
hourlyMinute: 0,
@@ -265,12 +265,13 @@ describe('Schedule Utilities', () => {
const nextRun = calculateNextRunTime('minutes', scheduleValues)
// Should be 14:30
expect(nextRun.getHours()).toBe(14)
expect(nextRun.getMinutes()).toBe(30)
// Should return the future start date with time
expect(nextRun.getFullYear()).toBe(2025)
expect(nextRun.getMonth()).toBe(3) // April
expect(nextRun.getDate()).toBe(15)
})
it.concurrent('should calculate next run for hourly schedule', () => {
it.concurrent('should calculate next run for hourly schedule using Croner', () => {
const scheduleValues = {
scheduleTime: '',
scheduleStartAt: '',
@@ -287,13 +288,14 @@ describe('Schedule Utilities', () => {
const nextRun = calculateNextRunTime('hourly', scheduleValues)
// Just verify it's a valid future date with the right minute
// Verify it's a valid future date using Croner's calculation
expect(nextRun instanceof Date).toBe(true)
expect(nextRun > new Date()).toBe(true)
// Croner calculates based on cron "30 * * * *"
expect(nextRun.getMinutes()).toBe(30)
})
it.concurrent('should calculate next run for daily schedule', () => {
it.concurrent('should calculate next run for daily schedule using Croner with timezone', () => {
const scheduleValues = {
scheduleTime: '',
scheduleStartAt: '',
@@ -310,88 +312,96 @@ describe('Schedule Utilities', () => {
const nextRun = calculateNextRunTime('daily', scheduleValues)
// Verify it's a future date at exactly 9:00
// Verify it's a future date at exactly 9:00 UTC using Croner
expect(nextRun instanceof Date).toBe(true)
expect(nextRun > new Date()).toBe(true)
expect(nextRun.getHours()).toBe(9)
expect(nextRun.getMinutes()).toBe(0)
expect(nextRun.getUTCHours()).toBe(9)
expect(nextRun.getUTCMinutes()).toBe(0)
})
it.concurrent('should calculate next run for weekly schedule', () => {
const scheduleValues = {
scheduleTime: '',
scheduleStartAt: '',
timezone: 'UTC',
minutesInterval: 15,
hourlyMinute: 0,
dailyTime: [9, 0] as [number, number],
weeklyDay: 1, // Monday
weeklyTime: [10, 0] as [number, number],
monthlyDay: 1,
monthlyTime: [9, 0] as [number, number],
cronExpression: null,
it.concurrent(
'should calculate next run for weekly schedule using Croner with timezone',
() => {
const scheduleValues = {
scheduleTime: '',
scheduleStartAt: '',
timezone: 'UTC',
minutesInterval: 15,
hourlyMinute: 0,
dailyTime: [9, 0] as [number, number],
weeklyDay: 1, // Monday
weeklyTime: [10, 0] as [number, number],
monthlyDay: 1,
monthlyTime: [9, 0] as [number, number],
cronExpression: null,
}
const nextRun = calculateNextRunTime('weekly', scheduleValues)
// Should be next Monday at 10:00 AM UTC using Croner
expect(nextRun.getUTCDay()).toBe(1) // Monday
expect(nextRun.getUTCHours()).toBe(10)
expect(nextRun.getUTCMinutes()).toBe(0)
}
)
const nextRun = calculateNextRunTime('weekly', scheduleValues)
it.concurrent(
'should calculate next run for monthly schedule using Croner with timezone',
() => {
const scheduleValues = {
scheduleTime: '',
scheduleStartAt: '',
timezone: 'UTC',
minutesInterval: 15,
hourlyMinute: 0,
dailyTime: [9, 0] as [number, number],
weeklyDay: 1,
weeklyTime: [9, 0] as [number, number],
monthlyDay: 15,
monthlyTime: [14, 30] as [number, number],
cronExpression: null,
}
// Should be next Monday at 10:00 AM
expect(nextRun.getDay()).toBe(1) // Monday
expect(nextRun.getHours()).toBe(10)
expect(nextRun.getMinutes()).toBe(0)
})
const nextRun = calculateNextRunTime('monthly', scheduleValues)
it.concurrent('should calculate next run for monthly schedule', () => {
const scheduleValues = {
scheduleTime: '',
scheduleStartAt: '',
timezone: 'UTC',
minutesInterval: 15,
hourlyMinute: 0,
dailyTime: [9, 0] as [number, number],
weeklyDay: 1,
weeklyTime: [9, 0] as [number, number],
monthlyDay: 15,
monthlyTime: [14, 30] as [number, number],
cronExpression: null,
// Current date is 2025-04-12 12:00, so next run should be 2025-04-15 14:30 UTC using Croner
expect(nextRun.getFullYear()).toBe(2025)
expect(nextRun.getUTCMonth()).toBe(3) // April (0-indexed)
expect(nextRun.getUTCDate()).toBe(15)
expect(nextRun.getUTCHours()).toBe(14)
expect(nextRun.getUTCMinutes()).toBe(30)
}
)
const nextRun = calculateNextRunTime('monthly', scheduleValues)
it.concurrent(
'should work with lastRanAt parameter (though Croner calculates independently)',
() => {
const scheduleValues = {
scheduleTime: '',
scheduleStartAt: '',
timezone: 'UTC',
minutesInterval: 15,
hourlyMinute: 0,
dailyTime: [9, 0] as [number, number],
weeklyDay: 1,
weeklyTime: [9, 0] as [number, number],
monthlyDay: 1,
monthlyTime: [9, 0] as [number, number],
cronExpression: null,
}
// Current date is 2025-04-12 12:00, so next run should be 2025-04-15 14:30
expect(nextRun.getFullYear()).toBe(2025)
expect(nextRun.getMonth()).toBe(3) // April (0-indexed)
expect(nextRun.getDate()).toBe(15)
expect(nextRun.getHours()).toBe(14)
expect(nextRun.getMinutes()).toBe(30)
})
// Last ran 10 minutes ago
const lastRanAt = new Date()
lastRanAt.setMinutes(lastRanAt.getMinutes() - 10)
it.concurrent('should consider lastRanAt for better interval calculation', () => {
const scheduleValues = {
scheduleTime: '',
scheduleStartAt: '',
timezone: 'UTC',
minutesInterval: 15,
hourlyMinute: 0,
dailyTime: [9, 0] as [number, number],
weeklyDay: 1,
weeklyTime: [9, 0] as [number, number],
monthlyDay: 1,
monthlyTime: [9, 0] as [number, number],
cronExpression: null,
const nextRun = calculateNextRunTime('minutes', scheduleValues, lastRanAt)
// With Croner, it calculates based on cron expression, not lastRanAt
// Just verify we get a future date
expect(nextRun instanceof Date).toBe(true)
expect(nextRun > new Date()).toBe(true)
}
// Last ran 10 minutes ago
const lastRanAt = new Date()
lastRanAt.setMinutes(lastRanAt.getMinutes() - 10)
const nextRun = calculateNextRunTime('minutes', scheduleValues, lastRanAt)
// Should be 5 minutes from the last run (15 min interval)
const expectedNextRun = new Date(lastRanAt)
expectedNextRun.setMinutes(expectedNextRun.getMinutes() + 15)
expect(nextRun.getMinutes()).toBe(expectedNextRun.getMinutes())
})
)
it.concurrent('should respect future scheduleStartAt date', () => {
const scheduleValues = {
@@ -453,6 +463,12 @@ describe('Schedule Utilities', () => {
})
})
it.concurrent('should validate cron expressions with timezone', () => {
const result = validateCronExpression('0 9 * * *', 'America/Los_Angeles')
expect(result.isValid).toBe(true)
expect(result.nextRun).toBeInstanceOf(Date)
})
it.concurrent('should reject invalid cron expressions', () => {
expect(validateCronExpression('invalid')).toEqual({
isValid: false,
@@ -482,27 +498,181 @@ describe('Schedule Utilities', () => {
})
describe('parseCronToHumanReadable', () => {
it.concurrent('should parse common cron patterns', () => {
it.concurrent('should parse common cron patterns using cronstrue', () => {
// cronstrue produces "Every minute" for '* * * * *'
expect(parseCronToHumanReadable('* * * * *')).toBe('Every minute')
// cronstrue produces "Every 15 minutes" for '*/15 * * * *'
expect(parseCronToHumanReadable('*/15 * * * *')).toBe('Every 15 minutes')
expect(parseCronToHumanReadable('30 * * * *')).toBe('Hourly at 30 minutes past the hour')
expect(parseCronToHumanReadable('0 9 * * *')).toBe('Daily at 9:00 AM')
expect(parseCronToHumanReadable('30 14 * * *')).toBe('Daily at 2:30 PM')
expect(parseCronToHumanReadable('0 9 * * 1')).toMatch(/Monday at 9:00 AM/)
expect(parseCronToHumanReadable('30 14 15 * *')).toMatch(/Monthly on the 15th at 2:30 PM/)
// cronstrue produces "At 30 minutes past the hour" for '30 * * * *'
expect(parseCronToHumanReadable('30 * * * *')).toContain('30 minutes past the hour')
// cronstrue produces "At 09:00 AM" for '0 9 * * *'
expect(parseCronToHumanReadable('0 9 * * *')).toContain('09:00 AM')
// cronstrue produces "At 02:30 PM" for '30 14 * * *'
expect(parseCronToHumanReadable('30 14 * * *')).toContain('02:30 PM')
// cronstrue produces "At 09:00 AM, only on Monday" for '0 9 * * 1'
expect(parseCronToHumanReadable('0 9 * * 1')).toContain('Monday')
// cronstrue produces "At 02:30 PM, on day 15 of the month" for '30 14 15 * *'
expect(parseCronToHumanReadable('30 14 15 * *')).toContain('15')
})
it.concurrent('should handle complex patterns', () => {
// Test with various combinations
expect(parseCronToHumanReadable('* */2 * * *')).toMatch(/Runs/)
expect(parseCronToHumanReadable('0 9 * * 1-5')).toMatch(/Runs/)
expect(parseCronToHumanReadable('0 9 1,15 * *')).toMatch(/Runs/)
it.concurrent('should include timezone information when provided', () => {
const resultPT = parseCronToHumanReadable('0 9 * * *', 'America/Los_Angeles')
expect(resultPT).toContain('(PT)')
expect(resultPT).toContain('09:00 AM')
const resultET = parseCronToHumanReadable('30 14 * * *', 'America/New_York')
expect(resultET).toContain('(ET)')
expect(resultET).toContain('02:30 PM')
const resultUTC = parseCronToHumanReadable('0 12 * * *', 'UTC')
expect(resultUTC).not.toContain('(UTC)') // UTC should not be explicitly shown
})
it.concurrent('should return a fallback for unrecognized patterns', () => {
const result = parseCronToHumanReadable('*/10 */6 31 2 *') // Invalid (Feb 31)
// Just check that we get something back that's not empty
expect(result.length).toBeGreaterThan(5)
it.concurrent('should handle complex patterns with cronstrue', () => {
// cronstrue can handle complex patterns better than our custom parser
const result1 = parseCronToHumanReadable('0 9 * * 1-5')
expect(result1).toContain('Monday through Friday')
const result2 = parseCronToHumanReadable('0 9 1,15 * *')
expect(result2).toContain('day 1 and 15')
})
it.concurrent('should return a fallback for invalid patterns', () => {
const result = parseCronToHumanReadable('invalid cron')
// Should fallback to "Schedule: <expression>"
expect(result).toContain('Schedule:')
expect(result).toContain('invalid cron')
})
})
describe('Timezone-aware scheduling with Croner', () => {
it.concurrent('should calculate daily schedule in Pacific Time correctly', () => {
const scheduleValues = {
scheduleTime: '',
scheduleStartAt: '',
timezone: 'America/Los_Angeles',
minutesInterval: 15,
hourlyMinute: 0,
dailyTime: [9, 0] as [number, number], // 9 AM Pacific
weeklyDay: 1,
weeklyTime: [9, 0] as [number, number],
monthlyDay: 1,
monthlyTime: [9, 0] as [number, number],
cronExpression: null,
}
const nextRun = calculateNextRunTime('daily', scheduleValues)
// 9 AM Pacific should be 16:00 or 17:00 UTC depending on DST
// Croner handles this automatically
expect(nextRun instanceof Date).toBe(true)
expect(nextRun > new Date()).toBe(true)
})
it.concurrent('should handle DST transition for schedules', () => {
// Set a date during DST transition in March
vi.useFakeTimers()
vi.setSystemTime(new Date('2025-03-08T10:00:00.000Z')) // Before DST
const scheduleValues = {
scheduleTime: '',
scheduleStartAt: '',
timezone: 'America/New_York',
minutesInterval: 15,
hourlyMinute: 0,
dailyTime: [14, 0] as [number, number], // 2 PM Eastern
weeklyDay: 1,
weeklyTime: [14, 0] as [number, number],
monthlyDay: 1,
monthlyTime: [14, 0] as [number, number],
cronExpression: null,
}
const nextRun = calculateNextRunTime('daily', scheduleValues)
// Croner should handle DST transition correctly
expect(nextRun instanceof Date).toBe(true)
expect(nextRun > new Date()).toBe(true)
vi.useRealTimers()
})
it.concurrent('should calculate weekly schedule in Tokyo timezone', () => {
const scheduleValues = {
scheduleTime: '',
scheduleStartAt: '',
timezone: 'Asia/Tokyo',
minutesInterval: 15,
hourlyMinute: 0,
dailyTime: [9, 0] as [number, number],
weeklyDay: 1, // Monday
weeklyTime: [10, 0] as [number, number], // 10 AM Japan Time
monthlyDay: 1,
monthlyTime: [9, 0] as [number, number],
cronExpression: null,
}
const nextRun = calculateNextRunTime('weekly', scheduleValues)
// Verify it's a valid future date
// Tokyo is UTC+9, so 10 AM JST = 1 AM UTC
expect(nextRun instanceof Date).toBe(true)
expect(nextRun > new Date()).toBe(true)
})
it.concurrent('should handle custom cron with timezone', () => {
const scheduleValues = {
scheduleTime: '',
scheduleStartAt: '',
timezone: 'Europe/London',
minutesInterval: 15,
hourlyMinute: 0,
dailyTime: [9, 0] as [number, number],
weeklyDay: 1,
weeklyTime: [9, 0] as [number, number],
monthlyDay: 1,
monthlyTime: [9, 0] as [number, number],
cronExpression: '30 15 * * *', // 3:30 PM London time
}
const nextRun = calculateNextRunTime('custom', scheduleValues)
// Verify it's a valid future date
expect(nextRun instanceof Date).toBe(true)
expect(nextRun > new Date()).toBe(true)
})
it.concurrent('should handle monthly schedule on last day of month', () => {
vi.useFakeTimers()
vi.setSystemTime(new Date('2025-02-15T12:00:00.000Z'))
const scheduleValues = {
scheduleTime: '',
scheduleStartAt: '',
timezone: 'America/Chicago',
minutesInterval: 15,
hourlyMinute: 0,
dailyTime: [9, 0] as [number, number],
weeklyDay: 1,
weeklyTime: [9, 0] as [number, number],
monthlyDay: 28, // Last day of Feb (non-leap year)
monthlyTime: [12, 0] as [number, number], // Noon Central
cronExpression: null,
}
const nextRun = calculateNextRunTime('monthly', scheduleValues)
// Should calculate Feb 28 at noon Central time
expect(nextRun.getUTCDate()).toBe(28)
expect(nextRun.getUTCMonth()).toBe(1) // February
vi.useRealTimers()
})
})
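The updated assertions above all reduce to one primitive: Croner computing the next occurrence from a cron expression plus an IANA timezone. A standalone sketch of that primitive (the expression and timezone are chosen arbitrarily):

```ts
import { Cron } from 'croner'

// "Every day at 09:00" interpreted in Pacific time; Croner converts it to the
// correct UTC instant and handles DST transitions automatically.
const cron = new Cron('0 9 * * *', { timezone: 'America/Los_Angeles' })
const nextRun = cron.nextRun() // Date | null

if (nextRun) {
  console.log(nextRun.toISOString()) // the next 9:00 AM Pacific, expressed in UTC
}
```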

View File

@@ -1,4 +1,5 @@
import { Cron } from 'croner'
import cronstrue from 'cronstrue'
import { createLogger } from '@/lib/logs/console/logger'
import { formatDateTime } from '@/lib/utils'
@@ -7,9 +8,13 @@ const logger = createLogger('ScheduleUtils')
/**
* Validates a cron expression and returns validation results
* @param cronExpression - The cron expression to validate
* @param timezone - Optional IANA timezone string (e.g., 'America/Los_Angeles'). Defaults to 'UTC'
* @returns Validation result with isValid flag, error message, and next run date
*/
export function validateCronExpression(cronExpression: string): {
export function validateCronExpression(
cronExpression: string,
timezone?: string
): {
isValid: boolean
error?: string
nextRun?: Date
@@ -22,7 +27,8 @@ export function validateCronExpression(cronExpression: string): {
}
try {
const cron = new Cron(cronExpression)
// Validate with timezone if provided for accurate next run calculation
const cron = new Cron(cronExpression, timezone ? { timezone } : undefined)
const nextRun = cron.nextRun()
if (!nextRun) {
@@ -267,6 +273,21 @@ export function createDateWithTimezone(
/**
* Generate cron expression based on schedule type and values
*
* IMPORTANT: The generated cron expressions use local time values (hours/minutes)
* from the user's configured timezone. When used with Croner, pass the timezone
* option to ensure proper scheduling:
*
* Example:
* const cronExpr = generateCronExpression('daily', { dailyTime: [14, 30], timezone: 'America/Los_Angeles' })
* const cron = new Cron(cronExpr, { timezone: 'America/Los_Angeles' })
*
* This will schedule the job at 2:30 PM Pacific Time, which Croner will correctly
* convert to the appropriate UTC time, handling DST transitions automatically.
*
* @param scheduleType - Type of schedule (minutes, hourly, daily, weekly, monthly, custom)
* @param scheduleValues - Object containing schedule configuration including timezone
* @returns Cron expression string representing the schedule in local time
*/
export function generateCronExpression(
scheduleType: string,
@@ -308,6 +329,7 @@ export function generateCronExpression(
/**
* Calculate the next run time based on schedule configuration
* Uses Croner library with timezone support for accurate scheduling across timezones and DST transitions
* @param scheduleType - Type of schedule (minutes, hourly, daily, etc)
* @param scheduleValues - Object with schedule configuration values
* @param lastRanAt - Optional last execution time
@@ -398,270 +420,83 @@ export function calculateNextRunTime(
}
}
// If we have a scheduleTime (but no future scheduleStartAt), use it for today
const scheduleTimeOverride = scheduleValues.scheduleTime
? parseTimeString(scheduleValues.scheduleTime)
: null
// For recurring schedules, use Croner with timezone support
// This ensures proper timezone handling and DST transitions
try {
const cronExpression = generateCronExpression(scheduleType, scheduleValues)
logger.debug(`Using cron expression: ${cronExpression} with timezone: ${timezone}`)
// Create next run date based on the current date
const nextRun = new Date(baseDate)
// Create Croner instance with timezone support
const cron = new Cron(cronExpression, {
timezone,
})
switch (scheduleType) {
case 'minutes': {
const { minutesInterval } = scheduleValues
const nextDate = cron.nextRun()
// If we have a time override, use it
if (scheduleTimeOverride) {
const [hours, minutes] = scheduleTimeOverride
nextRun.setHours(hours, minutes, 0, 0)
// Add intervals until we're in the future
while (nextRun <= new Date()) {
nextRun.setMinutes(nextRun.getMinutes() + minutesInterval)
}
return nextRun
}
// For subsequent runs after lastRanAt
if (lastRanAt) {
const baseTime = new Date(lastRanAt)
nextRun.setTime(baseTime.getTime())
nextRun.setMinutes(nextRun.getMinutes() + minutesInterval, 0, 0)
// Make sure we're in the future
while (nextRun <= new Date()) {
nextRun.setMinutes(nextRun.getMinutes() + minutesInterval)
}
return nextRun
}
// Calculate next boundary for minutes
const now = new Date()
const currentMinutes = now.getMinutes()
const nextIntervalBoundary = Math.ceil(currentMinutes / minutesInterval) * minutesInterval
nextRun.setMinutes(nextIntervalBoundary, 0, 0)
// If we're past this time but haven't reached baseDate, adjust
if (nextRun <= now) {
nextRun.setMinutes(nextRun.getMinutes() + minutesInterval)
}
return nextRun
if (!nextDate) {
throw new Error(`No next run date calculated for cron: ${cronExpression}`)
}
case 'hourly': {
// Use the override time if available, otherwise use hourly config
const [targetHours, _] = scheduleTimeOverride || [nextRun.getHours(), 0]
const targetMinutes = scheduleValues.hourlyMinute
nextRun.setHours(targetHours, targetMinutes, 0, 0)
// If we're in the past relative to now (not baseDate), move to next hour
if (nextRun <= new Date()) {
nextRun.setHours(nextRun.getHours() + 1)
}
return nextRun
}
case 'daily': {
// Use either schedule override or daily time values
const [hours, minutes] = scheduleTimeOverride || scheduleValues.dailyTime
nextRun.setHours(hours, minutes, 0, 0)
// If we're in the past relative to now (not baseDate), move to tomorrow
if (nextRun <= new Date()) {
nextRun.setDate(nextRun.getDate() + 1)
}
return nextRun
}
case 'weekly': {
// Use either schedule override or weekly time values
const [hours, minutes] = scheduleTimeOverride || scheduleValues.weeklyTime
nextRun.setHours(hours, minutes, 0, 0)
// Add days until we reach the target day in the future
while (nextRun.getDay() !== scheduleValues.weeklyDay || nextRun <= new Date()) {
nextRun.setDate(nextRun.getDate() + 1)
}
return nextRun
}
case 'monthly': {
// Use either schedule override or monthly time values
const [hours, minutes] = scheduleTimeOverride || scheduleValues.monthlyTime
const { monthlyDay } = scheduleValues
nextRun.setDate(monthlyDay)
nextRun.setHours(hours, minutes, 0, 0)
// If we're in the past relative to now (not baseDate), move to next month
if (nextRun <= new Date()) {
nextRun.setMonth(nextRun.getMonth() + 1)
}
return nextRun
}
default:
throw new Error(`Unsupported schedule type: ${scheduleType}`)
logger.debug(`Next run calculated: ${nextDate.toISOString()}`)
return nextDate
} catch (error) {
logger.error('Error calculating next run with Croner:', error)
throw new Error(
`Failed to calculate next run time for schedule type ${scheduleType}: ${error instanceof Error ? error.message : String(error)}`
)
}
}
/**
* Converts a cron expression to a human-readable string format
* Helper function to get a friendly timezone abbreviation
*/
export const parseCronToHumanReadable = (cronExpression: string): string => {
// Parse the cron parts
const parts = cronExpression.split(' ')
// Handle standard patterns
if (cronExpression === '* * * * *') {
return 'Every minute'
function getTimezoneAbbreviation(timezone: string): string {
const timezoneMap: Record<string, string> = {
'America/Los_Angeles': 'PT',
'America/Denver': 'MT',
'America/Chicago': 'CT',
'America/New_York': 'ET',
'Europe/London': 'GMT/BST',
'Europe/Paris': 'CET/CEST',
'Asia/Tokyo': 'JST',
'Asia/Singapore': 'SGT',
'Australia/Sydney': 'AEDT/AEST',
UTC: 'UTC',
}
// Every X minutes
if (cronExpression.match(/^\*\/\d+ \* \* \* \*$/)) {
const minutes = cronExpression.split(' ')[0].split('/')[1]
return `Every ${minutes} minutes`
}
return timezoneMap[timezone] || timezone
}
// Daily at specific time
if (cronExpression.match(/^\d+ \d+ \* \* \*$/)) {
const minute = Number.parseInt(parts[0], 10)
const hour = Number.parseInt(parts[1], 10)
const period = hour >= 12 ? 'PM' : 'AM'
const hour12 = hour % 12 || 12
return `Daily at ${hour12}:${minute.toString().padStart(2, '0')} ${period}`
}
// Every hour at specific minute
if (cronExpression.match(/^\d+ \* \* \* \*$/)) {
const minute = parts[0]
return `Hourly at ${minute} minutes past the hour`
}
// Specific day of week at specific time
if (cronExpression.match(/^\d+ \d+ \* \* \d+$/)) {
const minute = Number.parseInt(parts[0], 10)
const hour = Number.parseInt(parts[1], 10)
const dayOfWeek = Number.parseInt(parts[4], 10)
const days = ['Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday']
const day = days[dayOfWeek % 7]
const period = hour >= 12 ? 'PM' : 'AM'
const hour12 = hour % 12 || 12
return `Every ${day} at ${hour12}:${minute.toString().padStart(2, '0')} ${period}`
}
// Specific day of month at specific time
if (cronExpression.match(/^\d+ \d+ \d+ \* \*$/)) {
const minute = Number.parseInt(parts[0], 10)
const hour = Number.parseInt(parts[1], 10)
const dayOfMonth = parts[2]
const period = hour >= 12 ? 'PM' : 'AM'
const hour12 = hour % 12 || 12
const day =
dayOfMonth === '1'
? '1st'
: dayOfMonth === '2'
? '2nd'
: dayOfMonth === '3'
? '3rd'
: `${dayOfMonth}th`
return `Monthly on the ${day} at ${hour12}:${minute.toString().padStart(2, '0')} ${period}`
}
// Weekly at specific time
if (cronExpression.match(/^\d+ \d+ \* \* [0-6]$/)) {
const minute = Number.parseInt(parts[0], 10)
const hour = Number.parseInt(parts[1], 10)
const dayOfWeek = Number.parseInt(parts[4], 10)
const days = ['Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday']
const day = days[dayOfWeek % 7]
const period = hour >= 12 ? 'PM' : 'AM'
const hour12 = hour % 12 || 12
return `Weekly on ${day} at ${hour12}:${minute.toString().padStart(2, '0')} ${period}`
}
// Return a more detailed breakdown if none of the patterns match
/**
* Converts a cron expression to a human-readable string format
* Uses the cronstrue library for accurate parsing of complex cron expressions
*
* @param cronExpression - The cron expression to parse
* @param timezone - Optional IANA timezone string to include in the description
* @returns Human-readable description of the schedule
*/
export const parseCronToHumanReadable = (cronExpression: string, timezone?: string): string => {
try {
const [minute, hour, dayOfMonth, month, dayOfWeek] = parts
let description = 'Runs '
// Use cronstrue for reliable cron expression parsing
const baseDescription = cronstrue.toString(cronExpression, {
use24HourTimeFormat: false, // Use 12-hour format with AM/PM
verbose: false, // Keep it concise
})
// Time component
if (minute === '*' && hour === '*') {
description += 'every minute '
} else if (minute.includes('/') && hour === '*') {
const interval = minute.split('/')[1]
description += `every ${interval} minutes `
} else if (minute !== '*' && hour !== '*') {
const hourVal = Number.parseInt(hour, 10)
const period = hourVal >= 12 ? 'PM' : 'AM'
const hour12 = hourVal % 12 || 12
description += `at ${hour12}:${minute.padStart(2, '0')} ${period} `
// Add timezone information if provided and not UTC
if (timezone && timezone !== 'UTC') {
const tzAbbr = getTimezoneAbbreviation(timezone)
return `${baseDescription} (${tzAbbr})`
}
// Day component
if (dayOfMonth !== '*' && month !== '*') {
const months = [
'January',
'February',
'March',
'April',
'May',
'June',
'July',
'August',
'September',
'October',
'November',
'December',
]
if (month.includes(',')) {
const monthNames = month.split(',').map((m) => months[Number.parseInt(m, 10) - 1])
description += `on day ${dayOfMonth} of ${monthNames.join(', ')}`
} else if (month.includes('/')) {
// Handle interval patterns like */3, 1/3, etc.
const interval = month.split('/')[1]
description += `on day ${dayOfMonth} every ${interval} months`
} else if (month.includes('-')) {
// Handle range patterns like 1-6
const [start, end] = month.split('-').map((m) => Number.parseInt(m, 10))
const startMonth = months[start - 1]
const endMonth = months[end - 1]
description += `on day ${dayOfMonth} from ${startMonth} to ${endMonth}`
} else {
// Handle specific month numbers
const monthIndex = Number.parseInt(month, 10) - 1
const monthName = months[monthIndex]
if (monthName) {
description += `on day ${dayOfMonth} of ${monthName}`
} else {
description += `on day ${dayOfMonth} of month ${month}`
}
}
} else if (dayOfWeek !== '*') {
const days = ['Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday']
if (dayOfWeek.includes(',')) {
const dayNames = dayOfWeek.split(',').map((d) => days[Number.parseInt(d, 10) % 7])
description += `on ${dayNames.join(', ')}`
} else if (dayOfWeek.includes('-')) {
const [start, end] = dayOfWeek.split('-').map((d) => Number.parseInt(d, 10) % 7)
description += `from ${days[start]} to ${days[end]}`
} else {
description += `on ${days[Number.parseInt(dayOfWeek, 10) % 7]}`
}
}
return description.trim()
} catch (_e) {
return `Schedule: ${cronExpression}`
return baseDescription
} catch (error) {
logger.warn('Failed to parse cron expression with cronstrue:', {
cronExpression,
error: error instanceof Error ? error.message : String(error),
})
// Fallback to displaying the raw cron expression
return `Schedule: ${cronExpression}${timezone && timezone !== 'UTC' ? ` (${getTimezoneAbbreviation(timezone)})` : ''}`
}
}
@@ -672,7 +507,8 @@ export const getScheduleInfo = (
cronExpression: string | null,
nextRunAt: string | null,
lastRanAt: string | null,
scheduleType?: string | null
scheduleType?: string | null,
timezone?: string | null
): {
scheduleTiming: string
nextRunFormatted: string | null
@@ -689,7 +525,8 @@ export const getScheduleInfo = (
let scheduleTiming = 'Unknown schedule'
if (cronExpression) {
scheduleTiming = parseCronToHumanReadable(cronExpression)
// Pass timezone to parseCronToHumanReadable for accurate display
scheduleTiming = parseCronToHumanReadable(cronExpression, timezone || undefined)
} else if (scheduleType) {
scheduleTiming = `${scheduleType.charAt(0).toUpperCase() + scheduleType.slice(1)}`
}
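Putting the refactored helpers together, a usage sketch; the import path is assumed, the timezone and schedule values are arbitrary, and the expected label reflects cronstrue's typical output:

```ts
import { Cron } from 'croner'
import {
  generateCronExpression,
  parseCronToHumanReadable,
  validateCronExpression,
} from '@/lib/schedules/utils' // path assumed for illustration

const timezone = 'America/New_York'

// 1) Build a cron expression from the user's local schedule settings (per the doc example above).
const cronExpr = generateCronExpression('daily', { dailyTime: [14, 30] as [number, number], timezone })

// 2) Validate it with the same timezone to get an accurate next run.
const { isValid, nextRun } = validateCronExpression(cronExpr, timezone)

// 3) Describe it for the UI, annotated with a timezone abbreviation, e.g. "At 02:30 PM (ET)".
const label = parseCronToHumanReadable(cronExpr, timezone)

if (isValid && nextRun) {
  // The same expression + timezone can drive actual scheduling.
  const job = new Cron(cronExpr, { timezone }, () => console.log('run'))
  console.log(label, 'next run:', nextRun.toISOString())
  job.stop()
}
```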

View File

@@ -2,25 +2,26 @@ import { getEnv } from '@/lib/env'
import { isProd } from '@/lib/environment'
/**
* Returns the base URL of the application, respecting environment variables for deployment environments
* Returns the base URL of the application from NEXT_PUBLIC_APP_URL
* This ensures webhooks, callbacks, and other integrations always use the correct public URL
* @returns The base URL string (e.g., 'http://localhost:3000' or 'https://example.com')
* @throws Error if NEXT_PUBLIC_APP_URL is not configured
*/
export function getBaseUrl(): string {
if (typeof window !== 'undefined' && window.location?.origin) {
return window.location.origin
}
const baseUrl = getEnv('NEXT_PUBLIC_APP_URL')
if (baseUrl) {
if (baseUrl.startsWith('http://') || baseUrl.startsWith('https://')) {
return baseUrl
}
const protocol = isProd ? 'https://' : 'http://'
return `${protocol}${baseUrl}`
if (!baseUrl) {
throw new Error(
'NEXT_PUBLIC_APP_URL must be configured for webhooks and callbacks to work correctly'
)
}
return 'http://localhost:3000'
if (baseUrl.startsWith('http://') || baseUrl.startsWith('https://')) {
return baseUrl
}
const protocol = isProd ? 'https://' : 'http://'
return `${protocol}${baseUrl}`
}
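In short, after this change the helper either returns a usable public origin or throws; a tiny usage sketch (the invite path is just an example):

```ts
import { getBaseUrl } from '@/lib/urls/utils'

// In the browser this is window.location.origin; on the server it requires
// NEXT_PUBLIC_APP_URL and now throws instead of silently using http://localhost:3000.
const inviteUrl = `${getBaseUrl()}/invite/abc123`
```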
/**

View File

@@ -2,8 +2,8 @@ import { db } from '@sim/db'
import { webhook as webhookTable } from '@sim/db/schema'
import { eq } from 'drizzle-orm'
import type { NextRequest } from 'next/server'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { getBaseUrl } from '@/lib/urls/utils'
import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
const teamsLogger = createLogger('TeamsSubscription')
@@ -71,11 +71,8 @@ export async function createTeamsSubscription(
}
// Build notification URL
const requestOrigin = new URL(request.url).origin
const effectiveOrigin = requestOrigin.includes('localhost')
? env.NEXT_PUBLIC_APP_URL || requestOrigin
: requestOrigin
const notificationUrl = `${effectiveOrigin}/api/webhooks/trigger/${webhook.path}`
// Always use NEXT_PUBLIC_APP_URL to ensure Microsoft Graph can reach the public endpoint
const notificationUrl = `${getBaseUrl()}/api/webhooks/trigger/${webhook.path}`
// Subscribe to the specified chat
const resource = `/chats/${chatId}/messages`
@@ -221,14 +218,7 @@ export async function createTelegramWebhook(
return false
}
if (!env.NEXT_PUBLIC_APP_URL) {
telegramLogger.error(
`[${requestId}] NEXT_PUBLIC_APP_URL not configured, cannot register Telegram webhook`
)
return false
}
const notificationUrl = `${env.NEXT_PUBLIC_APP_URL}/api/webhooks/trigger/${webhook.path}`
const notificationUrl = `${getBaseUrl()}/api/webhooks/trigger/${webhook.path}`
const telegramApiUrl = `https://api.telegram.org/bot${botToken}/setWebhook`
const telegramResponse = await fetch(telegramApiUrl, {

View File

@@ -1,18 +1,12 @@
import { db } from '@sim/db'
import {
apiKey,
permissions,
userStats,
workflow as workflowTable,
workspace,
} from '@sim/db/schema'
import { apiKey, permissions, workflow as workflowTable, workspace } from '@sim/db/schema'
import type { InferSelectModel } from 'drizzle-orm'
import { and, eq } from 'drizzle-orm'
import { NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { getEnv } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import type { PermissionType } from '@/lib/permissions/utils'
import { getBaseUrl } from '@/lib/urls/utils'
import type { ExecutionResult } from '@/executor/types'
import type { WorkflowState } from '@/stores/workflows/workflow/types'
@@ -178,61 +172,19 @@ export async function updateWorkflowRunCounts(workflowId: string, runs = 1) {
throw new Error(`Workflow ${workflowId} not found`)
}
// Get the origin from the environment or use direct DB update as fallback
const origin =
getEnv('NEXT_PUBLIC_APP_URL') || (typeof window !== 'undefined' ? window.location.origin : '')
// Use the API to update stats
const response = await fetch(`${getBaseUrl()}/api/workflows/${workflowId}/stats?runs=${runs}`, {
method: 'POST',
})
if (origin) {
// Use absolute URL with origin
const response = await fetch(`${origin}/api/workflows/${workflowId}/stats?runs=${runs}`, {
method: 'POST',
})
if (!response.ok) {
const error = await response.json()
throw new Error(error.error || 'Failed to update workflow stats')
}
return response.json()
}
logger.warn('No origin available, updating workflow stats directly via DB')
// Update workflow directly through database
await db
.update(workflowTable)
.set({
runCount: (workflow.runCount as number) + runs,
lastRunAt: new Date(),
})
.where(eq(workflowTable.id, workflowId))
// Update user stats if needed
if (workflow.userId) {
const userStatsRecord = await db
.select()
.from(userStats)
.where(eq(userStats.userId, workflow.userId))
.limit(1)
if (userStatsRecord.length === 0) {
console.warn('User stats record not found - should be created during onboarding', {
userId: workflow.userId,
})
return // Skip stats update if record doesn't exist
}
// Update existing record
await db
.update(userStats)
.set({
totalManualExecutions: userStatsRecord[0].totalManualExecutions + runs,
lastActive: new Date(),
})
.where(eq(userStats.userId, workflow.userId))
if (!response.ok) {
const error = await response.json()
throw new Error(error.error || 'Failed to update workflow stats')
}
return { success: true, runsAdded: runs }
return response.json()
} catch (error) {
logger.error('Error updating workflow run counts:', error)
logger.error(`Error updating workflow stats for ${workflowId}`, error)
throw error
}
}
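Read without the removed lines, the stats update now reduces to a single API round trip. A sketch of the surviving code path, with the direct-DB fallback gone:

const response = await fetch(`${getBaseUrl()}/api/workflows/${workflowId}/stats?runs=${runs}`, {
  method: 'POST',
})
if (!response.ok) {
  const error = await response.json()
  throw new Error(error.error || 'Failed to update workflow stats')
}
return response.json()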

View File

@@ -3,6 +3,7 @@ import { Server } from 'socket.io'
import { env } from '@/lib/env'
import { isProd } from '@/lib/environment'
import { createLogger } from '@/lib/logs/console/logger'
import { getBaseUrl } from '@/lib/urls/utils'
const logger = createLogger('SocketIOConfig')
@@ -11,7 +12,7 @@ const logger = createLogger('SocketIOConfig')
*/
function getAllowedOrigins(): string[] {
const allowedOrigins = [
env.NEXT_PUBLIC_APP_URL,
getBaseUrl(),
'http://localhost:3000',
'http://localhost:3001',
...(env.ALLOWED_ORIGINS?.split(',') || []),

View File

@@ -15,16 +15,30 @@ const updateURL = (params: URLSearchParams) => {
window.history.replaceState({}, '', url)
}
const DEFAULT_TIME_RANGE: TimeRange = 'Past 12 hours'
const parseTimeRangeFromURL = (value: string | null): TimeRange => {
switch (value) {
case 'past-30-minutes':
return 'Past 30 minutes'
case 'past-hour':
return 'Past hour'
case 'past-6-hours':
return 'Past 6 hours'
case 'past-12-hours':
return 'Past 12 hours'
case 'past-24-hours':
return 'Past 24 hours'
case 'past-3-days':
return 'Past 3 days'
case 'past-7-days':
return 'Past 7 days'
case 'past-14-days':
return 'Past 14 days'
case 'past-30-days':
return 'Past 30 days'
default:
return 'All time'
return DEFAULT_TIME_RANGE
}
}
@@ -51,8 +65,20 @@ const timeRangeToURL = (timeRange: TimeRange): string => {
return 'past-30-minutes'
case 'Past hour':
return 'past-hour'
case 'Past 6 hours':
return 'past-6-hours'
case 'Past 12 hours':
return 'past-12-hours'
case 'Past 24 hours':
return 'past-24-hours'
case 'Past 3 days':
return 'past-3-days'
case 'Past 7 days':
return 'past-7-days'
case 'Past 14 days':
return 'past-14-days'
case 'Past 30 days':
return 'past-30-days'
default:
return 'all-time'
}
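The two mappings are intended to be inverses of each other, with unknown slugs falling back to the new default. For example (illustrative values):

timeRangeToURL('Past 6 hours')        // 'past-6-hours'
parseTimeRangeFromURL('past-6-hours') // 'Past 6 hours'
parseTimeRangeFromURL('garbage')      // 'Past 12 hours' (DEFAULT_TIME_RANGE, previously 'All time')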
@@ -61,7 +87,8 @@ const timeRangeToURL = (timeRange: TimeRange): string => {
export const useFilterStore = create<FilterState>((set, get) => ({
logs: [],
workspaceId: '',
timeRange: 'All time',
viewMode: 'logs',
timeRange: DEFAULT_TIME_RANGE,
level: 'all',
workflowIds: [],
folderIds: [],
@@ -86,6 +113,8 @@ export const useFilterStore = create<FilterState>((set, get) => ({
setWorkspaceId: (workspaceId) => set({ workspaceId }),
setViewMode: (viewMode) => set({ viewMode }),
setTimeRange: (timeRange) => {
set({ timeRange })
get().resetPagination()
@@ -230,7 +259,7 @@ export const useFilterStore = create<FilterState>((set, get) => ({
const params = new URLSearchParams()
// Only add non-default values to keep URL clean
if (timeRange !== 'All time') {
if (timeRange !== DEFAULT_TIME_RANGE) {
params.set('timeRange', timeRangeToURL(timeRange))
}
@@ -298,9 +327,27 @@ export const useFilterStore = create<FilterState>((set, get) => ({
case 'Past hour':
startDate = new Date(now.getTime() - 60 * 60 * 1000)
break
case 'Past 6 hours':
startDate = new Date(now.getTime() - 6 * 60 * 60 * 1000)
break
case 'Past 12 hours':
startDate = new Date(now.getTime() - 12 * 60 * 60 * 1000)
break
case 'Past 24 hours':
startDate = new Date(now.getTime() - 24 * 60 * 60 * 1000)
break
case 'Past 3 days':
startDate = new Date(now.getTime() - 3 * 24 * 60 * 60 * 1000)
break
case 'Past 7 days':
startDate = new Date(now.getTime() - 7 * 24 * 60 * 60 * 1000)
break
case 'Past 14 days':
startDate = new Date(now.getTime() - 14 * 24 * 60 * 60 * 1000)
break
case 'Past 30 days':
startDate = new Date(now.getTime() - 30 * 24 * 60 * 60 * 1000)
break
default:
startDate = new Date(0)
}
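Each case is just a millisecond offset from the current time; for instance, 'Past 3 days' resolves to:

// 3 days * 24 h * 60 min * 60 s * 1000 ms = 259_200_000 ms
const startDate = new Date(Date.now() - 3 * 24 * 60 * 60 * 1000)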

View File

@@ -151,7 +151,17 @@ export interface LogsResponse {
totalPages: number
}
export type TimeRange = 'Past 30 minutes' | 'Past hour' | 'Past 24 hours' | 'All time'
export type TimeRange =
| 'Past 30 minutes'
| 'Past hour'
| 'Past 6 hours'
| 'Past 12 hours'
| 'Past 24 hours'
| 'Past 3 days'
| 'Past 7 days'
| 'Past 14 days'
| 'Past 30 days'
| 'All time'
export type LogLevel = 'error' | 'info' | 'all'
export type TriggerType = 'chat' | 'api' | 'webhook' | 'manual' | 'schedule' | 'all'
@@ -162,6 +172,9 @@ export interface FilterState {
// Workspace context
workspaceId: string
// View mode
viewMode: 'logs' | 'dashboard'
// Filter states
timeRange: TimeRange
level: LogLevel
@@ -185,6 +198,7 @@ export interface FilterState {
// Actions
setLogs: (logs: WorkflowLog[], append?: boolean) => void
setWorkspaceId: (workspaceId: string) => void
setViewMode: (viewMode: 'logs' | 'dashboard') => void
setTimeRange: (timeRange: TimeRange) => void
setLevel: (level: LogLevel) => void
setWorkflowIds: (workflowIds: string[]) => void

View File

@@ -21,6 +21,11 @@ describe('Console Store', () => {
isOpen: false,
})
vi.clearAllMocks()
// Clear localStorage mock
if (global.localStorage) {
vi.mocked(global.localStorage.getItem).mockReturnValue(null)
vi.mocked(global.localStorage.setItem).mockClear()
}
})
describe('addConsole', () => {

View File

@@ -1,265 +0,0 @@
import type { StateCreator } from 'zustand'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import type { WorkflowState, WorkflowStore } from '@/stores/workflows/workflow/types'
interface HistoryEntry {
state: WorkflowState
timestamp: number
action: string
subblockValues: Record<string, Record<string, any>>
}
interface WorkflowHistory {
past: HistoryEntry[]
present: HistoryEntry
future: HistoryEntry[]
}
interface HistoryActions {
undo: () => void
redo: () => void
canUndo: () => boolean
canRedo: () => boolean
revertToHistoryState: (index: number) => void
}
// MAX for each individual workflow
const MAX_HISTORY_LENGTH = 20
// Default empty state for consistent initialization
const EMPTY_WORKFLOW_STATE = {
blocks: {},
edges: [] as any[],
loops: {},
parallels: {},
}
// Types for workflow store with history management capabilities
export interface WorkflowStoreWithHistory extends WorkflowStore, HistoryActions {
history: WorkflowHistory
revertToDeployedState: (deployedState: WorkflowState) => void
}
// Higher-order store middleware that adds undo/redo functionality
export const withHistory = (
config: StateCreator<WorkflowStoreWithHistory>
): StateCreator<WorkflowStoreWithHistory> => {
return (set, get, api) => {
// Initialize store with history tracking
const initialState = config(set, get, api)
const initialHistoryEntry: HistoryEntry = {
state: {
blocks: initialState.blocks,
edges: initialState.edges,
loops: initialState.loops,
parallels: initialState.parallels,
},
timestamp: Date.now(),
action: 'Initial state',
subblockValues: {}, // Add storage for subblock values
}
return {
...initialState,
history: {
past: [],
present: initialHistoryEntry,
future: [],
},
// Check if undo operation is available
canUndo: () => get().history.past.length > 0,
// Check if redo operation is available
canRedo: () => get().history.future.length > 0,
// Restore previous state from history
undo: () => {
const { history, ...state } = get()
if (history.past.length === 0) return
const previous = history.past[history.past.length - 1]
const newPast = history.past.slice(0, history.past.length - 1)
// Get active workflow ID for subblock handling
const activeWorkflowId = useWorkflowRegistry.getState().activeWorkflowId
if (!activeWorkflowId) return
// Apply the state change
set({
...state,
...previous.state,
history: {
past: newPast,
present: previous,
future: [history.present, ...history.future],
},
lastSaved: Date.now(),
})
// Restore subblock values from the previous state's snapshot
if (previous.subblockValues && activeWorkflowId) {
// Update the subblock store with the saved values
useSubBlockStore.setState({
workflowValues: {
...useSubBlockStore.getState().workflowValues,
[activeWorkflowId]: previous.subblockValues,
},
})
}
},
// Restore next state from history
redo: () => {
const { history, ...state } = get()
if (history.future.length === 0) return
const next = history.future[0]
const newFuture = history.future.slice(1)
// Get active workflow ID for subblock handling
const activeWorkflowId = useWorkflowRegistry.getState().activeWorkflowId
if (!activeWorkflowId) return
// Apply the state change
set({
...state,
...next.state,
history: {
past: [...history.past, history.present],
present: next,
future: newFuture,
},
lastSaved: Date.now(),
})
// Restore subblock values from the next state's snapshot
if (next.subblockValues && activeWorkflowId) {
// Update the subblock store with the saved values
useSubBlockStore.setState({
workflowValues: {
...useSubBlockStore.getState().workflowValues,
[activeWorkflowId]: next.subblockValues,
},
})
}
},
// Reset workflow to empty state
clear: () => {
const newState = {
...EMPTY_WORKFLOW_STATE,
history: {
past: [],
present: {
state: { ...EMPTY_WORKFLOW_STATE },
timestamp: Date.now(),
action: 'Clear workflow',
subblockValues: {},
},
future: [],
},
lastSaved: Date.now(),
}
set(newState)
return newState
},
// Jump to specific point in history
revertToHistoryState: (index: number) => {
const { history, ...state } = get()
const allStates = [...history.past, history.present, ...history.future]
const targetState = allStates[index]
if (!targetState) return
// Get active workflow ID for subblock handling
const activeWorkflowId = useWorkflowRegistry.getState().activeWorkflowId
if (!activeWorkflowId) return
const newPast = allStates.slice(0, index)
const newFuture = allStates.slice(index + 1)
set({
...state,
...targetState.state,
history: {
past: newPast,
present: targetState,
future: newFuture,
},
lastSaved: Date.now(),
})
// Restore subblock values from the target state's snapshot
if (targetState.subblockValues && activeWorkflowId) {
// Update the subblock store with the saved values
useSubBlockStore.setState({
workflowValues: {
...useSubBlockStore.getState().workflowValues,
[activeWorkflowId]: targetState.subblockValues,
},
})
}
},
}
}
}
// Create a new history entry with current state snapshot
export const createHistoryEntry = (state: WorkflowState, action: string): HistoryEntry => {
// Get active workflow ID for subblock handling
const activeWorkflowId = useWorkflowRegistry.getState().activeWorkflowId
// Create a deep copy of the state
const stateCopy = {
blocks: { ...state.blocks },
edges: [...state.edges],
loops: { ...state.loops },
parallels: { ...state.parallels },
}
// Capture the current subblock values for this workflow
let subblockValues = {}
if (activeWorkflowId) {
// Get the current subblock values from the store
const currentValues = useSubBlockStore.getState().workflowValues[activeWorkflowId] || {}
// Create a deep copy to ensure we don't have reference issues
subblockValues = JSON.parse(JSON.stringify(currentValues))
}
return {
state: stateCopy,
timestamp: Date.now(),
action,
subblockValues,
}
}
// Add new entry to history and maintain history size limit
export const pushHistory = (
set: (
partial:
| Partial<WorkflowStoreWithHistory>
| ((state: WorkflowStoreWithHistory) => Partial<WorkflowStoreWithHistory>),
replace?: boolean
) => void,
get: () => WorkflowStoreWithHistory,
newState: WorkflowState,
action: string
) => {
const { history } = get()
const newEntry = createHistoryEntry(newState, action)
set({
history: {
past: [...history.past, history.present].slice(-MAX_HISTORY_LENGTH),
present: newEntry,
future: [],
},
lastSaved: Date.now(),
})
}
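For reference, this deleted middleware is what exposed the imperative history API on the workflow store; consumers used it roughly like this (a sketch of the now-removed surface, not of any replacement):

const { undo, redo, canUndo, canRedo } = useWorkflowStore.getState()
if (canUndo()) undo()
if (canRedo()) redo()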

View File

@@ -90,7 +90,6 @@ async function fetchWorkflowsFromDB(workspaceId?: string): Promise<void> {
name,
description,
color,
state,
variables,
createdAt,
marketplaceData,
@@ -186,26 +185,10 @@ function resetWorkflowStores() {
blocks: {},
edges: [],
loops: {},
parallels: {},
isDeployed: false,
deployedAt: undefined,
deploymentStatuses: {}, // Reset deployment statuses map
history: {
past: [],
present: {
state: {
blocks: {},
edges: [],
loops: {},
parallels: {},
isDeployed: false,
deployedAt: undefined,
},
timestamp: Date.now(),
action: 'Initial state',
subblockValues: {},
},
future: [],
},
lastSaved: Date.now(),
})
@@ -438,13 +421,6 @@ export const useWorkflowRegistry = create<WorkflowRegistry>()(
logger.info(`Switching to workflow ${id}`)
// First, sync the current workflow before switching (if there is one)
if (activeWorkflowId && activeWorkflowId !== id) {
// Mark current workflow as dirty and sync (fire and forget)
useWorkflowStore.getState().sync.markDirty()
useWorkflowStore.getState().sync.forceSync()
}
// Fetch workflow state from database
const response = await fetch(`/api/workflows/${id}`, { method: 'GET' })
const workflowData = response.ok ? (await response.json()).data : null
@@ -464,25 +440,6 @@ export const useWorkflowRegistry = create<WorkflowRegistry>()(
lastSaved: Date.now(),
marketplaceData: workflowData.marketplaceData || null,
deploymentStatuses: {},
history: {
past: [],
present: {
state: {
blocks: workflowData.state.blocks || {},
edges: workflowData.state.edges || [],
loops: workflowData.state.loops || {},
parallels: workflowData.state.parallels || {},
isDeployed: workflowData.isDeployed || false,
deployedAt: workflowData.deployedAt
? new Date(workflowData.deployedAt)
: undefined,
},
timestamp: Date.now(),
action: 'Loaded from database (normalized tables)',
subblockValues: {},
},
future: [],
},
}
} else {
// If no state in DB, use empty state - server should have created start block
@@ -494,23 +451,6 @@ export const useWorkflowRegistry = create<WorkflowRegistry>()(
isDeployed: false,
deployedAt: undefined,
deploymentStatuses: {},
history: {
past: [],
present: {
state: {
blocks: {},
edges: [],
loops: {},
parallels: {},
isDeployed: false,
deployedAt: undefined,
},
timestamp: Date.now(),
action: 'Empty initial state - server should provide start block',
subblockValues: {},
},
future: [],
},
lastSaved: Date.now(),
}
@@ -604,72 +544,8 @@ export const useWorkflowRegistry = create<WorkflowRegistry>()(
folderId: createdWorkflow.folderId,
}
let initialState: any
// If this is a marketplace import with existing state
if (options.marketplaceId && options.marketplaceState) {
initialState = {
blocks: options.marketplaceState.blocks || {},
edges: options.marketplaceState.edges || [],
loops: options.marketplaceState.loops || {},
parallels: options.marketplaceState.parallels || {},
isDeployed: false,
deployedAt: undefined,
deploymentStatuses: {}, // Initialize empty deployment statuses map
workspaceId, // Include workspace ID in the state object
history: {
past: [],
present: {
state: {
blocks: options.marketplaceState.blocks || {},
edges: options.marketplaceState.edges || [],
loops: options.marketplaceState.loops || {},
parallels: options.marketplaceState.parallels || {},
isDeployed: false,
deployedAt: undefined,
workspaceId, // Include workspace ID in history
},
timestamp: Date.now(),
action: 'Imported from marketplace',
subblockValues: {},
},
future: [],
},
lastSaved: Date.now(),
}
logger.info(`Created workflow from marketplace: ${options.marketplaceId}`)
} else {
// Create empty workflow (no default blocks)
initialState = {
blocks: {},
edges: [],
loops: {},
parallels: {},
isDeployed: false,
deployedAt: undefined,
deploymentStatuses: {}, // Initialize empty deployment statuses map
workspaceId, // Include workspace ID in the state object
history: {
past: [],
present: {
state: {
blocks: {},
edges: [],
loops: {},
parallels: {},
isDeployed: false,
deployedAt: undefined,
workspaceId, // Include workspace ID in history
},
timestamp: Date.now(),
action: 'Initial state',
subblockValues: {},
},
future: [],
},
lastSaved: Date.now(),
}
}
// Add workflow to registry with server-generated ID
@@ -747,23 +623,6 @@ export const useWorkflowRegistry = create<WorkflowRegistry>()(
parallels: state.parallels || {},
isDeployed: false,
deployedAt: undefined,
history: {
past: [],
present: {
state: {
blocks: state.blocks || {},
edges: state.edges || [],
loops: state.loops || {},
parallels: state.parallels || {},
isDeployed: false,
deployedAt: undefined,
},
timestamp: Date.now(),
action: 'Imported from marketplace',
subblockValues: {},
},
future: [],
},
lastSaved: Date.now(),
}
@@ -1027,24 +886,6 @@ export const useWorkflowRegistry = create<WorkflowRegistry>()(
deployedAt: undefined,
workspaceId,
deploymentStatuses: {},
history: {
past: [],
present: {
state: {
blocks: sourceState.blocks,
edges: sourceState.edges,
loops: sourceState.loops,
parallels: sourceState.parallels,
isDeployed: false,
deployedAt: undefined,
workspaceId,
},
timestamp: Date.now(),
action: 'Duplicated workflow',
subblockValues: {},
},
future: [],
},
lastSaved: Date.now(),
}
@@ -1156,23 +997,6 @@ export const useWorkflowRegistry = create<WorkflowRegistry>()(
parallels: {},
isDeployed: false,
deployedAt: undefined,
history: {
past: [],
present: {
state: {
blocks: {},
edges: [],
loops: {},
parallels: {},
isDeployed: false,
deployedAt: undefined,
},
timestamp: Date.now(),
action: 'Workflow deleted',
subblockValues: {},
},
future: [],
},
lastSaved: Date.now(),
})

View File

@@ -253,44 +253,6 @@ describe('workflow store', () => {
expect(state.parallels.parallel1).toBeDefined()
expect(state.parallels.parallel1.parallelType).toBe('count')
})
it('should save to history when updating parallel properties', () => {
const { addBlock, updateParallelCollection, updateParallelCount, updateParallelType } =
useWorkflowStore.getState()
// Add a parallel block
addBlock(
'parallel1',
'parallel',
'Test Parallel',
{ x: 0, y: 0 },
{
count: 3,
collection: '',
}
)
// Get initial history length
const initialHistoryLength = useWorkflowStore.getState().history.past.length
// Update collection
updateParallelCollection('parallel1', '["a", "b", "c"]')
let state = useWorkflowStore.getState()
expect(state.history.past.length).toBe(initialHistoryLength + 1)
// Update count
updateParallelCount('parallel1', 5)
state = useWorkflowStore.getState()
expect(state.history.past.length).toBe(initialHistoryLength + 2)
// Update parallel type
updateParallelType('parallel1', 'count')
state = useWorkflowStore.getState()
expect(state.history.past.length).toBe(initialHistoryLength + 3)
})
})
describe('mode switching', () => {

View File

@@ -5,11 +5,6 @@ import { createLogger } from '@/lib/logs/console/logger'
import { getBlockOutputs } from '@/lib/workflows/block-outputs'
import { getBlock } from '@/blocks'
import { resolveOutputType } from '@/blocks/utils'
import {
pushHistory,
type WorkflowStoreWithHistory,
withHistory,
} from '@/stores/workflows/middleware'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import {
@@ -20,8 +15,8 @@ import {
import type {
Position,
SubBlockState,
SyncControl,
WorkflowState,
WorkflowStore,
} from '@/stores/workflows/workflow/types'
import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/workflow/utils'
@@ -39,54 +34,12 @@ const initialState = {
// New field for per-workflow deployment tracking
deploymentStatuses: {},
needsRedeployment: false,
history: {
past: [],
present: {
state: {
blocks: {},
edges: [],
loops: {},
parallels: {},
isDeployed: false,
isPublished: false,
},
timestamp: 0,
action: 'Initial state',
subblockValues: {},
},
future: [],
},
}
// Create a consolidated sync control implementation
/**
* Socket-based SyncControl implementation (replaces HTTP sync)
*/
const createSyncControl = (): SyncControl => ({
markDirty: () => {
// No-op: Socket-based sync handles this automatically
},
isDirty: () => {
// Always return false since socket sync is real-time
return false
},
forceSync: () => {
// No-op: Socket-based sync is always in sync
},
})
export const useWorkflowStore = create<WorkflowStoreWithHistory>()(
export const useWorkflowStore = create<WorkflowStore>()(
devtools(
withHistory((set, get) => ({
(set, get) => ({
...initialState,
undo: () => {},
redo: () => {},
canUndo: () => false,
canRedo: () => false,
revertToHistoryState: () => {},
// Implement sync control interface
sync: createSyncControl(),
setNeedsRedeploymentFlag: (needsRedeployment: boolean) => {
set({ needsRedeployment })
@@ -143,9 +96,7 @@ export const useWorkflowStore = create<WorkflowStoreWithHistory>()(
}
set(newState)
pushHistory(set, get, newState, `Add ${type} node`)
get().updateLastSaved()
// get().sync.markDirty() // Disabled: Using socket-based sync
return
}
@@ -199,9 +150,7 @@ export const useWorkflowStore = create<WorkflowStoreWithHistory>()(
}
set(newState)
pushHistory(set, get, newState, `Add ${type} block`)
get().updateLastSaved()
// get().sync.markDirty() // Disabled: Using socket-based sync
},
updateBlockPosition: (id: string, position: Position) => {
@@ -310,12 +259,6 @@ export const useWorkflowStore = create<WorkflowStoreWithHistory>()(
})
set(newState)
pushHistory(
set,
get,
newState,
parentId ? `Set parent for ${block.name}` : `Remove parent for ${block.name}`
)
get().updateLastSaved()
// Note: Socket.IO handles real-time sync automatically
},
@@ -386,7 +329,6 @@ export const useWorkflowStore = create<WorkflowStoreWithHistory>()(
})
set(newState)
pushHistory(set, get, newState, 'Remove block and children')
get().updateLastSaved()
// Note: Socket.IO handles real-time sync automatically
},
@@ -426,9 +368,7 @@ export const useWorkflowStore = create<WorkflowStoreWithHistory>()(
}
set(newState)
pushHistory(set, get, newState, 'Add connection')
get().updateLastSaved()
// get().sync.markDirty() // Disabled: Using socket-based sync
},
removeEdge: (edgeId: string) => {
@@ -449,7 +389,6 @@ export const useWorkflowStore = create<WorkflowStoreWithHistory>()(
}
set(newState)
pushHistory(set, get, newState, 'Remove connection')
get().updateLastSaved()
},
@@ -459,26 +398,7 @@ export const useWorkflowStore = create<WorkflowStoreWithHistory>()(
edges: [],
loops: {},
parallels: {},
history: {
past: [],
present: {
state: {
blocks: {},
edges: [],
loops: {},
parallels: {},
isDeployed: false,
isPublished: false,
},
timestamp: Date.now(),
action: 'Initial state',
subblockValues: {},
},
future: [],
},
lastSaved: Date.now(),
isDeployed: false,
isPublished: false,
}
set(newState)
// Note: Socket.IO handles real-time sync automatically
@@ -585,7 +505,6 @@ export const useWorkflowStore = create<WorkflowStoreWithHistory>()(
}
set(newState)
pushHistory(set, get, newState, `Duplicate ${block.type} block`)
get().updateLastSaved()
// Note: Socket.IO handles real-time sync automatically
},
@@ -723,7 +642,6 @@ export const useWorkflowStore = create<WorkflowStoreWithHistory>()(
}
set(newState)
pushHistory(set, get, newState, `${name} block name updated`)
get().updateLastSaved()
// Note: Socket.IO handles real-time sync automatically
@@ -958,7 +876,6 @@ export const useWorkflowStore = create<WorkflowStoreWithHistory>()(
},
})
pushHistory(set, get, newState, 'Reverted to deployed state')
get().updateLastSaved()
// Call API to persist the revert to normalized tables
@@ -1042,7 +959,6 @@ export const useWorkflowStore = create<WorkflowStoreWithHistory>()(
}
set(newState)
pushHistory(set, get, newState, `Toggle trigger mode for ${block.type} block`)
get().updateLastSaved()
// Handle webhook enable/disable when toggling trigger mode
@@ -1111,7 +1027,6 @@ export const useWorkflowStore = create<WorkflowStoreWithHistory>()(
}
set(newState)
pushHistory(set, get, newState, `Update parallel count`)
get().updateLastSaved()
// Note: Socket.IO handles real-time sync automatically
},
@@ -1139,7 +1054,6 @@ export const useWorkflowStore = create<WorkflowStoreWithHistory>()(
}
set(newState)
pushHistory(set, get, newState, `Update parallel collection`)
get().updateLastSaved()
// Note: Socket.IO handles real-time sync automatically
},
@@ -1167,7 +1081,6 @@ export const useWorkflowStore = create<WorkflowStoreWithHistory>()(
}
set(newState)
pushHistory(set, get, newState, `Update parallel type`)
get().updateLastSaved()
// Note: Socket.IO handles real-time sync automatically
},
@@ -1184,7 +1097,7 @@ export const useWorkflowStore = create<WorkflowStoreWithHistory>()(
getDragStartPosition: () => {
return get().dragStartPosition || null
},
})),
}),
{ name: 'workflow-store' }
)
)
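With the history middleware gone, every mutating action in this file now follows the same simplified pattern (sketch; newState stands for whatever state the action computed):

set(newState)
get().updateLastSaved()
// Socket.IO handles real-time sync, so there is no pushHistory / markDirty step.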

View File

@@ -26,7 +26,6 @@ export interface ParallelConfig {
parallelType?: 'count' | 'collection'
}
// Generic subflow interface
export interface Subflow {
id: string
workflowId: string
@@ -158,16 +157,6 @@ export interface WorkflowState {
dragStartPosition?: DragStartPosition | null
}
// New interface for sync control
export interface SyncControl {
// Mark the workflow as changed, requiring sync
markDirty: () => void
// Check if the workflow has unsaved changes
isDirty: () => boolean
// Immediately trigger a sync
forceSync: () => void
}
export interface WorkflowActions {
addBlock: (
id: string,
@@ -218,11 +207,6 @@ export interface WorkflowActions {
toggleBlockTriggerMode: (id: string) => void
setDragStartPosition: (position: DragStartPosition | null) => void
getDragStartPosition: () => DragStartPosition | null
// Add the sync control methods to the WorkflowActions interface
sync: SyncControl
// Add method to get current workflow state (eliminates duplication in diff store)
getWorkflowState: () => WorkflowState
}

Some files were not shown because too many files have changed in this diff.