Merge origin/staging into feat/lock

Resolved conflicts:
- Removed addBlock from store (now using batchAddBlocks)
- Updated lock tests to use test helper addBlock function
- Kept both staging's optimization tests and lock feature tests

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
waleed
2026-01-31 18:02:03 -08:00
20 changed files with 1370 additions and 607 deletions

View File

@@ -27,16 +27,16 @@ All API responses include information about your workflow execution limits and u
"limits": {
"workflowExecutionRateLimit": {
"sync": {
"requestsPerMinute": 60, // Sustained rate limit per minute
"maxBurst": 120, // Maximum burst capacity
"remaining": 118, // Current tokens available (up to maxBurst)
"resetAt": "..." // When tokens next refill
"requestsPerMinute": 150, // Sustained rate limit per minute
"maxBurst": 300, // Maximum burst capacity
"remaining": 298, // Current tokens available (up to maxBurst)
"resetAt": "..." // When tokens next refill
},
"async": {
"requestsPerMinute": 200, // Sustained rate limit per minute
"maxBurst": 400, // Maximum burst capacity
"remaining": 398, // Current tokens available
"resetAt": "..." // When tokens next refill
"requestsPerMinute": 1000, // Sustained rate limit per minute
"maxBurst": 2000, // Maximum burst capacity
"remaining": 1998, // Current tokens available
"resetAt": "..." // When tokens next refill
}
},
"usage": {
@@ -107,28 +107,28 @@ Query workflow execution logs with extensive filtering options.
}
],
"nextCursor": "eyJzIjoiMjAyNS0wMS0wMVQxMjozNDo1Ni43ODlaIiwiaWQiOiJsb2dfYWJjMTIzIn0",
"limits": {
"workflowExecutionRateLimit": {
"sync": {
"requestsPerMinute": 60,
"maxBurst": 120,
"remaining": 118,
"resetAt": "2025-01-01T12:35:56.789Z"
"limits": {
"workflowExecutionRateLimit": {
"sync": {
"requestsPerMinute": 150,
"maxBurst": 300,
"remaining": 298,
"resetAt": "2025-01-01T12:35:56.789Z"
},
"async": {
"requestsPerMinute": 1000,
"maxBurst": 2000,
"remaining": 1998,
"resetAt": "2025-01-01T12:35:56.789Z"
}
},
"async": {
"requestsPerMinute": 200,
"maxBurst": 400,
"remaining": 398,
"resetAt": "2025-01-01T12:35:56.789Z"
"usage": {
"currentPeriodCost": 1.234,
"limit": 10,
"plan": "pro",
"isExceeded": false
}
},
"usage": {
"currentPeriodCost": 1.234,
"limit": 10,
"plan": "pro",
"isExceeded": false
}
}
}
```
</Tab>
@@ -188,15 +188,15 @@ Retrieve detailed information about a specific log entry.
"limits": {
"workflowExecutionRateLimit": {
"sync": {
"requestsPerMinute": 60,
"maxBurst": 120,
"remaining": 118,
"requestsPerMinute": 150,
"maxBurst": 300,
"remaining": 298,
"resetAt": "2025-01-01T12:35:56.789Z"
},
"async": {
"requestsPerMinute": 200,
"maxBurst": 400,
"remaining": 398,
"requestsPerMinute": 1000,
"maxBurst": 2000,
"remaining": 1998,
"resetAt": "2025-01-01T12:35:56.789Z"
}
},
@@ -477,10 +477,10 @@ The API uses a **token bucket algorithm** for rate limiting, providing fair usag
| Plan | Requests/Minute | Burst Capacity |
|------|-----------------|----------------|
| Free | 10 | 20 |
| Pro | 30 | 60 |
| Team | 60 | 120 |
| Enterprise | 120 | 240 |
| Free | 30 | 60 |
| Pro | 100 | 200 |
| Team | 200 | 400 |
| Enterprise | 500 | 1000 |
**How it works:**
- Tokens refill at `requestsPerMinute` rate

View File

@@ -170,16 +170,16 @@ curl -X GET -H "X-API-Key: YOUR_API_KEY" -H "Content-Type: application/json" htt
"rateLimit": {
"sync": {
"isLimited": false,
"requestsPerMinute": 25,
"maxBurst": 50,
"remaining": 50,
"requestsPerMinute": 150,
"maxBurst": 300,
"remaining": 300,
"resetAt": "2025-09-08T22:51:55.999Z"
},
"async": {
"isLimited": false,
"requestsPerMinute": 200,
"maxBurst": 400,
"remaining": 400,
"requestsPerMinute": 1000,
"maxBurst": 2000,
"remaining": 2000,
"resetAt": "2025-09-08T22:51:56.155Z"
},
"authType": "api"
@@ -206,11 +206,11 @@ curl -X GET -H "X-API-Key: YOUR_API_KEY" -H "Content-Type: application/json" htt
Different subscription plans have different usage limits:
| Plan | Monthly Usage Limit | Rate Limits (per minute) |
|------|-------------------|-------------------------|
| **Free** | $20 | 5 sync, 10 async |
| **Pro** | $100 | 10 sync, 50 async |
| **Team** | $500 (pooled) | 50 sync, 100 async |
| Plan | Monthly Usage Included | Rate Limits (per minute) |
|------|------------------------|-------------------------|
| **Free** | $20 | 50 sync, 200 async |
| **Pro** | $20 (adjustable) | 150 sync, 1,000 async |
| **Team** | $40/seat (pooled, adjustable) | 300 sync, 2,500 async |
| **Enterprise** | Custom | Custom |
## Billing Model

View File

@@ -1,10 +1,11 @@
'use client'
import type React from 'react'
import { createContext, useCallback, useContext, useEffect, useMemo, useState } from 'react'
import { createContext, useCallback, useContext, useEffect, useMemo, useRef, useState } from 'react'
import { createLogger } from '@sim/logger'
import { useQueryClient } from '@tanstack/react-query'
import { useParams } from 'next/navigation'
import { useSocket } from '@/app/workspace/providers/socket-provider'
import {
useWorkspacePermissionsQuery,
type WorkspacePermissions,
@@ -57,14 +58,42 @@ export function WorkspacePermissionsProvider({ children }: WorkspacePermissionsP
const [hasShownOfflineNotification, setHasShownOfflineNotification] = useState(false)
const hasOperationError = useOperationQueueStore((state) => state.hasOperationError)
const addNotification = useNotificationStore((state) => state.addNotification)
const removeNotification = useNotificationStore((state) => state.removeNotification)
const { isReconnecting } = useSocket()
const reconnectingNotificationIdRef = useRef<string | null>(null)
const isOfflineMode = hasOperationError
useEffect(() => {
if (isReconnecting && !reconnectingNotificationIdRef.current && !isOfflineMode) {
const id = addNotification({
level: 'error',
message: 'Reconnecting...',
})
reconnectingNotificationIdRef.current = id
} else if (!isReconnecting && reconnectingNotificationIdRef.current) {
removeNotification(reconnectingNotificationIdRef.current)
reconnectingNotificationIdRef.current = null
}
return () => {
if (reconnectingNotificationIdRef.current) {
removeNotification(reconnectingNotificationIdRef.current)
reconnectingNotificationIdRef.current = null
}
}
}, [isReconnecting, isOfflineMode, addNotification, removeNotification])
useEffect(() => {
if (!isOfflineMode || hasShownOfflineNotification) {
return
}
if (reconnectingNotificationIdRef.current) {
removeNotification(reconnectingNotificationIdRef.current)
reconnectingNotificationIdRef.current = null
}
try {
addNotification({
level: 'error',
@@ -78,7 +107,7 @@ export function WorkspacePermissionsProvider({ children }: WorkspacePermissionsP
} catch (error) {
logger.error('Failed to add offline notification', { error })
}
}, [addNotification, hasShownOfflineNotification, isOfflineMode])
}, [addNotification, removeNotification, hasShownOfflineNotification, isOfflineMode])
const {
data: workspacePermissions,

View File

@@ -0,0 +1,443 @@
/**
* @vitest-environment node
*/
import { describe, expect, it } from 'vitest'
/**
 * Minimal shape of a tool entry as stored in a block's selected-tools list.
 * Local copy for the duplicate-selection helpers under test; only the fields
 * those helpers read are exercised here.
 */
interface StoredTool {
  // Discriminator, e.g. 'mcp', 'custom-tool', 'workflow_input', or a built-in block type
  type: string
  // Display label (optional for some tool kinds)
  title?: string
  // Identifier for MCP/built-in tools; 'workflow_executor' for workflow tools
  toolId?: string
  // Free-form parameters; workflow tools carry params.workflowId here
  params?: Record<string, string>
  // Database id for custom tools; absent on legacy inline custom tools
  customToolId?: string
  // Legacy inline custom-tool function schema — TODO confirm exact shape against ToolInput
  schema?: any
  // Legacy inline custom-tool source code
  code?: string
  operation?: string
  // How the LLM may use the tool
  usageControl?: 'auto' | 'force' | 'none'
}
/**
 * Reports whether an MCP tool with the given id is already in the selection.
 *
 * Only entries whose `type` is exactly `'mcp'` are considered, so an
 * identical `toolId` on a different tool kind never counts as a duplicate.
 *
 * @param selectedTools - Tools currently attached to the block
 * @param mcpToolId - MCP tool identifier to look for
 * @returns `true` when a matching MCP entry exists
 */
const isMcpToolAlreadySelected = (selectedTools: StoredTool[], mcpToolId: string): boolean => {
  for (const candidate of selectedTools) {
    if (candidate.type === 'mcp' && candidate.toolId === mcpToolId) {
      return true
    }
  }
  return false
}
/**
 * Reports whether a database-backed custom tool is already in the selection.
 *
 * Matches on `customToolId`, so legacy inline custom tools (which have no
 * `customToolId`) never register as duplicates of a database tool.
 *
 * @param selectedTools - Tools currently attached to the block
 * @param customToolId - Database id of the custom tool to look for
 * @returns `true` when a matching custom-tool entry exists
 */
const isCustomToolAlreadySelected = (
  selectedTools: StoredTool[],
  customToolId: string
): boolean => {
  const match = selectedTools.find(
    (tool) => tool.type === 'custom-tool' && tool.customToolId === customToolId
  )
  return match !== undefined
}
/**
 * Reports whether a workflow is already attached via a workflow_input tool.
 *
 * The workflow id lives in `params.workflowId`; entries without params,
 * or of any other tool type, are ignored.
 *
 * @param selectedTools - Tools currently attached to the block
 * @param workflowId - Workflow identifier to look for
 * @returns `true` when a workflow_input entry references the workflow
 */
const isWorkflowAlreadySelected = (selectedTools: StoredTool[], workflowId: string): boolean => {
  for (const tool of selectedTools) {
    if (tool.type !== 'workflow_input') {
      continue
    }
    if (tool.params?.workflowId === workflowId) {
      return true
    }
  }
  return false
}
// Suite for the MCP duplicate-selection guard: matching is on type === 'mcp'
// AND toolId equality; other tool kinds never count as duplicates.
describe('isMcpToolAlreadySelected', () => {
describe('basic functionality', () => {
it.concurrent('returns false when selectedTools is empty', () => {
expect(isMcpToolAlreadySelected([], 'mcp-tool-123')).toBe(false)
})
it.concurrent('returns false when MCP tool is not in selectedTools', () => {
const selectedTools: StoredTool[] = [
{ type: 'mcp', toolId: 'different-mcp-tool', title: 'Different Tool' },
]
expect(isMcpToolAlreadySelected(selectedTools, 'mcp-tool-123')).toBe(false)
})
it.concurrent('returns true when MCP tool is already selected', () => {
const selectedTools: StoredTool[] = [
{ type: 'mcp', toolId: 'mcp-tool-123', title: 'My MCP Tool' },
]
expect(isMcpToolAlreadySelected(selectedTools, 'mcp-tool-123')).toBe(true)
})
it.concurrent('returns true when MCP tool is one of many selected tools', () => {
const selectedTools: StoredTool[] = [
{ type: 'custom-tool', customToolId: 'custom-1' },
{ type: 'mcp', toolId: 'mcp-tool-123', title: 'My MCP Tool' },
{ type: 'workflow_input', toolId: 'workflow_executor' },
]
expect(isMcpToolAlreadySelected(selectedTools, 'mcp-tool-123')).toBe(true)
})
})
// A matching toolId alone is not enough — the entry must be of type 'mcp'.
describe('type discrimination', () => {
it.concurrent('does not match non-MCP tools with same toolId', () => {
const selectedTools: StoredTool[] = [{ type: 'http_request', toolId: 'mcp-tool-123' }]
expect(isMcpToolAlreadySelected(selectedTools, 'mcp-tool-123')).toBe(false)
})
it.concurrent('does not match custom tools even with toolId set', () => {
const selectedTools: StoredTool[] = [
{ type: 'custom-tool', toolId: 'custom-mcp-tool-123', customToolId: 'db-id' },
]
expect(isMcpToolAlreadySelected(selectedTools, 'mcp-tool-123')).toBe(false)
})
})
// Position in the list must not matter: first, middle, last, and absent.
describe('multiple MCP tools', () => {
it.concurrent('correctly identifies first of multiple MCP tools', () => {
const selectedTools: StoredTool[] = [
{ type: 'mcp', toolId: 'mcp-tool-1', title: 'Tool 1' },
{ type: 'mcp', toolId: 'mcp-tool-2', title: 'Tool 2' },
{ type: 'mcp', toolId: 'mcp-tool-3', title: 'Tool 3' },
]
expect(isMcpToolAlreadySelected(selectedTools, 'mcp-tool-1')).toBe(true)
})
it.concurrent('correctly identifies middle MCP tool', () => {
const selectedTools: StoredTool[] = [
{ type: 'mcp', toolId: 'mcp-tool-1', title: 'Tool 1' },
{ type: 'mcp', toolId: 'mcp-tool-2', title: 'Tool 2' },
{ type: 'mcp', toolId: 'mcp-tool-3', title: 'Tool 3' },
]
expect(isMcpToolAlreadySelected(selectedTools, 'mcp-tool-2')).toBe(true)
})
it.concurrent('correctly identifies last MCP tool', () => {
const selectedTools: StoredTool[] = [
{ type: 'mcp', toolId: 'mcp-tool-1', title: 'Tool 1' },
{ type: 'mcp', toolId: 'mcp-tool-2', title: 'Tool 2' },
{ type: 'mcp', toolId: 'mcp-tool-3', title: 'Tool 3' },
]
expect(isMcpToolAlreadySelected(selectedTools, 'mcp-tool-3')).toBe(true)
})
it.concurrent('returns false for non-existent MCP tool among many', () => {
const selectedTools: StoredTool[] = [
{ type: 'mcp', toolId: 'mcp-tool-1', title: 'Tool 1' },
{ type: 'mcp', toolId: 'mcp-tool-2', title: 'Tool 2' },
]
expect(isMcpToolAlreadySelected(selectedTools, 'mcp-tool-999')).toBe(false)
})
})
})
// Suite for the custom-tool duplicate guard: matching is on type === 'custom-tool'
// AND customToolId equality; legacy inline tools (no customToolId) never match.
describe('isCustomToolAlreadySelected', () => {
describe('basic functionality', () => {
it.concurrent('returns false when selectedTools is empty', () => {
expect(isCustomToolAlreadySelected([], 'custom-tool-123')).toBe(false)
})
it.concurrent('returns false when custom tool is not in selectedTools', () => {
const selectedTools: StoredTool[] = [
{ type: 'custom-tool', customToolId: 'different-custom-tool' },
]
expect(isCustomToolAlreadySelected(selectedTools, 'custom-tool-123')).toBe(false)
})
it.concurrent('returns true when custom tool is already selected', () => {
const selectedTools: StoredTool[] = [{ type: 'custom-tool', customToolId: 'custom-tool-123' }]
expect(isCustomToolAlreadySelected(selectedTools, 'custom-tool-123')).toBe(true)
})
it.concurrent('returns true when custom tool is one of many selected tools', () => {
const selectedTools: StoredTool[] = [
{ type: 'mcp', toolId: 'mcp-1', title: 'MCP Tool' },
{ type: 'custom-tool', customToolId: 'custom-tool-123' },
{ type: 'http_request', toolId: 'http_request_tool' },
]
expect(isCustomToolAlreadySelected(selectedTools, 'custom-tool-123')).toBe(true)
})
})
// Id collisions across tool kinds must not produce false positives.
describe('type discrimination', () => {
it.concurrent('does not match non-custom tools with similar IDs', () => {
const selectedTools: StoredTool[] = [
{ type: 'mcp', toolId: 'custom-tool-123', title: 'MCP with similar ID' },
]
expect(isCustomToolAlreadySelected(selectedTools, 'custom-tool-123')).toBe(false)
})
it.concurrent('does not match MCP tools even if customToolId happens to match', () => {
const selectedTools: StoredTool[] = [
{ type: 'mcp', toolId: 'mcp-id', customToolId: 'custom-tool-123' } as StoredTool,
]
expect(isCustomToolAlreadySelected(selectedTools, 'custom-tool-123')).toBe(false)
})
})
// Pre-database custom tools were stored inline (schema + code, no customToolId);
// they must never be treated as duplicates of database-backed tools.
describe('legacy inline custom tools', () => {
it.concurrent('does not match legacy inline tools without customToolId', () => {
const selectedTools: StoredTool[] = [
{
type: 'custom-tool',
title: 'Legacy Tool',
toolId: 'custom-myFunction',
schema: { function: { name: 'myFunction' } },
code: 'return true',
},
]
expect(isCustomToolAlreadySelected(selectedTools, 'custom-tool-123')).toBe(false)
})
it.concurrent('does not false-positive on legacy tools when checking for database tool', () => {
const selectedTools: StoredTool[] = [
{
type: 'custom-tool',
title: 'Legacy Tool',
schema: { function: { name: 'sameName' } },
code: 'return true',
},
]
expect(isCustomToolAlreadySelected(selectedTools, 'db-tool-1')).toBe(false)
})
})
// Position in the list must not matter.
describe('multiple custom tools', () => {
it.concurrent('correctly identifies first of multiple custom tools', () => {
const selectedTools: StoredTool[] = [
{ type: 'custom-tool', customToolId: 'custom-1' },
{ type: 'custom-tool', customToolId: 'custom-2' },
{ type: 'custom-tool', customToolId: 'custom-3' },
]
expect(isCustomToolAlreadySelected(selectedTools, 'custom-1')).toBe(true)
})
it.concurrent('correctly identifies middle custom tool', () => {
const selectedTools: StoredTool[] = [
{ type: 'custom-tool', customToolId: 'custom-1' },
{ type: 'custom-tool', customToolId: 'custom-2' },
{ type: 'custom-tool', customToolId: 'custom-3' },
]
expect(isCustomToolAlreadySelected(selectedTools, 'custom-2')).toBe(true)
})
it.concurrent('correctly identifies last custom tool', () => {
const selectedTools: StoredTool[] = [
{ type: 'custom-tool', customToolId: 'custom-1' },
{ type: 'custom-tool', customToolId: 'custom-2' },
{ type: 'custom-tool', customToolId: 'custom-3' },
]
expect(isCustomToolAlreadySelected(selectedTools, 'custom-3')).toBe(true)
})
it.concurrent('returns false for non-existent custom tool among many', () => {
const selectedTools: StoredTool[] = [
{ type: 'custom-tool', customToolId: 'custom-1' },
{ type: 'custom-tool', customToolId: 'custom-2' },
]
expect(isCustomToolAlreadySelected(selectedTools, 'custom-999')).toBe(false)
})
})
// Heterogeneous selections: other tool kinds must not interfere with lookup.
describe('mixed tool types', () => {
it.concurrent('correctly identifies custom tool in mixed list', () => {
const selectedTools: StoredTool[] = [
{ type: 'mcp', toolId: 'mcp-tool-1', title: 'MCP Tool' },
{ type: 'custom-tool', customToolId: 'custom-tool-123' },
{ type: 'http_request', toolId: 'http_request' },
{ type: 'workflow_input', toolId: 'workflow_executor' },
{ type: 'custom-tool', title: 'Legacy', schema: {}, code: '' },
]
expect(isCustomToolAlreadySelected(selectedTools, 'custom-tool-123')).toBe(true)
})
it.concurrent('does not confuse MCP toolId with custom customToolId', () => {
const selectedTools: StoredTool[] = [
{ type: 'mcp', toolId: 'shared-id-123', title: 'MCP Tool' },
{ type: 'custom-tool', customToolId: 'different-id' },
]
expect(isCustomToolAlreadySelected(selectedTools, 'shared-id-123')).toBe(false)
})
})
})
// Suite for the workflow duplicate guard: matching is on type === 'workflow_input'
// AND params.workflowId equality; entries without params never match.
describe('isWorkflowAlreadySelected', () => {
describe('basic functionality', () => {
it.concurrent('returns false when selectedTools is empty', () => {
expect(isWorkflowAlreadySelected([], 'workflow-123')).toBe(false)
})
it.concurrent('returns false when workflow is not in selectedTools', () => {
const selectedTools: StoredTool[] = [
{
type: 'workflow_input',
toolId: 'workflow_executor',
params: { workflowId: 'different-workflow' },
},
]
expect(isWorkflowAlreadySelected(selectedTools, 'workflow-123')).toBe(false)
})
it.concurrent('returns true when workflow is already selected', () => {
const selectedTools: StoredTool[] = [
{
type: 'workflow_input',
toolId: 'workflow_executor',
params: { workflowId: 'workflow-123' },
},
]
expect(isWorkflowAlreadySelected(selectedTools, 'workflow-123')).toBe(true)
})
it.concurrent('returns true when workflow is one of many selected tools', () => {
const selectedTools: StoredTool[] = [
{ type: 'mcp', toolId: 'mcp-1', title: 'MCP Tool' },
{
type: 'workflow_input',
toolId: 'workflow_executor',
params: { workflowId: 'workflow-123' },
},
{ type: 'custom-tool', customToolId: 'custom-1' },
]
expect(isWorkflowAlreadySelected(selectedTools, 'workflow-123')).toBe(true)
})
})
// The type tag and the params.workflowId must BOTH match.
describe('type discrimination', () => {
it.concurrent('does not match non-workflow_input tools', () => {
const selectedTools: StoredTool[] = [
{ type: 'mcp', toolId: 'workflow-123', params: { workflowId: 'workflow-123' } },
]
expect(isWorkflowAlreadySelected(selectedTools, 'workflow-123')).toBe(false)
})
it.concurrent('does not match workflow_input without params', () => {
const selectedTools: StoredTool[] = [{ type: 'workflow_input', toolId: 'workflow_executor' }]
expect(isWorkflowAlreadySelected(selectedTools, 'workflow-123')).toBe(false)
})
it.concurrent('does not match workflow_input with different workflowId in params', () => {
const selectedTools: StoredTool[] = [
{
type: 'workflow_input',
toolId: 'workflow_executor',
params: { workflowId: 'other-workflow' },
},
]
expect(isWorkflowAlreadySelected(selectedTools, 'workflow-123')).toBe(false)
})
})
// Multiple workflow_input entries coexist; each is matched independently.
describe('multiple workflows', () => {
it.concurrent('allows different workflows to be selected', () => {
const selectedTools: StoredTool[] = [
{
type: 'workflow_input',
toolId: 'workflow_executor',
params: { workflowId: 'workflow-a' },
},
{
type: 'workflow_input',
toolId: 'workflow_executor',
params: { workflowId: 'workflow-b' },
},
]
expect(isWorkflowAlreadySelected(selectedTools, 'workflow-a')).toBe(true)
expect(isWorkflowAlreadySelected(selectedTools, 'workflow-b')).toBe(true)
expect(isWorkflowAlreadySelected(selectedTools, 'workflow-c')).toBe(false)
})
it.concurrent('correctly identifies specific workflow among many', () => {
const selectedTools: StoredTool[] = [
{
type: 'workflow_input',
toolId: 'workflow_executor',
params: { workflowId: 'workflow-1' },
},
{
type: 'workflow_input',
toolId: 'workflow_executor',
params: { workflowId: 'workflow-2' },
},
{
type: 'workflow_input',
toolId: 'workflow_executor',
params: { workflowId: 'workflow-3' },
},
]
expect(isWorkflowAlreadySelected(selectedTools, 'workflow-2')).toBe(true)
expect(isWorkflowAlreadySelected(selectedTools, 'workflow-999')).toBe(false)
})
})
})
// End-to-end-style scenarios for the UI flow the guards back: adding a tool,
// attempting to re-add it, removing it, and adding similarly-named tools.
describe('duplicate prevention integration scenarios', () => {
describe('add then try to re-add', () => {
it.concurrent('prevents re-adding the same MCP tool', () => {
const selectedTools: StoredTool[] = [
{
type: 'mcp',
toolId: 'planetscale-query',
title: 'PlanetScale Query',
params: { serverId: 'server-1' },
},
]
expect(isMcpToolAlreadySelected(selectedTools, 'planetscale-query')).toBe(true)
})
it.concurrent('prevents re-adding the same custom tool', () => {
const selectedTools: StoredTool[] = [
{
type: 'custom-tool',
customToolId: 'my-custom-tool-uuid',
usageControl: 'auto',
},
]
expect(isCustomToolAlreadySelected(selectedTools, 'my-custom-tool-uuid')).toBe(true)
})
it.concurrent('prevents re-adding the same workflow', () => {
const selectedTools: StoredTool[] = [
{
type: 'workflow_input',
toolId: 'workflow_executor',
params: { workflowId: 'my-workflow-uuid' },
},
]
expect(isWorkflowAlreadySelected(selectedTools, 'my-workflow-uuid')).toBe(true)
})
})
// After removal the guard must report false again so re-adding is allowed.
describe('remove then re-add', () => {
it.concurrent('allows re-adding MCP tool after removal', () => {
const selectedToolsAfterRemoval: StoredTool[] = []
expect(isMcpToolAlreadySelected(selectedToolsAfterRemoval, 'planetscale-query')).toBe(false)
})
it.concurrent('allows re-adding custom tool after removal', () => {
const selectedToolsAfterRemoval: StoredTool[] = [
{ type: 'mcp', toolId: 'some-other-tool', title: 'Other' },
]
expect(isCustomToolAlreadySelected(selectedToolsAfterRemoval, 'my-custom-tool-uuid')).toBe(
false
)
})
it.concurrent('allows re-adding workflow after removal', () => {
const selectedToolsAfterRemoval: StoredTool[] = [
{ type: 'mcp', toolId: 'some-tool', title: 'Other' },
]
expect(isWorkflowAlreadySelected(selectedToolsAfterRemoval, 'my-workflow-uuid')).toBe(false)
})
})
// Similar but distinct ids must remain addable.
describe('different tools with similar names', () => {
it.concurrent('allows adding different MCP tools from same server', () => {
const selectedTools: StoredTool[] = [
{ type: 'mcp', toolId: 'server1-tool-a', title: 'Tool A', params: { serverId: 'server1' } },
]
expect(isMcpToolAlreadySelected(selectedTools, 'server1-tool-b')).toBe(false)
})
it.concurrent('allows adding different custom tools', () => {
const selectedTools: StoredTool[] = [{ type: 'custom-tool', customToolId: 'custom-a' }]
expect(isCustomToolAlreadySelected(selectedTools, 'custom-b')).toBe(false)
})
it.concurrent('allows adding different workflows', () => {
const selectedTools: StoredTool[] = [
{
type: 'workflow_input',
toolId: 'workflow_executor',
params: { workflowId: 'workflow-a' },
},
]
expect(isWorkflowAlreadySelected(selectedTools, 'workflow-b')).toBe(false)
})
})
})

View File

@@ -1226,6 +1226,40 @@ export const ToolInput = memo(function ToolInput({
return selectedTools.some((tool) => tool.toolId === toolId)
}
/**
* Checks if an MCP tool is already selected.
*
* @param mcpToolId - The MCP tool identifier to check
* @returns `true` if the MCP tool is already selected
*/
const isMcpToolAlreadySelected = (mcpToolId: string): boolean => {
return selectedTools.some((tool) => tool.type === 'mcp' && tool.toolId === mcpToolId)
}
/**
* Checks if a custom tool is already selected.
*
* @param customToolId - The custom tool identifier to check
* @returns `true` if the custom tool is already selected
*/
const isCustomToolAlreadySelected = (customToolId: string): boolean => {
return selectedTools.some(
(tool) => tool.type === 'custom-tool' && tool.customToolId === customToolId
)
}
/**
* Checks if a workflow is already selected.
*
* @param workflowId - The workflow identifier to check
* @returns `true` if the workflow is already selected
*/
const isWorkflowAlreadySelected = (workflowId: string): boolean => {
return selectedTools.some(
(tool) => tool.type === 'workflow_input' && tool.params?.workflowId === workflowId
)
}
/**
* Checks if a block supports multiple operations.
*
@@ -1745,24 +1779,29 @@ export const ToolInput = memo(function ToolInput({
if (!permissionConfig.disableCustomTools && customTools.length > 0) {
groups.push({
section: 'Custom Tools',
items: customTools.map((customTool) => ({
label: customTool.title,
value: `custom-${customTool.id}`,
iconElement: createToolIcon('#3B82F6', WrenchIcon),
onSelect: () => {
const newTool: StoredTool = {
type: 'custom-tool',
customToolId: customTool.id,
usageControl: 'auto',
isExpanded: true,
}
setStoreValue([
...selectedTools.map((tool) => ({ ...tool, isExpanded: false })),
newTool,
])
setOpen(false)
},
})),
items: customTools.map((customTool) => {
const alreadySelected = isCustomToolAlreadySelected(customTool.id)
return {
label: customTool.title,
value: `custom-${customTool.id}`,
iconElement: createToolIcon('#3B82F6', WrenchIcon),
disabled: isPreview || alreadySelected,
onSelect: () => {
if (alreadySelected) return
const newTool: StoredTool = {
type: 'custom-tool',
customToolId: customTool.id,
usageControl: 'auto',
isExpanded: true,
}
setStoreValue([
...selectedTools.map((tool) => ({ ...tool, isExpanded: false })),
newTool,
])
setOpen(false)
},
}
}),
})
}
@@ -1772,11 +1811,13 @@ export const ToolInput = memo(function ToolInput({
section: 'MCP Tools',
items: availableMcpTools.map((mcpTool) => {
const server = mcpServers.find((s) => s.id === mcpTool.serverId)
const alreadySelected = isMcpToolAlreadySelected(mcpTool.id)
return {
label: mcpTool.name,
value: `mcp-${mcpTool.id}`,
iconElement: createToolIcon(mcpTool.bgColor || '#6366F1', mcpTool.icon || McpIcon),
onSelect: () => {
if (alreadySelected) return
const newTool: StoredTool = {
type: 'mcp',
title: mcpTool.name,
@@ -1796,7 +1837,7 @@ export const ToolInput = memo(function ToolInput({
}
handleMcpToolSelect(newTool, true)
},
disabled: isPreview || disabled,
disabled: isPreview || disabled || alreadySelected,
}
}),
})
@@ -1810,12 +1851,17 @@ export const ToolInput = memo(function ToolInput({
if (builtInTools.length > 0) {
groups.push({
section: 'Built-in Tools',
items: builtInTools.map((block) => ({
label: block.name,
value: `builtin-${block.type}`,
iconElement: createToolIcon(block.bgColor, block.icon),
onSelect: () => handleSelectTool(block),
})),
items: builtInTools.map((block) => {
const toolId = getToolIdForOperation(block.type, undefined)
const alreadySelected = toolId ? isToolAlreadySelected(toolId, block.type) : false
return {
label: block.name,
value: `builtin-${block.type}`,
iconElement: createToolIcon(block.bgColor, block.icon),
disabled: isPreview || alreadySelected,
onSelect: () => handleSelectTool(block),
}
}),
})
}
@@ -1823,12 +1869,17 @@ export const ToolInput = memo(function ToolInput({
if (integrations.length > 0) {
groups.push({
section: 'Integrations',
items: integrations.map((block) => ({
label: block.name,
value: `builtin-${block.type}`,
iconElement: createToolIcon(block.bgColor, block.icon),
onSelect: () => handleSelectTool(block),
})),
items: integrations.map((block) => {
const toolId = getToolIdForOperation(block.type, undefined)
const alreadySelected = toolId ? isToolAlreadySelected(toolId, block.type) : false
return {
label: block.name,
value: `builtin-${block.type}`,
iconElement: createToolIcon(block.bgColor, block.icon),
disabled: isPreview || alreadySelected,
onSelect: () => handleSelectTool(block),
}
}),
})
}
@@ -1836,29 +1887,33 @@ export const ToolInput = memo(function ToolInput({
if (availableWorkflows.length > 0) {
groups.push({
section: 'Workflows',
items: availableWorkflows.map((workflow) => ({
label: workflow.name,
value: `workflow-${workflow.id}`,
iconElement: createToolIcon('#6366F1', WorkflowIcon),
onSelect: () => {
const newTool: StoredTool = {
type: 'workflow_input',
title: 'Workflow',
toolId: 'workflow_executor',
params: {
workflowId: workflow.id,
},
isExpanded: true,
usageControl: 'auto',
}
setStoreValue([
...selectedTools.map((tool) => ({ ...tool, isExpanded: false })),
newTool,
])
setOpen(false)
},
disabled: isPreview || disabled,
})),
items: availableWorkflows.map((workflow) => {
const alreadySelected = isWorkflowAlreadySelected(workflow.id)
return {
label: workflow.name,
value: `workflow-${workflow.id}`,
iconElement: createToolIcon('#6366F1', WorkflowIcon),
onSelect: () => {
if (alreadySelected) return
const newTool: StoredTool = {
type: 'workflow_input',
title: 'Workflow',
toolId: 'workflow_executor',
params: {
workflowId: workflow.id,
},
isExpanded: true,
usageControl: 'auto',
}
setStoreValue([
...selectedTools.map((tool) => ({ ...tool, isExpanded: false })),
newTool,
])
setOpen(false)
},
disabled: isPreview || disabled || alreadySelected,
}
}),
})
}
@@ -1877,6 +1932,11 @@ export const ToolInput = memo(function ToolInput({
permissionConfig.disableCustomTools,
permissionConfig.disableMcpTools,
availableWorkflows,
getToolIdForOperation,
isToolAlreadySelected,
isMcpToolAlreadySelected,
isCustomToolAlreadySelected,
isWorkflowAlreadySelected,
])
const toolRequiresOAuth = (toolId: string): boolean => {

View File

@@ -13,8 +13,8 @@ import { SlackMonoIcon } from '@/components/icons'
import type { PlanFeature } from '@/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/components/subscription/components/plan-card'
export const PRO_PLAN_FEATURES: PlanFeature[] = [
{ icon: Zap, text: '25 runs per minute (sync)' },
{ icon: Clock, text: '200 runs per minute (async)' },
{ icon: Zap, text: '150 runs per minute (sync)' },
{ icon: Clock, text: '1,000 runs per minute (async)' },
{ icon: HardDrive, text: '50GB file storage' },
{ icon: Building2, text: 'Unlimited workspaces' },
{ icon: Users, text: 'Unlimited invites' },
@@ -22,8 +22,8 @@ export const PRO_PLAN_FEATURES: PlanFeature[] = [
]
export const TEAM_PLAN_FEATURES: PlanFeature[] = [
{ icon: Zap, text: '75 runs per minute (sync)' },
{ icon: Clock, text: '500 runs per minute (async)' },
{ icon: Zap, text: '300 runs per minute (sync)' },
{ icon: Clock, text: '2,500 runs per minute (async)' },
{ icon: HardDrive, text: '500GB file storage (pooled)' },
{ icon: Building2, text: 'Unlimited workspaces' },
{ icon: Users, text: 'Unlimited invites' },

View File

@@ -49,6 +49,7 @@ interface SocketContextType {
socket: Socket | null
isConnected: boolean
isConnecting: boolean
isReconnecting: boolean
authFailed: boolean
currentWorkflowId: string | null
currentSocketId: string | null
@@ -66,9 +67,16 @@ interface SocketContextType {
blockId: string,
subblockId: string,
value: any,
operationId?: string
operationId: string | undefined,
workflowId: string
) => void
emitVariableUpdate: (
variableId: string,
field: string,
value: any,
operationId: string | undefined,
workflowId: string
) => void
emitVariableUpdate: (variableId: string, field: string, value: any, operationId?: string) => void
emitCursorUpdate: (cursor: { x: number; y: number } | null) => void
emitSelectionUpdate: (selection: { type: 'block' | 'edge' | 'none'; id?: string }) => void
@@ -88,6 +96,7 @@ const SocketContext = createContext<SocketContextType>({
socket: null,
isConnected: false,
isConnecting: false,
isReconnecting: false,
authFailed: false,
currentWorkflowId: null,
currentSocketId: null,
@@ -122,6 +131,7 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
const [socket, setSocket] = useState<Socket | null>(null)
const [isConnected, setIsConnected] = useState(false)
const [isConnecting, setIsConnecting] = useState(false)
const [isReconnecting, setIsReconnecting] = useState(false)
const [currentWorkflowId, setCurrentWorkflowId] = useState<string | null>(null)
const [currentSocketId, setCurrentSocketId] = useState<string | null>(null)
const [presenceUsers, setPresenceUsers] = useState<PresenceUser[]>([])
@@ -236,20 +246,19 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
setCurrentWorkflowId(null)
setPresenceUsers([])
logger.info('Socket disconnected', {
reason,
})
// socket.active indicates if auto-reconnect will happen
if (socketInstance.active) {
setIsReconnecting(true)
logger.info('Socket disconnected, will auto-reconnect', { reason })
} else {
setIsReconnecting(false)
logger.info('Socket disconnected, no auto-reconnect', { reason })
}
})
socketInstance.on('connect_error', (error: any) => {
socketInstance.on('connect_error', (error: Error) => {
setIsConnecting(false)
logger.error('Socket connection error:', {
message: error.message,
stack: error.stack,
description: error.description,
type: error.type,
transport: error.transport,
})
logger.error('Socket connection error:', { message: error.message })
// Check if this is an authentication failure
const isAuthError =
@@ -261,43 +270,41 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
logger.warn(
'Authentication failed - stopping reconnection attempts. User may need to refresh/re-login.'
)
// Stop reconnection attempts to prevent infinite loop
socketInstance.disconnect()
// Reset state to allow re-initialization when session is restored
setSocket(null)
setAuthFailed(true)
setIsReconnecting(false)
initializedRef.current = false
} else if (socketInstance.active) {
// Temporary failure, will auto-reconnect
setIsReconnecting(true)
}
})
socketInstance.on('reconnect', (attemptNumber) => {
// Reconnection events are on the Manager (socket.io), not the socket itself
socketInstance.io.on('reconnect', (attemptNumber) => {
setIsConnected(true)
setIsReconnecting(false)
setCurrentSocketId(socketInstance.id ?? null)
logger.info('Socket reconnected successfully', {
attemptNumber,
socketId: socketInstance.id,
transport: socketInstance.io.engine?.transport?.name,
})
// Note: join-workflow is handled by the useEffect watching isConnected
})
socketInstance.on('reconnect_attempt', (attemptNumber) => {
logger.info('Socket reconnection attempt (fresh token will be generated)', {
attemptNumber,
timestamp: new Date().toISOString(),
})
socketInstance.io.on('reconnect_attempt', (attemptNumber) => {
setIsReconnecting(true)
logger.info('Socket reconnection attempt', { attemptNumber })
})
socketInstance.on('reconnect_error', (error: any) => {
logger.error('Socket reconnection error:', {
message: error.message,
attemptNumber: error.attemptNumber,
type: error.type,
})
socketInstance.io.on('reconnect_error', (error: Error) => {
logger.error('Socket reconnection error:', { message: error.message })
})
socketInstance.on('reconnect_failed', () => {
socketInstance.io.on('reconnect_failed', () => {
logger.error('Socket reconnection failed - all attempts exhausted')
setIsReconnecting(false)
setIsConnecting(false)
})
@@ -629,6 +636,7 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
if (commit) {
socket.emit('workflow-operation', {
workflowId: currentWorkflowId,
operation,
target,
payload,
@@ -645,6 +653,7 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
}
pendingPositionUpdates.current.set(blockId, {
workflowId: currentWorkflowId,
operation,
target,
payload,
@@ -666,6 +675,7 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
}
} else {
socket.emit('workflow-operation', {
workflowId: currentWorkflowId,
operation,
target,
payload,
@@ -678,47 +688,51 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
)
const emitSubblockUpdate = useCallback(
(blockId: string, subblockId: string, value: any, operationId?: string) => {
if (socket && currentWorkflowId) {
socket.emit('subblock-update', {
blockId,
subblockId,
value,
timestamp: Date.now(),
operationId,
})
} else {
logger.warn('Cannot emit subblock update: no socket connection or workflow room', {
hasSocket: !!socket,
currentWorkflowId,
blockId,
subblockId,
})
(
blockId: string,
subblockId: string,
value: any,
operationId: string | undefined,
workflowId: string
) => {
if (!socket) {
logger.warn('Cannot emit subblock update: no socket connection', { workflowId, blockId })
return
}
socket.emit('subblock-update', {
workflowId,
blockId,
subblockId,
value,
timestamp: Date.now(),
operationId,
})
},
[socket, currentWorkflowId]
[socket]
)
const emitVariableUpdate = useCallback(
(variableId: string, field: string, value: any, operationId?: string) => {
if (socket && currentWorkflowId) {
socket.emit('variable-update', {
variableId,
field,
value,
timestamp: Date.now(),
operationId,
})
} else {
logger.warn('Cannot emit variable update: no socket connection or workflow room', {
hasSocket: !!socket,
currentWorkflowId,
variableId,
field,
})
(
variableId: string,
field: string,
value: any,
operationId: string | undefined,
workflowId: string
) => {
if (!socket) {
logger.warn('Cannot emit variable update: no socket connection', { workflowId, variableId })
return
}
socket.emit('variable-update', {
workflowId,
variableId,
field,
value,
timestamp: Date.now(),
operationId,
})
},
[socket, currentWorkflowId]
[socket]
)
const lastCursorEmit = useRef(0)
@@ -794,6 +808,7 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
socket,
isConnected,
isConnecting,
isReconnecting,
authFailed,
currentWorkflowId,
currentSocketId,
@@ -820,6 +835,7 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
socket,
isConnected,
isConnecting,
isReconnecting,
authFailed,
currentWorkflowId,
currentSocketId,

View File

@@ -13,8 +13,8 @@ interface FreeTierUpgradeEmailProps {
const proFeatures = [
{ label: '$20/month', desc: 'in credits included' },
{ label: '25 runs/min', desc: 'sync executions' },
{ label: '200 runs/min', desc: 'async executions' },
{ label: '150 runs/min', desc: 'sync executions' },
{ label: '1,000 runs/min', desc: 'async executions' },
{ label: '50GB storage', desc: 'for files & assets' },
{ label: 'Unlimited', desc: 'workspaces & invites' },
]

View File

@@ -146,10 +146,6 @@ export function useCollaborativeWorkflow() {
cancelOperationsForVariable,
} = useOperationQueue()
const isInActiveRoom = useCallback(() => {
return !!currentWorkflowId && activeWorkflowId === currentWorkflowId
}, [currentWorkflowId, activeWorkflowId])
// Register emit functions with operation queue store
useEffect(() => {
registerEmitFunctions(
@@ -162,10 +158,19 @@ export function useCollaborativeWorkflow() {
useEffect(() => {
const handleWorkflowOperation = (data: any) => {
const { operation, target, payload, userId } = data
const { operation, target, payload, userId, metadata } = data
if (isApplyingRemoteChange.current) return
// Filter broadcasts by workflowId to prevent cross-workflow updates
if (metadata?.workflowId && metadata.workflowId !== activeWorkflowId) {
logger.debug('Ignoring workflow operation for different workflow', {
broadcastWorkflowId: metadata.workflowId,
activeWorkflowId,
})
return
}
logger.info(`Received ${operation} on ${target} from user ${userId}`)
// Apply the operation to local state
@@ -450,16 +455,24 @@ export function useCollaborativeWorkflow() {
}
const handleSubblockUpdate = (data: any) => {
const { blockId, subblockId, value, userId } = data
const { workflowId, blockId, subblockId, value, userId } = data
if (isApplyingRemoteChange.current) return
// Filter broadcasts by workflowId to prevent cross-workflow updates
if (workflowId && workflowId !== activeWorkflowId) {
logger.debug('Ignoring subblock update for different workflow', {
broadcastWorkflowId: workflowId,
activeWorkflowId,
})
return
}
logger.info(`Received subblock update from user ${userId}: ${blockId}.${subblockId}`)
isApplyingRemoteChange.current = true
try {
// The setValue function automatically uses the active workflow ID
useSubBlockStore.getState().setValue(blockId, subblockId, value)
const blockType = useWorkflowStore.getState().blocks?.[blockId]?.type
if (activeWorkflowId && blockType === 'function' && subblockId === 'code') {
@@ -473,10 +486,19 @@ export function useCollaborativeWorkflow() {
}
const handleVariableUpdate = (data: any) => {
const { variableId, field, value, userId } = data
const { workflowId, variableId, field, value, userId } = data
if (isApplyingRemoteChange.current) return
// Filter broadcasts by workflowId to prevent cross-workflow updates
if (workflowId && workflowId !== activeWorkflowId) {
logger.debug('Ignoring variable update for different workflow', {
broadcastWorkflowId: workflowId,
activeWorkflowId,
})
return
}
logger.info(`Received variable update from user ${userId}: ${variableId}.${field}`)
isApplyingRemoteChange.current = true
@@ -637,13 +659,9 @@ export function useCollaborativeWorkflow() {
return
}
if (!isInActiveRoom()) {
logger.debug('Skipping operation - not in active workflow', {
currentWorkflowId,
activeWorkflowId,
operation,
target,
})
// Queue operations if we have an active workflow - queue handles socket readiness
if (!activeWorkflowId) {
logger.debug('Skipping operation - no active workflow', { operation, target })
return
}
@@ -656,20 +674,13 @@ export function useCollaborativeWorkflow() {
target,
payload,
},
workflowId: activeWorkflowId || '',
workflowId: activeWorkflowId,
userId: session?.user?.id || 'unknown',
})
localAction()
},
[
addToQueue,
session?.user?.id,
isBaselineDiffView,
activeWorkflowId,
isInActiveRoom,
currentWorkflowId,
]
[addToQueue, session?.user?.id, isBaselineDiffView, activeWorkflowId]
)
const collaborativeBatchUpdatePositions = useCallback(
@@ -683,8 +694,8 @@ export function useCollaborativeWorkflow() {
return
}
if (!isInActiveRoom()) {
logger.debug('Skipping batch position update - not in active workflow')
if (!activeWorkflowId) {
logger.debug('Skipping batch position update - no active workflow')
return
}
@@ -728,7 +739,7 @@ export function useCollaborativeWorkflow() {
}
}
},
[isBaselineDiffView, addToQueue, activeWorkflowId, session?.user?.id, isInActiveRoom, undoRedo]
[isBaselineDiffView, addToQueue, activeWorkflowId, session?.user?.id, undoRedo]
)
const collaborativeUpdateBlockName = useCallback(
@@ -885,8 +896,8 @@ export function useCollaborativeWorkflow() {
return
}
if (!isInActiveRoom()) {
logger.debug('Skipping batch update parent - not in active workflow')
if (!activeWorkflowId) {
logger.debug('Skipping batch update parent - no active workflow')
return
}
@@ -955,7 +966,7 @@ export function useCollaborativeWorkflow() {
logger.debug('Batch updated parent for blocks', { updateCount: updates.length })
},
[isBaselineDiffView, isInActiveRoom, undoRedo, addToQueue, activeWorkflowId, session?.user?.id]
[isBaselineDiffView, undoRedo, addToQueue, activeWorkflowId, session?.user?.id]
)
const collaborativeToggleBlockAdvancedMode = useCallback(
@@ -1099,8 +1110,8 @@ export function useCollaborativeWorkflow() {
return false
}
if (!isInActiveRoom()) {
logger.debug('Skipping batch add edges - not in active workflow')
if (!activeWorkflowId) {
logger.debug('Skipping batch add edges - no active workflow')
return false
}
@@ -1134,7 +1145,7 @@ export function useCollaborativeWorkflow() {
return true
},
[isBaselineDiffView, addToQueue, activeWorkflowId, session?.user?.id, isInActiveRoom, undoRedo]
[isBaselineDiffView, addToQueue, activeWorkflowId, session?.user?.id, undoRedo]
)
const collaborativeBatchRemoveEdges = useCallback(
@@ -1143,8 +1154,8 @@ export function useCollaborativeWorkflow() {
return false
}
if (!isInActiveRoom()) {
logger.debug('Skipping batch remove edges - not in active workflow')
if (!activeWorkflowId) {
logger.debug('Skipping batch remove edges - no active workflow')
return false
}
@@ -1192,7 +1203,7 @@ export function useCollaborativeWorkflow() {
logger.info('Batch removed edges', { count: validEdgeIds.length })
return true
},
[isBaselineDiffView, isInActiveRoom, addToQueue, activeWorkflowId, session, undoRedo]
[isBaselineDiffView, addToQueue, activeWorkflowId, session, undoRedo]
)
const collaborativeSetSubblockValue = useCallback(
@@ -1227,11 +1238,9 @@ export function useCollaborativeWorkflow() {
// Best-effort; do not block on clearing
}
// Only emit to socket if in active room
if (!isInActiveRoom()) {
logger.debug('Local update applied, skipping socket emit - not in active workflow', {
currentWorkflowId,
activeWorkflowId,
// Queue socket operation if we have an active workflow
if (!activeWorkflowId) {
logger.debug('Local update applied, skipping socket queue - no active workflow', {
blockId,
subblockId,
})
@@ -1253,14 +1262,7 @@ export function useCollaborativeWorkflow() {
userId: session?.user?.id || 'unknown',
})
},
[
currentWorkflowId,
activeWorkflowId,
addToQueue,
session?.user?.id,
isBaselineDiffView,
isInActiveRoom,
]
[activeWorkflowId, addToQueue, session?.user?.id, isBaselineDiffView]
)
// Immediate tag selection (uses queue but processes immediately, no debouncing)
@@ -1272,13 +1274,8 @@ export function useCollaborativeWorkflow() {
return
}
if (!isInActiveRoom()) {
logger.debug('Skipping tag selection - not in active workflow', {
currentWorkflowId,
activeWorkflowId,
blockId,
subblockId,
})
if (!activeWorkflowId) {
logger.debug('Skipping tag selection - no active workflow', { blockId, subblockId })
return
}
@@ -1299,14 +1296,7 @@ export function useCollaborativeWorkflow() {
userId: session?.user?.id || 'unknown',
})
},
[
isBaselineDiffView,
addToQueue,
currentWorkflowId,
activeWorkflowId,
session?.user?.id,
isInActiveRoom,
]
[isBaselineDiffView, addToQueue, activeWorkflowId, session?.user?.id]
)
const collaborativeUpdateLoopType = useCallback(
@@ -1593,8 +1583,8 @@ export function useCollaborativeWorkflow() {
subBlockValues: Record<string, Record<string, unknown>> = {},
options?: { skipUndoRedo?: boolean }
) => {
if (!isInActiveRoom()) {
logger.debug('Skipping batch add blocks - not in active workflow')
if (!activeWorkflowId) {
logger.debug('Skipping batch add blocks - no active workflow')
return false
}
@@ -1647,7 +1637,7 @@ export function useCollaborativeWorkflow() {
return true
},
[addToQueue, activeWorkflowId, session?.user?.id, isBaselineDiffView, isInActiveRoom, undoRedo]
[addToQueue, activeWorkflowId, session?.user?.id, isBaselineDiffView, undoRedo]
)
const collaborativeBatchRemoveBlocks = useCallback(
@@ -1656,8 +1646,8 @@ export function useCollaborativeWorkflow() {
return false
}
if (!isInActiveRoom()) {
logger.debug('Skipping batch remove blocks - not in active workflow')
if (!activeWorkflowId) {
logger.debug('Skipping batch remove blocks - no active workflow')
return false
}
@@ -1741,7 +1731,6 @@ export function useCollaborativeWorkflow() {
addToQueue,
activeWorkflowId,
session?.user?.id,
isInActiveRoom,
cancelOperationsForBlock,
undoRedo,
]

View File

@@ -30,7 +30,6 @@ import {
useUndoRedoStore,
} from '@/stores/undo-redo'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
import type { BlockState } from '@/stores/workflows/workflow/types'
@@ -535,47 +534,9 @@ export function useUndoRedo() {
userId,
})
blocksToAdd.forEach((block) => {
useWorkflowStore
.getState()
.addBlock(
block.id,
block.type,
block.name,
block.position,
block.data,
block.data?.parentId,
block.data?.extent,
{
enabled: block.enabled,
horizontalHandles: block.horizontalHandles,
advancedMode: block.advancedMode,
triggerMode: block.triggerMode,
height: block.height,
}
)
})
if (subBlockValues && Object.keys(subBlockValues).length > 0) {
useSubBlockStore.setState((state) => ({
workflowValues: {
...state.workflowValues,
[activeWorkflowId]: {
...state.workflowValues[activeWorkflowId],
...subBlockValues,
},
},
}))
}
if (edgeSnapshots && edgeSnapshots.length > 0) {
const edgesToAdd = edgeSnapshots.filter(
(edge) => !useWorkflowStore.getState().edges.find((e) => e.id === edge.id)
)
if (edgesToAdd.length > 0) {
useWorkflowStore.getState().batchAddEdges(edgesToAdd)
}
}
useWorkflowStore
.getState()
.batchAddBlocks(blocksToAdd, edgeSnapshots || [], subBlockValues || {})
break
}
case UNDO_REDO_OPERATIONS.BATCH_REMOVE_EDGES: {
@@ -1148,47 +1109,9 @@ export function useUndoRedo() {
userId,
})
blocksToAdd.forEach((block) => {
useWorkflowStore
.getState()
.addBlock(
block.id,
block.type,
block.name,
block.position,
block.data,
block.data?.parentId,
block.data?.extent,
{
enabled: block.enabled,
horizontalHandles: block.horizontalHandles,
advancedMode: block.advancedMode,
triggerMode: block.triggerMode,
height: block.height,
}
)
})
if (subBlockValues && Object.keys(subBlockValues).length > 0) {
useSubBlockStore.setState((state) => ({
workflowValues: {
...state.workflowValues,
[activeWorkflowId]: {
...state.workflowValues[activeWorkflowId],
...subBlockValues,
},
},
}))
}
if (edgeSnapshots && edgeSnapshots.length > 0) {
const edgesToAdd = edgeSnapshots.filter(
(edge) => !useWorkflowStore.getState().edges.find((e) => e.id === edge.id)
)
if (edgesToAdd.length > 0) {
useWorkflowStore.getState().batchAddEdges(edgesToAdd)
}
}
useWorkflowStore
.getState()
.batchAddBlocks(blocksToAdd, edgeSnapshots || [], subBlockValues || {})
break
}
case UNDO_REDO_OPERATIONS.BATCH_REMOVE_BLOCKS: {

View File

@@ -161,14 +161,14 @@ export const env = createEnv({
// Rate Limiting Configuration
RATE_LIMIT_WINDOW_MS: z.string().optional().default('60000'), // Rate limit window duration in milliseconds (default: 1 minute)
MANUAL_EXECUTION_LIMIT: z.string().optional().default('999999'),// Manual execution bypass value (effectively unlimited)
RATE_LIMIT_FREE_SYNC: z.string().optional().default('10'), // Free tier sync API executions per minute
RATE_LIMIT_FREE_ASYNC: z.string().optional().default('50'), // Free tier async API executions per minute
RATE_LIMIT_PRO_SYNC: z.string().optional().default('25'), // Pro tier sync API executions per minute
RATE_LIMIT_PRO_ASYNC: z.string().optional().default('200'), // Pro tier async API executions per minute
RATE_LIMIT_TEAM_SYNC: z.string().optional().default('75'), // Team tier sync API executions per minute
RATE_LIMIT_TEAM_ASYNC: z.string().optional().default('500'), // Team tier async API executions per minute
RATE_LIMIT_ENTERPRISE_SYNC: z.string().optional().default('150'), // Enterprise tier sync API executions per minute
RATE_LIMIT_ENTERPRISE_ASYNC: z.string().optional().default('1000'), // Enterprise tier async API executions per minute
RATE_LIMIT_FREE_SYNC: z.string().optional().default('50'), // Free tier sync API executions per minute
RATE_LIMIT_FREE_ASYNC: z.string().optional().default('200'), // Free tier async API executions per minute
RATE_LIMIT_PRO_SYNC: z.string().optional().default('150'), // Pro tier sync API executions per minute
RATE_LIMIT_PRO_ASYNC: z.string().optional().default('1000'), // Pro tier async API executions per minute
RATE_LIMIT_TEAM_SYNC: z.string().optional().default('300'), // Team tier sync API executions per minute
RATE_LIMIT_TEAM_ASYNC: z.string().optional().default('2500'), // Team tier async API executions per minute
RATE_LIMIT_ENTERPRISE_SYNC: z.string().optional().default('600'), // Enterprise tier sync API executions per minute
RATE_LIMIT_ENTERPRISE_ASYNC: z.string().optional().default('5000'), // Enterprise tier async API executions per minute
// Knowledge Base Processing Configuration - Shared across all processing methods
KB_CONFIG_MAX_DURATION: z.number().optional().default(600), // Max processing duration in seconds (10 minutes)

View File

@@ -28,24 +28,24 @@ function createBucketConfig(ratePerMinute: number, burstMultiplier = 2): TokenBu
export const RATE_LIMITS: Record<SubscriptionPlan, RateLimitConfig> = {
free: {
sync: createBucketConfig(Number.parseInt(env.RATE_LIMIT_FREE_SYNC) || 10),
async: createBucketConfig(Number.parseInt(env.RATE_LIMIT_FREE_ASYNC) || 50),
apiEndpoint: createBucketConfig(10),
},
pro: {
sync: createBucketConfig(Number.parseInt(env.RATE_LIMIT_PRO_SYNC) || 25),
async: createBucketConfig(Number.parseInt(env.RATE_LIMIT_PRO_ASYNC) || 200),
sync: createBucketConfig(Number.parseInt(env.RATE_LIMIT_FREE_SYNC) || 50),
async: createBucketConfig(Number.parseInt(env.RATE_LIMIT_FREE_ASYNC) || 200),
apiEndpoint: createBucketConfig(30),
},
pro: {
sync: createBucketConfig(Number.parseInt(env.RATE_LIMIT_PRO_SYNC) || 150),
async: createBucketConfig(Number.parseInt(env.RATE_LIMIT_PRO_ASYNC) || 1000),
apiEndpoint: createBucketConfig(100),
},
team: {
sync: createBucketConfig(Number.parseInt(env.RATE_LIMIT_TEAM_SYNC) || 75),
async: createBucketConfig(Number.parseInt(env.RATE_LIMIT_TEAM_ASYNC) || 500),
apiEndpoint: createBucketConfig(60),
sync: createBucketConfig(Number.parseInt(env.RATE_LIMIT_TEAM_SYNC) || 300),
async: createBucketConfig(Number.parseInt(env.RATE_LIMIT_TEAM_ASYNC) || 2500),
apiEndpoint: createBucketConfig(200),
},
enterprise: {
sync: createBucketConfig(Number.parseInt(env.RATE_LIMIT_ENTERPRISE_SYNC) || 150),
async: createBucketConfig(Number.parseInt(env.RATE_LIMIT_ENTERPRISE_ASYNC) || 1000),
apiEndpoint: createBucketConfig(120),
sync: createBucketConfig(Number.parseInt(env.RATE_LIMIT_ENTERPRISE_SYNC) || 600),
async: createBucketConfig(Number.parseInt(env.RATE_LIMIT_ENTERPRISE_ASYNC) || 5000),
apiEndpoint: createBucketConfig(500),
},
}

View File

@@ -199,10 +199,11 @@ export class McpClient {
protocolVersion: this.getNegotiatedVersion(),
})
const sdkResult = await this.client.callTool({
name: toolCall.name,
arguments: toolCall.arguments,
})
const sdkResult = await this.client.callTool(
{ name: toolCall.name, arguments: toolCall.arguments },
undefined,
{ timeout: 600000 } // 10 minutes - override SDK's 60s default
)
return sdkResult as McpToolResult
} catch (error) {

View File

@@ -39,16 +39,23 @@ export function cleanupPendingSubblocksForSocket(socketId: string): void {
export function setupSubblocksHandlers(socket: AuthenticatedSocket, roomManager: IRoomManager) {
socket.on('subblock-update', async (data) => {
const { blockId, subblockId, value, timestamp, operationId } = data
const {
workflowId: payloadWorkflowId,
blockId,
subblockId,
value,
timestamp,
operationId,
} = data
try {
const workflowId = await roomManager.getWorkflowIdForSocket(socket.id)
const sessionWorkflowId = await roomManager.getWorkflowIdForSocket(socket.id)
const session = await roomManager.getUserSession(socket.id)
if (!workflowId || !session) {
if (!sessionWorkflowId || !session) {
logger.debug(`Ignoring subblock update: socket not connected to any workflow room`, {
socketId: socket.id,
hasWorkflowId: !!workflowId,
hasWorkflowId: !!sessionWorkflowId,
hasSession: !!session,
})
socket.emit('operation-forbidden', {
@@ -61,6 +68,24 @@ export function setupSubblocksHandlers(socket: AuthenticatedSocket, roomManager:
return
}
const workflowId = payloadWorkflowId || sessionWorkflowId
if (payloadWorkflowId && payloadWorkflowId !== sessionWorkflowId) {
logger.warn('Workflow ID mismatch in subblock update', {
payloadWorkflowId,
sessionWorkflowId,
socketId: socket.id,
})
if (operationId) {
socket.emit('operation-failed', {
operationId,
error: 'Workflow ID mismatch',
retryable: true,
})
}
return
}
const hasRoom = await roomManager.hasWorkflowRoom(workflowId)
if (!hasRoom) {
logger.debug(`Ignoring subblock update: workflow room not found`, {
@@ -182,20 +207,17 @@ async function flushSubblockUpdate(
if (updateSuccessful) {
// Broadcast to room excluding all senders (works cross-pod via Redis adapter)
const senderSocketIds = [...pending.opToSocket.values()]
const broadcastPayload = {
workflowId,
blockId,
subblockId,
value,
timestamp,
}
if (senderSocketIds.length > 0) {
io.to(workflowId).except(senderSocketIds).emit('subblock-update', {
blockId,
subblockId,
value,
timestamp,
})
io.to(workflowId).except(senderSocketIds).emit('subblock-update', broadcastPayload)
} else {
io.to(workflowId).emit('subblock-update', {
blockId,
subblockId,
value,
timestamp,
})
io.to(workflowId).emit('subblock-update', broadcastPayload)
}
// Confirm all coalesced operationIds (io.to(socketId) works cross-pod)

View File

@@ -35,16 +35,16 @@ export function cleanupPendingVariablesForSocket(socketId: string): void {
export function setupVariablesHandlers(socket: AuthenticatedSocket, roomManager: IRoomManager) {
socket.on('variable-update', async (data) => {
const { variableId, field, value, timestamp, operationId } = data
const { workflowId: payloadWorkflowId, variableId, field, value, timestamp, operationId } = data
try {
const workflowId = await roomManager.getWorkflowIdForSocket(socket.id)
const sessionWorkflowId = await roomManager.getWorkflowIdForSocket(socket.id)
const session = await roomManager.getUserSession(socket.id)
if (!workflowId || !session) {
if (!sessionWorkflowId || !session) {
logger.debug(`Ignoring variable update: socket not connected to any workflow room`, {
socketId: socket.id,
hasWorkflowId: !!workflowId,
hasWorkflowId: !!sessionWorkflowId,
hasSession: !!session,
})
socket.emit('operation-forbidden', {
@@ -57,6 +57,24 @@ export function setupVariablesHandlers(socket: AuthenticatedSocket, roomManager:
return
}
const workflowId = payloadWorkflowId || sessionWorkflowId
if (payloadWorkflowId && payloadWorkflowId !== sessionWorkflowId) {
logger.warn('Workflow ID mismatch in variable update', {
payloadWorkflowId,
sessionWorkflowId,
socketId: socket.id,
})
if (operationId) {
socket.emit('operation-failed', {
operationId,
error: 'Workflow ID mismatch',
retryable: true,
})
}
return
}
const hasRoom = await roomManager.hasWorkflowRoom(workflowId)
if (!hasRoom) {
logger.debug(`Ignoring variable update: workflow room not found`, {
@@ -179,20 +197,17 @@ async function flushVariableUpdate(
if (updateSuccessful) {
// Broadcast to room excluding all senders (works cross-pod via Redis adapter)
const senderSocketIds = [...pending.opToSocket.values()]
const broadcastPayload = {
workflowId,
variableId,
field,
value,
timestamp,
}
if (senderSocketIds.length > 0) {
io.to(workflowId).except(senderSocketIds).emit('variable-update', {
variableId,
field,
value,
timestamp,
})
io.to(workflowId).except(senderSocketIds).emit('variable-update', broadcastPayload)
} else {
io.to(workflowId).emit('variable-update', {
variableId,
field,
value,
timestamp,
})
io.to(workflowId).emit('variable-update', broadcastPayload)
}
// Confirm all coalesced operationIds (io.to(socketId) works cross-pod)

View File

@@ -24,16 +24,40 @@ let emitWorkflowOperation:
| ((operation: string, target: string, payload: any, operationId?: string) => void)
| null = null
let emitSubblockUpdate:
| ((blockId: string, subblockId: string, value: any, operationId?: string) => void)
| ((
blockId: string,
subblockId: string,
value: any,
operationId: string | undefined,
workflowId: string
) => void)
| null = null
let emitVariableUpdate:
| ((variableId: string, field: string, value: any, operationId?: string) => void)
| ((
variableId: string,
field: string,
value: any,
operationId: string | undefined,
workflowId: string
) => void)
| null = null
export function registerEmitFunctions(
workflowEmit: (operation: string, target: string, payload: any, operationId?: string) => void,
subblockEmit: (blockId: string, subblockId: string, value: any, operationId?: string) => void,
variableEmit: (variableId: string, field: string, value: any, operationId?: string) => void,
subblockEmit: (
blockId: string,
subblockId: string,
value: any,
operationId: string | undefined,
workflowId: string
) => void,
variableEmit: (
variableId: string,
field: string,
value: any,
operationId: string | undefined,
workflowId: string
) => void,
workflowId: string | null
) {
emitWorkflowOperation = workflowEmit
@@ -196,14 +220,16 @@ export const useOperationQueueStore = create<OperationQueueState>((set, get) =>
}
if (!retryable) {
logger.debug('Operation marked as non-retryable, removing from queue', { operationId })
logger.error(
'Operation failed with non-retryable error - state out of sync, triggering offline mode',
{
operationId,
operation: operation.operation.operation,
target: operation.operation.target,
}
)
set((state) => ({
operations: state.operations.filter((op) => op.id !== operationId),
isProcessing: false,
}))
get().processNextOperation()
get().triggerOfflineMode()
return
}
@@ -305,11 +331,23 @@ export const useOperationQueueStore = create<OperationQueueState>((set, get) =>
const { operation: op, target, payload } = nextOperation.operation
if (op === 'subblock-update' && target === 'subblock') {
if (emitSubblockUpdate) {
emitSubblockUpdate(payload.blockId, payload.subblockId, payload.value, nextOperation.id)
emitSubblockUpdate(
payload.blockId,
payload.subblockId,
payload.value,
nextOperation.id,
nextOperation.workflowId
)
}
} else if (op === 'variable-update' && target === 'variable') {
if (emitVariableUpdate) {
emitVariableUpdate(payload.variableId, payload.field, payload.value, nextOperation.id)
emitVariableUpdate(
payload.variableId,
payload.field,
payload.value,
nextOperation.id,
nextOperation.workflowId
)
}
} else {
if (emitWorkflowOperation) {

View File

@@ -26,6 +26,49 @@ import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
/**
* Helper function to add a single block using batchAddBlocks.
* Provides a simpler interface for tests.
*/
function addBlock(
id: string,
type: string,
name: string,
position: { x: number; y: number },
data?: Record<string, unknown>,
parentId?: string,
extent?: 'parent',
blockProperties?: {
enabled?: boolean
horizontalHandles?: boolean
advancedMode?: boolean
triggerMode?: boolean
height?: number
}
) {
const blockData = {
...data,
...(parentId && { parentId, extent: extent || 'parent' }),
}
useWorkflowStore.getState().batchAddBlocks([
{
id,
type,
name,
position,
subBlocks: {},
outputs: {},
enabled: blockProperties?.enabled ?? true,
horizontalHandles: blockProperties?.horizontalHandles ?? true,
advancedMode: blockProperties?.advancedMode ?? false,
triggerMode: blockProperties?.triggerMode ?? false,
height: blockProperties?.height ?? 0,
data: blockData,
},
])
}
describe('workflow store', () => {
beforeEach(() => {
const localStorageMock = createMockStorage()
@@ -39,10 +82,8 @@ describe('workflow store', () => {
})
})
describe('addBlock', () => {
describe('batchAddBlocks (via addBlock helper)', () => {
it('should add a block with correct default properties', () => {
const { addBlock } = useWorkflowStore.getState()
addBlock('agent-1', 'agent', 'My Agent', { x: 100, y: 200 })
const { blocks } = useWorkflowStore.getState()
@@ -53,8 +94,6 @@ describe('workflow store', () => {
})
it('should add a block with parent relationship for containers', () => {
const { addBlock } = useWorkflowStore.getState()
addBlock('loop-1', 'loop', 'My Loop', { x: 0, y: 0 }, { loopType: 'for', count: 3 })
addBlock(
'child-1',
@@ -73,8 +112,6 @@ describe('workflow store', () => {
})
it('should add multiple blocks correctly', () => {
const { addBlock } = useWorkflowStore.getState()
addBlock('block-1', 'starter', 'Start', { x: 0, y: 0 })
addBlock('block-2', 'agent', 'Agent', { x: 200, y: 0 })
addBlock('block-3', 'function', 'Function', { x: 400, y: 0 })
@@ -87,8 +124,6 @@ describe('workflow store', () => {
})
it('should create a block with default properties when no blockProperties provided', () => {
const { addBlock } = useWorkflowStore.getState()
addBlock('agent1', 'agent', 'Test Agent', { x: 100, y: 200 })
const state = useWorkflowStore.getState()
@@ -105,8 +140,6 @@ describe('workflow store', () => {
})
it('should create a block with custom blockProperties for regular blocks', () => {
const { addBlock } = useWorkflowStore.getState()
addBlock(
'agent1',
'agent',
@@ -134,8 +167,6 @@ describe('workflow store', () => {
})
it('should create a loop block with custom blockProperties', () => {
const { addBlock } = useWorkflowStore.getState()
addBlock(
'loop1',
'loop',
@@ -163,8 +194,6 @@ describe('workflow store', () => {
})
it('should create a parallel block with custom blockProperties', () => {
const { addBlock } = useWorkflowStore.getState()
addBlock(
'parallel1',
'parallel',
@@ -192,8 +221,6 @@ describe('workflow store', () => {
})
it('should handle partial blockProperties (only some properties provided)', () => {
const { addBlock } = useWorkflowStore.getState()
addBlock(
'agent1',
'agent',
@@ -216,8 +243,6 @@ describe('workflow store', () => {
})
it('should handle blockProperties with parent relationships', () => {
const { addBlock } = useWorkflowStore.getState()
addBlock('loop1', 'loop', 'Parent Loop', { x: 0, y: 0 })
addBlock(
@@ -249,7 +274,7 @@ describe('workflow store', () => {
describe('batchRemoveBlocks', () => {
it('should remove a block', () => {
const { addBlock, batchRemoveBlocks } = useWorkflowStore.getState()
const { batchRemoveBlocks } = useWorkflowStore.getState()
addBlock('block-1', 'function', 'Test', { x: 0, y: 0 })
batchRemoveBlocks(['block-1'])
@@ -259,7 +284,7 @@ describe('workflow store', () => {
})
it('should remove connected edges when block is removed', () => {
const { addBlock, batchAddEdges, batchRemoveBlocks } = useWorkflowStore.getState()
const { batchAddEdges, batchRemoveBlocks } = useWorkflowStore.getState()
addBlock('block-1', 'starter', 'Start', { x: 0, y: 0 })
addBlock('block-2', 'function', 'Middle', { x: 200, y: 0 })
@@ -286,7 +311,7 @@ describe('workflow store', () => {
describe('batchAddEdges', () => {
it('should add an edge between two blocks', () => {
const { addBlock, batchAddEdges } = useWorkflowStore.getState()
const { batchAddEdges } = useWorkflowStore.getState()
addBlock('block-1', 'starter', 'Start', { x: 0, y: 0 })
addBlock('block-2', 'function', 'End', { x: 200, y: 0 })
@@ -298,7 +323,7 @@ describe('workflow store', () => {
})
it('should not add duplicate connections', () => {
const { addBlock, batchAddEdges } = useWorkflowStore.getState()
const { batchAddEdges } = useWorkflowStore.getState()
addBlock('block-1', 'starter', 'Start', { x: 0, y: 0 })
addBlock('block-2', 'function', 'End', { x: 200, y: 0 })
@@ -313,7 +338,7 @@ describe('workflow store', () => {
describe('batchRemoveEdges', () => {
it('should remove an edge by id', () => {
const { addBlock, batchAddEdges, batchRemoveEdges } = useWorkflowStore.getState()
const { batchAddEdges, batchRemoveEdges } = useWorkflowStore.getState()
addBlock('block-1', 'starter', 'Start', { x: 0, y: 0 })
addBlock('block-2', 'function', 'End', { x: 200, y: 0 })
@@ -335,7 +360,7 @@ describe('workflow store', () => {
describe('clear', () => {
it('should clear all blocks and edges', () => {
const { addBlock, batchAddEdges, clear } = useWorkflowStore.getState()
const { batchAddEdges, clear } = useWorkflowStore.getState()
addBlock('block-1', 'starter', 'Start', { x: 0, y: 0 })
addBlock('block-2', 'function', 'End', { x: 200, y: 0 })
@@ -351,7 +376,7 @@ describe('workflow store', () => {
describe('batchToggleEnabled', () => {
it('should toggle block enabled state', () => {
const { addBlock, batchToggleEnabled } = useWorkflowStore.getState()
const { batchToggleEnabled } = useWorkflowStore.getState()
addBlock('block-1', 'function', 'Test', { x: 0, y: 0 })
@@ -367,7 +392,7 @@ describe('workflow store', () => {
describe('duplicateBlock', () => {
it('should duplicate a block', () => {
const { addBlock, duplicateBlock } = useWorkflowStore.getState()
const { duplicateBlock } = useWorkflowStore.getState()
addBlock('original', 'agent', 'Original Agent', { x: 0, y: 0 })
@@ -391,7 +416,7 @@ describe('workflow store', () => {
describe('batchUpdatePositions', () => {
it('should update block position', () => {
const { addBlock, batchUpdatePositions } = useWorkflowStore.getState()
const { batchUpdatePositions } = useWorkflowStore.getState()
addBlock('block-1', 'function', 'Test', { x: 0, y: 0 })
@@ -404,7 +429,7 @@ describe('workflow store', () => {
describe('loop management', () => {
it('should regenerate loops when updateLoopCount is called', () => {
const { addBlock, updateLoopCount } = useWorkflowStore.getState()
const { updateLoopCount } = useWorkflowStore.getState()
addBlock(
'loop1',
@@ -428,7 +453,7 @@ describe('workflow store', () => {
})
it('should regenerate loops when updateLoopType is called', () => {
const { addBlock, updateLoopType } = useWorkflowStore.getState()
const { updateLoopType } = useWorkflowStore.getState()
addBlock(
'loop1',
@@ -453,7 +478,7 @@ describe('workflow store', () => {
})
it('should regenerate loops when updateLoopCollection is called', () => {
const { addBlock, updateLoopCollection } = useWorkflowStore.getState()
const { updateLoopCollection } = useWorkflowStore.getState()
addBlock(
'loop1',
@@ -476,7 +501,7 @@ describe('workflow store', () => {
})
it('should clamp loop count between 1 and 1000', () => {
const { addBlock, updateLoopCount } = useWorkflowStore.getState()
const { updateLoopCount } = useWorkflowStore.getState()
addBlock(
'loop1',
@@ -502,7 +527,7 @@ describe('workflow store', () => {
describe('parallel management', () => {
it('should regenerate parallels when updateParallelCount is called', () => {
const { addBlock, updateParallelCount } = useWorkflowStore.getState()
const { updateParallelCount } = useWorkflowStore.getState()
addBlock(
'parallel1',
@@ -525,7 +550,7 @@ describe('workflow store', () => {
})
it('should regenerate parallels when updateParallelCollection is called', () => {
const { addBlock, updateParallelCollection } = useWorkflowStore.getState()
const { updateParallelCollection } = useWorkflowStore.getState()
addBlock(
'parallel1',
@@ -552,7 +577,7 @@ describe('workflow store', () => {
})
it('should clamp parallel count between 1 and 20', () => {
const { addBlock, updateParallelCount } = useWorkflowStore.getState()
const { updateParallelCount } = useWorkflowStore.getState()
addBlock(
'parallel1',
@@ -575,7 +600,7 @@ describe('workflow store', () => {
})
it('should regenerate parallels when updateParallelType is called', () => {
const { addBlock, updateParallelType } = useWorkflowStore.getState()
const { updateParallelType } = useWorkflowStore.getState()
addBlock(
'parallel1',
@@ -601,7 +626,7 @@ describe('workflow store', () => {
describe('mode switching', () => {
it('should toggle advanced mode on a block', () => {
const { addBlock, toggleBlockAdvancedMode } = useWorkflowStore.getState()
const { toggleBlockAdvancedMode } = useWorkflowStore.getState()
addBlock('agent1', 'agent', 'Test Agent', { x: 0, y: 0 })
@@ -618,7 +643,7 @@ describe('workflow store', () => {
})
it('should preserve systemPrompt and userPrompt when switching modes', () => {
const { addBlock, toggleBlockAdvancedMode } = useWorkflowStore.getState()
const { toggleBlockAdvancedMode } = useWorkflowStore.getState()
const { setState: setSubBlockState } = useSubBlockStore
useWorkflowRegistry.setState({ activeWorkflowId: 'test-workflow' })
addBlock('agent1', 'agent', 'Test Agent', { x: 0, y: 0 })
@@ -651,7 +676,7 @@ describe('workflow store', () => {
})
it('should preserve memories when switching from advanced to basic mode', () => {
const { addBlock, toggleBlockAdvancedMode } = useWorkflowStore.getState()
const { toggleBlockAdvancedMode } = useWorkflowStore.getState()
const { setState: setSubBlockState } = useSubBlockStore
useWorkflowRegistry.setState({ activeWorkflowId: 'test-workflow' })
@@ -691,7 +716,7 @@ describe('workflow store', () => {
})
it('should handle mode switching when no subblock values exist', () => {
const { addBlock, toggleBlockAdvancedMode } = useWorkflowStore.getState()
const { toggleBlockAdvancedMode } = useWorkflowStore.getState()
useWorkflowRegistry.setState({ activeWorkflowId: 'test-workflow' })
@@ -753,7 +778,7 @@ describe('workflow store', () => {
describe('replaceWorkflowState', () => {
it('should replace entire workflow state', () => {
const { addBlock, replaceWorkflowState } = useWorkflowStore.getState()
const { replaceWorkflowState } = useWorkflowStore.getState()
addBlock('old-1', 'function', 'Old', { x: 0, y: 0 })
@@ -769,7 +794,7 @@ describe('workflow store', () => {
describe('getWorkflowState', () => {
it('should return current workflow state', () => {
const { addBlock, getWorkflowState } = useWorkflowStore.getState()
const { getWorkflowState } = useWorkflowStore.getState()
addBlock('block-1', 'starter', 'Start', { x: 0, y: 0 })
addBlock('block-2', 'function', 'End', { x: 200, y: 0 })
@@ -782,9 +807,346 @@ describe('workflow store', () => {
})
})
describe('loop/parallel regeneration optimization', () => {
it('should NOT regenerate loops when adding a regular block without parentId', () => {
// Add a loop first
addBlock('loop-1', 'loop', 'Loop 1', { x: 0, y: 0 }, { loopType: 'for', count: 5 })
const stateAfterLoop = useWorkflowStore.getState()
const loopsAfterLoop = stateAfterLoop.loops
// Add a regular block (no parentId)
addBlock('agent-1', 'agent', 'Agent 1', { x: 200, y: 0 })
const stateAfterAgent = useWorkflowStore.getState()
// Loops should be unchanged (same content)
expect(Object.keys(stateAfterAgent.loops)).toEqual(Object.keys(loopsAfterLoop))
expect(stateAfterAgent.loops['loop-1'].nodes).toEqual(loopsAfterLoop['loop-1'].nodes)
})
it('should regenerate loops when adding a child to a loop', () => {
// Add a loop
addBlock('loop-1', 'loop', 'Loop 1', { x: 0, y: 0 }, { loopType: 'for', count: 5 })
const stateAfterLoop = useWorkflowStore.getState()
expect(stateAfterLoop.loops['loop-1'].nodes).toEqual([])
// Add a child block to the loop
addBlock(
'child-1',
'function',
'Child 1',
{ x: 50, y: 50 },
{ parentId: 'loop-1' },
'loop-1',
'parent'
)
const stateAfterChild = useWorkflowStore.getState()
// Loop should now include the child
expect(stateAfterChild.loops['loop-1'].nodes).toContain('child-1')
})
it('should NOT regenerate parallels when adding a child to a loop', () => {
// Add both a loop and a parallel
addBlock('loop-1', 'loop', 'Loop 1', { x: 0, y: 0 }, { loopType: 'for', count: 5 })
addBlock('parallel-1', 'parallel', 'Parallel 1', { x: 300, y: 0 }, { count: 3 })
const stateAfterContainers = useWorkflowStore.getState()
const parallelsAfterContainers = stateAfterContainers.parallels
// Add a child to the loop (not the parallel)
addBlock(
'child-1',
'function',
'Child 1',
{ x: 50, y: 50 },
{ parentId: 'loop-1' },
'loop-1',
'parent'
)
const stateAfterChild = useWorkflowStore.getState()
// Parallels should be unchanged
expect(stateAfterChild.parallels['parallel-1'].nodes).toEqual(
parallelsAfterContainers['parallel-1'].nodes
)
})
it('should regenerate parallels when adding a child to a parallel', () => {
// Add a parallel
addBlock('parallel-1', 'parallel', 'Parallel 1', { x: 0, y: 0 }, { count: 3 })
const stateAfterParallel = useWorkflowStore.getState()
expect(stateAfterParallel.parallels['parallel-1'].nodes).toEqual([])
// Add a child block to the parallel
addBlock(
'child-1',
'function',
'Child 1',
{ x: 50, y: 50 },
{ parentId: 'parallel-1' },
'parallel-1',
'parent'
)
const stateAfterChild = useWorkflowStore.getState()
// Parallel should now include the child
expect(stateAfterChild.parallels['parallel-1'].nodes).toContain('child-1')
})
it('should handle adding blocks in any order and produce correct final state', () => {
// Add child BEFORE the loop (simulating undo-redo edge case)
// Note: The child's parentId points to a loop that doesn't exist yet
addBlock(
'child-1',
'function',
'Child 1',
{ x: 50, y: 50 },
{ parentId: 'loop-1' },
'loop-1',
'parent'
)
// At this point, the child exists but loop doesn't
const stateAfterChild = useWorkflowStore.getState()
expect(stateAfterChild.blocks['child-1']).toBeDefined()
expect(stateAfterChild.loops['loop-1']).toBeUndefined()
// Now add the loop
addBlock('loop-1', 'loop', 'Loop 1', { x: 0, y: 0 }, { loopType: 'for', count: 5 })
// Final state should be correct - loop should include the child
const finalState = useWorkflowStore.getState()
expect(finalState.loops['loop-1']).toBeDefined()
expect(finalState.loops['loop-1'].nodes).toContain('child-1')
})
})
describe('batchAddBlocks optimization', () => {
it('should NOT regenerate loops/parallels when adding regular blocks', () => {
const { batchAddBlocks } = useWorkflowStore.getState()
// Set up initial state with a loop
useWorkflowStore.setState({
blocks: {
'loop-1': {
id: 'loop-1',
type: 'loop',
name: 'Loop 1',
position: { x: 0, y: 0 },
subBlocks: {},
outputs: {},
enabled: true,
horizontalHandles: true,
advancedMode: false,
triggerMode: false,
height: 0,
data: { loopType: 'for', count: 5 },
},
},
edges: [],
loops: {
'loop-1': {
id: 'loop-1',
nodes: [],
iterations: 5,
loopType: 'for',
enabled: true,
},
},
parallels: {},
})
const stateBefore = useWorkflowStore.getState()
// Add regular blocks (no parentId, not loop/parallel type)
batchAddBlocks([
{
id: 'agent-1',
type: 'agent',
name: 'Agent 1',
position: { x: 200, y: 0 },
subBlocks: {},
outputs: {},
enabled: true,
},
{
id: 'function-1',
type: 'function',
name: 'Function 1',
position: { x: 400, y: 0 },
subBlocks: {},
outputs: {},
enabled: true,
},
])
const stateAfter = useWorkflowStore.getState()
// Loops should be unchanged
expect(stateAfter.loops['loop-1'].nodes).toEqual(stateBefore.loops['loop-1'].nodes)
})
it('should regenerate loops when batch adding a loop block', () => {
const { batchAddBlocks } = useWorkflowStore.getState()
batchAddBlocks([
{
id: 'loop-1',
type: 'loop',
name: 'Loop 1',
position: { x: 0, y: 0 },
subBlocks: {},
outputs: {},
enabled: true,
data: { loopType: 'for', count: 5 },
},
])
const state = useWorkflowStore.getState()
expect(state.loops['loop-1']).toBeDefined()
expect(state.loops['loop-1'].iterations).toBe(5)
})
it('should regenerate loops when batch adding a child of a loop', () => {
const { batchAddBlocks } = useWorkflowStore.getState()
// First add a loop
batchAddBlocks([
{
id: 'loop-1',
type: 'loop',
name: 'Loop 1',
position: { x: 0, y: 0 },
subBlocks: {},
outputs: {},
enabled: true,
data: { loopType: 'for', count: 5 },
},
])
// Then add a child
batchAddBlocks([
{
id: 'child-1',
type: 'function',
name: 'Child 1',
position: { x: 50, y: 50 },
subBlocks: {},
outputs: {},
enabled: true,
data: { parentId: 'loop-1' },
},
])
const state = useWorkflowStore.getState()
expect(state.loops['loop-1'].nodes).toContain('child-1')
})
it('should correctly handle batch adding loop and its children together', () => {
const { batchAddBlocks } = useWorkflowStore.getState()
// Add loop and child in same batch
batchAddBlocks([
{
id: 'loop-1',
type: 'loop',
name: 'Loop 1',
position: { x: 0, y: 0 },
subBlocks: {},
outputs: {},
enabled: true,
data: { loopType: 'for', count: 5 },
},
{
id: 'child-1',
type: 'function',
name: 'Child 1',
position: { x: 50, y: 50 },
subBlocks: {},
outputs: {},
enabled: true,
data: { parentId: 'loop-1' },
},
])
const state = useWorkflowStore.getState()
expect(state.loops['loop-1']).toBeDefined()
expect(state.loops['loop-1'].nodes).toContain('child-1')
})
})
describe('edge operations should not affect loops/parallels', () => {
it('should preserve loops when adding edges', () => {
const { batchAddEdges } = useWorkflowStore.getState()
// Create a loop with a child
addBlock('loop-1', 'loop', 'Loop 1', { x: 0, y: 0 }, { loopType: 'for', count: 5 })
addBlock(
'child-1',
'function',
'Child 1',
{ x: 50, y: 50 },
{ parentId: 'loop-1' },
'loop-1',
'parent'
)
addBlock('external-1', 'function', 'External', { x: 300, y: 0 })
const stateBeforeEdge = useWorkflowStore.getState()
const loopsBeforeEdge = stateBeforeEdge.loops
// Add an edge (should not affect loops)
batchAddEdges([{ id: 'e1', source: 'loop-1', target: 'external-1' }])
const stateAfterEdge = useWorkflowStore.getState()
// Loops should be unchanged
expect(stateAfterEdge.loops['loop-1'].nodes).toEqual(loopsBeforeEdge['loop-1'].nodes)
expect(stateAfterEdge.loops['loop-1'].iterations).toEqual(
loopsBeforeEdge['loop-1'].iterations
)
})
it('should preserve loops when removing edges', () => {
const { batchAddEdges, batchRemoveEdges } = useWorkflowStore.getState()
// Create a loop with a child and an edge
addBlock('loop-1', 'loop', 'Loop 1', { x: 0, y: 0 }, { loopType: 'for', count: 5 })
addBlock(
'child-1',
'function',
'Child 1',
{ x: 50, y: 50 },
{ parentId: 'loop-1' },
'loop-1',
'parent'
)
addBlock('external-1', 'function', 'External', { x: 300, y: 0 })
batchAddEdges([{ id: 'e1', source: 'loop-1', target: 'external-1' }])
const stateBeforeRemove = useWorkflowStore.getState()
const loopsBeforeRemove = stateBeforeRemove.loops
// Remove the edge
batchRemoveEdges(['e1'])
const stateAfterRemove = useWorkflowStore.getState()
// Loops should be unchanged
expect(stateAfterRemove.loops['loop-1'].nodes).toEqual(loopsBeforeRemove['loop-1'].nodes)
})
})
describe('batchToggleLocked', () => {
it('should toggle block locked state', () => {
const { addBlock, batchToggleLocked } = useWorkflowStore.getState()
const { batchToggleLocked } = useWorkflowStore.getState()
addBlock('block-1', 'function', 'Test', { x: 0, y: 0 })
@@ -799,7 +1161,7 @@ describe('workflow store', () => {
})
it('should cascade lock to children when locking a loop', () => {
const { addBlock, batchToggleLocked } = useWorkflowStore.getState()
const { batchToggleLocked } = useWorkflowStore.getState()
addBlock('loop-1', 'loop', 'My Loop', { x: 0, y: 0 }, { loopType: 'for', count: 3 })
addBlock(
@@ -820,7 +1182,7 @@ describe('workflow store', () => {
})
it('should cascade unlock to children when unlocking a parallel', () => {
const { addBlock, batchToggleLocked } = useWorkflowStore.getState()
const { batchToggleLocked } = useWorkflowStore.getState()
addBlock('parallel-1', 'parallel', 'My Parallel', { x: 0, y: 0 }, { count: 3 })
addBlock(
@@ -846,7 +1208,7 @@ describe('workflow store', () => {
})
it('should toggle multiple blocks at once', () => {
const { addBlock, batchToggleLocked } = useWorkflowStore.getState()
const { batchToggleLocked } = useWorkflowStore.getState()
addBlock('block-1', 'function', 'Test 1', { x: 0, y: 0 })
addBlock('block-2', 'function', 'Test 2', { x: 100, y: 0 })
@@ -861,7 +1223,7 @@ describe('workflow store', () => {
describe('setBlockLocked', () => {
it('should set block locked state', () => {
const { addBlock, setBlockLocked } = useWorkflowStore.getState()
const { setBlockLocked } = useWorkflowStore.getState()
addBlock('block-1', 'function', 'Test', { x: 0, y: 0 })
@@ -873,7 +1235,7 @@ describe('workflow store', () => {
})
it('should not update if locked state is already the target value', () => {
const { addBlock, setBlockLocked } = useWorkflowStore.getState()
const { setBlockLocked } = useWorkflowStore.getState()
addBlock('block-1', 'function', 'Test', { x: 0, y: 0 })
@@ -889,7 +1251,7 @@ describe('workflow store', () => {
describe('duplicateBlock with locked', () => {
it('should unlock duplicate when duplicating a locked block', () => {
const { addBlock, setBlockLocked, duplicateBlock } = useWorkflowStore.getState()
const { setBlockLocked, duplicateBlock } = useWorkflowStore.getState()
addBlock('original', 'agent', 'Original Agent', { x: 0, y: 0 })
setBlockLocked('original', true)
@@ -915,7 +1277,7 @@ describe('workflow store', () => {
})
it('should create unlocked duplicate when duplicating an unlocked block', () => {
const { addBlock, duplicateBlock } = useWorkflowStore.getState()
const { duplicateBlock } = useWorkflowStore.getState()
addBlock('original', 'agent', 'Original Agent', { x: 0, y: 0 })
@@ -940,8 +1302,6 @@ describe('workflow store', () => {
parallels: {},
})
const { addBlock } = useWorkflowStore.getState()
addBlock('block1', 'agent', 'Column AD', { x: 0, y: 0 })
addBlock('block2', 'function', 'Employee Length', { x: 100, y: 0 })
addBlock('block3', 'starter', 'Start', { x: 200, y: 0 })

View File

@@ -3,8 +3,6 @@ import type { Edge } from 'reactflow'
import { create } from 'zustand'
import { devtools } from 'zustand/middleware'
import { DEFAULT_DUPLICATE_OFFSET } from '@/lib/workflows/autolayout/constants'
import { getBlockOutputs } from '@/lib/workflows/blocks/block-outputs'
import { getBlock } from '@/blocks'
import type { SubBlockConfig } from '@/blocks/types'
import { normalizeName, RESERVED_BLOCK_NAMES } from '@/executor/constants'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
@@ -114,138 +112,6 @@ export const useWorkflowStore = create<WorkflowStore>()(
set({ needsRedeployment })
},
addBlock: (
id: string,
type: string,
name: string,
position: Position,
data?: Record<string, any>,
parentId?: string,
extent?: 'parent',
blockProperties?: {
enabled?: boolean
horizontalHandles?: boolean
advancedMode?: boolean
triggerMode?: boolean
height?: number
locked?: boolean
}
) => {
const blockConfig = getBlock(type)
// For custom nodes like loop and parallel that don't use BlockConfig
if (!blockConfig && (type === 'loop' || type === 'parallel')) {
// Merge parentId and extent into data if provided
const nodeData = {
...data,
...(parentId && { parentId, extent: extent || 'parent' }),
}
const newState = {
blocks: {
...get().blocks,
[id]: {
id,
type,
name,
position,
subBlocks: {},
outputs: {},
enabled: blockProperties?.enabled ?? true,
horizontalHandles: blockProperties?.horizontalHandles ?? true,
advancedMode: blockProperties?.advancedMode ?? false,
triggerMode: blockProperties?.triggerMode ?? false,
height: blockProperties?.height ?? 0,
data: nodeData,
locked: blockProperties?.locked ?? false,
},
},
edges: [...get().edges],
loops: get().generateLoopBlocks(),
parallels: get().generateParallelBlocks(),
}
set(newState)
get().updateLastSaved()
return
}
if (!blockConfig) return
// Merge parentId and extent into data for regular blocks
const nodeData = {
...data,
...(parentId && { parentId, extent: extent || 'parent' }),
}
const subBlocks: Record<string, SubBlockState> = {}
const subBlockStore = useSubBlockStore.getState()
const activeWorkflowId = useWorkflowRegistry.getState().activeWorkflowId
blockConfig.subBlocks.forEach((subBlock) => {
const subBlockId = subBlock.id
const initialValue = resolveInitialSubblockValue(subBlock)
const normalizedValue =
initialValue !== undefined && initialValue !== null ? initialValue : null
subBlocks[subBlockId] = {
id: subBlockId,
type: subBlock.type,
value: normalizedValue as SubBlockState['value'],
}
if (activeWorkflowId) {
try {
const valueToStore =
initialValue !== undefined ? cloneInitialSubblockValue(initialValue) : null
subBlockStore.setValue(id, subBlockId, valueToStore)
} catch (error) {
logger.warn('Failed to seed sub-block store value during block creation', {
blockId: id,
subBlockId,
error: error instanceof Error ? error.message : String(error),
})
}
} else {
logger.warn('Cannot seed sub-block store value: activeWorkflowId not available', {
blockId: id,
subBlockId,
})
}
})
// Get outputs based on trigger mode
const triggerMode = blockProperties?.triggerMode ?? false
const outputs = getBlockOutputs(type, subBlocks, triggerMode)
const newState = {
blocks: {
...get().blocks,
[id]: {
id,
type,
name,
position,
subBlocks,
outputs,
enabled: blockProperties?.enabled ?? true,
horizontalHandles: blockProperties?.horizontalHandles ?? true,
advancedMode: blockProperties?.advancedMode ?? false,
triggerMode: triggerMode,
height: blockProperties?.height ?? 0,
layout: {},
data: nodeData,
locked: blockProperties?.locked ?? false,
},
},
edges: [...get().edges],
loops: get().generateLoopBlocks(),
parallels: get().generateParallelBlocks(),
}
set(newState)
get().updateLastSaved()
},
updateNodeDimensions: (id: string, dimensions: { width: number; height: number }) => {
set((state) => {
const block = state.blocks[id]
@@ -391,11 +257,27 @@ export const useWorkflowStore = create<WorkflowStore>()(
}
}
// Only regenerate loops/parallels if we're adding blocks that affect them:
// - Adding a loop/parallel container block
// - Adding a block as a child of a loop/parallel (has parentId pointing to one)
const needsLoopRegeneration = blocks.some(
(block) =>
block.type === 'loop' ||
(block.data?.parentId && newBlocks[block.data.parentId]?.type === 'loop')
)
const needsParallelRegeneration = blocks.some(
(block) =>
block.type === 'parallel' ||
(block.data?.parentId && newBlocks[block.data.parentId]?.type === 'parallel')
)
set({
blocks: newBlocks,
edges: newEdges,
loops: generateLoopBlocks(newBlocks),
parallels: generateParallelBlocks(newBlocks),
loops: needsLoopRegeneration ? generateLoopBlocks(newBlocks) : { ...get().loops },
parallels: needsParallelRegeneration
? generateParallelBlocks(newBlocks)
: { ...get().parallels },
})
if (subBlockValues && Object.keys(subBlockValues).length > 0) {
@@ -568,8 +450,9 @@ export const useWorkflowStore = create<WorkflowStore>()(
set({
blocks: { ...blocks },
edges: newEdges,
loops: generateLoopBlocks(blocks),
parallels: generateParallelBlocks(blocks),
// Edges don't affect loop/parallel structure (determined by parentId), skip regeneration
loops: { ...get().loops },
parallels: { ...get().parallels },
})
get().updateLastSaved()
@@ -583,8 +466,9 @@ export const useWorkflowStore = create<WorkflowStore>()(
set({
blocks: { ...blocks },
edges: newEdges,
loops: generateLoopBlocks(blocks),
parallels: generateParallelBlocks(blocks),
// Edges don't affect loop/parallel structure (determined by parentId), skip regeneration
loops: { ...get().loops },
parallels: { ...get().parallels },
})
get().updateLastSaved()

View File

@@ -178,23 +178,6 @@ export interface WorkflowState {
}
export interface WorkflowActions {
addBlock: (
id: string,
type: string,
name: string,
position: Position,
data?: Record<string, any>,
parentId?: string,
extent?: 'parent',
blockProperties?: {
enabled?: boolean
horizontalHandles?: boolean
advancedMode?: boolean
triggerMode?: boolean
height?: number
locked?: boolean
}
) => void
updateNodeDimensions: (id: string, dimensions: { width: number; height: number }) => void
batchUpdateBlocksWithParent: (
updates: Array<{

View File

@@ -125,8 +125,8 @@ app:
# Rate Limiting Configuration (per minute)
RATE_LIMIT_WINDOW_MS: "60000" # Rate limit window duration (1 minute)
RATE_LIMIT_FREE_SYNC: "10" # Sync API executions per minute
RATE_LIMIT_FREE_ASYNC: "50" # Async API executions per minute
RATE_LIMIT_FREE_SYNC: "50" # Sync API executions per minute
RATE_LIMIT_FREE_ASYNC: "200" # Async API executions per minute
# UI Branding & Whitelabeling Configuration
NEXT_PUBLIC_BRAND_NAME: "Sim" # Custom brand name