mirror of
https://github.com/simstudioai/sim.git
synced 2026-01-16 18:38:08 -05:00
Compare commits
7 Commits
staging
...
improvemen
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
08e3cc8a48 | ||
|
|
f0cb711564 | ||
|
|
95c2792ffd | ||
|
|
f9ee8059d1 | ||
|
|
b467baceb6 | ||
|
|
65b18d1333 | ||
|
|
8d9ceca1b1 |
@@ -1,3 +1,3 @@
|
||||
{
|
||||
"pages": ["index", "basics", "api", "logging", "costs"]
|
||||
"pages": ["index", "basics", "api", "form", "logging", "costs"]
|
||||
}
|
||||
|
||||
@@ -8,7 +8,6 @@ import { getSession } from '@/lib/auth'
|
||||
import { generateChatTitle } from '@/lib/copilot/chat-title'
|
||||
import { getCopilotModel } from '@/lib/copilot/config'
|
||||
import { SIM_AGENT_API_URL_DEFAULT, SIM_AGENT_VERSION } from '@/lib/copilot/constants'
|
||||
import { COPILOT_MODEL_IDS, COPILOT_REQUEST_MODES } from '@/lib/copilot/models'
|
||||
import {
|
||||
authenticateCopilotRequestSessionOnly,
|
||||
createBadRequestResponse,
|
||||
@@ -41,8 +40,34 @@ const ChatMessageSchema = z.object({
|
||||
userMessageId: z.string().optional(), // ID from frontend for the user message
|
||||
chatId: z.string().optional(),
|
||||
workflowId: z.string().min(1, 'Workflow ID is required'),
|
||||
model: z.enum(COPILOT_MODEL_IDS).optional().default('claude-4.5-opus'),
|
||||
mode: z.enum(COPILOT_REQUEST_MODES).optional().default('agent'),
|
||||
model: z
|
||||
.enum([
|
||||
'gpt-5-fast',
|
||||
'gpt-5',
|
||||
'gpt-5-medium',
|
||||
'gpt-5-high',
|
||||
'gpt-5.1-fast',
|
||||
'gpt-5.1',
|
||||
'gpt-5.1-medium',
|
||||
'gpt-5.1-high',
|
||||
'gpt-5-codex',
|
||||
'gpt-5.1-codex',
|
||||
'gpt-5.2',
|
||||
'gpt-5.2-codex',
|
||||
'gpt-5.2-pro',
|
||||
'gpt-4o',
|
||||
'gpt-4.1',
|
||||
'o3',
|
||||
'claude-4-sonnet',
|
||||
'claude-4.5-haiku',
|
||||
'claude-4.5-sonnet',
|
||||
'claude-4.5-opus',
|
||||
'claude-4.1-opus',
|
||||
'gemini-3-pro',
|
||||
])
|
||||
.optional()
|
||||
.default('claude-4.5-opus'),
|
||||
mode: z.enum(['ask', 'agent', 'plan']).optional().default('agent'),
|
||||
prefetch: z.boolean().optional(),
|
||||
createNewChat: z.boolean().optional().default(false),
|
||||
stream: z.boolean().optional().default(true),
|
||||
@@ -270,8 +295,7 @@ export async function POST(req: NextRequest) {
|
||||
}
|
||||
|
||||
const defaults = getCopilotModel('chat')
|
||||
const selectedModel = model || defaults.model
|
||||
const envModel = env.COPILOT_MODEL || defaults.model
|
||||
const modelToUse = env.COPILOT_MODEL || defaults.model
|
||||
|
||||
let providerConfig: CopilotProviderConfig | undefined
|
||||
const providerEnv = env.COPILOT_PROVIDER as any
|
||||
@@ -280,7 +304,7 @@ export async function POST(req: NextRequest) {
|
||||
if (providerEnv === 'azure-openai') {
|
||||
providerConfig = {
|
||||
provider: 'azure-openai',
|
||||
model: envModel,
|
||||
model: modelToUse,
|
||||
apiKey: env.AZURE_OPENAI_API_KEY,
|
||||
apiVersion: 'preview',
|
||||
endpoint: env.AZURE_OPENAI_ENDPOINT,
|
||||
@@ -288,7 +312,7 @@ export async function POST(req: NextRequest) {
|
||||
} else if (providerEnv === 'vertex') {
|
||||
providerConfig = {
|
||||
provider: 'vertex',
|
||||
model: envModel,
|
||||
model: modelToUse,
|
||||
apiKey: env.COPILOT_API_KEY,
|
||||
vertexProject: env.VERTEX_PROJECT,
|
||||
vertexLocation: env.VERTEX_LOCATION,
|
||||
@@ -296,15 +320,12 @@ export async function POST(req: NextRequest) {
|
||||
} else {
|
||||
providerConfig = {
|
||||
provider: providerEnv,
|
||||
model: selectedModel,
|
||||
model: modelToUse,
|
||||
apiKey: env.COPILOT_API_KEY,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const effectiveMode = mode === 'agent' ? 'build' : mode
|
||||
const transportMode = effectiveMode === 'build' ? 'agent' : effectiveMode
|
||||
|
||||
// Determine conversationId to use for this request
|
||||
const effectiveConversationId =
|
||||
(currentChat?.conversationId as string | undefined) || conversationId
|
||||
@@ -324,7 +345,7 @@ export async function POST(req: NextRequest) {
|
||||
}
|
||||
} | null = null
|
||||
|
||||
if (effectiveMode === 'build') {
|
||||
if (mode === 'agent') {
|
||||
// Build base tools (executed locally, not deferred)
|
||||
// Include function_execute for code execution capability
|
||||
baseTools = [
|
||||
@@ -431,8 +452,8 @@ export async function POST(req: NextRequest) {
|
||||
userId: authenticatedUserId,
|
||||
stream: stream,
|
||||
streamToolCalls: true,
|
||||
model: selectedModel,
|
||||
mode: transportMode,
|
||||
model: model,
|
||||
mode: mode,
|
||||
messageId: userMessageIdToUse,
|
||||
version: SIM_AGENT_VERSION,
|
||||
...(providerConfig ? { provider: providerConfig } : {}),
|
||||
@@ -456,7 +477,7 @@ export async function POST(req: NextRequest) {
|
||||
hasConversationId: !!effectiveConversationId,
|
||||
hasFileAttachments: processedFileContents.length > 0,
|
||||
messageLength: message.length,
|
||||
mode: effectiveMode,
|
||||
mode,
|
||||
hasTools: integrationTools.length > 0,
|
||||
toolCount: integrationTools.length,
|
||||
hasBaseTools: baseTools.length > 0,
|
||||
|
||||
@@ -4,7 +4,6 @@ import { createLogger } from '@sim/logger'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { COPILOT_MODES } from '@/lib/copilot/models'
|
||||
import {
|
||||
authenticateCopilotRequestSessionOnly,
|
||||
createInternalServerErrorResponse,
|
||||
@@ -46,7 +45,7 @@ const UpdateMessagesSchema = z.object({
|
||||
planArtifact: z.string().nullable().optional(),
|
||||
config: z
|
||||
.object({
|
||||
mode: z.enum(COPILOT_MODES).optional(),
|
||||
mode: z.enum(['ask', 'build', 'plan']).optional(),
|
||||
model: z.string().optional(),
|
||||
})
|
||||
.nullable()
|
||||
|
||||
@@ -14,7 +14,8 @@ import {
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { getEffectiveDecryptedEnv } from '@/lib/environment/utils'
|
||||
import { refreshTokenIfNeeded } from '@/app/api/auth/oauth/utils'
|
||||
import { resolveEnvVarReferences } from '@/executor/utils/reference-validation'
|
||||
import { REFERENCE } from '@/executor/constants'
|
||||
import { createEnvVarPattern } from '@/executor/utils/reference-validation'
|
||||
import { executeTool } from '@/tools'
|
||||
import { getTool, resolveToolId } from '@/tools/utils'
|
||||
|
||||
@@ -27,6 +28,45 @@ const ExecuteToolSchema = z.object({
|
||||
workflowId: z.string().optional(),
|
||||
})
|
||||
|
||||
/**
|
||||
* Resolves all {{ENV_VAR}} references in a value recursively
|
||||
* Works with strings, arrays, and objects
|
||||
*/
|
||||
function resolveEnvVarReferences(value: any, envVars: Record<string, string>): any {
|
||||
if (typeof value === 'string') {
|
||||
// Check for exact match: entire string is "{{VAR_NAME}}"
|
||||
const exactMatchPattern = new RegExp(
|
||||
`^\\${REFERENCE.ENV_VAR_START}([^}]+)\\${REFERENCE.ENV_VAR_END}$`
|
||||
)
|
||||
const exactMatch = exactMatchPattern.exec(value)
|
||||
if (exactMatch) {
|
||||
const envVarName = exactMatch[1].trim()
|
||||
return envVars[envVarName] ?? value
|
||||
}
|
||||
|
||||
// Check for embedded references: "prefix {{VAR}} suffix"
|
||||
const envVarPattern = createEnvVarPattern()
|
||||
return value.replace(envVarPattern, (match, varName) => {
|
||||
const trimmedName = varName.trim()
|
||||
return envVars[trimmedName] ?? match
|
||||
})
|
||||
}
|
||||
|
||||
if (Array.isArray(value)) {
|
||||
return value.map((item) => resolveEnvVarReferences(item, envVars))
|
||||
}
|
||||
|
||||
if (value !== null && typeof value === 'object') {
|
||||
const resolved: Record<string, any> = {}
|
||||
for (const [key, val] of Object.entries(value)) {
|
||||
resolved[key] = resolveEnvVarReferences(val, envVars)
|
||||
}
|
||||
return resolved
|
||||
}
|
||||
|
||||
return value
|
||||
}
|
||||
|
||||
export async function POST(req: NextRequest) {
|
||||
const tracker = createRequestTracker()
|
||||
|
||||
@@ -105,17 +145,7 @@ export async function POST(req: NextRequest) {
|
||||
|
||||
// Build execution params starting with LLM-provided arguments
|
||||
// Resolve all {{ENV_VAR}} references in the arguments
|
||||
const executionParams: Record<string, any> = resolveEnvVarReferences(
|
||||
toolArgs,
|
||||
decryptedEnvVars,
|
||||
{
|
||||
resolveExactMatch: true,
|
||||
allowEmbedded: true,
|
||||
trimKeys: true,
|
||||
onMissing: 'keep',
|
||||
deep: true,
|
||||
}
|
||||
) as Record<string, any>
|
||||
const executionParams: Record<string, any> = resolveEnvVarReferences(toolArgs, decryptedEnvVars)
|
||||
|
||||
logger.info(`[${tracker.requestId}] Resolved env var references in arguments`, {
|
||||
toolName,
|
||||
|
||||
@@ -2,13 +2,12 @@ import { createLogger } from '@sim/logger'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import type { CopilotModelId } from '@/lib/copilot/models'
|
||||
import { db } from '@/../../packages/db'
|
||||
import { settings } from '@/../../packages/db/schema'
|
||||
|
||||
const logger = createLogger('CopilotUserModelsAPI')
|
||||
|
||||
const DEFAULT_ENABLED_MODELS: Record<CopilotModelId, boolean> = {
|
||||
const DEFAULT_ENABLED_MODELS: Record<string, boolean> = {
|
||||
'gpt-4o': false,
|
||||
'gpt-4.1': false,
|
||||
'gpt-5-fast': false,
|
||||
@@ -29,7 +28,7 @@ const DEFAULT_ENABLED_MODELS: Record<CopilotModelId, boolean> = {
|
||||
'claude-4.5-haiku': true,
|
||||
'claude-4.5-sonnet': true,
|
||||
'claude-4.5-opus': true,
|
||||
'claude-4.1-opus': false,
|
||||
// 'claude-4.1-opus': true,
|
||||
'gemini-3-pro': true,
|
||||
}
|
||||
|
||||
@@ -55,9 +54,7 @@ export async function GET(request: NextRequest) {
|
||||
|
||||
const mergedModels = { ...DEFAULT_ENABLED_MODELS }
|
||||
for (const [modelId, enabled] of Object.entries(userModelsMap)) {
|
||||
if (modelId in mergedModels) {
|
||||
mergedModels[modelId as CopilotModelId] = enabled
|
||||
}
|
||||
mergedModels[modelId] = enabled
|
||||
}
|
||||
|
||||
const hasNewModels = Object.keys(DEFAULT_ENABLED_MODELS).some(
|
||||
|
||||
@@ -9,7 +9,6 @@ import { escapeRegExp, normalizeName, REFERENCE } from '@/executor/constants'
|
||||
import {
|
||||
createEnvVarPattern,
|
||||
createWorkflowVariablePattern,
|
||||
resolveEnvVarReferences,
|
||||
} from '@/executor/utils/reference-validation'
|
||||
export const dynamic = 'force-dynamic'
|
||||
export const runtime = 'nodejs'
|
||||
@@ -480,29 +479,9 @@ function resolveEnvironmentVariables(
|
||||
const replacements: Array<{ match: string; index: number; varName: string; varValue: string }> =
|
||||
[]
|
||||
|
||||
const resolverVars: Record<string, string> = {}
|
||||
Object.entries(params).forEach(([key, value]) => {
|
||||
if (value) {
|
||||
resolverVars[key] = String(value)
|
||||
}
|
||||
})
|
||||
Object.entries(envVars).forEach(([key, value]) => {
|
||||
if (value) {
|
||||
resolverVars[key] = value
|
||||
}
|
||||
})
|
||||
|
||||
while ((match = regex.exec(code)) !== null) {
|
||||
const varName = match[1].trim()
|
||||
const resolved = resolveEnvVarReferences(match[0], resolverVars, {
|
||||
allowEmbedded: true,
|
||||
resolveExactMatch: true,
|
||||
trimKeys: true,
|
||||
onMissing: 'empty',
|
||||
deep: false,
|
||||
})
|
||||
const varValue =
|
||||
typeof resolved === 'string' ? resolved : resolved == null ? '' : String(resolved)
|
||||
const varValue = envVars[varName] || params[varName] || ''
|
||||
replacements.push({
|
||||
match: match[0],
|
||||
index: match.index,
|
||||
|
||||
@@ -5,7 +5,8 @@ import { McpClient } from '@/lib/mcp/client'
|
||||
import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware'
|
||||
import type { McpServerConfig, McpTransport } from '@/lib/mcp/types'
|
||||
import { createMcpErrorResponse, createMcpSuccessResponse } from '@/lib/mcp/utils'
|
||||
import { resolveEnvVarReferences } from '@/executor/utils/reference-validation'
|
||||
import { REFERENCE } from '@/executor/constants'
|
||||
import { createEnvVarPattern } from '@/executor/utils/reference-validation'
|
||||
|
||||
const logger = createLogger('McpServerTestAPI')
|
||||
|
||||
@@ -23,23 +24,22 @@ function isUrlBasedTransport(transport: McpTransport): boolean {
|
||||
* Resolve environment variables in strings
|
||||
*/
|
||||
function resolveEnvVars(value: string, envVars: Record<string, string>): string {
|
||||
const missingVars: string[] = []
|
||||
const resolvedValue = resolveEnvVarReferences(value, envVars, {
|
||||
allowEmbedded: true,
|
||||
resolveExactMatch: true,
|
||||
trimKeys: true,
|
||||
onMissing: 'keep',
|
||||
deep: false,
|
||||
missingKeys: missingVars,
|
||||
}) as string
|
||||
const envVarPattern = createEnvVarPattern()
|
||||
const envMatches = value.match(envVarPattern)
|
||||
if (!envMatches) return value
|
||||
|
||||
if (missingVars.length > 0) {
|
||||
const uniqueMissing = Array.from(new Set(missingVars))
|
||||
uniqueMissing.forEach((envKey) => {
|
||||
let resolvedValue = value
|
||||
for (const match of envMatches) {
|
||||
const envKey = match.slice(REFERENCE.ENV_VAR_START.length, -REFERENCE.ENV_VAR_END.length).trim()
|
||||
const envValue = envVars[envKey]
|
||||
|
||||
if (envValue === undefined) {
|
||||
logger.warn(`Environment variable "${envKey}" not found in MCP server test`)
|
||||
})
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
resolvedValue = resolvedValue.replace(match, envValue)
|
||||
}
|
||||
return resolvedValue
|
||||
}
|
||||
|
||||
|
||||
@@ -57,7 +57,6 @@ describe('Scheduled Workflow Execution API Route', () => {
|
||||
not: vi.fn((condition) => ({ type: 'not', condition })),
|
||||
isNull: vi.fn((field) => ({ type: 'isNull', field })),
|
||||
or: vi.fn((...conditions) => ({ type: 'or', conditions })),
|
||||
sql: vi.fn((strings, ...values) => ({ type: 'sql', strings, values })),
|
||||
}))
|
||||
|
||||
vi.doMock('@sim/db', () => {
|
||||
@@ -93,17 +92,6 @@ describe('Scheduled Workflow Execution API Route', () => {
|
||||
status: 'status',
|
||||
nextRunAt: 'nextRunAt',
|
||||
lastQueuedAt: 'lastQueuedAt',
|
||||
deploymentVersionId: 'deploymentVersionId',
|
||||
},
|
||||
workflowDeploymentVersion: {
|
||||
id: 'id',
|
||||
workflowId: 'workflowId',
|
||||
isActive: 'isActive',
|
||||
},
|
||||
workflow: {
|
||||
id: 'id',
|
||||
userId: 'userId',
|
||||
workspaceId: 'workspaceId',
|
||||
},
|
||||
}
|
||||
})
|
||||
@@ -146,7 +134,6 @@ describe('Scheduled Workflow Execution API Route', () => {
|
||||
not: vi.fn((condition) => ({ type: 'not', condition })),
|
||||
isNull: vi.fn((field) => ({ type: 'isNull', field })),
|
||||
or: vi.fn((...conditions) => ({ type: 'or', conditions })),
|
||||
sql: vi.fn((strings, ...values) => ({ type: 'sql', strings, values })),
|
||||
}))
|
||||
|
||||
vi.doMock('@sim/db', () => {
|
||||
@@ -182,17 +169,6 @@ describe('Scheduled Workflow Execution API Route', () => {
|
||||
status: 'status',
|
||||
nextRunAt: 'nextRunAt',
|
||||
lastQueuedAt: 'lastQueuedAt',
|
||||
deploymentVersionId: 'deploymentVersionId',
|
||||
},
|
||||
workflowDeploymentVersion: {
|
||||
id: 'id',
|
||||
workflowId: 'workflowId',
|
||||
isActive: 'isActive',
|
||||
},
|
||||
workflow: {
|
||||
id: 'id',
|
||||
userId: 'userId',
|
||||
workspaceId: 'workspaceId',
|
||||
},
|
||||
}
|
||||
})
|
||||
@@ -230,7 +206,6 @@ describe('Scheduled Workflow Execution API Route', () => {
|
||||
not: vi.fn((condition) => ({ type: 'not', condition })),
|
||||
isNull: vi.fn((field) => ({ type: 'isNull', field })),
|
||||
or: vi.fn((...conditions) => ({ type: 'or', conditions })),
|
||||
sql: vi.fn((strings, ...values) => ({ type: 'sql', strings, values })),
|
||||
}))
|
||||
|
||||
vi.doMock('@sim/db', () => {
|
||||
@@ -253,17 +228,6 @@ describe('Scheduled Workflow Execution API Route', () => {
|
||||
status: 'status',
|
||||
nextRunAt: 'nextRunAt',
|
||||
lastQueuedAt: 'lastQueuedAt',
|
||||
deploymentVersionId: 'deploymentVersionId',
|
||||
},
|
||||
workflowDeploymentVersion: {
|
||||
id: 'id',
|
||||
workflowId: 'workflowId',
|
||||
isActive: 'isActive',
|
||||
},
|
||||
workflow: {
|
||||
id: 'id',
|
||||
userId: 'userId',
|
||||
workspaceId: 'workspaceId',
|
||||
},
|
||||
}
|
||||
})
|
||||
@@ -301,7 +265,6 @@ describe('Scheduled Workflow Execution API Route', () => {
|
||||
not: vi.fn((condition) => ({ type: 'not', condition })),
|
||||
isNull: vi.fn((field) => ({ type: 'isNull', field })),
|
||||
or: vi.fn((...conditions) => ({ type: 'or', conditions })),
|
||||
sql: vi.fn((strings, ...values) => ({ type: 'sql', strings, values })),
|
||||
}))
|
||||
|
||||
vi.doMock('@sim/db', () => {
|
||||
@@ -347,17 +310,6 @@ describe('Scheduled Workflow Execution API Route', () => {
|
||||
status: 'status',
|
||||
nextRunAt: 'nextRunAt',
|
||||
lastQueuedAt: 'lastQueuedAt',
|
||||
deploymentVersionId: 'deploymentVersionId',
|
||||
},
|
||||
workflowDeploymentVersion: {
|
||||
id: 'id',
|
||||
workflowId: 'workflowId',
|
||||
isActive: 'isActive',
|
||||
},
|
||||
workflow: {
|
||||
id: 'id',
|
||||
userId: 'userId',
|
||||
workspaceId: 'workspaceId',
|
||||
},
|
||||
}
|
||||
})
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { db, workflowDeploymentVersion, workflowSchedule } from '@sim/db'
|
||||
import { db, workflowSchedule } from '@sim/db'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { tasks } from '@trigger.dev/sdk'
|
||||
import { and, eq, isNull, lt, lte, not, or, sql } from 'drizzle-orm'
|
||||
import { and, eq, isNull, lt, lte, not, or } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { verifyCronAuth } from '@/lib/auth/internal'
|
||||
import { isTriggerDevEnabled } from '@/lib/core/config/feature-flags'
|
||||
@@ -37,8 +37,7 @@ export async function GET(request: NextRequest) {
|
||||
or(
|
||||
isNull(workflowSchedule.lastQueuedAt),
|
||||
lt(workflowSchedule.lastQueuedAt, workflowSchedule.nextRunAt)
|
||||
),
|
||||
sql`${workflowSchedule.deploymentVersionId} = (select ${workflowDeploymentVersion.id} from ${workflowDeploymentVersion} where ${workflowDeploymentVersion.workflowId} = ${workflowSchedule.workflowId} and ${workflowDeploymentVersion.isActive} = true)`
|
||||
)
|
||||
)
|
||||
)
|
||||
.returning({
|
||||
|
||||
@@ -29,23 +29,12 @@ vi.mock('@sim/db', () => ({
|
||||
|
||||
vi.mock('@sim/db/schema', () => ({
|
||||
workflow: { id: 'id', userId: 'userId', workspaceId: 'workspaceId' },
|
||||
workflowSchedule: {
|
||||
workflowId: 'workflowId',
|
||||
blockId: 'blockId',
|
||||
deploymentVersionId: 'deploymentVersionId',
|
||||
},
|
||||
workflowDeploymentVersion: {
|
||||
id: 'id',
|
||||
workflowId: 'workflowId',
|
||||
isActive: 'isActive',
|
||||
},
|
||||
workflowSchedule: { workflowId: 'workflowId', blockId: 'blockId' },
|
||||
}))
|
||||
|
||||
vi.mock('drizzle-orm', () => ({
|
||||
eq: vi.fn(),
|
||||
and: vi.fn(),
|
||||
or: vi.fn(),
|
||||
isNull: vi.fn(),
|
||||
}))
|
||||
|
||||
vi.mock('@/lib/core/utils/request', () => ({
|
||||
@@ -67,11 +56,6 @@ function mockDbChain(results: any[]) {
|
||||
where: () => ({
|
||||
limit: () => results[callIndex++] || [],
|
||||
}),
|
||||
leftJoin: () => ({
|
||||
where: () => ({
|
||||
limit: () => results[callIndex++] || [],
|
||||
}),
|
||||
}),
|
||||
}),
|
||||
}))
|
||||
}
|
||||
@@ -90,16 +74,7 @@ describe('Schedule GET API', () => {
|
||||
it('returns schedule data for authorized user', async () => {
|
||||
mockDbChain([
|
||||
[{ userId: 'user-1', workspaceId: null }],
|
||||
[
|
||||
{
|
||||
schedule: {
|
||||
id: 'sched-1',
|
||||
cronExpression: '0 9 * * *',
|
||||
status: 'active',
|
||||
failedCount: 0,
|
||||
},
|
||||
},
|
||||
],
|
||||
[{ id: 'sched-1', cronExpression: '0 9 * * *', status: 'active', failedCount: 0 }],
|
||||
])
|
||||
|
||||
const res = await GET(createRequest('http://test/api/schedules?workflowId=wf-1'))
|
||||
@@ -153,7 +128,7 @@ describe('Schedule GET API', () => {
|
||||
it('allows workspace members to view', async () => {
|
||||
mockDbChain([
|
||||
[{ userId: 'other-user', workspaceId: 'ws-1' }],
|
||||
[{ schedule: { id: 'sched-1', status: 'active', failedCount: 0 } }],
|
||||
[{ id: 'sched-1', status: 'active', failedCount: 0 }],
|
||||
])
|
||||
|
||||
const res = await GET(createRequest('http://test/api/schedules?workflowId=wf-1'))
|
||||
@@ -164,7 +139,7 @@ describe('Schedule GET API', () => {
|
||||
it('indicates disabled schedule with failures', async () => {
|
||||
mockDbChain([
|
||||
[{ userId: 'user-1', workspaceId: null }],
|
||||
[{ schedule: { id: 'sched-1', status: 'disabled', failedCount: 100 } }],
|
||||
[{ id: 'sched-1', status: 'disabled', failedCount: 100 }],
|
||||
])
|
||||
|
||||
const res = await GET(createRequest('http://test/api/schedules?workflowId=wf-1'))
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { db } from '@sim/db'
|
||||
import { workflow, workflowDeploymentVersion, workflowSchedule } from '@sim/db/schema'
|
||||
import { workflow, workflowSchedule } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, eq, isNull, or } from 'drizzle-orm'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
@@ -62,24 +62,9 @@ export async function GET(req: NextRequest) {
|
||||
}
|
||||
|
||||
const schedule = await db
|
||||
.select({ schedule: workflowSchedule })
|
||||
.select()
|
||||
.from(workflowSchedule)
|
||||
.leftJoin(
|
||||
workflowDeploymentVersion,
|
||||
and(
|
||||
eq(workflowDeploymentVersion.workflowId, workflowSchedule.workflowId),
|
||||
eq(workflowDeploymentVersion.isActive, true)
|
||||
)
|
||||
)
|
||||
.where(
|
||||
and(
|
||||
...conditions,
|
||||
or(
|
||||
eq(workflowSchedule.deploymentVersionId, workflowDeploymentVersion.id),
|
||||
and(isNull(workflowDeploymentVersion.id), isNull(workflowSchedule.deploymentVersionId))
|
||||
)
|
||||
)
|
||||
)
|
||||
.where(conditions.length > 1 ? and(...conditions) : conditions[0])
|
||||
.limit(1)
|
||||
|
||||
const headers = new Headers()
|
||||
@@ -89,7 +74,7 @@ export async function GET(req: NextRequest) {
|
||||
return NextResponse.json({ schedule: null }, { headers })
|
||||
}
|
||||
|
||||
const scheduleData = schedule[0].schedule
|
||||
const scheduleData = schedule[0]
|
||||
const isDisabled = scheduleData.status === 'disabled'
|
||||
const hasFailures = scheduleData.failedCount > 0
|
||||
|
||||
|
||||
@@ -60,17 +60,7 @@ export const POST = withAdminAuthParams<RouteParams>(async (request, context) =>
|
||||
return internalErrorResponse(deployResult.error || 'Failed to deploy workflow')
|
||||
}
|
||||
|
||||
if (!deployResult.deploymentVersionId) {
|
||||
await undeployWorkflow({ workflowId })
|
||||
return internalErrorResponse('Failed to resolve deployment version')
|
||||
}
|
||||
|
||||
const scheduleResult = await createSchedulesForDeploy(
|
||||
workflowId,
|
||||
normalizedData.blocks,
|
||||
db,
|
||||
deployResult.deploymentVersionId
|
||||
)
|
||||
const scheduleResult = await createSchedulesForDeploy(workflowId, normalizedData.blocks, db)
|
||||
if (!scheduleResult.success) {
|
||||
logger.warn(`Schedule creation failed for workflow ${workflowId}: ${scheduleResult.error}`)
|
||||
}
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { db } from '@sim/db'
|
||||
import { webhook, workflow, workflowDeploymentVersion } from '@sim/db/schema'
|
||||
import { webhook, workflow } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, desc, eq, isNull, or } from 'drizzle-orm'
|
||||
import { and, desc, eq } from 'drizzle-orm'
|
||||
import { nanoid } from 'nanoid'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { getSession } from '@/lib/auth'
|
||||
@@ -71,23 +71,7 @@ export async function GET(request: NextRequest) {
|
||||
})
|
||||
.from(webhook)
|
||||
.innerJoin(workflow, eq(webhook.workflowId, workflow.id))
|
||||
.leftJoin(
|
||||
workflowDeploymentVersion,
|
||||
and(
|
||||
eq(workflowDeploymentVersion.workflowId, workflow.id),
|
||||
eq(workflowDeploymentVersion.isActive, true)
|
||||
)
|
||||
)
|
||||
.where(
|
||||
and(
|
||||
eq(webhook.workflowId, workflowId),
|
||||
eq(webhook.blockId, blockId),
|
||||
or(
|
||||
eq(webhook.deploymentVersionId, workflowDeploymentVersion.id),
|
||||
and(isNull(workflowDeploymentVersion.id), isNull(webhook.deploymentVersionId))
|
||||
)
|
||||
)
|
||||
)
|
||||
.where(and(eq(webhook.workflowId, workflowId), eq(webhook.blockId, blockId)))
|
||||
.orderBy(desc(webhook.updatedAt))
|
||||
|
||||
logger.info(
|
||||
@@ -165,23 +149,7 @@ export async function POST(request: NextRequest) {
|
||||
const existingForBlock = await db
|
||||
.select({ id: webhook.id, path: webhook.path })
|
||||
.from(webhook)
|
||||
.leftJoin(
|
||||
workflowDeploymentVersion,
|
||||
and(
|
||||
eq(workflowDeploymentVersion.workflowId, workflowId),
|
||||
eq(workflowDeploymentVersion.isActive, true)
|
||||
)
|
||||
)
|
||||
.where(
|
||||
and(
|
||||
eq(webhook.workflowId, workflowId),
|
||||
eq(webhook.blockId, blockId),
|
||||
or(
|
||||
eq(webhook.deploymentVersionId, workflowDeploymentVersion.id),
|
||||
and(isNull(workflowDeploymentVersion.id), isNull(webhook.deploymentVersionId))
|
||||
)
|
||||
)
|
||||
)
|
||||
.where(and(eq(webhook.workflowId, workflowId), eq(webhook.blockId, blockId)))
|
||||
.limit(1)
|
||||
|
||||
if (existingForBlock.length > 0) {
|
||||
@@ -257,23 +225,7 @@ export async function POST(request: NextRequest) {
|
||||
const existingForBlock = await db
|
||||
.select({ id: webhook.id })
|
||||
.from(webhook)
|
||||
.leftJoin(
|
||||
workflowDeploymentVersion,
|
||||
and(
|
||||
eq(workflowDeploymentVersion.workflowId, workflowId),
|
||||
eq(workflowDeploymentVersion.isActive, true)
|
||||
)
|
||||
)
|
||||
.where(
|
||||
and(
|
||||
eq(webhook.workflowId, workflowId),
|
||||
eq(webhook.blockId, blockId),
|
||||
or(
|
||||
eq(webhook.deploymentVersionId, workflowDeploymentVersion.id),
|
||||
and(isNull(workflowDeploymentVersion.id), isNull(webhook.deploymentVersionId))
|
||||
)
|
||||
)
|
||||
)
|
||||
.where(and(eq(webhook.workflowId, workflowId), eq(webhook.blockId, blockId)))
|
||||
.limit(1)
|
||||
if (existingForBlock.length > 0) {
|
||||
targetWebhookId = existingForBlock[0].id
|
||||
|
||||
@@ -152,6 +152,7 @@ export async function POST(
|
||||
const response = await queueWebhookExecution(foundWebhook, foundWorkflow, body, request, {
|
||||
requestId,
|
||||
path,
|
||||
executionTarget: 'deployed',
|
||||
})
|
||||
responses.push(response)
|
||||
}
|
||||
|
||||
@@ -22,13 +22,6 @@ export async function GET(_request: Request, { params }: { params: Promise<{ id:
|
||||
.select({
|
||||
id: chat.id,
|
||||
identifier: chat.identifier,
|
||||
title: chat.title,
|
||||
description: chat.description,
|
||||
customizations: chat.customizations,
|
||||
authType: chat.authType,
|
||||
allowedEmails: chat.allowedEmails,
|
||||
outputConfigs: chat.outputConfigs,
|
||||
password: chat.password,
|
||||
isActive: chat.isActive,
|
||||
})
|
||||
.from(chat)
|
||||
@@ -41,13 +34,6 @@ export async function GET(_request: Request, { params }: { params: Promise<{ id:
|
||||
? {
|
||||
id: deploymentResults[0].id,
|
||||
identifier: deploymentResults[0].identifier,
|
||||
title: deploymentResults[0].title,
|
||||
description: deploymentResults[0].description,
|
||||
customizations: deploymentResults[0].customizations,
|
||||
authType: deploymentResults[0].authType,
|
||||
allowedEmails: deploymentResults[0].allowedEmails,
|
||||
outputConfigs: deploymentResults[0].outputConfigs,
|
||||
hasPassword: Boolean(deploymentResults[0].password),
|
||||
}
|
||||
: null
|
||||
|
||||
|
||||
@@ -10,11 +10,7 @@ import {
|
||||
loadWorkflowFromNormalizedTables,
|
||||
undeployWorkflow,
|
||||
} from '@/lib/workflows/persistence/utils'
|
||||
import {
|
||||
cleanupDeploymentVersion,
|
||||
createSchedulesForDeploy,
|
||||
validateWorkflowSchedules,
|
||||
} from '@/lib/workflows/schedules'
|
||||
import { createSchedulesForDeploy, validateWorkflowSchedules } from '@/lib/workflows/schedules'
|
||||
import { validateWorkflowPermissions } from '@/lib/workflows/utils'
|
||||
import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'
|
||||
|
||||
@@ -135,6 +131,22 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
|
||||
return createErrorResponse(`Invalid schedule configuration: ${scheduleValidation.error}`, 400)
|
||||
}
|
||||
|
||||
const triggerSaveResult = await saveTriggerWebhooksForDeploy({
|
||||
request,
|
||||
workflowId: id,
|
||||
workflow: workflowData,
|
||||
userId: actorUserId,
|
||||
blocks: normalizedData.blocks,
|
||||
requestId,
|
||||
})
|
||||
|
||||
if (!triggerSaveResult.success) {
|
||||
return createErrorResponse(
|
||||
triggerSaveResult.error?.message || 'Failed to save trigger configuration',
|
||||
triggerSaveResult.error?.status || 500
|
||||
)
|
||||
}
|
||||
|
||||
const deployResult = await deployWorkflow({
|
||||
workflowId: id,
|
||||
deployedBy: actorUserId,
|
||||
@@ -146,58 +158,14 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
|
||||
}
|
||||
|
||||
const deployedAt = deployResult.deployedAt!
|
||||
const deploymentVersionId = deployResult.deploymentVersionId
|
||||
|
||||
if (!deploymentVersionId) {
|
||||
await undeployWorkflow({ workflowId: id })
|
||||
return createErrorResponse('Failed to resolve deployment version', 500)
|
||||
}
|
||||
|
||||
const triggerSaveResult = await saveTriggerWebhooksForDeploy({
|
||||
request,
|
||||
workflowId: id,
|
||||
workflow: workflowData,
|
||||
userId: actorUserId,
|
||||
blocks: normalizedData.blocks,
|
||||
requestId,
|
||||
deploymentVersionId,
|
||||
})
|
||||
|
||||
if (!triggerSaveResult.success) {
|
||||
await cleanupDeploymentVersion({
|
||||
workflowId: id,
|
||||
workflow: workflowData as Record<string, unknown>,
|
||||
requestId,
|
||||
deploymentVersionId,
|
||||
})
|
||||
await undeployWorkflow({ workflowId: id })
|
||||
return createErrorResponse(
|
||||
triggerSaveResult.error?.message || 'Failed to save trigger configuration',
|
||||
triggerSaveResult.error?.status || 500
|
||||
)
|
||||
}
|
||||
|
||||
let scheduleInfo: { scheduleId?: string; cronExpression?: string; nextRunAt?: Date } = {}
|
||||
const scheduleResult = await createSchedulesForDeploy(
|
||||
id,
|
||||
normalizedData.blocks,
|
||||
db,
|
||||
deploymentVersionId
|
||||
)
|
||||
const scheduleResult = await createSchedulesForDeploy(id, normalizedData.blocks, db)
|
||||
if (!scheduleResult.success) {
|
||||
logger.error(
|
||||
`[${requestId}] Failed to create schedule for workflow ${id}: ${scheduleResult.error}`
|
||||
)
|
||||
await cleanupDeploymentVersion({
|
||||
workflowId: id,
|
||||
workflow: workflowData as Record<string, unknown>,
|
||||
requestId,
|
||||
deploymentVersionId,
|
||||
})
|
||||
await undeployWorkflow({ workflowId: id })
|
||||
return createErrorResponse(scheduleResult.error || 'Failed to create schedule', 500)
|
||||
}
|
||||
if (scheduleResult.scheduleId) {
|
||||
} else if (scheduleResult.scheduleId) {
|
||||
scheduleInfo = {
|
||||
scheduleId: scheduleResult.scheduleId,
|
||||
cronExpression: scheduleResult.cronExpression,
|
||||
|
||||
@@ -1,19 +1,10 @@
|
||||
import { db, workflowDeploymentVersion } from '@sim/db'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import type { NextRequest } from 'next/server'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { syncMcpToolsForWorkflow } from '@/lib/mcp/workflow-mcp-sync'
|
||||
import { saveTriggerWebhooksForDeploy } from '@/lib/webhooks/deploy'
|
||||
import { activateWorkflowVersion } from '@/lib/workflows/persistence/utils'
|
||||
import {
|
||||
cleanupDeploymentVersion,
|
||||
createSchedulesForDeploy,
|
||||
validateWorkflowSchedules,
|
||||
} from '@/lib/workflows/schedules'
|
||||
import { validateWorkflowPermissions } from '@/lib/workflows/utils'
|
||||
import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'
|
||||
import type { BlockState } from '@/stores/workflows/workflow/types'
|
||||
|
||||
const logger = createLogger('WorkflowActivateDeploymentAPI')
|
||||
|
||||
@@ -28,135 +19,30 @@ export async function POST(
|
||||
const { id, version } = await params
|
||||
|
||||
try {
|
||||
const {
|
||||
error,
|
||||
session,
|
||||
workflow: workflowData,
|
||||
} = await validateWorkflowPermissions(id, requestId, 'admin')
|
||||
const { error } = await validateWorkflowPermissions(id, requestId, 'admin')
|
||||
if (error) {
|
||||
return createErrorResponse(error.message, error.status)
|
||||
}
|
||||
|
||||
const actorUserId = session?.user?.id
|
||||
if (!actorUserId) {
|
||||
logger.warn(`[${requestId}] Unable to resolve actor user for deployment activation: ${id}`)
|
||||
return createErrorResponse('Unable to determine activating user', 400)
|
||||
}
|
||||
|
||||
const versionNum = Number(version)
|
||||
if (!Number.isFinite(versionNum)) {
|
||||
return createErrorResponse('Invalid version number', 400)
|
||||
}
|
||||
|
||||
const [versionRow] = await db
|
||||
.select({
|
||||
id: workflowDeploymentVersion.id,
|
||||
state: workflowDeploymentVersion.state,
|
||||
})
|
||||
.from(workflowDeploymentVersion)
|
||||
.where(
|
||||
and(
|
||||
eq(workflowDeploymentVersion.workflowId, id),
|
||||
eq(workflowDeploymentVersion.version, versionNum)
|
||||
)
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
if (!versionRow?.state) {
|
||||
return createErrorResponse('Deployment version not found', 404)
|
||||
}
|
||||
|
||||
const [currentActiveVersion] = await db
|
||||
.select({ id: workflowDeploymentVersion.id })
|
||||
.from(workflowDeploymentVersion)
|
||||
.where(
|
||||
and(
|
||||
eq(workflowDeploymentVersion.workflowId, id),
|
||||
eq(workflowDeploymentVersion.isActive, true)
|
||||
)
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
const previousVersionId = currentActiveVersion?.id
|
||||
|
||||
const deployedState = versionRow.state as { blocks?: Record<string, BlockState> }
|
||||
const blocks = deployedState.blocks
|
||||
if (!blocks || typeof blocks !== 'object') {
|
||||
return createErrorResponse('Invalid deployed state structure', 500)
|
||||
}
|
||||
|
||||
const triggerSaveResult = await saveTriggerWebhooksForDeploy({
|
||||
request,
|
||||
workflowId: id,
|
||||
workflow: workflowData as Record<string, unknown>,
|
||||
userId: actorUserId,
|
||||
blocks,
|
||||
requestId,
|
||||
deploymentVersionId: versionRow.id,
|
||||
})
|
||||
|
||||
if (!triggerSaveResult.success) {
|
||||
return createErrorResponse(
|
||||
triggerSaveResult.error?.message || 'Failed to sync trigger configuration',
|
||||
triggerSaveResult.error?.status || 500
|
||||
)
|
||||
}
|
||||
|
||||
const scheduleValidation = validateWorkflowSchedules(blocks)
|
||||
if (!scheduleValidation.isValid) {
|
||||
return createErrorResponse(`Invalid schedule configuration: ${scheduleValidation.error}`, 400)
|
||||
}
|
||||
|
||||
const scheduleResult = await createSchedulesForDeploy(id, blocks, db, versionRow.id)
|
||||
|
||||
if (!scheduleResult.success) {
|
||||
await cleanupDeploymentVersion({
|
||||
workflowId: id,
|
||||
workflow: workflowData as Record<string, unknown>,
|
||||
requestId,
|
||||
deploymentVersionId: versionRow.id,
|
||||
})
|
||||
return createErrorResponse(scheduleResult.error || 'Failed to sync schedules', 500)
|
||||
}
|
||||
|
||||
const result = await activateWorkflowVersion({ workflowId: id, version: versionNum })
|
||||
if (!result.success) {
|
||||
await cleanupDeploymentVersion({
|
||||
workflowId: id,
|
||||
workflow: workflowData as Record<string, unknown>,
|
||||
requestId,
|
||||
deploymentVersionId: versionRow.id,
|
||||
})
|
||||
return createErrorResponse(result.error || 'Failed to activate deployment', 400)
|
||||
}
|
||||
|
||||
if (previousVersionId && previousVersionId !== versionRow.id) {
|
||||
try {
|
||||
logger.info(
|
||||
`[${requestId}] Cleaning up previous version ${previousVersionId} webhooks/schedules`
|
||||
)
|
||||
await cleanupDeploymentVersion({
|
||||
workflowId: id,
|
||||
workflow: workflowData as Record<string, unknown>,
|
||||
requestId,
|
||||
deploymentVersionId: previousVersionId,
|
||||
})
|
||||
logger.info(`[${requestId}] Previous version cleanup completed`)
|
||||
} catch (cleanupError) {
|
||||
logger.error(
|
||||
`[${requestId}] Failed to clean up previous version ${previousVersionId}`,
|
||||
cleanupError
|
||||
)
|
||||
}
|
||||
if (result.state) {
|
||||
await syncMcpToolsForWorkflow({
|
||||
workflowId: id,
|
||||
requestId,
|
||||
state: result.state,
|
||||
context: 'activate',
|
||||
})
|
||||
}
|
||||
|
||||
await syncMcpToolsForWorkflow({
|
||||
workflowId: id,
|
||||
requestId,
|
||||
state: versionRow.state,
|
||||
context: 'activate',
|
||||
})
|
||||
|
||||
return createSuccessResponse({ success: true, deployedAt: result.deployedAt })
|
||||
} catch (error: any) {
|
||||
logger.error(`[${requestId}] Error activating deployment for workflow: ${id}`, error)
|
||||
|
||||
@@ -110,7 +110,6 @@ type AsyncExecutionParams = {
|
||||
userId: string
|
||||
input: any
|
||||
triggerType: CoreTriggerType
|
||||
preflighted?: boolean
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -133,7 +132,6 @@ async function handleAsyncExecution(params: AsyncExecutionParams): Promise<NextR
|
||||
userId,
|
||||
input,
|
||||
triggerType,
|
||||
preflighted: params.preflighted,
|
||||
}
|
||||
|
||||
try {
|
||||
@@ -266,7 +264,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
requestId
|
||||
)
|
||||
|
||||
const shouldPreflightEnvVars = isAsyncMode && isTriggerDevEnabled
|
||||
const preprocessResult = await preprocessExecution({
|
||||
workflowId,
|
||||
userId,
|
||||
@@ -275,9 +272,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
requestId,
|
||||
checkDeployment: !shouldUseDraftState,
|
||||
loggingSession,
|
||||
preflightEnvVars: shouldPreflightEnvVars,
|
||||
useDraftState: shouldUseDraftState,
|
||||
envUserId: isClientSession ? userId : undefined,
|
||||
})
|
||||
|
||||
if (!preprocessResult.success) {
|
||||
@@ -309,7 +303,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
userId: actorUserId,
|
||||
input,
|
||||
triggerType: loggingTriggerType,
|
||||
preflighted: shouldPreflightEnvVars,
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
@@ -8,6 +8,9 @@ import { useNotificationStore } from '@/stores/notifications'
|
||||
import { useCopilotStore, usePanelStore } from '@/stores/panel'
|
||||
import { useTerminalStore } from '@/stores/terminal'
|
||||
import { useWorkflowDiffStore } from '@/stores/workflow-diff'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
import { mergeSubblockState } from '@/stores/workflows/utils'
|
||||
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
|
||||
|
||||
const logger = createLogger('DiffControls')
|
||||
const NOTIFICATION_WIDTH = 240
|
||||
@@ -16,29 +19,188 @@ const NOTIFICATION_GAP = 16
|
||||
export const DiffControls = memo(function DiffControls() {
|
||||
const isTerminalResizing = useTerminalStore((state) => state.isResizing)
|
||||
const isPanelResizing = usePanelStore((state) => state.isResizing)
|
||||
const { isDiffReady, hasActiveDiff, acceptChanges, rejectChanges } = useWorkflowDiffStore(
|
||||
useCallback(
|
||||
(state) => ({
|
||||
isDiffReady: state.isDiffReady,
|
||||
hasActiveDiff: state.hasActiveDiff,
|
||||
acceptChanges: state.acceptChanges,
|
||||
rejectChanges: state.rejectChanges,
|
||||
}),
|
||||
[]
|
||||
const { isDiffReady, hasActiveDiff, acceptChanges, rejectChanges, baselineWorkflow } =
|
||||
useWorkflowDiffStore(
|
||||
useCallback(
|
||||
(state) => ({
|
||||
isDiffReady: state.isDiffReady,
|
||||
hasActiveDiff: state.hasActiveDiff,
|
||||
acceptChanges: state.acceptChanges,
|
||||
rejectChanges: state.rejectChanges,
|
||||
baselineWorkflow: state.baselineWorkflow,
|
||||
}),
|
||||
[]
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
const { updatePreviewToolCallState } = useCopilotStore(
|
||||
const { updatePreviewToolCallState, currentChat, messages } = useCopilotStore(
|
||||
useCallback(
|
||||
(state) => ({
|
||||
updatePreviewToolCallState: state.updatePreviewToolCallState,
|
||||
currentChat: state.currentChat,
|
||||
messages: state.messages,
|
||||
}),
|
||||
[]
|
||||
)
|
||||
)
|
||||
|
||||
const { activeWorkflowId } = useWorkflowRegistry(
|
||||
useCallback((state) => ({ activeWorkflowId: state.activeWorkflowId }), [])
|
||||
)
|
||||
|
||||
const allNotifications = useNotificationStore((state) => state.notifications)
|
||||
const hasVisibleNotifications = allNotifications.length > 0
|
||||
const hasVisibleNotifications = useMemo(() => {
|
||||
if (!activeWorkflowId) return false
|
||||
return allNotifications.some((n) => !n.workflowId || n.workflowId === activeWorkflowId)
|
||||
}, [allNotifications, activeWorkflowId])
|
||||
|
||||
const createCheckpoint = useCallback(async () => {
|
||||
if (!activeWorkflowId || !currentChat?.id) {
|
||||
logger.warn('Cannot create checkpoint: missing workflowId or chatId', {
|
||||
workflowId: activeWorkflowId,
|
||||
chatId: currentChat?.id,
|
||||
})
|
||||
return false
|
||||
}
|
||||
|
||||
try {
|
||||
logger.info('Creating checkpoint before accepting changes')
|
||||
|
||||
// Use the baseline workflow (state before diff) instead of current state
|
||||
// This ensures reverting to the checkpoint restores the pre-diff state
|
||||
const rawState = baselineWorkflow || useWorkflowStore.getState().getWorkflowState()
|
||||
|
||||
// The baseline already has merged subblock values, but we'll merge again to be safe
|
||||
// This ensures all user inputs and subblock data are captured
|
||||
const blocksWithSubblockValues = mergeSubblockState(rawState.blocks, activeWorkflowId)
|
||||
|
||||
// Filter and complete blocks to ensure all required fields are present
|
||||
// This matches the validation logic from /api/workflows/[id]/state
|
||||
const filteredBlocks = Object.entries(blocksWithSubblockValues).reduce(
|
||||
(acc, [blockId, block]) => {
|
||||
if (block.type && block.name) {
|
||||
// Ensure all required fields are present
|
||||
acc[blockId] = {
|
||||
...block,
|
||||
id: block.id || blockId, // Ensure id field is set
|
||||
enabled: block.enabled !== undefined ? block.enabled : true,
|
||||
horizontalHandles:
|
||||
block.horizontalHandles !== undefined ? block.horizontalHandles : true,
|
||||
height: block.height !== undefined ? block.height : 90,
|
||||
subBlocks: block.subBlocks || {},
|
||||
outputs: block.outputs || {},
|
||||
data: block.data || {},
|
||||
position: block.position || { x: 0, y: 0 }, // Ensure position exists
|
||||
}
|
||||
}
|
||||
return acc
|
||||
},
|
||||
{} as typeof rawState.blocks
|
||||
)
|
||||
|
||||
// Clean the workflow state - only include valid fields, exclude null/undefined values
|
||||
const workflowState = {
|
||||
blocks: filteredBlocks,
|
||||
edges: rawState.edges || [],
|
||||
loops: rawState.loops || {},
|
||||
parallels: rawState.parallels || {},
|
||||
lastSaved: rawState.lastSaved || Date.now(),
|
||||
deploymentStatuses: rawState.deploymentStatuses || {},
|
||||
}
|
||||
|
||||
logger.info('Prepared complete workflow state for checkpoint', {
|
||||
blocksCount: Object.keys(workflowState.blocks).length,
|
||||
edgesCount: workflowState.edges.length,
|
||||
loopsCount: Object.keys(workflowState.loops).length,
|
||||
parallelsCount: Object.keys(workflowState.parallels).length,
|
||||
hasRequiredFields: Object.values(workflowState.blocks).every(
|
||||
(block) => block.id && block.type && block.name && block.position
|
||||
),
|
||||
hasSubblockValues: Object.values(workflowState.blocks).some((block) =>
|
||||
Object.values(block.subBlocks || {}).some(
|
||||
(subblock) => subblock.value !== null && subblock.value !== undefined
|
||||
)
|
||||
),
|
||||
sampleBlock: Object.values(workflowState.blocks)[0],
|
||||
})
|
||||
|
||||
// Find the most recent user message ID from the current chat
|
||||
const userMessages = messages.filter((msg) => msg.role === 'user')
|
||||
const lastUserMessage = userMessages[userMessages.length - 1]
|
||||
const messageId = lastUserMessage?.id
|
||||
|
||||
logger.info('Creating checkpoint with message association', {
|
||||
totalMessages: messages.length,
|
||||
userMessageCount: userMessages.length,
|
||||
lastUserMessageId: messageId,
|
||||
chatId: currentChat.id,
|
||||
entireMessageArray: messages,
|
||||
allMessageIds: messages.map((m) => ({
|
||||
id: m.id,
|
||||
role: m.role,
|
||||
content: m.content.substring(0, 50),
|
||||
})),
|
||||
selectedUserMessages: userMessages.map((m) => ({
|
||||
id: m.id,
|
||||
content: m.content.substring(0, 100),
|
||||
})),
|
||||
allRawMessageIds: messages.map((m) => m.id),
|
||||
userMessageIds: userMessages.map((m) => m.id),
|
||||
checkpointData: {
|
||||
workflowId: activeWorkflowId,
|
||||
chatId: currentChat.id,
|
||||
messageId: messageId,
|
||||
messageFound: !!lastUserMessage,
|
||||
},
|
||||
})
|
||||
|
||||
const response = await fetch('/api/copilot/checkpoints', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
workflowId: activeWorkflowId,
|
||||
chatId: currentChat.id,
|
||||
messageId,
|
||||
workflowState: JSON.stringify(workflowState),
|
||||
}),
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`Failed to create checkpoint: ${response.statusText}`)
|
||||
}
|
||||
|
||||
const result = await response.json()
|
||||
const newCheckpoint = result.checkpoint
|
||||
|
||||
logger.info('Checkpoint created successfully', {
|
||||
messageId,
|
||||
chatId: currentChat.id,
|
||||
checkpointId: newCheckpoint?.id,
|
||||
})
|
||||
|
||||
// Update the copilot store immediately to show the checkpoint icon
|
||||
if (newCheckpoint && messageId) {
|
||||
const { messageCheckpoints: currentCheckpoints } = useCopilotStore.getState()
|
||||
const existingCheckpoints = currentCheckpoints[messageId] || []
|
||||
|
||||
const updatedCheckpoints = {
|
||||
...currentCheckpoints,
|
||||
[messageId]: [newCheckpoint, ...existingCheckpoints],
|
||||
}
|
||||
|
||||
useCopilotStore.setState({ messageCheckpoints: updatedCheckpoints })
|
||||
logger.info('Updated copilot store with new checkpoint', {
|
||||
messageId,
|
||||
checkpointId: newCheckpoint.id,
|
||||
})
|
||||
}
|
||||
|
||||
return true
|
||||
} catch (error) {
|
||||
logger.error('Failed to create checkpoint:', error)
|
||||
return false
|
||||
}
|
||||
}, [activeWorkflowId, currentChat, messages, baselineWorkflow])
|
||||
|
||||
const handleAccept = useCallback(() => {
|
||||
logger.info('Accepting proposed changes with backup protection')
|
||||
@@ -76,8 +238,12 @@ export const DiffControls = memo(function DiffControls() {
|
||||
})
|
||||
|
||||
// Create checkpoint in the background (fire-and-forget) so it doesn't block UI
|
||||
createCheckpoint().catch((error) => {
|
||||
logger.warn('Failed to create checkpoint after accept:', error)
|
||||
})
|
||||
|
||||
logger.info('Accept triggered; UI will update optimistically')
|
||||
}, [updatePreviewToolCallState, acceptChanges])
|
||||
}, [createCheckpoint, updatePreviewToolCallState, acceptChanges])
|
||||
|
||||
const handleReject = useCallback(() => {
|
||||
logger.info('Rejecting proposed changes (optimistic)')
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
import { memo, useEffect, useRef, useState } from 'react'
|
||||
import { cn } from '@/lib/core/utils/cn'
|
||||
import CopilotMarkdownRenderer from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/markdown-renderer'
|
||||
|
||||
/**
|
||||
@@ -7,23 +6,14 @@ import CopilotMarkdownRenderer from '@/app/workspace/[workspaceId]/w/[workflowId
|
||||
*/
|
||||
const CHARACTER_DELAY = 3
|
||||
|
||||
/**
|
||||
* Props for the StreamingIndicator component
|
||||
*/
|
||||
interface StreamingIndicatorProps {
|
||||
/** Optional class name for layout adjustments */
|
||||
className?: string
|
||||
}
|
||||
|
||||
/**
|
||||
* StreamingIndicator shows animated dots during message streaming
|
||||
* Used as a standalone indicator when no content has arrived yet
|
||||
*
|
||||
* @param props - Component props
|
||||
* @returns Animated loading indicator
|
||||
*/
|
||||
export const StreamingIndicator = memo(({ className }: StreamingIndicatorProps) => (
|
||||
<div className={cn('flex h-[1.25rem] items-center text-muted-foreground', className)}>
|
||||
export const StreamingIndicator = memo(() => (
|
||||
<div className='flex h-[1.25rem] items-center text-muted-foreground'>
|
||||
<div className='flex space-x-0.5'>
|
||||
<div className='h-1 w-1 animate-bounce rounded-full bg-muted-foreground [animation-delay:0ms] [animation-duration:1.2s]' />
|
||||
<div className='h-1 w-1 animate-bounce rounded-full bg-muted-foreground [animation-delay:150ms] [animation-duration:1.2s]' />
|
||||
|
||||
@@ -1,20 +1,10 @@
|
||||
'use client'
|
||||
|
||||
import { memo, useEffect, useMemo, useRef, useState } from 'react'
|
||||
import { memo, useEffect, useRef, useState } from 'react'
|
||||
import clsx from 'clsx'
|
||||
import { ChevronUp } from 'lucide-react'
|
||||
import CopilotMarkdownRenderer from './markdown-renderer'
|
||||
|
||||
/**
|
||||
* Removes thinking tags (raw or escaped) from streamed content.
|
||||
*/
|
||||
function stripThinkingTags(text: string): string {
|
||||
return text
|
||||
.replace(/<\/?thinking[^>]*>/gi, '')
|
||||
.replace(/<\/?thinking[^&]*>/gi, '')
|
||||
.trim()
|
||||
}
|
||||
|
||||
/**
|
||||
* Max height for thinking content before internal scrolling kicks in
|
||||
*/
|
||||
@@ -197,9 +187,6 @@ export function ThinkingBlock({
|
||||
label = 'Thought',
|
||||
hasSpecialTags = false,
|
||||
}: ThinkingBlockProps) {
|
||||
// Strip thinking tags from content on render to handle persisted messages
|
||||
const cleanContent = useMemo(() => stripThinkingTags(content || ''), [content])
|
||||
|
||||
const [isExpanded, setIsExpanded] = useState(false)
|
||||
const [duration, setDuration] = useState(0)
|
||||
const [userHasScrolledAway, setUserHasScrolledAway] = useState(false)
|
||||
@@ -222,10 +209,10 @@ export function ThinkingBlock({
|
||||
return
|
||||
}
|
||||
|
||||
if (!userCollapsedRef.current && cleanContent && cleanContent.length > 0) {
|
||||
if (!userCollapsedRef.current && content && content.trim().length > 0) {
|
||||
setIsExpanded(true)
|
||||
}
|
||||
}, [isStreaming, cleanContent, hasFollowingContent, hasSpecialTags])
|
||||
}, [isStreaming, content, hasFollowingContent, hasSpecialTags])
|
||||
|
||||
// Reset start time when streaming begins
|
||||
useEffect(() => {
|
||||
@@ -311,7 +298,7 @@ export function ThinkingBlock({
|
||||
return `${seconds}s`
|
||||
}
|
||||
|
||||
const hasContent = cleanContent.length > 0
|
||||
const hasContent = content && content.trim().length > 0
|
||||
// Thinking is "done" when streaming ends OR when there's following content (like a tool call) OR when special tags appear
|
||||
const isThinkingDone = !isStreaming || hasFollowingContent || hasSpecialTags
|
||||
const durationText = `${label} for ${formatDuration(duration)}`
|
||||
@@ -387,10 +374,7 @@ export function ThinkingBlock({
|
||||
isExpanded ? 'mt-1.5 max-h-[150px] opacity-100' : 'max-h-0 opacity-0'
|
||||
)}
|
||||
>
|
||||
<SmoothThinkingText
|
||||
content={cleanContent}
|
||||
isStreaming={isStreaming && !hasFollowingContent}
|
||||
/>
|
||||
<SmoothThinkingText content={content} isStreaming={isStreaming && !hasFollowingContent} />
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
@@ -428,7 +412,7 @@ export function ThinkingBlock({
|
||||
>
|
||||
{/* Completed thinking text - dimmed with markdown */}
|
||||
<div className='[&_*]:!text-[var(--text-muted)] [&_*]:!text-[12px] [&_*]:!leading-[1.4] [&_p]:!m-0 [&_p]:!mb-1 [&_h1]:!text-[12px] [&_h1]:!font-semibold [&_h1]:!m-0 [&_h1]:!mb-1 [&_h2]:!text-[12px] [&_h2]:!font-semibold [&_h2]:!m-0 [&_h2]:!mb-1 [&_h3]:!text-[12px] [&_h3]:!font-semibold [&_h3]:!m-0 [&_h3]:!mb-1 [&_code]:!text-[11px] [&_ul]:!pl-5 [&_ul]:!my-1 [&_ol]:!pl-6 [&_ol]:!my-1 [&_li]:!my-0.5 [&_li]:!py-0 font-season text-[12px] text-[var(--text-muted)]'>
|
||||
<CopilotMarkdownRenderer content={cleanContent} />
|
||||
<CopilotMarkdownRenderer content={content} />
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
'use client'
|
||||
|
||||
import { type FC, memo, useCallback, useMemo, useRef, useState } from 'react'
|
||||
import { type FC, memo, useCallback, useMemo, useState } from 'react'
|
||||
import { RotateCcw } from 'lucide-react'
|
||||
import { Button } from '@/components/emcn'
|
||||
import {
|
||||
@@ -93,8 +93,6 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
|
||||
// UI state
|
||||
const [isHoveringMessage, setIsHoveringMessage] = useState(false)
|
||||
|
||||
const cancelEditRef = useRef<(() => void) | null>(null)
|
||||
|
||||
// Checkpoint management hook
|
||||
const {
|
||||
showRestoreConfirmation,
|
||||
@@ -114,8 +112,7 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
|
||||
messages,
|
||||
messageCheckpoints,
|
||||
onRevertModeChange,
|
||||
onEditModeChange,
|
||||
() => cancelEditRef.current?.()
|
||||
onEditModeChange
|
||||
)
|
||||
|
||||
// Message editing hook
|
||||
@@ -145,8 +142,6 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
|
||||
pendingEditRef,
|
||||
})
|
||||
|
||||
cancelEditRef.current = handleCancelEdit
|
||||
|
||||
// Get clean text content with double newline parsing
|
||||
const cleanTextContent = useMemo(() => {
|
||||
if (!message.content) return ''
|
||||
@@ -493,9 +488,8 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
|
||||
{/* Content blocks in chronological order */}
|
||||
{memoizedContentBlocks}
|
||||
|
||||
{isStreaming && (
|
||||
<StreamingIndicator className={!hasVisibleContent ? 'mt-1' : undefined} />
|
||||
)}
|
||||
{/* Streaming indicator always at bottom during streaming */}
|
||||
{isStreaming && <StreamingIndicator />}
|
||||
|
||||
{message.errorType === 'usage_limit' && (
|
||||
<div className='flex gap-1.5'>
|
||||
|
||||
@@ -22,8 +22,7 @@ export function useCheckpointManagement(
|
||||
messages: CopilotMessage[],
|
||||
messageCheckpoints: any[],
|
||||
onRevertModeChange?: (isReverting: boolean) => void,
|
||||
onEditModeChange?: (isEditing: boolean) => void,
|
||||
onCancelEdit?: () => void
|
||||
onEditModeChange?: (isEditing: boolean) => void
|
||||
) {
|
||||
const [showRestoreConfirmation, setShowRestoreConfirmation] = useState(false)
|
||||
const [showCheckpointDiscardModal, setShowCheckpointDiscardModal] = useState(false)
|
||||
@@ -58,7 +57,7 @@ export function useCheckpointManagement(
|
||||
const { messageCheckpoints: currentCheckpoints } = useCopilotStore.getState()
|
||||
const updatedCheckpoints = {
|
||||
...currentCheckpoints,
|
||||
[message.id]: [],
|
||||
[message.id]: messageCheckpoints.slice(1),
|
||||
}
|
||||
useCopilotStore.setState({ messageCheckpoints: updatedCheckpoints })
|
||||
|
||||
@@ -141,7 +140,7 @@ export function useCheckpointManagement(
|
||||
const { messageCheckpoints: currentCheckpoints } = useCopilotStore.getState()
|
||||
const updatedCheckpoints = {
|
||||
...currentCheckpoints,
|
||||
[message.id]: [],
|
||||
[message.id]: messageCheckpoints.slice(1),
|
||||
}
|
||||
useCopilotStore.setState({ messageCheckpoints: updatedCheckpoints })
|
||||
|
||||
@@ -155,8 +154,6 @@ export function useCheckpointManagement(
|
||||
}
|
||||
|
||||
setShowCheckpointDiscardModal(false)
|
||||
onEditModeChange?.(false)
|
||||
onCancelEdit?.()
|
||||
|
||||
const { sendMessage } = useCopilotStore.getState()
|
||||
if (pendingEditRef.current) {
|
||||
@@ -183,17 +180,15 @@ export function useCheckpointManagement(
|
||||
} finally {
|
||||
setIsProcessingDiscard(false)
|
||||
}
|
||||
}, [messageCheckpoints, revertToCheckpoint, message, messages, onEditModeChange, onCancelEdit])
|
||||
}, [messageCheckpoints, revertToCheckpoint, message, messages])
|
||||
|
||||
/**
|
||||
* Cancels checkpoint discard and clears pending edit
|
||||
*/
|
||||
const handleCancelCheckpointDiscard = useCallback(() => {
|
||||
setShowCheckpointDiscardModal(false)
|
||||
onEditModeChange?.(false)
|
||||
onCancelEdit?.()
|
||||
pendingEditRef.current = null
|
||||
}, [onEditModeChange, onCancelEdit])
|
||||
}, [])
|
||||
|
||||
/**
|
||||
* Continues with edit WITHOUT reverting checkpoint
|
||||
@@ -223,7 +218,7 @@ export function useCheckpointManagement(
|
||||
}
|
||||
pendingEditRef.current = null
|
||||
}
|
||||
}, [message, messages, onEditModeChange, onCancelEdit])
|
||||
}, [message, messages])
|
||||
|
||||
/**
|
||||
* Handles keyboard events for restore confirmation (Escape/Enter)
|
||||
|
||||
@@ -1446,10 +1446,8 @@ function WorkflowEditSummary({ toolCall }: { toolCall: CopilotToolCall }) {
|
||||
blockType = blockType || op.block_type || ''
|
||||
}
|
||||
|
||||
if (!blockName) blockName = blockType || ''
|
||||
if (!blockName && !blockType) {
|
||||
continue
|
||||
}
|
||||
// Fallback name to type or ID
|
||||
if (!blockName) blockName = blockType || blockId
|
||||
|
||||
const change: BlockChange = { blockId, blockName, blockType }
|
||||
|
||||
|
||||
@@ -22,9 +22,6 @@ interface UseContextManagementProps {
|
||||
export function useContextManagement({ message, initialContexts }: UseContextManagementProps) {
|
||||
const [selectedContexts, setSelectedContexts] = useState<ChatContext[]>(initialContexts ?? [])
|
||||
const initializedRef = useRef(false)
|
||||
const escapeRegex = useCallback((value: string) => {
|
||||
return value.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')
|
||||
}, [])
|
||||
|
||||
// Initialize with initial contexts when they're first provided (for edit mode)
|
||||
useEffect(() => {
|
||||
@@ -81,10 +78,10 @@ export function useContextManagement({ message, initialContexts }: UseContextMan
|
||||
// Check for slash command tokens or mention tokens based on kind
|
||||
const isSlashCommand = c.kind === 'slash_command'
|
||||
const prefix = isSlashCommand ? '/' : '@'
|
||||
const tokenPattern = new RegExp(
|
||||
`(^|\\s)${escapeRegex(prefix)}${escapeRegex(c.label)}(\\s|$)`
|
||||
)
|
||||
return tokenPattern.test(message)
|
||||
const tokenWithSpaces = ` ${prefix}${c.label} `
|
||||
const tokenAtStart = `${prefix}${c.label} `
|
||||
// Token can appear with leading space OR at the start of the message
|
||||
return message.includes(tokenWithSpaces) || message.startsWith(tokenAtStart)
|
||||
})
|
||||
return filtered.length === prev.length ? prev : filtered
|
||||
})
|
||||
|
||||
@@ -76,15 +76,6 @@ export function useMentionTokens({
|
||||
ranges.push({ start: idx, end: idx + token.length, label })
|
||||
fromIndex = idx + token.length
|
||||
}
|
||||
|
||||
// Token at end of message without trailing space: "@label" or " /label"
|
||||
const tokenAtEnd = `${prefix}${label}`
|
||||
if (message.endsWith(tokenAtEnd)) {
|
||||
const idx = message.lastIndexOf(tokenAtEnd)
|
||||
const hasLeadingSpace = idx > 0 && message[idx - 1] === ' '
|
||||
const start = hasLeadingSpace ? idx - 1 : idx
|
||||
ranges.push({ start, end: message.length, label })
|
||||
}
|
||||
}
|
||||
|
||||
ranges.sort((a, b) => a.start - b.start)
|
||||
|
||||
@@ -613,7 +613,7 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
|
||||
|
||||
const insertTriggerAndOpenMenu = useCallback(
|
||||
(trigger: '@' | '/') => {
|
||||
if (disabled) return
|
||||
if (disabled || isLoading) return
|
||||
const textarea = mentionMenu.textareaRef.current
|
||||
if (!textarea) return
|
||||
|
||||
@@ -642,7 +642,7 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
|
||||
}
|
||||
mentionMenu.setSubmenuActiveIndex(0)
|
||||
},
|
||||
[disabled, mentionMenu, message, setMessage]
|
||||
[disabled, isLoading, mentionMenu, message, setMessage]
|
||||
)
|
||||
|
||||
const handleOpenMentionMenuWithAt = useCallback(
|
||||
@@ -737,7 +737,7 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
|
||||
title='Insert @'
|
||||
className={cn(
|
||||
'cursor-pointer rounded-[6px] p-[4.5px]',
|
||||
disabled && 'cursor-not-allowed'
|
||||
(disabled || isLoading) && 'cursor-not-allowed'
|
||||
)}
|
||||
>
|
||||
<AtSign className='h-3 w-3' strokeWidth={1.75} />
|
||||
@@ -749,7 +749,7 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
|
||||
title='Insert /'
|
||||
className={cn(
|
||||
'cursor-pointer rounded-[6px] p-[4.5px]',
|
||||
disabled && 'cursor-not-allowed'
|
||||
(disabled || isLoading) && 'cursor-not-allowed'
|
||||
)}
|
||||
>
|
||||
<span className='flex h-3 w-3 items-center justify-center font-medium text-[11px] leading-none'>
|
||||
@@ -816,7 +816,7 @@ const UserInput = forwardRef<UserInputRef, UserInputProps>(
|
||||
placeholder={fileAttachments.isDragging ? 'Drop files here...' : effectivePlaceholder}
|
||||
disabled={disabled}
|
||||
rows={2}
|
||||
className='relative z-[2] m-0 box-border h-auto max-h-[120px] min-h-[48px] w-full resize-none overflow-y-auto overflow-x-hidden break-words border-0 bg-transparent px-[2px] py-1 font-medium font-sans text-sm text-transparent leading-[1.25rem] caret-foreground outline-none [-ms-overflow-style:none] [scrollbar-width:none] [text-rendering:auto] placeholder:text-[var(--text-muted)] focus-visible:ring-0 focus-visible:ring-offset-0 dark:placeholder:text-[var(--text-muted)] [&::-webkit-scrollbar]:hidden'
|
||||
className='relative z-[2] m-0 box-border h-auto min-h-[48px] w-full resize-none overflow-y-auto overflow-x-hidden break-words border-0 bg-transparent px-[2px] py-1 font-medium font-sans text-sm text-transparent leading-[1.25rem] caret-foreground outline-none [-ms-overflow-style:none] [scrollbar-width:none] [text-rendering:auto] placeholder:text-[var(--text-muted)] focus-visible:ring-0 focus-visible:ring-offset-0 dark:placeholder:text-[var(--text-muted)] [&::-webkit-scrollbar]:hidden'
|
||||
/>
|
||||
|
||||
{/* Mention Menu Portal */}
|
||||
|
||||
@@ -2,19 +2,16 @@ import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
|
||||
import { useReactFlow } from 'reactflow'
|
||||
import { Combobox, type ComboboxOption } from '@/components/emcn/components'
|
||||
import { cn } from '@/lib/core/utils/cn'
|
||||
import { buildCanonicalIndex, resolveDependencyValue } from '@/lib/workflows/subblocks/visibility'
|
||||
import { formatDisplayText } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/formatted-text'
|
||||
import { SubBlockInputController } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/sub-block-input-controller'
|
||||
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value'
|
||||
import { useAccessibleReferencePrefixes } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-accessible-reference-prefixes'
|
||||
import { getBlock } from '@/blocks/registry'
|
||||
import type { SubBlockConfig } from '@/blocks/types'
|
||||
import { getDependsOnFields } from '@/blocks/utils'
|
||||
import { usePermissionConfig } from '@/hooks/use-permission-config'
|
||||
import { getProviderFromModel } from '@/providers/utils'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
|
||||
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
|
||||
|
||||
/**
|
||||
* Constants for ComboBox component behavior
|
||||
@@ -94,24 +91,15 @@ export function ComboBox({
|
||||
// Dependency tracking for fetchOptions
|
||||
const dependsOnFields = useMemo(() => getDependsOnFields(dependsOn), [dependsOn])
|
||||
const activeWorkflowId = useWorkflowRegistry((s) => s.activeWorkflowId)
|
||||
const blockState = useWorkflowStore((state) => state.blocks[blockId])
|
||||
const blockConfig = blockState?.type ? getBlock(blockState.type) : null
|
||||
const canonicalIndex = useMemo(
|
||||
() => buildCanonicalIndex(blockConfig?.subBlocks || []),
|
||||
[blockConfig?.subBlocks]
|
||||
)
|
||||
const canonicalModeOverrides = blockState?.data?.canonicalModes
|
||||
const dependencyValues = useSubBlockStore(
|
||||
useCallback(
|
||||
(state) => {
|
||||
if (dependsOnFields.length === 0 || !activeWorkflowId) return []
|
||||
const workflowValues = state.workflowValues[activeWorkflowId] || {}
|
||||
const blockValues = workflowValues[blockId] || {}
|
||||
return dependsOnFields.map((depKey) =>
|
||||
resolveDependencyValue(depKey, blockValues, canonicalIndex, canonicalModeOverrides)
|
||||
)
|
||||
return dependsOnFields.map((depKey) => blockValues[depKey] ?? null)
|
||||
},
|
||||
[dependsOnFields, activeWorkflowId, blockId, canonicalIndex, canonicalModeOverrides]
|
||||
[dependsOnFields, activeWorkflowId, blockId]
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
@@ -1,15 +1,12 @@
|
||||
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
|
||||
import { Badge } from '@/components/emcn'
|
||||
import { Combobox, type ComboboxOption } from '@/components/emcn/components'
|
||||
import { buildCanonicalIndex, resolveDependencyValue } from '@/lib/workflows/subblocks/visibility'
|
||||
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value'
|
||||
import { getBlock } from '@/blocks/registry'
|
||||
import type { SubBlockConfig } from '@/blocks/types'
|
||||
import { getDependsOnFields } from '@/blocks/utils'
|
||||
import { ResponseBlockHandler } from '@/executor/handlers/response/response-handler'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
|
||||
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
|
||||
|
||||
/**
|
||||
* Dropdown option type - can be a simple string or an object with label, id, and optional icon
|
||||
@@ -92,24 +89,15 @@ export function Dropdown({
|
||||
const dependsOnFields = useMemo(() => getDependsOnFields(dependsOn), [dependsOn])
|
||||
|
||||
const activeWorkflowId = useWorkflowRegistry((s) => s.activeWorkflowId)
|
||||
const blockState = useWorkflowStore((state) => state.blocks[blockId])
|
||||
const blockConfig = blockState?.type ? getBlock(blockState.type) : null
|
||||
const canonicalIndex = useMemo(
|
||||
() => buildCanonicalIndex(blockConfig?.subBlocks || []),
|
||||
[blockConfig?.subBlocks]
|
||||
)
|
||||
const canonicalModeOverrides = blockState?.data?.canonicalModes
|
||||
const dependencyValues = useSubBlockStore(
|
||||
useCallback(
|
||||
(state) => {
|
||||
if (dependsOnFields.length === 0 || !activeWorkflowId) return []
|
||||
const workflowValues = state.workflowValues[activeWorkflowId] || {}
|
||||
const blockValues = workflowValues[blockId] || {}
|
||||
return dependsOnFields.map((depKey) =>
|
||||
resolveDependencyValue(depKey, blockValues, canonicalIndex, canonicalModeOverrides)
|
||||
)
|
||||
return dependsOnFields.map((depKey) => blockValues[depKey] ?? null)
|
||||
},
|
||||
[dependsOnFields, activeWorkflowId, blockId, canonicalIndex, canonicalModeOverrides]
|
||||
[dependsOnFields, activeWorkflowId, blockId]
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
@@ -4,19 +4,15 @@ import { useMemo } from 'react'
|
||||
import { useParams } from 'next/navigation'
|
||||
import { Tooltip } from '@/components/emcn'
|
||||
import { getProviderIdFromServiceId } from '@/lib/oauth'
|
||||
import { buildCanonicalIndex, resolveDependencyValue } from '@/lib/workflows/subblocks/visibility'
|
||||
import { SelectorCombobox } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/selector-combobox/selector-combobox'
|
||||
import { useDependsOnGate } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-depends-on-gate'
|
||||
import { useForeignCredential } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-foreign-credential'
|
||||
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value'
|
||||
import { getBlock } from '@/blocks/registry'
|
||||
import type { SubBlockConfig } from '@/blocks/types'
|
||||
import { isDependency } from '@/blocks/utils'
|
||||
import { resolveSelectorForSubBlock, type SelectorResolution } from '@/hooks/selectors/resolution'
|
||||
import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
|
||||
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
|
||||
|
||||
interface FileSelectorInputProps {
|
||||
blockId: string
|
||||
@@ -46,59 +42,21 @@ export function FileSelectorInput({
|
||||
previewContextValues,
|
||||
})
|
||||
|
||||
const blockState = useWorkflowStore((state) => state.blocks[blockId])
|
||||
const blockConfig = blockState?.type ? getBlock(blockState.type) : null
|
||||
const canonicalIndex = useMemo(
|
||||
() => buildCanonicalIndex(blockConfig?.subBlocks || []),
|
||||
[blockConfig?.subBlocks]
|
||||
)
|
||||
const canonicalModeOverrides = blockState?.data?.canonicalModes
|
||||
|
||||
const blockValues = useSubBlockStore((state) => {
|
||||
if (!activeWorkflowId) return {}
|
||||
const workflowValues = state.workflowValues[activeWorkflowId] || {}
|
||||
return (workflowValues as Record<string, Record<string, unknown>>)[blockId] || {}
|
||||
})
|
||||
|
||||
const [connectedCredentialFromStore] = useSubBlockValue(blockId, 'credential')
|
||||
const [domainValueFromStore] = useSubBlockValue(blockId, 'domain')
|
||||
const [projectIdValueFromStore] = useSubBlockValue(blockId, 'projectId')
|
||||
const [planIdValueFromStore] = useSubBlockValue(blockId, 'planId')
|
||||
const [teamIdValueFromStore] = useSubBlockValue(blockId, 'teamId')
|
||||
const [siteIdValueFromStore] = useSubBlockValue(blockId, 'siteId')
|
||||
const [collectionIdValueFromStore] = useSubBlockValue(blockId, 'collectionId')
|
||||
|
||||
const connectedCredential = previewContextValues?.credential ?? blockValues.credential
|
||||
const connectedCredential = previewContextValues?.credential ?? connectedCredentialFromStore
|
||||
const domainValue = previewContextValues?.domain ?? domainValueFromStore
|
||||
|
||||
const teamIdValue = useMemo(
|
||||
() =>
|
||||
previewContextValues?.teamId ??
|
||||
resolveDependencyValue('teamId', blockValues, canonicalIndex, canonicalModeOverrides),
|
||||
[previewContextValues?.teamId, blockValues, canonicalIndex, canonicalModeOverrides]
|
||||
)
|
||||
|
||||
const siteIdValue = useMemo(
|
||||
() =>
|
||||
previewContextValues?.siteId ??
|
||||
resolveDependencyValue('siteId', blockValues, canonicalIndex, canonicalModeOverrides),
|
||||
[previewContextValues?.siteId, blockValues, canonicalIndex, canonicalModeOverrides]
|
||||
)
|
||||
|
||||
const collectionIdValue = useMemo(
|
||||
() =>
|
||||
previewContextValues?.collectionId ??
|
||||
resolveDependencyValue('collectionId', blockValues, canonicalIndex, canonicalModeOverrides),
|
||||
[previewContextValues?.collectionId, blockValues, canonicalIndex, canonicalModeOverrides]
|
||||
)
|
||||
|
||||
const projectIdValue = useMemo(
|
||||
() =>
|
||||
previewContextValues?.projectId ??
|
||||
resolveDependencyValue('projectId', blockValues, canonicalIndex, canonicalModeOverrides),
|
||||
[previewContextValues?.projectId, blockValues, canonicalIndex, canonicalModeOverrides]
|
||||
)
|
||||
|
||||
const planIdValue = useMemo(
|
||||
() =>
|
||||
previewContextValues?.planId ??
|
||||
resolveDependencyValue('planId', blockValues, canonicalIndex, canonicalModeOverrides),
|
||||
[previewContextValues?.planId, blockValues, canonicalIndex, canonicalModeOverrides]
|
||||
)
|
||||
const projectIdValue = previewContextValues?.projectId ?? projectIdValueFromStore
|
||||
const planIdValue = previewContextValues?.planId ?? planIdValueFromStore
|
||||
const teamIdValue = previewContextValues?.teamId ?? teamIdValueFromStore
|
||||
const siteIdValue = previewContextValues?.siteId ?? siteIdValueFromStore
|
||||
const collectionIdValue = previewContextValues?.collectionId ?? collectionIdValueFromStore
|
||||
|
||||
const normalizedCredentialId =
|
||||
typeof connectedCredential === 'string'
|
||||
@@ -107,6 +65,7 @@ export function FileSelectorInput({
|
||||
? ((connectedCredential as Record<string, any>).id ?? '')
|
||||
: ''
|
||||
|
||||
// Derive provider from serviceId using OAuth config (same pattern as credential-selector)
|
||||
const serviceId = subBlock.serviceId || ''
|
||||
const effectiveProviderId = useMemo(() => getProviderIdFromServiceId(serviceId), [serviceId])
|
||||
|
||||
|
||||
@@ -4,17 +4,14 @@ import { useEffect, useMemo, useState } from 'react'
|
||||
import { useParams } from 'next/navigation'
|
||||
import { Tooltip } from '@/components/emcn'
|
||||
import { getProviderIdFromServiceId } from '@/lib/oauth'
|
||||
import { buildCanonicalIndex, resolveDependencyValue } from '@/lib/workflows/subblocks/visibility'
|
||||
import { SelectorCombobox } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/selector-combobox/selector-combobox'
|
||||
import { useDependsOnGate } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-depends-on-gate'
|
||||
import { useForeignCredential } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-foreign-credential'
|
||||
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value'
|
||||
import { getBlock } from '@/blocks/registry'
|
||||
import type { SubBlockConfig } from '@/blocks/types'
|
||||
import { resolveSelectorForSubBlock } from '@/hooks/selectors/resolution'
|
||||
import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
|
||||
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
|
||||
|
||||
interface ProjectSelectorInputProps {
|
||||
blockId: string
|
||||
@@ -35,36 +32,21 @@ export function ProjectSelectorInput({
|
||||
previewValue,
|
||||
previewContextValues,
|
||||
}: ProjectSelectorInputProps) {
|
||||
const { collaborativeSetSubblockValue } = useCollaborativeWorkflow()
|
||||
const params = useParams()
|
||||
const activeWorkflowId = useWorkflowRegistry((s) => s.activeWorkflowId) as string | null
|
||||
const [selectedProjectId, setSelectedProjectId] = useState<string>('')
|
||||
// Use the proper hook to get the current value and setter
|
||||
const [storeValue] = useSubBlockValue(blockId, subBlock.id)
|
||||
const [connectedCredentialFromStore] = useSubBlockValue(blockId, 'credential')
|
||||
const [linearTeamIdFromStore] = useSubBlockValue(blockId, 'teamId')
|
||||
const [jiraDomainFromStore] = useSubBlockValue(blockId, 'domain')
|
||||
|
||||
const blockState = useWorkflowStore((state) => state.blocks[blockId])
|
||||
const blockConfig = blockState?.type ? getBlock(blockState.type) : null
|
||||
const canonicalIndex = useMemo(
|
||||
() => buildCanonicalIndex(blockConfig?.subBlocks || []),
|
||||
[blockConfig?.subBlocks]
|
||||
)
|
||||
const canonicalModeOverrides = blockState?.data?.canonicalModes
|
||||
|
||||
const blockValues = useSubBlockStore((state) => {
|
||||
if (!activeWorkflowId) return {}
|
||||
const workflowValues = state.workflowValues[activeWorkflowId] || {}
|
||||
return (workflowValues as Record<string, Record<string, unknown>>)[blockId] || {}
|
||||
})
|
||||
|
||||
const connectedCredential = previewContextValues?.credential ?? blockValues.credential
|
||||
// Use previewContextValues if provided (for tools inside agent blocks), otherwise use store values
|
||||
const connectedCredential = previewContextValues?.credential ?? connectedCredentialFromStore
|
||||
const linearTeamId = previewContextValues?.teamId ?? linearTeamIdFromStore
|
||||
const jiraDomain = previewContextValues?.domain ?? jiraDomainFromStore
|
||||
|
||||
const linearTeamId = useMemo(
|
||||
() =>
|
||||
previewContextValues?.teamId ??
|
||||
resolveDependencyValue('teamId', blockValues, canonicalIndex, canonicalModeOverrides),
|
||||
[previewContextValues?.teamId, blockValues, canonicalIndex, canonicalModeOverrides]
|
||||
)
|
||||
|
||||
// Derive provider from serviceId using OAuth config
|
||||
const serviceId = subBlock.serviceId || ''
|
||||
const effectiveProviderId = useMemo(() => getProviderIdFromServiceId(serviceId), [serviceId])
|
||||
|
||||
@@ -72,6 +54,7 @@ export function ProjectSelectorInput({
|
||||
effectiveProviderId,
|
||||
(connectedCredential as string) || ''
|
||||
)
|
||||
const activeWorkflowId = useWorkflowRegistry((s) => s.activeWorkflowId) as string | null
|
||||
const workflowIdFromUrl = (params?.workflowId as string) || activeWorkflowId || ''
|
||||
const { finalDisabled } = useDependsOnGate(blockId, subBlock, {
|
||||
disabled,
|
||||
@@ -79,8 +62,12 @@ export function ProjectSelectorInput({
|
||||
previewContextValues,
|
||||
})
|
||||
|
||||
// Jira/Discord upstream fields - use values from previewContextValues or store
|
||||
const domain = (jiraDomain as string) || ''
|
||||
|
||||
// Verify Jira credential belongs to current user; if not, treat as absent
|
||||
|
||||
// Get the current value from the store or prop value if in preview mode
|
||||
useEffect(() => {
|
||||
if (isPreview && previewValue !== undefined) {
|
||||
setSelectedProjectId(previewValue)
|
||||
|
||||
@@ -4,17 +4,14 @@ import { useMemo } from 'react'
|
||||
import { useParams } from 'next/navigation'
|
||||
import { Tooltip } from '@/components/emcn'
|
||||
import { getProviderIdFromServiceId } from '@/lib/oauth'
|
||||
import { buildCanonicalIndex, resolveDependencyValue } from '@/lib/workflows/subblocks/visibility'
|
||||
import { SelectorCombobox } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/selector-combobox/selector-combobox'
|
||||
import { useDependsOnGate } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-depends-on-gate'
|
||||
import { useForeignCredential } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-foreign-credential'
|
||||
import { getBlock } from '@/blocks/registry'
|
||||
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value'
|
||||
import type { SubBlockConfig } from '@/blocks/types'
|
||||
import { resolveSelectorForSubBlock, type SelectorResolution } from '@/hooks/selectors/resolution'
|
||||
import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
|
||||
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
|
||||
|
||||
interface SheetSelectorInputProps {
|
||||
blockId: string
|
||||
@@ -44,32 +41,16 @@ export function SheetSelectorInput({
|
||||
previewContextValues,
|
||||
})
|
||||
|
||||
const blockState = useWorkflowStore((state) => state.blocks[blockId])
|
||||
const blockConfig = blockState?.type ? getBlock(blockState.type) : null
|
||||
const canonicalIndex = useMemo(
|
||||
() => buildCanonicalIndex(blockConfig?.subBlocks || []),
|
||||
[blockConfig?.subBlocks]
|
||||
)
|
||||
const canonicalModeOverrides = blockState?.data?.canonicalModes
|
||||
|
||||
const blockValues = useSubBlockStore((state) => {
|
||||
if (!activeWorkflowId) return {}
|
||||
const workflowValues = state.workflowValues[activeWorkflowId] || {}
|
||||
return (workflowValues as Record<string, Record<string, unknown>>)[blockId] || {}
|
||||
})
|
||||
|
||||
const connectedCredentialFromStore = blockValues.credential
|
||||
|
||||
const spreadsheetIdFromStore = useMemo(
|
||||
() =>
|
||||
resolveDependencyValue('spreadsheetId', blockValues, canonicalIndex, canonicalModeOverrides),
|
||||
[blockValues, canonicalIndex, canonicalModeOverrides]
|
||||
)
|
||||
const [connectedCredentialFromStore] = useSubBlockValue(blockId, 'credential')
|
||||
const [spreadsheetIdFromStore] = useSubBlockValue(blockId, 'spreadsheetId')
|
||||
const [manualSpreadsheetIdFromStore] = useSubBlockValue(blockId, 'manualSpreadsheetId')
|
||||
|
||||
const connectedCredential = previewContextValues?.credential ?? connectedCredentialFromStore
|
||||
const spreadsheetId = previewContextValues
|
||||
? (previewContextValues.spreadsheetId ?? previewContextValues.manualSpreadsheetId)
|
||||
: spreadsheetIdFromStore
|
||||
const spreadsheetId =
|
||||
previewContextValues?.spreadsheetId ??
|
||||
spreadsheetIdFromStore ??
|
||||
previewContextValues?.manualSpreadsheetId ??
|
||||
manualSpreadsheetIdFromStore
|
||||
|
||||
const normalizedCredentialId =
|
||||
typeof connectedCredential === 'string'
|
||||
@@ -80,6 +61,7 @@ export function SheetSelectorInput({
|
||||
|
||||
const normalizedSpreadsheetId = typeof spreadsheetId === 'string' ? spreadsheetId.trim() : ''
|
||||
|
||||
// Derive provider from serviceId using OAuth config
|
||||
const serviceId = subBlock.serviceId || ''
|
||||
const effectiveProviderId = useMemo(() => getProviderIdFromServiceId(serviceId), [serviceId])
|
||||
|
||||
|
||||
@@ -1,16 +1,9 @@
|
||||
'use client'
|
||||
|
||||
import { useMemo } from 'react'
|
||||
import {
|
||||
buildCanonicalIndex,
|
||||
isNonEmptyValue,
|
||||
resolveDependencyValue,
|
||||
} from '@/lib/workflows/subblocks/visibility'
|
||||
import { getBlock } from '@/blocks/registry'
|
||||
import type { SubBlockConfig } from '@/blocks/types'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
|
||||
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
|
||||
|
||||
type DependsOnConfig = string[] | { all?: string[]; any?: string[] }
|
||||
|
||||
@@ -57,13 +50,6 @@ export function useDependsOnGate(
|
||||
const previewContextValues = opts?.previewContextValues
|
||||
|
||||
const activeWorkflowId = useWorkflowRegistry((s) => s.activeWorkflowId)
|
||||
const blockState = useWorkflowStore((state) => state.blocks[blockId])
|
||||
const blockConfig = blockState?.type ? getBlock(blockState.type) : null
|
||||
const canonicalIndex = useMemo(
|
||||
() => buildCanonicalIndex(blockConfig?.subBlocks || []),
|
||||
[blockConfig?.subBlocks]
|
||||
)
|
||||
const canonicalModeOverrides = blockState?.data?.canonicalModes
|
||||
|
||||
// Parse dependsOn config to get all/any field lists
|
||||
const { allFields, anyFields, allDependsOnFields } = useMemo(
|
||||
@@ -105,13 +91,7 @@ export function useDependsOnGate(
|
||||
if (previewContextValues) {
|
||||
const map: Record<string, unknown> = {}
|
||||
for (const key of allDependsOnFields) {
|
||||
const resolvedValue = resolveDependencyValue(
|
||||
key,
|
||||
previewContextValues,
|
||||
canonicalIndex,
|
||||
canonicalModeOverrides
|
||||
)
|
||||
map[key] = normalizeDependencyValue(resolvedValue)
|
||||
map[key] = normalizeDependencyValue(previewContextValues[key])
|
||||
}
|
||||
return map
|
||||
}
|
||||
@@ -128,25 +108,32 @@ export function useDependsOnGate(
|
||||
const blockValues = (workflowValues as any)[blockId] || {}
|
||||
const map: Record<string, unknown> = {}
|
||||
for (const key of allDependsOnFields) {
|
||||
const resolvedValue = resolveDependencyValue(
|
||||
key,
|
||||
blockValues,
|
||||
canonicalIndex,
|
||||
canonicalModeOverrides
|
||||
)
|
||||
map[key] = normalizeDependencyValue(resolvedValue)
|
||||
map[key] = normalizeDependencyValue((blockValues as any)[key])
|
||||
}
|
||||
return map
|
||||
})
|
||||
|
||||
// For backward compatibility, also provide array of values
|
||||
const dependencyValues = useMemo(
|
||||
() => allDependsOnFields.map((key) => dependencyValuesMap[key]),
|
||||
[allDependsOnFields, dependencyValuesMap]
|
||||
) as any[]
|
||||
|
||||
const isValueSatisfied = (value: unknown): boolean => {
|
||||
if (value === null || value === undefined) return false
|
||||
if (typeof value === 'string') return value.trim().length > 0
|
||||
if (Array.isArray(value)) return value.length > 0
|
||||
return value !== ''
|
||||
}
|
||||
|
||||
const depsSatisfied = useMemo(() => {
|
||||
// Check all fields (AND logic) - all must be satisfied
|
||||
const allSatisfied =
|
||||
allFields.length === 0 || allFields.every((key) => isNonEmptyValue(dependencyValuesMap[key]))
|
||||
allFields.length === 0 || allFields.every((key) => isValueSatisfied(dependencyValuesMap[key]))
|
||||
|
||||
// Check any fields (OR logic) - at least one must be satisfied
|
||||
const anySatisfied =
|
||||
anyFields.length === 0 || anyFields.some((key) => isNonEmptyValue(dependencyValuesMap[key]))
|
||||
anyFields.length === 0 || anyFields.some((key) => isValueSatisfied(dependencyValuesMap[key]))
|
||||
|
||||
return allSatisfied && anySatisfied
|
||||
}, [allFields, anyFields, dependencyValuesMap])
|
||||
@@ -159,6 +146,7 @@ export function useDependsOnGate(
|
||||
|
||||
return {
|
||||
dependsOn,
|
||||
dependencyValues,
|
||||
depsSatisfied,
|
||||
blocked,
|
||||
finalDisabled,
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { type JSX, type MouseEvent, memo, useRef, useState } from 'react'
|
||||
import { AlertTriangle, ArrowLeftRight, ArrowUp } from 'lucide-react'
|
||||
import { AlertTriangle, ArrowUp } from 'lucide-react'
|
||||
import { Button, Input, Label, Tooltip } from '@/components/emcn/components'
|
||||
import { cn } from '@/lib/core/utils/cn'
|
||||
import type { FieldDiffStatus } from '@/lib/workflows/diff/types'
|
||||
@@ -67,11 +67,6 @@ interface SubBlockProps {
|
||||
disabled?: boolean
|
||||
fieldDiffStatus?: FieldDiffStatus
|
||||
allowExpandInPreview?: boolean
|
||||
canonicalToggle?: {
|
||||
mode: 'basic' | 'advanced'
|
||||
disabled?: boolean
|
||||
onToggle?: () => void
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -187,11 +182,6 @@ const renderLabel = (
|
||||
onSearchSubmit: () => void
|
||||
onSearchCancel: () => void
|
||||
searchInputRef: React.RefObject<HTMLInputElement | null>
|
||||
},
|
||||
canonicalToggle?: {
|
||||
mode: 'basic' | 'advanced'
|
||||
disabled?: boolean
|
||||
onToggle?: () => void
|
||||
}
|
||||
): JSX.Element | null => {
|
||||
if (config.type === 'switch') return null
|
||||
@@ -199,12 +189,13 @@ const renderLabel = (
|
||||
|
||||
const required = isFieldRequired(config, subBlockValues)
|
||||
const showWand = wandState?.isWandEnabled && !wandState.isPreview && !wandState.disabled
|
||||
const showCanonicalToggle = !!canonicalToggle && !wandState?.isPreview
|
||||
const canonicalToggleDisabled = wandState?.disabled || canonicalToggle?.disabled
|
||||
|
||||
return (
|
||||
<div className='flex items-center justify-between gap-[6px] pl-[2px]'>
|
||||
<Label className='flex items-center gap-[6px] whitespace-nowrap'>
|
||||
<Label
|
||||
className='flex items-center justify-between gap-[6px] pl-[2px]'
|
||||
onClick={(e) => e.preventDefault()}
|
||||
>
|
||||
<div className='flex items-center gap-[6px] whitespace-nowrap'>
|
||||
{config.title}
|
||||
{required && <span className='ml-0.5'>*</span>}
|
||||
{config.type === 'code' && config.language === 'json' && (
|
||||
@@ -222,82 +213,58 @@ const renderLabel = (
|
||||
</Tooltip.Content>
|
||||
</Tooltip.Root>
|
||||
)}
|
||||
</Label>
|
||||
<div className='flex items-center gap-[6px]'>
|
||||
{showWand && (
|
||||
<>
|
||||
{!wandState.isSearchActive ? (
|
||||
<Button
|
||||
variant='active'
|
||||
className='-my-1 h-5 px-2 py-0 text-[11px]'
|
||||
onClick={wandState.onSearchClick}
|
||||
>
|
||||
Generate
|
||||
</Button>
|
||||
) : (
|
||||
<div className='-my-1 flex items-center gap-[4px]'>
|
||||
<Input
|
||||
ref={wandState.searchInputRef}
|
||||
value={wandState.isStreaming ? 'Generating...' : wandState.searchQuery}
|
||||
onChange={(e) => wandState.onSearchChange(e.target.value)}
|
||||
onBlur={wandState.onSearchBlur}
|
||||
onKeyDown={(e) => {
|
||||
if (
|
||||
e.key === 'Enter' &&
|
||||
wandState.searchQuery.trim() &&
|
||||
!wandState.isStreaming
|
||||
) {
|
||||
wandState.onSearchSubmit()
|
||||
} else if (e.key === 'Escape') {
|
||||
wandState.onSearchCancel()
|
||||
}
|
||||
}}
|
||||
disabled={wandState.isStreaming}
|
||||
className={cn(
|
||||
'h-5 max-w-[200px] flex-1 text-[11px]',
|
||||
wandState.isStreaming && 'text-muted-foreground'
|
||||
)}
|
||||
placeholder='Generate with AI...'
|
||||
/>
|
||||
<Button
|
||||
variant='tertiary'
|
||||
disabled={!wandState.searchQuery.trim() || wandState.isStreaming}
|
||||
onMouseDown={(e) => {
|
||||
e.preventDefault()
|
||||
e.stopPropagation()
|
||||
}}
|
||||
onClick={(e) => {
|
||||
e.stopPropagation()
|
||||
wandState.onSearchSubmit()
|
||||
}}
|
||||
className='h-[20px] w-[20px] flex-shrink-0 p-0'
|
||||
>
|
||||
<ArrowUp className='h-[12px] w-[12px]' />
|
||||
</Button>
|
||||
</div>
|
||||
)}
|
||||
</>
|
||||
)}
|
||||
{showCanonicalToggle && (
|
||||
<button
|
||||
type='button'
|
||||
className='flex h-[12px] w-[12px] flex-shrink-0 items-center justify-center bg-transparent p-0 disabled:cursor-not-allowed disabled:opacity-50'
|
||||
onClick={canonicalToggle?.onToggle}
|
||||
disabled={canonicalToggleDisabled}
|
||||
aria-label={canonicalToggle?.mode === 'advanced' ? 'Use selector' : 'Enter manual ID'}
|
||||
>
|
||||
<ArrowLeftRight
|
||||
className={cn(
|
||||
'!h-[12px] !w-[12px]',
|
||||
canonicalToggle?.mode === 'advanced'
|
||||
? 'text-[var(--text-primary)]'
|
||||
: 'text-[var(--text-secondary)]'
|
||||
)}
|
||||
/>
|
||||
</button>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
{showWand && (
|
||||
<>
|
||||
{!wandState.isSearchActive ? (
|
||||
<Button
|
||||
variant='active'
|
||||
className='-my-1 h-5 px-2 py-0 text-[11px]'
|
||||
onClick={wandState.onSearchClick}
|
||||
>
|
||||
Generate
|
||||
</Button>
|
||||
) : (
|
||||
<div className='-my-1 flex items-center gap-[4px]'>
|
||||
<Input
|
||||
ref={wandState.searchInputRef}
|
||||
value={wandState.isStreaming ? 'Generating...' : wandState.searchQuery}
|
||||
onChange={(e) => wandState.onSearchChange(e.target.value)}
|
||||
onBlur={wandState.onSearchBlur}
|
||||
onKeyDown={(e) => {
|
||||
if (e.key === 'Enter' && wandState.searchQuery.trim() && !wandState.isStreaming) {
|
||||
wandState.onSearchSubmit()
|
||||
} else if (e.key === 'Escape') {
|
||||
wandState.onSearchCancel()
|
||||
}
|
||||
}}
|
||||
disabled={wandState.isStreaming}
|
||||
className={cn(
|
||||
'h-5 max-w-[200px] flex-1 text-[11px]',
|
||||
wandState.isStreaming && 'text-muted-foreground'
|
||||
)}
|
||||
placeholder='Generate...'
|
||||
/>
|
||||
<Button
|
||||
variant='tertiary'
|
||||
disabled={!wandState.searchQuery.trim() || wandState.isStreaming}
|
||||
onMouseDown={(e) => {
|
||||
e.preventDefault()
|
||||
e.stopPropagation()
|
||||
}}
|
||||
onClick={(e) => {
|
||||
e.stopPropagation()
|
||||
wandState.onSearchSubmit()
|
||||
}}
|
||||
className='h-[20px] w-[20px] flex-shrink-0 p-0'
|
||||
>
|
||||
<ArrowUp className='h-[12px] w-[12px]' />
|
||||
</Button>
|
||||
</div>
|
||||
)}
|
||||
</>
|
||||
)}
|
||||
</Label>
|
||||
)
|
||||
}
|
||||
|
||||
@@ -320,9 +287,7 @@ const arePropsEqual = (prevProps: SubBlockProps, nextProps: SubBlockProps): bool
|
||||
prevProps.subBlockValues === nextProps.subBlockValues &&
|
||||
prevProps.disabled === nextProps.disabled &&
|
||||
prevProps.fieldDiffStatus === nextProps.fieldDiffStatus &&
|
||||
prevProps.allowExpandInPreview === nextProps.allowExpandInPreview &&
|
||||
prevProps.canonicalToggle?.mode === nextProps.canonicalToggle?.mode &&
|
||||
prevProps.canonicalToggle?.disabled === nextProps.canonicalToggle?.disabled
|
||||
prevProps.allowExpandInPreview === nextProps.allowExpandInPreview
|
||||
)
|
||||
}
|
||||
|
||||
@@ -351,7 +316,6 @@ function SubBlockComponent({
|
||||
disabled = false,
|
||||
fieldDiffStatus,
|
||||
allowExpandInPreview,
|
||||
canonicalToggle,
|
||||
}: SubBlockProps): JSX.Element {
|
||||
const [isValidJson, setIsValidJson] = useState(true)
|
||||
const [isSearchActive, setIsSearchActive] = useState(false)
|
||||
@@ -923,26 +887,20 @@ function SubBlockComponent({
|
||||
|
||||
return (
|
||||
<div onMouseDown={handleMouseDown} className='subblock-content flex flex-col gap-[10px]'>
|
||||
{renderLabel(
|
||||
config,
|
||||
isValidJson,
|
||||
subBlockValues,
|
||||
{
|
||||
isSearchActive,
|
||||
searchQuery,
|
||||
isWandEnabled,
|
||||
isPreview,
|
||||
isStreaming: wandControlRef.current?.isWandStreaming ?? false,
|
||||
disabled: isDisabled,
|
||||
onSearchClick: handleSearchClick,
|
||||
onSearchBlur: handleSearchBlur,
|
||||
onSearchChange: handleSearchChange,
|
||||
onSearchSubmit: handleSearchSubmit,
|
||||
onSearchCancel: handleSearchCancel,
|
||||
searchInputRef,
|
||||
},
|
||||
canonicalToggle
|
||||
)}
|
||||
{renderLabel(config, isValidJson, subBlockValues, {
|
||||
isSearchActive,
|
||||
searchQuery,
|
||||
isWandEnabled,
|
||||
isPreview,
|
||||
isStreaming: wandControlRef.current?.isWandStreaming ?? false,
|
||||
disabled: isDisabled,
|
||||
onSearchClick: handleSearchClick,
|
||||
onSearchBlur: handleSearchBlur,
|
||||
onSearchChange: handleSearchChange,
|
||||
onSearchSubmit: handleSearchSubmit,
|
||||
onSearchCancel: handleSearchCancel,
|
||||
searchInputRef,
|
||||
})}
|
||||
{renderInput()}
|
||||
</div>
|
||||
)
|
||||
|
||||
@@ -1,15 +1,8 @@
|
||||
'use client'
|
||||
|
||||
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
|
||||
import { BookOpen, Check, ChevronDown, ChevronUp, Pencil } from 'lucide-react'
|
||||
import { useCallback, useEffect, useRef, useState } from 'react'
|
||||
import { BookOpen, Check, ChevronUp, Pencil, Settings } from 'lucide-react'
|
||||
import { Button, Tooltip } from '@/components/emcn'
|
||||
import {
|
||||
buildCanonicalIndex,
|
||||
hasAdvancedValues,
|
||||
hasStandaloneAdvancedFields,
|
||||
isCanonicalPair,
|
||||
resolveCanonicalMode,
|
||||
} from '@/lib/workflows/subblocks/visibility'
|
||||
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
|
||||
import {
|
||||
ConnectionBlocks,
|
||||
@@ -27,7 +20,6 @@ import { ParallelTool } from '@/app/workspace/[workspaceId]/w/[workflowId]/compo
|
||||
import { getSubBlockStableKey } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/utils'
|
||||
import { useCurrentWorkflow } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks'
|
||||
import { getBlock } from '@/blocks/registry'
|
||||
import type { SubBlockType } from '@/blocks/types'
|
||||
import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'
|
||||
import { usePanelEditorStore } from '@/stores/panel'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
@@ -97,65 +89,17 @@ export function Editor() {
|
||||
)
|
||||
)
|
||||
|
||||
const subBlocksForCanonical = useMemo(() => {
|
||||
const subBlocks = blockConfig?.subBlocks || []
|
||||
if (!triggerMode) return subBlocks
|
||||
return subBlocks.filter(
|
||||
(subBlock) =>
|
||||
subBlock.mode === 'trigger' || subBlock.type === ('trigger-config' as SubBlockType)
|
||||
)
|
||||
}, [blockConfig?.subBlocks, triggerMode])
|
||||
|
||||
const canonicalIndex = useMemo(
|
||||
() => buildCanonicalIndex(subBlocksForCanonical),
|
||||
[subBlocksForCanonical]
|
||||
)
|
||||
const canonicalModeOverrides = currentBlock?.data?.canonicalModes
|
||||
const advancedValuesPresent = hasAdvancedValues(
|
||||
subBlocksForCanonical,
|
||||
blockSubBlockValues,
|
||||
canonicalIndex
|
||||
)
|
||||
const displayAdvancedOptions = advancedMode || advancedValuesPresent
|
||||
|
||||
const hasAdvancedOnlyFields = useMemo(
|
||||
() => hasStandaloneAdvancedFields(subBlocksForCanonical, canonicalIndex),
|
||||
[subBlocksForCanonical, canonicalIndex]
|
||||
)
|
||||
|
||||
// Get subblock layout using custom hook
|
||||
const { subBlocks, stateToUse: subBlockState } = useEditorSubblockLayout(
|
||||
blockConfig || ({} as any),
|
||||
currentBlockId || '',
|
||||
displayAdvancedOptions,
|
||||
advancedMode,
|
||||
triggerMode,
|
||||
activeWorkflowId,
|
||||
blockSubBlockValues,
|
||||
currentWorkflow.isSnapshotView
|
||||
)
|
||||
|
||||
/**
|
||||
* Partitions subBlocks into regular fields and standalone advanced-only fields.
|
||||
* Standalone advanced fields have mode 'advanced' and are not part of a canonical swap pair.
|
||||
*/
|
||||
const { regularSubBlocks, advancedOnlySubBlocks } = useMemo(() => {
|
||||
const regular: typeof subBlocks = []
|
||||
const advancedOnly: typeof subBlocks = []
|
||||
|
||||
for (const subBlock of subBlocks) {
|
||||
const isStandaloneAdvanced =
|
||||
subBlock.mode === 'advanced' && !canonicalIndex.canonicalIdBySubBlockId[subBlock.id]
|
||||
|
||||
if (isStandaloneAdvanced) {
|
||||
advancedOnly.push(subBlock)
|
||||
} else {
|
||||
regular.push(subBlock)
|
||||
}
|
||||
}
|
||||
|
||||
return { regularSubBlocks: regular, advancedOnlySubBlocks: advancedOnly }
|
||||
}, [subBlocks, canonicalIndex.canonicalIdBySubBlockId])
|
||||
|
||||
// Get block connections
|
||||
const { incomingConnections, hasIncomingConnections } = useBlockConnections(currentBlockId || '')
|
||||
|
||||
@@ -165,23 +109,21 @@ export function Editor() {
|
||||
})
|
||||
|
||||
// Collaborative actions
|
||||
const {
|
||||
collaborativeSetBlockCanonicalMode,
|
||||
collaborativeUpdateBlockName,
|
||||
collaborativeToggleBlockAdvancedMode,
|
||||
} = useCollaborativeWorkflow()
|
||||
|
||||
// Advanced mode toggle handler
|
||||
const handleToggleAdvancedMode = useCallback(() => {
|
||||
if (!currentBlockId || !userPermissions.canEdit) return
|
||||
collaborativeToggleBlockAdvancedMode(currentBlockId)
|
||||
}, [currentBlockId, userPermissions.canEdit, collaborativeToggleBlockAdvancedMode])
|
||||
const { collaborativeToggleBlockAdvancedMode, collaborativeUpdateBlockName } =
|
||||
useCollaborativeWorkflow()
|
||||
|
||||
// Rename state
|
||||
const [isRenaming, setIsRenaming] = useState(false)
|
||||
const [editedName, setEditedName] = useState('')
|
||||
const nameInputRef = useRef<HTMLInputElement>(null)
|
||||
|
||||
// Mode toggle handlers
|
||||
const handleToggleAdvancedMode = useCallback(() => {
|
||||
if (currentBlockId && userPermissions.canEdit) {
|
||||
collaborativeToggleBlockAdvancedMode(currentBlockId)
|
||||
}
|
||||
}, [currentBlockId, userPermissions.canEdit, collaborativeToggleBlockAdvancedMode])
|
||||
|
||||
/**
|
||||
* Handles starting the rename process.
|
||||
*/
|
||||
@@ -241,6 +183,9 @@ export function Editor() {
|
||||
}
|
||||
}
|
||||
|
||||
// Check if block has advanced mode or trigger mode available
|
||||
const hasAdvancedMode = blockConfig?.subBlocks?.some((sb) => sb.mode === 'advanced')
|
||||
|
||||
// Determine if connections are at minimum height (collapsed state)
|
||||
const isConnectionsAtMinHeight = connectionsHeight <= 35
|
||||
|
||||
@@ -333,6 +278,25 @@ export function Editor() {
|
||||
</Tooltip.Content>
|
||||
</Tooltip.Root>
|
||||
)} */}
|
||||
{/* Mode toggles - Only show for regular blocks, not subflows */}
|
||||
{currentBlock && !isSubflow && hasAdvancedMode && (
|
||||
<Tooltip.Root>
|
||||
<Tooltip.Trigger asChild>
|
||||
<Button
|
||||
variant='ghost'
|
||||
className='p-0'
|
||||
onClick={handleToggleAdvancedMode}
|
||||
disabled={!userPermissions.canEdit}
|
||||
aria-label='Toggle advanced mode'
|
||||
>
|
||||
<Settings className='h-[14px] w-[14px]' />
|
||||
</Button>
|
||||
</Tooltip.Trigger>
|
||||
<Tooltip.Content side='top'>
|
||||
<p>Advanced mode</p>
|
||||
</Tooltip.Content>
|
||||
</Tooltip.Root>
|
||||
)}
|
||||
{currentBlock && (isSubflow ? subflowConfig?.docsLink : blockConfig?.docsLink) && (
|
||||
<Tooltip.Root>
|
||||
<Tooltip.Trigger asChild>
|
||||
@@ -378,111 +342,14 @@ export function Editor() {
|
||||
ref={subBlocksRef}
|
||||
className='subblocks-section flex flex-1 flex-col overflow-hidden'
|
||||
>
|
||||
<div className='flex-1 overflow-y-auto overflow-x-hidden px-[8px] pt-[12px] pb-[8px] [overflow-anchor:none]'>
|
||||
<div className='flex-1 overflow-y-auto overflow-x-hidden px-[8px] pt-[12px] pb-[8px]'>
|
||||
{subBlocks.length === 0 ? (
|
||||
<div className='flex h-full items-center justify-center text-center text-[#8D8D8D] text-[13px]'>
|
||||
This block has no subblocks
|
||||
</div>
|
||||
) : (
|
||||
<div className='flex flex-col'>
|
||||
{regularSubBlocks.map((subBlock, index) => {
|
||||
const stableKey = getSubBlockStableKey(
|
||||
currentBlockId || '',
|
||||
subBlock,
|
||||
subBlockState
|
||||
)
|
||||
const canonicalId = canonicalIndex.canonicalIdBySubBlockId[subBlock.id]
|
||||
const canonicalGroup = canonicalId
|
||||
? canonicalIndex.groupsById[canonicalId]
|
||||
: undefined
|
||||
const isCanonicalSwap = isCanonicalPair(canonicalGroup)
|
||||
const canonicalMode =
|
||||
canonicalGroup && isCanonicalSwap
|
||||
? resolveCanonicalMode(
|
||||
canonicalGroup,
|
||||
blockSubBlockValues,
|
||||
canonicalModeOverrides
|
||||
)
|
||||
: undefined
|
||||
|
||||
const showDivider =
|
||||
index < regularSubBlocks.length - 1 ||
|
||||
(!hasAdvancedOnlyFields && index < subBlocks.length - 1)
|
||||
|
||||
return (
|
||||
<div key={stableKey} className='subblock-row'>
|
||||
<SubBlock
|
||||
blockId={currentBlockId}
|
||||
config={subBlock}
|
||||
isPreview={false}
|
||||
subBlockValues={subBlockState}
|
||||
disabled={!userPermissions.canEdit}
|
||||
fieldDiffStatus={undefined}
|
||||
allowExpandInPreview={false}
|
||||
canonicalToggle={
|
||||
isCanonicalSwap && canonicalMode && canonicalId
|
||||
? {
|
||||
mode: canonicalMode,
|
||||
disabled: !userPermissions.canEdit,
|
||||
onToggle: () => {
|
||||
if (!currentBlockId) return
|
||||
const nextMode =
|
||||
canonicalMode === 'advanced' ? 'basic' : 'advanced'
|
||||
collaborativeSetBlockCanonicalMode(
|
||||
currentBlockId,
|
||||
canonicalId,
|
||||
nextMode
|
||||
)
|
||||
},
|
||||
}
|
||||
: undefined
|
||||
}
|
||||
/>
|
||||
{showDivider && (
|
||||
<div className='subblock-divider px-[2px] pt-[16px] pb-[13px]'>
|
||||
<div
|
||||
className='h-[1.25px]'
|
||||
style={{
|
||||
backgroundImage:
|
||||
'repeating-linear-gradient(to right, var(--border) 0px, var(--border) 6px, transparent 6px, transparent 12px)',
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
)
|
||||
})}
|
||||
|
||||
{hasAdvancedOnlyFields && userPermissions.canEdit && (
|
||||
<div className='flex items-center gap-[10px] px-[2px] pt-[14px] pb-[12px]'>
|
||||
<div
|
||||
className='h-[1.25px] flex-1'
|
||||
style={{
|
||||
backgroundImage:
|
||||
'repeating-linear-gradient(to right, var(--border) 0px, var(--border) 6px, transparent 6px, transparent 12px)',
|
||||
}}
|
||||
/>
|
||||
<button
|
||||
type='button'
|
||||
onClick={handleToggleAdvancedMode}
|
||||
className='flex items-center gap-[6px] whitespace-nowrap font-medium text-[13px] text-[var(--text-secondary)] hover:text-[var(--text-primary)]'
|
||||
>
|
||||
{displayAdvancedOptions ? 'Hide advanced fields' : 'Show advanced fields'}
|
||||
<ChevronDown
|
||||
className={`h-[14px] w-[14px] transition-transform duration-200 ${displayAdvancedOptions ? 'rotate-180' : ''}`}
|
||||
/>
|
||||
</button>
|
||||
<div
|
||||
className='h-[1.25px] flex-1'
|
||||
style={{
|
||||
backgroundImage:
|
||||
'repeating-linear-gradient(to right, var(--border) 0px, var(--border) 6px, transparent 6px, transparent 12px)',
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{advancedOnlySubBlocks.map((subBlock, index) => {
|
||||
{subBlocks.map((subBlock, index) => {
|
||||
const stableKey = getSubBlockStableKey(
|
||||
currentBlockId || '',
|
||||
subBlock,
|
||||
@@ -500,7 +367,7 @@ export function Editor() {
|
||||
fieldDiffStatus={undefined}
|
||||
allowExpandInPreview={false}
|
||||
/>
|
||||
{index < advancedOnlySubBlocks.length - 1 && (
|
||||
{index < subBlocks.length - 1 && (
|
||||
<div className='subblock-divider px-[2px] pt-[16px] pb-[13px]'>
|
||||
<div
|
||||
className='h-[1.25px]'
|
||||
|
||||
@@ -1,10 +1,5 @@
|
||||
import { useCallback, useMemo } from 'react'
|
||||
import {
|
||||
buildCanonicalIndex,
|
||||
evaluateSubBlockCondition,
|
||||
isSubBlockFeatureEnabled,
|
||||
isSubBlockVisibleForMode,
|
||||
} from '@/lib/workflows/subblocks/visibility'
|
||||
import { useMemo } from 'react'
|
||||
import { getEnv, isTruthy } from '@/lib/core/config/env'
|
||||
import type { BlockConfig, SubBlockConfig, SubBlockType } from '@/blocks/types'
|
||||
import { useWorkflowDiffStore } from '@/stores/workflow-diff'
|
||||
import { mergeSubblockState } from '@/stores/workflows/utils'
|
||||
@@ -32,10 +27,6 @@ export function useEditorSubblockLayout(
|
||||
blockSubBlockValues: Record<string, any>,
|
||||
isSnapshotView: boolean
|
||||
) {
|
||||
const blockDataFromStore = useWorkflowStore(
|
||||
useCallback((state) => state.blocks?.[blockId]?.data, [blockId])
|
||||
)
|
||||
|
||||
return useMemo(() => {
|
||||
// Guard against missing config or block selection
|
||||
if (!config || !Array.isArray((config as any).subBlocks) || !blockId) {
|
||||
@@ -55,7 +46,6 @@ export function useEditorSubblockLayout(
|
||||
|
||||
const mergedState = mergedMap ? mergedMap[blockId] : undefined
|
||||
const mergedSubBlocks = mergedState?.subBlocks || {}
|
||||
const blockData = isSnapshotView ? mergedState?.data || {} : blockDataFromStore || {}
|
||||
|
||||
const stateToUse = Object.keys(mergedSubBlocks).reduce(
|
||||
(acc, key) => {
|
||||
@@ -79,29 +69,13 @@ export function useEditorSubblockLayout(
|
||||
}
|
||||
|
||||
// Filter visible blocks and those that meet their conditions
|
||||
const rawValues = Object.entries(stateToUse).reduce<Record<string, unknown>>(
|
||||
(acc, [key, entry]) => {
|
||||
acc[key] = entry?.value
|
||||
return acc
|
||||
},
|
||||
{}
|
||||
)
|
||||
|
||||
const subBlocksForCanonical = displayTriggerMode
|
||||
? (config.subBlocks || []).filter(
|
||||
(subBlock) =>
|
||||
subBlock.mode === 'trigger' || subBlock.type === ('trigger-config' as SubBlockType)
|
||||
)
|
||||
: config.subBlocks || []
|
||||
const canonicalIndex = buildCanonicalIndex(subBlocksForCanonical)
|
||||
const effectiveAdvanced = displayAdvancedMode
|
||||
const canonicalModeOverrides = blockData?.canonicalModes
|
||||
|
||||
const visibleSubBlocks = (config.subBlocks || []).filter((block) => {
|
||||
if (block.hidden) return false
|
||||
|
||||
// Check required feature if specified - declarative feature gating
|
||||
if (!isSubBlockFeatureEnabled(block)) return false
|
||||
if (block.requiresFeature && !isTruthy(getEnv(block.requiresFeature))) {
|
||||
return false
|
||||
}
|
||||
|
||||
// Special handling for trigger-config type (legacy trigger configuration UI)
|
||||
if (block.type === ('trigger-config' as SubBlockType)) {
|
||||
@@ -110,8 +84,13 @@ export function useEditorSubblockLayout(
|
||||
}
|
||||
|
||||
// Filter by mode if specified
|
||||
if (block.mode === 'trigger') {
|
||||
if (!displayTriggerMode) return false
|
||||
if (block.mode) {
|
||||
if (block.mode === 'basic' && displayAdvancedMode) return false
|
||||
if (block.mode === 'advanced' && !displayAdvancedMode) return false
|
||||
if (block.mode === 'trigger') {
|
||||
// Show trigger mode blocks only when in trigger mode
|
||||
if (!displayTriggerMode) return false
|
||||
}
|
||||
}
|
||||
|
||||
// When in trigger mode, hide blocks that don't have mode: 'trigger'
|
||||
@@ -119,22 +98,42 @@ export function useEditorSubblockLayout(
|
||||
return false
|
||||
}
|
||||
|
||||
if (
|
||||
!isSubBlockVisibleForMode(
|
||||
block,
|
||||
effectiveAdvanced,
|
||||
canonicalIndex,
|
||||
rawValues,
|
||||
canonicalModeOverrides
|
||||
)
|
||||
) {
|
||||
return false
|
||||
}
|
||||
|
||||
// If there's no condition, the block should be shown
|
||||
if (!block.condition) return true
|
||||
|
||||
return evaluateSubBlockCondition(block.condition, rawValues)
|
||||
// If condition is a function, call it to get the actual condition object
|
||||
const actualCondition =
|
||||
typeof block.condition === 'function' ? block.condition() : block.condition
|
||||
|
||||
// Get the values of the fields this block depends on from the appropriate state
|
||||
const fieldValue = stateToUse[actualCondition.field]?.value
|
||||
const andFieldValue = actualCondition.and
|
||||
? stateToUse[actualCondition.and.field]?.value
|
||||
: undefined
|
||||
|
||||
// Check if the condition value is an array
|
||||
const isValueMatch = Array.isArray(actualCondition.value)
|
||||
? fieldValue != null &&
|
||||
(actualCondition.not
|
||||
? !actualCondition.value.includes(fieldValue as string | number | boolean)
|
||||
: actualCondition.value.includes(fieldValue as string | number | boolean))
|
||||
: actualCondition.not
|
||||
? fieldValue !== actualCondition.value
|
||||
: fieldValue === actualCondition.value
|
||||
|
||||
// Check both conditions if 'and' is present
|
||||
const isAndValueMatch =
|
||||
!actualCondition.and ||
|
||||
(Array.isArray(actualCondition.and.value)
|
||||
? andFieldValue != null &&
|
||||
(actualCondition.and.not
|
||||
? !actualCondition.and.value.includes(andFieldValue as string | number | boolean)
|
||||
: actualCondition.and.value.includes(andFieldValue as string | number | boolean))
|
||||
: actualCondition.and.not
|
||||
? andFieldValue !== actualCondition.and.value
|
||||
: andFieldValue === actualCondition.and.value)
|
||||
|
||||
return isValueMatch && isAndValueMatch
|
||||
})
|
||||
|
||||
return { subBlocks: visibleSubBlocks, stateToUse }
|
||||
@@ -148,6 +147,5 @@ export function useEditorSubblockLayout(
|
||||
blockSubBlockValues,
|
||||
activeWorkflowId,
|
||||
isSnapshotView,
|
||||
blockDataFromStore,
|
||||
])
|
||||
}
|
||||
|
||||
@@ -3,18 +3,11 @@ import { createLogger } from '@sim/logger'
|
||||
import { useParams } from 'next/navigation'
|
||||
import { Handle, type NodeProps, Position, useUpdateNodeInternals } from 'reactflow'
|
||||
import { Badge, Tooltip } from '@/components/emcn'
|
||||
import { getEnv, isTruthy } from '@/lib/core/config/env'
|
||||
import { cn } from '@/lib/core/utils/cn'
|
||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||
import { createMcpToolId } from '@/lib/mcp/utils'
|
||||
import { getProviderIdFromServiceId } from '@/lib/oauth'
|
||||
import {
|
||||
buildCanonicalIndex,
|
||||
evaluateSubBlockCondition,
|
||||
hasAdvancedValues,
|
||||
isSubBlockFeatureEnabled,
|
||||
isSubBlockVisibleForMode,
|
||||
resolveDependencyValue,
|
||||
} from '@/lib/workflows/subblocks/visibility'
|
||||
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
|
||||
import { ActionBar } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/action-bar/action-bar'
|
||||
import {
|
||||
@@ -337,9 +330,6 @@ const SubBlockRow = ({
|
||||
workflowId,
|
||||
blockId,
|
||||
allSubBlockValues,
|
||||
displayAdvancedOptions,
|
||||
canonicalIndex,
|
||||
canonicalModeOverrides,
|
||||
}: {
|
||||
title: string
|
||||
value?: string
|
||||
@@ -349,9 +339,6 @@ const SubBlockRow = ({
|
||||
workflowId?: string
|
||||
blockId?: string
|
||||
allSubBlockValues?: Record<string, { value: unknown }>
|
||||
displayAdvancedOptions?: boolean
|
||||
canonicalIndex?: ReturnType<typeof buildCanonicalIndex>
|
||||
canonicalModeOverrides?: Record<string, 'basic' | 'advanced'>
|
||||
}) => {
|
||||
const getStringValue = useCallback(
|
||||
(key?: string): string | undefined => {
|
||||
@@ -362,43 +349,17 @@ const SubBlockRow = ({
|
||||
[allSubBlockValues]
|
||||
)
|
||||
|
||||
const rawValues = useMemo(() => {
|
||||
if (!allSubBlockValues) return {}
|
||||
return Object.entries(allSubBlockValues).reduce<Record<string, unknown>>(
|
||||
(acc, [key, entry]) => {
|
||||
acc[key] = entry?.value
|
||||
return acc
|
||||
},
|
||||
{}
|
||||
)
|
||||
}, [allSubBlockValues])
|
||||
|
||||
const dependencyValues = useMemo(() => {
|
||||
const fields = getDependsOnFields(subBlock?.dependsOn)
|
||||
if (!fields.length) return {}
|
||||
return fields.reduce<Record<string, string>>((accumulator, dependency) => {
|
||||
const dependencyValue = resolveDependencyValue(
|
||||
dependency,
|
||||
rawValues,
|
||||
canonicalIndex || buildCanonicalIndex([]),
|
||||
canonicalModeOverrides
|
||||
)
|
||||
const dependencyString =
|
||||
typeof dependencyValue === 'string' && dependencyValue.length > 0
|
||||
? dependencyValue
|
||||
: undefined
|
||||
if (dependencyString) {
|
||||
accumulator[dependency] = dependencyString
|
||||
const dependencyValue = getStringValue(dependency)
|
||||
if (dependencyValue) {
|
||||
accumulator[dependency] = dependencyValue
|
||||
}
|
||||
return accumulator
|
||||
}, {})
|
||||
}, [
|
||||
canonicalIndex,
|
||||
canonicalModeOverrides,
|
||||
displayAdvancedOptions,
|
||||
rawValues,
|
||||
subBlock?.dependsOn,
|
||||
])
|
||||
}, [getStringValue, subBlock?.dependsOn])
|
||||
|
||||
const credentialSourceId =
|
||||
subBlock?.type === 'oauth-input' && typeof rawValue === 'string' ? rawValue : undefined
|
||||
@@ -622,8 +583,6 @@ export const WorkflowBlock = memo(function WorkflowBlock({
|
||||
|
||||
const { mutate: deployChildWorkflow, isPending: isDeploying } = useDeployChildWorkflow()
|
||||
|
||||
const userPermissions = useUserPermissionsContext()
|
||||
|
||||
const currentStoreBlock = currentWorkflow.getBlockById(id)
|
||||
|
||||
const isStarterBlock = type === 'starter'
|
||||
@@ -642,8 +601,6 @@ export const WorkflowBlock = memo(function WorkflowBlock({
|
||||
[activeWorkflowId, id]
|
||||
)
|
||||
)
|
||||
const canonicalIndex = useMemo(() => buildCanonicalIndex(config.subBlocks), [config.subBlocks])
|
||||
const canonicalModeOverrides = currentStoreBlock?.data?.canonicalModes
|
||||
|
||||
const subBlockRowsData = useMemo(() => {
|
||||
const rows: SubBlockConfig[][] = []
|
||||
@@ -666,23 +623,16 @@ export const WorkflowBlock = memo(function WorkflowBlock({
|
||||
{} as Record<string, { value: unknown }>
|
||||
)
|
||||
|
||||
const rawValues = Object.entries(stateToUse).reduce<Record<string, unknown>>(
|
||||
(acc, [key, entry]) => {
|
||||
acc[key] = entry?.value
|
||||
return acc
|
||||
},
|
||||
{}
|
||||
)
|
||||
|
||||
const effectiveAdvanced = userPermissions.canEdit
|
||||
? displayAdvancedMode
|
||||
: displayAdvancedMode || hasAdvancedValues(config.subBlocks, rawValues, canonicalIndex)
|
||||
const effectiveAdvanced = displayAdvancedMode
|
||||
const effectiveTrigger = displayTriggerMode
|
||||
|
||||
const visibleSubBlocks = config.subBlocks.filter((block) => {
|
||||
if (block.hidden) return false
|
||||
if (block.hideFromPreview) return false
|
||||
if (!isSubBlockFeatureEnabled(block)) return false
|
||||
|
||||
if (block.requiresFeature && !isTruthy(getEnv(block.requiresFeature))) {
|
||||
return false
|
||||
}
|
||||
|
||||
const isPureTriggerBlock = config?.triggers?.enabled && config.category === 'triggers'
|
||||
|
||||
@@ -700,21 +650,40 @@ export const WorkflowBlock = memo(function WorkflowBlock({
|
||||
}
|
||||
}
|
||||
|
||||
if (
|
||||
!isSubBlockVisibleForMode(
|
||||
block,
|
||||
effectiveAdvanced,
|
||||
canonicalIndex,
|
||||
rawValues,
|
||||
canonicalModeOverrides
|
||||
)
|
||||
) {
|
||||
return false
|
||||
}
|
||||
if (block.mode === 'basic' && effectiveAdvanced) return false
|
||||
if (block.mode === 'advanced' && !effectiveAdvanced) return false
|
||||
|
||||
if (!block.condition) return true
|
||||
|
||||
return evaluateSubBlockCondition(block.condition, rawValues)
|
||||
const actualCondition =
|
||||
typeof block.condition === 'function' ? block.condition() : block.condition
|
||||
|
||||
const fieldValue = stateToUse[actualCondition.field]?.value
|
||||
const andFieldValue = actualCondition.and
|
||||
? stateToUse[actualCondition.and.field]?.value
|
||||
: undefined
|
||||
|
||||
const isValueMatch = Array.isArray(actualCondition.value)
|
||||
? fieldValue != null &&
|
||||
(actualCondition.not
|
||||
? !actualCondition.value.includes(fieldValue as string | number | boolean)
|
||||
: actualCondition.value.includes(fieldValue as string | number | boolean))
|
||||
: actualCondition.not
|
||||
? fieldValue !== actualCondition.value
|
||||
: fieldValue === actualCondition.value
|
||||
|
||||
const isAndValueMatch =
|
||||
!actualCondition.and ||
|
||||
(Array.isArray(actualCondition.and.value)
|
||||
? andFieldValue != null &&
|
||||
(actualCondition.and.not
|
||||
? !actualCondition.and.value.includes(andFieldValue as string | number | boolean)
|
||||
: actualCondition.and.value.includes(andFieldValue as string | number | boolean))
|
||||
: actualCondition.and.not
|
||||
? andFieldValue !== actualCondition.and.value
|
||||
: andFieldValue === actualCondition.and.value)
|
||||
|
||||
return isValueMatch && isAndValueMatch
|
||||
})
|
||||
|
||||
visibleSubBlocks.forEach((block) => {
|
||||
@@ -746,33 +715,12 @@ export const WorkflowBlock = memo(function WorkflowBlock({
|
||||
data.subBlockValues,
|
||||
currentWorkflow.isDiffMode,
|
||||
currentBlock,
|
||||
canonicalModeOverrides,
|
||||
userPermissions.canEdit,
|
||||
canonicalIndex,
|
||||
blockSubBlockValues,
|
||||
activeWorkflowId,
|
||||
])
|
||||
|
||||
const subBlockRows = subBlockRowsData.rows
|
||||
const subBlockState = subBlockRowsData.stateToUse
|
||||
const effectiveAdvanced = useMemo(() => {
|
||||
const rawValues = Object.entries(subBlockState).reduce<Record<string, unknown>>(
|
||||
(acc, [key, entry]) => {
|
||||
acc[key] = entry?.value
|
||||
return acc
|
||||
},
|
||||
{}
|
||||
)
|
||||
return userPermissions.canEdit
|
||||
? displayAdvancedMode
|
||||
: displayAdvancedMode || hasAdvancedValues(config.subBlocks, rawValues, canonicalIndex)
|
||||
}, [
|
||||
subBlockState,
|
||||
displayAdvancedMode,
|
||||
config.subBlocks,
|
||||
canonicalIndex,
|
||||
userPermissions.canEdit,
|
||||
])
|
||||
|
||||
/**
|
||||
* Determine if block has content below the header (subblocks or error row).
|
||||
@@ -935,6 +883,7 @@ export const WorkflowBlock = memo(function WorkflowBlock({
|
||||
const showWebhookIndicator = (isStarterBlock || isWebhookTriggerBlock) && isWebhookConfigured
|
||||
const shouldShowScheduleBadge =
|
||||
type === 'schedule' && !isLoadingScheduleInfo && scheduleInfo !== null
|
||||
const userPermissions = useUserPermissionsContext()
|
||||
const isWorkflowSelector = type === 'workflow' || type === 'workflow_input'
|
||||
|
||||
return (
|
||||
@@ -1146,9 +1095,6 @@ export const WorkflowBlock = memo(function WorkflowBlock({
|
||||
workflowId={currentWorkflowId}
|
||||
blockId={id}
|
||||
allSubBlockValues={subBlockState}
|
||||
displayAdvancedOptions={effectiveAdvanced}
|
||||
canonicalIndex={canonicalIndex}
|
||||
canonicalModeOverrides={canonicalModeOverrides}
|
||||
/>
|
||||
)
|
||||
})
|
||||
|
||||
@@ -14,13 +14,6 @@ import { ReactFlowProvider } from 'reactflow'
|
||||
import { Badge, Button, ChevronDown, Code, Combobox, Input, Label } from '@/components/emcn'
|
||||
import { cn } from '@/lib/core/utils/cn'
|
||||
import { extractReferencePrefixes } from '@/lib/workflows/sanitization/references'
|
||||
import {
|
||||
buildCanonicalIndex,
|
||||
evaluateSubBlockCondition,
|
||||
hasAdvancedValues,
|
||||
isSubBlockFeatureEnabled,
|
||||
isSubBlockVisibleForMode,
|
||||
} from '@/lib/workflows/subblocks/visibility'
|
||||
import { SnapshotContextMenu } from '@/app/workspace/[workspaceId]/logs/components/log-details/components/execution-snapshot/components'
|
||||
import { SubBlock } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components'
|
||||
import { useContextMenu } from '@/app/workspace/[workspaceId]/w/components/sidebar/hooks'
|
||||
@@ -31,6 +24,56 @@ import { navigatePath } from '@/executor/variables/resolvers/reference'
|
||||
import { useCodeViewerFeatures } from '@/hooks/use-code-viewer'
|
||||
import type { BlockState, Loop, Parallel } from '@/stores/workflows/workflow/types'
|
||||
|
||||
/**
|
||||
* Evaluate whether a subblock's condition is met based on current values.
|
||||
*/
|
||||
function evaluateCondition(
|
||||
condition: SubBlockConfig['condition'],
|
||||
subBlockValues: Record<string, { value: unknown } | unknown>
|
||||
): boolean {
|
||||
if (!condition) return true
|
||||
|
||||
const actualCondition = typeof condition === 'function' ? condition() : condition
|
||||
|
||||
const fieldValueObj = subBlockValues[actualCondition.field]
|
||||
const fieldValue =
|
||||
fieldValueObj && typeof fieldValueObj === 'object' && 'value' in fieldValueObj
|
||||
? (fieldValueObj as { value: unknown }).value
|
||||
: fieldValueObj
|
||||
|
||||
const conditionValues = Array.isArray(actualCondition.value)
|
||||
? actualCondition.value
|
||||
: [actualCondition.value]
|
||||
|
||||
let isMatch = conditionValues.some((v) => v === fieldValue)
|
||||
|
||||
if (actualCondition.not) {
|
||||
isMatch = !isMatch
|
||||
}
|
||||
|
||||
if (actualCondition.and && isMatch) {
|
||||
const andFieldValueObj = subBlockValues[actualCondition.and.field]
|
||||
const andFieldValue =
|
||||
andFieldValueObj && typeof andFieldValueObj === 'object' && 'value' in andFieldValueObj
|
||||
? (andFieldValueObj as { value: unknown }).value
|
||||
: andFieldValueObj
|
||||
|
||||
const andConditionValues = Array.isArray(actualCondition.and.value)
|
||||
? actualCondition.and.value
|
||||
: [actualCondition.and.value]
|
||||
|
||||
let andMatch = andConditionValues.some((v) => v === andFieldValue)
|
||||
|
||||
if (actualCondition.and.not) {
|
||||
andMatch = !andMatch
|
||||
}
|
||||
|
||||
isMatch = isMatch && andMatch
|
||||
}
|
||||
|
||||
return isMatch
|
||||
}
|
||||
|
||||
/**
|
||||
* Format a value for display as JSON string
|
||||
*/
|
||||
@@ -1079,44 +1122,15 @@ function BlockDetailsSidebarContent({
|
||||
)
|
||||
}
|
||||
|
||||
const rawValues = useMemo(() => {
|
||||
return Object.entries(subBlockValues).reduce<Record<string, unknown>>((acc, [key, entry]) => {
|
||||
if (entry && typeof entry === 'object' && 'value' in entry) {
|
||||
acc[key] = (entry as { value: unknown }).value
|
||||
} else {
|
||||
acc[key] = entry
|
||||
}
|
||||
return acc
|
||||
}, {})
|
||||
}, [subBlockValues])
|
||||
|
||||
const canonicalIndex = useMemo(
|
||||
() => buildCanonicalIndex(blockConfig.subBlocks),
|
||||
[blockConfig.subBlocks]
|
||||
)
|
||||
const canonicalModeOverrides = block.data?.canonicalModes
|
||||
const effectiveAdvanced =
|
||||
(block.advancedMode ?? false) ||
|
||||
hasAdvancedValues(blockConfig.subBlocks, rawValues, canonicalIndex)
|
||||
|
||||
const visibleSubBlocks = blockConfig.subBlocks.filter((subBlock) => {
|
||||
if (subBlock.hidden || subBlock.hideFromPreview) return false
|
||||
// Only filter out trigger-mode subblocks for non-trigger blocks
|
||||
// Trigger-only blocks (category 'triggers') should display their trigger subblocks
|
||||
if (subBlock.mode === 'trigger' && blockConfig.category !== 'triggers') return false
|
||||
if (!isSubBlockFeatureEnabled(subBlock)) return false
|
||||
if (
|
||||
!isSubBlockVisibleForMode(
|
||||
subBlock,
|
||||
effectiveAdvanced,
|
||||
canonicalIndex,
|
||||
rawValues,
|
||||
canonicalModeOverrides
|
||||
)
|
||||
) {
|
||||
return false
|
||||
if (subBlock.condition) {
|
||||
return evaluateCondition(subBlock.condition, subBlockValues)
|
||||
}
|
||||
return evaluateSubBlockCondition(subBlock.condition, rawValues)
|
||||
return true
|
||||
})
|
||||
|
||||
const statusVariant =
|
||||
|
||||
@@ -161,20 +161,6 @@ interface ContextMenuProps {
|
||||
* Set to true when creation is in progress or user lacks permissions
|
||||
*/
|
||||
disableCreateFolder?: boolean
|
||||
/**
|
||||
* Callback when leave is clicked (for workspaces)
|
||||
*/
|
||||
onLeave?: () => void
|
||||
/**
|
||||
* Whether to show the leave option (default: false)
|
||||
* Set to true for workspaces the user can leave
|
||||
*/
|
||||
showLeave?: boolean
|
||||
/**
|
||||
* Whether the leave option is disabled (default: false)
|
||||
* Set to true when user cannot leave (e.g., last admin)
|
||||
*/
|
||||
disableLeave?: boolean
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -212,9 +198,6 @@ export function ContextMenu({
|
||||
disableDelete = false,
|
||||
disableCreate = false,
|
||||
disableCreateFolder = false,
|
||||
onLeave,
|
||||
showLeave = false,
|
||||
disableLeave = false,
|
||||
}: ContextMenuProps) {
|
||||
const [hexInput, setHexInput] = useState(currentColor || '#ffffff')
|
||||
|
||||
@@ -429,20 +412,8 @@ export function ContextMenu({
|
||||
</PopoverItem>
|
||||
)}
|
||||
|
||||
{/* Destructive actions */}
|
||||
{/* Destructive action */}
|
||||
{(hasNavigationSection || hasEditSection || hasCopySection) && <PopoverDivider rootOnly />}
|
||||
{showLeave && onLeave && (
|
||||
<PopoverItem
|
||||
rootOnly
|
||||
disabled={disableLeave}
|
||||
onClick={() => {
|
||||
onLeave()
|
||||
onClose()
|
||||
}}
|
||||
>
|
||||
Leave
|
||||
</PopoverItem>
|
||||
)}
|
||||
<PopoverItem
|
||||
rootOnly
|
||||
disabled={disableDelete}
|
||||
|
||||
@@ -103,14 +103,6 @@ interface WorkspaceHeaderProps {
|
||||
* Whether to show the collapse button
|
||||
*/
|
||||
showCollapseButton?: boolean
|
||||
/**
|
||||
* Callback to leave the workspace
|
||||
*/
|
||||
onLeaveWorkspace?: (workspaceId: string) => Promise<void>
|
||||
/**
|
||||
* Current user's session ID for owner check
|
||||
*/
|
||||
sessionUserId?: string
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -136,8 +128,6 @@ export function WorkspaceHeader({
|
||||
onImportWorkspace,
|
||||
isImportingWorkspace,
|
||||
showCollapseButton = true,
|
||||
onLeaveWorkspace,
|
||||
sessionUserId,
|
||||
}: WorkspaceHeaderProps) {
|
||||
const [isInviteModalOpen, setIsInviteModalOpen] = useState(false)
|
||||
const [isDeleteModalOpen, setIsDeleteModalOpen] = useState(false)
|
||||
@@ -277,16 +267,6 @@ export function WorkspaceHeader({
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Handles leave action from context menu
|
||||
*/
|
||||
const handleLeaveAction = async () => {
|
||||
if (!capturedWorkspaceRef.current || !onLeaveWorkspace) return
|
||||
|
||||
await onLeaveWorkspace(capturedWorkspaceRef.current.id)
|
||||
setIsWorkspaceMenuOpen(false)
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle delete workspace
|
||||
*/
|
||||
@@ -532,8 +512,6 @@ export function WorkspaceHeader({
|
||||
const capturedPermissions = capturedWorkspaceRef.current?.permissions
|
||||
const contextCanEdit = capturedPermissions === 'admin' || capturedPermissions === 'write'
|
||||
const contextCanAdmin = capturedPermissions === 'admin'
|
||||
const capturedWorkspace = workspaces.find((w) => w.id === capturedWorkspaceRef.current?.id)
|
||||
const isOwner = capturedWorkspace && sessionUserId === capturedWorkspace.ownerId
|
||||
|
||||
return (
|
||||
<ContextMenu
|
||||
@@ -545,12 +523,10 @@ export function WorkspaceHeader({
|
||||
onDuplicate={handleDuplicateAction}
|
||||
onExport={handleExportAction}
|
||||
onDelete={handleDeleteAction}
|
||||
onLeave={handleLeaveAction}
|
||||
showRename={true}
|
||||
showDuplicate={true}
|
||||
showExport={true}
|
||||
showLeave={!isOwner && !!onLeaveWorkspace}
|
||||
disableRename={!contextCanAdmin}
|
||||
disableRename={!contextCanEdit}
|
||||
disableDuplicate={!contextCanEdit}
|
||||
disableExport={!contextCanAdmin}
|
||||
disableDelete={!contextCanAdmin}
|
||||
|
||||
@@ -157,7 +157,6 @@ export function Sidebar() {
|
||||
isCreatingWorkspace,
|
||||
updateWorkspaceName,
|
||||
confirmDeleteWorkspace,
|
||||
handleLeaveWorkspace,
|
||||
} = useWorkspaceManagement({
|
||||
workspaceId,
|
||||
sessionUserId: sessionData?.user?.id,
|
||||
@@ -379,17 +378,6 @@ export function Sidebar() {
|
||||
[workspaces, confirmDeleteWorkspace]
|
||||
)
|
||||
|
||||
/** Leaves a workspace */
|
||||
const handleLeaveWorkspaceWrapper = useCallback(
|
||||
async (workspaceIdToLeave: string) => {
|
||||
const workspaceToLeave = workspaces.find((w) => w.id === workspaceIdToLeave)
|
||||
if (workspaceToLeave) {
|
||||
await handleLeaveWorkspace(workspaceToLeave)
|
||||
}
|
||||
},
|
||||
[workspaces, handleLeaveWorkspace]
|
||||
)
|
||||
|
||||
/** Duplicates a workspace */
|
||||
const handleDuplicateWorkspace = useCallback(
|
||||
async (_workspaceIdToDuplicate: string, workspaceName: string) => {
|
||||
@@ -521,8 +509,6 @@ export function Sidebar() {
|
||||
onImportWorkspace={handleImportWorkspace}
|
||||
isImportingWorkspace={isImportingWorkspace}
|
||||
showCollapseButton={isOnWorkflowPage}
|
||||
onLeaveWorkspace={handleLeaveWorkspaceWrapper}
|
||||
sessionUserId={sessionData?.user?.id}
|
||||
/>
|
||||
</div>
|
||||
) : (
|
||||
@@ -556,8 +542,6 @@ export function Sidebar() {
|
||||
onImportWorkspace={handleImportWorkspace}
|
||||
isImportingWorkspace={isImportingWorkspace}
|
||||
showCollapseButton={isOnWorkflowPage}
|
||||
onLeaveWorkspace={handleLeaveWorkspaceWrapper}
|
||||
sessionUserId={sessionData?.user?.id}
|
||||
/>
|
||||
</div>
|
||||
|
||||
|
||||
@@ -5,6 +5,7 @@ import { Cron } from 'croner'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import { v4 as uuidv4 } from 'uuid'
|
||||
import type { ZodRecord, ZodString } from 'zod'
|
||||
import { decryptSecret } from '@/lib/core/security/encryption'
|
||||
import { getPersonalAndWorkspaceEnv } from '@/lib/environment/utils'
|
||||
import { preprocessExecution } from '@/lib/execution/preprocessing'
|
||||
import { LoggingSession } from '@/lib/logs/execution/logging-session'
|
||||
@@ -21,9 +22,12 @@ import {
|
||||
getScheduleTimeValues,
|
||||
getSubBlockValue,
|
||||
} from '@/lib/workflows/schedules/utils'
|
||||
import { REFERENCE } from '@/executor/constants'
|
||||
import { ExecutionSnapshot } from '@/executor/execution/snapshot'
|
||||
import type { ExecutionMetadata } from '@/executor/execution/types'
|
||||
import type { ExecutionResult } from '@/executor/types'
|
||||
import { createEnvVarPattern } from '@/executor/utils/reference-validation'
|
||||
import { mergeSubblockState } from '@/stores/workflows/server-utils'
|
||||
import { MAX_CONSECUTIVE_FAILURES } from '@/triggers/constants'
|
||||
|
||||
const logger = createLogger('TriggerScheduleExecution')
|
||||
@@ -115,6 +119,68 @@ async function determineNextRunAfterError(
|
||||
return new Date(now.getTime() + 24 * 60 * 60 * 1000)
|
||||
}
|
||||
|
||||
async function ensureBlockVariablesResolvable(
|
||||
blocks: Record<string, BlockState>,
|
||||
variables: Record<string, string>,
|
||||
requestId: string
|
||||
) {
|
||||
await Promise.all(
|
||||
Object.values(blocks).map(async (block) => {
|
||||
const subBlocks = block.subBlocks ?? {}
|
||||
await Promise.all(
|
||||
Object.values(subBlocks).map(async (subBlock) => {
|
||||
const value = subBlock.value
|
||||
if (
|
||||
typeof value !== 'string' ||
|
||||
!value.includes(REFERENCE.ENV_VAR_START) ||
|
||||
!value.includes(REFERENCE.ENV_VAR_END)
|
||||
) {
|
||||
return
|
||||
}
|
||||
|
||||
const envVarPattern = createEnvVarPattern()
|
||||
const matches = value.match(envVarPattern)
|
||||
if (!matches) {
|
||||
return
|
||||
}
|
||||
|
||||
for (const match of matches) {
|
||||
const varName = match.slice(
|
||||
REFERENCE.ENV_VAR_START.length,
|
||||
-REFERENCE.ENV_VAR_END.length
|
||||
)
|
||||
const encryptedValue = variables[varName]
|
||||
if (!encryptedValue) {
|
||||
throw new Error(`Environment variable "${varName}" was not found`)
|
||||
}
|
||||
|
||||
try {
|
||||
await decryptSecret(encryptedValue)
|
||||
} catch (error) {
|
||||
logger.error(`[${requestId}] Error decrypting value for variable "${varName}"`, error)
|
||||
|
||||
const message = error instanceof Error ? error.message : 'Unknown error'
|
||||
throw new Error(`Failed to decrypt environment variable "${varName}": ${message}`)
|
||||
}
|
||||
}
|
||||
})
|
||||
)
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
async function ensureEnvVarsDecryptable(variables: Record<string, string>, requestId: string) {
|
||||
for (const [key, encryptedValue] of Object.entries(variables)) {
|
||||
try {
|
||||
await decryptSecret(encryptedValue)
|
||||
} catch (error) {
|
||||
logger.error(`[${requestId}] Failed to decrypt environment variable "${key}"`, error)
|
||||
const message = error instanceof Error ? error.message : 'Unknown error'
|
||||
throw new Error(`Failed to decrypt environment variable "${key}": ${message}`)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async function runWorkflowExecution({
|
||||
payload,
|
||||
workflowRecord,
|
||||
@@ -151,6 +217,8 @@ async function runWorkflowExecution({
|
||||
}
|
||||
}
|
||||
|
||||
const mergedStates = mergeSubblockState(blocks)
|
||||
|
||||
const workspaceId = workflowRecord.workspaceId
|
||||
if (!workspaceId) {
|
||||
throw new Error(`Workflow ${payload.workflowId} has no associated workspace`)
|
||||
@@ -168,6 +236,9 @@ async function runWorkflowExecution({
|
||||
...workspaceEncrypted,
|
||||
})
|
||||
|
||||
await ensureBlockVariablesResolvable(mergedStates, variables, requestId)
|
||||
await ensureEnvVarsDecryptable(variables, requestId)
|
||||
|
||||
const input = {
|
||||
_context: {
|
||||
workflowId: payload.workflowId,
|
||||
@@ -277,7 +348,6 @@ export type ScheduleExecutionPayload = {
|
||||
failedCount?: number
|
||||
now: string
|
||||
scheduledFor?: string
|
||||
preflighted?: boolean
|
||||
}
|
||||
|
||||
function calculateNextRunTime(
|
||||
@@ -337,7 +407,6 @@ export async function executeScheduleJob(payload: ScheduleExecutionPayload) {
|
||||
checkRateLimit: true,
|
||||
checkDeployment: true,
|
||||
loggingSession,
|
||||
preflightEnvVars: !payload.preflighted,
|
||||
})
|
||||
|
||||
if (!preprocessResult.success) {
|
||||
|
||||
@@ -12,11 +12,16 @@ import { WebhookAttachmentProcessor } from '@/lib/webhooks/attachment-processor'
|
||||
import { fetchAndProcessAirtablePayloads, formatWebhookInput } from '@/lib/webhooks/utils.server'
|
||||
import { executeWorkflowCore } from '@/lib/workflows/executor/execution-core'
|
||||
import { PauseResumeManager } from '@/lib/workflows/executor/human-in-the-loop-manager'
|
||||
import { loadDeployedWorkflowState } from '@/lib/workflows/persistence/utils'
|
||||
import {
|
||||
loadDeployedWorkflowState,
|
||||
loadWorkflowFromNormalizedTables,
|
||||
} from '@/lib/workflows/persistence/utils'
|
||||
import { getWorkflowById } from '@/lib/workflows/utils'
|
||||
import { ExecutionSnapshot } from '@/executor/execution/snapshot'
|
||||
import type { ExecutionMetadata } from '@/executor/execution/types'
|
||||
import type { ExecutionResult } from '@/executor/types'
|
||||
import { Serializer } from '@/serializer'
|
||||
import { mergeSubblockState } from '@/stores/workflows/server-utils'
|
||||
import { getTrigger, isTriggerValid } from '@/triggers'
|
||||
|
||||
const logger = createLogger('TriggerWebhookExecution')
|
||||
@@ -87,6 +92,7 @@ export type WebhookExecutionPayload = {
|
||||
headers: Record<string, string>
|
||||
path: string
|
||||
blockId?: string
|
||||
executionTarget?: 'deployed' | 'live'
|
||||
credentialId?: string
|
||||
credentialAccountUserId?: string
|
||||
}
|
||||
@@ -137,16 +143,20 @@ async function executeWebhookJobInternal(
|
||||
let deploymentVersionId: string | undefined
|
||||
|
||||
try {
|
||||
const workflowData = await loadDeployedWorkflowState(payload.workflowId)
|
||||
const useDraftState = payload.executionTarget === 'live'
|
||||
const workflowData = useDraftState
|
||||
? await loadWorkflowFromNormalizedTables(payload.workflowId)
|
||||
: await loadDeployedWorkflowState(payload.workflowId)
|
||||
if (!workflowData) {
|
||||
throw new Error(
|
||||
'Workflow state not found. The workflow may not be deployed or the deployment data may be corrupted.'
|
||||
`Workflow state not found. The workflow may not be ${useDraftState ? 'saved' : 'deployed'} or the deployment data may be corrupted.`
|
||||
)
|
||||
}
|
||||
|
||||
const { blocks, edges, loops, parallels } = workflowData
|
||||
// Only deployed executions have a deployment version ID
|
||||
deploymentVersionId =
|
||||
'deploymentVersionId' in workflowData
|
||||
!useDraftState && 'deploymentVersionId' in workflowData
|
||||
? (workflowData.deploymentVersionId as string)
|
||||
: undefined
|
||||
|
||||
@@ -161,6 +171,19 @@ async function executeWebhookJobInternal(
|
||||
}
|
||||
const workflowVariables = (wfRows[0]?.variables as Record<string, any>) || {}
|
||||
|
||||
// Merge subblock states (matching workflow-execution pattern)
|
||||
const mergedStates = mergeSubblockState(blocks)
|
||||
|
||||
// Create serialized workflow
|
||||
const serializer = new Serializer()
|
||||
const serializedWorkflow = serializer.serializeWorkflow(
|
||||
mergedStates,
|
||||
edges,
|
||||
loops || {},
|
||||
parallels || {},
|
||||
true // Enable validation during execution
|
||||
)
|
||||
|
||||
// Handle special Airtable case
|
||||
if (payload.provider === 'airtable') {
|
||||
logger.info(`[${requestId}] Processing Airtable webhook via fetchAndProcessAirtablePayloads`)
|
||||
@@ -295,6 +318,7 @@ async function executeWebhookJobInternal(
|
||||
variables: {},
|
||||
triggerData: {
|
||||
isTest: false,
|
||||
executionTarget: payload.executionTarget || 'deployed',
|
||||
},
|
||||
deploymentVersionId,
|
||||
})
|
||||
@@ -352,6 +376,7 @@ async function executeWebhookJobInternal(
|
||||
variables: {},
|
||||
triggerData: {
|
||||
isTest: false,
|
||||
executionTarget: payload.executionTarget || 'deployed',
|
||||
},
|
||||
deploymentVersionId,
|
||||
})
|
||||
@@ -570,6 +595,7 @@ async function executeWebhookJobInternal(
|
||||
variables: {},
|
||||
triggerData: {
|
||||
isTest: false,
|
||||
executionTarget: payload.executionTarget || 'deployed',
|
||||
},
|
||||
deploymentVersionId,
|
||||
})
|
||||
|
||||
@@ -20,7 +20,6 @@ export type WorkflowExecutionPayload = {
|
||||
input?: any
|
||||
triggerType?: CoreTriggerType
|
||||
metadata?: Record<string, any>
|
||||
preflighted?: boolean
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -52,7 +51,6 @@ export async function executeWorkflowJob(payload: WorkflowExecutionPayload) {
|
||||
checkRateLimit: true,
|
||||
checkDeployment: true,
|
||||
loggingSession: loggingSession,
|
||||
preflightEnvVars: !payload.preflighted,
|
||||
})
|
||||
|
||||
if (!preprocessResult.success) {
|
||||
|
||||
@@ -3,7 +3,6 @@ import { member, permissionGroup, permissionGroupMember } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import { isOrganizationOnEnterprisePlan } from '@/lib/billing'
|
||||
import { isAccessControlEnabled, isHosted } from '@/lib/core/config/feature-flags'
|
||||
import {
|
||||
type PermissionGroupConfig,
|
||||
parsePermissionGroupConfig,
|
||||
@@ -53,10 +52,6 @@ export class InvitationsNotAllowedError extends Error {
|
||||
export async function getUserPermissionConfig(
|
||||
userId: string
|
||||
): Promise<PermissionGroupConfig | null> {
|
||||
if (!isHosted && !isAccessControlEnabled) {
|
||||
return null
|
||||
}
|
||||
|
||||
const [membership] = await db
|
||||
.select({ organizationId: member.organizationId })
|
||||
.from(member)
|
||||
|
||||
@@ -19,85 +19,6 @@ export function createEnvVarPattern(): RegExp {
|
||||
return new RegExp(`\\${REFERENCE.ENV_VAR_START}([^}]+)\\${REFERENCE.ENV_VAR_END}`, 'g')
|
||||
}
|
||||
|
||||
export interface EnvVarResolveOptions {
|
||||
allowEmbedded?: boolean
|
||||
resolveExactMatch?: boolean
|
||||
trimKeys?: boolean
|
||||
onMissing?: 'keep' | 'throw' | 'empty'
|
||||
deep?: boolean
|
||||
missingKeys?: string[]
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolve {{ENV_VAR}} references in values using provided env vars.
|
||||
*/
|
||||
export function resolveEnvVarReferences(
|
||||
value: unknown,
|
||||
envVars: Record<string, string>,
|
||||
options: EnvVarResolveOptions = {}
|
||||
): unknown {
|
||||
const {
|
||||
allowEmbedded = true,
|
||||
resolveExactMatch = true,
|
||||
trimKeys = false,
|
||||
onMissing = 'keep',
|
||||
deep = true,
|
||||
} = options
|
||||
|
||||
if (typeof value === 'string') {
|
||||
if (resolveExactMatch) {
|
||||
const exactMatchPattern = new RegExp(
|
||||
`^\\${REFERENCE.ENV_VAR_START}([^}]+)\\${REFERENCE.ENV_VAR_END}$`
|
||||
)
|
||||
const exactMatch = exactMatchPattern.exec(value)
|
||||
if (exactMatch) {
|
||||
const envKey = trimKeys ? exactMatch[1].trim() : exactMatch[1]
|
||||
const envValue = envVars[envKey]
|
||||
if (envValue !== undefined) return envValue
|
||||
if (options.missingKeys) options.missingKeys.push(envKey)
|
||||
if (onMissing === 'throw') {
|
||||
throw new Error(`Environment variable "${envKey}" was not found`)
|
||||
}
|
||||
if (onMissing === 'empty') {
|
||||
return ''
|
||||
}
|
||||
return value
|
||||
}
|
||||
}
|
||||
|
||||
if (!allowEmbedded) return value
|
||||
|
||||
const envVarPattern = createEnvVarPattern()
|
||||
return value.replace(envVarPattern, (match, varName) => {
|
||||
const envKey = trimKeys ? String(varName).trim() : String(varName)
|
||||
const envValue = envVars[envKey]
|
||||
if (envValue !== undefined) return envValue
|
||||
if (options.missingKeys) options.missingKeys.push(envKey)
|
||||
if (onMissing === 'throw') {
|
||||
throw new Error(`Environment variable "${envKey}" was not found`)
|
||||
}
|
||||
if (onMissing === 'empty') {
|
||||
return ''
|
||||
}
|
||||
return match
|
||||
})
|
||||
}
|
||||
|
||||
if (deep && Array.isArray(value)) {
|
||||
return value.map((item) => resolveEnvVarReferences(item, envVars, options))
|
||||
}
|
||||
|
||||
if (deep && value !== null && typeof value === 'object') {
|
||||
const resolved: Record<string, any> = {}
|
||||
for (const [key, val] of Object.entries(value)) {
|
||||
resolved[key] = resolveEnvVarReferences(val, envVars, options)
|
||||
}
|
||||
return resolved
|
||||
}
|
||||
|
||||
return value
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a regex pattern for matching workflow variables <variable.name>
|
||||
* Captures the variable name (after "variable.") in group 1
|
||||
|
||||
@@ -126,14 +126,16 @@ describe('BlockResolver', () => {
|
||||
expect(resolver.resolve('<source.items.1.id>', ctx)).toBe(2)
|
||||
})
|
||||
|
||||
it.concurrent('should return undefined for non-existent path', () => {
|
||||
it.concurrent('should throw error for non-existent path', () => {
|
||||
const workflow = createTestWorkflow([{ id: 'source' }])
|
||||
const resolver = new BlockResolver(workflow)
|
||||
const ctx = createTestContext('current', {
|
||||
source: { existing: 'value' },
|
||||
})
|
||||
|
||||
expect(resolver.resolve('<source.nonexistent>', ctx)).toBeUndefined()
|
||||
expect(() => resolver.resolve('<source.nonexistent>', ctx)).toThrow(
|
||||
/No value found at path "nonexistent" in block "source"/
|
||||
)
|
||||
})
|
||||
|
||||
it.concurrent('should return undefined for non-existent block', () => {
|
||||
@@ -969,17 +971,19 @@ describe('BlockResolver', () => {
|
||||
source: { value: undefined, other: 'exists' },
|
||||
})
|
||||
|
||||
expect(resolver.resolve('<source.value>', ctx)).toBeUndefined()
|
||||
expect(() => resolver.resolve('<source.value>', ctx)).toThrow()
|
||||
})
|
||||
|
||||
it.concurrent('should return undefined for deeply nested non-existent path', () => {
|
||||
it.concurrent('should handle deeply nested path errors', () => {
|
||||
const workflow = createTestWorkflow([{ id: 'source' }])
|
||||
const resolver = new BlockResolver(workflow)
|
||||
const ctx = createTestContext('current', {
|
||||
source: { level1: { level2: {} } },
|
||||
})
|
||||
|
||||
expect(resolver.resolve('<source.level1.level2.level3>', ctx)).toBeUndefined()
|
||||
expect(() => resolver.resolve('<source.level1.level2.level3>', ctx)).toThrow(
|
||||
/No value found at path "level1.level2.level3"/
|
||||
)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
@@ -108,7 +108,11 @@ export class BlockResolver implements Resolver {
|
||||
}
|
||||
}
|
||||
|
||||
return undefined
|
||||
// If still undefined, throw error with original path
|
||||
const availableKeys = output && typeof output === 'object' ? Object.keys(output) : []
|
||||
throw new Error(
|
||||
`No value found at path "${pathParts.join('.')}" in block "${blockName}". Available fields: ${availableKeys.join(', ')}`
|
||||
)
|
||||
}
|
||||
|
||||
private getBlockOutput(blockId: string, context: ResolutionContext): any {
|
||||
|
||||
@@ -203,13 +203,6 @@ export function useCollaborativeWorkflow() {
|
||||
case BLOCK_OPERATIONS.UPDATE_ADVANCED_MODE:
|
||||
workflowStore.setBlockAdvancedMode(payload.id, payload.advancedMode)
|
||||
break
|
||||
case BLOCK_OPERATIONS.UPDATE_CANONICAL_MODE:
|
||||
workflowStore.setBlockCanonicalMode(
|
||||
payload.id,
|
||||
payload.canonicalId,
|
||||
payload.canonicalMode
|
||||
)
|
||||
break
|
||||
}
|
||||
} else if (target === OPERATION_TARGETS.BLOCKS) {
|
||||
switch (operation) {
|
||||
@@ -925,26 +918,16 @@ export function useCollaborativeWorkflow() {
|
||||
|
||||
const collaborativeToggleBlockAdvancedMode = useCallback(
|
||||
(id: string) => {
|
||||
const block = workflowStore.blocks[id]
|
||||
if (!block) return
|
||||
const newAdvancedMode = !block.advancedMode
|
||||
const currentBlock = workflowStore.blocks[id]
|
||||
if (!currentBlock) return
|
||||
|
||||
const newAdvancedMode = !currentBlock.advancedMode
|
||||
|
||||
executeQueuedOperation(
|
||||
BLOCK_OPERATIONS.UPDATE_ADVANCED_MODE,
|
||||
OPERATION_TARGETS.BLOCK,
|
||||
{ id, advancedMode: newAdvancedMode },
|
||||
() => workflowStore.setBlockAdvancedMode(id, newAdvancedMode)
|
||||
)
|
||||
},
|
||||
[executeQueuedOperation, workflowStore]
|
||||
)
|
||||
|
||||
const collaborativeSetBlockCanonicalMode = useCallback(
|
||||
(id: string, canonicalId: string, canonicalMode: 'basic' | 'advanced') => {
|
||||
executeQueuedOperation(
|
||||
BLOCK_OPERATIONS.UPDATE_CANONICAL_MODE,
|
||||
OPERATION_TARGETS.BLOCK,
|
||||
{ id, canonicalId, canonicalMode },
|
||||
() => workflowStore.setBlockCanonicalMode(id, canonicalId, canonicalMode)
|
||||
() => workflowStore.toggleBlockAdvancedMode(id)
|
||||
)
|
||||
},
|
||||
[executeQueuedOperation, workflowStore]
|
||||
@@ -1624,7 +1607,6 @@ export function useCollaborativeWorkflow() {
|
||||
collaborativeBatchToggleBlockEnabled,
|
||||
collaborativeBatchUpdateParent,
|
||||
collaborativeToggleBlockAdvancedMode,
|
||||
collaborativeSetBlockCanonicalMode,
|
||||
collaborativeBatchToggleBlockHandles,
|
||||
collaborativeBatchAddBlocks,
|
||||
collaborativeBatchRemoveBlocks,
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import { useMemo } from 'react'
|
||||
import { getEnv, isTruthy } from '@/lib/core/config/env'
|
||||
import { isAccessControlEnabled, isHosted } from '@/lib/core/config/feature-flags'
|
||||
import {
|
||||
DEFAULT_PERMISSION_GROUP_CONFIG,
|
||||
type PermissionGroupConfig,
|
||||
@@ -20,23 +19,19 @@ export interface PermissionConfigResult {
|
||||
}
|
||||
|
||||
export function usePermissionConfig(): PermissionConfigResult {
|
||||
const accessControlDisabled = !isHosted && !isAccessControlEnabled
|
||||
const { data: organizationsData } = useOrganizations()
|
||||
const activeOrganization = organizationsData?.activeOrganization
|
||||
|
||||
const { data: permissionData, isLoading } = useUserPermissionConfig(activeOrganization?.id)
|
||||
|
||||
const config = useMemo(() => {
|
||||
if (accessControlDisabled) {
|
||||
return DEFAULT_PERMISSION_GROUP_CONFIG
|
||||
}
|
||||
if (!permissionData?.config) {
|
||||
return DEFAULT_PERMISSION_GROUP_CONFIG
|
||||
}
|
||||
return permissionData.config
|
||||
}, [permissionData, accessControlDisabled])
|
||||
}, [permissionData])
|
||||
|
||||
const isInPermissionGroup = !accessControlDisabled && !!permissionData?.permissionGroupId
|
||||
const isInPermissionGroup = !!permissionData?.permissionGroupId
|
||||
|
||||
const isBlockAllowed = useMemo(() => {
|
||||
return (blockType: string) => {
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import type { CopilotMode, CopilotModelId, CopilotTransportMode } from '@/lib/copilot/models'
|
||||
|
||||
const logger = createLogger('CopilotAPI')
|
||||
|
||||
@@ -28,8 +27,8 @@ export interface CopilotMessage {
|
||||
* Chat config stored in database
|
||||
*/
|
||||
export interface CopilotChatConfig {
|
||||
mode?: CopilotMode
|
||||
model?: CopilotModelId
|
||||
mode?: 'ask' | 'build' | 'plan'
|
||||
model?: string
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -66,8 +65,30 @@ export interface SendMessageRequest {
|
||||
userMessageId?: string // ID from frontend for the user message
|
||||
chatId?: string
|
||||
workflowId?: string
|
||||
mode?: CopilotMode | CopilotTransportMode
|
||||
model?: CopilotModelId
|
||||
mode?: 'ask' | 'agent' | 'plan'
|
||||
model?:
|
||||
| 'gpt-5-fast'
|
||||
| 'gpt-5'
|
||||
| 'gpt-5-medium'
|
||||
| 'gpt-5-high'
|
||||
| 'gpt-5.1-fast'
|
||||
| 'gpt-5.1'
|
||||
| 'gpt-5.1-medium'
|
||||
| 'gpt-5.1-high'
|
||||
| 'gpt-5-codex'
|
||||
| 'gpt-5.1-codex'
|
||||
| 'gpt-5.2'
|
||||
| 'gpt-5.2-codex'
|
||||
| 'gpt-5.2-pro'
|
||||
| 'gpt-4o'
|
||||
| 'gpt-4.1'
|
||||
| 'o3'
|
||||
| 'claude-4-sonnet'
|
||||
| 'claude-4.5-haiku'
|
||||
| 'claude-4.5-sonnet'
|
||||
| 'claude-4.5-opus'
|
||||
| 'claude-4.1-opus'
|
||||
| 'gemini-3-pro'
|
||||
prefetch?: boolean
|
||||
createNewChat?: boolean
|
||||
stream?: boolean
|
||||
|
||||
@@ -1,35 +0,0 @@
|
||||
export const COPILOT_MODEL_IDS = [
|
||||
'gpt-5-fast',
|
||||
'gpt-5',
|
||||
'gpt-5-medium',
|
||||
'gpt-5-high',
|
||||
'gpt-5.1-fast',
|
||||
'gpt-5.1',
|
||||
'gpt-5.1-medium',
|
||||
'gpt-5.1-high',
|
||||
'gpt-5-codex',
|
||||
'gpt-5.1-codex',
|
||||
'gpt-5.2',
|
||||
'gpt-5.2-codex',
|
||||
'gpt-5.2-pro',
|
||||
'gpt-4o',
|
||||
'gpt-4.1',
|
||||
'o3',
|
||||
'claude-4-sonnet',
|
||||
'claude-4.5-haiku',
|
||||
'claude-4.5-sonnet',
|
||||
'claude-4.5-opus',
|
||||
'claude-4.1-opus',
|
||||
'gemini-3-pro',
|
||||
] as const
|
||||
|
||||
export type CopilotModelId = (typeof COPILOT_MODEL_IDS)[number]
|
||||
|
||||
export const COPILOT_MODES = ['ask', 'build', 'plan'] as const
|
||||
export type CopilotMode = (typeof COPILOT_MODES)[number]
|
||||
|
||||
export const COPILOT_TRANSPORT_MODES = ['ask', 'agent', 'plan'] as const
|
||||
export type CopilotTransportMode = (typeof COPILOT_TRANSPORT_MODES)[number]
|
||||
|
||||
export const COPILOT_REQUEST_MODES = ['ask', 'build', 'plan', 'agent'] as const
|
||||
export type CopilotRequestMode = (typeof COPILOT_REQUEST_MODES)[number]
|
||||
@@ -25,41 +25,36 @@ export class GetBlockOptionsClientTool extends BaseClientTool {
|
||||
|
||||
static readonly metadata: BaseClientToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: { text: 'Getting block operations', icon: Loader2 },
|
||||
[ClientToolCallState.pending]: { text: 'Getting block operations', icon: Loader2 },
|
||||
[ClientToolCallState.executing]: { text: 'Getting block operations', icon: Loader2 },
|
||||
[ClientToolCallState.success]: { text: 'Retrieved block operations', icon: ListFilter },
|
||||
[ClientToolCallState.error]: { text: 'Failed to get block operations', icon: XCircle },
|
||||
[ClientToolCallState.aborted]: { text: 'Aborted getting block operations', icon: XCircle },
|
||||
[ClientToolCallState.generating]: { text: 'Getting block options', icon: Loader2 },
|
||||
[ClientToolCallState.pending]: { text: 'Getting block options', icon: Loader2 },
|
||||
[ClientToolCallState.executing]: { text: 'Getting block options', icon: Loader2 },
|
||||
[ClientToolCallState.success]: { text: 'Retrieved block options', icon: ListFilter },
|
||||
[ClientToolCallState.error]: { text: 'Failed to get block options', icon: XCircle },
|
||||
[ClientToolCallState.aborted]: { text: 'Aborted getting block options', icon: XCircle },
|
||||
[ClientToolCallState.rejected]: {
|
||||
text: 'Skipped getting block operations',
|
||||
text: 'Skipped getting block options',
|
||||
icon: MinusCircle,
|
||||
},
|
||||
},
|
||||
getDynamicText: (params, state) => {
|
||||
const blockId =
|
||||
(params as any)?.blockId ||
|
||||
(params as any)?.blockType ||
|
||||
(params as any)?.block_id ||
|
||||
(params as any)?.block_type
|
||||
if (typeof blockId === 'string') {
|
||||
if (params?.blockId && typeof params.blockId === 'string') {
|
||||
// Look up the block config to get the human-readable name
|
||||
const blockConfig = getBlock(blockId)
|
||||
const blockName = (blockConfig?.name ?? blockId.replace(/_/g, ' ')).toLowerCase()
|
||||
const blockConfig = getBlock(params.blockId)
|
||||
const blockName = (blockConfig?.name ?? params.blockId.replace(/_/g, ' ')).toLowerCase()
|
||||
|
||||
switch (state) {
|
||||
case ClientToolCallState.success:
|
||||
return `Retrieved ${blockName} operations`
|
||||
return `Retrieved ${blockName} options`
|
||||
case ClientToolCallState.executing:
|
||||
case ClientToolCallState.generating:
|
||||
case ClientToolCallState.pending:
|
||||
return `Retrieving ${blockName} operations`
|
||||
return `Retrieving ${blockName} options`
|
||||
case ClientToolCallState.error:
|
||||
return `Failed to retrieve ${blockName} operations`
|
||||
return `Failed to retrieve ${blockName} options`
|
||||
case ClientToolCallState.aborted:
|
||||
return `Aborted retrieving ${blockName} operations`
|
||||
return `Aborted retrieving ${blockName} options`
|
||||
case ClientToolCallState.rejected:
|
||||
return `Skipped retrieving ${blockName} operations`
|
||||
return `Skipped retrieving ${blockName} options`
|
||||
}
|
||||
}
|
||||
return undefined
|
||||
|
||||
@@ -28,7 +28,6 @@ import './workflow/deploy-api'
|
||||
import './workflow/deploy-chat'
|
||||
import './workflow/deploy-mcp'
|
||||
import './workflow/edit-workflow'
|
||||
import './workflow/redeploy'
|
||||
import './workflow/run-workflow'
|
||||
import './workflow/set-global-workflow-variables'
|
||||
|
||||
|
||||
@@ -15,8 +15,6 @@ interface ApiDeploymentDetails {
|
||||
isDeployed: boolean
|
||||
deployedAt: string | null
|
||||
endpoint: string | null
|
||||
apiKey: string | null
|
||||
needsRedeployment: boolean
|
||||
}
|
||||
|
||||
interface ChatDeploymentDetails {
|
||||
@@ -24,14 +22,6 @@ interface ChatDeploymentDetails {
|
||||
chatId: string | null
|
||||
identifier: string | null
|
||||
chatUrl: string | null
|
||||
title: string | null
|
||||
description: string | null
|
||||
authType: string | null
|
||||
allowedEmails: string[] | null
|
||||
outputConfigs: Array<{ blockId: string; path: string }> | null
|
||||
welcomeMessage: string | null
|
||||
primaryColor: string | null
|
||||
hasPassword: boolean
|
||||
}
|
||||
|
||||
interface McpDeploymentDetails {
|
||||
@@ -41,8 +31,6 @@ interface McpDeploymentDetails {
|
||||
serverName: string
|
||||
toolName: string
|
||||
toolDescription: string | null
|
||||
parameterSchema?: Record<string, unknown> | null
|
||||
toolId?: string | null
|
||||
}>
|
||||
}
|
||||
|
||||
@@ -108,8 +96,6 @@ export class CheckDeploymentStatusClientTool extends BaseClientTool {
|
||||
isDeployed: isApiDeployed,
|
||||
deployedAt: apiDeploy?.deployedAt || null,
|
||||
endpoint: isApiDeployed ? `${appUrl}/api/workflows/${workflowId}/execute` : null,
|
||||
apiKey: apiDeploy?.apiKey || null,
|
||||
needsRedeployment: apiDeploy?.needsRedeployment === true,
|
||||
}
|
||||
|
||||
// Chat deployment details
|
||||
@@ -119,18 +105,6 @@ export class CheckDeploymentStatusClientTool extends BaseClientTool {
|
||||
chatId: chatDeploy?.deployment?.id || null,
|
||||
identifier: chatDeploy?.deployment?.identifier || null,
|
||||
chatUrl: isChatDeployed ? `${appUrl}/chat/${chatDeploy?.deployment?.identifier}` : null,
|
||||
title: chatDeploy?.deployment?.title || null,
|
||||
description: chatDeploy?.deployment?.description || null,
|
||||
authType: chatDeploy?.deployment?.authType || null,
|
||||
allowedEmails: Array.isArray(chatDeploy?.deployment?.allowedEmails)
|
||||
? chatDeploy?.deployment?.allowedEmails
|
||||
: null,
|
||||
outputConfigs: Array.isArray(chatDeploy?.deployment?.outputConfigs)
|
||||
? chatDeploy?.deployment?.outputConfigs
|
||||
: null,
|
||||
welcomeMessage: chatDeploy?.deployment?.customizations?.welcomeMessage || null,
|
||||
primaryColor: chatDeploy?.deployment?.customizations?.primaryColor || null,
|
||||
hasPassword: chatDeploy?.deployment?.hasPassword === true,
|
||||
}
|
||||
|
||||
// MCP deployment details - find servers that have this workflow as a tool
|
||||
@@ -155,8 +129,6 @@ export class CheckDeploymentStatusClientTool extends BaseClientTool {
|
||||
serverName: server.name,
|
||||
toolName: tool.toolName,
|
||||
toolDescription: tool.toolDescription,
|
||||
parameterSchema: tool.parameterSchema ?? null,
|
||||
toolId: tool.id ?? null,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -208,70 +208,54 @@ export class DeployChatClientTool extends BaseClientTool {
|
||||
return
|
||||
}
|
||||
|
||||
this.setState(ClientToolCallState.executing)
|
||||
|
||||
const statusRes = await fetch(`/api/workflows/${workflowId}/chat/status`)
|
||||
const statusJson = statusRes.ok ? await statusRes.json() : null
|
||||
const existingDeployment = statusJson?.deployment || null
|
||||
|
||||
const baseIdentifier =
|
||||
existingDeployment?.identifier || this.generateIdentifier(workflow?.name || 'chat')
|
||||
const baseTitle = existingDeployment?.title || workflow?.name || 'Chat'
|
||||
const baseDescription = existingDeployment?.description || ''
|
||||
const baseAuthType = existingDeployment?.authType || 'public'
|
||||
const baseWelcomeMessage =
|
||||
existingDeployment?.customizations?.welcomeMessage || 'Hi there! How can I help you today?'
|
||||
const basePrimaryColor =
|
||||
existingDeployment?.customizations?.primaryColor || 'var(--brand-primary-hover-hex)'
|
||||
const baseAllowedEmails = Array.isArray(existingDeployment?.allowedEmails)
|
||||
? existingDeployment.allowedEmails
|
||||
: []
|
||||
const baseOutputConfigs = Array.isArray(existingDeployment?.outputConfigs)
|
||||
? existingDeployment.outputConfigs
|
||||
: []
|
||||
|
||||
const identifier = args?.identifier || baseIdentifier
|
||||
const title = args?.title || baseTitle
|
||||
const description = args?.description ?? baseDescription
|
||||
const authType = args?.authType || baseAuthType
|
||||
const welcomeMessage = args?.welcomeMessage || baseWelcomeMessage
|
||||
const outputConfigs = args?.outputConfigs || baseOutputConfigs
|
||||
const allowedEmails = args?.allowedEmails || baseAllowedEmails
|
||||
const primaryColor = basePrimaryColor
|
||||
|
||||
if (!identifier || !title) {
|
||||
throw new Error('Chat identifier and title are required')
|
||||
// Deploy action - validate required fields
|
||||
if (!args?.identifier && !workflow?.name) {
|
||||
throw new Error('Either identifier or workflow name is required')
|
||||
}
|
||||
|
||||
if (authType === 'password' && !args?.password && !existingDeployment?.hasPassword) {
|
||||
if (!args?.title && !workflow?.name) {
|
||||
throw new Error('Chat title is required')
|
||||
}
|
||||
|
||||
const identifier = args?.identifier || this.generateIdentifier(workflow?.name || 'chat')
|
||||
const title = args?.title || workflow?.name || 'Chat'
|
||||
const description = args?.description || ''
|
||||
const authType = args?.authType || 'public'
|
||||
const welcomeMessage = args?.welcomeMessage || 'Hi there! How can I help you today?'
|
||||
|
||||
// Validate auth-specific requirements
|
||||
if (authType === 'password' && !args?.password) {
|
||||
throw new Error('Password is required when using password protection')
|
||||
}
|
||||
|
||||
if ((authType === 'email' || authType === 'sso') && allowedEmails.length === 0) {
|
||||
if (
|
||||
(authType === 'email' || authType === 'sso') &&
|
||||
(!args?.allowedEmails || args.allowedEmails.length === 0)
|
||||
) {
|
||||
throw new Error(`At least one email or domain is required when using ${authType} access`)
|
||||
}
|
||||
|
||||
this.setState(ClientToolCallState.executing)
|
||||
|
||||
const outputConfigs = args?.outputConfigs || []
|
||||
|
||||
const payload = {
|
||||
workflowId,
|
||||
identifier: identifier.trim(),
|
||||
title: title.trim(),
|
||||
description: description.trim(),
|
||||
customizations: {
|
||||
primaryColor,
|
||||
primaryColor: 'var(--brand-primary-hover-hex)',
|
||||
welcomeMessage: welcomeMessage.trim(),
|
||||
},
|
||||
authType,
|
||||
password: authType === 'password' ? args?.password : undefined,
|
||||
allowedEmails: authType === 'email' || authType === 'sso' ? allowedEmails : [],
|
||||
allowedEmails: authType === 'email' || authType === 'sso' ? args?.allowedEmails : [],
|
||||
outputConfigs,
|
||||
}
|
||||
|
||||
const isUpdating = Boolean(existingDeployment?.id)
|
||||
const endpoint = isUpdating ? `/api/chat/manage/${existingDeployment.id}` : '/api/chat'
|
||||
const method = isUpdating ? 'PATCH' : 'POST'
|
||||
|
||||
const res = await fetch(endpoint, {
|
||||
method,
|
||||
const res = await fetch('/api/chat', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify(payload),
|
||||
})
|
||||
|
||||
@@ -128,6 +128,7 @@ export class DeployMcpClientTool extends BaseClientTool {
|
||||
|
||||
this.setState(ClientToolCallState.executing)
|
||||
|
||||
// Build parameter schema with descriptions if provided
|
||||
let parameterSchema: Record<string, unknown> | undefined
|
||||
if (args?.parameterDescriptions && args.parameterDescriptions.length > 0) {
|
||||
const properties: Record<string, { description: string }> = {}
|
||||
@@ -154,49 +155,9 @@ export class DeployMcpClientTool extends BaseClientTool {
|
||||
const data = await res.json()
|
||||
|
||||
if (!res.ok) {
|
||||
// Handle specific error cases
|
||||
if (data.error?.includes('already added')) {
|
||||
const toolsRes = await fetch(
|
||||
`/api/mcp/workflow-servers/${args.serverId}/tools?workspaceId=${workspaceId}`
|
||||
)
|
||||
const toolsJson = toolsRes.ok ? await toolsRes.json() : null
|
||||
const tools = toolsJson?.data?.tools || []
|
||||
const existingTool = tools.find((tool: any) => tool.workflowId === workflowId)
|
||||
if (!existingTool?.id) {
|
||||
throw new Error('This workflow is already deployed to this MCP server')
|
||||
}
|
||||
const patchRes = await fetch(
|
||||
`/api/mcp/workflow-servers/${args.serverId}/tools/${existingTool.id}?workspaceId=${workspaceId}`,
|
||||
{
|
||||
method: 'PATCH',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
toolName: args.toolName?.trim(),
|
||||
toolDescription: args.toolDescription?.trim(),
|
||||
parameterSchema,
|
||||
}),
|
||||
}
|
||||
)
|
||||
const patchJson = patchRes.ok ? await patchRes.json() : null
|
||||
if (!patchRes.ok) {
|
||||
const patchError = patchJson?.error || `Failed to update MCP tool (${patchRes.status})`
|
||||
throw new Error(patchError)
|
||||
}
|
||||
const updatedTool = patchJson?.data?.tool
|
||||
this.setState(ClientToolCallState.success)
|
||||
await this.markToolComplete(
|
||||
200,
|
||||
`Workflow MCP tool updated to "${updatedTool?.toolName || existingTool.toolName}".`,
|
||||
{
|
||||
success: true,
|
||||
toolId: updatedTool?.id || existingTool.id,
|
||||
toolName: updatedTool?.toolName || existingTool.toolName,
|
||||
toolDescription: updatedTool?.toolDescription || existingTool.toolDescription,
|
||||
serverId: args.serverId,
|
||||
updated: true,
|
||||
}
|
||||
)
|
||||
logger.info('Updated workflow MCP tool', { toolId: existingTool.id })
|
||||
return
|
||||
throw new Error('This workflow is already deployed to this MCP server')
|
||||
}
|
||||
if (data.error?.includes('not deployed')) {
|
||||
throw new Error('Workflow must be deployed before adding as an MCP tool')
|
||||
|
||||
@@ -38,18 +38,6 @@ export class EditWorkflowClientTool extends BaseClientTool {
|
||||
super(toolCallId, EditWorkflowClientTool.id, EditWorkflowClientTool.metadata)
|
||||
}
|
||||
|
||||
async markToolComplete(status: number, message?: any, data?: any): Promise<boolean> {
|
||||
const logger = createLogger('EditWorkflowClientTool')
|
||||
logger.info('markToolComplete payload', {
|
||||
toolCallId: this.toolCallId,
|
||||
toolName: this.name,
|
||||
status,
|
||||
message,
|
||||
data,
|
||||
})
|
||||
return super.markToolComplete(status, message, data)
|
||||
}
|
||||
|
||||
/**
|
||||
* Get sanitized workflow JSON from a workflow state, merge subblocks, and sanitize for copilot
|
||||
* This matches what get_user_workflow returns
|
||||
@@ -185,13 +173,21 @@ export class EditWorkflowClientTool extends BaseClientTool {
|
||||
async execute(args?: EditWorkflowArgs): Promise<void> {
|
||||
const logger = createLogger('EditWorkflowClientTool')
|
||||
|
||||
if (this.hasExecuted) {
|
||||
logger.info('execute skipped (already executed)', { toolCallId: this.toolCallId })
|
||||
return
|
||||
}
|
||||
|
||||
// Use timeout protection to ensure tool always completes
|
||||
await this.executeWithTimeout(async () => {
|
||||
if (this.hasExecuted) {
|
||||
logger.info('execute skipped (already executed)', { toolCallId: this.toolCallId })
|
||||
// Even if skipped, ensure we mark complete with current workflow state
|
||||
if (!this.hasBeenMarkedComplete()) {
|
||||
const currentWorkflowJson = this.getCurrentWorkflowJsonSafe(logger)
|
||||
await this.markToolComplete(
|
||||
200,
|
||||
'Tool already executed',
|
||||
currentWorkflowJson ? { userWorkflow: currentWorkflowJson } : undefined
|
||||
)
|
||||
}
|
||||
return
|
||||
}
|
||||
this.hasExecuted = true
|
||||
logger.info('execute called', { toolCallId: this.toolCallId, argsProvided: !!args })
|
||||
this.setState(ClientToolCallState.executing)
|
||||
|
||||
@@ -1,71 +0,0 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { Loader2, Rocket, XCircle } from 'lucide-react'
|
||||
import {
|
||||
BaseClientTool,
|
||||
type BaseClientToolMetadata,
|
||||
ClientToolCallState,
|
||||
} from '@/lib/copilot/tools/client/base-tool'
|
||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||
|
||||
export class RedeployClientTool extends BaseClientTool {
|
||||
static readonly id = 'redeploy'
|
||||
private hasExecuted = false
|
||||
|
||||
constructor(toolCallId: string) {
|
||||
super(toolCallId, RedeployClientTool.id, RedeployClientTool.metadata)
|
||||
}
|
||||
|
||||
static readonly metadata: BaseClientToolMetadata = {
|
||||
displayNames: {
|
||||
[ClientToolCallState.generating]: { text: 'Redeploying workflow', icon: Loader2 },
|
||||
[ClientToolCallState.pending]: { text: 'Redeploy workflow', icon: Loader2 },
|
||||
[ClientToolCallState.executing]: { text: 'Redeploying workflow', icon: Loader2 },
|
||||
[ClientToolCallState.success]: { text: 'Redeployed workflow', icon: Rocket },
|
||||
[ClientToolCallState.error]: { text: 'Failed to redeploy workflow', icon: XCircle },
|
||||
[ClientToolCallState.aborted]: { text: 'Aborted redeploy', icon: XCircle },
|
||||
[ClientToolCallState.rejected]: { text: 'Skipped redeploy', icon: XCircle },
|
||||
},
|
||||
interrupt: undefined,
|
||||
}
|
||||
|
||||
async execute(): Promise<void> {
|
||||
const logger = createLogger('RedeployClientTool')
|
||||
try {
|
||||
if (this.hasExecuted) {
|
||||
logger.info('execute skipped (already executed)', { toolCallId: this.toolCallId })
|
||||
return
|
||||
}
|
||||
this.hasExecuted = true
|
||||
|
||||
this.setState(ClientToolCallState.executing)
|
||||
|
||||
const { activeWorkflowId } = useWorkflowRegistry.getState()
|
||||
if (!activeWorkflowId) {
|
||||
throw new Error('No workflow ID provided')
|
||||
}
|
||||
|
||||
const res = await fetch(`/api/workflows/${activeWorkflowId}/deploy`, {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ deployChatEnabled: false }),
|
||||
})
|
||||
|
||||
const json = await res.json().catch(() => ({}))
|
||||
if (!res.ok) {
|
||||
const errorText = json?.error || `Server error (${res.status})`
|
||||
throw new Error(errorText)
|
||||
}
|
||||
|
||||
this.setState(ClientToolCallState.success)
|
||||
await this.markToolComplete(200, 'Workflow redeployed', {
|
||||
workflowId: activeWorkflowId,
|
||||
deployedAt: json?.deployedAt || null,
|
||||
schedule: json?.schedule,
|
||||
})
|
||||
} catch (error: any) {
|
||||
logger.error('Redeploy failed', { message: error?.message })
|
||||
this.setState(ClientToolCallState.error)
|
||||
await this.markToolComplete(500, error?.message || 'Failed to redeploy workflow')
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -627,9 +627,12 @@ function createBlockFromParams(
|
||||
|
||||
let sanitizedValue = value
|
||||
|
||||
// Normalize array subblocks with id fields (inputFormat, table rows, etc.)
|
||||
if (shouldNormalizeArrayIds(key)) {
|
||||
sanitizedValue = normalizeArrayWithIds(value)
|
||||
// Special handling for inputFormat - ensure it's an array
|
||||
if (key === 'inputFormat' && value !== null && value !== undefined) {
|
||||
if (!Array.isArray(value)) {
|
||||
// Invalid format, default to empty array
|
||||
sanitizedValue = []
|
||||
}
|
||||
}
|
||||
|
||||
// Special handling for tools - normalize and filter disallowed
|
||||
@@ -717,55 +720,6 @@ function normalizeTools(tools: any[]): any[] {
|
||||
})
|
||||
}
|
||||
|
||||
/** UUID v4 regex pattern for validation */
|
||||
const UUID_REGEX = /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i
|
||||
|
||||
/**
|
||||
* Subblock types that store arrays of objects with `id` fields.
|
||||
* The LLM may generate arbitrary IDs which need to be converted to proper UUIDs.
|
||||
*/
|
||||
const ARRAY_WITH_ID_SUBBLOCK_TYPES = new Set([
|
||||
'inputFormat', // input-format: Fields with id, name, type, value, collapsed
|
||||
'headers', // table: Rows with id, cells (used for HTTP headers)
|
||||
'params', // table: Rows with id, cells (used for query params)
|
||||
'variables', // table or variables-input: Rows/assignments with id
|
||||
'tagFilters', // knowledge-tag-filters: Filters with id, tagName, etc.
|
||||
'documentTags', // document-tag-entry: Tags with id, tagName, etc.
|
||||
'metrics', // eval-input: Metrics with id, name, description, range
|
||||
])
|
||||
|
||||
/**
|
||||
* Normalizes array subblock values by ensuring each item has a valid UUID.
|
||||
* The LLM may generate arbitrary IDs like "input-desc-001" or "row-1" which need
|
||||
* to be converted to proper UUIDs for consistency with UI-created items.
|
||||
*/
|
||||
function normalizeArrayWithIds(value: unknown): any[] {
|
||||
if (!Array.isArray(value)) {
|
||||
return []
|
||||
}
|
||||
|
||||
return value.map((item: any) => {
|
||||
if (!item || typeof item !== 'object') {
|
||||
return item
|
||||
}
|
||||
|
||||
// Check if id is missing or not a valid UUID
|
||||
const hasValidUUID = typeof item.id === 'string' && UUID_REGEX.test(item.id)
|
||||
if (!hasValidUUID) {
|
||||
return { ...item, id: crypto.randomUUID() }
|
||||
}
|
||||
|
||||
return item
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if a subblock key should have its array items normalized with UUIDs.
|
||||
*/
|
||||
function shouldNormalizeArrayIds(key: string): boolean {
|
||||
return ARRAY_WITH_ID_SUBBLOCK_TYPES.has(key)
|
||||
}
|
||||
|
||||
/**
|
||||
* Normalize responseFormat to ensure consistent storage
|
||||
* Handles both string (JSON) and object formats
|
||||
@@ -1406,9 +1360,12 @@ function applyOperationsToWorkflowState(
|
||||
}
|
||||
let sanitizedValue = value
|
||||
|
||||
// Normalize array subblocks with id fields (inputFormat, table rows, etc.)
|
||||
if (shouldNormalizeArrayIds(key)) {
|
||||
sanitizedValue = normalizeArrayWithIds(value)
|
||||
// Special handling for inputFormat - ensure it's an array
|
||||
if (key === 'inputFormat' && value !== null && value !== undefined) {
|
||||
if (!Array.isArray(value)) {
|
||||
// Invalid format, default to empty array
|
||||
sanitizedValue = []
|
||||
}
|
||||
}
|
||||
|
||||
// Special handling for tools - normalize and filter disallowed
|
||||
@@ -2054,9 +2011,10 @@ function applyOperationsToWorkflowState(
|
||||
|
||||
let sanitizedValue = value
|
||||
|
||||
// Normalize array subblocks with id fields (inputFormat, table rows, etc.)
|
||||
if (shouldNormalizeArrayIds(key)) {
|
||||
sanitizedValue = normalizeArrayWithIds(value)
|
||||
if (key === 'inputFormat' && value !== null && value !== undefined) {
|
||||
if (!Array.isArray(value)) {
|
||||
sanitizedValue = []
|
||||
}
|
||||
}
|
||||
|
||||
// Special handling for tools - normalize and filter disallowed
|
||||
|
||||
@@ -53,25 +53,14 @@ export function extractFieldsFromSchema(schema: any): Field[] {
|
||||
* Helper function to safely parse response format
|
||||
* Handles both string and object formats
|
||||
*/
|
||||
export function parseResponseFormatSafely(
|
||||
responseFormatValue: any,
|
||||
blockId: string,
|
||||
options?: { allowReferences?: boolean }
|
||||
): any {
|
||||
export function parseResponseFormatSafely(responseFormatValue: any, blockId: string): any {
|
||||
if (!responseFormatValue) {
|
||||
return null
|
||||
}
|
||||
|
||||
const allowReferences = options?.allowReferences ?? false
|
||||
|
||||
try {
|
||||
if (typeof responseFormatValue === 'string') {
|
||||
const trimmedValue = responseFormatValue.trim()
|
||||
if (trimmedValue === '') return null
|
||||
if (allowReferences && trimmedValue.startsWith('<') && trimmedValue.includes('>')) {
|
||||
return trimmedValue
|
||||
}
|
||||
return JSON.parse(trimmedValue)
|
||||
return JSON.parse(responseFormatValue)
|
||||
}
|
||||
return responseFormatValue
|
||||
} catch (error) {
|
||||
|
||||
@@ -3,9 +3,6 @@ import { environment, workspaceEnvironment } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import { decryptSecret } from '@/lib/core/security/encryption'
|
||||
import { REFERENCE } from '@/executor/constants'
|
||||
import { createEnvVarPattern } from '@/executor/utils/reference-validation'
|
||||
import type { BlockState } from '@/stores/workflows/workflow/types'
|
||||
|
||||
const logger = createLogger('EnvironmentUtils')
|
||||
|
||||
@@ -110,86 +107,3 @@ export async function getEffectiveDecryptedEnv(
|
||||
)
|
||||
return { ...personalDecrypted, ...workspaceDecrypted }
|
||||
}
|
||||
|
||||
/**
|
||||
* Ensure all environment variables can be decrypted.
|
||||
*/
|
||||
export async function ensureEnvVarsDecryptable(
|
||||
variables: Record<string, string>,
|
||||
options: { requestId?: string } = {}
|
||||
): Promise<void> {
|
||||
const requestId = options.requestId
|
||||
for (const [key, encryptedValue] of Object.entries(variables)) {
|
||||
try {
|
||||
await decryptSecret(encryptedValue)
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : 'Unknown error'
|
||||
if (requestId) {
|
||||
logger.error(`[${requestId}] Failed to decrypt environment variable "${key}"`, error)
|
||||
} else {
|
||||
logger.error(`Failed to decrypt environment variable "${key}"`, error)
|
||||
}
|
||||
throw new Error(`Failed to decrypt environment variable "${key}": ${message}`)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Ensure all {{ENV_VAR}} references in block subblocks resolve to decryptable values.
|
||||
*/
|
||||
export async function ensureBlockEnvVarsResolvable(
|
||||
blocks: Record<string, BlockState>,
|
||||
variables: Record<string, string>,
|
||||
options: { requestId?: string } = {}
|
||||
): Promise<void> {
|
||||
const requestId = options.requestId
|
||||
await Promise.all(
|
||||
Object.values(blocks).map(async (block) => {
|
||||
const subBlocks = block.subBlocks ?? {}
|
||||
await Promise.all(
|
||||
Object.values(subBlocks).map(async (subBlock) => {
|
||||
const value = subBlock.value
|
||||
if (
|
||||
typeof value !== 'string' ||
|
||||
!value.includes(REFERENCE.ENV_VAR_START) ||
|
||||
!value.includes(REFERENCE.ENV_VAR_END)
|
||||
) {
|
||||
return
|
||||
}
|
||||
|
||||
const envVarPattern = createEnvVarPattern()
|
||||
const matches = value.match(envVarPattern)
|
||||
if (!matches) {
|
||||
return
|
||||
}
|
||||
|
||||
for (const match of matches) {
|
||||
const varName = match.slice(
|
||||
REFERENCE.ENV_VAR_START.length,
|
||||
-REFERENCE.ENV_VAR_END.length
|
||||
)
|
||||
const encryptedValue = variables[varName]
|
||||
if (!encryptedValue) {
|
||||
throw new Error(`Environment variable "${varName}" was not found`)
|
||||
}
|
||||
|
||||
try {
|
||||
await decryptSecret(encryptedValue)
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : 'Unknown error'
|
||||
if (requestId) {
|
||||
logger.error(
|
||||
`[${requestId}] Error decrypting value for variable "${varName}"`,
|
||||
error
|
||||
)
|
||||
} else {
|
||||
logger.error(`Error decrypting value for variable "${varName}"`, error)
|
||||
}
|
||||
throw new Error(`Failed to decrypt environment variable "${varName}": ${message}`)
|
||||
}
|
||||
}
|
||||
})
|
||||
)
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
@@ -6,7 +6,6 @@ import { checkServerSideUsageLimits } from '@/lib/billing/calculations/usage-mon
|
||||
import { getHighestPrioritySubscription } from '@/lib/billing/core/subscription'
|
||||
import { RateLimiter } from '@/lib/core/rate-limiter/rate-limiter'
|
||||
import { LoggingSession } from '@/lib/logs/execution/logging-session'
|
||||
import { preflightWorkflowEnvVars } from '@/lib/workflows/executor/preflight'
|
||||
import { getWorkspaceBilledAccountUserId } from '@/lib/workspaces/utils'
|
||||
import type { CoreTriggerType } from '@/stores/logs/filters/types'
|
||||
|
||||
@@ -118,15 +117,11 @@ export interface PreprocessExecutionOptions {
|
||||
checkRateLimit?: boolean // Default: false for manual/chat, true for others
|
||||
checkDeployment?: boolean // Default: true for non-manual triggers
|
||||
skipUsageLimits?: boolean // Default: false (only use for test mode)
|
||||
preflightEnvVars?: boolean // Default: false
|
||||
|
||||
// Context information
|
||||
workspaceId?: string // If known, used for billing resolution
|
||||
loggingSession?: LoggingSession // If provided, will be used for error logging
|
||||
isResumeContext?: boolean // If true, allows fallback billing on resolution failure (for paused workflow resumes)
|
||||
/** @deprecated No longer used - preflight always uses deployed state */
|
||||
useDraftState?: boolean
|
||||
envUserId?: string // Optional override for env var resolution user
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -164,11 +159,9 @@ export async function preprocessExecution(
|
||||
checkRateLimit = triggerType !== 'manual' && triggerType !== 'chat',
|
||||
checkDeployment = triggerType !== 'manual',
|
||||
skipUsageLimits = false,
|
||||
preflightEnvVars = false,
|
||||
workspaceId: providedWorkspaceId,
|
||||
loggingSession: providedLoggingSession,
|
||||
isResumeContext = false,
|
||||
envUserId,
|
||||
} = options
|
||||
|
||||
logger.info(`[${requestId}] Starting execution preprocessing`, {
|
||||
@@ -483,44 +476,6 @@ export async function preprocessExecution(
|
||||
}
|
||||
|
||||
// ========== SUCCESS: All Checks Passed ==========
|
||||
if (preflightEnvVars) {
|
||||
try {
|
||||
const resolvedEnvUserId = envUserId || workflowRecord.userId || userId
|
||||
await preflightWorkflowEnvVars({
|
||||
workflowId,
|
||||
workspaceId,
|
||||
envUserId: resolvedEnvUserId,
|
||||
requestId,
|
||||
})
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : 'Env var preflight failed'
|
||||
logger.warn(`[${requestId}] Env var preflight failed`, {
|
||||
workflowId,
|
||||
message,
|
||||
})
|
||||
|
||||
await logPreprocessingError({
|
||||
workflowId,
|
||||
executionId,
|
||||
triggerType,
|
||||
requestId,
|
||||
userId: actorUserId,
|
||||
workspaceId,
|
||||
errorMessage: message,
|
||||
loggingSession: providedLoggingSession,
|
||||
})
|
||||
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
message,
|
||||
statusCode: 400,
|
||||
logCreated: true,
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] All preprocessing checks passed`, {
|
||||
workflowId,
|
||||
actorUserId,
|
||||
|
||||
@@ -25,7 +25,8 @@ import type {
|
||||
McpTransport,
|
||||
} from '@/lib/mcp/types'
|
||||
import { MCP_CONSTANTS } from '@/lib/mcp/utils'
|
||||
import { resolveEnvVarReferences } from '@/executor/utils/reference-validation'
|
||||
import { REFERENCE } from '@/executor/constants'
|
||||
import { createEnvVarPattern } from '@/executor/utils/reference-validation'
|
||||
|
||||
const logger = createLogger('McpService')
|
||||
|
||||
@@ -50,21 +51,31 @@ class McpService {
|
||||
* Resolve environment variables in strings
|
||||
*/
|
||||
private resolveEnvVars(value: string, envVars: Record<string, string>): string {
|
||||
const envVarPattern = createEnvVarPattern()
|
||||
const envMatches = value.match(envVarPattern)
|
||||
if (!envMatches) return value
|
||||
|
||||
let resolvedValue = value
|
||||
const missingVars: string[] = []
|
||||
const resolvedValue = resolveEnvVarReferences(value, envVars, {
|
||||
allowEmbedded: true,
|
||||
resolveExactMatch: true,
|
||||
trimKeys: true,
|
||||
onMissing: 'keep',
|
||||
deep: false,
|
||||
missingKeys: missingVars,
|
||||
}) as string
|
||||
|
||||
for (const match of envMatches) {
|
||||
const envKey = match
|
||||
.slice(REFERENCE.ENV_VAR_START.length, -REFERENCE.ENV_VAR_END.length)
|
||||
.trim()
|
||||
const envValue = envVars[envKey]
|
||||
|
||||
if (envValue === undefined) {
|
||||
missingVars.push(envKey)
|
||||
continue
|
||||
}
|
||||
|
||||
resolvedValue = resolvedValue.replace(match, envValue)
|
||||
}
|
||||
|
||||
if (missingVars.length > 0) {
|
||||
const uniqueMissing = Array.from(new Set(missingVars))
|
||||
throw new Error(
|
||||
`Missing required environment variable${uniqueMissing.length > 1 ? 's' : ''}: ${uniqueMissing.join(', ')}. ` +
|
||||
`Please set ${uniqueMissing.length > 1 ? 'these variables' : 'this variable'} in your workspace or personal environment settings.`
|
||||
`Missing required environment variable${missingVars.length > 1 ? 's' : ''}: ${missingVars.join(', ')}. ` +
|
||||
`Please set ${missingVars.length > 1 ? 'these variables' : 'this variable'} in your workspace or personal environment settings.`
|
||||
)
|
||||
}
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { db } from '@sim/db'
|
||||
import { webhook } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, eq, inArray } from 'drizzle-orm'
|
||||
import { eq, inArray } from 'drizzle-orm'
|
||||
import { nanoid } from 'nanoid'
|
||||
import type { NextRequest } from 'next/server'
|
||||
import { getProviderIdFromServiceId } from '@/lib/oauth'
|
||||
@@ -41,7 +41,6 @@ interface SaveTriggerWebhooksInput {
|
||||
userId: string
|
||||
blocks: Record<string, BlockState>
|
||||
requestId: string
|
||||
deploymentVersionId?: string
|
||||
}
|
||||
|
||||
function getSubBlockValue(block: BlockState, subBlockId: string): unknown {
|
||||
@@ -247,17 +246,8 @@ async function syncCredentialSetWebhooks(params: {
|
||||
triggerPath: string
|
||||
providerConfig: Record<string, unknown>
|
||||
requestId: string
|
||||
deploymentVersionId?: string
|
||||
}): Promise<TriggerSaveError | null> {
|
||||
const {
|
||||
workflowId,
|
||||
blockId,
|
||||
provider,
|
||||
triggerPath,
|
||||
providerConfig,
|
||||
requestId,
|
||||
deploymentVersionId,
|
||||
} = params
|
||||
const { workflowId, blockId, provider, triggerPath, providerConfig, requestId } = params
|
||||
|
||||
const credentialSetId = providerConfig.credentialSetId as string | undefined
|
||||
if (!credentialSetId) {
|
||||
@@ -277,7 +267,6 @@ async function syncCredentialSetWebhooks(params: {
|
||||
oauthProviderId,
|
||||
providerConfig: baseConfig as Record<string, any>,
|
||||
requestId,
|
||||
deploymentVersionId,
|
||||
})
|
||||
|
||||
if (syncResult.webhooks.length === 0) {
|
||||
@@ -319,7 +308,6 @@ async function createWebhookForBlock(params: {
|
||||
providerConfig: Record<string, unknown>
|
||||
triggerPath: string
|
||||
requestId: string
|
||||
deploymentVersionId?: string
|
||||
}): Promise<TriggerSaveError | null> {
|
||||
const {
|
||||
request,
|
||||
@@ -331,7 +319,6 @@ async function createWebhookForBlock(params: {
|
||||
providerConfig,
|
||||
triggerPath,
|
||||
requestId,
|
||||
deploymentVersionId,
|
||||
} = params
|
||||
|
||||
const webhookId = nanoid()
|
||||
@@ -359,7 +346,6 @@ async function createWebhookForBlock(params: {
|
||||
.values({
|
||||
id: webhookId,
|
||||
workflowId,
|
||||
deploymentVersionId: deploymentVersionId || null,
|
||||
blockId: block.id,
|
||||
path: triggerPath,
|
||||
provider,
|
||||
@@ -397,31 +383,16 @@ export async function saveTriggerWebhooksForDeploy({
|
||||
userId,
|
||||
blocks,
|
||||
requestId,
|
||||
deploymentVersionId,
|
||||
}: SaveTriggerWebhooksInput): Promise<TriggerSaveResult> {
|
||||
const triggerBlocks = Object.values(blocks || {}).filter(Boolean)
|
||||
const currentBlockIds = new Set(triggerBlocks.map((b) => b.id))
|
||||
|
||||
// 1. Get all existing webhooks for this workflow
|
||||
const existingWebhooks = await db
|
||||
.select()
|
||||
.from(webhook)
|
||||
.where(
|
||||
deploymentVersionId
|
||||
? and(
|
||||
eq(webhook.workflowId, workflowId),
|
||||
eq(webhook.deploymentVersionId, deploymentVersionId)
|
||||
)
|
||||
: eq(webhook.workflowId, workflowId)
|
||||
)
|
||||
const existingWebhooks = await db.select().from(webhook).where(eq(webhook.workflowId, workflowId))
|
||||
|
||||
const webhooksByBlockId = new Map<string, typeof existingWebhooks>()
|
||||
for (const wh of existingWebhooks) {
|
||||
if (!wh.blockId) continue
|
||||
const existingForBlock = webhooksByBlockId.get(wh.blockId) ?? []
|
||||
existingForBlock.push(wh)
|
||||
webhooksByBlockId.set(wh.blockId, existingForBlock)
|
||||
}
|
||||
const webhooksByBlockId = new Map(
|
||||
existingWebhooks.filter((wh) => wh.blockId).map((wh) => [wh.blockId!, wh])
|
||||
)
|
||||
|
||||
logger.info(`[${requestId}] Starting webhook sync`, {
|
||||
workflowId,
|
||||
@@ -432,7 +403,6 @@ export async function saveTriggerWebhooksForDeploy({
|
||||
// 2. Determine which webhooks to delete (orphaned or config changed)
|
||||
const webhooksToDelete: typeof existingWebhooks = []
|
||||
const blocksNeedingWebhook: BlockState[] = []
|
||||
const blocksNeedingCredentialSetSync: BlockState[] = []
|
||||
|
||||
for (const block of triggerBlocks) {
|
||||
const triggerId = resolveTriggerId(block)
|
||||
@@ -459,24 +429,11 @@ export async function saveTriggerWebhooksForDeploy({
|
||||
|
||||
;(block as any)._webhookConfig = { provider, providerConfig, triggerPath, triggerDef }
|
||||
|
||||
if (providerConfig.credentialSetId) {
|
||||
blocksNeedingCredentialSetSync.push(block)
|
||||
continue
|
||||
}
|
||||
|
||||
const existingForBlock = webhooksByBlockId.get(block.id) ?? []
|
||||
if (existingForBlock.length === 0) {
|
||||
const existingWh = webhooksByBlockId.get(block.id)
|
||||
if (!existingWh) {
|
||||
// No existing webhook - needs creation
|
||||
blocksNeedingWebhook.push(block)
|
||||
} else {
|
||||
const [existingWh, ...extraWebhooks] = existingForBlock
|
||||
if (extraWebhooks.length > 0) {
|
||||
webhooksToDelete.push(...extraWebhooks)
|
||||
logger.info(
|
||||
`[${requestId}] Found ${extraWebhooks.length} extra webhook(s) for block ${block.id}`
|
||||
)
|
||||
}
|
||||
|
||||
// Check if config changed
|
||||
const existingConfig = (existingWh.providerConfig as Record<string, unknown>) || {}
|
||||
if (
|
||||
@@ -522,40 +479,7 @@ export async function saveTriggerWebhooksForDeploy({
|
||||
await db.delete(webhook).where(inArray(webhook.id, idsToDelete))
|
||||
}
|
||||
|
||||
// 4. Sync credential set webhooks
|
||||
for (const block of blocksNeedingCredentialSetSync) {
|
||||
const config = (block as any)._webhookConfig
|
||||
if (!config) continue
|
||||
|
||||
const { provider, providerConfig, triggerPath } = config
|
||||
|
||||
try {
|
||||
const credentialSetError = await syncCredentialSetWebhooks({
|
||||
workflowId,
|
||||
blockId: block.id,
|
||||
provider,
|
||||
triggerPath,
|
||||
providerConfig,
|
||||
requestId,
|
||||
deploymentVersionId,
|
||||
})
|
||||
|
||||
if (credentialSetError) {
|
||||
return { success: false, error: credentialSetError }
|
||||
}
|
||||
} catch (error: any) {
|
||||
logger.error(`[${requestId}] Failed to create webhook for ${block.id}`, error)
|
||||
return {
|
||||
success: false,
|
||||
error: {
|
||||
message: error?.message || 'Failed to save trigger configuration',
|
||||
status: 500,
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// 5. Create webhooks for blocks that need them
|
||||
// 4. Create webhooks for blocks that need them
|
||||
for (const block of blocksNeedingWebhook) {
|
||||
const config = (block as any)._webhookConfig
|
||||
if (!config) continue
|
||||
@@ -563,6 +487,24 @@ export async function saveTriggerWebhooksForDeploy({
|
||||
const { provider, providerConfig, triggerPath } = config
|
||||
|
||||
try {
|
||||
// Handle credential sets
|
||||
const credentialSetError = await syncCredentialSetWebhooks({
|
||||
workflowId,
|
||||
blockId: block.id,
|
||||
provider,
|
||||
triggerPath,
|
||||
providerConfig,
|
||||
requestId,
|
||||
})
|
||||
|
||||
if (credentialSetError) {
|
||||
return { success: false, error: credentialSetError }
|
||||
}
|
||||
|
||||
if (providerConfig.credentialSetId) {
|
||||
continue
|
||||
}
|
||||
|
||||
const createError = await createWebhookForBlock({
|
||||
request,
|
||||
workflowId,
|
||||
@@ -573,7 +515,6 @@ export async function saveTriggerWebhooksForDeploy({
|
||||
providerConfig,
|
||||
triggerPath,
|
||||
requestId,
|
||||
deploymentVersionId,
|
||||
})
|
||||
|
||||
if (createError) {
|
||||
@@ -606,20 +547,9 @@ export async function saveTriggerWebhooksForDeploy({
|
||||
export async function cleanupWebhooksForWorkflow(
|
||||
workflowId: string,
|
||||
workflow: Record<string, unknown>,
|
||||
requestId: string,
|
||||
deploymentVersionId?: string
|
||||
requestId: string
|
||||
): Promise<void> {
|
||||
const existingWebhooks = await db
|
||||
.select()
|
||||
.from(webhook)
|
||||
.where(
|
||||
deploymentVersionId
|
||||
? and(
|
||||
eq(webhook.workflowId, workflowId),
|
||||
eq(webhook.deploymentVersionId, deploymentVersionId)
|
||||
)
|
||||
: eq(webhook.workflowId, workflowId)
|
||||
)
|
||||
const existingWebhooks = await db.select().from(webhook).where(eq(webhook.workflowId, workflowId))
|
||||
|
||||
if (existingWebhooks.length === 0) {
|
||||
logger.debug(`[${requestId}] No webhooks to clean up for workflow ${workflowId}`)
|
||||
@@ -628,7 +558,6 @@ export async function cleanupWebhooksForWorkflow(
|
||||
|
||||
logger.info(`[${requestId}] Cleaning up ${existingWebhooks.length} webhook(s) for undeploy`, {
|
||||
workflowId,
|
||||
deploymentVersionId,
|
||||
webhookIds: existingWebhooks.map((wh) => wh.id),
|
||||
})
|
||||
|
||||
@@ -643,20 +572,7 @@ export async function cleanupWebhooksForWorkflow(
|
||||
}
|
||||
|
||||
// Delete all webhook records
|
||||
await db
|
||||
.delete(webhook)
|
||||
.where(
|
||||
deploymentVersionId
|
||||
? and(
|
||||
eq(webhook.workflowId, workflowId),
|
||||
eq(webhook.deploymentVersionId, deploymentVersionId)
|
||||
)
|
||||
: eq(webhook.workflowId, workflowId)
|
||||
)
|
||||
await db.delete(webhook).where(eq(webhook.workflowId, workflowId))
|
||||
|
||||
logger.info(
|
||||
deploymentVersionId
|
||||
? `[${requestId}] Cleaned up webhooks for workflow ${workflowId} deployment ${deploymentVersionId}`
|
||||
: `[${requestId}] Cleaned up all webhooks for workflow ${workflowId}`
|
||||
)
|
||||
logger.info(`[${requestId}] Cleaned up all webhooks for workflow ${workflowId}`)
|
||||
}
|
||||
|
||||
@@ -1,13 +1,7 @@
|
||||
import { db } from '@sim/db'
|
||||
import {
|
||||
account,
|
||||
credentialSet,
|
||||
webhook,
|
||||
workflow,
|
||||
workflowDeploymentVersion,
|
||||
} from '@sim/db/schema'
|
||||
import { account, credentialSet, webhook, workflow } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, eq, isNull, or, sql } from 'drizzle-orm'
|
||||
import { and, eq, sql } from 'drizzle-orm'
|
||||
import { nanoid } from 'nanoid'
|
||||
import { isOrganizationOnTeamOrEnterprisePlan } from '@/lib/billing'
|
||||
import { pollingIdempotency } from '@/lib/core/idempotency/service'
|
||||
@@ -117,22 +111,11 @@ export async function pollGmailWebhooks() {
|
||||
.select({ webhook })
|
||||
.from(webhook)
|
||||
.innerJoin(workflow, eq(webhook.workflowId, workflow.id))
|
||||
.leftJoin(
|
||||
workflowDeploymentVersion,
|
||||
and(
|
||||
eq(workflowDeploymentVersion.workflowId, workflow.id),
|
||||
eq(workflowDeploymentVersion.isActive, true)
|
||||
)
|
||||
)
|
||||
.where(
|
||||
and(
|
||||
eq(webhook.provider, 'gmail'),
|
||||
eq(webhook.isActive, true),
|
||||
eq(workflow.isDeployed, true),
|
||||
or(
|
||||
eq(webhook.deploymentVersionId, workflowDeploymentVersion.id),
|
||||
and(isNull(workflowDeploymentVersion.id), isNull(webhook.deploymentVersionId))
|
||||
)
|
||||
eq(workflow.isDeployed, true)
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
import { db } from '@sim/db'
|
||||
import { webhook, workflow, workflowDeploymentVersion } from '@sim/db/schema'
|
||||
import { webhook, workflow } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import type { InferSelectModel } from 'drizzle-orm'
|
||||
import { and, eq, isNull, or, sql } from 'drizzle-orm'
|
||||
import { and, eq, sql } from 'drizzle-orm'
|
||||
import type { FetchMessageObject, MailboxLockObject } from 'imapflow'
|
||||
import { ImapFlow } from 'imapflow'
|
||||
import { nanoid } from 'nanoid'
|
||||
@@ -113,23 +113,8 @@ export async function pollImapWebhooks() {
|
||||
.select({ webhook })
|
||||
.from(webhook)
|
||||
.innerJoin(workflow, eq(webhook.workflowId, workflow.id))
|
||||
.leftJoin(
|
||||
workflowDeploymentVersion,
|
||||
and(
|
||||
eq(workflowDeploymentVersion.workflowId, workflow.id),
|
||||
eq(workflowDeploymentVersion.isActive, true)
|
||||
)
|
||||
)
|
||||
.where(
|
||||
and(
|
||||
eq(webhook.provider, 'imap'),
|
||||
eq(webhook.isActive, true),
|
||||
eq(workflow.isDeployed, true),
|
||||
or(
|
||||
eq(webhook.deploymentVersionId, workflowDeploymentVersion.id),
|
||||
and(isNull(workflowDeploymentVersion.id), isNull(webhook.deploymentVersionId))
|
||||
)
|
||||
)
|
||||
and(eq(webhook.provider, 'imap'), eq(webhook.isActive, true), eq(workflow.isDeployed, true))
|
||||
)
|
||||
|
||||
const activeWebhooks = activeWebhooksResult.map((r) => r.webhook)
|
||||
|
||||
@@ -1,13 +1,7 @@
|
||||
import { db } from '@sim/db'
|
||||
import {
|
||||
account,
|
||||
credentialSet,
|
||||
webhook,
|
||||
workflow,
|
||||
workflowDeploymentVersion,
|
||||
} from '@sim/db/schema'
|
||||
import { account, credentialSet, webhook, workflow } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, eq, isNull, or, sql } from 'drizzle-orm'
|
||||
import { and, eq, sql } from 'drizzle-orm'
|
||||
import { htmlToText } from 'html-to-text'
|
||||
import { nanoid } from 'nanoid'
|
||||
import { isOrganizationOnTeamOrEnterprisePlan } from '@/lib/billing'
|
||||
@@ -167,22 +161,11 @@ export async function pollOutlookWebhooks() {
|
||||
.select({ webhook })
|
||||
.from(webhook)
|
||||
.innerJoin(workflow, eq(webhook.workflowId, workflow.id))
|
||||
.leftJoin(
|
||||
workflowDeploymentVersion,
|
||||
and(
|
||||
eq(workflowDeploymentVersion.workflowId, workflow.id),
|
||||
eq(workflowDeploymentVersion.isActive, true)
|
||||
)
|
||||
)
|
||||
.where(
|
||||
and(
|
||||
eq(webhook.provider, 'outlook'),
|
||||
eq(webhook.isActive, true),
|
||||
eq(workflow.isDeployed, true),
|
||||
or(
|
||||
eq(webhook.deploymentVersionId, workflowDeploymentVersion.id),
|
||||
and(isNull(workflowDeploymentVersion.id), isNull(webhook.deploymentVersionId))
|
||||
)
|
||||
eq(workflow.isDeployed, true)
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
import { db, webhook, workflow, workflowDeploymentVersion } from '@sim/db'
|
||||
import { db, webhook, workflow } from '@sim/db'
|
||||
import { credentialSet, subscription } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { tasks } from '@trigger.dev/sdk'
|
||||
import { and, eq, isNull, or } from 'drizzle-orm'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { v4 as uuidv4 } from 'uuid'
|
||||
import { checkEnterprisePlan, checkTeamPlan } from '@/lib/billing/subscriptions/utils'
|
||||
@@ -16,7 +16,8 @@ import {
|
||||
verifyProviderWebhook,
|
||||
} from '@/lib/webhooks/utils.server'
|
||||
import { executeWebhookJob } from '@/background/webhook-execution'
|
||||
import { resolveEnvVarReferences } from '@/executor/utils/reference-validation'
|
||||
import { REFERENCE } from '@/executor/constants'
|
||||
import { createEnvVarPattern } from '@/executor/utils/reference-validation'
|
||||
|
||||
const logger = createLogger('WebhookProcessor')
|
||||
|
||||
@@ -24,6 +25,7 @@ export interface WebhookProcessorOptions {
|
||||
requestId: string
|
||||
path?: string
|
||||
webhookId?: string
|
||||
executionTarget?: 'deployed' | 'live'
|
||||
}
|
||||
|
||||
function getExternalUrl(request: NextRequest): string {
|
||||
@@ -294,23 +296,7 @@ export async function findWebhookAndWorkflow(
|
||||
})
|
||||
.from(webhook)
|
||||
.innerJoin(workflow, eq(webhook.workflowId, workflow.id))
|
||||
.leftJoin(
|
||||
workflowDeploymentVersion,
|
||||
and(
|
||||
eq(workflowDeploymentVersion.workflowId, workflow.id),
|
||||
eq(workflowDeploymentVersion.isActive, true)
|
||||
)
|
||||
)
|
||||
.where(
|
||||
and(
|
||||
eq(webhook.id, options.webhookId),
|
||||
eq(webhook.isActive, true),
|
||||
or(
|
||||
eq(webhook.deploymentVersionId, workflowDeploymentVersion.id),
|
||||
and(isNull(workflowDeploymentVersion.id), isNull(webhook.deploymentVersionId))
|
||||
)
|
||||
)
|
||||
)
|
||||
.where(and(eq(webhook.id, options.webhookId), eq(webhook.isActive, true)))
|
||||
.limit(1)
|
||||
|
||||
if (results.length === 0) {
|
||||
@@ -329,23 +315,7 @@ export async function findWebhookAndWorkflow(
|
||||
})
|
||||
.from(webhook)
|
||||
.innerJoin(workflow, eq(webhook.workflowId, workflow.id))
|
||||
.leftJoin(
|
||||
workflowDeploymentVersion,
|
||||
and(
|
||||
eq(workflowDeploymentVersion.workflowId, workflow.id),
|
||||
eq(workflowDeploymentVersion.isActive, true)
|
||||
)
|
||||
)
|
||||
.where(
|
||||
and(
|
||||
eq(webhook.path, options.path),
|
||||
eq(webhook.isActive, true),
|
||||
or(
|
||||
eq(webhook.deploymentVersionId, workflowDeploymentVersion.id),
|
||||
and(isNull(workflowDeploymentVersion.id), isNull(webhook.deploymentVersionId))
|
||||
)
|
||||
)
|
||||
)
|
||||
.where(and(eq(webhook.path, options.path), eq(webhook.isActive, true)))
|
||||
.limit(1)
|
||||
|
||||
if (results.length === 0) {
|
||||
@@ -377,23 +347,7 @@ export async function findAllWebhooksForPath(
|
||||
})
|
||||
.from(webhook)
|
||||
.innerJoin(workflow, eq(webhook.workflowId, workflow.id))
|
||||
.leftJoin(
|
||||
workflowDeploymentVersion,
|
||||
and(
|
||||
eq(workflowDeploymentVersion.workflowId, workflow.id),
|
||||
eq(workflowDeploymentVersion.isActive, true)
|
||||
)
|
||||
)
|
||||
.where(
|
||||
and(
|
||||
eq(webhook.path, options.path),
|
||||
eq(webhook.isActive, true),
|
||||
or(
|
||||
eq(webhook.deploymentVersionId, workflowDeploymentVersion.id),
|
||||
and(isNull(workflowDeploymentVersion.id), isNull(webhook.deploymentVersionId))
|
||||
)
|
||||
)
|
||||
)
|
||||
.where(and(eq(webhook.path, options.path), eq(webhook.isActive, true)))
|
||||
|
||||
if (results.length === 0) {
|
||||
logger.warn(`[${options.requestId}] No active webhooks found for path: ${options.path}`)
|
||||
@@ -413,13 +367,19 @@ export async function findAllWebhooksForPath(
|
||||
* @returns String with all {{VARIABLE}} references replaced
|
||||
*/
|
||||
function resolveEnvVars(value: string, envVars: Record<string, string>): string {
|
||||
return resolveEnvVarReferences(value, envVars, {
|
||||
allowEmbedded: true,
|
||||
resolveExactMatch: true,
|
||||
trimKeys: true,
|
||||
onMissing: 'keep',
|
||||
deep: false,
|
||||
}) as string
|
||||
const envVarPattern = createEnvVarPattern()
|
||||
const envMatches = value.match(envVarPattern)
|
||||
if (!envMatches) return value
|
||||
|
||||
let resolvedValue = value
|
||||
for (const match of envMatches) {
|
||||
const envKey = match.slice(REFERENCE.ENV_VAR_START.length, -REFERENCE.ENV_VAR_END.length).trim()
|
||||
const envValue = envVars[envKey]
|
||||
if (envValue !== undefined) {
|
||||
resolvedValue = resolvedValue.replaceAll(match, envValue)
|
||||
}
|
||||
}
|
||||
return resolvedValue
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -804,7 +764,6 @@ export async function checkWebhookPreprocessing(
|
||||
checkRateLimit: true,
|
||||
checkDeployment: true,
|
||||
workspaceId: foundWorkflow.workspaceId,
|
||||
preflightEnvVars: isTriggerDevEnabled,
|
||||
})
|
||||
|
||||
if (!preprocessResult.success) {
|
||||
@@ -1003,6 +962,7 @@ export async function queueWebhookExecution(
|
||||
headers,
|
||||
path: options.path || foundWebhook.path,
|
||||
blockId: foundWebhook.blockId,
|
||||
executionTarget: options.executionTarget,
|
||||
...(credentialId ? { credentialId } : {}),
|
||||
}
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { db } from '@sim/db'
|
||||
import { webhook, workflow, workflowDeploymentVersion } from '@sim/db/schema'
|
||||
import { webhook, workflow } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, eq, isNull, or, sql } from 'drizzle-orm'
|
||||
import { and, eq, sql } from 'drizzle-orm'
|
||||
import { nanoid } from 'nanoid'
|
||||
import Parser from 'rss-parser'
|
||||
import { pollingIdempotency } from '@/lib/core/idempotency/service'
|
||||
@@ -119,23 +119,8 @@ export async function pollRssWebhooks() {
|
||||
.select({ webhook })
|
||||
.from(webhook)
|
||||
.innerJoin(workflow, eq(webhook.workflowId, workflow.id))
|
||||
.leftJoin(
|
||||
workflowDeploymentVersion,
|
||||
and(
|
||||
eq(workflowDeploymentVersion.workflowId, workflow.id),
|
||||
eq(workflowDeploymentVersion.isActive, true)
|
||||
)
|
||||
)
|
||||
.where(
|
||||
and(
|
||||
eq(webhook.provider, 'rss'),
|
||||
eq(webhook.isActive, true),
|
||||
eq(workflow.isDeployed, true),
|
||||
or(
|
||||
eq(webhook.deploymentVersionId, workflowDeploymentVersion.id),
|
||||
and(isNull(workflowDeploymentVersion.id), isNull(webhook.deploymentVersionId))
|
||||
)
|
||||
)
|
||||
and(eq(webhook.provider, 'rss'), eq(webhook.isActive, true), eq(workflow.isDeployed, true))
|
||||
)
|
||||
|
||||
const activeWebhooks = activeWebhooksResult.map((r) => r.webhook)
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { db, workflowDeploymentVersion } from '@sim/db'
|
||||
import { db } from '@sim/db'
|
||||
import { account, webhook } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, eq, isNull, or } from 'drizzle-orm'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { createPinnedUrl, validateUrlWithDNS } from '@/lib/core/security/input-validation'
|
||||
import type { DbOrTx } from '@/lib/db/types'
|
||||
@@ -28,28 +28,11 @@ export async function handleWhatsAppVerification(
|
||||
}
|
||||
|
||||
const webhooks = await db
|
||||
.select({ webhook })
|
||||
.select()
|
||||
.from(webhook)
|
||||
.leftJoin(
|
||||
workflowDeploymentVersion,
|
||||
and(
|
||||
eq(workflowDeploymentVersion.workflowId, webhook.workflowId),
|
||||
eq(workflowDeploymentVersion.isActive, true)
|
||||
)
|
||||
)
|
||||
.where(
|
||||
and(
|
||||
eq(webhook.provider, 'whatsapp'),
|
||||
eq(webhook.isActive, true),
|
||||
or(
|
||||
eq(webhook.deploymentVersionId, workflowDeploymentVersion.id),
|
||||
and(isNull(workflowDeploymentVersion.id), isNull(webhook.deploymentVersionId))
|
||||
)
|
||||
)
|
||||
)
|
||||
.where(and(eq(webhook.provider, 'whatsapp'), eq(webhook.isActive, true)))
|
||||
|
||||
for (const row of webhooks) {
|
||||
const wh = row.webhook
|
||||
for (const wh of webhooks) {
|
||||
const providerConfig = (wh.providerConfig as Record<string, any>) || {}
|
||||
const verificationToken = providerConfig.verificationToken
|
||||
|
||||
@@ -1962,7 +1945,6 @@ export async function syncWebhooksForCredentialSet(params: {
|
||||
providerConfig: Record<string, any>
|
||||
requestId: string
|
||||
tx?: DbOrTx
|
||||
deploymentVersionId?: string
|
||||
}): Promise<CredentialSetWebhookSyncResult> {
|
||||
const {
|
||||
workflowId,
|
||||
@@ -1974,7 +1956,6 @@ export async function syncWebhooksForCredentialSet(params: {
|
||||
providerConfig,
|
||||
requestId,
|
||||
tx,
|
||||
deploymentVersionId,
|
||||
} = params
|
||||
|
||||
const dbCtx = tx ?? db
|
||||
@@ -2009,15 +1990,7 @@ export async function syncWebhooksForCredentialSet(params: {
|
||||
const existingWebhooks = await dbCtx
|
||||
.select()
|
||||
.from(webhook)
|
||||
.where(
|
||||
deploymentVersionId
|
||||
? and(
|
||||
eq(webhook.workflowId, workflowId),
|
||||
eq(webhook.blockId, blockId),
|
||||
eq(webhook.deploymentVersionId, deploymentVersionId)
|
||||
)
|
||||
: and(eq(webhook.workflowId, workflowId), eq(webhook.blockId, blockId))
|
||||
)
|
||||
.where(and(eq(webhook.workflowId, workflowId), eq(webhook.blockId, blockId)))
|
||||
|
||||
// Filter to only webhooks belonging to this credential set
|
||||
const credentialSetWebhooks = existingWebhooks.filter((wh) => {
|
||||
@@ -2071,7 +2044,6 @@ export async function syncWebhooksForCredentialSet(params: {
|
||||
await dbCtx
|
||||
.update(webhook)
|
||||
.set({
|
||||
...(deploymentVersionId ? { deploymentVersionId } : {}),
|
||||
providerConfig: updatedConfig,
|
||||
isActive: true,
|
||||
updatedAt: new Date(),
|
||||
@@ -2110,7 +2082,6 @@ export async function syncWebhooksForCredentialSet(params: {
|
||||
providerConfig: newConfig,
|
||||
credentialSetId, // Indexed column for efficient credential set queries
|
||||
isActive: true,
|
||||
...(deploymentVersionId ? { deploymentVersionId } : {}),
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
@@ -2166,24 +2137,9 @@ export async function syncAllWebhooksForCredentialSet(
|
||||
|
||||
// Find all webhooks that use this credential set using the indexed column
|
||||
const webhooksForSet = await dbCtx
|
||||
.select({ webhook })
|
||||
.select()
|
||||
.from(webhook)
|
||||
.leftJoin(
|
||||
workflowDeploymentVersion,
|
||||
and(
|
||||
eq(workflowDeploymentVersion.workflowId, webhook.workflowId),
|
||||
eq(workflowDeploymentVersion.isActive, true)
|
||||
)
|
||||
)
|
||||
.where(
|
||||
and(
|
||||
eq(webhook.credentialSetId, credentialSetId),
|
||||
or(
|
||||
eq(webhook.deploymentVersionId, workflowDeploymentVersion.id),
|
||||
and(isNull(workflowDeploymentVersion.id), isNull(webhook.deploymentVersionId))
|
||||
)
|
||||
)
|
||||
)
|
||||
.where(eq(webhook.credentialSetId, credentialSetId))
|
||||
|
||||
if (webhooksForSet.length === 0) {
|
||||
syncLogger.info(`[${requestId}] No webhooks found using credential set ${credentialSetId}`)
|
||||
@@ -2191,9 +2147,8 @@ export async function syncAllWebhooksForCredentialSet(
|
||||
}
|
||||
|
||||
// Group webhooks by workflow+block to find unique triggers
|
||||
const triggerGroups = new Map<string, (typeof webhooksForSet)[number]['webhook']>()
|
||||
for (const row of webhooksForSet) {
|
||||
const wh = row.webhook
|
||||
const triggerGroups = new Map<string, (typeof webhooksForSet)[number]>()
|
||||
for (const wh of webhooksForSet) {
|
||||
const key = `${wh.workflowId}:${wh.blockId}`
|
||||
// Keep the first webhook as representative (they all have same config)
|
||||
if (!triggerGroups.has(key)) {
|
||||
@@ -2233,7 +2188,6 @@ export async function syncAllWebhooksForCredentialSet(
|
||||
providerConfig: baseConfig,
|
||||
requestId,
|
||||
tx: dbCtx,
|
||||
deploymentVersionId: representativeWebhook.deploymentVersionId || undefined,
|
||||
})
|
||||
|
||||
workflowsUpdated++
|
||||
|
||||
@@ -1,17 +1,16 @@
|
||||
import {
|
||||
buildCanonicalIndex,
|
||||
buildSubBlockValues,
|
||||
evaluateSubBlockCondition,
|
||||
hasAdvancedValues,
|
||||
isSubBlockFeatureEnabled,
|
||||
isSubBlockVisibleForMode,
|
||||
type SubBlockCondition,
|
||||
} from '@/lib/workflows/subblocks/visibility'
|
||||
import { getBlock } from '@/blocks/registry'
|
||||
import type { SubBlockConfig } from '@/blocks/types'
|
||||
import { AuthMode } from '@/blocks/types'
|
||||
import type { BlockState, SubBlockState, WorkflowState } from '@/stores/workflows/workflow/types'
|
||||
|
||||
/** Condition type for SubBlock visibility - mirrors the inline type from blocks/types.ts */
|
||||
interface SubBlockCondition {
|
||||
field: string
|
||||
value: string | number | boolean | Array<string | number | boolean> | undefined
|
||||
not?: boolean
|
||||
and?: SubBlockCondition
|
||||
}
|
||||
|
||||
// Credential types based on actual patterns in the codebase
|
||||
export enum CredentialType {
|
||||
OAUTH = 'oauth',
|
||||
@@ -118,32 +117,36 @@ export function extractRequiredCredentials(
|
||||
|
||||
/** Helper to check visibility, respecting mode and conditions */
|
||||
function isSubBlockVisible(block: BlockState, subBlockConfig: SubBlockConfig): boolean {
|
||||
if (!isSubBlockFeatureEnabled(subBlockConfig)) return false
|
||||
const mode = subBlockConfig.mode ?? 'both'
|
||||
if (mode === 'trigger' && !block?.triggerMode) return false
|
||||
if (mode === 'basic' && block?.advancedMode) return false
|
||||
if (mode === 'advanced' && !block?.advancedMode) return false
|
||||
|
||||
const values = buildSubBlockValues(block?.subBlocks || {})
|
||||
const blockConfig = getBlock(block.type)
|
||||
const blockSubBlocks = blockConfig?.subBlocks || []
|
||||
const canonicalIndex = buildCanonicalIndex(blockSubBlocks)
|
||||
const effectiveAdvanced =
|
||||
(block?.advancedMode ?? false) || hasAdvancedValues(blockSubBlocks, values, canonicalIndex)
|
||||
const canonicalModeOverrides = block.data?.canonicalModes
|
||||
if (!subBlockConfig.condition) return true
|
||||
|
||||
if (subBlockConfig.mode === 'trigger' && !block?.triggerMode) return false
|
||||
if (block?.triggerMode && subBlockConfig.mode && subBlockConfig.mode !== 'trigger') return false
|
||||
const condition =
|
||||
typeof subBlockConfig.condition === 'function'
|
||||
? subBlockConfig.condition()
|
||||
: subBlockConfig.condition
|
||||
|
||||
if (
|
||||
!isSubBlockVisibleForMode(
|
||||
subBlockConfig,
|
||||
effectiveAdvanced,
|
||||
canonicalIndex,
|
||||
values,
|
||||
canonicalModeOverrides
|
||||
)
|
||||
) {
|
||||
return false
|
||||
const evaluate = (cond: SubBlockCondition): boolean => {
|
||||
const currentValue = block?.subBlocks?.[cond.field]?.value
|
||||
const expected = cond.value
|
||||
|
||||
let match =
|
||||
expected === undefined
|
||||
? true
|
||||
: Array.isArray(expected)
|
||||
? expected.includes(currentValue as string)
|
||||
: currentValue === expected
|
||||
|
||||
if (cond.not) match = !match
|
||||
if (cond.and) match = match && evaluate(cond.and)
|
||||
|
||||
return match
|
||||
}
|
||||
|
||||
return evaluateSubBlockCondition(subBlockConfig.condition as SubBlockCondition, values)
|
||||
return evaluate(condition)
|
||||
}
|
||||
|
||||
// Sort: OAuth first, then secrets, alphabetically within each type
|
||||
|
||||
@@ -6,7 +6,6 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import type { Edge } from 'reactflow'
|
||||
import { z } from 'zod'
|
||||
import { parseResponseFormatSafely } from '@/lib/core/utils/response-format'
|
||||
import { getPersonalAndWorkspaceEnv } from '@/lib/environment/utils'
|
||||
import { clearExecutionCancellation } from '@/lib/execution/cancellation'
|
||||
import type { LoggingSession } from '@/lib/logs/execution/logging-session'
|
||||
@@ -18,6 +17,7 @@ import {
|
||||
import { TriggerUtils } from '@/lib/workflows/triggers/triggers'
|
||||
import { updateWorkflowRunCounts } from '@/lib/workflows/utils'
|
||||
import { Executor } from '@/executor'
|
||||
import { REFERENCE } from '@/executor/constants'
|
||||
import type { ExecutionSnapshot } from '@/executor/execution/snapshot'
|
||||
import type {
|
||||
ContextExtensions,
|
||||
@@ -25,7 +25,7 @@ import type {
|
||||
IterationContext,
|
||||
} from '@/executor/execution/types'
|
||||
import type { ExecutionResult, NormalizedBlockOutput } from '@/executor/types'
|
||||
import { resolveEnvVarReferences } from '@/executor/utils/reference-validation'
|
||||
import { createEnvVarPattern } from '@/executor/utils/reference-validation'
|
||||
import { Serializer } from '@/serializer'
|
||||
import { mergeSubblockState } from '@/stores/workflows/server-utils'
|
||||
|
||||
@@ -203,13 +203,25 @@ export async function executeWorkflowCore(
|
||||
(subAcc, [key, subBlock]) => {
|
||||
let value = subBlock.value
|
||||
|
||||
if (typeof value === 'string') {
|
||||
value = resolveEnvVarReferences(value, decryptedEnvVars, {
|
||||
resolveExactMatch: false,
|
||||
trimKeys: false,
|
||||
onMissing: 'keep',
|
||||
deep: false,
|
||||
}) as string
|
||||
if (
|
||||
typeof value === 'string' &&
|
||||
value.includes(REFERENCE.ENV_VAR_START) &&
|
||||
value.includes(REFERENCE.ENV_VAR_END)
|
||||
) {
|
||||
const envVarPattern = createEnvVarPattern()
|
||||
const matches = value.match(envVarPattern)
|
||||
if (matches) {
|
||||
for (const match of matches) {
|
||||
const varName = match.slice(
|
||||
REFERENCE.ENV_VAR_START.length,
|
||||
-REFERENCE.ENV_VAR_END.length
|
||||
)
|
||||
const decryptedValue = decryptedEnvVars[varName]
|
||||
if (decryptedValue !== undefined) {
|
||||
value = (value as string).replace(match, decryptedValue)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
subAcc[key] = value
|
||||
@@ -225,16 +237,26 @@ export async function executeWorkflowCore(
|
||||
// Process response format
|
||||
const processedBlockStates = Object.entries(currentBlockStates).reduce(
|
||||
(acc, [blockId, blockState]) => {
|
||||
const responseFormatValue = blockState.responseFormat
|
||||
if (responseFormatValue === undefined || responseFormatValue === null) {
|
||||
if (blockState.responseFormat && typeof blockState.responseFormat === 'string') {
|
||||
const responseFormatValue = blockState.responseFormat.trim()
|
||||
if (responseFormatValue && !responseFormatValue.startsWith(REFERENCE.START)) {
|
||||
try {
|
||||
acc[blockId] = {
|
||||
...blockState,
|
||||
responseFormat: JSON.parse(responseFormatValue),
|
||||
}
|
||||
} catch {
|
||||
acc[blockId] = {
|
||||
...blockState,
|
||||
responseFormat: undefined,
|
||||
}
|
||||
}
|
||||
} else {
|
||||
acc[blockId] = blockState
|
||||
}
|
||||
} else {
|
||||
acc[blockId] = blockState
|
||||
return acc
|
||||
}
|
||||
|
||||
const responseFormat = parseResponseFormatSafely(responseFormatValue, blockId, {
|
||||
allowReferences: true,
|
||||
})
|
||||
acc[blockId] = { ...blockState, responseFormat: responseFormat ?? undefined }
|
||||
return acc
|
||||
},
|
||||
{} as Record<string, Record<string, any>>
|
||||
|
||||
@@ -1,51 +0,0 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import {
|
||||
ensureBlockEnvVarsResolvable,
|
||||
ensureEnvVarsDecryptable,
|
||||
getPersonalAndWorkspaceEnv,
|
||||
} from '@/lib/environment/utils'
|
||||
import { loadDeployedWorkflowState } from '@/lib/workflows/persistence/utils'
|
||||
import { mergeSubblockState } from '@/stores/workflows/server-utils'
|
||||
|
||||
const logger = createLogger('ExecutionPreflight')
|
||||
|
||||
export interface EnvVarPreflightOptions {
|
||||
workflowId: string
|
||||
workspaceId: string
|
||||
envUserId: string
|
||||
requestId?: string
|
||||
}
|
||||
|
||||
/**
|
||||
* Preflight env var checks to avoid scheduling executions that will fail.
|
||||
* Always uses deployed workflow state since preflight is only done for async
|
||||
* executions which always run on deployed state.
|
||||
*/
|
||||
export async function preflightWorkflowEnvVars({
|
||||
workflowId,
|
||||
workspaceId,
|
||||
envUserId,
|
||||
requestId,
|
||||
}: EnvVarPreflightOptions): Promise<void> {
|
||||
const workflowData = await loadDeployedWorkflowState(workflowId)
|
||||
|
||||
if (!workflowData) {
|
||||
throw new Error('Workflow state not found')
|
||||
}
|
||||
|
||||
const mergedStates = mergeSubblockState(workflowData.blocks)
|
||||
const { personalEncrypted, workspaceEncrypted } = await getPersonalAndWorkspaceEnv(
|
||||
envUserId,
|
||||
workspaceId
|
||||
)
|
||||
const variables = { ...personalEncrypted, ...workspaceEncrypted }
|
||||
|
||||
await ensureBlockEnvVarsResolvable(mergedStates, variables, { requestId })
|
||||
await ensureEnvVarsDecryptable(variables, { requestId })
|
||||
|
||||
if (requestId) {
|
||||
logger.debug(`[${requestId}] Env var preflight passed`, { workflowId })
|
||||
} else {
|
||||
logger.debug('Env var preflight passed', { workflowId })
|
||||
}
|
||||
}
|
||||
@@ -495,7 +495,6 @@ export async function deployWorkflow(params: {
|
||||
}): Promise<{
|
||||
success: boolean
|
||||
version?: number
|
||||
deploymentVersionId?: string
|
||||
deployedAt?: Date
|
||||
currentState?: any
|
||||
error?: string
|
||||
@@ -534,7 +533,6 @@ export async function deployWorkflow(params: {
|
||||
.where(eq(workflowDeploymentVersion.workflowId, workflowId))
|
||||
|
||||
const nextVersion = Number(maxVersion) + 1
|
||||
const deploymentVersionId = uuidv4()
|
||||
|
||||
// Deactivate all existing versions
|
||||
await tx
|
||||
@@ -544,7 +542,7 @@ export async function deployWorkflow(params: {
|
||||
|
||||
// Create new deployment version
|
||||
await tx.insert(workflowDeploymentVersion).values({
|
||||
id: deploymentVersionId,
|
||||
id: uuidv4(),
|
||||
workflowId,
|
||||
version: nextVersion,
|
||||
state: currentState,
|
||||
@@ -564,10 +562,10 @@ export async function deployWorkflow(params: {
|
||||
// Note: Templates are NOT automatically updated on deployment
|
||||
// Template updates must be done explicitly through the "Update Template" button
|
||||
|
||||
return { version: nextVersion, deploymentVersionId }
|
||||
return nextVersion
|
||||
})
|
||||
|
||||
logger.info(`Deployed workflow ${workflowId} as v${deployedVersion.version}`)
|
||||
logger.info(`Deployed workflow ${workflowId} as v${deployedVersion}`)
|
||||
|
||||
if (workflowName) {
|
||||
try {
|
||||
@@ -584,7 +582,7 @@ export async function deployWorkflow(params: {
|
||||
workflowName,
|
||||
blocksCount: Object.keys(currentState.blocks).length,
|
||||
edgesCount: currentState.edges.length,
|
||||
version: deployedVersion.version,
|
||||
version: deployedVersion,
|
||||
loopsCount: Object.keys(currentState.loops).length,
|
||||
parallelsCount: Object.keys(currentState.parallels).length,
|
||||
blockTypes: JSON.stringify(blockTypeCounts),
|
||||
@@ -596,8 +594,7 @@ export async function deployWorkflow(params: {
|
||||
|
||||
return {
|
||||
success: true,
|
||||
version: deployedVersion.version,
|
||||
deploymentVersionId: deployedVersion.deploymentVersionId,
|
||||
version: deployedVersion,
|
||||
deployedAt: now,
|
||||
currentState,
|
||||
}
|
||||
|
||||
@@ -35,18 +35,11 @@ vi.mock('@sim/db', () => ({
|
||||
workflowSchedule: {
|
||||
workflowId: 'workflow_id',
|
||||
blockId: 'block_id',
|
||||
deploymentVersionId: 'deployment_version_id',
|
||||
},
|
||||
}))
|
||||
|
||||
vi.mock('drizzle-orm', () => ({
|
||||
eq: vi.fn((...args) => ({ type: 'eq', args })),
|
||||
and: vi.fn((...args) => ({ type: 'and', args })),
|
||||
sql: vi.fn((strings, ...values) => ({ type: 'sql', strings, values })),
|
||||
}))
|
||||
|
||||
vi.mock('@/lib/webhooks/deploy', () => ({
|
||||
cleanupWebhooksForWorkflow: vi.fn().mockResolvedValue(undefined),
|
||||
}))
|
||||
|
||||
vi.mock('@sim/logger', () => loggerMock)
|
||||
|
||||
@@ -1,8 +1,7 @@
|
||||
import { db, workflowSchedule } from '@sim/db'
|
||||
import { workflowSchedule } from '@sim/db'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import type { DbOrTx } from '@/lib/db/types'
|
||||
import { cleanupWebhooksForWorkflow } from '@/lib/webhooks/deploy'
|
||||
import type { BlockState } from '@/lib/workflows/schedules/utils'
|
||||
import { findScheduleBlocks, validateScheduleBlock } from '@/lib/workflows/schedules/validation'
|
||||
|
||||
@@ -27,8 +26,7 @@ export interface ScheduleDeployResult {
|
||||
export async function createSchedulesForDeploy(
|
||||
workflowId: string,
|
||||
blocks: Record<string, BlockState>,
|
||||
tx: DbOrTx,
|
||||
deploymentVersionId?: string
|
||||
tx: DbOrTx
|
||||
): Promise<ScheduleDeployResult> {
|
||||
const scheduleBlocks = findScheduleBlocks(blocks)
|
||||
|
||||
@@ -63,7 +61,6 @@ export async function createSchedulesForDeploy(
|
||||
const values = {
|
||||
id: scheduleId,
|
||||
workflowId,
|
||||
deploymentVersionId: deploymentVersionId || null,
|
||||
blockId,
|
||||
cronExpression: cronExpression!,
|
||||
triggerType: 'schedule',
|
||||
@@ -78,7 +75,6 @@ export async function createSchedulesForDeploy(
|
||||
const setValues = {
|
||||
blockId,
|
||||
cronExpression: cronExpression!,
|
||||
...(deploymentVersionId ? { deploymentVersionId } : {}),
|
||||
updatedAt: now,
|
||||
nextRunAt: nextRunAt!,
|
||||
timezone: timezone!,
|
||||
@@ -90,11 +86,7 @@ export async function createSchedulesForDeploy(
|
||||
.insert(workflowSchedule)
|
||||
.values(values)
|
||||
.onConflictDoUpdate({
|
||||
target: [
|
||||
workflowSchedule.workflowId,
|
||||
workflowSchedule.blockId,
|
||||
workflowSchedule.deploymentVersionId,
|
||||
],
|
||||
target: [workflowSchedule.workflowId, workflowSchedule.blockId],
|
||||
set: setValues,
|
||||
})
|
||||
|
||||
@@ -117,36 +109,8 @@ export async function createSchedulesForDeploy(
|
||||
* Delete all schedules for a workflow
|
||||
* This should be called within a database transaction during undeploy
|
||||
*/
|
||||
export async function deleteSchedulesForWorkflow(
|
||||
workflowId: string,
|
||||
tx: DbOrTx,
|
||||
deploymentVersionId?: string
|
||||
): Promise<void> {
|
||||
await tx
|
||||
.delete(workflowSchedule)
|
||||
.where(
|
||||
deploymentVersionId
|
||||
? and(
|
||||
eq(workflowSchedule.workflowId, workflowId),
|
||||
eq(workflowSchedule.deploymentVersionId, deploymentVersionId)
|
||||
)
|
||||
: eq(workflowSchedule.workflowId, workflowId)
|
||||
)
|
||||
export async function deleteSchedulesForWorkflow(workflowId: string, tx: DbOrTx): Promise<void> {
|
||||
await tx.delete(workflowSchedule).where(eq(workflowSchedule.workflowId, workflowId))
|
||||
|
||||
logger.info(
|
||||
deploymentVersionId
|
||||
? `Deleted schedules for workflow ${workflowId} deployment ${deploymentVersionId}`
|
||||
: `Deleted all schedules for workflow ${workflowId}`
|
||||
)
|
||||
}
|
||||
|
||||
export async function cleanupDeploymentVersion(params: {
|
||||
workflowId: string
|
||||
workflow: Record<string, unknown>
|
||||
requestId: string
|
||||
deploymentVersionId: string
|
||||
}): Promise<void> {
|
||||
const { workflowId, workflow, requestId, deploymentVersionId } = params
|
||||
await cleanupWebhooksForWorkflow(workflowId, workflow, requestId, deploymentVersionId)
|
||||
await deleteSchedulesForWorkflow(workflowId, db, deploymentVersionId)
|
||||
logger.info(`Deleted all schedules for workflow ${workflowId}`)
|
||||
}
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
export {
|
||||
cleanupDeploymentVersion,
|
||||
createSchedulesForDeploy,
|
||||
deleteSchedulesForWorkflow,
|
||||
type ScheduleDeployResult,
|
||||
|
||||
@@ -1,269 +0,0 @@
|
||||
import { getEnv, isTruthy } from '@/lib/core/config/env'
import type { SubBlockConfig } from '@/blocks/types'

/** Rendering mode for a canonical parameter group. */
export type CanonicalMode = 'basic' | 'advanced'

/**
 * A set of subblocks that all map onto one canonical parameter
 * (e.g. a channel selector and a manual channel-ID input).
 */
export interface CanonicalGroup {
  canonicalId: string
  // Subblock shown in basic mode (at most one per group).
  basicId?: string
  // Subblocks shown in advanced mode.
  advancedIds: string[]
}

/** Lookup structures produced by buildCanonicalIndex for one block. */
export interface CanonicalIndex {
  // Group definition keyed by canonical parameter id.
  groupsById: Record<string, CanonicalGroup>
  // Reverse map: member subblock id -> its canonical parameter id.
  canonicalIdBySubBlockId: Record<string, string>
}

/**
 * Visibility condition attached to a subblock config.
 * `and` chains an additional condition that must also hold.
 */
export interface SubBlockCondition {
  field: string
  value: string | number | boolean | Array<string | number | boolean> | undefined
  // When true, the field/value comparison is negated.
  not?: boolean
  and?: SubBlockCondition
}

/** Per-group user overrides of the resolved canonical mode. */
export interface CanonicalModeOverrides {
  [canonicalId: string]: CanonicalMode | undefined
}

/** Result of resolving a canonical group's basic/advanced values. */
export interface CanonicalValueSelection {
  basicValue: unknown
  advancedValue: unknown
  // Id of the advanced subblock the value was taken from, if any.
  advancedSourceId?: string
}
|
||||
|
||||
/**
|
||||
* Build a flat map of subblock values keyed by subblock id.
|
||||
*/
|
||||
export function buildSubBlockValues(
|
||||
subBlocks: Record<string, { value?: unknown } | null | undefined>
|
||||
): Record<string, unknown> {
|
||||
return Object.entries(subBlocks).reduce<Record<string, unknown>>((acc, [key, subBlock]) => {
|
||||
acc[key] = subBlock?.value
|
||||
return acc
|
||||
}, {})
|
||||
}
|
||||
|
||||
/**
|
||||
* Build canonical group indices for a block's subblocks.
|
||||
*/
|
||||
export function buildCanonicalIndex(subBlocks: SubBlockConfig[]): CanonicalIndex {
|
||||
const groupsById: Record<string, CanonicalGroup> = {}
|
||||
const canonicalIdBySubBlockId: Record<string, string> = {}
|
||||
|
||||
subBlocks.forEach((subBlock) => {
|
||||
if (!subBlock.canonicalParamId) return
|
||||
const canonicalId = subBlock.canonicalParamId
|
||||
if (!groupsById[canonicalId]) {
|
||||
groupsById[canonicalId] = { canonicalId, advancedIds: [] }
|
||||
}
|
||||
const group = groupsById[canonicalId]
|
||||
if (subBlock.mode === 'advanced') {
|
||||
group.advancedIds.push(subBlock.id)
|
||||
} else {
|
||||
group.basicId = subBlock.id
|
||||
}
|
||||
canonicalIdBySubBlockId[subBlock.id] = canonicalId
|
||||
})
|
||||
|
||||
return { groupsById, canonicalIdBySubBlockId }
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolve if a canonical group is a swap pair (basic + advanced).
|
||||
*/
|
||||
export function isCanonicalPair(group?: CanonicalGroup): boolean {
|
||||
return Boolean(group?.basicId && group?.advancedIds?.length)
|
||||
}
|
||||
|
||||
/**
|
||||
* Determine the active mode for a canonical group.
|
||||
*/
|
||||
export function resolveCanonicalMode(
|
||||
group: CanonicalGroup,
|
||||
values: Record<string, unknown>,
|
||||
overrides?: CanonicalModeOverrides
|
||||
): CanonicalMode {
|
||||
const override = overrides?.[group.canonicalId]
|
||||
if (override === 'advanced' && group.advancedIds.length > 0) return 'advanced'
|
||||
if (override === 'basic' && group.basicId) return 'basic'
|
||||
|
||||
const { basicValue, advancedValue } = getCanonicalValues(group, values)
|
||||
const hasBasic = isNonEmptyValue(basicValue)
|
||||
const hasAdvanced = isNonEmptyValue(advancedValue)
|
||||
|
||||
if (!group.basicId) return 'advanced'
|
||||
if (!hasBasic && hasAdvanced) return 'advanced'
|
||||
return 'basic'
|
||||
}
|
||||
|
||||
/**
|
||||
* Evaluate a subblock condition against a map of raw values.
|
||||
*/
|
||||
export function evaluateSubBlockCondition(
|
||||
condition: SubBlockCondition | (() => SubBlockCondition) | undefined,
|
||||
values: Record<string, unknown>
|
||||
): boolean {
|
||||
if (!condition) return true
|
||||
const actual = typeof condition === 'function' ? condition() : condition
|
||||
const fieldValue = values[actual.field]
|
||||
const valueMatch = Array.isArray(actual.value)
|
||||
? fieldValue != null &&
|
||||
(actual.not
|
||||
? !actual.value.includes(fieldValue as any)
|
||||
: actual.value.includes(fieldValue as any))
|
||||
: actual.not
|
||||
? fieldValue !== actual.value
|
||||
: fieldValue === actual.value
|
||||
const andMatch = !actual.and
|
||||
? true
|
||||
: (() => {
|
||||
const andFieldValue = values[actual.and!.field]
|
||||
const andValueMatch = Array.isArray(actual.and!.value)
|
||||
? andFieldValue != null &&
|
||||
(actual.and!.not
|
||||
? !actual.and!.value.includes(andFieldValue as any)
|
||||
: actual.and!.value.includes(andFieldValue as any))
|
||||
: actual.and!.not
|
||||
? andFieldValue !== actual.and!.value
|
||||
: andFieldValue === actual.and!.value
|
||||
return andValueMatch
|
||||
})()
|
||||
|
||||
return valueMatch && andMatch
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a value is considered set for advanced visibility/selection.
|
||||
*/
|
||||
export function isNonEmptyValue(value: unknown): boolean {
|
||||
if (value === null || value === undefined) return false
|
||||
if (typeof value === 'string') return value.trim().length > 0
|
||||
if (Array.isArray(value)) return value.length > 0
|
||||
return true
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolve basic and advanced values for a canonical group.
|
||||
*/
|
||||
export function getCanonicalValues(
|
||||
group: CanonicalGroup,
|
||||
values: Record<string, unknown>
|
||||
): CanonicalValueSelection {
|
||||
const basicValue = group.basicId ? values[group.basicId] : undefined
|
||||
let advancedValue: unknown
|
||||
let advancedSourceId: string | undefined
|
||||
|
||||
group.advancedIds.forEach((advancedId) => {
|
||||
if (advancedValue !== undefined) return
|
||||
const candidate = values[advancedId]
|
||||
if (isNonEmptyValue(candidate)) {
|
||||
advancedValue = candidate
|
||||
advancedSourceId = advancedId
|
||||
}
|
||||
})
|
||||
|
||||
return { basicValue, advancedValue, advancedSourceId }
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a block has any standalone advanced-only fields (not part of canonical pairs).
|
||||
* These require the block-level advanced mode toggle to be visible.
|
||||
*/
|
||||
export function hasStandaloneAdvancedFields(
|
||||
subBlocks: SubBlockConfig[],
|
||||
canonicalIndex: CanonicalIndex
|
||||
): boolean {
|
||||
for (const subBlock of subBlocks) {
|
||||
if (subBlock.mode !== 'advanced') continue
|
||||
if (!canonicalIndex.canonicalIdBySubBlockId[subBlock.id]) return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if any advanced-only or canonical advanced values are present.
|
||||
*/
|
||||
export function hasAdvancedValues(
|
||||
subBlocks: SubBlockConfig[],
|
||||
values: Record<string, unknown>,
|
||||
canonicalIndex: CanonicalIndex
|
||||
): boolean {
|
||||
const checkedCanonical = new Set<string>()
|
||||
|
||||
for (const subBlock of subBlocks) {
|
||||
const canonicalId = canonicalIndex.canonicalIdBySubBlockId[subBlock.id]
|
||||
if (canonicalId) {
|
||||
const group = canonicalIndex.groupsById[canonicalId]
|
||||
if (group && isCanonicalPair(group) && !checkedCanonical.has(canonicalId)) {
|
||||
checkedCanonical.add(canonicalId)
|
||||
const { advancedValue } = getCanonicalValues(group, values)
|
||||
if (isNonEmptyValue(advancedValue)) return true
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
if (subBlock.mode === 'advanced' && isNonEmptyValue(values[subBlock.id])) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
/**
|
||||
* Determine whether a subblock is visible based on mode and canonical swaps.
|
||||
*/
|
||||
export function isSubBlockVisibleForMode(
|
||||
subBlock: SubBlockConfig,
|
||||
displayAdvancedOptions: boolean,
|
||||
canonicalIndex: CanonicalIndex,
|
||||
values: Record<string, unknown>,
|
||||
overrides?: CanonicalModeOverrides
|
||||
): boolean {
|
||||
const canonicalId = canonicalIndex.canonicalIdBySubBlockId[subBlock.id]
|
||||
const group = canonicalId ? canonicalIndex.groupsById[canonicalId] : undefined
|
||||
|
||||
if (group && isCanonicalPair(group)) {
|
||||
const mode = resolveCanonicalMode(group, values, overrides)
|
||||
if (mode === 'advanced') return group.advancedIds.includes(subBlock.id)
|
||||
return group.basicId === subBlock.id
|
||||
}
|
||||
|
||||
if (subBlock.mode === 'basic' && displayAdvancedOptions) return false
|
||||
if (subBlock.mode === 'advanced' && !displayAdvancedOptions) return false
|
||||
return true
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolve the dependency value for a dependsOn key, honoring canonical swaps.
|
||||
*/
|
||||
export function resolveDependencyValue(
|
||||
dependencyKey: string,
|
||||
values: Record<string, unknown>,
|
||||
canonicalIndex: CanonicalIndex,
|
||||
overrides?: CanonicalModeOverrides
|
||||
): unknown {
|
||||
const canonicalId =
|
||||
canonicalIndex.groupsById[dependencyKey]?.canonicalId ||
|
||||
canonicalIndex.canonicalIdBySubBlockId[dependencyKey]
|
||||
|
||||
if (!canonicalId) {
|
||||
return values[dependencyKey]
|
||||
}
|
||||
|
||||
const group = canonicalIndex.groupsById[canonicalId]
|
||||
if (!group) return values[dependencyKey]
|
||||
|
||||
const { basicValue, advancedValue } = getCanonicalValues(group, values)
|
||||
const mode = resolveCanonicalMode(group, values, overrides)
|
||||
if (mode === 'advanced') return advancedValue ?? basicValue
|
||||
return basicValue ?? advancedValue
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a subblock is gated by a feature flag.
|
||||
*/
|
||||
export function isSubBlockFeatureEnabled(subBlock: SubBlockConfig): boolean {
|
||||
if (!subBlock.requiresFeature) return true
|
||||
return isTruthy(getEnv(subBlock.requiresFeature))
|
||||
}
|
||||
@@ -515,131 +515,103 @@ describe('Serializer', () => {
|
||||
})
|
||||
})
|
||||
|
||||
describe('canonical mode field selection', () => {
|
||||
it.concurrent('should use advanced value when canonicalModes specifies advanced', () => {
|
||||
/**
|
||||
* Advanced mode field filtering tests
|
||||
*/
|
||||
describe('advanced mode field filtering', () => {
|
||||
it.concurrent('should include all fields when block is in advanced mode', () => {
|
||||
const serializer = new Serializer()
|
||||
|
||||
const block: any = {
|
||||
const advancedModeBlock: any = {
|
||||
id: 'slack-1',
|
||||
type: 'slack',
|
||||
name: 'Test Slack Block',
|
||||
position: { x: 0, y: 0 },
|
||||
data: {
|
||||
canonicalModes: { channel: 'advanced' },
|
||||
},
|
||||
advancedMode: true, // Advanced mode enabled
|
||||
subBlocks: {
|
||||
operation: { value: 'send' },
|
||||
destinationType: { value: 'channel' },
|
||||
channel: { value: 'general' },
|
||||
manualChannel: { value: 'C1234567890' },
|
||||
text: { value: 'Hello world' },
|
||||
username: { value: 'bot' },
|
||||
channel: { value: 'general' }, // basic mode field
|
||||
manualChannel: { value: 'C1234567890' }, // advanced mode field
|
||||
text: { value: 'Hello world' }, // both mode field
|
||||
username: { value: 'bot' }, // both mode field
|
||||
},
|
||||
outputs: {},
|
||||
enabled: true,
|
||||
}
|
||||
|
||||
const serialized = serializer.serializeWorkflow({ 'slack-1': block }, [], {})
|
||||
const slackBlock = serialized.blocks.find((b) => b.id === 'slack-1')
|
||||
const serialized = serializer.serializeWorkflow({ 'slack-1': advancedModeBlock }, [], {})
|
||||
|
||||
const slackBlock = serialized.blocks.find((b) => b.id === 'slack-1')
|
||||
expect(slackBlock).toBeDefined()
|
||||
expect(slackBlock?.config.params.channel).toBe('C1234567890')
|
||||
expect(slackBlock?.config.params.manualChannel).toBeUndefined()
|
||||
|
||||
expect(slackBlock?.config.params.channel).toBe('general')
|
||||
expect(slackBlock?.config.params.manualChannel).toBe('C1234567890')
|
||||
expect(slackBlock?.config.params.text).toBe('Hello world')
|
||||
expect(slackBlock?.config.params.username).toBe('bot')
|
||||
})
|
||||
|
||||
it.concurrent('should use basic value when canonicalModes specifies basic', () => {
|
||||
it.concurrent('should exclude advanced-only fields when block is in basic mode', () => {
|
||||
const serializer = new Serializer()
|
||||
|
||||
const block: any = {
|
||||
const basicModeBlock: any = {
|
||||
id: 'slack-1',
|
||||
type: 'slack',
|
||||
name: 'Test Slack Block',
|
||||
position: { x: 0, y: 0 },
|
||||
data: {
|
||||
canonicalModes: { channel: 'basic' },
|
||||
},
|
||||
advancedMode: false, // Basic mode enabled
|
||||
subBlocks: {
|
||||
operation: { value: 'send' },
|
||||
destinationType: { value: 'channel' },
|
||||
channel: { value: 'general' },
|
||||
manualChannel: { value: 'C1234567890' },
|
||||
text: { value: 'Hello world' },
|
||||
username: { value: 'bot' },
|
||||
channel: { value: 'general' }, // basic mode field
|
||||
manualChannel: { value: 'C1234567890' }, // advanced mode field
|
||||
text: { value: 'Hello world' }, // both mode field
|
||||
username: { value: 'bot' }, // both mode field
|
||||
},
|
||||
outputs: {},
|
||||
enabled: true,
|
||||
}
|
||||
|
||||
const serialized = serializer.serializeWorkflow({ 'slack-1': block }, [], {})
|
||||
const slackBlock = serialized.blocks.find((b) => b.id === 'slack-1')
|
||||
const serialized = serializer.serializeWorkflow({ 'slack-1': basicModeBlock }, [], {})
|
||||
|
||||
const slackBlock = serialized.blocks.find((b) => b.id === 'slack-1')
|
||||
expect(slackBlock).toBeDefined()
|
||||
|
||||
expect(slackBlock?.config.params.channel).toBe('general')
|
||||
expect(slackBlock?.config.params.manualChannel).toBeUndefined()
|
||||
expect(slackBlock?.config.params.text).toBe('Hello world')
|
||||
expect(slackBlock?.config.params.username).toBe('bot')
|
||||
})
|
||||
|
||||
it.concurrent('should fall back to legacy advancedMode when canonicalModes not set', () => {
|
||||
const serializer = new Serializer()
|
||||
it.concurrent(
|
||||
'should exclude advanced-only fields when advancedMode is undefined (defaults to basic mode)',
|
||||
() => {
|
||||
const serializer = new Serializer()
|
||||
|
||||
const block: any = {
|
||||
id: 'slack-1',
|
||||
type: 'slack',
|
||||
name: 'Test Slack Block',
|
||||
position: { x: 0, y: 0 },
|
||||
advancedMode: true,
|
||||
subBlocks: {
|
||||
operation: { value: 'send' },
|
||||
destinationType: { value: 'channel' },
|
||||
channel: { value: 'general' },
|
||||
manualChannel: { value: 'C1234567890' },
|
||||
text: { value: 'Hello world' },
|
||||
username: { value: 'bot' },
|
||||
},
|
||||
outputs: {},
|
||||
enabled: true,
|
||||
const defaultModeBlock: any = {
|
||||
id: 'slack-1',
|
||||
type: 'slack',
|
||||
name: 'Test Slack Block',
|
||||
position: { x: 0, y: 0 },
|
||||
subBlocks: {
|
||||
channel: { value: 'general' },
|
||||
manualChannel: { value: 'C1234567890' },
|
||||
text: { value: 'Hello world' },
|
||||
username: { value: 'bot' },
|
||||
},
|
||||
outputs: {},
|
||||
enabled: true,
|
||||
}
|
||||
|
||||
const serialized = serializer.serializeWorkflow({ 'slack-1': defaultModeBlock }, [], {})
|
||||
|
||||
const slackBlock = serialized.blocks.find((b) => b.id === 'slack-1')
|
||||
expect(slackBlock).toBeDefined()
|
||||
|
||||
expect(slackBlock?.config.params.channel).toBe('general')
|
||||
expect(slackBlock?.config.params.manualChannel).toBeUndefined()
|
||||
expect(slackBlock?.config.params.text).toBe('Hello world')
|
||||
expect(slackBlock?.config.params.username).toBe('bot')
|
||||
}
|
||||
)
|
||||
|
||||
const serialized = serializer.serializeWorkflow({ 'slack-1': block }, [], {})
|
||||
const slackBlock = serialized.blocks.find((b) => b.id === 'slack-1')
|
||||
|
||||
expect(slackBlock).toBeDefined()
|
||||
expect(slackBlock?.config.params.channel).toBe('C1234567890')
|
||||
expect(slackBlock?.config.params.manualChannel).toBeUndefined()
|
||||
})
|
||||
|
||||
it.concurrent('should use basic value by default when no mode specified', () => {
|
||||
const serializer = new Serializer()
|
||||
|
||||
const block: any = {
|
||||
id: 'slack-1',
|
||||
type: 'slack',
|
||||
name: 'Test Slack Block',
|
||||
position: { x: 0, y: 0 },
|
||||
subBlocks: {
|
||||
operation: { value: 'send' },
|
||||
destinationType: { value: 'channel' },
|
||||
channel: { value: 'general' },
|
||||
manualChannel: { value: 'C1234567890' },
|
||||
text: { value: 'Hello world' },
|
||||
username: { value: 'bot' },
|
||||
},
|
||||
outputs: {},
|
||||
enabled: true,
|
||||
}
|
||||
|
||||
const serialized = serializer.serializeWorkflow({ 'slack-1': block }, [], {})
|
||||
const slackBlock = serialized.blocks.find((b) => b.id === 'slack-1')
|
||||
|
||||
expect(slackBlock).toBeDefined()
|
||||
expect(slackBlock?.config.params.channel).toBe('general')
|
||||
expect(slackBlock?.config.params.manualChannel).toBeUndefined()
|
||||
})
|
||||
|
||||
it.concurrent('should preserve advanced-only values when present in basic mode', () => {
|
||||
it.concurrent('should filter memories field correctly in agent blocks', () => {
|
||||
const serializer = new Serializer()
|
||||
|
||||
const agentInBasicMode: any = {
|
||||
@@ -665,9 +637,7 @@ describe('Serializer', () => {
|
||||
|
||||
expect(agentBlock?.config.params.systemPrompt).toBe('You are helpful')
|
||||
expect(agentBlock?.config.params.userPrompt).toBe('Hello')
|
||||
expect(agentBlock?.config.params.memories).toEqual([
|
||||
{ role: 'user', content: 'My name is John' },
|
||||
])
|
||||
expect(agentBlock?.config.params.memories).toBeUndefined()
|
||||
expect(agentBlock?.config.params.model).toBe('claude-3-sonnet')
|
||||
})
|
||||
|
||||
|
||||
@@ -1,17 +1,9 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import type { Edge } from 'reactflow'
|
||||
import { parseResponseFormatSafely } from '@/lib/core/utils/response-format'
|
||||
import { BlockPathCalculator } from '@/lib/workflows/blocks/block-path-calculator'
|
||||
import {
|
||||
buildCanonicalIndex,
|
||||
buildSubBlockValues,
|
||||
evaluateSubBlockCondition,
|
||||
getCanonicalValues,
|
||||
isNonEmptyValue,
|
||||
isSubBlockFeatureEnabled,
|
||||
} from '@/lib/workflows/subblocks/visibility'
|
||||
import { getBlock } from '@/blocks'
|
||||
import type { SubBlockConfig } from '@/blocks/types'
|
||||
import { REFERENCE } from '@/executor/constants'
|
||||
import type { SerializedBlock, SerializedWorkflow } from '@/serializer/types'
|
||||
import type { BlockState, Loop, Parallel } from '@/stores/workflows/workflow/types'
|
||||
import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/workflow/utils'
|
||||
@@ -35,37 +27,67 @@ export class WorkflowValidationError extends Error {
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper function to check if a subblock should be serialized.
|
||||
* Helper function to check if a subblock should be included in serialization based on current mode
|
||||
*/
|
||||
function shouldSerializeSubBlock(
|
||||
subBlockConfig: SubBlockConfig,
|
||||
values: Record<string, unknown>,
|
||||
displayAdvancedOptions: boolean,
|
||||
isTriggerContext: boolean,
|
||||
isTriggerCategory: boolean,
|
||||
canonicalIndex: ReturnType<typeof buildCanonicalIndex>
|
||||
function shouldIncludeField(subBlockConfig: SubBlockConfig, isAdvancedMode: boolean): boolean {
|
||||
const fieldMode = subBlockConfig.mode
|
||||
|
||||
if (fieldMode === 'advanced' && !isAdvancedMode) {
|
||||
return false // Skip advanced-only fields when in basic mode
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
/**
|
||||
* Evaluates a condition object against current field values.
|
||||
* Used to determine if a conditionally-visible field should be included in params.
|
||||
*/
|
||||
function evaluateCondition(
|
||||
condition:
|
||||
| {
|
||||
field: string
|
||||
value: any
|
||||
not?: boolean
|
||||
and?: { field: string; value: any; not?: boolean }
|
||||
}
|
||||
| (() => {
|
||||
field: string
|
||||
value: any
|
||||
not?: boolean
|
||||
and?: { field: string; value: any; not?: boolean }
|
||||
})
|
||||
| undefined,
|
||||
values: Record<string, any>
|
||||
): boolean {
|
||||
if (!isSubBlockFeatureEnabled(subBlockConfig)) return false
|
||||
if (!condition) return true
|
||||
|
||||
if (subBlockConfig.mode === 'trigger') {
|
||||
if (!isTriggerContext && !isTriggerCategory) return false
|
||||
} else if (isTriggerContext && !isTriggerCategory) {
|
||||
return false
|
||||
}
|
||||
const actual = typeof condition === 'function' ? condition() : condition
|
||||
const fieldValue = values[actual.field]
|
||||
|
||||
const isCanonicalMember = Boolean(canonicalIndex.canonicalIdBySubBlockId[subBlockConfig.id])
|
||||
if (isCanonicalMember) {
|
||||
return evaluateSubBlockCondition(subBlockConfig.condition, values)
|
||||
}
|
||||
const valueMatch = Array.isArray(actual.value)
|
||||
? fieldValue != null &&
|
||||
(actual.not ? !actual.value.includes(fieldValue) : actual.value.includes(fieldValue))
|
||||
: actual.not
|
||||
? fieldValue !== actual.value
|
||||
: fieldValue === actual.value
|
||||
|
||||
if (subBlockConfig.mode === 'advanced' && !displayAdvancedOptions) {
|
||||
return isNonEmptyValue(values[subBlockConfig.id])
|
||||
}
|
||||
if (subBlockConfig.mode === 'basic' && displayAdvancedOptions) {
|
||||
return false
|
||||
}
|
||||
const andMatch = !actual.and
|
||||
? true
|
||||
: (() => {
|
||||
const andFieldValue = values[actual.and!.field]
|
||||
const andValueMatch = Array.isArray(actual.and!.value)
|
||||
? andFieldValue != null &&
|
||||
(actual.and!.not
|
||||
? !actual.and!.value.includes(andFieldValue)
|
||||
: actual.and!.value.includes(andFieldValue))
|
||||
: actual.and!.not
|
||||
? andFieldValue !== actual.and!.value
|
||||
: andFieldValue === actual.and!.value
|
||||
return andValueMatch
|
||||
})()
|
||||
|
||||
return evaluateSubBlockCondition(subBlockConfig.condition, values)
|
||||
return valueMatch && andMatch
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -219,12 +241,16 @@ export class Serializer {
|
||||
// Extract parameters from UI state
|
||||
const params = this.extractParams(block)
|
||||
|
||||
const isTriggerCategory = blockConfig.category === 'triggers'
|
||||
if (block.triggerMode === true || isTriggerCategory) {
|
||||
params.triggerMode = true
|
||||
}
|
||||
if (block.advancedMode === true) {
|
||||
params.advancedMode = true
|
||||
try {
|
||||
const isTriggerCategory = blockConfig.category === 'triggers'
|
||||
if (block.triggerMode === true || isTriggerCategory) {
|
||||
params.triggerMode = true
|
||||
}
|
||||
if (block.advancedMode === true) {
|
||||
params.advancedMode = true
|
||||
}
|
||||
} catch (_) {
|
||||
// no-op: conservative, avoid blocking serialization if blockConfig is unexpected
|
||||
}
|
||||
|
||||
// Validate required fields that only users can provide (before execution starts)
|
||||
@@ -245,7 +271,16 @@ export class Serializer {
|
||||
// For non-custom tools, we determine the tool ID
|
||||
const nonCustomTools = tools.filter((tool: any) => tool.type !== 'custom-tool')
|
||||
if (nonCustomTools.length > 0) {
|
||||
toolId = this.selectToolId(blockConfig, params)
|
||||
try {
|
||||
toolId = blockConfig.tools.config?.tool
|
||||
? blockConfig.tools.config.tool(params)
|
||||
: blockConfig.tools.access[0]
|
||||
} catch (error) {
|
||||
logger.warn('Tool selection failed during serialization, using default:', {
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
})
|
||||
toolId = blockConfig.tools.access[0]
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error('Error processing tools in agent block:', { error })
|
||||
@@ -254,7 +289,16 @@ export class Serializer {
|
||||
}
|
||||
} else {
|
||||
// For non-agent blocks, get tool ID from block config as usual
|
||||
toolId = this.selectToolId(blockConfig, params)
|
||||
try {
|
||||
toolId = blockConfig.tools.config?.tool
|
||||
? blockConfig.tools.config.tool(params)
|
||||
: blockConfig.tools.access[0]
|
||||
} catch (error) {
|
||||
logger.warn('Tool selection failed during serialization, using default:', {
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
})
|
||||
toolId = blockConfig.tools.access[0]
|
||||
}
|
||||
}
|
||||
|
||||
// Get inputs from block config
|
||||
@@ -278,10 +322,7 @@ export class Serializer {
|
||||
// Include response format fields if available
|
||||
...(params.responseFormat
|
||||
? {
|
||||
responseFormat:
|
||||
parseResponseFormatSafely(params.responseFormat, block.id, {
|
||||
allowReferences: true,
|
||||
}) ?? undefined,
|
||||
responseFormat: this.parseResponseFormatSafely(params.responseFormat),
|
||||
}
|
||||
: {}),
|
||||
},
|
||||
@@ -296,9 +337,52 @@ export class Serializer {
|
||||
}
|
||||
}
|
||||
|
||||
private parseResponseFormatSafely(responseFormat: any): any {
|
||||
if (!responseFormat) {
|
||||
return undefined
|
||||
}
|
||||
|
||||
// If already an object, return as-is
|
||||
if (typeof responseFormat === 'object' && responseFormat !== null) {
|
||||
return responseFormat
|
||||
}
|
||||
|
||||
// Handle string values
|
||||
if (typeof responseFormat === 'string') {
|
||||
const trimmedValue = responseFormat.trim()
|
||||
|
||||
// Check for variable references like <start.input>
|
||||
if (trimmedValue.startsWith(REFERENCE.START) && trimmedValue.includes(REFERENCE.END)) {
|
||||
// Keep variable references as-is
|
||||
return trimmedValue
|
||||
}
|
||||
|
||||
if (trimmedValue === '') {
|
||||
return undefined
|
||||
}
|
||||
|
||||
// Try to parse as JSON
|
||||
try {
|
||||
return JSON.parse(trimmedValue)
|
||||
} catch (error) {
|
||||
// If parsing fails, return undefined to avoid crashes
|
||||
// This allows the workflow to continue without structured response format
|
||||
logger.warn('Failed to parse response format as JSON in serializer, using undefined:', {
|
||||
value: trimmedValue,
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
})
|
||||
return undefined
|
||||
}
|
||||
}
|
||||
|
||||
// For any other type, return undefined
|
||||
return undefined
|
||||
}
|
||||
|
||||
private extractParams(block: BlockState): Record<string, any> {
|
||||
// Special handling for subflow blocks (loops, parallels, etc.)
|
||||
if (block.type === 'loop' || block.type === 'parallel') {
|
||||
return {}
|
||||
return {} // Loop and parallel blocks don't have traditional params
|
||||
}
|
||||
|
||||
const blockConfig = getBlock(block.type)
|
||||
@@ -307,42 +391,43 @@ export class Serializer {
|
||||
}
|
||||
|
||||
const params: Record<string, any> = {}
|
||||
const legacyAdvancedMode = block.advancedMode ?? false
|
||||
const canonicalModeOverrides = block.data?.canonicalModes
|
||||
const isAdvancedMode = block.advancedMode ?? false
|
||||
const isStarterBlock = block.type === 'starter'
|
||||
const isAgentBlock = block.type === 'agent'
|
||||
const isTriggerContext = block.triggerMode ?? false
|
||||
const isTriggerCategory = blockConfig.category === 'triggers'
|
||||
const canonicalIndex = buildCanonicalIndex(blockConfig.subBlocks)
|
||||
const allValues = buildSubBlockValues(block.subBlocks)
|
||||
|
||||
// First pass: collect ALL raw values for condition evaluation
|
||||
const allValues: Record<string, any> = {}
|
||||
Object.entries(block.subBlocks).forEach(([id, subBlock]) => {
|
||||
allValues[id] = subBlock.value
|
||||
})
|
||||
|
||||
// Second pass: filter by mode and conditions
|
||||
Object.entries(block.subBlocks).forEach(([id, subBlock]) => {
|
||||
const matchingConfigs = blockConfig.subBlocks.filter((config) => config.id === id)
|
||||
|
||||
// Include field if it matches current mode OR if it's the starter inputFormat with values
|
||||
const hasStarterInputFormatValues =
|
||||
isStarterBlock &&
|
||||
id === 'inputFormat' &&
|
||||
Array.isArray(subBlock.value) &&
|
||||
subBlock.value.length > 0
|
||||
|
||||
// Include legacy agent block fields (systemPrompt, userPrompt, memories) even if not in current config
|
||||
// This ensures backward compatibility with old workflows that were exported before the messages array migration
|
||||
const isLegacyAgentField =
|
||||
isAgentBlock && ['systemPrompt', 'userPrompt', 'memories'].includes(id)
|
||||
|
||||
const shouldInclude =
|
||||
matchingConfigs.length === 0 ||
|
||||
matchingConfigs.some((config) =>
|
||||
shouldSerializeSubBlock(
|
||||
config,
|
||||
allValues,
|
||||
legacyAdvancedMode,
|
||||
isTriggerContext,
|
||||
isTriggerCategory,
|
||||
canonicalIndex
|
||||
)
|
||||
)
|
||||
const anyConditionMet =
|
||||
matchingConfigs.length === 0
|
||||
? true
|
||||
: matchingConfigs.some(
|
||||
(config) =>
|
||||
shouldIncludeField(config, isAdvancedMode) &&
|
||||
evaluateCondition(config.condition, allValues)
|
||||
)
|
||||
|
||||
if (
|
||||
(matchingConfigs.length > 0 && shouldInclude) ||
|
||||
(matchingConfigs.length > 0 && anyConditionMet) ||
|
||||
hasStarterInputFormatValues ||
|
||||
isLegacyAgentField
|
||||
) {
|
||||
@@ -350,38 +435,56 @@ export class Serializer {
|
||||
}
|
||||
})
|
||||
|
||||
// Then check for any subBlocks with default values
|
||||
blockConfig.subBlocks.forEach((subBlockConfig) => {
|
||||
const id = subBlockConfig.id
|
||||
if (
|
||||
params[id] == null &&
|
||||
(params[id] === null || params[id] === undefined) &&
|
||||
subBlockConfig.value &&
|
||||
shouldSerializeSubBlock(
|
||||
subBlockConfig,
|
||||
allValues,
|
||||
legacyAdvancedMode,
|
||||
isTriggerContext,
|
||||
isTriggerCategory,
|
||||
canonicalIndex
|
||||
)
|
||||
shouldIncludeField(subBlockConfig, isAdvancedMode)
|
||||
) {
|
||||
// If the value is absent and there's a default value function, use it
|
||||
params[id] = subBlockConfig.value(params)
|
||||
}
|
||||
})
|
||||
|
||||
Object.values(canonicalIndex.groupsById).forEach((group) => {
|
||||
const { basicValue, advancedValue } = getCanonicalValues(group, params)
|
||||
const pairMode =
|
||||
canonicalModeOverrides?.[group.canonicalId] ?? (legacyAdvancedMode ? 'advanced' : 'basic')
|
||||
const chosen = pairMode === 'advanced' ? advancedValue : basicValue
|
||||
// Finally, consolidate canonical parameters (e.g., selector and manual ID into a single param)
|
||||
const canonicalGroups: Record<string, { basic?: string; advanced: string[] }> = {}
|
||||
blockConfig.subBlocks.forEach((sb) => {
|
||||
if (!sb.canonicalParamId) return
|
||||
const key = sb.canonicalParamId
|
||||
if (!canonicalGroups[key]) canonicalGroups[key] = { basic: undefined, advanced: [] }
|
||||
if (sb.mode === 'advanced') canonicalGroups[key].advanced.push(sb.id)
|
||||
else canonicalGroups[key].basic = sb.id
|
||||
})
|
||||
|
||||
const sourceIds = [group.basicId, ...group.advancedIds].filter(Boolean) as string[]
|
||||
sourceIds.forEach((id) => {
|
||||
if (id !== group.canonicalId) delete params[id]
|
||||
})
|
||||
Object.entries(canonicalGroups).forEach(([canonicalKey, group]) => {
|
||||
const basicId = group.basic
|
||||
const advancedIds = group.advanced
|
||||
const basicVal = basicId ? params[basicId] : undefined
|
||||
const advancedVal = advancedIds
|
||||
.map((id) => params[id])
|
||||
.find(
|
||||
(v) => v !== undefined && v !== null && (typeof v !== 'string' || v.trim().length > 0)
|
||||
)
|
||||
|
||||
if (chosen !== undefined) {
|
||||
params[group.canonicalId] = chosen
|
||||
let chosen: any
|
||||
if (advancedVal !== undefined && basicVal !== undefined) {
|
||||
chosen = isAdvancedMode ? advancedVal : basicVal
|
||||
} else if (advancedVal !== undefined) {
|
||||
chosen = advancedVal
|
||||
} else if (basicVal !== undefined) {
|
||||
chosen = isAdvancedMode ? undefined : basicVal
|
||||
} else {
|
||||
chosen = undefined
|
||||
}
|
||||
|
||||
const sourceIds = [basicId, ...advancedIds].filter(Boolean) as string[]
|
||||
sourceIds.forEach((id) => {
|
||||
if (id !== canonicalKey) delete params[id]
|
||||
})
|
||||
if (chosen !== undefined) params[canonicalKey] = chosen
|
||||
else delete params[canonicalKey]
|
||||
})
|
||||
|
||||
return params
|
||||
@@ -417,7 +520,17 @@ export class Serializer {
|
||||
}
|
||||
|
||||
// Determine the current tool ID using the same logic as the serializer
|
||||
const currentToolId = this.selectToolId(blockConfig, params)
|
||||
let currentToolId = ''
|
||||
try {
|
||||
currentToolId = blockConfig.tools.config?.tool
|
||||
? blockConfig.tools.config.tool(params)
|
||||
: blockConfig.tools.access[0]
|
||||
} catch (error) {
|
||||
logger.warn('Tool selection failed during validation, using default:', {
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
})
|
||||
currentToolId = blockConfig.tools.access[0]
|
||||
}
|
||||
|
||||
// Get the specific tool to validate against
|
||||
const currentTool = getTool(currentToolId)
|
||||
@@ -425,11 +538,8 @@ export class Serializer {
|
||||
return // Tool not found, skip validation
|
||||
}
|
||||
|
||||
// Check required user-only parameters for the current tool
|
||||
const missingFields: string[] = []
|
||||
const displayAdvancedOptions = block.advancedMode ?? false
|
||||
const isTriggerContext = block.triggerMode ?? false
|
||||
const isTriggerCategory = blockConfig.category === 'triggers'
|
||||
const canonicalIndex = buildCanonicalIndex(blockConfig.subBlocks || [])
|
||||
|
||||
// Iterate through the tool's parameters, not the block's subBlocks
|
||||
Object.entries(currentTool.params || {}).forEach(([paramId, paramConfig]) => {
|
||||
@@ -439,23 +549,20 @@ export class Serializer {
|
||||
let shouldValidateParam = true
|
||||
|
||||
if (matchingConfigs.length > 0) {
|
||||
const isAdvancedMode = block.advancedMode ?? false
|
||||
|
||||
shouldValidateParam = matchingConfigs.some((subBlockConfig: any) => {
|
||||
const includedByMode = shouldSerializeSubBlock(
|
||||
subBlockConfig,
|
||||
params,
|
||||
displayAdvancedOptions,
|
||||
isTriggerContext,
|
||||
isTriggerCategory,
|
||||
canonicalIndex
|
||||
)
|
||||
const includedByMode = shouldIncludeField(subBlockConfig, isAdvancedMode)
|
||||
|
||||
const includedByCondition = evaluateCondition(subBlockConfig.condition, params)
|
||||
|
||||
const isRequired = (() => {
|
||||
if (!subBlockConfig.required) return false
|
||||
if (typeof subBlockConfig.required === 'boolean') return subBlockConfig.required
|
||||
return evaluateSubBlockCondition(subBlockConfig.required, params)
|
||||
return evaluateCondition(subBlockConfig.required, params)
|
||||
})()
|
||||
|
||||
return includedByMode && isRequired
|
||||
return includedByMode && includedByCondition && isRequired
|
||||
})
|
||||
}
|
||||
|
||||
@@ -465,15 +572,10 @@ export class Serializer {
|
||||
|
||||
const fieldValue = params[paramId]
|
||||
if (fieldValue === undefined || fieldValue === null || fieldValue === '') {
|
||||
const activeConfig = matchingConfigs.find((config: any) =>
|
||||
shouldSerializeSubBlock(
|
||||
config,
|
||||
params,
|
||||
displayAdvancedOptions,
|
||||
isTriggerContext,
|
||||
isTriggerCategory,
|
||||
canonicalIndex
|
||||
)
|
||||
const activeConfig = matchingConfigs.find(
|
||||
(config: any) =>
|
||||
shouldIncludeField(config, block.advancedMode ?? false) &&
|
||||
evaluateCondition(config.condition, params)
|
||||
)
|
||||
const displayName = activeConfig?.title || paramId
|
||||
missingFields.push(displayName)
|
||||
@@ -527,19 +629,6 @@ export class Serializer {
|
||||
return accessibleMap
|
||||
}
|
||||
|
||||
private selectToolId(blockConfig: any, params: Record<string, any>): string {
|
||||
try {
|
||||
return blockConfig.tools.config?.tool
|
||||
? blockConfig.tools.config.tool(params)
|
||||
: blockConfig.tools.access[0]
|
||||
} catch (error) {
|
||||
logger.warn('Tool selection failed during serialization, using default:', {
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
})
|
||||
return blockConfig.tools.access[0]
|
||||
}
|
||||
}
|
||||
|
||||
deserializeWorkflow(workflow: SerializedWorkflow): {
|
||||
blocks: Record<string, BlockState>
|
||||
edges: Edge[]
|
||||
|
||||
@@ -147,19 +147,20 @@ const { mockBlockConfigs, createMockGetBlock, slackWithCanonicalParam } = vi.hoi
|
||||
config: { tool: () => 'slack_send_message' },
|
||||
},
|
||||
subBlocks: [
|
||||
{
|
||||
id: 'channel',
|
||||
type: 'dropdown',
|
||||
label: 'Channel',
|
||||
mode: 'basic',
|
||||
canonicalParamId: 'channel',
|
||||
},
|
||||
{ id: 'channel', type: 'dropdown', label: 'Channel', mode: 'basic' },
|
||||
{
|
||||
id: 'manualChannel',
|
||||
type: 'short-input',
|
||||
label: 'Channel ID',
|
||||
mode: 'advanced',
|
||||
canonicalParamId: 'channel',
|
||||
canonicalParamId: 'targetChannel',
|
||||
},
|
||||
{
|
||||
id: 'channelSelector',
|
||||
type: 'dropdown',
|
||||
label: 'Channel Selector',
|
||||
mode: 'basic',
|
||||
canonicalParamId: 'targetChannel',
|
||||
},
|
||||
{ id: 'text', type: 'long-input', label: 'Message' },
|
||||
{ id: 'username', type: 'short-input', label: 'Username', mode: 'both' },
|
||||
@@ -655,18 +656,16 @@ describe('Serializer Extended Tests', () => {
|
||||
})
|
||||
|
||||
describe('canonical parameter handling', () => {
|
||||
it('should use advanced value when canonicalModes specifies advanced', () => {
|
||||
it('should consolidate basic/advanced mode fields into canonical param in advanced mode', () => {
|
||||
const serializer = new Serializer()
|
||||
const block: BlockState = {
|
||||
id: 'slack-1',
|
||||
type: 'slack',
|
||||
name: 'Slack',
|
||||
position: { x: 0, y: 0 },
|
||||
data: {
|
||||
canonicalModes: { channel: 'advanced' },
|
||||
},
|
||||
advancedMode: true,
|
||||
subBlocks: {
|
||||
channel: { id: 'channel', type: 'channel-selector', value: 'general' },
|
||||
channelSelector: { id: 'channelSelector', type: 'dropdown', value: 'general' },
|
||||
manualChannel: { id: 'manualChannel', type: 'short-input', value: 'C12345' },
|
||||
text: { id: 'text', type: 'long-input', value: 'Hello' },
|
||||
},
|
||||
@@ -677,23 +676,22 @@ describe('Serializer Extended Tests', () => {
|
||||
const serialized = serializer.serializeWorkflow({ 'slack-1': block }, [], {})
|
||||
const slackBlock = serialized.blocks.find((b) => b.id === 'slack-1')
|
||||
|
||||
expect(slackBlock?.config.params.channel).toBe('C12345')
|
||||
expect(slackBlock?.config.params.targetChannel).toBe('C12345')
|
||||
expect(slackBlock?.config.params.channelSelector).toBeUndefined()
|
||||
expect(slackBlock?.config.params.manualChannel).toBeUndefined()
|
||||
})
|
||||
|
||||
it('should use basic value when canonicalModes specifies basic', () => {
|
||||
it('should consolidate to basic value when in basic mode', () => {
|
||||
const serializer = new Serializer()
|
||||
const block: BlockState = {
|
||||
id: 'slack-1',
|
||||
type: 'slack',
|
||||
name: 'Slack',
|
||||
position: { x: 0, y: 0 },
|
||||
data: {
|
||||
canonicalModes: { channel: 'basic' },
|
||||
},
|
||||
advancedMode: false,
|
||||
subBlocks: {
|
||||
channel: { id: 'channel', type: 'channel-selector', value: 'general' },
|
||||
manualChannel: { id: 'manualChannel', type: 'short-input', value: 'C12345' },
|
||||
channelSelector: { id: 'channelSelector', type: 'dropdown', value: 'general' },
|
||||
manualChannel: { id: 'manualChannel', type: 'short-input', value: '' },
|
||||
text: { id: 'text', type: 'long-input', value: 'Hello' },
|
||||
},
|
||||
outputs: {},
|
||||
@@ -703,7 +701,7 @@ describe('Serializer Extended Tests', () => {
|
||||
const serialized = serializer.serializeWorkflow({ 'slack-1': block }, [], {})
|
||||
const slackBlock = serialized.blocks.find((b) => b.id === 'slack-1')
|
||||
|
||||
expect(slackBlock?.config.params.channel).toBe('general')
|
||||
expect(slackBlock?.config.params.targetChannel).toBe('general')
|
||||
})
|
||||
|
||||
it('should handle missing canonical param values', () => {
|
||||
@@ -713,8 +711,9 @@ describe('Serializer Extended Tests', () => {
|
||||
type: 'slack',
|
||||
name: 'Slack',
|
||||
position: { x: 0, y: 0 },
|
||||
advancedMode: false,
|
||||
subBlocks: {
|
||||
channel: { id: 'channel', type: 'channel-selector', value: null },
|
||||
channelSelector: { id: 'channelSelector', type: 'dropdown', value: null },
|
||||
manualChannel: { id: 'manualChannel', type: 'short-input', value: null },
|
||||
text: { id: 'text', type: 'long-input', value: 'Hello' },
|
||||
},
|
||||
@@ -725,7 +724,8 @@ describe('Serializer Extended Tests', () => {
|
||||
const serialized = serializer.serializeWorkflow({ 'slack-1': block }, [], {})
|
||||
const slackBlock = serialized.blocks.find((b) => b.id === 'slack-1')
|
||||
|
||||
expect(slackBlock?.config.params.channel).toBeNull()
|
||||
// When both values are null, the canonical param is set to null (preserving the null value)
|
||||
expect(slackBlock?.config.params.targetChannel).toBeNull()
|
||||
})
|
||||
})
|
||||
|
||||
|
||||
@@ -4,7 +4,6 @@ export const BLOCK_OPERATIONS = {
|
||||
TOGGLE_ENABLED: 'toggle-enabled',
|
||||
UPDATE_PARENT: 'update-parent',
|
||||
UPDATE_ADVANCED_MODE: 'update-advanced-mode',
|
||||
UPDATE_CANONICAL_MODE: 'update-canonical-mode',
|
||||
TOGGLE_HANDLES: 'toggle-handles',
|
||||
} as const
|
||||
|
||||
|
||||
@@ -398,46 +398,6 @@ async function handleBlockOperationTx(
|
||||
break
|
||||
}
|
||||
|
||||
case BLOCK_OPERATIONS.UPDATE_CANONICAL_MODE: {
|
||||
if (!payload.id || !payload.canonicalId || !payload.canonicalMode) {
|
||||
throw new Error('Missing required fields for update canonical mode operation')
|
||||
}
|
||||
|
||||
const existingBlock = await tx
|
||||
.select({ data: workflowBlocks.data })
|
||||
.from(workflowBlocks)
|
||||
.where(and(eq(workflowBlocks.id, payload.id), eq(workflowBlocks.workflowId, workflowId)))
|
||||
.limit(1)
|
||||
|
||||
const currentData = (existingBlock?.[0]?.data as Record<string, unknown>) || {}
|
||||
const currentCanonicalModes = (currentData.canonicalModes as Record<string, unknown>) || {}
|
||||
const canonicalModes = {
|
||||
...currentCanonicalModes,
|
||||
[payload.canonicalId]: payload.canonicalMode,
|
||||
}
|
||||
|
||||
const updateResult = await tx
|
||||
.update(workflowBlocks)
|
||||
.set({
|
||||
data: {
|
||||
...currentData,
|
||||
canonicalModes,
|
||||
},
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where(and(eq(workflowBlocks.id, payload.id), eq(workflowBlocks.workflowId, workflowId)))
|
||||
.returning({ id: workflowBlocks.id })
|
||||
|
||||
if (updateResult.length === 0) {
|
||||
throw new Error(`Block ${payload.id} not found in workflow ${workflowId}`)
|
||||
}
|
||||
|
||||
logger.debug(
|
||||
`Updated block canonical mode: ${payload.id} -> ${payload.canonicalId}: ${payload.canonicalMode}`
|
||||
)
|
||||
break
|
||||
}
|
||||
|
||||
case BLOCK_OPERATIONS.TOGGLE_HANDLES: {
|
||||
if (!payload.id || payload.horizontalHandles === undefined) {
|
||||
throw new Error('Missing required fields for toggle handles operation')
|
||||
|
||||
@@ -208,7 +208,7 @@ describe('checkRolePermission', () => {
|
||||
{ operation: 'toggle-enabled', adminAllowed: true, writeAllowed: true, readAllowed: false },
|
||||
{ operation: 'update-parent', adminAllowed: true, writeAllowed: true, readAllowed: false },
|
||||
{
|
||||
operation: 'update-canonical-mode',
|
||||
operation: 'update-advanced-mode',
|
||||
adminAllowed: true,
|
||||
writeAllowed: true,
|
||||
readAllowed: false,
|
||||
|
||||
@@ -22,7 +22,6 @@ const WRITE_OPERATIONS: string[] = [
|
||||
BLOCK_OPERATIONS.TOGGLE_ENABLED,
|
||||
BLOCK_OPERATIONS.UPDATE_PARENT,
|
||||
BLOCK_OPERATIONS.UPDATE_ADVANCED_MODE,
|
||||
BLOCK_OPERATIONS.UPDATE_CANONICAL_MODE,
|
||||
BLOCK_OPERATIONS.TOGGLE_HANDLES,
|
||||
// Batch block operations
|
||||
BLOCKS_OPERATIONS.BATCH_UPDATE_POSITIONS,
|
||||
|
||||
@@ -31,7 +31,6 @@ export const BlockOperationSchema = z.object({
|
||||
BLOCK_OPERATIONS.TOGGLE_ENABLED,
|
||||
BLOCK_OPERATIONS.UPDATE_PARENT,
|
||||
BLOCK_OPERATIONS.UPDATE_ADVANCED_MODE,
|
||||
BLOCK_OPERATIONS.UPDATE_CANONICAL_MODE,
|
||||
BLOCK_OPERATIONS.TOGGLE_HANDLES,
|
||||
]),
|
||||
target: z.literal(OPERATION_TARGETS.BLOCK),
|
||||
@@ -47,10 +46,8 @@ export const BlockOperationSchema = z.object({
|
||||
parentId: z.string().nullable().optional(),
|
||||
extent: z.enum(['parent']).nullable().optional(),
|
||||
enabled: z.boolean().optional(),
|
||||
advancedMode: z.boolean().optional(),
|
||||
horizontalHandles: z.boolean().optional(),
|
||||
canonicalId: z.string().optional(),
|
||||
canonicalMode: z.enum(['basic', 'advanced']).optional(),
|
||||
advancedMode: z.boolean().optional(),
|
||||
triggerMode: z.boolean().optional(),
|
||||
height: z.number().optional(),
|
||||
}),
|
||||
|
||||
@@ -4,7 +4,6 @@ import { createLogger } from '@sim/logger'
|
||||
import { create } from 'zustand'
|
||||
import { devtools } from 'zustand/middleware'
|
||||
import { type CopilotChat, sendStreamingMessage } from '@/lib/copilot/api'
|
||||
import type { CopilotTransportMode } from '@/lib/copilot/models'
|
||||
import type {
|
||||
BaseClientToolMetadata,
|
||||
ClientToolDisplay,
|
||||
@@ -72,7 +71,6 @@ import { ListUserWorkflowsClientTool } from '@/lib/copilot/tools/client/workflow
|
||||
import { ListWorkspaceMcpServersClientTool } from '@/lib/copilot/tools/client/workflow/list-workspace-mcp-servers'
|
||||
import { ManageCustomToolClientTool } from '@/lib/copilot/tools/client/workflow/manage-custom-tool'
|
||||
import { ManageMcpToolClientTool } from '@/lib/copilot/tools/client/workflow/manage-mcp-tool'
|
||||
import { RedeployClientTool } from '@/lib/copilot/tools/client/workflow/redeploy'
|
||||
import { RunWorkflowClientTool } from '@/lib/copilot/tools/client/workflow/run-workflow'
|
||||
import { SetGlobalWorkflowVariablesClientTool } from '@/lib/copilot/tools/client/workflow/set-global-workflow-variables'
|
||||
import { getQueryClient } from '@/app/_shell/providers/query-provider'
|
||||
@@ -86,9 +84,7 @@ import type {
|
||||
} from '@/stores/panel/copilot/types'
|
||||
import { useWorkflowDiffStore } from '@/stores/workflow-diff/store'
|
||||
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
|
||||
import { mergeSubblockState } from '@/stores/workflows/utils'
|
||||
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
|
||||
import type { WorkflowState } from '@/stores/workflows/workflow/types'
|
||||
|
||||
const logger = createLogger('CopilotStore')
|
||||
|
||||
@@ -151,7 +147,6 @@ const CLIENT_TOOL_INSTANTIATORS: Record<string, (id: string) => any> = {
|
||||
deploy_api: (id) => new DeployApiClientTool(id),
|
||||
deploy_chat: (id) => new DeployChatClientTool(id),
|
||||
deploy_mcp: (id) => new DeployMcpClientTool(id),
|
||||
redeploy: (id) => new RedeployClientTool(id),
|
||||
list_workspace_mcp_servers: (id) => new ListWorkspaceMcpServersClientTool(id),
|
||||
create_workspace_mcp_server: (id) => new CreateWorkspaceMcpServerClientTool(id),
|
||||
check_deployment_status: (id) => new CheckDeploymentStatusClientTool(id),
|
||||
@@ -214,7 +209,6 @@ export const CLASS_TOOL_METADATA: Record<string, BaseClientToolMetadata | undefi
|
||||
deploy_api: (DeployApiClientTool as any)?.metadata,
|
||||
deploy_chat: (DeployChatClientTool as any)?.metadata,
|
||||
deploy_mcp: (DeployMcpClientTool as any)?.metadata,
|
||||
redeploy: (RedeployClientTool as any)?.metadata,
|
||||
list_workspace_mcp_servers: (ListWorkspaceMcpServersClientTool as any)?.metadata,
|
||||
create_workspace_mcp_server: (CreateWorkspaceMcpServerClientTool as any)?.metadata,
|
||||
check_deployment_status: (CheckDeploymentStatusClientTool as any)?.metadata,
|
||||
@@ -243,7 +237,6 @@ const TEXT_BLOCK_TYPE = 'text'
|
||||
const THINKING_BLOCK_TYPE = 'thinking'
|
||||
const DATA_PREFIX = 'data: '
|
||||
const DATA_PREFIX_LENGTH = 6
|
||||
const CONTINUE_OPTIONS_TAG = '<options>{"1":"Continue"}</options>'
|
||||
|
||||
// Resolve display text/icon for a tool based on its state
|
||||
function resolveToolDisplay(
|
||||
@@ -367,7 +360,6 @@ function abortAllInProgressTools(set: any, get: () => CopilotStore) {
|
||||
const { toolCallsById, messages } = get()
|
||||
const updatedMap = { ...toolCallsById }
|
||||
const abortedIds = new Set<string>()
|
||||
let hasUpdates = false
|
||||
for (const [id, tc] of Object.entries(toolCallsById)) {
|
||||
const st = tc.state as any
|
||||
// Abort anything not already terminal success/error/rejected/aborted
|
||||
@@ -381,19 +373,11 @@ function abortAllInProgressTools(set: any, get: () => CopilotStore) {
|
||||
updatedMap[id] = {
|
||||
...tc,
|
||||
state: ClientToolCallState.aborted,
|
||||
subAgentStreaming: false,
|
||||
display: resolveToolDisplay(tc.name, ClientToolCallState.aborted, id, (tc as any).params),
|
||||
}
|
||||
hasUpdates = true
|
||||
} else if (tc.subAgentStreaming) {
|
||||
updatedMap[id] = {
|
||||
...tc,
|
||||
subAgentStreaming: false,
|
||||
}
|
||||
hasUpdates = true
|
||||
}
|
||||
}
|
||||
if (abortedIds.size > 0 || hasUpdates) {
|
||||
if (abortedIds.size > 0) {
|
||||
set({ toolCallsById: updatedMap })
|
||||
// Update inline blocks in-place for the latest assistant message only (most relevant)
|
||||
set((s: CopilotStore) => {
|
||||
@@ -636,97 +620,6 @@ function createErrorMessage(
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Builds a workflow snapshot suitable for checkpoint persistence.
|
||||
*/
|
||||
function buildCheckpointWorkflowState(workflowId: string): WorkflowState | null {
|
||||
const rawState = useWorkflowStore.getState().getWorkflowState()
|
||||
if (!rawState) return null
|
||||
|
||||
const blocksWithSubblockValues = mergeSubblockState(rawState.blocks, workflowId)
|
||||
|
||||
const filteredBlocks = Object.entries(blocksWithSubblockValues).reduce(
|
||||
(acc, [blockId, block]) => {
|
||||
if (block?.type && block?.name) {
|
||||
acc[blockId] = {
|
||||
...block,
|
||||
id: block.id || blockId,
|
||||
enabled: block.enabled !== undefined ? block.enabled : true,
|
||||
horizontalHandles: block.horizontalHandles !== undefined ? block.horizontalHandles : true,
|
||||
height: block.height !== undefined ? block.height : 90,
|
||||
subBlocks: block.subBlocks || {},
|
||||
outputs: block.outputs || {},
|
||||
data: block.data || {},
|
||||
position: block.position || { x: 0, y: 0 },
|
||||
}
|
||||
}
|
||||
return acc
|
||||
},
|
||||
{} as WorkflowState['blocks']
|
||||
)
|
||||
|
||||
return {
|
||||
blocks: filteredBlocks,
|
||||
edges: rawState.edges || [],
|
||||
loops: rawState.loops || {},
|
||||
parallels: rawState.parallels || {},
|
||||
lastSaved: rawState.lastSaved || Date.now(),
|
||||
deploymentStatuses: rawState.deploymentStatuses || {},
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Persists a previously captured snapshot as a workflow checkpoint.
|
||||
*/
|
||||
async function saveMessageCheckpoint(
|
||||
messageId: string,
|
||||
get: () => CopilotStore,
|
||||
set: (partial: Partial<CopilotStore> | ((state: CopilotStore) => Partial<CopilotStore>)) => void
|
||||
): Promise<boolean> {
|
||||
const { workflowId, currentChat, messageSnapshots, messageCheckpoints } = get()
|
||||
if (!workflowId || !currentChat?.id) return false
|
||||
|
||||
const snapshot = messageSnapshots[messageId]
|
||||
if (!snapshot) return false
|
||||
|
||||
const nextSnapshots = { ...messageSnapshots }
|
||||
delete nextSnapshots[messageId]
|
||||
set({ messageSnapshots: nextSnapshots })
|
||||
|
||||
try {
|
||||
const response = await fetch('/api/copilot/checkpoints', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
workflowId,
|
||||
chatId: currentChat.id,
|
||||
messageId,
|
||||
workflowState: JSON.stringify(snapshot),
|
||||
}),
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`Failed to create checkpoint: ${response.statusText}`)
|
||||
}
|
||||
|
||||
const result = await response.json()
|
||||
const newCheckpoint = result.checkpoint
|
||||
if (newCheckpoint) {
|
||||
const existingCheckpoints = messageCheckpoints[messageId] || []
|
||||
const updatedCheckpoints = {
|
||||
...messageCheckpoints,
|
||||
[messageId]: [newCheckpoint, ...existingCheckpoints],
|
||||
}
|
||||
set({ messageCheckpoints: updatedCheckpoints })
|
||||
}
|
||||
|
||||
return true
|
||||
} catch (error) {
|
||||
logger.error('Failed to create checkpoint from snapshot:', error)
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
function stripTodoTags(text: string): string {
|
||||
if (!text) return text
|
||||
return text
|
||||
@@ -933,8 +826,6 @@ interface StreamingContext {
|
||||
newChatId?: string
|
||||
doneEventCount: number
|
||||
streamComplete?: boolean
|
||||
wasAborted?: boolean
|
||||
suppressContinueOption?: boolean
|
||||
/** Track active subagent sessions by parent tool call ID */
|
||||
subAgentParentToolCallId?: string
|
||||
/** Track subagent content per parent tool call */
|
||||
@@ -952,132 +843,6 @@ type SSEHandler = (
|
||||
set: any
|
||||
) => Promise<void> | void
|
||||
|
||||
function appendTextBlock(context: StreamingContext, text: string) {
|
||||
if (!text) return
|
||||
context.accumulatedContent.append(text)
|
||||
if (context.currentTextBlock && context.contentBlocks.length > 0) {
|
||||
const lastBlock = context.contentBlocks[context.contentBlocks.length - 1]
|
||||
if (lastBlock.type === TEXT_BLOCK_TYPE && lastBlock === context.currentTextBlock) {
|
||||
lastBlock.content += text
|
||||
return
|
||||
}
|
||||
}
|
||||
context.currentTextBlock = contentBlockPool.get()
|
||||
context.currentTextBlock.type = TEXT_BLOCK_TYPE
|
||||
context.currentTextBlock.content = text
|
||||
context.currentTextBlock.timestamp = Date.now()
|
||||
context.contentBlocks.push(context.currentTextBlock)
|
||||
}
|
||||
|
||||
function appendContinueOption(content: string): string {
|
||||
if (/<options>/i.test(content)) return content
|
||||
const suffix = content.trim().length > 0 ? '\n\n' : ''
|
||||
return `${content}${suffix}${CONTINUE_OPTIONS_TAG}`
|
||||
}
|
||||
|
||||
function appendContinueOptionBlock(blocks: any[]): any[] {
|
||||
if (!Array.isArray(blocks)) return blocks
|
||||
const hasOptions = blocks.some(
|
||||
(block) =>
|
||||
block?.type === TEXT_BLOCK_TYPE &&
|
||||
typeof block.content === 'string' &&
|
||||
/<options>/i.test(block.content)
|
||||
)
|
||||
if (hasOptions) return blocks
|
||||
return [
|
||||
...blocks,
|
||||
{
|
||||
type: TEXT_BLOCK_TYPE,
|
||||
content: CONTINUE_OPTIONS_TAG,
|
||||
timestamp: Date.now(),
|
||||
},
|
||||
]
|
||||
}
|
||||
|
||||
function beginThinkingBlock(context: StreamingContext) {
|
||||
if (!context.currentThinkingBlock) {
|
||||
context.currentThinkingBlock = contentBlockPool.get()
|
||||
context.currentThinkingBlock.type = THINKING_BLOCK_TYPE
|
||||
context.currentThinkingBlock.content = ''
|
||||
context.currentThinkingBlock.timestamp = Date.now()
|
||||
;(context.currentThinkingBlock as any).startTime = Date.now()
|
||||
context.contentBlocks.push(context.currentThinkingBlock)
|
||||
}
|
||||
context.isInThinkingBlock = true
|
||||
context.currentTextBlock = null
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes thinking tags (raw or escaped) from streamed content.
|
||||
*/
|
||||
function stripThinkingTags(text: string): string {
|
||||
return text.replace(/<\/?thinking[^>]*>/gi, '').replace(/<\/?thinking[^&]*>/gi, '')
|
||||
}
|
||||
|
||||
function appendThinkingContent(context: StreamingContext, text: string) {
|
||||
if (!text) return
|
||||
const cleanedText = stripThinkingTags(text)
|
||||
if (!cleanedText) return
|
||||
if (context.currentThinkingBlock) {
|
||||
context.currentThinkingBlock.content += cleanedText
|
||||
} else {
|
||||
context.currentThinkingBlock = contentBlockPool.get()
|
||||
context.currentThinkingBlock.type = THINKING_BLOCK_TYPE
|
||||
context.currentThinkingBlock.content = cleanedText
|
||||
context.currentThinkingBlock.timestamp = Date.now()
|
||||
context.currentThinkingBlock.startTime = Date.now()
|
||||
context.contentBlocks.push(context.currentThinkingBlock)
|
||||
}
|
||||
context.isInThinkingBlock = true
|
||||
context.currentTextBlock = null
|
||||
}
|
||||
|
||||
function finalizeThinkingBlock(context: StreamingContext) {
|
||||
if (context.currentThinkingBlock) {
|
||||
context.currentThinkingBlock.duration =
|
||||
Date.now() - (context.currentThinkingBlock.startTime || Date.now())
|
||||
}
|
||||
context.isInThinkingBlock = false
|
||||
context.currentThinkingBlock = null
|
||||
context.currentTextBlock = null
|
||||
}
|
||||
|
||||
function upsertToolCallBlock(context: StreamingContext, toolCall: CopilotToolCall) {
|
||||
let found = false
|
||||
for (let i = 0; i < context.contentBlocks.length; i++) {
|
||||
const b = context.contentBlocks[i] as any
|
||||
if (b.type === 'tool_call' && b.toolCall?.id === toolCall.id) {
|
||||
context.contentBlocks[i] = { ...b, toolCall }
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if (!found) {
|
||||
context.contentBlocks.push({ type: 'tool_call', toolCall, timestamp: Date.now() })
|
||||
}
|
||||
}
|
||||
|
||||
function appendSubAgentText(context: StreamingContext, parentToolCallId: string, text: string) {
|
||||
if (!context.subAgentContent[parentToolCallId]) {
|
||||
context.subAgentContent[parentToolCallId] = ''
|
||||
}
|
||||
if (!context.subAgentBlocks[parentToolCallId]) {
|
||||
context.subAgentBlocks[parentToolCallId] = []
|
||||
}
|
||||
context.subAgentContent[parentToolCallId] += text
|
||||
const blocks = context.subAgentBlocks[parentToolCallId]
|
||||
const lastBlock = blocks[blocks.length - 1]
|
||||
if (lastBlock && lastBlock.type === 'subagent_text') {
|
||||
lastBlock.content = (lastBlock.content || '') + text
|
||||
} else {
|
||||
blocks.push({
|
||||
type: 'subagent_text',
|
||||
content: text,
|
||||
timestamp: Date.now(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
const sseHandlers: Record<string, SSEHandler> = {
|
||||
chat_id: async (data, context, get) => {
|
||||
context.newChatId = data.chatId
|
||||
@@ -1268,7 +1033,17 @@ const sseHandlers: Record<string, SSEHandler> = {
|
||||
logger.info('[toolCallsById] map updated', updated)
|
||||
|
||||
// Add/refresh inline content block
|
||||
upsertToolCallBlock(context, tc)
|
||||
let found = false
|
||||
for (let i = 0; i < context.contentBlocks.length; i++) {
|
||||
const b = context.contentBlocks[i] as any
|
||||
if (b.type === 'tool_call' && b.toolCall?.id === toolCallId) {
|
||||
context.contentBlocks[i] = { ...b, toolCall: tc }
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if (!found)
|
||||
context.contentBlocks.push({ type: 'tool_call', toolCall: tc, timestamp: Date.now() })
|
||||
updateStreamingMessage(set, context)
|
||||
}
|
||||
},
|
||||
@@ -1304,13 +1079,19 @@ const sseHandlers: Record<string, SSEHandler> = {
|
||||
logger.info('[toolCallsById] → pending', { id, name, params: args })
|
||||
|
||||
// Ensure an inline content block exists/updated for this tool call
|
||||
upsertToolCallBlock(context, next)
|
||||
updateStreamingMessage(set, context)
|
||||
|
||||
// Do not execute on partial tool_call frames
|
||||
if (isPartial) {
|
||||
return
|
||||
let found = false
|
||||
for (let i = 0; i < context.contentBlocks.length; i++) {
|
||||
const b = context.contentBlocks[i] as any
|
||||
if (b.type === 'tool_call' && b.toolCall?.id === id) {
|
||||
context.contentBlocks[i] = { ...b, toolCall: next }
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if (!found) {
|
||||
context.contentBlocks.push({ type: 'tool_call', toolCall: next, timestamp: Date.now() })
|
||||
}
|
||||
updateStreamingMessage(set, context)
|
||||
|
||||
// Prefer interface-based registry to determine interrupt and execute
|
||||
try {
|
||||
@@ -1494,18 +1275,44 @@ const sseHandlers: Record<string, SSEHandler> = {
|
||||
reasoning: (data, context, _get, set) => {
|
||||
const phase = (data && (data.phase || data?.data?.phase)) as string | undefined
|
||||
if (phase === 'start') {
|
||||
beginThinkingBlock(context)
|
||||
if (!context.currentThinkingBlock) {
|
||||
context.currentThinkingBlock = contentBlockPool.get()
|
||||
context.currentThinkingBlock.type = THINKING_BLOCK_TYPE
|
||||
context.currentThinkingBlock.content = ''
|
||||
context.currentThinkingBlock.timestamp = Date.now()
|
||||
;(context.currentThinkingBlock as any).startTime = Date.now()
|
||||
context.contentBlocks.push(context.currentThinkingBlock)
|
||||
}
|
||||
context.isInThinkingBlock = true
|
||||
context.currentTextBlock = null
|
||||
updateStreamingMessage(set, context)
|
||||
return
|
||||
}
|
||||
if (phase === 'end') {
|
||||
finalizeThinkingBlock(context)
|
||||
if (context.currentThinkingBlock) {
|
||||
;(context.currentThinkingBlock as any).duration =
|
||||
Date.now() - ((context.currentThinkingBlock as any).startTime || Date.now())
|
||||
}
|
||||
context.isInThinkingBlock = false
|
||||
context.currentThinkingBlock = null
|
||||
context.currentTextBlock = null
|
||||
updateStreamingMessage(set, context)
|
||||
return
|
||||
}
|
||||
const chunk: string = typeof data?.data === 'string' ? data.data : data?.content || ''
|
||||
if (!chunk) return
|
||||
appendThinkingContent(context, chunk)
|
||||
if (context.currentThinkingBlock) {
|
||||
context.currentThinkingBlock.content += chunk
|
||||
} else {
|
||||
context.currentThinkingBlock = contentBlockPool.get()
|
||||
context.currentThinkingBlock.type = THINKING_BLOCK_TYPE
|
||||
context.currentThinkingBlock.content = chunk
|
||||
context.currentThinkingBlock.timestamp = Date.now()
|
||||
;(context.currentThinkingBlock as any).startTime = Date.now()
|
||||
context.contentBlocks.push(context.currentThinkingBlock)
|
||||
}
|
||||
context.isInThinkingBlock = true
|
||||
context.currentTextBlock = null
|
||||
updateStreamingMessage(set, context)
|
||||
},
|
||||
content: (data, context, get, set) => {
|
||||
@@ -1520,23 +1327,21 @@ const sseHandlers: Record<string, SSEHandler> = {
|
||||
const designWorkflowStartRegex = /<design_workflow>/
|
||||
const designWorkflowEndRegex = /<\/design_workflow>/
|
||||
|
||||
const splitTrailingPartialTag = (
|
||||
text: string,
|
||||
tags: string[]
|
||||
): { text: string; remaining: string } => {
|
||||
const partialIndex = text.lastIndexOf('<')
|
||||
if (partialIndex < 0) {
|
||||
return { text, remaining: '' }
|
||||
}
|
||||
const possibleTag = text.substring(partialIndex)
|
||||
const matchesTagStart = tags.some((tag) => tag.startsWith(possibleTag))
|
||||
if (!matchesTagStart) {
|
||||
return { text, remaining: '' }
|
||||
}
|
||||
return {
|
||||
text: text.substring(0, partialIndex),
|
||||
remaining: possibleTag,
|
||||
const appendTextToContent = (text: string) => {
|
||||
if (!text) return
|
||||
context.accumulatedContent.append(text)
|
||||
if (context.currentTextBlock && context.contentBlocks.length > 0) {
|
||||
const lastBlock = context.contentBlocks[context.contentBlocks.length - 1]
|
||||
if (lastBlock.type === TEXT_BLOCK_TYPE && lastBlock === context.currentTextBlock) {
|
||||
lastBlock.content += text
|
||||
return
|
||||
}
|
||||
}
|
||||
context.currentTextBlock = contentBlockPool.get()
|
||||
context.currentTextBlock.type = TEXT_BLOCK_TYPE
|
||||
context.currentTextBlock.content = text
|
||||
context.currentTextBlock.timestamp = Date.now()
|
||||
context.contentBlocks.push(context.currentTextBlock)
|
||||
}
|
||||
|
||||
while (contentToProcess.length > 0) {
|
||||
@@ -1558,19 +1363,13 @@ const sseHandlers: Record<string, SSEHandler> = {
|
||||
hasProcessedContent = true
|
||||
} else {
|
||||
// Still in design_workflow block, accumulate content
|
||||
const { text, remaining } = splitTrailingPartialTag(contentToProcess, [
|
||||
'</design_workflow>',
|
||||
])
|
||||
context.designWorkflowContent += text
|
||||
context.designWorkflowContent += contentToProcess
|
||||
|
||||
// Update store with partial content for streaming effect (available in all modes)
|
||||
set({ streamingPlanContent: context.designWorkflowContent })
|
||||
|
||||
contentToProcess = remaining
|
||||
contentToProcess = ''
|
||||
hasProcessedContent = true
|
||||
if (remaining) {
|
||||
break
|
||||
}
|
||||
}
|
||||
continue
|
||||
}
|
||||
@@ -1581,7 +1380,7 @@ const sseHandlers: Record<string, SSEHandler> = {
|
||||
if (designStartMatch) {
|
||||
const textBeforeDesign = contentToProcess.substring(0, designStartMatch.index)
|
||||
if (textBeforeDesign) {
|
||||
appendTextBlock(context, textBeforeDesign)
|
||||
appendTextToContent(textBeforeDesign)
|
||||
hasProcessedContent = true
|
||||
}
|
||||
context.isInDesignWorkflowBlock = true
|
||||
@@ -1672,27 +1471,63 @@ const sseHandlers: Record<string, SSEHandler> = {
|
||||
const endMatch = thinkingEndRegex.exec(contentToProcess)
|
||||
if (endMatch) {
|
||||
const thinkingContent = contentToProcess.substring(0, endMatch.index)
|
||||
appendThinkingContent(context, thinkingContent)
|
||||
finalizeThinkingBlock(context)
|
||||
if (context.currentThinkingBlock) {
|
||||
context.currentThinkingBlock.content += thinkingContent
|
||||
} else {
|
||||
context.currentThinkingBlock = contentBlockPool.get()
|
||||
context.currentThinkingBlock.type = THINKING_BLOCK_TYPE
|
||||
context.currentThinkingBlock.content = thinkingContent
|
||||
context.currentThinkingBlock.timestamp = Date.now()
|
||||
context.currentThinkingBlock.startTime = Date.now()
|
||||
context.contentBlocks.push(context.currentThinkingBlock)
|
||||
}
|
||||
context.isInThinkingBlock = false
|
||||
if (context.currentThinkingBlock) {
|
||||
context.currentThinkingBlock.duration =
|
||||
Date.now() - (context.currentThinkingBlock.startTime || Date.now())
|
||||
}
|
||||
context.currentThinkingBlock = null
|
||||
context.currentTextBlock = null
|
||||
contentToProcess = contentToProcess.substring(endMatch.index + endMatch[0].length)
|
||||
hasProcessedContent = true
|
||||
} else {
|
||||
const { text, remaining } = splitTrailingPartialTag(contentToProcess, ['</thinking>'])
|
||||
if (text) {
|
||||
appendThinkingContent(context, text)
|
||||
hasProcessedContent = true
|
||||
}
|
||||
contentToProcess = remaining
|
||||
if (remaining) {
|
||||
break
|
||||
if (context.currentThinkingBlock) {
|
||||
context.currentThinkingBlock.content += contentToProcess
|
||||
} else {
|
||||
context.currentThinkingBlock = contentBlockPool.get()
|
||||
context.currentThinkingBlock.type = THINKING_BLOCK_TYPE
|
||||
context.currentThinkingBlock.content = contentToProcess
|
||||
context.currentThinkingBlock.timestamp = Date.now()
|
||||
context.currentThinkingBlock.startTime = Date.now()
|
||||
context.contentBlocks.push(context.currentThinkingBlock)
|
||||
}
|
||||
contentToProcess = ''
|
||||
hasProcessedContent = true
|
||||
}
|
||||
} else {
|
||||
const startMatch = thinkingStartRegex.exec(contentToProcess)
|
||||
if (startMatch) {
|
||||
const textBeforeThinking = contentToProcess.substring(0, startMatch.index)
|
||||
if (textBeforeThinking) {
|
||||
appendTextBlock(context, textBeforeThinking)
|
||||
context.accumulatedContent.append(textBeforeThinking)
|
||||
if (context.currentTextBlock && context.contentBlocks.length > 0) {
|
||||
const lastBlock = context.contentBlocks[context.contentBlocks.length - 1]
|
||||
if (lastBlock.type === TEXT_BLOCK_TYPE && lastBlock === context.currentTextBlock) {
|
||||
lastBlock.content += textBeforeThinking
|
||||
} else {
|
||||
context.currentTextBlock = contentBlockPool.get()
|
||||
context.currentTextBlock.type = TEXT_BLOCK_TYPE
|
||||
context.currentTextBlock.content = textBeforeThinking
|
||||
context.currentTextBlock.timestamp = Date.now()
|
||||
context.contentBlocks.push(context.currentTextBlock)
|
||||
}
|
||||
} else {
|
||||
context.currentTextBlock = contentBlockPool.get()
|
||||
context.currentTextBlock.type = TEXT_BLOCK_TYPE
|
||||
context.currentTextBlock.content = textBeforeThinking
|
||||
context.currentTextBlock.timestamp = Date.now()
|
||||
context.contentBlocks.push(context.currentTextBlock)
|
||||
}
|
||||
hasProcessedContent = true
|
||||
}
|
||||
context.isInThinkingBlock = true
|
||||
@@ -1721,7 +1556,25 @@ const sseHandlers: Record<string, SSEHandler> = {
|
||||
remaining = contentToProcess.substring(partialTagIndex)
|
||||
}
|
||||
if (textToAdd) {
|
||||
appendTextBlock(context, textToAdd)
|
||||
context.accumulatedContent.append(textToAdd)
|
||||
if (context.currentTextBlock && context.contentBlocks.length > 0) {
|
||||
const lastBlock = context.contentBlocks[context.contentBlocks.length - 1]
|
||||
if (lastBlock.type === TEXT_BLOCK_TYPE && lastBlock === context.currentTextBlock) {
|
||||
lastBlock.content += textToAdd
|
||||
} else {
|
||||
context.currentTextBlock = contentBlockPool.get()
|
||||
context.currentTextBlock.type = TEXT_BLOCK_TYPE
|
||||
context.currentTextBlock.content = textToAdd
|
||||
context.currentTextBlock.timestamp = Date.now()
|
||||
context.contentBlocks.push(context.currentTextBlock)
|
||||
}
|
||||
} else {
|
||||
context.currentTextBlock = contentBlockPool.get()
|
||||
context.currentTextBlock.type = TEXT_BLOCK_TYPE
|
||||
context.currentTextBlock.content = textToAdd
|
||||
context.currentTextBlock.timestamp = Date.now()
|
||||
context.contentBlocks.push(context.currentTextBlock)
|
||||
}
|
||||
hasProcessedContent = true
|
||||
}
|
||||
contentToProcess = remaining
|
||||
@@ -1759,13 +1612,37 @@ const sseHandlers: Record<string, SSEHandler> = {
|
||||
stream_end: (_data, context, _get, set) => {
|
||||
if (context.pendingContent) {
|
||||
if (context.isInThinkingBlock && context.currentThinkingBlock) {
|
||||
appendThinkingContent(context, context.pendingContent)
|
||||
context.currentThinkingBlock.content += context.pendingContent
|
||||
} else if (context.pendingContent.trim()) {
|
||||
appendTextBlock(context, context.pendingContent)
|
||||
context.accumulatedContent.append(context.pendingContent)
|
||||
if (context.currentTextBlock && context.contentBlocks.length > 0) {
|
||||
const lastBlock = context.contentBlocks[context.contentBlocks.length - 1]
|
||||
if (lastBlock.type === TEXT_BLOCK_TYPE && lastBlock === context.currentTextBlock) {
|
||||
lastBlock.content += context.pendingContent
|
||||
} else {
|
||||
context.currentTextBlock = contentBlockPool.get()
|
||||
context.currentTextBlock.type = TEXT_BLOCK_TYPE
|
||||
context.currentTextBlock.content = context.pendingContent
|
||||
context.currentTextBlock.timestamp = Date.now()
|
||||
context.contentBlocks.push(context.currentTextBlock)
|
||||
}
|
||||
} else {
|
||||
context.currentTextBlock = contentBlockPool.get()
|
||||
context.currentTextBlock.type = TEXT_BLOCK_TYPE
|
||||
context.currentTextBlock.content = context.pendingContent
|
||||
context.currentTextBlock.timestamp = Date.now()
|
||||
context.contentBlocks.push(context.currentTextBlock)
|
||||
}
|
||||
}
|
||||
context.pendingContent = ''
|
||||
}
|
||||
finalizeThinkingBlock(context)
|
||||
if (context.currentThinkingBlock) {
|
||||
context.currentThinkingBlock.duration =
|
||||
Date.now() - (context.currentThinkingBlock.startTime || Date.now())
|
||||
}
|
||||
context.isInThinkingBlock = false
|
||||
context.currentThinkingBlock = null
|
||||
context.currentTextBlock = null
|
||||
updateStreamingMessage(set, context)
|
||||
},
|
||||
default: () => {},
|
||||
@@ -1863,7 +1740,29 @@ const subAgentSSEHandlers: Record<string, SSEHandler> = {
|
||||
return
|
||||
}
|
||||
|
||||
appendSubAgentText(context, parentToolCallId, data.data)
|
||||
// Initialize if needed
|
||||
if (!context.subAgentContent[parentToolCallId]) {
|
||||
context.subAgentContent[parentToolCallId] = ''
|
||||
}
|
||||
if (!context.subAgentBlocks[parentToolCallId]) {
|
||||
context.subAgentBlocks[parentToolCallId] = []
|
||||
}
|
||||
|
||||
// Append content
|
||||
context.subAgentContent[parentToolCallId] += data.data
|
||||
|
||||
// Update or create the last text block in subAgentBlocks
|
||||
const blocks = context.subAgentBlocks[parentToolCallId]
|
||||
const lastBlock = blocks[blocks.length - 1]
|
||||
if (lastBlock && lastBlock.type === 'subagent_text') {
|
||||
lastBlock.content = (lastBlock.content || '') + data.data
|
||||
} else {
|
||||
blocks.push({
|
||||
type: 'subagent_text',
|
||||
content: data.data,
|
||||
timestamp: Date.now(),
|
||||
})
|
||||
}
|
||||
|
||||
updateToolCallWithSubAgentData(context, get, set, parentToolCallId)
|
||||
},
|
||||
@@ -1874,13 +1773,34 @@ const subAgentSSEHandlers: Record<string, SSEHandler> = {
|
||||
const phase = data?.phase || data?.data?.phase
|
||||
if (!parentToolCallId) return
|
||||
|
||||
// Initialize if needed
|
||||
if (!context.subAgentContent[parentToolCallId]) {
|
||||
context.subAgentContent[parentToolCallId] = ''
|
||||
}
|
||||
if (!context.subAgentBlocks[parentToolCallId]) {
|
||||
context.subAgentBlocks[parentToolCallId] = []
|
||||
}
|
||||
|
||||
// For reasoning, we just append the content (treating start/end as markers)
|
||||
if (phase === 'start' || phase === 'end') return
|
||||
|
||||
const chunk = typeof data?.data === 'string' ? data.data : data?.content || ''
|
||||
if (!chunk) return
|
||||
|
||||
appendSubAgentText(context, parentToolCallId, chunk)
|
||||
context.subAgentContent[parentToolCallId] += chunk
|
||||
|
||||
// Update or create the last text block in subAgentBlocks
|
||||
const blocks = context.subAgentBlocks[parentToolCallId]
|
||||
const lastBlock = blocks[blocks.length - 1]
|
||||
if (lastBlock && lastBlock.type === 'subagent_text') {
|
||||
lastBlock.content = (lastBlock.content || '') + chunk
|
||||
} else {
|
||||
blocks.push({
|
||||
type: 'subagent_text',
|
||||
content: chunk,
|
||||
timestamp: Date.now(),
|
||||
})
|
||||
}
|
||||
|
||||
updateToolCallWithSubAgentData(context, get, set, parentToolCallId)
|
||||
},
|
||||
@@ -1899,7 +1819,6 @@ const subAgentSSEHandlers: Record<string, SSEHandler> = {
|
||||
const id: string | undefined = toolData.id || data?.toolCallId
|
||||
const name: string | undefined = toolData.name || data?.toolName
|
||||
if (!id || !name) return
|
||||
const isPartial = toolData.partial === true
|
||||
|
||||
// Arguments can come in different locations depending on SSE format
|
||||
// Check multiple possible locations
|
||||
@@ -1966,10 +1885,6 @@ const subAgentSSEHandlers: Record<string, SSEHandler> = {
|
||||
|
||||
updateToolCallWithSubAgentData(context, get, set, parentToolCallId)
|
||||
|
||||
if (isPartial) {
|
||||
return
|
||||
}
|
||||
|
||||
// Execute client tools in parallel (non-blocking) - same pattern as main tool_call handler
|
||||
try {
|
||||
const def = getTool(name)
|
||||
@@ -2087,14 +2002,6 @@ const MIN_BATCH_INTERVAL = 16
|
||||
const MAX_BATCH_INTERVAL = 50
|
||||
const MAX_QUEUE_SIZE = 5
|
||||
|
||||
function stopStreamingUpdates() {
|
||||
if (streamingUpdateRAF !== null) {
|
||||
cancelAnimationFrame(streamingUpdateRAF)
|
||||
streamingUpdateRAF = null
|
||||
}
|
||||
streamingUpdateQueue.clear()
|
||||
}
|
||||
|
||||
function createOptimizedContentBlocks(contentBlocks: any[]): any[] {
|
||||
const result: any[] = new Array(contentBlocks.length)
|
||||
for (let i = 0; i < contentBlocks.length; i++) {
|
||||
@@ -2202,7 +2109,6 @@ const initialState = {
|
||||
messages: [] as CopilotMessage[],
|
||||
checkpoints: [] as any[],
|
||||
messageCheckpoints: {} as Record<string, any[]>,
|
||||
messageSnapshots: {} as Record<string, WorkflowState>,
|
||||
isLoading: false,
|
||||
isLoadingChats: false,
|
||||
isLoadingCheckpoints: false,
|
||||
@@ -2226,7 +2132,6 @@ const initialState = {
|
||||
suppressAutoSelect: false,
|
||||
autoAllowedTools: [] as string[],
|
||||
messageQueue: [] as import('./types').QueuedMessage[],
|
||||
suppressAbortContinueOption: false,
|
||||
}
|
||||
|
||||
export const useCopilotStore = create<CopilotStore>()(
|
||||
@@ -2249,7 +2154,7 @@ export const useCopilotStore = create<CopilotStore>()(
|
||||
// Abort all in-progress tools and clear any diff preview
|
||||
abortAllInProgressTools(set, get)
|
||||
try {
|
||||
useWorkflowDiffStore.getState().clearDiff({ restoreBaseline: false })
|
||||
useWorkflowDiffStore.getState().clearDiff()
|
||||
} catch {}
|
||||
|
||||
set({
|
||||
@@ -2283,7 +2188,7 @@ export const useCopilotStore = create<CopilotStore>()(
|
||||
// Abort in-progress tools and clear diff when changing chats
|
||||
abortAllInProgressTools(set, get)
|
||||
try {
|
||||
useWorkflowDiffStore.getState().clearDiff({ restoreBaseline: false })
|
||||
useWorkflowDiffStore.getState().clearDiff()
|
||||
} catch {}
|
||||
|
||||
// Restore plan content and config (mode/model) from selected chat
|
||||
@@ -2376,7 +2281,7 @@ export const useCopilotStore = create<CopilotStore>()(
|
||||
// Abort in-progress tools and clear diff on new chat
|
||||
abortAllInProgressTools(set, get)
|
||||
try {
|
||||
useWorkflowDiffStore.getState().clearDiff({ restoreBaseline: false })
|
||||
useWorkflowDiffStore.getState().clearDiff()
|
||||
} catch {}
|
||||
|
||||
// Background-save the current chat before clearing (optimistic)
|
||||
@@ -2549,14 +2454,7 @@ export const useCopilotStore = create<CopilotStore>()(
|
||||
|
||||
// Send a message (streaming only)
|
||||
sendMessage: async (message: string, options = {}) => {
|
||||
const {
|
||||
workflowId,
|
||||
currentChat,
|
||||
mode,
|
||||
revertState,
|
||||
isSendingMessage,
|
||||
abortController: activeAbortController,
|
||||
} = get()
|
||||
const { workflowId, currentChat, mode, revertState, isSendingMessage } = get()
|
||||
const {
|
||||
stream = true,
|
||||
fileAttachments,
|
||||
@@ -2572,17 +2470,7 @@ export const useCopilotStore = create<CopilotStore>()(
|
||||
if (!workflowId) return
|
||||
|
||||
// If already sending a message, queue this one instead
|
||||
if (isSendingMessage && !activeAbortController) {
|
||||
logger.warn('[Copilot] sendMessage: stale sending state detected, clearing', {
|
||||
originalMessageId: messageId,
|
||||
})
|
||||
set({ isSendingMessage: false })
|
||||
} else if (isSendingMessage && activeAbortController?.signal.aborted) {
|
||||
logger.warn('[Copilot] sendMessage: aborted controller detected, clearing', {
|
||||
originalMessageId: messageId,
|
||||
})
|
||||
set({ isSendingMessage: false, abortController: null })
|
||||
} else if (isSendingMessage) {
|
||||
if (isSendingMessage) {
|
||||
get().addToQueue(message, { fileAttachments, contexts, messageId })
|
||||
logger.info('[Copilot] Message queued (already sending)', {
|
||||
queueLength: get().messageQueue.length + 1,
|
||||
@@ -2591,17 +2479,11 @@ export const useCopilotStore = create<CopilotStore>()(
|
||||
return
|
||||
}
|
||||
|
||||
const nextAbortController = new AbortController()
|
||||
set({ isSendingMessage: true, error: null, abortController: nextAbortController })
|
||||
const abortController = new AbortController()
|
||||
set({ isSendingMessage: true, error: null, abortController })
|
||||
|
||||
const userMessage = createUserMessage(message, fileAttachments, contexts, messageId)
|
||||
const streamingMessage = createStreamingMessage()
|
||||
const snapshot = workflowId ? buildCheckpointWorkflowState(workflowId) : null
|
||||
if (snapshot) {
|
||||
set((state) => ({
|
||||
messageSnapshots: { ...state.messageSnapshots, [userMessage.id]: snapshot },
|
||||
}))
|
||||
}
|
||||
|
||||
let newMessages: CopilotMessage[]
|
||||
if (revertState) {
|
||||
@@ -2666,7 +2548,7 @@ export const useCopilotStore = create<CopilotStore>()(
|
||||
}
|
||||
|
||||
// Call copilot API
|
||||
const apiMode: CopilotTransportMode =
|
||||
const apiMode: 'ask' | 'agent' | 'plan' =
|
||||
mode === 'ask' ? 'ask' : mode === 'plan' ? 'plan' : 'agent'
|
||||
|
||||
// Extract slash commands from contexts (lowercase) and filter them out from contexts
|
||||
@@ -2688,7 +2570,7 @@ export const useCopilotStore = create<CopilotStore>()(
|
||||
fileAttachments,
|
||||
contexts: filteredContexts,
|
||||
commands: commands?.length ? commands : undefined,
|
||||
abortSignal: nextAbortController.signal,
|
||||
abortSignal: abortController.signal,
|
||||
})
|
||||
|
||||
if (result.success && result.stream) {
|
||||
@@ -2758,14 +2640,12 @@ export const useCopilotStore = create<CopilotStore>()(
|
||||
},
|
||||
|
||||
// Abort streaming
|
||||
abortMessage: (options?: { suppressContinueOption?: boolean }) => {
|
||||
abortMessage: () => {
|
||||
const { abortController, isSendingMessage, messages } = get()
|
||||
if (!isSendingMessage || !abortController) return
|
||||
const suppressContinueOption = options?.suppressContinueOption === true
|
||||
set({ isAborting: true, suppressAbortContinueOption: suppressContinueOption })
|
||||
set({ isAborting: true })
|
||||
try {
|
||||
abortController.abort()
|
||||
stopStreamingUpdates()
|
||||
const lastMessage = messages[messages.length - 1]
|
||||
if (lastMessage && lastMessage.role === 'assistant') {
|
||||
const textContent =
|
||||
@@ -2773,21 +2653,10 @@ export const useCopilotStore = create<CopilotStore>()(
|
||||
?.filter((b) => b.type === 'text')
|
||||
.map((b: any) => b.content)
|
||||
.join('') || ''
|
||||
const nextContentBlocks = suppressContinueOption
|
||||
? (lastMessage.contentBlocks ?? [])
|
||||
: appendContinueOptionBlock(
|
||||
lastMessage.contentBlocks ? [...lastMessage.contentBlocks] : []
|
||||
)
|
||||
set((state) => ({
|
||||
messages: state.messages.map((msg) =>
|
||||
msg.id === lastMessage.id
|
||||
? {
|
||||
...msg,
|
||||
content: suppressContinueOption
|
||||
? textContent.trim() || 'Message was aborted'
|
||||
: appendContinueOption(textContent.trim() || 'Message was aborted'),
|
||||
contentBlocks: nextContentBlocks,
|
||||
}
|
||||
? { ...msg, content: textContent.trim() || 'Message was aborted' }
|
||||
: msg
|
||||
),
|
||||
isSendingMessage: false,
|
||||
@@ -3086,10 +2955,6 @@ export const useCopilotStore = create<CopilotStore>()(
|
||||
if (!workflowId) return
|
||||
set({ isRevertingCheckpoint: true, checkpointError: null })
|
||||
try {
|
||||
const { messageCheckpoints } = get()
|
||||
const checkpointMessageId = Object.entries(messageCheckpoints).find(([, cps]) =>
|
||||
(cps || []).some((cp: any) => cp?.id === checkpointId)
|
||||
)?.[0]
|
||||
const response = await fetch('/api/copilot/checkpoints/revert', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
@@ -3135,11 +3000,6 @@ export const useCopilotStore = create<CopilotStore>()(
|
||||
},
|
||||
})
|
||||
}
|
||||
if (checkpointMessageId) {
|
||||
const { messageCheckpoints: currentCheckpoints } = get()
|
||||
const updatedCheckpoints = { ...currentCheckpoints, [checkpointMessageId]: [] }
|
||||
set({ messageCheckpoints: updatedCheckpoints })
|
||||
}
|
||||
set({ isRevertingCheckpoint: false })
|
||||
} catch (error) {
|
||||
set({
|
||||
@@ -3153,10 +3013,6 @@ export const useCopilotStore = create<CopilotStore>()(
|
||||
const { messageCheckpoints } = get()
|
||||
return messageCheckpoints[messageId] || []
|
||||
},
|
||||
saveMessageCheckpoint: async (messageId: string) => {
|
||||
if (!messageId) return false
|
||||
return saveMessageCheckpoint(messageId, get, set)
|
||||
},
|
||||
|
||||
// Handle streaming response
|
||||
handleStreamingResponse: async (
|
||||
@@ -3204,19 +3060,7 @@ export const useCopilotStore = create<CopilotStore>()(
|
||||
try {
|
||||
for await (const data of parseSSEStream(reader, decoder)) {
|
||||
const { abortController } = get()
|
||||
if (abortController?.signal.aborted) {
|
||||
context.wasAborted = true
|
||||
const { suppressAbortContinueOption } = get()
|
||||
context.suppressContinueOption = suppressAbortContinueOption === true
|
||||
if (suppressAbortContinueOption) {
|
||||
set({ suppressAbortContinueOption: false })
|
||||
}
|
||||
context.pendingContent = ''
|
||||
finalizeThinkingBlock(context)
|
||||
stopStreamingUpdates()
|
||||
reader.cancel()
|
||||
break
|
||||
}
|
||||
if (abortController?.signal.aborted) break
|
||||
|
||||
// Log SSE events for debugging
|
||||
logger.info('[SSE] Received event', {
|
||||
@@ -3316,9 +3160,7 @@ export const useCopilotStore = create<CopilotStore>()(
|
||||
if (context.streamComplete) break
|
||||
}
|
||||
|
||||
if (!context.wasAborted && sseHandlers.stream_end) {
|
||||
sseHandlers.stream_end({}, context, get, set)
|
||||
}
|
||||
if (sseHandlers.stream_end) sseHandlers.stream_end({}, context, get, set)
|
||||
|
||||
if (streamingUpdateRAF !== null) {
|
||||
cancelAnimationFrame(streamingUpdateRAF)
|
||||
@@ -3335,9 +3177,6 @@ export const useCopilotStore = create<CopilotStore>()(
|
||||
: block
|
||||
)
|
||||
}
|
||||
if (context.wasAborted && !context.suppressContinueOption) {
|
||||
sanitizedContentBlocks = appendContinueOptionBlock(sanitizedContentBlocks)
|
||||
}
|
||||
|
||||
if (context.contentBlocks) {
|
||||
context.contentBlocks.forEach((block) => {
|
||||
@@ -3348,37 +3187,21 @@ export const useCopilotStore = create<CopilotStore>()(
|
||||
}
|
||||
|
||||
const finalContent = stripTodoTags(context.accumulatedContent.toString())
|
||||
const finalContentWithOptions =
|
||||
context.wasAborted && !context.suppressContinueOption
|
||||
? appendContinueOption(finalContent)
|
||||
: finalContent
|
||||
set((state) => {
|
||||
const snapshotId = state.currentUserMessageId
|
||||
const nextSnapshots =
|
||||
snapshotId && state.messageSnapshots[snapshotId]
|
||||
? (() => {
|
||||
const updated = { ...state.messageSnapshots }
|
||||
delete updated[snapshotId]
|
||||
return updated
|
||||
})()
|
||||
: state.messageSnapshots
|
||||
return {
|
||||
messages: state.messages.map((msg) =>
|
||||
msg.id === assistantMessageId
|
||||
? {
|
||||
...msg,
|
||||
content: finalContentWithOptions,
|
||||
contentBlocks: sanitizedContentBlocks,
|
||||
}
|
||||
: msg
|
||||
),
|
||||
isSendingMessage: false,
|
||||
isAborting: false,
|
||||
abortController: null,
|
||||
currentUserMessageId: null,
|
||||
messageSnapshots: nextSnapshots,
|
||||
}
|
||||
})
|
||||
set((state) => ({
|
||||
messages: state.messages.map((msg) =>
|
||||
msg.id === assistantMessageId
|
||||
? {
|
||||
...msg,
|
||||
content: finalContent,
|
||||
contentBlocks: sanitizedContentBlocks,
|
||||
}
|
||||
: msg
|
||||
),
|
||||
isSendingMessage: false,
|
||||
isAborting: false,
|
||||
abortController: null,
|
||||
currentUserMessageId: null,
|
||||
}))
|
||||
|
||||
if (context.newChatId && !get().currentChat) {
|
||||
await get().handleNewChatCreation(context.newChatId)
|
||||
@@ -3886,7 +3709,7 @@ export const useCopilotStore = create<CopilotStore>()(
|
||||
// If currently sending, abort and send this one
|
||||
const { isSendingMessage } = get()
|
||||
if (isSendingMessage) {
|
||||
get().abortMessage({ suppressContinueOption: true })
|
||||
get().abortMessage()
|
||||
// Wait a tick for abort to complete
|
||||
await new Promise((resolve) => setTimeout(resolve, 50))
|
||||
}
|
||||
|
||||
@@ -1,9 +1,4 @@
|
||||
import type { CopilotMode, CopilotModelId } from '@/lib/copilot/models'
|
||||
|
||||
export type { CopilotMode, CopilotModelId } from '@/lib/copilot/models'
|
||||
|
||||
import type { ClientToolCallState, ClientToolDisplay } from '@/lib/copilot/tools/client/base-tool'
|
||||
import type { WorkflowState } from '@/stores/workflows/workflow/types'
|
||||
|
||||
export type ToolState = ClientToolCallState
|
||||
|
||||
@@ -96,9 +91,33 @@ import type { CopilotChat as ApiCopilotChat } from '@/lib/copilot/api'
|
||||
|
||||
export type CopilotChat = ApiCopilotChat
|
||||
|
||||
export type CopilotMode = 'ask' | 'build' | 'plan'
|
||||
|
||||
export interface CopilotState {
|
||||
mode: CopilotMode
|
||||
selectedModel: CopilotModelId
|
||||
selectedModel:
|
||||
| 'gpt-5-fast'
|
||||
| 'gpt-5'
|
||||
| 'gpt-5-medium'
|
||||
| 'gpt-5-high'
|
||||
| 'gpt-5.1-fast'
|
||||
| 'gpt-5.1'
|
||||
| 'gpt-5.1-medium'
|
||||
| 'gpt-5.1-high'
|
||||
| 'gpt-5-codex'
|
||||
| 'gpt-5.1-codex'
|
||||
| 'gpt-5.2'
|
||||
| 'gpt-5.2-codex'
|
||||
| 'gpt-5.2-pro'
|
||||
| 'gpt-4o'
|
||||
| 'gpt-4.1'
|
||||
| 'o3'
|
||||
| 'claude-4-sonnet'
|
||||
| 'claude-4.5-haiku'
|
||||
| 'claude-4.5-sonnet'
|
||||
| 'claude-4.5-opus'
|
||||
| 'claude-4.1-opus'
|
||||
| 'gemini-3-pro'
|
||||
agentPrefetch: boolean
|
||||
enabledModels: string[] | null // Null means not loaded yet, array of model IDs when loaded
|
||||
isCollapsed: boolean
|
||||
@@ -110,7 +129,6 @@ export interface CopilotState {
|
||||
|
||||
checkpoints: any[]
|
||||
messageCheckpoints: Record<string, any[]>
|
||||
messageSnapshots: Record<string, WorkflowState>
|
||||
|
||||
isLoading: boolean
|
||||
isLoadingChats: boolean
|
||||
@@ -119,8 +137,6 @@ export interface CopilotState {
|
||||
isSaving: boolean
|
||||
isRevertingCheckpoint: boolean
|
||||
isAborting: boolean
|
||||
/** Skip adding Continue option on abort for queued send-now */
|
||||
suppressAbortContinueOption?: boolean
|
||||
|
||||
error: string | null
|
||||
saveError: string | null
|
||||
@@ -181,7 +197,7 @@ export interface CopilotActions {
|
||||
messageId?: string
|
||||
}
|
||||
) => Promise<void>
|
||||
abortMessage: (options?: { suppressContinueOption?: boolean }) => void
|
||||
abortMessage: () => void
|
||||
sendImplicitFeedback: (
|
||||
implicitFeedback: string,
|
||||
toolCallState?: 'accepted' | 'rejected' | 'error'
|
||||
@@ -199,7 +215,6 @@ export interface CopilotActions {
|
||||
loadMessageCheckpoints: (chatId: string) => Promise<void>
|
||||
revertToCheckpoint: (checkpointId: string) => Promise<void>
|
||||
getCheckpointsForMessage: (messageId: string) => any[]
|
||||
saveMessageCheckpoint: (messageId: string) => Promise<boolean>
|
||||
|
||||
clearMessages: () => void
|
||||
clearError: () => void
|
||||
|
||||
@@ -23,32 +23,6 @@ import {
|
||||
const logger = createLogger('WorkflowDiffStore')
|
||||
const diffEngine = new WorkflowDiffEngine()
|
||||
|
||||
/**
|
||||
* Detects when a diff contains no meaningful changes.
|
||||
*/
|
||||
function isEmptyDiffAnalysis(
|
||||
diffAnalysis?: {
|
||||
new_blocks?: string[]
|
||||
edited_blocks?: string[]
|
||||
deleted_blocks?: string[]
|
||||
field_diffs?: Record<string, { changed_fields: string[] }>
|
||||
edge_diff?: { new_edges?: string[]; deleted_edges?: string[] }
|
||||
} | null
|
||||
): boolean {
|
||||
if (!diffAnalysis) return false
|
||||
const hasBlockChanges =
|
||||
(diffAnalysis.new_blocks?.length || 0) > 0 ||
|
||||
(diffAnalysis.edited_blocks?.length || 0) > 0 ||
|
||||
(diffAnalysis.deleted_blocks?.length || 0) > 0
|
||||
const hasEdgeChanges =
|
||||
(diffAnalysis.edge_diff?.new_edges?.length || 0) > 0 ||
|
||||
(diffAnalysis.edge_diff?.deleted_edges?.length || 0) > 0
|
||||
const hasFieldChanges = Object.values(diffAnalysis.field_diffs || {}).some(
|
||||
(diff) => (diff?.changed_fields?.length || 0) > 0
|
||||
)
|
||||
return !hasBlockChanges && !hasEdgeChanges && !hasFieldChanges
|
||||
}
|
||||
|
||||
export const useWorkflowDiffStore = create<WorkflowDiffState & WorkflowDiffActions>()(
|
||||
devtools(
|
||||
(set, get) => {
|
||||
@@ -101,24 +75,6 @@ export const useWorkflowDiffStore = create<WorkflowDiffState & WorkflowDiffActio
|
||||
throw new Error(errorMessage)
|
||||
}
|
||||
|
||||
const diffAnalysisResult = diffResult.diff.diffAnalysis || null
|
||||
if (isEmptyDiffAnalysis(diffAnalysisResult)) {
|
||||
logger.info('No workflow diff detected; skipping diff view')
|
||||
diffEngine.clearDiff()
|
||||
batchedUpdate({
|
||||
hasActiveDiff: false,
|
||||
isShowingDiff: false,
|
||||
isDiffReady: false,
|
||||
baselineWorkflow: null,
|
||||
baselineWorkflowId: null,
|
||||
diffAnalysis: null,
|
||||
diffMetadata: null,
|
||||
diffError: null,
|
||||
_triggerMessageId: null,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
const candidateState = diffResult.diff.proposedState
|
||||
|
||||
// Validate proposed workflow using serializer round-trip
|
||||
@@ -147,22 +103,12 @@ export const useWorkflowDiffStore = create<WorkflowDiffState & WorkflowDiffActio
|
||||
isDiffReady: true,
|
||||
baselineWorkflow: baselineWorkflow,
|
||||
baselineWorkflowId,
|
||||
diffAnalysis: diffAnalysisResult,
|
||||
diffAnalysis: diffResult.diff.diffAnalysis || null,
|
||||
diffMetadata: diffResult.diff.metadata,
|
||||
diffError: null,
|
||||
_triggerMessageId: triggerMessageId ?? null,
|
||||
})
|
||||
|
||||
if (triggerMessageId) {
|
||||
import('@/stores/panel/copilot/store')
|
||||
.then(({ useCopilotStore }) =>
|
||||
useCopilotStore.getState().saveMessageCheckpoint(triggerMessageId)
|
||||
)
|
||||
.catch((error) => {
|
||||
logger.warn('Failed to save checkpoint for diff', { error })
|
||||
})
|
||||
}
|
||||
|
||||
logger.info('Workflow diff applied optimistically', {
|
||||
workflowId: activeWorkflowId,
|
||||
blocks: Object.keys(candidateState.blocks || {}).length,
|
||||
|
||||
@@ -848,35 +848,6 @@ export const useWorkflowStore = create<WorkflowStore>()(
|
||||
// Note: Socket.IO handles real-time sync automatically
|
||||
},
|
||||
|
||||
setBlockCanonicalMode: (id: string, canonicalId: string, mode: 'basic' | 'advanced') => {
|
||||
set((state) => {
|
||||
const block = state.blocks[id]
|
||||
if (!block) {
|
||||
return state
|
||||
}
|
||||
|
||||
const currentData = block.data || {}
|
||||
const currentCanonicalModes = currentData.canonicalModes || {}
|
||||
const canonicalModes = { ...currentCanonicalModes, [canonicalId]: mode }
|
||||
|
||||
return {
|
||||
blocks: {
|
||||
...state.blocks,
|
||||
[id]: {
|
||||
...block,
|
||||
data: {
|
||||
...currentData,
|
||||
canonicalModes,
|
||||
},
|
||||
},
|
||||
},
|
||||
edges: [...state.edges],
|
||||
loops: { ...state.loops },
|
||||
}
|
||||
})
|
||||
get().updateLastSaved()
|
||||
},
|
||||
|
||||
setBlockTriggerMode: (id: string, triggerMode: boolean) => {
|
||||
set((state) => ({
|
||||
blocks: {
|
||||
|
||||
@@ -63,9 +63,6 @@ export interface BlockData {
|
||||
|
||||
// Container node type (for ReactFlow node type determination)
|
||||
type?: string
|
||||
|
||||
/** Canonical swap overrides keyed by canonicalParamId */
|
||||
canonicalModes?: Record<string, 'basic' | 'advanced'>
|
||||
}
|
||||
|
||||
export interface BlockLayoutState {
|
||||
@@ -221,7 +218,6 @@ export interface WorkflowActions {
|
||||
changedSubblocks: Array<{ blockId: string; subBlockId: string; newValue: any }>
|
||||
}
|
||||
setBlockAdvancedMode: (id: string, advancedMode: boolean) => void
|
||||
setBlockCanonicalMode: (id: string, canonicalId: string, mode: 'basic' | 'advanced') => void
|
||||
setBlockTriggerMode: (id: string, triggerMode: boolean) => void
|
||||
updateBlockLayoutMetrics: (id: string, dimensions: { width: number; height: number }) => void
|
||||
triggerUpdate: () => void
|
||||
|
||||
@@ -85,24 +85,17 @@ const coreOutputs = {
|
||||
type: 'string',
|
||||
description: 'Lemlist team identifier',
|
||||
},
|
||||
} as const
|
||||
|
||||
/**
|
||||
* Campaign-related fields - only present when activity is part of a campaign
|
||||
* These may be missing for first replies or activities outside campaign context
|
||||
*/
|
||||
const campaignOutputs = {
|
||||
leadId: {
|
||||
type: 'string',
|
||||
description: 'Lead identifier (only present for campaign activities)',
|
||||
description: 'Lead identifier',
|
||||
},
|
||||
campaignId: {
|
||||
type: 'string',
|
||||
description: 'Campaign identifier (only present for campaign activities)',
|
||||
description: 'Campaign identifier',
|
||||
},
|
||||
campaignName: {
|
||||
type: 'string',
|
||||
description: 'Campaign name (only present for campaign activities)',
|
||||
description: 'Campaign name',
|
||||
},
|
||||
} as const
|
||||
|
||||
@@ -200,7 +193,6 @@ const emailContentOutputs = {
|
||||
export function buildEmailSentOutputs(): Record<string, TriggerOutput> {
|
||||
return {
|
||||
...coreOutputs,
|
||||
...campaignOutputs,
|
||||
...leadOutputs,
|
||||
...sequenceOutputs,
|
||||
...senderOutputs,
|
||||
@@ -214,7 +206,6 @@ export function buildEmailSentOutputs(): Record<string, TriggerOutput> {
|
||||
export function buildEmailRepliedOutputs(): Record<string, TriggerOutput> {
|
||||
return {
|
||||
...coreOutputs,
|
||||
...campaignOutputs,
|
||||
...leadOutputs,
|
||||
...sequenceOutputs,
|
||||
...senderOutputs,
|
||||
@@ -228,7 +219,6 @@ export function buildEmailRepliedOutputs(): Record<string, TriggerOutput> {
|
||||
export function buildEmailOpenedOutputs(): Record<string, TriggerOutput> {
|
||||
return {
|
||||
...coreOutputs,
|
||||
...campaignOutputs,
|
||||
...leadOutputs,
|
||||
...sequenceOutputs,
|
||||
...senderOutputs,
|
||||
@@ -245,7 +235,6 @@ export function buildEmailOpenedOutputs(): Record<string, TriggerOutput> {
|
||||
export function buildEmailClickedOutputs(): Record<string, TriggerOutput> {
|
||||
return {
|
||||
...coreOutputs,
|
||||
...campaignOutputs,
|
||||
...leadOutputs,
|
||||
...sequenceOutputs,
|
||||
...senderOutputs,
|
||||
@@ -266,7 +255,6 @@ export function buildEmailClickedOutputs(): Record<string, TriggerOutput> {
|
||||
export function buildEmailBouncedOutputs(): Record<string, TriggerOutput> {
|
||||
return {
|
||||
...coreOutputs,
|
||||
...campaignOutputs,
|
||||
...leadOutputs,
|
||||
...sequenceOutputs,
|
||||
...senderOutputs,
|
||||
@@ -287,7 +275,6 @@ export function buildEmailBouncedOutputs(): Record<string, TriggerOutput> {
|
||||
export function buildLinkedInRepliedOutputs(): Record<string, TriggerOutput> {
|
||||
return {
|
||||
...coreOutputs,
|
||||
...campaignOutputs,
|
||||
...leadOutputs,
|
||||
...sequenceOutputs,
|
||||
text: {
|
||||
@@ -303,7 +290,6 @@ export function buildLinkedInRepliedOutputs(): Record<string, TriggerOutput> {
|
||||
export function buildInterestOutputs(): Record<string, TriggerOutput> {
|
||||
return {
|
||||
...coreOutputs,
|
||||
...campaignOutputs,
|
||||
...leadOutputs,
|
||||
...sequenceOutputs,
|
||||
} as Record<string, TriggerOutput>
|
||||
@@ -316,7 +302,6 @@ export function buildInterestOutputs(): Record<string, TriggerOutput> {
|
||||
export function buildLemlistOutputs(): Record<string, TriggerOutput> {
|
||||
return {
|
||||
...coreOutputs,
|
||||
...campaignOutputs,
|
||||
...leadOutputs,
|
||||
...sequenceOutputs,
|
||||
...senderOutputs,
|
||||
|
||||
@@ -1,26 +0,0 @@
|
||||
ALTER TABLE "webhook" DROP CONSTRAINT "webhook_block_id_workflow_blocks_id_fk";
|
||||
--> statement-breakpoint
|
||||
ALTER TABLE "workflow_schedule" DROP CONSTRAINT "workflow_schedule_block_id_workflow_blocks_id_fk";
|
||||
--> statement-breakpoint
|
||||
DROP INDEX "path_idx";--> statement-breakpoint
|
||||
DROP INDEX "workflow_schedule_workflow_block_unique";--> statement-breakpoint
|
||||
ALTER TABLE "webhook" ADD COLUMN "deployment_version_id" text;--> statement-breakpoint
|
||||
ALTER TABLE "workflow_schedule" ADD COLUMN "deployment_version_id" text;--> statement-breakpoint
|
||||
ALTER TABLE "webhook" ADD CONSTRAINT "webhook_deployment_version_id_workflow_deployment_version_id_fk" FOREIGN KEY ("deployment_version_id") REFERENCES "public"."workflow_deployment_version"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
||||
ALTER TABLE "workflow_schedule" ADD CONSTRAINT "workflow_schedule_deployment_version_id_workflow_deployment_version_id_fk" FOREIGN KEY ("deployment_version_id") REFERENCES "public"."workflow_deployment_version"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
||||
CREATE UNIQUE INDEX "path_deployment_unique" ON "webhook" USING btree ("path","deployment_version_id");--> statement-breakpoint
|
||||
CREATE INDEX "webhook_workflow_deployment_idx" ON "webhook" USING btree ("workflow_id","deployment_version_id");--> statement-breakpoint
|
||||
CREATE UNIQUE INDEX "workflow_schedule_workflow_block_deployment_unique" ON "workflow_schedule" USING btree ("workflow_id","block_id","deployment_version_id");--> statement-breakpoint
|
||||
CREATE INDEX "workflow_schedule_workflow_deployment_idx" ON "workflow_schedule" USING btree ("workflow_id","deployment_version_id");--> statement-breakpoint
|
||||
UPDATE "webhook" AS w
|
||||
SET "deployment_version_id" = dv."id"
|
||||
FROM "workflow_deployment_version" AS dv
|
||||
WHERE dv."workflow_id" = w."workflow_id"
|
||||
AND dv."is_active" = true
|
||||
AND w."deployment_version_id" IS NULL;--> statement-breakpoint
|
||||
UPDATE "workflow_schedule" AS ws
|
||||
SET "deployment_version_id" = dv."id"
|
||||
FROM "workflow_deployment_version" AS dv
|
||||
WHERE dv."workflow_id" = ws."workflow_id"
|
||||
AND dv."is_active" = true
|
||||
AND ws."deployment_version_id" IS NULL;
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1009,13 +1009,6 @@
|
||||
"when": 1768582494384,
|
||||
"tag": "0144_old_killer_shrike",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 145,
|
||||
"version": "7",
|
||||
"when": 1768602646955,
|
||||
"tag": "0145_messy_archangel",
|
||||
"breakpoints": true
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
@@ -492,11 +492,7 @@ export const workflowSchedule = pgTable(
|
||||
workflowId: text('workflow_id')
|
||||
.notNull()
|
||||
.references(() => workflow.id, { onDelete: 'cascade' }),
|
||||
deploymentVersionId: text('deployment_version_id').references(
|
||||
() => workflowDeploymentVersion.id,
|
||||
{ onDelete: 'cascade' }
|
||||
),
|
||||
blockId: text('block_id'),
|
||||
blockId: text('block_id').references(() => workflowBlocks.id, { onDelete: 'cascade' }),
|
||||
cronExpression: text('cron_expression'),
|
||||
nextRunAt: timestamp('next_run_at'),
|
||||
lastRanAt: timestamp('last_ran_at'),
|
||||
@@ -511,14 +507,9 @@ export const workflowSchedule = pgTable(
|
||||
},
|
||||
(table) => {
|
||||
return {
|
||||
workflowBlockUnique: uniqueIndex('workflow_schedule_workflow_block_deployment_unique').on(
|
||||
workflowBlockUnique: uniqueIndex('workflow_schedule_workflow_block_unique').on(
|
||||
table.workflowId,
|
||||
table.blockId,
|
||||
table.deploymentVersionId
|
||||
),
|
||||
workflowDeploymentIdx: index('workflow_schedule_workflow_deployment_idx').on(
|
||||
table.workflowId,
|
||||
table.deploymentVersionId
|
||||
table.blockId
|
||||
),
|
||||
}
|
||||
}
|
||||
@@ -531,11 +522,7 @@ export const webhook = pgTable(
|
||||
workflowId: text('workflow_id')
|
||||
.notNull()
|
||||
.references(() => workflow.id, { onDelete: 'cascade' }),
|
||||
deploymentVersionId: text('deployment_version_id').references(
|
||||
() => workflowDeploymentVersion.id,
|
||||
{ onDelete: 'cascade' }
|
||||
),
|
||||
blockId: text('block_id'),
|
||||
blockId: text('block_id').references(() => workflowBlocks.id, { onDelete: 'cascade' }), // ID of the webhook trigger block (nullable for legacy starter block webhooks)
|
||||
path: text('path').notNull(),
|
||||
provider: text('provider'), // e.g., "whatsapp", "github", etc.
|
||||
providerConfig: json('provider_config'), // Store provider-specific configuration
|
||||
@@ -550,17 +537,13 @@ export const webhook = pgTable(
|
||||
},
|
||||
(table) => {
|
||||
return {
|
||||
// Ensure webhook paths are unique per deployment version
|
||||
pathIdx: uniqueIndex('path_deployment_unique').on(table.path, table.deploymentVersionId),
|
||||
// Ensure webhook paths are unique
|
||||
pathIdx: uniqueIndex('path_idx').on(table.path),
|
||||
// Optimize queries for webhooks by workflow and block
|
||||
workflowBlockIdx: index('idx_webhook_on_workflow_id_block_id').on(
|
||||
table.workflowId,
|
||||
table.blockId
|
||||
),
|
||||
workflowDeploymentIdx: index('webhook_workflow_deployment_idx').on(
|
||||
table.workflowId,
|
||||
table.deploymentVersionId
|
||||
),
|
||||
// Optimize queries for credential set webhooks
|
||||
credentialSetIdIdx: index('webhook_credential_set_id_idx').on(table.credentialSetId),
|
||||
}
|
||||
|
||||
@@ -260,7 +260,6 @@ const BLOCK_OPERATIONS = {
|
||||
TOGGLE_ENABLED: 'toggle-enabled',
|
||||
UPDATE_PARENT: 'update-parent',
|
||||
UPDATE_ADVANCED_MODE: 'update-advanced-mode',
|
||||
UPDATE_CANONICAL_MODE: 'update-canonical-mode',
|
||||
TOGGLE_HANDLES: 'toggle-handles',
|
||||
} as const
|
||||
|
||||
|
||||
@@ -169,20 +169,8 @@ export const mockBlockConfigs: Record<string, any> = {
|
||||
config: { tool: () => 'slack_send_message' },
|
||||
},
|
||||
subBlocks: [
|
||||
{
|
||||
id: 'channel',
|
||||
type: 'dropdown',
|
||||
title: 'Channel',
|
||||
mode: 'basic',
|
||||
canonicalParamId: 'channel',
|
||||
},
|
||||
{
|
||||
id: 'manualChannel',
|
||||
type: 'short-input',
|
||||
title: 'Channel ID',
|
||||
mode: 'advanced',
|
||||
canonicalParamId: 'channel',
|
||||
},
|
||||
{ id: 'channel', type: 'dropdown', title: 'Channel', mode: 'basic' },
|
||||
{ id: 'manualChannel', type: 'short-input', title: 'Channel ID', mode: 'advanced' },
|
||||
{ id: 'text', type: 'long-input', title: 'Message' },
|
||||
{ id: 'username', type: 'short-input', title: 'Username', mode: 'both' },
|
||||
],
|
||||
|
||||
Reference in New Issue
Block a user