Compare commits

...

15 Commits

Author SHA1 Message Date
Vikhyath Mondreti
bf22dd75ad address bugbot comments 2026-01-24 02:13:06 -08:00
Vikhyath Mondreti
eb767b5ede remove more dead code 2026-01-24 01:58:02 -08:00
Vikhyath Mondreti
594bcac5f2 type more code 2026-01-24 01:54:09 -08:00
Vikhyath Mondreti
d3f20311d0 update type check 2026-01-24 01:45:03 -08:00
Vikhyath Mondreti
587d44ad6f remove overly defensive programming 2026-01-24 01:44:53 -08:00
Vikhyath Mondreti
8bf2e69942 fix(child-workflow): nested spans handoff 2026-01-24 01:37:17 -08:00
Vikhyath Mondreti
12100e6881 improvement(webhooks): remove dead code (#2965)
* fix(webhooks): subscription recreation path

* improvement(webhooks): remove dead code

* fix tests

* address bugbot comments

* fix restoration edge case

* fix more edge cases

* address bugbot comments

* fix gmail polling

* add warnings for UI indication for credential sets
2026-01-23 23:18:20 -08:00
Siddharth Ganesan
23294683e1 fix(copilot): mask credentials fix (#2963)
* Fix copilot masking

* Clean up

* Lint
2026-01-23 19:34:55 -08:00
Vikhyath Mondreti
b913cff46e fix(envvars): resolution standardized (#2957)
* fix(envvars): resolution standardized

* remove comments

* address bugbot

* fix highlighting for env vars

* remove comments

* address greptile

* address bugbot
2026-01-23 18:59:04 -08:00
Waleed
428781ce7d feat(blog): enterprise post (#2961)
* feat(blog): enterprise post

* added more images, styling

* more content

* updated v0-5 post

* remove unused transition

---------

Co-authored-by: Vikhyath Mondreti <vikhyath@simstudio.ai>
2026-01-23 18:58:00 -08:00
Waleed
f0ee67f3ed improvement(helm): add internal ingress support and same-host path consolidation (#2960)
* improvement(helm): add internal ingress support and same-host path consolidation

* improvement(helm): clean up ingress template comments

Simplify verbose inline Helm comments and section dividers to match the
minimal style used in services.yaml.

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>

* fix(helm): add missing copilot path consolidation for realtime host

When copilot.host equals realtime.host but differs from app.host,
copilot paths were not being routed. Added logic to consolidate
copilot paths into the realtime rule for this scenario.

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>

* improvement(helm): follow ingress best practices

- Remove orphan comments that appeared when services were disabled
- Add documentation about path ordering requirements
- Paths rendered in order: realtime, copilot, app (specific before catch-all)
- Clean template output matching industry Helm chart standards

---------

Co-authored-by: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-23 18:44:18 -08:00
Waleed
f44594c380 fix(security): add authentication and input validation to API routes (#2959)
* fix(security): add authentication and input validation to API routes

* moved utils

* remove extraneous comments

* removed unused dep
2026-01-23 17:48:39 -08:00
Emir Karabeg
6464cfa7f2 fix(logs): refresh logic to refresh logs details (#2958) 2026-01-23 17:22:33 -08:00
Waleed
7f4edc85ef fix(billing): handle missing userStats and prevent crashes (#2956)
* fix(billing): handle missing userStats and prevent crashes

* fix(billing): correct import path for getFilledPillColor

* fix(billing): add Number.isFinite check to lastPeriodCost
2026-01-23 14:45:11 -08:00
Siddharth Ganesan
efef91ece0 improvement(copilot): fast mode, subagent tool responses and allow preferences (#2955)
* Improvements

* Fix actions mapping

* Remove console logs
2026-01-23 13:03:05 -08:00
144 changed files with 2772 additions and 1888 deletions

View File

@@ -59,7 +59,7 @@ export default function StatusIndicator() {
href={statusUrl} href={statusUrl}
target='_blank' target='_blank'
rel='noopener noreferrer' rel='noopener noreferrer'
className={`flex items-center gap-[6px] whitespace-nowrap text-[12px] transition-colors ${STATUS_COLORS[status]}`} className={`flex min-w-[165px] items-center gap-[6px] whitespace-nowrap text-[12px] transition-colors ${STATUS_COLORS[status]}`}
aria-label={`System status: ${message}`} aria-label={`System status: ${message}`}
> >
<StatusDotIcon status={status} className='h-[6px] w-[6px]' aria-hidden='true' /> <StatusDotIcon status={status} className='h-[6px] w-[6px]' aria-hidden='true' />

View File

@@ -22,7 +22,7 @@ export default async function StudioIndex({
? filtered.sort((a, b) => { ? filtered.sort((a, b) => {
if (a.featured && !b.featured) return -1 if (a.featured && !b.featured) return -1
if (!a.featured && b.featured) return 1 if (!a.featured && b.featured) return 1
return 0 return new Date(b.date).getTime() - new Date(a.date).getTime()
}) })
: filtered : filtered

View File

@@ -8,6 +8,7 @@ import type { AgentCapabilities, AgentSkill } from '@/lib/a2a/types'
import { checkHybridAuth } from '@/lib/auth/hybrid' import { checkHybridAuth } from '@/lib/auth/hybrid'
import { getRedisClient } from '@/lib/core/config/redis' import { getRedisClient } from '@/lib/core/config/redis'
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils' import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'
import { checkWorkspaceAccess } from '@/lib/workspaces/permissions/utils'
const logger = createLogger('A2AAgentCardAPI') const logger = createLogger('A2AAgentCardAPI')
@@ -95,6 +96,11 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<Ro
return NextResponse.json({ error: 'Agent not found' }, { status: 404 }) return NextResponse.json({ error: 'Agent not found' }, { status: 404 })
} }
const workspaceAccess = await checkWorkspaceAccess(existingAgent.workspaceId, auth.userId)
if (!workspaceAccess.canWrite) {
return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
}
const body = await request.json() const body = await request.json()
if ( if (
@@ -160,6 +166,11 @@ export async function DELETE(request: NextRequest, { params }: { params: Promise
return NextResponse.json({ error: 'Agent not found' }, { status: 404 }) return NextResponse.json({ error: 'Agent not found' }, { status: 404 })
} }
const workspaceAccess = await checkWorkspaceAccess(existingAgent.workspaceId, auth.userId)
if (!workspaceAccess.canWrite) {
return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
}
await db.delete(a2aAgent).where(eq(a2aAgent.id, agentId)) await db.delete(a2aAgent).where(eq(a2aAgent.id, agentId))
logger.info(`Deleted A2A agent: ${agentId}`) logger.info(`Deleted A2A agent: ${agentId}`)
@@ -194,6 +205,11 @@ export async function POST(request: NextRequest, { params }: { params: Promise<R
return NextResponse.json({ error: 'Agent not found' }, { status: 404 }) return NextResponse.json({ error: 'Agent not found' }, { status: 404 })
} }
const workspaceAccess = await checkWorkspaceAccess(existingAgent.workspaceId, auth.userId)
if (!workspaceAccess.canWrite) {
return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
}
const body = await request.json() const body = await request.json()
const action = body.action as 'publish' | 'unpublish' | 'refresh' const action = body.action as 'publish' | 'unpublish' | 'refresh'

View File

@@ -16,6 +16,7 @@ import {
import { checkHybridAuth } from '@/lib/auth/hybrid' import { checkHybridAuth } from '@/lib/auth/hybrid'
import { getBrandConfig } from '@/lib/branding/branding' import { getBrandConfig } from '@/lib/branding/branding'
import { acquireLock, getRedisClient, releaseLock } from '@/lib/core/config/redis' import { acquireLock, getRedisClient, releaseLock } from '@/lib/core/config/redis'
import { validateExternalUrl } from '@/lib/core/security/input-validation'
import { SSE_HEADERS } from '@/lib/core/utils/sse' import { SSE_HEADERS } from '@/lib/core/utils/sse'
import { getBaseUrl } from '@/lib/core/utils/urls' import { getBaseUrl } from '@/lib/core/utils/urls'
import { markExecutionCancelled } from '@/lib/execution/cancellation' import { markExecutionCancelled } from '@/lib/execution/cancellation'
@@ -1118,17 +1119,13 @@ async function handlePushNotificationSet(
) )
} }
try { const urlValidation = validateExternalUrl(
const url = new URL(params.pushNotificationConfig.url) params.pushNotificationConfig.url,
if (url.protocol !== 'https:') { 'Push notification URL'
return NextResponse.json( )
createError(id, A2A_ERROR_CODES.INVALID_PARAMS, 'Push notification URL must use HTTPS'), if (!urlValidation.isValid) {
{ status: 400 }
)
}
} catch {
return NextResponse.json( return NextResponse.json(
createError(id, A2A_ERROR_CODES.INVALID_PARAMS, 'Invalid push notification URL'), createError(id, A2A_ERROR_CODES.INVALID_PARAMS, urlValidation.error || 'Invalid URL'),
{ status: 400 } { status: 400 }
) )
} }

View File

@@ -104,17 +104,11 @@ export async function POST(req: NextRequest) {
}) })
// Build execution params starting with LLM-provided arguments // Build execution params starting with LLM-provided arguments
// Resolve all {{ENV_VAR}} references in the arguments // Resolve all {{ENV_VAR}} references in the arguments (deep for nested objects)
const executionParams: Record<string, any> = resolveEnvVarReferences( const executionParams: Record<string, any> = resolveEnvVarReferences(
toolArgs, toolArgs,
decryptedEnvVars, decryptedEnvVars,
{ { deep: true }
resolveExactMatch: true,
allowEmbedded: true,
trimKeys: true,
onMissing: 'keep',
deep: true,
}
) as Record<string, any> ) as Record<string, any>
logger.info(`[${tracker.requestId}] Resolved env var references in arguments`, { logger.info(`[${tracker.requestId}] Resolved env var references in arguments`, {

View File

@@ -84,6 +84,14 @@ vi.mock('@/lib/execution/isolated-vm', () => ({
vi.mock('@sim/logger', () => loggerMock) vi.mock('@sim/logger', () => loggerMock)
vi.mock('@/lib/auth/hybrid', () => ({
checkHybridAuth: vi.fn().mockResolvedValue({
success: true,
userId: 'user-123',
authType: 'session',
}),
}))
vi.mock('@/lib/execution/e2b', () => ({ vi.mock('@/lib/execution/e2b', () => ({
executeInE2B: vi.fn(), executeInE2B: vi.fn(),
})) }))
@@ -110,6 +118,24 @@ describe('Function Execute API Route', () => {
}) })
describe('Security Tests', () => { describe('Security Tests', () => {
it('should reject unauthorized requests', async () => {
const { checkHybridAuth } = await import('@/lib/auth/hybrid')
vi.mocked(checkHybridAuth).mockResolvedValueOnce({
success: false,
error: 'Unauthorized',
})
const req = createMockRequest('POST', {
code: 'return "test"',
})
const response = await POST(req)
const data = await response.json()
expect(response.status).toBe(401)
expect(data).toHaveProperty('error', 'Unauthorized')
})
it.concurrent('should use isolated-vm for secure sandboxed execution', async () => { it.concurrent('should use isolated-vm for secure sandboxed execution', async () => {
const req = createMockRequest('POST', { const req = createMockRequest('POST', {
code: 'return "test"', code: 'return "test"',

View File

@@ -1,5 +1,6 @@
import { createLogger } from '@sim/logger' import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server' import { type NextRequest, NextResponse } from 'next/server'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { isE2bEnabled } from '@/lib/core/config/feature-flags' import { isE2bEnabled } from '@/lib/core/config/feature-flags'
import { generateRequestId } from '@/lib/core/utils/request' import { generateRequestId } from '@/lib/core/utils/request'
import { executeInE2B } from '@/lib/execution/e2b' import { executeInE2B } from '@/lib/execution/e2b'
@@ -581,6 +582,12 @@ export async function POST(req: NextRequest) {
let resolvedCode = '' // Store resolved code for error reporting let resolvedCode = '' // Store resolved code for error reporting
try { try {
const auth = await checkHybridAuth(req)
if (!auth.success || !auth.userId) {
logger.warn(`[${requestId}] Unauthorized function execution attempt`)
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}
const body = await req.json() const body = await req.json()
const { DEFAULT_EXECUTION_TIMEOUT_MS } = await import('@/lib/execution/constants') const { DEFAULT_EXECUTION_TIMEOUT_MS } = await import('@/lib/execution/constants')

View File

@@ -1,11 +1,10 @@
import { createLogger } from '@sim/logger' import { createLogger } from '@sim/logger'
import type { NextRequest } from 'next/server' import type { NextRequest } from 'next/server'
import { getEffectiveDecryptedEnv } from '@/lib/environment/utils'
import { McpClient } from '@/lib/mcp/client' import { McpClient } from '@/lib/mcp/client'
import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware' import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware'
import type { McpServerConfig, McpTransport } from '@/lib/mcp/types' import { resolveMcpConfigEnvVars } from '@/lib/mcp/resolve-config'
import type { McpTransport } from '@/lib/mcp/types'
import { createMcpErrorResponse, createMcpSuccessResponse } from '@/lib/mcp/utils' import { createMcpErrorResponse, createMcpSuccessResponse } from '@/lib/mcp/utils'
import { resolveEnvVarReferences } from '@/executor/utils/reference-validation'
const logger = createLogger('McpServerTestAPI') const logger = createLogger('McpServerTestAPI')
@@ -19,30 +18,6 @@ function isUrlBasedTransport(transport: McpTransport): boolean {
return transport === 'streamable-http' return transport === 'streamable-http'
} }
/**
* Resolve environment variables in strings
*/
function resolveEnvVars(value: string, envVars: Record<string, string>): string {
const missingVars: string[] = []
const resolvedValue = resolveEnvVarReferences(value, envVars, {
allowEmbedded: true,
resolveExactMatch: true,
trimKeys: true,
onMissing: 'keep',
deep: false,
missingKeys: missingVars,
}) as string
if (missingVars.length > 0) {
const uniqueMissing = Array.from(new Set(missingVars))
uniqueMissing.forEach((envKey) => {
logger.warn(`Environment variable "${envKey}" not found in MCP server test`)
})
}
return resolvedValue
}
interface TestConnectionRequest { interface TestConnectionRequest {
name: string name: string
transport: McpTransport transport: McpTransport
@@ -96,39 +71,30 @@ export const POST = withMcpAuth('write')(
) )
} }
let resolvedUrl = body.url // Build initial config for resolution
let resolvedHeaders = body.headers || {} const initialConfig = {
try {
const envVars = await getEffectiveDecryptedEnv(userId, workspaceId)
if (resolvedUrl) {
resolvedUrl = resolveEnvVars(resolvedUrl, envVars)
}
const resolvedHeadersObj: Record<string, string> = {}
for (const [key, value] of Object.entries(resolvedHeaders)) {
resolvedHeadersObj[key] = resolveEnvVars(value, envVars)
}
resolvedHeaders = resolvedHeadersObj
} catch (envError) {
logger.warn(
`[${requestId}] Failed to resolve environment variables, using raw values:`,
envError
)
}
const testConfig: McpServerConfig = {
id: `test-${requestId}`, id: `test-${requestId}`,
name: body.name, name: body.name,
transport: body.transport, transport: body.transport,
url: resolvedUrl, url: body.url,
headers: resolvedHeaders, headers: body.headers || {},
timeout: body.timeout || 10000, timeout: body.timeout || 10000,
retries: 1, // Only one retry for tests retries: 1, // Only one retry for tests
enabled: true, enabled: true,
} }
// Resolve env vars using shared utility (non-strict mode for testing)
const { config: testConfig, missingVars } = await resolveMcpConfigEnvVars(
initialConfig,
userId,
workspaceId,
{ strict: false }
)
if (missingVars.length > 0) {
logger.warn(`[${requestId}] Some environment variables not found:`, { missingVars })
}
const testSecurityPolicy = { const testSecurityPolicy = {
requireConsent: false, requireConsent: false,
auditLevel: 'none' as const, auditLevel: 'none' as const,

View File

@@ -3,7 +3,9 @@ import { account } from '@sim/db/schema'
import { createLogger } from '@sim/logger' import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm' import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server' import { type NextRequest, NextResponse } from 'next/server'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request' import { generateRequestId } from '@/lib/core/utils/request'
import { checkWorkspaceAccess } from '@/lib/workspaces/permissions/utils'
import { refreshTokenIfNeeded } from '@/app/api/auth/oauth/utils' import { refreshTokenIfNeeded } from '@/app/api/auth/oauth/utils'
import type { StreamingExecution } from '@/executor/types' import type { StreamingExecution } from '@/executor/types'
import { executeProviderRequest } from '@/providers' import { executeProviderRequest } from '@/providers'
@@ -20,6 +22,11 @@ export async function POST(request: NextRequest) {
const startTime = Date.now() const startTime = Date.now()
try { try {
const auth = await checkHybridAuth(request, { requireWorkflowId: false })
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
logger.info(`[${requestId}] Provider API request started`, { logger.info(`[${requestId}] Provider API request started`, {
timestamp: new Date().toISOString(), timestamp: new Date().toISOString(),
userAgent: request.headers.get('User-Agent'), userAgent: request.headers.get('User-Agent'),
@@ -85,6 +92,13 @@ export async function POST(request: NextRequest) {
verbosity, verbosity,
}) })
if (workspaceId) {
const workspaceAccess = await checkWorkspaceAccess(workspaceId, auth.userId)
if (!workspaceAccess.hasAccess) {
return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
}
}
let finalApiKey: string | undefined = apiKey let finalApiKey: string | undefined = apiKey
try { try {
if (provider === 'vertex' && vertexCredential) { if (provider === 'vertex' && vertexCredential) {

View File

@@ -3,6 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod' import { z } from 'zod'
import { createA2AClient } from '@/lib/a2a/utils' import { createA2AClient } from '@/lib/a2a/utils'
import { checkHybridAuth } from '@/lib/auth/hybrid' import { checkHybridAuth } from '@/lib/auth/hybrid'
import { validateExternalUrl } from '@/lib/core/security/input-validation'
import { generateRequestId } from '@/lib/core/utils/request' import { generateRequestId } from '@/lib/core/utils/request'
export const dynamic = 'force-dynamic' export const dynamic = 'force-dynamic'
@@ -39,6 +40,18 @@ export async function POST(request: NextRequest) {
const body = await request.json() const body = await request.json()
const validatedData = A2ASetPushNotificationSchema.parse(body) const validatedData = A2ASetPushNotificationSchema.parse(body)
const urlValidation = validateExternalUrl(validatedData.webhookUrl, 'Webhook URL')
if (!urlValidation.isValid) {
logger.warn(`[${requestId}] Invalid webhook URL`, { error: urlValidation.error })
return NextResponse.json(
{
success: false,
error: urlValidation.error,
},
{ status: 400 }
)
}
logger.info(`[${requestId}] A2A set push notification request`, { logger.info(`[${requestId}] A2A set push notification request`, {
agentUrl: validatedData.agentUrl, agentUrl: validatedData.agentUrl,
taskId: validatedData.taskId, taskId: validatedData.taskId,

View File

@@ -2,6 +2,7 @@ import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger' import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server' import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod' import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { buildDeleteQuery, createMySQLConnection, executeQuery } from '@/app/api/tools/mysql/utils' import { buildDeleteQuery, createMySQLConnection, executeQuery } from '@/app/api/tools/mysql/utils'
const logger = createLogger('MySQLDeleteAPI') const logger = createLogger('MySQLDeleteAPI')
@@ -21,6 +22,12 @@ export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8) const requestId = randomUUID().slice(0, 8)
try { try {
const auth = await checkHybridAuth(request)
if (!auth.success || !auth.userId) {
logger.warn(`[${requestId}] Unauthorized MySQL delete attempt`)
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}
const body = await request.json() const body = await request.json()
const params = DeleteSchema.parse(body) const params = DeleteSchema.parse(body)

View File

@@ -2,6 +2,7 @@ import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger' import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server' import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod' import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { createMySQLConnection, executeQuery, validateQuery } from '@/app/api/tools/mysql/utils' import { createMySQLConnection, executeQuery, validateQuery } from '@/app/api/tools/mysql/utils'
const logger = createLogger('MySQLExecuteAPI') const logger = createLogger('MySQLExecuteAPI')
@@ -20,6 +21,12 @@ export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8) const requestId = randomUUID().slice(0, 8)
try { try {
const auth = await checkHybridAuth(request)
if (!auth.success || !auth.userId) {
logger.warn(`[${requestId}] Unauthorized MySQL execute attempt`)
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}
const body = await request.json() const body = await request.json()
const params = ExecuteSchema.parse(body) const params = ExecuteSchema.parse(body)

View File

@@ -2,6 +2,7 @@ import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger' import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server' import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod' import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { buildInsertQuery, createMySQLConnection, executeQuery } from '@/app/api/tools/mysql/utils' import { buildInsertQuery, createMySQLConnection, executeQuery } from '@/app/api/tools/mysql/utils'
const logger = createLogger('MySQLInsertAPI') const logger = createLogger('MySQLInsertAPI')
@@ -42,6 +43,12 @@ export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8) const requestId = randomUUID().slice(0, 8)
try { try {
const auth = await checkHybridAuth(request)
if (!auth.success || !auth.userId) {
logger.warn(`[${requestId}] Unauthorized MySQL insert attempt`)
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}
const body = await request.json() const body = await request.json()
const params = InsertSchema.parse(body) const params = InsertSchema.parse(body)

View File

@@ -2,6 +2,7 @@ import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger' import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server' import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod' import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { createMySQLConnection, executeIntrospect } from '@/app/api/tools/mysql/utils' import { createMySQLConnection, executeIntrospect } from '@/app/api/tools/mysql/utils'
const logger = createLogger('MySQLIntrospectAPI') const logger = createLogger('MySQLIntrospectAPI')
@@ -19,6 +20,12 @@ export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8) const requestId = randomUUID().slice(0, 8)
try { try {
const auth = await checkHybridAuth(request)
if (!auth.success || !auth.userId) {
logger.warn(`[${requestId}] Unauthorized MySQL introspect attempt`)
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}
const body = await request.json() const body = await request.json()
const params = IntrospectSchema.parse(body) const params = IntrospectSchema.parse(body)

View File

@@ -2,6 +2,7 @@ import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger' import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server' import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod' import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { createMySQLConnection, executeQuery, validateQuery } from '@/app/api/tools/mysql/utils' import { createMySQLConnection, executeQuery, validateQuery } from '@/app/api/tools/mysql/utils'
const logger = createLogger('MySQLQueryAPI') const logger = createLogger('MySQLQueryAPI')
@@ -20,6 +21,12 @@ export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8) const requestId = randomUUID().slice(0, 8)
try { try {
const auth = await checkHybridAuth(request)
if (!auth.success || !auth.userId) {
logger.warn(`[${requestId}] Unauthorized MySQL query attempt`)
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}
const body = await request.json() const body = await request.json()
const params = QuerySchema.parse(body) const params = QuerySchema.parse(body)

View File

@@ -2,6 +2,7 @@ import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger' import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server' import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod' import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { buildUpdateQuery, createMySQLConnection, executeQuery } from '@/app/api/tools/mysql/utils' import { buildUpdateQuery, createMySQLConnection, executeQuery } from '@/app/api/tools/mysql/utils'
const logger = createLogger('MySQLUpdateAPI') const logger = createLogger('MySQLUpdateAPI')
@@ -40,6 +41,12 @@ export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8) const requestId = randomUUID().slice(0, 8)
try { try {
const auth = await checkHybridAuth(request)
if (!auth.success || !auth.userId) {
logger.warn(`[${requestId}] Unauthorized MySQL update attempt`)
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}
const body = await request.json() const body = await request.json()
const params = UpdateSchema.parse(body) const params = UpdateSchema.parse(body)

View File

@@ -2,6 +2,7 @@ import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger' import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server' import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod' import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { createPostgresConnection, executeDelete } from '@/app/api/tools/postgresql/utils' import { createPostgresConnection, executeDelete } from '@/app/api/tools/postgresql/utils'
const logger = createLogger('PostgreSQLDeleteAPI') const logger = createLogger('PostgreSQLDeleteAPI')
@@ -21,6 +22,12 @@ export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8) const requestId = randomUUID().slice(0, 8)
try { try {
const auth = await checkHybridAuth(request)
if (!auth.success || !auth.userId) {
logger.warn(`[${requestId}] Unauthorized PostgreSQL delete attempt`)
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}
const body = await request.json() const body = await request.json()
const params = DeleteSchema.parse(body) const params = DeleteSchema.parse(body)

View File

@@ -2,6 +2,7 @@ import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger' import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server' import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod' import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { import {
createPostgresConnection, createPostgresConnection,
executeQuery, executeQuery,
@@ -24,6 +25,12 @@ export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8) const requestId = randomUUID().slice(0, 8)
try { try {
const auth = await checkHybridAuth(request)
if (!auth.success || !auth.userId) {
logger.warn(`[${requestId}] Unauthorized PostgreSQL execute attempt`)
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}
const body = await request.json() const body = await request.json()
const params = ExecuteSchema.parse(body) const params = ExecuteSchema.parse(body)

View File

@@ -2,6 +2,7 @@ import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger' import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server' import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod' import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { createPostgresConnection, executeInsert } from '@/app/api/tools/postgresql/utils' import { createPostgresConnection, executeInsert } from '@/app/api/tools/postgresql/utils'
const logger = createLogger('PostgreSQLInsertAPI') const logger = createLogger('PostgreSQLInsertAPI')
@@ -42,6 +43,12 @@ export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8) const requestId = randomUUID().slice(0, 8)
try { try {
const auth = await checkHybridAuth(request)
if (!auth.success || !auth.userId) {
logger.warn(`[${requestId}] Unauthorized PostgreSQL insert attempt`)
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}
const body = await request.json() const body = await request.json()
const params = InsertSchema.parse(body) const params = InsertSchema.parse(body)

View File

@@ -2,6 +2,7 @@ import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger' import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server' import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod' import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { createPostgresConnection, executeIntrospect } from '@/app/api/tools/postgresql/utils' import { createPostgresConnection, executeIntrospect } from '@/app/api/tools/postgresql/utils'
const logger = createLogger('PostgreSQLIntrospectAPI') const logger = createLogger('PostgreSQLIntrospectAPI')
@@ -20,6 +21,12 @@ export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8) const requestId = randomUUID().slice(0, 8)
try { try {
const auth = await checkHybridAuth(request)
if (!auth.success || !auth.userId) {
logger.warn(`[${requestId}] Unauthorized PostgreSQL introspect attempt`)
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}
const body = await request.json() const body = await request.json()
const params = IntrospectSchema.parse(body) const params = IntrospectSchema.parse(body)

View File

@@ -2,6 +2,7 @@ import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger' import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server' import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod' import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { createPostgresConnection, executeQuery } from '@/app/api/tools/postgresql/utils' import { createPostgresConnection, executeQuery } from '@/app/api/tools/postgresql/utils'
const logger = createLogger('PostgreSQLQueryAPI') const logger = createLogger('PostgreSQLQueryAPI')
@@ -20,6 +21,12 @@ export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8) const requestId = randomUUID().slice(0, 8)
try { try {
const auth = await checkHybridAuth(request)
if (!auth.success || !auth.userId) {
logger.warn(`[${requestId}] Unauthorized PostgreSQL query attempt`)
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}
const body = await request.json() const body = await request.json()
const params = QuerySchema.parse(body) const params = QuerySchema.parse(body)

View File

@@ -2,6 +2,7 @@ import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger' import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server' import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod' import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { createPostgresConnection, executeUpdate } from '@/app/api/tools/postgresql/utils' import { createPostgresConnection, executeUpdate } from '@/app/api/tools/postgresql/utils'
const logger = createLogger('PostgreSQLUpdateAPI') const logger = createLogger('PostgreSQLUpdateAPI')
@@ -40,6 +41,12 @@ export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8) const requestId = randomUUID().slice(0, 8)
try { try {
const auth = await checkHybridAuth(request)
if (!auth.success || !auth.userId) {
logger.warn(`[${requestId}] Unauthorized PostgreSQL update attempt`)
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}
const body = await request.json() const body = await request.json()
const params = UpdateSchema.parse(body) const params = UpdateSchema.parse(body)

View File

@@ -2,6 +2,7 @@ import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger' import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server' import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod' import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { createSSHConnection, escapeShellArg, executeSSHCommand } from '@/app/api/tools/ssh/utils' import { createSSHConnection, escapeShellArg, executeSSHCommand } from '@/app/api/tools/ssh/utils'
const logger = createLogger('SSHCheckCommandExistsAPI') const logger = createLogger('SSHCheckCommandExistsAPI')
@@ -20,6 +21,12 @@ export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8) const requestId = randomUUID().slice(0, 8)
try { try {
const auth = await checkHybridAuth(request)
if (!auth.success || !auth.userId) {
logger.warn(`[${requestId}] Unauthorized SSH check command exists attempt`)
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}
const body = await request.json() const body = await request.json()
const params = CheckCommandExistsSchema.parse(body) const params = CheckCommandExistsSchema.parse(body)

View File

@@ -3,6 +3,7 @@ import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server' import { type NextRequest, NextResponse } from 'next/server'
import type { Client, SFTPWrapper, Stats } from 'ssh2' import type { Client, SFTPWrapper, Stats } from 'ssh2'
import { z } from 'zod' import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { import {
createSSHConnection, createSSHConnection,
getFileType, getFileType,
@@ -39,10 +40,15 @@ export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8) const requestId = randomUUID().slice(0, 8)
try { try {
const auth = await checkHybridAuth(request)
if (!auth.success || !auth.userId) {
logger.warn(`[${requestId}] Unauthorized SSH check file exists attempt`)
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}
const body = await request.json() const body = await request.json()
const params = CheckFileExistsSchema.parse(body) const params = CheckFileExistsSchema.parse(body)
// Validate authentication
if (!params.password && !params.privateKey) { if (!params.password && !params.privateKey) {
return NextResponse.json( return NextResponse.json(
{ error: 'Either password or privateKey must be provided' }, { error: 'Either password or privateKey must be provided' },

View File

@@ -2,6 +2,7 @@ import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger' import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server' import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod' import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { import {
createSSHConnection, createSSHConnection,
escapeShellArg, escapeShellArg,
@@ -27,10 +28,15 @@ export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8) const requestId = randomUUID().slice(0, 8)
try { try {
const auth = await checkHybridAuth(request)
if (!auth.success || !auth.userId) {
logger.warn(`[${requestId}] Unauthorized SSH create directory attempt`)
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}
const body = await request.json() const body = await request.json()
const params = CreateDirectorySchema.parse(body) const params = CreateDirectorySchema.parse(body)
// Validate authentication
if (!params.password && !params.privateKey) { if (!params.password && !params.privateKey) {
return NextResponse.json( return NextResponse.json(
{ error: 'Either password or privateKey must be provided' }, { error: 'Either password or privateKey must be provided' },
@@ -53,7 +59,6 @@ export async function POST(request: NextRequest) {
const dirPath = sanitizePath(params.path) const dirPath = sanitizePath(params.path)
const escapedPath = escapeShellArg(dirPath) const escapedPath = escapeShellArg(dirPath)
// Check if directory already exists
const checkResult = await executeSSHCommand( const checkResult = await executeSSHCommand(
client, client,
`test -d '${escapedPath}' && echo "exists"` `test -d '${escapedPath}' && echo "exists"`
@@ -70,7 +75,6 @@ export async function POST(request: NextRequest) {
}) })
} }
// Create directory
const mkdirFlag = params.recursive ? '-p' : '' const mkdirFlag = params.recursive ? '-p' : ''
const command = `mkdir ${mkdirFlag} -m ${params.permissions} '${escapedPath}'` const command = `mkdir ${mkdirFlag} -m ${params.permissions} '${escapedPath}'`
const result = await executeSSHCommand(client, command) const result = await executeSSHCommand(client, command)

View File

@@ -2,6 +2,7 @@ import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger' import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server' import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod' import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { import {
createSSHConnection, createSSHConnection,
escapeShellArg, escapeShellArg,
@@ -27,10 +28,15 @@ export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8) const requestId = randomUUID().slice(0, 8)
try { try {
const auth = await checkHybridAuth(request)
if (!auth.success || !auth.userId) {
logger.warn(`[${requestId}] Unauthorized SSH delete file attempt`)
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}
const body = await request.json() const body = await request.json()
const params = DeleteFileSchema.parse(body) const params = DeleteFileSchema.parse(body)
// Validate authentication
if (!params.password && !params.privateKey) { if (!params.password && !params.privateKey) {
return NextResponse.json( return NextResponse.json(
{ error: 'Either password or privateKey must be provided' }, { error: 'Either password or privateKey must be provided' },
@@ -53,7 +59,6 @@ export async function POST(request: NextRequest) {
const filePath = sanitizePath(params.path) const filePath = sanitizePath(params.path)
const escapedPath = escapeShellArg(filePath) const escapedPath = escapeShellArg(filePath)
// Check if path exists
const checkResult = await executeSSHCommand( const checkResult = await executeSSHCommand(
client, client,
`test -e '${escapedPath}' && echo "exists"` `test -e '${escapedPath}' && echo "exists"`
@@ -62,7 +67,6 @@ export async function POST(request: NextRequest) {
return NextResponse.json({ error: `Path does not exist: ${filePath}` }, { status: 404 }) return NextResponse.json({ error: `Path does not exist: ${filePath}` }, { status: 404 })
} }
// Build delete command
let command: string let command: string
if (params.recursive) { if (params.recursive) {
command = params.force ? `rm -rf '${escapedPath}'` : `rm -r '${escapedPath}'` command = params.force ? `rm -rf '${escapedPath}'` : `rm -r '${escapedPath}'`

View File

@@ -4,6 +4,7 @@ import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server' import { type NextRequest, NextResponse } from 'next/server'
import type { Client, SFTPWrapper } from 'ssh2' import type { Client, SFTPWrapper } from 'ssh2'
import { z } from 'zod' import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { createSSHConnection, sanitizePath } from '@/app/api/tools/ssh/utils' import { createSSHConnection, sanitizePath } from '@/app/api/tools/ssh/utils'
const logger = createLogger('SSHDownloadFileAPI') const logger = createLogger('SSHDownloadFileAPI')
@@ -34,10 +35,15 @@ export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8) const requestId = randomUUID().slice(0, 8)
try { try {
const auth = await checkHybridAuth(request)
if (!auth.success || !auth.userId) {
logger.warn(`[${requestId}] Unauthorized SSH download file attempt`)
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}
const body = await request.json() const body = await request.json()
const params = DownloadFileSchema.parse(body) const params = DownloadFileSchema.parse(body)
// Validate authentication
if (!params.password && !params.privateKey) { if (!params.password && !params.privateKey) {
return NextResponse.json( return NextResponse.json(
{ error: 'Either password or privateKey must be provided' }, { error: 'Either password or privateKey must be provided' },

View File

@@ -2,6 +2,7 @@ import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger' import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server' import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod' import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { createSSHConnection, executeSSHCommand, sanitizeCommand } from '@/app/api/tools/ssh/utils' import { createSSHConnection, executeSSHCommand, sanitizeCommand } from '@/app/api/tools/ssh/utils'
const logger = createLogger('SSHExecuteCommandAPI') const logger = createLogger('SSHExecuteCommandAPI')
@@ -21,10 +22,15 @@ export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8) const requestId = randomUUID().slice(0, 8)
try { try {
const auth = await checkHybridAuth(request)
if (!auth.success || !auth.userId) {
logger.warn(`[${requestId}] Unauthorized SSH execute command attempt`)
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}
const body = await request.json() const body = await request.json()
const params = ExecuteCommandSchema.parse(body) const params = ExecuteCommandSchema.parse(body)
// Validate authentication
if (!params.password && !params.privateKey) { if (!params.password && !params.privateKey) {
return NextResponse.json( return NextResponse.json(
{ error: 'Either password or privateKey must be provided' }, { error: 'Either password or privateKey must be provided' },
@@ -44,7 +50,6 @@ export async function POST(request: NextRequest) {
}) })
try { try {
// Build command with optional working directory
let command = sanitizeCommand(params.command) let command = sanitizeCommand(params.command)
if (params.workingDirectory) { if (params.workingDirectory) {
command = `cd "${params.workingDirectory}" && ${command}` command = `cd "${params.workingDirectory}" && ${command}`

View File

@@ -2,6 +2,7 @@ import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger' import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server' import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod' import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { createSSHConnection, escapeShellArg, executeSSHCommand } from '@/app/api/tools/ssh/utils' import { createSSHConnection, escapeShellArg, executeSSHCommand } from '@/app/api/tools/ssh/utils'
const logger = createLogger('SSHExecuteScriptAPI') const logger = createLogger('SSHExecuteScriptAPI')
@@ -22,10 +23,15 @@ export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8) const requestId = randomUUID().slice(0, 8)
try { try {
const auth = await checkHybridAuth(request)
if (!auth.success || !auth.userId) {
logger.warn(`[${requestId}] Unauthorized SSH execute script attempt`)
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}
const body = await request.json() const body = await request.json()
const params = ExecuteScriptSchema.parse(body) const params = ExecuteScriptSchema.parse(body)
// Validate authentication
if (!params.password && !params.privateKey) { if (!params.password && !params.privateKey) {
return NextResponse.json( return NextResponse.json(
{ error: 'Either password or privateKey must be provided' }, { error: 'Either password or privateKey must be provided' },
@@ -45,13 +51,10 @@ export async function POST(request: NextRequest) {
}) })
try { try {
// Create a temporary script file, execute it, and clean up
const scriptPath = `/tmp/sim_script_${requestId}.sh` const scriptPath = `/tmp/sim_script_${requestId}.sh`
const escapedScriptPath = escapeShellArg(scriptPath) const escapedScriptPath = escapeShellArg(scriptPath)
const escapedInterpreter = escapeShellArg(params.interpreter) const escapedInterpreter = escapeShellArg(params.interpreter)
// Build the command to create, execute, and clean up the script
// Note: heredoc with quoted delimiter ('SIMEOF') prevents variable expansion
let command = `cat > '${escapedScriptPath}' << 'SIMEOF' let command = `cat > '${escapedScriptPath}' << 'SIMEOF'
${params.script} ${params.script}
SIMEOF SIMEOF

View File

@@ -2,6 +2,7 @@ import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger' import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server' import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod' import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { createSSHConnection, executeSSHCommand } from '@/app/api/tools/ssh/utils' import { createSSHConnection, executeSSHCommand } from '@/app/api/tools/ssh/utils'
const logger = createLogger('SSHGetSystemInfoAPI') const logger = createLogger('SSHGetSystemInfoAPI')
@@ -19,10 +20,15 @@ export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8) const requestId = randomUUID().slice(0, 8)
try { try {
const auth = await checkHybridAuth(request)
if (!auth.success || !auth.userId) {
logger.warn(`[${requestId}] Unauthorized SSH get system info attempt`)
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}
const body = await request.json() const body = await request.json()
const params = GetSystemInfoSchema.parse(body) const params = GetSystemInfoSchema.parse(body)
// Validate authentication
if (!params.password && !params.privateKey) { if (!params.password && !params.privateKey) {
return NextResponse.json( return NextResponse.json(
{ error: 'Either password or privateKey must be provided' }, { error: 'Either password or privateKey must be provided' },

View File

@@ -3,6 +3,7 @@ import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server' import { type NextRequest, NextResponse } from 'next/server'
import type { Client, FileEntry, SFTPWrapper } from 'ssh2' import type { Client, FileEntry, SFTPWrapper } from 'ssh2'
import { z } from 'zod' import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { import {
createSSHConnection, createSSHConnection,
getFileType, getFileType,
@@ -60,10 +61,15 @@ export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8) const requestId = randomUUID().slice(0, 8)
try { try {
const auth = await checkHybridAuth(request)
if (!auth.success || !auth.userId) {
logger.warn(`[${requestId}] Unauthorized SSH list directory attempt`)
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}
const body = await request.json() const body = await request.json()
const params = ListDirectorySchema.parse(body) const params = ListDirectorySchema.parse(body)
// Validate authentication
if (!params.password && !params.privateKey) { if (!params.password && !params.privateKey) {
return NextResponse.json( return NextResponse.json(
{ error: 'Either password or privateKey must be provided' }, { error: 'Either password or privateKey must be provided' },

View File

@@ -2,6 +2,7 @@ import { randomUUID } from 'crypto'
import { createLogger } from '@sim/logger' import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server' import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod' import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { import {
createSSHConnection, createSSHConnection,
escapeShellArg, escapeShellArg,
@@ -27,9 +28,16 @@ export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8) const requestId = randomUUID().slice(0, 8)
try { try {
const auth = await checkHybridAuth(request)
if (!auth.success || !auth.userId) {
logger.warn(`[${requestId}] Unauthorized SSH move/rename attempt`)
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}
const body = await request.json() const body = await request.json()
const params = MoveRenameSchema.parse(body) const params = MoveRenameSchema.parse(body)
// Validate SSH authentication
if (!params.password && !params.privateKey) { if (!params.password && !params.privateKey) {
return NextResponse.json( return NextResponse.json(
{ error: 'Either password or privateKey must be provided' }, { error: 'Either password or privateKey must be provided' },

View File

@@ -3,6 +3,7 @@ import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server' import { type NextRequest, NextResponse } from 'next/server'
import type { Client, SFTPWrapper } from 'ssh2' import type { Client, SFTPWrapper } from 'ssh2'
import { z } from 'zod' import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { createSSHConnection, sanitizePath } from '@/app/api/tools/ssh/utils' import { createSSHConnection, sanitizePath } from '@/app/api/tools/ssh/utils'
const logger = createLogger('SSHReadFileContentAPI') const logger = createLogger('SSHReadFileContentAPI')
@@ -35,6 +36,12 @@ export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8) const requestId = randomUUID().slice(0, 8)
try { try {
const auth = await checkHybridAuth(request)
if (!auth.success || !auth.userId) {
logger.warn(`[${requestId}] Unauthorized SSH read file content attempt`)
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}
const body = await request.json() const body = await request.json()
const params = ReadFileContentSchema.parse(body) const params = ReadFileContentSchema.parse(body)

View File

@@ -3,6 +3,7 @@ import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server' import { type NextRequest, NextResponse } from 'next/server'
import type { Client, SFTPWrapper } from 'ssh2' import type { Client, SFTPWrapper } from 'ssh2'
import { z } from 'zod' import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { createSSHConnection, sanitizePath } from '@/app/api/tools/ssh/utils' import { createSSHConnection, sanitizePath } from '@/app/api/tools/ssh/utils'
const logger = createLogger('SSHUploadFileAPI') const logger = createLogger('SSHUploadFileAPI')
@@ -37,6 +38,12 @@ export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8) const requestId = randomUUID().slice(0, 8)
try { try {
const auth = await checkHybridAuth(request)
if (!auth.success || !auth.userId) {
logger.warn(`[${requestId}] Unauthorized SSH upload file attempt`)
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}
const body = await request.json() const body = await request.json()
const params = UploadFileSchema.parse(body) const params = UploadFileSchema.parse(body)

View File

@@ -3,6 +3,7 @@ import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server' import { type NextRequest, NextResponse } from 'next/server'
import type { Client, SFTPWrapper } from 'ssh2' import type { Client, SFTPWrapper } from 'ssh2'
import { z } from 'zod' import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { createSSHConnection, sanitizePath } from '@/app/api/tools/ssh/utils' import { createSSHConnection, sanitizePath } from '@/app/api/tools/ssh/utils'
const logger = createLogger('SSHWriteFileContentAPI') const logger = createLogger('SSHWriteFileContentAPI')
@@ -36,10 +37,15 @@ export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8) const requestId = randomUUID().slice(0, 8)
try { try {
const auth = await checkHybridAuth(request)
if (!auth.success || !auth.userId) {
logger.warn(`[${requestId}] Unauthorized SSH write file content attempt`)
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}
const body = await request.json() const body = await request.json()
const params = WriteFileContentSchema.parse(body) const params = WriteFileContentSchema.parse(body)
// Validate authentication
if (!params.password && !params.privateKey) { if (!params.password && !params.privateKey) {
return NextResponse.json( return NextResponse.json(
{ error: 'Either password or privateKey must be provided' }, { error: 'Either password or privateKey must be provided' },

View File

@@ -640,6 +640,7 @@ export interface AdminDeployResult {
isDeployed: boolean isDeployed: boolean
version: number version: number
deployedAt: string deployedAt: string
warnings?: string[]
} }
export interface AdminUndeployResult { export interface AdminUndeployResult {

View File

@@ -1,14 +1,23 @@
import { db, workflow } from '@sim/db' import { db, workflow, workflowDeploymentVersion } from '@sim/db'
import { createLogger } from '@sim/logger' import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm' import { and, eq } from 'drizzle-orm'
import { generateRequestId } from '@/lib/core/utils/request' import { generateRequestId } from '@/lib/core/utils/request'
import { cleanupWebhooksForWorkflow } from '@/lib/webhooks/deploy' import { removeMcpToolsForWorkflow, syncMcpToolsForWorkflow } from '@/lib/mcp/workflow-mcp-sync'
import {
cleanupWebhooksForWorkflow,
restorePreviousVersionWebhooks,
saveTriggerWebhooksForDeploy,
} from '@/lib/webhooks/deploy'
import { import {
deployWorkflow, deployWorkflow,
loadWorkflowFromNormalizedTables, loadWorkflowFromNormalizedTables,
undeployWorkflow, undeployWorkflow,
} from '@/lib/workflows/persistence/utils' } from '@/lib/workflows/persistence/utils'
import { createSchedulesForDeploy, validateWorkflowSchedules } from '@/lib/workflows/schedules' import {
cleanupDeploymentVersion,
createSchedulesForDeploy,
validateWorkflowSchedules,
} from '@/lib/workflows/schedules'
import { withAdminAuthParams } from '@/app/api/v1/admin/middleware' import { withAdminAuthParams } from '@/app/api/v1/admin/middleware'
import { import {
badRequestResponse, badRequestResponse,
@@ -28,10 +37,11 @@ interface RouteParams {
export const POST = withAdminAuthParams<RouteParams>(async (request, context) => { export const POST = withAdminAuthParams<RouteParams>(async (request, context) => {
const { id: workflowId } = await context.params const { id: workflowId } = await context.params
const requestId = generateRequestId()
try { try {
const [workflowRecord] = await db const [workflowRecord] = await db
.select({ id: workflow.id, name: workflow.name }) .select()
.from(workflow) .from(workflow)
.where(eq(workflow.id, workflowId)) .where(eq(workflow.id, workflowId))
.limit(1) .limit(1)
@@ -50,6 +60,18 @@ export const POST = withAdminAuthParams<RouteParams>(async (request, context) =>
return badRequestResponse(`Invalid schedule configuration: ${scheduleValidation.error}`) return badRequestResponse(`Invalid schedule configuration: ${scheduleValidation.error}`)
} }
const [currentActiveVersion] = await db
.select({ id: workflowDeploymentVersion.id })
.from(workflowDeploymentVersion)
.where(
and(
eq(workflowDeploymentVersion.workflowId, workflowId),
eq(workflowDeploymentVersion.isActive, true)
)
)
.limit(1)
const previousVersionId = currentActiveVersion?.id
const deployResult = await deployWorkflow({ const deployResult = await deployWorkflow({
workflowId, workflowId,
deployedBy: ADMIN_ACTOR_ID, deployedBy: ADMIN_ACTOR_ID,
@@ -65,6 +87,32 @@ export const POST = withAdminAuthParams<RouteParams>(async (request, context) =>
return internalErrorResponse('Failed to resolve deployment version') return internalErrorResponse('Failed to resolve deployment version')
} }
const workflowData = workflowRecord as Record<string, unknown>
const triggerSaveResult = await saveTriggerWebhooksForDeploy({
request,
workflowId,
workflow: workflowData,
userId: workflowRecord.userId,
blocks: normalizedData.blocks,
requestId,
deploymentVersionId: deployResult.deploymentVersionId,
previousVersionId,
})
if (!triggerSaveResult.success) {
await cleanupDeploymentVersion({
workflowId,
workflow: workflowData,
requestId,
deploymentVersionId: deployResult.deploymentVersionId,
})
await undeployWorkflow({ workflowId })
return internalErrorResponse(
triggerSaveResult.error?.message || 'Failed to sync trigger configuration'
)
}
const scheduleResult = await createSchedulesForDeploy( const scheduleResult = await createSchedulesForDeploy(
workflowId, workflowId,
normalizedData.blocks, normalizedData.blocks,
@@ -72,15 +120,58 @@ export const POST = withAdminAuthParams<RouteParams>(async (request, context) =>
deployResult.deploymentVersionId deployResult.deploymentVersionId
) )
if (!scheduleResult.success) { if (!scheduleResult.success) {
logger.warn(`Schedule creation failed for workflow ${workflowId}: ${scheduleResult.error}`) logger.error(
`[${requestId}] Admin API: Schedule creation failed for workflow ${workflowId}: ${scheduleResult.error}`
)
await cleanupDeploymentVersion({
workflowId,
workflow: workflowData,
requestId,
deploymentVersionId: deployResult.deploymentVersionId,
})
if (previousVersionId) {
await restorePreviousVersionWebhooks({
request,
workflow: workflowData,
userId: workflowRecord.userId,
previousVersionId,
requestId,
})
}
await undeployWorkflow({ workflowId })
return internalErrorResponse(scheduleResult.error || 'Failed to create schedule')
} }
logger.info(`Admin API: Deployed workflow ${workflowId} as v${deployResult.version}`) if (previousVersionId && previousVersionId !== deployResult.deploymentVersionId) {
try {
logger.info(`[${requestId}] Admin API: Cleaning up previous version ${previousVersionId}`)
await cleanupDeploymentVersion({
workflowId,
workflow: workflowData,
requestId,
deploymentVersionId: previousVersionId,
skipExternalCleanup: true,
})
} catch (cleanupError) {
logger.error(
`[${requestId}] Admin API: Failed to clean up previous version ${previousVersionId}`,
cleanupError
)
}
}
logger.info(
`[${requestId}] Admin API: Deployed workflow ${workflowId} as v${deployResult.version}`
)
// Sync MCP tools with the latest parameter schema
await syncMcpToolsForWorkflow({ workflowId, requestId, context: 'deploy' })
const response: AdminDeployResult = { const response: AdminDeployResult = {
isDeployed: true, isDeployed: true,
version: deployResult.version!, version: deployResult.version!,
deployedAt: deployResult.deployedAt!.toISOString(), deployedAt: deployResult.deployedAt!.toISOString(),
warnings: triggerSaveResult.warnings,
} }
return singleResponse(response) return singleResponse(response)
@@ -105,7 +196,6 @@ export const DELETE = withAdminAuthParams<RouteParams>(async (request, context)
return notFoundResponse('Workflow') return notFoundResponse('Workflow')
} }
// Clean up external webhook subscriptions before undeploying
await cleanupWebhooksForWorkflow( await cleanupWebhooksForWorkflow(
workflowId, workflowId,
workflowRecord as Record<string, unknown>, workflowRecord as Record<string, unknown>,
@@ -117,6 +207,8 @@ export const DELETE = withAdminAuthParams<RouteParams>(async (request, context)
return internalErrorResponse(result.error || 'Failed to undeploy workflow') return internalErrorResponse(result.error || 'Failed to undeploy workflow')
} }
await removeMcpToolsForWorkflow(workflowId, requestId)
logger.info(`Admin API: Undeployed workflow ${workflowId}`) logger.info(`Admin API: Undeployed workflow ${workflowId}`)
const response: AdminUndeployResult = { const response: AdminUndeployResult = {

View File

@@ -1,7 +1,15 @@
import { db, workflow } from '@sim/db' import { db, workflow, workflowDeploymentVersion } from '@sim/db'
import { createLogger } from '@sim/logger' import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm' import { and, eq } from 'drizzle-orm'
import { generateRequestId } from '@/lib/core/utils/request'
import { syncMcpToolsForWorkflow } from '@/lib/mcp/workflow-mcp-sync'
import { restorePreviousVersionWebhooks, saveTriggerWebhooksForDeploy } from '@/lib/webhooks/deploy'
import { activateWorkflowVersion } from '@/lib/workflows/persistence/utils' import { activateWorkflowVersion } from '@/lib/workflows/persistence/utils'
import {
cleanupDeploymentVersion,
createSchedulesForDeploy,
validateWorkflowSchedules,
} from '@/lib/workflows/schedules'
import { withAdminAuthParams } from '@/app/api/v1/admin/middleware' import { withAdminAuthParams } from '@/app/api/v1/admin/middleware'
import { import {
badRequestResponse, badRequestResponse,
@@ -9,6 +17,7 @@ import {
notFoundResponse, notFoundResponse,
singleResponse, singleResponse,
} from '@/app/api/v1/admin/responses' } from '@/app/api/v1/admin/responses'
import type { BlockState } from '@/stores/workflows/workflow/types'
const logger = createLogger('AdminWorkflowActivateVersionAPI') const logger = createLogger('AdminWorkflowActivateVersionAPI')
@@ -18,11 +27,12 @@ interface RouteParams {
} }
export const POST = withAdminAuthParams<RouteParams>(async (request, context) => { export const POST = withAdminAuthParams<RouteParams>(async (request, context) => {
const requestId = generateRequestId()
const { id: workflowId, versionId } = await context.params const { id: workflowId, versionId } = await context.params
try { try {
const [workflowRecord] = await db const [workflowRecord] = await db
.select({ id: workflow.id }) .select()
.from(workflow) .from(workflow)
.where(eq(workflow.id, workflowId)) .where(eq(workflow.id, workflowId))
.limit(1) .limit(1)
@@ -36,23 +46,161 @@ export const POST = withAdminAuthParams<RouteParams>(async (request, context) =>
return badRequestResponse('Invalid version number') return badRequestResponse('Invalid version number')
} }
const [versionRow] = await db
.select({
id: workflowDeploymentVersion.id,
state: workflowDeploymentVersion.state,
})
.from(workflowDeploymentVersion)
.where(
and(
eq(workflowDeploymentVersion.workflowId, workflowId),
eq(workflowDeploymentVersion.version, versionNum)
)
)
.limit(1)
if (!versionRow?.state) {
return notFoundResponse('Deployment version')
}
const [currentActiveVersion] = await db
.select({ id: workflowDeploymentVersion.id })
.from(workflowDeploymentVersion)
.where(
and(
eq(workflowDeploymentVersion.workflowId, workflowId),
eq(workflowDeploymentVersion.isActive, true)
)
)
.limit(1)
const previousVersionId = currentActiveVersion?.id
const deployedState = versionRow.state as { blocks?: Record<string, BlockState> }
const blocks = deployedState.blocks
if (!blocks || typeof blocks !== 'object') {
return internalErrorResponse('Invalid deployed state structure')
}
const workflowData = workflowRecord as Record<string, unknown>
const scheduleValidation = validateWorkflowSchedules(blocks)
if (!scheduleValidation.isValid) {
return badRequestResponse(`Invalid schedule configuration: ${scheduleValidation.error}`)
}
const triggerSaveResult = await saveTriggerWebhooksForDeploy({
request,
workflowId,
workflow: workflowData,
userId: workflowRecord.userId,
blocks,
requestId,
deploymentVersionId: versionRow.id,
previousVersionId,
forceRecreateSubscriptions: true,
})
if (!triggerSaveResult.success) {
logger.error(
`[${requestId}] Admin API: Failed to sync triggers for workflow ${workflowId}`,
triggerSaveResult.error
)
return internalErrorResponse(
triggerSaveResult.error?.message || 'Failed to sync trigger configuration'
)
}
const scheduleResult = await createSchedulesForDeploy(workflowId, blocks, db, versionRow.id)
if (!scheduleResult.success) {
await cleanupDeploymentVersion({
workflowId,
workflow: workflowData,
requestId,
deploymentVersionId: versionRow.id,
})
if (previousVersionId) {
await restorePreviousVersionWebhooks({
request,
workflow: workflowData,
userId: workflowRecord.userId,
previousVersionId,
requestId,
})
}
return internalErrorResponse(scheduleResult.error || 'Failed to sync schedules')
}
const result = await activateWorkflowVersion({ workflowId, version: versionNum }) const result = await activateWorkflowVersion({ workflowId, version: versionNum })
if (!result.success) { if (!result.success) {
await cleanupDeploymentVersion({
workflowId,
workflow: workflowData,
requestId,
deploymentVersionId: versionRow.id,
})
if (previousVersionId) {
await restorePreviousVersionWebhooks({
request,
workflow: workflowData,
userId: workflowRecord.userId,
previousVersionId,
requestId,
})
}
if (result.error === 'Deployment version not found') { if (result.error === 'Deployment version not found') {
return notFoundResponse('Deployment version') return notFoundResponse('Deployment version')
} }
return internalErrorResponse(result.error || 'Failed to activate version') return internalErrorResponse(result.error || 'Failed to activate version')
} }
logger.info(`Admin API: Activated version ${versionNum} for workflow ${workflowId}`) if (previousVersionId && previousVersionId !== versionRow.id) {
try {
logger.info(
`[${requestId}] Admin API: Cleaning up previous version ${previousVersionId} webhooks/schedules`
)
await cleanupDeploymentVersion({
workflowId,
workflow: workflowData,
requestId,
deploymentVersionId: previousVersionId,
skipExternalCleanup: true,
})
logger.info(`[${requestId}] Admin API: Previous version cleanup completed`)
} catch (cleanupError) {
logger.error(
`[${requestId}] Admin API: Failed to clean up previous version ${previousVersionId}`,
cleanupError
)
}
}
await syncMcpToolsForWorkflow({
workflowId,
requestId,
state: versionRow.state,
context: 'activate',
})
logger.info(
`[${requestId}] Admin API: Activated version ${versionNum} for workflow ${workflowId}`
)
return singleResponse({ return singleResponse({
success: true, success: true,
version: versionNum, version: versionNum,
deployedAt: result.deployedAt!.toISOString(), deployedAt: result.deployedAt!.toISOString(),
warnings: triggerSaveResult.warnings,
}) })
} catch (error) { } catch (error) {
logger.error(`Admin API: Failed to activate version for workflow ${workflowId}`, { error }) logger.error(
`[${requestId}] Admin API: Failed to activate version for workflow ${workflowId}`,
{
error,
}
)
return internalErrorResponse('Failed to activate deployment version') return internalErrorResponse('Failed to activate deployment version')
} }
}) })

View File

@@ -7,11 +7,7 @@ import { getSession } from '@/lib/auth'
import { validateInteger } from '@/lib/core/security/input-validation' import { validateInteger } from '@/lib/core/security/input-validation'
import { PlatformEvents } from '@/lib/core/telemetry' import { PlatformEvents } from '@/lib/core/telemetry'
import { generateRequestId } from '@/lib/core/utils/request' import { generateRequestId } from '@/lib/core/utils/request'
import { import { cleanupExternalWebhook } from '@/lib/webhooks/provider-subscriptions'
cleanupExternalWebhook,
createExternalWebhookSubscription,
shouldRecreateExternalWebhookSubscription,
} from '@/lib/webhooks/provider-subscriptions'
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
const logger = createLogger('WebhookAPI') const logger = createLogger('WebhookAPI')
@@ -87,7 +83,6 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
} }
} }
// Update a webhook
export async function PATCH(request: NextRequest, { params }: { params: Promise<{ id: string }> }) { export async function PATCH(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const requestId = generateRequestId() const requestId = generateRequestId()
@@ -102,7 +97,7 @@ export async function PATCH(request: NextRequest, { params }: { params: Promise<
} }
const body = await request.json() const body = await request.json()
const { path, provider, providerConfig, isActive, failedCount } = body const { isActive, failedCount } = body
if (failedCount !== undefined) { if (failedCount !== undefined) {
const validation = validateInteger(failedCount, 'failedCount', { min: 0 }) const validation = validateInteger(failedCount, 'failedCount', { min: 0 })
@@ -112,28 +107,6 @@ export async function PATCH(request: NextRequest, { params }: { params: Promise<
} }
} }
let resolvedProviderConfig = providerConfig
if (providerConfig) {
const { resolveEnvVarsInObject } = await import('@/lib/webhooks/env-resolver')
const webhookDataForResolve = await db
.select({
workspaceId: workflow.workspaceId,
})
.from(webhook)
.innerJoin(workflow, eq(webhook.workflowId, workflow.id))
.where(eq(webhook.id, id))
.limit(1)
if (webhookDataForResolve.length > 0) {
resolvedProviderConfig = await resolveEnvVarsInObject(
providerConfig,
session.user.id,
webhookDataForResolve[0].workspaceId || undefined
)
}
}
// Find the webhook and check permissions
const webhooks = await db const webhooks = await db
.select({ .select({
webhook: webhook, webhook: webhook,
@@ -154,16 +127,12 @@ export async function PATCH(request: NextRequest, { params }: { params: Promise<
} }
const webhookData = webhooks[0] const webhookData = webhooks[0]
// Check if user has permission to modify this webhook
let canModify = false let canModify = false
// Case 1: User owns the workflow
if (webhookData.workflow.userId === session.user.id) { if (webhookData.workflow.userId === session.user.id) {
canModify = true canModify = true
} }
// Case 2: Workflow belongs to a workspace and user has write or admin permission
if (!canModify && webhookData.workflow.workspaceId) { if (!canModify && webhookData.workflow.workspaceId) {
const userPermission = await getUserEntityPermissions( const userPermission = await getUserEntityPermissions(
session.user.id, session.user.id,
@@ -182,76 +151,14 @@ export async function PATCH(request: NextRequest, { params }: { params: Promise<
return NextResponse.json({ error: 'Access denied' }, { status: 403 }) return NextResponse.json({ error: 'Access denied' }, { status: 403 })
} }
const existingProviderConfig =
(webhookData.webhook.providerConfig as Record<string, unknown>) || {}
let nextProviderConfig =
providerConfig !== undefined &&
resolvedProviderConfig &&
typeof resolvedProviderConfig === 'object'
? (resolvedProviderConfig as Record<string, unknown>)
: existingProviderConfig
const nextProvider = (provider ?? webhookData.webhook.provider) as string
if (
providerConfig !== undefined &&
shouldRecreateExternalWebhookSubscription({
previousProvider: webhookData.webhook.provider as string,
nextProvider,
previousConfig: existingProviderConfig,
nextConfig: nextProviderConfig,
})
) {
await cleanupExternalWebhook(
{ ...webhookData.webhook, providerConfig: existingProviderConfig },
webhookData.workflow,
requestId
)
const result = await createExternalWebhookSubscription(
request,
{
...webhookData.webhook,
provider: nextProvider,
providerConfig: nextProviderConfig,
},
webhookData.workflow,
session.user.id,
requestId
)
nextProviderConfig = result.updatedProviderConfig as Record<string, unknown>
}
logger.debug(`[${requestId}] Updating webhook properties`, { logger.debug(`[${requestId}] Updating webhook properties`, {
hasPathUpdate: path !== undefined,
hasProviderUpdate: provider !== undefined,
hasConfigUpdate: providerConfig !== undefined,
hasActiveUpdate: isActive !== undefined, hasActiveUpdate: isActive !== undefined,
hasFailedCountUpdate: failedCount !== undefined, hasFailedCountUpdate: failedCount !== undefined,
}) })
// Merge providerConfig to preserve credential-related fields
let finalProviderConfig = webhooks[0].webhook.providerConfig
if (providerConfig !== undefined) {
const existingConfig = existingProviderConfig
finalProviderConfig = {
...nextProviderConfig,
credentialId: existingConfig.credentialId,
credentialSetId: existingConfig.credentialSetId,
userId: existingConfig.userId,
historyId: existingConfig.historyId,
lastCheckedTimestamp: existingConfig.lastCheckedTimestamp,
setupCompleted: existingConfig.setupCompleted,
externalId: nextProviderConfig.externalId ?? existingConfig.externalId,
}
}
const updatedWebhook = await db const updatedWebhook = await db
.update(webhook) .update(webhook)
.set({ .set({
path: path !== undefined ? path : webhooks[0].webhook.path,
provider: provider !== undefined ? provider : webhooks[0].webhook.provider,
providerConfig: finalProviderConfig,
isActive: isActive !== undefined ? isActive : webhooks[0].webhook.isActive, isActive: isActive !== undefined ? isActive : webhooks[0].webhook.isActive,
failedCount: failedCount !== undefined ? failedCount : webhooks[0].webhook.failedCount, failedCount: failedCount !== undefined ? failedCount : webhooks[0].webhook.failedCount,
updatedAt: new Date(), updatedAt: new Date(),
@@ -334,11 +241,8 @@ export async function DELETE(
} }
const foundWebhook = webhookData.webhook const foundWebhook = webhookData.webhook
const { cleanupExternalWebhook } = await import('@/lib/webhooks/provider-subscriptions') const credentialSetId = foundWebhook.credentialSetId as string | undefined
const blockId = foundWebhook.blockId as string | undefined
const providerConfig = foundWebhook.providerConfig as Record<string, unknown> | null
const credentialSetId = providerConfig?.credentialSetId as string | undefined
const blockId = providerConfig?.blockId as string | undefined
if (credentialSetId && blockId) { if (credentialSetId && blockId) {
const allCredentialSetWebhooks = await db const allCredentialSetWebhooks = await db
@@ -346,10 +250,9 @@ export async function DELETE(
.from(webhook) .from(webhook)
.where(and(eq(webhook.workflowId, webhookData.workflow.id), eq(webhook.blockId, blockId))) .where(and(eq(webhook.workflowId, webhookData.workflow.id), eq(webhook.blockId, blockId)))
const webhooksToDelete = allCredentialSetWebhooks.filter((w) => { const webhooksToDelete = allCredentialSetWebhooks.filter(
const config = w.providerConfig as Record<string, unknown> | null (w) => w.credentialSetId === credentialSetId
return config?.credentialSetId === credentialSetId )
})
for (const w of webhooksToDelete) { for (const w of webhooksToDelete) {
await cleanupExternalWebhook(w, webhookData.workflow, requestId) await cleanupExternalWebhook(w, webhookData.workflow, requestId)

View File

@@ -7,8 +7,21 @@ import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth' import { getSession } from '@/lib/auth'
import { PlatformEvents } from '@/lib/core/telemetry' import { PlatformEvents } from '@/lib/core/telemetry'
import { generateRequestId } from '@/lib/core/utils/request' import { generateRequestId } from '@/lib/core/utils/request'
import { createExternalWebhookSubscription } from '@/lib/webhooks/provider-subscriptions' import { getProviderIdFromServiceId } from '@/lib/oauth'
import { resolveEnvVarsInObject } from '@/lib/webhooks/env-resolver'
import {
cleanupExternalWebhook,
createExternalWebhookSubscription,
} from '@/lib/webhooks/provider-subscriptions'
import { mergeNonUserFields } from '@/lib/webhooks/utils'
import {
configureGmailPolling,
configureOutlookPolling,
configureRssPolling,
syncWebhooksForCredentialSet,
} from '@/lib/webhooks/utils.server'
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
import { extractCredentialSetId, isCredentialSetValue } from '@/executor/constants'
const logger = createLogger('WebhooksAPI') const logger = createLogger('WebhooksAPI')
@@ -298,14 +311,10 @@ export async function POST(request: NextRequest) {
} }
} }
let savedWebhook: any = null // Variable to hold the result of save/update let savedWebhook: any = null
const originalProviderConfig = providerConfig || {}
// Use the original provider config - Gmail/Outlook configuration functions will inject userId automatically
const finalProviderConfig = providerConfig || {}
const { resolveEnvVarsInObject } = await import('@/lib/webhooks/env-resolver')
let resolvedProviderConfig = await resolveEnvVarsInObject( let resolvedProviderConfig = await resolveEnvVarsInObject(
finalProviderConfig, originalProviderConfig,
userId, userId,
workflowRecord.workspaceId || undefined workflowRecord.workspaceId || undefined
) )
@@ -319,8 +328,6 @@ export async function POST(request: NextRequest) {
const directCredentialSetId = resolvedProviderConfig?.credentialSetId as string | undefined const directCredentialSetId = resolvedProviderConfig?.credentialSetId as string | undefined
if (directCredentialSetId || rawCredentialId) { if (directCredentialSetId || rawCredentialId) {
const { isCredentialSetValue, extractCredentialSetId } = await import('@/executor/constants')
const credentialSetId = const credentialSetId =
directCredentialSetId || directCredentialSetId ||
(rawCredentialId && isCredentialSetValue(rawCredentialId) (rawCredentialId && isCredentialSetValue(rawCredentialId)
@@ -332,11 +339,6 @@ export async function POST(request: NextRequest) {
`[${requestId}] Credential set detected for ${provider} trigger. Syncing webhooks for set ${credentialSetId}` `[${requestId}] Credential set detected for ${provider} trigger. Syncing webhooks for set ${credentialSetId}`
) )
const { getProviderIdFromServiceId } = await import('@/lib/oauth')
const { syncWebhooksForCredentialSet, configureGmailPolling, configureOutlookPolling } =
await import('@/lib/webhooks/utils.server')
// Map provider to OAuth provider ID
const oauthProviderId = getProviderIdFromServiceId(provider) const oauthProviderId = getProviderIdFromServiceId(provider)
const { const {
@@ -469,6 +471,9 @@ export async function POST(request: NextRequest) {
providerConfig: providerConfigOverride, providerConfig: providerConfigOverride,
}) })
const userProvided = originalProviderConfig as Record<string, unknown>
const configToSave: Record<string, unknown> = { ...userProvided }
try { try {
const result = await createExternalWebhookSubscription( const result = await createExternalWebhookSubscription(
request, request,
@@ -477,7 +482,9 @@ export async function POST(request: NextRequest) {
userId, userId,
requestId requestId
) )
resolvedProviderConfig = result.updatedProviderConfig as Record<string, unknown> const updatedConfig = result.updatedProviderConfig as Record<string, unknown>
mergeNonUserFields(configToSave, updatedConfig, userProvided)
resolvedProviderConfig = updatedConfig
externalSubscriptionCreated = result.externalSubscriptionCreated externalSubscriptionCreated = result.externalSubscriptionCreated
} catch (err) { } catch (err) {
logger.error(`[${requestId}] Error creating external webhook subscription`, err) logger.error(`[${requestId}] Error creating external webhook subscription`, err)
@@ -490,25 +497,22 @@ export async function POST(request: NextRequest) {
) )
} }
// Now save to database (only if subscription succeeded or provider doesn't need external subscription)
try { try {
if (targetWebhookId) { if (targetWebhookId) {
logger.info(`[${requestId}] Updating existing webhook for path: ${finalPath}`, { logger.info(`[${requestId}] Updating existing webhook for path: ${finalPath}`, {
webhookId: targetWebhookId, webhookId: targetWebhookId,
provider, provider,
hasCredentialId: !!(resolvedProviderConfig as any)?.credentialId, hasCredentialId: !!(configToSave as any)?.credentialId,
credentialId: (resolvedProviderConfig as any)?.credentialId, credentialId: (configToSave as any)?.credentialId,
}) })
const updatedResult = await db const updatedResult = await db
.update(webhook) .update(webhook)
.set({ .set({
blockId, blockId,
provider, provider,
providerConfig: resolvedProviderConfig, providerConfig: configToSave,
credentialSetId: credentialSetId:
((resolvedProviderConfig as Record<string, unknown>)?.credentialSetId as ((configToSave as Record<string, unknown>)?.credentialSetId as string | null) || null,
| string
| null) || null,
isActive: true, isActive: true,
updatedAt: new Date(), updatedAt: new Date(),
}) })
@@ -531,11 +535,9 @@ export async function POST(request: NextRequest) {
blockId, blockId,
path: finalPath, path: finalPath,
provider, provider,
providerConfig: resolvedProviderConfig, providerConfig: configToSave,
credentialSetId: credentialSetId:
((resolvedProviderConfig as Record<string, unknown>)?.credentialSetId as ((configToSave as Record<string, unknown>)?.credentialSetId as string | null) || null,
| string
| null) || null,
isActive: true, isActive: true,
createdAt: new Date(), createdAt: new Date(),
updatedAt: new Date(), updatedAt: new Date(),
@@ -547,9 +549,8 @@ export async function POST(request: NextRequest) {
if (externalSubscriptionCreated) { if (externalSubscriptionCreated) {
logger.error(`[${requestId}] DB save failed, cleaning up external subscription`, dbError) logger.error(`[${requestId}] DB save failed, cleaning up external subscription`, dbError)
try { try {
const { cleanupExternalWebhook } = await import('@/lib/webhooks/provider-subscriptions')
await cleanupExternalWebhook( await cleanupExternalWebhook(
createTempWebhookData(resolvedProviderConfig), createTempWebhookData(configToSave),
workflowRecord, workflowRecord,
requestId requestId
) )
@@ -567,7 +568,6 @@ export async function POST(request: NextRequest) {
if (savedWebhook && provider === 'gmail') { if (savedWebhook && provider === 'gmail') {
logger.info(`[${requestId}] Gmail provider detected. Setting up Gmail webhook configuration.`) logger.info(`[${requestId}] Gmail provider detected. Setting up Gmail webhook configuration.`)
try { try {
const { configureGmailPolling } = await import('@/lib/webhooks/utils.server')
const success = await configureGmailPolling(savedWebhook, requestId) const success = await configureGmailPolling(savedWebhook, requestId)
if (!success) { if (!success) {
@@ -606,7 +606,6 @@ export async function POST(request: NextRequest) {
`[${requestId}] Outlook provider detected. Setting up Outlook webhook configuration.` `[${requestId}] Outlook provider detected. Setting up Outlook webhook configuration.`
) )
try { try {
const { configureOutlookPolling } = await import('@/lib/webhooks/utils.server')
const success = await configureOutlookPolling(savedWebhook, requestId) const success = await configureOutlookPolling(savedWebhook, requestId)
if (!success) { if (!success) {
@@ -643,7 +642,6 @@ export async function POST(request: NextRequest) {
if (savedWebhook && provider === 'rss') { if (savedWebhook && provider === 'rss') {
logger.info(`[${requestId}] RSS provider detected. Setting up RSS webhook configuration.`) logger.info(`[${requestId}] RSS provider detected. Setting up RSS webhook configuration.`)
try { try {
const { configureRssPolling } = await import('@/lib/webhooks/utils.server')
const success = await configureRssPolling(savedWebhook, requestId) const success = await configureRssPolling(savedWebhook, requestId)
if (!success) { if (!success) {

View File

@@ -4,7 +4,11 @@ import { and, desc, eq } from 'drizzle-orm'
import type { NextRequest } from 'next/server' import type { NextRequest } from 'next/server'
import { generateRequestId } from '@/lib/core/utils/request' import { generateRequestId } from '@/lib/core/utils/request'
import { removeMcpToolsForWorkflow, syncMcpToolsForWorkflow } from '@/lib/mcp/workflow-mcp-sync' import { removeMcpToolsForWorkflow, syncMcpToolsForWorkflow } from '@/lib/mcp/workflow-mcp-sync'
import { cleanupWebhooksForWorkflow, saveTriggerWebhooksForDeploy } from '@/lib/webhooks/deploy' import {
cleanupWebhooksForWorkflow,
restorePreviousVersionWebhooks,
saveTriggerWebhooksForDeploy,
} from '@/lib/webhooks/deploy'
import { import {
deployWorkflow, deployWorkflow,
loadWorkflowFromNormalizedTables, loadWorkflowFromNormalizedTables,
@@ -135,6 +139,18 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
return createErrorResponse(`Invalid schedule configuration: ${scheduleValidation.error}`, 400) return createErrorResponse(`Invalid schedule configuration: ${scheduleValidation.error}`, 400)
} }
const [currentActiveVersion] = await db
.select({ id: workflowDeploymentVersion.id })
.from(workflowDeploymentVersion)
.where(
and(
eq(workflowDeploymentVersion.workflowId, id),
eq(workflowDeploymentVersion.isActive, true)
)
)
.limit(1)
const previousVersionId = currentActiveVersion?.id
const deployResult = await deployWorkflow({ const deployResult = await deployWorkflow({
workflowId: id, workflowId: id,
deployedBy: actorUserId, deployedBy: actorUserId,
@@ -161,6 +177,7 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
blocks: normalizedData.blocks, blocks: normalizedData.blocks,
requestId, requestId,
deploymentVersionId, deploymentVersionId,
previousVersionId,
}) })
if (!triggerSaveResult.success) { if (!triggerSaveResult.success) {
@@ -194,6 +211,15 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
requestId, requestId,
deploymentVersionId, deploymentVersionId,
}) })
if (previousVersionId) {
await restorePreviousVersionWebhooks({
request,
workflow: workflowData as Record<string, unknown>,
userId: actorUserId,
previousVersionId,
requestId,
})
}
await undeployWorkflow({ workflowId: id }) await undeployWorkflow({ workflowId: id })
return createErrorResponse(scheduleResult.error || 'Failed to create schedule', 500) return createErrorResponse(scheduleResult.error || 'Failed to create schedule', 500)
} }
@@ -208,6 +234,25 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
) )
} }
if (previousVersionId && previousVersionId !== deploymentVersionId) {
try {
logger.info(`[${requestId}] Cleaning up previous version ${previousVersionId} DB records`)
await cleanupDeploymentVersion({
workflowId: id,
workflow: workflowData as Record<string, unknown>,
requestId,
deploymentVersionId: previousVersionId,
skipExternalCleanup: true,
})
} catch (cleanupError) {
logger.error(
`[${requestId}] Failed to clean up previous version ${previousVersionId}`,
cleanupError
)
// Non-fatal - continue with success response
}
}
logger.info(`[${requestId}] Workflow deployed successfully: ${id}`) logger.info(`[${requestId}] Workflow deployed successfully: ${id}`)
// Sync MCP tools with the latest parameter schema // Sync MCP tools with the latest parameter schema
@@ -228,6 +273,7 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
nextRunAt: scheduleInfo.nextRunAt, nextRunAt: scheduleInfo.nextRunAt,
} }
: undefined, : undefined,
warnings: triggerSaveResult.warnings,
}) })
} catch (error: any) { } catch (error: any) {
logger.error(`[${requestId}] Error deploying workflow: ${id}`, { logger.error(`[${requestId}] Error deploying workflow: ${id}`, {

View File

@@ -4,7 +4,7 @@ import { and, eq } from 'drizzle-orm'
import type { NextRequest } from 'next/server' import type { NextRequest } from 'next/server'
import { generateRequestId } from '@/lib/core/utils/request' import { generateRequestId } from '@/lib/core/utils/request'
import { syncMcpToolsForWorkflow } from '@/lib/mcp/workflow-mcp-sync' import { syncMcpToolsForWorkflow } from '@/lib/mcp/workflow-mcp-sync'
import { saveTriggerWebhooksForDeploy } from '@/lib/webhooks/deploy' import { restorePreviousVersionWebhooks, saveTriggerWebhooksForDeploy } from '@/lib/webhooks/deploy'
import { activateWorkflowVersion } from '@/lib/workflows/persistence/utils' import { activateWorkflowVersion } from '@/lib/workflows/persistence/utils'
import { import {
cleanupDeploymentVersion, cleanupDeploymentVersion,
@@ -85,6 +85,11 @@ export async function POST(
return createErrorResponse('Invalid deployed state structure', 500) return createErrorResponse('Invalid deployed state structure', 500)
} }
const scheduleValidation = validateWorkflowSchedules(blocks)
if (!scheduleValidation.isValid) {
return createErrorResponse(`Invalid schedule configuration: ${scheduleValidation.error}`, 400)
}
const triggerSaveResult = await saveTriggerWebhooksForDeploy({ const triggerSaveResult = await saveTriggerWebhooksForDeploy({
request, request,
workflowId: id, workflowId: id,
@@ -93,6 +98,8 @@ export async function POST(
blocks, blocks,
requestId, requestId,
deploymentVersionId: versionRow.id, deploymentVersionId: versionRow.id,
previousVersionId,
forceRecreateSubscriptions: true,
}) })
if (!triggerSaveResult.success) { if (!triggerSaveResult.success) {
@@ -102,11 +109,6 @@ export async function POST(
) )
} }
const scheduleValidation = validateWorkflowSchedules(blocks)
if (!scheduleValidation.isValid) {
return createErrorResponse(`Invalid schedule configuration: ${scheduleValidation.error}`, 400)
}
const scheduleResult = await createSchedulesForDeploy(id, blocks, db, versionRow.id) const scheduleResult = await createSchedulesForDeploy(id, blocks, db, versionRow.id)
if (!scheduleResult.success) { if (!scheduleResult.success) {
@@ -116,6 +118,15 @@ export async function POST(
requestId, requestId,
deploymentVersionId: versionRow.id, deploymentVersionId: versionRow.id,
}) })
if (previousVersionId) {
await restorePreviousVersionWebhooks({
request,
workflow: workflowData as Record<string, unknown>,
userId: actorUserId,
previousVersionId,
requestId,
})
}
return createErrorResponse(scheduleResult.error || 'Failed to sync schedules', 500) return createErrorResponse(scheduleResult.error || 'Failed to sync schedules', 500)
} }
@@ -127,6 +138,15 @@ export async function POST(
requestId, requestId,
deploymentVersionId: versionRow.id, deploymentVersionId: versionRow.id,
}) })
if (previousVersionId) {
await restorePreviousVersionWebhooks({
request,
workflow: workflowData as Record<string, unknown>,
userId: actorUserId,
previousVersionId,
requestId,
})
}
return createErrorResponse(result.error || 'Failed to activate deployment', 400) return createErrorResponse(result.error || 'Failed to activate deployment', 400)
} }
@@ -140,6 +160,7 @@ export async function POST(
workflow: workflowData as Record<string, unknown>, workflow: workflowData as Record<string, unknown>,
requestId, requestId,
deploymentVersionId: previousVersionId, deploymentVersionId: previousVersionId,
skipExternalCleanup: true,
}) })
logger.info(`[${requestId}] Previous version cleanup completed`) logger.info(`[${requestId}] Previous version cleanup completed`)
} catch (cleanupError) { } catch (cleanupError) {
@@ -157,7 +178,11 @@ export async function POST(
context: 'activate', context: 'activate',
}) })
return createSuccessResponse({ success: true, deployedAt: result.deployedAt }) return createSuccessResponse({
success: true,
deployedAt: result.deployedAt,
warnings: triggerSaveResult.warnings,
})
} catch (error: any) { } catch (error: any) {
logger.error(`[${requestId}] Error activating deployment for workflow: ${id}`, error) logger.error(`[${requestId}] Error activating deployment for workflow: ${id}`, error)
return createErrorResponse(error.message || 'Failed to activate deployment', 500) return createErrorResponse(error.message || 'Failed to activate deployment', 500)

View File

@@ -30,6 +30,7 @@ import { normalizeName } from '@/executor/constants'
import { ExecutionSnapshot } from '@/executor/execution/snapshot' import { ExecutionSnapshot } from '@/executor/execution/snapshot'
import type { ExecutionMetadata, IterationContext } from '@/executor/execution/types' import type { ExecutionMetadata, IterationContext } from '@/executor/execution/types'
import type { NormalizedBlockOutput, StreamingExecution } from '@/executor/types' import type { NormalizedBlockOutput, StreamingExecution } from '@/executor/types'
import { hasExecutionResult } from '@/executor/utils/errors'
import { Serializer } from '@/serializer' import { Serializer } from '@/serializer'
import { CORE_TRIGGER_TYPES, type CoreTriggerType } from '@/stores/logs/filters/types' import { CORE_TRIGGER_TYPES, type CoreTriggerType } from '@/stores/logs/filters/types'
@@ -116,7 +117,6 @@ type AsyncExecutionParams = {
userId: string userId: string
input: any input: any
triggerType: CoreTriggerType triggerType: CoreTriggerType
preflighted?: boolean
} }
/** /**
@@ -139,7 +139,6 @@ async function handleAsyncExecution(params: AsyncExecutionParams): Promise<NextR
userId, userId,
input, input,
triggerType, triggerType,
preflighted: params.preflighted,
} }
try { try {
@@ -276,7 +275,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
requestId requestId
) )
const shouldPreflightEnvVars = isAsyncMode && isTriggerDevEnabled
const preprocessResult = await preprocessExecution({ const preprocessResult = await preprocessExecution({
workflowId, workflowId,
userId, userId,
@@ -285,9 +283,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
requestId, requestId,
checkDeployment: !shouldUseDraftState, checkDeployment: !shouldUseDraftState,
loggingSession, loggingSession,
preflightEnvVars: shouldPreflightEnvVars,
useDraftState: shouldUseDraftState, useDraftState: shouldUseDraftState,
envUserId: isClientSession ? userId : undefined,
}) })
if (!preprocessResult.success) { if (!preprocessResult.success) {
@@ -319,7 +315,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
userId: actorUserId, userId: actorUserId,
input, input,
triggerType: loggingTriggerType, triggerType: loggingTriggerType,
preflighted: shouldPreflightEnvVars,
}) })
} }
@@ -473,17 +468,17 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
} }
return NextResponse.json(filteredResult) return NextResponse.json(filteredResult)
} catch (error: any) { } catch (error: unknown) {
const errorMessage = error.message || 'Unknown error' const errorMessage = error instanceof Error ? error.message : 'Unknown error'
logger.error(`[${requestId}] Non-SSE execution failed: ${errorMessage}`) logger.error(`[${requestId}] Non-SSE execution failed: ${errorMessage}`)
const executionResult = error.executionResult const executionResult = hasExecutionResult(error) ? error.executionResult : undefined
return NextResponse.json( return NextResponse.json(
{ {
success: false, success: false,
output: executionResult?.output, output: executionResult?.output,
error: executionResult?.error || error.message || 'Execution failed', error: executionResult?.error || errorMessage || 'Execution failed',
metadata: executionResult?.metadata metadata: executionResult?.metadata
? { ? {
duration: executionResult.metadata.duration, duration: executionResult.metadata.duration,
@@ -794,11 +789,11 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
// Cleanup base64 cache for this execution // Cleanup base64 cache for this execution
await cleanupExecutionBase64Cache(executionId) await cleanupExecutionBase64Cache(executionId)
} catch (error: any) { } catch (error: unknown) {
const errorMessage = error.message || 'Unknown error' const errorMessage = error instanceof Error ? error.message : 'Unknown error'
logger.error(`[${requestId}] SSE execution failed: ${errorMessage}`) logger.error(`[${requestId}] SSE execution failed: ${errorMessage}`)
const executionResult = error.executionResult const executionResult = hasExecutionResult(error) ? error.executionResult : undefined
sendEvent({ sendEvent({
type: 'execution:error', type: 'execution:error',

View File

@@ -94,7 +94,9 @@ export default function Logs() {
const [previewLogId, setPreviewLogId] = useState<string | null>(null) const [previewLogId, setPreviewLogId] = useState<string | null>(null)
const activeLogId = isPreviewOpen ? previewLogId : selectedLogId const activeLogId = isPreviewOpen ? previewLogId : selectedLogId
const activeLogQuery = useLogDetail(activeLogId ?? undefined) const activeLogQuery = useLogDetail(activeLogId ?? undefined, {
refetchInterval: isLive ? 3000 : false,
})
const logFilters = useMemo( const logFilters = useMemo(
() => ({ () => ({
@@ -113,7 +115,7 @@ export default function Logs() {
const logsQuery = useLogsList(workspaceId, logFilters, { const logsQuery = useLogsList(workspaceId, logFilters, {
enabled: Boolean(workspaceId) && isInitialized.current, enabled: Boolean(workspaceId) && isInitialized.current,
refetchInterval: isLive ? 5000 : false, refetchInterval: isLive ? 3000 : false,
}) })
const dashboardFilters = useMemo( const dashboardFilters = useMemo(
@@ -132,7 +134,7 @@ export default function Logs() {
const dashboardStatsQuery = useDashboardStats(workspaceId, dashboardFilters, { const dashboardStatsQuery = useDashboardStats(workspaceId, dashboardFilters, {
enabled: Boolean(workspaceId) && isInitialized.current, enabled: Boolean(workspaceId) && isInitialized.current,
refetchInterval: isLive ? 5000 : false, refetchInterval: isLive ? 3000 : false,
}) })
const logs = useMemo(() => { const logs = useMemo(() => {
@@ -160,12 +162,6 @@ export default function Logs() {
} }
}, [debouncedSearchQuery, setStoreSearchQuery]) }, [debouncedSearchQuery, setStoreSearchQuery])
useEffect(() => {
if (!isLive || !selectedLogId) return
const interval = setInterval(() => activeLogQuery.refetch(), 5000)
return () => clearInterval(interval)
}, [isLive, selectedLogId, activeLogQuery])
const handleLogClick = useCallback( const handleLogClick = useCallback(
(log: WorkflowLog) => { (log: WorkflowLog) => {
if (selectedLogId === log.id && isSidebarOpen) { if (selectedLogId === log.id && isSidebarOpen) {
@@ -279,8 +275,11 @@ export default function Logs() {
setIsVisuallyRefreshing(true) setIsVisuallyRefreshing(true)
setTimeout(() => setIsVisuallyRefreshing(false), REFRESH_SPINNER_DURATION_MS) setTimeout(() => setIsVisuallyRefreshing(false), REFRESH_SPINNER_DURATION_MS)
logsQuery.refetch() logsQuery.refetch()
if (selectedLogId) {
activeLogQuery.refetch()
}
} }
}, [isLive, logsQuery]) }, [isLive, logsQuery, activeLogQuery, selectedLogId])
const prevIsFetchingRef = useRef(logsQuery.isFetching) const prevIsFetchingRef = useRef(logsQuery.isFetching)
useEffect(() => { useEffect(() => {

View File

@@ -78,9 +78,10 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
mode, mode,
setMode, setMode,
isAborting, isAborting,
maskCredentialValue,
} = useCopilotStore() } = useCopilotStore()
const maskCredentialValue = useCopilotStore((s) => s.maskCredentialValue)
const messageCheckpoints = isUser ? allMessageCheckpoints[message.id] || [] : [] const messageCheckpoints = isUser ? allMessageCheckpoints[message.id] || [] : []
const hasCheckpoints = messageCheckpoints.length > 0 && messageCheckpoints.some((cp) => cp?.id) const hasCheckpoints = messageCheckpoints.length > 0 && messageCheckpoints.some((cp) => cp?.id)
@@ -265,7 +266,7 @@ const CopilotMessage: FC<CopilotMessageProps> = memo(
} }
return null return null
}) })
}, [message.contentBlocks, isActivelyStreaming, parsedTags, isLastMessage, maskCredentialValue]) }, [message.contentBlocks, isActivelyStreaming, parsedTags, isLastMessage])
if (isUser) { if (isUser) {
return ( return (

View File

@@ -1425,10 +1425,7 @@ function RunSkipButtons({
setIsProcessing(true) setIsProcessing(true)
setButtonsHidden(true) setButtonsHidden(true)
try { try {
// Add to auto-allowed list - this also executes all pending integration tools of this type
await addAutoAllowedTool(toolCall.name) await addAutoAllowedTool(toolCall.name)
// For client tools with interrupts (not integration tools), we still need to call handleRun
// since executeIntegrationTool only works for server-side tools
if (!isIntegrationTool(toolCall.name)) { if (!isIntegrationTool(toolCall.name)) {
await handleRun(toolCall, setToolCallState, onStateChange, editedParams) await handleRun(toolCall, setToolCallState, onStateChange, editedParams)
} }
@@ -1526,7 +1523,11 @@ export function ToolCall({
toolCall.name === 'user_memory' || toolCall.name === 'user_memory' ||
toolCall.name === 'edit_respond' || toolCall.name === 'edit_respond' ||
toolCall.name === 'debug_respond' || toolCall.name === 'debug_respond' ||
toolCall.name === 'plan_respond' toolCall.name === 'plan_respond' ||
toolCall.name === 'research_respond' ||
toolCall.name === 'info_respond' ||
toolCall.name === 'deploy_respond' ||
toolCall.name === 'superagent_respond'
) )
return null return null

View File

@@ -209,9 +209,20 @@ export interface SlashCommand {
export const TOP_LEVEL_COMMANDS: readonly SlashCommand[] = [ export const TOP_LEVEL_COMMANDS: readonly SlashCommand[] = [
{ id: 'fast', label: 'Fast' }, { id: 'fast', label: 'Fast' },
{ id: 'research', label: 'Research' }, { id: 'research', label: 'Research' },
{ id: 'superagent', label: 'Actions' }, { id: 'actions', label: 'Actions' },
] as const ] as const
/**
* Maps UI command IDs to API command IDs.
* Some commands have different IDs for display vs API (e.g., "actions" -> "superagent")
*/
export function getApiCommandId(uiCommandId: string): string {
const commandMapping: Record<string, string> = {
actions: 'superagent',
}
return commandMapping[uiCommandId] || uiCommandId
}
export const WEB_COMMANDS: readonly SlashCommand[] = [ export const WEB_COMMANDS: readonly SlashCommand[] = [
{ id: 'search', label: 'Search' }, { id: 'search', label: 'Search' },
{ id: 'read', label: 'Read' }, { id: 'read', label: 'Read' },

View File

@@ -95,6 +95,7 @@ export function DeployModal({
const [activeTab, setActiveTab] = useState<TabView>('general') const [activeTab, setActiveTab] = useState<TabView>('general')
const [chatSubmitting, setChatSubmitting] = useState(false) const [chatSubmitting, setChatSubmitting] = useState(false)
const [apiDeployError, setApiDeployError] = useState<string | null>(null) const [apiDeployError, setApiDeployError] = useState<string | null>(null)
const [apiDeployWarnings, setApiDeployWarnings] = useState<string[]>([])
const [isChatFormValid, setIsChatFormValid] = useState(false) const [isChatFormValid, setIsChatFormValid] = useState(false)
const [selectedStreamingOutputs, setSelectedStreamingOutputs] = useState<string[]>([]) const [selectedStreamingOutputs, setSelectedStreamingOutputs] = useState<string[]>([])
@@ -227,6 +228,7 @@ export function DeployModal({
if (open && workflowId) { if (open && workflowId) {
setActiveTab('general') setActiveTab('general')
setApiDeployError(null) setApiDeployError(null)
setApiDeployWarnings([])
} }
}, [open, workflowId]) }, [open, workflowId])
@@ -282,9 +284,13 @@ export function DeployModal({
if (!workflowId) return if (!workflowId) return
setApiDeployError(null) setApiDeployError(null)
setApiDeployWarnings([])
try { try {
await deployMutation.mutateAsync({ workflowId, deployChatEnabled: false }) const result = await deployMutation.mutateAsync({ workflowId, deployChatEnabled: false })
if (result.warnings && result.warnings.length > 0) {
setApiDeployWarnings(result.warnings)
}
await refetchDeployedState() await refetchDeployedState()
} catch (error: unknown) { } catch (error: unknown) {
logger.error('Error deploying workflow:', { error }) logger.error('Error deploying workflow:', { error })
@@ -297,8 +303,13 @@ export function DeployModal({
async (version: number) => { async (version: number) => {
if (!workflowId) return if (!workflowId) return
setApiDeployWarnings([])
try { try {
await activateVersionMutation.mutateAsync({ workflowId, version }) const result = await activateVersionMutation.mutateAsync({ workflowId, version })
if (result.warnings && result.warnings.length > 0) {
setApiDeployWarnings(result.warnings)
}
await refetchDeployedState() await refetchDeployedState()
} catch (error) { } catch (error) {
logger.error('Error promoting version:', { error }) logger.error('Error promoting version:', { error })
@@ -324,9 +335,13 @@ export function DeployModal({
if (!workflowId) return if (!workflowId) return
setApiDeployError(null) setApiDeployError(null)
setApiDeployWarnings([])
try { try {
await deployMutation.mutateAsync({ workflowId, deployChatEnabled: false }) const result = await deployMutation.mutateAsync({ workflowId, deployChatEnabled: false })
if (result.warnings && result.warnings.length > 0) {
setApiDeployWarnings(result.warnings)
}
await refetchDeployedState() await refetchDeployedState()
} catch (error: unknown) { } catch (error: unknown) {
logger.error('Error redeploying workflow:', { error }) logger.error('Error redeploying workflow:', { error })
@@ -338,6 +353,7 @@ export function DeployModal({
const handleCloseModal = useCallback(() => { const handleCloseModal = useCallback(() => {
setChatSubmitting(false) setChatSubmitting(false)
setApiDeployError(null) setApiDeployError(null)
setApiDeployWarnings([])
onOpenChange(false) onOpenChange(false)
}, [onOpenChange]) }, [onOpenChange])
@@ -479,6 +495,14 @@ export function DeployModal({
<div>{apiDeployError}</div> <div>{apiDeployError}</div>
</div> </div>
)} )}
{apiDeployWarnings.length > 0 && (
<div className='mb-3 rounded-[4px] border border-amber-500/30 bg-amber-500/10 p-3 text-amber-700 dark:text-amber-400 text-sm'>
<div className='font-semibold'>Deployment Warning</div>
{apiDeployWarnings.map((warning, index) => (
<div key={index}>{warning}</div>
))}
</div>
)}
<ModalTabsContent value='general'> <ModalTabsContent value='general'>
<GeneralDeploy <GeneralDeploy
workflowId={workflowId} workflowId={workflowId}

View File

@@ -38,6 +38,7 @@ import type { GenerationType } from '@/blocks/types'
import { normalizeName } from '@/executor/constants' import { normalizeName } from '@/executor/constants'
import { createEnvVarPattern, createReferencePattern } from '@/executor/utils/reference-validation' import { createEnvVarPattern, createReferencePattern } from '@/executor/utils/reference-validation'
import { useTagSelection } from '@/hooks/kb/use-tag-selection' import { useTagSelection } from '@/hooks/kb/use-tag-selection'
import { createShouldHighlightEnvVar, useAvailableEnvVarKeys } from '@/hooks/use-available-env-vars'
const logger = createLogger('Code') const logger = createLogger('Code')
@@ -88,21 +89,27 @@ interface CodePlaceholder {
/** /**
* Creates a syntax highlighter function with custom reference and environment variable highlighting. * Creates a syntax highlighter function with custom reference and environment variable highlighting.
* @param effectiveLanguage - The language to use for syntax highlighting * @param effectiveLanguage - The language to use for syntax highlighting
* @param shouldHighlightReference - Function to determine if a reference should be highlighted * @param shouldHighlightReference - Function to determine if a block reference should be highlighted
* @param shouldHighlightEnvVar - Function to determine if an env var should be highlighted
* @returns A function that highlights code with syntax and custom highlights * @returns A function that highlights code with syntax and custom highlights
*/ */
const createHighlightFunction = ( const createHighlightFunction = (
effectiveLanguage: 'javascript' | 'python' | 'json', effectiveLanguage: 'javascript' | 'python' | 'json',
shouldHighlightReference: (part: string) => boolean shouldHighlightReference: (part: string) => boolean,
shouldHighlightEnvVar: (varName: string) => boolean
) => { ) => {
return (codeToHighlight: string): string => { return (codeToHighlight: string): string => {
const placeholders: CodePlaceholder[] = [] const placeholders: CodePlaceholder[] = []
let processedCode = codeToHighlight let processedCode = codeToHighlight
processedCode = processedCode.replace(createEnvVarPattern(), (match) => { processedCode = processedCode.replace(createEnvVarPattern(), (match) => {
const placeholder = `__ENV_VAR_${placeholders.length}__` const varName = match.slice(2, -2).trim()
placeholders.push({ placeholder, original: match, type: 'env' }) if (shouldHighlightEnvVar(varName)) {
return placeholder const placeholder = `__ENV_VAR_${placeholders.length}__`
placeholders.push({ placeholder, original: match, type: 'env' })
return placeholder
}
return match
}) })
processedCode = processedCode.replace(createReferencePattern(), (match) => { processedCode = processedCode.replace(createReferencePattern(), (match) => {
@@ -212,6 +219,7 @@ export const Code = memo(function Code({
const accessiblePrefixes = useAccessibleReferencePrefixes(blockId) const accessiblePrefixes = useAccessibleReferencePrefixes(blockId)
const emitTagSelection = useTagSelection(blockId, subBlockId) const emitTagSelection = useTagSelection(blockId, subBlockId)
const [languageValue] = useSubBlockValue<string>(blockId, 'language') const [languageValue] = useSubBlockValue<string>(blockId, 'language')
const availableEnvVars = useAvailableEnvVarKeys(workspaceId)
const effectiveLanguage = (languageValue as 'javascript' | 'python' | 'json') || language const effectiveLanguage = (languageValue as 'javascript' | 'python' | 'json') || language
@@ -603,9 +611,15 @@ export const Code = memo(function Code({
[generateCodeStream, isPromptVisible, isAiStreaming] [generateCodeStream, isPromptVisible, isAiStreaming]
) )
const shouldHighlightEnvVar = useMemo(
() => createShouldHighlightEnvVar(availableEnvVars),
[availableEnvVars]
)
const highlightCode = useMemo( const highlightCode = useMemo(
() => createHighlightFunction(effectiveLanguage, shouldHighlightReference), () =>
[effectiveLanguage, shouldHighlightReference] createHighlightFunction(effectiveLanguage, shouldHighlightReference, shouldHighlightEnvVar),
[effectiveLanguage, shouldHighlightReference, shouldHighlightEnvVar]
) )
const handleValueChange = useCallback( const handleValueChange = useCallback(

View File

@@ -1,5 +1,5 @@
import type { ReactElement } from 'react' import type { ReactElement } from 'react'
import { useEffect, useRef, useState } from 'react' import { useEffect, useMemo, useRef, useState } from 'react'
import { createLogger } from '@sim/logger' import { createLogger } from '@sim/logger'
import { ChevronDown, ChevronsUpDown, ChevronUp, Plus } from 'lucide-react' import { ChevronDown, ChevronsUpDown, ChevronUp, Plus } from 'lucide-react'
import { useParams } from 'next/navigation' import { useParams } from 'next/navigation'
@@ -35,6 +35,7 @@ import { useAccessibleReferencePrefixes } from '@/app/workspace/[workspaceId]/w/
import { normalizeName } from '@/executor/constants' import { normalizeName } from '@/executor/constants'
import { createEnvVarPattern, createReferencePattern } from '@/executor/utils/reference-validation' import { createEnvVarPattern, createReferencePattern } from '@/executor/utils/reference-validation'
import { useTagSelection } from '@/hooks/kb/use-tag-selection' import { useTagSelection } from '@/hooks/kb/use-tag-selection'
import { createShouldHighlightEnvVar, useAvailableEnvVarKeys } from '@/hooks/use-available-env-vars'
import { useWorkflowStore } from '@/stores/workflows/workflow/store' import { useWorkflowStore } from '@/stores/workflows/workflow/store'
const logger = createLogger('ConditionInput') const logger = createLogger('ConditionInput')
@@ -123,6 +124,11 @@ export function ConditionInput({
const emitTagSelection = useTagSelection(blockId, subBlockId) const emitTagSelection = useTagSelection(blockId, subBlockId)
const accessiblePrefixes = useAccessibleReferencePrefixes(blockId) const accessiblePrefixes = useAccessibleReferencePrefixes(blockId)
const availableEnvVars = useAvailableEnvVarKeys(workspaceId)
const shouldHighlightEnvVar = useMemo(
() => createShouldHighlightEnvVar(availableEnvVars),
[availableEnvVars]
)
const containerRef = useRef<HTMLDivElement>(null) const containerRef = useRef<HTMLDivElement>(null)
const inputRefs = useRef<Map<string, HTMLTextAreaElement>>(new Map()) const inputRefs = useRef<Map<string, HTMLTextAreaElement>>(new Map())
@@ -1136,14 +1142,18 @@ export function ConditionInput({
let processedCode = codeToHighlight let processedCode = codeToHighlight
processedCode = processedCode.replace(createEnvVarPattern(), (match) => { processedCode = processedCode.replace(createEnvVarPattern(), (match) => {
const placeholder = `__ENV_VAR_${placeholders.length}__` const varName = match.slice(2, -2).trim()
placeholders.push({ if (shouldHighlightEnvVar(varName)) {
placeholder, const placeholder = `__ENV_VAR_${placeholders.length}__`
original: match, placeholders.push({
type: 'env', placeholder,
shouldHighlight: true, original: match,
}) type: 'env',
return placeholder shouldHighlight: true,
})
return placeholder
}
return match
}) })
processedCode = processedCode.replace( processedCode = processedCode.replace(

View File

@@ -7,6 +7,7 @@ import { createCombinedPattern } from '@/executor/utils/reference-validation'
export interface HighlightContext { export interface HighlightContext {
accessiblePrefixes?: Set<string> accessiblePrefixes?: Set<string>
availableEnvVars?: Set<string>
highlightAll?: boolean highlightAll?: boolean
} }
@@ -43,9 +44,17 @@ export function formatDisplayText(text: string, context?: HighlightContext): Rea
return false return false
} }
const shouldHighlightEnvVar = (varName: string): boolean => {
if (context?.highlightAll) {
return true
}
if (context?.availableEnvVars === undefined) {
return true
}
return context.availableEnvVars.has(varName)
}
const nodes: ReactNode[] = [] const nodes: ReactNode[] = []
// Match variable references without allowing nested brackets to prevent matching across references
// e.g., "<3. text <real.ref>" should match "<3" and "<real.ref>", not the whole string
const regex = createCombinedPattern() const regex = createCombinedPattern()
let lastIndex = 0 let lastIndex = 0
let key = 0 let key = 0
@@ -65,11 +74,16 @@ export function formatDisplayText(text: string, context?: HighlightContext): Rea
} }
if (matchText.startsWith(REFERENCE.ENV_VAR_START)) { if (matchText.startsWith(REFERENCE.ENV_VAR_START)) {
nodes.push( const varName = matchText.slice(2, -2).trim()
<span key={key++} className='text-[var(--brand-secondary)]'> if (shouldHighlightEnvVar(varName)) {
{matchText} nodes.push(
</span> <span key={key++} className='text-[var(--brand-secondary)]'>
) {matchText}
</span>
)
} else {
nodes.push(<span key={key++}>{matchText}</span>)
}
} else { } else {
const split = splitReferenceSegment(matchText) const split = splitReferenceSegment(matchText)

View File

@@ -19,6 +19,9 @@ interface TextProps {
* - Automatically detects and renders HTML content safely * - Automatically detects and renders HTML content safely
* - Applies prose styling for HTML content (links, code, lists, etc.) * - Applies prose styling for HTML content (links, code, lists, etc.)
* - Falls back to plain text rendering for non-HTML content * - Falls back to plain text rendering for non-HTML content
*
* Note: This component renders trusted, internally-defined content only
* (e.g., trigger setup instructions). It is NOT used for user-generated content.
*/ */
export function Text({ blockId, subBlockId, content, className }: TextProps) { export function Text({ blockId, subBlockId, content, className }: TextProps) {
const containsHtml = /<[^>]+>/.test(content) const containsHtml = /<[^>]+>/.test(content)

View File

@@ -1,4 +1,5 @@
import { useCallback, useRef, useState } from 'react' import { useCallback, useMemo, useRef, useState } from 'react'
import { useParams } from 'next/navigation'
import { highlight, languages } from '@/components/emcn' import { highlight, languages } from '@/components/emcn'
import { import {
isLikelyReferenceSegment, isLikelyReferenceSegment,
@@ -9,6 +10,7 @@ import { checkTagTrigger } from '@/app/workspace/[workspaceId]/w/[workflowId]/co
import { useAccessibleReferencePrefixes } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-accessible-reference-prefixes' import { useAccessibleReferencePrefixes } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-accessible-reference-prefixes'
import { normalizeName, REFERENCE } from '@/executor/constants' import { normalizeName, REFERENCE } from '@/executor/constants'
import { createEnvVarPattern, createReferencePattern } from '@/executor/utils/reference-validation' import { createEnvVarPattern, createReferencePattern } from '@/executor/utils/reference-validation'
import { createShouldHighlightEnvVar, useAvailableEnvVarKeys } from '@/hooks/use-available-env-vars'
import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow' import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'
import { useWorkflowStore } from '@/stores/workflows/workflow/store' import { useWorkflowStore } from '@/stores/workflows/workflow/store'
import type { BlockState } from '@/stores/workflows/workflow/types' import type { BlockState } from '@/stores/workflows/workflow/types'
@@ -53,6 +55,9 @@ const SUBFLOW_CONFIG = {
* @returns Subflow editor state and handlers * @returns Subflow editor state and handlers
*/ */
export function useSubflowEditor(currentBlock: BlockState | null, currentBlockId: string | null) { export function useSubflowEditor(currentBlock: BlockState | null, currentBlockId: string | null) {
const params = useParams()
const workspaceId = params.workspaceId as string
const textareaRef = useRef<HTMLTextAreaElement | null>(null) const textareaRef = useRef<HTMLTextAreaElement | null>(null)
const editorContainerRef = useRef<HTMLDivElement>(null) const editorContainerRef = useRef<HTMLDivElement>(null)
@@ -81,6 +86,13 @@ export function useSubflowEditor(currentBlock: BlockState | null, currentBlockId
// Get accessible prefixes for tag dropdown // Get accessible prefixes for tag dropdown
const accessiblePrefixes = useAccessibleReferencePrefixes(currentBlockId || '') const accessiblePrefixes = useAccessibleReferencePrefixes(currentBlockId || '')
// Get available env vars for highlighting validation
const availableEnvVars = useAvailableEnvVarKeys(workspaceId)
const shouldHighlightEnvVar = useMemo(
() => createShouldHighlightEnvVar(availableEnvVars),
[availableEnvVars]
)
// Collaborative actions // Collaborative actions
const { const {
collaborativeUpdateLoopType, collaborativeUpdateLoopType,
@@ -140,9 +152,13 @@ export function useSubflowEditor(currentBlock: BlockState | null, currentBlockId
let processedCode = code let processedCode = code
processedCode = processedCode.replace(createEnvVarPattern(), (match) => { processedCode = processedCode.replace(createEnvVarPattern(), (match) => {
const placeholder = `__ENV_VAR_${placeholders.length}__` const varName = match.slice(2, -2).trim()
placeholders.push({ placeholder, original: match, type: 'env' }) if (shouldHighlightEnvVar(varName)) {
return placeholder const placeholder = `__ENV_VAR_${placeholders.length}__`
placeholders.push({ placeholder, original: match, type: 'env' })
return placeholder
}
return match
}) })
// Use [^<>]+ to prevent matching across nested brackets (e.g., "<3 <real.ref>" should match separately) // Use [^<>]+ to prevent matching across nested brackets (e.g., "<3 <real.ref>" should match separately)
@@ -174,7 +190,7 @@ export function useSubflowEditor(currentBlock: BlockState | null, currentBlockId
return highlightedCode return highlightedCode
}, },
[shouldHighlightReference] [shouldHighlightReference, shouldHighlightEnvVar]
) )
/** /**

View File

@@ -7,7 +7,7 @@ import { useStoreWithEqualityFn } from 'zustand/traditional'
import { Badge, Tooltip } from '@/components/emcn' import { Badge, Tooltip } from '@/components/emcn'
import { cn } from '@/lib/core/utils/cn' import { cn } from '@/lib/core/utils/cn'
import { getBaseUrl } from '@/lib/core/utils/urls' import { getBaseUrl } from '@/lib/core/utils/urls'
import { createMcpToolId } from '@/lib/mcp/utils' import { createMcpToolId } from '@/lib/mcp/shared'
import { getProviderIdFromServiceId } from '@/lib/oauth' import { getProviderIdFromServiceId } from '@/lib/oauth'
import { BLOCK_DIMENSIONS, HANDLE_POSITIONS } from '@/lib/workflows/blocks/block-dimensions' import { BLOCK_DIMENSIONS, HANDLE_POSITIONS } from '@/lib/workflows/blocks/block-dimensions'
import { import {

View File

@@ -16,6 +16,7 @@ import {
} from '@/lib/workflows/triggers/triggers' } from '@/lib/workflows/triggers/triggers'
import { useCurrentWorkflow } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-current-workflow' import { useCurrentWorkflow } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-current-workflow'
import type { BlockLog, ExecutionResult, StreamingExecution } from '@/executor/types' import type { BlockLog, ExecutionResult, StreamingExecution } from '@/executor/types'
import { hasExecutionResult } from '@/executor/utils/errors'
import { coerceValue } from '@/executor/utils/start-block' import { coerceValue } from '@/executor/utils/start-block'
import { subscriptionKeys } from '@/hooks/queries/subscription' import { subscriptionKeys } from '@/hooks/queries/subscription'
import { useExecutionStream } from '@/hooks/use-execution-stream' import { useExecutionStream } from '@/hooks/use-execution-stream'
@@ -76,17 +77,6 @@ function normalizeErrorMessage(error: unknown): string {
return WORKFLOW_EXECUTION_FAILURE_MESSAGE return WORKFLOW_EXECUTION_FAILURE_MESSAGE
} }
function isExecutionResult(value: unknown): value is ExecutionResult {
if (!isRecord(value)) return false
return typeof value.success === 'boolean' && isRecord(value.output)
}
function extractExecutionResult(error: unknown): ExecutionResult | null {
if (!isRecord(error)) return null
const candidate = error.executionResult
return isExecutionResult(candidate) ? candidate : null
}
export function useWorkflowExecution() { export function useWorkflowExecution() {
const queryClient = useQueryClient() const queryClient = useQueryClient()
const currentWorkflow = useCurrentWorkflow() const currentWorkflow = useCurrentWorkflow()
@@ -1138,11 +1128,11 @@ export function useWorkflowExecution() {
const handleExecutionError = (error: unknown, options?: { executionId?: string }) => { const handleExecutionError = (error: unknown, options?: { executionId?: string }) => {
const normalizedMessage = normalizeErrorMessage(error) const normalizedMessage = normalizeErrorMessage(error)
const executionResultFromError = extractExecutionResult(error)
let errorResult: ExecutionResult let errorResult: ExecutionResult
if (executionResultFromError) { if (hasExecutionResult(error)) {
const executionResultFromError = error.executionResult
const logs = Array.isArray(executionResultFromError.logs) ? executionResultFromError.logs : [] const logs = Array.isArray(executionResultFromError.logs) ? executionResultFromError.logs : []
errorResult = { errorResult = {

View File

@@ -448,7 +448,7 @@ export const SearchModal = memo(function SearchModal({
}, [workspaces, workflows, pages, blocks, triggers, tools, toolOperations, docs]) }, [workspaces, workflows, pages, blocks, triggers, tools, toolOperations, docs])
const sectionOrder = useMemo<SearchItem['type'][]>( const sectionOrder = useMemo<SearchItem['type'][]>(
() => ['block', 'tool', 'tool-operation', 'trigger', 'workflow', 'workspace', 'page', 'doc'], () => ['block', 'tool', 'trigger', 'doc', 'tool-operation', 'workflow', 'workspace', 'page'],
[] []
) )

View File

@@ -102,6 +102,47 @@ function calculateAliasScore(
return { score: 0, matchType: null } return { score: 0, matchType: null }
} }
/**
* Calculate multi-word match score
* Each word in the query must appear somewhere in the field
* Returns a score based on how well the words match
*/
function calculateMultiWordScore(
queryWords: string[],
field: string
): { score: number; matchType: 'word-boundary' | 'substring' | null } {
const normalizedField = field.toLowerCase().trim()
const fieldWords = normalizedField.split(/[\s\-_/:]+/)
let allWordsMatch = true
let totalScore = 0
let hasWordBoundary = false
for (const queryWord of queryWords) {
const wordBoundaryMatch = fieldWords.some((fw) => fw.startsWith(queryWord))
const substringMatch = normalizedField.includes(queryWord)
if (wordBoundaryMatch) {
totalScore += SCORE_WORD_BOUNDARY
hasWordBoundary = true
} else if (substringMatch) {
totalScore += SCORE_SUBSTRING_MATCH
} else {
allWordsMatch = false
break
}
}
if (!allWordsMatch) {
return { score: 0, matchType: null }
}
return {
score: totalScore / queryWords.length,
matchType: hasWordBoundary ? 'word-boundary' : 'substring',
}
}
/** /**
* Search items using tiered matching algorithm * Search items using tiered matching algorithm
* Returns items sorted by relevance (highest score first) * Returns items sorted by relevance (highest score first)
@@ -117,6 +158,8 @@ export function searchItems<T extends SearchableItem>(
} }
const results: SearchResult<T>[] = [] const results: SearchResult<T>[] = []
const queryWords = normalizedQuery.toLowerCase().split(/\s+/).filter(Boolean)
const isMultiWord = queryWords.length > 1
for (const item of items) { for (const item of items) {
const nameMatch = calculateFieldScore(normalizedQuery, item.name) const nameMatch = calculateFieldScore(normalizedQuery, item.name)
@@ -127,16 +170,35 @@ export function searchItems<T extends SearchableItem>(
const aliasMatch = calculateAliasScore(normalizedQuery, item.aliases) const aliasMatch = calculateAliasScore(normalizedQuery, item.aliases)
const nameScore = nameMatch.score let nameScore = nameMatch.score
const descScore = descMatch.score * DESCRIPTION_WEIGHT let descScore = descMatch.score * DESCRIPTION_WEIGHT
const aliasScore = aliasMatch.score const aliasScore = aliasMatch.score
let bestMatchType = nameMatch.matchType
// For multi-word queries, also try matching each word independently and take the better score
if (isMultiWord) {
const multiWordNameMatch = calculateMultiWordScore(queryWords, item.name)
if (multiWordNameMatch.score > nameScore) {
nameScore = multiWordNameMatch.score
bestMatchType = multiWordNameMatch.matchType
}
if (item.description) {
const multiWordDescMatch = calculateMultiWordScore(queryWords, item.description)
const multiWordDescScore = multiWordDescMatch.score * DESCRIPTION_WEIGHT
if (multiWordDescScore > descScore) {
descScore = multiWordDescScore
}
}
}
const bestScore = Math.max(nameScore, descScore, aliasScore) const bestScore = Math.max(nameScore, descScore, aliasScore)
if (bestScore > 0) { if (bestScore > 0) {
let matchType: SearchResult<T>['matchType'] = 'substring' let matchType: SearchResult<T>['matchType'] = 'substring'
if (nameScore >= descScore && nameScore >= aliasScore) { if (nameScore >= descScore && nameScore >= aliasScore) {
matchType = nameMatch.matchType || 'substring' matchType = bestMatchType || 'substring'
} else if (aliasScore >= descScore) { } else if (aliasScore >= descScore) {
matchType = 'alias' matchType = 'alias'
} else { } else {

View File

@@ -39,6 +39,7 @@ import {
useRefreshMcpServer, useRefreshMcpServer,
useStoredMcpTools, useStoredMcpTools,
} from '@/hooks/queries/mcp' } from '@/hooks/queries/mcp'
import { useAvailableEnvVarKeys } from '@/hooks/use-available-env-vars'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store' import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useSubBlockStore } from '@/stores/workflows/subblock/store' import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { FormField, McpServerSkeleton } from './components' import { FormField, McpServerSkeleton } from './components'
@@ -157,6 +158,7 @@ interface FormattedInputProps {
scrollLeft: number scrollLeft: number
showEnvVars: boolean showEnvVars: boolean
envVarProps: EnvVarDropdownConfig envVarProps: EnvVarDropdownConfig
availableEnvVars?: Set<string>
className?: string className?: string
onChange: (e: React.ChangeEvent<HTMLInputElement>) => void onChange: (e: React.ChangeEvent<HTMLInputElement>) => void
onScroll: (scrollLeft: number) => void onScroll: (scrollLeft: number) => void
@@ -169,6 +171,7 @@ function FormattedInput({
scrollLeft, scrollLeft,
showEnvVars, showEnvVars,
envVarProps, envVarProps,
availableEnvVars,
className, className,
onChange, onChange,
onScroll, onScroll,
@@ -190,7 +193,7 @@ function FormattedInput({
/> />
<div className='pointer-events-none absolute inset-0 flex items-center overflow-hidden px-[8px] py-[6px] font-medium font-sans text-sm'> <div className='pointer-events-none absolute inset-0 flex items-center overflow-hidden px-[8px] py-[6px] font-medium font-sans text-sm'>
<div className='whitespace-nowrap' style={{ transform: `translateX(-${scrollLeft}px)` }}> <div className='whitespace-nowrap' style={{ transform: `translateX(-${scrollLeft}px)` }}>
{formatDisplayText(value)} {formatDisplayText(value, { availableEnvVars })}
</div> </div>
</div> </div>
{showEnvVars && ( {showEnvVars && (
@@ -221,6 +224,7 @@ interface HeaderRowProps {
envSearchTerm: string envSearchTerm: string
cursorPosition: number cursorPosition: number
workspaceId: string workspaceId: string
availableEnvVars?: Set<string>
onInputChange: (field: InputFieldType, value: string, index?: number) => void onInputChange: (field: InputFieldType, value: string, index?: number) => void
onHeaderScroll: (key: string, scrollLeft: number) => void onHeaderScroll: (key: string, scrollLeft: number) => void
onEnvVarSelect: (value: string) => void onEnvVarSelect: (value: string) => void
@@ -238,6 +242,7 @@ function HeaderRow({
envSearchTerm, envSearchTerm,
cursorPosition, cursorPosition,
workspaceId, workspaceId,
availableEnvVars,
onInputChange, onInputChange,
onHeaderScroll, onHeaderScroll,
onEnvVarSelect, onEnvVarSelect,
@@ -265,6 +270,7 @@ function HeaderRow({
scrollLeft={headerScrollLeft[`key-${index}`] || 0} scrollLeft={headerScrollLeft[`key-${index}`] || 0}
showEnvVars={isKeyActive} showEnvVars={isKeyActive}
envVarProps={envVarProps} envVarProps={envVarProps}
availableEnvVars={availableEnvVars}
className='flex-1' className='flex-1'
onChange={(e) => onInputChange('header-key', e.target.value, index)} onChange={(e) => onInputChange('header-key', e.target.value, index)}
onScroll={(scrollLeft) => onHeaderScroll(`key-${index}`, scrollLeft)} onScroll={(scrollLeft) => onHeaderScroll(`key-${index}`, scrollLeft)}
@@ -276,6 +282,7 @@ function HeaderRow({
scrollLeft={headerScrollLeft[`value-${index}`] || 0} scrollLeft={headerScrollLeft[`value-${index}`] || 0}
showEnvVars={isValueActive} showEnvVars={isValueActive}
envVarProps={envVarProps} envVarProps={envVarProps}
availableEnvVars={availableEnvVars}
className='flex-1' className='flex-1'
onChange={(e) => onInputChange('header-value', e.target.value, index)} onChange={(e) => onInputChange('header-value', e.target.value, index)}
onScroll={(scrollLeft) => onHeaderScroll(`value-${index}`, scrollLeft)} onScroll={(scrollLeft) => onHeaderScroll(`value-${index}`, scrollLeft)}
@@ -371,6 +378,7 @@ export function MCP({ initialServerId }: MCPProps) {
const deleteServerMutation = useDeleteMcpServer() const deleteServerMutation = useDeleteMcpServer()
const refreshServerMutation = useRefreshMcpServer() const refreshServerMutation = useRefreshMcpServer()
const { testResult, isTestingConnection, testConnection, clearTestResult } = useMcpServerTest() const { testResult, isTestingConnection, testConnection, clearTestResult } = useMcpServerTest()
const availableEnvVars = useAvailableEnvVarKeys(workspaceId)
const urlInputRef = useRef<HTMLInputElement>(null) const urlInputRef = useRef<HTMLInputElement>(null)
@@ -1061,6 +1069,7 @@ export function MCP({ initialServerId }: MCPProps) {
onSelect: handleEnvVarSelect, onSelect: handleEnvVarSelect,
onClose: resetEnvVarState, onClose: resetEnvVarState,
}} }}
availableEnvVars={availableEnvVars}
onChange={(e) => handleInputChange('url', e.target.value)} onChange={(e) => handleInputChange('url', e.target.value)}
onScroll={(scrollLeft) => handleUrlScroll(scrollLeft)} onScroll={(scrollLeft) => handleUrlScroll(scrollLeft)}
/> />
@@ -1094,6 +1103,7 @@ export function MCP({ initialServerId }: MCPProps) {
envSearchTerm={envSearchTerm} envSearchTerm={envSearchTerm}
cursorPosition={cursorPosition} cursorPosition={cursorPosition}
workspaceId={workspaceId} workspaceId={workspaceId}
availableEnvVars={availableEnvVars}
onInputChange={handleInputChange} onInputChange={handleInputChange}
onHeaderScroll={handleHeaderScroll} onHeaderScroll={handleHeaderScroll}
onEnvVarSelect={handleEnvVarSelect} onEnvVarSelect={handleEnvVarSelect}

View File

@@ -6,8 +6,8 @@ import { useParams } from 'next/navigation'
import { Combobox, Label, Switch, Tooltip } from '@/components/emcn' import { Combobox, Label, Switch, Tooltip } from '@/components/emcn'
import { Skeleton } from '@/components/ui' import { Skeleton } from '@/components/ui'
import { useSession } from '@/lib/auth/auth-client' import { useSession } from '@/lib/auth/auth-client'
import { USAGE_THRESHOLDS } from '@/lib/billing/client/consts'
import { useSubscriptionUpgrade } from '@/lib/billing/client/upgrade' import { useSubscriptionUpgrade } from '@/lib/billing/client/upgrade'
import { USAGE_THRESHOLDS } from '@/lib/billing/client/usage-visualization'
import { getEffectiveSeats } from '@/lib/billing/subscriptions/utils' import { getEffectiveSeats } from '@/lib/billing/subscriptions/utils'
import { cn } from '@/lib/core/utils/cn' import { cn } from '@/lib/core/utils/cn'
import { getBaseUrl } from '@/lib/core/utils/urls' import { getBaseUrl } from '@/lib/core/utils/urls'

View File

@@ -2,11 +2,7 @@
import type { ReactNode } from 'react' import type { ReactNode } from 'react'
import { Badge } from '@/components/emcn' import { Badge } from '@/components/emcn'
import { import { getFilledPillColor, USAGE_PILL_COLORS, USAGE_THRESHOLDS } from '@/lib/billing/client'
getFilledPillColor,
USAGE_PILL_COLORS,
USAGE_THRESHOLDS,
} from '@/lib/billing/client/usage-visualization'
const PILL_COUNT = 5 const PILL_COUNT = 5

View File

@@ -5,13 +5,14 @@ import { createLogger } from '@sim/logger'
import { useQueryClient } from '@tanstack/react-query' import { useQueryClient } from '@tanstack/react-query'
import { Badge } from '@/components/emcn' import { Badge } from '@/components/emcn'
import { Skeleton } from '@/components/ui' import { Skeleton } from '@/components/ui'
import { USAGE_PILL_COLORS, USAGE_THRESHOLDS } from '@/lib/billing/client/consts'
import { useSubscriptionUpgrade } from '@/lib/billing/client/upgrade' import { useSubscriptionUpgrade } from '@/lib/billing/client/upgrade'
import { import {
getBillingStatus,
getFilledPillColor, getFilledPillColor,
USAGE_PILL_COLORS, getSubscriptionStatus,
USAGE_THRESHOLDS, getUsage,
} from '@/lib/billing/client/usage-visualization' } from '@/lib/billing/client/utils'
import { getBillingStatus, getSubscriptionStatus, getUsage } from '@/lib/billing/client/utils'
import { useContextMenu } from '@/app/workspace/[workspaceId]/w/components/sidebar/hooks' import { useContextMenu } from '@/app/workspace/[workspaceId]/w/components/sidebar/hooks'
import { useSocket } from '@/app/workspace/providers/socket-provider' import { useSocket } from '@/app/workspace/providers/socket-provider'
import { subscriptionKeys, useSubscriptionData } from '@/hooks/queries/subscription' import { subscriptionKeys, useSubscriptionData } from '@/hooks/queries/subscription'

View File

@@ -4,8 +4,6 @@ import { task } from '@trigger.dev/sdk'
import { Cron } from 'croner' import { Cron } from 'croner'
import { eq } from 'drizzle-orm' import { eq } from 'drizzle-orm'
import { v4 as uuidv4 } from 'uuid' import { v4 as uuidv4 } from 'uuid'
import type { ZodRecord, ZodString } from 'zod'
import { getPersonalAndWorkspaceEnv } from '@/lib/environment/utils'
import { preprocessExecution } from '@/lib/execution/preprocessing' import { preprocessExecution } from '@/lib/execution/preprocessing'
import { LoggingSession } from '@/lib/logs/execution/logging-session' import { LoggingSession } from '@/lib/logs/execution/logging-session'
import { buildTraceSpans } from '@/lib/logs/execution/trace-spans/trace-spans' import { buildTraceSpans } from '@/lib/logs/execution/trace-spans/trace-spans'
@@ -23,7 +21,7 @@ import {
} from '@/lib/workflows/schedules/utils' } from '@/lib/workflows/schedules/utils'
import { ExecutionSnapshot } from '@/executor/execution/snapshot' import { ExecutionSnapshot } from '@/executor/execution/snapshot'
import type { ExecutionMetadata } from '@/executor/execution/types' import type { ExecutionMetadata } from '@/executor/execution/types'
import type { ExecutionResult } from '@/executor/types' import { hasExecutionResult } from '@/executor/utils/errors'
import { MAX_CONSECUTIVE_FAILURES } from '@/triggers/constants' import { MAX_CONSECUTIVE_FAILURES } from '@/triggers/constants'
const logger = createLogger('TriggerScheduleExecution') const logger = createLogger('TriggerScheduleExecution')
@@ -122,7 +120,6 @@ async function runWorkflowExecution({
loggingSession, loggingSession,
requestId, requestId,
executionId, executionId,
EnvVarsSchema,
}: { }: {
payload: ScheduleExecutionPayload payload: ScheduleExecutionPayload
workflowRecord: WorkflowRecord workflowRecord: WorkflowRecord
@@ -130,7 +127,6 @@ async function runWorkflowExecution({
loggingSession: LoggingSession loggingSession: LoggingSession
requestId: string requestId: string
executionId: string executionId: string
EnvVarsSchema: ZodRecord<ZodString, ZodString>
}): Promise<RunWorkflowResult> { }): Promise<RunWorkflowResult> {
try { try {
logger.debug(`[${requestId}] Loading deployed workflow ${payload.workflowId}`) logger.debug(`[${requestId}] Loading deployed workflow ${payload.workflowId}`)
@@ -156,31 +152,12 @@ async function runWorkflowExecution({
throw new Error(`Workflow ${payload.workflowId} has no associated workspace`) throw new Error(`Workflow ${payload.workflowId} has no associated workspace`)
} }
const personalEnvUserId = workflowRecord.userId
const { personalEncrypted, workspaceEncrypted } = await getPersonalAndWorkspaceEnv(
personalEnvUserId,
workspaceId
)
const variables = EnvVarsSchema.parse({
...personalEncrypted,
...workspaceEncrypted,
})
const input = { const input = {
_context: { _context: {
workflowId: payload.workflowId, workflowId: payload.workflowId,
}, },
} }
await loggingSession.safeStart({
userId: actorUserId,
workspaceId,
variables: variables || {},
deploymentVersionId,
})
const metadata: ExecutionMetadata = { const metadata: ExecutionMetadata = {
requestId, requestId,
executionId, executionId,
@@ -254,8 +231,7 @@ async function runWorkflowExecution({
} catch (error: unknown) { } catch (error: unknown) {
logger.error(`[${requestId}] Early failure in scheduled workflow ${payload.workflowId}`, error) logger.error(`[${requestId}] Early failure in scheduled workflow ${payload.workflowId}`, error)
const errorWithResult = error as { executionResult?: ExecutionResult } const executionResult = hasExecutionResult(error) ? error.executionResult : undefined
const executionResult = errorWithResult?.executionResult
const { traceSpans } = executionResult ? buildTraceSpans(executionResult) : { traceSpans: [] } const { traceSpans } = executionResult ? buildTraceSpans(executionResult) : { traceSpans: [] }
await loggingSession.safeCompleteWithError({ await loggingSession.safeCompleteWithError({
@@ -279,7 +255,6 @@ export type ScheduleExecutionPayload = {
failedCount?: number failedCount?: number
now: string now: string
scheduledFor?: string scheduledFor?: string
preflighted?: boolean
} }
function calculateNextRunTime( function calculateNextRunTime(
@@ -319,9 +294,6 @@ export async function executeScheduleJob(payload: ScheduleExecutionPayload) {
executionId, executionId,
}) })
const zod = await import('zod')
const EnvVarsSchema = zod.z.record(zod.z.string())
try { try {
const loggingSession = new LoggingSession( const loggingSession = new LoggingSession(
payload.workflowId, payload.workflowId,
@@ -339,7 +311,6 @@ export async function executeScheduleJob(payload: ScheduleExecutionPayload) {
checkRateLimit: true, checkRateLimit: true,
checkDeployment: true, checkDeployment: true,
loggingSession, loggingSession,
preflightEnvVars: !payload.preflighted,
}) })
if (!preprocessResult.success) { if (!preprocessResult.success) {
@@ -482,7 +453,6 @@ export async function executeScheduleJob(payload: ScheduleExecutionPayload) {
loggingSession, loggingSession,
requestId, requestId,
executionId, executionId,
EnvVarsSchema,
}) })
if (executionResult.status === 'skip') { if (executionResult.status === 'skip') {

View File

@@ -16,7 +16,8 @@ import { loadDeployedWorkflowState } from '@/lib/workflows/persistence/utils'
import { getWorkflowById } from '@/lib/workflows/utils' import { getWorkflowById } from '@/lib/workflows/utils'
import { ExecutionSnapshot } from '@/executor/execution/snapshot' import { ExecutionSnapshot } from '@/executor/execution/snapshot'
import type { ExecutionMetadata } from '@/executor/execution/types' import type { ExecutionMetadata } from '@/executor/execution/types'
import type { ExecutionResult } from '@/executor/types' import { hasExecutionResult } from '@/executor/utils/errors'
import { safeAssign } from '@/tools/safe-assign'
import { getTrigger, isTriggerValid } from '@/triggers' import { getTrigger, isTriggerValid } from '@/triggers'
const logger = createLogger('TriggerWebhookExecution') const logger = createLogger('TriggerWebhookExecution')
@@ -397,7 +398,7 @@ async function executeWebhookJobInternal(
requestId, requestId,
userId: payload.userId, userId: payload.userId,
}) })
Object.assign(input, processedInput) safeAssign(input, processedInput as Record<string, unknown>)
} }
} else { } else {
logger.debug(`[${requestId}] No valid triggerId found for block ${payload.blockId}`) logger.debug(`[${requestId}] No valid triggerId found for block ${payload.blockId}`)
@@ -577,12 +578,13 @@ async function executeWebhookJobInternal(
deploymentVersionId, deploymentVersionId,
}) })
const errorWithResult = error as { executionResult?: ExecutionResult } const executionResult = hasExecutionResult(error)
const executionResult = errorWithResult?.executionResult || { ? error.executionResult
success: false, : {
output: {}, success: false,
logs: [], output: {},
} logs: [],
}
const { traceSpans } = buildTraceSpans(executionResult) const { traceSpans } = buildTraceSpans(executionResult)
await loggingSession.safeCompleteWithError({ await loggingSession.safeCompleteWithError({

View File

@@ -9,7 +9,7 @@ import { PauseResumeManager } from '@/lib/workflows/executor/human-in-the-loop-m
import { getWorkflowById } from '@/lib/workflows/utils' import { getWorkflowById } from '@/lib/workflows/utils'
import { ExecutionSnapshot } from '@/executor/execution/snapshot' import { ExecutionSnapshot } from '@/executor/execution/snapshot'
import type { ExecutionMetadata } from '@/executor/execution/types' import type { ExecutionMetadata } from '@/executor/execution/types'
import type { ExecutionResult } from '@/executor/types' import { hasExecutionResult } from '@/executor/utils/errors'
import type { CoreTriggerType } from '@/stores/logs/filters/types' import type { CoreTriggerType } from '@/stores/logs/filters/types'
const logger = createLogger('TriggerWorkflowExecution') const logger = createLogger('TriggerWorkflowExecution')
@@ -20,7 +20,6 @@ export type WorkflowExecutionPayload = {
input?: any input?: any
triggerType?: CoreTriggerType triggerType?: CoreTriggerType
metadata?: Record<string, any> metadata?: Record<string, any>
preflighted?: boolean
} }
/** /**
@@ -52,7 +51,6 @@ export async function executeWorkflowJob(payload: WorkflowExecutionPayload) {
checkRateLimit: true, checkRateLimit: true,
checkDeployment: true, checkDeployment: true,
loggingSession: loggingSession, loggingSession: loggingSession,
preflightEnvVars: !payload.preflighted,
}) })
if (!preprocessResult.success) { if (!preprocessResult.success) {
@@ -162,8 +160,7 @@ export async function executeWorkflowJob(payload: WorkflowExecutionPayload) {
executionId, executionId,
}) })
const errorWithResult = error as { executionResult?: ExecutionResult } const executionResult = hasExecutionResult(error) ? error.executionResult : undefined
const executionResult = errorWithResult?.executionResult
const { traceSpans } = executionResult ? buildTraceSpans(executionResult) : { traceSpans: [] } const { traceSpans } = executionResult ? buildTraceSpans(executionResult) : { traceSpans: [] }
await loggingSession.safeCompleteWithError({ await loggingSession.safeCompleteWithError({

View File

@@ -1,5 +1,5 @@
import { McpIcon } from '@/components/icons' import { McpIcon } from '@/components/icons'
import { createMcpToolId } from '@/lib/mcp/utils' import { createMcpToolId } from '@/lib/mcp/shared'
import type { BlockConfig } from '@/blocks/types' import type { BlockConfig } from '@/blocks/types'
import type { ToolResponse } from '@/tools/types' import type { ToolResponse } from '@/tools/types'

View File

@@ -0,0 +1,51 @@
'use client'
import { useState } from 'react'
import { ArrowRight, ChevronRight } from 'lucide-react'
interface ContactButtonProps {
  href: string
  children: React.ReactNode
}

/** Static styling for the gradient pill CTA link (hover only toggles opacity). */
const BASE_STYLE: React.CSSProperties = {
  display: 'inline-flex',
  alignItems: 'center',
  gap: '4px',
  borderRadius: '10px',
  background: 'linear-gradient(to bottom, #8357ff, #6f3dfa)',
  border: '1px solid #6f3dfa',
  boxShadow: 'inset 0 2px 4px 0 #9b77ff',
  paddingTop: '6px',
  paddingBottom: '6px',
  paddingLeft: '12px',
  paddingRight: '10px',
  fontSize: '15px',
  fontWeight: 500,
  color: '#ffffff',
  textDecoration: 'none',
  transition: 'opacity 200ms',
}

/**
 * External-link call-to-action button.
 *
 * Renders `children` inside a gradient pill that opens `href` in a new tab.
 * On hover the button dims slightly and the trailing chevron swaps to a
 * right arrow.
 */
export function ContactButton({ href, children }: ContactButtonProps) {
  const [hovered, setHovered] = useState(false)

  // Swap the trailing icon based on hover state.
  const TrailingIcon = hovered ? ArrowRight : ChevronRight

  return (
    <a
      href={href}
      target='_blank'
      rel='noopener noreferrer'
      onMouseEnter={() => setHovered(true)}
      onMouseLeave={() => setHovered(false)}
      style={{ ...BASE_STYLE, opacity: hovered ? 0.9 : 1 }}
    >
      {children}
      <span style={{ display: 'inline-flex' }}>
        <TrailingIcon style={{ height: '16px', width: '16px' }} aria-hidden='true' />
      </span>
    </a>
  )
}

View File

@@ -0,0 +1,177 @@
---
slug: enterprise
title: 'Build with Sim for Enterprise'
description: 'Access control, BYOK, self-hosted deployments, on-prem Copilot, SSO & SAML, whitelabeling, Admin API, and flexible data retention—enterprise features for teams with strict security and compliance requirements.'
date: 2026-01-23
updated: 2026-01-23
authors:
- vik
readingTime: 10
tags: [Enterprise, Security, Self-Hosted, SSO, SAML, Compliance, BYOK, Access Control, Copilot, Whitelabel, API, Import, Export]
ogImage: /studio/enterprise/cover.png
ogAlt: 'Sim Enterprise features overview'
about: ['Enterprise Software', 'Security', 'Compliance', 'Self-Hosting']
timeRequired: PT10M
canonical: https://sim.ai/studio/enterprise
featured: false
draft: true
---
We've been working with security teams at larger organizations to bring Sim into environments with strict compliance and data handling requirements. This post covers the enterprise capabilities we've built: granular access control, bring-your-own-keys, self-hosted deployments, on-prem Copilot, SSO & SAML, whitelabeling, compliance, and programmatic management via the Admin API.
## Access Control
![Access Control Settings](/studio/enterprise/access-control.png)
Permission groups let administrators control what features and integrations are available to different teams within an organization. This isn't just UI filtering—restrictions are enforced at the execution layer.
### Model Provider Restrictions
![Model Provider Restrictions](/studio/enterprise/model-providers.png)
Allowlist specific providers while blocking others. Users in a restricted group see only approved providers in the model selector. A workflow that tries to use an unapproved provider won't execute.
This is useful when you've approved certain providers for production use, have negotiated enterprise agreements with specific vendors, or need to comply with data residency requirements that only certain providers meet.
### Integration Controls
![Integration Controls](/studio/enterprise/integration-controls.png)
Restrict which workflow blocks appear in the editor. Disable the HTTP block to prevent arbitrary external API calls. Block access to integrations that haven't completed your security review.
### Platform Feature Toggles
![Platform Feature Toggles](/studio/enterprise/platform-controls.png)
Control access to platform capabilities per permission group:
- **[Knowledge Base](https://docs.sim.ai/blocks/knowledge)** — Disable document uploads if RAG workflows aren't approved
- **[MCP Tools](https://docs.sim.ai/mcp)** — Block deployment of workflows as external tool endpoints
- **Custom Tools** — Prevent creation of arbitrary HTTP integrations
- **Invitations** — Disable self-service team invitations to maintain centralized control
Users not assigned to any permission group have full access, so restrictions are opt-in per team rather than requiring you to grant permissions to everyone.
---
## Bring Your Own Keys
![BYOK Configuration](/studio/enterprise/byok.png)
When you configure your own API keys for model providers—OpenAI, Anthropic, Google, Azure OpenAI, AWS Bedrock, or any supported provider—your prompts and completions route directly between Sim and that provider. The traffic doesn't pass through our infrastructure.
This matters because LLM requests contain the context you've assembled: customer data, internal documents, proprietary business logic. With your own keys, you maintain a direct relationship with your model provider. Their data handling policies and compliance certifications apply to your usage without an intermediary.
BYOK is available to everyone, not just enterprise plans. Connect your credentials in workspace settings, and all model calls use your keys. For self-hosted deployments, this is the default—there are no Sim-managed keys involved.
A healthcare organization can use Azure OpenAI with their BAA-covered subscription. A financial services firm can route through their approved API gateway with additional logging controls. The workflow builder stays the same; only the underlying data flow changes.
---
## Self-Hosted Deployments
![Self-Hosted Architecture](/studio/enterprise/self-hosted.png)
Run Sim entirely on your infrastructure. Deploy with [Docker Compose](https://docs.sim.ai/self-hosting/docker) or [Helm charts](https://docs.sim.ai/self-hosting/kubernetes) for Kubernetes—the application, WebSocket server, and PostgreSQL database all stay within your network.
**Single-node** — Docker Compose setup for smaller teams getting started.
**High availability** — Multi-replica Kubernetes deployments with horizontal pod autoscaling.
**Air-gapped** — No external network access required. Pair with [Ollama](https://docs.sim.ai/self-hosting/ollama) or [vLLM](https://docs.sim.ai/self-hosting/vllm) for local model inference.
Enterprise features like access control, SSO, and organization management are enabled through environment variables—no connection to our billing infrastructure required.
---
## On-Prem Copilot
Copilot—our context-aware AI assistant for building and debugging workflows—can run entirely within your self-hosted deployment using your own LLM keys.
When you configure Copilot with your API credentials, all assistant interactions route directly to your chosen provider. The prompts Copilot generates—which include context from your workflows, execution logs, and workspace configuration—never leave your network. You get the same capabilities as the hosted version: natural language workflow generation, error diagnosis, documentation lookup, and iterative editing through diffs.
This is particularly relevant for organizations where the context Copilot needs to be helpful is also the context that can't leave the building. Your workflow definitions, block configurations, and execution traces stay within your infrastructure even when you're asking Copilot for help debugging a failure or generating a new integration.
---
## SSO & SAML
![SSO Configuration](/studio/enterprise/sso.png)
Integrate with your existing identity provider through SAML 2.0 or OIDC. We support Okta, Azure AD (Entra ID), Google Workspace, OneLogin, Auth0, JumpCloud, Ping Identity, ADFS, and any compliant identity provider.
Once enabled, users authenticate through your IdP instead of Sim credentials. Your MFA policies apply automatically. Session management ties to your IdP—logout there terminates Sim sessions. Account deprovisioning immediately revokes access.
New users are provisioned on first SSO login based on IdP attributes. No invitation emails, no password setup, no manual account creation required.
This centralizes your authentication and audit trail. Your security team's policies apply to Sim access through the same system that tracks everything else.
---
## Whitelabeling
Customize Sim's appearance to match your brand. For self-hosted deployments, whitelabeling is configured through environment variables—no code changes required.
**Brand name & logo** — Replace "Sim" with your company name and logo throughout the interface.
**Theme colors** — Set primary, accent, and background colors to align with your brand palette.
**Support & documentation links** — Point help links to your internal documentation and support channels instead of ours.
**Legal pages** — Redirect terms of service and privacy policy links to your own policies.
This is useful for internal platforms, customer-facing deployments, or any scenario where you want Sim to feel like a native part of your product rather than a third-party tool.
---
## Compliance & Data Retention
![Compliance Certifications](/studio/enterprise/compliance.png)
Sim maintains **SOC 2 Type II** certification with annual audits covering security, availability, and confidentiality controls. We share our SOC 2 report directly with prospective customers under NDA.
**HIPAA** — Business Associate Agreements available for healthcare organizations. Requires self-hosted deployment or dedicated infrastructure.
**Data Retention** — Configure how long workflow execution traces, inputs, and outputs are stored before automatic deletion. We work with enterprise customers to set retention policies that match their compliance requirements.
We provide penetration test reports, architecture documentation, and completed security questionnaires (SIG, CAIQ, and custom formats) for your vendor review process.
---
## Admin API
Manage Sim programmatically through the Admin API. Every operation available in the UI has a corresponding API endpoint, enabling infrastructure-as-code workflows and integration with your existing tooling.
**User & Organization Management** — Provision users, create organizations, assign roles, and manage team membership. Integrate with your HR systems to automatically onboard and offboard employees.
**Workspace Administration** — Create workspaces, configure settings, and manage access. Useful for setting up isolated environments for different teams or clients.
**Workflow Lifecycle** — Deploy, undeploy, and manage workflow versions programmatically. Build CI/CD pipelines that promote workflows from development to staging to production.
The API uses standard REST conventions with JSON payloads. Authentication is via API keys scoped to your organization.
---
## Import & Export
Move workflows between environments, create backups, and maintain version control inside or outside of Sim.
**Workflow Export** — Export individual workflows or entire folders as JSON. The export includes block configurations, connections, environment variable references, and metadata. Use this to back up critical workflows or move them between Sim instances.
**Workspace Export** — Export an entire workspace as a ZIP archive containing all workflows, folder structure, and configuration. Useful for disaster recovery or migrating to a self-hosted deployment.
**Import** — Import workflows into any workspace. Sim handles ID remapping and validates the structure before import. This enables workflow templates, sharing between teams, and restoring from backups.
**Version History** — Each deployment creates a version snapshot. Roll back to previous versions if a deployment causes issues. The Admin API exposes version history for integration with your change management processes.
For teams practicing GitOps, export workflows to your repository and use the Admin API to deploy from CI/CD pipelines.
---
## Get Started
Enterprise features are available now. Check out our [self-hosting](https://docs.sim.ai/self-hosting) and [enterprise](https://docs.sim.ai/enterprise) docs to get started.
*Questions about enterprise deployments?*
<ContactButton href="https://form.typeform.com/to/jqCO12pF">Contact Us</ContactButton>

View File

@@ -29,10 +29,13 @@ Your workspace is indexed for hybrid retrieval. When you ask a question, Copilot
Copilot supports slash commands that trigger specialized capabilities: Copilot supports slash commands that trigger specialized capabilities:
- `/deep-research` — performs multi-step web research on a topic, synthesizing results from multiple sources - `/fast` — uses a faster model for quick responses when you need speed over depth
- `/api-docs` — fetches and parses API documentation from a URL, extracting endpoints, parameters, and authentication requirements - `/research` — performs multi-step web research on a topic, synthesizing results from multiple sources
- `/test` — runs your current workflow with sample inputs and reports results inline - `/actions` — enables agentic mode where Copilot can take actions on your behalf, like modifying blocks or creating workflows
- `/build` — generates a complete workflow from a natural language description, wiring up blocks and configuring integrations - `/search` — searches the web for relevant information
- `/read` — reads and extracts content from a URL
- `/scrape` — scrapes structured data from web pages
- `/crawl` — crawls multiple pages from a website to gather comprehensive information
Use `@` commands to pull specific context into your conversation. `@block` references a specific block's configuration and recent outputs. `@workflow` includes the full workflow structure. `@logs` pulls in recent execution traces. This lets you ask targeted questions like "why is `@Slack1` returning an error?" and Copilot has the exact context it needs to diagnose the issue. Use `@` commands to pull specific context into your conversation. `@block` references a specific block's configuration and recent outputs. `@workflow` includes the full workflow structure. `@logs` pulls in recent execution traces. This lets you ask targeted questions like "why is `@Slack1` returning an error?" and Copilot has the exact context it needs to diagnose the issue.

View File

@@ -0,0 +1,31 @@
import type { TraceSpan } from '@/lib/logs/types'
import type { ExecutionResult } from '@/executor/types'
/** Options used to construct a {@link ChildWorkflowError}. */
interface ChildWorkflowErrorOptions {
  message: string
  childWorkflowName: string
  childTraceSpans?: TraceSpan[]
  executionResult?: ExecutionResult
  cause?: Error
}

/**
 * Error raised when a child workflow execution fails.
 *
 * Carries the child workflow's name, its captured trace spans, and (when
 * available) the partial execution result so the parent workflow can surface
 * nested execution details instead of an opaque failure.
 */
export class ChildWorkflowError extends Error {
  readonly childWorkflowName: string
  readonly childTraceSpans: TraceSpan[]
  readonly executionResult?: ExecutionResult

  constructor({
    message,
    childWorkflowName,
    childTraceSpans,
    executionResult,
    cause,
  }: ChildWorkflowErrorOptions) {
    // Preserve the underlying failure via the standard `cause` option.
    super(message, { cause })
    this.name = 'ChildWorkflowError'
    this.childWorkflowName = childWorkflowName
    // Default to an empty span list so consumers never need a null check.
    this.childTraceSpans = childTraceSpans ?? []
    this.executionResult = executionResult
  }

  /** Type guard: true when the value is a ChildWorkflowError instance. */
  static isChildWorkflowError(error: unknown): error is ChildWorkflowError {
    return error instanceof ChildWorkflowError
  }
}

View File

@@ -1,7 +1,4 @@
import { db } from '@sim/db'
import { mcpServers } from '@sim/db/schema'
import { createLogger } from '@sim/logger' import { createLogger } from '@sim/logger'
import { and, eq, inArray, isNull } from 'drizzle-orm'
import { getBaseUrl } from '@/lib/core/utils/urls' import { getBaseUrl } from '@/lib/core/utils/urls'
import { import {
containsUserFileWithMetadata, containsUserFileWithMetadata,
@@ -16,6 +13,7 @@ import {
isSentinelBlockType, isSentinelBlockType,
} from '@/executor/constants' } from '@/executor/constants'
import type { DAGNode } from '@/executor/dag/builder' import type { DAGNode } from '@/executor/dag/builder'
import { ChildWorkflowError } from '@/executor/errors/child-workflow-error'
import type { BlockStateWriter, ContextExtensions } from '@/executor/execution/types' import type { BlockStateWriter, ContextExtensions } from '@/executor/execution/types'
import { import {
generatePauseContextId, generatePauseContextId,
@@ -86,10 +84,6 @@ export class BlockExecutor {
resolvedInputs = this.resolver.resolveInputs(ctx, node.id, block.config.params, block) resolvedInputs = this.resolver.resolveInputs(ctx, node.id, block.config.params, block)
if (block.metadata?.id === BlockType.AGENT && resolvedInputs.tools) {
resolvedInputs = await this.filterUnavailableMcpToolsForLog(ctx, resolvedInputs)
}
if (blockLog) { if (blockLog) {
blockLog.input = resolvedInputs blockLog.input = resolvedInputs
} }
@@ -220,24 +214,26 @@ export class BlockExecutor {
? resolvedInputs ? resolvedInputs
: ((block.config?.params as Record<string, any> | undefined) ?? {}) : ((block.config?.params as Record<string, any> | undefined) ?? {})
const errorOutput: NormalizedBlockOutput = {
error: errorMessage,
}
if (ChildWorkflowError.isChildWorkflowError(error)) {
errorOutput.childTraceSpans = error.childTraceSpans
errorOutput.childWorkflowName = error.childWorkflowName
}
this.state.setBlockOutput(node.id, errorOutput, duration)
if (blockLog) { if (blockLog) {
blockLog.endedAt = new Date().toISOString() blockLog.endedAt = new Date().toISOString()
blockLog.durationMs = duration blockLog.durationMs = duration
blockLog.success = false blockLog.success = false
blockLog.error = errorMessage blockLog.error = errorMessage
blockLog.input = input blockLog.input = input
blockLog.output = this.filterOutputForLog(block, errorOutput)
} }
const errorOutput: NormalizedBlockOutput = {
error: errorMessage,
}
if (error && typeof error === 'object' && 'childTraceSpans' in error) {
errorOutput.childTraceSpans = (error as any).childTraceSpans
}
this.state.setBlockOutput(node.id, errorOutput, duration)
logger.error( logger.error(
phase === 'input_resolution' ? 'Failed to resolve block inputs' : 'Block execution failed', phase === 'input_resolution' ? 'Failed to resolve block inputs' : 'Block execution failed',
{ {
@@ -437,60 +433,6 @@ export class BlockExecutor {
return undefined return undefined
} }
/**
* Filters out unavailable MCP tools from agent inputs for logging.
* Only includes tools from servers with 'connected' status.
*/
private async filterUnavailableMcpToolsForLog(
ctx: ExecutionContext,
inputs: Record<string, any>
): Promise<Record<string, any>> {
const tools = inputs.tools
if (!Array.isArray(tools) || tools.length === 0) return inputs
const mcpTools = tools.filter((t: any) => t.type === 'mcp')
if (mcpTools.length === 0) return inputs
const serverIds = [
...new Set(mcpTools.map((t: any) => t.params?.serverId).filter(Boolean)),
] as string[]
if (serverIds.length === 0) return inputs
const availableServerIds = new Set<string>()
if (ctx.workspaceId && serverIds.length > 0) {
try {
const servers = await db
.select({ id: mcpServers.id, connectionStatus: mcpServers.connectionStatus })
.from(mcpServers)
.where(
and(
eq(mcpServers.workspaceId, ctx.workspaceId),
inArray(mcpServers.id, serverIds),
isNull(mcpServers.deletedAt)
)
)
for (const server of servers) {
if (server.connectionStatus === 'connected') {
availableServerIds.add(server.id)
}
}
} catch (error) {
logger.warn('Failed to check MCP server availability for logging:', error)
return inputs
}
}
const filteredTools = tools.filter((tool: any) => {
if (tool.type !== 'mcp') return true
const serverId = tool.params?.serverId
if (!serverId) return false
return availableServerIds.has(serverId)
})
return { ...inputs, tools: filteredTools }
}
private preparePauseResumeSelfReference( private preparePauseResumeSelfReference(
ctx: ExecutionContext, ctx: ExecutionContext,
node: DAGNode, node: DAGNode,

View File

@@ -13,7 +13,7 @@ import type {
PausePoint, PausePoint,
ResumeStatus, ResumeStatus,
} from '@/executor/types' } from '@/executor/types'
import { normalizeError } from '@/executor/utils/errors' import { attachExecutionResult, normalizeError } from '@/executor/utils/errors'
const logger = createLogger('ExecutionEngine') const logger = createLogger('ExecutionEngine')
@@ -170,8 +170,8 @@ export class ExecutionEngine {
metadata: this.context.metadata, metadata: this.context.metadata,
} }
if (error && typeof error === 'object') { if (error instanceof Error) {
;(error as any).executionResult = executionResult attachExecutionResult(error, executionResult)
} }
throw error throw error
} }

View File

@@ -1,6 +1,5 @@
import { createLogger } from '@sim/logger' import { createLogger } from '@sim/logger'
import { StartBlockPath } from '@/lib/workflows/triggers/triggers' import { StartBlockPath } from '@/lib/workflows/triggers/triggers'
import type { BlockOutput } from '@/blocks/types'
import { DAGBuilder } from '@/executor/dag/builder' import { DAGBuilder } from '@/executor/dag/builder'
import { BlockExecutor } from '@/executor/execution/block-executor' import { BlockExecutor } from '@/executor/execution/block-executor'
import { EdgeManager } from '@/executor/execution/edge-manager' import { EdgeManager } from '@/executor/execution/edge-manager'
@@ -24,7 +23,6 @@ const logger = createLogger('DAGExecutor')
export interface DAGExecutorOptions { export interface DAGExecutorOptions {
workflow: SerializedWorkflow workflow: SerializedWorkflow
currentBlockStates?: Record<string, BlockOutput>
envVarValues?: Record<string, string> envVarValues?: Record<string, string>
workflowInput?: WorkflowInput workflowInput?: WorkflowInput
workflowVariables?: Record<string, unknown> workflowVariables?: Record<string, unknown>

View File

@@ -4,12 +4,14 @@ import type { TraceSpan } from '@/lib/logs/types'
import type { BlockOutput } from '@/blocks/types' import type { BlockOutput } from '@/blocks/types'
import { Executor } from '@/executor' import { Executor } from '@/executor'
import { BlockType, DEFAULTS, HTTP } from '@/executor/constants' import { BlockType, DEFAULTS, HTTP } from '@/executor/constants'
import { ChildWorkflowError } from '@/executor/errors/child-workflow-error'
import type { import type {
BlockHandler, BlockHandler,
ExecutionContext, ExecutionContext,
ExecutionResult, ExecutionResult,
StreamingExecution, StreamingExecution,
} from '@/executor/types' } from '@/executor/types'
import { hasExecutionResult } from '@/executor/utils/errors'
import { buildAPIUrl, buildAuthHeaders } from '@/executor/utils/http' import { buildAPIUrl, buildAuthHeaders } from '@/executor/utils/http'
import { parseJSON } from '@/executor/utils/json' import { parseJSON } from '@/executor/utils/json'
import { lazyCleanupInputMapping } from '@/executor/utils/lazy-cleanup' import { lazyCleanupInputMapping } from '@/executor/utils/lazy-cleanup'
@@ -137,39 +139,39 @@ export class WorkflowBlockHandler implements BlockHandler {
) )
return mappedResult return mappedResult
} catch (error: any) { } catch (error: unknown) {
logger.error(`Error executing child workflow ${workflowId}:`, error) logger.error(`Error executing child workflow ${workflowId}:`, error)
const { workflows } = useWorkflowRegistry.getState() const { workflows } = useWorkflowRegistry.getState()
const workflowMetadata = workflows[workflowId] const workflowMetadata = workflows[workflowId]
const childWorkflowName = workflowMetadata?.name || workflowId const childWorkflowName = workflowMetadata?.name || workflowId
const originalError = error.message || 'Unknown error' const originalError = error instanceof Error ? error.message : 'Unknown error'
const wrappedError = new Error( let childTraceSpans: WorkflowTraceSpan[] = []
`Error in child workflow "${childWorkflowName}": ${originalError}` let executionResult: ExecutionResult | undefined
)
if (error.executionResult?.logs) { if (hasExecutionResult(error) && error.executionResult.logs) {
const executionResult = error.executionResult as ExecutionResult executionResult = error.executionResult
logger.info(`Extracting child trace spans from error.executionResult`, { logger.info(`Extracting child trace spans from error.executionResult`, {
hasLogs: (executionResult.logs?.length ?? 0) > 0, hasLogs: (executionResult.logs?.length ?? 0) > 0,
logCount: executionResult.logs?.length ?? 0, logCount: executionResult.logs?.length ?? 0,
}) })
const childTraceSpans = this.captureChildWorkflowLogs( childTraceSpans = this.captureChildWorkflowLogs(executionResult, childWorkflowName, ctx)
executionResult,
childWorkflowName,
ctx
)
logger.info(`Captured ${childTraceSpans.length} child trace spans from failed execution`) logger.info(`Captured ${childTraceSpans.length} child trace spans from failed execution`)
;(wrappedError as any).childTraceSpans = childTraceSpans } else if (ChildWorkflowError.isChildWorkflowError(error)) {
} else if (error.childTraceSpans && Array.isArray(error.childTraceSpans)) { childTraceSpans = error.childTraceSpans
;(wrappedError as any).childTraceSpans = error.childTraceSpans
} }
throw wrappedError throw new ChildWorkflowError({
message: `Error in child workflow "${childWorkflowName}": ${originalError}`,
childWorkflowName,
childTraceSpans,
executionResult,
cause: error instanceof Error ? error : undefined,
})
} }
} }
@@ -441,11 +443,11 @@ export class WorkflowBlockHandler implements BlockHandler {
if (!success) { if (!success) {
logger.warn(`Child workflow ${childWorkflowName} failed`) logger.warn(`Child workflow ${childWorkflowName} failed`)
const error = new Error( throw new ChildWorkflowError({
`Error in child workflow "${childWorkflowName}": ${childResult.error || 'Child workflow execution failed'}` message: `Error in child workflow "${childWorkflowName}": ${childResult.error || 'Child workflow execution failed'}`,
) childWorkflowName,
;(error as any).childTraceSpans = childTraceSpans || [] childTraceSpans: childTraceSpans || [],
throw error })
} }
return { return {

View File

@@ -1,6 +1,39 @@
import type { ExecutionContext } from '@/executor/types' import type { ExecutionContext, ExecutionResult } from '@/executor/types'
import type { SerializedBlock } from '@/serializer/types' import type { SerializedBlock } from '@/serializer/types'
/**
 * Interface for errors that carry an ExecutionResult.
 * Used when workflow execution fails and we want to preserve partial results
 * (logs, outputs) for propagation to parent workflows.
 */
export interface ErrorWithExecutionResult extends Error {
  executionResult: ExecutionResult
}
/**
 * Type guard: does this error carry a usable ExecutionResult?
 *
 * The attached value must be a non-null object with a boolean `success`
 * field and a non-null `output` — partial or malformed results are rejected.
 */
export function hasExecutionResult(error: unknown): error is ErrorWithExecutionResult {
  if (!(error instanceof Error)) return false
  if (!('executionResult' in error)) return false
  const candidate = (error as { executionResult?: unknown }).executionResult
  if (candidate == null || typeof candidate !== 'object') return false
  const { success, output } = candidate as Record<string, unknown>
  return typeof success === 'boolean' && output != null
}
/**
 * Attaches an ExecutionResult to an error for propagation to parent workflows.
 * Mutates the error in place; retrieve via {@link hasExecutionResult}.
 */
export function attachExecutionResult(error: Error, executionResult: ExecutionResult): void {
  ;(error as Error & { executionResult: ExecutionResult }).executionResult = executionResult
}
export interface BlockExecutionErrorDetails { export interface BlockExecutionErrorDetails {
block: SerializedBlock block: SerializedBlock
error: Error | string error: Error | string

View File

@@ -28,6 +28,23 @@ export interface EnvVarResolveOptions {
missingKeys?: string[] missingKeys?: string[]
} }
/**
 * Standard defaults for env var resolution, shared by every call site.
 *
 * - `allowEmbedded: true` — `{{VAR}}` resolves inside larger strings, e.g. `https://{{HOST}}/api`
 * - `resolveExactMatch: true` — `{{VAR}}` resolves when it is the entire value
 * - `trimKeys: true` — `{{ VAR }}` is treated the same as `{{VAR}}`
 * - `onMissing: 'keep'` — unrecognized patterns pass through untouched (e.g. Grafana's `{{instance}}`)
 * - `deep: false` — only strings are processed; opt in to `true` for nested objects
 */
export const ENV_VAR_RESOLVE_DEFAULTS: Required<Omit<EnvVarResolveOptions, 'missingKeys'>> = {
  allowEmbedded: true,
  resolveExactMatch: true,
  trimKeys: true,
  onMissing: 'keep',
  deep: false,
} as const
/** /**
* Resolve {{ENV_VAR}} references in values using provided env vars. * Resolve {{ENV_VAR}} references in values using provided env vars.
*/ */
@@ -37,11 +54,11 @@ export function resolveEnvVarReferences(
options: EnvVarResolveOptions = {} options: EnvVarResolveOptions = {}
): unknown { ): unknown {
const { const {
allowEmbedded = true, allowEmbedded = ENV_VAR_RESOLVE_DEFAULTS.allowEmbedded,
resolveExactMatch = true, resolveExactMatch = ENV_VAR_RESOLVE_DEFAULTS.resolveExactMatch,
trimKeys = false, trimKeys = ENV_VAR_RESOLVE_DEFAULTS.trimKeys,
onMissing = 'keep', onMissing = ENV_VAR_RESOLVE_DEFAULTS.onMissing,
deep = true, deep = ENV_VAR_RESOLVE_DEFAULTS.deep,
} = options } = options
if (typeof value === 'string') { if (typeof value === 'string') {

View File

@@ -8,6 +8,7 @@ import {
import type { InputFormatField } from '@/lib/workflows/types' import type { InputFormatField } from '@/lib/workflows/types'
import type { NormalizedBlockOutput, UserFile } from '@/executor/types' import type { NormalizedBlockOutput, UserFile } from '@/executor/types'
import type { SerializedBlock } from '@/serializer/types' import type { SerializedBlock } from '@/serializer/types'
import { safeAssign } from '@/tools/safe-assign'
type ExecutionKind = 'chat' | 'manual' | 'api' type ExecutionKind = 'chat' | 'manual' | 'api'
@@ -346,7 +347,7 @@ function buildLegacyStarterOutput(
const finalObject = isPlainObject(finalInput) ? finalInput : undefined const finalObject = isPlainObject(finalInput) ? finalInput : undefined
if (finalObject) { if (finalObject) {
Object.assign(output, finalObject) safeAssign(output, finalObject)
output.input = { ...finalObject } output.input = { ...finalObject }
} else { } else {
output.input = finalInput output.input = finalInput

View File

@@ -2,8 +2,8 @@
import { useCallback, useState } from 'react' import { useCallback, useState } from 'react'
import { createLogger } from '@sim/logger' import { createLogger } from '@sim/logger'
import { sanitizeForHttp, sanitizeHeaders } from '@/lib/mcp/shared'
import type { McpTransport } from '@/lib/mcp/types' import type { McpTransport } from '@/lib/mcp/types'
import { sanitizeForHttp, sanitizeHeaders } from '@/lib/mcp/utils'
const logger = createLogger('useMcpServerTest') const logger = createLogger('useMcpServerTest')

View File

@@ -10,7 +10,7 @@ import { useCallback, useMemo } from 'react'
import { createLogger } from '@sim/logger' import { createLogger } from '@sim/logger'
import { useQueryClient } from '@tanstack/react-query' import { useQueryClient } from '@tanstack/react-query'
import { McpIcon } from '@/components/icons' import { McpIcon } from '@/components/icons'
import { createMcpToolId } from '@/lib/mcp/utils' import { createMcpToolId } from '@/lib/mcp/shared'
import { mcpKeys, useMcpToolsQuery } from '@/hooks/queries/mcp' import { mcpKeys, useMcpToolsQuery } from '@/hooks/queries/mcp'
const logger = createLogger('useMcpTools') const logger = createLogger('useMcpTools')

View File

@@ -237,6 +237,7 @@ interface DeployWorkflowResult {
isDeployed: boolean isDeployed: boolean
deployedAt?: string deployedAt?: string
apiKey?: string apiKey?: string
warnings?: string[]
} }
/** /**
@@ -272,6 +273,7 @@ export function useDeployWorkflow() {
isDeployed: data.isDeployed ?? false, isDeployed: data.isDeployed ?? false,
deployedAt: data.deployedAt, deployedAt: data.deployedAt,
apiKey: data.apiKey, apiKey: data.apiKey,
warnings: data.warnings,
} }
}, },
onSuccess: (data, variables) => { onSuccess: (data, variables) => {
@@ -360,6 +362,7 @@ interface ActivateVersionVariables {
interface ActivateVersionResult { interface ActivateVersionResult {
deployedAt?: string deployedAt?: string
apiKey?: string apiKey?: string
warnings?: string[]
} }
/** /**

View File

@@ -144,11 +144,17 @@ export function useLogsList(
}) })
} }
export function useLogDetail(logId: string | undefined) { interface UseLogDetailOptions {
enabled?: boolean
refetchInterval?: number | false
}
export function useLogDetail(logId: string | undefined, options?: UseLogDetailOptions) {
return useQuery({ return useQuery({
queryKey: logKeys.detail(logId), queryKey: logKeys.detail(logId),
queryFn: () => fetchLogDetail(logId as string), queryFn: () => fetchLogDetail(logId as string),
enabled: Boolean(logId), enabled: Boolean(logId) && (options?.enabled ?? true),
refetchInterval: options?.refetchInterval ?? false,
staleTime: 30 * 1000, staleTime: 30 * 1000,
placeholderData: keepPreviousData, placeholderData: keepPreviousData,
}) })

View File

@@ -1,7 +1,7 @@
import { createLogger } from '@sim/logger' import { createLogger } from '@sim/logger'
import { keepPreviousData, useMutation, useQuery, useQueryClient } from '@tanstack/react-query' import { keepPreviousData, useMutation, useQuery, useQueryClient } from '@tanstack/react-query'
import { sanitizeForHttp, sanitizeHeaders } from '@/lib/mcp/shared'
import type { McpServerStatusConfig, McpTool, StoredMcpTool } from '@/lib/mcp/types' import type { McpServerStatusConfig, McpTool, StoredMcpTool } from '@/lib/mcp/types'
import { sanitizeForHttp, sanitizeHeaders } from '@/lib/mcp/utils'
const logger = createLogger('McpQueries') const logger = createLogger('McpQueries')

View File

@@ -0,0 +1,43 @@
import { useMemo } from 'react'
import { usePersonalEnvironment, useWorkspaceEnvironment } from '@/hooks/queries/environment'
/**
 * Returns the set of env var keys available to the current user, merging
 * personal keys with workspace-scoped keys when a workspaceId is given.
 * Returns `undefined` while any relevant query is still loading, so callers
 * can distinguish "not loaded yet" from "no keys".
 */
export function useAvailableEnvVarKeys(workspaceId?: string): Set<string> | undefined {
  const personal = usePersonalEnvironment()
  // Query is keyed by workspaceId; an empty string keeps the hook call unconditional.
  const workspace = useWorkspaceEnvironment(workspaceId || '')

  const personalEnv = personal.data
  const personalLoading = personal.isLoading
  const workspaceEnvData = workspace.data
  const workspaceLoading = workspace.isLoading

  return useMemo(() => {
    // Workspace loading only matters when a workspace was actually requested.
    const stillLoading = personalLoading || (workspaceId && workspaceLoading)
    if (stillLoading) {
      return undefined
    }

    const keys = new Set<string>()
    for (const key of Object.keys(personalEnv ?? {})) {
      keys.add(key)
    }
    if (workspaceId && workspaceEnvData) {
      for (const key of Object.keys(workspaceEnvData.workspace ?? {})) {
        keys.add(key)
      }
      for (const key of Object.keys(workspaceEnvData.personal ?? {})) {
        keys.add(key)
      }
    }
    return keys
  }, [personalEnv, workspaceEnvData, personalLoading, workspaceLoading, workspaceId])
}
/**
 * Builds a predicate deciding whether an env var reference should be
 * highlighted. When the available-keys set is still undefined (queries
 * loading), every var is highlighted to avoid false negatives.
 */
export function createShouldHighlightEnvVar(
  availableEnvVars: Set<string> | undefined
): (varName: string) => boolean {
  return (varName) => availableEnvVars?.has(varName) ?? true
}

View File

@@ -100,8 +100,13 @@ export function useExecutionStream() {
}) })
if (!response.ok) { if (!response.ok) {
const error = await response.json() const errorResponse = await response.json()
throw new Error(error.error || 'Failed to start execution') const error = new Error(errorResponse.error || 'Failed to start execution')
// Attach the execution result from server response for error handling
if (errorResponse && typeof errorResponse === 'object') {
Object.assign(error, { executionResult: errorResponse })
}
throw error
} }
if (!response.body) { if (!response.body) {

View File

@@ -1,4 +1,4 @@
import { useCallback, useEffect, useMemo, useState } from 'react' import { useCallback, useEffect, useMemo } from 'react'
import { createLogger } from '@sim/logger' import { createLogger } from '@sim/logger'
import { useParams } from 'next/navigation' import { useParams } from 'next/navigation'
import { getBaseUrl } from '@/lib/core/utils/urls' import { getBaseUrl } from '@/lib/core/utils/urls'
@@ -6,12 +6,10 @@ import { getBlock } from '@/blocks'
import { populateTriggerFieldsFromConfig } from '@/hooks/use-trigger-config-aggregation' import { populateTriggerFieldsFromConfig } from '@/hooks/use-trigger-config-aggregation'
import { useSubBlockStore } from '@/stores/workflows/subblock/store' import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { useWorkflowStore } from '@/stores/workflows/workflow/store' import { useWorkflowStore } from '@/stores/workflows/workflow/store'
import { getTrigger, isTriggerValid } from '@/triggers' import { isTriggerValid } from '@/triggers'
const logger = createLogger('useWebhookManagement') const logger = createLogger('useWebhookManagement')
const CREDENTIAL_SET_PREFIX = 'credentialSet:'
interface UseWebhookManagementProps { interface UseWebhookManagementProps {
blockId: string blockId: string
triggerId?: string triggerId?: string
@@ -24,9 +22,6 @@ interface WebhookManagementState {
webhookPath: string webhookPath: string
webhookId: string | null webhookId: string | null
isLoading: boolean isLoading: boolean
isSaving: boolean
saveConfig: () => Promise<boolean>
deleteConfig: () => Promise<boolean>
} }
/** /**
@@ -83,11 +78,9 @@ function resolveEffectiveTriggerId(
} }
/** /**
* Hook to manage webhook lifecycle for trigger blocks * Hook to load webhook info for trigger blocks.
* Handles: * Used for displaying webhook URLs in the UI.
* - Pre-generating webhook URLs based on blockId (without creating webhook) * Webhook creation/updates are handled by the deploy flow.
* - Loading existing webhooks from the API
* - Saving and deleting webhook configurations
*/ */
export function useWebhookManagement({ export function useWebhookManagement({
blockId, blockId,
@@ -98,8 +91,6 @@ export function useWebhookManagement({
const params = useParams() const params = useParams()
const workflowId = params.workflowId as string const workflowId = params.workflowId as string
const triggerDef = triggerId && isTriggerValid(triggerId) ? getTrigger(triggerId) : null
const webhookId = useSubBlockStore( const webhookId = useSubBlockStore(
useCallback((state) => state.getValue(blockId, 'webhookId') as string | null, [blockId]) useCallback((state) => state.getValue(blockId, 'webhookId') as string | null, [blockId])
) )
@@ -107,7 +98,6 @@ export function useWebhookManagement({
useCallback((state) => state.getValue(blockId, 'triggerPath') as string | null, [blockId]) useCallback((state) => state.getValue(blockId, 'triggerPath') as string | null, [blockId])
) )
const isLoading = useSubBlockStore((state) => state.loadingWebhooks.has(blockId)) const isLoading = useSubBlockStore((state) => state.loadingWebhooks.has(blockId))
const isChecked = useSubBlockStore((state) => state.checkedWebhooks.has(blockId))
const webhookUrl = useMemo(() => { const webhookUrl = useMemo(() => {
if (!webhookPath) { if (!webhookPath) {
@@ -118,8 +108,6 @@ export function useWebhookManagement({
return `${baseUrl}/api/webhooks/trigger/${webhookPath}` return `${baseUrl}/api/webhooks/trigger/${webhookPath}`
}, [webhookPath, blockId]) }, [webhookPath, blockId])
const [isSaving, setIsSaving] = useState(false)
useEffect(() => { useEffect(() => {
if (triggerId && !isPreview) { if (triggerId && !isPreview) {
const storedTriggerId = useSubBlockStore.getState().getValue(blockId, 'triggerId') const storedTriggerId = useSubBlockStore.getState().getValue(blockId, 'triggerId')
@@ -143,7 +131,7 @@ export function useWebhookManagement({
return return
} }
const loadWebhookOrGenerateUrl = async () => { const loadWebhookInfo = async () => {
useSubBlockStore.setState((state) => ({ useSubBlockStore.setState((state) => ({
loadingWebhooks: new Set([...state.loadingWebhooks, blockId]), loadingWebhooks: new Set([...state.loadingWebhooks, blockId]),
})) }))
@@ -171,8 +159,6 @@ export function useWebhookManagement({
if (webhook.providerConfig) { if (webhook.providerConfig) {
const effectiveTriggerId = resolveEffectiveTriggerId(blockId, triggerId, webhook) const effectiveTriggerId = resolveEffectiveTriggerId(blockId, triggerId, webhook)
// Filter out runtime/system fields from providerConfig before storing as triggerConfig
// These fields are managed by the system and should not be included in change detection
const { const {
credentialId: _credId, credentialId: _credId,
credentialSetId: _credSetId, credentialSetId: _credSetId,
@@ -224,202 +210,14 @@ export function useWebhookManagement({
} }
} }
if (useWebhookUrl) { if (useWebhookUrl) {
loadWebhookOrGenerateUrl() loadWebhookInfo()
} }
}, [isPreview, triggerId, workflowId, blockId, useWebhookUrl]) }, [isPreview, triggerId, workflowId, blockId, useWebhookUrl])
const createWebhook = async (
effectiveTriggerId: string | undefined,
selectedCredentialId: string | null
): Promise<boolean> => {
if (!triggerDef || !effectiveTriggerId) {
return false
}
const triggerConfig = useSubBlockStore.getState().getValue(blockId, 'triggerConfig')
const isCredentialSet = selectedCredentialId?.startsWith(CREDENTIAL_SET_PREFIX)
const credentialSetId = isCredentialSet
? selectedCredentialId!.slice(CREDENTIAL_SET_PREFIX.length)
: undefined
const credentialId = isCredentialSet ? undefined : selectedCredentialId
const webhookConfig = {
...(triggerConfig || {}),
...(credentialId ? { credentialId } : {}),
...(credentialSetId ? { credentialSetId } : {}),
triggerId: effectiveTriggerId,
}
const path = blockId
const response = await fetch('/api/webhooks', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
workflowId,
blockId,
path,
provider: triggerDef.provider,
providerConfig: webhookConfig,
}),
})
if (!response.ok) {
let errorMessage = 'Failed to create webhook'
try {
const errorData = await response.json()
errorMessage = errorData.details || errorData.error || errorMessage
} catch {
// If response is not JSON, use default message
}
logger.error('Failed to create webhook', { errorMessage })
throw new Error(errorMessage)
}
const data = await response.json()
const savedWebhookId = data.webhook.id
useSubBlockStore.getState().setValue(blockId, 'triggerPath', path)
useSubBlockStore.getState().setValue(blockId, 'triggerId', effectiveTriggerId)
useSubBlockStore.getState().setValue(blockId, 'webhookId', savedWebhookId)
useSubBlockStore.setState((state) => ({
checkedWebhooks: new Set([...state.checkedWebhooks, blockId]),
}))
logger.info('Trigger webhook created successfully', {
webhookId: savedWebhookId,
triggerId: effectiveTriggerId,
provider: triggerDef.provider,
blockId,
})
return true
}
const updateWebhook = async (
webhookIdToUpdate: string,
effectiveTriggerId: string | undefined,
selectedCredentialId: string | null
): Promise<boolean> => {
const triggerConfigRaw = useSubBlockStore.getState().getValue(blockId, 'triggerConfig')
const triggerConfig =
typeof triggerConfigRaw === 'object' && triggerConfigRaw !== null
? (triggerConfigRaw as Record<string, unknown>)
: {}
const isCredentialSet = selectedCredentialId?.startsWith(CREDENTIAL_SET_PREFIX)
const credentialSetId = isCredentialSet
? selectedCredentialId!.slice(CREDENTIAL_SET_PREFIX.length)
: undefined
const credentialId = isCredentialSet ? undefined : selectedCredentialId
const response = await fetch(`/api/webhooks/${webhookIdToUpdate}`, {
method: 'PATCH',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
providerConfig: {
...triggerConfig,
...(credentialId ? { credentialId } : {}),
...(credentialSetId ? { credentialSetId } : {}),
triggerId: effectiveTriggerId,
},
}),
})
if (response.status === 404) {
logger.warn('Webhook not found while updating, recreating', {
blockId,
lostWebhookId: webhookIdToUpdate,
})
useSubBlockStore.getState().setValue(blockId, 'webhookId', null)
return createWebhook(effectiveTriggerId, selectedCredentialId)
}
if (!response.ok) {
let errorMessage = 'Failed to save trigger configuration'
try {
const errorData = await response.json()
errorMessage = errorData.details || errorData.error || errorMessage
} catch {
// If response is not JSON, use default message
}
logger.error('Failed to save trigger config', { errorMessage })
throw new Error(errorMessage)
}
logger.info('Trigger config saved successfully', { blockId, webhookId: webhookIdToUpdate })
return true
}
const saveConfig = async (): Promise<boolean> => {
if (isPreview || !triggerDef) {
return false
}
const effectiveTriggerId = resolveEffectiveTriggerId(blockId, triggerId)
try {
setIsSaving(true)
const triggerCredentials = useSubBlockStore.getState().getValue(blockId, 'triggerCredentials')
const selectedCredentialId = (triggerCredentials as string | null) || null
if (!webhookId) {
return createWebhook(effectiveTriggerId, selectedCredentialId)
}
return updateWebhook(webhookId, effectiveTriggerId, selectedCredentialId)
} catch (error) {
logger.error('Error saving trigger config:', error)
throw error
} finally {
setIsSaving(false)
}
}
const deleteConfig = async (): Promise<boolean> => {
if (isPreview || !webhookId) {
return false
}
try {
setIsSaving(true)
const response = await fetch(`/api/webhooks/${webhookId}`, {
method: 'DELETE',
})
if (!response.ok) {
logger.error('Failed to delete webhook')
return false
}
useSubBlockStore.getState().setValue(blockId, 'triggerPath', '')
useSubBlockStore.getState().setValue(blockId, 'webhookId', null)
useSubBlockStore.setState((state) => {
const newSet = new Set(state.checkedWebhooks)
newSet.delete(blockId)
return { checkedWebhooks: newSet }
})
logger.info('Webhook deleted successfully')
return true
} catch (error) {
logger.error('Error deleting webhook:', error)
return false
} finally {
setIsSaving(false)
}
}
return { return {
webhookUrl, webhookUrl,
webhookPath: webhookPath || blockId, webhookPath: webhookPath || blockId,
webhookId, webhookId,
isLoading, isLoading,
isSaving,
saveConfig,
deleteConfig,
} }
} }

View File

@@ -4,6 +4,7 @@ import { a2aPushNotificationConfig, a2aTask } from '@sim/db/schema'
import { createLogger } from '@sim/logger' import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm' import { eq } from 'drizzle-orm'
import { isTriggerDevEnabled } from '@/lib/core/config/feature-flags' import { isTriggerDevEnabled } from '@/lib/core/config/feature-flags'
import { secureFetchWithPinnedIP, validateUrlWithDNS } from '@/lib/core/security/input-validation'
const logger = createLogger('A2APushNotifications') const logger = createLogger('A2APushNotifications')
@@ -45,7 +46,17 @@ export async function deliverPushNotification(taskId: string, state: TaskState):
} }
try { try {
const response = await fetch(config.url, { const urlValidation = await validateUrlWithDNS(config.url, 'webhook URL')
if (!urlValidation.isValid || !urlValidation.resolvedIP) {
logger.error('Push notification URL validation failed', {
taskId,
url: config.url,
error: urlValidation.error,
})
return false
}
const response = await secureFetchWithPinnedIP(config.url, urlValidation.resolvedIP, {
method: 'POST', method: 'POST',
headers, headers,
body: JSON.stringify({ body: JSON.stringify({
@@ -59,7 +70,7 @@ export async function deliverPushNotification(taskId: string, state: TaskState):
artifacts: (task.artifacts as Artifact[]) || [], artifacts: (task.artifacts as Artifact[]) || [],
}, },
}), }),
signal: AbortSignal.timeout(30000), timeout: 30000,
}) })
if (!response.ok) { if (!response.ok) {

View File

@@ -256,6 +256,17 @@ export const auth = betterAuth({
return { data: account } return { data: account }
}, },
after: async (account) => { after: async (account) => {
try {
const { ensureUserStatsExists } = await import('@/lib/billing/core/usage')
await ensureUserStatsExists(account.userId)
} catch (error) {
logger.error('[databaseHooks.account.create.after] Failed to ensure user stats', {
userId: account.userId,
accountId: account.id,
error,
})
}
if (account.providerId === 'salesforce') { if (account.providerId === 'salesforce') {
const updates: { const updates: {
accessTokenExpiresAt?: Date accessTokenExpiresAt?: Date
@@ -462,7 +473,6 @@ export const auth = betterAuth({
}, },
emailVerification: { emailVerification: {
autoSignInAfterVerification: true, autoSignInAfterVerification: true,
// onEmailVerification is called by the emailOTP plugin when email is verified via OTP
onEmailVerification: async (user) => { onEmailVerification: async (user) => {
if (isHosted && user.email) { if (isHosted && user.email) {
try { try {

View File

@@ -0,0 +1,28 @@
/** Number of pills rendered by usage indicators. */
export const USAGE_PILL_COUNT = 8

/** Usage percentage thresholds that switch the indicator's visual state. */
export const USAGE_THRESHOLDS = {
  /** At or above this percentage the indicator shows the warning (yellow/orange) state. */
  WARNING: 75,
  /** At or above this percentage the indicator shows the critical (red) state. */
  CRITICAL: 90,
} as const

/** CSS variable color values for each usage pill state. */
export const USAGE_PILL_COLORS = {
  /** Unfilled pill (gray). */
  UNFILLED: 'var(--surface-7)',
  /** Filled pill under normal usage (blue). */
  FILLED: 'var(--brand-secondary)',
  /** Filled pill in the warning state (yellow/orange). */
  WARNING: 'var(--warning)',
  /** Filled pill at or over the limit (red). */
  AT_LIMIT: 'var(--text-error)',
} as const

View File

@@ -1,3 +1,7 @@
export {
USAGE_PILL_COLORS,
USAGE_THRESHOLDS,
} from './consts'
export type { export type {
BillingStatus, BillingStatus,
SubscriptionData, SubscriptionData,
@@ -8,6 +12,7 @@ export {
canUpgrade, canUpgrade,
getBillingStatus, getBillingStatus,
getDaysRemainingInPeriod, getDaysRemainingInPeriod,
getFilledPillColor,
getRemainingBudget, getRemainingBudget,
getSubscriptionStatus, getSubscriptionStatus,
getUsage, getUsage,

View File

@@ -1,150 +0,0 @@
/**
 * Shared utilities for consistent usage visualization across the application.
 *
 * Single source of truth for how usage metrics are rendered as "pill"
 * progress indicators.
 */

/**
 * Number of pills rendered by usage indicators.
 *
 * Eight pills give 12.5% granularity per pill — a good balance between
 * precision and visual clarity, and a consistent representation across
 * the panel and settings views.
 */
export const USAGE_PILL_COUNT = 8

/** Usage percentage thresholds that switch the indicator's visual state. */
export const USAGE_THRESHOLDS = {
  /** Warning threshold (yellow/orange state) */
  WARNING: 75,
  /** Critical threshold (red state) */
  CRITICAL: 90,
} as const

/** CSS variable color values for each usage pill state. */
export const USAGE_PILL_COLORS = {
  /** Unfilled pill color (gray) */
  UNFILLED: 'var(--surface-7)',
  /** Normal filled pill color (blue) */
  FILLED: 'var(--brand-secondary)',
  /** Warning state pill color (yellow/orange) */
  WARNING: 'var(--warning)',
  /** Critical/limit reached pill color (red) */
  AT_LIMIT: 'var(--text-error)',
} as const

/**
 * Calculate how many pills are filled for a given usage percentage.
 *
 * Rounds up (Math.ceil) so that even minimal usage (e.g. 0.01%) lights at
 * least one pill, making small amounts of usage visible to the user.
 *
 * @param percentUsed - Usage percentage (0-100); decimals allowed, out-of-range input is clamped
 * @returns Count of filled pills, 0..USAGE_PILL_COUNT
 *
 * @example
 * calculateFilledPills(0.315) // 1
 * calculateFilledPills(50)    // 4
 * calculateFilledPills(150)   // 8 (clamped)
 */
export function calculateFilledPills(percentUsed: number): number {
  const clamped = Math.max(0, Math.min(100, percentUsed))
  return Math.ceil((clamped / 100) * USAGE_PILL_COUNT)
}

/**
 * Whether usage fills every pill (at or over the limit).
 *
 * @param percentUsed - Usage percentage (0-100)
 * @returns true when all pills should be filled
 */
export function isUsageAtLimit(percentUsed: number): boolean {
  return calculateFilledPills(percentUsed) >= USAGE_PILL_COUNT
}

/**
 * Color for a single pill given its fill and limit state.
 *
 * @param isFilled - Whether this pill is filled
 * @param isAtLimit - Whether usage has reached the limit
 * @returns CSS color value
 */
export function getPillColor(isFilled: boolean, isAtLimit: boolean): string {
  if (isFilled) {
    return isAtLimit ? USAGE_PILL_COLORS.AT_LIMIT : USAGE_PILL_COLORS.FILLED
  }
  return USAGE_PILL_COLORS.UNFILLED
}

/**
 * Color used for filled pills given the current usage thresholds.
 *
 * @param isCritical - Usage is at critical level (blocked or >= 90%)
 * @param isWarning - Usage is at warning level (>= 75% but below critical)
 * @returns CSS color value for filled pills
 */
export function getFilledPillColor(isCritical: boolean, isWarning: boolean): string {
  if (isCritical) return USAGE_PILL_COLORS.AT_LIMIT
  return isWarning ? USAGE_PILL_COLORS.WARNING : USAGE_PILL_COLORS.FILLED
}

/**
 * Derive the usage state flags from a percentage and blocked status.
 * A blocked account is always critical; warning excludes critical.
 *
 * @param percentUsed - Usage percentage (0-100)
 * @param isBlocked - Whether the account is blocked
 * @returns isCritical / isWarning flags
 */
export function getUsageState(
  percentUsed: number,
  isBlocked = false
): { isCritical: boolean; isWarning: boolean } {
  const critical = isBlocked || percentUsed >= USAGE_THRESHOLDS.CRITICAL
  return {
    isCritical: critical,
    isWarning: !critical && percentUsed >= USAGE_THRESHOLDS.WARNING,
  }
}

/**
 * Build the array of pill states for rendering.
 *
 * @param percentUsed - Usage percentage (0-100)
 * @param isBlocked - Whether the account is blocked
 * @returns One entry per pill with its fill flag, color, and index
 *
 * @example
 * const pills = generatePillStates(50)
 * pills.forEach((pill, index) => (
 *   <Pill key={index} color={pill.color} filled={pill.filled} />
 * ))
 */
export function generatePillStates(
  percentUsed: number,
  isBlocked = false
): Array<{
  filled: boolean
  color: string
  index: number
}> {
  const filledCount = calculateFilledPills(percentUsed)
  const { isCritical, isWarning } = getUsageState(percentUsed, isBlocked)
  const activeColor = getFilledPillColor(isCritical, isWarning)
  const pills: Array<{ filled: boolean; color: string; index: number }> = []
  for (let index = 0; index < USAGE_PILL_COUNT; index++) {
    const filled = index < filledCount
    pills.push({
      filled,
      color: filled ? activeColor : USAGE_PILL_COLORS.UNFILLED,
      index,
    })
  }
  return pills
}

View File

@@ -4,6 +4,7 @@
*/ */
import { DEFAULT_FREE_CREDITS } from '@/lib/billing/constants' import { DEFAULT_FREE_CREDITS } from '@/lib/billing/constants'
import { USAGE_PILL_COLORS } from './consts'
import type { BillingStatus, SubscriptionData, UsageData } from './types' import type { BillingStatus, SubscriptionData, UsageData } from './types'
const defaultUsage: UsageData = { const defaultUsage: UsageData = {
@@ -36,9 +37,35 @@ export function getSubscriptionStatus(subscriptionData: SubscriptionData | null
/** /**
* Get usage data from subscription data * Get usage data from subscription data
* Validates and sanitizes all numeric values to prevent crashes from malformed data
*/ */
export function getUsage(subscriptionData: SubscriptionData | null | undefined): UsageData { export function getUsage(subscriptionData: SubscriptionData | null | undefined): UsageData {
return subscriptionData?.usage ?? defaultUsage const usage = subscriptionData?.usage
if (!usage) {
return defaultUsage
}
return {
current:
typeof usage.current === 'number' && Number.isFinite(usage.current) ? usage.current : 0,
limit:
typeof usage.limit === 'number' && Number.isFinite(usage.limit)
? usage.limit
: DEFAULT_FREE_CREDITS,
percentUsed:
typeof usage.percentUsed === 'number' && Number.isFinite(usage.percentUsed)
? usage.percentUsed
: 0,
isWarning: Boolean(usage.isWarning),
isExceeded: Boolean(usage.isExceeded),
billingPeriodStart: usage.billingPeriodStart ?? null,
billingPeriodEnd: usage.billingPeriodEnd ?? null,
lastPeriodCost:
typeof usage.lastPeriodCost === 'number' && Number.isFinite(usage.lastPeriodCost)
? usage.lastPeriodCost
: 0,
}
} }
/** /**
@@ -100,3 +127,16 @@ export function canUpgrade(subscriptionData: SubscriptionData | null | undefined
const status = getSubscriptionStatus(subscriptionData) const status = getSubscriptionStatus(subscriptionData)
return status.plan === 'free' || status.plan === 'pro' return status.plan === 'free' || status.plan === 'pro'
} }
/**
 * Color used for filled pills given the current usage thresholds.
 *
 * @param isCritical - Usage is at critical level (blocked or >= 90%)
 * @param isWarning - Usage is at warning level (>= 75% but below critical)
 * @returns CSS color value for filled pills
 */
export function getFilledPillColor(isCritical: boolean, isWarning: boolean): string {
  if (isCritical) return USAGE_PILL_COLORS.AT_LIMIT
  return isWarning ? USAGE_PILL_COLORS.WARNING : USAGE_PILL_COLORS.FILLED
}

View File

@@ -96,11 +96,32 @@ export async function handleNewUser(userId: string): Promise<void> {
} }
} }
/**
 * Ensures a userStats record exists for a user, inserting one with default
 * values when missing. Acts as a fallback for flows where the
 * user.create.after hook didn't fire (e.g. OAuth account linking to an
 * existing user).
 *
 * @param userId - ID of the user to guarantee a stats row for
 */
export async function ensureUserStatsExists(userId: string): Promise<void> {
  const defaults = {
    id: crypto.randomUUID(),
    userId,
    currentUsageLimit: getFreeTierLimit().toString(),
    usageLimitUpdatedAt: new Date(),
  }
  // The insert is a no-op when a row already exists for this user.
  await db.insert(userStats).values(defaults).onConflictDoNothing({ target: userStats.userId })
}
/** /**
* Get comprehensive usage data for a user * Get comprehensive usage data for a user
*/ */
export async function getUserUsageData(userId: string): Promise<UsageData> { export async function getUserUsageData(userId: string): Promise<UsageData> {
try { try {
await ensureUserStatsExists(userId)
const [userStatsData, subscription] = await Promise.all([ const [userStatsData, subscription] = await Promise.all([
db.select().from(userStats).where(eq(userStats.userId, userId)).limit(1), db.select().from(userStats).where(eq(userStats.userId, userId)).limit(1),
getHighestPrioritySubscription(userId), getHighestPrioritySubscription(userId),

View File

@@ -16,6 +16,7 @@ import {
} from '@sim/db/schema' } from '@sim/db/schema'
import { createLogger } from '@sim/logger' import { createLogger } from '@sim/logger'
import { and, eq, sql } from 'drizzle-orm' import { and, eq, sql } from 'drizzle-orm'
import { syncUsageLimitsFromSubscription } from '@/lib/billing/core/usage'
import { requireStripeClient } from '@/lib/billing/stripe-client' import { requireStripeClient } from '@/lib/billing/stripe-client'
import { validateSeatAvailability } from '@/lib/billing/validation/seat-management' import { validateSeatAvailability } from '@/lib/billing/validation/seat-management'
@@ -556,6 +557,8 @@ export async function removeUserFromOrganization(
const restoreResult = await restoreUserProSubscription(userId) const restoreResult = await restoreUserProSubscription(userId)
billingActions.proRestored = restoreResult.restored billingActions.proRestored = restoreResult.restored
billingActions.usageRestored = restoreResult.usageRestored billingActions.usageRestored = restoreResult.usageRestored
await syncUsageLimitsFromSubscription(userId)
} }
} catch (postRemoveError) { } catch (postRemoveError) {
logger.error('Post-removal personal Pro restore check failed', { logger.error('Post-removal personal Pro restore check failed', {

View File

@@ -3,9 +3,6 @@ import { environment, workspaceEnvironment } from '@sim/db/schema'
import { createLogger } from '@sim/logger' import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm' import { eq } from 'drizzle-orm'
import { decryptSecret } from '@/lib/core/security/encryption' import { decryptSecret } from '@/lib/core/security/encryption'
import { REFERENCE } from '@/executor/constants'
import { createEnvVarPattern } from '@/executor/utils/reference-validation'
import type { BlockState } from '@/stores/workflows/workflow/types'
const logger = createLogger('EnvironmentUtils') const logger = createLogger('EnvironmentUtils')
@@ -54,6 +51,7 @@ export async function getPersonalAndWorkspaceEnv(
personalDecrypted: Record<string, string> personalDecrypted: Record<string, string>
workspaceDecrypted: Record<string, string> workspaceDecrypted: Record<string, string>
conflicts: string[] conflicts: string[]
decryptionFailures: string[]
}> { }> {
const [personalRows, workspaceRows] = await Promise.all([ const [personalRows, workspaceRows] = await Promise.all([
db.select().from(environment).where(eq(environment.userId, userId)).limit(1), db.select().from(environment).where(eq(environment.userId, userId)).limit(1),
@@ -69,14 +67,23 @@ export async function getPersonalAndWorkspaceEnv(
const personalEncrypted: Record<string, string> = (personalRows[0]?.variables as any) || {} const personalEncrypted: Record<string, string> = (personalRows[0]?.variables as any) || {}
const workspaceEncrypted: Record<string, string> = (workspaceRows[0]?.variables as any) || {} const workspaceEncrypted: Record<string, string> = (workspaceRows[0]?.variables as any) || {}
const decryptAll = async (src: Record<string, string>) => { const decryptionFailures: string[] = []
const decryptAll = async (src: Record<string, string>, source: 'personal' | 'workspace') => {
const entries = Object.entries(src) const entries = Object.entries(src)
const results = await Promise.all( const results = await Promise.all(
entries.map(async ([k, v]) => { entries.map(async ([k, v]) => {
try { try {
const { decrypted } = await decryptSecret(v) const { decrypted } = await decryptSecret(v)
return [k, decrypted] as const return [k, decrypted] as const
} catch { } catch (error) {
logger.error(`Failed to decrypt ${source} environment variable "${k}"`, {
userId,
workspaceId,
source,
error: error instanceof Error ? error.message : 'Unknown error',
})
decryptionFailures.push(k)
return [k, ''] as const return [k, ''] as const
} }
}) })
@@ -85,18 +92,28 @@ export async function getPersonalAndWorkspaceEnv(
} }
const [personalDecrypted, workspaceDecrypted] = await Promise.all([ const [personalDecrypted, workspaceDecrypted] = await Promise.all([
decryptAll(personalEncrypted), decryptAll(personalEncrypted, 'personal'),
decryptAll(workspaceEncrypted), decryptAll(workspaceEncrypted, 'workspace'),
]) ])
const conflicts = Object.keys(personalEncrypted).filter((k) => k in workspaceEncrypted) const conflicts = Object.keys(personalEncrypted).filter((k) => k in workspaceEncrypted)
if (decryptionFailures.length > 0) {
logger.warn('Some environment variables failed to decrypt', {
userId,
workspaceId,
failedKeys: decryptionFailures,
failedCount: decryptionFailures.length,
})
}
return { return {
personalEncrypted, personalEncrypted,
workspaceEncrypted, workspaceEncrypted,
personalDecrypted, personalDecrypted,
workspaceDecrypted, workspaceDecrypted,
conflicts, conflicts,
decryptionFailures,
} }
} }
@@ -110,86 +127,3 @@ export async function getEffectiveDecryptedEnv(
) )
return { ...personalDecrypted, ...workspaceDecrypted } return { ...personalDecrypted, ...workspaceDecrypted }
} }
/**
* Ensure all environment variables can be decrypted.
*/
export async function ensureEnvVarsDecryptable(
variables: Record<string, string>,
options: { requestId?: string } = {}
): Promise<void> {
const requestId = options.requestId
for (const [key, encryptedValue] of Object.entries(variables)) {
try {
await decryptSecret(encryptedValue)
} catch (error) {
const message = error instanceof Error ? error.message : 'Unknown error'
if (requestId) {
logger.error(`[${requestId}] Failed to decrypt environment variable "${key}"`, error)
} else {
logger.error(`Failed to decrypt environment variable "${key}"`, error)
}
throw new Error(`Failed to decrypt environment variable "${key}": ${message}`)
}
}
}
/**
* Ensure all {{ENV_VAR}} references in block subblocks resolve to decryptable values.
*/
export async function ensureBlockEnvVarsResolvable(
blocks: Record<string, BlockState>,
variables: Record<string, string>,
options: { requestId?: string } = {}
): Promise<void> {
const requestId = options.requestId
const envVarPattern = createEnvVarPattern()
await Promise.all(
Object.values(blocks).map(async (block) => {
const subBlocks = block.subBlocks ?? {}
await Promise.all(
Object.values(subBlocks).map(async (subBlock) => {
const value = subBlock.value
if (
typeof value !== 'string' ||
!value.includes(REFERENCE.ENV_VAR_START) ||
!value.includes(REFERENCE.ENV_VAR_END)
) {
return
}
const matches = value.match(envVarPattern)
if (!matches) {
return
}
for (const match of matches) {
const varName = match.slice(
REFERENCE.ENV_VAR_START.length,
-REFERENCE.ENV_VAR_END.length
)
const encryptedValue = variables[varName]
if (!encryptedValue) {
throw new Error(`Environment variable "${varName}" was not found`)
}
try {
await decryptSecret(encryptedValue)
} catch (error) {
const message = error instanceof Error ? error.message : 'Unknown error'
if (requestId) {
logger.error(
`[${requestId}] Error decrypting value for variable "${varName}"`,
error
)
} else {
logger.error(`Error decrypting value for variable "${varName}"`, error)
}
throw new Error(`Failed to decrypt environment variable "${varName}": ${message}`)
}
}
})
)
})
)
}

View File

@@ -6,7 +6,6 @@ import { checkServerSideUsageLimits } from '@/lib/billing/calculations/usage-mon
import { getHighestPrioritySubscription } from '@/lib/billing/core/subscription' import { getHighestPrioritySubscription } from '@/lib/billing/core/subscription'
import { RateLimiter } from '@/lib/core/rate-limiter/rate-limiter' import { RateLimiter } from '@/lib/core/rate-limiter/rate-limiter'
import { LoggingSession } from '@/lib/logs/execution/logging-session' import { LoggingSession } from '@/lib/logs/execution/logging-session'
import { preflightWorkflowEnvVars } from '@/lib/workflows/executor/preflight'
import { getWorkspaceBilledAccountUserId } from '@/lib/workspaces/utils' import { getWorkspaceBilledAccountUserId } from '@/lib/workspaces/utils'
import type { CoreTriggerType } from '@/stores/logs/filters/types' import type { CoreTriggerType } from '@/stores/logs/filters/types'
@@ -118,15 +117,13 @@ export interface PreprocessExecutionOptions {
checkRateLimit?: boolean // Default: false for manual/chat, true for others checkRateLimit?: boolean // Default: false for manual/chat, true for others
checkDeployment?: boolean // Default: true for non-manual triggers checkDeployment?: boolean // Default: true for non-manual triggers
skipUsageLimits?: boolean // Default: false (only use for test mode) skipUsageLimits?: boolean // Default: false (only use for test mode)
preflightEnvVars?: boolean // Default: false
// Context information // Context information
workspaceId?: string // If known, used for billing resolution workspaceId?: string // If known, used for billing resolution
loggingSession?: LoggingSession // If provided, will be used for error logging loggingSession?: LoggingSession // If provided, will be used for error logging
isResumeContext?: boolean // If true, allows fallback billing on resolution failure (for paused workflow resumes) isResumeContext?: boolean // If true, allows fallback billing on resolution failure (for paused workflow resumes)
/** @deprecated No longer used - preflight always uses deployed state */ /** @deprecated No longer used - background/async executions always use deployed state */
useDraftState?: boolean useDraftState?: boolean
envUserId?: string // Optional override for env var resolution user
} }
/** /**
@@ -164,11 +161,9 @@ export async function preprocessExecution(
checkRateLimit = triggerType !== 'manual' && triggerType !== 'chat', checkRateLimit = triggerType !== 'manual' && triggerType !== 'chat',
checkDeployment = triggerType !== 'manual', checkDeployment = triggerType !== 'manual',
skipUsageLimits = false, skipUsageLimits = false,
preflightEnvVars = false,
workspaceId: providedWorkspaceId, workspaceId: providedWorkspaceId,
loggingSession: providedLoggingSession, loggingSession: providedLoggingSession,
isResumeContext = false, isResumeContext = false,
envUserId,
} = options } = options
logger.info(`[${requestId}] Starting execution preprocessing`, { logger.info(`[${requestId}] Starting execution preprocessing`, {
@@ -483,44 +478,6 @@ export async function preprocessExecution(
} }
// ========== SUCCESS: All Checks Passed ========== // ========== SUCCESS: All Checks Passed ==========
if (preflightEnvVars) {
try {
const resolvedEnvUserId = envUserId || workflowRecord.userId || userId
await preflightWorkflowEnvVars({
workflowId,
workspaceId,
envUserId: resolvedEnvUserId,
requestId,
})
} catch (error) {
const message = error instanceof Error ? error.message : 'Env var preflight failed'
logger.warn(`[${requestId}] Env var preflight failed`, {
workflowId,
message,
})
await logPreprocessingError({
workflowId,
executionId,
triggerType,
requestId,
userId: actorUserId,
workspaceId,
errorMessage: message,
loggingSession: providedLoggingSession,
})
return {
success: false,
error: {
message,
statusCode: 400,
logCreated: true,
},
}
}
}
logger.info(`[${requestId}] All preprocessing checks passed`, { logger.info(`[${requestId}] All preprocessing checks passed`, {
workflowId, workflowId,
actorUserId, actorUserId,

View File

@@ -0,0 +1,85 @@
/**
* Server-only MCP config resolution utilities.
* This file contains functions that require server-side dependencies (database access).
* Do NOT import this file in client components.
*/
import { createLogger } from '@sim/logger'
import { getEffectiveDecryptedEnv } from '@/lib/environment/utils'
import type { McpServerConfig } from '@/lib/mcp/types'
import { resolveEnvVarReferences } from '@/executor/utils/reference-validation'
const logger = createLogger('McpResolveConfig')
export interface ResolveMcpConfigOptions {
  /** If true, throws an error when env vars are missing. Default: true */
  strict?: boolean
}

/**
 * Resolve {{ENV_VAR}} references in an MCP server config (url, headers).
 * Shared utility used by both the MCP service and the test-connection endpoint.
 *
 * @param config - MCP server config with potential {{ENV_VAR}} patterns
 * @param userId - User whose environment variables are fetched
 * @param workspaceId - Optional workspace for workspace-scoped env vars
 * @param options - Resolution options; strict mode throws on missing vars
 * @returns The resolved config plus the list of missing variable names (may contain duplicates)
 * @throws When strict and at least one referenced variable is missing
 */
export async function resolveMcpConfigEnvVars(
  config: McpServerConfig,
  userId: string,
  workspaceId?: string,
  options: ResolveMcpConfigOptions = {}
): Promise<{ config: McpServerConfig; missingVars: string[] }> {
  const { strict = true } = options

  let envVars: Record<string, string>
  try {
    envVars = await getEffectiveDecryptedEnv(userId, workspaceId)
  } catch (error) {
    // Best effort: without env vars we return the config untouched.
    logger.error('Failed to fetch environment variables for MCP config:', error)
    return { config, missingVars: [] }
  }

  const allMissingVars: string[] = []
  const substitute = (raw: string): string => {
    const missing: string[] = []
    const result = resolveEnvVarReferences(raw, envVars, {
      missingKeys: missing,
    }) as string
    allMissingVars.push(...missing)
    return result
  }

  const resolvedConfig = { ...config }
  if (resolvedConfig.url) {
    resolvedConfig.url = substitute(resolvedConfig.url)
  }
  if (resolvedConfig.headers) {
    const nextHeaders: Record<string, string> = {}
    for (const [name, value] of Object.entries(resolvedConfig.headers)) {
      nextHeaders[name] = substitute(value)
    }
    resolvedConfig.headers = nextHeaders
  }

  if (allMissingVars.length > 0) {
    const uniqueMissing = Array.from(new Set(allMissingVars))
    if (strict) {
      throw new Error(
        `Missing required environment variable${uniqueMissing.length > 1 ? 's' : ''}: ${uniqueMissing.join(', ')}. ` +
          `Please set ${uniqueMissing.length > 1 ? 'these variables' : 'this variable'} in your workspace or personal environment settings.`
      )
    }
    for (const envKey of uniqueMissing) {
      logger.warn(`Environment variable "${envKey}" not found in MCP config`)
    }
  }

  return { config: resolvedConfig, missingVars: allMissingVars }
}

View File

@@ -8,8 +8,8 @@ import { createLogger } from '@sim/logger'
import { and, eq, isNull } from 'drizzle-orm' import { and, eq, isNull } from 'drizzle-orm'
import { isTest } from '@/lib/core/config/feature-flags' import { isTest } from '@/lib/core/config/feature-flags'
import { generateRequestId } from '@/lib/core/utils/request' import { generateRequestId } from '@/lib/core/utils/request'
import { getEffectiveDecryptedEnv } from '@/lib/environment/utils'
import { McpClient } from '@/lib/mcp/client' import { McpClient } from '@/lib/mcp/client'
import { resolveMcpConfigEnvVars } from '@/lib/mcp/resolve-config'
import { import {
createMcpCacheAdapter, createMcpCacheAdapter,
getMcpCacheType, getMcpCacheType,
@@ -25,7 +25,6 @@ import type {
McpTransport, McpTransport,
} from '@/lib/mcp/types' } from '@/lib/mcp/types'
import { MCP_CONSTANTS } from '@/lib/mcp/utils' import { MCP_CONSTANTS } from '@/lib/mcp/utils'
import { resolveEnvVarReferences } from '@/executor/utils/reference-validation'
const logger = createLogger('McpService') const logger = createLogger('McpService')
@@ -47,60 +46,18 @@ class McpService {
} }
/** /**
* Resolve environment variables in strings * Resolve environment variables in server config.
*/ * Uses shared utility with strict mode (throws on missing vars).
private resolveEnvVars(value: string, envVars: Record<string, string>): string {
const missingVars: string[] = []
const resolvedValue = resolveEnvVarReferences(value, envVars, {
allowEmbedded: true,
resolveExactMatch: true,
trimKeys: true,
onMissing: 'keep',
deep: false,
missingKeys: missingVars,
}) as string
if (missingVars.length > 0) {
const uniqueMissing = Array.from(new Set(missingVars))
throw new Error(
`Missing required environment variable${uniqueMissing.length > 1 ? 's' : ''}: ${uniqueMissing.join(', ')}. ` +
`Please set ${uniqueMissing.length > 1 ? 'these variables' : 'this variable'} in your workspace or personal environment settings.`
)
}
return resolvedValue
}
/**
* Resolve environment variables in server config
*/ */
private async resolveConfigEnvVars( private async resolveConfigEnvVars(
config: McpServerConfig, config: McpServerConfig,
userId: string, userId: string,
workspaceId?: string workspaceId?: string
): Promise<McpServerConfig> { ): Promise<McpServerConfig> {
try { const { config: resolvedConfig } = await resolveMcpConfigEnvVars(config, userId, workspaceId, {
const envVars = await getEffectiveDecryptedEnv(userId, workspaceId) strict: true,
})
const resolvedConfig = { ...config } return resolvedConfig
if (resolvedConfig.url) {
resolvedConfig.url = this.resolveEnvVars(resolvedConfig.url, envVars)
}
if (resolvedConfig.headers) {
const resolvedHeaders: Record<string, string> = {}
for (const [key, value] of Object.entries(resolvedConfig.headers)) {
resolvedHeaders[key] = this.resolveEnvVars(value, envVars)
}
resolvedConfig.headers = resolvedHeaders
}
return resolvedConfig
} catch (error) {
logger.error('Failed to resolve environment variables for MCP server config:', error)
return config
}
} }
/** /**

View File

@@ -0,0 +1,48 @@
/**
* Shared MCP utilities - safe for both client and server.
* No server-side dependencies (database, fs, etc.) should be imported here.
*/
import { isMcpTool, MCP } from '@/executor/constants'
/**
 * Strips invisible Unicode characters that break HTTP headers, then trims.
 * Covers separators like U+2028 (Line Separator), zero-width characters,
 * the BOM, and ASCII control characters — all of which can be introduced
 * via copy-paste.
 */
export function sanitizeForHttp(value: string): string {
  return value.replace(/[\u2028\u2029\u200B-\u200D\uFEFF\x00-\x1F\x7F]/g, '').trim()
}

/**
 * Sanitizes every header key/value pair for HTTP usage, dropping entries
 * whose key or value becomes empty after sanitization.
 */
export function sanitizeHeaders(
  headers: Record<string, string> | undefined
): Record<string, string> | undefined {
  if (!headers) return headers
  const clean: Record<string, string> = {}
  for (const [key, value] of Object.entries(headers)) {
    const safeKey = sanitizeForHttp(key)
    const safeValue = sanitizeForHttp(value)
    if (safeKey !== '' && safeValue !== '') {
      clean[safeKey] = safeValue
    }
  }
  return clean
}
/**
 * Client-safe MCP constants (no server-only dependencies).
 */
export const MCP_CLIENT_CONSTANTS = {
  /** Client call timeout — presumably milliseconds; confirm against callers. */
  CLIENT_TIMEOUT: 60_000,
  /** Maximum number of retry attempts. */
  MAX_RETRIES: 3,
  /** Delay before a reconnect attempt — presumably milliseconds. */
  RECONNECT_DELAY: 1_000,
} as const
/**
 * Create a standardized MCP tool ID from a server ID and tool name.
 * The server ID gets the MCP tool prefix prepended unless it already has it.
 */
export function createMcpToolId(serverId: string, toolName: string): string {
  if (isMcpTool(serverId)) {
    return `${serverId}-${toolName}`
  }
  return `${MCP.TOOL_PREFIX}${serverId}-${toolName}`
}

View File

@@ -32,6 +32,12 @@ interface TriggerSaveError {
interface TriggerSaveResult { interface TriggerSaveResult {
success: boolean success: boolean
error?: TriggerSaveError error?: TriggerSaveError
warnings?: string[]
}
interface CredentialSetSyncResult {
error: TriggerSaveError | null
warnings: string[]
} }
interface SaveTriggerWebhooksInput { interface SaveTriggerWebhooksInput {
@@ -42,6 +48,16 @@ interface SaveTriggerWebhooksInput {
blocks: Record<string, BlockState> blocks: Record<string, BlockState>
requestId: string requestId: string
deploymentVersionId?: string deploymentVersionId?: string
/**
* The previous active version's ID. Only this version's external subscriptions
* will be cleaned up (along with draft webhooks). If not provided, skips cleanup.
*/
previousVersionId?: string
/**
* When true, forces recreation of external subscriptions even if webhook config is unchanged.
* Used when activating a previous deployment version whose subscriptions were cleaned up.
*/
forceRecreateSubscriptions?: boolean
} }
function getSubBlockValue(block: BlockState, subBlockId: string): unknown { function getSubBlockValue(block: BlockState, subBlockId: string): unknown {
@@ -248,7 +264,7 @@ async function syncCredentialSetWebhooks(params: {
providerConfig: Record<string, unknown> providerConfig: Record<string, unknown>
requestId: string requestId: string
deploymentVersionId?: string deploymentVersionId?: string
}): Promise<TriggerSaveError | null> { }): Promise<CredentialSetSyncResult> {
const { const {
workflowId, workflowId,
blockId, blockId,
@@ -261,7 +277,7 @@ async function syncCredentialSetWebhooks(params: {
const credentialSetId = providerConfig.credentialSetId as string | undefined const credentialSetId = providerConfig.credentialSetId as string | undefined
if (!credentialSetId) { if (!credentialSetId) {
return null return { error: null, warnings: [] }
} }
const oauthProviderId = getProviderIdFromServiceId(provider) const oauthProviderId = getProviderIdFromServiceId(provider)
@@ -280,10 +296,23 @@ async function syncCredentialSetWebhooks(params: {
deploymentVersionId, deploymentVersionId,
}) })
const warnings: string[] = []
if (syncResult.failed.length > 0) {
const failedCount = syncResult.failed.length
const totalCount = syncResult.webhooks.length + failedCount
warnings.push(
`${failedCount} of ${totalCount} credentials in the set failed to sync for ${provider}. Some team members may not receive triggers.`
)
}
if (syncResult.webhooks.length === 0) { if (syncResult.webhooks.length === 0) {
return { return {
message: `No valid credentials found in credential set for ${provider}. Please connect accounts and try again.`, error: {
status: 400, message: `No valid credentials found in credential set for ${provider}. Please connect accounts and try again.`,
status: 400,
},
warnings,
} }
} }
@@ -297,8 +326,11 @@ async function syncCredentialSetWebhooks(params: {
if (!success) { if (!success) {
await db.delete(webhook).where(eq(webhook.id, wh.id)) await db.delete(webhook).where(eq(webhook.id, wh.id))
return { return {
message: `Failed to configure ${provider} polling. Please check account permissions.`, error: {
status: 500, message: `Failed to configure ${provider} polling. Please check account permissions.`,
status: 500,
},
warnings,
} }
} }
} }
@@ -306,84 +338,7 @@ async function syncCredentialSetWebhooks(params: {
} }
} }
return null return { error: null, warnings }
}
async function createWebhookForBlock(params: {
request: NextRequest
workflowId: string
workflow: Record<string, unknown>
userId: string
block: BlockState
provider: string
providerConfig: Record<string, unknown>
triggerPath: string
requestId: string
deploymentVersionId?: string
}): Promise<TriggerSaveError | null> {
const {
request,
workflowId,
workflow,
userId,
block,
provider,
providerConfig,
triggerPath,
requestId,
deploymentVersionId,
} = params
const webhookId = nanoid()
const createPayload = {
id: webhookId,
path: triggerPath,
provider,
providerConfig,
}
const result = await createExternalWebhookSubscription(
request,
createPayload,
workflow,
userId,
requestId
)
const updatedProviderConfig = result.updatedProviderConfig as Record<string, unknown>
let savedWebhook: any
try {
const createdRows = await db
.insert(webhook)
.values({
id: webhookId,
workflowId,
deploymentVersionId: deploymentVersionId || null,
blockId: block.id,
path: triggerPath,
provider,
providerConfig: updatedProviderConfig,
credentialSetId: (updatedProviderConfig.credentialSetId as string | undefined) || null,
isActive: true,
createdAt: new Date(),
updatedAt: new Date(),
})
.returning()
savedWebhook = createdRows[0]
} catch (error) {
if (result.externalSubscriptionCreated) {
await cleanupExternalWebhook(createPayload, workflow, requestId)
}
throw error
}
const pollingError = await configurePollingIfNeeded(provider, savedWebhook, requestId)
if (pollingError) {
return pollingError
}
return null
} }
/** /**
@@ -398,23 +353,57 @@ export async function saveTriggerWebhooksForDeploy({
blocks, blocks,
requestId, requestId,
deploymentVersionId, deploymentVersionId,
previousVersionId,
forceRecreateSubscriptions = false,
}: SaveTriggerWebhooksInput): Promise<TriggerSaveResult> { }: SaveTriggerWebhooksInput): Promise<TriggerSaveResult> {
const triggerBlocks = Object.values(blocks || {}).filter(Boolean) const triggerBlocks = Object.values(blocks || {}).filter((b) => b && b.enabled !== false)
const currentBlockIds = new Set(triggerBlocks.map((b) => b.id)) const currentBlockIds = new Set(triggerBlocks.map((b) => b.id))
// 1. Get all existing webhooks for this workflow // 1. Get ALL webhooks for this workflow (all versions including draft)
const existingWebhooks = await db const allWorkflowWebhooks = await db
.select() .select()
.from(webhook) .from(webhook)
.where( .where(eq(webhook.workflowId, workflowId))
deploymentVersionId
? and( // Separate webhooks by version: current deployment vs others
eq(webhook.workflowId, workflowId), const existingWebhooks: typeof allWorkflowWebhooks = []
eq(webhook.deploymentVersionId, deploymentVersionId)
) for (const wh of allWorkflowWebhooks) {
: eq(webhook.workflowId, workflowId) if (deploymentVersionId && wh.deploymentVersionId === deploymentVersionId) {
existingWebhooks.push(wh)
}
}
if (previousVersionId) {
const webhooksToCleanup = allWorkflowWebhooks.filter(
(wh) => wh.deploymentVersionId === previousVersionId
) )
if (webhooksToCleanup.length > 0) {
logger.info(
`[${requestId}] Cleaning up ${webhooksToCleanup.length} external subscription(s) from previous version`
)
for (const wh of webhooksToCleanup) {
try {
await cleanupExternalWebhook(wh, workflow, requestId)
} catch (cleanupError) {
logger.warn(`[${requestId}] Failed to cleanup external webhook ${wh.id}`, cleanupError)
}
}
}
}
const restorePreviousSubscriptions = async () => {
if (!previousVersionId) return
await restorePreviousVersionWebhooks({
request,
workflow,
userId,
previousVersionId,
requestId,
})
}
const webhooksByBlockId = new Map<string, typeof existingWebhooks>() const webhooksByBlockId = new Map<string, typeof existingWebhooks>()
for (const wh of existingWebhooks) { for (const wh of existingWebhooks) {
if (!wh.blockId) continue if (!wh.blockId) continue
@@ -429,7 +418,14 @@ export async function saveTriggerWebhooksForDeploy({
existingWebhookBlockIds: Array.from(webhooksByBlockId.keys()), existingWebhookBlockIds: Array.from(webhooksByBlockId.keys()),
}) })
// 2. Determine which webhooks to delete (orphaned or config changed) type WebhookConfig = {
provider: string
providerConfig: Record<string, unknown>
triggerPath: string
triggerDef: ReturnType<typeof getTrigger>
}
const webhookConfigs = new Map<string, WebhookConfig>()
const webhooksToDelete: typeof existingWebhooks = [] const webhooksToDelete: typeof existingWebhooks = []
const blocksNeedingWebhook: BlockState[] = [] const blocksNeedingWebhook: BlockState[] = []
const blocksNeedingCredentialSetSync: BlockState[] = [] const blocksNeedingCredentialSetSync: BlockState[] = []
@@ -447,6 +443,7 @@ export async function saveTriggerWebhooksForDeploy({
) )
if (missingFields.length > 0) { if (missingFields.length > 0) {
await restorePreviousSubscriptions()
return { return {
success: false, success: false,
error: { error: {
@@ -455,9 +452,8 @@ export async function saveTriggerWebhooksForDeploy({
}, },
} }
} }
// Store config for later use
;(block as any)._webhookConfig = { provider, providerConfig, triggerPath, triggerDef } webhookConfigs.set(block.id, { provider, providerConfig, triggerPath, triggerDef })
if (providerConfig.credentialSetId) { if (providerConfig.credentialSetId) {
blocksNeedingCredentialSetSync.push(block) blocksNeedingCredentialSetSync.push(block)
@@ -477,22 +473,29 @@ export async function saveTriggerWebhooksForDeploy({
) )
} }
// Check if config changed // Check if config changed or if we're forcing recreation (e.g., activating old version)
const existingConfig = (existingWh.providerConfig as Record<string, unknown>) || {} const existingConfig = (existingWh.providerConfig as Record<string, unknown>) || {}
if ( const needsRecreation =
forceRecreateSubscriptions ||
shouldRecreateExternalWebhookSubscription({ shouldRecreateExternalWebhookSubscription({
previousProvider: existingWh.provider as string, previousProvider: existingWh.provider as string,
nextProvider: provider, nextProvider: provider,
previousConfig: existingConfig, previousConfig: existingConfig,
nextConfig: providerConfig, nextConfig: providerConfig,
}) })
) {
// Config changed - delete and recreate if (needsRecreation) {
webhooksToDelete.push(existingWh) webhooksToDelete.push(existingWh)
blocksNeedingWebhook.push(block) blocksNeedingWebhook.push(block)
logger.info(`[${requestId}] Webhook config changed for block ${block.id}, will recreate`) if (forceRecreateSubscriptions) {
logger.info(
`[${requestId}] Forcing webhook recreation for block ${block.id} (reactivating version)`
)
} else {
logger.info(`[${requestId}] Webhook config changed for block ${block.id}, will recreate`)
}
} }
// else: config unchanged, keep existing webhook // else: config unchanged and not forcing recreation, keep existing webhook
} }
} }
@@ -522,15 +525,16 @@ export async function saveTriggerWebhooksForDeploy({
await db.delete(webhook).where(inArray(webhook.id, idsToDelete)) await db.delete(webhook).where(inArray(webhook.id, idsToDelete))
} }
// 4. Sync credential set webhooks const collectedWarnings: string[] = []
for (const block of blocksNeedingCredentialSetSync) { for (const block of blocksNeedingCredentialSetSync) {
const config = (block as any)._webhookConfig const config = webhookConfigs.get(block.id)
if (!config) continue if (!config) continue
const { provider, providerConfig, triggerPath } = config const { provider, providerConfig, triggerPath } = config
try { try {
const credentialSetError = await syncCredentialSetWebhooks({ const syncResult = await syncCredentialSetWebhooks({
workflowId, workflowId,
blockId: block.id, blockId: block.id,
provider, provider,
@@ -540,74 +544,214 @@ export async function saveTriggerWebhooksForDeploy({
deploymentVersionId, deploymentVersionId,
}) })
if (credentialSetError) { if (syncResult.warnings.length > 0) {
return { success: false, error: credentialSetError } collectedWarnings.push(...syncResult.warnings)
}
if (syncResult.error) {
await restorePreviousSubscriptions()
return { success: false, error: syncResult.error, warnings: collectedWarnings }
} }
} catch (error: any) { } catch (error: any) {
logger.error(`[${requestId}] Failed to create webhook for ${block.id}`, error) logger.error(`[${requestId}] Failed to create webhook for ${block.id}`, error)
await restorePreviousSubscriptions()
return { return {
success: false, success: false,
error: { error: {
message: error?.message || 'Failed to save trigger configuration', message: error?.message || 'Failed to save trigger configuration',
status: 500, status: 500,
}, },
warnings: collectedWarnings,
} }
} }
} }
// 5. Create webhooks for blocks that need them // 5. Create webhooks for blocks that need them (two-phase approach for atomicity)
const createdSubscriptions: Array<{
webhookId: string
block: BlockState
provider: string
triggerPath: string
updatedProviderConfig: Record<string, unknown>
externalSubscriptionCreated: boolean
}> = []
for (const block of blocksNeedingWebhook) { for (const block of blocksNeedingWebhook) {
const config = (block as any)._webhookConfig const config = webhookConfigs.get(block.id)
if (!config) continue if (!config) continue
const { provider, providerConfig, triggerPath } = config const { provider, providerConfig, triggerPath } = config
const webhookId = nanoid()
const createPayload = {
id: webhookId,
path: triggerPath,
provider,
providerConfig,
}
try { try {
const createError = await createWebhookForBlock({ const result = await createExternalWebhookSubscription(
request, request,
workflowId, createPayload,
workflow, workflow,
userId, userId,
requestId
)
createdSubscriptions.push({
webhookId,
block, block,
provider, provider,
providerConfig,
triggerPath, triggerPath,
requestId, updatedProviderConfig: result.updatedProviderConfig as Record<string, unknown>,
deploymentVersionId, externalSubscriptionCreated: result.externalSubscriptionCreated,
}) })
if (createError) {
return { success: false, error: createError }
}
} catch (error: any) { } catch (error: any) {
logger.error(`[${requestId}] Failed to create webhook for ${block.id}`, error) logger.error(`[${requestId}] Failed to create external subscription for ${block.id}`, error)
for (const sub of createdSubscriptions) {
if (sub.externalSubscriptionCreated) {
try {
await cleanupExternalWebhook(
{
id: sub.webhookId,
path: sub.triggerPath,
provider: sub.provider,
providerConfig: sub.updatedProviderConfig,
},
workflow,
requestId
)
} catch (cleanupError) {
logger.warn(
`[${requestId}] Failed to cleanup external subscription for ${sub.block.id}`,
cleanupError
)
}
}
}
await restorePreviousSubscriptions()
return { return {
success: false, success: false,
error: { error: {
message: error?.message || 'Failed to save trigger configuration', message: error?.message || 'Failed to create external subscription',
status: 500, status: 500,
}, },
} }
} }
} }
// Clean up temp config // Phase 2: Insert all DB records in a transaction
for (const block of triggerBlocks) { try {
;(block as any)._webhookConfig = undefined await db.transaction(async (tx) => {
for (const sub of createdSubscriptions) {
await tx.insert(webhook).values({
id: sub.webhookId,
workflowId,
deploymentVersionId: deploymentVersionId || null,
blockId: sub.block.id,
path: sub.triggerPath,
provider: sub.provider,
providerConfig: sub.updatedProviderConfig,
credentialSetId:
(sub.updatedProviderConfig.credentialSetId as string | undefined) || null,
isActive: true,
createdAt: new Date(),
updatedAt: new Date(),
})
}
})
for (const sub of createdSubscriptions) {
const pollingError = await configurePollingIfNeeded(
sub.provider,
{ id: sub.webhookId, path: sub.triggerPath, providerConfig: sub.updatedProviderConfig },
requestId
)
if (pollingError) {
logger.error(
`[${requestId}] Polling configuration failed for ${sub.block.id}`,
pollingError
)
for (const otherSub of createdSubscriptions) {
if (otherSub.webhookId === sub.webhookId) continue
if (otherSub.externalSubscriptionCreated) {
try {
await cleanupExternalWebhook(
{
id: otherSub.webhookId,
path: otherSub.triggerPath,
provider: otherSub.provider,
providerConfig: otherSub.updatedProviderConfig,
},
workflow,
requestId
)
} catch (cleanupError) {
logger.warn(
`[${requestId}] Failed to cleanup external subscription for ${otherSub.block.id}`,
cleanupError
)
}
}
}
const otherWebhookIds = createdSubscriptions
.filter((s) => s.webhookId !== sub.webhookId)
.map((s) => s.webhookId)
if (otherWebhookIds.length > 0) {
await db.delete(webhook).where(inArray(webhook.id, otherWebhookIds))
}
await restorePreviousSubscriptions()
return { success: false, error: pollingError }
}
}
} catch (error: any) {
logger.error(`[${requestId}] Failed to insert webhook records`, error)
for (const sub of createdSubscriptions) {
if (sub.externalSubscriptionCreated) {
try {
await cleanupExternalWebhook(
{
id: sub.webhookId,
path: sub.triggerPath,
provider: sub.provider,
providerConfig: sub.updatedProviderConfig,
},
workflow,
requestId
)
} catch (cleanupError) {
logger.warn(
`[${requestId}] Failed to cleanup external subscription for ${sub.block.id}`,
cleanupError
)
}
}
}
await restorePreviousSubscriptions()
return {
success: false,
error: {
message: error?.message || 'Failed to save webhook records',
status: 500,
},
}
} }
return { success: true } return { success: true, warnings: collectedWarnings.length > 0 ? collectedWarnings : undefined }
} }
/** /**
* Clean up all webhooks for a workflow during undeploy. * Clean up all webhooks for a workflow during undeploy.
* Removes external subscriptions and deletes webhook records from the database. * Removes external subscriptions and deletes webhook records from the database.
*
* @param skipExternalCleanup - If true, skip external subscription cleanup (already done elsewhere)
*/ */
export async function cleanupWebhooksForWorkflow( export async function cleanupWebhooksForWorkflow(
workflowId: string, workflowId: string,
workflow: Record<string, unknown>, workflow: Record<string, unknown>,
requestId: string, requestId: string,
deploymentVersionId?: string deploymentVersionId?: string,
skipExternalCleanup = false
): Promise<void> { ): Promise<void> {
const existingWebhooks = await db const existingWebhooks = await db
.select() .select()
@@ -626,23 +770,26 @@ export async function cleanupWebhooksForWorkflow(
return return
} }
logger.info(`[${requestId}] Cleaning up ${existingWebhooks.length} webhook(s) for undeploy`, { logger.info(
workflowId, `[${requestId}] Cleaning up ${existingWebhooks.length} webhook(s) for ${skipExternalCleanup ? 'DB records only' : 'undeploy'}`,
deploymentVersionId, {
webhookIds: existingWebhooks.map((wh) => wh.id), workflowId,
}) deploymentVersionId,
webhookIds: existingWebhooks.map((wh) => wh.id),
}
)
// Clean up external subscriptions if (!skipExternalCleanup) {
for (const wh of existingWebhooks) { for (const wh of existingWebhooks) {
try { try {
await cleanupExternalWebhook(wh, workflow, requestId) await cleanupExternalWebhook(wh, workflow, requestId)
} catch (cleanupError) { } catch (cleanupError) {
logger.warn(`[${requestId}] Failed to cleanup external webhook ${wh.id}`, cleanupError) logger.warn(`[${requestId}] Failed to cleanup external webhook ${wh.id}`, cleanupError)
// Continue with other webhooks even if one fails // Continue with other webhooks even if one fails
}
} }
} }
// Delete all webhook records
await db await db
.delete(webhook) .delete(webhook)
.where( .where(
@@ -660,3 +807,55 @@ export async function cleanupWebhooksForWorkflow(
: `[${requestId}] Cleaned up all webhooks for workflow ${workflowId}` : `[${requestId}] Cleaned up all webhooks for workflow ${workflowId}`
) )
} }
/**
* Restore external subscriptions for a previous deployment version.
* Used when activation/deployment fails after webhooks were created,
* to restore the previous version's external subscriptions.
*/
export async function restorePreviousVersionWebhooks(params: {
request: NextRequest
workflow: Record<string, unknown>
userId: string
previousVersionId: string
requestId: string
}): Promise<void> {
const { request, workflow, userId, previousVersionId, requestId } = params
const previousWebhooks = await db
.select()
.from(webhook)
.where(eq(webhook.deploymentVersionId, previousVersionId))
if (previousWebhooks.length === 0) {
logger.debug(`[${requestId}] No previous webhooks to restore for version ${previousVersionId}`)
return
}
logger.info(
`[${requestId}] Restoring ${previousWebhooks.length} external subscription(s) for previous version ${previousVersionId}`
)
for (const wh of previousWebhooks) {
try {
await createExternalWebhookSubscription(
request,
{
id: wh.id,
path: wh.path,
provider: wh.provider,
providerConfig: (wh.providerConfig as Record<string, unknown>) || {},
},
workflow,
userId,
requestId
)
logger.info(`[${requestId}] Restored external subscription for webhook ${wh.id}`)
} catch (restoreError) {
logger.error(
`[${requestId}] Failed to restore external subscription for webhook ${wh.id}`,
restoreError
)
}
}
}

View File

@@ -1,72 +1,22 @@
import { createLogger } from '@sim/logger'
import { getEffectiveDecryptedEnv } from '@/lib/environment/utils' import { getEffectiveDecryptedEnv } from '@/lib/environment/utils'
import { extractEnvVarName, isEnvVarReference } from '@/executor/constants' import { resolveEnvVarReferences } from '@/executor/utils/reference-validation'
const logger = createLogger('EnvResolver')
/** /**
* Resolves environment variable references in a string value * Recursively resolves all environment variable references in a configuration object.
* Uses the same helper functions as the executor's EnvResolver * Supports both exact matches (`{{VAR_NAME}}`) and embedded patterns (`https://{{HOST}}/path`).
* *
* @param value - The string that may contain env var references * Uses `deep: true` because webhook configs have nested structures that need full resolution.
* @param envVars - Object containing environment variable key-value pairs
* @returns The resolved string with env vars replaced
*/
function resolveEnvVarInString(value: string, envVars: Record<string, string>): string {
if (!isEnvVarReference(value)) {
return value
}
const varName = extractEnvVarName(value)
const resolvedValue = envVars[varName]
if (resolvedValue === undefined) {
logger.warn(`Environment variable not found: ${varName}`)
return value // Return original if not found
}
logger.debug(`Resolved environment variable: ${varName}`)
return resolvedValue
}
/**
* Recursively resolves all environment variable references in a configuration object
* Supports the pattern: {{VAR_NAME}}
* *
* @param config - Configuration object that may contain env var references * @param config - Configuration object that may contain env var references
* @param userId - User ID to fetch environment variables for * @param userId - User ID to fetch environment variables for
* @param workspaceId - Optional workspace ID for workspace-specific env vars * @param workspaceId - Optional workspace ID for workspace-specific env vars
* @returns A new object with all env var references resolved * @returns A new object with all env var references resolved
*/ */
export async function resolveEnvVarsInObject( export async function resolveEnvVarsInObject<T extends Record<string, unknown>>(
config: Record<string, any>, config: T,
userId: string, userId: string,
workspaceId?: string workspaceId?: string
): Promise<Record<string, any>> { ): Promise<T> {
const envVars = await getEffectiveDecryptedEnv(userId, workspaceId) const envVars = await getEffectiveDecryptedEnv(userId, workspaceId)
return resolveEnvVarReferences(config, envVars, { deep: true }) as T
const resolved = { ...config }
function resolveValue(value: any): any {
if (typeof value === 'string') {
return resolveEnvVarInString(value, envVars)
}
if (Array.isArray(value)) {
return value.map(resolveValue)
}
if (value !== null && typeof value === 'object') {
const resolvedObj: Record<string, any> = {}
for (const [key, val] of Object.entries(value)) {
resolvedObj[key] = resolveValue(val)
}
return resolvedObj
}
return value
}
for (const [key, value] of Object.entries(resolved)) {
resolved[key] = resolveValue(value)
}
return resolved
} }

View File

@@ -161,7 +161,7 @@ export async function pollGmailWebhooks() {
const metadata = webhookData.providerConfig as any const metadata = webhookData.providerConfig as any
const credentialId: string | undefined = metadata?.credentialId const credentialId: string | undefined = metadata?.credentialId
const userId: string | undefined = metadata?.userId const userId: string | undefined = metadata?.userId
const credentialSetId: string | undefined = metadata?.credentialSetId const credentialSetId: string | undefined = webhookData.credentialSetId ?? undefined
if (!credentialId && !userId) { if (!credentialId && !userId) {
logger.error(`[${requestId}] Missing credential info for webhook ${webhookId}`) logger.error(`[${requestId}] Missing credential info for webhook ${webhookId}`)
@@ -697,7 +697,6 @@ async function processEmails(
method: 'POST', method: 'POST',
headers: { headers: {
'Content-Type': 'application/json', 'Content-Type': 'application/json',
'X-Webhook-Secret': webhookData.secret || '',
'User-Agent': 'Sim/1.0', 'User-Agent': 'Sim/1.0',
}, },
body: JSON.stringify(payload), body: JSON.stringify(payload),
@@ -766,17 +765,21 @@ async function markEmailAsRead(accessToken: string, messageId: string) {
} }
async function updateWebhookLastChecked(webhookId: string, timestamp: string, historyId?: string) { async function updateWebhookLastChecked(webhookId: string, timestamp: string, historyId?: string) {
const result = await db.select().from(webhook).where(eq(webhook.id, webhookId)) try {
const existingConfig = (result[0]?.providerConfig as Record<string, any>) || {} const result = await db.select().from(webhook).where(eq(webhook.id, webhookId))
await db const existingConfig = (result[0]?.providerConfig as Record<string, any>) || {}
.update(webhook) await db
.set({ .update(webhook)
providerConfig: { .set({
...existingConfig, providerConfig: {
lastCheckedTimestamp: timestamp, ...existingConfig,
...(historyId ? { historyId } : {}), lastCheckedTimestamp: timestamp,
} as any, ...(historyId ? { historyId } : {}),
updatedAt: new Date(), } as any,
}) updatedAt: new Date(),
.where(eq(webhook.id, webhookId)) })
.where(eq(webhook.id, webhookId))
} catch (error) {
logger.error(`Error updating webhook ${webhookId} last checked timestamp:`, error)
}
} }

Some files were not shown because too many files have changed in this diff Show More