Compare commits

..

6 Commits

Author SHA1 Message Date
priyanshu.solanki
d615c78a30 undo-redo-functionality for code blocks 2025-12-19 13:34:05 -07:00
priyanshu.solanki
b7f25786ce fixed the referencing errors and making sure it propagates to the console 2025-12-19 11:57:44 -07:00
priyanshu.solanki
3a9e5f3b78 extracted utility functions 2025-12-19 11:13:41 -07:00
priyanshu.solanki
39444fa1a8 fixed for empty loop and parallel blocks and showing input on dashboard 2025-12-18 22:16:55 -07:00
Siddharth Ganesan
45ca926e6d Fix array check for collection 2025-12-18 20:18:19 -08:00
priyanshu.solanki
77ee01747d fixed logs for parallel and loop execution flow 2025-12-18 21:01:13 -07:00
93 changed files with 1786 additions and 17645 deletions

View File

@@ -6,10 +6,7 @@ import { source } from '@/lib/source'
export const revalidate = false
export async function GET(
_request: NextRequest,
{ params }: { params: Promise<{ slug?: string[] }> }
) {
export async function GET(_req: NextRequest, { params }: { params: Promise<{ slug?: string[] }> }) {
const { slug } = await params
let lang: (typeof i18n.languages)[number] = i18n.defaultLanguage

View File

@@ -120,117 +120,117 @@ import {
type IconComponent = ComponentType<SVGProps<SVGSVGElement>>
/**
 * Maps a block type identifier (snake_case string used throughout the app)
 * to the SVG icon component rendered for that block.
 *
 * NOTE(review): the previous text contained every entry twice (an ascending
 * run followed by a descending run — diff-merge residue). Duplicate
 * properties in an object literal are a TypeScript error (TS1117) and the
 * later entry silently overrides the earlier one. Since each duplicated key
 * mapped to the identical icon, deduplicating to a single alphabetical set
 * is behavior-preserving.
 *
 * Keys are kept in alphabetical order — please preserve that when adding
 * new integrations.
 */
export const blockTypeToIconMap: Record<string, IconComponent> = {
  ahrefs: AhrefsIcon,
  airtable: AirtableIcon,
  apify: ApifyIcon,
  apollo: ApolloIcon,
  arxiv: ArxivIcon,
  asana: AsanaIcon,
  browser_use: BrowserUseIcon,
  calendly: CalendlyIcon,
  clay: ClayIcon,
  confluence: ConfluenceIcon,
  cursor: CursorIcon,
  datadog: DatadogIcon,
  discord: DiscordIcon,
  dropbox: DropboxIcon,
  duckduckgo: DuckDuckGoIcon,
  dynamodb: DynamoDBIcon,
  elasticsearch: ElasticsearchIcon,
  elevenlabs: ElevenLabsIcon,
  exa: ExaAIIcon,
  file: DocumentIcon,
  firecrawl: FirecrawlIcon,
  github: GithubIcon,
  gitlab: GitLabIcon,
  gmail: GmailIcon,
  google_calendar: GoogleCalendarIcon,
  google_docs: GoogleDocsIcon,
  google_drive: GoogleDriveIcon,
  google_forms: GoogleFormsIcon,
  google_groups: GoogleGroupsIcon,
  google_search: GoogleIcon,
  google_sheets: GoogleSheetsIcon,
  google_slides: GoogleSlidesIcon,
  google_vault: GoogleVaultIcon,
  grafana: GrafanaIcon,
  hubspot: HubspotIcon,
  huggingface: HuggingFaceIcon,
  hunter: HunterIOIcon,
  image_generator: ImageIcon,
  incidentio: IncidentioIcon,
  intercom: IntercomIcon,
  jina: JinaAIIcon,
  jira: JiraIcon,
  kalshi: KalshiIcon,
  knowledge: PackageSearchIcon,
  linear: LinearIcon,
  linkedin: LinkedInIcon,
  linkup: LinkupIcon,
  mailchimp: MailchimpIcon,
  mailgun: MailgunIcon,
  mem0: Mem0Icon,
  memory: BrainIcon,
  microsoft_excel: MicrosoftExcelIcon,
  microsoft_planner: MicrosoftPlannerIcon,
  microsoft_teams: MicrosoftTeamsIcon,
  mistral_parse: MistralIcon,
  mongodb: MongoDBIcon,
  mysql: MySQLIcon,
  neo4j: Neo4jIcon,
  notion: NotionIcon,
  onedrive: MicrosoftOneDriveIcon,
  openai: OpenAIIcon,
  outlook: OutlookIcon,
  parallel_ai: ParallelIcon,
  perplexity: PerplexityIcon,
  pinecone: PineconeIcon,
  pipedrive: PipedriveIcon,
  polymarket: PolymarketIcon,
  postgresql: PostgresIcon,
  posthog: PosthogIcon,
  qdrant: QdrantIcon,
  rds: RDSIcon,
  reddit: RedditIcon,
  resend: ResendIcon,
  s3: S3Icon,
  salesforce: SalesforceIcon,
  search: SearchIcon,
  sendgrid: SendgridIcon,
  sentry: SentryIcon,
  serper: SerperIcon,
  servicenow: ServiceNowIcon,
  sftp: SftpIcon,
  sharepoint: MicrosoftSharepointIcon,
  shopify: ShopifyIcon,
  slack: SlackIcon,
  smtp: SmtpIcon,
  spotify: SpotifyIcon,
  sqs: SQSIcon,
  ssh: SshIcon,
  stagehand: StagehandIcon,
  stripe: StripeIcon,
  stt: STTIcon,
  supabase: SupabaseIcon,
  tavily: TavilyIcon,
  telegram: TelegramIcon,
  thinking: BrainIcon,
  translate: TranslateIcon,
  trello: TrelloIcon,
  tts: TTSIcon,
  twilio_sms: TwilioIcon,
  twilio_voice: TwilioIcon,
  typeform: TypeformIcon,
  video_generator: VideoIcon,
  vision: EyeIcon,
  wealthbox: WealthboxIcon,
  webflow: WebflowIcon,
  whatsapp: WhatsAppIcon,
  wikipedia: WikipediaIcon,
  wordpress: WordpressIcon,
  x: xIcon,
  youtube: YouTubeIcon,
  zendesk: ZendeskIcon,
  zep: ZepIcon,
  zoom: ZoomIcon,
}

View File

@@ -109,12 +109,12 @@ Lesen Sie die neuesten Nachrichten aus Slack-Kanälen. Rufen Sie den Konversatio
| Parameter | Typ | Erforderlich | Beschreibung |
| --------- | ---- | -------- | ----------- |
| `authMethod` | string | Nein | Authentifizierungsmethode: oauth oder bot_token |
| `botToken` | string | Nein | Bot-Token für Custom Bot |
| `botToken` | string | Nein | Bot-Token für benutzerdefinierten Bot |
| `channel` | string | Nein | Slack-Kanal, aus dem Nachrichten gelesen werden sollen \(z.B. #general\) |
| `userId` | string | Nein | Benutzer-ID für DM-Konversation \(z.B. U1234567890\) |
| `limit` | number | Nein | Anzahl der abzurufenden Nachrichten \(Standard: 10, max: 15\) |
| `oldest` | string | Nein | Beginn des Zeitbereichs \(Zeitstempel\) |
| `latest` | string | Nein | Ende des Zeitbereichs \(Zeitstempel\) |
| `limit` | number | Nein | Anzahl der abzurufenden Nachrichten \(Standard: 10, max: 100\) |
| `oldest` | string | Nein | Beginn des Zeitraums \(Zeitstempel\) |
| `latest` | string | Nein | Ende des Zeitraums \(Zeitstempel\) |
#### Ausgabe

View File

@@ -114,7 +114,7 @@ Read the latest messages from Slack channels. Retrieve conversation history with
| `botToken` | string | No | Bot token for Custom Bot |
| `channel` | string | No | Slack channel to read messages from \(e.g., #general\) |
| `userId` | string | No | User ID for DM conversation \(e.g., U1234567890\) |
| `limit` | number | No | Number of messages to retrieve \(default: 10, max: 15\) |
| `limit` | number | No | Number of messages to retrieve \(default: 10, max: 100\) |
| `oldest` | string | No | Start of time range \(timestamp\) |
| `latest` | string | No | End of time range \(timestamp\) |

View File

@@ -111,8 +111,8 @@ Lee los últimos mensajes de los canales de Slack. Recupera el historial de conv
| `authMethod` | string | No | Método de autenticación: oauth o bot_token |
| `botToken` | string | No | Token del bot para Bot personalizado |
| `channel` | string | No | Canal de Slack del que leer mensajes (p. ej., #general) |
| `userId` | string | No | ID de usuario para conversación de mensaje directo (p. ej., U1234567890) |
| `limit` | number | No | Número de mensajes a recuperar (predeterminado: 10, máx: 15) |
| `userId` | string | No | ID de usuario para conversación por MD (p. ej., U1234567890) |
| `limit` | number | No | Número de mensajes a recuperar (predeterminado: 10, máx: 100) |
| `oldest` | string | No | Inicio del rango de tiempo (marca de tiempo) |
| `latest` | string | No | Fin del rango de tiempo (marca de tiempo) |

View File

@@ -107,14 +107,14 @@ Lisez les derniers messages des canaux Slack. Récupérez l'historique des conve
#### Entrée
| Paramètre | Type | Obligatoire | Description |
| --------- | ---- | ----------- | ----------- |
| --------- | ---- | ---------- | ----------- |
| `authMethod` | chaîne | Non | Méthode d'authentification : oauth ou bot_token |
| `botToken` | chaîne | Non | Jeton du bot pour Bot personnalisé |
| `channel` | chaîne | Non | Canal Slack depuis lequel lire les messages \(ex. : #general\) |
| `userId` | chaîne | Non | ID utilisateur pour la conversation en message direct \(ex. : U1234567890\) |
| `limit` | nombre | Non | Nombre de messages à récupérer \(par défaut : 10, max : 15\) |
| `oldest` | chaîne | Non | Début de la plage horaire \(horodatage\) |
| `latest` | chaîne | Non | Fin de la plage horaire \(horodatage\) |
| `channel` | chaîne | Non | Canal Slack pour lire les messages \(ex. : #general\) |
| `userId` | chaîne | Non | ID utilisateur pour la conversation en MP \(ex. : U1234567890\) |
| `limit` | nombre | Non | Nombre de messages à récupérer \(par défaut : 10, max : 100\) |
| `oldest` | chaîne | Non | Début de la plage temporelle \(horodatage\) |
| `latest` | chaîne | Non | Fin de la plage temporelle \(horodatage\) |
#### Sortie

View File

@@ -110,8 +110,8 @@ Slackチャンネルから最新のメッセージを読み取ります。フィ
| `authMethod` | string | いいえ | 認証方法oauthまたはbot_token |
| `botToken` | string | いいえ | カスタムボット用のボットトークン |
| `channel` | string | いいえ | メッセージを読み取るSlackチャンネル#general |
| `userId` | string | いいえ | DM会話用のユーザーIDU1234567890 |
| `limit` | number | いいえ | 取得するメッセージ数デフォルト10、最大15 |
| `userId` | string | いいえ | DM会話用のユーザーIDU1234567890 |
| `limit` | number | いいえ | 取得するメッセージ数デフォルト10、最大100 |
| `oldest` | string | いいえ | 時間範囲の開始(タイムスタンプ) |
| `latest` | string | いいえ | 時間範囲の終了(タイムスタンプ) |

View File

@@ -109,10 +109,10 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
| `authMethod` | string | 否 | 认证方法oauth 或 bot_token |
| `botToken` | string | 否 | 自定义 Bot 的令牌 |
| `channel` | string | 否 | 要读取消息的 Slack 频道(例如,#general |
| `userId` | string | 否 | DM 话的用户 ID例如U1234567890 |
| `limit` | number | 否 | 要检索的消息数量默认10最大15 |
| `oldest` | string | 否 | 时间范围始(时间戳) |
| `latest` | string | 否 | 时间范围结束(时间戳) |
| `userId` | string | 否 | DM 话的用户 ID例如U1234567890 |
| `limit` | number | 否 | 要检索的消息数量默认10最大100 |
| `oldest` | string | 否 | 时间范围的开始(时间戳) |
| `latest` | string | 否 | 时间范围结束(时间戳) |
#### 输出

View File

@@ -903,7 +903,7 @@ checksums:
content/24: 228a8ece96627883153b826a1cbaa06c
content/25: 53abe061a259c296c82676b4770ddd1b
content/26: 371d0e46b4bd2c23f559b8bc112f6955
content/27: 5b9546f77fbafc0741f3fc2548f81c7e
content/27: 03e8b10ec08b354de98e360b66b779e3
content/28: bcadfc362b69078beee0088e5936c98b
content/29: b82def7d82657f941fbe60df3924eeeb
content/30: 1ca7ee3856805fa1718031c5f75b6ffb

View File

@@ -11,7 +11,7 @@ import { createLogger } from '@/lib/logs/console/logger'
const logger = createLogger('CopilotChatsListAPI')
export async function GET(_request: NextRequest) {
export async function GET(_req: NextRequest) {
try {
const { userId, isAuthenticated } = await authenticateCopilotRequestSessionOnly()
if (!isAuthenticated || !userId) {

View File

@@ -38,13 +38,14 @@ export async function GET(
const cloudKey = isCloudPath ? path.slice(1).join('/') : fullPath
const contextParam = request.nextUrl.searchParams.get('context')
const legacyBucketType = request.nextUrl.searchParams.get('bucket')
const context = contextParam || (isCloudPath ? inferContextFromKey(cloudKey) : undefined)
if (context === 'profile-pictures' || context === 'og-images') {
logger.info(`Serving public ${context}:`, { cloudKey })
if (context === 'profile-pictures') {
logger.info('Serving public profile picture:', { cloudKey })
if (isUsingCloudStorage() || isCloudPath) {
return await handleCloudProxyPublic(cloudKey, context)
return await handleCloudProxyPublic(cloudKey, context, legacyBucketType)
}
return await handleLocalFilePublic(fullPath)
}
@@ -181,7 +182,8 @@ async function handleCloudProxy(
async function handleCloudProxyPublic(
cloudKey: string,
context: StorageContext
context: StorageContext,
legacyBucketType?: string | null
): Promise<NextResponse> {
try {
let fileBuffer: Buffer

View File

@@ -1,6 +1,7 @@
import { runs } from '@trigger.dev/sdk'
import { type NextRequest, NextResponse } from 'next/server'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { authenticateApiKeyFromHeader, updateApiKeyLastUsed } from '@/lib/api-key/service'
import { getSession } from '@/lib/auth'
import { generateRequestId } from '@/lib/core/utils/request'
import { createLogger } from '@/lib/logs/console/logger'
import { createErrorResponse } from '@/app/api/workflows/utils'
@@ -17,44 +18,38 @@ export async function GET(
try {
logger.debug(`[${requestId}] Getting status for task: ${taskId}`)
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })
if (!authResult.success || !authResult.userId) {
logger.warn(`[${requestId}] Unauthorized task status request`)
return createErrorResponse(authResult.error || 'Authentication required', 401)
// Try session auth first (for web UI)
const session = await getSession()
let authenticatedUserId: string | null = session?.user?.id || null
if (!authenticatedUserId) {
const apiKeyHeader = request.headers.get('x-api-key')
if (apiKeyHeader) {
const authResult = await authenticateApiKeyFromHeader(apiKeyHeader)
if (authResult.success && authResult.userId) {
authenticatedUserId = authResult.userId
if (authResult.keyId) {
await updateApiKeyLastUsed(authResult.keyId).catch((error) => {
logger.warn(`[${requestId}] Failed to update API key last used timestamp:`, {
keyId: authResult.keyId,
error,
})
})
}
}
}
}
const authenticatedUserId = authResult.userId
if (!authenticatedUserId) {
return createErrorResponse('Authentication required', 401)
}
// Fetch task status from Trigger.dev
const run = await runs.retrieve(taskId)
logger.debug(`[${requestId}] Task ${taskId} status: ${run.status}`)
const payload = run.payload as any
if (payload?.workflowId) {
const { verifyWorkflowAccess } = await import('@/socket-server/middleware/permissions')
const accessCheck = await verifyWorkflowAccess(authenticatedUserId, payload.workflowId)
if (!accessCheck.hasAccess) {
logger.warn(`[${requestId}] User ${authenticatedUserId} denied access to task ${taskId}`, {
workflowId: payload.workflowId,
})
return createErrorResponse('Access denied', 403)
}
logger.debug(`[${requestId}] User ${authenticatedUserId} has access to task ${taskId}`)
} else {
if (payload?.userId && payload.userId !== authenticatedUserId) {
logger.warn(
`[${requestId}] User ${authenticatedUserId} attempted to access task ${taskId} owned by ${payload.userId}`
)
return createErrorResponse('Access denied', 403)
}
if (!payload?.userId) {
logger.warn(
`[${requestId}] Task ${taskId} has no ownership information in payload. Denying access for security.`
)
return createErrorResponse('Access denied', 403)
}
}
// Map Trigger.dev status to our format
const statusMap = {
QUEUED: 'queued',
WAITING_FOR_DEPLOY: 'queued',
@@ -72,6 +67,7 @@ export async function GET(
const mappedStatus = statusMap[run.status as keyof typeof statusMap] || 'unknown'
// Build response based on status
const response: any = {
success: true,
taskId,
@@ -81,18 +77,21 @@ export async function GET(
},
}
// Add completion details if finished
if (mappedStatus === 'completed') {
response.output = run.output // This contains the workflow execution results
response.metadata.completedAt = run.finishedAt
response.metadata.duration = run.durationMs
}
// Add error details if failed
if (mappedStatus === 'failed') {
response.error = run.error
response.metadata.completedAt = run.finishedAt
response.metadata.duration = run.durationMs
}
// Add progress info if still processing
if (mappedStatus === 'processing' || mappedStatus === 'queued') {
response.estimatedDuration = 180000 // 3 minutes max from our config
}
@@ -108,3 +107,6 @@ export async function GET(
return createErrorResponse('Failed to fetch task status', 500)
}
}
// TODO: Implement task cancellation via Trigger.dev API if needed
// export async function DELETE() { ... }

View File

@@ -27,7 +27,7 @@ const UpdateKnowledgeBaseSchema = z.object({
.optional(),
})
export async function GET(_request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
export async function GET(_req: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const requestId = generateRequestId()
const { id } = await params
@@ -133,10 +133,7 @@ export async function PUT(req: NextRequest, { params }: { params: Promise<{ id:
}
}
export async function DELETE(
_request: NextRequest,
{ params }: { params: Promise<{ id: string }> }
) {
export async function DELETE(_req: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const requestId = generateRequestId()
const { id } = await params

View File

@@ -1,72 +1,32 @@
import { db } from '@sim/db'
import {
permissions,
workflow,
workflowExecutionLogs,
workflowExecutionSnapshots,
} from '@sim/db/schema'
import { and, eq } from 'drizzle-orm'
import { workflowExecutionLogs, workflowExecutionSnapshots } from '@sim/db/schema'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { createLogger } from '@/lib/logs/console/logger'
const logger = createLogger('LogsByExecutionIdAPI')
export async function GET(
request: NextRequest,
_request: NextRequest,
{ params }: { params: Promise<{ executionId: string }> }
) {
const requestId = generateRequestId()
try {
const { executionId } = await params
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })
if (!authResult.success || !authResult.userId) {
logger.warn(`[${requestId}] Unauthorized execution data access attempt for: ${executionId}`)
return NextResponse.json(
{ error: authResult.error || 'Authentication required' },
{ status: 401 }
)
}
const authenticatedUserId = authResult.userId
logger.debug(
`[${requestId}] Fetching execution data for: ${executionId} (auth: ${authResult.authType})`
)
logger.debug(`Fetching execution data for: ${executionId}`)
// Get the workflow execution log to find the snapshot
const [workflowLog] = await db
.select({
id: workflowExecutionLogs.id,
workflowId: workflowExecutionLogs.workflowId,
executionId: workflowExecutionLogs.executionId,
stateSnapshotId: workflowExecutionLogs.stateSnapshotId,
trigger: workflowExecutionLogs.trigger,
startedAt: workflowExecutionLogs.startedAt,
endedAt: workflowExecutionLogs.endedAt,
totalDurationMs: workflowExecutionLogs.totalDurationMs,
cost: workflowExecutionLogs.cost,
})
.select()
.from(workflowExecutionLogs)
.innerJoin(workflow, eq(workflowExecutionLogs.workflowId, workflow.id))
.innerJoin(
permissions,
and(
eq(permissions.entityType, 'workspace'),
eq(permissions.entityId, workflow.workspaceId),
eq(permissions.userId, authenticatedUserId)
)
)
.where(eq(workflowExecutionLogs.executionId, executionId))
.limit(1)
if (!workflowLog) {
logger.warn(`[${requestId}] Execution not found or access denied: ${executionId}`)
return NextResponse.json({ error: 'Workflow execution not found' }, { status: 404 })
}
// Get the workflow state snapshot
const [snapshot] = await db
.select()
.from(workflowExecutionSnapshots)
@@ -74,7 +34,6 @@ export async function GET(
.limit(1)
if (!snapshot) {
logger.warn(`[${requestId}] Workflow state snapshot not found for execution: ${executionId}`)
return NextResponse.json({ error: 'Workflow state snapshot not found' }, { status: 404 })
}
@@ -91,14 +50,14 @@ export async function GET(
},
}
logger.debug(`[${requestId}] Successfully fetched execution data for: ${executionId}`)
logger.debug(`Successfully fetched execution data for: ${executionId}`)
logger.debug(
`[${requestId}] Workflow state contains ${Object.keys((snapshot.stateData as any)?.blocks || {}).length} blocks`
`Workflow state contains ${Object.keys((snapshot.stateData as any)?.blocks || {}).length} blocks`
)
return NextResponse.json(response)
} catch (error) {
logger.error(`[${requestId}] Error fetching execution data:`, error)
logger.error('Error fetching execution data:', error)
return NextResponse.json({ error: 'Failed to fetch execution data' }, { status: 500 })
}
}

View File

@@ -3,10 +3,8 @@ import { memory, workflowBlocks } from '@sim/db/schema'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { createLogger } from '@/lib/logs/console/logger'
import { getWorkflowAccessContext } from '@/lib/workflows/utils'
const logger = createLogger('MemoryByIdAPI')
@@ -67,65 +65,6 @@ const memoryPutBodySchema = z.object({
workflowId: z.string().uuid('Invalid workflow ID format'),
})
/**
* Validates authentication and workflow access for memory operations
* @param request - The incoming request
* @param workflowId - The workflow ID to check access for
* @param requestId - Request ID for logging
* @param action - 'read' for GET, 'write' for PUT/DELETE
* @returns Object with userId if successful, or error response if failed
*/
async function validateMemoryAccess(
request: NextRequest,
workflowId: string,
requestId: string,
action: 'read' | 'write'
): Promise<{ userId: string } | { error: NextResponse }> {
const authResult = await checkHybridAuth(request, {
requireWorkflowId: false,
})
if (!authResult.success || !authResult.userId) {
logger.warn(`[${requestId}] Unauthorized memory ${action} attempt`)
return {
error: NextResponse.json(
{ success: false, error: { message: 'Authentication required' } },
{ status: 401 }
),
}
}
const accessContext = await getWorkflowAccessContext(workflowId, authResult.userId)
if (!accessContext) {
logger.warn(`[${requestId}] Workflow ${workflowId} not found`)
return {
error: NextResponse.json(
{ success: false, error: { message: 'Workflow not found' } },
{ status: 404 }
),
}
}
const { isOwner, workspacePermission } = accessContext
const hasAccess =
action === 'read'
? isOwner || workspacePermission !== null
: isOwner || workspacePermission === 'write' || workspacePermission === 'admin'
if (!hasAccess) {
logger.warn(
`[${requestId}] User ${authResult.userId} denied ${action} access to workflow ${workflowId}`
)
return {
error: NextResponse.json(
{ success: false, error: { message: 'Access denied' } },
{ status: 403 }
),
}
}
return { userId: authResult.userId }
}
export const dynamic = 'force-dynamic'
export const runtime = 'nodejs'
@@ -162,11 +101,6 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
const { workflowId: validatedWorkflowId } = validation.data
const accessCheck = await validateMemoryAccess(request, validatedWorkflowId, requestId, 'read')
if ('error' in accessCheck) {
return accessCheck.error
}
const memories = await db
.select()
.from(memory)
@@ -269,11 +203,6 @@ export async function DELETE(
const { workflowId: validatedWorkflowId } = validation.data
const accessCheck = await validateMemoryAccess(request, validatedWorkflowId, requestId, 'write')
if ('error' in accessCheck) {
return accessCheck.error
}
const existingMemory = await db
.select({ id: memory.id })
.from(memory)
@@ -367,11 +296,6 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
)
}
const accessCheck = await validateMemoryAccess(request, validatedWorkflowId, requestId, 'write')
if ('error' in accessCheck) {
return accessCheck.error
}
const existingMemories = await db
.select()
.from(memory)

View File

@@ -28,7 +28,7 @@ const updateInvitationSchema = z.object({
// Get invitation details
export async function GET(
_request: NextRequest,
_req: NextRequest,
{ params }: { params: Promise<{ id: string; invitationId: string }> }
) {
const { id: organizationId, invitationId } = await params

View File

@@ -1,19 +1,16 @@
import { db } from '@sim/db'
import { templates } from '@sim/db/schema'
import { templates, user } from '@sim/db/schema'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { generateRequestId } from '@/lib/core/utils/request'
import { createLogger } from '@/lib/logs/console/logger'
import { verifySuperUser } from '@/lib/templates/permissions'
const logger = createLogger('TemplateApprovalAPI')
export const revalidate = 0
/**
* POST /api/templates/[id]/approve - Approve a template (super users only)
*/
// POST /api/templates/[id]/approve - Approve a template (super users only)
export async function POST(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const requestId = generateRequestId()
const { id } = await params
@@ -25,18 +22,23 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const { isSuperUser } = await verifySuperUser(session.user.id)
if (!isSuperUser) {
// Check if user is a super user
const currentUser = await db.select().from(user).where(eq(user.id, session.user.id)).limit(1)
if (!currentUser[0]?.isSuperUser) {
logger.warn(`[${requestId}] Non-super user attempted to approve template: ${id}`)
return NextResponse.json({ error: 'Only super users can approve templates' }, { status: 403 })
}
// Check if template exists
const existingTemplate = await db.select().from(templates).where(eq(templates.id, id)).limit(1)
if (existingTemplate.length === 0) {
logger.warn(`[${requestId}] Template not found for approval: ${id}`)
return NextResponse.json({ error: 'Template not found' }, { status: 404 })
}
// Update template status to approved
await db
.update(templates)
.set({ status: 'approved', updatedAt: new Date() })
@@ -54,11 +56,9 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
}
}
/**
* DELETE /api/templates/[id]/approve - Unapprove a template (super users only)
*/
// POST /api/templates/[id]/reject - Reject a template (super users only)
export async function DELETE(
_request: NextRequest,
request: NextRequest,
{ params }: { params: Promise<{ id: string }> }
) {
const requestId = generateRequestId()
@@ -71,18 +71,23 @@ export async function DELETE(
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const { isSuperUser } = await verifySuperUser(session.user.id)
if (!isSuperUser) {
// Check if user is a super user
const currentUser = await db.select().from(user).where(eq(user.id, session.user.id)).limit(1)
if (!currentUser[0]?.isSuperUser) {
logger.warn(`[${requestId}] Non-super user attempted to reject template: ${id}`)
return NextResponse.json({ error: 'Only super users can reject templates' }, { status: 403 })
}
// Check if template exists
const existingTemplate = await db.select().from(templates).where(eq(templates.id, id)).limit(1)
if (existingTemplate.length === 0) {
logger.warn(`[${requestId}] Template not found for rejection: ${id}`)
return NextResponse.json({ error: 'Template not found' }, { status: 404 })
}
// Update template status to rejected
await db
.update(templates)
.set({ status: 'rejected', updatedAt: new Date() })

View File

@@ -1,142 +0,0 @@
import { db } from '@sim/db'
import { templates } from '@sim/db/schema'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { generateRequestId } from '@/lib/core/utils/request'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { createLogger } from '@/lib/logs/console/logger'
import { verifyTemplateOwnership } from '@/lib/templates/permissions'
import { uploadFile } from '@/lib/uploads/core/storage-service'
import { isValidPng } from '@/lib/uploads/utils/validation'
const logger = createLogger('TemplateOGImageAPI')
/**
* PUT /api/templates/[id]/og-image
* Upload a pre-generated OG image for a template.
* Accepts base64-encoded image data in the request body.
*/
export async function PUT(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const requestId = generateRequestId()
const { id } = await params
try {
const session = await getSession()
if (!session?.user?.id) {
logger.warn(`[${requestId}] Unauthorized OG image upload attempt for template: ${id}`)
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const { authorized, error, status } = await verifyTemplateOwnership(
id,
session.user.id,
'admin'
)
if (!authorized) {
logger.warn(`[${requestId}] User denied permission to upload OG image for template ${id}`)
return NextResponse.json({ error }, { status: status || 403 })
}
const body = await request.json()
const { imageData } = body
if (!imageData || typeof imageData !== 'string') {
return NextResponse.json(
{ error: 'Missing or invalid imageData (expected base64 string)' },
{ status: 400 }
)
}
const base64Data = imageData.includes(',') ? imageData.split(',')[1] : imageData
const imageBuffer = Buffer.from(base64Data, 'base64')
if (!isValidPng(imageBuffer)) {
return NextResponse.json({ error: 'Invalid PNG image data' }, { status: 400 })
}
const maxSize = 5 * 1024 * 1024
if (imageBuffer.length > maxSize) {
return NextResponse.json({ error: 'Image too large. Maximum size is 5MB.' }, { status: 400 })
}
const timestamp = Date.now()
const storageKey = `og-images/templates/${id}/${timestamp}.png`
logger.info(`[${requestId}] Uploading OG image for template ${id}: ${storageKey}`)
const uploadResult = await uploadFile({
file: imageBuffer,
fileName: storageKey,
contentType: 'image/png',
context: 'og-images',
preserveKey: true,
customKey: storageKey,
})
const baseUrl = getBaseUrl()
const ogImageUrl = `${baseUrl}${uploadResult.path}?context=og-images`
await db
.update(templates)
.set({
ogImageUrl,
updatedAt: new Date(),
})
.where(eq(templates.id, id))
logger.info(`[${requestId}] Successfully uploaded OG image for template ${id}: ${ogImageUrl}`)
return NextResponse.json({
success: true,
ogImageUrl,
})
} catch (error: unknown) {
logger.error(`[${requestId}] Error uploading OG image for template ${id}:`, error)
return NextResponse.json({ error: 'Failed to upload OG image' }, { status: 500 })
}
}
/**
 * DELETE /api/templates/[id]/og-image
 * Clears a template's stored OG image URL. Requires an authenticated
 * session with admin-level ownership of the template.
 */
export async function DELETE(
  _request: NextRequest,
  { params }: { params: Promise<{ id: string }> }
) {
  const requestId = generateRequestId()
  const { id } = await params
  try {
    // Only authenticated users may modify templates.
    const session = await getSession()
    const userId = session?.user?.id
    if (!userId) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    // Ownership check: admin permission on the template is required.
    const ownership = await verifyTemplateOwnership(id, userId, 'admin')
    if (!ownership.authorized) {
      logger.warn(`[${requestId}] User denied permission to delete OG image for template ${id}`)
      return NextResponse.json({ error: ownership.error }, { status: ownership.status || 403 })
    }

    // Null out the stored OG image URL and bump the updated timestamp.
    await db
      .update(templates)
      .set({ ogImageUrl: null, updatedAt: new Date() })
      .where(eq(templates.id, id))

    logger.info(`[${requestId}] Removed OG image for template ${id}`)
    return NextResponse.json({ success: true })
  } catch (error: unknown) {
    logger.error(`[${requestId}] Error removing OG image for template ${id}:`, error)
    return NextResponse.json({ error: 'Failed to remove OG image' }, { status: 500 })
  }
}

View File

@@ -1,19 +1,16 @@
import { db } from '@sim/db'
import { templates } from '@sim/db/schema'
import { templates, user } from '@sim/db/schema'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { generateRequestId } from '@/lib/core/utils/request'
import { createLogger } from '@/lib/logs/console/logger'
import { verifySuperUser } from '@/lib/templates/permissions'
const logger = createLogger('TemplateRejectionAPI')
export const revalidate = 0
/**
* POST /api/templates/[id]/reject - Reject a template (super users only)
*/
// POST /api/templates/[id]/reject - Reject a template (super users only)
export async function POST(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const requestId = generateRequestId()
const { id } = await params
@@ -25,18 +22,23 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const { isSuperUser } = await verifySuperUser(session.user.id)
if (!isSuperUser) {
// Check if user is a super user
const currentUser = await db.select().from(user).where(eq(user.id, session.user.id)).limit(1)
if (!currentUser[0]?.isSuperUser) {
logger.warn(`[${requestId}] Non-super user attempted to reject template: ${id}`)
return NextResponse.json({ error: 'Only super users can reject templates' }, { status: 403 })
}
// Check if template exists
const existingTemplate = await db.select().from(templates).where(eq(templates.id, id)).limit(1)
if (existingTemplate.length === 0) {
logger.warn(`[${requestId}] Template not found for rejection: ${id}`)
return NextResponse.json({ error: 'Template not found' }, { status: 404 })
}
// Update template status to rejected
await db
.update(templates)
.set({ status: 'rejected', updatedAt: new Date() })

View File

@@ -1,6 +1,6 @@
import { db } from '@sim/db'
import { templateCreators, templates, workflow } from '@sim/db/schema'
import { eq, sql } from 'drizzle-orm'
import { member, templateCreators, templates, workflow } from '@sim/db/schema'
import { and, eq, or, sql } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
@@ -15,6 +15,7 @@ const logger = createLogger('TemplateByIdAPI')
export const revalidate = 0
// GET /api/templates/[id] - Retrieve a single template by ID
export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const requestId = generateRequestId()
const { id } = await params
@@ -24,6 +25,7 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
logger.debug(`[${requestId}] Fetching template: ${id}`)
// Fetch the template by ID with creator info
const result = await db
.select({
template: templates,
@@ -45,10 +47,12 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
creator: creator || undefined,
}
// Only show approved templates to non-authenticated users
if (!session?.user?.id && template.status !== 'approved') {
return NextResponse.json({ error: 'Template not found' }, { status: 404 })
}
// Check if user has starred (only if authenticated)
let isStarred = false
if (session?.user?.id) {
const { templateStars } = await import('@sim/db/schema')
@@ -76,6 +80,7 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
logger.debug(`[${requestId}] Incremented view count for template: ${id}`)
} catch (viewError) {
// Log the error but don't fail the request
logger.warn(`[${requestId}] Failed to increment view count for template: ${id}`, viewError)
}
}
@@ -133,6 +138,7 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
const { name, details, creatorId, tags, updateState } = validationResult.data
// Check if template exists
const existingTemplate = await db.select().from(templates).where(eq(templates.id, id)).limit(1)
if (existingTemplate.length === 0) {
@@ -140,54 +146,32 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
return NextResponse.json({ error: 'Template not found' }, { status: 404 })
}
const template = existingTemplate[0]
if (!template.creatorId) {
logger.warn(`[${requestId}] Template ${id} has no creator, denying update`)
return NextResponse.json({ error: 'Access denied' }, { status: 403 })
}
const { verifyCreatorPermission } = await import('@/lib/templates/permissions')
const { hasPermission, error: permissionError } = await verifyCreatorPermission(
session.user.id,
template.creatorId,
'admin'
)
if (!hasPermission) {
logger.warn(`[${requestId}] User denied permission to update template ${id}`)
return NextResponse.json({ error: permissionError || 'Access denied' }, { status: 403 })
}
// No permission check needed - template updates only happen from within the workspace
// where the user is already editing the connected workflow
// Prepare update data - only include fields that were provided
const updateData: any = {
updatedAt: new Date(),
}
// Only update fields that were provided
if (name !== undefined) updateData.name = name
if (details !== undefined) updateData.details = details
if (tags !== undefined) updateData.tags = tags
if (creatorId !== undefined) updateData.creatorId = creatorId
if (updateState && template.workflowId) {
const { verifyWorkflowAccess } = await import('@/socket-server/middleware/permissions')
const { hasAccess: hasWorkflowAccess } = await verifyWorkflowAccess(
session.user.id,
template.workflowId
)
if (!hasWorkflowAccess) {
logger.warn(`[${requestId}] User denied workflow access for state sync on template ${id}`)
return NextResponse.json({ error: 'Access denied to workflow' }, { status: 403 })
}
// Only update the state if explicitly requested and the template has a connected workflow
if (updateState && existingTemplate[0].workflowId) {
// Load the current workflow state from normalized tables
const { loadWorkflowFromNormalizedTables } = await import('@/lib/workflows/persistence/utils')
const normalizedData = await loadWorkflowFromNormalizedTables(template.workflowId)
const normalizedData = await loadWorkflowFromNormalizedTables(existingTemplate[0].workflowId)
if (normalizedData) {
// Also fetch workflow variables
const [workflowRecord] = await db
.select({ variables: workflow.variables })
.from(workflow)
.where(eq(workflow.id, template.workflowId))
.where(eq(workflow.id, existingTemplate[0].workflowId))
.limit(1)
const currentState = {
@@ -199,15 +183,17 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
lastSaved: Date.now(),
}
// Extract credential requirements from the new state
const requiredCredentials = extractRequiredCredentials(currentState)
// Sanitize the state before storing
const sanitizedState = sanitizeCredentials(currentState)
updateData.state = sanitizedState
updateData.requiredCredentials = requiredCredentials
logger.info(
`[${requestId}] Updating template state and credentials from current workflow: ${template.workflowId}`
`[${requestId}] Updating template state and credentials from current workflow: ${existingTemplate[0].workflowId}`
)
} else {
logger.warn(`[${requestId}] Could not load workflow state for template: ${id}`)
@@ -247,6 +233,7 @@ export async function DELETE(
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
// Fetch template
const existing = await db.select().from(templates).where(eq(templates.id, id)).limit(1)
if (existing.length === 0) {
logger.warn(`[${requestId}] Template not found for delete: ${id}`)
@@ -255,21 +242,41 @@ export async function DELETE(
const template = existing[0]
if (!template.creatorId) {
logger.warn(`[${requestId}] Template ${id} has no creator, denying delete`)
return NextResponse.json({ error: 'Access denied' }, { status: 403 })
}
// Permission: Only admin/owner of creator profile can delete
if (template.creatorId) {
const creatorProfile = await db
.select()
.from(templateCreators)
.where(eq(templateCreators.id, template.creatorId))
.limit(1)
const { verifyCreatorPermission } = await import('@/lib/templates/permissions')
const { hasPermission, error: permissionError } = await verifyCreatorPermission(
session.user.id,
template.creatorId,
'admin'
)
if (creatorProfile.length > 0) {
const creator = creatorProfile[0]
let hasPermission = false
if (!hasPermission) {
logger.warn(`[${requestId}] User denied permission to delete template ${id}`)
return NextResponse.json({ error: permissionError || 'Access denied' }, { status: 403 })
if (creator.referenceType === 'user') {
hasPermission = creator.referenceId === session.user.id
} else if (creator.referenceType === 'organization') {
// For delete, require admin/owner role
const membership = await db
.select()
.from(member)
.where(
and(
eq(member.userId, session.user.id),
eq(member.organizationId, creator.referenceId),
or(eq(member.role, 'admin'), eq(member.role, 'owner'))
)
)
.limit(1)
hasPermission = membership.length > 0
}
if (!hasPermission) {
logger.warn(`[${requestId}] User denied permission to delete template ${id}`)
return NextResponse.json({ error: 'Access denied' }, { status: 403 })
}
}
}
await db.delete(templates).where(eq(templates.id, id))

View File

@@ -1,5 +1,6 @@
import { db } from '@sim/db'
import {
member,
templateCreators,
templateStars,
templates,
@@ -203,18 +204,51 @@ export async function POST(request: NextRequest) {
return NextResponse.json({ error: 'Workflow not found' }, { status: 404 })
}
const { verifyCreatorPermission } = await import('@/lib/templates/permissions')
const { hasPermission, error: permissionError } = await verifyCreatorPermission(
session.user.id,
data.creatorId,
'member'
)
// Validate creator profile - required for all templates
const creatorProfile = await db
.select()
.from(templateCreators)
.where(eq(templateCreators.id, data.creatorId))
.limit(1)
if (!hasPermission) {
logger.warn(`[${requestId}] User cannot use creator profile: ${data.creatorId}`)
return NextResponse.json({ error: permissionError || 'Access denied' }, { status: 403 })
if (creatorProfile.length === 0) {
logger.warn(`[${requestId}] Creator profile not found: ${data.creatorId}`)
return NextResponse.json({ error: 'Creator profile not found' }, { status: 404 })
}
const creator = creatorProfile[0]
// Verify user has permission to use this creator profile
if (creator.referenceType === 'user') {
if (creator.referenceId !== session.user.id) {
logger.warn(`[${requestId}] User cannot use creator profile: ${data.creatorId}`)
return NextResponse.json(
{ error: 'You do not have permission to use this creator profile' },
{ status: 403 }
)
}
} else if (creator.referenceType === 'organization') {
// Verify user is a member of the organization
const membership = await db
.select()
.from(member)
.where(
and(eq(member.userId, session.user.id), eq(member.organizationId, creator.referenceId))
)
.limit(1)
if (membership.length === 0) {
logger.warn(
`[${requestId}] User not a member of organization for creator: ${data.creatorId}`
)
return NextResponse.json(
{ error: 'You must be a member of the organization to use its creator profile' },
{ status: 403 }
)
}
}
// Create the template
const templateId = uuidv4()
const now = new Date()

View File

@@ -1,7 +1,6 @@
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { authorizeCredentialUse } from '@/lib/auth/credential-access'
import { validateAlphanumericId } from '@/lib/core/security/input-validation'
import { generateRequestId } from '@/lib/core/utils/request'
import { createLogger } from '@/lib/logs/console/logger'
import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
@@ -109,14 +108,6 @@ export async function GET(request: NextRequest) {
return NextResponse.json({ error: 'Failed to obtain valid access token' }, { status: 401 })
}
if (folderId) {
const folderIdValidation = validateAlphanumericId(folderId, 'folderId', 50)
if (!folderIdValidation.isValid) {
logger.warn(`[${requestId}] Invalid folderId`, { error: folderIdValidation.error })
return NextResponse.json({ error: folderIdValidation.error }, { status: 400 })
}
}
const qParts: string[] = ['trashed = false']
if (folderId) {
qParts.push(`'${escapeForDriveQuery(folderId)}' in parents`)

View File

@@ -1,7 +1,6 @@
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { validateAlphanumericId } from '@/lib/core/security/input-validation'
import { generateRequestId } from '@/lib/core/utils/request'
import { createLogger } from '@/lib/logs/console/logger'
@@ -51,29 +50,6 @@ export async function POST(request: NextRequest) {
.map((id) => id.trim())
.filter((id) => id.length > 0)
for (const labelId of labelIds) {
const labelIdValidation = validateAlphanumericId(labelId, 'labelId', 255)
if (!labelIdValidation.isValid) {
logger.warn(`[${requestId}] Invalid label ID: ${labelIdValidation.error}`)
return NextResponse.json(
{
success: false,
error: labelIdValidation.error,
},
{ status: 400 }
)
}
}
const messageIdValidation = validateAlphanumericId(validatedData.messageId, 'messageId', 255)
if (!messageIdValidation.isValid) {
logger.warn(`[${requestId}] Invalid message ID: ${messageIdValidation.error}`)
return NextResponse.json(
{ success: false, error: messageIdValidation.error },
{ status: 400 }
)
}
const gmailResponse = await fetch(
`${GMAIL_API_BASE}/messages/${validatedData.messageId}/modify`,
{

View File

@@ -3,7 +3,6 @@ import { account } from '@sim/db/schema'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { validateAlphanumericId } from '@/lib/core/security/input-validation'
import { generateRequestId } from '@/lib/core/utils/request'
import { createLogger } from '@/lib/logs/console/logger'
import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
@@ -39,12 +38,6 @@ export async function GET(request: NextRequest) {
return NextResponse.json({ error: 'Credential ID is required' }, { status: 400 })
}
const credentialIdValidation = validateAlphanumericId(credentialId, 'credentialId', 255)
if (!credentialIdValidation.isValid) {
logger.warn(`[${requestId}] Invalid credential ID: ${credentialIdValidation.error}`)
return NextResponse.json({ error: credentialIdValidation.error }, { status: 400 })
}
let credentials = await db
.select()
.from(account)

View File

@@ -1,7 +1,6 @@
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { validateAlphanumericId } from '@/lib/core/security/input-validation'
import { generateRequestId } from '@/lib/core/utils/request'
import { createLogger } from '@/lib/logs/console/logger'
@@ -54,29 +53,6 @@ export async function POST(request: NextRequest) {
.map((id) => id.trim())
.filter((id) => id.length > 0)
for (const labelId of labelIds) {
const labelIdValidation = validateAlphanumericId(labelId, 'labelId', 255)
if (!labelIdValidation.isValid) {
logger.warn(`[${requestId}] Invalid label ID: ${labelIdValidation.error}`)
return NextResponse.json(
{
success: false,
error: labelIdValidation.error,
},
{ status: 400 }
)
}
}
const messageIdValidation = validateAlphanumericId(validatedData.messageId, 'messageId', 255)
if (!messageIdValidation.isValid) {
logger.warn(`[${requestId}] Invalid message ID: ${messageIdValidation.error}`)
return NextResponse.json(
{ success: false, error: messageIdValidation.error },
{ status: 400 }
)
}
const gmailResponse = await fetch(
`${GMAIL_API_BASE}/messages/${validatedData.messageId}/modify`,
{

View File

@@ -1,6 +1,5 @@
import { type NextRequest, NextResponse } from 'next/server'
import { authorizeCredentialUse } from '@/lib/auth/credential-access'
import { validateUUID } from '@/lib/core/security/input-validation'
import { generateRequestId } from '@/lib/core/utils/request'
import { createLogger } from '@/lib/logs/console/logger'
import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
@@ -26,6 +25,7 @@ export async function GET(request: NextRequest) {
logger.info(`[${requestId}] Google Calendar calendars request received`)
try {
// Get the credential ID from the query params
const { searchParams } = new URL(request.url)
const credentialId = searchParams.get('credentialId')
const workflowId = searchParams.get('workflowId') || undefined
@@ -34,25 +34,12 @@ export async function GET(request: NextRequest) {
logger.warn(`[${requestId}] Missing credentialId parameter`)
return NextResponse.json({ error: 'Credential ID is required' }, { status: 400 })
}
const credentialValidation = validateUUID(credentialId, 'credentialId')
if (!credentialValidation.isValid) {
logger.warn(`[${requestId}] Invalid credentialId format`, { credentialId })
return NextResponse.json({ error: credentialValidation.error }, { status: 400 })
}
if (workflowId) {
const workflowValidation = validateUUID(workflowId, 'workflowId')
if (!workflowValidation.isValid) {
logger.warn(`[${requestId}] Invalid workflowId format`, { workflowId })
return NextResponse.json({ error: workflowValidation.error }, { status: 400 })
}
}
const authz = await authorizeCredentialUse(request, { credentialId, workflowId })
if (!authz.ok || !authz.credentialOwnerUserId) {
return NextResponse.json({ error: authz.error || 'Unauthorized' }, { status: 403 })
}
// Refresh access token if needed using the utility function
const accessToken = await refreshAccessTokenIfNeeded(
credentialId,
authz.credentialOwnerUserId,
@@ -63,6 +50,7 @@ export async function GET(request: NextRequest) {
return NextResponse.json({ error: 'Failed to obtain valid access token' }, { status: 401 })
}
// Fetch calendars from Google Calendar API
logger.info(`[${requestId}] Fetching calendars from Google Calendar API`)
const calendarResponse = await fetch(
'https://www.googleapis.com/calendar/v3/users/me/calendarList',
@@ -93,6 +81,7 @@ export async function GET(request: NextRequest) {
const data = await calendarResponse.json()
const calendars: CalendarListItem[] = data.items || []
// Sort calendars with primary first, then alphabetically
calendars.sort((a, b) => {
if (a.primary && !b.primary) return -1
if (!a.primary && b.primary) return 1

View File

@@ -1,6 +1,5 @@
import { NextResponse } from 'next/server'
import { authorizeCredentialUse } from '@/lib/auth/credential-access'
import { validateMicrosoftGraphId } from '@/lib/core/security/input-validation'
import { createLogger } from '@/lib/logs/console/logger'
import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
@@ -24,12 +23,6 @@ export async function POST(request: Request) {
return NextResponse.json({ error: 'Team ID is required' }, { status: 400 })
}
const teamIdValidation = validateMicrosoftGraphId(teamId, 'Team ID')
if (!teamIdValidation.isValid) {
logger.warn('Invalid team ID provided', { teamId, error: teamIdValidation.error })
return NextResponse.json({ error: teamIdValidation.error }, { status: 400 })
}
try {
const authz = await authorizeCredentialUse(request as any, {
credentialId: credential,
@@ -77,6 +70,7 @@ export async function POST(request: Request) {
endpoint: `https://graph.microsoft.com/v1.0/teams/${teamId}/channels`,
})
// Check for auth errors specifically
if (response.status === 401) {
return NextResponse.json(
{
@@ -99,6 +93,7 @@ export async function POST(request: Request) {
} catch (innerError) {
logger.error('Error during API requests:', innerError)
// Check if it's an authentication error
const errorMessage = innerError instanceof Error ? innerError.message : String(innerError)
if (
errorMessage.includes('auth') ||

View File

@@ -1,6 +1,5 @@
import { NextResponse } from 'next/server'
import { authorizeCredentialUse } from '@/lib/auth/credential-access'
import { validateMicrosoftGraphId } from '@/lib/core/security/input-validation'
import { createLogger } from '@/lib/logs/console/logger'
import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
@@ -8,35 +7,21 @@ export const dynamic = 'force-dynamic'
const logger = createLogger('TeamsChatsAPI')
/**
* Helper function to get chat members and create a meaningful name
*
* @param chatId - Microsoft Teams chat ID to get display name for
* @param accessToken - Access token for Microsoft Graph API
* @param chatTopic - Optional existing chat topic
* @returns A meaningful display name for the chat
*/
// Helper function to get chat members and create a meaningful name
const getChatDisplayName = async (
chatId: string,
accessToken: string,
chatTopic?: string
): Promise<string> => {
try {
const chatIdValidation = validateMicrosoftGraphId(chatId, 'chatId')
if (!chatIdValidation.isValid) {
logger.warn('Invalid chat ID in getChatDisplayName', {
error: chatIdValidation.error,
chatId: chatId.substring(0, 50),
})
return `Chat ${chatId.substring(0, 8)}...`
}
// If the chat already has a topic, use it
if (chatTopic?.trim() && chatTopic !== 'null') {
return chatTopic
}
// Fetch chat members to create a meaningful name
const membersResponse = await fetch(
`https://graph.microsoft.com/v1.0/chats/${encodeURIComponent(chatId)}/members`,
`https://graph.microsoft.com/v1.0/chats/${chatId}/members`,
{
method: 'GET',
headers: {
@@ -50,25 +35,27 @@ const getChatDisplayName = async (
const membersData = await membersResponse.json()
const members = membersData.value || []
// Filter out the current user and get display names
const memberNames = members
.filter((member: any) => member.displayName && member.displayName !== 'Unknown')
.map((member: any) => member.displayName)
.slice(0, 3)
.slice(0, 3) // Limit to first 3 names to avoid very long names
if (memberNames.length > 0) {
if (memberNames.length === 1) {
return memberNames[0]
return memberNames[0] // 1:1 chat
}
if (memberNames.length === 2) {
return memberNames.join(' & ')
return memberNames.join(' & ') // 2-person group
}
return `${memberNames.slice(0, 2).join(', ')} & ${memberNames.length - 2} more`
return `${memberNames.slice(0, 2).join(', ')} & ${memberNames.length - 2} more` // Larger group
}
}
// Fallback: try to get a better name from recent messages
try {
const messagesResponse = await fetch(
`https://graph.microsoft.com/v1.0/chats/${encodeURIComponent(chatId)}/messages?$top=10&$orderby=createdDateTime desc`,
`https://graph.microsoft.com/v1.0/chats/${chatId}/messages?$top=10&$orderby=createdDateTime desc`,
{
method: 'GET',
headers: {
@@ -82,12 +69,14 @@ const getChatDisplayName = async (
const messagesData = await messagesResponse.json()
const messages = messagesData.value || []
// Look for chat rename events
for (const message of messages) {
if (message.eventDetail?.chatDisplayName) {
return message.eventDetail.chatDisplayName
}
}
// Get unique sender names from recent messages as last resort
const senderNames = [
...new Set(
messages
@@ -114,6 +103,7 @@ const getChatDisplayName = async (
)
}
// Final fallback
return `Chat ${chatId.split(':')[0] || chatId.substring(0, 8)}...`
} catch (error) {
logger.warn(
@@ -156,6 +146,7 @@ export async function POST(request: Request) {
return NextResponse.json({ error: 'Could not retrieve access token' }, { status: 401 })
}
// Now try to fetch the chats
const response = await fetch('https://graph.microsoft.com/v1.0/me/chats', {
method: 'GET',
headers: {
@@ -172,6 +163,7 @@ export async function POST(request: Request) {
endpoint: 'https://graph.microsoft.com/v1.0/me/chats',
})
// Check for auth errors specifically
if (response.status === 401) {
return NextResponse.json(
{
@@ -187,6 +179,7 @@ export async function POST(request: Request) {
const data = await response.json()
// Process chats with enhanced display names
const chats = await Promise.all(
data.value.map(async (chat: any) => ({
id: chat.id,
@@ -200,6 +193,7 @@ export async function POST(request: Request) {
} catch (innerError) {
logger.error('Error during API requests:', innerError)
// Check if it's an authentication error
const errorMessage = innerError instanceof Error ? innerError.message : String(innerError)
if (
errorMessage.includes('auth') ||

View File

@@ -30,41 +30,23 @@ export async function createMongoDBConnection(config: MongoDBConnectionConfig) {
return client
}
/**
* Recursively checks an object for dangerous MongoDB operators
* @param obj - The object to check
* @param dangerousOperators - Array of operator names to block
* @returns true if a dangerous operator is found
*/
function containsDangerousOperator(obj: unknown, dangerousOperators: string[]): boolean {
if (typeof obj !== 'object' || obj === null) return false
for (const key of Object.keys(obj as Record<string, unknown>)) {
if (dangerousOperators.includes(key)) return true
if (
typeof (obj as Record<string, unknown>)[key] === 'object' &&
containsDangerousOperator((obj as Record<string, unknown>)[key], dangerousOperators)
) {
return true
}
}
return false
}
export function validateFilter(filter: string): { isValid: boolean; error?: string } {
try {
const parsed = JSON.parse(filter)
const dangerousOperators = [
'$where', // Executes arbitrary JavaScript
'$regex', // Can cause ReDoS attacks
'$expr', // Expression evaluation
'$function', // Custom JavaScript functions
'$accumulator', // Custom JavaScript accumulators
'$let', // Variable definitions that could be exploited
]
const dangerousOperators = ['$where', '$regex', '$expr', '$function', '$accumulator', '$let']
if (containsDangerousOperator(parsed, dangerousOperators)) {
const checkForDangerousOps = (obj: any): boolean => {
if (typeof obj !== 'object' || obj === null) return false
for (const key of Object.keys(obj)) {
if (dangerousOperators.includes(key)) return true
if (typeof obj[key] === 'object' && checkForDangerousOps(obj[key])) return true
}
return false
}
if (checkForDangerousOps(parsed)) {
return {
isValid: false,
error: 'Filter contains potentially dangerous operators',
@@ -92,19 +74,29 @@ export function validatePipeline(pipeline: string): { isValid: boolean; error?:
}
const dangerousOperators = [
'$where', // Executes arbitrary JavaScript
'$function', // Custom JavaScript functions
'$accumulator', // Custom JavaScript accumulators
'$let', // Variable definitions that could be exploited
'$merge', // Writes to external collections
'$out', // Writes to external collections
'$currentOp', // Exposes system operation info
'$listSessions', // Exposes session info
'$listLocalSessions', // Exposes local session info
'$where',
'$function',
'$accumulator',
'$let',
'$merge',
'$out',
'$currentOp',
'$listSessions',
'$listLocalSessions',
]
const checkPipelineStage = (stage: any): boolean => {
if (typeof stage !== 'object' || stage === null) return false
for (const key of Object.keys(stage)) {
if (dangerousOperators.includes(key)) return true
if (typeof stage[key] === 'object' && checkPipelineStage(stage[key])) return true
}
return false
}
for (const stage of parsed) {
if (containsDangerousOperator(stage, dangerousOperators)) {
if (checkPipelineStage(stage)) {
return {
isValid: false,
error: 'Pipeline contains potentially dangerous operators',

View File

@@ -98,45 +98,15 @@ export function buildDeleteQuery(table: string, where: string) {
return { query, values: [] }
}
/**
* Validates a WHERE clause to prevent SQL injection attacks
* @param where - The WHERE clause string to validate
* @throws {Error} If the WHERE clause contains potentially dangerous patterns
*/
function validateWhereClause(where: string): void {
const dangerousPatterns = [
// DDL and DML injection via stacked queries
/;\s*(drop|delete|insert|update|create|alter|grant|revoke)/i,
// Union-based injection
/union\s+(all\s+)?select/i,
// File operations
/union\s+select/i,
/into\s+outfile/i,
/into\s+dumpfile/i,
/load_file\s*\(/i,
// Comment-based injection (can truncate query)
/load_file/i,
/--/,
/\/\*/,
/\*\//,
// Tautologies - always true/false conditions using backreferences
// Matches OR 'x'='x' or OR x=x (same value both sides) but NOT OR col='value'
/\bor\s+(['"]?)(\w+)\1\s*=\s*\1\2\1/i,
/\bor\s+true\b/i,
/\bor\s+false\b/i,
// AND tautologies (less common but still used in attacks)
/\band\s+(['"]?)(\w+)\1\s*=\s*\1\2\1/i,
/\band\s+true\b/i,
/\band\s+false\b/i,
// Time-based blind injection
/\bsleep\s*\(/i,
/\bbenchmark\s*\(/i,
/\bwaitfor\s+delay/i,
// Stacked queries (any statement after semicolon)
/;\s*\w+/,
// Information schema queries
/information_schema/i,
/mysql\./i,
// System functions and procedures
/\bxp_cmdshell/i,
]
for (const pattern of dangerousPatterns) {

View File

@@ -4,7 +4,6 @@ import { account } from '@sim/db/schema'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { validateMicrosoftGraphId } from '@/lib/core/security/input-validation'
import { createLogger } from '@/lib/logs/console/logger'
import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
@@ -37,12 +36,6 @@ export async function GET(request: NextRequest) {
return NextResponse.json({ error: 'Credential ID is required' }, { status: 400 })
}
const credentialIdValidation = validateMicrosoftGraphId(credentialId, 'credentialId')
if (!credentialIdValidation.isValid) {
logger.warn(`[${requestId}] Invalid credential ID`, { error: credentialIdValidation.error })
return NextResponse.json({ error: credentialIdValidation.error }, { status: 400 })
}
logger.info(`[${requestId}] Fetching credential`, { credentialId })
const credentials = await db.select().from(account).where(eq(account.id, credentialId)).limit(1)

View File

@@ -4,7 +4,6 @@ import { account } from '@sim/db/schema'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { validateMicrosoftGraphId } from '@/lib/core/security/input-validation'
import { createLogger } from '@/lib/logs/console/logger'
import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
@@ -34,12 +33,6 @@ export async function GET(request: NextRequest) {
return NextResponse.json({ error: 'Credential ID is required' }, { status: 400 })
}
const credentialIdValidation = validateMicrosoftGraphId(credentialId, 'credentialId')
if (!credentialIdValidation.isValid) {
logger.warn(`[${requestId}] Invalid credential ID`, { error: credentialIdValidation.error })
return NextResponse.json({ error: credentialIdValidation.error }, { status: 400 })
}
const credentials = await db.select().from(account).where(eq(account.id, credentialId)).limit(1)
if (!credentials.length) {
return NextResponse.json({ error: 'Credential not found' }, { status: 404 })
@@ -55,6 +48,7 @@ export async function GET(request: NextRequest) {
return NextResponse.json({ error: 'Failed to obtain valid access token' }, { status: 401 })
}
// Build URL for OneDrive folders
let url = `https://graph.microsoft.com/v1.0/me/drive/root/children?$filter=folder ne null&$select=id,name,folder,webUrl,createdDateTime,lastModifiedDateTime&$top=50`
if (query) {
@@ -77,7 +71,7 @@ export async function GET(request: NextRequest) {
const data = await response.json()
const folders = (data.value || [])
.filter((item: MicrosoftGraphDriveItem) => item.folder)
.filter((item: MicrosoftGraphDriveItem) => item.folder) // Only folders
.map((folder: MicrosoftGraphDriveItem) => ({
id: folder.id,
name: folder.name,

View File

@@ -2,7 +2,6 @@ import { type NextRequest, NextResponse } from 'next/server'
import * as XLSX from 'xlsx'
import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { validateMicrosoftGraphId } from '@/lib/core/security/input-validation'
import { generateRequestId } from '@/lib/core/utils/request'
import { createLogger } from '@/lib/logs/console/logger'
import {
@@ -29,9 +28,9 @@ const ExcelValuesSchema = z.union([
const OneDriveUploadSchema = z.object({
accessToken: z.string().min(1, 'Access token is required'),
fileName: z.string().min(1, 'File name is required'),
file: z.any().optional(),
file: z.any().optional(), // UserFile object (optional for blank Excel creation)
folderId: z.string().optional().nullable(),
mimeType: z.string().nullish(),
mimeType: z.string().nullish(), // Accept string, null, or undefined
values: ExcelValuesSchema.optional().nullable(),
})
@@ -63,19 +62,24 @@ export async function POST(request: NextRequest) {
let fileBuffer: Buffer
let mimeType: string
// Check if we're creating a blank Excel file
const isExcelCreation =
validatedData.mimeType ===
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet' && !validatedData.file
if (isExcelCreation) {
// Create a blank Excel workbook
const workbook = XLSX.utils.book_new()
const worksheet = XLSX.utils.aoa_to_sheet([[]])
XLSX.utils.book_append_sheet(workbook, worksheet, 'Sheet1')
// Generate XLSX file as buffer
const xlsxBuffer = XLSX.write(workbook, { type: 'buffer', bookType: 'xlsx' })
fileBuffer = Buffer.from(xlsxBuffer)
mimeType = 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
} else {
// Handle regular file upload
const rawFile = validatedData.file
if (!rawFile) {
@@ -104,6 +108,7 @@ export async function POST(request: NextRequest) {
fileToProcess = rawFile
}
// Convert to UserFile format
let userFile
try {
userFile = processSingleFileToUserFile(fileToProcess, requestId, logger)
@@ -133,7 +138,7 @@ export async function POST(request: NextRequest) {
mimeType = userFile.type || 'application/octet-stream'
}
const maxSize = 250 * 1024 * 1024
const maxSize = 250 * 1024 * 1024 // 250MB
if (fileBuffer.length > maxSize) {
const sizeMB = (fileBuffer.length / (1024 * 1024)).toFixed(2)
logger.warn(`[${requestId}] File too large: ${sizeMB}MB`)
@@ -146,6 +151,7 @@ export async function POST(request: NextRequest) {
)
}
// Ensure file name has an appropriate extension
let fileName = validatedData.fileName
const hasExtension = fileName.includes('.') && fileName.lastIndexOf('.') > 0
@@ -163,17 +169,6 @@ export async function POST(request: NextRequest) {
const folderId = validatedData.folderId?.trim()
if (folderId && folderId !== '') {
const folderIdValidation = validateMicrosoftGraphId(folderId, 'folderId')
if (!folderIdValidation.isValid) {
logger.warn(`[${requestId}] Invalid folder ID`, { error: folderIdValidation.error })
return NextResponse.json(
{
success: false,
error: folderIdValidation.error,
},
{ status: 400 }
)
}
uploadUrl = `${MICROSOFT_GRAPH_BASE}/me/drive/items/${encodeURIComponent(folderId)}:/${encodeURIComponent(fileName)}:/content`
} else {
uploadUrl = `${MICROSOFT_GRAPH_BASE}/me/drive/root:/${encodeURIComponent(fileName)}:/content`
@@ -202,12 +197,14 @@ export async function POST(request: NextRequest) {
const fileData = await uploadResponse.json()
// If this is an Excel creation and values were provided, write them using the Excel API
let excelWriteResult: any | undefined
const shouldWriteExcelContent =
isExcelCreation && Array.isArray(excelValues) && excelValues.length > 0
if (shouldWriteExcelContent) {
try {
// Create a workbook session to ensure reliability and persistence of changes
let workbookSessionId: string | undefined
const sessionResp = await fetch(
`${MICROSOFT_GRAPH_BASE}/me/drive/items/${encodeURIComponent(fileData.id)}/workbook/createSession`,
@@ -226,6 +223,7 @@ export async function POST(request: NextRequest) {
workbookSessionId = sessionData?.id
}
// Determine the first worksheet name
let sheetName = 'Sheet1'
try {
const listUrl = `${MICROSOFT_GRAPH_BASE}/me/drive/items/${encodeURIComponent(
@@ -274,6 +272,7 @@ export async function POST(request: NextRequest) {
return paddedRow
})
// Compute concise end range from A1 and matrix size (no network round-trip)
const indexToColLetters = (index: number): string => {
let n = index
let s = ''
@@ -314,6 +313,7 @@ export async function POST(request: NextRequest) {
statusText: excelWriteResponse?.statusText,
error: errorText,
})
// Do not fail the entire request; return upload success with write error details
excelWriteResult = {
success: false,
error: `Excel write failed: ${excelWriteResponse?.statusText || 'unknown'}`,
@@ -321,6 +321,7 @@ export async function POST(request: NextRequest) {
}
} else {
const writeData = await excelWriteResponse.json()
// The Range PATCH returns a Range object; log address and values length
const addr = writeData.address || writeData.addressLocal
const v = writeData.values || []
excelWriteResult = {
@@ -332,6 +333,7 @@ export async function POST(request: NextRequest) {
}
}
// Attempt to close the workbook session if one was created
if (workbookSessionId) {
try {
const closeResp = await fetch(

View File

@@ -3,7 +3,6 @@ import { account } from '@sim/db/schema'
import { eq } from 'drizzle-orm'
import { NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { validateAlphanumericId } from '@/lib/core/security/input-validation'
import { generateRequestId } from '@/lib/core/utils/request'
import { createLogger } from '@/lib/logs/console/logger'
import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
@@ -30,13 +29,8 @@ export async function GET(request: Request) {
return NextResponse.json({ error: 'Credential ID is required' }, { status: 400 })
}
const credentialIdValidation = validateAlphanumericId(credentialId, 'credentialId')
if (!credentialIdValidation.isValid) {
logger.warn('Invalid credentialId format', { error: credentialIdValidation.error })
return NextResponse.json({ error: credentialIdValidation.error }, { status: 400 })
}
try {
// Ensure we have a session for permission checks
const sessionUserId = session?.user?.id || ''
if (!sessionUserId) {
@@ -44,6 +38,7 @@ export async function GET(request: Request) {
return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
}
// Resolve the credential owner to support collaborator-owned credentials
const creds = await db.select().from(account).where(eq(account.id, credentialId)).limit(1)
if (!creds.length) {
logger.warn('Credential not found', { credentialId })
@@ -84,6 +79,7 @@ export async function GET(request: Request) {
endpoint: 'https://graph.microsoft.com/v1.0/me/mailFolders',
})
// Check for auth errors specifically
if (response.status === 401) {
return NextResponse.json(
{
@@ -100,6 +96,7 @@ export async function GET(request: Request) {
const data = await response.json()
const folders = data.value || []
// Transform folders to match the expected format
const transformedFolders = folders.map((folder: OutlookFolder) => ({
id: folder.id,
name: folder.displayName,
@@ -114,6 +111,7 @@ export async function GET(request: Request) {
} catch (innerError) {
logger.error('Error during API requests:', innerError)
// Check if it's an authentication error
const errorMessage = innerError instanceof Error ? innerError.message : String(innerError)
if (
errorMessage.includes('auth') ||

View File

@@ -64,46 +64,15 @@ export function sanitizeIdentifier(identifier: string): string {
return sanitizeSingleIdentifier(identifier)
}
/**
* Validates a WHERE clause to prevent SQL injection attacks
* @param where - The WHERE clause string to validate
* @throws {Error} If the WHERE clause contains potentially dangerous patterns
*/
function validateWhereClause(where: string): void {
const dangerousPatterns = [
// DDL and DML injection via stacked queries
/;\s*(drop|delete|insert|update|create|alter|grant|revoke)/i,
// Union-based injection
/union\s+(all\s+)?select/i,
// File operations
/union\s+select/i,
/into\s+outfile/i,
/load_file\s*\(/i,
/pg_read_file/i,
// Comment-based injection (can truncate query)
/load_file/i,
/--/,
/\/\*/,
/\*\//,
// Tautologies - always true/false conditions using backreferences
// Matches OR 'x'='x' or OR x=x (same value both sides) but NOT OR col='value'
/\bor\s+(['"]?)(\w+)\1\s*=\s*\1\2\1/i,
/\bor\s+true\b/i,
/\bor\s+false\b/i,
// AND tautologies (less common but still used in attacks)
/\band\s+(['"]?)(\w+)\1\s*=\s*\1\2\1/i,
/\band\s+true\b/i,
/\band\s+false\b/i,
// Time-based blind injection
/\bsleep\s*\(/i,
/\bwaitfor\s+delay/i,
/\bpg_sleep\s*\(/i,
/\bbenchmark\s*\(/i,
// Stacked queries (any statement after semicolon)
/;\s*\w+/,
// Information schema / system catalog queries
/information_schema/i,
/pg_catalog/i,
// System functions and procedures
/\bxp_cmdshell/i,
]
for (const pattern of dangerousPatterns) {

View File

@@ -4,7 +4,6 @@ import { account } from '@sim/db/schema'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { validateAlphanumericId } from '@/lib/core/security/input-validation'
import { createLogger } from '@/lib/logs/console/logger'
import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
import type { SharepointSite } from '@/tools/sharepoint/types'
@@ -33,12 +32,6 @@ export async function GET(request: NextRequest) {
return NextResponse.json({ error: 'Credential ID is required' }, { status: 400 })
}
const credentialIdValidation = validateAlphanumericId(credentialId, 'credentialId', 255)
if (!credentialIdValidation.isValid) {
logger.warn(`[${requestId}] Invalid credential ID`, { error: credentialIdValidation.error })
return NextResponse.json({ error: credentialIdValidation.error }, { status: 400 })
}
const credentials = await db.select().from(account).where(eq(account.id, credentialId)).limit(1)
if (!credentials.length) {
return NextResponse.json({ error: 'Credential not found' }, { status: 404 })
@@ -54,6 +47,8 @@ export async function GET(request: NextRequest) {
return NextResponse.json({ error: 'Failed to obtain valid access token' }, { status: 401 })
}
// Build URL for SharePoint sites
// Use search=* to get all sites the user has access to, or search for specific query
const searchQuery = query || '*'
const url = `https://graph.microsoft.com/v1.0/sites?search=${encodeURIComponent(searchQuery)}&$select=id,name,displayName,webUrl,createdDateTime,lastModifiedDateTime&$top=50`

View File

@@ -1,6 +1,5 @@
import { NextResponse } from 'next/server'
import { authorizeCredentialUse } from '@/lib/auth/credential-access'
import { validateAlphanumericId } from '@/lib/core/security/input-validation'
import { generateRequestId } from '@/lib/core/utils/request'
import { createLogger } from '@/lib/logs/console/logger'
import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
@@ -94,6 +93,7 @@ export async function POST(request: Request) {
}
}
// Filter to channels the bot can access and format the response
const channels = (data.channels || [])
.filter((channel: SlackChannel) => {
const canAccess = !channel.is_archived && (channel.is_member || !channel.is_private)
@@ -106,28 +106,6 @@ export async function POST(request: Request) {
return canAccess
})
.filter((channel: SlackChannel) => {
const validation = validateAlphanumericId(channel.id, 'channelId', 50)
if (!validation.isValid) {
logger.warn('Invalid channel ID received from Slack API', {
channelId: channel.id,
channelName: channel.name,
error: validation.error,
})
return false
}
if (!/^[CDG][A-Z0-9]+$/i.test(channel.id)) {
logger.warn('Channel ID does not match Slack format', {
channelId: channel.id,
channelName: channel.name,
})
return false
}
return true
})
.map((channel: SlackChannel) => ({
id: channel.id,
name: channel.name,

View File

@@ -14,12 +14,7 @@ const SlackReadMessagesSchema = z
accessToken: z.string().min(1, 'Access token is required'),
channel: z.string().optional().nullable(),
userId: z.string().optional().nullable(),
limit: z.coerce
.number()
.min(1, 'Limit must be at least 1')
.max(15, 'Limit cannot exceed 15')
.optional()
.nullable(),
limit: z.number().optional().nullable(),
oldest: z.string().optional().nullable(),
latest: z.string().optional().nullable(),
})
@@ -67,8 +62,8 @@ export async function POST(request: NextRequest) {
const url = new URL('https://slack.com/api/conversations.history')
url.searchParams.append('channel', channel!)
const limit = validatedData.limit ?? 10
url.searchParams.append('limit', String(limit))
const limit = validatedData.limit ? Number(validatedData.limit) : 10
url.searchParams.append('limit', String(Math.min(limit, 15)))
if (validatedData.oldest) {
url.searchParams.append('oldest', validatedData.oldest)

View File

@@ -1,6 +1,5 @@
import { NextResponse } from 'next/server'
import { authorizeCredentialUse } from '@/lib/auth/credential-access'
import { validateAlphanumericId } from '@/lib/core/security/input-validation'
import { generateRequestId } from '@/lib/core/utils/request'
import { createLogger } from '@/lib/logs/console/logger'
import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
@@ -21,21 +20,13 @@ export async function POST(request: Request) {
try {
const requestId = generateRequestId()
const body = await request.json()
const { credential, workflowId, userId } = body
const { credential, workflowId } = body
if (!credential) {
logger.error('Missing credential in request')
return NextResponse.json({ error: 'Credential is required' }, { status: 400 })
}
if (userId !== undefined && userId !== null) {
const validation = validateAlphanumericId(userId, 'userId', 100)
if (!validation.isValid) {
logger.warn('Invalid Slack user ID', { userId, error: validation.error })
return NextResponse.json({ error: validation.error }, { status: 400 })
}
}
let accessToken: string
const isBotToken = credential.startsWith('xoxb-')
@@ -72,17 +63,6 @@ export async function POST(request: Request) {
logger.info('Using OAuth token for Slack API')
}
if (userId) {
const userData = await fetchSlackUser(accessToken, userId)
const user = {
id: userData.user.id,
name: userData.user.name,
real_name: userData.user.real_name || userData.user.name,
}
logger.info(`Successfully fetched Slack user: ${userId}`)
return NextResponse.json({ user })
}
const data = await fetchSlackUsers(accessToken)
const users = (data.members || [])
@@ -107,31 +87,6 @@ export async function POST(request: Request) {
}
}
async function fetchSlackUser(accessToken: string, userId: string) {
const url = new URL('https://slack.com/api/users.info')
url.searchParams.append('user', userId)
const response = await fetch(url.toString(), {
method: 'GET',
headers: {
Authorization: `Bearer ${accessToken}`,
'Content-Type': 'application/json',
},
})
if (!response.ok) {
throw new Error(`Slack API error: ${response.status} ${response.statusText}`)
}
const data = await response.json()
if (!data.ok) {
throw new Error(data.error || 'Failed to fetch user')
}
return data
}
async function fetchSlackUsers(accessToken: string) {
const url = new URL('https://slack.com/api/users.list')
url.searchParams.append('limit', '200')

View File

@@ -1,7 +1,4 @@
import { type Attributes, Client, type ConnectConfig } from 'ssh2'
import { createLogger } from '@/lib/logs/console/logger'
const logger = createLogger('SSHUtils')
// File type constants from POSIX
const S_IFMT = 0o170000 // bit mask for the file type bit field
@@ -35,6 +32,7 @@ function formatSSHError(err: Error, config: { host: string; port: number }): Err
const host = config.host
const port = config.port
// Connection refused - server not running or wrong port
if (errorMessage.includes('econnrefused') || errorMessage.includes('connection refused')) {
return new Error(
`Connection refused to ${host}:${port}. ` +
@@ -44,6 +42,7 @@ function formatSSHError(err: Error, config: { host: string; port: number }): Err
)
}
// Connection reset - server closed connection unexpectedly
if (errorMessage.includes('econnreset') || errorMessage.includes('connection reset')) {
return new Error(
`Connection reset by ${host}:${port}. ` +
@@ -54,6 +53,7 @@ function formatSSHError(err: Error, config: { host: string; port: number }): Err
)
}
// Timeout - server unreachable or slow
if (errorMessage.includes('etimedout') || errorMessage.includes('timeout')) {
return new Error(
`Connection timed out to ${host}:${port}. ` +
@@ -63,6 +63,7 @@ function formatSSHError(err: Error, config: { host: string; port: number }): Err
)
}
// DNS/hostname resolution
if (errorMessage.includes('enotfound') || errorMessage.includes('getaddrinfo')) {
return new Error(
`Could not resolve hostname "${host}". ` +
@@ -70,6 +71,7 @@ function formatSSHError(err: Error, config: { host: string; port: number }): Err
)
}
// Authentication failure
if (errorMessage.includes('authentication') || errorMessage.includes('auth')) {
return new Error(
`Authentication failed for user on ${host}:${port}. ` +
@@ -79,6 +81,7 @@ function formatSSHError(err: Error, config: { host: string; port: number }): Err
)
}
// Private key format issues
if (
errorMessage.includes('key') &&
(errorMessage.includes('parse') || errorMessage.includes('invalid'))
@@ -90,6 +93,7 @@ function formatSSHError(err: Error, config: { host: string; port: number }): Err
)
}
// Host key verification (first connection)
if (errorMessage.includes('host key') || errorMessage.includes('hostkey')) {
return new Error(
`Host key verification issue for ${host}. ` +
@@ -97,6 +101,7 @@ function formatSSHError(err: Error, config: { host: string; port: number }): Err
)
}
// Return original error with context if no specific match
return new Error(`SSH connection to ${host}:${port} failed: ${err.message}`)
}
@@ -200,119 +205,19 @@ export function executeSSHCommand(client: Client, command: string): Promise<SSHC
/**
* Sanitize command input to prevent command injection
*
* Removes null bytes and other dangerous control characters while preserving
* legitimate shell syntax. Logs warnings for potentially dangerous patterns.
*
* Note: This function does not block complex shell commands (pipes, redirects, etc.)
* as users legitimately need these features for remote command execution.
*
* @param command - The command to sanitize
* @returns The sanitized command string
*
* @example
* ```typescript
* const safeCommand = sanitizeCommand(userInput)
* // Use safeCommand for SSH execution
* ```
*/
export function sanitizeCommand(command: string): string {
let sanitized = command.replace(/\0/g, '')
sanitized = sanitized.replace(/[\x0B\x0C]/g, '')
sanitized = sanitized.trim()
const dangerousPatterns = [
{ pattern: /\$\(.*\)/, name: 'command substitution $()' },
{ pattern: /`.*`/, name: 'backtick command substitution' },
{ pattern: /;\s*rm\s+-rf/i, name: 'destructive rm -rf command' },
{ pattern: /;\s*dd\s+/i, name: 'dd command (disk operations)' },
{ pattern: /mkfs/i, name: 'filesystem formatting command' },
{ pattern: />\s*\/dev\/sd[a-z]/i, name: 'direct disk write' },
]
for (const { pattern, name } of dangerousPatterns) {
if (pattern.test(sanitized)) {
logger.warn(`Command contains ${name}`, {
command: sanitized.substring(0, 100) + (sanitized.length > 100 ? '...' : ''),
})
}
}
return sanitized
return command.trim()
}
/**
* Sanitize and validate file path to prevent path traversal attacks
*
* This function validates that a file path does not contain:
* - Null bytes
* - Path traversal sequences (.. or ../)
* - URL-encoded path traversal attempts
*
* @param path - The file path to sanitize and validate
* @returns The sanitized path if valid
* @throws Error if path traversal is detected
*
* @example
* ```typescript
* try {
* const safePath = sanitizePath(userInput)
* // Use safePath safely
* } catch (error) {
* // Handle invalid path
* }
* ```
* Sanitize file path - removes null bytes and trims whitespace
*/
export function sanitizePath(path: string): string {
let sanitized = path.replace(/\0/g, '')
sanitized = sanitized.trim()
if (sanitized.includes('%00')) {
logger.warn('Path contains URL-encoded null bytes', {
path: path.substring(0, 100),
})
throw new Error('Path contains invalid characters')
}
const pathTraversalPatterns = [
'../', // Standard Unix path traversal
'..\\', // Windows path traversal
'/../', // Mid-path traversal
'\\..\\', // Windows mid-path traversal
'%2e%2e%2f', // Fully encoded ../
'%2e%2e/', // Partially encoded ../
'%2e%2e%5c', // Fully encoded ..\
'%2e%2e\\', // Partially encoded ..\
'..%2f', // .. with encoded /
'..%5c', // .. with encoded \
'%252e%252e', // Double URL encoded ..
'..%252f', // .. with double encoded /
'..%255c', // .. with double encoded \
]
const lowerPath = sanitized.toLowerCase()
for (const pattern of pathTraversalPatterns) {
if (lowerPath.includes(pattern.toLowerCase())) {
logger.warn('Path traversal attempt detected', {
pattern,
path: path.substring(0, 100),
})
throw new Error('Path contains invalid path traversal sequences')
}
}
const segments = sanitized.split(/[/\\]/)
for (const segment of segments) {
if (segment === '..') {
logger.warn('Path traversal attempt detected (.. as path segment)', {
path: path.substring(0, 100),
})
throw new Error('Path contains invalid path traversal sequences')
}
}
return sanitized
}

View File

@@ -3,7 +3,6 @@ import { account } from '@sim/db/schema'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { validateEnum, validatePathSegment } from '@/lib/core/security/input-validation'
import { generateRequestId } from '@/lib/core/utils/request'
import { createLogger } from '@/lib/logs/console/logger'
import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
@@ -12,6 +11,7 @@ export const dynamic = 'force-dynamic'
const logger = createLogger('WealthboxItemsAPI')
// Interface for transformed Wealthbox items
interface WealthboxItem {
id: string
name: string
@@ -45,23 +45,12 @@ export async function GET(request: NextRequest) {
return NextResponse.json({ error: 'Credential ID is required' }, { status: 400 })
}
const credentialIdValidation = validatePathSegment(credentialId, {
paramName: 'credentialId',
maxLength: 100,
allowHyphens: true,
allowUnderscores: true,
allowDots: false,
})
if (!credentialIdValidation.isValid) {
logger.warn(`[${requestId}] Invalid credentialId format: ${credentialId}`)
return NextResponse.json({ error: credentialIdValidation.error }, { status: 400 })
}
const ALLOWED_TYPES = ['contact'] as const
const typeValidation = validateEnum(type, ALLOWED_TYPES, 'type')
if (!typeValidation.isValid) {
if (type !== 'contact') {
logger.warn(`[${requestId}] Invalid item type: ${type}`)
return NextResponse.json({ error: typeValidation.error }, { status: 400 })
return NextResponse.json(
{ error: 'Invalid item type. Only contact is supported.' },
{ status: 400 }
)
}
const credentials = await db.select().from(account).where(eq(account.id, credentialId)).limit(1)

View File

@@ -1,6 +1,5 @@
import { NextResponse } from 'next/server'
import { authorizeCredentialUse } from '@/lib/auth/credential-access'
import { validateAlphanumericId } from '@/lib/core/security/input-validation'
import { generateRequestId } from '@/lib/core/utils/request'
import { createLogger } from '@/lib/logs/console/logger'
import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
@@ -13,21 +12,13 @@ export async function POST(request: Request) {
try {
const requestId = generateRequestId()
const body = await request.json()
const { credential, workflowId, siteId } = body
const { credential, workflowId } = body
if (!credential) {
logger.error('Missing credential in request')
return NextResponse.json({ error: 'Credential is required' }, { status: 400 })
}
if (siteId) {
const siteIdValidation = validateAlphanumericId(siteId, 'siteId')
if (!siteIdValidation.isValid) {
logger.error('Invalid siteId', { error: siteIdValidation.error })
return NextResponse.json({ error: siteIdValidation.error }, { status: 400 })
}
}
const authz = await authorizeCredentialUse(request as any, {
credentialId: credential,
workflowId,
@@ -55,11 +46,7 @@ export async function POST(request: Request) {
)
}
const url = siteId
? `https://api.webflow.com/v2/sites/${siteId}`
: 'https://api.webflow.com/v2/sites'
const response = await fetch(url, {
const response = await fetch('https://api.webflow.com/v2/sites', {
headers: {
Authorization: `Bearer ${accessToken}`,
accept: 'application/json',
@@ -71,7 +58,6 @@ export async function POST(request: Request) {
logger.error('Failed to fetch Webflow sites', {
status: response.status,
error: errorData,
siteId: siteId || 'all',
})
return NextResponse.json(
{ error: 'Failed to fetch Webflow sites', details: errorData },
@@ -80,13 +66,7 @@ export async function POST(request: Request) {
}
const data = await response.json()
let sites: any[]
if (siteId) {
sites = [data]
} else {
sites = data.sites || []
}
const sites = data.sites || []
const formattedSites = sites.map((site: any) => ({
id: site.id,

View File

@@ -32,6 +32,7 @@ export async function GET(req: NextRequest) {
return NextResponse.json({ error: 'Missing email or token parameter' }, { status: 400 })
}
// Verify token and get email type
const tokenVerification = verifyUnsubscribeToken(email, token)
if (!tokenVerification.valid) {
logger.warn(`[${requestId}] Invalid unsubscribe token for email: ${email}`)
@@ -41,6 +42,7 @@ export async function GET(req: NextRequest) {
const emailType = tokenVerification.emailType as EmailType
const isTransactional = isTransactionalEmail(emailType)
// Get current preferences
const preferences = await getEmailPreferences(email)
logger.info(
@@ -65,42 +67,22 @@ export async function POST(req: NextRequest) {
const requestId = generateRequestId()
try {
const { searchParams } = new URL(req.url)
const contentType = req.headers.get('content-type') || ''
const body = await req.json()
const result = unsubscribeSchema.safeParse(body)
let email: string
let token: string
let type: 'all' | 'marketing' | 'updates' | 'notifications' = 'all'
if (contentType.includes('application/x-www-form-urlencoded')) {
email = searchParams.get('email') || ''
token = searchParams.get('token') || ''
if (!email || !token) {
logger.warn(`[${requestId}] One-click unsubscribe missing email or token in URL`)
return NextResponse.json({ error: 'Missing email or token parameter' }, { status: 400 })
}
logger.info(`[${requestId}] Processing one-click unsubscribe for: ${email}`)
} else {
const body = await req.json()
const result = unsubscribeSchema.safeParse(body)
if (!result.success) {
logger.warn(`[${requestId}] Invalid unsubscribe POST data`, {
errors: result.error.format(),
})
return NextResponse.json(
{ error: 'Invalid request data', details: result.error.format() },
{ status: 400 }
)
}
email = result.data.email
token = result.data.token
type = result.data.type
if (!result.success) {
logger.warn(`[${requestId}] Invalid unsubscribe POST data`, {
errors: result.error.format(),
})
return NextResponse.json(
{ error: 'Invalid request data', details: result.error.format() },
{ status: 400 }
)
}
const { email, token, type } = result.data
// Verify token and get email type
const tokenVerification = verifyUnsubscribeToken(email, token)
if (!tokenVerification.valid) {
logger.warn(`[${requestId}] Invalid unsubscribe token for email: ${email}`)
@@ -110,6 +92,7 @@ export async function POST(req: NextRequest) {
const emailType = tokenVerification.emailType as EmailType
const isTransactional = isTransactionalEmail(emailType)
// Prevent unsubscribing from transactional emails
if (isTransactional) {
logger.warn(`[${requestId}] Attempted to unsubscribe from transactional email: ${email}`)
return NextResponse.json(
@@ -123,6 +106,7 @@ export async function POST(req: NextRequest) {
)
}
// Process unsubscribe based on type
let success = false
switch (type) {
case 'all':
@@ -146,6 +130,7 @@ export async function POST(req: NextRequest) {
logger.info(`[${requestId}] Successfully unsubscribed ${email} from ${type}`)
// Return 200 for one-click unsubscribe compliance
return NextResponse.json(
{
success: true,

View File

@@ -0,0 +1,97 @@
import { db } from '@sim/db'
import { userStats, workflow } from '@sim/db/schema'
import { eq, sql } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { createLogger } from '@/lib/logs/console/logger'
const logger = createLogger('WorkflowStatsAPI')
const queryParamsSchema = z.object({
runs: z.coerce.number().int().min(1).max(100).default(1),
})
export async function POST(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const { id } = await params
const searchParams = request.nextUrl.searchParams
const validation = queryParamsSchema.safeParse({
runs: searchParams.get('runs'),
})
if (!validation.success) {
logger.error(`Invalid query parameters: ${validation.error.message}`)
return NextResponse.json(
{
error:
validation.error.errors[0]?.message ||
'Invalid number of runs. Must be between 1 and 100.',
},
{ status: 400 }
)
}
const { runs } = validation.data
try {
const [workflowRecord] = await db.select().from(workflow).where(eq(workflow.id, id)).limit(1)
if (!workflowRecord) {
return NextResponse.json({ error: `Workflow ${id} not found` }, { status: 404 })
}
try {
await db
.update(workflow)
.set({
runCount: workflowRecord.runCount + runs,
lastRunAt: new Date(),
})
.where(eq(workflow.id, id))
} catch (error) {
logger.error('Error updating workflow runCount:', error)
throw error
}
try {
const userStatsRecords = await db
.select()
.from(userStats)
.where(eq(userStats.userId, workflowRecord.userId))
if (userStatsRecords.length === 0) {
await db.insert(userStats).values({
id: crypto.randomUUID(),
userId: workflowRecord.userId,
totalManualExecutions: 0,
totalApiCalls: 0,
totalWebhookTriggers: 0,
totalScheduledExecutions: 0,
totalChatExecutions: 0,
totalTokensUsed: 0,
totalCost: '0.00',
lastActive: sql`now()`,
})
} else {
await db
.update(userStats)
.set({
lastActive: sql`now()`,
})
.where(eq(userStats.userId, workflowRecord.userId))
}
} catch (error) {
logger.error(`Error ensuring userStats for userId ${workflowRecord.userId}:`, error)
// Don't rethrow - we want to continue even if this fails
}
return NextResponse.json({
success: true,
runsAdded: runs,
newTotal: workflowRecord.runCount + runs,
})
} catch (error) {
logger.error('Error updating workflow stats:', error)
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}

View File

@@ -173,7 +173,7 @@ export async function GET(
// DELETE /api/workspaces/invitations/[invitationId] - Delete a workspace invitation
export async function DELETE(
_request: NextRequest,
_req: NextRequest,
{ params }: { params: Promise<{ invitationId: string }> }
) {
const { invitationId } = await params
@@ -221,7 +221,7 @@ export async function DELETE(
// POST /api/workspaces/invitations/[invitationId] - Resend a workspace invitation
export async function POST(
_request: NextRequest,
_req: NextRequest,
{ params }: { params: Promise<{ invitationId: string }> }
) {
const { invitationId } = await params

View File

@@ -29,24 +29,30 @@ export const metadata: Metadata = {
locale: 'en_US',
images: [
{
url: '/logo/primary/rounded.png',
width: 512,
height: 512,
alt: 'Sim - AI Agent Workflow Builder',
url: '/social/og-image.png',
width: 1200,
height: 630,
alt: 'Sim - Visual AI Workflow Builder',
type: 'image/png',
},
{
url: '/social/og-image-square.png',
width: 600,
height: 600,
alt: 'Sim Logo',
},
],
},
twitter: {
card: 'summary',
card: 'summary_large_image',
site: '@simdotai',
creator: '@simdotai',
title: 'Sim - AI Agent Workflow Builder | Open Source',
description:
'Open-source platform for agentic workflows. 60,000+ developers. Visual builder. 100+ integrations. SOC2 & HIPAA compliant.',
images: {
url: '/logo/primary/rounded.png',
alt: 'Sim - AI Agent Workflow Builder',
url: '/social/twitter-image.png',
alt: 'Sim - Visual AI Workflow Builder',
},
},
alternates: {
@@ -71,6 +77,7 @@ export const metadata: Metadata = {
category: 'technology',
classification: 'AI Development Tools',
referrer: 'origin-when-cross-origin',
// LLM SEO optimizations
other: {
'llm:content-type': 'AI workflow builder, visual programming, no-code AI development',
'llm:use-cases':

View File

@@ -1,88 +1,5 @@
import { db } from '@sim/db'
import { templateCreators, templates } from '@sim/db/schema'
import { eq } from 'drizzle-orm'
import type { Metadata } from 'next'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { createLogger } from '@/lib/logs/console/logger'
import TemplateDetails from '@/app/templates/[id]/template'
const logger = createLogger('TemplateMetadata')
/**
 * Generate dynamic metadata for template pages.
 * This provides OpenGraph images for social media sharing.
 *
 * Looks up the template (and its optional creator row) by route id and
 * builds Next.js `Metadata` with OpenGraph + Twitter card fields. Falls
 * back to generic copy when the template is missing or the query throws,
 * so the page never fails to render because of metadata.
 *
 * @param params - Route params promise containing the template `id`.
 * @returns Metadata for the template page (never rejects).
 */
export async function generateMetadata({
  params,
}: {
  params: Promise<{ id: string }>
}): Promise<Metadata> {
  const { id } = await params

  try {
    // Left join so a template without a creator row still resolves
    // (creator will be null rather than filtering the template out).
    const result = await db
      .select({
        template: templates,
        creator: templateCreators,
      })
      .from(templates)
      .leftJoin(templateCreators, eq(templates.creatorId, templateCreators.id))
      .where(eq(templates.id, id))
      .limit(1)

    if (result.length === 0) {
      return {
        title: 'Template Not Found',
        description: 'The requested template could not be found.',
      }
    }

    const { template, creator } = result[0]
    const baseUrl = getBaseUrl()
    // `details` is an untyped JSON column; narrow to the fields used here.
    const details = template.details as { tagline?: string; about?: string } | null
    const description = details?.tagline || 'AI workflow template on Sim'

    const hasOgImage = !!template.ogImageUrl
    // Fall back to the square site logo when no captured OG image exists;
    // dimensions below are switched to match whichever asset is used.
    const ogImageUrl = template.ogImageUrl || `${baseUrl}/logo/primary/rounded.png`

    return {
      title: template.name,
      description,
      openGraph: {
        title: template.name,
        description,
        type: 'website',
        url: `${baseUrl}/templates/${id}`,
        siteName: 'Sim',
        images: [
          {
            // 1200x630 for real OG captures, 512x512 for the logo fallback.
            url: ogImageUrl,
            width: hasOgImage ? 1200 : 512,
            height: hasOgImage ? 630 : 512,
            alt: `${template.name} - Workflow Preview`,
          },
        ],
      },
      twitter: {
        // Large card only when a real preview image is available.
        card: hasOgImage ? 'summary_large_image' : 'summary',
        title: template.name,
        description,
        images: [ogImageUrl],
        // `creator.details` is untyped JSON; presumably `xHandle` is the
        // creator's X/Twitter handle — TODO confirm against writer code.
        creator: creator?.details
          ? ((creator.details as Record<string, unknown>).xHandle as string) || undefined
          : undefined,
      },
    }
  } catch (error) {
    // Metadata must not break the page: log and return generic values.
    logger.error('Failed to generate template metadata:', error)
    return {
      title: 'Template',
      description: 'AI workflow template on Sim',
    }
  }
}
/**
* Public template detail page for unauthenticated users.
* Authenticated-user redirect is handled in templates/[id]/layout.tsx.

View File

@@ -39,6 +39,7 @@ function UnsubscribeContent() {
return
}
// Validate the unsubscribe link
fetch(
`/api/users/me/settings/unsubscribe?email=${encodeURIComponent(email)}&token=${encodeURIComponent(token)}`
)
@@ -80,7 +81,9 @@ function UnsubscribeContent() {
if (result.success) {
setUnsubscribed(true)
// Update the data to reflect the change
if (data) {
// Type-safe property construction with validation
const validTypes = ['all', 'marketing', 'updates', 'notifications'] as const
if (validTypes.includes(type)) {
if (type === 'all') {
@@ -189,6 +192,7 @@ function UnsubscribeContent() {
)
}
// Handle transactional emails
if (data?.isTransactional) {
return (
<div className='flex min-h-screen items-center justify-center bg-background p-4'>

View File

@@ -1,16 +1,8 @@
import { db } from '@sim/db'
import { templateCreators, templates } from '@sim/db/schema'
import { eq } from 'drizzle-orm'
import type { Metadata } from 'next'
import { redirect } from 'next/navigation'
import { getSession } from '@/lib/auth'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { createLogger } from '@/lib/logs/console/logger'
import { verifyWorkspaceMembership } from '@/app/api/workflows/utils'
import TemplateDetails from '@/app/templates/[id]/template'
const logger = createLogger('WorkspaceTemplateMetadata')
interface TemplatePageProps {
params: Promise<{
workspaceId: string
@@ -18,81 +10,6 @@ interface TemplatePageProps {
}>
}
/**
 * Generate dynamic metadata for workspace template pages.
 * This provides OpenGraph images for social media sharing.
 *
 * Same lookup as the public template metadata, but the canonical OpenGraph
 * URL points at the workspace-scoped route
 * (`/workspace/{workspaceId}/templates/{id}`). Falls back to generic copy
 * when the template is missing or the query throws, so metadata generation
 * never breaks the page.
 *
 * @param params - Route params promise with `workspaceId` and template `id`.
 * @returns Metadata for the workspace template page (never rejects).
 */
export async function generateMetadata({
  params,
}: {
  params: Promise<{ workspaceId: string; id: string }>
}): Promise<Metadata> {
  const { workspaceId, id } = await params

  try {
    // Left join keeps templates without a creator row (creator = null).
    const result = await db
      .select({
        template: templates,
        creator: templateCreators,
      })
      .from(templates)
      .leftJoin(templateCreators, eq(templates.creatorId, templateCreators.id))
      .where(eq(templates.id, id))
      .limit(1)

    if (result.length === 0) {
      return {
        title: 'Template Not Found',
        description: 'The requested template could not be found.',
      }
    }

    const { template, creator } = result[0]
    const baseUrl = getBaseUrl()
    // `details` is an untyped JSON column; narrow to the fields used here.
    const details = template.details as { tagline?: string; about?: string } | null
    const description = details?.tagline || 'AI workflow template on Sim'

    const hasOgImage = !!template.ogImageUrl
    // Square site logo as fallback when no captured OG image exists;
    // dimensions below switch to match whichever asset is used.
    const ogImageUrl = template.ogImageUrl || `${baseUrl}/logo/primary/rounded.png`

    return {
      title: template.name,
      description,
      openGraph: {
        title: template.name,
        description,
        type: 'website',
        // Workspace-scoped canonical URL (differs from the public route).
        url: `${baseUrl}/workspace/${workspaceId}/templates/${id}`,
        siteName: 'Sim',
        images: [
          {
            // 1200x630 for real OG captures, 512x512 for the logo fallback.
            url: ogImageUrl,
            width: hasOgImage ? 1200 : 512,
            height: hasOgImage ? 630 : 512,
            alt: `${template.name} - Workflow Preview`,
          },
        ],
      },
      twitter: {
        // Large card only when a real preview image is available.
        card: hasOgImage ? 'summary_large_image' : 'summary',
        title: template.name,
        description,
        images: [ogImageUrl],
        // `creator.details` is untyped JSON; presumably `xHandle` is the
        // creator's X/Twitter handle — TODO confirm against writer code.
        creator: creator?.details
          ? ((creator.details as Record<string, unknown>).xHandle as string) || undefined
          : undefined,
      },
    }
  } catch (error) {
    // Metadata must not break the page: log and return generic values.
    logger.error('Failed to generate workspace template metadata:', error)
    return {
      title: 'Template',
      description: 'AI workflow template on Sim',
    }
  }
}
/**
* Workspace-scoped template detail page.
* Requires authentication and workspace membership to access.
@@ -102,10 +19,12 @@ export default async function TemplatePage({ params }: TemplatePageProps) {
const { workspaceId, id } = await params
const session = await getSession()
// Redirect unauthenticated users to public template detail page
if (!session?.user?.id) {
redirect(`/templates/${id}`)
}
// Verify workspace membership
const hasPermission = await verifyWorkspaceMembership(session.user.id, workspaceId)
if (!hasPermission) {
redirect('/')

View File

@@ -1,6 +1,6 @@
'use client'
import React, { useEffect, useRef, useState } from 'react'
import { useEffect, useState } from 'react'
import { Loader2 } from 'lucide-react'
import {
Button,
@@ -18,7 +18,6 @@ import { Skeleton, TagInput } from '@/components/ui'
import { useSession } from '@/lib/auth/auth-client'
import { cn } from '@/lib/core/utils/cn'
import { createLogger } from '@/lib/logs/console/logger'
import { captureAndUploadOGImage, OG_IMAGE_HEIGHT, OG_IMAGE_WIDTH } from '@/lib/og'
import { WorkflowPreview } from '@/app/workspace/[workspaceId]/w/components/workflow-preview/workflow-preview'
import {
useCreateTemplate,
@@ -26,7 +25,6 @@ import {
useTemplateByWorkflow,
useUpdateTemplate,
} from '@/hooks/queries/templates'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
import type { WorkflowState } from '@/stores/workflows/workflow/types'
const logger = createLogger('TemplateDeploy')
@@ -81,9 +79,6 @@ export function TemplateDeploy({
const [showDeleteDialog, setShowDeleteDialog] = useState(false)
const [creatorOptions, setCreatorOptions] = useState<CreatorOption[]>([])
const [loadingCreators, setLoadingCreators] = useState(false)
const [isCapturing, setIsCapturing] = useState(false)
const previewContainerRef = useRef<HTMLDivElement>(null)
const ogCaptureRef = useRef<HTMLDivElement>(null)
const [formData, setFormData] = useState<TemplateFormData>(initialFormData)
@@ -213,8 +208,6 @@ export function TemplateDeploy({
tags: formData.tags,
}
let templateId: string
if (existingTemplate) {
await updateMutation.mutateAsync({
id: existingTemplate.id,
@@ -223,32 +216,11 @@ export function TemplateDeploy({
updateState: true,
},
})
templateId = existingTemplate.id
} else {
const result = await createMutation.mutateAsync({ ...templateData, workflowId })
templateId = result.id
await createMutation.mutateAsync({ ...templateData, workflowId })
}
logger.info(`Template ${existingTemplate ? 'updated' : 'created'} successfully`)
setIsCapturing(true)
requestAnimationFrame(() => {
requestAnimationFrame(async () => {
try {
if (ogCaptureRef.current) {
const ogUrl = await captureAndUploadOGImage(ogCaptureRef.current, templateId)
if (ogUrl) {
logger.info(`OG image uploaded for template ${templateId}: ${ogUrl}`)
}
}
} catch (ogError) {
logger.warn('Failed to capture/upload OG image:', ogError)
} finally {
setIsCapturing(false)
}
})
})
onDeploymentComplete?.()
} catch (error) {
logger.error('Failed to save template:', error)
@@ -303,7 +275,6 @@ export function TemplateDeploy({
Live Template
</Label>
<div
ref={previewContainerRef}
className='[&_*]:!cursor-default relative h-[260px] w-full cursor-default overflow-hidden rounded-[4px] border border-[var(--border)]'
onWheelCapture={(e) => {
if (e.ctrlKey || e.metaKey) return
@@ -452,65 +423,10 @@ export function TemplateDeploy({
</ModalFooter>
</ModalContent>
</Modal>
{/* Hidden container for OG image capture */}
{isCapturing && <OGCaptureContainer ref={ogCaptureRef} />}
</div>
)
}
/**
 * Hidden container for OG image capture.
 * Lazy-rendered only when capturing - gets workflow state from store on mount.
 *
 * Renders a `WorkflowPreview` into an off-screen, fixed-size div (the OG
 * image dimensions) so it can be rasterized for the template's OpenGraph
 * image. Returns null when the store has no blocks yet, since there is
 * nothing meaningful to capture.
 */
const OGCaptureContainer = React.forwardRef<HTMLDivElement>((_, ref) => {
  // Read the current workflow pieces directly from the store; the preview
  // is a snapshot, not a live editor.
  const blocks = useWorkflowStore((state) => state.blocks)
  const edges = useWorkflowStore((state) => state.edges)
  const loops = useWorkflowStore((state) => state.loops)
  const parallels = useWorkflowStore((state) => state.parallels)

  // Nothing to render for an empty workflow. (Safe w.r.t. hook order:
  // all hooks above run unconditionally before this early return.)
  if (!blocks || Object.keys(blocks).length === 0) {
    return null
  }

  // Assemble a self-contained WorkflowState snapshot; optional collections
  // are defaulted so the preview never sees undefined.
  const workflowState: WorkflowState = {
    blocks,
    edges: edges ?? [],
    loops: loops ?? {},
    parallels: parallels ?? {},
    lastSaved: Date.now(),
  }

  return (
    <div
      ref={ref}
      style={{
        // Positioned far off-screen rather than display:none so the
        // preview still lays out and paints for capture.
        position: 'absolute',
        left: '-9999px',
        top: '-9999px',
        width: OG_IMAGE_WIDTH,
        height: OG_IMAGE_HEIGHT,
        backgroundColor: '#0c0c0c',
        overflow: 'hidden',
      }}
      // Purely visual capture target; hide it from assistive tech.
      aria-hidden='true'
    >
      <WorkflowPreview
        workflowState={workflowState}
        showSubBlocks={false}
        height='100%'
        width='100%'
        isPannable={false}
        defaultZoom={0.8}
        fitPadding={0.2}
        lightweight
      />
    </div>
  )
})

OGCaptureContainer.displayName = 'OGCaptureContainer'
interface TemplatePreviewContentProps {
existingTemplate:
| {

View File

@@ -37,6 +37,7 @@ import { useWand } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-
import type { GenerationType } from '@/blocks/types'
import { createEnvVarPattern, createReferencePattern } from '@/executor/utils/reference-validation'
import { useTagSelection } from '@/hooks/use-tag-selection'
import { useTextHistory } from '@/hooks/use-text-history'
import { normalizeBlockName } from '@/stores/workflows/utils'
const logger = createLogger('Code')
@@ -305,6 +306,20 @@ export function Code({
},
})
// Text history for undo/redo with debouncing
const textHistory = useTextHistory({
blockId,
subBlockId,
value: code,
onChange: (newValue) => {
setCode(newValue)
if (!isPreview && !disabled) {
setStoreValue(newValue)
}
},
disabled: isPreview || disabled || readOnly || isAiStreaming,
})
const getDefaultValueString = () => {
if (defaultValue === undefined || defaultValue === null) return ''
if (typeof defaultValue === 'string') return defaultValue
@@ -348,10 +363,12 @@ export function Code({
useEffect(() => {
handleStreamStartRef.current = () => {
setCode('')
lastInternalValueRef.current = ''
}
handleGeneratedContentRef.current = (generatedCode: string) => {
setCode(generatedCode)
lastInternalValueRef.current = generatedCode
if (!isPreview && !disabled) {
setStoreValue(generatedCode)
}
@@ -387,14 +404,21 @@ export function Code({
}
}, [readOnly])
// Effects: Sync code with external value
// Ref to track the last value we set internally (to avoid sync loops)
const lastInternalValueRef = useRef<string>('')
// Effects: Sync code with external value (only for truly external changes)
useEffect(() => {
if (isAiStreaming) return
const valueString = value?.toString() ?? ''
if (valueString !== code) {
// Only sync if this is a genuine external change, not our own update
// This prevents resetting the undo history when we update the store
if (valueString !== code && valueString !== lastInternalValueRef.current) {
setCode(valueString)
lastInternalValueRef.current = valueString
}
}, [value, code, isAiStreaming])
}, [value, isAiStreaming]) // Removed 'code' from dependencies to prevent sync loops
// Effects: Track active line number for cursor position
useEffect(() => {
@@ -502,8 +526,9 @@ export function Code({
const dropPosition = textarea?.selectionStart ?? code.length
const newValue = `${code.slice(0, dropPosition)}<${code.slice(dropPosition)}`
setCode(newValue)
setStoreValue(newValue)
// Use textHistory for proper undo tracking
textHistory.handleChange(newValue)
lastInternalValueRef.current = newValue
const newCursorPosition = dropPosition + 1
setCursorPosition(newCursorPosition)
@@ -531,7 +556,9 @@ export function Code({
*/
const handleTagSelect = (newValue: string) => {
if (!isPreview && !readOnly) {
setCode(newValue)
// Use textHistory for proper undo tracking
textHistory.handleChange(newValue)
lastInternalValueRef.current = newValue
emitTagSelection(newValue)
}
setShowTags(false)
@@ -548,7 +575,9 @@ export function Code({
*/
const handleEnvVarSelect = (newValue: string) => {
if (!isPreview && !readOnly) {
setCode(newValue)
// Use textHistory for proper undo tracking
textHistory.handleChange(newValue)
lastInternalValueRef.current = newValue
emitTagSelection(newValue)
}
setShowEnvVars(false)
@@ -741,8 +770,10 @@ export function Code({
value={code}
onValueChange={(newCode) => {
if (!isAiStreaming && !isPreview && !disabled && !readOnly) {
setCode(newCode)
setStoreValue(newCode)
// Use textHistory for debounced undo/redo tracking
textHistory.handleChange(newCode)
// Track this as an internal change to prevent sync loops
lastInternalValueRef.current = newCode
const textarea = editorRef.current?.querySelector('textarea')
if (textarea) {
@@ -762,6 +793,10 @@ export function Code({
}
}}
onKeyDown={(e) => {
// Let text history handle undo/redo first
if (textHistory.handleKeyDown(e)) {
return
}
if (e.key === 'Escape') {
setShowTags(false)
setShowEnvVars(false)
@@ -770,6 +805,10 @@ export function Code({
e.preventDefault()
}
}}
onBlur={() => {
// Commit any pending text history changes on blur
textHistory.handleBlur()
}}
highlight={createHighlightFunction(effectiveLanguage, shouldHighlightReference)}
{...getCodeEditorProps({ isStreaming: isAiStreaming, isPreview, disabled })}
/>

View File

@@ -1,5 +1,5 @@
import type { ReactElement } from 'react'
import { useEffect, useRef, useState } from 'react'
import { useCallback, useEffect, useRef, useState } from 'react'
import 'prismjs/components/prism-json'
import { Wand2 } from 'lucide-react'
import Editor from 'react-simple-code-editor'
@@ -17,6 +17,7 @@ import {
createEnvVarPattern,
createWorkflowVariablePattern,
} from '@/executor/utils/reference-validation'
import { useTextHistoryStore } from '@/stores/text-history'
interface CodeEditorProps {
value: string
@@ -33,6 +34,11 @@ interface CodeEditorProps {
showWandButton?: boolean
onWandClick?: () => void
wandButtonDisabled?: boolean
/**
* Unique identifier for text history. When provided, enables undo/redo functionality.
* Format: "blockId:fieldName" e.g. "block-123:schema" or "block-123:code"
*/
historyId?: string
}
export function CodeEditor({
@@ -50,16 +56,125 @@ export function CodeEditor({
showWandButton = false,
onWandClick,
wandButtonDisabled = false,
historyId,
}: CodeEditorProps) {
const [code, setCode] = useState(value)
const [visualLineHeights, setVisualLineHeights] = useState<number[]>([])
const editorRef = useRef<HTMLDivElement>(null)
const lastInternalValueRef = useRef<string>(value)
const initializedRef = useRef(false)
// Text history store for undo/redo
const textHistoryStore = useTextHistoryStore()
// Parse historyId into blockId and subBlockId for the store
const [historyBlockId, historySubBlockId] = historyId?.split(':') ?? ['', '']
const hasHistory = Boolean(historyId && historyBlockId && historySubBlockId)
// Initialize history on mount
useEffect(() => {
setCode(value)
if (hasHistory && !initializedRef.current) {
textHistoryStore.initHistory(historyBlockId, historySubBlockId, value)
initializedRef.current = true
}
}, [hasHistory, historyBlockId, historySubBlockId, value, textHistoryStore])
// Sync external value changes (but avoid resetting undo history for internal changes)
useEffect(() => {
if (value !== code && value !== lastInternalValueRef.current) {
setCode(value)
lastInternalValueRef.current = value
}
}, [value])
// Handle value change with history tracking
const handleValueChange = useCallback(
(newCode: string) => {
setCode(newCode)
lastInternalValueRef.current = newCode
onChange(newCode)
// Record to history if enabled
if (hasHistory) {
textHistoryStore.recordChange(historyBlockId, historySubBlockId, newCode)
}
},
[onChange, hasHistory, historyBlockId, historySubBlockId, textHistoryStore]
)
// Handle undo
const handleUndo = useCallback(() => {
if (!hasHistory) return false
const previousValue = textHistoryStore.undo(historyBlockId, historySubBlockId)
if (previousValue !== null) {
setCode(previousValue)
lastInternalValueRef.current = previousValue
onChange(previousValue)
return true
}
return false
}, [hasHistory, historyBlockId, historySubBlockId, textHistoryStore, onChange])
// Handle redo
const handleRedo = useCallback(() => {
if (!hasHistory) return false
const nextValue = textHistoryStore.redo(historyBlockId, historySubBlockId)
if (nextValue !== null) {
setCode(nextValue)
lastInternalValueRef.current = nextValue
onChange(nextValue)
return true
}
return false
}, [hasHistory, historyBlockId, historySubBlockId, textHistoryStore, onChange])
// Handle keyboard events for undo/redo
const handleKeyDown = useCallback(
(e: React.KeyboardEvent) => {
if (disabled) return
const isMod = e.metaKey || e.ctrlKey
// Undo: Cmd+Z / Ctrl+Z
if (isMod && e.key === 'z' && !e.shiftKey && hasHistory) {
if (handleUndo()) {
e.preventDefault()
e.stopPropagation()
return
}
}
// Redo: Cmd+Shift+Z / Ctrl+Shift+Z / Ctrl+Y
if (hasHistory) {
if (
(isMod && e.key === 'z' && e.shiftKey) ||
(isMod && e.key === 'Z') ||
(e.ctrlKey && e.key === 'y')
) {
if (handleRedo()) {
e.preventDefault()
e.stopPropagation()
return
}
}
}
// Call parent's onKeyDown if provided
onKeyDown?.(e)
},
[disabled, hasHistory, handleUndo, handleRedo, onKeyDown]
)
// Handle blur - commit pending history
const handleBlur = useCallback(() => {
if (hasHistory) {
textHistoryStore.commitPending(historyBlockId, historySubBlockId)
}
}, [hasHistory, historyBlockId, historySubBlockId, textHistoryStore])
useEffect(() => {
if (!editorRef.current) return
@@ -211,11 +326,9 @@ export function CodeEditor({
<Editor
value={code}
onValueChange={(newCode) => {
setCode(newCode)
onChange(newCode)
}}
onKeyDown={onKeyDown}
onValueChange={handleValueChange}
onKeyDown={handleKeyDown}
onBlur={handleBlur}
highlight={(code) => customHighlight(code)}
disabled={disabled}
{...getCodeEditorProps({ disabled })}

View File

@@ -936,6 +936,7 @@ try {
gutterClassName='bg-[var(--bg)]'
disabled={schemaGeneration.isLoading || schemaGeneration.isStreaming}
onKeyDown={handleKeyDown}
historyId={`${blockId}:tool-schema`}
/>
</ModalTabsContent>
@@ -1018,6 +1019,7 @@ try {
disabled={codeGeneration.isLoading || codeGeneration.isStreaming}
onKeyDown={handleKeyDown}
schemaParameters={schemaParameters}
historyId={`${blockId}:tool-code`}
/>
{showEnvVars && (

View File

@@ -134,6 +134,7 @@ export const MicrosoftPlannerBlock: BlockConfig<MicrosoftPlannerResponse> = {
placeholder: 'Enter the bucket ID',
condition: { field: 'operation', value: ['read_bucket', 'update_bucket', 'delete_bucket'] },
dependsOn: ['credential'],
canonicalParamId: 'bucketId',
},
// ETag for update/delete operations

View File

@@ -181,6 +181,7 @@ export const SlackBlock: BlockConfig<SlackResponse> = {
id: 'threadTs',
title: 'Thread Timestamp',
type: 'short-input',
canonicalParamId: 'thread_ts',
placeholder: 'Reply to thread (e.g., 1405894322.002768)',
condition: {
field: 'operation',
@@ -262,6 +263,7 @@ export const SlackBlock: BlockConfig<SlackResponse> = {
id: 'channelLimit',
title: 'Channel Limit',
type: 'short-input',
canonicalParamId: 'limit',
placeholder: '100',
condition: {
field: 'operation',
@@ -273,6 +275,7 @@ export const SlackBlock: BlockConfig<SlackResponse> = {
id: 'memberLimit',
title: 'Member Limit',
type: 'short-input',
canonicalParamId: 'limit',
placeholder: '100',
condition: {
field: 'operation',
@@ -298,6 +301,7 @@ export const SlackBlock: BlockConfig<SlackResponse> = {
id: 'userLimit',
title: 'User Limit',
type: 'short-input',
canonicalParamId: 'limit',
placeholder: '100',
condition: {
field: 'operation',
@@ -354,6 +358,7 @@ export const SlackBlock: BlockConfig<SlackResponse> = {
id: 'updateTimestamp',
title: 'Message Timestamp',
type: 'short-input',
canonicalParamId: 'timestamp',
placeholder: 'Message timestamp (e.g., 1405894322.002768)',
condition: {
field: 'operation',
@@ -377,6 +382,7 @@ export const SlackBlock: BlockConfig<SlackResponse> = {
id: 'deleteTimestamp',
title: 'Message Timestamp',
type: 'short-input',
canonicalParamId: 'timestamp',
placeholder: 'Message timestamp (e.g., 1405894322.002768)',
condition: {
field: 'operation',
@@ -389,6 +395,7 @@ export const SlackBlock: BlockConfig<SlackResponse> = {
id: 'reactionTimestamp',
title: 'Message Timestamp',
type: 'short-input',
canonicalParamId: 'timestamp',
placeholder: 'Message timestamp (e.g., 1405894322.002768)',
condition: {
field: 'operation',
@@ -400,6 +407,7 @@ export const SlackBlock: BlockConfig<SlackResponse> = {
id: 'emojiName',
title: 'Emoji Name',
type: 'short-input',
canonicalParamId: 'name',
placeholder: 'Emoji name without colons (e.g., thumbsup, heart, eyes)',
condition: {
field: 'operation',
@@ -546,35 +554,47 @@ export const SlackBlock: BlockConfig<SlackResponse> = {
baseParams.content = content
break
case 'read': {
const parsedLimit = limit ? Number.parseInt(limit, 10) : 10
if (Number.isNaN(parsedLimit) || parsedLimit < 1 || parsedLimit > 15) {
throw new Error('Message limit must be between 1 and 15')
case 'read':
if (limit) {
const parsedLimit = Number.parseInt(limit, 10)
baseParams.limit = !Number.isNaN(parsedLimit) ? parsedLimit : 10
} else {
baseParams.limit = 10
}
baseParams.limit = parsedLimit
if (oldest) {
baseParams.oldest = oldest
}
break
}
case 'list_channels': {
case 'list_channels':
baseParams.includePrivate = includePrivate !== 'false'
baseParams.excludeArchived = true
baseParams.limit = channelLimit ? Number.parseInt(channelLimit, 10) : 100
if (channelLimit) {
const parsedLimit = Number.parseInt(channelLimit, 10)
baseParams.limit = !Number.isNaN(parsedLimit) ? parsedLimit : 100
} else {
baseParams.limit = 100
}
break
}
case 'list_members': {
baseParams.limit = memberLimit ? Number.parseInt(memberLimit, 10) : 100
case 'list_members':
if (memberLimit) {
const parsedLimit = Number.parseInt(memberLimit, 10)
baseParams.limit = !Number.isNaN(parsedLimit) ? parsedLimit : 100
} else {
baseParams.limit = 100
}
break
}
case 'list_users': {
case 'list_users':
baseParams.includeDeleted = includeDeleted === 'true'
baseParams.limit = userLimit ? Number.parseInt(userLimit, 10) : 100
if (userLimit) {
const parsedLimit = Number.parseInt(userLimit, 10)
baseParams.limit = !Number.isNaN(parsedLimit) ? parsedLimit : 100
} else {
baseParams.limit = 100
}
break
}
case 'get_user':
if (!userId) {

View File

@@ -70,6 +70,17 @@ export const WealthboxBlock: BlockConfig<WealthboxResponse> = {
title: 'Task ID',
type: 'short-input',
placeholder: 'Enter Task ID',
mode: 'basic',
canonicalParamId: 'taskId',
condition: { field: 'operation', value: ['read_task'] },
},
{
id: 'manualTaskId',
title: 'Task ID',
type: 'short-input',
canonicalParamId: 'taskId',
placeholder: 'Enter Task ID',
mode: 'advanced',
condition: { field: 'operation', value: ['read_task'] },
},
{
@@ -156,9 +167,12 @@ export const WealthboxBlock: BlockConfig<WealthboxResponse> = {
}
},
params: (params) => {
const { credential, operation, contactId, manualContactId, taskId, ...rest } = params
const { credential, operation, contactId, manualContactId, taskId, manualTaskId, ...rest } =
params
// Handle both selector and manual inputs
const effectiveContactId = (contactId || manualContactId || '').trim()
const effectiveTaskId = (taskId || manualTaskId || '').trim()
const baseParams = {
...rest,
@@ -211,6 +225,7 @@ export const WealthboxBlock: BlockConfig<WealthboxResponse> = {
contactId: { type: 'string', description: 'Contact identifier' },
manualContactId: { type: 'string', description: 'Manual contact identifier' },
taskId: { type: 'string', description: 'Task identifier' },
manualTaskId: { type: 'string', description: 'Manual task identifier' },
content: { type: 'string', description: 'Content text' },
firstName: { type: 'string', description: 'First name' },
lastName: { type: 'string', description: 'Last name' },

View File

@@ -128,6 +128,8 @@ export const DEFAULTS = {
BLOCK_TITLE: 'Untitled Block',
WORKFLOW_NAME: 'Workflow',
MAX_LOOP_ITERATIONS: 1000,
MAX_FOREACH_ITEMS: 1000,
MAX_PARALLEL_BRANCHES: 20,
MAX_WORKFLOW_DEPTH: 10,
EXECUTION_TIME: 0,
TOKENS: {

View File

@@ -4,6 +4,7 @@ import { LoopConstructor } from '@/executor/dag/construction/loops'
import { NodeConstructor } from '@/executor/dag/construction/nodes'
import { PathConstructor } from '@/executor/dag/construction/paths'
import type { DAGEdge, NodeMetadata } from '@/executor/dag/types'
import { buildSentinelStartId, extractBaseBlockId } from '@/executor/utils/subflow-utils'
import type {
SerializedBlock,
SerializedLoop,
@@ -79,6 +80,9 @@ export class DAGBuilder {
}
}
// Validate loop and parallel structure
this.validateSubflowStructure(dag)
logger.info('DAG built', {
totalNodes: dag.nodes.size,
loopCount: dag.loopConfigs.size,
@@ -105,4 +109,43 @@ export class DAGBuilder {
}
}
}
  /**
   * Validates that loops and parallels have proper internal structure.
   * Throws an error if a loop/parallel has no blocks inside or no connections from start.
   *
   * Iterates every registered loop and parallel config in the built DAG and
   * delegates the per-subflow checks to {@link validateSubflow}, labeling
   * errors as 'Loop' or 'Parallel' accordingly.
   *
   * @param dag - The fully constructed DAG to validate.
   * @throws Error when any loop/parallel is empty or disconnected from its start.
   */
  private validateSubflowStructure(dag: DAG): void {
    for (const [id, config] of dag.loopConfigs) {
      this.validateSubflow(dag, id, config.nodes, 'Loop')
    }

    for (const [id, config] of dag.parallelConfigs) {
      this.validateSubflow(dag, id, config.nodes, 'Parallel')
    }
  }
private validateSubflow(
dag: DAG,
id: string,
nodes: string[] | undefined,
type: 'Loop' | 'Parallel'
): void {
if (!nodes || nodes.length === 0) {
throw new Error(
`${type} has no blocks inside. Add at least one block to the ${type.toLowerCase()}.`
)
}
const sentinelStartNode = dag.nodes.get(buildSentinelStartId(id))
if (!sentinelStartNode) return
const hasConnections = Array.from(sentinelStartNode.outgoingEdges.values()).some((edge) =>
nodes.includes(extractBaseBlockId(edge.target))
)
if (!hasConnections) {
throw new Error(
`${type} start is not connected to any blocks. Connect a block to the ${type.toLowerCase()} start.`
)
}
}
}

View File

@@ -63,8 +63,10 @@ export class DAGExecutor {
const resolver = new VariableResolver(this.workflow, this.workflowVariables, state)
const loopOrchestrator = new LoopOrchestrator(dag, state, resolver)
loopOrchestrator.setContextExtensions(this.contextExtensions)
const parallelOrchestrator = new ParallelOrchestrator(dag, state)
parallelOrchestrator.setResolver(resolver)
parallelOrchestrator.setContextExtensions(this.contextExtensions)
const allHandlers = createBlockHandlers()
const blockExecutor = new BlockExecutor(allHandlers, resolver, this.contextExtensions, state)
const edgeManager = new EdgeManager(dag)

View File

@@ -14,6 +14,8 @@ export interface LoopScope {
condition?: string
loopType?: 'for' | 'forEach' | 'while' | 'doWhile'
skipFirstConditionCheck?: boolean
/** Error message if loop validation failed (e.g., exceeded max iterations) */
validationError?: string
}
export interface ParallelScope {
@@ -23,6 +25,8 @@ export interface ParallelScope {
completedCount: number
totalExpectedNodes: number
items?: any[]
/** Error message if parallel validation failed (e.g., exceeded max branches) */
validationError?: string
}
export class ExecutionState implements BlockStateController {

View File

@@ -5,14 +5,17 @@ import { buildLoopIndexCondition, DEFAULTS, EDGE } from '@/executor/constants'
import type { DAG } from '@/executor/dag/builder'
import type { EdgeManager } from '@/executor/execution/edge-manager'
import type { LoopScope } from '@/executor/execution/state'
import type { BlockStateController } from '@/executor/execution/types'
import type { BlockStateController, ContextExtensions } from '@/executor/execution/types'
import type { ExecutionContext, NormalizedBlockOutput } from '@/executor/types'
import type { LoopConfigWithNodes } from '@/executor/types/loop'
import { replaceValidReferences } from '@/executor/utils/reference-validation'
import {
addSubflowErrorLog,
buildSentinelEndId,
buildSentinelStartId,
extractBaseBlockId,
resolveArrayInput,
validateMaxCount,
} from '@/executor/utils/subflow-utils'
import type { VariableResolver } from '@/executor/variables/resolver'
import type { SerializedLoop } from '@/serializer/types'
@@ -32,6 +35,7 @@ export interface LoopContinuationResult {
export class LoopOrchestrator {
private edgeManager: EdgeManager | null = null
private contextExtensions: ContextExtensions | null = null
constructor(
private dag: DAG,
@@ -39,6 +43,10 @@ export class LoopOrchestrator {
private resolver: VariableResolver
) {}
setContextExtensions(contextExtensions: ContextExtensions): void {
this.contextExtensions = contextExtensions
}
setEdgeManager(edgeManager: EdgeManager): void {
this.edgeManager = edgeManager
}
@@ -48,7 +56,6 @@ export class LoopOrchestrator {
if (!loopConfig) {
throw new Error(`Loop config not found: ${loopId}`)
}
const scope: LoopScope = {
iteration: 0,
currentIterationOutputs: new Map(),
@@ -58,15 +65,70 @@ export class LoopOrchestrator {
const loopType = loopConfig.loopType
switch (loopType) {
case 'for':
case 'for': {
scope.loopType = 'for'
scope.maxIterations = loopConfig.iterations || DEFAULTS.MAX_LOOP_ITERATIONS
const requestedIterations = loopConfig.iterations || DEFAULTS.MAX_LOOP_ITERATIONS
const iterationError = validateMaxCount(
requestedIterations,
DEFAULTS.MAX_LOOP_ITERATIONS,
'For loop iterations'
)
if (iterationError) {
logger.error(iterationError, { loopId, requestedIterations })
this.addLoopErrorLog(ctx, loopId, loopType, iterationError, {
iterations: requestedIterations,
})
scope.maxIterations = 0
scope.validationError = iterationError
scope.condition = buildLoopIndexCondition(0)
ctx.loopExecutions?.set(loopId, scope)
throw new Error(iterationError)
}
scope.maxIterations = requestedIterations
scope.condition = buildLoopIndexCondition(scope.maxIterations)
break
}
case 'forEach': {
scope.loopType = 'forEach'
const items = this.resolveForEachItems(ctx, loopConfig.forEachItems)
let items: any[]
try {
items = this.resolveForEachItems(ctx, loopConfig.forEachItems)
} catch (error) {
const errorMessage = `ForEach loop resolution failed: ${error instanceof Error ? error.message : String(error)}`
logger.error(errorMessage, { loopId, forEachItems: loopConfig.forEachItems })
this.addLoopErrorLog(ctx, loopId, loopType, errorMessage, {
forEachItems: loopConfig.forEachItems,
})
scope.items = []
scope.maxIterations = 0
scope.validationError = errorMessage
scope.condition = buildLoopIndexCondition(0)
ctx.loopExecutions?.set(loopId, scope)
throw new Error(errorMessage)
}
const sizeError = validateMaxCount(
items.length,
DEFAULTS.MAX_FOREACH_ITEMS,
'ForEach loop collection size'
)
if (sizeError) {
logger.error(sizeError, { loopId, collectionSize: items.length })
this.addLoopErrorLog(ctx, loopId, loopType, sizeError, {
forEachItems: loopConfig.forEachItems,
collectionSize: items.length,
})
scope.items = []
scope.maxIterations = 0
scope.validationError = sizeError
scope.condition = buildLoopIndexCondition(0)
ctx.loopExecutions?.set(loopId, scope)
throw new Error(sizeError)
}
scope.items = items
scope.maxIterations = items.length
scope.item = items[0]
@@ -79,15 +141,35 @@ export class LoopOrchestrator {
scope.condition = loopConfig.whileCondition
break
case 'doWhile':
case 'doWhile': {
scope.loopType = 'doWhile'
if (loopConfig.doWhileCondition) {
scope.condition = loopConfig.doWhileCondition
} else {
scope.maxIterations = loopConfig.iterations || DEFAULTS.MAX_LOOP_ITERATIONS
const requestedIterations = loopConfig.iterations || DEFAULTS.MAX_LOOP_ITERATIONS
const iterationError = validateMaxCount(
requestedIterations,
DEFAULTS.MAX_LOOP_ITERATIONS,
'Do-While loop iterations'
)
if (iterationError) {
logger.error(iterationError, { loopId, requestedIterations })
this.addLoopErrorLog(ctx, loopId, loopType, iterationError, {
iterations: requestedIterations,
})
scope.maxIterations = 0
scope.validationError = iterationError
scope.condition = buildLoopIndexCondition(0)
ctx.loopExecutions?.set(loopId, scope)
throw new Error(iterationError)
}
scope.maxIterations = requestedIterations
scope.condition = buildLoopIndexCondition(scope.maxIterations)
}
break
}
default:
throw new Error(`Unknown loop type: ${loopType}`)
@@ -100,6 +182,23 @@ export class LoopOrchestrator {
return scope
}
private addLoopErrorLog(
ctx: ExecutionContext,
loopId: string,
loopType: string,
errorMessage: string,
inputData?: any
): void {
addSubflowErrorLog(
ctx,
loopId,
'loop',
errorMessage,
{ loopType, ...inputData },
this.contextExtensions
)
}
storeLoopNodeOutput(
ctx: ExecutionContext,
loopId: string,
@@ -412,54 +511,6 @@ export class LoopOrchestrator {
}
private resolveForEachItems(ctx: ExecutionContext, items: any): any[] {
if (Array.isArray(items)) {
return items
}
if (typeof items === 'object' && items !== null) {
return Object.entries(items)
}
if (typeof items === 'string') {
if (items.startsWith('<') && items.endsWith('>')) {
const resolved = this.resolver.resolveSingleReference(ctx, '', items)
if (Array.isArray(resolved)) {
return resolved
}
return []
}
try {
const normalized = items.replace(/'/g, '"')
const parsed = JSON.parse(normalized)
if (Array.isArray(parsed)) {
return parsed
}
return []
} catch (error) {
logger.error('Failed to parse forEach items', { items, error })
return []
}
}
try {
const resolved = this.resolver.resolveInputs(ctx, 'loop_foreach_items', { items }).items
if (Array.isArray(resolved)) {
return resolved
}
logger.warn('ForEach items did not resolve to array', {
items,
resolved,
})
return []
} catch (error: any) {
logger.error('Error resolving forEach items, returning empty array:', {
error: error.message,
})
return []
}
return resolveArrayInput(ctx, items, this.resolver)
}
}

View File

@@ -1,15 +1,19 @@
import { createLogger } from '@/lib/logs/console/logger'
import { DEFAULTS } from '@/executor/constants'
import type { DAG, DAGNode } from '@/executor/dag/builder'
import type { ParallelScope } from '@/executor/execution/state'
import type { BlockStateWriter } from '@/executor/execution/types'
import type { BlockStateWriter, ContextExtensions } from '@/executor/execution/types'
import type { ExecutionContext, NormalizedBlockOutput } from '@/executor/types'
import type { ParallelConfigWithNodes } from '@/executor/types/parallel'
import {
addSubflowErrorLog,
buildBranchNodeId,
calculateBranchCount,
extractBaseBlockId,
extractBranchIndex,
parseDistributionItems,
resolveArrayInput,
validateMaxCount,
} from '@/executor/utils/subflow-utils'
import type { VariableResolver } from '@/executor/variables/resolver'
import type { SerializedParallel } from '@/serializer/types'
@@ -32,6 +36,7 @@ export interface ParallelAggregationResult {
export class ParallelOrchestrator {
private resolver: VariableResolver | null = null
private contextExtensions: ContextExtensions | null = null
constructor(
private dag: DAG,
@@ -42,6 +47,10 @@ export class ParallelOrchestrator {
this.resolver = resolver
}
setContextExtensions(contextExtensions: ContextExtensions): void {
this.contextExtensions = contextExtensions
}
initializeParallelScope(
ctx: ExecutionContext,
parallelId: string,
@@ -49,11 +58,42 @@ export class ParallelOrchestrator {
terminalNodesCount = 1
): ParallelScope {
const parallelConfig = this.dag.parallelConfigs.get(parallelId)
const items = parallelConfig ? this.resolveDistributionItems(ctx, parallelConfig) : undefined
// If we have more items than pre-built branches, expand the DAG
let items: any[] | undefined
if (parallelConfig) {
try {
items = this.resolveDistributionItems(ctx, parallelConfig)
} catch (error) {
const errorMessage = `Parallel distribution resolution failed: ${error instanceof Error ? error.message : String(error)}`
logger.error(errorMessage, {
parallelId,
distribution: parallelConfig.distribution,
})
this.addParallelErrorLog(ctx, parallelId, errorMessage, {
distribution: parallelConfig.distribution,
})
this.setErrorScope(ctx, parallelId, errorMessage)
throw new Error(errorMessage)
}
}
const actualBranchCount = items && items.length > totalBranches ? items.length : totalBranches
const branchError = validateMaxCount(
actualBranchCount,
DEFAULTS.MAX_PARALLEL_BRANCHES,
'Parallel branch count'
)
if (branchError) {
logger.error(branchError, { parallelId, actualBranchCount })
this.addParallelErrorLog(ctx, parallelId, branchError, {
distribution: parallelConfig?.distribution,
branchCount: actualBranchCount,
})
this.setErrorScope(ctx, parallelId, branchError)
throw new Error(branchError)
}
const scope: ParallelScope = {
parallelId,
totalBranches: actualBranchCount,
@@ -108,6 +148,38 @@ export class ParallelOrchestrator {
return scope
}
private addParallelErrorLog(
ctx: ExecutionContext,
parallelId: string,
errorMessage: string,
inputData?: any
): void {
addSubflowErrorLog(
ctx,
parallelId,
'parallel',
errorMessage,
inputData || {},
this.contextExtensions
)
}
/**
 * Registers a zero-branch scope for a parallel block whose setup failed,
 * so downstream consumers can surface the validation error.
 */
private setErrorScope(ctx: ExecutionContext, parallelId: string, errorMessage: string): void {
  const failedScope: ParallelScope = {
    parallelId,
    totalBranches: 0,
    branchOutputs: new Map(),
    completedCount: 0,
    totalExpectedNodes: 0,
    items: [],
    validationError: errorMessage,
  }
  // Lazily create the executions map on first use, then record the failed scope.
  ctx.parallelExecutions ??= new Map()
  ctx.parallelExecutions.set(parallelId, failedScope)
}
/**
* Dynamically expand the DAG to include additional branch nodes when
* the resolved item count exceeds the pre-built branch count.
@@ -291,63 +363,11 @@ export class ParallelOrchestrator {
}
}
/**
* Resolve distribution items at runtime, handling references like <previousBlock.items>
* This mirrors how LoopOrchestrator.resolveForEachItems works.
*/
private resolveDistributionItems(ctx: ExecutionContext, config: SerializedParallel): any[] {
const rawItems = config.distribution
if (rawItems === undefined || rawItems === null) {
if (config.distribution === undefined || config.distribution === null) {
return []
}
// Already an array - return as-is
if (Array.isArray(rawItems)) {
return rawItems
}
// Object - convert to entries array (consistent with loop forEach behavior)
if (typeof rawItems === 'object') {
return Object.entries(rawItems)
}
// String handling
if (typeof rawItems === 'string') {
// Resolve references at runtime using the variable resolver
if (rawItems.startsWith('<') && rawItems.endsWith('>') && this.resolver) {
const resolved = this.resolver.resolveSingleReference(ctx, '', rawItems)
if (Array.isArray(resolved)) {
return resolved
}
if (typeof resolved === 'object' && resolved !== null) {
return Object.entries(resolved)
}
logger.warn('Distribution reference did not resolve to array or object', {
rawItems,
resolved,
})
return []
}
// Try to parse as JSON
try {
const normalized = rawItems.replace(/'/g, '"')
const parsed = JSON.parse(normalized)
if (Array.isArray(parsed)) {
return parsed
}
if (typeof parsed === 'object' && parsed !== null) {
return Object.entries(parsed)
}
return []
} catch (error) {
logger.error('Failed to parse distribution items', { rawItems, error })
return []
}
}
return []
return resolveArrayInput(ctx, config.distribution, this.resolver)
}
handleParallelBranchCompletion(

View File

@@ -1,5 +1,8 @@
import { createLogger } from '@/lib/logs/console/logger'
import { LOOP, PARALLEL, PARSING, REFERENCE } from '@/executor/constants'
import type { ContextExtensions } from '@/executor/execution/types'
import type { BlockLog, ExecutionContext } from '@/executor/types'
import type { VariableResolver } from '@/executor/variables/resolver'
import type { SerializedParallel } from '@/serializer/types'
const logger = createLogger('SubflowUtils')
@@ -132,3 +135,131 @@ export function normalizeNodeId(nodeId: string): string {
}
return nodeId
}
/**
 * Checks whether `count` stays within the allowed maximum.
 *
 * @param count - The observed count (iterations, items, branches, ...)
 * @param max - The hard upper limit
 * @param itemType - Human-readable label used in the error message
 * @returns A descriptive error message when the limit is exceeded,
 *   or `undefined` when the count is acceptable.
 */
export function validateMaxCount(count: number, max: number, itemType: string): string | undefined {
  return count > max
    ? `${itemType} (${count}) exceeds maximum allowed (${max}). Execution blocked.`
    : undefined
}
/**
 * Resolves array input at runtime. Handles arrays, objects, workflow
 * references (e.g. `<block.items>`), and JSON-ish strings. Used by both
 * loop forEach and parallel distribution resolution.
 *
 * Object inputs (and references/JSON values that resolve to objects) are
 * converted to `[key, value]` entry pairs via `Object.entries`.
 *
 * @param ctx - Current execution context, passed through to the resolver
 * @param items - Raw input: array, object, reference string, JSON string, or anything else
 * @param resolver - Variable resolver used for reference/fallback resolution; may be null
 * @returns The resolved array (empty when `items` is unresolvable and no resolver exists)
 * @throws Error when a reference or string cannot be resolved to an array/object
 */
export function resolveArrayInput(
  ctx: ExecutionContext,
  items: any,
  resolver: VariableResolver | null
): any[] {
  if (Array.isArray(items)) {
    return items
  }
  if (typeof items === 'object' && items !== null) {
    return Object.entries(items)
  }
  if (typeof items === 'string') {
    if (items.startsWith(REFERENCE.START) && items.endsWith(REFERENCE.END) && resolver) {
      // Resolve inside the try, but validate the shape OUTSIDE it: the previous
      // version threw its own "did not resolve" error inside the try and then
      // re-discriminated it by message prefix in the catch, which would also
      // pass through any upstream error whose message happened to share the
      // prefix. This structure makes the classification unambiguous.
      let resolved: any
      try {
        resolved = resolver.resolveSingleReference(ctx, '', items)
      } catch (error) {
        throw new Error(
          `Failed to resolve reference "${items}": ${error instanceof Error ? error.message : String(error)}`
        )
      }
      if (Array.isArray(resolved)) {
        return resolved
      }
      if (typeof resolved === 'object' && resolved !== null) {
        return Object.entries(resolved)
      }
      throw new Error(`Reference "${items}" did not resolve to an array or object`)
    }
    // Best-effort JSON parse; single quotes are normalized to double quotes so
    // loosely-written literals like "['a', 'b']" still parse.
    // NOTE(review): this also rewrites apostrophes inside string values — confirm acceptable.
    let parsed: any
    try {
      parsed = JSON.parse(items.replace(/'/g, '"'))
    } catch {
      throw new Error(`Failed to parse items as JSON: "${items}"`)
    }
    if (Array.isArray(parsed)) {
      return parsed
    }
    if (typeof parsed === 'object' && parsed !== null) {
      return Object.entries(parsed)
    }
    throw new Error(`Parsed value is not an array or object`)
  }
  if (resolver) {
    // Fall back to generic input resolution for any other value type.
    let resolved: any
    try {
      resolved = resolver.resolveInputs(ctx, 'subflow_items', { items }).items
    } catch (error) {
      throw new Error(
        `Failed to resolve items: ${error instanceof Error ? error.message : String(error)}`
      )
    }
    if (Array.isArray(resolved)) {
      return resolved
    }
    throw new Error(`Resolved items is not an array`)
  }
  return []
}
/**
 * Creates and logs a failure entry for a subflow (loop or parallel) block,
 * then notifies the console callback when one is attached.
 *
 * @param ctx - Execution context whose blockLogs receives the entry
 * @param blockId - Id of the failing loop/parallel block
 * @param blockType - Which subflow kind failed
 * @param errorMessage - Human-readable failure reason
 * @param inputData - Input snapshot recorded with the log entry
 * @param contextExtensions - Optional console hooks (onBlockComplete)
 */
export function addSubflowErrorLog(
  ctx: ExecutionContext,
  blockId: string,
  blockType: 'loop' | 'parallel',
  errorMessage: string,
  inputData: Record<string, any>,
  contextExtensions: ContextExtensions | null
): void {
  const timestamp = new Date().toISOString()

  // Prefer the user-assigned block name; fall back to a generic label.
  const workflowBlock = ctx.workflow?.blocks?.find((candidate) => candidate.id === blockId)
  const displayName = workflowBlock?.metadata?.name || (blockType === 'loop' ? 'Loop' : 'Parallel')

  // Tag the entry with the matching subflow id so downstream grouping works.
  const subflowTag = blockType === 'loop' ? { loopId: blockId } : { parallelId: blockId }

  const entry: BlockLog = {
    blockId,
    blockName: displayName,
    blockType,
    startedAt: timestamp,
    endedAt: timestamp,
    durationMs: 0, // Validation failures block execution before any work happens.
    success: false,
    error: errorMessage,
    input: inputData,
    output: { error: errorMessage },
    ...subflowTag,
  }
  ctx.blockLogs.push(entry)

  // Propagate the failure to the live console, if a listener is registered.
  contextExtensions?.onBlockComplete?.(blockId, displayName, blockType, {
    input: inputData,
    output: { error: errorMessage },
    executionTime: 0,
  })
}

View File

@@ -0,0 +1,196 @@
import { useCallback, useEffect, useRef } from 'react'
import { useTextHistoryStore } from '@/stores/text-history'
interface UseTextHistoryOptions {
/** Block ID for the text field */
blockId: string
/** Sub-block ID for the text field */
subBlockId: string
/** Current value of the text field */
value: string
/** Callback to update the value */
onChange: (value: string) => void
/** Whether the field is disabled/readonly */
disabled?: boolean
}
interface UseTextHistoryResult {
/**
* Handle text change - records to history with debouncing
*/
handleChange: (newValue: string) => void
/**
* Handle keyboard events for undo/redo
* Returns true if the event was handled
*/
handleKeyDown: (e: React.KeyboardEvent) => boolean
/**
* Handle blur - commits any pending changes
*/
handleBlur: () => void
/**
* Undo the last change
*/
undo: () => void
/**
* Redo the last undone change
*/
redo: () => void
/**
* Whether undo is available
*/
canUndo: boolean
/**
* Whether redo is available
*/
canRedo: boolean
}
/**
 * Hook for managing text undo/redo history for a specific text field.
 *
 * @remarks
 * - Provides debounced history recording (coalesces rapid changes)
 * - Handles Cmd+Z/Ctrl+Z for undo, Cmd+Shift+Z/Ctrl+Y for redo
 * - Commits pending changes on blur to preserve history
 * - Each blockId:subBlockId pair has its own independent history
 *
 * @example
 * ```tsx
 * const { handleChange, handleKeyDown, handleBlur } = useTextHistory({
 *   blockId,
 *   subBlockId,
 *   value: code,
 *   onChange: (newCode) => {
 *     setCode(newCode)
 *     setStoreValue(newCode)
 *   },
 * })
 *
 * <textarea
 *   value={code}
 *   onChange={(e) => handleChange(e.target.value)}
 *   onKeyDown={handleKeyDown}
 *   onBlur={handleBlur}
 * />
 * ```
 */
export function useTextHistory({
  blockId,
  subBlockId,
  value,
  onChange,
  disabled = false,
}: UseTextHistoryOptions): UseTextHistoryResult {
  const store = useTextHistoryStore()
  // Guards one-time history initialization across re-renders.
  const initializedRef = useRef(false)
  // Tracks the last value this hook produced or observed, so external changes
  // (AI generation, store sync) can be told apart from our own updates.
  const lastExternalValueRef = useRef(value)

  // Initialize history on mount. The initializedRef guard means `value` in the
  // dep array only matters for the very first run with non-empty ids.
  useEffect(() => {
    if (!initializedRef.current && blockId && subBlockId) {
      store.initHistory(blockId, subBlockId, value)
      initializedRef.current = true
    }
  }, [blockId, subBlockId, value, store])

  // Handle external value changes (e.g., from AI generation or store sync).
  // handleChange/undo/redo update lastExternalValueRef themselves, so this
  // effect only fires for values that arrived from outside the hook.
  useEffect(() => {
    if (value !== lastExternalValueRef.current) {
      // This is an external change: commit any pending entry, record the new
      // value, and commit again so the external value lands as its own
      // fully-committed history entry (not coalesced with typed edits).
      store.commitPending(blockId, subBlockId)
      store.recordChange(blockId, subBlockId, value)
      store.commitPending(blockId, subBlockId)
      lastExternalValueRef.current = value
    }
  }, [value, blockId, subBlockId, store])

  const handleChange = useCallback(
    (newValue: string) => {
      if (disabled) return
      // Update the external value immediately
      onChange(newValue)
      lastExternalValueRef.current = newValue
      // Record to history with debouncing (the store coalesces rapid changes)
      store.recordChange(blockId, subBlockId, newValue)
    },
    [blockId, subBlockId, onChange, disabled, store]
  )

  const undo = useCallback(() => {
    if (disabled) return
    // store.undo returns the value to restore, or null when at history start.
    const previousValue = store.undo(blockId, subBlockId)
    if (previousValue !== null) {
      onChange(previousValue)
      // Mark as our own change so the external-change effect does not re-record it.
      lastExternalValueRef.current = previousValue
    }
  }, [blockId, subBlockId, onChange, disabled, store])

  const redo = useCallback(() => {
    if (disabled) return
    // store.redo returns the value to restore, or null when nothing to redo.
    const nextValue = store.redo(blockId, subBlockId)
    if (nextValue !== null) {
      onChange(nextValue)
      lastExternalValueRef.current = nextValue
    }
  }, [blockId, subBlockId, onChange, disabled, store])

  const handleKeyDown = useCallback(
    (e: React.KeyboardEvent): boolean => {
      if (disabled) return false

      const isMod = e.metaKey || e.ctrlKey

      // Undo: Cmd+Z / Ctrl+Z
      if (isMod && e.key === 'z' && !e.shiftKey) {
        e.preventDefault()
        e.stopPropagation()
        undo()
        return true
      }

      // Redo: Cmd+Shift+Z / Ctrl+Shift+Z / Ctrl+Y
      // The `e.key === 'Z'` clause catches the uppercase key reported while
      // Shift is held. NOTE(review): it also matches Cmd+Z with CapsLock on,
      // which would redo instead of undo — confirm this is intended.
      if (
        (isMod && e.key === 'z' && e.shiftKey) ||
        (isMod && e.key === 'Z') ||
        (e.ctrlKey && e.key === 'y')
      ) {
        e.preventDefault()
        e.stopPropagation()
        redo()
        return true
      }

      return false
    },
    [disabled, undo, redo]
  )

  const handleBlur = useCallback(() => {
    // Commit any pending changes when the field loses focus.
    // NOTE(review): not gated on `disabled` — commit on blur appears harmless
    // either way, but confirm against the store's semantics.
    store.commitPending(blockId, subBlockId)
  }, [blockId, subBlockId, store])

  // Queried every render so toolbar buttons stay in sync with the store.
  const canUndo = store.canUndo(blockId, subBlockId)
  const canRedo = store.canRedo(blockId, subBlockId)

  return {
    handleChange,
    handleKeyDown,
    handleBlur,
    undo,
    redo,
    canUndo,
    canRedo,
  }
}

View File

@@ -138,7 +138,6 @@ export const env = createEnv({
S3_CHAT_BUCKET_NAME: z.string().optional(), // S3 bucket for chat logos
S3_COPILOT_BUCKET_NAME: z.string().optional(), // S3 bucket for copilot files
S3_PROFILE_PICTURES_BUCKET_NAME: z.string().optional(), // S3 bucket for profile pictures
S3_OG_IMAGES_BUCKET_NAME: z.string().optional(), // S3 bucket for OpenGraph images
// Cloud Storage - Azure Blob
AZURE_ACCOUNT_NAME: z.string().optional(), // Azure storage account name
@@ -150,7 +149,6 @@ export const env = createEnv({
AZURE_STORAGE_CHAT_CONTAINER_NAME: z.string().optional(), // Azure container for chat logos
AZURE_STORAGE_COPILOT_CONTAINER_NAME: z.string().optional(), // Azure container for copilot files
AZURE_STORAGE_PROFILE_PICTURES_CONTAINER_NAME: z.string().optional(), // Azure container for profile pictures
AZURE_STORAGE_OG_IMAGES_CONTAINER_NAME: z.string().optional(), // Azure container for OpenGraph images
// Data Retention
FREE_PLAN_LOG_RETENTION_DAYS: z.string().optional(), // Log retention days for free plan users

View File

@@ -1,5 +1,4 @@
import { env, getEnv } from '../config/env'
import { isDev } from '../config/feature-flags'
/**
* Content Security Policy (CSP) configuration builder
@@ -80,16 +79,10 @@ export const buildTimeCSPDirectives: CSPDirectives = {
'connect-src': [
"'self'",
env.NEXT_PUBLIC_APP_URL || '',
// Only include localhost fallbacks in development mode
...(env.OLLAMA_URL ? [env.OLLAMA_URL] : isDev ? ['http://localhost:11434'] : []),
...(env.NEXT_PUBLIC_SOCKET_URL
? [
env.NEXT_PUBLIC_SOCKET_URL,
env.NEXT_PUBLIC_SOCKET_URL.replace('http://', 'ws://').replace('https://', 'wss://'),
]
: isDev
? ['http://localhost:3002', 'ws://localhost:3002']
: []),
env.OLLAMA_URL || 'http://localhost:11434',
env.NEXT_PUBLIC_SOCKET_URL || 'http://localhost:3002',
env.NEXT_PUBLIC_SOCKET_URL?.replace('http://', 'ws://').replace('https://', 'wss://') ||
'ws://localhost:3002',
'https://api.browser-use.com',
'https://api.exa.ai',
'https://api.firecrawl.dev',
@@ -135,16 +128,11 @@ export function buildCSPString(directives: CSPDirectives): string {
* This maintains compatibility with existing inline scripts while fixing Docker env var issues
*/
export function generateRuntimeCSP(): string {
const socketUrl = getEnv('NEXT_PUBLIC_SOCKET_URL') || 'http://localhost:3002'
const socketWsUrl =
socketUrl.replace('http://', 'ws://').replace('https://', 'wss://') || 'ws://localhost:3002'
const appUrl = getEnv('NEXT_PUBLIC_APP_URL') || ''
// Only include localhost URLs in development or when explicitly configured
const socketUrl = getEnv('NEXT_PUBLIC_SOCKET_URL') || (isDev ? 'http://localhost:3002' : '')
const socketWsUrl = socketUrl
? socketUrl.replace('http://', 'ws://').replace('https://', 'wss://')
: isDev
? 'ws://localhost:3002'
: ''
const ollamaUrl = getEnv('OLLAMA_URL') || (isDev ? 'http://localhost:11434' : '')
const ollamaUrl = getEnv('OLLAMA_URL') || 'http://localhost:11434'
const brandLogoDomains = getHostnameFromUrl(getEnv('NEXT_PUBLIC_BRAND_LOGO_URL'))
const brandFaviconDomains = getHostnameFromUrl(getEnv('NEXT_PUBLIC_BRAND_FAVICON_URL'))

View File

@@ -958,112 +958,3 @@ export function createPinnedUrl(originalUrl: string, resolvedIP: string): string
const port = parsed.port ? `:${parsed.port}` : ''
return `${parsed.protocol}//${resolvedIP}${port}${parsed.pathname}${parsed.search}`
}
/**
 * Validates a Google Calendar ID
 *
 * Google Calendar IDs can be:
 * - "primary" (literal string for the user's primary calendar)
 * - Email addresses (for user calendars)
 * - Alphanumeric strings with hyphens, underscores, dots, and a few URL-safe
 *   symbols (for other calendars)
 *
 * This validator allows these legitimate formats while blocking path traversal and injection attempts.
 *
 * @param value - The calendar ID to validate
 * @param paramName - Name of the parameter for error messages
 * @returns ValidationResult
 *
 * @example
 * ```typescript
 * const result = validateGoogleCalendarId(calendarId, 'calendarId')
 * if (!result.isValid) {
 *   return NextResponse.json({ error: result.error }, { status: 400 })
 * }
 * ```
 */
export function validateGoogleCalendarId(
  value: string | null | undefined,
  paramName = 'calendarId'
): ValidationResult {
  if (value === null || value === undefined || value === '') {
    return {
      isValid: false,
      error: `${paramName} is required`,
    }
  }

  // The literal "primary" always refers to the authenticated user's calendar.
  if (value === 'primary') {
    return { isValid: true, sanitized: value }
  }

  // Enforce the length cap before any format acceptance. Previously this check
  // ran last — after the email fast-path had already returned valid — so an
  // overlong email address could bypass the 255-character limit entirely.
  if (value.length > 255) {
    logger.warn('Google Calendar ID exceeds maximum length', {
      paramName,
      length: value.length,
    })
    return {
      isValid: false,
      error: `${paramName} exceeds maximum length of 255 characters`,
    }
  }

  // Reject raw and percent-encoded path traversal sequences (case-insensitive).
  const pathTraversalPatterns = [
    '../',
    '..\\',
    '%2e%2e%2f',
    '%2e%2e/',
    '..%2f',
    '%2e%2e%5c',
    '%2e%2e\\',
    '..%5c',
    '%252e%252e%252f',
  ]

  const lowerValue = value.toLowerCase()
  for (const pattern of pathTraversalPatterns) {
    if (lowerValue.includes(pattern)) {
      logger.warn('Path traversal attempt in Google Calendar ID', {
        paramName,
        value: value.substring(0, 100),
      })
      return {
        isValid: false,
        error: `${paramName} contains invalid path traversal sequence`,
      }
    }
  }

  // Control characters and encoded NUL bytes are never valid. This range
  // (\x00-\x1f) already covers \n and \r, so the separate newline check the
  // previous version carried after this point was unreachable and is removed.
  if (/[\x00-\x1f\x7f]/.test(value) || value.includes('%00')) {
    logger.warn('Control characters in Google Calendar ID', { paramName })
    return {
      isValid: false,
      error: `${paramName} contains invalid control characters`,
    }
  }

  // Fast path: user calendars are addressed by email.
  const emailPattern = /^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$/
  if (emailPattern.test(value)) {
    return { isValid: true, sanitized: value }
  }

  // All other calendar IDs: alphanumerics plus a small URL-safe symbol set.
  const calendarIdPattern = /^[a-zA-Z0-9._@%#+-]+$/
  if (!calendarIdPattern.test(value)) {
    logger.warn('Invalid Google Calendar ID format', {
      paramName,
      value: value.substring(0, 100),
    })
    return {
      isValid: false,
      error: `${paramName} format is invalid. Must be "primary", an email address, or an alphanumeric ID`,
    }
  }

  return { isValid: true, sanitized: value }
}

View File

@@ -471,8 +471,10 @@ function groupIterationBlocks(spans: TraceSpan[]): TraceSpan[] {
}
})
// Include loop/parallel spans that have errors (e.g., validation errors that blocked execution)
// These won't have iteration children, so they should appear directly in results
const nonIterationContainerSpans = normalSpans.filter(
(span) => span.type !== 'parallel' && span.type !== 'loop'
(span) => (span.type !== 'parallel' && span.type !== 'loop') || span.status === 'error'
)
if (iterationSpans.length > 0) {

View File

@@ -79,8 +79,10 @@ export function hasEmailService(): boolean {
export async function sendEmail(options: EmailOptions): Promise<SendEmailResult> {
try {
// Check if user has unsubscribed (skip for critical transactional emails)
if (options.emailType !== 'transactional') {
const unsubscribeType = options.emailType as 'marketing' | 'updates' | 'notifications'
// For arrays, check the first email address (batch emails typically go to similar recipients)
const primaryEmail = Array.isArray(options.to) ? options.to[0] : options.to
const hasUnsubscribed = await isUnsubscribed(primaryEmail, unsubscribeType)
if (hasUnsubscribed) {
@@ -97,8 +99,10 @@ export async function sendEmail(options: EmailOptions): Promise<SendEmailResult>
}
}
// Process email data with unsubscribe tokens and headers
const processedData = await processEmailData(options)
// Try Resend first if configured
if (resend) {
try {
return await sendWithResend(processedData)
@@ -107,6 +111,7 @@ export async function sendEmail(options: EmailOptions): Promise<SendEmailResult>
}
}
// Fallback to Azure Communication Services if configured
if (azureEmailClient) {
try {
return await sendWithAzure(processedData)
@@ -119,6 +124,7 @@ export async function sendEmail(options: EmailOptions): Promise<SendEmailResult>
}
}
// No email service configured
logger.info('Email not sent (no email service configured):', {
to: options.to,
subject: options.subject,
@@ -138,32 +144,6 @@ export async function sendEmail(options: EmailOptions): Promise<SendEmailResult>
}
}
interface UnsubscribeData {
headers: Record<string, string>
html?: string
text?: string
}
function addUnsubscribeData(
recipientEmail: string,
emailType: string,
html?: string,
text?: string
): UnsubscribeData {
const unsubscribeToken = generateUnsubscribeToken(recipientEmail, emailType)
const baseUrl = getBaseUrl()
const unsubscribeUrl = `${baseUrl}/unsubscribe?token=${unsubscribeToken}&email=${encodeURIComponent(recipientEmail)}`
return {
headers: {
'List-Unsubscribe': `<${unsubscribeUrl}>`,
'List-Unsubscribe-Post': 'List-Unsubscribe=One-Click',
},
html: html?.replace(/\{\{UNSUBSCRIBE_TOKEN\}\}/g, unsubscribeToken),
text: text?.replace(/\{\{UNSUBSCRIBE_TOKEN\}\}/g, unsubscribeToken),
}
}
async function processEmailData(options: EmailOptions): Promise<ProcessedEmailData> {
const {
to,
@@ -179,16 +159,27 @@ async function processEmailData(options: EmailOptions): Promise<ProcessedEmailDa
const senderEmail = from || getFromEmailAddress()
// Generate unsubscribe token and add to content
let finalHtml = html
let finalText = text
let headers: Record<string, string> = {}
const headers: Record<string, string> = {}
if (includeUnsubscribe && emailType !== 'transactional') {
// For arrays, use the first email for unsubscribe (batch emails typically go to similar recipients)
const primaryEmail = Array.isArray(to) ? to[0] : to
const unsubData = addUnsubscribeData(primaryEmail, emailType, html, text)
headers = unsubData.headers
finalHtml = unsubData.html
finalText = unsubData.text
const unsubscribeToken = generateUnsubscribeToken(primaryEmail, emailType)
const baseUrl = getBaseUrl()
const unsubscribeUrl = `${baseUrl}/unsubscribe?token=${unsubscribeToken}&email=${encodeURIComponent(primaryEmail)}`
headers['List-Unsubscribe'] = `<${unsubscribeUrl}>`
headers['List-Unsubscribe-Post'] = 'List-Unsubscribe=One-Click'
if (html) {
finalHtml = html.replace(/\{\{UNSUBSCRIBE_TOKEN\}\}/g, unsubscribeToken)
}
if (text) {
finalText = text.replace(/\{\{UNSUBSCRIBE_TOKEN\}\}/g, unsubscribeToken)
}
}
return {
@@ -243,10 +234,13 @@ async function sendWithResend(data: ProcessedEmailData): Promise<SendEmailResult
async function sendWithAzure(data: ProcessedEmailData): Promise<SendEmailResult> {
if (!azureEmailClient) throw new Error('Azure Communication Services not configured')
// Azure Communication Services requires at least one content type
if (!data.html && !data.text) {
throw new Error('Azure Communication Services requires either HTML or text content')
}
// For Azure, use just the email address part (no display name)
// Azure will use the display name configured in the portal for the sender address
const senderEmailOnly = data.senderEmail.includes('<')
? data.senderEmail.match(/<(.+)>/)?.[1] || data.senderEmail
: data.senderEmail
@@ -287,6 +281,7 @@ export async function sendBatchEmails(options: BatchEmailOptions): Promise<Batch
try {
const results: SendEmailResult[] = []
// Try Resend first for batch emails if available
if (resend) {
try {
return await sendBatchWithResend(options.emails)
@@ -295,6 +290,7 @@ export async function sendBatchEmails(options: BatchEmailOptions): Promise<Batch
}
}
// Fallback to individual sends (works with both Azure and Resend)
logger.info('Sending batch emails individually')
for (const email of options.emails) {
try {
@@ -332,57 +328,17 @@ async function sendBatchWithResend(emails: EmailOptions[]): Promise<BatchSendEma
if (!resend) throw new Error('Resend not configured')
const results: SendEmailResult[] = []
const skippedIndices: number[] = []
const batchEmails: any[] = []
for (let i = 0; i < emails.length; i++) {
const email = emails[i]
const { emailType = 'transactional', includeUnsubscribe = true } = email
if (emailType !== 'transactional') {
const unsubscribeType = emailType as 'marketing' | 'updates' | 'notifications'
const primaryEmail = Array.isArray(email.to) ? email.to[0] : email.to
const hasUnsubscribed = await isUnsubscribed(primaryEmail, unsubscribeType)
if (hasUnsubscribed) {
skippedIndices.push(i)
results.push({
success: true,
message: 'Email skipped (user unsubscribed)',
data: { id: 'skipped-unsubscribed' },
})
continue
}
}
const batchEmails = emails.map((email) => {
const senderEmail = email.from || getFromEmailAddress()
const emailData: any = {
from: senderEmail,
to: email.to,
subject: email.subject,
}
if (email.html) emailData.html = email.html
if (email.text) emailData.text = email.text
if (includeUnsubscribe && emailType !== 'transactional') {
const primaryEmail = Array.isArray(email.to) ? email.to[0] : email.to
const unsubData = addUnsubscribeData(primaryEmail, emailType, email.html, email.text)
emailData.headers = unsubData.headers
if (unsubData.html) emailData.html = unsubData.html
if (unsubData.text) emailData.text = unsubData.text
}
batchEmails.push(emailData)
}
if (batchEmails.length === 0) {
return {
success: true,
message: 'All batch emails skipped (users unsubscribed)',
results,
data: { count: 0 },
}
}
return emailData
})
try {
const response = await resend.batch.send(batchEmails as any)
@@ -391,6 +347,7 @@ async function sendBatchWithResend(emails: EmailOptions[]): Promise<BatchSendEma
throw new Error(response.error.message || 'Resend batch API error')
}
// Success - create results for each email
batchEmails.forEach((_, index) => {
results.push({
success: true,
@@ -401,15 +358,12 @@ async function sendBatchWithResend(emails: EmailOptions[]): Promise<BatchSendEma
return {
success: true,
message:
skippedIndices.length > 0
? `${batchEmails.length} emails sent, ${skippedIndices.length} skipped (unsubscribed)`
: 'All batch emails sent successfully via Resend',
message: 'All batch emails sent successfully via Resend',
results,
data: { count: batchEmails.length },
data: { count: results.length },
}
} catch (error) {
logger.error('Resend batch send failed:', error)
throw error
throw error // Let the caller handle fallback
}
}

View File

@@ -1,130 +0,0 @@
import { toPng } from 'html-to-image'
import { createLogger } from '@/lib/logs/console/logger'
const logger = createLogger('OGCapturePreview')
/**
* OG image dimensions following social media best practices
*/
export const OG_IMAGE_WIDTH = 1200
export const OG_IMAGE_HEIGHT = 630
/**
 * Captures a workflow preview element as a PNG suitable for OpenGraph cards.
 * Returns a base64-encoded data URL.
 *
 * Retries with linear backoff (500ms * attempt) and returns null when every
 * attempt fails or the element has no children to render.
 *
 * @param element - The DOM element containing the workflow preview
 * @param retries - Number of retry attempts (default: 3)
 * @returns Base64 data URL of the captured image, or null if capture fails
 */
export async function captureWorkflowPreview(
  element: HTMLElement,
  retries = 3
): Promise<string | null> {
  if (!element || element.children.length === 0) {
    logger.warn('Cannot capture empty element')
    return null
  }

  // Exclude transient UI chrome (tooltips, overlays, react-flow controls)
  // from the rendered image.
  const excludeChrome = (node: HTMLElement): boolean => {
    const className = node.className?.toString() || ''
    return !(
      className.includes('tooltip') ||
      className.includes('popover') ||
      className.includes('overlay') ||
      className.includes('react-flow__controls') ||
      className.includes('react-flow__minimap')
    )
  }

  const renderOptions = {
    width: OG_IMAGE_WIDTH,
    height: OG_IMAGE_HEIGHT,
    pixelRatio: 2, // Higher quality for crisp rendering
    backgroundColor: '#0c0c0c', // Dark background matching the app theme
    style: {
      transform: 'scale(1)',
      transformOrigin: 'top left',
    },
    filter: excludeChrome,
  }

  for (let attempt = 1; attempt <= retries; attempt++) {
    try {
      logger.info(`Capturing workflow preview for OG image (attempt ${attempt}/${retries})`)
      const dataUrl = await toPng(element, renderOptions)

      // Tiny data URLs indicate a blank render; treat them as a failed attempt.
      if (dataUrl && dataUrl.length > 1000) {
        logger.info('Workflow preview captured successfully')
        return dataUrl
      }
      logger.warn(`Captured image appears to be empty (attempt ${attempt})`)
    } catch (error) {
      logger.error(`Failed to capture workflow preview (attempt ${attempt}):`, error)
    }

    if (attempt < retries) {
      await new Promise((resolve) => setTimeout(resolve, 500 * attempt))
    }
  }

  logger.error('All capture attempts failed')
  return null
}
/**
 * Upload a captured OG image to the server.
 *
 * @param templateId - The ID of the template to associate the image with
 * @param imageData - Base64-encoded image data URL
 * @returns The public URL of the uploaded image, or null if upload fails
 */
export async function uploadOGImage(templateId: string, imageData: string): Promise<string | null> {
  try {
    logger.info(`Uploading OG image for template: ${templateId}`)

    const requestInit = {
      method: 'PUT',
      headers: {
        'Content-Type': 'application/json',
      },
      body: JSON.stringify({ imageData }),
    }
    const response = await fetch(`/api/templates/${templateId}/og-image`, requestInit)

    if (!response.ok) {
      // Prefer the server-provided error; fall back to the HTTP status.
      const errorData = await response.json().catch(() => ({}))
      throw new Error(errorData.error || `Upload failed with status ${response.status}`)
    }

    const data = await response.json()
    logger.info(`OG image uploaded successfully: ${data.ogImageUrl}`)
    return data.ogImageUrl
  } catch (error) {
    logger.error('Failed to upload OG image:', error)
    return null
  }
}
/**
 * Capture and upload a workflow preview as an OG image.
 * This is a convenience function that combines capture and upload.
 *
 * @param element - The DOM element containing the workflow preview
 * @param templateId - The ID of the template
 * @returns The public URL of the uploaded image, or null if either step fails
 */
export async function captureAndUploadOGImage(
  element: HTMLElement,
  templateId: string
): Promise<string | null> {
  const captured = await captureWorkflowPreview(element)
  if (!captured) {
    logger.warn('Skipping OG image upload - capture failed')
    return null
  }
  return uploadOGImage(templateId, captured)
}

View File

@@ -1,7 +0,0 @@
export {
captureAndUploadOGImage,
captureWorkflowPreview,
OG_IMAGE_HEIGHT,
OG_IMAGE_WIDTH,
uploadOGImage,
} from './capture-preview'

View File

@@ -1,121 +0,0 @@
import { db } from '@sim/db'
import { member, templateCreators, templates, user } from '@sim/db/schema'
import { and, eq, or } from 'drizzle-orm'
export type CreatorPermissionLevel = 'member' | 'admin'
/**
 * Verifies if a user is a super user.
 *
 * @param userId - The ID of the user to check
 * @returns Object with isSuperUser boolean (false when the user is not found)
 */
export async function verifySuperUser(userId: string): Promise<{ isSuperUser: boolean }> {
  const rows = await db.select().from(user).where(eq(user.id, userId)).limit(1)
  const isSuperUser = rows[0]?.isSuperUser || false
  return { isSuperUser }
}
/**
 * Fetches a template and verifies the user has permission to modify it.
 * Combines template existence check and creator permission check in one call.
 *
 * @param templateId - The ID of the template
 * @param userId - The ID of the user to check
 * @param requiredLevel - The permission level required ('member' or 'admin')
 * @returns Object with template data if authorized, or error information
 */
export async function verifyTemplateOwnership(
  templateId: string,
  userId: string,
  requiredLevel: CreatorPermissionLevel = 'admin'
): Promise<{
  authorized: boolean
  template?: typeof templates.$inferSelect
  error?: string
  status?: number
}> {
  const rows = await db.select().from(templates).where(eq(templates.id, templateId)).limit(1)
  const template = rows[0]
  if (!template) {
    return { authorized: false, error: 'Template not found', status: 404 }
  }
  // Templates without a creator profile cannot be modified via creator permissions
  if (!template.creatorId) {
    return { authorized: false, error: 'Access denied', status: 403 }
  }
  const permission = await verifyCreatorPermission(userId, template.creatorId, requiredLevel)
  if (!permission.hasPermission) {
    return { authorized: false, error: permission.error || 'Access denied', status: 403 }
  }
  return { authorized: true, template }
}
/**
 * Verifies if a user has permission to act on behalf of a creator profile.
 *
 * @param userId - The ID of the user to check
 * @param creatorId - The ID of the creator profile
 * @param requiredLevel - The permission level required ('member' for any org member, 'admin' for admin/owner only)
 * @returns Object with hasPermission boolean and optional error message
 */
export async function verifyCreatorPermission(
  userId: string,
  creatorId: string,
  requiredLevel: CreatorPermissionLevel = 'admin'
): Promise<{ hasPermission: boolean; error?: string }> {
  const profiles = await db
    .select()
    .from(templateCreators)
    .where(eq(templateCreators.id, creatorId))
    .limit(1)
  const creator = profiles[0]
  if (!creator) {
    return { hasPermission: false, error: 'Creator profile not found' }
  }
  switch (creator.referenceType) {
    case 'user': {
      // Personal profiles: only the owning user may act on their behalf
      const hasPermission = creator.referenceId === userId
      return {
        hasPermission,
        error: hasPermission ? undefined : 'You do not have permission to use this creator profile',
      }
    }
    case 'organization': {
      const membershipConditions = [
        eq(member.userId, userId),
        eq(member.organizationId, creator.referenceId),
      ]
      // Admin-level checks additionally require an admin or owner role
      if (requiredLevel === 'admin') {
        membershipConditions.push(or(eq(member.role, 'admin'), eq(member.role, 'owner'))!)
      }
      const membershipRows = await db
        .select()
        .from(member)
        .where(and(...membershipConditions))
        .limit(1)
      if (membershipRows.length > 0) {
        return { hasPermission: true }
      }
      const error =
        requiredLevel === 'admin'
          ? 'You must be an admin or owner of the organization to perform this action'
          : 'You must be a member of the organization to use its creator profile'
      return { hasPermission: false, error }
    }
    default:
      return { hasPermission: false, error: 'Unknown creator profile type' }
  }
}

View File

@@ -85,18 +85,6 @@ export const BLOB_PROFILE_PICTURES_CONFIG = {
containerName: env.AZURE_STORAGE_PROFILE_PICTURES_CONTAINER_NAME || '',
}
export const S3_OG_IMAGES_CONFIG = {
bucket: env.S3_OG_IMAGES_BUCKET_NAME || '',
region: env.AWS_REGION || '',
}
export const BLOB_OG_IMAGES_CONFIG = {
accountName: env.AZURE_ACCOUNT_NAME || '',
accountKey: env.AZURE_ACCOUNT_KEY || '',
connectionString: env.AZURE_CONNECTION_STRING || '',
containerName: env.AZURE_STORAGE_OG_IMAGES_CONTAINER_NAME || '',
}
/**
* Get the current storage provider as a human-readable string
*/
@@ -163,11 +151,6 @@ function getS3Config(context: StorageContext): StorageConfig {
bucket: S3_PROFILE_PICTURES_CONFIG.bucket,
region: S3_PROFILE_PICTURES_CONFIG.region,
}
case 'og-images':
return {
bucket: S3_OG_IMAGES_CONFIG.bucket || S3_CONFIG.bucket,
region: S3_OG_IMAGES_CONFIG.region || S3_CONFIG.region,
}
default:
return {
bucket: S3_CONFIG.bucket,
@@ -223,13 +206,6 @@ function getBlobConfig(context: StorageContext): StorageConfig {
connectionString: BLOB_PROFILE_PICTURES_CONFIG.connectionString,
containerName: BLOB_PROFILE_PICTURES_CONFIG.containerName,
}
case 'og-images':
return {
accountName: BLOB_OG_IMAGES_CONFIG.accountName || BLOB_CONFIG.accountName,
accountKey: BLOB_OG_IMAGES_CONFIG.accountKey || BLOB_CONFIG.accountKey,
connectionString: BLOB_OG_IMAGES_CONFIG.connectionString || BLOB_CONFIG.connectionString,
containerName: BLOB_OG_IMAGES_CONFIG.containerName || BLOB_CONFIG.containerName,
}
default:
return {
accountName: BLOB_CONFIG.accountName,

View File

@@ -5,7 +5,6 @@ export type StorageContext =
| 'execution'
| 'workspace'
| 'profile-pictures'
| 'og-images'
| 'logs'
export interface FileInfo {

View File

@@ -192,15 +192,6 @@ export function isSupportedVideoExtension(extension: string): extension is Suppo
/**
* Validate if an audio/video file type is supported for STT processing
*/
const PNG_MAGIC_BYTES = Buffer.from([0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a])
/**
* Validate that a buffer contains valid PNG data by checking magic bytes
*/
export function isValidPng(buffer: Buffer): boolean {
return buffer.length >= 8 && buffer.subarray(0, 8).equals(PNG_MAGIC_BYTES)
}
export function validateMediaFileType(
fileName: string,
mimeType: string

View File

@@ -4,7 +4,6 @@ import { and, eq, sql } from 'drizzle-orm'
import { nanoid } from 'nanoid'
import Parser from 'rss-parser'
import { pollingIdempotency } from '@/lib/core/idempotency/service'
import { createPinnedUrl, validateUrlWithDNS } from '@/lib/core/security/input-validation'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { createLogger } from '@/lib/logs/console/logger'
@@ -157,7 +156,7 @@ export async function pollRssWebhooks() {
const { feed, items: newItems } = await fetchNewRssItems(config, requestId)
if (!newItems.length) {
await updateWebhookConfig(webhookId, now.toISOString(), [])
await updateWebhookConfig(webhookId, config, now.toISOString(), [])
await markWebhookSuccess(webhookId)
logger.info(`[${requestId}] No new items found for webhook ${webhookId}`)
successCount++
@@ -173,11 +172,12 @@ export async function pollRssWebhooks() {
requestId
)
// Collect guids from processed items
const newGuids = newItems
.map((item) => item.guid || item.link || '')
.filter((guid) => guid.length > 0)
await updateWebhookConfig(webhookId, now.toISOString(), newGuids)
await updateWebhookConfig(webhookId, config, now.toISOString(), newGuids)
if (itemFailedCount > 0 && processedCount === 0) {
await markWebhookFailed(webhookId)
@@ -245,36 +245,15 @@ async function fetchNewRssItems(
try {
logger.debug(`[${requestId}] Fetching RSS feed: ${config.feedUrl}`)
const urlValidation = await validateUrlWithDNS(config.feedUrl, 'feedUrl')
if (!urlValidation.isValid) {
logger.error(`[${requestId}] Invalid RSS feed URL: ${urlValidation.error}`)
throw new Error(`Invalid RSS feed URL: ${urlValidation.error}`)
}
const pinnedUrl = createPinnedUrl(config.feedUrl, urlValidation.resolvedIP!)
const response = await fetch(pinnedUrl, {
headers: {
Host: urlValidation.originalHostname!,
'User-Agent': 'SimStudio/1.0 RSS Poller',
Accept: 'application/rss+xml, application/xml, text/xml, */*',
},
signal: AbortSignal.timeout(30000),
})
if (!response.ok) {
throw new Error(`Failed to fetch RSS feed: ${response.status} ${response.statusText}`)
}
const xmlContent = await response.text()
const feed = await parser.parseString(xmlContent)
// Parse the RSS feed
const feed = await parser.parseURL(config.feedUrl)
if (!feed.items || !feed.items.length) {
logger.debug(`[${requestId}] No items in feed`)
return { feed: feed as RssFeed, items: [] }
}
// Filter new items based on timestamp and guids
const lastCheckedTime = config.lastCheckedTimestamp
? new Date(config.lastCheckedTimestamp)
: null
@@ -283,10 +262,12 @@ async function fetchNewRssItems(
const newItems = feed.items.filter((item) => {
const itemGuid = item.guid || item.link || ''
// Check if we've already seen this item by guid
if (itemGuid && lastSeenGuids.has(itemGuid)) {
return false
}
// Check if the item is newer than our last check
if (lastCheckedTime && item.isoDate) {
const itemDate = new Date(item.isoDate)
if (itemDate <= lastCheckedTime) {
@@ -297,12 +278,14 @@ async function fetchNewRssItems(
return true
})
// Sort by date, newest first
newItems.sort((a, b) => {
const dateA = a.isoDate ? new Date(a.isoDate).getTime() : 0
const dateB = b.isoDate ? new Date(b.isoDate).getTime() : 0
return dateB - dateA
})
// Limit to 25 items per poll to prevent overwhelming the system
const limitedItems = newItems.slice(0, 25)
logger.info(
@@ -400,11 +383,17 @@ async function processRssItems(
return { processedCount, failedCount }
}
async function updateWebhookConfig(webhookId: string, timestamp: string, newGuids: string[]) {
async function updateWebhookConfig(
webhookId: string,
_config: RssWebhookConfig,
timestamp: string,
newGuids: string[]
) {
try {
const result = await db.select().from(webhook).where(eq(webhook.id, webhookId))
const existingConfig = (result[0]?.providerConfig as Record<string, any>) || {}
// Merge new guids with existing ones, keeping only the most recent
const existingGuids = existingConfig.lastSeenGuids || []
const allGuids = [...newGuids, ...existingGuids].slice(0, MAX_GUIDS_TO_TRACK)

View File

@@ -2,7 +2,6 @@ import { db } from '@sim/db'
import { account, webhook } from '@sim/db/schema'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { createPinnedUrl, validateUrlWithDNS } from '@/lib/core/security/input-validation'
import { createLogger } from '@/lib/logs/console/logger'
import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
@@ -19,6 +18,7 @@ export async function handleWhatsAppVerification(
challenge: string | null
): Promise<NextResponse | null> {
if (mode && token && challenge) {
// This is a WhatsApp verification request
logger.info(`[${requestId}] WhatsApp verification request received for path: ${path}`)
if (mode !== 'subscribe') {
@@ -26,11 +26,13 @@ export async function handleWhatsAppVerification(
return new NextResponse('Invalid mode', { status: 400 })
}
// Find all active WhatsApp webhooks
const webhooks = await db
.select()
.from(webhook)
.where(and(eq(webhook.provider, 'whatsapp'), eq(webhook.isActive, true)))
// Check if any webhook has a matching verification token
for (const wh of webhooks) {
const providerConfig = (wh.providerConfig as Record<string, any>) || {}
const verificationToken = providerConfig.verificationToken
@@ -42,6 +44,7 @@ export async function handleWhatsAppVerification(
if (token === verificationToken) {
logger.info(`[${requestId}] WhatsApp verification successful for webhook ${wh.id}`)
// Return ONLY the challenge as plain text (exactly as WhatsApp expects)
return new NextResponse(challenge, {
status: 200,
headers: {
@@ -69,52 +72,6 @@ export function handleSlackChallenge(body: any): NextResponse | null {
return null
}
/**
* Fetches a URL with DNS pinning to prevent DNS rebinding attacks
* @param url - The URL to fetch
* @param accessToken - Authorization token (optional for pre-signed URLs)
* @param requestId - Request ID for logging
* @returns The fetch Response or null if validation fails
*/
async function fetchWithDNSPinning(
url: string,
accessToken: string,
requestId: string
): Promise<Response | null> {
try {
const urlValidation = await validateUrlWithDNS(url, 'contentUrl')
if (!urlValidation.isValid) {
logger.warn(`[${requestId}] Invalid content URL: ${urlValidation.error}`, {
url: url.substring(0, 100),
})
return null
}
const pinnedUrl = createPinnedUrl(url, urlValidation.resolvedIP!)
const headers: Record<string, string> = {
Host: urlValidation.originalHostname!,
}
if (accessToken) {
headers.Authorization = `Bearer ${accessToken}`
}
const response = await fetch(pinnedUrl, {
headers,
redirect: 'follow',
})
return response
} catch (error) {
logger.error(`[${requestId}] Error fetching URL with DNS pinning`, {
error: error instanceof Error ? error.message : String(error),
url: url.substring(0, 100),
})
return null
}
}
/**
* Format Microsoft Teams Graph change notification
*/
@@ -133,6 +90,7 @@ async function formatTeamsGraphNotification(
const resource = notification.resource || ''
const subscriptionId = notification.subscriptionId || ''
// Extract chatId and messageId from resource path
let chatId: string | null = null
let messageId: string | null = null
@@ -201,6 +159,7 @@ async function formatTeamsGraphNotification(
[]
let accessToken: string | null = null
// Teams chat subscriptions require credentials
if (!credentialId) {
logger.error('Missing credentialId for Teams chat subscription', {
chatId: resolvedChatId,
@@ -211,9 +170,11 @@ async function formatTeamsGraphNotification(
})
} else {
try {
// Get userId from credential
const rows = await db.select().from(account).where(eq(account.id, credentialId)).limit(1)
if (rows.length === 0) {
logger.error('Teams credential not found', { credentialId, chatId: resolvedChatId })
// Continue without message data
} else {
const effectiveUserId = rows[0].userId
accessToken = await refreshAccessTokenIfNeeded(
@@ -246,20 +207,19 @@ async function formatTeamsGraphNotification(
if (contentUrl.includes('sharepoint.com') || contentUrl.includes('onedrive')) {
try {
const directRes = await fetchWithDNSPinning(
contentUrl,
accessToken,
'teams-attachment'
)
const directRes = await fetch(contentUrl, {
headers: { Authorization: `Bearer ${accessToken}` },
redirect: 'follow',
})
if (directRes?.ok) {
if (directRes.ok) {
const arrayBuffer = await directRes.arrayBuffer()
buffer = Buffer.from(arrayBuffer)
mimeType =
directRes.headers.get('content-type') ||
contentTypeHint ||
'application/octet-stream'
} else if (directRes) {
} else {
const encodedUrl = Buffer.from(contentUrl)
.toString('base64')
.replace(/\+/g, '-')
@@ -350,13 +310,9 @@ async function formatTeamsGraphNotification(
const downloadUrl = metadata['@microsoft.graph.downloadUrl']
if (downloadUrl) {
const downloadRes = await fetchWithDNSPinning(
downloadUrl,
'', // downloadUrl is a pre-signed URL, no auth needed
'teams-onedrive-download'
)
const downloadRes = await fetch(downloadUrl)
if (downloadRes?.ok) {
if (downloadRes.ok) {
const arrayBuffer = await downloadRes.arrayBuffer()
buffer = Buffer.from(arrayBuffer)
mimeType =
@@ -380,12 +336,10 @@ async function formatTeamsGraphNotification(
}
} else {
try {
const ares = await fetchWithDNSPinning(
contentUrl,
accessToken,
'teams-attachment-generic'
)
if (ares?.ok) {
const ares = await fetch(contentUrl, {
headers: { Authorization: `Bearer ${accessToken}` },
})
if (ares.ok) {
const arrayBuffer = await ares.arrayBuffer()
buffer = Buffer.from(arrayBuffer)
mimeType =
@@ -423,6 +377,7 @@ async function formatTeamsGraphNotification(
}
}
// If no message was fetched, return minimal data
if (!message) {
logger.warn('No message data available for Teams notification', {
chatId: resolvedChatId,
@@ -458,6 +413,8 @@ async function formatTeamsGraphNotification(
}
}
// Extract data from message - we know it exists now
// body.content is the HTML/text content, summary is a plain text preview (max 280 chars)
const messageText = message.body?.content || ''
const from = message.from?.user || {}
const createdAt = message.createdDateTime || ''

View File

@@ -1,9 +1,10 @@
import { db } from '@sim/db'
import { permissions, userStats, workflow as workflowTable, workspace } from '@sim/db/schema'
import { permissions, workflow as workflowTable, workspace } from '@sim/db/schema'
import type { InferSelectModel } from 'drizzle-orm'
import { and, eq } from 'drizzle-orm'
import { NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { createLogger } from '@/lib/logs/console/logger'
import type { PermissionType } from '@/lib/workspaces/permissions/utils'
import type { ExecutionResult } from '@/executor/types'
@@ -92,44 +93,17 @@ export async function updateWorkflowRunCounts(workflowId: string, runs = 1) {
throw new Error(`Workflow ${workflowId} not found`)
}
await db
.update(workflowTable)
.set({
runCount: workflow.runCount + runs,
lastRunAt: new Date(),
})
.where(eq(workflowTable.id, workflowId))
// Use the API to update stats
const response = await fetch(`${getBaseUrl()}/api/workflows/${workflowId}/stats?runs=${runs}`, {
method: 'POST',
})
try {
const existing = await db
.select()
.from(userStats)
.where(eq(userStats.userId, workflow.userId))
.limit(1)
if (existing.length === 0) {
logger.warn('User stats record not found - should be created during onboarding', {
userId: workflow.userId,
workflowId,
})
} else {
await db
.update(userStats)
.set({
lastActive: new Date(),
})
.where(eq(userStats.userId, workflow.userId))
}
} catch (error) {
logger.error(`Error updating userStats lastActive for userId ${workflow.userId}:`, error)
// Don't rethrow - we want to continue even if this fails
if (!response.ok) {
const error = await response.json()
throw new Error(error.error || 'Failed to update workflow stats')
}
return {
success: true,
runsAdded: runs,
newTotal: workflow.runCount + runs,
}
return response.json()
} catch (error) {
logger.error(`Error updating workflow stats for ${workflowId}`, error)
throw error
@@ -147,6 +121,7 @@ function sanitizeToolsForComparison(tools: any[] | undefined): any[] {
}
return tools.map((tool) => {
// Remove UI-only field: isExpanded
const { isExpanded, ...cleanTool } = tool
return cleanTool
})
@@ -163,6 +138,7 @@ function sanitizeInputFormatForComparison(inputFormat: any[] | undefined): any[]
}
return inputFormat.map((field) => {
// Remove test-only field: value (used only for manual testing)
const { value, collapsed, ...cleanField } = field
return cleanField
})

View File

@@ -88,7 +88,6 @@
"fuse.js": "7.1.0",
"gray-matter": "^4.0.3",
"groq-sdk": "^0.15.0",
"html-to-image": "1.11.13",
"html-to-text": "^9.0.5",
"input-otp": "^1.4.2",
"ioredis": "^5.6.0",

View File

@@ -0,0 +1 @@
export { useTextHistoryStore } from './store'

View File

@@ -0,0 +1,339 @@
import { create } from 'zustand'
import { createLogger } from '@/lib/logs/console/logger'
// Scoped logger for all text-history debug output
const logger = createLogger('TextHistoryStore')
/**
 * Default debounce delay in milliseconds.
 * Changes within this window are coalesced into a single history entry.
 */
const DEBOUNCE_DELAY_MS = 500
/**
 * Maximum number of history entries per text field.
 * When exceeded, the oldest entries are dropped from the front of the stack.
 */
const MAX_HISTORY_SIZE = 10
/**
 * Undo/redo bookkeeping for a single text field.
 * `stack` holds committed snapshots of the field's value; `index` points at
 * the snapshot currently in effect. An uncommitted edit lives in `pending`
 * until its debounce timer fires.
 */
interface TextHistoryEntry {
  /** The undo/redo stack of text values */
  stack: string[]
  /** Current position in the stack (0 = oldest) */
  index: number
  /** Pending value that hasn't been committed to history yet */
  pending: string | null
  /** Timer ID for debounced commit */
  debounceTimer: ReturnType<typeof setTimeout> | null
  /** Timestamp of last change (for coalescing logic) */
  lastChangeAt: number
}
/**
 * Shape of the text-history zustand store: one debounced undo/redo history
 * per text field, keyed by "blockId:subBlockId".
 */
interface TextHistoryState {
  /** Map of "blockId:subBlockId" to history entry */
  histories: Record<string, TextHistoryEntry>
  /**
   * Records a text change with debouncing.
   * Multiple rapid changes are coalesced into a single history entry.
   */
  recordChange: (blockId: string, subBlockId: string, value: string) => void
  /**
   * Immediately commits any pending changes to history.
   * Call this on blur or before navigation.
   */
  commitPending: (blockId: string, subBlockId: string) => void
  /**
   * Undo the last text change for a specific field.
   * @returns The previous value, or null if at the beginning of history
   */
  undo: (blockId: string, subBlockId: string) => string | null
  /**
   * Redo the last undone text change for a specific field.
   * @returns The next value, or null if at the end of history
   */
  redo: (blockId: string, subBlockId: string) => string | null
  /**
   * Check if undo is available for a field.
   */
  canUndo: (blockId: string, subBlockId: string) => boolean
  /**
   * Check if redo is available for a field.
   */
  canRedo: (blockId: string, subBlockId: string) => boolean
  /**
   * Initialize history for a field with an initial value.
   * Called when a text field first mounts.
   */
  initHistory: (blockId: string, subBlockId: string, initialValue: string) => void
  /**
   * Clear history for a specific field.
   */
  clearHistory: (blockId: string, subBlockId: string) => void
  /**
   * Clear all history for a block (when block is deleted).
   */
  clearBlockHistory: (blockId: string) => void
}
/** Builds the composite histories-map key for one block/sub-block pair. */
function getKey(blockId: string, subBlockId: string): string {
  return [blockId, subBlockId].join(':')
}
/**
 * Creates a fresh history entry whose stack contains only `initialValue`,
 * with no pending edit and no active debounce timer.
 */
function createEmptyEntry(initialValue: string): TextHistoryEntry {
  return {
    stack: [initialValue],
    index: 0,
    pending: null,
    debounceTimer: null,
    lastChangeAt: 0,
  }
}
/**
 * Zustand store providing per-field, debounced undo/redo history for text
 * inputs. Each field is keyed by "blockId:subBlockId"; rapid edits within
 * DEBOUNCE_DELAY_MS are coalesced into a single history entry, and each
 * field's stack is capped at MAX_HISTORY_SIZE snapshots.
 */
export const useTextHistoryStore = create<TextHistoryState>((set, get) => ({
  histories: {},
  initHistory: (blockId: string, subBlockId: string, initialValue: string) => {
    const key = getKey(blockId, subBlockId)
    const state = get()
    // Only initialize if not already present
    if (!state.histories[key]) {
      set({
        histories: {
          ...state.histories,
          [key]: createEmptyEntry(initialValue),
        },
      })
      logger.debug('Initialized text history', { blockId, subBlockId })
    }
  },
  recordChange: (blockId: string, subBlockId: string, value: string) => {
    const key = getKey(blockId, subBlockId)
    const state = get()
    let entry = state.histories[key]
    // Initialize if needed
    // NOTE(review): this synthesizes an entry whose baseline is '' — if the
    // field was never initHistory'd, a full undo lands on the empty string
    // rather than the field's true initial value. Confirm callers always
    // call initHistory on mount.
    if (!entry) {
      entry = createEmptyEntry('')
    }
    // Clear any existing debounce timer so only the latest edit commits
    if (entry.debounceTimer) {
      clearTimeout(entry.debounceTimer)
    }
    // Set up new debounce timer
    const timer = setTimeout(() => {
      get().commitPending(blockId, subBlockId)
    }, DEBOUNCE_DELAY_MS)
    // Update entry with pending value
    // (re-reads histories via get() so state changes made since `state` was
    // captured above are not clobbered)
    set({
      histories: {
        ...get().histories,
        [key]: {
          ...entry,
          pending: value,
          debounceTimer: timer,
          lastChangeAt: Date.now(),
        },
      },
    })
  },
  commitPending: (blockId: string, subBlockId: string) => {
    const key = getKey(blockId, subBlockId)
    const state = get()
    const entry = state.histories[key]
    // Nothing to do when the field has no history or no uncommitted edit
    if (!entry || entry.pending === null) {
      return
    }
    // Clear the timer (commit may have been triggered early, e.g. on blur)
    if (entry.debounceTimer) {
      clearTimeout(entry.debounceTimer)
    }
    const currentValue = entry.stack[entry.index]
    // Don't commit if value hasn't changed — just drop the pending state
    if (entry.pending === currentValue) {
      set({
        histories: {
          ...state.histories,
          [key]: {
            ...entry,
            pending: null,
            debounceTimer: null,
          },
        },
      })
      return
    }
    // Truncate any redo history (we're branching)
    const newStack = entry.stack.slice(0, entry.index + 1)
    // Add the new value
    newStack.push(entry.pending)
    // Enforce max size (remove oldest entries)
    while (newStack.length > MAX_HISTORY_SIZE) {
      newStack.shift()
    }
    const newIndex = newStack.length - 1
    set({
      histories: {
        ...state.histories,
        [key]: {
          stack: newStack,
          index: newIndex,
          pending: null,
          debounceTimer: null,
          lastChangeAt: entry.lastChangeAt,
        },
      },
    })
    logger.debug('Committed text change to history', {
      blockId,
      subBlockId,
      stackSize: newStack.length,
      index: newIndex,
    })
  },
  undo: (blockId: string, subBlockId: string) => {
    const key = getKey(blockId, subBlockId)
    const state = get()
    const entry = state.histories[key]
    if (!entry) {
      return null
    }
    // Commit any pending changes first so undo steps back from the latest edit
    if (entry.pending !== null) {
      get().commitPending(blockId, subBlockId)
      // Re-fetch after commit (commitPending replaced the entry via set())
      const updatedEntry = get().histories[key]
      if (!updatedEntry || updatedEntry.index <= 0) {
        return null
      }
      const newIndex = updatedEntry.index - 1
      set({
        histories: {
          ...get().histories,
          [key]: {
            ...updatedEntry,
            index: newIndex,
          },
        },
      })
      logger.debug('Text undo', { blockId, subBlockId, newIndex })
      return updatedEntry.stack[newIndex]
    }
    if (entry.index <= 0) {
      return null
    }
    const newIndex = entry.index - 1
    set({
      histories: {
        ...state.histories,
        [key]: {
          ...entry,
          index: newIndex,
        },
      },
    })
    logger.debug('Text undo', { blockId, subBlockId, newIndex })
    return entry.stack[newIndex]
  },
  redo: (blockId: string, subBlockId: string) => {
    const key = getKey(blockId, subBlockId)
    const state = get()
    const entry = state.histories[key]
    // Redo is only possible when we previously undid past the stack top
    if (!entry || entry.index >= entry.stack.length - 1) {
      return null
    }
    const newIndex = entry.index + 1
    set({
      histories: {
        ...state.histories,
        [key]: {
          ...entry,
          index: newIndex,
        },
      },
    })
    logger.debug('Text redo', { blockId, subBlockId, newIndex })
    return entry.stack[newIndex]
  },
  canUndo: (blockId: string, subBlockId: string) => {
    const key = getKey(blockId, subBlockId)
    const entry = get().histories[key]
    if (!entry) return false
    // Can undo if we have pending changes or index > 0
    // NOTE(review): when the pending value equals the committed value and
    // index === 0, this reports true but undo() will return null — confirm
    // callers tolerate a no-op undo.
    return entry.pending !== null || entry.index > 0
  },
  canRedo: (blockId: string, subBlockId: string) => {
    const key = getKey(blockId, subBlockId)
    const entry = get().histories[key]
    if (!entry) return false
    return entry.index < entry.stack.length - 1
  },
  clearHistory: (blockId: string, subBlockId: string) => {
    const key = getKey(blockId, subBlockId)
    const state = get()
    const entry = state.histories[key]
    // Cancel any in-flight debounce so it cannot resurrect the entry later
    if (entry?.debounceTimer) {
      clearTimeout(entry.debounceTimer)
    }
    const { [key]: _, ...rest } = state.histories
    set({ histories: rest })
    logger.debug('Cleared text history', { blockId, subBlockId })
  },
  clearBlockHistory: (blockId: string) => {
    const state = get()
    const prefix = `${blockId}:`
    const newHistories: Record<string, TextHistoryEntry> = {}
    // Keep entries for other blocks; cancel timers for the removed block
    for (const [key, entry] of Object.entries(state.histories)) {
      if (key.startsWith(prefix)) {
        if (entry.debounceTimer) {
          clearTimeout(entry.debounceTimer)
        }
      } else {
        newHistories[key] = entry
      }
    }
    set({ histories: newHistories })
    logger.debug('Cleared all text history for block', { blockId })
  },
}))

View File

@@ -51,7 +51,7 @@ export const slackMessageReaderTool: ToolConfig<
type: 'number',
required: false,
visibility: 'user-or-llm',
description: 'Number of messages to retrieve (default: 10, max: 15)',
description: 'Number of messages to retrieve (default: 10, max: 100)',
},
oldest: {
type: 'string',

View File

@@ -139,7 +139,6 @@
"fuse.js": "7.1.0",
"gray-matter": "^4.0.3",
"groq-sdk": "^0.15.0",
"html-to-image": "1.11.13",
"html-to-text": "^9.0.5",
"input-otp": "^1.4.2",
"ioredis": "^5.6.0",
@@ -2213,8 +2212,6 @@
"html-escaper": ["html-escaper@2.0.2", "", {}, "sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg=="],
"html-to-image": ["html-to-image@1.11.13", "", {}, "sha512-cuOPoI7WApyhBElTTb9oqsawRvZ0rHhaHwghRLlTuffoD1B2aDemlCruLeZrUIIdvG7gs9xeELEPm6PhuASqrg=="],
"html-to-text": ["html-to-text@9.0.5", "", { "dependencies": { "@selderee/plugin-htmlparser2": "^0.11.0", "deepmerge": "^4.3.1", "dom-serializer": "^2.0.0", "htmlparser2": "^8.0.2", "selderee": "^0.11.0" } }, "sha512-qY60FjREgVZL03vJU6IfMV4GDjGBIoOyvuFdpBDIX9yTlDw0TjxVBQp+P8NvpdIXNJvfWBTNul7fsAQJq2FNpg=="],
"html-url-attributes": ["html-url-attributes@3.0.1", "", {}, "sha512-ol6UPyBWqsrO6EJySPz2O7ZSr856WDrEzM5zMqp+FJJLGMW35cLYmmZnl0vztAZxRUoNZJFTCohfjuIJ8I4QBQ=="],

View File

@@ -56,7 +56,6 @@ app:
S3_CHAT_BUCKET_NAME: "chat-files" # Deployed chat assets
S3_COPILOT_BUCKET_NAME: "copilot-files" # Copilot attachments
S3_PROFILE_PICTURES_BUCKET_NAME: "profile-pictures" # User avatars
S3_OG_IMAGES_BUCKET_NAME: "og-images" # OpenGraph preview images
# Realtime service
realtime:

View File

@@ -58,7 +58,6 @@ app:
AZURE_STORAGE_CHAT_CONTAINER_NAME: "chat-files" # Deployed chat assets container
AZURE_STORAGE_COPILOT_CONTAINER_NAME: "copilot-files" # Copilot attachments container
AZURE_STORAGE_PROFILE_PICTURES_CONTAINER_NAME: "profile-pictures" # User avatars container
AZURE_STORAGE_OG_IMAGES_CONTAINER_NAME: "og-images" # OpenGraph preview images container
# Realtime service
realtime:

View File

@@ -133,7 +133,6 @@ app:
S3_CHAT_BUCKET_NAME: "" # S3 bucket for deployed chat files
S3_COPILOT_BUCKET_NAME: "" # S3 bucket for copilot files
S3_PROFILE_PICTURES_BUCKET_NAME: "" # S3 bucket for user profile pictures
S3_OG_IMAGES_BUCKET_NAME: "" # S3 bucket for OpenGraph preview images
# Azure Blob Storage Configuration (optional - for file storage)
# If configured, files will be stored in Azure Blob instead of local storage
@@ -147,7 +146,6 @@ app:
AZURE_STORAGE_CHAT_CONTAINER_NAME: "" # Azure container for deployed chat files
AZURE_STORAGE_COPILOT_CONTAINER_NAME: "" # Azure container for copilot files
AZURE_STORAGE_PROFILE_PICTURES_CONTAINER_NAME: "" # Azure container for user profile pictures
AZURE_STORAGE_OG_IMAGES_CONTAINER_NAME: "" # Azure container for OpenGraph preview images
# Service configuration
service:

View File

@@ -1 +0,0 @@
ALTER TABLE "templates" ADD COLUMN "og_image_url" text;

View File

@@ -1,4 +0,0 @@
CREATE INDEX "api_key_workspace_type_idx" ON "api_key" USING btree ("workspace_id","type");--> statement-breakpoint
CREATE INDEX "api_key_user_type_idx" ON "api_key" USING btree ("user_id","type");--> statement-breakpoint
CREATE INDEX "verification_expires_at_idx" ON "verification" USING btree ("expires_at");--> statement-breakpoint
CREATE INDEX "workflow_blocks_type_idx" ON "workflow_blocks" USING btree ("type");

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -862,20 +862,6 @@
"when": 1765932898404,
"tag": "0123_windy_lockheed",
"breakpoints": true
},
{
"idx": 124,
"version": "7",
"when": 1766108872186,
"tag": "0124_blushing_colonel_america",
"breakpoints": true
},
{
"idx": 125,
"version": "7",
"when": 1766133598113,
"tag": "0125_eager_lily_hollister",
"breakpoints": true
}
]
}

View File

@@ -108,7 +108,6 @@ export const verification = pgTable(
},
(table) => ({
identifierIdx: index('verification_identifier_idx').on(table.identifier),
expiresAtIdx: index('verification_expires_at_idx').on(table.expiresAt),
})
)
@@ -198,7 +197,6 @@ export const workflowBlocks = pgTable(
},
(table) => ({
workflowIdIdx: index('workflow_blocks_workflow_id_idx').on(table.workflowId),
typeIdx: index('workflow_blocks_type_idx').on(table.type),
})
)
@@ -620,8 +618,6 @@ export const apiKey = pgTable(
'workspace_type_check',
sql`(type = 'workspace' AND workspace_id IS NOT NULL) OR (type = 'personal' AND workspace_id IS NULL)`
),
workspaceTypeIdx: index('api_key_workspace_type_idx').on(table.workspaceId, table.type),
userTypeIdx: index('api_key_user_type_idx').on(table.userId, table.type),
})
)
@@ -1385,7 +1381,6 @@ export const templates = pgTable(
tags: text('tags').array().notNull().default(sql`'{}'::text[]`), // Array of tags
requiredCredentials: jsonb('required_credentials').notNull().default('[]'), // Array of credential requirements
state: jsonb('state').notNull(), // Store the workflow state directly
ogImageUrl: text('og_image_url'), // Pre-generated OpenGraph image URL
createdAt: timestamp('created_at').notNull().defaultNow(),
updatedAt: timestamp('updated_at').notNull().defaultNow(),
},

View File

@@ -71,7 +71,7 @@ async function generateIconMapping(): Promise<Record<string, string>> {
console.log('Generating icon mapping from block definitions...')
const iconMapping: Record<string, string> = {}
const blockFiles = (await glob(`${BLOCKS_PATH}/*.ts`)).sort()
const blockFiles = await glob(`${BLOCKS_PATH}/*.ts`)
for (const blockFile of blockFiles) {
const fileContent = fs.readFileSync(blockFile, 'utf-8')
@@ -132,7 +132,6 @@ function writeIconMapping(iconMapping: Record<string, string>): void {
// Generate mapping with direct references (no dynamic access for tree shaking)
const mappingEntries = Object.entries(iconMapping)
.sort(([a], [b]) => a.localeCompare(b))
.map(([blockType, iconName]) => ` ${blockType}: ${iconName},`)
.join('\n')
@@ -1166,7 +1165,7 @@ async function generateAllBlockDocs() {
const iconMapping = await generateIconMapping()
writeIconMapping(iconMapping)
const blockFiles = (await glob(`${BLOCKS_PATH}/*.ts`)).sort()
const blockFiles = await glob(`${BLOCKS_PATH}/*.ts`)
for (const blockFile of blockFiles) {
await generateBlockDoc(blockFile)