fix(authentication): added auth checks for various routes, mysql and postgres query validation, csp improvements (#2472)
@@ -1,7 +1,6 @@
import { runs } from '@trigger.dev/sdk'
import { type NextRequest, NextResponse } from 'next/server'
import { authenticateApiKeyFromHeader, updateApiKeyLastUsed } from '@/lib/api-key/service'
import { getSession } from '@/lib/auth'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { createLogger } from '@/lib/logs/console/logger'
import { createErrorResponse } from '@/app/api/workflows/utils'
@@ -18,38 +17,44 @@ export async function GET(
|
||||
try {
|
||||
logger.debug(`[${requestId}] Getting status for task: ${taskId}`)
|
||||
|
||||
// Try session auth first (for web UI)
|
||||
const session = await getSession()
|
||||
let authenticatedUserId: string | null = session?.user?.id || null
|
||||
|
||||
if (!authenticatedUserId) {
|
||||
const apiKeyHeader = request.headers.get('x-api-key')
|
||||
if (apiKeyHeader) {
|
||||
const authResult = await authenticateApiKeyFromHeader(apiKeyHeader)
|
||||
if (authResult.success && authResult.userId) {
|
||||
authenticatedUserId = authResult.userId
|
||||
if (authResult.keyId) {
|
||||
await updateApiKeyLastUsed(authResult.keyId).catch((error) => {
|
||||
logger.warn(`[${requestId}] Failed to update API key last used timestamp:`, {
|
||||
keyId: authResult.keyId,
|
||||
error,
|
||||
})
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })
|
||||
if (!authResult.success || !authResult.userId) {
|
||||
logger.warn(`[${requestId}] Unauthorized task status request`)
|
||||
return createErrorResponse(authResult.error || 'Authentication required', 401)
|
||||
}
|
||||
|
||||
if (!authenticatedUserId) {
|
||||
return createErrorResponse('Authentication required', 401)
|
||||
}
|
||||
const authenticatedUserId = authResult.userId
|
||||
|
||||
// Fetch task status from Trigger.dev
|
||||
const run = await runs.retrieve(taskId)
|
||||
|
||||
logger.debug(`[${requestId}] Task ${taskId} status: ${run.status}`)
|
||||
|
||||
// Map Trigger.dev status to our format
|
||||
const payload = run.payload as any
|
||||
if (payload?.workflowId) {
|
||||
const { verifyWorkflowAccess } = await import('@/socket-server/middleware/permissions')
|
||||
const accessCheck = await verifyWorkflowAccess(authenticatedUserId, payload.workflowId)
|
||||
if (!accessCheck.hasAccess) {
|
||||
logger.warn(`[${requestId}] User ${authenticatedUserId} denied access to task ${taskId}`, {
|
||||
workflowId: payload.workflowId,
|
||||
})
|
||||
return createErrorResponse('Access denied', 403)
|
||||
}
|
||||
logger.debug(`[${requestId}] User ${authenticatedUserId} has access to task ${taskId}`)
|
||||
} else {
|
||||
if (payload?.userId && payload.userId !== authenticatedUserId) {
|
||||
logger.warn(
|
||||
`[${requestId}] User ${authenticatedUserId} attempted to access task ${taskId} owned by ${payload.userId}`
|
||||
)
|
||||
return createErrorResponse('Access denied', 403)
|
||||
}
|
||||
if (!payload?.userId) {
|
||||
logger.warn(
|
||||
`[${requestId}] Task ${taskId} has no ownership information in payload. Denying access for security.`
|
||||
)
|
||||
return createErrorResponse('Access denied', 403)
|
||||
}
|
||||
}
|
||||
|
||||
const statusMap = {
|
||||
QUEUED: 'queued',
|
||||
WAITING_FOR_DEPLOY: 'queued',
|
||||
@@ -67,7 +72,6 @@ export async function GET(
|
||||
|
||||
const mappedStatus = statusMap[run.status as keyof typeof statusMap] || 'unknown'
|
||||
|
||||
// Build response based on status
|
||||
const response: any = {
|
||||
success: true,
|
||||
taskId,
|
||||
@@ -77,21 +81,18 @@ export async function GET(
|
||||
},
|
||||
}
|
||||
|
||||
// Add completion details if finished
|
||||
if (mappedStatus === 'completed') {
|
||||
response.output = run.output // This contains the workflow execution results
|
||||
response.metadata.completedAt = run.finishedAt
|
||||
response.metadata.duration = run.durationMs
|
||||
}
|
||||
|
||||
// Add error details if failed
|
||||
if (mappedStatus === 'failed') {
|
||||
response.error = run.error
|
||||
response.metadata.completedAt = run.finishedAt
|
||||
response.metadata.duration = run.durationMs
|
||||
}
|
||||
|
||||
// Add progress info if still processing
|
||||
if (mappedStatus === 'processing' || mappedStatus === 'queued') {
|
||||
response.estimatedDuration = 180000 // 3 minutes max from our config
|
||||
}
|
||||
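For orientation, the branches above produce response bodies shaped roughly as follows; the base object is partly elided by the hunk, so the nesting and values here are illustrative only.

```typescript
// Illustrative response shapes (taskId and values are made up; the exact base
// object is partly hidden by the hunk above, so treat the nesting as approximate):
const completedExample = {
  success: true,
  taskId: 'run_abc123',
  output: { /* workflow execution results returned by Trigger.dev */ },
  metadata: { completedAt: '2025-01-01T00:00:03.250Z', duration: 3250 },
}

const queuedExample = {
  success: true,
  taskId: 'run_abc123',
  estimatedDuration: 180000, // 3 minutes max, per the route's config
  metadata: { /* ... */ },
}
```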
@@ -107,6 +108,3 @@ export async function GET(
|
||||
return createErrorResponse('Failed to fetch task status', 500)
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: Implement task cancellation via Trigger.dev API if needed
|
||||
// export async function DELETE() { ... }
|
||||
|
||||
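Taken together, this route now funnels both session and API-key callers through a single `checkHybridAuth` call and then verifies ownership of the task before returning its status. A condensed sketch of that flow, reusing only the call shapes visible in the hunk above (the repo's actual helper signatures may differ slightly):

```typescript
import type { NextRequest } from 'next/server'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { createErrorResponse } from '@/app/api/workflows/utils'

/** Returns an error response if the caller may not see this task, or null if authorized. */
async function authorizeTaskStatusRequest(
  request: NextRequest,
  payload: { workflowId?: string; userId?: string }
) {
  // One hybrid check covers both session cookies and x-api-key headers.
  const auth = await checkHybridAuth(request, { requireWorkflowId: false })
  if (!auth.success || !auth.userId) {
    return createErrorResponse(auth.error || 'Authentication required', 401)
  }

  // Prefer workflow-level permissions when the task payload names a workflow.
  if (payload.workflowId) {
    const { verifyWorkflowAccess } = await import('@/socket-server/middleware/permissions')
    const { hasAccess } = await verifyWorkflowAccess(auth.userId, payload.workflowId)
    return hasAccess ? null : createErrorResponse('Access denied', 403)
  }

  // Otherwise fall back to payload ownership, denying tasks with no owner at all.
  if (!payload.userId || payload.userId !== auth.userId) {
    return createErrorResponse('Access denied', 403)
  }
  return null
}
```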
@@ -1,32 +1,72 @@
import { db } from '@sim/db'
import { workflowExecutionLogs, workflowExecutionSnapshots } from '@sim/db/schema'
import { eq } from 'drizzle-orm'
import {
  permissions,
  workflow,
  workflowExecutionLogs,
  workflowExecutionSnapshots,
} from '@sim/db/schema'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { createLogger } from '@/lib/logs/console/logger'

const logger = createLogger('LogsByExecutionIdAPI')
|
||||
|
||||
export async function GET(
|
||||
_request: NextRequest,
|
||||
request: NextRequest,
|
||||
{ params }: { params: Promise<{ executionId: string }> }
|
||||
) {
|
||||
const requestId = generateRequestId()
|
||||
|
||||
try {
|
||||
const { executionId } = await params
|
||||
|
||||
logger.debug(`Fetching execution data for: ${executionId}`)
|
||||
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })
|
||||
if (!authResult.success || !authResult.userId) {
|
||||
logger.warn(`[${requestId}] Unauthorized execution data access attempt for: ${executionId}`)
|
||||
return NextResponse.json(
|
||||
{ error: authResult.error || 'Authentication required' },
|
||||
{ status: 401 }
|
||||
)
|
||||
}
|
||||
|
||||
const authenticatedUserId = authResult.userId
|
||||
|
||||
logger.debug(
|
||||
`[${requestId}] Fetching execution data for: ${executionId} (auth: ${authResult.authType})`
|
||||
)
|
||||
|
||||
// Get the workflow execution log to find the snapshot
|
||||
const [workflowLog] = await db
|
||||
.select()
|
||||
.select({
|
||||
id: workflowExecutionLogs.id,
|
||||
workflowId: workflowExecutionLogs.workflowId,
|
||||
executionId: workflowExecutionLogs.executionId,
|
||||
stateSnapshotId: workflowExecutionLogs.stateSnapshotId,
|
||||
trigger: workflowExecutionLogs.trigger,
|
||||
startedAt: workflowExecutionLogs.startedAt,
|
||||
endedAt: workflowExecutionLogs.endedAt,
|
||||
totalDurationMs: workflowExecutionLogs.totalDurationMs,
|
||||
cost: workflowExecutionLogs.cost,
|
||||
})
|
||||
.from(workflowExecutionLogs)
|
||||
.innerJoin(workflow, eq(workflowExecutionLogs.workflowId, workflow.id))
|
||||
.innerJoin(
|
||||
permissions,
|
||||
and(
|
||||
eq(permissions.entityType, 'workspace'),
|
||||
eq(permissions.entityId, workflow.workspaceId),
|
||||
eq(permissions.userId, authenticatedUserId)
|
||||
)
|
||||
)
|
||||
.where(eq(workflowExecutionLogs.executionId, executionId))
|
||||
.limit(1)
|
||||
|
||||
if (!workflowLog) {
|
||||
logger.warn(`[${requestId}] Execution not found or access denied: ${executionId}`)
|
||||
return NextResponse.json({ error: 'Workflow execution not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
// Get the workflow state snapshot
|
||||
const [snapshot] = await db
|
||||
.select()
|
||||
.from(workflowExecutionSnapshots)
|
||||
@@ -34,6 +74,7 @@ export async function GET(
|
||||
.limit(1)
|
||||
|
||||
if (!snapshot) {
|
||||
logger.warn(`[${requestId}] Workflow state snapshot not found for execution: ${executionId}`)
|
||||
return NextResponse.json({ error: 'Workflow state snapshot not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
@@ -50,14 +91,14 @@ export async function GET(
|
||||
},
|
||||
}
|
||||
|
||||
logger.debug(`Successfully fetched execution data for: ${executionId}`)
|
||||
logger.debug(`[${requestId}] Successfully fetched execution data for: ${executionId}`)
|
||||
logger.debug(
|
||||
`Workflow state contains ${Object.keys((snapshot.stateData as any)?.blocks || {}).length} blocks`
|
||||
`[${requestId}] Workflow state contains ${Object.keys((snapshot.stateData as any)?.blocks || {}).length} blocks`
|
||||
)
|
||||
|
||||
return NextResponse.json(response)
|
||||
} catch (error) {
|
||||
logger.error('Error fetching execution data:', error)
|
||||
logger.error(`[${requestId}] Error fetching execution data:`, error)
|
||||
return NextResponse.json({ error: 'Failed to fetch execution data' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3,8 +3,10 @@ import { memory, workflowBlocks } from '@sim/db/schema'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { getWorkflowAccessContext } from '@/lib/workflows/utils'
|
||||
|
||||
const logger = createLogger('MemoryByIdAPI')
|
||||
|
||||
@@ -65,6 +67,65 @@ const memoryPutBodySchema = z.object({
|
||||
workflowId: z.string().uuid('Invalid workflow ID format'),
|
||||
})
|
||||
|
||||
/**
|
||||
* Validates authentication and workflow access for memory operations
|
||||
* @param request - The incoming request
|
||||
* @param workflowId - The workflow ID to check access for
|
||||
* @param requestId - Request ID for logging
|
||||
* @param action - 'read' for GET, 'write' for PUT/DELETE
|
||||
* @returns Object with userId if successful, or error response if failed
|
||||
*/
|
||||
async function validateMemoryAccess(
|
||||
request: NextRequest,
|
||||
workflowId: string,
|
||||
requestId: string,
|
||||
action: 'read' | 'write'
|
||||
): Promise<{ userId: string } | { error: NextResponse }> {
|
||||
const authResult = await checkHybridAuth(request, {
|
||||
requireWorkflowId: false,
|
||||
})
|
||||
if (!authResult.success || !authResult.userId) {
|
||||
logger.warn(`[${requestId}] Unauthorized memory ${action} attempt`)
|
||||
return {
|
||||
error: NextResponse.json(
|
||||
{ success: false, error: { message: 'Authentication required' } },
|
||||
{ status: 401 }
|
||||
),
|
||||
}
|
||||
}
|
||||
|
||||
const accessContext = await getWorkflowAccessContext(workflowId, authResult.userId)
|
||||
if (!accessContext) {
|
||||
logger.warn(`[${requestId}] Workflow ${workflowId} not found`)
|
||||
return {
|
||||
error: NextResponse.json(
|
||||
{ success: false, error: { message: 'Workflow not found' } },
|
||||
{ status: 404 }
|
||||
),
|
||||
}
|
||||
}
|
||||
|
||||
const { isOwner, workspacePermission } = accessContext
|
||||
const hasAccess =
|
||||
action === 'read'
|
||||
? isOwner || workspacePermission !== null
|
||||
: isOwner || workspacePermission === 'write' || workspacePermission === 'admin'
|
||||
|
||||
if (!hasAccess) {
|
||||
logger.warn(
|
||||
`[${requestId}] User ${authResult.userId} denied ${action} access to workflow ${workflowId}`
|
||||
)
|
||||
return {
|
||||
error: NextResponse.json(
|
||||
{ success: false, error: { message: 'Access denied' } },
|
||||
{ status: 403 }
|
||||
),
|
||||
}
|
||||
}
|
||||
|
||||
return { userId: authResult.userId }
|
||||
}
|
||||
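One detail of the helper above is worth spelling out: owners always pass, any workspace permission is enough to read, but writes require 'write' or 'admin'. A self-contained restatement of that rule (the full permission union is an assumption inferred from the values checked above):

```typescript
// Assumed permission values, based on the checks in validateMemoryAccess above.
type WorkspacePermission = 'read' | 'write' | 'admin' | null

function canAccessMemory(
  action: 'read' | 'write',
  isOwner: boolean,
  workspacePermission: WorkspacePermission
): boolean {
  if (isOwner) return true
  if (action === 'read') return workspacePermission !== null // any workspace role can read
  return workspacePermission === 'write' || workspacePermission === 'admin' // writes need elevated access
}

// canAccessMemory('read', false, 'read')   -> true
// canAccessMemory('write', false, 'read')  -> false
// canAccessMemory('write', false, 'admin') -> true
```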
|
||||
export const dynamic = 'force-dynamic'
|
||||
export const runtime = 'nodejs'
|
||||
|
||||
@@ -101,6 +162,11 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
|
||||
|
||||
const { workflowId: validatedWorkflowId } = validation.data
|
||||
|
||||
const accessCheck = await validateMemoryAccess(request, validatedWorkflowId, requestId, 'read')
|
||||
if ('error' in accessCheck) {
|
||||
return accessCheck.error
|
||||
}
|
||||
|
||||
const memories = await db
|
||||
.select()
|
||||
.from(memory)
|
||||
@@ -203,6 +269,11 @@ export async function DELETE(
|
||||
|
||||
const { workflowId: validatedWorkflowId } = validation.data
|
||||
|
||||
const accessCheck = await validateMemoryAccess(request, validatedWorkflowId, requestId, 'write')
|
||||
if ('error' in accessCheck) {
|
||||
return accessCheck.error
|
||||
}
|
||||
|
||||
const existingMemory = await db
|
||||
.select({ id: memory.id })
|
||||
.from(memory)
|
||||
@@ -296,6 +367,11 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
|
||||
)
|
||||
}
|
||||
|
||||
const accessCheck = await validateMemoryAccess(request, validatedWorkflowId, requestId, 'write')
|
||||
if ('error' in accessCheck) {
|
||||
return accessCheck.error
|
||||
}
|
||||
|
||||
const existingMemories = await db
|
||||
.select()
|
||||
.from(memory)
|
||||
|
||||
@@ -1,16 +1,19 @@
|
||||
import { db } from '@sim/db'
|
||||
import { templates, user } from '@sim/db/schema'
|
||||
import { templates } from '@sim/db/schema'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { verifySuperUser } from '@/lib/templates/permissions'
|
||||
|
||||
const logger = createLogger('TemplateApprovalAPI')
|
||||
|
||||
export const revalidate = 0
|
||||
|
||||
// POST /api/templates/[id]/approve - Approve a template (super users only)
|
||||
/**
|
||||
* POST /api/templates/[id]/approve - Approve a template (super users only)
|
||||
*/
|
||||
export async function POST(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
|
||||
const requestId = generateRequestId()
|
||||
const { id } = await params
|
||||
@@ -22,23 +25,18 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
// Check if user is a super user
|
||||
const currentUser = await db.select().from(user).where(eq(user.id, session.user.id)).limit(1)
|
||||
|
||||
if (!currentUser[0]?.isSuperUser) {
|
||||
const { isSuperUser } = await verifySuperUser(session.user.id)
|
||||
if (!isSuperUser) {
|
||||
logger.warn(`[${requestId}] Non-super user attempted to approve template: ${id}`)
|
||||
return NextResponse.json({ error: 'Only super users can approve templates' }, { status: 403 })
|
||||
}
|
||||
|
||||
// Check if template exists
|
||||
const existingTemplate = await db.select().from(templates).where(eq(templates.id, id)).limit(1)
|
||||
|
||||
if (existingTemplate.length === 0) {
|
||||
logger.warn(`[${requestId}] Template not found for approval: ${id}`)
|
||||
return NextResponse.json({ error: 'Template not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
// Update template status to approved
|
||||
await db
|
||||
.update(templates)
|
||||
.set({ status: 'approved', updatedAt: new Date() })
|
||||
@@ -56,9 +54,11 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
|
||||
}
|
||||
}
|
||||
|
||||
// POST /api/templates/[id]/reject - Reject a template (super users only)
|
||||
/**
|
||||
* DELETE /api/templates/[id]/approve - Unapprove a template (super users only)
|
||||
*/
|
||||
export async function DELETE(
|
||||
request: NextRequest,
|
||||
_request: NextRequest,
|
||||
{ params }: { params: Promise<{ id: string }> }
|
||||
) {
|
||||
const requestId = generateRequestId()
|
||||
@@ -71,23 +71,18 @@ export async function DELETE(
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
// Check if user is a super user
|
||||
const currentUser = await db.select().from(user).where(eq(user.id, session.user.id)).limit(1)
|
||||
|
||||
if (!currentUser[0]?.isSuperUser) {
|
||||
const { isSuperUser } = await verifySuperUser(session.user.id)
|
||||
if (!isSuperUser) {
|
||||
logger.warn(`[${requestId}] Non-super user attempted to reject template: ${id}`)
|
||||
return NextResponse.json({ error: 'Only super users can reject templates' }, { status: 403 })
|
||||
}
|
||||
|
||||
// Check if template exists
|
||||
const existingTemplate = await db.select().from(templates).where(eq(templates.id, id)).limit(1)
|
||||
|
||||
if (existingTemplate.length === 0) {
|
||||
logger.warn(`[${requestId}] Template not found for rejection: ${id}`)
|
||||
return NextResponse.json({ error: 'Template not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
// Update template status to rejected
|
||||
await db
|
||||
.update(templates)
|
||||
.set({ status: 'rejected', updatedAt: new Date() })
|
||||
|
||||
@@ -6,6 +6,7 @@ import { getSession } from '@/lib/auth'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { verifyTemplateOwnership } from '@/lib/templates/permissions'
|
||||
import { uploadFile } from '@/lib/uploads/core/storage-service'
|
||||
import { isValidPng } from '@/lib/uploads/utils/validation'
|
||||
|
||||
@@ -27,15 +28,14 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const [template] = await db
|
||||
.select({ id: templates.id, workflowId: templates.workflowId })
|
||||
.from(templates)
|
||||
.where(eq(templates.id, id))
|
||||
.limit(1)
|
||||
|
||||
if (!template) {
|
||||
logger.warn(`[${requestId}] Template not found for OG image upload: ${id}`)
|
||||
return NextResponse.json({ error: 'Template not found' }, { status: 404 })
|
||||
const { authorized, error, status } = await verifyTemplateOwnership(
|
||||
id,
|
||||
session.user.id,
|
||||
'admin'
|
||||
)
|
||||
if (!authorized) {
|
||||
logger.warn(`[${requestId}] User denied permission to upload OG image for template ${id}`)
|
||||
return NextResponse.json({ error }, { status: status || 403 })
|
||||
}
|
||||
|
||||
const body = await request.json()
|
||||
@@ -114,6 +114,16 @@ export async function DELETE(
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { authorized, error, status } = await verifyTemplateOwnership(
|
||||
id,
|
||||
session.user.id,
|
||||
'admin'
|
||||
)
|
||||
if (!authorized) {
|
||||
logger.warn(`[${requestId}] User denied permission to delete OG image for template ${id}`)
|
||||
return NextResponse.json({ error }, { status: status || 403 })
|
||||
}
|
||||
|
||||
await db
|
||||
.update(templates)
|
||||
.set({
|
||||
|
||||
@@ -1,16 +1,19 @@
|
||||
import { db } from '@sim/db'
|
||||
import { templates, user } from '@sim/db/schema'
|
||||
import { templates } from '@sim/db/schema'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { verifySuperUser } from '@/lib/templates/permissions'
|
||||
|
||||
const logger = createLogger('TemplateRejectionAPI')
|
||||
|
||||
export const revalidate = 0
|
||||
|
||||
// POST /api/templates/[id]/reject - Reject a template (super users only)
|
||||
/**
|
||||
* POST /api/templates/[id]/reject - Reject a template (super users only)
|
||||
*/
|
||||
export async function POST(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
|
||||
const requestId = generateRequestId()
|
||||
const { id } = await params
|
||||
@@ -22,23 +25,18 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
// Check if user is a super user
|
||||
const currentUser = await db.select().from(user).where(eq(user.id, session.user.id)).limit(1)
|
||||
|
||||
if (!currentUser[0]?.isSuperUser) {
|
||||
const { isSuperUser } = await verifySuperUser(session.user.id)
|
||||
if (!isSuperUser) {
|
||||
logger.warn(`[${requestId}] Non-super user attempted to reject template: ${id}`)
|
||||
return NextResponse.json({ error: 'Only super users can reject templates' }, { status: 403 })
|
||||
}
|
||||
|
||||
// Check if template exists
|
||||
const existingTemplate = await db.select().from(templates).where(eq(templates.id, id)).limit(1)
|
||||
|
||||
if (existingTemplate.length === 0) {
|
||||
logger.warn(`[${requestId}] Template not found for rejection: ${id}`)
|
||||
return NextResponse.json({ error: 'Template not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
// Update template status to rejected
|
||||
await db
|
||||
.update(templates)
|
||||
.set({ status: 'rejected', updatedAt: new Date() })
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { db } from '@sim/db'
|
||||
import { member, templateCreators, templates, workflow } from '@sim/db/schema'
|
||||
import { and, eq, or, sql } from 'drizzle-orm'
|
||||
import { templateCreators, templates, workflow } from '@sim/db/schema'
|
||||
import { eq, sql } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { getSession } from '@/lib/auth'
|
||||
@@ -15,7 +15,6 @@ const logger = createLogger('TemplateByIdAPI')
|
||||
|
||||
export const revalidate = 0
|
||||
|
||||
// GET /api/templates/[id] - Retrieve a single template by ID
|
||||
export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
|
||||
const requestId = generateRequestId()
|
||||
const { id } = await params
|
||||
@@ -25,7 +24,6 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
|
||||
|
||||
logger.debug(`[${requestId}] Fetching template: ${id}`)
|
||||
|
||||
// Fetch the template by ID with creator info
|
||||
const result = await db
|
||||
.select({
|
||||
template: templates,
|
||||
@@ -47,12 +45,10 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
|
||||
creator: creator || undefined,
|
||||
}
|
||||
|
||||
// Only show approved templates to non-authenticated users
|
||||
if (!session?.user?.id && template.status !== 'approved') {
|
||||
return NextResponse.json({ error: 'Template not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
// Check if user has starred (only if authenticated)
|
||||
let isStarred = false
|
||||
if (session?.user?.id) {
|
||||
const { templateStars } = await import('@sim/db/schema')
|
||||
@@ -80,7 +76,6 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
|
||||
|
||||
logger.debug(`[${requestId}] Incremented view count for template: ${id}`)
|
||||
} catch (viewError) {
|
||||
// Log the error but don't fail the request
|
||||
logger.warn(`[${requestId}] Failed to increment view count for template: ${id}`, viewError)
|
||||
}
|
||||
}
|
||||
@@ -138,7 +133,6 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
|
||||
|
||||
const { name, details, creatorId, tags, updateState } = validationResult.data
|
||||
|
||||
// Check if template exists
|
||||
const existingTemplate = await db.select().from(templates).where(eq(templates.id, id)).limit(1)
|
||||
|
||||
if (existingTemplate.length === 0) {
|
||||
@@ -146,32 +140,54 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
|
||||
return NextResponse.json({ error: 'Template not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
// No permission check needed - template updates only happen from within the workspace
|
||||
// where the user is already editing the connected workflow
|
||||
const template = existingTemplate[0]
|
||||
|
||||
if (!template.creatorId) {
|
||||
logger.warn(`[${requestId}] Template ${id} has no creator, denying update`)
|
||||
return NextResponse.json({ error: 'Access denied' }, { status: 403 })
|
||||
}
|
||||
|
||||
const { verifyCreatorPermission } = await import('@/lib/templates/permissions')
|
||||
const { hasPermission, error: permissionError } = await verifyCreatorPermission(
|
||||
session.user.id,
|
||||
template.creatorId,
|
||||
'admin'
|
||||
)
|
||||
|
||||
if (!hasPermission) {
|
||||
logger.warn(`[${requestId}] User denied permission to update template ${id}`)
|
||||
return NextResponse.json({ error: permissionError || 'Access denied' }, { status: 403 })
|
||||
}
|
||||
|
||||
// Prepare update data - only include fields that were provided
|
||||
const updateData: any = {
|
||||
updatedAt: new Date(),
|
||||
}
|
||||
|
||||
// Only update fields that were provided
|
||||
if (name !== undefined) updateData.name = name
|
||||
if (details !== undefined) updateData.details = details
|
||||
if (tags !== undefined) updateData.tags = tags
|
||||
if (creatorId !== undefined) updateData.creatorId = creatorId
|
||||
|
||||
// Only update the state if explicitly requested and the template has a connected workflow
|
||||
if (updateState && existingTemplate[0].workflowId) {
|
||||
// Load the current workflow state from normalized tables
|
||||
if (updateState && template.workflowId) {
|
||||
const { verifyWorkflowAccess } = await import('@/socket-server/middleware/permissions')
|
||||
const { hasAccess: hasWorkflowAccess } = await verifyWorkflowAccess(
|
||||
session.user.id,
|
||||
template.workflowId
|
||||
)
|
||||
|
||||
if (!hasWorkflowAccess) {
|
||||
logger.warn(`[${requestId}] User denied workflow access for state sync on template ${id}`)
|
||||
return NextResponse.json({ error: 'Access denied to workflow' }, { status: 403 })
|
||||
}
|
||||
|
||||
const { loadWorkflowFromNormalizedTables } = await import('@/lib/workflows/persistence/utils')
|
||||
const normalizedData = await loadWorkflowFromNormalizedTables(existingTemplate[0].workflowId)
|
||||
const normalizedData = await loadWorkflowFromNormalizedTables(template.workflowId)
|
||||
|
||||
if (normalizedData) {
|
||||
// Also fetch workflow variables
|
||||
const [workflowRecord] = await db
|
||||
.select({ variables: workflow.variables })
|
||||
.from(workflow)
|
||||
.where(eq(workflow.id, existingTemplate[0].workflowId))
|
||||
.where(eq(workflow.id, template.workflowId))
|
||||
.limit(1)
|
||||
|
||||
const currentState = {
|
||||
@@ -183,17 +199,15 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
|
||||
lastSaved: Date.now(),
|
||||
}
|
||||
|
||||
// Extract credential requirements from the new state
|
||||
const requiredCredentials = extractRequiredCredentials(currentState)
|
||||
|
||||
// Sanitize the state before storing
|
||||
const sanitizedState = sanitizeCredentials(currentState)
|
||||
|
||||
updateData.state = sanitizedState
|
||||
updateData.requiredCredentials = requiredCredentials
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Updating template state and credentials from current workflow: ${existingTemplate[0].workflowId}`
|
||||
`[${requestId}] Updating template state and credentials from current workflow: ${template.workflowId}`
|
||||
)
|
||||
} else {
|
||||
logger.warn(`[${requestId}] Could not load workflow state for template: ${id}`)
|
||||
@@ -233,7 +247,6 @@ export async function DELETE(
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
// Fetch template
|
||||
const existing = await db.select().from(templates).where(eq(templates.id, id)).limit(1)
|
||||
if (existing.length === 0) {
|
||||
logger.warn(`[${requestId}] Template not found for delete: ${id}`)
|
||||
@@ -242,41 +255,21 @@ export async function DELETE(
|
||||
|
||||
const template = existing[0]
|
||||
|
||||
// Permission: Only admin/owner of creator profile can delete
|
||||
if (template.creatorId) {
|
||||
const creatorProfile = await db
|
||||
.select()
|
||||
.from(templateCreators)
|
||||
.where(eq(templateCreators.id, template.creatorId))
|
||||
.limit(1)
|
||||
if (!template.creatorId) {
|
||||
logger.warn(`[${requestId}] Template ${id} has no creator, denying delete`)
|
||||
return NextResponse.json({ error: 'Access denied' }, { status: 403 })
|
||||
}
|
||||
|
||||
if (creatorProfile.length > 0) {
|
||||
const creator = creatorProfile[0]
|
||||
let hasPermission = false
|
||||
const { verifyCreatorPermission } = await import('@/lib/templates/permissions')
|
||||
const { hasPermission, error: permissionError } = await verifyCreatorPermission(
|
||||
session.user.id,
|
||||
template.creatorId,
|
||||
'admin'
|
||||
)
|
||||
|
||||
if (creator.referenceType === 'user') {
|
||||
hasPermission = creator.referenceId === session.user.id
|
||||
} else if (creator.referenceType === 'organization') {
|
||||
// For delete, require admin/owner role
|
||||
const membership = await db
|
||||
.select()
|
||||
.from(member)
|
||||
.where(
|
||||
and(
|
||||
eq(member.userId, session.user.id),
|
||||
eq(member.organizationId, creator.referenceId),
|
||||
or(eq(member.role, 'admin'), eq(member.role, 'owner'))
|
||||
)
|
||||
)
|
||||
.limit(1)
|
||||
hasPermission = membership.length > 0
|
||||
}
|
||||
|
||||
if (!hasPermission) {
|
||||
logger.warn(`[${requestId}] User denied permission to delete template ${id}`)
|
||||
return NextResponse.json({ error: 'Access denied' }, { status: 403 })
|
||||
}
|
||||
}
|
||||
if (!hasPermission) {
|
||||
logger.warn(`[${requestId}] User denied permission to delete template ${id}`)
|
||||
return NextResponse.json({ error: permissionError || 'Access denied' }, { status: 403 })
|
||||
}
|
||||
|
||||
await db.delete(templates).where(eq(templates.id, id))
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import { db } from '@sim/db'
|
||||
import {
|
||||
member,
|
||||
templateCreators,
|
||||
templateStars,
|
||||
templates,
|
||||
@@ -204,51 +203,18 @@ export async function POST(request: NextRequest) {
|
||||
return NextResponse.json({ error: 'Workflow not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
// Validate creator profile - required for all templates
|
||||
const creatorProfile = await db
|
||||
.select()
|
||||
.from(templateCreators)
|
||||
.where(eq(templateCreators.id, data.creatorId))
|
||||
.limit(1)
|
||||
const { verifyCreatorPermission } = await import('@/lib/templates/permissions')
|
||||
const { hasPermission, error: permissionError } = await verifyCreatorPermission(
|
||||
session.user.id,
|
||||
data.creatorId,
|
||||
'member'
|
||||
)
|
||||
|
||||
if (creatorProfile.length === 0) {
|
||||
logger.warn(`[${requestId}] Creator profile not found: ${data.creatorId}`)
|
||||
return NextResponse.json({ error: 'Creator profile not found' }, { status: 404 })
|
||||
if (!hasPermission) {
|
||||
logger.warn(`[${requestId}] User cannot use creator profile: ${data.creatorId}`)
|
||||
return NextResponse.json({ error: permissionError || 'Access denied' }, { status: 403 })
|
||||
}
|
||||
|
||||
const creator = creatorProfile[0]
|
||||
|
||||
// Verify user has permission to use this creator profile
|
||||
if (creator.referenceType === 'user') {
|
||||
if (creator.referenceId !== session.user.id) {
|
||||
logger.warn(`[${requestId}] User cannot use creator profile: ${data.creatorId}`)
|
||||
return NextResponse.json(
|
||||
{ error: 'You do not have permission to use this creator profile' },
|
||||
{ status: 403 }
|
||||
)
|
||||
}
|
||||
} else if (creator.referenceType === 'organization') {
|
||||
// Verify user is a member of the organization
|
||||
const membership = await db
|
||||
.select()
|
||||
.from(member)
|
||||
.where(
|
||||
and(eq(member.userId, session.user.id), eq(member.organizationId, creator.referenceId))
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
if (membership.length === 0) {
|
||||
logger.warn(
|
||||
`[${requestId}] User not a member of organization for creator: ${data.creatorId}`
|
||||
)
|
||||
return NextResponse.json(
|
||||
{ error: 'You must be a member of the organization to use its creator profile' },
|
||||
{ status: 403 }
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
// Create the template
|
||||
const templateId = uuidv4()
|
||||
const now = new Date()
|
||||
|
||||
|
||||
@@ -30,23 +30,41 @@ export async function createMongoDBConnection(config: MongoDBConnectionConfig) {
|
||||
return client
|
||||
}
|
||||
|
||||
/**
 * Recursively checks an object for dangerous MongoDB operators
 * @param obj - The object to check
 * @param dangerousOperators - Array of operator names to block
 * @returns true if a dangerous operator is found
 */
function containsDangerousOperator(obj: unknown, dangerousOperators: string[]): boolean {
  if (typeof obj !== 'object' || obj === null) return false

  for (const key of Object.keys(obj as Record<string, unknown>)) {
    if (dangerousOperators.includes(key)) return true
    if (
      typeof (obj as Record<string, unknown>)[key] === 'object' &&
      containsDangerousOperator((obj as Record<string, unknown>)[key], dangerousOperators)
    ) {
      return true
    }
  }
  return false
}

export function validateFilter(filter: string): { isValid: boolean; error?: string } {
|
||||
try {
|
||||
const parsed = JSON.parse(filter)
|
||||
|
||||
const dangerousOperators = ['$where', '$regex', '$expr', '$function', '$accumulator', '$let']
|
||||
const dangerousOperators = [
|
||||
'$where', // Executes arbitrary JavaScript
|
||||
'$regex', // Can cause ReDoS attacks
|
||||
'$expr', // Expression evaluation
|
||||
'$function', // Custom JavaScript functions
|
||||
'$accumulator', // Custom JavaScript accumulators
|
||||
'$let', // Variable definitions that could be exploited
|
||||
]
|
||||
|
||||
const checkForDangerousOps = (obj: any): boolean => {
|
||||
if (typeof obj !== 'object' || obj === null) return false
|
||||
|
||||
for (const key of Object.keys(obj)) {
|
||||
if (dangerousOperators.includes(key)) return true
|
||||
if (typeof obj[key] === 'object' && checkForDangerousOps(obj[key])) return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
if (checkForDangerousOps(parsed)) {
|
||||
if (containsDangerousOperator(parsed, dangerousOperators)) {
|
||||
return {
|
||||
isValid: false,
|
||||
error: 'Filter contains potentially dangerous operators',
|
||||
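For reference, a few inputs and the verdicts the checks above would produce; the success return shape is not shown in this hunk, so `{ isValid: true }` is assumed.

```typescript
validateFilter('{"status": "active", "age": {"$gte": 18}}')
// -> { isValid: true }  ($gte is an ordinary comparison operator, not on the blocklist)

validateFilter('{"$where": "this.password.length > 0"}')
// -> { isValid: false, error: 'Filter contains potentially dangerous operators' }

validateFilter('{"name": {"$regex": "^(a+)+$"}}')
// -> { isValid: false, ... }  ($regex is blocked to prevent ReDoS via user-supplied patterns)
```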
@@ -74,29 +92,19 @@ export function validatePipeline(pipeline: string): { isValid: boolean; error?:
|
||||
}
|
||||
|
||||
const dangerousOperators = [
|
||||
'$where',
|
||||
'$function',
|
||||
'$accumulator',
|
||||
'$let',
|
||||
'$merge',
|
||||
'$out',
|
||||
'$currentOp',
|
||||
'$listSessions',
|
||||
'$listLocalSessions',
|
||||
'$where', // Executes arbitrary JavaScript
|
||||
'$function', // Custom JavaScript functions
|
||||
'$accumulator', // Custom JavaScript accumulators
|
||||
'$let', // Variable definitions that could be exploited
|
||||
'$merge', // Writes to external collections
|
||||
'$out', // Writes to external collections
|
||||
'$currentOp', // Exposes system operation info
|
||||
'$listSessions', // Exposes session info
|
||||
'$listLocalSessions', // Exposes local session info
|
||||
]
|
||||
|
||||
const checkPipelineStage = (stage: any): boolean => {
|
||||
if (typeof stage !== 'object' || stage === null) return false
|
||||
|
||||
for (const key of Object.keys(stage)) {
|
||||
if (dangerousOperators.includes(key)) return true
|
||||
if (typeof stage[key] === 'object' && checkPipelineStage(stage[key])) return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
for (const stage of parsed) {
|
||||
if (checkPipelineStage(stage)) {
|
||||
if (containsDangerousOperator(stage, dangerousOperators)) {
|
||||
return {
|
||||
isValid: false,
|
||||
error: 'Pipeline contains potentially dangerous operators',
|
||||
|
||||
@@ -98,15 +98,45 @@ export function buildDeleteQuery(table: string, where: string) {
|
||||
return { query, values: [] }
|
||||
}
|
||||
|
||||
/**
|
||||
* Validates a WHERE clause to prevent SQL injection attacks
|
||||
* @param where - The WHERE clause string to validate
|
||||
* @throws {Error} If the WHERE clause contains potentially dangerous patterns
|
||||
*/
|
||||
function validateWhereClause(where: string): void {
|
||||
const dangerousPatterns = [
|
||||
// DDL and DML injection via stacked queries
|
||||
/;\s*(drop|delete|insert|update|create|alter|grant|revoke)/i,
|
||||
/union\s+select/i,
|
||||
// Union-based injection
|
||||
/union\s+(all\s+)?select/i,
|
||||
// File operations
|
||||
/into\s+outfile/i,
|
||||
/load_file/i,
|
||||
/into\s+dumpfile/i,
|
||||
/load_file\s*\(/i,
|
||||
// Comment-based injection (can truncate query)
|
||||
/--/,
|
||||
/\/\*/,
|
||||
/\*\//,
|
||||
// Tautologies - always true/false conditions using backreferences
|
||||
// Matches OR 'x'='x' or OR x=x (same value both sides) but NOT OR col='value'
|
||||
/\bor\s+(['"]?)(\w+)\1\s*=\s*\1\2\1/i,
|
||||
/\bor\s+true\b/i,
|
||||
/\bor\s+false\b/i,
|
||||
// AND tautologies (less common but still used in attacks)
|
||||
/\band\s+(['"]?)(\w+)\1\s*=\s*\1\2\1/i,
|
||||
/\band\s+true\b/i,
|
||||
/\band\s+false\b/i,
|
||||
// Time-based blind injection
|
||||
/\bsleep\s*\(/i,
|
||||
/\bbenchmark\s*\(/i,
|
||||
/\bwaitfor\s+delay/i,
|
||||
// Stacked queries (any statement after semicolon)
|
||||
/;\s*\w+/,
|
||||
// Information schema queries
|
||||
/information_schema/i,
|
||||
/mysql\./i,
|
||||
// System functions and procedures
|
||||
/\bxp_cmdshell/i,
|
||||
]
|
||||
|
||||
for (const pattern of dangerousPatterns) {
|
||||
|
||||
@@ -64,15 +64,46 @@ export function sanitizeIdentifier(identifier: string): string {
|
||||
return sanitizeSingleIdentifier(identifier)
|
||||
}
|
||||
|
||||
/**
|
||||
* Validates a WHERE clause to prevent SQL injection attacks
|
||||
* @param where - The WHERE clause string to validate
|
||||
* @throws {Error} If the WHERE clause contains potentially dangerous patterns
|
||||
*/
|
||||
function validateWhereClause(where: string): void {
|
||||
const dangerousPatterns = [
|
||||
// DDL and DML injection via stacked queries
|
||||
/;\s*(drop|delete|insert|update|create|alter|grant|revoke)/i,
|
||||
/union\s+select/i,
|
||||
// Union-based injection
|
||||
/union\s+(all\s+)?select/i,
|
||||
// File operations
|
||||
/into\s+outfile/i,
|
||||
/load_file/i,
|
||||
/load_file\s*\(/i,
|
||||
/pg_read_file/i,
|
||||
// Comment-based injection (can truncate query)
|
||||
/--/,
|
||||
/\/\*/,
|
||||
/\*\//,
|
||||
// Tautologies - always true/false conditions using backreferences
|
||||
// Matches OR 'x'='x' or OR x=x (same value both sides) but NOT OR col='value'
|
||||
/\bor\s+(['"]?)(\w+)\1\s*=\s*\1\2\1/i,
|
||||
/\bor\s+true\b/i,
|
||||
/\bor\s+false\b/i,
|
||||
// AND tautologies (less common but still used in attacks)
|
||||
/\band\s+(['"]?)(\w+)\1\s*=\s*\1\2\1/i,
|
||||
/\band\s+true\b/i,
|
||||
/\band\s+false\b/i,
|
||||
// Time-based blind injection
|
||||
/\bsleep\s*\(/i,
|
||||
/\bwaitfor\s+delay/i,
|
||||
/\bpg_sleep\s*\(/i,
|
||||
/\bbenchmark\s*\(/i,
|
||||
// Stacked queries (any statement after semicolon)
|
||||
/;\s*\w+/,
|
||||
// Information schema / system catalog queries
|
||||
/information_schema/i,
|
||||
/pg_catalog/i,
|
||||
// System functions and procedures
|
||||
/\bxp_cmdshell/i,
|
||||
]
|
||||
|
||||
for (const pattern of dangerousPatterns) {
|
||||
|
||||
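Both copies of `validateWhereClause` (the MySQL and PostgreSQL variants above) apply the same idea: reject WHERE fragments that look like stacked queries, comments, tautologies, timing probes, or catalog access. The hunks are truncated before the loop body, so the rejection behavior (a thrown Error, per the JSDoc) is assumed in these illustrative calls:

```typescript
// Passes: no dangerous pattern matches an ordinary condition.
validateWhereClause("status = 'active' AND deleted_at IS NULL")

// Rejected by the patterns above:
validateWhereClause('id = 1 OR 1=1')                            // OR-tautology
validateWhereClause("name = 'x'; DROP TABLE users")             // stacked query / DDL after ';'
validateWhereClause("email = 'a@b.com' -- AND is_admin = 1")    // comment-based truncation
validateWhereClause('id = 1 UNION SELECT password FROM users')  // union-based injection
```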
@@ -1,4 +1,7 @@
|
||||
import { type Attributes, Client, type ConnectConfig } from 'ssh2'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
|
||||
const logger = createLogger('SSHUtils')
|
||||
|
||||
// File type constants from POSIX
|
||||
const S_IFMT = 0o170000 // bit mask for the file type bit field
|
||||
@@ -32,7 +35,6 @@ function formatSSHError(err: Error, config: { host: string; port: number }): Err
|
||||
const host = config.host
|
||||
const port = config.port
|
||||
|
||||
// Connection refused - server not running or wrong port
|
||||
if (errorMessage.includes('econnrefused') || errorMessage.includes('connection refused')) {
|
||||
return new Error(
|
||||
`Connection refused to ${host}:${port}. ` +
|
||||
@@ -42,7 +44,6 @@ function formatSSHError(err: Error, config: { host: string; port: number }): Err
|
||||
)
|
||||
}
|
||||
|
||||
// Connection reset - server closed connection unexpectedly
|
||||
if (errorMessage.includes('econnreset') || errorMessage.includes('connection reset')) {
|
||||
return new Error(
|
||||
`Connection reset by ${host}:${port}. ` +
|
||||
@@ -53,7 +54,6 @@ function formatSSHError(err: Error, config: { host: string; port: number }): Err
|
||||
)
|
||||
}
|
||||
|
||||
// Timeout - server unreachable or slow
|
||||
if (errorMessage.includes('etimedout') || errorMessage.includes('timeout')) {
|
||||
return new Error(
|
||||
`Connection timed out to ${host}:${port}. ` +
|
||||
@@ -63,7 +63,6 @@ function formatSSHError(err: Error, config: { host: string; port: number }): Err
|
||||
)
|
||||
}
|
||||
|
||||
// DNS/hostname resolution
|
||||
if (errorMessage.includes('enotfound') || errorMessage.includes('getaddrinfo')) {
|
||||
return new Error(
|
||||
`Could not resolve hostname "${host}". ` +
|
||||
@@ -71,7 +70,6 @@ function formatSSHError(err: Error, config: { host: string; port: number }): Err
|
||||
)
|
||||
}
|
||||
|
||||
// Authentication failure
|
||||
if (errorMessage.includes('authentication') || errorMessage.includes('auth')) {
|
||||
return new Error(
|
||||
`Authentication failed for user on ${host}:${port}. ` +
|
||||
@@ -81,7 +79,6 @@ function formatSSHError(err: Error, config: { host: string; port: number }): Err
|
||||
)
|
||||
}
|
||||
|
||||
// Private key format issues
|
||||
if (
|
||||
errorMessage.includes('key') &&
|
||||
(errorMessage.includes('parse') || errorMessage.includes('invalid'))
|
||||
@@ -93,7 +90,6 @@ function formatSSHError(err: Error, config: { host: string; port: number }): Err
|
||||
)
|
||||
}
|
||||
|
||||
// Host key verification (first connection)
|
||||
if (errorMessage.includes('host key') || errorMessage.includes('hostkey')) {
|
||||
return new Error(
|
||||
`Host key verification issue for ${host}. ` +
|
||||
@@ -101,7 +97,6 @@ function formatSSHError(err: Error, config: { host: string; port: number }): Err
|
||||
)
|
||||
}
|
||||
|
||||
// Return original error with context if no specific match
|
||||
return new Error(`SSH connection to ${host}:${port} failed: ${err.message}`)
|
||||
}
|
||||
|
||||
@@ -205,19 +200,119 @@ export function executeSSHCommand(client: Client, command: string): Promise<SSHC
|
||||
|
||||
/**
|
||||
* Sanitize command input to prevent command injection
|
||||
*
|
||||
* Removes null bytes and other dangerous control characters while preserving
|
||||
* legitimate shell syntax. Logs warnings for potentially dangerous patterns.
|
||||
*
|
||||
* Note: This function does not block complex shell commands (pipes, redirects, etc.)
|
||||
* as users legitimately need these features for remote command execution.
|
||||
*
|
||||
* @param command - The command to sanitize
|
||||
* @returns The sanitized command string
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* const safeCommand = sanitizeCommand(userInput)
|
||||
* // Use safeCommand for SSH execution
|
||||
* ```
|
||||
*/
|
||||
export function sanitizeCommand(command: string): string {
|
||||
return command.trim()
|
||||
let sanitized = command.replace(/\0/g, '')
|
||||
|
||||
sanitized = sanitized.replace(/[\x0B\x0C]/g, '')
|
||||
|
||||
sanitized = sanitized.trim()
|
||||
|
||||
const dangerousPatterns = [
|
||||
{ pattern: /\$\(.*\)/, name: 'command substitution $()' },
|
||||
{ pattern: /`.*`/, name: 'backtick command substitution' },
|
||||
{ pattern: /;\s*rm\s+-rf/i, name: 'destructive rm -rf command' },
|
||||
{ pattern: /;\s*dd\s+/i, name: 'dd command (disk operations)' },
|
||||
{ pattern: /mkfs/i, name: 'filesystem formatting command' },
|
||||
{ pattern: />\s*\/dev\/sd[a-z]/i, name: 'direct disk write' },
|
||||
]
|
||||
|
||||
for (const { pattern, name } of dangerousPatterns) {
|
||||
if (pattern.test(sanitized)) {
|
||||
logger.warn(`Command contains ${name}`, {
|
||||
command: sanitized.substring(0, 100) + (sanitized.length > 100 ? '...' : ''),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
return sanitized
|
||||
}
|
||||
|
||||
/**
|
||||
* Sanitize file path - removes null bytes and trims whitespace
|
||||
* Sanitize and validate file path to prevent path traversal attacks
|
||||
*
|
||||
* This function validates that a file path does not contain:
|
||||
* - Null bytes
|
||||
* - Path traversal sequences (.. or ../)
|
||||
* - URL-encoded path traversal attempts
|
||||
*
|
||||
* @param path - The file path to sanitize and validate
|
||||
* @returns The sanitized path if valid
|
||||
* @throws Error if path traversal is detected
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* try {
|
||||
* const safePath = sanitizePath(userInput)
|
||||
* // Use safePath safely
|
||||
* } catch (error) {
|
||||
* // Handle invalid path
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
export function sanitizePath(path: string): string {
|
||||
let sanitized = path.replace(/\0/g, '')
|
||||
|
||||
sanitized = sanitized.trim()
|
||||
|
||||
if (sanitized.includes('%00')) {
|
||||
logger.warn('Path contains URL-encoded null bytes', {
|
||||
path: path.substring(0, 100),
|
||||
})
|
||||
throw new Error('Path contains invalid characters')
|
||||
}
|
||||
|
||||
const pathTraversalPatterns = [
|
||||
'../', // Standard Unix path traversal
|
||||
'..\\', // Windows path traversal
|
||||
'/../', // Mid-path traversal
|
||||
'\\..\\', // Windows mid-path traversal
|
||||
'%2e%2e%2f', // Fully encoded ../
|
||||
'%2e%2e/', // Partially encoded ../
|
||||
'%2e%2e%5c', // Fully encoded ..\
|
||||
'%2e%2e\\', // Partially encoded ..\
|
||||
'..%2f', // .. with encoded /
|
||||
'..%5c', // .. with encoded \
|
||||
'%252e%252e', // Double URL encoded ..
|
||||
'..%252f', // .. with double encoded /
|
||||
'..%255c', // .. with double encoded \
|
||||
]
|
||||
|
||||
const lowerPath = sanitized.toLowerCase()
|
||||
for (const pattern of pathTraversalPatterns) {
|
||||
if (lowerPath.includes(pattern.toLowerCase())) {
|
||||
logger.warn('Path traversal attempt detected', {
|
||||
pattern,
|
||||
path: path.substring(0, 100),
|
||||
})
|
||||
throw new Error('Path contains invalid path traversal sequences')
|
||||
}
|
||||
}
|
||||
|
||||
const segments = sanitized.split(/[/\\]/)
|
||||
for (const segment of segments) {
|
||||
if (segment === '..') {
|
||||
logger.warn('Path traversal attempt detected (.. as path segment)', {
|
||||
path: path.substring(0, 100),
|
||||
})
|
||||
throw new Error('Path contains invalid path traversal sequences')
|
||||
}
|
||||
}
|
||||
|
||||
return sanitized
|
||||
}
|
||||
|
||||
|
||||
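The two exported helpers above behave quite differently: `sanitizeCommand` strips control characters but only logs suspicious shell constructs, while `sanitizePath` actively throws on traversal attempts. A short usage sketch (the import path is an assumption for illustration):

```typescript
import { sanitizeCommand, sanitizePath } from '@/tools/ssh/utils' // assumed module path

sanitizeCommand('ls -la /var/log | grep error\0')
// -> 'ls -la /var/log | grep error'
// Null bytes are removed; pipes stay intact, and patterns like $(...) are only
// logged as warnings rather than stripped.

sanitizePath('/var/www/uploads/report.pdf') // returned unchanged

sanitizePath('../../etc/passwd')       // throws: '../' traversal sequence detected
sanitizePath('uploads/%2e%2e%2fetc')   // throws: URL-encoded traversal detected
```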
@@ -1,97 +0,0 @@
|
||||
import { db } from '@sim/db'
|
||||
import { userStats, workflow } from '@sim/db/schema'
|
||||
import { eq, sql } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
|
||||
const logger = createLogger('WorkflowStatsAPI')
|
||||
|
||||
const queryParamsSchema = z.object({
|
||||
runs: z.coerce.number().int().min(1).max(100).default(1),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
|
||||
const { id } = await params
|
||||
const searchParams = request.nextUrl.searchParams
|
||||
|
||||
const validation = queryParamsSchema.safeParse({
|
||||
runs: searchParams.get('runs'),
|
||||
})
|
||||
|
||||
if (!validation.success) {
|
||||
logger.error(`Invalid query parameters: ${validation.error.message}`)
|
||||
return NextResponse.json(
|
||||
{
|
||||
error:
|
||||
validation.error.errors[0]?.message ||
|
||||
'Invalid number of runs. Must be between 1 and 100.',
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const { runs } = validation.data
|
||||
|
||||
try {
|
||||
const [workflowRecord] = await db.select().from(workflow).where(eq(workflow.id, id)).limit(1)
|
||||
|
||||
if (!workflowRecord) {
|
||||
return NextResponse.json({ error: `Workflow ${id} not found` }, { status: 404 })
|
||||
}
|
||||
|
||||
try {
|
||||
await db
|
||||
.update(workflow)
|
||||
.set({
|
||||
runCount: workflowRecord.runCount + runs,
|
||||
lastRunAt: new Date(),
|
||||
})
|
||||
.where(eq(workflow.id, id))
|
||||
} catch (error) {
|
||||
logger.error('Error updating workflow runCount:', error)
|
||||
throw error
|
||||
}
|
||||
|
||||
try {
|
||||
const userStatsRecords = await db
|
||||
.select()
|
||||
.from(userStats)
|
||||
.where(eq(userStats.userId, workflowRecord.userId))
|
||||
|
||||
if (userStatsRecords.length === 0) {
|
||||
await db.insert(userStats).values({
|
||||
id: crypto.randomUUID(),
|
||||
userId: workflowRecord.userId,
|
||||
totalManualExecutions: 0,
|
||||
totalApiCalls: 0,
|
||||
totalWebhookTriggers: 0,
|
||||
totalScheduledExecutions: 0,
|
||||
totalChatExecutions: 0,
|
||||
totalTokensUsed: 0,
|
||||
totalCost: '0.00',
|
||||
lastActive: sql`now()`,
|
||||
})
|
||||
} else {
|
||||
await db
|
||||
.update(userStats)
|
||||
.set({
|
||||
lastActive: sql`now()`,
|
||||
})
|
||||
.where(eq(userStats.userId, workflowRecord.userId))
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error(`Error ensuring userStats for userId ${workflowRecord.userId}:`, error)
|
||||
// Don't rethrow - we want to continue even if this fails
|
||||
}
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
runsAdded: runs,
|
||||
newTotal: workflowRecord.runCount + runs,
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Error updating workflow stats:', error)
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
@@ -1,4 +1,5 @@
|
||||
import { env, getEnv } from '../config/env'
|
||||
import { isDev } from '../config/feature-flags'
|
||||
|
||||
/**
|
||||
* Content Security Policy (CSP) configuration builder
|
||||
@@ -79,10 +80,16 @@ export const buildTimeCSPDirectives: CSPDirectives = {
|
||||
'connect-src': [
|
||||
"'self'",
|
||||
env.NEXT_PUBLIC_APP_URL || '',
|
||||
env.OLLAMA_URL || 'http://localhost:11434',
|
||||
env.NEXT_PUBLIC_SOCKET_URL || 'http://localhost:3002',
|
||||
env.NEXT_PUBLIC_SOCKET_URL?.replace('http://', 'ws://').replace('https://', 'wss://') ||
|
||||
'ws://localhost:3002',
|
||||
// Only include localhost fallbacks in development mode
|
||||
...(env.OLLAMA_URL ? [env.OLLAMA_URL] : isDev ? ['http://localhost:11434'] : []),
|
||||
...(env.NEXT_PUBLIC_SOCKET_URL
|
||||
? [
|
||||
env.NEXT_PUBLIC_SOCKET_URL,
|
||||
env.NEXT_PUBLIC_SOCKET_URL.replace('http://', 'ws://').replace('https://', 'wss://'),
|
||||
]
|
||||
: isDev
|
||||
? ['http://localhost:3002', 'ws://localhost:3002']
|
||||
: []),
|
||||
'https://api.browser-use.com',
|
||||
'https://api.exa.ai',
|
||||
'https://api.firecrawl.dev',
|
||||
@@ -128,11 +135,16 @@ export function buildCSPString(directives: CSPDirectives): string {
|
||||
* This maintains compatibility with existing inline scripts while fixing Docker env var issues
|
||||
*/
|
||||
export function generateRuntimeCSP(): string {
|
||||
const socketUrl = getEnv('NEXT_PUBLIC_SOCKET_URL') || 'http://localhost:3002'
|
||||
const socketWsUrl =
|
||||
socketUrl.replace('http://', 'ws://').replace('https://', 'wss://') || 'ws://localhost:3002'
|
||||
const appUrl = getEnv('NEXT_PUBLIC_APP_URL') || ''
|
||||
const ollamaUrl = getEnv('OLLAMA_URL') || 'http://localhost:11434'
|
||||
|
||||
// Only include localhost URLs in development or when explicitly configured
|
||||
const socketUrl = getEnv('NEXT_PUBLIC_SOCKET_URL') || (isDev ? 'http://localhost:3002' : '')
|
||||
const socketWsUrl = socketUrl
|
||||
? socketUrl.replace('http://', 'ws://').replace('https://', 'wss://')
|
||||
: isDev
|
||||
? 'ws://localhost:3002'
|
||||
: ''
|
||||
const ollamaUrl = getEnv('OLLAMA_URL') || (isDev ? 'http://localhost:11434' : '')
|
||||
|
||||
const brandLogoDomains = getHostnameFromUrl(getEnv('NEXT_PUBLIC_BRAND_LOGO_URL'))
|
||||
const brandFaviconDomains = getHostnameFromUrl(getEnv('NEXT_PUBLIC_BRAND_FAVICON_URL'))
|
||||
|
||||
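The net effect of the CSP changes above is that localhost fallbacks for the socket server and Ollama are only emitted in development; in production, unset env vars simply drop those sources. A condensed restatement (the function name and argument list here are illustrative, not the repo's actual API):

```typescript
function runtimeConnectSrcSources(isDev: boolean, socketUrl?: string, ollamaUrl?: string): string[] {
  const sources = ["'self'"]

  const socket = socketUrl || (isDev ? 'http://localhost:3002' : '')
  if (socket) {
    sources.push(socket, socket.replace('http://', 'ws://').replace('https://', 'wss://'))
  }

  const ollama = ollamaUrl || (isDev ? 'http://localhost:11434' : '')
  if (ollama) sources.push(ollama)

  return sources
}

// Production, nothing configured:  ["'self'"]
// Development, nothing configured: ["'self'", 'http://localhost:3002', 'ws://localhost:3002', 'http://localhost:11434']
```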
apps/sim/lib/templates/permissions.ts (new file, 121 lines)
@@ -0,0 +1,121 @@
|
||||
import { db } from '@sim/db'
|
||||
import { member, templateCreators, templates, user } from '@sim/db/schema'
|
||||
import { and, eq, or } from 'drizzle-orm'
|
||||
|
||||
export type CreatorPermissionLevel = 'member' | 'admin'
|
||||
|
||||
/**
|
||||
* Verifies if a user is a super user.
|
||||
*
|
||||
* @param userId - The ID of the user to check
|
||||
* @returns Object with isSuperUser boolean
|
||||
*/
|
||||
export async function verifySuperUser(userId: string): Promise<{ isSuperUser: boolean }> {
|
||||
const [currentUser] = await db.select().from(user).where(eq(user.id, userId)).limit(1)
|
||||
return { isSuperUser: currentUser?.isSuperUser || false }
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetches a template and verifies the user has permission to modify it.
|
||||
* Combines template existence check and creator permission check in one call.
|
||||
*
|
||||
* @param templateId - The ID of the template
|
||||
* @param userId - The ID of the user to check
|
||||
* @param requiredLevel - The permission level required ('member' or 'admin')
|
||||
* @returns Object with template data if authorized, or error information
|
||||
*/
|
||||
export async function verifyTemplateOwnership(
|
||||
templateId: string,
|
||||
userId: string,
|
||||
requiredLevel: CreatorPermissionLevel = 'admin'
|
||||
): Promise<{
|
||||
authorized: boolean
|
||||
template?: typeof templates.$inferSelect
|
||||
error?: string
|
||||
status?: number
|
||||
}> {
|
||||
const [template] = await db.select().from(templates).where(eq(templates.id, templateId)).limit(1)
|
||||
|
||||
if (!template) {
|
||||
return { authorized: false, error: 'Template not found', status: 404 }
|
||||
}
|
||||
|
||||
if (!template.creatorId) {
|
||||
return { authorized: false, error: 'Access denied', status: 403 }
|
||||
}
|
||||
|
||||
const { hasPermission, error } = await verifyCreatorPermission(
|
||||
userId,
|
||||
template.creatorId,
|
||||
requiredLevel
|
||||
)
|
||||
|
||||
if (!hasPermission) {
|
||||
return { authorized: false, error: error || 'Access denied', status: 403 }
|
||||
}
|
||||
|
||||
return { authorized: true, template }
|
||||
}

/**
 * Verifies if a user has permission to act on behalf of a creator profile.
 *
 * @param userId - The ID of the user to check
 * @param creatorId - The ID of the creator profile
 * @param requiredLevel - The permission level required ('member' for any org member, 'admin' for admin/owner only)
 * @returns Object with hasPermission boolean and optional error message
 */
export async function verifyCreatorPermission(
  userId: string,
  creatorId: string,
  requiredLevel: CreatorPermissionLevel = 'admin'
): Promise<{ hasPermission: boolean; error?: string }> {
  const creatorProfile = await db
    .select()
    .from(templateCreators)
    .where(eq(templateCreators.id, creatorId))
    .limit(1)

  if (creatorProfile.length === 0) {
    return { hasPermission: false, error: 'Creator profile not found' }
  }

  const creator = creatorProfile[0]

  if (creator.referenceType === 'user') {
    const hasPermission = creator.referenceId === userId
    return {
      hasPermission,
      error: hasPermission ? undefined : 'You do not have permission to use this creator profile',
    }
  }

  if (creator.referenceType === 'organization') {
    const membershipConditions = [
      eq(member.userId, userId),
      eq(member.organizationId, creator.referenceId),
    ]

    if (requiredLevel === 'admin') {
      membershipConditions.push(or(eq(member.role, 'admin'), eq(member.role, 'owner'))!)
    }

    const membership = await db
      .select()
      .from(member)
      .where(and(...membershipConditions))
      .limit(1)

    if (membership.length === 0) {
      const error =
        requiredLevel === 'admin'
          ? 'You must be an admin or owner of the organization to perform this action'
          : 'You must be a member of the organization to use its creator profile'
      return { hasPermission: false, error }
    }

    return { hasPermission: true }
  }

  return { hasPermission: false, error: 'Unknown creator profile type' }
}
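
A short usage sketch for the organization path (caller variables here are hypothetical): a publish endpoint could require only membership, while profile edits require 'admin'.

// Assumes: import { NextResponse } from 'next/server'
const { hasPermission, error } = await verifyCreatorPermission(userId, creatorId, 'member')
if (!hasPermission) {
  return NextResponse.json({ error: error ?? 'Access denied' }, { status: 403 })
}
// Safe to publish the template under this creator profile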

@@ -4,6 +4,7 @@ import { and, eq, sql } from 'drizzle-orm'
import { nanoid } from 'nanoid'
import Parser from 'rss-parser'
import { pollingIdempotency } from '@/lib/core/idempotency/service'
import { createPinnedUrl, validateUrlWithDNS } from '@/lib/core/security/input-validation'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { createLogger } from '@/lib/logs/console/logger'

@@ -156,7 +157,7 @@ export async function pollRssWebhooks() {
      const { feed, items: newItems } = await fetchNewRssItems(config, requestId)

      if (!newItems.length) {
        await updateWebhookConfig(webhookId, config, now.toISOString(), [])
        await updateWebhookConfig(webhookId, now.toISOString(), [])
        await markWebhookSuccess(webhookId)
        logger.info(`[${requestId}] No new items found for webhook ${webhookId}`)
        successCount++
@@ -172,12 +173,11 @@ export async function pollRssWebhooks() {
        requestId
      )

      // Collect guids from processed items
      const newGuids = newItems
        .map((item) => item.guid || item.link || '')
        .filter((guid) => guid.length > 0)

      await updateWebhookConfig(webhookId, config, now.toISOString(), newGuids)
      await updateWebhookConfig(webhookId, now.toISOString(), newGuids)

      if (itemFailedCount > 0 && processedCount === 0) {
        await markWebhookFailed(webhookId)
@@ -245,15 +245,36 @@ async function fetchNewRssItems(
  try {
    logger.debug(`[${requestId}] Fetching RSS feed: ${config.feedUrl}`)

    // Parse the RSS feed
    const feed = await parser.parseURL(config.feedUrl)
    const urlValidation = await validateUrlWithDNS(config.feedUrl, 'feedUrl')
    if (!urlValidation.isValid) {
      logger.error(`[${requestId}] Invalid RSS feed URL: ${urlValidation.error}`)
      throw new Error(`Invalid RSS feed URL: ${urlValidation.error}`)
    }

    const pinnedUrl = createPinnedUrl(config.feedUrl, urlValidation.resolvedIP!)

    const response = await fetch(pinnedUrl, {
      headers: {
        Host: urlValidation.originalHostname!,
        'User-Agent': 'SimStudio/1.0 RSS Poller',
        Accept: 'application/rss+xml, application/xml, text/xml, */*',
      },
      signal: AbortSignal.timeout(30000),
    })

    if (!response.ok) {
      throw new Error(`Failed to fetch RSS feed: ${response.status} ${response.statusText}`)
    }

    const xmlContent = await response.text()

    const feed = await parser.parseString(xmlContent)

    if (!feed.items || !feed.items.length) {
      logger.debug(`[${requestId}] No items in feed`)
      return { feed: feed as RssFeed, items: [] }
    }

    // Filter new items based on timestamp and guids
    const lastCheckedTime = config.lastCheckedTimestamp
      ? new Date(config.lastCheckedTimestamp)
      : null
@@ -262,12 +283,10 @@ async function fetchNewRssItems(
    const newItems = feed.items.filter((item) => {
      const itemGuid = item.guid || item.link || ''

      // Check if we've already seen this item by guid
      if (itemGuid && lastSeenGuids.has(itemGuid)) {
        return false
      }

      // Check if the item is newer than our last check
      if (lastCheckedTime && item.isoDate) {
        const itemDate = new Date(item.isoDate)
        if (itemDate <= lastCheckedTime) {
@@ -278,14 +297,12 @@ async function fetchNewRssItems(
      return true
    })

    // Sort by date, newest first
    newItems.sort((a, b) => {
      const dateA = a.isoDate ? new Date(a.isoDate).getTime() : 0
      const dateB = b.isoDate ? new Date(b.isoDate).getTime() : 0
      return dateB - dateA
    })

    // Limit to 25 items per poll to prevent overwhelming the system
    const limitedItems = newItems.slice(0, 25)

    logger.info(
@@ -383,17 +400,11 @@ async function processRssItems(
  return { processedCount, failedCount }
}

async function updateWebhookConfig(
  webhookId: string,
  _config: RssWebhookConfig,
  timestamp: string,
  newGuids: string[]
) {
async function updateWebhookConfig(webhookId: string, timestamp: string, newGuids: string[]) {
  try {
    const result = await db.select().from(webhook).where(eq(webhook.id, webhookId))
    const existingConfig = (result[0]?.providerConfig as Record<string, any>) || {}

    // Merge new guids with existing ones, keeping only the most recent
    const existingGuids = existingConfig.lastSeenGuids || []
    const allGuids = [...newGuids, ...existingGuids].slice(0, MAX_GUIDS_TO_TRACK)

@@ -2,6 +2,7 @@ import { db } from '@sim/db'
import { account, webhook } from '@sim/db/schema'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { createPinnedUrl, validateUrlWithDNS } from '@/lib/core/security/input-validation'
import { createLogger } from '@/lib/logs/console/logger'
import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'

@@ -18,7 +19,6 @@ export async function handleWhatsAppVerification(
  challenge: string | null
): Promise<NextResponse | null> {
  if (mode && token && challenge) {
    // This is a WhatsApp verification request
    logger.info(`[${requestId}] WhatsApp verification request received for path: ${path}`)

    if (mode !== 'subscribe') {
@@ -26,13 +26,11 @@ export async function handleWhatsAppVerification(
      return new NextResponse('Invalid mode', { status: 400 })
    }

    // Find all active WhatsApp webhooks
    const webhooks = await db
      .select()
      .from(webhook)
      .where(and(eq(webhook.provider, 'whatsapp'), eq(webhook.isActive, true)))

    // Check if any webhook has a matching verification token
    for (const wh of webhooks) {
      const providerConfig = (wh.providerConfig as Record<string, any>) || {}
      const verificationToken = providerConfig.verificationToken
@@ -44,7 +42,6 @@ export async function handleWhatsAppVerification(

      if (token === verificationToken) {
        logger.info(`[${requestId}] WhatsApp verification successful for webhook ${wh.id}`)
        // Return ONLY the challenge as plain text (exactly as WhatsApp expects)
        return new NextResponse(challenge, {
          status: 200,
          headers: {
@@ -72,6 +69,52 @@ export function handleSlackChallenge(body: any): NextResponse | null {
  return null
}

/**
 * Fetches a URL with DNS pinning to prevent DNS rebinding attacks
 * @param url - The URL to fetch
 * @param accessToken - Authorization token (optional for pre-signed URLs)
 * @param requestId - Request ID for logging
 * @returns The fetch Response or null if validation fails
 */
async function fetchWithDNSPinning(
  url: string,
  accessToken: string,
  requestId: string
): Promise<Response | null> {
  try {
    const urlValidation = await validateUrlWithDNS(url, 'contentUrl')
    if (!urlValidation.isValid) {
      logger.warn(`[${requestId}] Invalid content URL: ${urlValidation.error}`, {
        url: url.substring(0, 100),
      })
      return null
    }

    const pinnedUrl = createPinnedUrl(url, urlValidation.resolvedIP!)

    const headers: Record<string, string> = {
      Host: urlValidation.originalHostname!,
    }

    if (accessToken) {
      headers.Authorization = `Bearer ${accessToken}`
    }

    const response = await fetch(pinnedUrl, {
      headers,
      redirect: 'follow',
    })

    return response
  } catch (error) {
    logger.error(`[${requestId}] Error fetching URL with DNS pinning`, {
      error: error instanceof Error ? error.message : String(error),
      url: url.substring(0, 100),
    })
    return null
  }
}
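
A minimal caller-side sketch (the attachment URL variable and fallback handling are hypothetical; the hunks below show the real call sites):

const res = await fetchWithDNSPinning(attachmentUrl, accessToken, requestId)
if (!res) {
  // URL failed validation (e.g. resolved to a private address) or the fetch threw; skip the attachment
} else if (res.ok) {
  const data = Buffer.from(await res.arrayBuffer())
  // ...use data
}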

/**
 * Format Microsoft Teams Graph change notification
 */
@@ -90,7 +133,6 @@ async function formatTeamsGraphNotification(
  const resource = notification.resource || ''
  const subscriptionId = notification.subscriptionId || ''

  // Extract chatId and messageId from resource path
  let chatId: string | null = null
  let messageId: string | null = null

@@ -159,7 +201,6 @@ async function formatTeamsGraphNotification(
      []
  let accessToken: string | null = null

  // Teams chat subscriptions require credentials
  if (!credentialId) {
    logger.error('Missing credentialId for Teams chat subscription', {
      chatId: resolvedChatId,
@@ -170,11 +211,9 @@ async function formatTeamsGraphNotification(
    })
  } else {
    try {
      // Get userId from credential
      const rows = await db.select().from(account).where(eq(account.id, credentialId)).limit(1)
      if (rows.length === 0) {
        logger.error('Teams credential not found', { credentialId, chatId: resolvedChatId })
        // Continue without message data
      } else {
        const effectiveUserId = rows[0].userId
        accessToken = await refreshAccessTokenIfNeeded(
@@ -207,19 +246,20 @@ async function formatTeamsGraphNotification(

          if (contentUrl.includes('sharepoint.com') || contentUrl.includes('onedrive')) {
            try {
              const directRes = await fetch(contentUrl, {
                headers: { Authorization: `Bearer ${accessToken}` },
                redirect: 'follow',
              })
              const directRes = await fetchWithDNSPinning(
                contentUrl,
                accessToken,
                'teams-attachment'
              )

              if (directRes.ok) {
              if (directRes?.ok) {
                const arrayBuffer = await directRes.arrayBuffer()
                buffer = Buffer.from(arrayBuffer)
                mimeType =
                  directRes.headers.get('content-type') ||
                  contentTypeHint ||
                  'application/octet-stream'
              } else {
              } else if (directRes) {
                const encodedUrl = Buffer.from(contentUrl)
                  .toString('base64')
                  .replace(/\+/g, '-')
@@ -310,9 +350,13 @@ async function formatTeamsGraphNotification(
              const downloadUrl = metadata['@microsoft.graph.downloadUrl']

              if (downloadUrl) {
                const downloadRes = await fetch(downloadUrl)
                const downloadRes = await fetchWithDNSPinning(
                  downloadUrl,
                  '', // downloadUrl is a pre-signed URL, no auth needed
                  'teams-onedrive-download'
                )

                if (downloadRes.ok) {
                if (downloadRes?.ok) {
                  const arrayBuffer = await downloadRes.arrayBuffer()
                  buffer = Buffer.from(arrayBuffer)
                  mimeType =
@@ -336,10 +380,12 @@ async function formatTeamsGraphNotification(
              }
            } else {
              try {
                const ares = await fetch(contentUrl, {
                  headers: { Authorization: `Bearer ${accessToken}` },
                })
                if (ares.ok) {
                const ares = await fetchWithDNSPinning(
                  contentUrl,
                  accessToken,
                  'teams-attachment-generic'
                )
                if (ares?.ok) {
                  const arrayBuffer = await ares.arrayBuffer()
                  buffer = Buffer.from(arrayBuffer)
                  mimeType =
@@ -377,7 +423,6 @@ async function formatTeamsGraphNotification(
      }
    }

  // If no message was fetched, return minimal data
  if (!message) {
    logger.warn('No message data available for Teams notification', {
      chatId: resolvedChatId,
@@ -413,8 +458,6 @@ async function formatTeamsGraphNotification(
    }
  }

  // Extract data from message - we know it exists now
  // body.content is the HTML/text content, summary is a plain text preview (max 280 chars)
  const messageText = message.body?.content || ''
  const from = message.from?.user || {}
  const createdAt = message.createdDateTime || ''

@@ -1,10 +1,9 @@
import { db } from '@sim/db'
import { permissions, workflow as workflowTable, workspace } from '@sim/db/schema'
import { permissions, userStats, workflow as workflowTable, workspace } from '@sim/db/schema'
import type { InferSelectModel } from 'drizzle-orm'
import { and, eq } from 'drizzle-orm'
import { NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { createLogger } from '@/lib/logs/console/logger'
import type { PermissionType } from '@/lib/workspaces/permissions/utils'
import type { ExecutionResult } from '@/executor/types'
@@ -93,17 +92,44 @@ export async function updateWorkflowRunCounts(workflowId: string, runs = 1) {
      throw new Error(`Workflow ${workflowId} not found`)
    }

    // Use the API to update stats
    const response = await fetch(`${getBaseUrl()}/api/workflows/${workflowId}/stats?runs=${runs}`, {
      method: 'POST',
    })
    await db
      .update(workflowTable)
      .set({
        runCount: workflow.runCount + runs,
        lastRunAt: new Date(),
      })
      .where(eq(workflowTable.id, workflowId))

    if (!response.ok) {
      const error = await response.json()
      throw new Error(error.error || 'Failed to update workflow stats')
    try {
      const existing = await db
        .select()
        .from(userStats)
        .where(eq(userStats.userId, workflow.userId))
        .limit(1)

      if (existing.length === 0) {
        logger.warn('User stats record not found - should be created during onboarding', {
          userId: workflow.userId,
          workflowId,
        })
      } else {
        await db
          .update(userStats)
          .set({
            lastActive: new Date(),
          })
          .where(eq(userStats.userId, workflow.userId))
      }
    } catch (error) {
      logger.error(`Error updating userStats lastActive for userId ${workflow.userId}:`, error)
      // Don't rethrow - we want to continue even if this fails
    }

    return response.json()
    return {
      success: true,
      runsAdded: runs,
      newTotal: workflow.runCount + runs,
    }
  } catch (error) {
    logger.error(`Error updating workflow stats for ${workflowId}`, error)
    throw error
@@ -121,7 +147,6 @@ function sanitizeToolsForComparison(tools: any[] | undefined): any[] {
  }

  return tools.map((tool) => {
    // Remove UI-only field: isExpanded
    const { isExpanded, ...cleanTool } = tool
    return cleanTool
  })
@@ -138,7 +163,6 @@ function sanitizeInputFormatForComparison(inputFormat: any[] | undefined): any[]
  }

  return inputFormat.map((field) => {
    // Remove test-only field: value (used only for manual testing)
    const { value, collapsed, ...cleanField } = field
    return cleanField
  })