Mirror of https://github.com/simstudioai/sim.git
path security vuln
@@ -6,7 +6,7 @@ import { createLogger } from '@sim/logger'
 import binaryExtensionsList from 'binary-extensions'
 import { type NextRequest, NextResponse } from 'next/server'
 import { checkHybridAuth } from '@/lib/auth/hybrid'
-import { validateUrlWithDNS } from '@/lib/core/security/input-validation'
+import { secureFetchWithPinnedIP, validateUrlWithDNS } from '@/lib/core/security/input-validation'
 import { isSupportedFileType, parseFile } from '@/lib/file-parsers'
 import { isUsingCloudStorage, type StorageContext, StorageService } from '@/lib/uploads'
 import { uploadExecutionFile } from '@/lib/uploads/contexts/execution'
@@ -349,11 +349,8 @@ async function handleExternalUrl(
     }
   }

-  // Use the original URL after DNS validation passes.
-  // DNS pinning (connecting to IP directly) breaks TLS SNI for HTTPS.
-  // Since we've validated the IP is not private/reserved, using the original URL is safe.
-  const response = await fetch(url, {
-    signal: AbortSignal.timeout(DOWNLOAD_TIMEOUT_MS),
+  const response = await secureFetchWithPinnedIP(url, urlValidation.resolvedIP!, {
+    timeout: DOWNLOAD_TIMEOUT_MS,
   })
   if (!response.ok) {
     throw new Error(`Failed to fetch URL: ${response.status} ${response.statusText}`)
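For context, a minimal sketch of the call pattern this hunk relies on. It is hedged: the `urlValidation` value referenced above is assumed to come from `validateUrlWithDNS` earlier in `handleExternalUrl` (outside the hunk), the result shape (`isValid`, `error`, `resolvedIP`) is inferred from the call sites in this commit, and `fetchExternal` is an illustrative name, not code from the repository.

import { secureFetchWithPinnedIP, validateUrlWithDNS } from '@/lib/core/security/input-validation'

// Illustrative helper, not part of the commit: validate the URL (and resolve its IP)
// first, then fetch through the pinned-IP helper so a second DNS answer cannot
// redirect the request to an internal address (DNS rebinding).
async function fetchExternal(url: string, timeoutMs: number): Promise<Buffer> {
  const urlValidation = await validateUrlWithDNS(url, 'url')
  if (!urlValidation.isValid) {
    throw new Error(`Invalid URL: ${urlValidation.error}`)
  }

  const response = await secureFetchWithPinnedIP(url, urlValidation.resolvedIP!, {
    timeout: timeoutMs,
  })
  if (!response.ok) {
    throw new Error(`Failed to fetch URL: ${response.status} ${response.statusText}`)
  }
  return Buffer.from(await response.arrayBuffer())
}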
@@ -5,7 +5,7 @@ import { z } from 'zod'
 import { checkHybridAuth } from '@/lib/auth/hybrid'
 import { generateInternalToken } from '@/lib/auth/internal'
 import { isDev } from '@/lib/core/config/feature-flags'
-import { validateUrlWithDNS } from '@/lib/core/security/input-validation'
+import { secureFetchWithPinnedIP, validateUrlWithDNS } from '@/lib/core/security/input-validation'
 import { generateRequestId } from '@/lib/core/utils/request'
 import { getBaseUrl } from '@/lib/core/utils/urls'
 import { executeTool } from '@/tools'
@@ -211,15 +211,15 @@ export async function GET(request: Request) {
   logger.info(`[${requestId}] Proxying ${method} request to: ${targetUrl}`)

   try {
-    // Use the original URL after DNS validation passes.
-    // DNS pinning breaks TLS SNI for HTTPS; validation already ensures IP is safe.
-    const response = await fetch(targetUrl, {
+    // Use secure fetch with IP pinning to prevent DNS rebinding attacks
+    // This uses the pre-resolved IP while preserving hostname for TLS SNI
+    const response = await secureFetchWithPinnedIP(targetUrl, urlValidation.resolvedIP!, {
       method: method,
       headers: {
         ...getProxyHeaders(),
         ...customHeaders,
       },
-      body: body || undefined,
+      body: body,
     })

     const contentType = response.headers.get('content-type') || ''
@@ -232,8 +232,8 @@ export async function GET(request: Request) {
     }

     const errorMessage = !response.ok
-      ? data && typeof data === 'object' && data.error
-        ? `${data.error.message || JSON.stringify(data.error)}`
+      ? data && typeof data === 'object' && (data as { error?: { message?: string } }).error
+        ? `${(data as { error: { message?: string } }).error.message || JSON.stringify((data as { error: unknown }).error)}`
         : response.statusText || `HTTP error ${response.status}`
       : undefined

@@ -245,7 +245,7 @@ export async function GET(request: Request) {
       success: response.ok,
       status: response.status,
       statusText: response.statusText,
-      headers: Object.fromEntries(response.headers.entries()),
+      headers: response.headers.toRecord(),
       data,
       error: errorMessage,
     })
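The headers swap above follows from the new return type: secureFetchWithPinnedIP resolves to a SecureFetchResponse whose headers field is the commit's own SecureFetchHeaders class (defined in input-validation.ts below), not a WHATWG Headers object, so Object.fromEntries(response.headers.entries()) no longer applies. A small usage sketch of that class; the literal header values are hypothetical:

// Sketch only: construct and read a SecureFetchHeaders instance.
const headers = new SecureFetchHeaders({ 'Content-Type': 'application/json', 'X-Request-Id': 'abc' })

headers.get('content-type')   // 'application/json' — lookups are case-insensitive
headers.toRecord()            // { 'content-type': 'application/json', 'x-request-id': 'abc' }

// The class is also iterable, so per-header processing still works:
for (const [name, value] of headers) {
  console.log(name, value)
}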
@@ -1,4 +1,6 @@
 import dns from 'dns/promises'
+import http from 'http'
+import https from 'https'
 import { createLogger } from '@sim/logger'

 const logger = createLogger('InputValidation')
@@ -898,6 +900,139 @@ export function createPinnedUrl(originalUrl: string, resolvedIP: string): string
   return `${parsed.protocol}//${host}${port}${parsed.pathname}${parsed.search}`
 }

+export interface SecureFetchOptions {
+  method?: string
+  headers?: Record<string, string>
+  body?: string
+  timeout?: number
+}
+
+export class SecureFetchHeaders {
+  private headers: Map<string, string>
+
+  constructor(headers: Record<string, string>) {
+    this.headers = new Map(Object.entries(headers).map(([k, v]) => [k.toLowerCase(), v]))
+  }
+
+  get(name: string): string | null {
+    return this.headers.get(name.toLowerCase()) ?? null
+  }
+
+  toRecord(): Record<string, string> {
+    const record: Record<string, string> = {}
+    for (const [key, value] of this.headers) {
+      record[key] = value
+    }
+    return record
+  }
+
+  [Symbol.iterator]() {
+    return this.headers.entries()
+  }
+}
+
+export interface SecureFetchResponse {
+  ok: boolean
+  status: number
+  statusText: string
+  headers: SecureFetchHeaders
+  text: () => Promise<string>
+  json: () => Promise<unknown>
+  arrayBuffer: () => Promise<ArrayBuffer>
+}
+
+/**
+ * Performs a fetch with IP pinning to prevent DNS rebinding attacks.
+ * Uses the pre-resolved IP address while preserving the original hostname for TLS SNI.
+ */
+export function secureFetchWithPinnedIP(
+  url: string,
+  resolvedIP: string,
+  options: SecureFetchOptions = {}
+): Promise<SecureFetchResponse> {
+  return new Promise((resolve, reject) => {
+    const parsed = new URL(url)
+    const isHttps = parsed.protocol === 'https:'
+    const defaultPort = isHttps ? 443 : 80
+    const port = parsed.port ? Number.parseInt(parsed.port, 10) : defaultPort
+
+    const isIPv6 = resolvedIP.includes(':')
+    const family = isIPv6 ? 6 : 4
+
+    const agentOptions = {
+      lookup: (
+        _hostname: string,
+        _options: unknown,
+        callback: (err: NodeJS.ErrnoException | null, address: string, family: number) => void
+      ) => {
+        callback(null, resolvedIP, family)
+      },
+    }
+
+    const agent = isHttps
+      ? new https.Agent(agentOptions as https.AgentOptions)
+      : new http.Agent(agentOptions as http.AgentOptions)
+
+    const requestOptions: http.RequestOptions = {
+      hostname: parsed.hostname,
+      port,
+      path: parsed.pathname + parsed.search,
+      method: options.method || 'GET',
+      headers: options.headers || {},
+      agent,
+      timeout: options.timeout || 30000,
+    }
+
+    const protocol = isHttps ? https : http
+    const req = protocol.request(requestOptions, (res) => {
+      const chunks: Buffer[] = []
+
+      res.on('data', (chunk: Buffer) => chunks.push(chunk))
+      res.on('end', () => {
+        const bodyBuffer = Buffer.concat(chunks)
+        const body = bodyBuffer.toString('utf-8')
+        const headersRecord: Record<string, string> = {}
+        for (const [key, value] of Object.entries(res.headers)) {
+          if (typeof value === 'string') {
+            headersRecord[key.toLowerCase()] = value
+          } else if (Array.isArray(value)) {
+            headersRecord[key.toLowerCase()] = value.join(', ')
+          }
+        }
+
+        resolve({
+          ok: res.statusCode !== undefined && res.statusCode >= 200 && res.statusCode < 300,
+          status: res.statusCode || 0,
+          statusText: res.statusMessage || '',
+          headers: new SecureFetchHeaders(headersRecord),
+          text: async () => body,
+          json: async () => JSON.parse(body),
+          arrayBuffer: async () =>
+            bodyBuffer.buffer.slice(
+              bodyBuffer.byteOffset,
+              bodyBuffer.byteOffset + bodyBuffer.byteLength
+            ),
+        })
+      })
+    })
+
+    req.on('error', (error) => {
+      reject(error)
+    })
+
+    req.on('timeout', () => {
+      req.destroy()
+      reject(new Error('Request timeout'))
+    })
+
+    if (options.body) {
+      req.write(options.body)
+    }
+
+    req.end()
+  })
+}
+
 /**
  * Validates an Airtable ID (base, table, or webhook ID)
  *
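The pinning mechanism in the new helper is the custom `lookup` on the http/https Agent: Node calls `lookup` to resolve the hostname before connecting, and because it always returns the pre-resolved IP, the TCP connection goes to that address while `hostname` stays the original value, so the Host header, TLS SNI, and certificate verification are unaffected. Two consequences visible from the implementation: the helper does not follow redirects (which is why `redirect: 'follow'` is dropped at the webhook call site below) and it buffers the whole response body in memory. A hedged usage sketch follows; the feed URL is illustrative and not taken from the commit:

// Sketch only: validate first, then fetch through the pinned IP.
async function pollFeed(): Promise<void> {
  const feedUrl = 'https://feeds.example.com/rss.xml'
  const validation = await validateUrlWithDNS(feedUrl, 'feedUrl')
  if (!validation.isValid) return

  const res = await secureFetchWithPinnedIP(feedUrl, validation.resolvedIP!, {
    method: 'GET',
    headers: { Accept: 'application/rss+xml' },
    timeout: 10000,
  })
  // The socket connects to validation.resolvedIP, but the Host header and TLS SNI
  // still carry feeds.example.com, so certificate checks behave as usual.
  if (res.ok) {
    console.log(res.status, res.headers.get('content-type'))
  }
}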
@@ -1,6 +1,7 @@
 'use server'

 import type { Logger } from '@sim/logger'
+import { secureFetchWithPinnedIP, validateUrlWithDNS } from '@/lib/core/security/input-validation'
 import type { StorageContext } from '@/lib/uploads'
 import { isExecutionFile } from '@/lib/uploads/contexts/execution/utils'
 import { inferContextFromKey } from '@/lib/uploads/utils/file-utils'
@@ -9,38 +10,32 @@ import type { UserFile } from '@/executor/types'
 /**
  * Download a file from a URL (internal or external)
  * For internal URLs, uses direct storage access (server-side only)
- * For external URLs, uses HTTP fetch
+ * For external URLs, validates DNS/SSRF and uses secure fetch with IP pinning
  */
 export async function downloadFileFromUrl(fileUrl: string, timeoutMs = 180000): Promise<Buffer> {
   const { isInternalFileUrl } = await import('./file-utils')
   const { parseInternalFileUrl } = await import('./file-utils')
-  const controller = new AbortController()
-  const timeoutId = setTimeout(() => controller.abort(), timeoutMs)
-
-  try {
-    if (isInternalFileUrl(fileUrl)) {
-      const { key, context } = parseInternalFileUrl(fileUrl)
-      const { downloadFile } = await import('@/lib/uploads/core/storage-service')
-      const buffer = await downloadFile({ key, context })
-      clearTimeout(timeoutId)
-      return buffer
-    }
-
-    const response = await fetch(fileUrl, { signal: controller.signal })
-    clearTimeout(timeoutId)
-
-    if (!response.ok) {
-      throw new Error(`Failed to download file: ${response.statusText}`)
-    }
-
-    return Buffer.from(await response.arrayBuffer())
-  } catch (error) {
-    clearTimeout(timeoutId)
-    if (error instanceof Error && error.name === 'AbortError') {
-      throw new Error('File download timed out')
-    }
-    throw error
-  }
+
+  if (isInternalFileUrl(fileUrl)) {
+    const { key, context } = parseInternalFileUrl(fileUrl)
+    const { downloadFile } = await import('@/lib/uploads/core/storage-service')
+    return downloadFile({ key, context })
+  }
+
+  const urlValidation = await validateUrlWithDNS(fileUrl, 'fileUrl')
+  if (!urlValidation.isValid) {
+    throw new Error(`Invalid file URL: ${urlValidation.error}`)
+  }
+
+  const response = await secureFetchWithPinnedIP(fileUrl, urlValidation.resolvedIP!, {
+    timeout: timeoutMs,
+  })
+
+  if (!response.ok) {
+    throw new Error(`Failed to download file: ${response.statusText}`)
+  }
+
+  return Buffer.from(await response.arrayBuffer())
 }

 /**
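One behavioral consequence of this rewrite, inferred from the helper's implementation rather than stated in the commit: the AbortController and the explicit 'File download timed out' mapping are gone, so a stalled external download now rejects with the helper's own Error('Request timeout') on socket timeout. A hypothetical caller, for illustration only:

// Hypothetical caller, not part of the commit.
async function example(): Promise<void> {
  try {
    const buffer = await downloadFileFromUrl('https://example.com/report.pdf', 30_000)
    console.log(`downloaded ${buffer.length} bytes`)
  } catch (error) {
    // Socket timeouts now surface as Error('Request timeout') thrown by
    // secureFetchWithPinnedIP, not as an error named 'AbortError'.
    console.error(error)
  }
}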
@@ -5,7 +5,7 @@ import { and, eq, isNull, or, sql } from 'drizzle-orm'
 import { nanoid } from 'nanoid'
 import Parser from 'rss-parser'
 import { pollingIdempotency } from '@/lib/core/idempotency/service'
-import { validateUrlWithDNS } from '@/lib/core/security/input-validation'
+import { secureFetchWithPinnedIP, validateUrlWithDNS } from '@/lib/core/security/input-validation'
 import { getBaseUrl } from '@/lib/core/utils/urls'
 import { MAX_CONSECUTIVE_FAILURES } from '@/triggers/constants'

@@ -265,14 +265,12 @@ async function fetchNewRssItems(
     throw new Error(`Invalid RSS feed URL: ${urlValidation.error}`)
   }

-  // Use the original URL after DNS validation passes.
-  // DNS pinning breaks TLS SNI for HTTPS; validation already ensures IP is safe.
-  const response = await fetch(config.feedUrl, {
+  const response = await secureFetchWithPinnedIP(config.feedUrl, urlValidation.resolvedIP!, {
     headers: {
       'User-Agent': 'Sim/1.0 RSS Poller',
       Accept: 'application/rss+xml, application/xml, text/xml, */*',
     },
-    signal: AbortSignal.timeout(30000),
+    timeout: 30000,
   })

   if (!response.ok) {
@@ -3,7 +3,11 @@ import { account, webhook } from '@sim/db/schema'
 import { createLogger } from '@sim/logger'
 import { and, eq, isNull, or } from 'drizzle-orm'
 import { type NextRequest, NextResponse } from 'next/server'
-import { validateUrlWithDNS } from '@/lib/core/security/input-validation'
+import {
+  type SecureFetchResponse,
+  secureFetchWithPinnedIP,
+  validateUrlWithDNS,
+} from '@/lib/core/security/input-validation'
 import type { DbOrTx } from '@/lib/db/types'
 import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'

@@ -98,7 +102,7 @@ async function fetchWithDNSPinning(
   url: string,
   accessToken: string,
   requestId: string
-): Promise<Response | null> {
+): Promise<SecureFetchResponse | null> {
   try {
     const urlValidation = await validateUrlWithDNS(url, 'contentUrl')
     if (!urlValidation.isValid) {
@@ -108,17 +112,14 @@ async function fetchWithDNSPinning(
       return null
     }

-    // Use the original URL after DNS validation passes.
-    // DNS pinning breaks TLS SNI for HTTPS; validation already ensures IP is safe.
     const headers: Record<string, string> = {}

     if (accessToken) {
       headers.Authorization = `Bearer ${accessToken}`
     }

-    const response = await fetch(url, {
+    const response = await secureFetchWithPinnedIP(url, urlValidation.resolvedIP!, {
       headers,
-      redirect: 'follow',
     })

     return response