mirror of https://github.com/simstudioai/sim.git

Commit: separate server and client logic
@@ -16,7 +16,7 @@ import {
 import { checkHybridAuth } from '@/lib/auth/hybrid'
 import { getBrandConfig } from '@/lib/branding/branding'
 import { acquireLock, getRedisClient, releaseLock } from '@/lib/core/config/redis'
-import { validateExternalUrl } from '@/lib/core/security/input-validation'
+import { validateUrlWithDNS } from '@/lib/core/security/input-validation.server'
 import { SSE_HEADERS } from '@/lib/core/utils/sse'
 import { getBaseUrl } from '@/lib/core/utils/urls'
 import { markExecutionCancelled } from '@/lib/execution/cancellation'

@@ -1119,7 +1119,7 @@ async function handlePushNotificationSet(
     )
   }

-  const urlValidation = validateExternalUrl(
+  const urlValidation = await validateUrlWithDNS(
     params.pushNotificationConfig.url,
     'Push notification URL'
   )
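The recurring pattern in this commit: the synchronous, pattern-based validateExternalUrl check is replaced by validateUrlWithDNS, which resolves the hostname during validation, and later fetches reuse that resolved IP via secureFetchWithPinnedIP so the name cannot be re-resolved to an internal address (DNS rebinding) between check and request. A minimal sketch of the caller-side pattern, with the return shape of validateUrlWithDNS inferred from its call sites in this diff:

import {
  secureFetchWithPinnedIP,
  validateUrlWithDNS,
} from '@/lib/core/security/input-validation.server'

async function fetchExternal(url: string): Promise<Response> {
  // Inferred shape: { isValid: boolean; error?: string; resolvedIP?: string }
  const validation = await validateUrlWithDNS(url, 'url')
  if (!validation.isValid) {
    throw new Error(validation.error) // e.g. private address, bad scheme
  }
  // Pin the connection to the IP resolved at validation time.
  return secureFetchWithPinnedIP(url, validation.resolvedIP!, { method: 'GET' })
}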
@@ -6,7 +6,10 @@ import { createLogger } from '@sim/logger'
 import binaryExtensionsList from 'binary-extensions'
 import { type NextRequest, NextResponse } from 'next/server'
 import { checkHybridAuth } from '@/lib/auth/hybrid'
-import { secureFetchWithPinnedIP, validateUrlWithDNS } from '@/lib/core/security/input-validation'
+import {
+  secureFetchWithPinnedIP,
+  validateUrlWithDNS,
+} from '@/lib/core/security/input-validation.server'
 import { sanitizeUrlForLog } from '@/lib/core/utils/logging'
 import { isSupportedFileType, parseFile } from '@/lib/file-parsers'
 import { isUsingCloudStorage, type StorageContext, StorageService } from '@/lib/uploads'

@@ -20,6 +23,7 @@ import {
   getMimeTypeFromExtension,
   getViewerUrl,
+  inferContextFromKey,
   isInternalFileUrl,
 } from '@/lib/uploads/utils/file-utils'
 import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
 import { verifyFileAccess } from '@/app/api/files/authorization'

@@ -216,7 +220,7 @@ async function parseFileSingle(
     }
   }

-  if (filePath.includes('/api/files/serve/')) {
+  if (isInternalFileUrl(filePath)) {
     return handleCloudFile(filePath, fileType, undefined, userId, executionContext)
   }

@@ -247,7 +251,7 @@ function validateFilePath(filePath: string): { isValid: boolean; error?: string
     return { isValid: false, error: 'Invalid path: tilde character not allowed' }
   }

-  if (filePath.startsWith('/') && !filePath.startsWith('/api/files/serve/')) {
+  if (filePath.startsWith('/') && !isInternalFileUrl(filePath)) {
     return { isValid: false, error: 'Path outside allowed directory' }
   }

@@ -368,7 +372,7 @@ async function handleExternalUrl(
     throw new Error(`File too large: ${buffer.length} bytes (max: ${MAX_DOWNLOAD_SIZE_BYTES})`)
   }

-  logger.info(`Downloaded file from URL: ${sanitizeUrlForLog(url)}, size: ${buffer.length} bytes`)
+  logger.info(`Downloaded file from URL: ${url}, size: ${buffer.length} bytes`)

   let userFile: UserFile | undefined
   const mimeType = response.headers.get('content-type') || getMimeTypeFromExtension(extension)
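The hunks above also swap ad-hoc '/api/files/serve/' substring checks for the shared isInternalFileUrl helper from '@/lib/uploads/utils/file-utils'. Its implementation is not part of this diff; a plausible minimal shape, assuming it only needs to recognize the serve-route prefix these call sites used to test for:

export function isInternalFileUrl(filePath: string): boolean {
  // Covers both bare paths ('/api/files/serve/...') and absolute URLs
  // ('https://host/api/files/serve/...').
  return filePath.includes('/api/files/serve/')
}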
@@ -4,6 +4,7 @@ import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
 import { createA2AClient, extractTextContent, isTerminalState } from '@/lib/a2a/utils'
 import { checkHybridAuth } from '@/lib/auth/hybrid'
+import { validateUrlWithDNS } from '@/lib/core/security/input-validation.server'
 import { generateRequestId } from '@/lib/core/utils/request'

 export const dynamic = 'force-dynamic'

@@ -95,6 +96,14 @@ export async function POST(request: NextRequest) {
     if (validatedData.files && validatedData.files.length > 0) {
       for (const file of validatedData.files) {
         if (file.type === 'url') {
+          const urlValidation = await validateUrlWithDNS(file.data, 'fileUrl')
+          if (!urlValidation.isValid) {
+            return NextResponse.json(
+              { success: false, error: urlValidation.error },
+              { status: 400 }
+            )
+          }

           const filePart: FilePart = {
             kind: 'file',
             file: {
@@ -3,7 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
 import { createA2AClient } from '@/lib/a2a/utils'
 import { checkHybridAuth } from '@/lib/auth/hybrid'
-import { validateExternalUrl } from '@/lib/core/security/input-validation'
+import { validateUrlWithDNS } from '@/lib/core/security/input-validation.server'
 import { generateRequestId } from '@/lib/core/utils/request'

 export const dynamic = 'force-dynamic'

@@ -40,7 +40,7 @@ export async function POST(request: NextRequest) {
   const body = await request.json()
   const validatedData = A2ASetPushNotificationSchema.parse(body)

-  const urlValidation = validateExternalUrl(validatedData.webhookUrl, 'Webhook URL')
+  const urlValidation = await validateUrlWithDNS(validatedData.webhookUrl, 'Webhook URL')
   if (!urlValidation.isValid) {
     logger.warn(`[${requestId}] Invalid webhook URL`, { error: urlValidation.error })
     return NextResponse.json(
168  apps/sim/app/api/tools/github/latest-commit/route.ts  Normal file
@@ -0,0 +1,168 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import {
  secureFetchWithPinnedIP,
  validateUrlWithDNS,
} from '@/lib/core/security/input-validation.server'
import { generateRequestId } from '@/lib/core/utils/request'

export const dynamic = 'force-dynamic'

const logger = createLogger('GitHubLatestCommitAPI')

const GitHubLatestCommitSchema = z.object({
  owner: z.string().min(1, 'Owner is required'),
  repo: z.string().min(1, 'Repo is required'),
  branch: z.string().optional().nullable(),
  apiKey: z.string().min(1, 'API key is required'),
})

export async function POST(request: NextRequest) {
  const requestId = generateRequestId()

  try {
    const authResult = await checkInternalAuth(request, { requireWorkflowId: false })

    if (!authResult.success) {
      logger.warn(`[${requestId}] Unauthorized GitHub latest commit attempt: ${authResult.error}`)
      return NextResponse.json(
        {
          success: false,
          error: authResult.error || 'Authentication required',
        },
        { status: 401 }
      )
    }

    const body = await request.json()
    const validatedData = GitHubLatestCommitSchema.parse(body)

    const { owner, repo, branch, apiKey } = validatedData

    const baseUrl = `https://api.github.com/repos/${owner}/${repo}`
    const commitUrl = branch ? `${baseUrl}/commits/${branch}` : `${baseUrl}/commits/HEAD`

    logger.info(`[${requestId}] Fetching latest commit from GitHub`, { owner, repo, branch })

    const urlValidation = await validateUrlWithDNS(commitUrl, 'commitUrl')
    if (!urlValidation.isValid) {
      return NextResponse.json({ success: false, error: urlValidation.error }, { status: 400 })
    }

    const response = await secureFetchWithPinnedIP(commitUrl, urlValidation.resolvedIP!, {
      method: 'GET',
      headers: {
        Accept: 'application/vnd.github.v3+json',
        Authorization: `Bearer ${apiKey}`,
        'X-GitHub-Api-Version': '2022-11-28',
      },
    })

    if (!response.ok) {
      const errorData = await response.json().catch(() => ({}))
      logger.error(`[${requestId}] GitHub API error`, {
        status: response.status,
        error: errorData,
      })
      return NextResponse.json(
        { success: false, error: errorData.message || `GitHub API error: ${response.status}` },
        { status: 400 }
      )
    }

    const data = await response.json()

    const content = `Latest commit: "${data.commit.message}" by ${data.commit.author.name} on ${data.commit.author.date}. SHA: ${data.sha}`

    const files = data.files || []
    const fileDetailsWithContent = []

    for (const file of files) {
      const fileDetail: Record<string, any> = {
        filename: file.filename,
        additions: file.additions,
        deletions: file.deletions,
        changes: file.changes,
        status: file.status,
        raw_url: file.raw_url,
        blob_url: file.blob_url,
        patch: file.patch,
        content: undefined,
      }

      if (file.status !== 'removed' && file.raw_url) {
        try {
          const rawUrlValidation = await validateUrlWithDNS(file.raw_url, 'rawUrl')
          if (rawUrlValidation.isValid) {
            const contentResponse = await secureFetchWithPinnedIP(
              file.raw_url,
              rawUrlValidation.resolvedIP!,
              {
                headers: {
                  Authorization: `Bearer ${apiKey}`,
                  'X-GitHub-Api-Version': '2022-11-28',
                },
              }
            )

            if (contentResponse.ok) {
              fileDetail.content = await contentResponse.text()
            }
          }
        } catch (error) {
          logger.warn(`[${requestId}] Failed to fetch content for ${file.filename}:`, error)
        }
      }

      fileDetailsWithContent.push(fileDetail)
    }

    logger.info(`[${requestId}] Latest commit fetched successfully`, {
      sha: data.sha,
      fileCount: files.length,
    })

    return NextResponse.json({
      success: true,
      output: {
        content,
        metadata: {
          sha: data.sha,
          html_url: data.html_url,
          commit_message: data.commit.message,
          author: {
            name: data.commit.author.name,
            login: data.author?.login || 'Unknown',
            avatar_url: data.author?.avatar_url || '',
            html_url: data.author?.html_url || '',
          },
          committer: {
            name: data.commit.committer.name,
            login: data.committer?.login || 'Unknown',
            avatar_url: data.committer?.avatar_url || '',
            html_url: data.committer?.html_url || '',
          },
          stats: data.stats
            ? {
                additions: data.stats.additions,
                deletions: data.stats.deletions,
                total: data.stats.total,
              }
            : undefined,
          files: fileDetailsWithContent.length > 0 ? fileDetailsWithContent : undefined,
        },
      },
    })
  } catch (error) {
    logger.error(`[${requestId}] Error fetching GitHub latest commit:`, error)
    return NextResponse.json(
      {
        success: false,
        error: error instanceof Error ? error.message : 'Unknown error occurred',
      },
      { status: 500 }
    )
  }
}
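An illustrative call to the new route (hypothetical values; the body fields come from GitHubLatestCommitSchema above, and the request must also pass checkInternalAuth):

const res = await fetch('/api/tools/github/latest-commit', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    owner: 'simstudioai',
    repo: 'sim',
    branch: 'main', // optional; omit to resolve HEAD
    apiKey: githubToken, // forwarded to api.github.com as a Bearer token
  }),
})
const { success, output } = await res.json()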
231  apps/sim/app/api/tools/google_drive/download/route.ts  Normal file
@@ -0,0 +1,231 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import {
  secureFetchWithPinnedIP,
  validateUrlWithDNS,
} from '@/lib/core/security/input-validation.server'
import { generateRequestId } from '@/lib/core/utils/request'
import type { GoogleDriveFile, GoogleDriveRevision } from '@/tools/google_drive/types'
import {
  ALL_FILE_FIELDS,
  ALL_REVISION_FIELDS,
  DEFAULT_EXPORT_FORMATS,
  GOOGLE_WORKSPACE_MIME_TYPES,
} from '@/tools/google_drive/utils'

export const dynamic = 'force-dynamic'

const logger = createLogger('GoogleDriveDownloadAPI')

const GoogleDriveDownloadSchema = z.object({
  accessToken: z.string().min(1, 'Access token is required'),
  fileId: z.string().min(1, 'File ID is required'),
  mimeType: z.string().optional().nullable(),
  fileName: z.string().optional().nullable(),
  includeRevisions: z.boolean().optional().default(true),
})

export async function POST(request: NextRequest) {
  const requestId = generateRequestId()

  try {
    const authResult = await checkInternalAuth(request, { requireWorkflowId: false })

    if (!authResult.success) {
      logger.warn(`[${requestId}] Unauthorized Google Drive download attempt: ${authResult.error}`)
      return NextResponse.json(
        {
          success: false,
          error: authResult.error || 'Authentication required',
        },
        { status: 401 }
      )
    }

    const body = await request.json()
    const validatedData = GoogleDriveDownloadSchema.parse(body)

    const {
      accessToken,
      fileId,
      mimeType: exportMimeType,
      fileName,
      includeRevisions,
    } = validatedData
    const authHeader = `Bearer ${accessToken}`

    logger.info(`[${requestId}] Getting file metadata from Google Drive`, { fileId })

    const metadataUrl = `https://www.googleapis.com/drive/v3/files/${fileId}?fields=${ALL_FILE_FIELDS}&supportsAllDrives=true`
    const metadataUrlValidation = await validateUrlWithDNS(metadataUrl, 'metadataUrl')
    if (!metadataUrlValidation.isValid) {
      return NextResponse.json(
        { success: false, error: metadataUrlValidation.error },
        { status: 400 }
      )
    }

    const metadataResponse = await secureFetchWithPinnedIP(
      metadataUrl,
      metadataUrlValidation.resolvedIP!,
      {
        headers: { Authorization: authHeader },
      }
    )

    if (!metadataResponse.ok) {
      const errorDetails = await metadataResponse.json().catch(() => ({}))
      logger.error(`[${requestId}] Failed to get file metadata`, {
        status: metadataResponse.status,
        error: errorDetails,
      })
      return NextResponse.json(
        { success: false, error: errorDetails.error?.message || 'Failed to get file metadata' },
        { status: 400 }
      )
    }

    const metadata: GoogleDriveFile = await metadataResponse.json()
    const fileMimeType = metadata.mimeType

    let fileBuffer: Buffer
    let finalMimeType = fileMimeType

    if (GOOGLE_WORKSPACE_MIME_TYPES.includes(fileMimeType)) {
      const exportFormat = exportMimeType || DEFAULT_EXPORT_FORMATS[fileMimeType] || 'text/plain'
      finalMimeType = exportFormat

      logger.info(`[${requestId}] Exporting Google Workspace file`, {
        fileId,
        mimeType: fileMimeType,
        exportFormat,
      })

      const exportUrl = `https://www.googleapis.com/drive/v3/files/${fileId}/export?mimeType=${encodeURIComponent(exportFormat)}&supportsAllDrives=true`
      const exportUrlValidation = await validateUrlWithDNS(exportUrl, 'exportUrl')
      if (!exportUrlValidation.isValid) {
        return NextResponse.json(
          { success: false, error: exportUrlValidation.error },
          { status: 400 }
        )
      }

      const exportResponse = await secureFetchWithPinnedIP(
        exportUrl,
        exportUrlValidation.resolvedIP!,
        { headers: { Authorization: authHeader } }
      )

      if (!exportResponse.ok) {
        const exportError = await exportResponse.json().catch(() => ({}))
        logger.error(`[${requestId}] Failed to export file`, {
          status: exportResponse.status,
          error: exportError,
        })
        return NextResponse.json(
          {
            success: false,
            error: exportError.error?.message || 'Failed to export Google Workspace file',
          },
          { status: 400 }
        )
      }

      const arrayBuffer = await exportResponse.arrayBuffer()
      fileBuffer = Buffer.from(arrayBuffer)
    } else {
      logger.info(`[${requestId}] Downloading regular file`, { fileId, mimeType: fileMimeType })

      const downloadUrl = `https://www.googleapis.com/drive/v3/files/${fileId}?alt=media&supportsAllDrives=true`
      const downloadUrlValidation = await validateUrlWithDNS(downloadUrl, 'downloadUrl')
      if (!downloadUrlValidation.isValid) {
        return NextResponse.json(
          { success: false, error: downloadUrlValidation.error },
          { status: 400 }
        )
      }

      const downloadResponse = await secureFetchWithPinnedIP(
        downloadUrl,
        downloadUrlValidation.resolvedIP!,
        { headers: { Authorization: authHeader } }
      )

      if (!downloadResponse.ok) {
        const downloadError = await downloadResponse.json().catch(() => ({}))
        logger.error(`[${requestId}] Failed to download file`, {
          status: downloadResponse.status,
          error: downloadError,
        })
        return NextResponse.json(
          { success: false, error: downloadError.error?.message || 'Failed to download file' },
          { status: 400 }
        )
      }

      const arrayBuffer = await downloadResponse.arrayBuffer()
      fileBuffer = Buffer.from(arrayBuffer)
    }

    const canReadRevisions = metadata.capabilities?.canReadRevisions === true
    if (includeRevisions && canReadRevisions) {
      try {
        const revisionsUrl = `https://www.googleapis.com/drive/v3/files/${fileId}/revisions?fields=revisions(${ALL_REVISION_FIELDS})&pageSize=100`
        const revisionsUrlValidation = await validateUrlWithDNS(revisionsUrl, 'revisionsUrl')
        if (revisionsUrlValidation.isValid) {
          const revisionsResponse = await secureFetchWithPinnedIP(
            revisionsUrl,
            revisionsUrlValidation.resolvedIP!,
            { headers: { Authorization: authHeader } }
          )

          if (revisionsResponse.ok) {
            const revisionsData = await revisionsResponse.json()
            metadata.revisions = revisionsData.revisions as GoogleDriveRevision[]
            logger.info(`[${requestId}] Fetched file revisions`, {
              fileId,
              revisionCount: metadata.revisions?.length || 0,
            })
          }
        }
      } catch (error) {
        logger.warn(`[${requestId}] Error fetching revisions, continuing without them`, { error })
      }
    }

    const resolvedName = fileName || metadata.name || 'download'

    logger.info(`[${requestId}] File downloaded successfully`, {
      fileId,
      name: resolvedName,
      size: fileBuffer.length,
      mimeType: finalMimeType,
    })

    const base64Data = fileBuffer.toString('base64')

    return NextResponse.json({
      success: true,
      output: {
        file: {
          name: resolvedName,
          mimeType: finalMimeType,
          data: base64Data,
          size: fileBuffer.length,
        },
        metadata,
      },
    })
  } catch (error) {
    logger.error(`[${requestId}] Error downloading Google Drive file:`, error)
    return NextResponse.json(
      {
        success: false,
        error: error instanceof Error ? error.message : 'Unknown error occurred',
      },
      { status: 500 }
    )
  }
}
@@ -0,0 +1,132 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import {
  secureFetchWithPinnedIP,
  validateUrlWithDNS,
} from '@/lib/core/security/input-validation.server'
import { generateRequestId } from '@/lib/core/utils/request'
import { enhanceGoogleVaultError } from '@/tools/google_vault/utils'

export const dynamic = 'force-dynamic'

const logger = createLogger('GoogleVaultDownloadExportFileAPI')

const GoogleVaultDownloadExportFileSchema = z.object({
  accessToken: z.string().min(1, 'Access token is required'),
  matterId: z.string().min(1, 'Matter ID is required'),
  bucketName: z.string().min(1, 'Bucket name is required'),
  objectName: z.string().min(1, 'Object name is required'),
  fileName: z.string().optional().nullable(),
})

export async function POST(request: NextRequest) {
  const requestId = generateRequestId()

  try {
    const authResult = await checkInternalAuth(request, { requireWorkflowId: false })

    if (!authResult.success) {
      logger.warn(`[${requestId}] Unauthorized Google Vault download attempt: ${authResult.error}`)
      return NextResponse.json(
        {
          success: false,
          error: authResult.error || 'Authentication required',
        },
        { status: 401 }
      )
    }

    const body = await request.json()
    const validatedData = GoogleVaultDownloadExportFileSchema.parse(body)

    const { accessToken, bucketName, objectName, fileName } = validatedData

    const bucket = encodeURIComponent(bucketName)
    const object = encodeURIComponent(objectName)
    const downloadUrl = `https://storage.googleapis.com/storage/v1/b/${bucket}/o/${object}?alt=media`

    logger.info(`[${requestId}] Downloading file from Google Vault`, { bucketName, objectName })

    const urlValidation = await validateUrlWithDNS(downloadUrl, 'downloadUrl')
    if (!urlValidation.isValid) {
      return NextResponse.json(
        { success: false, error: enhanceGoogleVaultError(urlValidation.error || 'Invalid URL') },
        { status: 400 }
      )
    }

    const downloadResponse = await secureFetchWithPinnedIP(downloadUrl, urlValidation.resolvedIP!, {
      method: 'GET',
      headers: {
        Authorization: `Bearer ${accessToken}`,
      },
    })

    if (!downloadResponse.ok) {
      const errorText = await downloadResponse.text().catch(() => '')
      const errorMessage = `Failed to download file: ${errorText || downloadResponse.statusText}`
      logger.error(`[${requestId}] Failed to download Vault export file`, {
        status: downloadResponse.status,
        error: errorText,
      })
      return NextResponse.json(
        { success: false, error: enhanceGoogleVaultError(errorMessage) },
        { status: 400 }
      )
    }

    const contentType = downloadResponse.headers.get('content-type') || 'application/octet-stream'
    const disposition = downloadResponse.headers.get('content-disposition') || ''
    const match = disposition.match(/filename\*=UTF-8''([^;]+)|filename="([^"]+)"/)

    let resolvedName = fileName
    if (!resolvedName) {
      if (match?.[1]) {
        try {
          resolvedName = decodeURIComponent(match[1])
        } catch {
          resolvedName = match[1]
        }
      } else if (match?.[2]) {
        resolvedName = match[2]
      } else if (objectName) {
        const parts = objectName.split('/')
        resolvedName = parts[parts.length - 1] || 'vault-export.bin'
      } else {
        resolvedName = 'vault-export.bin'
      }
    }

    const arrayBuffer = await downloadResponse.arrayBuffer()
    const buffer = Buffer.from(arrayBuffer)

    logger.info(`[${requestId}] Vault export file downloaded successfully`, {
      name: resolvedName,
      size: buffer.length,
      mimeType: contentType,
    })

    return NextResponse.json({
      success: true,
      output: {
        file: {
          name: resolvedName,
          mimeType: contentType,
          data: buffer.toString('base64'),
          size: buffer.length,
        },
      },
    })
  } catch (error) {
    logger.error(`[${requestId}] Error downloading Google Vault export file:`, error)
    return NextResponse.json(
      {
        success: false,
        error: error instanceof Error ? error.message : 'Unknown error occurred',
      },
      { status: 500 }
    )
  }
}
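The filename resolution above hinges on the regex /filename\*=UTF-8''([^;]+)|filename="([^"]+)"/ : group 1 captures an RFC 5987 percent-encoded name, group 2 a plain quoted name. A quick worked example of both branches (illustrative header values):

const re = /filename\*=UTF-8''([^;]+)|filename="([^"]+)"/
// RFC 5987 form: group 1 holds the percent-encoded name, decoded afterwards.
re.exec(`attachment; filename*=UTF-8''vault%20export.zip`)?.[1] // 'vault%20export.zip'
// Plain quoted form: group 2 holds the name verbatim.
re.exec('attachment; filename="vault-export.zip"')?.[2] // 'vault-export.zip'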
@@ -1,8 +1,10 @@
 import { createLogger } from '@sim/logger'
 import { type NextRequest, NextResponse } from 'next/server'
 import { checkInternalAuth } from '@/lib/auth/hybrid'
-import { validateImageUrl } from '@/lib/core/security/input-validation'
-import { sanitizeUrlForLog } from '@/lib/core/utils/logging'
+import {
+  secureFetchWithPinnedIP,
+  validateUrlWithDNS,
+} from '@/lib/core/security/input-validation.server'
 import { generateRequestId } from '@/lib/core/utils/request'

 const logger = createLogger('ImageProxyAPI')

@@ -27,19 +29,20 @@ export async function GET(request: NextRequest) {
     return new NextResponse('Missing URL parameter', { status: 400 })
   }

-  const urlValidation = validateImageUrl(imageUrl)
+  const urlValidation = await validateUrlWithDNS(imageUrl, 'imageUrl')
   if (!urlValidation.isValid) {
     logger.warn(`[${requestId}] Blocked image proxy request`, {
-      url: sanitizeUrlForLog(imageUrl),
+      url: imageUrl.substring(0, 100),
       error: urlValidation.error,
     })
     return new NextResponse(urlValidation.error || 'Invalid image URL', { status: 403 })
   }

-  logger.info(`[${requestId}] Proxying image request for: ${sanitizeUrlForLog(imageUrl)}`)
+  logger.info(`[${requestId}] Proxying image request for: ${imageUrl}`)

   try {
-    const imageResponse = await fetch(imageUrl, {
+    const imageResponse = await secureFetchWithPinnedIP(imageUrl, urlValidation.resolvedIP!, {
       method: 'GET',
       headers: {
         'User-Agent':
           'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.36',
@@ -2,7 +2,6 @@ import { createLogger } from '@sim/logger'
 import { type NextRequest, NextResponse } from 'next/server'
 import { checkInternalAuth } from '@/lib/auth/hybrid'
 import { validateAlphanumericId, validateJiraCloudId } from '@/lib/core/security/input-validation'
-import { sanitizeUrlForLog } from '@/lib/core/utils/logging'
 import { getJiraCloudId, getJsmApiBaseUrl, getJsmHeaders } from '@/tools/jsm/utils'

 export const dynamic = 'force-dynamic'

@@ -63,7 +62,7 @@ export async function POST(request: NextRequest) {

   const url = `${baseUrl}/servicedesk/${serviceDeskId}/queue${params.toString() ? `?${params.toString()}` : ''}`

-  logger.info('Fetching queues from:', sanitizeUrlForLog(url))
+  logger.info('Fetching queues from:', url)

   const response = await fetch(url, {
     method: 'GET',

@@ -6,7 +6,6 @@ import {
   validateJiraCloudId,
   validateJiraIssueKey,
 } from '@/lib/core/security/input-validation'
-import { sanitizeUrlForLog } from '@/lib/core/utils/logging'
 import { getJiraCloudId, getJsmApiBaseUrl, getJsmHeaders } from '@/tools/jsm/utils'

 export const dynamic = 'force-dynamic'

@@ -67,7 +66,7 @@ export async function POST(request: NextRequest) {
   }
   const url = `${baseUrl}/request`

-  logger.info('Creating request at:', sanitizeUrlForLog(url))
+  logger.info('Creating request at:', url)

   const requestBody: Record<string, unknown> = {
     serviceDeskId,

@@ -129,7 +128,7 @@ export async function POST(request: NextRequest) {

   const url = `${baseUrl}/request/${issueIdOrKey}`

-  logger.info('Fetching request from:', sanitizeUrlForLog(url))
+  logger.info('Fetching request from:', url)

   const response = await fetch(url, {
     method: 'GET',

@@ -2,7 +2,6 @@ import { createLogger } from '@sim/logger'
 import { type NextRequest, NextResponse } from 'next/server'
 import { checkInternalAuth } from '@/lib/auth/hybrid'
 import { validateAlphanumericId, validateJiraCloudId } from '@/lib/core/security/input-validation'
-import { sanitizeUrlForLog } from '@/lib/core/utils/logging'
 import { getJiraCloudId, getJsmApiBaseUrl, getJsmHeaders } from '@/tools/jsm/utils'

 export const dynamic = 'force-dynamic'

@@ -69,7 +68,7 @@ export async function POST(request: NextRequest) {

   const url = `${baseUrl}/request${params.toString() ? `?${params.toString()}` : ''}`

-  logger.info('Fetching requests from:', sanitizeUrlForLog(url))
+  logger.info('Fetching requests from:', url)

   const response = await fetch(url, {
     method: 'GET',

@@ -2,7 +2,6 @@ import { createLogger } from '@sim/logger'
 import { type NextRequest, NextResponse } from 'next/server'
 import { checkInternalAuth } from '@/lib/auth/hybrid'
 import { validateAlphanumericId, validateJiraCloudId } from '@/lib/core/security/input-validation'
-import { sanitizeUrlForLog } from '@/lib/core/utils/logging'
 import { getJiraCloudId, getJsmApiBaseUrl, getJsmHeaders } from '@/tools/jsm/utils'

 export const dynamic = 'force-dynamic'

@@ -54,7 +53,7 @@ export async function POST(request: NextRequest) {

   const url = `${baseUrl}/servicedesk/${serviceDeskId}/requesttype${params.toString() ? `?${params.toString()}` : ''}`

-  logger.info('Fetching request types from:', sanitizeUrlForLog(url))
+  logger.info('Fetching request types from:', url)

   const response = await fetch(url, {
     method: 'GET',

@@ -2,7 +2,6 @@ import { createLogger } from '@sim/logger'
 import { type NextRequest, NextResponse } from 'next/server'
 import { checkInternalAuth } from '@/lib/auth/hybrid'
 import { validateJiraCloudId } from '@/lib/core/security/input-validation'
-import { sanitizeUrlForLog } from '@/lib/core/utils/logging'
 import { getJiraCloudId, getJsmApiBaseUrl, getJsmHeaders } from '@/tools/jsm/utils'

 export const dynamic = 'force-dynamic'

@@ -44,7 +43,7 @@ export async function POST(request: NextRequest) {

   const url = `${baseUrl}/servicedesk${params.toString() ? `?${params.toString()}` : ''}`

-  logger.info('Fetching service desks from:', sanitizeUrlForLog(url))
+  logger.info('Fetching service desks from:', url)

   const response = await fetch(url, {
     method: 'GET',

@@ -2,7 +2,6 @@ import { createLogger } from '@sim/logger'
 import { type NextRequest, NextResponse } from 'next/server'
 import { checkInternalAuth } from '@/lib/auth/hybrid'
 import { validateJiraCloudId, validateJiraIssueKey } from '@/lib/core/security/input-validation'
-import { sanitizeUrlForLog } from '@/lib/core/utils/logging'
 import { getJiraCloudId, getJsmApiBaseUrl, getJsmHeaders } from '@/tools/jsm/utils'

 export const dynamic = 'force-dynamic'

@@ -54,7 +53,7 @@ export async function POST(request: NextRequest) {

   const url = `${baseUrl}/request/${issueIdOrKey}/sla${params.toString() ? `?${params.toString()}` : ''}`

-  logger.info('Fetching SLA info from:', sanitizeUrlForLog(url))
+  logger.info('Fetching SLA info from:', url)

   const response = await fetch(url, {
     method: 'GET',

@@ -6,7 +6,6 @@ import {
   validateJiraCloudId,
   validateJiraIssueKey,
 } from '@/lib/core/security/input-validation'
-import { sanitizeUrlForLog } from '@/lib/core/utils/logging'
 import { getJiraCloudId, getJsmApiBaseUrl, getJsmHeaders } from '@/tools/jsm/utils'

 export const dynamic = 'force-dynamic'

@@ -70,7 +69,7 @@ export async function POST(request: NextRequest) {

   const url = `${baseUrl}/request/${issueIdOrKey}/transition`

-  logger.info('Transitioning request at:', sanitizeUrlForLog(url))
+  logger.info('Transitioning request at:', url)

   const body: Record<string, unknown> = {
     id: transitionId,

@@ -2,7 +2,6 @@ import { createLogger } from '@sim/logger'
 import { type NextRequest, NextResponse } from 'next/server'
 import { checkInternalAuth } from '@/lib/auth/hybrid'
 import { validateJiraCloudId, validateJiraIssueKey } from '@/lib/core/security/input-validation'
-import { sanitizeUrlForLog } from '@/lib/core/utils/logging'
 import { getJiraCloudId, getJsmApiBaseUrl, getJsmHeaders } from '@/tools/jsm/utils'

 export const dynamic = 'force-dynamic'

@@ -50,7 +49,7 @@ export async function POST(request: NextRequest) {

   const url = `${baseUrl}/request/${issueIdOrKey}/transition`

-  logger.info('Fetching transitions from:', sanitizeUrlForLog(url))
+  logger.info('Fetching transitions from:', url)

   const response = await fetch(url, {
     method: 'GET',
@@ -2,11 +2,19 @@ import { createLogger } from '@sim/logger'
 import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
 import { checkInternalAuth } from '@/lib/auth/hybrid'
-import { sanitizeUrlForLog } from '@/lib/core/utils/logging'
+import {
+  secureFetchWithPinnedIP,
+  validateUrlWithDNS,
+} from '@/lib/core/security/input-validation.server'
 import { generateRequestId } from '@/lib/core/utils/request'
 import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
 import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
 import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
+import type {
+  GraphApiErrorResponse,
+  GraphChatMessage,
+  GraphDriveItem,
+} from '@/tools/microsoft_teams/types'
 import { resolveMentionsForChannel, type TeamsMention } from '@/tools/microsoft_teams/utils'

 export const dynamic = 'force-dynamic'

@@ -21,6 +29,22 @@ const TeamsWriteChannelSchema = z.object({
   files: RawFileInputArraySchema.optional().nullable(),
 })

+async function secureFetchGraph(
+  url: string,
+  options: {
+    method?: string
+    headers?: Record<string, string>
+    body?: string | Buffer | Uint8Array
+  },
+  paramName: string
+) {
+  const urlValidation = await validateUrlWithDNS(url, paramName)
+  if (!urlValidation.isValid) {
+    throw new Error(urlValidation.error)
+  }
+  return secureFetchWithPinnedIP(url, urlValidation.resolvedIP!, options)
+}
+
 export async function POST(request: NextRequest) {
   const requestId = generateRequestId()

@@ -85,26 +109,32 @@ export async function POST(request: NextRequest) {
         encodeURIComponent(file.name) +
         ':/content'

-      logger.info(`[${requestId}] Uploading to Teams: ${sanitizeUrlForLog(uploadUrl)}`)
+      logger.info(`[${requestId}] Uploading to Teams: ${uploadUrl}`)

-      const uploadResponse = await fetch(uploadUrl, {
-        method: 'PUT',
-        headers: {
-          Authorization: `Bearer ${validatedData.accessToken}`,
-          'Content-Type': file.type || 'application/octet-stream',
-        },
-        body: new Uint8Array(buffer),
-      })
+      const uploadResponse = await secureFetchGraph(
+        uploadUrl,
+        {
+          method: 'PUT',
+          headers: {
+            Authorization: `Bearer ${validatedData.accessToken}`,
+            'Content-Type': file.type || 'application/octet-stream',
+          },
+          body: buffer,
+        },
+        'uploadUrl'
+      )

       if (!uploadResponse.ok) {
-        const errorData = await uploadResponse.json().catch(() => ({}))
+        const errorData = (await uploadResponse
+          .json()
+          .catch(() => ({}))) as GraphApiErrorResponse
         logger.error(`[${requestId}] Teams upload failed:`, errorData)
         throw new Error(
           `Failed to upload file to Teams: ${errorData.error?.message || 'Unknown error'}`
         )
       }

-      const uploadedFile = await uploadResponse.json()
+      const uploadedFile = (await uploadResponse.json()) as GraphDriveItem
       logger.info(`[${requestId}] File uploaded to Teams successfully`, {
         id: uploadedFile.id,
         webUrl: uploadedFile.webUrl,

@@ -112,21 +142,28 @@ export async function POST(request: NextRequest) {

       const fileDetailsUrl = `https://graph.microsoft.com/v1.0/me/drive/items/${uploadedFile.id}?$select=id,name,webDavUrl,eTag,size`

-      const fileDetailsResponse = await fetch(fileDetailsUrl, {
-        headers: {
-          Authorization: `Bearer ${validatedData.accessToken}`,
-        },
-      })
+      const fileDetailsResponse = await secureFetchGraph(
+        fileDetailsUrl,
+        {
+          method: 'GET',
+          headers: {
+            Authorization: `Bearer ${validatedData.accessToken}`,
+          },
+        },
+        'fileDetailsUrl'
+      )

       if (!fileDetailsResponse.ok) {
-        const errorData = await fileDetailsResponse.json().catch(() => ({}))
+        const errorData = (await fileDetailsResponse
+          .json()
+          .catch(() => ({}))) as GraphApiErrorResponse
         logger.error(`[${requestId}] Failed to get file details:`, errorData)
         throw new Error(
           `Failed to get file details: ${errorData.error?.message || 'Unknown error'}`
         )
       }

-      const fileDetails = await fileDetailsResponse.json()
+      const fileDetails = (await fileDetailsResponse.json()) as GraphDriveItem
       logger.info(`[${requestId}] Got file details`, {
         webDavUrl: fileDetails.webDavUrl,
         eTag: fileDetails.eTag,

@@ -211,17 +248,21 @@ export async function POST(request: NextRequest) {

     const teamsUrl = `https://graph.microsoft.com/v1.0/teams/${encodeURIComponent(validatedData.teamId)}/channels/${encodeURIComponent(validatedData.channelId)}/messages`

-    const teamsResponse = await fetch(teamsUrl, {
-      method: 'POST',
-      headers: {
-        'Content-Type': 'application/json',
-        Authorization: `Bearer ${validatedData.accessToken}`,
-      },
-      body: JSON.stringify(messageBody),
-    })
+    const teamsResponse = await secureFetchGraph(
+      teamsUrl,
+      {
+        method: 'POST',
+        headers: {
+          'Content-Type': 'application/json',
+          Authorization: `Bearer ${validatedData.accessToken}`,
+        },
+        body: JSON.stringify(messageBody),
+      },
+      'teamsUrl'
+    )

     if (!teamsResponse.ok) {
-      const errorData = await teamsResponse.json().catch(() => ({}))
+      const errorData = (await teamsResponse.json().catch(() => ({}))) as GraphApiErrorResponse
       logger.error(`[${requestId}] Microsoft Teams API error:`, errorData)
       return NextResponse.json(
         {

@@ -232,7 +273,7 @@ export async function POST(request: NextRequest) {
     )
   }

-  const responseData = await teamsResponse.json()
+  const responseData = (await teamsResponse.json()) as GraphChatMessage
   logger.info(`[${requestId}] Teams channel message sent successfully`, {
     messageId: responseData.id,
     attachmentCount: attachments.length,
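The secureFetchGraph wrapper above is repeated verbatim in the chat route below (and again in the OneDrive upload route further down): validate once, then fetch against the pinned IP, throwing instead of returning a response when validation fails. A hedged usage sketch with placeholder values:

// Illustrative only: endpoint and token are placeholders.
const profileResponse = await secureFetchGraph(
  'https://graph.microsoft.com/v1.0/me',
  { method: 'GET', headers: { Authorization: `Bearer ${accessToken}` } },
  'profileUrl' // the parameter name surfaced in validation error messages
)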
@@ -2,11 +2,19 @@ import { createLogger } from '@sim/logger'
 import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
 import { checkInternalAuth } from '@/lib/auth/hybrid'
-import { sanitizeUrlForLog } from '@/lib/core/utils/logging'
+import {
+  secureFetchWithPinnedIP,
+  validateUrlWithDNS,
+} from '@/lib/core/security/input-validation.server'
 import { generateRequestId } from '@/lib/core/utils/request'
 import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
 import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
 import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
+import type {
+  GraphApiErrorResponse,
+  GraphChatMessage,
+  GraphDriveItem,
+} from '@/tools/microsoft_teams/types'
 import { resolveMentionsForChat, type TeamsMention } from '@/tools/microsoft_teams/utils'

 export const dynamic = 'force-dynamic'

@@ -20,6 +28,22 @@ const TeamsWriteChatSchema = z.object({
   files: RawFileInputArraySchema.optional().nullable(),
 })

+async function secureFetchGraph(
+  url: string,
+  options: {
+    method?: string
+    headers?: Record<string, string>
+    body?: string | Buffer | Uint8Array
+  },
+  paramName: string
+) {
+  const urlValidation = await validateUrlWithDNS(url, paramName)
+  if (!urlValidation.isValid) {
+    throw new Error(urlValidation.error)
+  }
+  return secureFetchWithPinnedIP(url, urlValidation.resolvedIP!, options)
+}
+
 export async function POST(request: NextRequest) {
   const requestId = generateRequestId()

@@ -83,26 +107,32 @@ export async function POST(request: NextRequest) {
         encodeURIComponent(file.name) +
         ':/content'

-      logger.info(`[${requestId}] Uploading to Teams: ${sanitizeUrlForLog(uploadUrl)}`)
+      logger.info(`[${requestId}] Uploading to Teams: ${uploadUrl}`)

-      const uploadResponse = await fetch(uploadUrl, {
-        method: 'PUT',
-        headers: {
-          Authorization: `Bearer ${validatedData.accessToken}`,
-          'Content-Type': file.type || 'application/octet-stream',
-        },
-        body: new Uint8Array(buffer),
-      })
+      const uploadResponse = await secureFetchGraph(
+        uploadUrl,
+        {
+          method: 'PUT',
+          headers: {
+            Authorization: `Bearer ${validatedData.accessToken}`,
+            'Content-Type': file.type || 'application/octet-stream',
+          },
+          body: buffer,
+        },
+        'uploadUrl'
+      )

       if (!uploadResponse.ok) {
-        const errorData = await uploadResponse.json().catch(() => ({}))
+        const errorData = (await uploadResponse
+          .json()
+          .catch(() => ({}))) as GraphApiErrorResponse
         logger.error(`[${requestId}] Teams upload failed:`, errorData)
         throw new Error(
           `Failed to upload file to Teams: ${errorData.error?.message || 'Unknown error'}`
         )
       }

-      const uploadedFile = await uploadResponse.json()
+      const uploadedFile = (await uploadResponse.json()) as GraphDriveItem
       logger.info(`[${requestId}] File uploaded to Teams successfully`, {
         id: uploadedFile.id,
         webUrl: uploadedFile.webUrl,

@@ -110,21 +140,28 @@ export async function POST(request: NextRequest) {

       const fileDetailsUrl = `https://graph.microsoft.com/v1.0/me/drive/items/${uploadedFile.id}?$select=id,name,webDavUrl,eTag,size`

-      const fileDetailsResponse = await fetch(fileDetailsUrl, {
-        headers: {
-          Authorization: `Bearer ${validatedData.accessToken}`,
-        },
-      })
+      const fileDetailsResponse = await secureFetchGraph(
+        fileDetailsUrl,
+        {
+          method: 'GET',
+          headers: {
+            Authorization: `Bearer ${validatedData.accessToken}`,
+          },
+        },
+        'fileDetailsUrl'
+      )

       if (!fileDetailsResponse.ok) {
-        const errorData = await fileDetailsResponse.json().catch(() => ({}))
+        const errorData = (await fileDetailsResponse
+          .json()
+          .catch(() => ({}))) as GraphApiErrorResponse
         logger.error(`[${requestId}] Failed to get file details:`, errorData)
         throw new Error(
           `Failed to get file details: ${errorData.error?.message || 'Unknown error'}`
         )
       }

-      const fileDetails = await fileDetailsResponse.json()
+      const fileDetails = (await fileDetailsResponse.json()) as GraphDriveItem
       logger.info(`[${requestId}] Got file details`, {
         webDavUrl: fileDetails.webDavUrl,
         eTag: fileDetails.eTag,

@@ -208,17 +245,21 @@ export async function POST(request: NextRequest) {

     const teamsUrl = `https://graph.microsoft.com/v1.0/chats/${encodeURIComponent(validatedData.chatId)}/messages`

-    const teamsResponse = await fetch(teamsUrl, {
-      method: 'POST',
-      headers: {
-        'Content-Type': 'application/json',
-        Authorization: `Bearer ${validatedData.accessToken}`,
-      },
-      body: JSON.stringify(messageBody),
-    })
+    const teamsResponse = await secureFetchGraph(
+      teamsUrl,
+      {
+        method: 'POST',
+        headers: {
+          'Content-Type': 'application/json',
+          Authorization: `Bearer ${validatedData.accessToken}`,
+        },
+        body: JSON.stringify(messageBody),
+      },
+      'teamsUrl'
+    )

     if (!teamsResponse.ok) {
-      const errorData = await teamsResponse.json().catch(() => ({}))
+      const errorData = (await teamsResponse.json().catch(() => ({}))) as GraphApiErrorResponse
       logger.error(`[${requestId}] Microsoft Teams API error:`, errorData)
       return NextResponse.json(
         {

@@ -229,7 +270,7 @@ export async function POST(request: NextRequest) {
     )
   }

-  const responseData = await teamsResponse.json()
+  const responseData = (await teamsResponse.json()) as GraphChatMessage
   logger.info(`[${requestId}] Teams message sent successfully`, {
     messageId: responseData.id,
     attachmentCount: attachments.length,
@@ -2,16 +2,17 @@ import { createLogger } from '@sim/logger'
 import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
 import { checkInternalAuth } from '@/lib/auth/hybrid'
+import {
+  secureFetchWithPinnedIP,
+  validateUrlWithDNS,
+} from '@/lib/core/security/input-validation.server'
 import { generateRequestId } from '@/lib/core/utils/request'
-import { getBaseUrl } from '@/lib/core/utils/urls'
-import { StorageService } from '@/lib/uploads'
 import { FileInputSchema } from '@/lib/uploads/utils/file-schemas'
-import {
-  extractStorageKey,
-  inferContextFromKey,
-  isInternalFileUrl,
-} from '@/lib/uploads/utils/file-utils'
-import { verifyFileAccess } from '@/app/api/files/authorization'
+import { isInternalFileUrl, processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
+import {
+  downloadFileFromStorage,
+  resolveInternalFileUrl,
+} from '@/lib/uploads/utils/file-utils.server'

 export const dynamic = 'force-dynamic'

@@ -21,6 +22,7 @@ const MistralParseSchema = z.object({
   apiKey: z.string().min(1, 'API key is required'),
   filePath: z.string().min(1, 'File path is required').optional(),
   fileData: FileInputSchema.optional(),
+  file: FileInputSchema.optional(),
   resultType: z.string().optional(),
   pages: z.array(z.number()).optional(),
   includeImageBase64: z.boolean().optional(),

@@ -51,7 +53,7 @@ export async function POST(request: NextRequest) {
   const body = await request.json()
   const validatedData = MistralParseSchema.parse(body)

-  const fileData = validatedData.fileData
+  const fileData = validatedData.file || validatedData.fileData
   const filePath = typeof fileData === 'string' ? fileData : validatedData.filePath

   if (!fileData && (!filePath || filePath.trim() === '')) {
@@ -76,65 +78,72 @@ export async function POST(request: NextRequest) {
     }

     if (fileData && typeof fileData === 'object') {
-      const base64 = (fileData as { base64?: string }).base64
-      const mimeType = (fileData as { type?: string }).type || 'application/pdf'
-      if (!base64) {
+      const rawFile = fileData
+      let userFile
+      try {
+        userFile = processSingleFileToUserFile(rawFile, requestId, logger)
+      } catch (error) {
         return NextResponse.json(
           {
             success: false,
-            error: 'File base64 content is required',
+            error: error instanceof Error ? error.message : 'Failed to process file',
           },
           { status: 400 }
         )
       }
+
+      const mimeType = userFile.type || 'application/pdf'
+      let base64 = userFile.base64
+      if (!base64) {
+        const buffer = await downloadFileFromStorage(userFile, requestId, logger)
+        base64 = buffer.toString('base64')
+      }
       const base64Payload = base64.startsWith('data:')
         ? base64
         : `data:${mimeType};base64,${base64}`
       mistralBody.document = {
-        type: 'document_base64',
-        document_base64: base64Payload,
+        type: 'document_url',
+        document_url: base64Payload,
       }
     } else if (filePath) {
       let fileUrl = filePath

-      if (isInternalFileUrl(filePath)) {
-        try {
-          const storageKey = extractStorageKey(filePath)
-          const context = inferContextFromKey(storageKey)
-          const hasAccess = await verifyFileAccess(storageKey, userId, undefined, context, false)
-          if (!hasAccess) {
-            logger.warn(`[${requestId}] Unauthorized presigned URL generation attempt`, {
-              userId,
-              key: storageKey,
-              context,
-            })
-            return NextResponse.json(
-              {
-                success: false,
-                error: 'File not found',
-              },
-              { status: 404 }
-            )
-          }
-
-          fileUrl = await StorageService.generatePresignedDownloadUrl(storageKey, context, 5 * 60)
-          logger.info(`[${requestId}] Generated presigned URL for ${context} file`)
-        } catch (error) {
-          logger.error(`[${requestId}] Failed to generate presigned URL:`, error)
-          return NextResponse.json(
-            {
-              success: false,
-              error: 'Failed to generate file access URL',
-            },
-            { status: 500 }
-          )
-        }
-      } else if (filePath.startsWith('/')) {
-        const baseUrl = getBaseUrl()
-        fileUrl = `${baseUrl}${filePath}`
+      const isInternalFilePath = isInternalFileUrl(filePath)
+      if (isInternalFilePath) {
+        const resolution = await resolveInternalFileUrl(filePath, userId, requestId, logger)
+        if (resolution.error) {
+          return NextResponse.json(
+            {
+              success: false,
+              error: resolution.error.message,
+            },
+            { status: resolution.error.status }
+          )
+        }
+        fileUrl = resolution.fileUrl || fileUrl
+      } else if (filePath.startsWith('/')) {
+        logger.warn(`[${requestId}] Invalid internal path`, {
+          userId,
+          path: filePath.substring(0, 50),
+        })
+        return NextResponse.json(
+          {
+            success: false,
+            error: 'Invalid file path. Only uploaded files are supported for internal paths.',
+          },
+          { status: 400 }
+        )
+      } else {
+        const urlValidation = await validateUrlWithDNS(fileUrl, 'filePath')
+        if (!urlValidation.isValid) {
+          return NextResponse.json(
+            {
+              success: false,
+              error: urlValidation.error,
+            },
+            { status: 400 }
+          )
+        }
       }

       mistralBody.document = {

@@ -156,15 +165,34 @@ export async function POST(request: NextRequest) {
       mistralBody.image_min_size = validatedData.imageMinSize
     }

-    const mistralResponse = await fetch('https://api.mistral.ai/v1/ocr', {
-      method: 'POST',
-      headers: {
-        'Content-Type': 'application/json',
-        Accept: 'application/json',
-        Authorization: `Bearer ${validatedData.apiKey}`,
-      },
-      body: JSON.stringify(mistralBody),
-    })
+    const mistralEndpoint = 'https://api.mistral.ai/v1/ocr'
+    const mistralValidation = await validateUrlWithDNS(mistralEndpoint, 'Mistral API URL')
+    if (!mistralValidation.isValid) {
+      logger.error(`[${requestId}] Mistral API URL validation failed`, {
+        error: mistralValidation.error,
+      })
+      return NextResponse.json(
+        {
+          success: false,
+          error: 'Failed to reach Mistral API',
+        },
+        { status: 502 }
+      )
+    }
+
+    const mistralResponse = await secureFetchWithPinnedIP(
+      mistralEndpoint,
+      mistralValidation.resolvedIP!,
+      {
+        method: 'POST',
+        headers: {
+          'Content-Type': 'application/json',
+          Accept: 'application/json',
+          Authorization: `Bearer ${validatedData.apiKey}`,
+        },
+        body: JSON.stringify(mistralBody),
+      }
+    )

     if (!mistralResponse.ok) {
       const errorText = await mistralResponse.text()
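In the document_url branch above, raw base64 with no 'data:' prefix is wrapped into a data URL, so the same field carries either a fetchable URL or inline content. For example, a PDF whose base64 body begins 'JVBERi0x' (the '%PDF-1' magic) would be sent as:

mistralBody.document = {
  type: 'document_url',
  document_url: 'data:application/pdf;base64,JVBERi0x...', // truncated for illustration
}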
159  apps/sim/app/api/tools/onedrive/download/route.ts  Normal file
@@ -0,0 +1,159 @@
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
import {
|
||||
secureFetchWithPinnedIP,
|
||||
validateUrlWithDNS,
|
||||
} from '@/lib/core/security/input-validation.server'
|
import { generateRequestId } from '@/lib/core/utils/request'

export const dynamic = 'force-dynamic'

const logger = createLogger('OneDriveDownloadAPI')

const OneDriveDownloadSchema = z.object({
  accessToken: z.string().min(1, 'Access token is required'),
  fileId: z.string().min(1, 'File ID is required'),
  fileName: z.string().optional().nullable(),
})

export async function POST(request: NextRequest) {
  const requestId = generateRequestId()

  try {
    const authResult = await checkInternalAuth(request, { requireWorkflowId: false })

    if (!authResult.success) {
      logger.warn(`[${requestId}] Unauthorized OneDrive download attempt: ${authResult.error}`)
      return NextResponse.json(
        {
          success: false,
          error: authResult.error || 'Authentication required',
        },
        { status: 401 }
      )
    }

    const body = await request.json()
    const validatedData = OneDriveDownloadSchema.parse(body)

    const { accessToken, fileId, fileName } = validatedData
    const authHeader = `Bearer ${accessToken}`

    logger.info(`[${requestId}] Getting file metadata from OneDrive`, { fileId })

    const metadataUrl = `https://graph.microsoft.com/v1.0/me/drive/items/${fileId}`
    const metadataUrlValidation = await validateUrlWithDNS(metadataUrl, 'metadataUrl')
    if (!metadataUrlValidation.isValid) {
      return NextResponse.json(
        { success: false, error: metadataUrlValidation.error },
        { status: 400 }
      )
    }

    const metadataResponse = await secureFetchWithPinnedIP(
      metadataUrl,
      metadataUrlValidation.resolvedIP!,
      {
        headers: { Authorization: authHeader },
      }
    )

    if (!metadataResponse.ok) {
      const errorDetails = await metadataResponse.json().catch(() => ({}))
      logger.error(`[${requestId}] Failed to get file metadata`, {
        status: metadataResponse.status,
        error: errorDetails,
      })
      return NextResponse.json(
        { success: false, error: errorDetails.error?.message || 'Failed to get file metadata' },
        { status: 400 }
      )
    }

    const metadata = await metadataResponse.json()

    if (metadata.folder && !metadata.file) {
      logger.error(`[${requestId}] Attempted to download a folder`, {
        itemId: metadata.id,
        itemName: metadata.name,
      })
      return NextResponse.json(
        {
          success: false,
          error: `Cannot download folder "${metadata.name}". Please select a file instead.`,
        },
        { status: 400 }
      )
    }

    const mimeType = metadata.file?.mimeType || 'application/octet-stream'

    logger.info(`[${requestId}] Downloading file from OneDrive`, { fileId, mimeType })

    const downloadUrl = `https://graph.microsoft.com/v1.0/me/drive/items/${fileId}/content`
    const downloadUrlValidation = await validateUrlWithDNS(downloadUrl, 'downloadUrl')
    if (!downloadUrlValidation.isValid) {
      return NextResponse.json(
        { success: false, error: downloadUrlValidation.error },
        { status: 400 }
      )
    }

    const downloadResponse = await secureFetchWithPinnedIP(
      downloadUrl,
      downloadUrlValidation.resolvedIP!,
      {
        headers: { Authorization: authHeader },
      }
    )

    if (!downloadResponse.ok) {
      const downloadError = await downloadResponse.json().catch(() => ({}))
      logger.error(`[${requestId}] Failed to download file`, {
        status: downloadResponse.status,
        error: downloadError,
      })
      return NextResponse.json(
        { success: false, error: downloadError.error?.message || 'Failed to download file' },
        { status: 400 }
      )
    }

    const arrayBuffer = await downloadResponse.arrayBuffer()
    const fileBuffer = Buffer.from(arrayBuffer)

    const resolvedName = fileName || metadata.name || 'download'

    logger.info(`[${requestId}] File downloaded successfully`, {
      fileId,
      name: resolvedName,
      size: fileBuffer.length,
      mimeType,
    })

    const base64Data = fileBuffer.toString('base64')

    return NextResponse.json({
      success: true,
      output: {
        file: {
          name: resolvedName,
          mimeType,
          data: base64Data,
          size: fileBuffer.length,
        },
      },
    })
  } catch (error) {
    logger.error(`[${requestId}] Error downloading OneDrive file:`, error)
    return NextResponse.json(
      {
        success: false,
        error: error instanceof Error ? error.message : 'Unknown error occurred',
      },
      { status: 500 }
    )
  }
}
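// [Editor's note, not part of the commit] A minimal sketch of the SSRF-hardening pattern this
// route follows. Judging from its call sites, validateUrlWithDNS resolves the hostname and
// returns { isValid, resolvedIP, error }, and secureFetchWithPinnedIP then connects to that
// pre-resolved IP, presumably so a DNS rebind between validation and fetch cannot redirect
// the request:
//
//   const validation = await validateUrlWithDNS(url, 'url')
//   if (!validation.isValid) throw new Error(validation.error)
//   const res = await secureFetchWithPinnedIP(url, validation.resolvedIP!, { method: 'GET' })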
@@ -3,7 +3,11 @@ import { type NextRequest, NextResponse } from 'next/server'
import * as XLSX from 'xlsx'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { validateMicrosoftGraphId } from '@/lib/core/security/input-validation'
import {
  secureFetchWithPinnedIP,
  validateMicrosoftGraphId,
  validateUrlWithDNS,
} from '@/lib/core/security/input-validation.server'
import { generateRequestId } from '@/lib/core/utils/request'
import { RawFileInputSchema } from '@/lib/uploads/utils/file-schemas'
import {
@@ -36,6 +40,22 @@ const OneDriveUploadSchema = z.object({
  values: ExcelValuesSchema.optional().nullable(),
})

async function secureFetchGraph(
  url: string,
  options: {
    method?: string
    headers?: Record<string, string>
    body?: string | Buffer | Uint8Array
  },
  paramName: string
) {
  const urlValidation = await validateUrlWithDNS(url, paramName)
  if (!urlValidation.isValid) {
    throw new Error(urlValidation.error)
  }
  return secureFetchWithPinnedIP(url, urlValidation.resolvedIP!, options)
}
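// [Editor's note] Usage sketch (assumed, not shown in the diff): every Microsoft Graph call in
// this route is expected to go through the helper above, so each URL is DNS-validated and the
// connection pinned before any request leaves the server, e.g.
//
//   const resp = await secureFetchGraph(graphUrl, { method: 'GET', headers }, 'graphUrl')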

export async function POST(request: NextRequest) {
  const requestId = generateRequestId()

@@ -164,14 +184,18 @@ export async function POST(request: NextRequest) {
      uploadUrl = `${MICROSOFT_GRAPH_BASE}/me/drive/root:/${encodeURIComponent(fileName)}:/content`
    }

    const uploadResponse = await fetch(uploadUrl, {
      method: 'PUT',
      headers: {
        Authorization: `Bearer ${validatedData.accessToken}`,
        'Content-Type': mimeType,
    const uploadResponse = await secureFetchGraph(
      uploadUrl,
      {
        method: 'PUT',
        headers: {
          Authorization: `Bearer ${validatedData.accessToken}`,
          'Content-Type': mimeType,
        },
        body: fileBuffer,
      },
      body: new Uint8Array(fileBuffer),
    })
      'uploadUrl'
    )

    if (!uploadResponse.ok) {
      const errorText = await uploadResponse.text()
@@ -194,8 +218,11 @@ export async function POST(request: NextRequest) {
    if (shouldWriteExcelContent) {
      try {
        let workbookSessionId: string | undefined
        const sessionResp = await fetch(
          `${MICROSOFT_GRAPH_BASE}/me/drive/items/${encodeURIComponent(fileData.id)}/workbook/createSession`,
        const sessionUrl = `${MICROSOFT_GRAPH_BASE}/me/drive/items/${encodeURIComponent(
          fileData.id
        )}/workbook/createSession`
        const sessionResp = await secureFetchGraph(
          sessionUrl,
          {
            method: 'POST',
            headers: {
@@ -203,7 +230,8 @@ export async function POST(request: NextRequest) {
              'Content-Type': 'application/json',
            },
            body: JSON.stringify({ persistChanges: true }),
          }
          },
          'sessionUrl'
        )

        if (sessionResp.ok) {
@@ -216,12 +244,17 @@ export async function POST(request: NextRequest) {
          const listUrl = `${MICROSOFT_GRAPH_BASE}/me/drive/items/${encodeURIComponent(
            fileData.id
          )}/workbook/worksheets?$select=name&$orderby=position&$top=1`
          const listResp = await fetch(listUrl, {
            headers: {
              Authorization: `Bearer ${validatedData.accessToken}`,
              ...(workbookSessionId ? { 'workbook-session-id': workbookSessionId } : {}),
          const listResp = await secureFetchGraph(
            listUrl,
            {
              method: 'GET',
              headers: {
                Authorization: `Bearer ${validatedData.accessToken}`,
                ...(workbookSessionId ? { 'workbook-session-id': workbookSessionId } : {}),
              },
            },
          })
            'listUrl'
          )
          if (listResp.ok) {
            const listData = await listResp.json()
            const firstSheetName = listData?.value?.[0]?.name
@@ -282,15 +315,19 @@ export async function POST(request: NextRequest) {
            )}')/range(address='${encodeURIComponent(computedRangeAddress)}')`
          )

          const excelWriteResponse = await fetch(url.toString(), {
            method: 'PATCH',
            headers: {
              Authorization: `Bearer ${validatedData.accessToken}`,
              'Content-Type': 'application/json',
              ...(workbookSessionId ? { 'workbook-session-id': workbookSessionId } : {}),
          const excelWriteResponse = await secureFetchGraph(
            url.toString(),
            {
              method: 'PATCH',
              headers: {
                Authorization: `Bearer ${validatedData.accessToken}`,
                'Content-Type': 'application/json',
                ...(workbookSessionId ? { 'workbook-session-id': workbookSessionId } : {}),
              },
              body: JSON.stringify({ values: processedValues }),
            },
            body: JSON.stringify({ values: processedValues }),
          })
            'excelWriteUrl'
          )

          if (!excelWriteResponse || !excelWriteResponse.ok) {
            const errorText = excelWriteResponse ? await excelWriteResponse.text() : 'no response'
@@ -319,15 +356,19 @@ export async function POST(request: NextRequest) {

        if (workbookSessionId) {
          try {
            const closeResp = await fetch(
              `${MICROSOFT_GRAPH_BASE}/me/drive/items/${encodeURIComponent(fileData.id)}/workbook/closeSession`,
            const closeUrl = `${MICROSOFT_GRAPH_BASE}/me/drive/items/${encodeURIComponent(
              fileData.id
            )}/workbook/closeSession`
            const closeResp = await secureFetchGraph(
              closeUrl,
              {
                method: 'POST',
                headers: {
                  Authorization: `Bearer ${validatedData.accessToken}`,
                  'workbook-session-id': workbookSessionId,
                },
              }
              },
              'closeSessionUrl'
            )
            if (!closeResp.ok) {
              const closeText = await closeResp.text()
apps/sim/app/api/tools/pipedrive/get-files/route.ts (new file, 153 lines)
@@ -0,0 +1,153 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import {
  secureFetchWithPinnedIP,
  validateUrlWithDNS,
} from '@/lib/core/security/input-validation.server'
import { generateRequestId } from '@/lib/core/utils/request'
import { getFileExtension, getMimeTypeFromExtension } from '@/lib/uploads/utils/file-utils'

export const dynamic = 'force-dynamic'

const logger = createLogger('PipedriveGetFilesAPI')

const PipedriveGetFilesSchema = z.object({
  accessToken: z.string().min(1, 'Access token is required'),
  deal_id: z.string().optional().nullable(),
  person_id: z.string().optional().nullable(),
  org_id: z.string().optional().nullable(),
  limit: z.string().optional().nullable(),
  downloadFiles: z.boolean().optional().default(false),
})

export async function POST(request: NextRequest) {
  const requestId = generateRequestId()

  try {
    const authResult = await checkInternalAuth(request, { requireWorkflowId: false })

    if (!authResult.success) {
      logger.warn(`[${requestId}] Unauthorized Pipedrive get files attempt: ${authResult.error}`)
      return NextResponse.json(
        {
          success: false,
          error: authResult.error || 'Authentication required',
        },
        { status: 401 }
      )
    }

    const body = await request.json()
    const validatedData = PipedriveGetFilesSchema.parse(body)

    const { accessToken, deal_id, person_id, org_id, limit, downloadFiles } = validatedData

    const baseUrl = 'https://api.pipedrive.com/v1/files'
    const queryParams = new URLSearchParams()

    if (deal_id) queryParams.append('deal_id', deal_id)
    if (person_id) queryParams.append('person_id', person_id)
    if (org_id) queryParams.append('org_id', org_id)
    if (limit) queryParams.append('limit', limit)

    const queryString = queryParams.toString()
    const apiUrl = queryString ? `${baseUrl}?${queryString}` : baseUrl

    logger.info(`[${requestId}] Fetching files from Pipedrive`, { deal_id, person_id, org_id })

    const urlValidation = await validateUrlWithDNS(apiUrl, 'apiUrl')
    if (!urlValidation.isValid) {
      return NextResponse.json({ success: false, error: urlValidation.error }, { status: 400 })
    }

    const response = await secureFetchWithPinnedIP(apiUrl, urlValidation.resolvedIP!, {
      method: 'GET',
      headers: {
        Authorization: `Bearer ${accessToken}`,
        Accept: 'application/json',
      },
    })

    const data = await response.json()

    if (!data.success) {
      logger.error(`[${requestId}] Pipedrive API request failed`, { data })
      return NextResponse.json(
        { success: false, error: data.error || 'Failed to fetch files from Pipedrive' },
        { status: 400 }
      )
    }

    const files = data.data || []
    const downloadedFiles: Array<{
      name: string
      mimeType: string
      data: string
      size: number
    }> = []

    if (downloadFiles) {
      for (const file of files) {
        if (!file?.url) continue

        try {
          const fileUrlValidation = await validateUrlWithDNS(file.url, 'fileUrl')
          if (!fileUrlValidation.isValid) continue

          const downloadResponse = await secureFetchWithPinnedIP(
            file.url,
            fileUrlValidation.resolvedIP!,
            {
              method: 'GET',
              headers: { Authorization: `Bearer ${accessToken}` },
            }
          )

          if (!downloadResponse.ok) continue

          const arrayBuffer = await downloadResponse.arrayBuffer()
          const buffer = Buffer.from(arrayBuffer)
          const extension = getFileExtension(file.name || '')
          const mimeType =
            downloadResponse.headers.get('content-type') || getMimeTypeFromExtension(extension)
          const fileName = file.name || `pipedrive-file-${file.id || Date.now()}`

          downloadedFiles.push({
            name: fileName,
            mimeType,
            data: buffer.toString('base64'),
            size: buffer.length,
          })
        } catch (error) {
          logger.warn(`[${requestId}] Failed to download file ${file.id}:`, error)
        }
      }
    }

    logger.info(`[${requestId}] Pipedrive files fetched successfully`, {
      fileCount: files.length,
      downloadedCount: downloadedFiles.length,
    })

    return NextResponse.json({
      success: true,
      output: {
        files,
        downloadedFiles: downloadedFiles.length > 0 ? downloadedFiles : undefined,
        total_items: files.length,
        success: true,
      },
    })
  } catch (error) {
    logger.error(`[${requestId}] Error fetching Pipedrive files:`, error)
    return NextResponse.json(
      {
        success: false,
        error: error instanceof Error ? error.message : 'Unknown error occurred',
      },
      { status: 500 }
    )
  }
}
@@ -2,14 +2,19 @@ import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { StorageService } from '@/lib/uploads'
import {
  extractStorageKey,
  secureFetchWithPinnedIP,
  validateUrlWithDNS,
} from '@/lib/core/security/input-validation.server'
import { generateRequestId } from '@/lib/core/utils/request'
import { type StorageContext, StorageService } from '@/lib/uploads'
import { RawFileInputSchema } from '@/lib/uploads/utils/file-schemas'
import {
  inferContextFromKey,
  isInternalFileUrl,
  processSingleFileToUserFile,
} from '@/lib/uploads/utils/file-utils'
import { resolveInternalFileUrl } from '@/lib/uploads/utils/file-utils.server'
import { verifyFileAccess } from '@/app/api/files/authorization'

export const dynamic = 'force-dynamic'
@@ -18,7 +23,8 @@ const logger = createLogger('PulseParseAPI')

const PulseParseSchema = z.object({
  apiKey: z.string().min(1, 'API key is required'),
  filePath: z.string().min(1, 'File path is required'),
  filePath: z.string().optional(),
  file: RawFileInputSchema.optional(),
  pages: z.string().optional(),
  extractFigure: z.boolean().optional(),
  figureDescription: z.boolean().optional(),
@@ -50,25 +56,48 @@ export async function POST(request: NextRequest) {
    const body = await request.json()
    const validatedData = PulseParseSchema.parse(body)

    logger.info(`[${requestId}] Pulse parse request`, {
      filePath: validatedData.filePath,
      isWorkspaceFile: isInternalFileUrl(validatedData.filePath),
      userId,
    })
    const fileInput = validatedData.file
    let fileUrl = ''
    if (fileInput) {
      logger.info(`[${requestId}] Pulse parse request`, {
        fileName: fileInput.name,
        userId,
      })

    let fileUrl = validatedData.filePath

    if (isInternalFileUrl(validatedData.filePath)) {
      let userFile
      try {
        const storageKey = extractStorageKey(validatedData.filePath)
        const context = inferContextFromKey(storageKey)

        const hasAccess = await verifyFileAccess(storageKey, userId, undefined, context, false)
        userFile = processSingleFileToUserFile(fileInput, requestId, logger)
      } catch (error) {
        return NextResponse.json(
          {
            success: false,
            error: error instanceof Error ? error.message : 'Failed to process file',
          },
          { status: 400 }
        )
      }

      fileUrl = userFile.url || ''
      if (fileUrl && isInternalFileUrl(fileUrl)) {
        const resolution = await resolveInternalFileUrl(fileUrl, userId, requestId, logger)
        if (resolution.error) {
          return NextResponse.json(
            {
              success: false,
              error: resolution.error.message,
            },
            { status: resolution.error.status }
          )
        }
        fileUrl = resolution.fileUrl || ''
      }
      if (!fileUrl && userFile.key) {
        const context = (userFile.context as StorageContext) || inferContextFromKey(userFile.key)
        const hasAccess = await verifyFileAccess(userFile.key, userId, undefined, context, false)
        if (!hasAccess) {
          logger.warn(`[${requestId}] Unauthorized presigned URL generation attempt`, {
            userId,
            key: storageKey,
            key: userFile.key,
            context,
          })
          return NextResponse.json(
@@ -79,22 +108,68 @@ export async function POST(request: NextRequest) {
            { status: 404 }
          )
        }
        fileUrl = await StorageService.generatePresignedDownloadUrl(userFile.key, context, 5 * 60)
      }
    } else if (validatedData.filePath) {
      logger.info(`[${requestId}] Pulse parse request`, {
        filePath: validatedData.filePath,
        isWorkspaceFile: isInternalFileUrl(validatedData.filePath),
        userId,
      })

        fileUrl = await StorageService.generatePresignedDownloadUrl(storageKey, context, 5 * 60)
        logger.info(`[${requestId}] Generated presigned URL for ${context} file`)
      } catch (error) {
        logger.error(`[${requestId}] Failed to generate presigned URL:`, error)
      fileUrl = validatedData.filePath
      const isInternalFilePath = isInternalFileUrl(validatedData.filePath)
      if (isInternalFilePath) {
        const resolution = await resolveInternalFileUrl(
          validatedData.filePath,
          userId,
          requestId,
          logger
        )
        if (resolution.error) {
          return NextResponse.json(
            {
              success: false,
              error: resolution.error.message,
            },
            { status: resolution.error.status }
          )
        }
        fileUrl = resolution.fileUrl || fileUrl
      } else if (validatedData.filePath.startsWith('/')) {
        logger.warn(`[${requestId}] Invalid internal path`, {
          userId,
          path: validatedData.filePath.substring(0, 50),
        })
        return NextResponse.json(
          {
            success: false,
            error: 'Failed to generate file access URL',
            error: 'Invalid file path. Only uploaded files are supported for internal paths.',
          },
          { status: 500 }
          { status: 400 }
        )
      } else {
        const urlValidation = await validateUrlWithDNS(fileUrl, 'filePath')
        if (!urlValidation.isValid) {
          return NextResponse.json(
            {
              success: false,
              error: urlValidation.error,
            },
            { status: 400 }
          )
        }
      }
    } else if (validatedData.filePath?.startsWith('/')) {
      const baseUrl = getBaseUrl()
      fileUrl = `${baseUrl}${validatedData.filePath}`
    }

    if (!fileUrl) {
      return NextResponse.json(
        {
          success: false,
          error: 'File input is required',
        },
        { status: 400 }
      )
    }

    const formData = new FormData()
@@ -119,13 +194,36 @@ export async function POST(request: NextRequest) {
      formData.append('chunk_size', String(validatedData.chunkSize))
    }

    const pulseResponse = await fetch('https://api.runpulse.com/extract', {
      method: 'POST',
      headers: {
        'x-api-key': validatedData.apiKey,
      },
      body: formData,
    })
    const pulseEndpoint = 'https://api.runpulse.com/extract'
    const pulseValidation = await validateUrlWithDNS(pulseEndpoint, 'Pulse API URL')
    if (!pulseValidation.isValid) {
      logger.error(`[${requestId}] Pulse API URL validation failed`, {
        error: pulseValidation.error,
      })
      return NextResponse.json(
        {
          success: false,
          error: 'Failed to reach Pulse API',
        },
        { status: 502 }
      )
    }
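    // [Editor's note, assumption] FormData cannot be handed directly to the pinned-IP fetch
    // below, so the code serializes it through a throwaway Response first: that yields both
    // the encoded multipart body and a content-type header carrying the generated boundary,
    // which are then replayed as a plain Buffer.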

    const pulsePayload = new Response(formData)
    const contentType = pulsePayload.headers.get('content-type') || 'multipart/form-data'
    const bodyBuffer = Buffer.from(await pulsePayload.arrayBuffer())
    const pulseResponse = await secureFetchWithPinnedIP(
      pulseEndpoint,
      pulseValidation.resolvedIP!,
      {
        method: 'POST',
        headers: {
          'x-api-key': validatedData.apiKey,
          'Content-Type': contentType,
        },
        body: bodyBuffer,
      }
    )

    if (!pulseResponse.ok) {
      const errorText = await pulseResponse.text()
@@ -2,14 +2,19 @@ import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { StorageService } from '@/lib/uploads'
import {
  extractStorageKey,
  secureFetchWithPinnedIP,
  validateUrlWithDNS,
} from '@/lib/core/security/input-validation.server'
import { generateRequestId } from '@/lib/core/utils/request'
import { type StorageContext, StorageService } from '@/lib/uploads'
import { RawFileInputSchema } from '@/lib/uploads/utils/file-schemas'
import {
  inferContextFromKey,
  isInternalFileUrl,
  processSingleFileToUserFile,
} from '@/lib/uploads/utils/file-utils'
import { resolveInternalFileUrl } from '@/lib/uploads/utils/file-utils.server'
import { verifyFileAccess } from '@/app/api/files/authorization'

export const dynamic = 'force-dynamic'
@@ -18,7 +23,8 @@ const logger = createLogger('ReductoParseAPI')

const ReductoParseSchema = z.object({
  apiKey: z.string().min(1, 'API key is required'),
  filePath: z.string().min(1, 'File path is required'),
  filePath: z.string().optional(),
  file: RawFileInputSchema.optional(),
  pages: z.array(z.number()).optional(),
  tableOutputFormat: z.enum(['html', 'md']).optional(),
})
@@ -46,31 +52,49 @@ export async function POST(request: NextRequest) {
    const body = await request.json()
    const validatedData = ReductoParseSchema.parse(body)

    logger.info(`[${requestId}] Reducto parse request`, {
      filePath: validatedData.filePath,
      isWorkspaceFile: isInternalFileUrl(validatedData.filePath),
      userId,
    })
    const fileInput = validatedData.file
    let fileUrl = ''
    if (fileInput) {
      logger.info(`[${requestId}] Reducto parse request`, {
        fileName: fileInput.name,
        userId,
      })

    let fileUrl = validatedData.filePath

    if (isInternalFileUrl(validatedData.filePath)) {
      let userFile
      try {
        const storageKey = extractStorageKey(validatedData.filePath)
        const context = inferContextFromKey(storageKey)

        const hasAccess = await verifyFileAccess(
          storageKey,
          userId,
          undefined, // customConfig
          context, // context
          false // isLocal
        userFile = processSingleFileToUserFile(fileInput, requestId, logger)
      } catch (error) {
        return NextResponse.json(
          {
            success: false,
            error: error instanceof Error ? error.message : 'Failed to process file',
          },
          { status: 400 }
        )
      }

      fileUrl = userFile.url || ''
      if (fileUrl && isInternalFileUrl(fileUrl)) {
        const resolution = await resolveInternalFileUrl(fileUrl, userId, requestId, logger)
        if (resolution.error) {
          return NextResponse.json(
            {
              success: false,
              error: resolution.error.message,
            },
            { status: resolution.error.status }
          )
        }
        fileUrl = resolution.fileUrl || ''
      }
      if (!fileUrl && userFile.key) {
        const context = (userFile.context as StorageContext) || inferContextFromKey(userFile.key)
        const hasAccess = await verifyFileAccess(userFile.key, userId, undefined, context, false)

        if (!hasAccess) {
          logger.warn(`[${requestId}] Unauthorized presigned URL generation attempt`, {
            userId,
            key: storageKey,
            key: userFile.key,
            context,
          })
          return NextResponse.json(
@@ -82,21 +106,68 @@ export async function POST(request: NextRequest) {
          )
        }

        fileUrl = await StorageService.generatePresignedDownloadUrl(storageKey, context, 5 * 60)
        logger.info(`[${requestId}] Generated presigned URL for ${context} file`)
      } catch (error) {
        logger.error(`[${requestId}] Failed to generate presigned URL:`, error)
        fileUrl = await StorageService.generatePresignedDownloadUrl(userFile.key, context, 5 * 60)
      }
    } else if (validatedData.filePath) {
      logger.info(`[${requestId}] Reducto parse request`, {
        filePath: validatedData.filePath,
        isWorkspaceFile: isInternalFileUrl(validatedData.filePath),
        userId,
      })

      fileUrl = validatedData.filePath
      const isInternalFilePath = isInternalFileUrl(validatedData.filePath)
      if (isInternalFilePath) {
        const resolution = await resolveInternalFileUrl(
          validatedData.filePath,
          userId,
          requestId,
          logger
        )
        if (resolution.error) {
          return NextResponse.json(
            {
              success: false,
              error: resolution.error.message,
            },
            { status: resolution.error.status }
          )
        }
        fileUrl = resolution.fileUrl || fileUrl
      } else if (validatedData.filePath.startsWith('/')) {
        logger.warn(`[${requestId}] Invalid internal path`, {
          userId,
          path: validatedData.filePath.substring(0, 50),
        })
        return NextResponse.json(
          {
            success: false,
            error: 'Failed to generate file access URL',
            error: 'Invalid file path. Only uploaded files are supported for internal paths.',
          },
          { status: 500 }
          { status: 400 }
        )
      } else {
        const urlValidation = await validateUrlWithDNS(fileUrl, 'filePath')
        if (!urlValidation.isValid) {
          return NextResponse.json(
            {
              success: false,
              error: urlValidation.error,
            },
            { status: 400 }
          )
        }
      }
    } else if (validatedData.filePath?.startsWith('/')) {
      const baseUrl = getBaseUrl()
      fileUrl = `${baseUrl}${validatedData.filePath}`
    }

    if (!fileUrl) {
      return NextResponse.json(
        {
          success: false,
          error: 'File input is required',
        },
        { status: 400 }
      )
    }

    const reductoBody: Record<string, unknown> = {
@@ -115,15 +186,34 @@ export async function POST(request: NextRequest) {
      }
    }

    const reductoResponse = await fetch('https://platform.reducto.ai/parse', {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        Accept: 'application/json',
        Authorization: `Bearer ${validatedData.apiKey}`,
      },
      body: JSON.stringify(reductoBody),
    })
    const reductoEndpoint = 'https://platform.reducto.ai/parse'
    const reductoValidation = await validateUrlWithDNS(reductoEndpoint, 'Reducto API URL')
    if (!reductoValidation.isValid) {
      logger.error(`[${requestId}] Reducto API URL validation failed`, {
        error: reductoValidation.error,
      })
      return NextResponse.json(
        {
          success: false,
          error: 'Failed to reach Reducto API',
        },
        { status: 502 }
      )
    }

    const reductoResponse = await secureFetchWithPinnedIP(
      reductoEndpoint,
      reductoValidation.resolvedIP!,
      {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
          Accept: 'application/json',
          Authorization: `Bearer ${validatedData.apiKey}`,
        },
        body: JSON.stringify(reductoBody),
      }
    )

    if (!reductoResponse.ok) {
      const errorText = await reductoResponse.text()
@@ -2,7 +2,10 @@ import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { sanitizeUrlForLog } from '@/lib/core/utils/logging'
import {
  secureFetchWithPinnedIP,
  validateUrlWithDNS,
} from '@/lib/core/security/input-validation.server'
import { generateRequestId } from '@/lib/core/utils/request'
import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
@@ -21,6 +24,22 @@ const SharepointUploadSchema = z.object({
  files: RawFileInputArraySchema.optional().nullable(),
})

async function secureFetchGraph(
  url: string,
  options: {
    method?: string
    headers?: Record<string, string>
    body?: string | Buffer | Uint8Array
  },
  paramName: string
) {
  const urlValidation = await validateUrlWithDNS(url, paramName)
  if (!urlValidation.isValid) {
    throw new Error(urlValidation.error)
  }
  return secureFetchWithPinnedIP(url, urlValidation.resolvedIP!, options)
}
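// [Editor's note] This helper is a verbatim twin of secureFetchGraph in the OneDrive upload
// route; the commit appears to duplicate it per route rather than share one module, presumably
// to keep each tool route self-contained (an observation, not stated in the diff).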

export async function POST(request: NextRequest) {
  const requestId = generateRequestId()

@@ -81,14 +100,17 @@ export async function POST(request: NextRequest) {
    let effectiveDriveId = validatedData.driveId
    if (!effectiveDriveId) {
      logger.info(`[${requestId}] No driveId provided, fetching default drive for site`)
      const driveResponse = await fetch(
        `https://graph.microsoft.com/v1.0/sites/${validatedData.siteId}/drive`,
      const driveUrl = `https://graph.microsoft.com/v1.0/sites/${validatedData.siteId}/drive`
      const driveResponse = await secureFetchGraph(
        driveUrl,
        {
          method: 'GET',
          headers: {
            Authorization: `Bearer ${validatedData.accessToken}`,
            Accept: 'application/json',
          },
        }
        },
        'driveUrl'
      )

      if (!driveResponse.ok) {
@@ -145,16 +167,20 @@ export async function POST(request: NextRequest) {

      const uploadUrl = `https://graph.microsoft.com/v1.0/sites/${validatedData.siteId}/drives/${effectiveDriveId}/root:${encodedPath}:/content`

      logger.info(`[${requestId}] Uploading to: ${sanitizeUrlForLog(uploadUrl)}`)
      logger.info(`[${requestId}] Uploading to: ${uploadUrl}`)

      const uploadResponse = await fetch(uploadUrl, {
        method: 'PUT',
        headers: {
          Authorization: `Bearer ${validatedData.accessToken}`,
          'Content-Type': userFile.type || 'application/octet-stream',
      const uploadResponse = await secureFetchGraph(
        uploadUrl,
        {
          method: 'PUT',
          headers: {
            Authorization: `Bearer ${validatedData.accessToken}`,
            'Content-Type': userFile.type || 'application/octet-stream',
          },
          body: buffer,
        },
        body: new Uint8Array(buffer),
      })
        'uploadUrl'
      )

      if (!uploadResponse.ok) {
        const errorData = await uploadResponse.json().catch(() => ({}))
apps/sim/app/api/tools/slack/download/route.ts (new file, 170 lines)
@@ -0,0 +1,170 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import {
  secureFetchWithPinnedIP,
  validateUrlWithDNS,
} from '@/lib/core/security/input-validation.server'
import { generateRequestId } from '@/lib/core/utils/request'

export const dynamic = 'force-dynamic'

const logger = createLogger('SlackDownloadAPI')

const SlackDownloadSchema = z.object({
  accessToken: z.string().min(1, 'Access token is required'),
  fileId: z.string().min(1, 'File ID is required'),
  fileName: z.string().optional().nullable(),
})

export async function POST(request: NextRequest) {
  const requestId = generateRequestId()

  try {
    const authResult = await checkInternalAuth(request, { requireWorkflowId: false })

    if (!authResult.success) {
      logger.warn(`[${requestId}] Unauthorized Slack download attempt: ${authResult.error}`)
      return NextResponse.json(
        {
          success: false,
          error: authResult.error || 'Authentication required',
        },
        { status: 401 }
      )
    }

    logger.info(`[${requestId}] Authenticated Slack download request via ${authResult.authType}`, {
      userId: authResult.userId,
    })

    const body = await request.json()
    const validatedData = SlackDownloadSchema.parse(body)

    const { accessToken, fileId, fileName } = validatedData

    logger.info(`[${requestId}] Getting file info from Slack`, { fileId })

    const infoResponse = await fetch(`https://slack.com/api/files.info?file=${fileId}`, {
      method: 'GET',
      headers: {
        Authorization: `Bearer ${accessToken}`,
      },
    })

    if (!infoResponse.ok) {
      const errorDetails = await infoResponse.json().catch(() => ({}))
      logger.error(`[${requestId}] Failed to get file info from Slack`, {
        status: infoResponse.status,
        statusText: infoResponse.statusText,
        error: errorDetails,
      })
      return NextResponse.json(
        {
          success: false,
          error: errorDetails.error || 'Failed to get file info',
        },
        { status: 400 }
      )
    }

    const data = await infoResponse.json()

    if (!data.ok) {
      logger.error(`[${requestId}] Slack API returned error`, { error: data.error })
      return NextResponse.json(
        {
          success: false,
          error: data.error || 'Slack API error',
        },
        { status: 400 }
      )
    }

    const file = data.file
    const resolvedFileName = fileName || file.name || 'download'
    const mimeType = file.mimetype || 'application/octet-stream'
    const urlPrivate = file.url_private

    if (!urlPrivate) {
      return NextResponse.json(
        {
          success: false,
          error: 'File does not have a download URL',
        },
        { status: 400 }
      )
    }

    const urlValidation = await validateUrlWithDNS(urlPrivate, 'urlPrivate')
    if (!urlValidation.isValid) {
      return NextResponse.json(
        {
          success: false,
          error: urlValidation.error,
        },
        { status: 400 }
      )
    }

    logger.info(`[${requestId}] Downloading file from Slack`, {
      fileId,
      fileName: resolvedFileName,
      mimeType,
    })

    const downloadResponse = await secureFetchWithPinnedIP(urlPrivate, urlValidation.resolvedIP!, {
      headers: {
        Authorization: `Bearer ${accessToken}`,
      },
    })

    if (!downloadResponse.ok) {
      logger.error(`[${requestId}] Failed to download file content`, {
        status: downloadResponse.status,
        statusText: downloadResponse.statusText,
      })
      return NextResponse.json(
        {
          success: false,
          error: 'Failed to download file content',
        },
        { status: 400 }
      )
    }

    const arrayBuffer = await downloadResponse.arrayBuffer()
    const fileBuffer = Buffer.from(arrayBuffer)

    logger.info(`[${requestId}] File downloaded successfully`, {
      fileId,
      name: resolvedFileName,
      size: fileBuffer.length,
      mimeType,
    })

    const base64Data = fileBuffer.toString('base64')

    return NextResponse.json({
      success: true,
      output: {
        file: {
          name: resolvedFileName,
          mimeType,
          data: base64Data,
          size: fileBuffer.length,
        },
      },
    })
  } catch (error) {
    logger.error(`[${requestId}] Error downloading Slack file:`, error)
    return NextResponse.json(
      {
        success: false,
        error: error instanceof Error ? error.message : 'Unknown error occurred',
      },
      { status: 500 }
    )
  }
}
@@ -1,8 +1,28 @@
import type { Logger } from '@sim/logger'
import {
  secureFetchWithPinnedIP,
  validateUrlWithDNS,
} from '@/lib/core/security/input-validation.server'
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
import type { ToolFileData } from '@/tools/types'

async function secureFetchExternal(
  url: string,
  options: {
    method?: string
    headers?: Record<string, string>
    body?: string | Buffer | Uint8Array
  },
  paramName: string
) {
  const urlValidation = await validateUrlWithDNS(url, paramName)
  if (!urlValidation.isValid) {
    throw new Error(urlValidation.error)
  }
  return secureFetchWithPinnedIP(url, urlValidation.resolvedIP!, options)
}
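// [Editor's note] Usage sketch (assumed): Slack's files.getUploadURLExternal flow hands back an
// arbitrary upload_url, so it is treated like any other untrusted external URL, e.g.
//
//   const resp = await secureFetchExternal(urlData.upload_url, { method: 'POST', body: buffer }, 'uploadUrl')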

/**
 * Sends a message to a Slack channel using chat.postMessage
 */
@@ -108,10 +128,14 @@ export async function uploadFilesToSlack(

    logger.info(`[${requestId}] Got upload URL for ${userFile.name}, file_id: ${urlData.file_id}`)

    const uploadResponse = await fetch(urlData.upload_url, {
      method: 'POST',
      body: new Uint8Array(buffer),
    })
    const uploadResponse = await secureFetchExternal(
      urlData.upload_url,
      {
        method: 'POST',
        body: buffer,
      },
      'uploadUrl'
    )

    if (!uploadResponse.ok) {
      logger.error(`[${requestId}] Failed to upload file data: ${uploadResponse.status}`)
@@ -3,6 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { env } from '@/lib/core/config/env'
import { validateUrlWithDNS } from '@/lib/core/security/input-validation.server'
import { isSensitiveKey, REDACTED_MARKER } from '@/lib/core/security/redaction'
import { ensureZodObject, normalizeUrl } from '@/app/api/tools/stagehand/utils'

@@ -123,6 +124,10 @@ export async function POST(request: NextRequest) {
    const variablesObject = processVariables(params.variables)

    const startUrl = normalizeUrl(rawStartUrl)
    const urlValidation = await validateUrlWithDNS(startUrl, 'startUrl')
    if (!urlValidation.isValid) {
      return NextResponse.json({ error: urlValidation.error }, { status: 400 })
    }
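    // [Editor's note, assumption] Only validateUrlWithDNS is used here, with no pinned-IP
    // fetch: the browser agent itself navigates to startUrl, so the check acts as a pre-flight
    // gate rejecting URLs that resolve to private or otherwise blocked addresses.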

    logger.info('Starting Stagehand agent process', {
      rawStartUrl,
@@ -3,7 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { env } from '@/lib/core/config/env'
import { sanitizeUrlForLog } from '@/lib/core/utils/logging'
import { validateUrlWithDNS } from '@/lib/core/security/input-validation.server'
import { ensureZodObject, normalizeUrl } from '@/app/api/tools/stagehand/utils'

const logger = createLogger('StagehandExtractAPI')
@@ -52,6 +52,10 @@ export async function POST(request: NextRequest) {
    const params = validationResult.data
    const { url: rawUrl, instruction, selector, provider, apiKey, schema } = params
    const url = normalizeUrl(rawUrl)
    const urlValidation = await validateUrlWithDNS(url, 'url')
    if (!urlValidation.isValid) {
      return NextResponse.json({ error: urlValidation.error }, { status: 400 })
    }

    logger.info('Starting Stagehand extraction process', {
      rawUrl,
@@ -121,7 +125,7 @@ export async function POST(request: NextRequest) {

    const page = stagehand.context.pages()[0]

    logger.info(`Navigating to ${sanitizeUrlForLog(url)}`)
    logger.info(`Navigating to ${url}`)
    await page.goto(url, { waitUntil: 'networkidle' })
    logger.info('Navigation complete')
@@ -2,8 +2,15 @@ import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { extractAudioFromVideo, isVideoFile } from '@/lib/audio/extractor'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { sanitizeUrlForLog } from '@/lib/core/utils/logging'
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
import {
  secureFetchWithPinnedIP,
  validateUrlWithDNS,
} from '@/lib/core/security/input-validation.server'
import { isInternalFileUrl } from '@/lib/uploads/utils/file-utils'
import {
  downloadFileFromStorage,
  resolveInternalFileUrl,
} from '@/lib/uploads/utils/file-utils.server'
import type { UserFile } from '@/executor/types'
import type { TranscriptSegment } from '@/tools/stt/types'

@@ -46,6 +53,7 @@ export async function POST(request: NextRequest) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const userId = authResult.userId
    const body: SttRequestBody = await request.json()
    const {
      provider,
@@ -73,6 +81,9 @@ export async function POST(request: NextRequest) {
    let audioMimeType: string

    if (body.audioFile) {
      if (Array.isArray(body.audioFile) && body.audioFile.length !== 1) {
        return NextResponse.json({ error: 'audioFile must be a single file' }, { status: 400 })
      }
      const file = Array.isArray(body.audioFile) ? body.audioFile[0] : body.audioFile
      logger.info(`[${requestId}] Processing uploaded file: ${file.name}`)

@@ -80,6 +91,12 @@ export async function POST(request: NextRequest) {
      audioFileName = file.name
      audioMimeType = file.type
    } else if (body.audioFileReference) {
      if (Array.isArray(body.audioFileReference) && body.audioFileReference.length !== 1) {
        return NextResponse.json(
          { error: 'audioFileReference must be a single file' },
          { status: 400 }
        )
      }
      const file = Array.isArray(body.audioFileReference)
        ? body.audioFileReference[0]
        : body.audioFileReference
@@ -89,16 +106,50 @@ export async function POST(request: NextRequest) {
      audioFileName = file.name
      audioMimeType = file.type
    } else if (body.audioUrl) {
      logger.info(`[${requestId}] Downloading from URL: ${sanitizeUrlForLog(body.audioUrl)}`)
      logger.info(`[${requestId}] Downloading from URL: ${body.audioUrl}`)

      const response = await fetch(body.audioUrl)
      let audioUrl = body.audioUrl.trim()
      if (audioUrl.startsWith('/') && !isInternalFileUrl(audioUrl)) {
        return NextResponse.json(
          {
            error: 'Invalid file path. Only uploaded files are supported for internal paths.',
          },
          { status: 400 }
        )
      }

      if (isInternalFileUrl(audioUrl)) {
        if (!userId) {
          return NextResponse.json(
            { error: 'Authentication required for internal file access' },
            { status: 401 }
          )
        }
        const resolution = await resolveInternalFileUrl(audioUrl, userId, requestId, logger)
        if (resolution.error) {
          return NextResponse.json(
            { error: resolution.error.message },
            { status: resolution.error.status }
          )
        }
        audioUrl = resolution.fileUrl || audioUrl
      }
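      // [Editor's note, assumption] Ordering matters here: internal /api/files/serve/ URLs are
      // first resolved to storage-backed download URLs, and only the resulting URL goes through
      // DNS validation and the pinned-IP fetch below, so internal and external audio sources
      // share the same hardened download path.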

      const urlValidation = await validateUrlWithDNS(audioUrl, 'audioUrl')
      if (!urlValidation.isValid) {
        return NextResponse.json({ error: urlValidation.error }, { status: 400 })
      }

      const response = await secureFetchWithPinnedIP(audioUrl, urlValidation.resolvedIP!, {
        method: 'GET',
      })
      if (!response.ok) {
        throw new Error(`Failed to download audio from URL: ${response.statusText}`)
      }

      const arrayBuffer = await response.arrayBuffer()
      audioBuffer = Buffer.from(arrayBuffer)
      audioFileName = body.audioUrl.split('/').pop() || 'audio_file'
      audioFileName = audioUrl.split('/').pop() || 'audio_file'
      audioMimeType = response.headers.get('content-type') || 'audio/mpeg'
    } else {
      return NextResponse.json(
@@ -4,18 +4,18 @@ import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import {
  secureFetchWithPinnedIP,
  validateAwsRegion,
  validateExternalUrl,
  validateS3BucketName,
} from '@/lib/core/security/input-validation'
  validateUrlWithDNS,
} from '@/lib/core/security/input-validation.server'
import { generateRequestId } from '@/lib/core/utils/request'
import { StorageService } from '@/lib/uploads'
import { RawFileInputSchema } from '@/lib/uploads/utils/file-schemas'
import { isInternalFileUrl, processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
import {
  extractStorageKey,
  inferContextFromKey,
  isInternalFileUrl,
} from '@/lib/uploads/utils/file-utils'
import { verifyFileAccess } from '@/app/api/files/authorization'
  downloadFileFromStorage,
  resolveInternalFileUrl,
} from '@/lib/uploads/utils/file-utils.server'

export const dynamic = 'force-dynamic'
export const maxDuration = 300 // 5 minutes for large multi-page PDF processing
@@ -35,6 +35,7 @@ const TextractParseSchema = z
    region: z.string().min(1, 'AWS region is required'),
    processingMode: z.enum(['sync', 'async']).optional().default('sync'),
    filePath: z.string().optional(),
    file: RawFileInputSchema.optional(),
    s3Uri: z.string().optional(),
    featureTypes: z
      .array(z.enum(['TABLES', 'FORMS', 'QUERIES', 'SIGNATURES', 'LAYOUT']))
@@ -50,6 +51,20 @@ const TextractParseSchema = z
        path: ['region'],
      })
    }
    if (data.processingMode === 'async' && !data.s3Uri) {
      ctx.addIssue({
        code: z.ZodIssueCode.custom,
        message: 'S3 URI is required for multi-page processing (s3://bucket/key)',
        path: ['s3Uri'],
      })
    }
    if (data.processingMode !== 'async' && !data.file && !data.filePath) {
      ctx.addIssue({
        code: z.ZodIssueCode.custom,
        message: 'File input is required for single-page processing',
        path: ['filePath'],
      })
    }
  })
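// [Editor's note] Illustrative payload shapes (assumed) for the refinement above: async mode
// requires an S3 location, sync mode requires some file input.
//
//   { processingMode: 'async', s3Uri: 's3://bucket/key', ... }   // passes
//   { processingMode: 'sync', file: { ... }, ... }               // passes
//   { processingMode: 'async' }                                  // fails: s3Uri missing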

function getSignatureKey(
@@ -111,7 +126,14 @@ function signAwsRequest(
}

async function fetchDocumentBytes(url: string): Promise<{ bytes: string; contentType: string }> {
  const response = await fetch(url)
  const urlValidation = await validateUrlWithDNS(url, 'Document URL')
  if (!urlValidation.isValid) {
    throw new Error(urlValidation.error || 'Invalid document URL')
  }

  const response = await secureFetchWithPinnedIP(url, urlValidation.resolvedIP!, {
    method: 'GET',
  })
  if (!response.ok) {
    throw new Error(`Failed to fetch document: ${response.statusText}`)
  }
@@ -318,8 +340,8 @@ export async function POST(request: NextRequest) {

    logger.info(`[${requestId}] Textract parse request`, {
      processingMode,
      filePath: validatedData.filePath?.substring(0, 50),
      s3Uri: validatedData.s3Uri?.substring(0, 50),
      hasFile: Boolean(validatedData.file),
      hasS3Uri: Boolean(validatedData.s3Uri),
      featureTypes,
      userId,
    })
@@ -414,90 +436,89 @@ export async function POST(request: NextRequest) {
      })
    }

    if (!validatedData.filePath) {
      return NextResponse.json(
        {
          success: false,
          error: 'File path is required for single-page processing',
        },
        { status: 400 }
      )
    }
    let bytes = ''
    let contentType = 'application/octet-stream'
    let isPdf = false

    let fileUrl = validatedData.filePath

    const isInternalFilePath = validatedData.filePath && isInternalFileUrl(validatedData.filePath)

    if (isInternalFilePath) {
    if (validatedData.file) {
      let userFile
      try {
        const storageKey = extractStorageKey(validatedData.filePath)
        const context = inferContextFromKey(storageKey)

        const hasAccess = await verifyFileAccess(storageKey, userId, undefined, context, false)

        if (!hasAccess) {
          logger.warn(`[${requestId}] Unauthorized presigned URL generation attempt`, {
            userId,
            key: storageKey,
            context,
          })
          return NextResponse.json(
            {
              success: false,
              error: 'File not found',
            },
            { status: 404 }
          )
        }

        fileUrl = await StorageService.generatePresignedDownloadUrl(storageKey, context, 5 * 60)
        logger.info(`[${requestId}] Generated presigned URL for ${context} file`)
        userFile = processSingleFileToUserFile(validatedData.file, requestId, logger)
      } catch (error) {
        logger.error(`[${requestId}] Failed to generate presigned URL:`, error)
        return NextResponse.json(
          {
            success: false,
            error: 'Failed to generate file access URL',
          },
          { status: 500 }
        )
      }
    } else if (validatedData.filePath?.startsWith('/')) {
      // Reject arbitrary absolute paths that don't contain /api/files/serve/
      logger.warn(`[${requestId}] Invalid internal path`, {
        userId,
        path: validatedData.filePath.substring(0, 50),
      })
      return NextResponse.json(
        {
          success: false,
          error: 'Invalid file path. Only uploaded files are supported for internal paths.',
        },
        { status: 400 }
      )
    } else {
      const urlValidation = validateExternalUrl(fileUrl, 'Document URL')
      if (!urlValidation.isValid) {
        logger.warn(`[${requestId}] SSRF attempt blocked`, {
          userId,
          url: fileUrl.substring(0, 100),
          error: urlValidation.error,
        })
        return NextResponse.json(
          {
            success: false,
            error: urlValidation.error,
            error: error instanceof Error ? error.message : 'Failed to process file',
          },
          { status: 400 }
        )
      }

      const buffer = await downloadFileFromStorage(userFile, requestId, logger)
      bytes = buffer.toString('base64')
      contentType = userFile.type || 'application/octet-stream'
      isPdf = contentType.includes('pdf') || userFile.name?.toLowerCase().endsWith('.pdf')
    } else if (validatedData.filePath) {
      let fileUrl = validatedData.filePath

      const isInternalFilePath = isInternalFileUrl(fileUrl)

      if (isInternalFilePath) {
        const resolution = await resolveInternalFileUrl(fileUrl, userId, requestId, logger)
        if (resolution.error) {
          return NextResponse.json(
            {
              success: false,
              error: resolution.error.message,
            },
            { status: resolution.error.status }
          )
        }
        fileUrl = resolution.fileUrl || fileUrl
      } else if (fileUrl.startsWith('/')) {
        logger.warn(`[${requestId}] Invalid internal path`, {
          userId,
          path: fileUrl.substring(0, 50),
        })
        return NextResponse.json(
          {
            success: false,
            error: 'Invalid file path. Only uploaded files are supported for internal paths.',
          },
          { status: 400 }
        )
      } else {
        const urlValidation = await validateUrlWithDNS(fileUrl, 'Document URL')
        if (!urlValidation.isValid) {
          logger.warn(`[${requestId}] SSRF attempt blocked`, {
            userId,
            url: fileUrl.substring(0, 100),
            error: urlValidation.error,
          })
          return NextResponse.json(
            {
              success: false,
              error: urlValidation.error,
            },
            { status: 400 }
          )
        }
      }

      const fetched = await fetchDocumentBytes(fileUrl)
      bytes = fetched.bytes
      contentType = fetched.contentType
      isPdf = contentType.includes('pdf') || fileUrl.toLowerCase().endsWith('.pdf')
    } else {
      return NextResponse.json(
        {
          success: false,
          error: 'File input is required for single-page processing',
        },
        { status: 400 }
      )
    }

    const { bytes, contentType } = await fetchDocumentBytes(fileUrl)

    // Track if this is a PDF for better error messaging
    const isPdf = contentType.includes('pdf') || fileUrl.toLowerCase().endsWith('.pdf')

    const uri = '/'

    let textractBody: Record<string, unknown>
apps/sim/app/api/tools/twilio/get-recording/route.ts (new file, 219 lines)
@@ -0,0 +1,219 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
import {
|
||||
secureFetchWithPinnedIP,
|
||||
validateUrlWithDNS,
|
||||
} from '@/lib/core/security/input-validation.server'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { getExtensionFromMimeType } from '@/lib/uploads/utils/file-utils'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
const logger = createLogger('TwilioGetRecordingAPI')
|
||||
|
||||
const TwilioGetRecordingSchema = z.object({
|
||||
accountSid: z.string().min(1, 'Account SID is required'),
|
||||
authToken: z.string().min(1, 'Auth token is required'),
|
||||
recordingSid: z.string().min(1, 'Recording SID is required'),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = generateRequestId()
|
||||
|
||||
try {
|
||||
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
|
||||
|
||||
if (!authResult.success) {
|
||||
logger.warn(`[${requestId}] Unauthorized Twilio get recording attempt: ${authResult.error}`)
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: authResult.error || 'Authentication required',
|
||||
},
|
||||
{ status: 401 }
|
||||
)
|
||||
}
|
||||
|
||||
const body = await request.json()
|
||||
const validatedData = TwilioGetRecordingSchema.parse(body)
|
||||
|
||||
const { accountSid, authToken, recordingSid } = validatedData
|
||||
|
||||
if (!accountSid.startsWith('AC')) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: `Invalid Account SID format. Account SID must start with "AC" (you provided: ${accountSid.substring(0, 2)}...)`,
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const twilioAuth = Buffer.from(`${accountSid}:${authToken}`).toString('base64')
|
||||
|
||||
logger.info(`[${requestId}] Getting recording info from Twilio`, { recordingSid })
|
||||
|
||||
const infoUrl = `https://api.twilio.com/2010-04-01/Accounts/${accountSid}/Recordings/${recordingSid}.json`
|
||||
const infoUrlValidation = await validateUrlWithDNS(infoUrl, 'infoUrl')
|
||||
if (!infoUrlValidation.isValid) {
|
||||
return NextResponse.json({ success: false, error: infoUrlValidation.error }, { status: 400 })
|
||||
}

    const infoResponse = await secureFetchWithPinnedIP(infoUrl, infoUrlValidation.resolvedIP!, {
      method: 'GET',
      headers: { Authorization: `Basic ${twilioAuth}` },
    })

    if (!infoResponse.ok) {
      const errorData = await infoResponse.json().catch(() => ({}))
      logger.error(`[${requestId}] Twilio API error`, {
        status: infoResponse.status,
        error: errorData,
      })
      return NextResponse.json(
        { success: false, error: errorData.message || `Twilio API error: ${infoResponse.status}` },
        { status: 400 }
      )
    }

    const data = await infoResponse.json()

    if (data.error_code) {
      return NextResponse.json({
        success: false,
        output: {
          success: false,
          error: data.message || data.error_message || 'Failed to retrieve recording',
        },
        error: data.message || data.error_message || 'Failed to retrieve recording',
      })
    }

    const baseUrl = 'https://api.twilio.com'
    const mediaUrl = data.uri ? `${baseUrl}${data.uri.replace('.json', '')}` : undefined

    let transcriptionText: string | undefined
    let transcriptionStatus: string | undefined
    let transcriptionPrice: string | undefined
    let transcriptionPriceUnit: string | undefined
    let file:
      | {
          name: string
          mimeType: string
          data: string
          size: number
        }
      | undefined

    try {
      const transcriptionUrl = `https://api.twilio.com/2010-04-01/Accounts/${accountSid}/Transcriptions.json?RecordingSid=${data.sid}`
      logger.info(`[${requestId}] Checking for transcriptions`)

      const transcriptionUrlValidation = await validateUrlWithDNS(
        transcriptionUrl,
        'transcriptionUrl'
      )
      if (transcriptionUrlValidation.isValid) {
        const transcriptionResponse = await secureFetchWithPinnedIP(
          transcriptionUrl,
          transcriptionUrlValidation.resolvedIP!,
          {
            method: 'GET',
            headers: { Authorization: `Basic ${twilioAuth}` },
          }
        )

        if (transcriptionResponse.ok) {
          const transcriptionData = await transcriptionResponse.json()

          if (transcriptionData.transcriptions && transcriptionData.transcriptions.length > 0) {
            const transcription = transcriptionData.transcriptions[0]
            transcriptionText = transcription.transcription_text
            transcriptionStatus = transcription.status
            transcriptionPrice = transcription.price
            transcriptionPriceUnit = transcription.price_unit
            logger.info(`[${requestId}] Transcription found`, {
              status: transcriptionStatus,
              textLength: transcriptionText?.length,
            })
          }
        }
      }
    } catch (error) {
      logger.warn(`[${requestId}] Failed to fetch transcription:`, error)
    }

    if (mediaUrl) {
      try {
        const mediaUrlValidation = await validateUrlWithDNS(mediaUrl, 'mediaUrl')
        if (mediaUrlValidation.isValid) {
          const mediaResponse = await secureFetchWithPinnedIP(
            mediaUrl,
            mediaUrlValidation.resolvedIP!,
            {
              method: 'GET',
              headers: { Authorization: `Basic ${twilioAuth}` },
            }
          )

          if (mediaResponse.ok) {
            const contentType =
              mediaResponse.headers.get('content-type') || 'application/octet-stream'
            const extension = getExtensionFromMimeType(contentType) || 'dat'
            const arrayBuffer = await mediaResponse.arrayBuffer()
            const buffer = Buffer.from(arrayBuffer)
            const fileName = `${data.sid || recordingSid}.${extension}`

            file = {
              name: fileName,
              mimeType: contentType,
              data: buffer.toString('base64'),
              size: buffer.length,
            }
          }
        }
      } catch (error) {
        logger.warn(`[${requestId}] Failed to download recording media:`, error)
      }
    }

    logger.info(`[${requestId}] Twilio recording fetched successfully`, {
      recordingSid: data.sid,
      hasFile: !!file,
      hasTranscription: !!transcriptionText,
    })

    return NextResponse.json({
      success: true,
      output: {
        success: true,
        recordingSid: data.sid,
        callSid: data.call_sid,
        duration: data.duration ? Number.parseInt(data.duration, 10) : undefined,
        status: data.status,
        channels: data.channels,
        source: data.source,
        mediaUrl,
        file,
        price: data.price,
        priceUnit: data.price_unit,
        uri: data.uri,
        transcriptionText,
        transcriptionStatus,
        transcriptionPrice,
        transcriptionPriceUnit,
      },
    })
  } catch (error) {
    logger.error(`[${requestId}] Error fetching Twilio recording:`, error)
    return NextResponse.json(
      {
        success: false,
        error: error instanceof Error ? error.message : 'Unknown error occurred',
      },
      { status: 500 }
    )
  }
}

@@ -3,10 +3,17 @@ import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import {
  secureFetchWithPinnedIP,
  validateUrlWithDNS,
} from '@/lib/core/security/input-validation.server'
import { generateRequestId } from '@/lib/core/utils/request'
import { RawFileInputSchema } from '@/lib/uploads/utils/file-schemas'
import { processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
import { isInternalFileUrl, processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
import {
  downloadFileFromStorage,
  resolveInternalFileUrl,
} from '@/lib/uploads/utils/file-utils.server'
import { convertUsageMetadata, extractTextContent } from '@/providers/google/utils'

export const dynamic = 'force-dynamic'

@@ -42,6 +49,7 @@ export async function POST(request: NextRequest) {
      userId: authResult.userId,
    })

    const userId = authResult.userId
    const body = await request.json()
    const validatedData = VisionAnalyzeSchema.parse(body)

@@ -80,12 +88,65 @@ export async function POST(request: NextRequest) {
        )
      }

      const buffer = await downloadFileFromStorage(userFile, requestId, logger)

      const base64 = buffer.toString('base64')
      let base64 = userFile.base64
      let bufferLength = 0
      if (!base64) {
        const buffer = await downloadFileFromStorage(userFile, requestId, logger)
        base64 = buffer.toString('base64')
        bufferLength = buffer.length
      }
      const mimeType = userFile.type || 'image/jpeg'
      imageSource = `data:${mimeType};base64,${base64}`
      logger.info(`[${requestId}] Converted image to base64 (${buffer.length} bytes)`)
      if (bufferLength > 0) {
        logger.info(`[${requestId}] Converted image to base64 (${bufferLength} bytes)`)
      }
    }

    let imageUrlValidation: Awaited<ReturnType<typeof validateUrlWithDNS>> | null = null
    if (imageSource && !imageSource.startsWith('data:')) {
      if (imageSource.startsWith('/') && !isInternalFileUrl(imageSource)) {
        return NextResponse.json(
          {
            success: false,
            error: 'Invalid file path. Only uploaded files are supported for internal paths.',
          },
          { status: 400 }
        )
      }

      if (isInternalFileUrl(imageSource)) {
        if (!userId) {
          return NextResponse.json(
            {
              success: false,
              error: 'Authentication required for internal file access',
            },
            { status: 401 }
          )
        }
        const resolution = await resolveInternalFileUrl(imageSource, userId, requestId, logger)
        if (resolution.error) {
          return NextResponse.json(
            {
              success: false,
              error: resolution.error.message,
            },
            { status: resolution.error.status }
          )
        }
        imageSource = resolution.fileUrl || imageSource
      }

      imageUrlValidation = await validateUrlWithDNS(imageSource, 'imageUrl')
      if (!imageUrlValidation.isValid) {
        return NextResponse.json(
          {
            success: false,
            error: imageUrlValidation.error,
          },
          { status: 400 }
        )
      }
    }
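    // The validation result is kept in imageUrlValidation so the Gemini branch
    // further down can reuse the already-resolved IP for its pinned fetch
    // instead of resolving the hostname a second time.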

    const defaultPrompt = 'Please analyze this image and describe what you see in detail.'

@@ -113,7 +174,15 @@ export async function POST(request: NextRequest) {
    if (isGemini) {
      let base64Payload = imageSource
      if (!base64Payload.startsWith('data:')) {
        const response = await fetch(base64Payload)
        const urlValidation =
          imageUrlValidation || (await validateUrlWithDNS(base64Payload, 'imageUrl'))
        if (!urlValidation.isValid) {
          return NextResponse.json({ success: false, error: urlValidation.error }, { status: 400 })
        }

        const response = await secureFetchWithPinnedIP(base64Payload, urlValidation.resolvedIP!, {
          method: 'GET',
        })
        if (!response.ok) {
          return NextResponse.json(
            { success: false, error: 'Failed to fetch image for Gemini' },

@@ -126,7 +195,6 @@ export async function POST(request: NextRequest) {
        const base64 = Buffer.from(arrayBuffer).toString('base64')
        base64Payload = `data:${contentType};base64,${base64}`
      }

      const base64Marker = ';base64,'
      const markerIndex = base64Payload.indexOf(base64Marker)
      if (!base64Payload.startsWith('data:') || markerIndex === -1) {

apps/sim/app/api/tools/zoom/get-recordings/route.ts
Normal file
@@ -0,0 +1,182 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import {
  secureFetchWithPinnedIP,
  validateUrlWithDNS,
} from '@/lib/core/security/input-validation.server'
import { generateRequestId } from '@/lib/core/utils/request'
import { getExtensionFromMimeType } from '@/lib/uploads/utils/file-utils'

export const dynamic = 'force-dynamic'

const logger = createLogger('ZoomGetRecordingsAPI')

const ZoomGetRecordingsSchema = z.object({
  accessToken: z.string().min(1, 'Access token is required'),
  meetingId: z.string().min(1, 'Meeting ID is required'),
  includeFolderItems: z.boolean().optional(),
  ttl: z.number().optional(),
  downloadFiles: z.boolean().optional().default(false),
})

export async function POST(request: NextRequest) {
  const requestId = generateRequestId()

  try {
    const authResult = await checkInternalAuth(request, { requireWorkflowId: false })

    if (!authResult.success) {
      logger.warn(`[${requestId}] Unauthorized Zoom get recordings attempt: ${authResult.error}`)
      return NextResponse.json(
        {
          success: false,
          error: authResult.error || 'Authentication required',
        },
        { status: 401 }
      )
    }

    const body = await request.json()
    const validatedData = ZoomGetRecordingsSchema.parse(body)

    const { accessToken, meetingId, includeFolderItems, ttl, downloadFiles } = validatedData

    const baseUrl = `https://api.zoom.us/v2/meetings/${encodeURIComponent(meetingId)}/recordings`
    const queryParams = new URLSearchParams()

    if (includeFolderItems != null) {
      queryParams.append('include_folder_items', String(includeFolderItems))
    }
    if (ttl) {
      queryParams.append('ttl', String(ttl))
    }

    const queryString = queryParams.toString()
    const apiUrl = queryString ? `${baseUrl}?${queryString}` : baseUrl

    logger.info(`[${requestId}] Fetching recordings from Zoom`, { meetingId })

    const urlValidation = await validateUrlWithDNS(apiUrl, 'apiUrl')
    if (!urlValidation.isValid) {
      return NextResponse.json({ success: false, error: urlValidation.error }, { status: 400 })
    }

    const response = await secureFetchWithPinnedIP(apiUrl, urlValidation.resolvedIP!, {
      method: 'GET',
      headers: {
        'Content-Type': 'application/json',
        Authorization: `Bearer ${accessToken}`,
      },
    })

    if (!response.ok) {
      const errorData = await response.json().catch(() => ({}))
      logger.error(`[${requestId}] Zoom API error`, {
        status: response.status,
        error: errorData,
      })
      return NextResponse.json(
        { success: false, error: errorData.message || `Zoom API error: ${response.status}` },
        { status: 400 }
      )
    }

    const data = await response.json()
    const files: Array<{
      name: string
      mimeType: string
      data: string
      size: number
    }> = []

    if (downloadFiles && Array.isArray(data.recording_files)) {
      for (const file of data.recording_files) {
        if (!file?.download_url) continue

        try {
          const fileUrlValidation = await validateUrlWithDNS(file.download_url, 'downloadUrl')
          if (!fileUrlValidation.isValid) continue
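          // Each recording file can point at a different download host, so the
          // URL is re-validated and the IP re-pinned per file; entries that
          // fail validation or download are skipped rather than failing the
          // whole request.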

          const downloadResponse = await secureFetchWithPinnedIP(
            file.download_url,
            fileUrlValidation.resolvedIP!,
            {
              method: 'GET',
              headers: { Authorization: `Bearer ${accessToken}` },
            }
          )

          if (!downloadResponse.ok) continue

          const contentType =
            downloadResponse.headers.get('content-type') || 'application/octet-stream'
          const arrayBuffer = await downloadResponse.arrayBuffer()
          const buffer = Buffer.from(arrayBuffer)
          const extension =
            file.file_extension?.toString().toLowerCase() ||
            getExtensionFromMimeType(contentType) ||
            'dat'
          const fileName = `zoom-recording-${file.id || file.recording_start || Date.now()}.${extension}`

          files.push({
            name: fileName,
            mimeType: contentType,
            data: buffer.toString('base64'),
            size: buffer.length,
          })
        } catch (error) {
          logger.warn(`[${requestId}] Failed to download recording file:`, error)
        }
      }
    }

    logger.info(`[${requestId}] Zoom recordings fetched successfully`, {
      recordingCount: data.recording_files?.length || 0,
      downloadedCount: files.length,
    })

    return NextResponse.json({
      success: true,
      output: {
        recording: {
          uuid: data.uuid,
          id: data.id,
          account_id: data.account_id,
          host_id: data.host_id,
          topic: data.topic,
          type: data.type,
          start_time: data.start_time,
          duration: data.duration,
          total_size: data.total_size,
          recording_count: data.recording_count,
          share_url: data.share_url,
          recording_files: (data.recording_files || []).map((file: any) => ({
            id: file.id,
            meeting_id: file.meeting_id,
            recording_start: file.recording_start,
            recording_end: file.recording_end,
            file_type: file.file_type,
            file_extension: file.file_extension,
            file_size: file.file_size,
            play_url: file.play_url,
            download_url: file.download_url,
            status: file.status,
            recording_type: file.recording_type,
          })),
        },
        files: files.length > 0 ? files : undefined,
      },
    })
  } catch (error) {
    logger.error(`[${requestId}] Error fetching Zoom recordings:`, error)
    return NextResponse.json(
      {
        success: false,
        error: error instanceof Error ? error.message : 'Unknown error occurred',
      },
      { status: 500 }
    )
  }
}

@@ -5,7 +5,6 @@
import { ArrowDown, Loader2 } from 'lucide-react'
import { useRouter } from 'next/navigation'
import { Button } from '@/components/emcn'
import { sanitizeUrlForLog } from '@/lib/core/utils/logging'
import { extractWorkspaceIdFromExecutionKey, getViewerUrl } from '@/lib/uploads/utils/file-utils'

const logger = createLogger('FileCards')

@@ -58,7 +57,7 @@ function FileCard({ file, isExecutionFile = false, workspaceId }: FileCardProps)
    if (file.key.startsWith('url/')) {
      if (file.url) {
        window.open(file.url, '_blank')
        logger.info(`Opened URL-type file directly: ${sanitizeUrlForLog(file.url)}`)
        logger.info(`Opened URL-type file directly: ${file.url}`)
        return
      }
      throw new Error('URL is required for URL-type files')

@@ -78,13 +77,13 @@ function FileCard({ file, isExecutionFile = false, workspaceId }: FileCardProps)
      const serveUrl =
        file.url || `/api/files/serve/${encodeURIComponent(file.key)}?context=execution`
      window.open(serveUrl, '_blank')
      logger.info(`Opened execution file serve URL: ${sanitizeUrlForLog(serveUrl)}`)
      logger.info(`Opened execution file serve URL: ${serveUrl}`)
    } else {
      const viewerUrl = resolvedWorkspaceId ? getViewerUrl(file.key, resolvedWorkspaceId) : null

      if (viewerUrl) {
        router.push(viewerUrl)
        logger.info(`Navigated to viewer URL: ${sanitizeUrlForLog(viewerUrl)}`)
        logger.info(`Navigated to viewer URL: ${viewerUrl}`)
      } else {
        logger.warn(
          `Could not construct viewer URL for file: ${file.name}, falling back to serve URL`

@@ -779,7 +779,7 @@ export const DiscordBlock: BlockConfig<DiscordResponse> = {
    reason: { type: 'string', description: 'Reason for moderation action' },
    archived: { type: 'string', description: 'Archive status (true/false)' },
    attachmentFiles: { type: 'json', description: 'Files to attach (UI upload)' },
    files: { type: 'array', description: 'Files to attach (UserFile array)' },
    files: { type: 'file[]', description: 'Files to attach (UserFile array)' },
    limit: { type: 'number', description: 'Message limit' },
    autoArchiveDuration: { type: 'number', description: 'Thread auto-archive duration in minutes' },
    channelType: { type: 'number', description: 'Discord channel type (0=text, 2=voice, etc.)' },

@@ -1,11 +1,48 @@
import { createLogger } from '@sim/logger'
import { DocumentIcon } from '@/components/icons'
import { inferContextFromKey } from '@/lib/uploads/utils/file-utils'
import type { BlockConfig, SubBlockType } from '@/blocks/types'
import { createVersionedToolSelector } from '@/blocks/utils'
import type { FileParserOutput, FileParserV3Output } from '@/tools/file/types'

const logger = createLogger('FileBlock')

const resolveFilePathFromInput = (fileInput: unknown): string | null => {
  if (!fileInput || typeof fileInput !== 'object') {
    return null
  }

  const record = fileInput as Record<string, unknown>
  if (typeof record.path === 'string' && record.path.trim() !== '') {
    return record.path
  }
  if (typeof record.url === 'string' && record.url.trim() !== '') {
    return record.url
  }
  if (typeof record.key === 'string' && record.key.trim() !== '') {
    const key = record.key.trim()
    const context = typeof record.context === 'string' ? record.context : inferContextFromKey(key)
    return `/api/files/serve/${encodeURIComponent(key)}?context=${context}`
  }

  return null
}
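// Illustrative examples (hypothetical values): { path: '/files/a.pdf' }
// resolves to '/files/a.pdf', while { key: 'ws1/doc.pdf', context: 'workspace' }
// resolves to '/api/files/serve/ws1%2Fdoc.pdf?context=workspace'.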

const resolveFilePathsFromInput = (fileInput: unknown): string[] => {
  if (!fileInput) {
    return []
  }

  if (Array.isArray(fileInput)) {
    return fileInput
      .map((file) => resolveFilePathFromInput(file))
      .filter((path): path is string => Boolean(path))
  }

  const resolved = resolveFilePathFromInput(fileInput)
  return resolved ? [resolved] : []
}

export const FileBlock: BlockConfig<FileParserOutput> = {
  type: 'file',
  name: 'File (Legacy)',

@@ -79,24 +116,14 @@ export const FileBlock: BlockConfig<FileParserOutput> = {

      // Handle file upload input
      if (inputMethod === 'upload') {
        // Handle case where 'file' is an array (multiple files)
        if (params.file && Array.isArray(params.file) && params.file.length > 0) {
          const filePaths = params.file.map((file) => file.path)

        const filePaths = resolveFilePathsFromInput(params.file)
        if (filePaths.length > 0) {
          return {
            filePath: filePaths.length === 1 ? filePaths[0] : filePaths,
            fileType: params.fileType || 'auto',
          }
        }

        // Handle case where 'file' is a single file object
        if (params.file?.path) {
          return {
            filePath: params.file.path,
            fileType: params.fileType || 'auto',
          }
        }

        // If no files, return error
        logger.error('No files provided for upload method')
        throw new Error('Please upload a file')

@@ -182,16 +209,17 @@ export const FileV2Block: BlockConfig<FileParserOutput> = {
      }

      if (Array.isArray(fileInput) && fileInput.length > 0) {
        const filePaths = fileInput.map((file) => file.path)
        const filePaths = resolveFilePathsFromInput(fileInput)
        return {
          filePath: filePaths.length === 1 ? filePaths[0] : filePaths,
          fileType: params.fileType || 'auto',
        }
      }

      if (fileInput?.path) {
      const resolvedSingle = resolveFilePathsFromInput(fileInput)
      if (resolvedSingle.length > 0) {
        return {
          filePath: fileInput.path,
          filePath: resolvedSingle[0],
          fileType: params.fileType || 'auto',
        }
      }

@@ -274,9 +302,7 @@ export const FileV3Block: BlockConfig<FileParserV3Output> = {
      }

      if (Array.isArray(fileInput)) {
        const filePaths = fileInput
          .map((file) => (file as { url?: string; path?: string }).url || file.path)
          .filter((path): path is string => Boolean(path))
        const filePaths = resolveFilePathsFromInput(fileInput)
        if (filePaths.length === 0) {
          logger.error('No valid file paths found in file input array')
          throw new Error('File input is required')

@@ -291,13 +317,13 @@ export const FileV3Block: BlockConfig<FileParserV3Output> = {
      }

      if (typeof fileInput === 'object') {
        const filePath = (fileInput as { url?: string; path?: string }).url || fileInput.path
        if (!filePath) {
          logger.error('File input object missing path or url')
        const resolvedPaths = resolveFilePathsFromInput(fileInput)
        if (resolvedPaths.length === 0) {
          logger.error('File input object missing path, url, or key')
          throw new Error('File input is required')
        }
        return {
          filePath,
          filePath: resolvedPaths[0],
          fileType: params.fileType || 'auto',
          workspaceId: params._context?.workspaceId,
          workflowId: params._context?.workflowId,

@@ -4,6 +4,26 @@ import { AuthMode } from '@/blocks/types'
import type { FirefliesResponse } from '@/tools/fireflies/types'
import { getTrigger } from '@/triggers'

const resolveHttpsUrlFromFileInput = (fileInput: unknown): string | null => {
  if (!fileInput || typeof fileInput !== 'object') {
    return null
  }

  const record = fileInput as Record<string, unknown>
  const url =
    typeof record.url === 'string'
      ? record.url.trim()
      : typeof record.path === 'string'
        ? record.path.trim()
        : ''

  if (!url || !url.startsWith('https://')) {
    return null
  }

  return url
}
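// Only absolute https URLs pass through: http, data:, and internal
// /api/files/serve/ paths all resolve to null, which the caller below reports
// as 'Audio file must include a https URL.'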

export const FirefliesBlock: BlockConfig<FirefliesResponse> = {
  type: 'fireflies',
  name: 'Fireflies',

@@ -587,3 +607,74 @@ Return ONLY the summary text - no quotes, no labels.`,
    available: ['fireflies_transcription_complete'],
  },
}

const firefliesV2SubBlocks = (FirefliesBlock.subBlocks || []).filter(
  (subBlock) => subBlock.id !== 'audioUrl'
)
const firefliesV2Inputs = FirefliesBlock.inputs
  ? Object.fromEntries(Object.entries(FirefliesBlock.inputs).filter(([key]) => key !== 'audioUrl'))
  : {}

export const FirefliesV2Block: BlockConfig<FirefliesResponse> = {
  ...FirefliesBlock,
  type: 'fireflies_v2',
  name: 'Fireflies (File Only)',
  description: 'Interact with Fireflies.ai meeting transcripts and recordings',
  hideFromToolbar: true,
  subBlocks: firefliesV2SubBlocks,
  tools: {
    ...FirefliesBlock.tools,
    config: {
      ...FirefliesBlock.tools?.config,
      tool: (params) =>
        FirefliesBlock.tools?.config?.tool
          ? FirefliesBlock.tools.config.tool(params)
          : params.operation || 'fireflies_list_transcripts',
      params: (params) => {
        const baseParams = FirefliesBlock.tools?.config?.params
        if (!baseParams) {
          return params
        }

        if (params.operation === 'fireflies_upload_audio') {
          let audioInput = params.audioFile || params.audioFileReference
          if (!audioInput) {
            throw new Error('Audio file is required.')
          }
          if (typeof audioInput === 'string') {
            try {
              audioInput = JSON.parse(audioInput)
            } catch {
              throw new Error('Audio file must be a valid file reference.')
            }
          }
          if (Array.isArray(audioInput)) {
            throw new Error(
              'File reference must be a single file, not an array. Use <block.files[0]> to select one file.'
            )
          }
          if (typeof audioInput !== 'object' || audioInput === null) {
            throw new Error('Audio file must be a file reference.')
          }
          const audioUrl = resolveHttpsUrlFromFileInput(audioInput)
          if (!audioUrl) {
            throw new Error('Audio file must include a https URL.')
          }

          return baseParams({
            ...params,
            audioUrl,
            audioFile: undefined,
            audioFileReference: undefined,
          })
        }

        return baseParams(params)
      },
    },
  },
  inputs: {
    ...firefliesV2Inputs,
    audioFileReference: { type: 'json', description: 'Audio/video file reference' },
  },
}

@@ -1,6 +1,7 @@
import { GoogleSheetsIcon } from '@/components/icons'
import type { BlockConfig } from '@/blocks/types'
import { AuthMode } from '@/blocks/types'
import { createVersionedToolSelector } from '@/blocks/utils'
import type { GoogleSheetsResponse, GoogleSheetsV2Response } from '@/tools/google_sheets/types'

// Legacy block - hidden from toolbar

@@ -681,34 +682,38 @@ Return ONLY the JSON array - no explanations, no markdown, no extra text.`,
      'google_sheets_copy_sheet_v2',
    ],
    config: {
      tool: (params) => {
        switch (params.operation) {
          case 'read':
            return 'google_sheets_read_v2'
          case 'write':
            return 'google_sheets_write_v2'
          case 'update':
            return 'google_sheets_update_v2'
          case 'append':
            return 'google_sheets_append_v2'
          case 'clear':
            return 'google_sheets_clear_v2'
          case 'get_info':
            return 'google_sheets_get_spreadsheet_v2'
          case 'create':
            return 'google_sheets_create_spreadsheet_v2'
          case 'batch_get':
            return 'google_sheets_batch_get_v2'
          case 'batch_update':
            return 'google_sheets_batch_update_v2'
          case 'batch_clear':
            return 'google_sheets_batch_clear_v2'
          case 'copy_sheet':
            return 'google_sheets_copy_sheet_v2'
          default:
            throw new Error(`Invalid Google Sheets V2 operation: ${params.operation}`)
        }
      },
      tool: createVersionedToolSelector({
        baseToolSelector: (params) => {
          switch (params.operation) {
            case 'read':
              return 'google_sheets_read'
            case 'write':
              return 'google_sheets_write'
            case 'update':
              return 'google_sheets_update'
            case 'append':
              return 'google_sheets_append'
            case 'clear':
              return 'google_sheets_clear'
            case 'get_info':
              return 'google_sheets_get_spreadsheet'
            case 'create':
              return 'google_sheets_create_spreadsheet'
            case 'batch_get':
              return 'google_sheets_batch_get'
            case 'batch_update':
              return 'google_sheets_batch_update'
            case 'batch_clear':
              return 'google_sheets_batch_clear'
            case 'copy_sheet':
              return 'google_sheets_copy_sheet'
            default:
              throw new Error(`Invalid Google Sheets operation: ${params.operation}`)
          }
        },
        suffix: '_v2',
        fallbackToolId: 'google_sheets_read_v2',
      }),
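      // createVersionedToolSelector (assumed behavior, inferred from its
      // arguments): resolve the base tool id, append the '_v2' suffix, and
      // fall back to fallbackToolId when no versioned variant applies.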
      params: (params) => {
        const {
          credential,

@@ -3,6 +3,26 @@ import type { BlockConfig } from '@/blocks/types'
import { AuthMode } from '@/blocks/types'
import type { GoogleSlidesResponse } from '@/tools/google_slides/types'

const resolveHttpsUrlFromFileInput = (fileInput: unknown): string | null => {
  if (!fileInput || typeof fileInput !== 'object') {
    return null
  }

  const record = fileInput as Record<string, unknown>
  const url =
    typeof record.url === 'string'
      ? record.url.trim()
      : typeof record.path === 'string'
        ? record.path.trim()
        : ''

  if (!url || !url.startsWith('https://')) {
    return null
  }

  return url
}

export const GoogleSlidesBlock: BlockConfig<GoogleSlidesResponse> = {
  type: 'google_slides',
  name: 'Google Slides',

@@ -903,3 +923,99 @@ Return ONLY the text content - no explanations, no markdown formatting markers,
    text: { type: 'string', description: 'Text that was inserted' },
  },
}

const googleSlidesV2SubBlocks = (GoogleSlidesBlock.subBlocks || []).flatMap((subBlock) => {
  if (subBlock.id === 'imageFile') {
    return [
      {
        ...subBlock,
        canonicalParamId: 'imageFile',
      },
    ]
  }

  if (subBlock.id !== 'imageUrl') {
    return [subBlock]
  }

  return [
    {
      id: 'imageFileReference',
      title: 'Image',
      type: 'short-input',
      canonicalParamId: 'imageFile',
      placeholder: 'Reference image from previous blocks',
      mode: 'advanced',
      required: true,
      condition: { field: 'operation', value: 'add_image' },
    },
  ]
})

const googleSlidesV2Inputs = GoogleSlidesBlock.inputs
  ? Object.fromEntries(
      Object.entries(GoogleSlidesBlock.inputs).filter(
        ([key]) => key !== 'imageUrl' && key !== 'imageSource'
      )
    )
  : {}

export const GoogleSlidesV2Block: BlockConfig<GoogleSlidesResponse> = {
  ...GoogleSlidesBlock,
  type: 'google_slides_v2',
  name: 'Google Slides (File Only)',
  description: 'Read, write, and create presentations',
  hideFromToolbar: true,
  subBlocks: googleSlidesV2SubBlocks,
  tools: {
    ...GoogleSlidesBlock.tools,
    config: {
      ...GoogleSlidesBlock.tools?.config,
      params: (params) => {
        const baseParams = GoogleSlidesBlock.tools?.config?.params
        if (!baseParams) {
          return params
        }

        if (params.operation === 'add_image') {
          let imageInput = params.imageFile || params.imageFileReference || params.imageSource
          if (!imageInput) {
            throw new Error('Image file is required.')
          }
          if (typeof imageInput === 'string') {
            try {
              imageInput = JSON.parse(imageInput)
            } catch {
              throw new Error('Image file must be a valid file reference.')
            }
          }
          if (Array.isArray(imageInput)) {
            throw new Error(
              'File reference must be a single file, not an array. Use <block.files[0]> to select one file.'
            )
          }
          if (typeof imageInput !== 'object' || imageInput === null) {
            throw new Error('Image file must be a file reference.')
          }
          const imageUrl = resolveHttpsUrlFromFileInput(imageInput)
          if (!imageUrl) {
            throw new Error('Image file must include a https URL.')
          }

          return baseParams({
            ...params,
            imageUrl,
            imageFileReference: undefined,
            imageSource: undefined,
          })
        }

        return baseParams(params)
      },
    },
  },
  inputs: {
    ...googleSlidesV2Inputs,
    imageFileReference: { type: 'json', description: 'Image file reference' },
  },
}

@@ -1025,7 +1025,7 @@ Return ONLY the comment text - no explanations.`,
    commentId: { type: 'string', description: 'Comment ID for update/delete operations' },
    // Attachment operation inputs
    attachmentFiles: { type: 'json', description: 'Files to attach (UI upload)' },
    files: { type: 'array', description: 'Files to attach (UserFile array)' },
    files: { type: 'file[]', description: 'Files to attach (UserFile array)' },
    attachmentId: { type: 'string', description: 'Attachment ID for delete operation' },
    // Worklog operation inputs
    timeSpentSeconds: {

@@ -1,6 +1,7 @@
import { MicrosoftExcelIcon } from '@/components/icons'
import type { BlockConfig } from '@/blocks/types'
import { AuthMode } from '@/blocks/types'
import { createVersionedToolSelector } from '@/blocks/utils'
import type {
  MicrosoftExcelResponse,
  MicrosoftExcelV2Response,

@@ -489,16 +490,20 @@ Return ONLY the JSON array - no explanations, no markdown, no extra text.`,
  tools: {
    access: ['microsoft_excel_read_v2', 'microsoft_excel_write_v2'],
    config: {
      tool: (params) => {
        switch (params.operation) {
          case 'read':
            return 'microsoft_excel_read_v2'
          case 'write':
            return 'microsoft_excel_write_v2'
          default:
            throw new Error(`Invalid Microsoft Excel V2 operation: ${params.operation}`)
        }
      },
      tool: createVersionedToolSelector({
        baseToolSelector: (params) => {
          switch (params.operation) {
            case 'read':
              return 'microsoft_excel_read'
            case 'write':
              return 'microsoft_excel_write'
            default:
              throw new Error(`Invalid Microsoft Excel operation: ${params.operation}`)
          }
        },
        suffix: '_v2',
        fallbackToolId: 'microsoft_excel_read_v2',
      }),
      params: (params) => {
        const {
          credential,

@@ -94,7 +94,7 @@ export const MistralParseBlock: BlockConfig<MistralParserOutput> = {
        if (!params.fileUpload) {
          throw new Error('Please upload a PDF document')
        }
        parameters.fileUpload = params.fileUpload
        parameters.file = params.fileUpload
      }

      let pagesArray: number[] | undefined

@@ -162,7 +162,7 @@ export const MistralParseV2Block: BlockConfig<MistralParserOutput> = {
    required: true,
  },
  {
    id: 'filePath',
    id: 'fileReference',
    title: 'File Reference',
    type: 'short-input' as SubBlockType,
    canonicalParamId: 'document',

@@ -213,15 +213,26 @@ export const MistralParseV2Block: BlockConfig<MistralParserOutput> = {
        resultType: params.resultType || 'markdown',
      }
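      // A string value here is treated as a serialized file reference (for
      // example one pasted into the field), so it is JSON.parsed below and
      // then required to be a single, non-array object before use.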

      const documentInput = params.fileUpload || params.filePath || params.document
      let documentInput = params.fileUpload || params.fileReference || params.document
      if (!documentInput) {
        throw new Error('PDF document is required')
      }
      if (typeof documentInput === 'object') {
        parameters.fileData = documentInput
      } else if (typeof documentInput === 'string') {
        parameters.filePath = documentInput.trim()
      if (typeof documentInput === 'string') {
        try {
          documentInput = JSON.parse(documentInput)
        } catch {
          throw new Error('PDF document must be a valid file reference')
        }
      }
      if (Array.isArray(documentInput)) {
        throw new Error(
          'File reference must be a single file, not an array. Use <block.attachments[0]> to select one file.'
        )
      }
      if (typeof documentInput !== 'object' || documentInput === null) {
        throw new Error('PDF document must be a file reference')
      }
      parameters.file = documentInput

      let pagesArray: number[] | undefined
      if (params.pages && params.pages.trim() !== '') {

@@ -257,7 +268,7 @@ export const MistralParseV2Block: BlockConfig<MistralParserOutput> = {
  },
  inputs: {
    document: { type: 'json', description: 'Document input (file upload or file reference)' },
    filePath: { type: 'string', description: 'File reference (advanced mode)' },
    fileReference: { type: 'json', description: 'File reference (advanced mode)' },
    fileUpload: { type: 'json', description: 'Uploaded PDF file (basic mode)' },
    apiKey: { type: 'string', description: 'Mistral API key' },
    resultType: { type: 'string', description: 'Output format type' },

@@ -412,6 +412,7 @@ export const NotionV2Block: BlockConfig<any> = {
      'notion_read_database_v2',
      'notion_write_v2',
      'notion_create_page_v2',
      'notion_update_page_v2',
      'notion_query_database_v2',
      'notion_search_v2',
      'notion_create_database_v2',

@@ -392,7 +392,7 @@ export const OutlookBlock: BlockConfig<OutlookResponse> = {
    body: { type: 'string', description: 'Email content' },
    contentType: { type: 'string', description: 'Content type (Text or HTML)' },
    attachmentFiles: { type: 'json', description: 'Files to attach (UI upload)' },
    attachments: { type: 'array', description: 'Files to attach (UserFile array)' },
    attachments: { type: 'file[]', description: 'Files to attach (UserFile array)' },
    // Forward operation inputs
    messageId: { type: 'string', description: 'Message ID to forward' },
    comment: { type: 'string', description: 'Optional comment for forwarding' },

@@ -804,6 +804,7 @@ Return ONLY the date string in YYYY-MM-DD format - no explanations, no quotes, n
    deals: { type: 'json', description: 'Array of deal objects' },
    deal: { type: 'json', description: 'Single deal object' },
    files: { type: 'json', description: 'Array of file objects' },
    downloadedFiles: { type: 'file[]', description: 'Downloaded files from Pipedrive' },
    messages: { type: 'json', description: 'Array of mail message objects' },
    pipelines: { type: 'json', description: 'Array of pipeline objects' },
    projects: { type: 'json', description: 'Array of project objects' },

@@ -1,11 +1,13 @@
import { PulseIcon } from '@/components/icons'
import { AuthMode, type BlockConfig, type SubBlockType } from '@/blocks/types'
import { createVersionedToolSelector } from '@/blocks/utils'
import type { PulseParserOutput } from '@/tools/pulse/types'

export const PulseBlock: BlockConfig<PulseParserOutput> = {
  type: 'pulse',
  name: 'Pulse',
  description: 'Extract text from documents using Pulse OCR',
  hideFromToolbar: true,
  authMode: AuthMode.ApiKey,
  longDescription:
    'Integrate Pulse into the workflow. Extract text from PDF documents, images, and Office files via URL or upload.',

@@ -77,7 +79,7 @@ export const PulseBlock: BlockConfig<PulseParserOutput> = {
        throw new Error('Document is required')
      }
      if (typeof documentInput === 'object') {
        parameters.fileUpload = documentInput
        parameters.file = documentInput
      } else if (typeof documentInput === 'string') {
        parameters.filePath = documentInput.trim()
      }

@@ -126,3 +128,78 @@ export const PulseBlock: BlockConfig<PulseParserOutput> = {
    figures: { type: 'json', description: 'Extracted figures if figure extraction was enabled' },
  },
}

const pulseV2Inputs = PulseBlock.inputs
  ? Object.fromEntries(Object.entries(PulseBlock.inputs).filter(([key]) => key !== 'filePath'))
  : {}
const pulseV2SubBlocks = (PulseBlock.subBlocks || []).filter(
  (subBlock) => subBlock.id !== 'filePath'
)

export const PulseV2Block: BlockConfig<PulseParserOutput> = {
  ...PulseBlock,
  type: 'pulse_v2',
  name: 'Pulse (File Only)',
  hideFromToolbar: false,
  longDescription:
    'Integrate Pulse into the workflow. Extract text from PDF documents, images, and Office files via upload.',
  subBlocks: pulseV2SubBlocks,
  tools: {
    access: ['pulse_parser_v2'],
    config: {
      tool: createVersionedToolSelector({
        baseToolSelector: () => 'pulse_parser',
        suffix: '_v2',
        fallbackToolId: 'pulse_parser_v2',
      }),
      params: (params) => {
        if (!params || !params.apiKey || params.apiKey.trim() === '') {
          throw new Error('Pulse API key is required')
        }

        const parameters: Record<string, unknown> = {
          apiKey: params.apiKey.trim(),
        }

        let documentInput = params.fileUpload || params.document
        if (!documentInput) {
          throw new Error('Document file is required')
        }
        if (typeof documentInput === 'string') {
          try {
            documentInput = JSON.parse(documentInput)
          } catch {
            throw new Error('Document file must be a valid file reference')
          }
        }
        if (Array.isArray(documentInput)) {
          throw new Error(
            'File reference must be a single file, not an array. Use <block.attachments[0]> to select one file.'
          )
        }
        if (typeof documentInput !== 'object' || documentInput === null) {
          throw new Error('Document file must be a file reference')
        }
        parameters.file = documentInput

        if (params.pages && params.pages.trim() !== '') {
          parameters.pages = params.pages.trim()
        }

        if (params.chunking && params.chunking.trim() !== '') {
          parameters.chunking = params.chunking.trim()
        }

        if (params.chunkSize && params.chunkSize.trim() !== '') {
          const size = Number.parseInt(params.chunkSize.trim(), 10)
          if (!Number.isNaN(size) && size > 0) {
            parameters.chunkSize = size
          }
        }

        return parameters
      },
    },
  },
  inputs: pulseV2Inputs,
}

@@ -1,11 +1,13 @@
import { ReductoIcon } from '@/components/icons'
import { AuthMode, type BlockConfig, type SubBlockType } from '@/blocks/types'
import { createVersionedToolSelector } from '@/blocks/utils'
import type { ReductoParserOutput } from '@/tools/reducto/types'

export const ReductoBlock: BlockConfig<ReductoParserOutput> = {
  type: 'reducto',
  name: 'Reducto',
  description: 'Extract text from PDF documents',
  hideFromToolbar: true,
  authMode: AuthMode.ApiKey,
  longDescription: `Integrate Reducto Parse into the workflow. Can extract text from uploaded PDF documents, or from a URL.`,
  docsLink: 'https://docs.sim.ai/tools/reducto',

@@ -74,7 +76,7 @@ export const ReductoBlock: BlockConfig<ReductoParserOutput> = {
      }

      if (typeof documentInput === 'object') {
        parameters.fileUpload = documentInput
        parameters.file = documentInput
      } else if (typeof documentInput === 'string') {
        parameters.filePath = documentInput.trim()
      }

@@ -132,3 +134,94 @@ export const ReductoBlock: BlockConfig<ReductoParserOutput> = {
    studio_link: { type: 'string', description: 'Link to Reducto studio interface' },
  },
}

const reductoV2Inputs = ReductoBlock.inputs
  ? Object.fromEntries(Object.entries(ReductoBlock.inputs).filter(([key]) => key !== 'filePath'))
  : {}
const reductoV2SubBlocks = (ReductoBlock.subBlocks || []).filter(
  (subBlock) => subBlock.id !== 'filePath'
)

export const ReductoV2Block: BlockConfig<ReductoParserOutput> = {
  ...ReductoBlock,
  type: 'reducto_v2',
  name: 'Reducto (File Only)',
  hideFromToolbar: false,
  longDescription: `Integrate Reducto Parse into the workflow. Can extract text from uploaded PDF documents.`,
  subBlocks: reductoV2SubBlocks,
  tools: {
    access: ['reducto_parser_v2'],
    config: {
      tool: createVersionedToolSelector({
        baseToolSelector: () => 'reducto_parser',
        suffix: '_v2',
        fallbackToolId: 'reducto_parser_v2',
      }),
      params: (params) => {
        if (!params || !params.apiKey || params.apiKey.trim() === '') {
          throw new Error('Reducto API key is required')
        }

        const parameters: Record<string, unknown> = {
          apiKey: params.apiKey.trim(),
        }

        let documentInput = params.fileUpload || params.document
        if (!documentInput) {
          throw new Error('PDF document file is required')
        }
        if (typeof documentInput === 'string') {
          try {
            documentInput = JSON.parse(documentInput)
          } catch {
            throw new Error('PDF document file must be a valid file reference')
          }
        }
        if (Array.isArray(documentInput)) {
          throw new Error(
            'File reference must be a single file, not an array. Use <block.attachments[0]> to select one file.'
          )
        }
        if (typeof documentInput !== 'object' || documentInput === null) {
          throw new Error('PDF document file must be a file reference')
        }
        parameters.file = documentInput

        let pagesArray: number[] | undefined
        if (params.pages && params.pages.trim() !== '') {
          try {
            pagesArray = params.pages
              .split(',')
              .map((p: string) => p.trim())
              .filter((p: string) => p.length > 0)
              .map((p: string) => {
                const num = Number.parseInt(p, 10)
                if (Number.isNaN(num) || num < 0) {
                  throw new Error(`Invalid page number: ${p}`)
                }
                return num
              })

            if (pagesArray && pagesArray.length === 0) {
              pagesArray = undefined
            }
          } catch (error: unknown) {
            const errorMessage = error instanceof Error ? error.message : String(error)
            throw new Error(`Page number format error: ${errorMessage}`)
          }
        }

        if (pagesArray && pagesArray.length > 0) {
          parameters.pages = pagesArray
        }

        if (params.tableOutputFormat) {
          parameters.tableOutputFormat = params.tableOutputFormat
        }

        return parameters
      },
    },
  },
  inputs: reductoV2Inputs,
}

@@ -600,7 +600,7 @@ Return ONLY the HTML content.`,
    mailTemplateId: { type: 'string', description: 'Template ID for sending mail' },
    dynamicTemplateData: { type: 'json', description: 'Dynamic template data' },
    attachmentFiles: { type: 'json', description: 'Files to attach (UI upload)' },
    attachments: { type: 'array', description: 'Files to attach (UserFile array)' },
    attachments: { type: 'file[]', description: 'Files to attach (UserFile array)' },
    // Contact inputs
    email: { type: 'string', description: 'Contact email' },
    firstName: { type: 'string', description: 'Contact first name' },

@@ -279,7 +279,7 @@ export const SftpBlock: BlockConfig<SftpUploadResult> = {
    privateKey: { type: 'string', description: 'Private key for authentication' },
    passphrase: { type: 'string', description: 'Passphrase for encrypted key' },
    remotePath: { type: 'string', description: 'Remote path on the SFTP server' },
    files: { type: 'array', description: 'Files to upload (UserFile array)' },
    files: { type: 'file[]', description: 'Files to upload (UserFile array)' },
    fileContent: { type: 'string', description: 'Direct content to upload' },
    fileName: { type: 'string', description: 'File name for direct content' },
    overwrite: { type: 'boolean', description: 'Overwrite existing files' },

@@ -196,7 +196,7 @@ export const SmtpBlock: BlockConfig<SmtpSendMailResult> = {
    cc: { type: 'string', description: 'CC recipients (comma-separated)' },
    bcc: { type: 'string', description: 'BCC recipients (comma-separated)' },
    replyTo: { type: 'string', description: 'Reply-to email address' },
    attachments: { type: 'array', description: 'Files to attach (UserFile array)' },
    attachments: { type: 'file[]', description: 'Files to attach (UserFile array)' },
  },

  outputs: {

@@ -1,11 +1,13 @@
import { STTIcon } from '@/components/icons'
import { AuthMode, type BlockConfig } from '@/blocks/types'
import { createVersionedToolSelector } from '@/blocks/utils'
import type { SttBlockResponse } from '@/tools/stt/types'

export const SttBlock: BlockConfig<SttBlockResponse> = {
  type: 'stt',
  name: 'Speech-to-Text',
  description: 'Convert speech to text using AI',
  hideFromToolbar: true,
  authMode: AuthMode.ApiKey,
  longDescription:
    'Transcribe audio and video files to text using leading AI providers. Supports multiple languages, timestamps, and speaker diarization.',

@@ -345,3 +347,63 @@ export const SttBlock: BlockConfig<SttBlockResponse> = {
    },
  },
}

const sttV2Inputs = SttBlock.inputs
  ? Object.fromEntries(Object.entries(SttBlock.inputs).filter(([key]) => key !== 'audioUrl'))
  : {}
const sttV2SubBlocks = (SttBlock.subBlocks || []).filter((subBlock) => subBlock.id !== 'audioUrl')

export const SttV2Block: BlockConfig<SttBlockResponse> = {
  ...SttBlock,
  type: 'stt_v2',
  name: 'Speech-to-Text (File Only)',
  hideFromToolbar: false,
  subBlocks: sttV2SubBlocks,
  tools: {
    access: [
      'stt_whisper_v2',
      'stt_deepgram_v2',
      'stt_elevenlabs_v2',
      'stt_assemblyai_v2',
      'stt_gemini_v2',
    ],
    config: {
      tool: createVersionedToolSelector({
        baseToolSelector: (params) => {
          switch (params.provider) {
            case 'whisper':
              return 'stt_whisper'
            case 'deepgram':
              return 'stt_deepgram'
            case 'elevenlabs':
              return 'stt_elevenlabs'
            case 'assemblyai':
              return 'stt_assemblyai'
            case 'gemini':
              return 'stt_gemini'
            default:
              return 'stt_whisper'
          }
        },
        suffix: '_v2',
        fallbackToolId: 'stt_whisper_v2',
      }),
      params: (params) => ({
        provider: params.provider,
        apiKey: params.apiKey,
        model: params.model,
        audioFile: params.audioFile,
        audioFileReference: params.audioFileReference,
        language: params.language,
        timestamps: params.timestamps,
        diarization: params.diarization,
        translateToEnglish: params.translateToEnglish,
        sentiment: params.sentiment,
        entityDetection: params.entityDetection,
        piiRedaction: params.piiRedaction,
        summarization: params.summarization,
      }),
    },
  },
  inputs: sttV2Inputs,
}

@@ -351,7 +351,7 @@ export const TelegramBlock: BlockConfig<TelegramResponse> = {
      type: 'json',
      description: 'Files to attach (UI upload)',
    },
    files: { type: 'array', description: 'Files to attach (UserFile array)' },
    files: { type: 'file[]', description: 'Files to attach (UserFile array)' },
    caption: { type: 'string', description: 'Caption for media' },
    messageId: { type: 'string', description: 'Message ID to delete' },
  },

@@ -1,11 +1,13 @@
import { TextractIcon } from '@/components/icons'
import { AuthMode, type BlockConfig, type SubBlockType } from '@/blocks/types'
import { createVersionedToolSelector } from '@/blocks/utils'
import type { TextractParserOutput } from '@/tools/textract/types'

export const TextractBlock: BlockConfig<TextractParserOutput> = {
  type: 'textract',
  name: 'AWS Textract',
  description: 'Extract text, tables, and forms from documents',
  hideFromToolbar: true,
  authMode: AuthMode.ApiKey,
  longDescription: `Integrate AWS Textract into your workflow to extract text, tables, forms, and key-value pairs from documents. Single-page mode supports JPEG, PNG, and single-page PDF. Multi-page mode supports multi-page PDF and TIFF.`,
  docsLink: 'https://docs.sim.ai/tools/textract',

@@ -140,7 +142,7 @@ export const TextractBlock: BlockConfig<TextractParserOutput> = {
        throw new Error('Document is required')
      }
      if (typeof documentInput === 'object') {
        parameters.fileUpload = documentInput
        parameters.file = documentInput
      } else if (typeof documentInput === 'string') {
        parameters.filePath = documentInput.trim()
      }

@@ -189,3 +191,88 @@ export const TextractBlock: BlockConfig<TextractParserOutput> = {
    },
  },
}

const textractV2Inputs = TextractBlock.inputs
  ? Object.fromEntries(Object.entries(TextractBlock.inputs).filter(([key]) => key !== 'filePath'))
  : {}
const textractV2SubBlocks = (TextractBlock.subBlocks || []).filter(
  (subBlock) => subBlock.id !== 'filePath'
)

export const TextractV2Block: BlockConfig<TextractParserOutput> = {
  ...TextractBlock,
  type: 'textract_v2',
  name: 'AWS Textract (File Only)',
  hideFromToolbar: false,
  subBlocks: textractV2SubBlocks,
  tools: {
    access: ['textract_parser_v2'],
    config: {
      tool: createVersionedToolSelector({
        baseToolSelector: () => 'textract_parser',
        suffix: '_v2',
        fallbackToolId: 'textract_parser_v2',
      }),
      params: (params) => {
        if (!params.accessKeyId || params.accessKeyId.trim() === '') {
          throw new Error('AWS Access Key ID is required')
        }
        if (!params.secretAccessKey || params.secretAccessKey.trim() === '') {
          throw new Error('AWS Secret Access Key is required')
        }
        if (!params.region || params.region.trim() === '') {
          throw new Error('AWS Region is required')
        }

        const processingMode = params.processingMode || 'sync'
        const parameters: Record<string, unknown> = {
          accessKeyId: params.accessKeyId.trim(),
          secretAccessKey: params.secretAccessKey.trim(),
          region: params.region.trim(),
          processingMode,
        }
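        // Sync (single-page) mode sends the document bytes inline; async
        // (multi-page) mode expects the document to already be in S3, which is
        // why the branch below requires s3Uri instead of a file reference.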
|
||||
|
||||
if (processingMode === 'async') {
|
||||
if (!params.s3Uri || params.s3Uri.trim() === '') {
|
||||
throw new Error('S3 URI is required for multi-page processing')
|
||||
}
|
||||
parameters.s3Uri = params.s3Uri.trim()
|
||||
} else {
|
||||
let documentInput = params.fileUpload || params.document
|
||||
if (!documentInput) {
|
||||
throw new Error('Document file is required')
|
||||
}
|
||||
if (typeof documentInput === 'string') {
|
||||
try {
|
||||
documentInput = JSON.parse(documentInput)
|
||||
} catch {
|
||||
throw new Error('Document file must be a valid file reference')
|
||||
}
|
||||
}
|
||||
if (Array.isArray(documentInput)) {
|
||||
throw new Error(
|
||||
'File reference must be a single file, not an array. Use <block.attachments[0]> to select one file.'
|
||||
)
|
||||
}
|
||||
if (typeof documentInput !== 'object' || documentInput === null) {
|
||||
throw new Error('Document file must be a file reference')
|
||||
}
|
||||
parameters.file = documentInput
|
||||
}
|
||||
|
||||
const featureTypes: string[] = []
|
||||
if (params.extractTables) featureTypes.push('TABLES')
|
||||
if (params.extractForms) featureTypes.push('FORMS')
|
||||
if (params.detectSignatures) featureTypes.push('SIGNATURES')
|
||||
if (params.analyzeLayout) featureTypes.push('LAYOUT')
|
||||
|
||||
if (featureTypes.length > 0) {
|
||||
parameters.featureTypes = featureTypes
|
||||
}
|
||||
|
||||
return parameters
|
||||
},
|
||||
},
|
||||
},
|
||||
inputs: textractV2Inputs,
|
||||
}
|
||||
|

@@ -1,6 +1,7 @@
import { EyeIcon } from '@/components/icons'
import type { BlockConfig } from '@/blocks/types'
import { AuthMode } from '@/blocks/types'
import { createVersionedToolSelector } from '@/blocks/utils'
import type { VisionResponse } from '@/tools/vision/types'

const VISION_MODEL_OPTIONS = [
@@ -107,6 +108,16 @@ export const VisionV2Block: BlockConfig<VisionResponse> = {
  name: 'Vision',
  description: 'Analyze images with vision models',
  hideFromToolbar: false,
  tools: {
    access: ['vision_tool_v2'],
    config: {
      tool: createVersionedToolSelector({
        baseToolSelector: () => 'vision_tool',
        suffix: '_v2',
        fallbackToolId: 'vision_tool_v2',
      }),
    },
  },
  subBlocks: [
    {
      id: 'imageFile',

@@ -30,7 +30,7 @@ import { EvaluatorBlock } from '@/blocks/blocks/evaluator'
import { ExaBlock } from '@/blocks/blocks/exa'
import { FileBlock, FileV2Block, FileV3Block } from '@/blocks/blocks/file'
import { FirecrawlBlock } from '@/blocks/blocks/firecrawl'
import { FirefliesBlock } from '@/blocks/blocks/fireflies'
import { FirefliesBlock, FirefliesV2Block } from '@/blocks/blocks/fireflies'
import { FunctionBlock } from '@/blocks/blocks/function'
import { GenericWebhookBlock } from '@/blocks/blocks/generic_webhook'
import { GitHubBlock, GitHubV2Block } from '@/blocks/blocks/github'
@@ -44,7 +44,7 @@ import { GoogleFormsBlock } from '@/blocks/blocks/google_forms'
import { GoogleGroupsBlock } from '@/blocks/blocks/google_groups'
import { GoogleMapsBlock } from '@/blocks/blocks/google_maps'
import { GoogleSheetsBlock, GoogleSheetsV2Block } from '@/blocks/blocks/google_sheets'
import { GoogleSlidesBlock } from '@/blocks/blocks/google_slides'
import { GoogleSlidesBlock, GoogleSlidesV2Block } from '@/blocks/blocks/google_slides'
import { GoogleVaultBlock } from '@/blocks/blocks/google_vault'
import { GrafanaBlock } from '@/blocks/blocks/grafana'
import { GrainBlock } from '@/blocks/blocks/grain'
@@ -94,11 +94,11 @@ import { PipedriveBlock } from '@/blocks/blocks/pipedrive'
import { PolymarketBlock } from '@/blocks/blocks/polymarket'
import { PostgreSQLBlock } from '@/blocks/blocks/postgresql'
import { PostHogBlock } from '@/blocks/blocks/posthog'
import { PulseBlock } from '@/blocks/blocks/pulse'
import { PulseBlock, PulseV2Block } from '@/blocks/blocks/pulse'
import { QdrantBlock } from '@/blocks/blocks/qdrant'
import { RDSBlock } from '@/blocks/blocks/rds'
import { RedditBlock } from '@/blocks/blocks/reddit'
import { ReductoBlock } from '@/blocks/blocks/reducto'
import { ReductoBlock, ReductoV2Block } from '@/blocks/blocks/reducto'
import { ResendBlock } from '@/blocks/blocks/resend'
import { ResponseBlock } from '@/blocks/blocks/response'
import { RouterBlock, RouterV2Block } from '@/blocks/blocks/router'
@@ -124,11 +124,11 @@ import { StagehandBlock } from '@/blocks/blocks/stagehand'
import { StartTriggerBlock } from '@/blocks/blocks/start_trigger'
import { StarterBlock } from '@/blocks/blocks/starter'
import { StripeBlock } from '@/blocks/blocks/stripe'
import { SttBlock } from '@/blocks/blocks/stt'
import { SttBlock, SttV2Block } from '@/blocks/blocks/stt'
import { SupabaseBlock } from '@/blocks/blocks/supabase'
import { TavilyBlock } from '@/blocks/blocks/tavily'
import { TelegramBlock } from '@/blocks/blocks/telegram'
import { TextractBlock } from '@/blocks/blocks/textract'
import { TextractBlock, TextractV2Block } from '@/blocks/blocks/textract'
import { ThinkingBlock } from '@/blocks/blocks/thinking'
import { TinybirdBlock } from '@/blocks/blocks/tinybird'
import { TranslateBlock } from '@/blocks/blocks/translate'
@@ -195,6 +195,7 @@ export const registry: Record<string, BlockConfig> = {
  file_v3: FileV3Block,
  firecrawl: FirecrawlBlock,
  fireflies: FirefliesBlock,
  fireflies_v2: FirefliesV2Block,
  function: FunctionBlock,
  generic_webhook: GenericWebhookBlock,
  github: GitHubBlock,
@@ -213,6 +214,7 @@ export const registry: Record<string, BlockConfig> = {
  google_sheets: GoogleSheetsBlock,
  google_sheets_v2: GoogleSheetsV2Block,
  google_slides: GoogleSlidesBlock,
  google_slides_v2: GoogleSlidesV2Block,
  google_vault: GoogleVaultBlock,
  grafana: GrafanaBlock,
  grain: GrainBlock,
@@ -268,10 +270,12 @@ export const registry: Record<string, BlockConfig> = {
  postgresql: PostgreSQLBlock,
  posthog: PostHogBlock,
  pulse: PulseBlock,
  pulse_v2: PulseV2Block,
  qdrant: QdrantBlock,
  rds: RDSBlock,
  reddit: RedditBlock,
  reducto: ReductoBlock,
  reducto_v2: ReductoV2Block,
  resend: ResendBlock,
  response: ResponseBlock,
  router: RouterBlock,
@@ -299,10 +303,12 @@ export const registry: Record<string, BlockConfig> = {
  starter: StarterBlock,
  stripe: StripeBlock,
  stt: SttBlock,
  stt_v2: SttV2Block,
  supabase: SupabaseBlock,
  tavily: TavilyBlock,
  telegram: TelegramBlock,
  textract: TextractBlock,
  textract_v2: TextractV2Block,
  thinking: ThinkingBlock,
  tinybird: TinybirdBlock,
  translate: TranslateBlock,

@@ -1,4 +1,5 @@
import { createLogger } from '@sim/logger'
import { validateUrlWithDNS } from '@/lib/core/security/input-validation.server'
import { BlockType, HTTP } from '@/executor/constants'
import type { BlockHandler, ExecutionContext } from '@/executor/types'
import type { SerializedBlock } from '@/serializer/types'
@@ -41,16 +42,9 @@ export class ApiBlockHandler implements BlockHandler {
      }
    }

    if (!urlToValidate.match(/^https?:\/\//i)) {
      throw new Error(
        `Invalid URL: "${urlToValidate}" - URL must include protocol (try "https://${urlToValidate}")`
      )
    }

    try {
      new URL(urlToValidate)
    } catch (e: any) {
      throw new Error(`Invalid URL format: "${urlToValidate}" - ${e.message}`)
    const urlValidation = await validateUrlWithDNS(urlToValidate, 'url')
    if (!urlValidation.isValid) {
      throw new Error(urlValidation.error)
    }
  }

@@ -1,5 +1,6 @@
import { createLogger } from '@sim/logger'
import { uploadExecutionFile, uploadFileFromRawData } from '@/lib/uploads/contexts/execution'
import { downloadFileFromUrl } from '@/lib/uploads/utils/file-utils.server'
import type { ExecutionContext, UserFile } from '@/executor/types'
import type { ToolConfig, ToolFileData } from '@/tools/types'

@@ -127,14 +128,7 @@ export class FileToolProcessor {
    }

    if (!buffer && fileData.url) {
      const response = await fetch(fileData.url)

      if (!response.ok) {
        throw new Error(`Failed to download file from ${fileData.url}: ${response.statusText}`)
      }

      const arrayBuffer = await response.arrayBuffer()
      buffer = Buffer.from(arrayBuffer)
      buffer = await downloadFileFromUrl(fileData.url)
    }

    if (buffer) {

@@ -4,7 +4,10 @@ import { a2aPushNotificationConfig, a2aTask } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { isTriggerDevEnabled } from '@/lib/core/config/feature-flags'
import { secureFetchWithPinnedIP, validateUrlWithDNS } from '@/lib/core/security/input-validation'
import {
  secureFetchWithPinnedIP,
  validateUrlWithDNS,
} from '@/lib/core/security/input-validation.server'

const logger = createLogger('A2APushNotifications')

@@ -7,6 +7,7 @@ import {
  ClientFactoryOptions,
} from '@a2a-js/sdk/client'
import { createLogger } from '@sim/logger'
import { isInternalFileUrl } from '@/lib/uploads/utils/file-utils'
import { A2A_TERMINAL_STATES } from './constants'

const logger = createLogger('A2AUtils')
@@ -94,11 +95,13 @@ export function extractFileContent(message: Message): A2AFile[] {
    .map((part) => {
      const file = part.file as unknown as Record<string, unknown>
      const uri = (file.url as string) || (file.uri as string)
      const hasBytes = Boolean(file.bytes)
      const canUseUri = Boolean(uri) && (!hasBytes || (uri ? !isInternalFileUrl(uri) : true))
      return {
        name: file.name as string | undefined,
        mimeType: file.mimeType as string | undefined,
        ...(uri ? { uri } : {}),
        ...(file.bytes ? { bytes: file.bytes as string } : {}),
        ...(canUseUri ? { uri } : {}),
        ...(hasBytes ? { bytes: file.bytes as string } : {}),
      }
    })
}

apps/sim/lib/core/security/input-validation.server.ts (new file, 290 lines)
@@ -0,0 +1,290 @@
import dns from 'dns/promises'
import http from 'http'
import https from 'https'
import type { LookupFunction } from 'net'
import { createLogger } from '@sim/logger'
import * as ipaddr from 'ipaddr.js'
import { type ValidationResult, validateExternalUrl } from '@/lib/core/security/input-validation'

const logger = createLogger('InputValidation')

/**
 * Result type for async URL validation with resolved IP
 */
export interface AsyncValidationResult extends ValidationResult {
  resolvedIP?: string
  originalHostname?: string
}

/**
 * Checks if an IP address is private or reserved (not routable on the public internet)
 * Uses ipaddr.js for robust handling of all IP formats including:
 * - Octal notation (0177.0.0.1)
 * - Hex notation (0x7f000001)
 * - IPv4-mapped IPv6 (::ffff:127.0.0.1)
 * - Various edge cases that regex patterns miss
 */
function isPrivateOrReservedIP(ip: string): boolean {
  try {
    if (!ipaddr.isValid(ip)) {
      return true
    }

    const addr = ipaddr.process(ip)
    const range = addr.range()

    return range !== 'unicast'
  } catch {
    return true
  }
}
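
// Illustrative only, not part of the commit: how ipaddr.js classifies the
// formats called out above. process() normalizes IPv4-mapped IPv6 addresses
// first, so they report the underlying IPv4 range rather than 'unicast'.
//   ipaddr.process('127.0.0.1').range()      // 'loopback' -> blocked
//   ipaddr.process('::ffff:127.0.0.1').range() // 'loopback' -> blocked
//   ipaddr.process('10.0.0.1').range()       // 'private'  -> blocked
//   ipaddr.process('93.184.216.34').range()  // 'unicast'  -> allowed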

/**
 * Validates a URL and resolves its DNS to prevent SSRF via DNS rebinding
 *
 * This function:
 * 1. Performs basic URL validation (protocol, format)
 * 2. Resolves the hostname to an IP address
 * 3. Validates the resolved IP is not private/reserved
 * 4. Returns the resolved IP for use in the actual request
 *
 * @param url - The URL to validate
 * @param paramName - Name of the parameter for error messages
 * @returns AsyncValidationResult with resolved IP for DNS pinning
 */
export async function validateUrlWithDNS(
  url: string | null | undefined,
  paramName = 'url'
): Promise<AsyncValidationResult> {
  const basicValidation = validateExternalUrl(url, paramName)
  if (!basicValidation.isValid) {
    return basicValidation
  }

  const parsedUrl = new URL(url!)
  const hostname = parsedUrl.hostname

  try {
    const { address } = await dns.lookup(hostname)

    if (isPrivateOrReservedIP(address)) {
      logger.warn('URL resolves to blocked IP address', {
        paramName,
        hostname,
        resolvedIP: address,
      })
      return {
        isValid: false,
        error: `${paramName} resolves to a blocked IP address`,
      }
    }

    return {
      isValid: true,
      resolvedIP: address,
      originalHostname: hostname,
    }
  } catch (error) {
    logger.warn('DNS lookup failed for URL', {
      paramName,
      hostname,
      error: error instanceof Error ? error.message : String(error),
    })
    return {
      isValid: false,
      error: `${paramName} hostname could not be resolved`,
    }
  }
}
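
// A minimal usage sketch, not part of the commit: validate first, then pin the
// request to the IP that validation resolved. The function name is hypothetical.
async function fetchValidatedExample(url: string) {
  const validation = await validateUrlWithDNS(url, 'url')
  if (!validation.isValid) {
    throw new Error(validation.error)
  }
  // Reusing validation.resolvedIP means a second, different DNS answer at
  // request time cannot redirect the connection (DNS rebinding).
  return secureFetchWithPinnedIP(url, validation.resolvedIP!)
}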

export interface SecureFetchOptions {
  method?: string
  headers?: Record<string, string>
  body?: string | Buffer | Uint8Array
  timeout?: number
  maxRedirects?: number
}

export class SecureFetchHeaders {
  private headers: Map<string, string>

  constructor(headers: Record<string, string>) {
    this.headers = new Map(Object.entries(headers).map(([k, v]) => [k.toLowerCase(), v]))
  }

  get(name: string): string | null {
    return this.headers.get(name.toLowerCase()) ?? null
  }

  toRecord(): Record<string, string> {
    const record: Record<string, string> = {}
    for (const [key, value] of this.headers) {
      record[key] = value
    }
    return record
  }

  [Symbol.iterator]() {
    return this.headers.entries()
  }
}

export interface SecureFetchResponse {
  ok: boolean
  status: number
  statusText: string
  headers: SecureFetchHeaders
  text: () => Promise<string>
  json: () => Promise<unknown>
  arrayBuffer: () => Promise<ArrayBuffer>
}

const DEFAULT_MAX_REDIRECTS = 5

function isRedirectStatus(status: number): boolean {
  return status >= 300 && status < 400 && status !== 304
}

function resolveRedirectUrl(baseUrl: string, location: string): string {
  try {
    return new URL(location, baseUrl).toString()
  } catch {
    throw new Error(`Invalid redirect location: ${location}`)
  }
}

/**
 * Performs a fetch with IP pinning to prevent DNS rebinding attacks.
 * Uses the pre-resolved IP address while preserving the original hostname for TLS SNI.
 * Follows redirects securely by validating each redirect target.
 */
export async function secureFetchWithPinnedIP(
  url: string,
  resolvedIP: string,
  options: SecureFetchOptions = {},
  redirectCount = 0
): Promise<SecureFetchResponse> {
  const maxRedirects = options.maxRedirects ?? DEFAULT_MAX_REDIRECTS

  return new Promise((resolve, reject) => {
    const parsed = new URL(url)
    const isHttps = parsed.protocol === 'https:'
    const defaultPort = isHttps ? 443 : 80
    const port = parsed.port ? Number.parseInt(parsed.port, 10) : defaultPort

    const isIPv6 = resolvedIP.includes(':')
    const family = isIPv6 ? 6 : 4

    const lookup: LookupFunction = (_hostname, options, callback) => {
      if (options.all) {
        callback(null, [{ address: resolvedIP, family }])
      } else {
        callback(null, resolvedIP, family)
      }
    }

    const agentOptions: http.AgentOptions = { lookup }

    const agent = isHttps ? new https.Agent(agentOptions) : new http.Agent(agentOptions)

    // Remove accept-encoding since Node.js http/https doesn't auto-decompress
    // Headers are lowercase due to Web Headers API normalization in executeToolRequest
    const { 'accept-encoding': _, ...sanitizedHeaders } = options.headers ?? {}

    const requestOptions: http.RequestOptions = {
      hostname: parsed.hostname,
      port,
      path: parsed.pathname + parsed.search,
      method: options.method || 'GET',
      headers: sanitizedHeaders,
      agent,
      timeout: options.timeout || 300000, // Default 5 minutes
    }

    const protocol = isHttps ? https : http
    const req = protocol.request(requestOptions, (res) => {
      const statusCode = res.statusCode || 0
      const location = res.headers.location

      if (isRedirectStatus(statusCode) && location && redirectCount < maxRedirects) {
        res.resume()
        const redirectUrl = resolveRedirectUrl(url, location)

        validateUrlWithDNS(redirectUrl, 'redirectUrl')
          .then((validation) => {
            if (!validation.isValid) {
              reject(new Error(`Redirect blocked: ${validation.error}`))
              return
            }
            return secureFetchWithPinnedIP(
              redirectUrl,
              validation.resolvedIP!,
              options,
              redirectCount + 1
            )
          })
          .then((response) => {
            if (response) resolve(response)
          })
          .catch(reject)
        return
      }

      if (isRedirectStatus(statusCode) && location && redirectCount >= maxRedirects) {
        res.resume()
        reject(new Error(`Too many redirects (max: ${maxRedirects})`))
        return
      }

      const chunks: Buffer[] = []

      res.on('data', (chunk: Buffer) => chunks.push(chunk))

      res.on('error', (error) => {
        reject(error)
      })

      res.on('end', () => {
        const bodyBuffer = Buffer.concat(chunks)
        const body = bodyBuffer.toString('utf-8')
        const headersRecord: Record<string, string> = {}
        for (const [key, value] of Object.entries(res.headers)) {
          if (typeof value === 'string') {
            headersRecord[key.toLowerCase()] = value
          } else if (Array.isArray(value)) {
            headersRecord[key.toLowerCase()] = value.join(', ')
          }
        }

        resolve({
          ok: statusCode >= 200 && statusCode < 300,
          status: statusCode,
          statusText: res.statusMessage || '',
          headers: new SecureFetchHeaders(headersRecord),
          text: async () => body,
          json: async () => JSON.parse(body),
          arrayBuffer: async () =>
            bodyBuffer.buffer.slice(
              bodyBuffer.byteOffset,
              bodyBuffer.byteOffset + bodyBuffer.byteLength
            ),
        })
      })
    })

    req.on('error', (error) => {
      reject(error)
    })

    req.on('timeout', () => {
      req.destroy()
      reject(new Error(`Request timed out after ${requestOptions.timeout}ms`))
    })

    if (options.body) {
      req.write(options.body)
    }

    req.end()
  })
}
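
// Call-shape sketch, not part of the commit; the host and IP are placeholders.
// Header lookups are case-insensitive via SecureFetchHeaders, and the body is
// buffered eagerly, so text()/json()/arrayBuffer() resolve immediately.
//
//   const res = await secureFetchWithPinnedIP('https://example.com/data', '93.184.216.34', {
//     method: 'GET',
//     timeout: 10_000,
//   })
//   res.headers.get('Content-Type') // same as res.headers.get('content-type')
//   const data = await res.json()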

@@ -18,8 +18,8 @@ import {
  validatePathSegment,
  validateProxyUrl,
  validateS3BucketName,
  validateUrlWithDNS,
} from '@/lib/core/security/input-validation'
import { validateUrlWithDNS } from '@/lib/core/security/input-validation.server'
import { sanitizeForLogging } from '@/lib/core/security/redaction'

vi.mock('@sim/logger', () => loggerMock)

@@ -1,7 +1,3 @@
import dns from 'dns/promises'
import http from 'http'
import https from 'https'
import type { LookupFunction } from 'net'
import { createLogger } from '@sim/logger'
import * as ipaddr from 'ipaddr.js'

@@ -765,263 +761,6 @@ function isPrivateOrReservedIP(ip: string): boolean {
  }
}

/**
 * Result type for async URL validation with resolved IP
 */
export interface AsyncValidationResult extends ValidationResult {
  resolvedIP?: string
  originalHostname?: string
}

/**
 * Validates a URL and resolves its DNS to prevent SSRF via DNS rebinding
 *
 * This function:
 * 1. Performs basic URL validation (protocol, format)
 * 2. Resolves the hostname to an IP address
 * 3. Validates the resolved IP is not private/reserved
 * 4. Returns the resolved IP for use in the actual request
 *
 * @param url - The URL to validate
 * @param paramName - Name of the parameter for error messages
 * @returns AsyncValidationResult with resolved IP for DNS pinning
 */
export async function validateUrlWithDNS(
  url: string | null | undefined,
  paramName = 'url'
): Promise<AsyncValidationResult> {
  const basicValidation = validateExternalUrl(url, paramName)
  if (!basicValidation.isValid) {
    return basicValidation
  }

  const parsedUrl = new URL(url!)
  const hostname = parsedUrl.hostname

  try {
    const { address } = await dns.lookup(hostname)

    if (isPrivateOrReservedIP(address)) {
      logger.warn('URL resolves to blocked IP address', {
        paramName,
        hostname,
        resolvedIP: address,
      })
      return {
        isValid: false,
        error: `${paramName} resolves to a blocked IP address`,
      }
    }

    return {
      isValid: true,
      resolvedIP: address,
      originalHostname: hostname,
    }
  } catch (error) {
    logger.warn('DNS lookup failed for URL', {
      paramName,
      hostname,
      error: error instanceof Error ? error.message : String(error),
    })
    return {
      isValid: false,
      error: `${paramName} hostname could not be resolved`,
    }
  }
}
export interface SecureFetchOptions {
  method?: string
  headers?: Record<string, string>
  body?: string
  timeout?: number
  maxRedirects?: number
}

export class SecureFetchHeaders {
  private headers: Map<string, string>

  constructor(headers: Record<string, string>) {
    this.headers = new Map(Object.entries(headers).map(([k, v]) => [k.toLowerCase(), v]))
  }

  get(name: string): string | null {
    return this.headers.get(name.toLowerCase()) ?? null
  }

  toRecord(): Record<string, string> {
    const record: Record<string, string> = {}
    for (const [key, value] of this.headers) {
      record[key] = value
    }
    return record
  }

  [Symbol.iterator]() {
    return this.headers.entries()
  }
}

export interface SecureFetchResponse {
  ok: boolean
  status: number
  statusText: string
  headers: SecureFetchHeaders
  text: () => Promise<string>
  json: () => Promise<unknown>
  arrayBuffer: () => Promise<ArrayBuffer>
}

const DEFAULT_MAX_REDIRECTS = 5

function isRedirectStatus(status: number): boolean {
  return status >= 300 && status < 400 && status !== 304
}

function resolveRedirectUrl(baseUrl: string, location: string): string {
  try {
    return new URL(location, baseUrl).toString()
  } catch {
    throw new Error(`Invalid redirect location: ${location}`)
  }
}

/**
 * Performs a fetch with IP pinning to prevent DNS rebinding attacks.
 * Uses the pre-resolved IP address while preserving the original hostname for TLS SNI.
 * Follows redirects securely by validating each redirect target.
 */
export async function secureFetchWithPinnedIP(
  url: string,
  resolvedIP: string,
  options: SecureFetchOptions = {},
  redirectCount = 0
): Promise<SecureFetchResponse> {
  const maxRedirects = options.maxRedirects ?? DEFAULT_MAX_REDIRECTS

  return new Promise((resolve, reject) => {
    const parsed = new URL(url)
    const isHttps = parsed.protocol === 'https:'
    const defaultPort = isHttps ? 443 : 80
    const port = parsed.port ? Number.parseInt(parsed.port, 10) : defaultPort

    const isIPv6 = resolvedIP.includes(':')
    const family = isIPv6 ? 6 : 4

    const lookup: LookupFunction = (_hostname, options, callback) => {
      if (options.all) {
        callback(null, [{ address: resolvedIP, family }])
      } else {
        callback(null, resolvedIP, family)
      }
    }

    const agentOptions: http.AgentOptions = { lookup }

    const agent = isHttps ? new https.Agent(agentOptions) : new http.Agent(agentOptions)

    // Remove accept-encoding since Node.js http/https doesn't auto-decompress
    // Headers are lowercase due to Web Headers API normalization in executeToolRequest
    const { 'accept-encoding': _, ...sanitizedHeaders } = options.headers ?? {}

    const requestOptions: http.RequestOptions = {
      hostname: parsed.hostname,
      port,
      path: parsed.pathname + parsed.search,
      method: options.method || 'GET',
      headers: sanitizedHeaders,
      agent,
      timeout: options.timeout || 300000, // Default 5 minutes
    }

    const protocol = isHttps ? https : http
    const req = protocol.request(requestOptions, (res) => {
      const statusCode = res.statusCode || 0
      const location = res.headers.location

      if (isRedirectStatus(statusCode) && location && redirectCount < maxRedirects) {
        res.resume()
        const redirectUrl = resolveRedirectUrl(url, location)

        validateUrlWithDNS(redirectUrl, 'redirectUrl')
          .then((validation) => {
            if (!validation.isValid) {
              reject(new Error(`Redirect blocked: ${validation.error}`))
              return
            }
            return secureFetchWithPinnedIP(
              redirectUrl,
              validation.resolvedIP!,
              options,
              redirectCount + 1
            )
          })
          .then((response) => {
            if (response) resolve(response)
          })
          .catch(reject)
        return
      }

      if (isRedirectStatus(statusCode) && location && redirectCount >= maxRedirects) {
        res.resume()
        reject(new Error(`Too many redirects (max: ${maxRedirects})`))
        return
      }

      const chunks: Buffer[] = []

      res.on('data', (chunk: Buffer) => chunks.push(chunk))

      res.on('error', (error) => {
        reject(error)
      })

      res.on('end', () => {
        const bodyBuffer = Buffer.concat(chunks)
        const body = bodyBuffer.toString('utf-8')
        const headersRecord: Record<string, string> = {}
        for (const [key, value] of Object.entries(res.headers)) {
          if (typeof value === 'string') {
            headersRecord[key.toLowerCase()] = value
          } else if (Array.isArray(value)) {
            headersRecord[key.toLowerCase()] = value.join(', ')
          }
        }

        resolve({
          ok: statusCode >= 200 && statusCode < 300,
          status: statusCode,
          statusText: res.statusMessage || '',
          headers: new SecureFetchHeaders(headersRecord),
          text: async () => body,
          json: async () => JSON.parse(body),
          arrayBuffer: async () =>
            bodyBuffer.buffer.slice(
              bodyBuffer.byteOffset,
              bodyBuffer.byteOffset + bodyBuffer.byteLength
            ),
        })
      })
    })

    req.on('error', (error) => {
      reject(error)
    })

    req.on('timeout', () => {
      req.destroy()
      reject(new Error(`Request timed out after ${requestOptions.timeout}ms`))
    })

    if (options.body) {
      req.write(options.body)
    }

    req.end()
  })
}

/**
 * Validates an Airtable ID (base, table, or webhook ID)
 *

@@ -24,6 +24,22 @@ export function getBaseUrl(): string {
  return `${protocol}${baseUrl}`
}

/**
 * Ensures a URL is absolute by prefixing the base URL when a relative path is provided.
 * @param pathOrUrl - Relative path (e.g., /api/files/serve/...) or absolute URL
 */
export function ensureAbsoluteUrl(pathOrUrl: string): string {
  if (!pathOrUrl) {
    throw new Error('URL is required')
  }

  if (pathOrUrl.startsWith('/')) {
    return `${getBaseUrl()}${pathOrUrl}`
  }

  return pathOrUrl
}
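
// Behavior sketch, not part of the commit (the absolute result depends on the
// deployment's base URL):
//   ensureAbsoluteUrl('/api/files/serve/abc')   // `${getBaseUrl()}/api/files/serve/abc`
//   ensureAbsoluteUrl('https://example.com/x')  // returned unchanged
//   ensureAbsoluteUrl('')                       // throws 'URL is required'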

/**
 * Returns just the domain and port part of the application URL
 * @returns The domain with port if applicable (e.g., 'localhost:3000' or 'sim.ai')

@@ -1,5 +1,4 @@
import { createLogger } from '@sim/logger'
import { v4 as uuidv4 } from 'uuid'
import { uploadExecutionFile } from '@/lib/uploads/contexts/execution'
import { TRIGGER_TYPES } from '@/lib/workflows/triggers/triggers'
import type { InputFormatField } from '@/lib/workflows/types'
@@ -11,7 +10,7 @@ const logger = createLogger('ExecutionFiles')
const MAX_FILE_SIZE = 20 * 1024 * 1024 // 20MB

/**
 * Process a single file for workflow execution - handles both base64 ('file' type) and URL pass-through ('url' type)
 * Process a single file for workflow execution - handles base64 ('file' type) and URL downloads ('url' type)
 */
export async function processExecutionFile(
  file: { type: string; data: string; name: string; mime?: string },
@@ -60,14 +59,28 @@
  }

  if (file.type === 'url' && file.data) {
    return {
      id: uuidv4(),
      url: file.data,
      name: file.name,
      size: 0,
      type: file.mime || 'application/octet-stream',
      key: `url/${file.name}`,
    const { downloadFileFromUrl } = await import('@/lib/uploads/utils/file-utils.server')
    const buffer = await downloadFileFromUrl(file.data)

    if (buffer.length > MAX_FILE_SIZE) {
      const fileSizeMB = (buffer.length / (1024 * 1024)).toFixed(2)
      throw new Error(
        `File "${file.name}" exceeds the maximum size limit of 20MB (actual size: ${fileSizeMB}MB)`
      )
    }

    logger.debug(`[${requestId}] Uploading file from URL: ${file.name} (${buffer.length} bytes)`)

    const userFile = await uploadExecutionFile(
      executionContext,
      buffer,
      file.name,
      file.mime || 'application/octet-stream',
      userId
    )

    logger.debug(`[${requestId}] Successfully uploaded ${file.name} from URL`)
    return userFile
  }

  return null

@@ -3,11 +3,11 @@ import { PDFDocument } from 'pdf-lib'
import { getBYOKKey } from '@/lib/api-key/byok'
import { type Chunk, JsonYamlChunker, StructuredDataChunker, TextChunker } from '@/lib/chunkers'
import { env } from '@/lib/core/config/env'
import { sanitizeUrlForLog } from '@/lib/core/utils/logging'
import { parseBuffer, parseFile } from '@/lib/file-parsers'
import type { FileParseMetadata } from '@/lib/file-parsers/types'
import { retryWithExponentialBackoff } from '@/lib/knowledge/documents/utils'
import { StorageService } from '@/lib/uploads'
import { isInternalFileUrl } from '@/lib/uploads/utils/file-utils'
import { downloadFileFromUrl } from '@/lib/uploads/utils/file-utils.server'
import { mistralParserTool } from '@/tools/mistral/parser'

@@ -246,7 +246,7 @@ async function handleFileForOCR(
  userId?: string,
  workspaceId?: string | null
) {
  const isExternalHttps = fileUrl.startsWith('https://') && !fileUrl.includes('/api/files/serve/')
  const isExternalHttps = fileUrl.startsWith('https://') && !isInternalFileUrl(fileUrl)

  if (isExternalHttps) {
    if (mimeType === 'application/pdf') {
@@ -490,7 +490,7 @@ async function parseWithMistralOCR(
    workspaceId
  )

  logger.info(`Mistral OCR: Using presigned URL for ${filename}: ${sanitizeUrlForLog(httpsUrl)}`)
  logger.info(`Mistral OCR: Using presigned URL for ${filename}: ${httpsUrl}`)

  let pageCount = 0
  if (mimeType === 'application/pdf' && buffer) {

@@ -1,4 +1,8 @@
import { z } from 'zod'
import { isInternalFileUrl } from '@/lib/uploads/utils/file-utils'

const isUrlLike = (value: string) =>
  value.startsWith('http://') || value.startsWith('https://') || value.startsWith('/')

export const RawFileInputSchema = z
  .object({
@@ -18,6 +22,30 @@
  .refine((data) => Boolean(data.key || data.path || data.url), {
    message: 'File must include key, path, or url',
  })
  .refine(
    (data) => {
      if (data.key || data.path) {
        return true
      }
      if (!data.url) {
        return true
      }
      return isInternalFileUrl(data.url)
    },
    { message: 'File url must reference an uploaded file' }
  )
  .refine(
    (data) => {
      if (data.key || !data.path) {
        return true
      }
      if (!isUrlLike(data.path)) {
        return true
      }
      return isInternalFileUrl(data.path)
    },
    { message: 'File path must reference an uploaded file' }
  )

export const RawFileInputArraySchema = z.array(RawFileInputSchema)
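
// Validation sketch, not part of the commit, assuming the object's other
// required fields are present: the refinements reject external URLs unless a
// storage key or plain path identifies an uploaded file.
//   { key: 'execution/abc.pdf' }                 // passes
//   { url: '/api/files/serve/execution%2Fabc' }  // passes (internal serve URL)
//   { url: 'https://attacker.example/x.pdf' }    // fails: 'File url must reference an uploaded file'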

@@ -1,10 +1,19 @@
'use server'

import type { Logger } from '@sim/logger'
import { secureFetchWithPinnedIP, validateUrlWithDNS } from '@/lib/core/security/input-validation'
import {
  secureFetchWithPinnedIP,
  validateUrlWithDNS,
} from '@/lib/core/security/input-validation.server'
import type { StorageContext } from '@/lib/uploads'
import { StorageService } from '@/lib/uploads'
import { isExecutionFile } from '@/lib/uploads/contexts/execution/utils'
import { inferContextFromKey } from '@/lib/uploads/utils/file-utils'
import {
  extractStorageKey,
  inferContextFromKey,
  isInternalFileUrl,
} from '@/lib/uploads/utils/file-utils'
import { verifyFileAccess } from '@/app/api/files/authorization'
import type { UserFile } from '@/executor/types'

/**
@@ -13,7 +22,6 @@ import type { UserFile } from '@/executor/types'
 * For external URLs, validates DNS/SSRF and uses secure fetch with IP pinning
 */
export async function downloadFileFromUrl(fileUrl: string, timeoutMs = 180000): Promise<Buffer> {
  const { isInternalFileUrl } = await import('./file-utils')
  const { parseInternalFileUrl } = await import('./file-utils')

  if (isInternalFileUrl(fileUrl)) {
@@ -38,6 +46,39 @@ export async function downloadFileFromUrl(fileUrl: string, timeoutMs = 180000):
  return Buffer.from(await response.arrayBuffer())
}

export async function resolveInternalFileUrl(
  filePath: string,
  userId: string,
  requestId: string,
  logger: Logger
): Promise<{ fileUrl?: string; error?: { status: number; message: string } }> {
  if (!isInternalFileUrl(filePath)) {
    return { fileUrl: filePath }
  }

  try {
    const storageKey = extractStorageKey(filePath)
    const context = inferContextFromKey(storageKey)
    const hasAccess = await verifyFileAccess(storageKey, userId, undefined, context, false)

    if (!hasAccess) {
      logger.warn(`[${requestId}] Unauthorized presigned URL generation attempt`, {
        userId,
        key: storageKey,
        context,
      })
      return { error: { status: 404, message: 'File not found' } }
    }

    const fileUrl = await StorageService.generatePresignedDownloadUrl(storageKey, context, 5 * 60)
    logger.info(`[${requestId}] Generated presigned URL for ${context} file`)
    return { fileUrl }
  } catch (error) {
    logger.error(`[${requestId}] Failed to generate presigned URL:`, error)
    return { error: { status: 500, message: 'Failed to generate file access URL' } }
  }
}
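
// Usage sketch, not part of the commit; identifiers are placeholders. A route
// handler can swap an internal /api/files/serve/ path for a short-lived
// presigned URL before handing the file to an external service:
//   const { fileUrl, error } = await resolveInternalFileUrl(filePath, userId, requestId, logger)
//   if (error) return NextResponse.json({ error: error.message }, { status: error.status })
//   // fileUrl is either the original external URL or a 5-minute presigned URL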

/**
 * Downloads a file from storage (execution or regular)
 * @param userFile - UserFile object

@@ -438,6 +438,7 @@ export interface RawFileInput {
  uploadedAt?: string | Date
  expiresAt?: string | Date
  context?: string
  base64?: string
}

/**
@@ -456,6 +457,41 @@ function isCompleteUserFile(file: RawFileInput): file is UserFile {
  )
}

function isUrlLike(value: string): boolean {
  return value.startsWith('http://') || value.startsWith('https://') || value.startsWith('/')
}

function resolveStorageKeyFromRawFile(file: RawFileInput): string | null {
  if (file.key) {
    return file.key
  }

  if (file.path) {
    if (isUrlLike(file.path)) {
      return isInternalFileUrl(file.path) ? extractStorageKey(file.path) : null
    }
    return file.path
  }

  if (file.url) {
    return isInternalFileUrl(file.url) ? extractStorageKey(file.url) : null
  }

  return null
}

function resolveInternalFileUrl(file: RawFileInput): string {
  if (file.url && isInternalFileUrl(file.url)) {
    return file.url
  }

  if (file.path && isInternalFileUrl(file.path)) {
    return file.path
  }

  return ''
}

/**
 * Converts a single raw file object to UserFile format
 * @param file - Raw file object (must be a single file, not an array)
@@ -476,10 +512,13 @@ export function processSingleFileToUserFile(
  }

  if (isCompleteUserFile(file)) {
    return file
    return {
      ...file,
      url: resolveInternalFileUrl(file),
    }
  }

  const storageKey = file.key || (file.path ? extractStorageKey(file.path) : null)
  const storageKey = resolveStorageKeyFromRawFile(file)

  if (!storageKey) {
    logger.warn(`[${requestId}] File has no storage key: ${file.name || 'unknown'}`)
@@ -489,10 +528,12 @@
  const userFile: UserFile = {
    id: file.id || `file-${Date.now()}`,
    name: file.name,
    url: file.url || file.path || '',
    url: resolveInternalFileUrl(file),
    size: file.size,
    type: file.type || 'application/octet-stream',
    key: storageKey,
    context: file.context,
    base64: file.base64,
  }

  logger.info(`[${requestId}] Converted file to UserFile: ${userFile.name} (key: ${userFile.key})`)
@@ -523,11 +564,14 @@ export function processFilesToUserFiles(
    }

    if (isCompleteUserFile(file)) {
      userFiles.push(file)
      userFiles.push({
        ...file,
        url: resolveInternalFileUrl(file),
      })
      continue
    }

    const storageKey = file.key || (file.path ? extractStorageKey(file.path) : null)
    const storageKey = resolveStorageKeyFromRawFile(file)

    if (!storageKey) {
      logger.warn(`[${requestId}] Skipping file without storage key: ${file.name || 'unknown'}`)
@@ -537,10 +581,12 @@
    const userFile: UserFile = {
      id: file.id || `file-${Date.now()}`,
      name: file.name,
      url: file.url || file.path || '',
      url: resolveInternalFileUrl(file),
      size: file.size,
      type: file.type || 'application/octet-stream',
      key: storageKey,
      context: file.context,
      base64: file.base64,
    }

    logger.info(

@@ -5,7 +5,10 @@ import { and, eq, isNull, or, sql } from 'drizzle-orm'
import { nanoid } from 'nanoid'
import Parser from 'rss-parser'
import { pollingIdempotency } from '@/lib/core/idempotency/service'
import { secureFetchWithPinnedIP, validateUrlWithDNS } from '@/lib/core/security/input-validation'
import {
  secureFetchWithPinnedIP,
  validateUrlWithDNS,
} from '@/lib/core/security/input-validation.server'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { MAX_CONSECUTIVE_FAILURES } from '@/triggers/constants'

@@ -10,7 +10,7 @@ import {
  type SecureFetchResponse,
  secureFetchWithPinnedIP,
  validateUrlWithDNS,
} from '@/lib/core/security/input-validation'
} from '@/lib/core/security/input-validation.server'
import { sanitizeUrlForLog } from '@/lib/core/utils/logging'
import type { DbOrTx } from '@/lib/db/types'
import { getProviderIdFromServiceId } from '@/lib/oauth'
@@ -115,7 +115,7 @@ async function fetchWithDNSPinning(
  const urlValidation = await validateUrlWithDNS(url, 'contentUrl')
  if (!urlValidation.isValid) {
    logger.warn(`[${requestId}] Invalid content URL: ${urlValidation.error}`, {
      url: sanitizeUrlForLog(url),
      url,
    })
    return null
  }

@@ -1,5 +1,4 @@
import { createLogger } from '@sim/logger'
import { sanitizeUrlForLog } from '@/lib/core/utils/logging'
import type { BrowserUseRunTaskParams, BrowserUseRunTaskResponse } from '@/tools/browser_use/types'
import type { ToolConfig, ToolResponse } from '@/tools/types'

@@ -184,7 +183,7 @@ async function pollForCompletion(
  }

  if (!liveUrlLogged && taskData.live_url) {
    logger.info(`BrowserUse task ${taskId} live URL: ${sanitizeUrlForLog(taskData.live_url)}`)
    logger.info(`BrowserUse task ${taskId} live URL: ${taskData.live_url}`)
    liveUrlLogged = true
  }

@@ -1,4 +1,5 @@
import { createLogger } from '@sim/logger'
import { inferContextFromKey } from '@/lib/uploads/utils/file-utils'
import type { UserFile } from '@/executor/types'
import type {
  FileParseApiMultiResponse,
@@ -9,21 +10,14 @@ import type {
  FileParserOutputData,
  FileParserV3Output,
  FileParserV3OutputData,
  FileUploadInput,
} from '@/tools/file/types'
import type { ToolConfig } from '@/tools/types'

const logger = createLogger('FileParserTool')

interface FileUploadObject {
  path: string
  name?: string
  size?: number
  type?: string
}

interface ToolBodyParams extends Partial<FileParserInput> {
  file?: FileUploadObject | FileUploadObject[]
  files?: FileUploadObject[]
  files?: FileUploadInput[]
  _context?: {
    workspaceId?: string
    workflowId?: string
@@ -104,6 +98,12 @@ export const fileParserTool: ToolConfig<FileParserInput, FileParserOutput> = {
      visibility: 'user-only',
      description: 'Path to the file(s). Can be a single path, URL, or an array of paths.',
    },
    file: {
      type: 'file',
      required: false,
      visibility: 'user-only',
      description: 'Uploaded file(s) to parse',
    },
    fileType: {
      type: 'string',
      required: false,
@@ -129,6 +129,28 @@
    let determinedFilePath: string | string[] | null = null
    const determinedFileType: string | undefined = params.fileType

    const resolveFilePath = (fileInput: unknown): string | null => {
      if (!fileInput || typeof fileInput !== 'object') return null

      if ('path' in fileInput && typeof (fileInput as { path?: unknown }).path === 'string') {
        return (fileInput as { path: string }).path
      }

      if ('url' in fileInput && typeof (fileInput as { url?: unknown }).url === 'string') {
        return (fileInput as { url: string }).url
      }

      if ('key' in fileInput && typeof (fileInput as { key?: unknown }).key === 'string') {
        const fileRecord = fileInput as Record<string, unknown>
        const key = fileRecord.key as string
        const context =
          typeof fileRecord.context === 'string' ? fileRecord.context : inferContextFromKey(key)
        return `/api/files/serve/${encodeURIComponent(key)}?context=${context}`
      }

      return null
    }

    // Determine the file path(s) based on input parameters.
    // Precedence: direct filePath > file array > single file object > legacy files array
    // 1. Check for direct filePath (URL or single path from upload)
@@ -139,18 +161,34 @@
    // 2. Check for file upload (array)
    else if (params.file && Array.isArray(params.file) && params.file.length > 0) {
      logger.info('Tool body processing file array upload')
      determinedFilePath = params.file.map((file) => file.path)
      const filePaths = params.file
        .map((file) => resolveFilePath(file))
        .filter(Boolean) as string[]
      if (filePaths.length !== params.file.length) {
        throw new Error('Invalid file input: One or more files are missing path or URL')
      }
      determinedFilePath = filePaths
    }
    // 3. Check for file upload (single object)
    else if (params.file && !Array.isArray(params.file) && params.file.path) {
    else if (params.file && !Array.isArray(params.file)) {
      logger.info('Tool body processing single file object upload')
      determinedFilePath = params.file.path
      const resolvedPath = resolveFilePath(params.file)
      if (!resolvedPath) {
        throw new Error('Invalid file input: Missing path or URL')
      }
      determinedFilePath = resolvedPath
    }
    // 4. Check for deprecated multiple files case (from older blocks?)
    else if (params.files && Array.isArray(params.files)) {
      logger.info('Tool body processing legacy files array:', params.files.length)
      if (params.files.length > 0) {
        determinedFilePath = params.files.map((file) => file.path)
        const filePaths = params.files
          .map((file) => resolveFilePath(file))
          .filter(Boolean) as string[]
        if (filePaths.length !== params.files.length) {
          throw new Error('Invalid file input: One or more files are missing path or URL')
        }
        determinedFilePath = filePaths
      } else {
        logger.warn('Legacy files array provided but is empty')
      }

@@ -2,13 +2,21 @@ import type { UserFile } from '@/executor/types'
import type { ToolResponse } from '@/tools/types'

export interface FileParserInput {
  filePath: string | string[]
  filePath?: string | string[]
  file?: UserFile | UserFile[] | FileUploadInput | FileUploadInput[]
  fileType?: string
  workspaceId?: string
  workflowId?: string
  executionId?: string
}

export interface FileUploadInput {
  path: string
  name?: string
  size?: number
  type?: string
}

export interface FileParseResult {
  content: string
  fileType: string

@@ -1,3 +1,4 @@
import { getFileExtension, getMimeTypeFromExtension } from '@/lib/uploads/utils/file-utils'
import type { FileContentResponse, GetFileContentParams } from '@/tools/github/types'
import type { ToolConfig } from '@/tools/types'

@@ -77,6 +78,14 @@ export const getFileContentTool: ToolConfig<GetFileContentParams, FileContentRes
    }

    let decodedContent = ''
    let file:
      | {
          name: string
          mimeType: string
          data: string
          size: number
        }
      | undefined
    if (data.content) {
      try {
        decodedContent = Buffer.from(data.content, 'base64').toString('utf-8')
@@ -84,6 +93,17 @@
        decodedContent = '[Binary file - content cannot be displayed as text]'
      }
    }
    if (data.content && data.encoding === 'base64' && data.name) {
      const base64Data = String(data.content).replace(/\n/g, '')
      const extension = getFileExtension(data.name)
      const mimeType = getMimeTypeFromExtension(extension)
      file = {
        name: data.name,
        mimeType,
        data: base64Data,
        size: data.size || 0,
      }
    }

    const contentPreview =
      decodedContent.length > 500
@@ -103,6 +123,7 @@ ${contentPreview}`
      success: true,
      output: {
        content,
        file,
        metadata: {
          name: data.name,
          path: data.path,
@@ -121,6 +142,11 @@
      type: 'string',
      description: 'Human-readable file information with content preview',
    },
    file: {
      type: 'file',
      description: 'Downloaded file stored in execution files',
      optional: true,
    },
    metadata: {
      type: 'object',
      description: 'File metadata including name, path, SHA, size, and URLs',
@@ -150,6 +176,14 @@ export const getFileContentV2Tool: ToolConfig<GetFileContentParams, any> = {

    // Decode base64 content if present
    let decodedContent = ''
    let file:
      | {
          name: string
          mimeType: string
          data: string
          size: number
        }
      | undefined
    if (data.content && data.encoding === 'base64') {
      try {
        decodedContent = Buffer.from(data.content, 'base64').toString('utf-8')
@@ -157,6 +191,17 @@
        decodedContent = data.content
      }
    }
    if (data.content && data.encoding === 'base64' && data.name) {
      const base64Data = String(data.content).replace(/\n/g, '')
      const extension = getFileExtension(data.name)
      const mimeType = getMimeTypeFromExtension(extension)
      file = {
        name: data.name,
        mimeType,
        data: base64Data,
        size: data.size || 0,
      }
    }

    return {
      success: true,
@@ -172,6 +217,7 @@
        download_url: data.download_url ?? null,
        git_url: data.git_url,
        _links: data._links,
        file,
      },
    }
  },
@@ -188,5 +234,10 @@
    download_url: { type: 'string', description: 'Direct download URL', optional: true },
    git_url: { type: 'string', description: 'Git blob API URL' },
    _links: { type: 'json', description: 'Related links' },
    file: {
      type: 'file',
      description: 'Downloaded file stored in execution files',
      optional: true,
    },
  },
}

@@ -1,4 +1,3 @@
import { createLogger } from '@sim/logger'
import {
  COMMIT_DATA_OUTPUT,
  type LatestCommitParams,
@@ -7,8 +6,6 @@
} from '@/tools/github/types'
import type { ToolConfig } from '@/tools/types'

const logger = createLogger('GitHubLatestCommitTool')

export const latestCommitTool: ToolConfig<LatestCommitParams, LatestCommitResponse> = {
  id: 'github_latest_commit',
  name: 'GitHub Latest Commit',
@@ -43,92 +40,17 @@
  },

  request: {
    url: (params) => {
      const baseUrl = `https://api.github.com/repos/${params.owner}/${params.repo}`
      return params.branch ? `${baseUrl}/commits/${params.branch}` : `${baseUrl}/commits/HEAD`
    },
    method: 'GET',
    headers: (params) => ({
      Accept: 'application/vnd.github.v3+json',
      Authorization: `Bearer ${params.apiKey}`,
      'X-GitHub-Api-Version': '2022-11-28',
    url: '/api/tools/github/latest-commit',
    method: 'POST',
    headers: () => ({
      'Content-Type': 'application/json',
    }),
    body: (params) => ({
      owner: params.owner,
      repo: params.repo,
      branch: params.branch,
      apiKey: params.apiKey,
    }),
  },

  transformResponse: async (response, params) => {
    const data = await response.json()

    const content = `Latest commit: "${data.commit.message}" by ${data.commit.author.name} on ${data.commit.author.date}. SHA: ${data.sha}`

    const files = data.files || []
    const fileDetailsWithContent = []

    if (files.length > 0) {
      for (const file of files) {
        const fileDetail = {
          filename: file.filename,
          additions: file.additions,
          deletions: file.deletions,
          changes: file.changes,
          status: file.status,
          raw_url: file.raw_url,
          blob_url: file.blob_url,
          patch: file.patch,
          content: undefined as string | undefined,
        }

        if (file.status !== 'removed' && file.raw_url) {
          try {
            const contentResponse = await fetch(file.raw_url, {
              headers: {
                Authorization: `Bearer ${params?.apiKey}`,
                'X-GitHub-Api-Version': '2022-11-28',
              },
            })

            if (contentResponse.ok) {
              fileDetail.content = await contentResponse.text()
            }
          } catch (error) {
            logger.error(`Failed to fetch content for ${file.filename}:`, error)
          }
        }

        fileDetailsWithContent.push(fileDetail)
      }
    }

    return {
      success: true,
      output: {
        content,
        metadata: {
          sha: data.sha,
          html_url: data.html_url,
          commit_message: data.commit.message,
          author: {
            name: data.commit.author.name,
            login: data.author?.login || 'Unknown',
            avatar_url: data.author?.avatar_url || '',
            html_url: data.author?.html_url || '',
          },
          committer: {
            name: data.commit.committer.name,
            login: data.committer?.login || 'Unknown',
            avatar_url: data.committer?.avatar_url || '',
            html_url: data.committer?.html_url || '',
          },
          stats: data.stats
            ? {
                additions: data.stats.additions,
                deletions: data.stats.deletions,
                total: data.stats.total,
              }
            : undefined,
          files: fileDetailsWithContent.length > 0 ? fileDetailsWithContent : undefined,
        },
      },
    }
  },

  outputs: {

@@ -1,4 +1,4 @@
import type { OutputProperty, ToolResponse } from '@/tools/types'
import type { OutputProperty, ToolFileData, ToolResponse } from '@/tools/types'

/**
 * Shared output property definitions for GitHub API responses.
@@ -1876,6 +1876,7 @@ export interface TreeItemMetadata {
export interface FileContentResponse extends ToolResponse {
  output: {
    content: string
    file?: ToolFileData
    metadata: FileContentMetadata
  }
}

@@ -125,7 +125,7 @@ export interface GmailMessage {
// Gmail Attachment Interface (for processed attachments)
export interface GmailAttachment {
  name: string
  data: Buffer
  data: string
  mimeType: string
  size: number
}

@@ -251,7 +251,7 @@ export async function downloadAttachments(

    downloadedAttachments.push({
      name: attachment.filename,
      data: buffer,
      data: buffer.toString('base64'),
      mimeType: attachment.mimeType,
      size: attachment.size,
    })
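
// Sketch, not part of the commit: with `data` now a base64 string rather than
// a Buffer, attachments survive JSON serialization; consumers can decode with
//   const bytes = Buffer.from(attachment.data, 'base64')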
@@ -1,20 +1,6 @@
import { createLogger } from '@sim/logger'
import type {
GoogleDriveDownloadResponse,
GoogleDriveFile,
GoogleDriveRevision,
GoogleDriveToolParams,
} from '@/tools/google_drive/types'
import {
ALL_FILE_FIELDS,
ALL_REVISION_FIELDS,
DEFAULT_EXPORT_FORMATS,
GOOGLE_WORKSPACE_MIME_TYPES,
} from '@/tools/google_drive/utils'
import type { GoogleDriveDownloadResponse, GoogleDriveToolParams } from '@/tools/google_drive/types'
import type { ToolConfig } from '@/tools/types'

const logger = createLogger('GoogleDriveDownloadTool')

export const downloadTool: ToolConfig<GoogleDriveToolParams, GoogleDriveDownloadResponse> = {
id: 'google_drive_download',
name: 'Download File from Google Drive',
@@ -62,164 +48,18 @@ export const downloadTool: ToolConfig<GoogleDriveToolParams, GoogleDriveDownload
},

request: {
url: (params) =>
`https://www.googleapis.com/drive/v3/files/${params.fileId}?fields=${ALL_FILE_FIELDS}&supportsAllDrives=true`,
method: 'GET',
headers: (params) => ({
Authorization: `Bearer ${params.accessToken}`,
url: '/api/tools/google_drive/download',
method: 'POST',
headers: () => ({
'Content-Type': 'application/json',
}),
body: (params) => ({
accessToken: params.accessToken,
fileId: params.fileId,
mimeType: params.mimeType,
fileName: params.fileName,
includeRevisions: params.includeRevisions,
}),
},

transformResponse: async (response: Response, params?: GoogleDriveToolParams) => {
try {
if (!response.ok) {
const errorDetails = await response.json().catch(() => ({}))
logger.error('Failed to get file metadata', {
status: response.status,
statusText: response.statusText,
error: errorDetails,
})
throw new Error(errorDetails.error?.message || 'Failed to get file metadata')
}

const metadata: GoogleDriveFile = await response.json()
const fileId = metadata.id
const mimeType = metadata.mimeType
const authHeader = `Bearer ${params?.accessToken || ''}`

let fileBuffer: Buffer
let finalMimeType = mimeType

if (GOOGLE_WORKSPACE_MIME_TYPES.includes(mimeType)) {
const exportFormat = params?.mimeType || DEFAULT_EXPORT_FORMATS[mimeType] || 'text/plain'
finalMimeType = exportFormat

logger.info('Exporting Google Workspace file', {
fileId,
mimeType,
exportFormat,
})

const exportResponse = await fetch(
`https://www.googleapis.com/drive/v3/files/${fileId}/export?mimeType=${encodeURIComponent(exportFormat)}&supportsAllDrives=true`,
{
headers: {
Authorization: authHeader,
},
}
)

if (!exportResponse.ok) {
const exportError = await exportResponse.json().catch(() => ({}))
logger.error('Failed to export file', {
status: exportResponse.status,
statusText: exportResponse.statusText,
error: exportError,
})
throw new Error(exportError.error?.message || 'Failed to export Google Workspace file')
}

const arrayBuffer = await exportResponse.arrayBuffer()
fileBuffer = Buffer.from(arrayBuffer)
} else {
logger.info('Downloading regular file', {
fileId,
mimeType,
})

const downloadResponse = await fetch(
`https://www.googleapis.com/drive/v3/files/${fileId}?alt=media&supportsAllDrives=true`,
{
headers: {
Authorization: authHeader,
},
}
)

if (!downloadResponse.ok) {
const downloadError = await downloadResponse.json().catch(() => ({}))
logger.error('Failed to download file', {
status: downloadResponse.status,
statusText: downloadResponse.statusText,
error: downloadError,
})
throw new Error(downloadError.error?.message || 'Failed to download file')
}

const arrayBuffer = await downloadResponse.arrayBuffer()
fileBuffer = Buffer.from(arrayBuffer)
}

const includeRevisions = params?.includeRevisions !== false
const canReadRevisions = metadata.capabilities?.canReadRevisions === true
if (includeRevisions && canReadRevisions) {
try {
const revisionsResponse = await fetch(
`https://www.googleapis.com/drive/v3/files/${fileId}/revisions?fields=revisions(${ALL_REVISION_FIELDS})&pageSize=100`,
{
headers: {
Authorization: authHeader,
},
}
)

if (revisionsResponse.ok) {
const revisionsData = await revisionsResponse.json()
metadata.revisions = revisionsData.revisions as GoogleDriveRevision[]
logger.info('Fetched file revisions', {
fileId,
revisionCount: metadata.revisions?.length || 0,
})
} else {
logger.warn('Failed to fetch revisions, continuing without them', {
status: revisionsResponse.status,
statusText: revisionsResponse.statusText,
})
}
} catch (revisionError: any) {
logger.warn('Error fetching revisions, continuing without them', {
error: revisionError.message,
})
}
} else if (includeRevisions && !canReadRevisions) {
logger.info('Skipping revision fetch - user does not have canReadRevisions permission', {
fileId,
})
}

const resolvedName = params?.fileName || metadata.name || 'download'

logger.info('File downloaded successfully', {
fileId,
name: resolvedName,
size: fileBuffer.length,
mimeType: finalMimeType,
hasOwners: !!metadata.owners?.length,
hasPermissions: !!metadata.permissions?.length,
hasRevisions: !!metadata.revisions?.length,
})

const base64Data = fileBuffer.toString('base64')

return {
success: true,
output: {
file: {
name: resolvedName,
mimeType: finalMimeType,
data: base64Data,
size: fileBuffer.length,
},
metadata,
},
}
} catch (error: any) {
logger.error('Error in transform response', {
error: error.message,
stack: error.stack,
})
throw error
}
},

outputs: {

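The Google Drive hunk above swaps a direct Googleapis request in the tool config for a POST to an internal API route, so the OAuth access token is consumed server-side. A minimal sketch of that request shape, assuming a generic ToolConfig-style request block; the route path and parameter list are illustrative stand-ins, not code from this commit:

// Sketch: defer the authenticated download to an internal server route.
const request = {
  url: '/api/tools/example/download', // illustrative internal proxy route
  method: 'POST' as const,
  headers: () => ({ 'Content-Type': 'application/json' }),
  body: (params: { accessToken: string; fileId: string }) => ({
    accessToken: params.accessToken, // token travels in the body to the server,
    fileId: params.fileId,           // which performs the actual Drive fetch
  }),
}
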
@@ -1,5 +1,4 @@
import type { GoogleVaultDownloadExportFileParams } from '@/tools/google_vault/types'
import { enhanceGoogleVaultError } from '@/tools/google_vault/utils'
import type { ToolConfig } from '@/tools/types'

export const downloadExportFileTool: ToolConfig<GoogleVaultDownloadExportFileParams> = {
@@ -47,92 +46,18 @@ export const downloadExportFileTool: ToolConfig<GoogleVaultDownloadExportFilePar
},

request: {
url: (params) => {
const bucket = encodeURIComponent(params.bucketName)
const object = encodeURIComponent(params.objectName)
return `https://storage.googleapis.com/storage/v1/b/${bucket}/o/${object}?alt=media`
},
method: 'GET',
headers: (params) => ({
Authorization: `Bearer ${params.accessToken}`,
url: '/api/tools/google_vault/download-export-file',
method: 'POST',
headers: () => ({
'Content-Type': 'application/json',
}),
body: (params) => ({
accessToken: params.accessToken,
matterId: params.matterId,
bucketName: params.bucketName,
objectName: params.objectName,
fileName: params.fileName,
}),
},

transformResponse: async (response: Response, params?: GoogleVaultDownloadExportFileParams) => {
if (!response.ok) {
let details: any
try {
details = await response.json()
} catch {
try {
const text = await response.text()
details = { error: text }
} catch {
details = undefined
}
}
const errorMessage =
details?.error || `Failed to download Vault export file (${response.status})`
throw new Error(enhanceGoogleVaultError(errorMessage))
}

if (!params?.accessToken || !params?.bucketName || !params?.objectName) {
throw new Error('Missing required parameters for download')
}

const bucket = encodeURIComponent(params.bucketName)
const object = encodeURIComponent(params.objectName)
const downloadUrl = `https://storage.googleapis.com/storage/v1/b/${bucket}/o/${object}?alt=media`

const downloadResponse = await fetch(downloadUrl, {
method: 'GET',
headers: {
Authorization: `Bearer ${params.accessToken}`,
},
})

if (!downloadResponse.ok) {
const errorText = await downloadResponse.text().catch(() => '')
const errorMessage = `Failed to download file: ${errorText || downloadResponse.statusText}`
throw new Error(enhanceGoogleVaultError(errorMessage))
}

const contentType = downloadResponse.headers.get('content-type') || 'application/octet-stream'
const disposition = downloadResponse.headers.get('content-disposition') || ''
const match = disposition.match(/filename\*=UTF-8''([^;]+)|filename="([^"]+)"/)

let resolvedName = params.fileName
if (!resolvedName) {
if (match?.[1]) {
try {
resolvedName = decodeURIComponent(match[1])
} catch {
resolvedName = match[1]
}
} else if (match?.[2]) {
resolvedName = match[2]
} else if (params.objectName) {
const parts = params.objectName.split('/')
resolvedName = parts[parts.length - 1] || 'vault-export.bin'
} else {
resolvedName = 'vault-export.bin'
}
}

const arrayBuffer = await downloadResponse.arrayBuffer()
const buffer = Buffer.from(arrayBuffer)

return {
success: true,
output: {
file: {
name: resolvedName,
mimeType: contentType,
data: buffer,
size: buffer.length,
},
},
}
},

outputs: {

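The Vault handler above recovers a filename from the Content-Disposition header with a regex that prefers the RFC 5987 filename*= form over the plain quoted form. A self-contained sketch of that parsing logic, using the same regex as the hunk; the helper name and fallback value are illustrative:

// Sketch: extract a filename from a Content-Disposition header.
function filenameFromDisposition(disposition: string, fallback = 'download.bin'): string {
  const match = disposition.match(/filename\*=UTF-8''([^;]+)|filename="([^"]+)"/)
  if (match?.[1]) {
    try {
      return decodeURIComponent(match[1]) // RFC 5987 percent-encoded form
    } catch {
      return match[1] // malformed encoding: fall back to the raw value
    }
  }
  return match?.[2] ?? fallback // plain quoted filename="..." form
}
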
@@ -1,6 +1,9 @@
import { createLogger } from '@sim/logger'
import { generateInternalToken } from '@/lib/auth/internal'
import { secureFetchWithPinnedIP, validateUrlWithDNS } from '@/lib/core/security/input-validation'
import {
secureFetchWithPinnedIP,
validateUrlWithDNS,
} from '@/lib/core/security/input-validation.server'
import { generateRequestId } from '@/lib/core/utils/request'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { parseMcpToolId } from '@/lib/mcp/utils'

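The import above now points at a module with a .server suffix. The idea, sketched under assumptions since the module body is not part of this diff: DNS-resolving URL validation needs Node APIs and must never be bundled into client code, and a server-only file convention lets the bundler enforce that. The function below is an illustrative shape, not the project's implementation:

// Sketch (assumed shape): server-only URL validation that resolves DNS.
// Real SSRF guards check far more address ranges; this only shows why the
// module is server-only: node:dns is unavailable in a browser bundle.
import { lookup } from 'node:dns/promises'

export async function validateUrlWithDNSSketch(raw: string): Promise<{ isValid: boolean; error?: string }> {
  const url = new URL(raw)
  if (!['http:', 'https:'].includes(url.protocol)) {
    return { isValid: false, error: 'URL must use HTTP or HTTPS' }
  }
  const { address } = await lookup(url.hostname)
  if (address.startsWith('127.') || address === '::1') {
    return { isValid: false, error: 'Resolved to a loopback address' }
  }
  return { isValid: true }
}
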
@@ -1,5 +1,4 @@
import { createLogger } from '@sim/logger'
import { sanitizeUrlForLog } from '@/lib/core/utils/logging'
import type {
MicrosoftPlannerReadResponse,
MicrosoftPlannerToolParams,
@@ -77,7 +76,7 @@ export const readTaskTool: ToolConfig<MicrosoftPlannerToolParams, MicrosoftPlann
finalUrl = 'https://graph.microsoft.com/v1.0/me/planner/tasks'
}

logger.info('Microsoft Planner URL:', sanitizeUrlForLog(finalUrl))
logger.info('Microsoft Planner URL:', finalUrl)
return finalUrl
},
method: 'GET',

@@ -1,6 +1,30 @@
import type { UserFile } from '@/executor/types'
import type { ToolFileData, ToolResponse } from '@/tools/types'

export interface GraphApiErrorResponse {
error?: {
message?: string
}
}

export interface GraphDriveItem {
id: string
webUrl?: string
webDavUrl?: string
eTag?: string
name?: string
size?: number
}

export interface GraphChatMessage {
id?: string
chatId?: string
channelIdentity?: { teamId?: string; channelId?: string }
body?: { content?: string }
createdDateTime?: string
webUrl?: string
}

export interface MicrosoftTeamsAttachment {
id: string
contentType: string

@@ -1,5 +1,5 @@
import { createLogger } from '@sim/logger'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { isInternalFileUrl } from '@/lib/uploads/utils/file-utils'
import type {
MistralParserInput,
MistralParserOutput,
@@ -19,10 +19,16 @@ export const mistralParserTool: ToolConfig<MistralParserInput, MistralParserOutp
params: {
filePath: {
type: 'string',
required: true,
required: false,
visibility: 'user-only',
description: 'URL to a PDF document to be processed',
},
file: {
type: 'file',
required: false,
visibility: 'hidden',
description: 'Document file to be processed',
},
fileUpload: {
type: 'object',
required: false,
@@ -84,162 +90,84 @@ export const mistralParserTool: ToolConfig<MistralParserInput, MistralParserOutp
throw new Error('Invalid parameters: Parameters must be provided as an object')
}

// Validate required parameters
if (!params.apiKey || typeof params.apiKey !== 'string' || params.apiKey.trim() === '') {
throw new Error('Missing or invalid API key: A valid Mistral API key is required')
}

// Check if we have a file upload instead of direct URL
if (
params.fileUpload &&
(!params.filePath || params.filePath === 'null' || params.filePath === '')
) {
// Try to extract file path from upload data
if (
typeof params.fileUpload === 'object' &&
params.fileUpload !== null &&
params.fileUpload.url
) {
// Get the full URL to the file
let uploadedFilePath = params.fileUpload.url
const fileInput =
params.file && typeof params.file === 'object' ? params.file : params.fileUpload
const hasFileUpload = fileInput && typeof fileInput === 'object'
const hasFilePath =
typeof params.filePath === 'string' &&
params.filePath !== 'null' &&
params.filePath.trim() !== ''

// Make sure the file path is an absolute URL
if (uploadedFilePath.startsWith('/')) {
// If it's a relative path starting with /, convert to absolute URL
const baseUrl = getBaseUrl()
if (!baseUrl) throw new Error('Failed to get base URL for file path conversion')
uploadedFilePath = `${baseUrl}${uploadedFilePath}`
}

// Set the filePath parameter
params.filePath = uploadedFilePath
logger.info('Using uploaded file:', uploadedFilePath)
} else {
throw new Error('Invalid file upload: Upload data is missing or invalid')
}
}

if (
!params.filePath ||
typeof params.filePath !== 'string' ||
params.filePath.trim() === ''
) {
throw new Error('Missing or invalid file path: Please provide a URL to a PDF document')
}

let filePathToValidate = params.filePath.trim()
if (filePathToValidate.startsWith('/')) {
const baseUrl = getBaseUrl()
if (!baseUrl) throw new Error('Failed to get base URL for file path conversion')
filePathToValidate = `${baseUrl}${filePathToValidate}`
}

let url
try {
url = new URL(filePathToValidate)

// Validate protocol
if (!['http:', 'https:'].includes(url.protocol)) {
throw new Error(`Invalid protocol: ${url.protocol}. URL must use HTTP or HTTPS protocol`)
}

// Validate against known unsupported services
if (url.hostname.includes('drive.google.com') || url.hostname.includes('docs.google.com')) {
throw new Error(
'Google Drive links are not supported by the Mistral OCR API. ' +
'Please upload your PDF to a public web server or provide a direct download link ' +
'that ends with .pdf extension.'
)
}

// Validate file appears to be a PDF (stricter check with informative warning)
const pathname = url.pathname.toLowerCase()
if (!pathname.endsWith('.pdf')) {
// Check if PDF is included in the path at all
if (!pathname.includes('pdf')) {
logger.warn(
'Warning: URL does not appear to point to a PDF document. ' +
'The Mistral OCR API is designed to work with PDF files. ' +
'Please ensure your URL points to a valid PDF document (ideally ending with .pdf extension).'
)
} else {
// If "pdf" is in the URL but not at the end, give a different warning
logger.warn(
'Warning: URL contains "pdf" but does not end with .pdf extension. ' +
'This might still work if the server returns a valid PDF document despite the missing extension.'
)
}
}
} catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error)
throw new Error(
`Invalid URL format: ${errorMessage}. Please provide a valid HTTP or HTTPS URL to a PDF document (e.g., https://example.com/document.pdf)`
)
}

// Create the request body with required parameters
const requestBody: Record<string, any> = {
const requestBody: Record<string, unknown> = {
apiKey: params.apiKey,
filePath: url.toString(),
}

// Check if this is an internal workspace file path
if (params.fileUpload?.url?.startsWith('/api/files/serve/')) {
// Update filePath to the internal path for workspace files
requestBody.filePath = params.fileUpload.url
}
if (hasFilePath) {
const filePathToValidate = params.filePath!.trim()

// Add optional parameters with proper validation
// Include images (base64)
if (params.includeImageBase64 !== undefined) {
if (typeof params.includeImageBase64 !== 'boolean') {
logger.warn('includeImageBase64 parameter should be a boolean, using default (false)')
if (filePathToValidate.startsWith('/')) {
if (!isInternalFileUrl(filePathToValidate)) {
throw new Error(
'Invalid file path. Only uploaded files are supported for internal paths.'
)
}
requestBody.filePath = filePathToValidate
} else {
requestBody.includeImageBase64 = params.includeImageBase64
}
}

// Page selection - safely handle null and undefined
if (params.pages !== undefined && params.pages !== null) {
if (Array.isArray(params.pages) && params.pages.length > 0) {
// Validate all page numbers are non-negative integers
const validPages = params.pages.filter(
(page) => typeof page === 'number' && Number.isInteger(page) && page >= 0
)

if (validPages.length > 0) {
requestBody.pages = validPages

if (validPages.length !== params.pages.length) {
logger.warn(
`Some invalid page numbers were removed. Using ${validPages.length} valid pages: ${validPages.join(', ')}`
let url
try {
url = new URL(filePathToValidate)
if (!['http:', 'https:'].includes(url.protocol)) {
throw new Error(
`Invalid protocol: ${url.protocol}. URL must use HTTP or HTTPS protocol`
)
}
} else {
logger.warn('No valid page numbers provided, processing all pages')
} catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error)
throw new Error(
`Invalid URL format: ${errorMessage}. Please provide a valid HTTP or HTTPS URL to a PDF document (e.g., https://example.com/document.pdf)`
)
}
} else if (Array.isArray(params.pages) && params.pages.length === 0) {
logger.warn('Empty pages array provided, processing all pages')

requestBody.filePath = url.toString()
}
} else if (hasFileUpload) {
requestBody.file = fileInput
} else {
throw new Error('Missing file input: Please provide a PDF URL or upload a file')
}

if (params.includeImageBase64 !== undefined) {
requestBody.includeImageBase64 = params.includeImageBase64
}

if (Array.isArray(params.pages) && params.pages.length > 0) {
const validPages = params.pages.filter(
(page) => typeof page === 'number' && Number.isInteger(page) && page >= 0
)
if (validPages.length > 0) {
requestBody.pages = validPages
}
}

// Image limit - safely handle null and undefined
if (typeof params.resultType === 'string' && params.resultType.trim() !== '') {
requestBody.resultType = params.resultType
}

if (params.imageLimit !== undefined && params.imageLimit !== null) {
const imageLimit = Number(params.imageLimit)
if (Number.isInteger(imageLimit) && imageLimit > 0) {
if (!Number.isNaN(imageLimit) && imageLimit >= 0) {
requestBody.imageLimit = imageLimit
} else {
logger.warn('imageLimit must be a positive integer, ignoring this parameter')
}
}

// Minimum image size - safely handle null and undefined
if (params.imageMinSize !== undefined && params.imageMinSize !== null) {
const imageMinSize = Number(params.imageMinSize)
if (Number.isInteger(imageMinSize) && imageMinSize > 0) {
if (!Number.isNaN(imageMinSize) && imageMinSize >= 0) {
requestBody.imageMinSize = imageMinSize
} else {
logger.warn('imageMinSize must be a positive integer, ignoring this parameter')
}
}

@@ -422,18 +350,12 @@ export const mistralParserTool: ToolConfig<MistralParserInput, MistralParserOutp
}

const mistralParserV2Params = {
fileData: {
type: 'object',
required: false,
file: {
type: 'file',
required: true,
visibility: 'hidden',
description: 'File data from a previous block',
},
filePath: {
type: 'string',
required: false,
visibility: 'hidden',
description: 'File path or URL (legacy)',
},
resultType: mistralParserTool.params.resultType,
includeImageBase64: mistralParserTool.params.includeImageBase64,
pages: mistralParserTool.params.pages,
@@ -467,8 +389,8 @@ export const mistralParserV2Tool: ToolConfig<MistralParserV2Input, MistralParser
throw new Error('Missing or invalid API key: A valid Mistral API key is required')
}

const fileData = params.fileData ?? params.filePath
if (!fileData) {
const file = params.file
if (!file || typeof file !== 'object') {
throw new Error('File input is required')
}

@@ -477,11 +399,7 @@ export const mistralParserV2Tool: ToolConfig<MistralParserV2Input, MistralParser
resultType: params.resultType || 'markdown',
}

if (typeof fileData === 'string') {
requestBody.filePath = fileData.trim()
} else {
requestBody.fileData = fileData
}
requestBody.file = file

if (params.pages) {
requestBody.pages = params.pages

@@ -1,3 +1,4 @@
import type { RawFileInput } from '@/lib/uploads/utils/file-utils'
import type { UserFile } from '@/executor/types'
import type { OutputProperty, ToolResponse } from '@/tools/types'

@@ -118,7 +119,7 @@ export const MISTRAL_PARSER_METADATA_OUTPUT_PROPERTIES = {
jobId: { type: 'string', description: 'Unique job identifier' },
fileType: { type: 'string', description: 'File type (e.g., pdf)' },
fileName: { type: 'string', description: 'Original file name' },
source: { type: 'string', description: 'Source type (url)' },
source: { type: 'string', description: 'Source type (url or file)' },
pageCount: { type: 'number', description: 'Number of pages processed' },
model: { type: 'string', description: 'Mistral model used' },
resultType: { type: 'string', description: 'Output format (markdown, text, json)' },
@@ -137,8 +138,9 @@ export const MISTRAL_PARSER_METADATA_OUTPUT: OutputProperty = {
}

export interface MistralParserInput {
filePath: string
fileUpload?: UserFile
filePath?: string
file?: RawFileInput
fileUpload?: RawFileInput
_internalFilePath?: string
apiKey: string
resultType?: 'markdown' | 'text' | 'json'
@@ -149,8 +151,7 @@ export interface MistralParserInput {
}

export interface MistralParserV2Input {
fileData?: UserFile | string
filePath?: string
file: UserFile
apiKey: string
resultType?: 'markdown' | 'text' | 'json'
includeImageBase64?: boolean
@@ -168,7 +169,7 @@ export interface MistralParserMetadata {
jobId: string
fileType: string
fileName: string
source: 'url'
source: 'url' | 'file'
sourceUrl?: string
pageCount: number
usageInfo?: MistralOcrUsageInfo

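The rewritten Mistral body builder above normalizes two input shapes, a URL string and an uploaded file object, before building the request body. A condensed sketch of that branching; the internal-path guard via isInternalFileUrl is elided here to keep the sketch self-contained, and resolveFileInput is a hypothetical name:

// Sketch: URL-vs-file branching as used by the parser tools in this diff.
function resolveFileInput(filePath: string | undefined, file: object | undefined) {
  const hasFilePath =
    typeof filePath === 'string' && filePath !== 'null' && filePath.trim() !== ''
  if (hasFilePath) {
    const candidate = filePath!.trim()
    if (candidate.startsWith('/')) {
      return { filePath: candidate } // internal path: resolved server-side
    }
    const url = new URL(candidate) // throws on malformed input
    if (!['http:', 'https:'].includes(url.protocol)) {
      throw new Error(`Invalid protocol: ${url.protocol}`)
    }
    return { filePath: url.toString() }
  }
  if (file && typeof file === 'object') return { file }
  throw new Error('Missing file input: provide a URL or upload a file')
}
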
@@ -1,9 +1,6 @@
import { createLogger } from '@sim/logger'
import type { OneDriveDownloadResponse, OneDriveToolParams } from '@/tools/onedrive/types'
import type { ToolConfig } from '@/tools/types'

const logger = createLogger('OneDriveDownloadTool')

export const downloadTool: ToolConfig<OneDriveToolParams, OneDriveDownloadResponse> = {
id: 'onedrive_download',
name: 'Download File from OneDrive',
@@ -37,91 +34,16 @@ export const downloadTool: ToolConfig<OneDriveToolParams, OneDriveDownloadRespon
},

request: {
url: (params) => {
return `https://graph.microsoft.com/v1.0/me/drive/items/${params.fileId}`
},
method: 'GET',
headers: (params) => ({
Authorization: `Bearer ${params.accessToken}`,
url: '/api/tools/onedrive/download',
method: 'POST',
headers: () => ({
'Content-Type': 'application/json',
}),
body: (params) => ({
accessToken: params.accessToken,
fileId: params.fileId,
fileName: params.fileName,
}),
},

transformResponse: async (response: Response, params?: OneDriveToolParams) => {
try {
if (!response.ok) {
const errorDetails = await response.json().catch(() => ({}))
logger.error('Failed to get file metadata', {
status: response.status,
statusText: response.statusText,
error: errorDetails,
requestedFileId: params?.fileId,
})
throw new Error(errorDetails.error?.message || 'Failed to get file metadata')
}

const metadata = await response.json()

// Check if this is actually a folder
if (metadata.folder && !metadata.file) {
logger.error('Attempted to download a folder instead of a file', {
itemId: metadata.id,
itemName: metadata.name,
isFolder: true,
})
throw new Error(`Cannot download folder "${metadata.name}". Please select a file instead.`)
}

const fileId = metadata.id
const fileName = metadata.name
const mimeType = metadata.file?.mimeType || 'application/octet-stream'
const authHeader = `Bearer ${params?.accessToken || ''}`

const downloadResponse = await fetch(
`https://graph.microsoft.com/v1.0/me/drive/items/${fileId}/content`,
{
headers: {
Authorization: authHeader,
},
}
)

if (!downloadResponse.ok) {
const downloadError = await downloadResponse.json().catch(() => ({}))
logger.error('Failed to download file', {
status: downloadResponse.status,
statusText: downloadResponse.statusText,
error: downloadError,
})
throw new Error(downloadError.error?.message || 'Failed to download file')
}

const arrayBuffer = await downloadResponse.arrayBuffer()
const fileBuffer = Buffer.from(arrayBuffer)

const resolvedName = params?.fileName || fileName || 'download'

// Convert buffer to base64 string for proper JSON serialization
// This ensures the file data survives the proxy round-trip
const base64Data = fileBuffer.toString('base64')

return {
success: true,
output: {
file: {
name: resolvedName,
mimeType,
data: base64Data,
size: fileBuffer.length,
},
},
}
} catch (error: any) {
logger.error('Error in transform response', {
error: error.message,
stack: error.stack,
})
throw error
}
},

outputs: {

@@ -77,7 +77,6 @@ export const imageTool: ToolConfig = {
n: params.n ? Number(params.n) : 1,
}

// Add model-specific parameters
if (params.model === 'dall-e-3') {
if (params.quality) body.quality = params.quality
if (params.style) body.style = params.style
@@ -164,37 +163,6 @@ export const imageTool: ToolConfig = {
base64Image = buffer.toString('base64')
} catch (error) {
logger.error('Error fetching or processing image:', error)

try {
logger.info('Attempting fallback with direct browser fetch...')
const directImageResponse = await fetch(imageUrl, {
cache: 'no-store',
headers: {
Accept: 'image/*, */*',
'User-Agent': 'Mozilla/5.0 (compatible DalleProxy/1.0)',
},
})

if (!directImageResponse.ok) {
throw new Error(`Direct fetch failed: ${directImageResponse.status}`)
}

const imageBlob = await directImageResponse.blob()
if (imageBlob.size === 0) {
throw new Error('Empty blob received from direct fetch')
}

const arrayBuffer = await imageBlob.arrayBuffer()
const buffer = Buffer.from(arrayBuffer)
base64Image = buffer.toString('base64')

logger.info(
'Successfully converted image to base64 via direct fetch, length:',
base64Image.length
)
} catch (fallbackError) {
logger.error('Fallback fetch also failed:', fallbackError)
}
}
}

@@ -47,7 +47,7 @@ async function downloadAttachments(
const buffer = Buffer.from(contentBytes, 'base64')
attachments.push({
name: attachment.name,
data: buffer,
data: buffer.toString('base64'),
contentType: attachment.contentType,
size: attachment.size,
})

@@ -218,7 +218,7 @@ export interface OutlookMessagesResponse {
// Outlook attachment interface (for tool responses)
export interface OutlookAttachment {
name: string
data: Buffer
data: string
contentType: string
size: number
}

@@ -1,10 +1,7 @@
import { createLogger } from '@sim/logger'
import type { PipedriveGetFilesParams, PipedriveGetFilesResponse } from '@/tools/pipedrive/types'
import { PIPEDRIVE_FILE_OUTPUT_PROPERTIES } from '@/tools/pipedrive/types'
import type { ToolConfig } from '@/tools/types'

const logger = createLogger('PipedriveGetFiles')

export const pipedriveGetFilesTool: ToolConfig<PipedriveGetFilesParams, PipedriveGetFilesResponse> =
{
id: 'pipedrive_get_files',
@@ -43,52 +40,28 @@ export const pipedriveGetFilesTool: ToolConfig<PipedriveGetFilesParams, Pipedriv
visibility: 'user-or-llm',
description: 'Number of results to return (e.g., "50", default: 100, max: 500)',
},
downloadFiles: {
type: 'boolean',
required: false,
visibility: 'user-or-llm',
description: 'Download file contents into file outputs',
},
},

request: {
url: (params) => {
const baseUrl = 'https://api.pipedrive.com/v1/files'
const queryParams = new URLSearchParams()

if (params.deal_id) queryParams.append('deal_id', params.deal_id)
if (params.person_id) queryParams.append('person_id', params.person_id)
if (params.org_id) queryParams.append('org_id', params.org_id)
if (params.limit) queryParams.append('limit', params.limit)

const queryString = queryParams.toString()
return queryString ? `${baseUrl}?${queryString}` : baseUrl
},
method: 'GET',
headers: (params) => {
if (!params.accessToken) {
throw new Error('Access token is required')
}

return {
Authorization: `Bearer ${params.accessToken}`,
Accept: 'application/json',
}
},
},

transformResponse: async (response: Response) => {
const data = await response.json()

if (!data.success) {
logger.error('Pipedrive API request failed', { data })
throw new Error(data.error || 'Failed to fetch files from Pipedrive')
}

const files = data.data || []

return {
success: true,
output: {
files,
total_items: files.length,
success: true,
},
}
url: '/api/tools/pipedrive/get-files',
method: 'POST',
headers: () => ({
'Content-Type': 'application/json',
}),
body: (params) => ({
accessToken: params.accessToken,
deal_id: params.deal_id,
person_id: params.person_id,
org_id: params.org_id,
limit: params.limit,
downloadFiles: params.downloadFiles,
}),
},

outputs: {
@@ -100,6 +73,11 @@ export const pipedriveGetFilesTool: ToolConfig<PipedriveGetFilesParams, Pipedriv
properties: PIPEDRIVE_FILE_OUTPUT_PROPERTIES,
},
},
downloadedFiles: {
type: 'file[]',
description: 'Downloaded files from Pipedrive',
optional: true,
},
total_items: { type: 'number', description: 'Total number of files returned' },
success: { type: 'boolean', description: 'Operation success status' },
},

@@ -1,4 +1,4 @@
import type { OutputProperty, ToolResponse } from '@/tools/types'
import type { OutputProperty, ToolFileData, ToolResponse } from '@/tools/types'

/**
* Output property definitions for Pipedrive API responses.
@@ -435,10 +435,12 @@ export interface PipedriveGetFilesParams {
person_id?: string
org_id?: string
limit?: string
downloadFiles?: boolean
}

export interface PipedriveGetFilesOutput {
files: PipedriveFile[]
downloadedFiles?: ToolFileData[]
total_items: number
success: boolean
}

@@ -1,2 +1,2 @@
export { pulseParserTool } from '@/tools/pulse/parser'
export { pulseParserTool, pulseParserV2Tool } from '@/tools/pulse/parser'
export * from './types'

@@ -1,10 +1,7 @@
import { createLogger } from '@sim/logger'
import { getBaseUrl } from '@/lib/core/utils/urls'
import type { PulseParserInput, PulseParserOutput } from '@/tools/pulse/types'
import { isInternalFileUrl } from '@/lib/uploads/utils/file-utils'
import type { PulseParserInput, PulseParserOutput, PulseParserV2Input } from '@/tools/pulse/types'
import type { ToolConfig } from '@/tools/types'

const logger = createLogger('PulseParserTool')

export const pulseParserTool: ToolConfig<PulseParserInput, PulseParserOutput> = {
id: 'pulse_parser',
name: 'Pulse Document Parser',
@@ -14,10 +11,16 @@ export const pulseParserTool: ToolConfig<PulseParserInput, PulseParserOutput> =
params: {
filePath: {
type: 'string',
required: true,
required: false,
visibility: 'user-only',
description: 'URL to a document to be processed',
},
file: {
type: 'file',
required: false,
visibility: 'hidden',
description: 'Document file to be processed',
},
fileUpload: {
type: 'object',
required: false,
@@ -86,70 +89,50 @@ export const pulseParserTool: ToolConfig<PulseParserInput, PulseParserOutput> =
throw new Error('Missing or invalid API key: A valid Pulse API key is required')
}

if (
params.fileUpload &&
(!params.filePath || params.filePath === 'null' || params.filePath === '')
) {
if (
typeof params.fileUpload === 'object' &&
params.fileUpload !== null &&
(params.fileUpload.url || params.fileUpload.path)
) {
let uploadedFilePath: string = params.fileUpload.url ?? params.fileUpload.path ?? ''

if (!uploadedFilePath) {
throw new Error('Invalid file upload: Upload data is missing or invalid')
}

if (uploadedFilePath.startsWith('/')) {
const baseUrl = getBaseUrl()
if (!baseUrl) throw new Error('Failed to get base URL for file path conversion')
uploadedFilePath = `${baseUrl}${uploadedFilePath}`
}

params.filePath = uploadedFilePath
logger.info('Using uploaded file:', uploadedFilePath)
} else {
throw new Error('Invalid file upload: Upload data is missing or invalid')
}
}

if (
!params.filePath ||
typeof params.filePath !== 'string' ||
params.filePath.trim() === ''
) {
throw new Error('Missing or invalid file path: Please provide a URL to a document')
}

let filePathToValidate = params.filePath.trim()
if (filePathToValidate.startsWith('/')) {
const baseUrl = getBaseUrl()
if (!baseUrl) throw new Error('Failed to get base URL for file path conversion')
filePathToValidate = `${baseUrl}${filePathToValidate}`
}

let url
try {
url = new URL(filePathToValidate)

if (!['http:', 'https:'].includes(url.protocol)) {
throw new Error(`Invalid protocol: ${url.protocol}. URL must use HTTP or HTTPS protocol`)
}
} catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error)
throw new Error(
`Invalid URL format: ${errorMessage}. Please provide a valid HTTP or HTTPS URL to a document`
)
}

const requestBody: Record<string, unknown> = {
apiKey: params.apiKey.trim(),
filePath: url.toString(),
}
const fileInput =
params.file && typeof params.file === 'object' ? params.file : params.fileUpload
const hasFileUpload = fileInput && typeof fileInput === 'object'
const hasFilePath =
typeof params.filePath === 'string' &&
params.filePath !== 'null' &&
params.filePath.trim() !== ''

if (params.fileUpload?.path?.startsWith('/api/files/serve/')) {
requestBody.filePath = params.fileUpload.path
if (hasFilePath) {
const filePathToValidate = params.filePath!.trim()

if (filePathToValidate.startsWith('/')) {
if (!isInternalFileUrl(filePathToValidate)) {
throw new Error(
'Invalid file path. Only uploaded files are supported for internal paths.'
)
}
requestBody.filePath = filePathToValidate
} else {
let url
try {
url = new URL(filePathToValidate)

if (!['http:', 'https:'].includes(url.protocol)) {
throw new Error(
`Invalid protocol: ${url.protocol}. URL must use HTTP or HTTPS protocol`
)
}
} catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error)
throw new Error(
`Invalid URL format: ${errorMessage}. Please provide a valid HTTP or HTTPS URL to a document`
)
}

requestBody.filePath = url.toString()
}
} else if (hasFileUpload) {
requestBody.file = fileInput
} else {
throw new Error('Missing file input: Please provide a document URL or upload a file')
}

if (params.pages && typeof params.pages === 'string' && params.pages.trim() !== '') {
@@ -270,3 +253,77 @@ export const pulseParserTool: ToolConfig<PulseParserInput, PulseParserOutput> =
},
},
}

export const pulseParserV2Tool: ToolConfig<PulseParserV2Input, PulseParserOutput> = {
...pulseParserTool,
id: 'pulse_parser_v2',
name: 'Pulse Document Parser (File Only)',
postProcess: undefined,
directExecution: undefined,
transformResponse: pulseParserTool.transformResponse
? (response: Response, params?: PulseParserV2Input) =>
pulseParserTool.transformResponse!(response, params as unknown as PulseParserInput)
: undefined,
params: {
file: {
type: 'file',
required: true,
visibility: 'user-only',
description: 'Document to be processed',
},
pages: pulseParserTool.params.pages,
extractFigure: pulseParserTool.params.extractFigure,
figureDescription: pulseParserTool.params.figureDescription,
returnHtml: pulseParserTool.params.returnHtml,
chunking: pulseParserTool.params.chunking,
chunkSize: pulseParserTool.params.chunkSize,
apiKey: pulseParserTool.params.apiKey,
},
request: {
url: '/api/tools/pulse/parse',
method: 'POST',
headers: () => ({
'Content-Type': 'application/json',
Accept: 'application/json',
}),
body: (params: PulseParserV2Input) => {
if (!params || typeof params !== 'object') {
throw new Error('Invalid parameters: Parameters must be provided as an object')
}

if (!params.apiKey || typeof params.apiKey !== 'string' || params.apiKey.trim() === '') {
throw new Error('Missing or invalid API key: A valid Pulse API key is required')
}

if (!params.file || typeof params.file !== 'object') {
throw new Error('Missing or invalid file: Please provide a file object')
}

const requestBody: Record<string, unknown> = {
apiKey: params.apiKey.trim(),
file: params.file,
}

if (params.pages && typeof params.pages === 'string' && params.pages.trim() !== '') {
requestBody.pages = params.pages.trim()
}
if (params.extractFigure !== undefined) {
requestBody.extractFigure = params.extractFigure
}
if (params.figureDescription !== undefined) {
requestBody.figureDescription = params.figureDescription
}
if (params.returnHtml !== undefined) {
requestBody.returnHtml = params.returnHtml
}
if (params.chunking && typeof params.chunking === 'string' && params.chunking.trim() !== '') {
requestBody.chunking = params.chunking.trim()
}
if (params.chunkSize !== undefined && params.chunkSize > 0) {
requestBody.chunkSize = params.chunkSize
}

return requestBody
},
},
}

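pulseParserV2Tool above derives a file-only variant by spreading the v1 config and overriding only what differs. A generic sketch of that pattern; baseTool and v2Tool are illustrative names, not code from this commit:

// Sketch: deriving a v2 tool config via object spread. Later keys win,
// so only changed fields are restated; inherited hooks that no longer
// apply are cleared explicitly rather than silently carried over.
const baseTool = { id: 'example', name: 'Example', postProcess: () => {} }

const v2Tool = {
  ...baseTool,
  id: 'example_v2',
  name: 'Example (File Only)',
  postProcess: undefined, // inherited hook intentionally disabled
}
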
@@ -1,3 +1,5 @@
import type { RawFileInput } from '@/lib/uploads/utils/file-utils'
import type { UserFile } from '@/executor/types'
import type { ToolResponse } from '@/tools/types'

/**
@@ -5,13 +7,38 @@ import type { ToolResponse } from '@/tools/types'
*/
export interface PulseParserInput {
/** URL to a document to be processed */
filePath: string
filePath?: string

file?: RawFileInput

/** File upload data (from file-upload component) */
fileUpload?: {
url?: string
path?: string
}
fileUpload?: RawFileInput

/** Pulse API key for authentication */
apiKey: string

/** Page range to process (1-indexed, e.g., "1-2,5") */
pages?: string

/** Whether to extract figures from the document */
extractFigure?: boolean

/** Whether to generate figure descriptions/captions */
figureDescription?: boolean

/** Whether to include HTML in the response */
returnHtml?: boolean

/** Chunking strategies (comma-separated: semantic, header, page, recursive) */
chunking?: string

/** Maximum characters per chunk when chunking is enabled */
chunkSize?: number
}

export interface PulseParserV2Input {
/** File to be processed */
file: UserFile

/** Pulse API key for authentication */
apiKey: string

@@ -1,3 +1,3 @@
import { reductoParserTool } from '@/tools/reducto/parser'
import { reductoParserTool, reductoParserV2Tool } from '@/tools/reducto/parser'

export { reductoParserTool }
export { reductoParserTool, reductoParserV2Tool }

@@ -1,10 +1,11 @@
import { createLogger } from '@sim/logger'
import { getBaseUrl } from '@/lib/core/utils/urls'
import type { ReductoParserInput, ReductoParserOutput } from '@/tools/reducto/types'
import { isInternalFileUrl } from '@/lib/uploads/utils/file-utils'
import type {
ReductoParserInput,
ReductoParserOutput,
ReductoParserV2Input,
} from '@/tools/reducto/types'
import type { ToolConfig } from '@/tools/types'

const logger = createLogger('ReductoParserTool')

export const reductoParserTool: ToolConfig<ReductoParserInput, ReductoParserOutput> = {
id: 'reducto_parser',
name: 'Reducto PDF Parser',
@@ -14,10 +15,16 @@ export const reductoParserTool: ToolConfig<ReductoParserInput, ReductoParserOutp
params: {
filePath: {
type: 'string',
required: true,
required: false,
visibility: 'user-only',
description: 'URL to a PDF document to be processed',
},
file: {
type: 'file',
required: false,
visibility: 'hidden',
description: 'Document file to be processed',
},
fileUpload: {
type: 'object',
required: false,
@@ -63,66 +70,50 @@ export const reductoParserTool: ToolConfig<ReductoParserInput, ReductoParserOutp
throw new Error('Missing or invalid API key: A valid Reducto API key is required')
}

if (
params.fileUpload &&
(!params.filePath || params.filePath === 'null' || params.filePath === '')
) {
if (
typeof params.fileUpload === 'object' &&
params.fileUpload !== null &&
(params.fileUpload.url || params.fileUpload.path)
) {
let uploadedFilePath = (params.fileUpload.url || params.fileUpload.path) as string

if (uploadedFilePath.startsWith('/')) {
const baseUrl = getBaseUrl()
if (!baseUrl) throw new Error('Failed to get base URL for file path conversion')
uploadedFilePath = `${baseUrl}${uploadedFilePath}`
}

params.filePath = uploadedFilePath as string
logger.info('Using uploaded file:', uploadedFilePath)
} else {
throw new Error('Invalid file upload: Upload data is missing or invalid')
}
}

if (
!params.filePath ||
typeof params.filePath !== 'string' ||
params.filePath.trim() === ''
) {
throw new Error('Missing or invalid file path: Please provide a URL to a PDF document')
}

let filePathToValidate = params.filePath.trim()
if (filePathToValidate.startsWith('/')) {
const baseUrl = getBaseUrl()
if (!baseUrl) throw new Error('Failed to get base URL for file path conversion')
filePathToValidate = `${baseUrl}${filePathToValidate}`
}

let url
try {
url = new URL(filePathToValidate)

if (!['http:', 'https:'].includes(url.protocol)) {
throw new Error(`Invalid protocol: ${url.protocol}. URL must use HTTP or HTTPS protocol`)
}
} catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error)
throw new Error(
`Invalid URL format: ${errorMessage}. Please provide a valid HTTP or HTTPS URL to a PDF document.`
)
}

const requestBody: Record<string, unknown> = {
apiKey: params.apiKey,
filePath: url.toString(),
}
const fileInput =
params.file && typeof params.file === 'object' ? params.file : params.fileUpload
const hasFileUpload = fileInput && typeof fileInput === 'object'
const hasFilePath =
typeof params.filePath === 'string' &&
params.filePath !== 'null' &&
params.filePath.trim() !== ''

if (params.fileUpload?.path?.startsWith('/api/files/serve/')) {
requestBody.filePath = params.fileUpload.path
if (hasFilePath) {
const filePathToValidate = params.filePath!.trim()

if (filePathToValidate.startsWith('/')) {
if (!isInternalFileUrl(filePathToValidate)) {
throw new Error(
'Invalid file path. Only uploaded files are supported for internal paths.'
)
}
requestBody.filePath = filePathToValidate
} else {
let url
try {
url = new URL(filePathToValidate)

if (!['http:', 'https:'].includes(url.protocol)) {
throw new Error(
`Invalid protocol: ${url.protocol}. URL must use HTTP or HTTPS protocol`
)
}
} catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error)
throw new Error(
`Invalid URL format: ${errorMessage}. Please provide a valid HTTP or HTTPS URL to a PDF document.`
)
}

requestBody.filePath = url.toString()
}
} else if (hasFileUpload) {
requestBody.file = fileInput
} else {
throw new Error('Missing file input: Please provide a PDF URL or upload a file')
}

if (params.tableOutputFormat && ['html', 'md'].includes(params.tableOutputFormat)) {
@@ -190,3 +181,71 @@ export const reductoParserTool: ToolConfig<ReductoParserInput, ReductoParserOutp
},
},
}

export const reductoParserV2Tool: ToolConfig<ReductoParserV2Input, ReductoParserOutput> = {
...reductoParserTool,
id: 'reducto_parser_v2',
name: 'Reducto PDF Parser (File Only)',
postProcess: undefined,
directExecution: undefined,
transformResponse: reductoParserTool.transformResponse
? (response: Response, params?: ReductoParserV2Input) =>
reductoParserTool.transformResponse!(response, params as unknown as ReductoParserInput)
: undefined,
params: {
file: {
type: 'file',
required: true,
visibility: 'user-only',
description: 'PDF document to be processed',
},
pages: reductoParserTool.params.pages,
tableOutputFormat: reductoParserTool.params.tableOutputFormat,
apiKey: reductoParserTool.params.apiKey,
},
request: {
url: '/api/tools/reducto/parse',
method: 'POST',
headers: (params) => ({
'Content-Type': 'application/json',
Accept: 'application/json',
Authorization: `Bearer ${params.apiKey}`,
}),
body: (params: ReductoParserV2Input) => {
if (!params || typeof params !== 'object') {
throw new Error('Invalid parameters: Parameters must be provided as an object')
}

if (!params.apiKey || typeof params.apiKey !== 'string' || params.apiKey.trim() === '') {
throw new Error('Missing or invalid API key: A valid Reducto API key is required')
}

if (!params.file || typeof params.file !== 'object') {
throw new Error('Missing or invalid file: Please provide a file object')
}

const requestBody: Record<string, unknown> = {
apiKey: params.apiKey,
file: params.file,
}

if (params.tableOutputFormat && ['html', 'md'].includes(params.tableOutputFormat)) {
requestBody.tableOutputFormat = params.tableOutputFormat
}

if (params.pages !== undefined && params.pages !== null) {
if (Array.isArray(params.pages) && params.pages.length > 0) {
const validPages = params.pages.filter(
(page) => typeof page === 'number' && Number.isInteger(page) && page >= 0
)

if (validPages.length > 0) {
requestBody.pages = validPages
}
}
}

return requestBody
},
},
}

@@ -1,3 +1,5 @@
import type { RawFileInput } from '@/lib/uploads/utils/file-utils'
import type { UserFile } from '@/executor/types'
import type { ToolResponse } from '@/tools/types'

/**
@@ -5,13 +7,26 @@ import type { ToolResponse } from '@/tools/types'
*/
export interface ReductoParserInput {
/** URL to a document to be processed */
filePath: string
filePath?: string

file?: RawFileInput

/** File upload data (from file-upload component) */
fileUpload?: {
url?: string
path?: string
}
fileUpload?: RawFileInput

/** Reducto API key for authentication */
apiKey: string

/** Specific pages to process (1-indexed) */
pages?: number[]

/** Table output format (html or md) */
tableOutputFormat?: 'html' | 'md'
}

export interface ReductoParserV2Input {
/** File to be processed */
file: UserFile

/** Reducto API key for authentication */
apiKey: string

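Both parser body builders above narrow the pages parameter to non-negative integers before forwarding it. The same check as a standalone helper; sanitizePages is a hypothetical name, the filter predicate is taken from the hunks above:

// Sketch: keep only non-negative integer page numbers, dropping the rest.
function sanitizePages(pages: unknown): number[] | undefined {
  if (!Array.isArray(pages) || pages.length === 0) return undefined
  const valid = pages.filter(
    (page): page is number => typeof page === 'number' && Number.isInteger(page) && page >= 0
  )
  return valid.length > 0 ? valid : undefined
}

// e.g. sanitizePages([1, 2, -3, 2.5]) yields [1, 2].
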
@@ -1229,7 +1229,7 @@ import {
posthogUpdatePropertyDefinitionTool,
posthogUpdateSurveyTool,
} from '@/tools/posthog'
import { pulseParserTool } from '@/tools/pulse'
import { pulseParserTool, pulseParserV2Tool } from '@/tools/pulse'
import { qdrantFetchTool, qdrantSearchTool, qdrantUpsertTool } from '@/tools/qdrant'
import {
rdsDeleteTool,
@@ -1254,7 +1254,7 @@ import {
redditUnsaveTool,
redditVoteTool,
} from '@/tools/reddit'
import { reductoParserTool } from '@/tools/reducto'
import { reductoParserTool, reductoParserV2Tool } from '@/tools/reducto'
import { mailSendTool } from '@/tools/resend'
import {
s3CopyObjectTool,
@@ -1554,10 +1554,15 @@ import {
} from '@/tools/stripe'
import {
assemblyaiSttTool,
assemblyaiSttV2Tool,
deepgramSttTool,
deepgramSttV2Tool,
elevenLabsSttTool,
elevenLabsSttV2Tool,
geminiSttTool,
geminiSttV2Tool,
whisperSttTool,
whisperSttV2Tool,
} from '@/tools/stt'
import {
supabaseCountTool,
@@ -1593,7 +1598,7 @@ import {
telegramSendPhotoTool,
telegramSendVideoTool,
} from '@/tools/telegram'
import { textractParserTool } from '@/tools/textract'
import { textractParserTool, textractParserV2Tool } from '@/tools/textract'
import { thinkingTool } from '@/tools/thinking'
import { tinybirdEventsTool, tinybirdQueryTool } from '@/tools/tinybird'
import {
@@ -1633,7 +1638,7 @@ import {
runwayVideoTool,
veoVideoTool,
} from '@/tools/video'
import { visionTool } from '@/tools/vision'
import { visionTool, visionToolV2 } from '@/tools/vision'
import {
wealthboxReadContactTool,
wealthboxReadNoteTool,
@@ -1777,6 +1782,7 @@ export const tools: Record<string, ToolConfig> = {
llm_chat: llmChatTool,
function_execute: functionExecuteTool,
vision_tool: visionTool,
vision_tool_v2: visionToolV2,
file_parser: fileParseTool,
file_parser_v2: fileParserV2Tool,
file_parser_v3: fileParserV3Tool,
@@ -2494,6 +2500,7 @@ export const tools: Record<string, ToolConfig> = {
perplexity_chat: perplexityChatTool,
perplexity_search: perplexitySearchTool,
pulse_parser: pulseParserTool,
pulse_parser_v2: pulseParserV2Tool,
posthog_capture_event: posthogCaptureEventTool,
posthog_batch_events: posthogBatchEventsTool,
posthog_list_persons: posthogListPersonsTool,
@@ -2618,7 +2625,9 @@ export const tools: Record<string, ToolConfig> = {
mistral_parser: mistralParserTool,
mistral_parser_v2: mistralParserV2Tool,
reducto_parser: reductoParserTool,
reducto_parser_v2: reductoParserV2Tool,
textract_parser: textractParserTool,
textract_parser_v2: textractParserV2Tool,
thinking_tool: thinkingTool,
tinybird_events: tinybirdEventsTool,
tinybird_query: tinybirdQueryTool,
@@ -2646,10 +2655,15 @@ export const tools: Record<string, ToolConfig> = {
search_tool: searchTool,
elevenlabs_tts: elevenLabsTtsTool,
stt_whisper: whisperSttTool,
stt_whisper_v2: whisperSttV2Tool,
stt_deepgram: deepgramSttTool,
stt_deepgram_v2: deepgramSttV2Tool,
stt_elevenlabs: elevenLabsSttTool,
stt_elevenlabs_v2: elevenLabsSttV2Tool,
stt_assemblyai: assemblyaiSttTool,
stt_assemblyai_v2: assemblyaiSttV2Tool,
stt_gemini: geminiSttTool,
stt_gemini_v2: geminiSttV2Tool,
tts_openai: openaiTtsTool,
tts_deepgram: deepgramTtsTool,
tts_elevenlabs: elevenLabsTtsUnifiedTool,

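The registry hunks above make each new v2 tool addressable by its string id. A small usage sketch, assuming the tools map and ToolConfig type from this file are in scope; getTool is an illustrative helper, not code from this commit:

// Sketch: resolving a registered tool by id at execution time.
function getTool(id: string): ToolConfig {
  const tool = tools[id] // e.g. 'pulse_parser_v2' or 'reducto_parser_v2'
  if (!tool) {
    throw new Error(`Unknown tool id: ${id}`)
  }
  return tool
}
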
@@ -1,5 +1,4 @@
import { createLogger } from '@sim/logger'
import { sanitizeUrlForLog } from '@/lib/core/utils/logging'
import type {
SharepointGetListResponse,
SharepointList,
@@ -58,7 +57,7 @@ export const getListTool: ToolConfig<SharepointToolParams, SharepointGetListResp
const url = new URL(baseUrl)
const finalUrl = url.toString()
logger.info('SharePoint List All Lists URL', {
finalUrl: sanitizeUrlForLog(finalUrl),
finalUrl,
siteId,
})
return finalUrl
@@ -76,7 +75,7 @@ export const getListTool: ToolConfig<SharepointToolParams, SharepointGetListResp
itemsUrl.searchParams.set('$expand', 'fields')
const finalItemsUrl = itemsUrl.toString()
logger.info('SharePoint Get List Items URL', {
finalUrl: sanitizeUrlForLog(finalItemsUrl),
finalUrl: finalItemsUrl,
siteId,
listId: params.listId,
})
@@ -93,7 +92,7 @@ export const getListTool: ToolConfig<SharepointToolParams, SharepointGetListResp

const finalUrl = url.toString()
logger.info('SharePoint Get List URL', {
finalUrl: sanitizeUrlForLog(finalUrl),
finalUrl,
siteId,
listId: params.listId,
includeColumns: !!params.includeColumns,