From 5a0becf76fbbfd6c9a09199c54b2bde5ed1f8c76 Mon Sep 17 00:00:00 2001
From: Vikhyath Mondreti
Date: Mon, 2 Feb 2026 17:04:17 -0800
Subject: [PATCH] fix integrations

---
 .../confluence/upload-attachment/route.ts     |  3 +
 .../microsoft_teams/write_channel/route.ts    | 12 ++++
 apps/sim/app/api/tools/mistral/parse/route.ts | 39 +++++++++--
 .../app/api/tools/onedrive/upload/route.ts    |  6 ++
 apps/sim/app/api/tools/outlook/send/route.ts  |  4 +-
 apps/sim/app/api/tools/reducto/parse/route.ts |  7 +-
 .../app/api/tools/sharepoint/upload/route.ts  | 68 +++++++++++++++++--
 apps/sim/app/api/tools/slack/utils.ts         |  8 ++-
 .../app/api/tools/ssh/download-file/route.ts  | 10 +++
 apps/sim/app/api/tools/stt/route.ts           | 31 ++++++---
 apps/sim/blocks/blocks/stt.ts                 |  5 +-
 apps/sim/tools/dropbox/download.ts            | 13 +++-
 apps/sim/tools/dropbox/upload.ts              | 17 +++--
 13 files changed, 187 insertions(+), 36 deletions(-)

diff --git a/apps/sim/app/api/tools/confluence/upload-attachment/route.ts b/apps/sim/app/api/tools/confluence/upload-attachment/route.ts
index f6be92f3f..599d70b75 100644
--- a/apps/sim/app/api/tools/confluence/upload-attachment/route.ts
+++ b/apps/sim/app/api/tools/confluence/upload-attachment/route.ts
@@ -92,6 +92,9 @@ export async function POST(request: NextRequest) {
       formData.append('comment', comment)
     }
 
+    // Add minorEdit field as required by Confluence API
+    formData.append('minorEdit', 'false')
+
     const response = await fetch(url, {
       method: 'POST',
       headers: {
diff --git a/apps/sim/app/api/tools/microsoft_teams/write_channel/route.ts b/apps/sim/app/api/tools/microsoft_teams/write_channel/route.ts
index 3fb575dd4..5e0f358ea 100644
--- a/apps/sim/app/api/tools/microsoft_teams/write_channel/route.ts
+++ b/apps/sim/app/api/tools/microsoft_teams/write_channel/route.ts
@@ -94,6 +94,18 @@ export async function POST(request: NextRequest) {
 
     for (const file of userFiles) {
       try {
+        // Microsoft Graph API limits direct uploads to 4MB
+        const maxSize = 4 * 1024 * 1024
+        if (file.size > maxSize) {
+          const sizeMB = (file.size / (1024 * 1024)).toFixed(2)
+          logger.error(
+            `[${requestId}] File ${file.name} is ${sizeMB}MB, exceeds 4MB limit for direct upload`
+          )
+          throw new Error(
+            `File "${file.name}" (${sizeMB}MB) exceeds the 4MB limit for Teams attachments. Use smaller files or upload to SharePoint/OneDrive first.`
+          )
+        }
+
         logger.info(`[${requestId}] Uploading file to Teams: ${file.name} (${file.size} bytes)`)
 
         const buffer = await downloadFileFromStorage(file, requestId, logger)
diff --git a/apps/sim/app/api/tools/mistral/parse/route.ts b/apps/sim/app/api/tools/mistral/parse/route.ts
index bf7c66905..c8394122e 100644
--- a/apps/sim/app/api/tools/mistral/parse/route.ts
+++ b/apps/sim/app/api/tools/mistral/parse/route.ts
@@ -101,9 +101,19 @@ export async function POST(request: NextRequest) {
 
       const base64Payload = base64.startsWith('data:') ?
        base64 : `data:${mimeType};base64,${base64}`
-      mistralBody.document = {
-        type: 'document_url',
-        document_url: base64Payload,
+
+      // Mistral API uses different document types for images vs documents
+      const isImage = mimeType.startsWith('image/')
+      if (isImage) {
+        mistralBody.document = {
+          type: 'image_url',
+          image_url: base64Payload,
+        }
+      } else {
+        mistralBody.document = {
+          type: 'document_url',
+          document_url: base64Payload,
+        }
       }
     } else if (filePath) {
       let fileUrl = filePath
@@ -146,9 +156,26 @@ export async function POST(request: NextRequest) {
         }
       }
 
-      mistralBody.document = {
-        type: 'document_url',
-        document_url: fileUrl,
+      // Detect image URLs by extension for proper Mistral API type
+      const lowerUrl = fileUrl.toLowerCase()
+      const isImageUrl =
+        lowerUrl.endsWith('.png') ||
+        lowerUrl.endsWith('.jpg') ||
+        lowerUrl.endsWith('.jpeg') ||
+        lowerUrl.endsWith('.gif') ||
+        lowerUrl.endsWith('.webp') ||
+        lowerUrl.endsWith('.avif')
+
+      if (isImageUrl) {
+        mistralBody.document = {
+          type: 'image_url',
+          image_url: fileUrl,
+        }
+      } else {
+        mistralBody.document = {
+          type: 'document_url',
+          document_url: fileUrl,
+        }
       }
     }
 
diff --git a/apps/sim/app/api/tools/onedrive/upload/route.ts b/apps/sim/app/api/tools/onedrive/upload/route.ts
index 87902f882..2dbad9ef0 100644
--- a/apps/sim/app/api/tools/onedrive/upload/route.ts
+++ b/apps/sim/app/api/tools/onedrive/upload/route.ts
@@ -38,6 +38,7 @@ const OneDriveUploadSchema = z.object({
   folderId: z.string().optional().nullable(),
   mimeType: z.string().nullish(),
   values: ExcelValuesSchema.optional().nullable(),
+  conflictBehavior: z.enum(['fail', 'replace', 'rename']).optional().nullable(),
 })
 
 async function secureFetchGraph(
@@ -184,6 +185,11 @@ export async function POST(request: NextRequest) {
       uploadUrl = `${MICROSOFT_GRAPH_BASE}/me/drive/root:/${encodeURIComponent(fileName)}:/content`
     }
 
+    // Add conflict behavior if specified (defaults to replace by Microsoft Graph API)
+    if (validatedData.conflictBehavior) {
+      uploadUrl += `?@microsoft.graph.conflictBehavior=${validatedData.conflictBehavior}`
+    }
+
     const uploadResponse = await secureFetchGraph(
       uploadUrl,
       {
diff --git a/apps/sim/app/api/tools/outlook/send/route.ts b/apps/sim/app/api/tools/outlook/send/route.ts
index 88578bcef..f90f62518 100644
--- a/apps/sim/app/api/tools/outlook/send/route.ts
+++ b/apps/sim/app/api/tools/outlook/send/route.ts
@@ -96,14 +96,14 @@ export async function POST(request: NextRequest) {
 
     if (attachments.length > 0) {
       const totalSize = attachments.reduce((sum, file) => sum + file.size, 0)
-      const maxSize = 4 * 1024 * 1024 // 4MB
+      const maxSize = 3 * 1024 * 1024 // 3MB - Microsoft Graph API limit for inline attachments
 
       if (totalSize > maxSize) {
         const sizeMB = (totalSize / (1024 * 1024)).toFixed(2)
         return NextResponse.json(
           {
             success: false,
-            error: `Total attachment size (${sizeMB}MB) exceeds Outlook's limit of 4MB per request`,
+            error: `Total attachment size (${sizeMB}MB) exceeds Microsoft Graph API limit of 3MB per request`,
           },
           { status: 400 }
         )
diff --git a/apps/sim/app/api/tools/reducto/parse/route.ts b/apps/sim/app/api/tools/reducto/parse/route.ts
index dc885b1f8..089733043 100644
--- a/apps/sim/app/api/tools/reducto/parse/route.ts
+++ b/apps/sim/app/api/tools/reducto/parse/route.ts
@@ -175,8 +175,13 @@ export async function POST(request: NextRequest) {
     }
 
     if (validatedData.pages && validatedData.pages.length > 0) {
+      // Reducto API expects page_range as an object with start/end, not an array
+      const pages = validatedData.pages
       reductoBody.settings = {
-        page_range: validatedData.pages,
+        page_range: {
+          start: Math.min(...pages),
+          end: Math.max(...pages),
+        },
       }
     }
 
diff --git a/apps/sim/app/api/tools/sharepoint/upload/route.ts b/apps/sim/app/api/tools/sharepoint/upload/route.ts
index 43a39ee4c..05392f0bf 100644
--- a/apps/sim/app/api/tools/sharepoint/upload/route.ts
+++ b/apps/sim/app/api/tools/sharepoint/upload/route.ts
@@ -114,7 +114,9 @@ export async function POST(request: NextRequest) {
       )
 
       if (!driveResponse.ok) {
-        const errorData = await driveResponse.json().catch(() => ({}))
+        const errorData = (await driveResponse.json().catch(() => ({}))) as {
+          error?: { message?: string }
+        }
         logger.error(`[${requestId}] Failed to get default drive:`, errorData)
         return NextResponse.json(
           {
@@ -125,7 +127,7 @@ export async function POST(request: NextRequest) {
         )
       }
 
-      const driveData = await driveResponse.json()
+      const driveData = (await driveResponse.json()) as { id: string }
      effectiveDriveId = driveData.id
       logger.info(`[${requestId}] Using default drive: ${effectiveDriveId}`)
     }
@@ -187,20 +189,76 @@ export async function POST(request: NextRequest) {
       logger.error(`[${requestId}] Failed to upload file ${fileName}:`, errorData)
 
       if (uploadResponse.status === 409) {
-        logger.warn(`[${requestId}] File ${fileName} already exists, attempting to replace`)
+        // File exists - retry with conflict behavior set to replace
+        logger.warn(`[${requestId}] File ${fileName} already exists, retrying with replace`)
+        const replaceUrl = `${uploadUrl}?@microsoft.graph.conflictBehavior=replace`
+        const replaceResponse = await secureFetchGraph(
+          replaceUrl,
+          {
+            method: 'PUT',
+            headers: {
+              Authorization: `Bearer ${validatedData.accessToken}`,
+              'Content-Type': userFile.type || 'application/octet-stream',
+            },
+            body: buffer,
+          },
+          'replaceUrl'
+        )
+
+        if (!replaceResponse.ok) {
+          const replaceErrorData = (await replaceResponse.json().catch(() => ({}))) as {
+            error?: { message?: string }
+          }
+          logger.error(`[${requestId}] Failed to replace file ${fileName}:`, replaceErrorData)
+          return NextResponse.json(
+            {
+              success: false,
+              error: replaceErrorData.error?.message || `Failed to replace file: ${fileName}`,
+            },
+            { status: replaceResponse.status }
+          )
+        }
+
+        const replaceData = (await replaceResponse.json()) as {
+          id: string
+          name: string
+          webUrl: string
+          size: number
+          createdDateTime: string
+          lastModifiedDateTime: string
+        }
+        logger.info(`[${requestId}] File replaced successfully: ${fileName}`)
+
+        uploadedFiles.push({
+          id: replaceData.id,
+          name: replaceData.name,
+          webUrl: replaceData.webUrl,
+          size: replaceData.size,
+          createdDateTime: replaceData.createdDateTime,
+          lastModifiedDateTime: replaceData.lastModifiedDateTime,
+        })
         continue
       }
 
       return NextResponse.json(
         {
           success: false,
-          error: errorData.error?.message || `Failed to upload file: ${fileName}`,
+          error:
+            (errorData as { error?: { message?: string } }).error?.message ||
+            `Failed to upload file: ${fileName}`,
         },
         { status: uploadResponse.status }
       )
     }
 
-    const uploadData = await uploadResponse.json()
+    const uploadData = (await uploadResponse.json()) as {
+      id: string
+      name: string
+      webUrl: string
+      size: number
+      createdDateTime: string
+      lastModifiedDateTime: string
+    }
     logger.info(`[${requestId}] File uploaded successfully: ${fileName}`)
 
     uploadedFiles.push({
diff --git a/apps/sim/app/api/tools/slack/utils.ts b/apps/sim/app/api/tools/slack/utils.ts
index c4128f4eb..4577d4491 100644
--- a/apps/sim/app/api/tools/slack/utils.ts
+++ b/apps/sim/app/api/tools/slack/utils.ts
@@ -156,7 +156,8 @@ export async function completeSlackFileUpload(
   uploadedFileIds: string[],
   channel: string,
   text: string,
-  accessToken: string
+  accessToken: string,
+  threadTs?: string | null
 ): Promise<{ ok: boolean; files?: any[]; error?: string }> {
   const response = await fetch('https://slack.com/api/files.completeUploadExternal', {
     method: 'POST',
@@ -168,6 +169,7 @@ export async function completeSlackFileUpload(
       files: uploadedFileIds.map((id) => ({ id })),
       channel_id: channel,
       initial_comment: text,
+      ...(threadTs && { thread_ts: threadTs }),
     }),
   })
 
@@ -307,8 +309,8 @@ export async function sendSlackMessage(
     return { success: true, output: formatMessageSuccessResponse(data, text) }
   }
 
-  // Complete file upload
-  const completeData = await completeSlackFileUpload(fileIds, channel, text, accessToken)
+  // Complete file upload with thread support
+  const completeData = await completeSlackFileUpload(fileIds, channel, text, accessToken, threadTs)
 
   if (!completeData.ok) {
     logger.error(`[${requestId}] Failed to complete upload:`, completeData.error)
diff --git a/apps/sim/app/api/tools/ssh/download-file/route.ts b/apps/sim/app/api/tools/ssh/download-file/route.ts
index 818d0ed41..cd908a1b9 100644
--- a/apps/sim/app/api/tools/ssh/download-file/route.ts
+++ b/apps/sim/app/api/tools/ssh/download-file/route.ts
@@ -80,6 +80,16 @@ export async function POST(request: NextRequest) {
       })
     })
 
+    // Check file size limit (50MB to prevent memory exhaustion)
+    const maxSize = 50 * 1024 * 1024
+    if (stats.size > maxSize) {
+      const sizeMB = (stats.size / (1024 * 1024)).toFixed(2)
+      return NextResponse.json(
+        { error: `File size (${sizeMB}MB) exceeds download limit of 50MB` },
+        { status: 400 }
+      )
+    }
+
     // Read file content
     const content = await new Promise((resolve, reject) => {
       const chunks: Buffer[] = []
diff --git a/apps/sim/app/api/tools/stt/route.ts b/apps/sim/app/api/tools/stt/route.ts
index 5917db680..9330d4da4 100644
--- a/apps/sim/app/api/tools/stt/route.ts
+++ b/apps/sim/app/api/tools/stt/route.ts
@@ -201,7 +201,9 @@ export async function POST(request: NextRequest) {
         translateToEnglish,
         model,
         body.prompt,
-        body.temperature
+        body.temperature,
+        audioMimeType,
+        audioFileName
       )
       transcript = result.transcript
       segments = result.segments
@@ -214,7 +216,8 @@ export async function POST(request: NextRequest) {
         language,
         timestamps,
         diarization,
-        model
+        model,
+        audioMimeType
       )
       transcript = result.transcript
      segments = result.segments
@@ -304,7 +307,9 @@ async function transcribeWithWhisper(
   translate?: boolean,
   model?: string,
   prompt?: string,
-  temperature?: number
+  temperature?: number,
+  mimeType?: string,
+  fileName?: string
 ): Promise<{
   transcript: string
   segments?: TranscriptSegment[]
@@ -313,8 +318,11 @@
 }> {
   const formData = new FormData()
 
-  const blob = new Blob([new Uint8Array(audioBuffer)], { type: 'audio/mpeg' })
-  formData.append('file', blob, 'audio.mp3')
+  // Use actual MIME type and filename if provided
+  const actualMimeType = mimeType || 'audio/mpeg'
+  const actualFileName = fileName || 'audio.mp3'
+  const blob = new Blob([new Uint8Array(audioBuffer)], { type: actualMimeType })
+  formData.append('file', blob, actualFileName)
   formData.append('model', model || 'whisper-1')
 
   if (language && language !== 'auto') {
@@ -331,10 +339,11 @@
 
   formData.append('response_format', 'verbose_json')
 
+  // OpenAI API uses array notation for timestamp_granularities
   if (timestamps === 'word') {
-    formData.append('timestamp_granularities', 'word')
+ formData.append('timestamp_granularities[]', 'word') } else if (timestamps === 'sentence') { - formData.append('timestamp_granularities', 'segment') + formData.append('timestamp_granularities[]', 'segment') } const endpoint = translate ? 'translations' : 'transcriptions' @@ -377,7 +386,8 @@ async function transcribeWithDeepgram( language?: string, timestamps?: 'none' | 'sentence' | 'word', diarization?: boolean, - model?: string + model?: string, + mimeType?: string ): Promise<{ transcript: string segments?: TranscriptSegment[] @@ -409,7 +419,7 @@ async function transcribeWithDeepgram( method: 'POST', headers: { Authorization: `Token ${apiKey}`, - 'Content-Type': 'audio/mpeg', + 'Content-Type': mimeType || 'audio/mpeg', }, body: new Uint8Array(audioBuffer), }) @@ -565,7 +575,8 @@ async function transcribeWithAssemblyAI( audio_url: upload_url, } - if (model === 'best' || model === 'nano') { + // AssemblyAI only supports 'best', 'slam-1', or 'universal' for speech_model + if (model === 'best') { transcriptRequest.speech_model = model } diff --git a/apps/sim/blocks/blocks/stt.ts b/apps/sim/blocks/blocks/stt.ts index 8b9a94575..344f14458 100644 --- a/apps/sim/blocks/blocks/stt.ts +++ b/apps/sim/blocks/blocks/stt.ts @@ -82,10 +82,7 @@ export const SttBlock: BlockConfig = { title: 'Model', type: 'dropdown', condition: { field: 'provider', value: 'assemblyai' }, - options: [ - { label: 'Best', id: 'best' }, - { label: 'Nano', id: 'nano' }, - ], + options: [{ label: 'Best', id: 'best' }], value: () => 'best', required: true, }, diff --git a/apps/sim/tools/dropbox/download.ts b/apps/sim/tools/dropbox/download.ts index 24292ebda..8adf286e6 100644 --- a/apps/sim/tools/dropbox/download.ts +++ b/apps/sim/tools/dropbox/download.ts @@ -1,6 +1,16 @@ import type { DropboxDownloadParams, DropboxDownloadResponse } from '@/tools/dropbox/types' import type { ToolConfig } from '@/tools/types' +/** + * Escapes non-ASCII characters in JSON string for HTTP header safety. + * Dropbox API requires characters 0x7F and all non-ASCII to be escaped as \uXXXX. + */ +function httpHeaderSafeJson(value: object): string { + return JSON.stringify(value).replace(/[\u007f-\uffff]/g, (c) => { + return '\\u' + ('0000' + c.charCodeAt(0).toString(16)).slice(-4) + }) +} + export const dropboxDownloadTool: ToolConfig = { id: 'dropbox_download', name: 'Dropbox Download File', @@ -30,7 +40,8 @@ export const dropboxDownloadTool: ToolConfig { + return '\\u' + ('0000' + c.charCodeAt(0).toString(16)).slice(-4) + }) +} + export const dropboxUploadTool: ToolConfig = { id: 'dropbox_upload', name: 'Dropbox Upload File', @@ -70,13 +80,12 @@ export const dropboxUploadTool: ToolConfig { - // The body should be the raw binary data - // In this case we're passing the base64 content which will be decoded - return params.fileContent + // Decode base64 to raw binary bytes - Dropbox expects raw binary, not base64 text + return Buffer.from(params.fileContent, 'base64') }, },
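
For reference, a minimal standalone sketch (not taken from the patch) of the escaping behavior the new httpHeaderSafeJson helper provides. Dropbox's content endpoints pass JSON arguments in an HTTP header (Dropbox-API-Arg), and header values must stay ASCII-only, so non-ASCII characters are escaped as \uXXXX. The example path is made up.

// Sketch only: same helper shape as added in apps/sim/tools/dropbox/download.ts and upload.ts
function httpHeaderSafeJson(value: object): string {
  return JSON.stringify(value).replace(/[\u007f-\uffff]/g, (c) => {
    return '\\u' + ('0000' + c.charCodeAt(0).toString(16)).slice(-4)
  })
}

// Plain JSON.stringify would leave the raw "ü" in the header value:
console.log(JSON.stringify({ path: '/Bücher/report.pdf' }))     // {"path":"/Bücher/report.pdf"}
// The helper escapes it so the header stays ASCII-safe:
console.log(httpHeaderSafeJson({ path: '/Bücher/report.pdf' })) // {"path":"/B\u00fccher/report.pdf"}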