fix integrations

This commit is contained in:
Vikhyath Mondreti
2026-02-02 17:04:17 -08:00
parent f4a3c94f87
commit 5a0becf76f
13 changed files with 187 additions and 36 deletions

View File

@@ -92,6 +92,9 @@ export async function POST(request: NextRequest) {
formData.append('comment', comment)
}
// Add minorEdit field as required by Confluence API
formData.append('minorEdit', 'false')
const response = await fetch(url, {
method: 'POST',
headers: {

View File

@@ -94,6 +94,18 @@ export async function POST(request: NextRequest) {
for (const file of userFiles) {
try {
// Microsoft Graph API limits direct uploads to 4MB
const maxSize = 4 * 1024 * 1024
if (file.size > maxSize) {
const sizeMB = (file.size / (1024 * 1024)).toFixed(2)
logger.error(
`[${requestId}] File ${file.name} is ${sizeMB}MB, exceeds 4MB limit for direct upload`
)
throw new Error(
`File "${file.name}" (${sizeMB}MB) exceeds the 4MB limit for Teams attachments. Use smaller files or upload to SharePoint/OneDrive first.`
)
}
logger.info(`[${requestId}] Uploading file to Teams: ${file.name} (${file.size} bytes)`)
const buffer = await downloadFileFromStorage(file, requestId, logger)

View File

@@ -101,9 +101,19 @@ export async function POST(request: NextRequest) {
const base64Payload = base64.startsWith('data:')
? base64
: `data:${mimeType};base64,${base64}`
mistralBody.document = {
type: 'document_url',
document_url: base64Payload,
// Mistral API uses different document types for images vs documents
const isImage = mimeType.startsWith('image/')
if (isImage) {
mistralBody.document = {
type: 'image_url',
image_url: base64Payload,
}
} else {
mistralBody.document = {
type: 'document_url',
document_url: base64Payload,
}
}
} else if (filePath) {
let fileUrl = filePath
@@ -146,9 +156,26 @@ export async function POST(request: NextRequest) {
}
}
mistralBody.document = {
type: 'document_url',
document_url: fileUrl,
// Detect image URLs by extension for proper Mistral API type
const lowerUrl = fileUrl.toLowerCase()
const isImageUrl =
lowerUrl.endsWith('.png') ||
lowerUrl.endsWith('.jpg') ||
lowerUrl.endsWith('.jpeg') ||
lowerUrl.endsWith('.gif') ||
lowerUrl.endsWith('.webp') ||
lowerUrl.endsWith('.avif')
if (isImageUrl) {
mistralBody.document = {
type: 'image_url',
image_url: fileUrl,
}
} else {
mistralBody.document = {
type: 'document_url',
document_url: fileUrl,
}
}
}

View File

@@ -38,6 +38,7 @@ const OneDriveUploadSchema = z.object({
folderId: z.string().optional().nullable(),
mimeType: z.string().nullish(),
values: ExcelValuesSchema.optional().nullable(),
conflictBehavior: z.enum(['fail', 'replace', 'rename']).optional().nullable(),
})
async function secureFetchGraph(
@@ -184,6 +185,11 @@ export async function POST(request: NextRequest) {
uploadUrl = `${MICROSOFT_GRAPH_BASE}/me/drive/root:/${encodeURIComponent(fileName)}:/content`
}
// Add conflict behavior if specified (defaults to replace by Microsoft Graph API)
if (validatedData.conflictBehavior) {
uploadUrl += `?@microsoft.graph.conflictBehavior=${validatedData.conflictBehavior}`
}
const uploadResponse = await secureFetchGraph(
uploadUrl,
{

View File

@@ -96,14 +96,14 @@ export async function POST(request: NextRequest) {
if (attachments.length > 0) {
const totalSize = attachments.reduce((sum, file) => sum + file.size, 0)
const maxSize = 4 * 1024 * 1024 // 4MB
const maxSize = 3 * 1024 * 1024 // 3MB - Microsoft Graph API limit for inline attachments
if (totalSize > maxSize) {
const sizeMB = (totalSize / (1024 * 1024)).toFixed(2)
return NextResponse.json(
{
success: false,
error: `Total attachment size (${sizeMB}MB) exceeds Outlook's limit of 4MB per request`,
error: `Total attachment size (${sizeMB}MB) exceeds Microsoft Graph API limit of 3MB per request`,
},
{ status: 400 }
)

View File

@@ -175,8 +175,13 @@ export async function POST(request: NextRequest) {
}
if (validatedData.pages && validatedData.pages.length > 0) {
// Reducto API expects page_range as an object with start/end, not an array
const pages = validatedData.pages
reductoBody.settings = {
page_range: validatedData.pages,
page_range: {
start: Math.min(...pages),
end: Math.max(...pages),
},
}
}

View File

@@ -114,7 +114,9 @@ export async function POST(request: NextRequest) {
)
if (!driveResponse.ok) {
const errorData = await driveResponse.json().catch(() => ({}))
const errorData = (await driveResponse.json().catch(() => ({}))) as {
error?: { message?: string }
}
logger.error(`[${requestId}] Failed to get default drive:`, errorData)
return NextResponse.json(
{
@@ -125,7 +127,7 @@ export async function POST(request: NextRequest) {
)
}
const driveData = await driveResponse.json()
const driveData = (await driveResponse.json()) as { id: string }
effectiveDriveId = driveData.id
logger.info(`[${requestId}] Using default drive: ${effectiveDriveId}`)
}
@@ -187,20 +189,76 @@ export async function POST(request: NextRequest) {
logger.error(`[${requestId}] Failed to upload file ${fileName}:`, errorData)
if (uploadResponse.status === 409) {
logger.warn(`[${requestId}] File ${fileName} already exists, attempting to replace`)
// File exists - retry with conflict behavior set to replace
logger.warn(`[${requestId}] File ${fileName} already exists, retrying with replace`)
const replaceUrl = `${uploadUrl}?@microsoft.graph.conflictBehavior=replace`
const replaceResponse = await secureFetchGraph(
replaceUrl,
{
method: 'PUT',
headers: {
Authorization: `Bearer ${validatedData.accessToken}`,
'Content-Type': userFile.type || 'application/octet-stream',
},
body: buffer,
},
'replaceUrl'
)
if (!replaceResponse.ok) {
const replaceErrorData = (await replaceResponse.json().catch(() => ({}))) as {
error?: { message?: string }
}
logger.error(`[${requestId}] Failed to replace file ${fileName}:`, replaceErrorData)
return NextResponse.json(
{
success: false,
error: replaceErrorData.error?.message || `Failed to replace file: ${fileName}`,
},
{ status: replaceResponse.status }
)
}
const replaceData = (await replaceResponse.json()) as {
id: string
name: string
webUrl: string
size: number
createdDateTime: string
lastModifiedDateTime: string
}
logger.info(`[${requestId}] File replaced successfully: ${fileName}`)
uploadedFiles.push({
id: replaceData.id,
name: replaceData.name,
webUrl: replaceData.webUrl,
size: replaceData.size,
createdDateTime: replaceData.createdDateTime,
lastModifiedDateTime: replaceData.lastModifiedDateTime,
})
continue
}
return NextResponse.json(
{
success: false,
error: errorData.error?.message || `Failed to upload file: ${fileName}`,
error:
(errorData as { error?: { message?: string } }).error?.message ||
`Failed to upload file: ${fileName}`,
},
{ status: uploadResponse.status }
)
}
const uploadData = await uploadResponse.json()
const uploadData = (await uploadResponse.json()) as {
id: string
name: string
webUrl: string
size: number
createdDateTime: string
lastModifiedDateTime: string
}
logger.info(`[${requestId}] File uploaded successfully: ${fileName}`)
uploadedFiles.push({

View File

@@ -156,7 +156,8 @@ export async function completeSlackFileUpload(
uploadedFileIds: string[],
channel: string,
text: string,
accessToken: string
accessToken: string,
threadTs?: string | null
): Promise<{ ok: boolean; files?: any[]; error?: string }> {
const response = await fetch('https://slack.com/api/files.completeUploadExternal', {
method: 'POST',
@@ -168,6 +169,7 @@ export async function completeSlackFileUpload(
files: uploadedFileIds.map((id) => ({ id })),
channel_id: channel,
initial_comment: text,
...(threadTs && { thread_ts: threadTs }),
}),
})
@@ -307,8 +309,8 @@ export async function sendSlackMessage(
return { success: true, output: formatMessageSuccessResponse(data, text) }
}
// Complete file upload
const completeData = await completeSlackFileUpload(fileIds, channel, text, accessToken)
// Complete file upload with thread support
const completeData = await completeSlackFileUpload(fileIds, channel, text, accessToken, threadTs)
if (!completeData.ok) {
logger.error(`[${requestId}] Failed to complete upload:`, completeData.error)

View File

@@ -80,6 +80,16 @@ export async function POST(request: NextRequest) {
})
})
// Check file size limit (50MB to prevent memory exhaustion)
const maxSize = 50 * 1024 * 1024
if (stats.size > maxSize) {
const sizeMB = (stats.size / (1024 * 1024)).toFixed(2)
return NextResponse.json(
{ error: `File size (${sizeMB}MB) exceeds download limit of 50MB` },
{ status: 400 }
)
}
// Read file content
const content = await new Promise<Buffer>((resolve, reject) => {
const chunks: Buffer[] = []

View File

@@ -201,7 +201,9 @@ export async function POST(request: NextRequest) {
translateToEnglish,
model,
body.prompt,
body.temperature
body.temperature,
audioMimeType,
audioFileName
)
transcript = result.transcript
segments = result.segments
@@ -214,7 +216,8 @@ export async function POST(request: NextRequest) {
language,
timestamps,
diarization,
model
model,
audioMimeType
)
transcript = result.transcript
segments = result.segments
@@ -304,7 +307,9 @@ async function transcribeWithWhisper(
translate?: boolean,
model?: string,
prompt?: string,
temperature?: number
temperature?: number,
mimeType?: string,
fileName?: string
): Promise<{
transcript: string
segments?: TranscriptSegment[]
@@ -313,8 +318,11 @@ async function transcribeWithWhisper(
}> {
const formData = new FormData()
const blob = new Blob([new Uint8Array(audioBuffer)], { type: 'audio/mpeg' })
formData.append('file', blob, 'audio.mp3')
// Use actual MIME type and filename if provided
const actualMimeType = mimeType || 'audio/mpeg'
const actualFileName = fileName || 'audio.mp3'
const blob = new Blob([new Uint8Array(audioBuffer)], { type: actualMimeType })
formData.append('file', blob, actualFileName)
formData.append('model', model || 'whisper-1')
if (language && language !== 'auto') {
@@ -331,10 +339,11 @@ async function transcribeWithWhisper(
formData.append('response_format', 'verbose_json')
// OpenAI API uses array notation for timestamp_granularities
if (timestamps === 'word') {
formData.append('timestamp_granularities', 'word')
formData.append('timestamp_granularities[]', 'word')
} else if (timestamps === 'sentence') {
formData.append('timestamp_granularities', 'segment')
formData.append('timestamp_granularities[]', 'segment')
}
const endpoint = translate ? 'translations' : 'transcriptions'
@@ -377,7 +386,8 @@ async function transcribeWithDeepgram(
language?: string,
timestamps?: 'none' | 'sentence' | 'word',
diarization?: boolean,
model?: string
model?: string,
mimeType?: string
): Promise<{
transcript: string
segments?: TranscriptSegment[]
@@ -409,7 +419,7 @@ async function transcribeWithDeepgram(
method: 'POST',
headers: {
Authorization: `Token ${apiKey}`,
'Content-Type': 'audio/mpeg',
'Content-Type': mimeType || 'audio/mpeg',
},
body: new Uint8Array(audioBuffer),
})
@@ -565,7 +575,8 @@ async function transcribeWithAssemblyAI(
audio_url: upload_url,
}
if (model === 'best' || model === 'nano') {
// AssemblyAI only supports 'best', 'slam-1', or 'universal' for speech_model
if (model === 'best') {
transcriptRequest.speech_model = model
}

View File

@@ -82,10 +82,7 @@ export const SttBlock: BlockConfig<SttBlockResponse> = {
title: 'Model',
type: 'dropdown',
condition: { field: 'provider', value: 'assemblyai' },
options: [
{ label: 'Best', id: 'best' },
{ label: 'Nano', id: 'nano' },
],
options: [{ label: 'Best', id: 'best' }],
value: () => 'best',
required: true,
},

View File

@@ -1,6 +1,16 @@
import type { DropboxDownloadParams, DropboxDownloadResponse } from '@/tools/dropbox/types'
import type { ToolConfig } from '@/tools/types'
/**
 * Serializes a value to JSON that is safe to place in an HTTP header.
 *
 * The Dropbox API requires DEL (0x7F) and all non-ASCII characters in the
 * `Dropbox-API-Arg` header to be escaped as `\uXXXX` sequences, because HTTP
 * header values may only contain ASCII. Characters outside the Basic
 * Multilingual Plane are emitted as two escaped UTF-16 surrogate code units.
 *
 * @param value - The argument object to serialize (e.g. `{ path }`).
 * @returns An ASCII-only JSON string suitable as an HTTP header value.
 */
function httpHeaderSafeJson(value: object): string {
  return JSON.stringify(value).replace(/[\u007f-\uffff]/g, (char) => {
    // padStart(4, '0') keeps the mandatory four-hex-digit form (e.g. \u00e9).
    return `\\u${char.charCodeAt(0).toString(16).padStart(4, '0')}`
  })
}
export const dropboxDownloadTool: ToolConfig<DropboxDownloadParams, DropboxDownloadResponse> = {
id: 'dropbox_download',
name: 'Dropbox Download File',
@@ -30,7 +40,8 @@ export const dropboxDownloadTool: ToolConfig<DropboxDownloadParams, DropboxDownl
}
return {
Authorization: `Bearer ${params.accessToken}`,
'Dropbox-API-Arg': JSON.stringify({ path: params.path }),
'Content-Type': 'application/octet-stream',
'Dropbox-API-Arg': httpHeaderSafeJson({ path: params.path }),
}
},
},

View File

@@ -1,6 +1,16 @@
import type { DropboxUploadParams, DropboxUploadResponse } from '@/tools/dropbox/types'
import type { ToolConfig } from '@/tools/types'
/**
 * Serializes a value to JSON that is safe to place in an HTTP header.
 *
 * The Dropbox API requires DEL (0x7F) and all non-ASCII characters in the
 * `Dropbox-API-Arg` header to be escaped as `\uXXXX` sequences, because HTTP
 * header values may only contain ASCII. Characters outside the Basic
 * Multilingual Plane are emitted as two escaped UTF-16 surrogate code units.
 *
 * @param value - The argument object to serialize (e.g. upload commit args).
 * @returns An ASCII-only JSON string suitable as an HTTP header value.
 */
function httpHeaderSafeJson(value: object): string {
  return JSON.stringify(value).replace(/[\u007f-\uffff]/g, (char) => {
    // padStart(4, '0') keeps the mandatory four-hex-digit form (e.g. \u00e9).
    return `\\u${char.charCodeAt(0).toString(16).padStart(4, '0')}`
  })
}
export const dropboxUploadTool: ToolConfig<DropboxUploadParams, DropboxUploadResponse> = {
id: 'dropbox_upload',
name: 'Dropbox Upload File',
@@ -70,13 +80,12 @@ export const dropboxUploadTool: ToolConfig<DropboxUploadParams, DropboxUploadRes
return {
Authorization: `Bearer ${params.accessToken}`,
'Content-Type': 'application/octet-stream',
'Dropbox-API-Arg': JSON.stringify(dropboxApiArg),
'Dropbox-API-Arg': httpHeaderSafeJson(dropboxApiArg),
}
},
body: (params) => {
// The body should be the raw binary data
// In this case we're passing the base64 content which will be decoded
return params.fileContent
// Decode base64 to raw binary bytes - Dropbox expects raw binary, not base64 text
return Buffer.from(params.fileContent, 'base64')
},
},