feat(tools): added download file tool for onedrive, google drive, and slack; added move email tool for gmail and outlook (#1785)

* feat(tools): added download file tool for onedrive, google drive, and slack

* added gmail & outlook move tools, added missing credential descriptions to the modal

* added slack delete/update message, add reaction; added gmail read/unread/label/unarchive; added outlook copy/delete/read/unread

* added threads to slack operations

* added timestamp to the slack webhook trigger output, since the slack api keys message updates/reactions/deletions off the message timestamp (see the sketch after this list)

* cleanup

* added file info to slack read messages

* updated slack desc

* fixed downloading for onedrive, slack, and drive

* fix type check

* fix build failure

* clean up files, fix triggers with attachments, fix integration blocks so that 'include attachments' parses results into user files, remove unused code

* fix move files tools

* fix tests

* fix build errors

* fix type error

* fix tests

* remove redundant code and filter out unnecessary user file fields

* fix lint error

* remove fields from tag dropdown

* fix file upload via API

* fix pdf parse issue
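
A note on the slack timestamp bullet above: Slack's Web API addresses an existing message by its channel plus message ts, so the webhook trigger has to surface that timestamp for the update/react/delete tools to target the right message. A minimal illustrative sketch using @slack/web-api, not the repo's tool code; the channel and ts values are assumed to come from the trigger output:

import { WebClient } from '@slack/web-api'

const client = new WebClient(process.env.SLACK_BOT_TOKEN)
const channel = 'C0123456789' // assumed: channel id from the trigger payload
const ts = '1730837193.000200' // assumed: message timestamp from the trigger payload

async function demo() {
  // Edit the message; chat.update is keyed by channel + ts.
  await client.chat.update({ channel, ts, text: 'edited via workflow' })

  // Add a reaction; reactions.add names the same value `timestamp`.
  await client.reactions.add({ channel, timestamp: ts, name: 'white_check_mark' })

  // Delete the message, again addressed by channel + ts.
  await client.chat.delete({ channel, ts })
}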

---------

Co-authored-by: waleed <waleed>
Co-authored-by: Adam Gough <adamgough@Adams-MacBook-Pro.local>
Co-authored-by: Vikhyath Mondreti <vikhyath@simstudio.ai>
Waleed authored on 2025-11-05 13:00:34 -08:00 (committed by GitHub)
parent 21774de275
commit cf023e4d22
117 changed files with 6221 additions and 887 deletions

View File

@@ -98,6 +98,7 @@ export class DAGExecutor {
workflowId,
workspaceId: this.contextExtensions.workspaceId,
executionId: this.contextExtensions.executionId,
userId: this.contextExtensions.userId,
isDeployedContext: this.contextExtensions.isDeployedContext,
blockStates: new Map(),
blockLogs: [],

View File

@@ -4,6 +4,7 @@ import type { SubflowType } from '@/stores/workflows/workflow/types'
export interface ContextExtensions {
workspaceId?: string
executionId?: string
userId?: string
stream?: boolean
selectedOutputs?: string[]
edges?: Array<{ source: string; target: string }>

View File

@@ -89,6 +89,7 @@ export class ApiBlockHandler implements BlockHandler {
_context: {
workflowId: ctx.workflowId,
workspaceId: ctx.workspaceId,
executionId: ctx.executionId,
},
},
false,

View File

@@ -66,6 +66,7 @@ export class GenericBlockHandler implements BlockHandler {
_context: {
workflowId: ctx.workflowId,
workspaceId: ctx.workspaceId,
executionId: ctx.executionId,
},
},
false,

View File

@@ -12,8 +12,6 @@ export interface UserFile {
size: number
type: string
key: string
uploadedAt: string
expiresAt: string
context?: string
}
@@ -107,6 +105,7 @@ export interface ExecutionContext {
workflowId: string // Unique identifier for this workflow execution
workspaceId?: string // Workspace ID for file storage scoping
executionId?: string // Unique execution ID for file storage scoping
userId?: string // User ID for file storage attribution
// Whether this execution is running against deployed state (API/webhook/schedule/chat)
// Manual executions in the builder should leave this undefined/false
isDeployedContext?: boolean
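
Net effect of the userId additions across the DAGExecutor, ContextExtensions, and ExecutionContext hunks: the executing user's id now travels with the execution context so stored files can be attributed. A rough sketch of the threading, assuming the wiring outside the shown hunks:

// Shapes match the fields touched in this commit; the surrounding code is assumed.
interface ContextExtensions {
  workspaceId?: string
  executionId?: string
  userId?: string
}

interface ExecutionContext {
  workflowId: string
  workspaceId?: string
  executionId?: string
  userId?: string // user id for file storage attribution
  isDeployedContext?: boolean
}

// DAGExecutor-style construction: copy the ids from contextExtensions
// into the per-run ExecutionContext so downstream file uploads can use them.
function buildExecutionContext(workflowId: string, ext: ContextExtensions): ExecutionContext {
  return {
    workflowId,
    workspaceId: ext.workspaceId,
    executionId: ext.executionId,
    userId: ext.userId,
  }
}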

View File

@@ -1,5 +1,5 @@
import { createLogger } from '@/lib/logs/console/logger'
import { uploadExecutionFile } from '@/lib/uploads/contexts/execution'
import { uploadExecutionFile, uploadFileFromRawData } from '@/lib/uploads/contexts/execution'
import type { ExecutionContext, UserFile } from '@/executor/types'
import type { ToolConfig, ToolFileData } from '@/tools/types'
@@ -73,7 +73,7 @@ export class FileToolProcessor {
if (outputType === 'file[]') {
return FileToolProcessor.processFileArray(fileData, outputKey, executionContext)
}
return FileToolProcessor.processFileData(fileData, executionContext, outputKey)
return FileToolProcessor.processFileData(fileData, executionContext)
}
/**
@@ -89,9 +89,7 @@ export class FileToolProcessor {
}
return Promise.all(
fileData.map((file, index) =>
FileToolProcessor.processFileData(file, executionContext, `${outputKey}[${index}]`)
)
fileData.map((file, index) => FileToolProcessor.processFileData(file, executionContext))
)
}
@@ -100,49 +98,10 @@ export class FileToolProcessor {
*/
private static async processFileData(
fileData: ToolFileData,
context: ExecutionContext,
outputKey: string
context: ExecutionContext
): Promise<UserFile> {
logger.info(`Processing file data for output '${outputKey}': ${fileData.name}`)
try {
// Convert various formats to Buffer
let buffer: Buffer
if (Buffer.isBuffer(fileData.data)) {
buffer = fileData.data
logger.info(`Using Buffer data for ${fileData.name} (${buffer.length} bytes)`)
} else if (
fileData.data &&
typeof fileData.data === 'object' &&
'type' in fileData.data &&
'data' in fileData.data
) {
// Handle serialized Buffer objects (from JSON serialization)
const serializedBuffer = fileData.data as { type: string; data: number[] }
if (serializedBuffer.type === 'Buffer' && Array.isArray(serializedBuffer.data)) {
buffer = Buffer.from(serializedBuffer.data)
} else {
throw new Error(`Invalid serialized buffer format for ${fileData.name}`)
}
logger.info(
`Converted serialized Buffer to Buffer for ${fileData.name} (${buffer.length} bytes)`
)
} else if (typeof fileData.data === 'string' && fileData.data) {
// Assume base64 or base64url
let base64Data = fileData.data
// Convert base64url to base64 if needed (Gmail API format)
if (base64Data && (base64Data.includes('-') || base64Data.includes('_'))) {
base64Data = base64Data.replace(/-/g, '+').replace(/_/g, '/')
}
buffer = Buffer.from(base64Data, 'base64')
logger.info(
`Converted base64 string to Buffer for ${fileData.name} (${buffer.length} bytes)`
)
} else if (fileData.url) {
// Download from URL
logger.info(`Downloading file from URL: ${fileData.url}`)
if (fileData.url) {
const response = await fetch(fileData.url)
if (!response.ok) {
@@ -150,35 +109,38 @@ export class FileToolProcessor {
}
const arrayBuffer = await response.arrayBuffer()
buffer = Buffer.from(arrayBuffer)
logger.info(`Downloaded file from URL for ${fileData.name} (${buffer.length} bytes)`)
} else {
throw new Error(
`File data for '${fileData.name}' must have either 'data' (Buffer/base64) or 'url' property`
const buffer = Buffer.from(arrayBuffer)
if (buffer.length === 0) {
throw new Error(`File '${fileData.name}' has zero bytes`)
}
return await uploadExecutionFile(
{
workspaceId: context.workspaceId || '',
workflowId: context.workflowId,
executionId: context.executionId || '',
},
buffer,
fileData.name,
fileData.mimeType,
context.userId
)
}
// Validate buffer
if (buffer.length === 0) {
throw new Error(`File '${fileData.name}' has zero bytes`)
}
// Store in execution filesystem
const userFile = await uploadExecutionFile(
return uploadFileFromRawData(
{
name: fileData.name,
data: fileData.data,
mimeType: fileData.mimeType,
},
{
workspaceId: context.workspaceId || '',
workflowId: context.workflowId,
executionId: context.executionId || '',
},
buffer,
fileData.name,
fileData.mimeType
context.userId
)
logger.info(
`Successfully stored file '${fileData.name}' in execution filesystem with key: ${userFile.key}`
)
return userFile
} catch (error) {
logger.error(`Error processing file data for '${fileData.name}':`, error)
throw error
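
Since the rendered hunk above interleaves removed and added lines without +/- markers, here is an approximate reconstruction of processFileData after this change, read off the diff (logging and exact error messages may differ): URL-backed files are downloaded and stored with uploadExecutionFile, everything else is delegated to the new uploadFileFromRawData helper, and context.userId is passed through on both paths.

private static async processFileData(
  fileData: ToolFileData,
  context: ExecutionContext
): Promise<UserFile> {
  try {
    if (fileData.url) {
      // Download URL-backed files and store them in the execution filesystem.
      const response = await fetch(fileData.url)
      if (!response.ok) {
        // Exact message not visible in the hunk; assumed to report the failed status.
        throw new Error(`Failed to download '${fileData.name}': ${response.status}`)
      }
      const arrayBuffer = await response.arrayBuffer()
      const buffer = Buffer.from(arrayBuffer)
      if (buffer.length === 0) {
        throw new Error(`File '${fileData.name}' has zero bytes`)
      }
      return await uploadExecutionFile(
        {
          workspaceId: context.workspaceId || '',
          workflowId: context.workflowId,
          executionId: context.executionId || '',
        },
        buffer,
        fileData.name,
        fileData.mimeType,
        context.userId
      )
    }

    // Buffer / serialized-Buffer / base64(url) normalization now lives in uploadFileFromRawData.
    return uploadFileFromRawData(
      { name: fileData.name, data: fileData.data, mimeType: fileData.mimeType },
      {
        workspaceId: context.workspaceId || '',
        workflowId: context.workflowId,
        executionId: context.executionId || '',
      },
      context.userId
    )
  } catch (error) {
    logger.error(`Error processing file data for '${fileData.name}':`, error)
    throw error
  }
}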

View File

@@ -1,3 +1,4 @@
import { isUserFile } from '@/lib/utils'
import {
classifyStartBlockType,
getLegacyStarterMode,
@@ -233,20 +234,6 @@ function getRawInputCandidate(workflowInput: unknown): unknown {
return workflowInput
}
function isUserFile(candidate: unknown): candidate is UserFile {
if (!isPlainObject(candidate)) {
return false
}
return (
typeof candidate.id === 'string' &&
typeof candidate.name === 'string' &&
typeof candidate.url === 'string' &&
typeof candidate.size === 'number' &&
typeof candidate.type === 'string'
)
}
function getFilesFromWorkflowInput(workflowInput: unknown): UserFile[] | undefined {
if (!isPlainObject(workflowInput)) {
return undefined
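
The local isUserFile guard removed above is now imported from '@/lib/utils'. The shared helper presumably keeps the same checks as the deleted code; a sketch of what it is assumed to look like (the actual '@/lib/utils' implementation is not part of this diff):

import type { UserFile } from '@/executor/types'

// Assumed to mirror the removed local guard; the real '@/lib/utils' version may differ.
export function isUserFile(candidate: unknown): candidate is UserFile {
  if (typeof candidate !== 'object' || candidate === null || Array.isArray(candidate)) {
    return false
  }
  const c = candidate as Record<string, unknown>
  return (
    typeof c.id === 'string' &&
    typeof c.name === 'string' &&
    typeof c.url === 'string' &&
    typeof c.size === 'number' &&
    typeof c.type === 'string'
  )
}

With the guard centralized, getFilesFromWorkflowInput and the handlers that now parse attachments into user files can share one definition instead of each module redefining it.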