Mirror of https://github.com/simstudioai/sim.git (synced 2026-01-08 22:48:14 -05:00)

feat(files): gmail upload attachment, workspace files, file storage limits (#1666)

* feat(gmail): add attachment uploads
* add workspace files
* update landing page
* fix lint
* fix test
* fixed UI
* added additional S3 tools to upload files
* added search filters for gmail trigger
* added files to every block
* works
* fix
* register sharepoint tool

---------

Co-authored-by: waleed <waleed>

Committed by GitHub · parent d92d9a02cd · commit 35c551984f
@@ -70,6 +70,7 @@ Send emails using Gmail
 | `body` | string | Yes | Email body content |
 | `cc` | string | No | CC recipients \(comma-separated\) |
 | `bcc` | string | No | BCC recipients \(comma-separated\) |
+| `attachments` | file[] | No | Files to attach to the email |

 #### Output

@@ -91,6 +92,7 @@ Draft emails using Gmail
 | `body` | string | Yes | Email body content |
 | `cc` | string | No | CC recipients \(comma-separated\) |
 | `bcc` | string | No | BCC recipients \(comma-separated\) |
+| `attachments` | file[] | No | Files to attach to the email draft |

 #### Output
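For illustration, the new `attachments` parameter takes the file objects that Sim's file inputs produce; a hypothetical tool input using it might look like the sketch below (the file-object field names are assumptions for illustration, not documented above):

```ts
// Hypothetical input for the Gmail send tool using the new `attachments`
// parameter. The file-object fields (name/key/size/type) are assumptions;
// only the documented parameters are confirmed by the tables above.
const gmailSendInput = {
  to: 'recipient@example.com',
  subject: 'Quarterly report',
  body: 'Report attached.',
  cc: 'alice@example.com, bob@example.com',
  attachments: [
    { name: 'report.pdf', key: 'workspace/ws-1/report.pdf', size: 123456, type: 'application/pdf' },
  ],
}
```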
@@ -1,6 +1,6 @@
 ---
 title: S3
-description: View S3 files
+description: Upload, download, list, and manage S3 files
 ---

 import { BlockInfoCard } from "@/components/ui/block-info-card"

@@ -62,12 +62,37 @@ In Sim, the S3 integration enables your agents to retrieve and access files stor
 ## Usage Instructions

-Integrate S3 into the workflow. Can get presigned URLs for S3 objects. Requires access key and secret access key.
+Integrate S3 into the workflow. Upload files, download objects, list bucket contents, delete objects, and copy objects between buckets. Requires AWS access key and secret access key.



 ## Tools

+### `s3_put_object`
+
+Upload a file to an AWS S3 bucket
+
+#### Input
+
+| Parameter | Type | Required | Description |
+| --------- | ---- | -------- | ----------- |
+| `accessKeyId` | string | Yes | Your AWS Access Key ID |
+| `secretAccessKey` | string | Yes | Your AWS Secret Access Key |
+| `region` | string | Yes | AWS region \(e.g., us-east-1\) |
+| `bucketName` | string | Yes | S3 bucket name |
+| `objectKey` | string | Yes | Object key/path in S3 \(e.g., folder/filename.ext\) |
+| `file` | file | No | File to upload |
+| `content` | string | No | Text content to upload \(alternative to file\) |
+| `contentType` | string | No | Content-Type header \(auto-detected from file if not provided\) |
+| `acl` | string | No | Access control list \(e.g., private, public-read\) |
+
+#### Output
+
+| Parameter | Type | Description |
+| --------- | ---- | ----------- |
+| `url` | string | URL of the uploaded S3 object |
+| `metadata` | object | Upload metadata including ETag and location |
+
 ### `s3_get_object`

 Retrieve an object from an AWS S3 bucket

@@ -87,6 +112,73 @@ Retrieve an object from an AWS S3 bucket
 | `url` | string | Pre-signed URL for downloading the S3 object |
 | `metadata` | object | File metadata including type, size, name, and last modified date |

+### `s3_list_objects`
+
+List objects in an AWS S3 bucket
+
+#### Input
+
+| Parameter | Type | Required | Description |
+| --------- | ---- | -------- | ----------- |
+| `accessKeyId` | string | Yes | Your AWS Access Key ID |
+| `secretAccessKey` | string | Yes | Your AWS Secret Access Key |
+| `region` | string | Yes | AWS region \(e.g., us-east-1\) |
+| `bucketName` | string | Yes | S3 bucket name |
+| `prefix` | string | No | Prefix to filter objects \(e.g., folder/\) |
+| `maxKeys` | number | No | Maximum number of objects to return \(default: 1000\) |
+| `continuationToken` | string | No | Token for pagination |
+
+#### Output
+
+| Parameter | Type | Description |
+| --------- | ---- | ----------- |
+| `objects` | array | List of S3 objects |
+
+### `s3_delete_object`
+
+Delete an object from an AWS S3 bucket
+
+#### Input
+
+| Parameter | Type | Required | Description |
+| --------- | ---- | -------- | ----------- |
+| `accessKeyId` | string | Yes | Your AWS Access Key ID |
+| `secretAccessKey` | string | Yes | Your AWS Secret Access Key |
+| `region` | string | Yes | AWS region \(e.g., us-east-1\) |
+| `bucketName` | string | Yes | S3 bucket name |
+| `objectKey` | string | Yes | Object key/path to delete |
+
+#### Output
+
+| Parameter | Type | Description |
+| --------- | ---- | ----------- |
+| `deleted` | boolean | Whether the object was successfully deleted |
+| `metadata` | object | Deletion metadata |
+
+### `s3_copy_object`
+
+Copy an object within or between AWS S3 buckets
+
+#### Input
+
+| Parameter | Type | Required | Description |
+| --------- | ---- | -------- | ----------- |
+| `accessKeyId` | string | Yes | Your AWS Access Key ID |
+| `secretAccessKey` | string | Yes | Your AWS Secret Access Key |
+| `region` | string | Yes | AWS region \(e.g., us-east-1\) |
+| `sourceBucket` | string | Yes | Source bucket name |
+| `sourceKey` | string | Yes | Source object key/path |
+| `destinationBucket` | string | Yes | Destination bucket name |
+| `destinationKey` | string | Yes | Destination object key/path |
+| `acl` | string | No | Access control list for the copied object \(e.g., private, public-read\) |
+
+#### Output
+
+| Parameter | Type | Description |
+| --------- | ---- | ----------- |
+| `url` | string | URL of the copied S3 object |
+| `metadata` | object | Copy operation metadata |
+
+
 ## Notes
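For orientation, an upload of the shape documented for `s3_put_object` corresponds to a standard AWS SDK v3 call; the sketch below shows the underlying operation under that assumption. It is not Sim's actual tool implementation.

```ts
// Minimal sketch of an S3 upload matching the documented parameters,
// using the AWS SDK v3. Credential values are placeholders.
import { PutObjectCommand, S3Client } from '@aws-sdk/client-s3'

async function putObjectSketch(): Promise<string> {
  const client = new S3Client({
    region: 'us-east-1',
    credentials: { accessKeyId: 'AKIA...', secretAccessKey: '...' }, // placeholders
  })

  await client.send(
    new PutObjectCommand({
      Bucket: 'my-bucket',
      Key: 'folder/filename.ext',          // maps to `objectKey`
      Body: 'Text content to upload',      // or a Buffer for file uploads
      ContentType: 'text/plain',           // maps to `contentType`
      ACL: 'private',                      // maps to `acl`
    })
  )

  // A virtual-hosted-style URL of the kind the tool's `url` output describes:
  return 'https://my-bucket.s3.us-east-1.amazonaws.com/folder/filename.ext'
}
```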
@@ -8,6 +8,7 @@ import {
   Code2,
   Database,
   DollarSign,
+  HardDrive,
   Users,
   Workflow,
 } from 'lucide-react'

@@ -42,6 +43,7 @@ interface PricingTier {
  */
 const FREE_PLAN_FEATURES: PricingFeature[] = [
   { icon: DollarSign, text: '$10 usage limit' },
+  { icon: HardDrive, text: '5GB file storage' },
   { icon: Workflow, text: 'Public template access' },
   { icon: Users, text: 'Community support' },
   { icon: Database, text: 'Limited log retention' },
@@ -8,6 +8,7 @@ import { isSupportedFileType, parseFile } from '@/lib/file-parsers'
 import { createLogger } from '@/lib/logs/console/logger'
 import { validateExternalUrl } from '@/lib/security/input-validation'
 import { downloadFile, isUsingCloudStorage } from '@/lib/uploads'
+import { extractStorageKey } from '@/lib/uploads/file-utils'
 import { UPLOAD_DIR_SERVER } from '@/lib/uploads/setup.server'
 import '@/lib/uploads/setup.server'

@@ -69,13 +70,13 @@ export async function POST(request: NextRequest) {

   try {
     const requestData = await request.json()
-    const { filePath, fileType } = requestData
+    const { filePath, fileType, workspaceId } = requestData

     if (!filePath || (typeof filePath === 'string' && filePath.trim() === '')) {
       return NextResponse.json({ success: false, error: 'No file path provided' }, { status: 400 })
     }

-    logger.info('File parse request received:', { filePath, fileType })
+    logger.info('File parse request received:', { filePath, fileType, workspaceId })

     if (Array.isArray(filePath)) {
       const results = []
@@ -89,7 +90,7 @@ export async function POST(request: NextRequest) {
           continue
         }

-        const result = await parseFileSingle(path, fileType)
+        const result = await parseFileSingle(path, fileType, workspaceId)
         if (result.metadata) {
           result.metadata.processingTime = Date.now() - startTime
         }
@@ -117,7 +118,7 @@ export async function POST(request: NextRequest) {
       })
     }

-    const result = await parseFileSingle(filePath, fileType)
+    const result = await parseFileSingle(filePath, fileType, workspaceId)

     if (result.metadata) {
       result.metadata.processingTime = Date.now() - startTime
@@ -153,7 +154,11 @@ export async function POST(request: NextRequest) {
 /**
  * Parse a single file and return its content
  */
-async function parseFileSingle(filePath: string, fileType?: string): Promise<ParseResult> {
+async function parseFileSingle(
+  filePath: string,
+  fileType?: string,
+  workspaceId?: string
+): Promise<ParseResult> {
   logger.info('Parsing file:', filePath)

   if (!filePath || filePath.trim() === '') {
@@ -174,7 +179,7 @@ async function parseFileSingle(filePath: string, fileType?: string): Promise<Par
   }

   if (filePath.startsWith('http://') || filePath.startsWith('https://')) {
-    return handleExternalUrl(filePath, fileType)
+    return handleExternalUrl(filePath, fileType, workspaceId)
   }

   const isS3Path = filePath.includes('/api/files/serve/s3/')
@@ -216,10 +221,16 @@ function validateFilePath(filePath: string): { isValid: boolean; error?: string

 /**
  * Handle external URL
+ * If workspaceId is provided, checks if file already exists and saves to workspace if not
  */
-async function handleExternalUrl(url: string, fileType?: string): Promise<ParseResult> {
+async function handleExternalUrl(
+  url: string,
+  fileType?: string,
+  workspaceId?: string
+): Promise<ParseResult> {
   try {
     logger.info('Fetching external URL:', url)
+    logger.info('WorkspaceId for URL save:', workspaceId)

     const urlValidation = validateExternalUrl(url, 'fileUrl')
     if (!urlValidation.isValid) {
@@ -231,6 +242,34 @@ async function handleExternalUrl(url: string, fileType?: string): Promise<ParseR
       }
     }

+    // Extract filename from URL
+    const urlPath = new URL(url).pathname
+    const filename = urlPath.split('/').pop() || 'download'
+    const extension = path.extname(filename).toLowerCase().substring(1)
+
+    logger.info(`Extracted filename: ${filename}, workspaceId: ${workspaceId}`)
+
+    // If workspaceId provided, check if file already exists in workspace
+    if (workspaceId) {
+      const { fileExistsInWorkspace, listWorkspaceFiles } = await import(
+        '@/lib/uploads/workspace-files'
+      )
+      const exists = await fileExistsInWorkspace(workspaceId, filename)
+
+      if (exists) {
+        logger.info(`File ${filename} already exists in workspace, using existing file`)
+        // Get existing file and parse from storage
+        const workspaceFiles = await listWorkspaceFiles(workspaceId)
+        const existingFile = workspaceFiles.find((f) => f.name === filename)
+
+        if (existingFile) {
+          // Parse from workspace storage instead of re-downloading
+          const storageFilePath = `/api/files/serve/${existingFile.key}`
+          return handleCloudFile(storageFilePath, fileType)
+        }
+      }
+    }
+
     const response = await fetch(url, {
       signal: AbortSignal.timeout(DOWNLOAD_TIMEOUT_MS),
     })
@@ -251,9 +290,23 @@ async function handleExternalUrl(url: string, fileType?: string): Promise<ParseR

     logger.info(`Downloaded file from URL: ${url}, size: ${buffer.length} bytes`)

-    const urlPath = new URL(url).pathname
-    const filename = urlPath.split('/').pop() || 'download'
-    const extension = path.extname(filename).toLowerCase().substring(1)
+    // If workspaceId provided, save to workspace storage
+    if (workspaceId) {
+      try {
+        const { getSession } = await import('@/lib/auth')
+        const { uploadWorkspaceFile } = await import('@/lib/uploads/workspace-files')
+
+        const session = await getSession()
+        if (session?.user?.id) {
+          const mimeType = response.headers.get('content-type') || getMimeType(extension)
+          await uploadWorkspaceFile(workspaceId, session.user.id, buffer, filename, mimeType)
+          logger.info(`Saved URL file to workspace storage: ${filename}`)
+        }
+      } catch (saveError) {
+        // Log but don't fail - continue with parsing even if save fails
+        logger.warn(`Failed to save URL file to workspace:`, saveError)
+      }
+    }

     if (extension === 'pdf') {
       return await handlePdfBuffer(buffer, filename, fileType, url)
@@ -281,16 +334,7 @@ async function handleExternalUrl(url: string, fileType?: string): Promise<ParseR
  */
 async function handleCloudFile(filePath: string, fileType?: string): Promise<ParseResult> {
   try {
-    let cloudKey: string
-    if (filePath.includes('/api/files/serve/s3/')) {
-      cloudKey = decodeURIComponent(filePath.split('/api/files/serve/s3/')[1])
-    } else if (filePath.includes('/api/files/serve/blob/')) {
-      cloudKey = decodeURIComponent(filePath.split('/api/files/serve/blob/')[1])
-    } else if (filePath.startsWith('/api/files/serve/')) {
-      cloudKey = decodeURIComponent(filePath.substring('/api/files/serve/'.length))
-    } else {
-      cloudKey = filePath
-    }
+    const cloudKey = extractStorageKey(filePath)

     logger.info('Extracted cloud key:', cloudKey)
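The workspace-files helpers are only visible in this diff through their call sites; from those, their signatures can be inferred roughly as below. This is a sketch of the inferred interface, not the actual source of `@/lib/uploads/workspace-files`.

```ts
// Inferred shapes of the '@/lib/uploads/workspace-files' helpers, based only
// on how the parse and upload routes call them. The real module may differ.
interface WorkspaceFile {
  name: string
  key: string // storage key, servable via /api/files/serve/<key>
}

declare function fileExistsInWorkspace(workspaceId: string, filename: string): Promise<boolean>
declare function listWorkspaceFiles(workspaceId: string): Promise<WorkspaceFile[]>
declare function uploadWorkspaceFile(
  workspaceId: string,
  userId: string,
  buffer: Buffer,
  filename: string,
  mimeType: string
): Promise<unknown> // the upload route pushes the resolved value into uploadResults as a user file
```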
@@ -66,6 +66,8 @@ export async function POST(request: NextRequest) {
       logger.info(
         `Uploading files for execution-scoped storage: workflow=${workflowId}, execution=${executionId}`
       )
+    } else if (workspaceId) {
+      logger.info(`Uploading files for workspace-scoped storage: workspace=${workspaceId}`)
     }

     const uploadResults = []
@@ -83,6 +85,7 @@ export async function POST(request: NextRequest) {
       const bytes = await file.arrayBuffer()
       const buffer = Buffer.from(bytes)

+      // Priority 1: Execution-scoped storage (temporary, 5 min expiry)
       if (workflowId && executionId) {
         const { uploadExecutionFile } = await import('@/lib/workflows/execution-file-storage')
         const userFile = await uploadExecutionFile(
@@ -100,6 +103,47 @@ export async function POST(request: NextRequest) {
         continue
       }

+      // Priority 2: Workspace-scoped storage (persistent, no expiry)
+      if (workspaceId) {
+        try {
+          const { uploadWorkspaceFile } = await import('@/lib/uploads/workspace-files')
+          const userFile = await uploadWorkspaceFile(
+            workspaceId,
+            session.user.id,
+            buffer,
+            originalName,
+            file.type || 'application/octet-stream'
+          )
+
+          uploadResults.push(userFile)
+          continue
+        } catch (workspaceError) {
+          // Check error type
+          const errorMessage =
+            workspaceError instanceof Error ? workspaceError.message : 'Upload failed'
+          const isDuplicate = errorMessage.includes('already exists')
+          const isStorageLimitError =
+            errorMessage.includes('Storage limit exceeded') ||
+            errorMessage.includes('storage limit')
+
+          logger.warn(`Workspace file upload failed: ${errorMessage}`)
+
+          // Determine appropriate status code
+          let statusCode = 500
+          if (isDuplicate) statusCode = 409
+          else if (isStorageLimitError) statusCode = 413
+
+          return NextResponse.json(
+            {
+              success: false,
+              error: errorMessage,
+              isDuplicate,
+            },
+            { status: statusCode }
+          )
+        }
+      }
+
       try {
         logger.info(`Uploading file: ${originalName}`)
         const result = await uploadFile(buffer, originalName, file.type, file.size)
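A client of this endpoint can now distinguish the new failure modes by status code (409 for duplicates, 413 for storage limits). The sketch below shows one way a caller might exercise the workspace-scoped path; the endpoint path and multipart field names are assumptions inferred from this handler, not a confirmed contract.

```ts
// Sketch: uploading a file into workspace-scoped storage and handling the
// new error statuses. '/api/files/upload' and the form field names are
// assumptions for illustration.
async function uploadToWorkspace(file: File, workspaceId: string) {
  const form = new FormData()
  form.append('file', file)
  form.append('workspaceId', workspaceId)

  const res = await fetch('/api/files/upload', { method: 'POST', body: form })
  if (res.status === 409) throw new Error('A file with this name already exists in the workspace')
  if (res.status === 413) throw new Error('Workspace storage limit exceeded')
  if (!res.ok) throw new Error('Upload failed')
  return res.json()
}
```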
@@ -67,12 +67,20 @@ describe('Knowledge Base Documents API Route', () => {
     chunkCount: 5,
     tokenCount: 100,
     characterCount: 500,
-    processingStatus: 'completed',
+    processingStatus: 'completed' as const,
     processingStartedAt: new Date(),
     processingCompletedAt: new Date(),
     processingError: null,
     enabled: true,
     uploadedAt: new Date(),
+    tag1: null,
+    tag2: null,
+    tag3: null,
+    tag4: null,
+    tag5: null,
+    tag6: null,
+    tag7: null,
+    deletedAt: null,
   }

   const resetMocks = () => {
@@ -343,7 +351,8 @@ describe('Knowledge Base Documents API Route', () => {
       expect(vi.mocked(createSingleDocument)).toHaveBeenCalledWith(
         validDocumentData,
         'kb-123',
-        expect.any(String)
+        expect.any(String),
+        'user-123'
       )
     })

@@ -451,7 +460,8 @@ describe('Knowledge Base Documents API Route', () => {
       expect(vi.mocked(createDocumentRecords)).toHaveBeenCalledWith(
         validBulkData.documents,
         'kb-123',
-        expect.any(String)
+        expect.any(String),
+        'user-123'
       )
       expect(vi.mocked(processDocumentsWithQueue)).toHaveBeenCalled()
     })
@@ -605,7 +615,7 @@ describe('Knowledge Base Documents API Route', () => {
       const data = await response.json()

       expect(response.status).toBe(500)
-      expect(data.error).toBe('Failed to create document')
+      expect(data.error).toBe('Database error')
     })
   })
 })
@@ -179,7 +179,8 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
       const createdDocuments = await createDocumentRecords(
         validatedData.documents,
         knowledgeBaseId,
-        requestId
+        requestId,
+        userId
       )

       logger.info(
@@ -243,7 +244,12 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
     try {
       const validatedData = CreateDocumentSchema.parse(body)

-      const newDocument = await createSingleDocument(validatedData, knowledgeBaseId, requestId)
+      const newDocument = await createSingleDocument(
+        validatedData,
+        knowledgeBaseId,
+        requestId,
+        userId
+      )

       // Track single document upload
       try {
@@ -278,7 +284,13 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
     }
   } catch (error) {
     logger.error(`[${requestId}] Error creating document`, error)
-    return NextResponse.json({ error: 'Failed to create document' }, { status: 500 })
+
+    // Check if it's a storage limit error
+    const errorMessage = error instanceof Error ? error.message : 'Failed to create document'
+    const isStorageLimitError =
+      errorMessage.includes('Storage limit exceeded') || errorMessage.includes('storage limit')
+
+    return NextResponse.json({ error: errorMessage }, { status: isStorageLimitError ? 413 : 500 })
   }
 }

@@ -317,7 +329,8 @@ export async function PATCH(req: NextRequest, { params }: { params: Promise<{ id
       knowledgeBaseId,
       operation,
       documentIds,
-      requestId
+      requestId,
+      session.user.id
     )

     return NextResponse.json({
apps/sim/app/api/tools/discord/send-message/route.ts (new file, 175 lines)
@@ -0,0 +1,175 @@
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { createLogger } from '@/lib/logs/console/logger'
import { downloadFileFromStorage, processFilesToUserFiles } from '@/lib/uploads/file-processing'
import { generateRequestId } from '@/lib/utils'

export const dynamic = 'force-dynamic'

const logger = createLogger('DiscordSendMessageAPI')

const DiscordSendMessageSchema = z.object({
  botToken: z.string().min(1, 'Bot token is required'),
  channelId: z.string().min(1, 'Channel ID is required'),
  content: z.string().optional().nullable(),
  files: z.array(z.any()).optional().nullable(),
})

export async function POST(request: NextRequest) {
  const requestId = generateRequestId()

  try {
    const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

    if (!authResult.success) {
      logger.warn(`[${requestId}] Unauthorized Discord send attempt: ${authResult.error}`)
      return NextResponse.json(
        {
          success: false,
          error: authResult.error || 'Authentication required',
        },
        { status: 401 }
      )
    }

    logger.info(`[${requestId}] Authenticated Discord send request via ${authResult.authType}`, {
      userId: authResult.userId,
    })

    const body = await request.json()
    const validatedData = DiscordSendMessageSchema.parse(body)

    logger.info(`[${requestId}] Sending Discord message`, {
      channelId: validatedData.channelId,
      hasFiles: !!(validatedData.files && validatedData.files.length > 0),
      fileCount: validatedData.files?.length || 0,
    })

    const discordApiUrl = `https://discord.com/api/v10/channels/${validatedData.channelId}/messages`

    if (!validatedData.files || validatedData.files.length === 0) {
      logger.info(`[${requestId}] No files, using JSON POST`)

      const response = await fetch(discordApiUrl, {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
          Authorization: `Bot ${validatedData.botToken}`,
        },
        body: JSON.stringify({
          content: validatedData.content || '',
        }),
      })

      if (!response.ok) {
        const errorData = await response.json().catch(() => ({}))
        logger.error(`[${requestId}] Discord API error:`, errorData)
        return NextResponse.json(
          {
            success: false,
            error: errorData.message || 'Failed to send message',
          },
          { status: response.status }
        )
      }

      const data = await response.json()
      logger.info(`[${requestId}] Message sent successfully`)
      return NextResponse.json({
        success: true,
        output: {
          message: data.content,
          data: data,
        },
      })
    }

    logger.info(`[${requestId}] Processing ${validatedData.files.length} file(s)`)

    const userFiles = processFilesToUserFiles(validatedData.files, requestId, logger)

    if (userFiles.length === 0) {
      logger.warn(`[${requestId}] No valid files to upload, falling back to text-only`)
      const response = await fetch(discordApiUrl, {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
          Authorization: `Bot ${validatedData.botToken}`,
        },
        body: JSON.stringify({
          content: validatedData.content || '',
        }),
      })

      const data = await response.json()
      return NextResponse.json({
        success: true,
        output: {
          message: data.content,
          data: data,
        },
      })
    }

    const formData = new FormData()

    const payload = {
      content: validatedData.content || '',
    }
    formData.append('payload_json', JSON.stringify(payload))

    for (let i = 0; i < userFiles.length; i++) {
      const userFile = userFiles[i]
      logger.info(`[${requestId}] Downloading file ${i}: ${userFile.name}`)

      const buffer = await downloadFileFromStorage(userFile, requestId, logger)

      const blob = new Blob([new Uint8Array(buffer)], { type: userFile.type })
      formData.append(`files[${i}]`, blob, userFile.name)
      logger.info(`[${requestId}] Added file ${i}: ${userFile.name} (${buffer.length} bytes)`)
    }

    logger.info(`[${requestId}] Sending multipart request with ${userFiles.length} file(s)`)
    const response = await fetch(discordApiUrl, {
      method: 'POST',
      headers: {
        Authorization: `Bot ${validatedData.botToken}`,
      },
      body: formData,
    })

    if (!response.ok) {
      const errorData = await response.json().catch(() => ({}))
      logger.error(`[${requestId}] Discord API error:`, errorData)
      return NextResponse.json(
        {
          success: false,
          error: errorData.message || 'Failed to send message with files',
        },
        { status: response.status }
      )
    }

    const data = await response.json()
    logger.info(`[${requestId}] Message with files sent successfully`)

    return NextResponse.json({
      success: true,
      output: {
        message: data.content,
        data: data,
        fileCount: userFiles.length,
      },
    })
  } catch (error) {
    logger.error(`[${requestId}] Error sending Discord message:`, error)
    return NextResponse.json(
      {
        success: false,
        error: error instanceof Error ? error.message : 'Unknown error occurred',
      },
      { status: 500 }
    )
  }
}
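The multipart shape this route builds follows Discord's documented attachment convention for the create-message endpoint: one `payload_json` part carrying the message JSON, plus one `files[n]` part per attachment. Condensed to its essentials:

```ts
// Sketch of the multipart body sent to Discord's create-message endpoint:
// message JSON in `payload_json`, one `files[i]` part per attachment.
// The byte values here are illustrative (the "%PDF" magic bytes).
const form = new FormData()
form.append('payload_json', JSON.stringify({ content: 'Here is the report' }))
form.append(
  'files[0]',
  new Blob([new Uint8Array([0x25, 0x50, 0x44, 0x46])], { type: 'application/pdf' }),
  'report.pdf'
)
// POST to https://discord.com/api/v10/channels/<channelId>/messages with an
// `Authorization: Bot <token>` header; fetch sets the multipart boundary itself.
```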
apps/sim/app/api/tools/gmail/draft/route.ts (new file, 201 lines)
@@ -0,0 +1,201 @@
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { createLogger } from '@/lib/logs/console/logger'
import { downloadFileFromStorage, processFilesToUserFiles } from '@/lib/uploads/file-processing'
import { generateRequestId } from '@/lib/utils'
import { base64UrlEncode, buildMimeMessage } from '@/tools/gmail/utils'

export const dynamic = 'force-dynamic'

const logger = createLogger('GmailDraftAPI')

const GMAIL_API_BASE = 'https://gmail.googleapis.com/gmail/v1/users/me'

const GmailDraftSchema = z.object({
  accessToken: z.string().min(1, 'Access token is required'),
  to: z.string().min(1, 'Recipient email is required'),
  subject: z.string().min(1, 'Subject is required'),
  body: z.string().min(1, 'Email body is required'),
  cc: z.string().optional().nullable(),
  bcc: z.string().optional().nullable(),
  attachments: z.array(z.any()).optional().nullable(),
})

export async function POST(request: NextRequest) {
  const requestId = generateRequestId()

  try {
    const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

    if (!authResult.success) {
      logger.warn(`[${requestId}] Unauthorized Gmail draft attempt: ${authResult.error}`)
      return NextResponse.json(
        {
          success: false,
          error: authResult.error || 'Authentication required',
        },
        { status: 401 }
      )
    }

    logger.info(`[${requestId}] Authenticated Gmail draft request via ${authResult.authType}`, {
      userId: authResult.userId,
    })

    const body = await request.json()
    const validatedData = GmailDraftSchema.parse(body)

    logger.info(`[${requestId}] Creating Gmail draft`, {
      to: validatedData.to,
      subject: validatedData.subject,
      hasAttachments: !!(validatedData.attachments && validatedData.attachments.length > 0),
      attachmentCount: validatedData.attachments?.length || 0,
    })

    let rawMessage: string | undefined

    if (validatedData.attachments && validatedData.attachments.length > 0) {
      const rawAttachments = validatedData.attachments
      logger.info(`[${requestId}] Processing ${rawAttachments.length} attachment(s)`)

      const attachments = processFilesToUserFiles(rawAttachments, requestId, logger)

      if (attachments.length === 0) {
        logger.warn(`[${requestId}] No valid attachments found after processing`)
      } else {
        const totalSize = attachments.reduce((sum, file) => sum + file.size, 0)
        const maxSize = 25 * 1024 * 1024 // 25MB

        if (totalSize > maxSize) {
          const sizeMB = (totalSize / (1024 * 1024)).toFixed(2)
          return NextResponse.json(
            {
              success: false,
              error: `Total attachment size (${sizeMB}MB) exceeds Gmail's limit of 25MB`,
            },
            { status: 400 }
          )
        }

        const attachmentBuffers = await Promise.all(
          attachments.map(async (file) => {
            try {
              logger.info(
                `[${requestId}] Downloading attachment: ${file.name} (${file.size} bytes)`
              )

              const buffer = await downloadFileFromStorage(file, requestId, logger)

              return {
                filename: file.name,
                mimeType: file.type || 'application/octet-stream',
                content: buffer,
              }
            } catch (error) {
              logger.error(`[${requestId}] Failed to download attachment ${file.name}:`, error)
              throw new Error(
                `Failed to download attachment "${file.name}": ${error instanceof Error ? error.message : 'Unknown error'}`
              )
            }
          })
        )

        const mimeMessage = buildMimeMessage({
          to: validatedData.to,
          cc: validatedData.cc ?? undefined,
          bcc: validatedData.bcc ?? undefined,
          subject: validatedData.subject,
          body: validatedData.body,
          attachments: attachmentBuffers,
        })

        logger.info(`[${requestId}] Built MIME message for draft (${mimeMessage.length} bytes)`)
        rawMessage = base64UrlEncode(mimeMessage)
      }
    }

    if (!rawMessage) {
      const emailHeaders = [
        'Content-Type: text/plain; charset="UTF-8"',
        'MIME-Version: 1.0',
        `To: ${validatedData.to}`,
      ]

      if (validatedData.cc) {
        emailHeaders.push(`Cc: ${validatedData.cc}`)
      }
      if (validatedData.bcc) {
        emailHeaders.push(`Bcc: ${validatedData.bcc}`)
      }

      emailHeaders.push(`Subject: ${validatedData.subject}`, '', validatedData.body)
      const email = emailHeaders.join('\n')
      rawMessage = Buffer.from(email).toString('base64url')
    }

    const gmailResponse = await fetch(`${GMAIL_API_BASE}/drafts`, {
      method: 'POST',
      headers: {
        Authorization: `Bearer ${validatedData.accessToken}`,
        'Content-Type': 'application/json',
      },
      body: JSON.stringify({
        message: { raw: rawMessage },
      }),
    })

    if (!gmailResponse.ok) {
      const errorText = await gmailResponse.text()
      logger.error(`[${requestId}] Gmail API error:`, errorText)
      return NextResponse.json(
        {
          success: false,
          error: `Gmail API error: ${gmailResponse.statusText}`,
        },
        { status: gmailResponse.status }
      )
    }

    const data = await gmailResponse.json()

    logger.info(`[${requestId}] Draft created successfully`, { draftId: data.id })

    return NextResponse.json({
      success: true,
      output: {
        content: 'Email drafted successfully',
        metadata: {
          id: data.id,
          message: {
            id: data.message?.id,
            threadId: data.message?.threadId,
            labelIds: data.message?.labelIds,
          },
        },
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
      return NextResponse.json(
        {
          success: false,
          error: 'Invalid request data',
          details: error.errors,
        },
        { status: 400 }
      )
    }

    logger.error(`[${requestId}] Error creating Gmail draft:`, error)

    return NextResponse.json(
      {
        success: false,
        error: error instanceof Error ? error.message : 'Internal server error',
      },
      { status: 500 }
    )
  }
}
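`base64UrlEncode` is imported from `@/tools/gmail/utils` and is not part of this diff. Gmail's `raw` field requires URL-safe base64, which the no-attachment branch produces directly with `Buffer.toString('base64url')`; a plausible sketch of the helper (an assumption, not the module's actual source) is:

```ts
// Plausible sketch of base64UrlEncode (the real helper lives in
// '@/tools/gmail/utils' and is not shown in this diff): standard base64
// with '+' -> '-', '/' -> '_', and '=' padding stripped, as Gmail's
// `raw` message field requires.
function base64UrlEncode(message: string): string {
  return Buffer.from(message)
    .toString('base64')
    .replace(/\+/g, '-')
    .replace(/\//g, '_')
    .replace(/=+$/, '')
}
```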
apps/sim/app/api/tools/gmail/send/route.ts (new file, 196 lines)
@@ -0,0 +1,196 @@
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { createLogger } from '@/lib/logs/console/logger'
import { downloadFileFromStorage, processFilesToUserFiles } from '@/lib/uploads/file-processing'
import { generateRequestId } from '@/lib/utils'
import { base64UrlEncode, buildMimeMessage } from '@/tools/gmail/utils'

export const dynamic = 'force-dynamic'

const logger = createLogger('GmailSendAPI')

const GMAIL_API_BASE = 'https://gmail.googleapis.com/gmail/v1/users/me'

const GmailSendSchema = z.object({
  accessToken: z.string().min(1, 'Access token is required'),
  to: z.string().min(1, 'Recipient email is required'),
  subject: z.string().min(1, 'Subject is required'),
  body: z.string().min(1, 'Email body is required'),
  cc: z.string().optional().nullable(),
  bcc: z.string().optional().nullable(),
  attachments: z.array(z.any()).optional().nullable(),
})

export async function POST(request: NextRequest) {
  const requestId = generateRequestId()

  try {
    const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

    if (!authResult.success) {
      logger.warn(`[${requestId}] Unauthorized Gmail send attempt: ${authResult.error}`)
      return NextResponse.json(
        {
          success: false,
          error: authResult.error || 'Authentication required',
        },
        { status: 401 }
      )
    }

    logger.info(`[${requestId}] Authenticated Gmail send request via ${authResult.authType}`, {
      userId: authResult.userId,
    })

    const body = await request.json()
    const validatedData = GmailSendSchema.parse(body)

    logger.info(`[${requestId}] Sending Gmail email`, {
      to: validatedData.to,
      subject: validatedData.subject,
      hasAttachments: !!(validatedData.attachments && validatedData.attachments.length > 0),
      attachmentCount: validatedData.attachments?.length || 0,
    })

    let rawMessage: string | undefined

    if (validatedData.attachments && validatedData.attachments.length > 0) {
      const rawAttachments = validatedData.attachments
      logger.info(`[${requestId}] Processing ${rawAttachments.length} attachment(s)`)

      const attachments = processFilesToUserFiles(rawAttachments, requestId, logger)

      if (attachments.length === 0) {
        logger.warn(`[${requestId}] No valid attachments found after processing`)
      } else {
        const totalSize = attachments.reduce((sum, file) => sum + file.size, 0)
        const maxSize = 25 * 1024 * 1024 // 25MB

        if (totalSize > maxSize) {
          const sizeMB = (totalSize / (1024 * 1024)).toFixed(2)
          return NextResponse.json(
            {
              success: false,
              error: `Total attachment size (${sizeMB}MB) exceeds Gmail's limit of 25MB`,
            },
            { status: 400 }
          )
        }

        const attachmentBuffers = await Promise.all(
          attachments.map(async (file) => {
            try {
              logger.info(
                `[${requestId}] Downloading attachment: ${file.name} (${file.size} bytes)`
              )

              const buffer = await downloadFileFromStorage(file, requestId, logger)

              return {
                filename: file.name,
                mimeType: file.type || 'application/octet-stream',
                content: buffer,
              }
            } catch (error) {
              logger.error(`[${requestId}] Failed to download attachment ${file.name}:`, error)
              throw new Error(
                `Failed to download attachment "${file.name}": ${error instanceof Error ? error.message : 'Unknown error'}`
              )
            }
          })
        )

        const mimeMessage = buildMimeMessage({
          to: validatedData.to,
          cc: validatedData.cc ?? undefined,
          bcc: validatedData.bcc ?? undefined,
          subject: validatedData.subject,
          body: validatedData.body,
          attachments: attachmentBuffers,
        })

        logger.info(`[${requestId}] Built MIME message (${mimeMessage.length} bytes)`)
        rawMessage = base64UrlEncode(mimeMessage)
      }
    }

    if (!rawMessage) {
      const emailHeaders = [
        'Content-Type: text/plain; charset="UTF-8"',
        'MIME-Version: 1.0',
        `To: ${validatedData.to}`,
      ]

      if (validatedData.cc) {
        emailHeaders.push(`Cc: ${validatedData.cc}`)
      }
      if (validatedData.bcc) {
        emailHeaders.push(`Bcc: ${validatedData.bcc}`)
      }

      emailHeaders.push(`Subject: ${validatedData.subject}`, '', validatedData.body)
      const email = emailHeaders.join('\n')
      rawMessage = Buffer.from(email).toString('base64url')
    }

    const gmailResponse = await fetch(`${GMAIL_API_BASE}/messages/send`, {
      method: 'POST',
      headers: {
        Authorization: `Bearer ${validatedData.accessToken}`,
        'Content-Type': 'application/json',
      },
      body: JSON.stringify({ raw: rawMessage }),
    })

    if (!gmailResponse.ok) {
      const errorText = await gmailResponse.text()
      logger.error(`[${requestId}] Gmail API error:`, errorText)
      return NextResponse.json(
        {
          success: false,
          error: `Gmail API error: ${gmailResponse.statusText}`,
        },
        { status: gmailResponse.status }
      )
    }

    const data = await gmailResponse.json()

    logger.info(`[${requestId}] Email sent successfully`, { messageId: data.id })

    return NextResponse.json({
      success: true,
      output: {
        content: 'Email sent successfully',
        metadata: {
          id: data.id,
          threadId: data.threadId,
          labelIds: data.labelIds,
        },
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
      return NextResponse.json(
        {
          success: false,
          error: 'Invalid request data',
          details: error.errors,
        },
        { status: 400 }
      )
    }

    logger.error(`[${requestId}] Error sending Gmail email:`, error)

    return NextResponse.json(
      {
        success: false,
        error: error instanceof Error ? error.message : 'Internal server error',
      },
      { status: 500 }
    )
  }
}
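`buildMimeMessage`, also from `@/tools/gmail/utils`, is not shown in this diff either; for a message with attachments it presumably emits a multipart/mixed document along the lines below. This is an illustration of the standard RFC 2045 framing, not the helper's actual output.

```ts
// Illustrative multipart/mixed MIME shape for a message with one attachment.
// The real buildMimeMessage helper may differ in details (boundary format,
// header order, encoding of the body part).
const boundary = 'mime_boundary_123' // illustrative boundary token
const illustrativeMime = [
  'To: recipient@example.com',
  'Subject: Quarterly report',
  'MIME-Version: 1.0',
  `Content-Type: multipart/mixed; boundary="${boundary}"`,
  '',
  `--${boundary}`,
  'Content-Type: text/plain; charset="UTF-8"',
  '',
  'Report attached.',
  `--${boundary}`,
  'Content-Type: application/pdf; name="report.pdf"',
  'Content-Transfer-Encoding: base64',
  'Content-Disposition: attachment; filename="report.pdf"',
  '',
  '<base64 file content>',
  `--${boundary}--`,
].join('\r\n')
```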
apps/sim/app/api/tools/google_drive/upload/route.ts (new file, 298 lines)
@@ -0,0 +1,298 @@
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { createLogger } from '@/lib/logs/console/logger'
import { downloadFileFromStorage, processSingleFileToUserFile } from '@/lib/uploads/file-processing'
import { generateRequestId } from '@/lib/utils'
import {
  GOOGLE_WORKSPACE_MIME_TYPES,
  handleSheetsFormat,
  SOURCE_MIME_TYPES,
} from '@/tools/google_drive/utils'

export const dynamic = 'force-dynamic'

const logger = createLogger('GoogleDriveUploadAPI')

const GOOGLE_DRIVE_API_BASE = 'https://www.googleapis.com/upload/drive/v3/files'

const GoogleDriveUploadSchema = z.object({
  accessToken: z.string().min(1, 'Access token is required'),
  fileName: z.string().min(1, 'File name is required'),
  file: z.any().optional().nullable(),
  mimeType: z.string().optional().nullable(),
  folderId: z.string().optional().nullable(),
})

/**
 * Build multipart upload body for Google Drive API
 */
function buildMultipartBody(
  metadata: Record<string, any>,
  fileBuffer: Buffer,
  mimeType: string,
  boundary: string
): string {
  const parts: string[] = []

  parts.push(`--${boundary}`)
  parts.push('Content-Type: application/json; charset=UTF-8')
  parts.push('')
  parts.push(JSON.stringify(metadata))

  parts.push(`--${boundary}`)
  parts.push(`Content-Type: ${mimeType}`)
  parts.push('Content-Transfer-Encoding: base64')
  parts.push('')
  parts.push(fileBuffer.toString('base64'))

  parts.push(`--${boundary}--`)

  return parts.join('\r\n')
}

export async function POST(request: NextRequest) {
  const requestId = generateRequestId()

  try {
    const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

    if (!authResult.success) {
      logger.warn(`[${requestId}] Unauthorized Google Drive upload attempt: ${authResult.error}`)
      return NextResponse.json(
        {
          success: false,
          error: authResult.error || 'Authentication required',
        },
        { status: 401 }
      )
    }

    logger.info(
      `[${requestId}] Authenticated Google Drive upload request via ${authResult.authType}`,
      {
        userId: authResult.userId,
      }
    )

    const body = await request.json()
    const validatedData = GoogleDriveUploadSchema.parse(body)

    logger.info(`[${requestId}] Uploading file to Google Drive`, {
      fileName: validatedData.fileName,
      mimeType: validatedData.mimeType,
      folderId: validatedData.folderId,
      hasFile: !!validatedData.file,
    })

    if (!validatedData.file) {
      return NextResponse.json(
        {
          success: false,
          error: 'No file provided. Use the text content field for text-only uploads.',
        },
        { status: 400 }
      )
    }

    // Process file - convert to UserFile format if needed
    const fileData = validatedData.file

    let userFile
    try {
      userFile = processSingleFileToUserFile(fileData, requestId, logger)
    } catch (error) {
      return NextResponse.json(
        {
          success: false,
          error: error instanceof Error ? error.message : 'Failed to process file',
        },
        { status: 400 }
      )
    }

    logger.info(`[${requestId}] Downloading file from storage`, {
      fileName: userFile.name,
      key: userFile.key,
      size: userFile.size,
    })

    let fileBuffer: Buffer

    try {
      fileBuffer = await downloadFileFromStorage(userFile, requestId, logger)
    } catch (error) {
      logger.error(`[${requestId}] Failed to download file:`, error)
      return NextResponse.json(
        {
          success: false,
          error: `Failed to download file: ${error instanceof Error ? error.message : 'Unknown error'}`,
        },
        { status: 500 }
      )
    }

    let uploadMimeType = validatedData.mimeType || userFile.type || 'application/octet-stream'
    const requestedMimeType = validatedData.mimeType || userFile.type || 'application/octet-stream'

    if (GOOGLE_WORKSPACE_MIME_TYPES.includes(requestedMimeType)) {
      uploadMimeType = SOURCE_MIME_TYPES[requestedMimeType] || 'text/plain'
      logger.info(`[${requestId}] Converting to Google Workspace type`, {
        requestedMimeType,
        uploadMimeType,
      })
    }

    if (requestedMimeType === 'application/vnd.google-apps.spreadsheet') {
      try {
        const textContent = fileBuffer.toString('utf-8')
        const { csv } = handleSheetsFormat(textContent)
        if (csv !== undefined) {
          fileBuffer = Buffer.from(csv, 'utf-8')
          uploadMimeType = 'text/csv'
          logger.info(`[${requestId}] Converted to CSV for Google Sheets upload`)
        }
      } catch (error) {
        logger.warn(`[${requestId}] Could not convert to CSV, uploading as-is:`, error)
      }
    }

    const metadata: {
      name: string
      mimeType: string
      parents?: string[]
    } = {
      name: validatedData.fileName,
      mimeType: requestedMimeType,
    }

    if (validatedData.folderId && validatedData.folderId.trim() !== '') {
      metadata.parents = [validatedData.folderId.trim()]
    }

    const boundary = `boundary_${Date.now()}_${Math.random().toString(36).substring(7)}`

    const multipartBody = buildMultipartBody(metadata, fileBuffer, uploadMimeType, boundary)

    logger.info(`[${requestId}] Uploading to Google Drive via multipart upload`, {
      fileName: validatedData.fileName,
      size: fileBuffer.length,
      uploadMimeType,
      requestedMimeType,
    })

    const uploadResponse = await fetch(
      `${GOOGLE_DRIVE_API_BASE}?uploadType=multipart&supportsAllDrives=true`,
      {
        method: 'POST',
        headers: {
          Authorization: `Bearer ${validatedData.accessToken}`,
          'Content-Type': `multipart/related; boundary=${boundary}`,
          'Content-Length': Buffer.byteLength(multipartBody, 'utf-8').toString(),
        },
        body: multipartBody,
      }
    )

    if (!uploadResponse.ok) {
      const errorText = await uploadResponse.text()
      logger.error(`[${requestId}] Google Drive API error:`, {
        status: uploadResponse.status,
        statusText: uploadResponse.statusText,
        error: errorText,
      })
      return NextResponse.json(
        {
          success: false,
          error: `Google Drive API error: ${uploadResponse.statusText}`,
        },
        { status: uploadResponse.status }
      )
    }

    const uploadData = await uploadResponse.json()
    const fileId = uploadData.id

    logger.info(`[${requestId}] File uploaded successfully`, { fileId })

    if (GOOGLE_WORKSPACE_MIME_TYPES.includes(requestedMimeType)) {
      logger.info(`[${requestId}] Updating file name to ensure it persists after conversion`)

      const updateNameResponse = await fetch(
        `https://www.googleapis.com/drive/v3/files/${fileId}?supportsAllDrives=true`,
        {
          method: 'PATCH',
          headers: {
            Authorization: `Bearer ${validatedData.accessToken}`,
            'Content-Type': 'application/json',
          },
          body: JSON.stringify({
            name: validatedData.fileName,
          }),
        }
      )

      if (!updateNameResponse.ok) {
        logger.warn(
          `[${requestId}] Failed to update filename after conversion, but content was uploaded`
        )
      }
    }

    const finalFileResponse = await fetch(
      `https://www.googleapis.com/drive/v3/files/${fileId}?supportsAllDrives=true&fields=id,name,mimeType,webViewLink,webContentLink,size,createdTime,modifiedTime,parents`,
      {
        headers: {
          Authorization: `Bearer ${validatedData.accessToken}`,
        },
      }
    )

    const finalFile = await finalFileResponse.json()

    logger.info(`[${requestId}] Upload complete`, {
      fileId: finalFile.id,
      fileName: finalFile.name,
      webViewLink: finalFile.webViewLink,
    })

    return NextResponse.json({
      success: true,
      output: {
        file: {
          id: finalFile.id,
          name: finalFile.name,
          mimeType: finalFile.mimeType,
          webViewLink: finalFile.webViewLink,
          webContentLink: finalFile.webContentLink,
          size: finalFile.size,
          createdTime: finalFile.createdTime,
          modifiedTime: finalFile.modifiedTime,
          parents: finalFile.parents,
        },
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
      return NextResponse.json(
        {
          success: false,
          error: 'Invalid request data',
          details: error.errors,
        },
        { status: 400 }
      )
    }

    logger.error(`[${requestId}] Error uploading file to Google Drive:`, error)

    return NextResponse.json(
      {
        success: false,
        error: error instanceof Error ? error.message : 'Internal server error',
      },
      { status: 500 }
    )
  }
}
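`buildMultipartBody` above emits Google Drive's multipart/related upload format: a JSON metadata part followed by a base64-encoded media part, joined with CRLF. For a small text file the wire body it produces looks like this (values illustrative):

```ts
// What buildMultipartBody produces for a small text upload: metadata JSON
// part, then the base64 media part, framed by the boundary and joined
// with CRLF. The boundary value here is illustrative.
const exampleBody = [
  '--boundary_123',
  'Content-Type: application/json; charset=UTF-8',
  '',
  '{"name":"notes.txt","mimeType":"text/plain"}',
  '--boundary_123',
  'Content-Type: text/plain',
  'Content-Transfer-Encoding: base64',
  '',
  Buffer.from('hello').toString('base64'), // "aGVsbG8="
  '--boundary_123--',
].join('\r\n')
```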
apps/sim/app/api/tools/microsoft_teams/write_channel/route.ts (new file, 218 lines)
@@ -0,0 +1,218 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { downloadFileFromStorage, processFilesToUserFiles } from '@/lib/uploads/file-processing'
|
||||
import { generateRequestId } from '@/lib/utils'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
const logger = createLogger('TeamsWriteChannelAPI')
|
||||
|
||||
const TeamsWriteChannelSchema = z.object({
|
||||
accessToken: z.string().min(1, 'Access token is required'),
|
||||
teamId: z.string().min(1, 'Team ID is required'),
|
||||
channelId: z.string().min(1, 'Channel ID is required'),
|
||||
content: z.string().min(1, 'Message content is required'),
|
||||
files: z.array(z.any()).optional().nullable(),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = generateRequestId()
|
||||
|
||||
try {
|
||||
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })
|
||||
|
||||
if (!authResult.success) {
|
||||
logger.warn(`[${requestId}] Unauthorized Teams channel write attempt: ${authResult.error}`)
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: authResult.error || 'Authentication required',
|
||||
},
|
||||
{ status: 401 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Authenticated Teams channel write request via ${authResult.authType}`,
|
||||
{
|
||||
userId: authResult.userId,
|
||||
}
|
||||
)
|
||||
|
||||
const body = await request.json()
|
||||
const validatedData = TeamsWriteChannelSchema.parse(body)
|
||||
|
||||
logger.info(`[${requestId}] Sending Teams channel message`, {
|
||||
teamId: validatedData.teamId,
|
||||
channelId: validatedData.channelId,
|
||||
hasFiles: !!(validatedData.files && validatedData.files.length > 0),
|
||||
fileCount: validatedData.files?.length || 0,
|
||||
})
|
||||
|
||||
const attachments: any[] = []
|
||||
if (validatedData.files && validatedData.files.length > 0) {
|
||||
const rawFiles = validatedData.files
|
||||
logger.info(`[${requestId}] Processing ${rawFiles.length} file(s) for upload to OneDrive`)
|
||||
|
||||
const userFiles = processFilesToUserFiles(rawFiles, requestId, logger)
|
||||
|
||||
for (const file of userFiles) {
|
||||
try {
|
||||
logger.info(`[${requestId}] Uploading file to Teams: ${file.name} (${file.size} bytes)`)
|
||||
|
||||
const buffer = await downloadFileFromStorage(file, requestId, logger)
|
||||
|
||||
const uploadUrl =
|
||||
'https://graph.microsoft.com/v1.0/me/drive/root:/TeamsAttachments/' +
|
||||
encodeURIComponent(file.name) +
|
||||
':/content'
|
||||
|
||||
logger.info(`[${requestId}] Uploading to Teams: ${uploadUrl}`)
|
||||
|
||||
const uploadResponse = await fetch(uploadUrl, {
|
||||
method: 'PUT',
|
||||
headers: {
|
||||
Authorization: `Bearer ${validatedData.accessToken}`,
|
||||
'Content-Type': file.type || 'application/octet-stream',
|
||||
},
|
||||
body: new Uint8Array(buffer),
|
||||
})
|
||||
|
||||
if (!uploadResponse.ok) {
|
||||
const errorData = await uploadResponse.json().catch(() => ({}))
|
||||
logger.error(`[${requestId}] Teams upload failed:`, errorData)
|
||||
throw new Error(
|
||||
`Failed to upload file to Teams: ${errorData.error?.message || 'Unknown error'}`
|
||||
)
|
||||
}
|
||||
|
||||
const uploadedFile = await uploadResponse.json()
|
||||
logger.info(`[${requestId}] File uploaded to Teams successfully`, {
|
||||
id: uploadedFile.id,
|
||||
webUrl: uploadedFile.webUrl,
|
||||
})
|
||||
|
||||
const fileDetailsUrl = `https://graph.microsoft.com/v1.0/me/drive/items/${uploadedFile.id}?$select=id,name,webDavUrl,eTag,size`
|
||||
|
||||
const fileDetailsResponse = await fetch(fileDetailsUrl, {
|
||||
headers: {
|
||||
Authorization: `Bearer ${validatedData.accessToken}`,
|
||||
},
|
||||
})
|
||||
|
||||
if (!fileDetailsResponse.ok) {
|
||||
const errorData = await fileDetailsResponse.json().catch(() => ({}))
|
||||
logger.error(`[${requestId}] Failed to get file details:`, errorData)
|
||||
throw new Error(
|
||||
              `Failed to get file details: ${errorData.error?.message || 'Unknown error'}`
            )
          }

          const fileDetails = await fileDetailsResponse.json()
          logger.info(`[${requestId}] Got file details`, {
            webDavUrl: fileDetails.webDavUrl,
            eTag: fileDetails.eTag,
          })

          const attachmentId = fileDetails.eTag?.match(/\{([a-f0-9-]+)\}/i)?.[1] || fileDetails.id

          attachments.push({
            id: attachmentId,
            contentType: 'reference',
            contentUrl: fileDetails.webDavUrl,
            name: file.name,
          })

          logger.info(`[${requestId}] Created attachment reference for ${file.name}`)
        } catch (error) {
          logger.error(`[${requestId}] Failed to process file ${file.name}:`, error)
          throw new Error(
            `Failed to process file "${file.name}": ${error instanceof Error ? error.message : 'Unknown error'}`
          )
        }
      }

      logger.info(
        `[${requestId}] All ${attachments.length} file(s) uploaded and attachment references created`
      )
    }

    let messageContent = validatedData.content

    if (attachments.length > 0) {
      const attachmentTags = attachments
        .map((att) => `<attachment id="${att.id}"></attachment>`)
        .join(' ')
      messageContent = `${validatedData.content}<br/>${attachmentTags}`
    }

    const messageBody = {
      body: {
        contentType: attachments.length > 0 ? 'html' : 'text',
        content: messageContent,
      },
    }

    if (attachments.length > 0) {
      ;(messageBody as any).attachments = attachments
    }

    logger.info(`[${requestId}] Sending message to Teams channel: ${validatedData.channelId}`)

    const teamsUrl = `https://graph.microsoft.com/v1.0/teams/${encodeURIComponent(validatedData.teamId)}/channels/${encodeURIComponent(validatedData.channelId)}/messages`

    const teamsResponse = await fetch(teamsUrl, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        Authorization: `Bearer ${validatedData.accessToken}`,
      },
      body: JSON.stringify(messageBody),
    })

    if (!teamsResponse.ok) {
      const errorData = await teamsResponse.json().catch(() => ({}))
      logger.error(`[${requestId}] Microsoft Teams API error:`, errorData)
      return NextResponse.json(
        {
          success: false,
          error: errorData.error?.message || 'Failed to send Teams channel message',
        },
        { status: teamsResponse.status }
      )
    }

    const responseData = await teamsResponse.json()
    logger.info(`[${requestId}] Teams channel message sent successfully`, {
      messageId: responseData.id,
      attachmentCount: attachments.length,
    })

    return NextResponse.json({
      success: true,
      output: {
        updatedContent: true,
        metadata: {
          messageId: responseData.id,
          teamId: responseData.channelIdentity?.teamId || validatedData.teamId,
          channelId: responseData.channelIdentity?.channelId || validatedData.channelId,
          content: responseData.body?.content || validatedData.content,
          createdTime: responseData.createdDateTime || new Date().toISOString(),
          url: responseData.webUrl || '',
          attachmentCount: attachments.length,
        },
      },
    })
  } catch (error) {
    logger.error(`[${requestId}] Error sending Teams channel message:`, error)
    return NextResponse.json(
      {
        success: false,
        error: error instanceof Error ? error.message : 'Unknown error occurred',
      },
      { status: 500 }
    )
  }
}
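For reference, this is the shape of the Graph chatMessage payload the handler above assembles once a file has been uploaded; the GUID, URL, and file name below are made-up placeholders, not values from the commit:

```ts
// Illustrative payload only — the attachment id must match the GUID in the <attachment> tag.
const examplePayload = {
  body: {
    contentType: 'html',
    content: 'Report attached.<br/><attachment id="4f6a2c1e-0000-0000-0000-000000000000"></attachment>',
  },
  attachments: [
    {
      id: '4f6a2c1e-0000-0000-0000-000000000000', // extracted from the drive item's eTag
      contentType: 'reference',
      contentUrl: 'https://contoso-my.sharepoint.com/personal/u/Documents/TeamsAttachments/report.pdf',
      name: 'report.pdf',
    },
  ],
}
```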
215  apps/sim/app/api/tools/microsoft_teams/write_chat/route.ts  Normal file
@@ -0,0 +1,215 @@
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { createLogger } from '@/lib/logs/console/logger'
import { downloadFileFromStorage, processFilesToUserFiles } from '@/lib/uploads/file-processing'
import { generateRequestId } from '@/lib/utils'

export const dynamic = 'force-dynamic'

const logger = createLogger('TeamsWriteChatAPI')

const TeamsWriteChatSchema = z.object({
  accessToken: z.string().min(1, 'Access token is required'),
  chatId: z.string().min(1, 'Chat ID is required'),
  content: z.string().min(1, 'Message content is required'),
  files: z.array(z.any()).optional().nullable(),
})

export async function POST(request: NextRequest) {
  const requestId = generateRequestId()

  try {
    const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

    if (!authResult.success) {
      logger.warn(`[${requestId}] Unauthorized Teams chat write attempt: ${authResult.error}`)
      return NextResponse.json(
        {
          success: false,
          error: authResult.error || 'Authentication required',
        },
        { status: 401 }
      )
    }

    logger.info(
      `[${requestId}] Authenticated Teams chat write request via ${authResult.authType}`,
      {
        userId: authResult.userId,
      }
    )

    const body = await request.json()
    const validatedData = TeamsWriteChatSchema.parse(body)

    logger.info(`[${requestId}] Sending Teams chat message`, {
      chatId: validatedData.chatId,
      hasFiles: !!(validatedData.files && validatedData.files.length > 0),
      fileCount: validatedData.files?.length || 0,
    })

    const attachments: any[] = []
    if (validatedData.files && validatedData.files.length > 0) {
      const rawFiles = validatedData.files
      logger.info(`[${requestId}] Processing ${rawFiles.length} file(s) for upload to Teams`)

      const userFiles = processFilesToUserFiles(rawFiles, requestId, logger)

      for (const file of userFiles) {
        try {
          logger.info(`[${requestId}] Uploading file to Teams: ${file.name} (${file.size} bytes)`)

          const buffer = await downloadFileFromStorage(file, requestId, logger)

          const uploadUrl =
            'https://graph.microsoft.com/v1.0/me/drive/root:/TeamsAttachments/' +
            encodeURIComponent(file.name) +
            ':/content'

          logger.info(`[${requestId}] Uploading to Teams: ${uploadUrl}`)

          const uploadResponse = await fetch(uploadUrl, {
            method: 'PUT',
            headers: {
              Authorization: `Bearer ${validatedData.accessToken}`,
              'Content-Type': file.type || 'application/octet-stream',
            },
            body: new Uint8Array(buffer),
          })

          if (!uploadResponse.ok) {
            const errorData = await uploadResponse.json().catch(() => ({}))
            logger.error(`[${requestId}] Teams upload failed:`, errorData)
            throw new Error(
              `Failed to upload file to Teams: ${errorData.error?.message || 'Unknown error'}`
            )
          }

          const uploadedFile = await uploadResponse.json()
          logger.info(`[${requestId}] File uploaded to Teams successfully`, {
            id: uploadedFile.id,
            webUrl: uploadedFile.webUrl,
          })

          const fileDetailsUrl = `https://graph.microsoft.com/v1.0/me/drive/items/${uploadedFile.id}?$select=id,name,webDavUrl,eTag,size`

          const fileDetailsResponse = await fetch(fileDetailsUrl, {
            headers: {
              Authorization: `Bearer ${validatedData.accessToken}`,
            },
          })

          if (!fileDetailsResponse.ok) {
            const errorData = await fileDetailsResponse.json().catch(() => ({}))
            logger.error(`[${requestId}] Failed to get file details:`, errorData)
            throw new Error(
              `Failed to get file details: ${errorData.error?.message || 'Unknown error'}`
            )
          }

          const fileDetails = await fileDetailsResponse.json()
          logger.info(`[${requestId}] Got file details`, {
            webDavUrl: fileDetails.webDavUrl,
            eTag: fileDetails.eTag,
          })

          const attachmentId = fileDetails.eTag?.match(/\{([a-f0-9-]+)\}/i)?.[1] || fileDetails.id

          attachments.push({
            id: attachmentId,
            contentType: 'reference',
            contentUrl: fileDetails.webDavUrl,
            name: file.name,
          })

          logger.info(`[${requestId}] Created attachment reference for ${file.name}`)
        } catch (error) {
          logger.error(`[${requestId}] Failed to process file ${file.name}:`, error)
          throw new Error(
            `Failed to process file "${file.name}": ${error instanceof Error ? error.message : 'Unknown error'}`
          )
        }
      }

      logger.info(
        `[${requestId}] All ${attachments.length} file(s) uploaded and attachment references created`
      )
    }

    let messageContent = validatedData.content

    if (attachments.length > 0) {
      const attachmentTags = attachments
        .map((att) => `<attachment id="${att.id}"></attachment>`)
        .join(' ')
      messageContent = `${validatedData.content}<br/>${attachmentTags}`
    }

    const messageBody = {
      body: {
        contentType: attachments.length > 0 ? 'html' : 'text',
        content: messageContent,
      },
    }

    if (attachments.length > 0) {
      ;(messageBody as any).attachments = attachments
    }

    logger.info(`[${requestId}] Sending message to Teams chat: ${validatedData.chatId}`)

    const teamsUrl = `https://graph.microsoft.com/v1.0/chats/${encodeURIComponent(validatedData.chatId)}/messages`

    const teamsResponse = await fetch(teamsUrl, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        Authorization: `Bearer ${validatedData.accessToken}`,
      },
      body: JSON.stringify(messageBody),
    })

    if (!teamsResponse.ok) {
      const errorData = await teamsResponse.json().catch(() => ({}))
      logger.error(`[${requestId}] Microsoft Teams API error:`, errorData)
      return NextResponse.json(
        {
          success: false,
          error: errorData.error?.message || 'Failed to send Teams message',
        },
        { status: teamsResponse.status }
      )
    }

    const responseData = await teamsResponse.json()
    logger.info(`[${requestId}] Teams message sent successfully`, {
      messageId: responseData.id,
      attachmentCount: attachments.length,
    })

    return NextResponse.json({
      success: true,
      output: {
        updatedContent: true,
        metadata: {
          messageId: responseData.id,
          chatId: responseData.chatId || validatedData.chatId,
          content: responseData.body?.content || validatedData.content,
          createdTime: responseData.createdDateTime || new Date().toISOString(),
          url: responseData.webUrl || '',
          attachmentCount: attachments.length,
        },
      },
    })
  } catch (error) {
    logger.error(`[${requestId}] Error sending Teams chat message:`, error)
    return NextResponse.json(
      {
        success: false,
        error: error instanceof Error ? error.message : 'Unknown error occurred',
      },
      { status: 500 }
    )
  }
}
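A minimal sketch of exercising this route, assuming the app runs at localhost:3000 and the `files` entry mirrors the UserFile shape the upload pipeline produces; all IDs, keys, and tokens below are placeholders:

```ts
const res = await fetch('http://localhost:3000/api/tools/microsoft_teams/write_chat', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    accessToken: '<graph-access-token>',
    chatId: '19:abc123@thread.v2',
    content: 'Here is the file you asked for.',
    files: [
      { name: 'report.pdf', key: 'workspace/ws1/report.pdf', size: 48213, type: 'application/pdf' },
    ],
  }),
})
const { success, output } = await res.json()
```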
149  apps/sim/app/api/tools/mistral/parse/route.ts  Normal file
@@ -0,0 +1,149 @@
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { createLogger } from '@/lib/logs/console/logger'
import { getPresignedUrl } from '@/lib/uploads'
import { extractStorageKey } from '@/lib/uploads/file-utils'
import { getBaseUrl } from '@/lib/urls/utils'
import { generateRequestId } from '@/lib/utils'

export const dynamic = 'force-dynamic'

const logger = createLogger('MistralParseAPI')

const MistralParseSchema = z.object({
  apiKey: z.string().min(1, 'API key is required'),
  filePath: z.string().min(1, 'File path is required'),
  resultType: z.string().optional(),
  pages: z.array(z.number()).optional(),
  includeImageBase64: z.boolean().optional(),
  imageLimit: z.number().optional(),
  imageMinSize: z.number().optional(),
})

export async function POST(request: NextRequest) {
  const requestId = generateRequestId()

  try {
    const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

    if (!authResult.success) {
      logger.warn(`[${requestId}] Unauthorized Mistral parse attempt: ${authResult.error}`)
      return NextResponse.json(
        {
          success: false,
          error: authResult.error || 'Authentication required',
        },
        { status: 401 }
      )
    }

    const body = await request.json()
    const validatedData = MistralParseSchema.parse(body)

    logger.info(`[${requestId}] Mistral parse request`, {
      filePath: validatedData.filePath,
      isWorkspaceFile: validatedData.filePath.includes('/api/files/serve/'),
    })

    let fileUrl = validatedData.filePath

    // Check if it's an internal workspace file path
    if (validatedData.filePath?.includes('/api/files/serve/')) {
      try {
        const storageKey = extractStorageKey(validatedData.filePath)
        // Generate 5-minute presigned URL for external API access
        fileUrl = await getPresignedUrl(storageKey, 5 * 60)
        logger.info(`[${requestId}] Generated presigned URL for workspace file`)
      } catch (error) {
        logger.error(`[${requestId}] Failed to generate presigned URL:`, error)
        return NextResponse.json(
          {
            success: false,
            error: 'Failed to generate file access URL',
          },
          { status: 500 }
        )
      }
    } else if (validatedData.filePath?.startsWith('/')) {
      // Convert relative path to absolute URL
      const baseUrl = getBaseUrl()
      fileUrl = `${baseUrl}${validatedData.filePath}`
    }

    // Call Mistral API with the resolved URL
    const mistralBody: any = {
      model: 'mistral-ocr-latest',
      document: {
        type: 'document_url',
        document_url: fileUrl,
      },
    }

    if (validatedData.pages) {
      mistralBody.pages = validatedData.pages
    }
    if (validatedData.includeImageBase64 !== undefined) {
      mistralBody.include_image_base64 = validatedData.includeImageBase64
    }
    if (validatedData.imageLimit) {
      mistralBody.image_limit = validatedData.imageLimit
    }
    if (validatedData.imageMinSize) {
      mistralBody.image_min_size = validatedData.imageMinSize
    }

    const mistralResponse = await fetch('https://api.mistral.ai/v1/ocr', {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        Accept: 'application/json',
        Authorization: `Bearer ${validatedData.apiKey}`,
      },
      body: JSON.stringify(mistralBody),
    })

    if (!mistralResponse.ok) {
      const errorText = await mistralResponse.text()
      logger.error(`[${requestId}] Mistral API error:`, errorText)
      return NextResponse.json(
        {
          success: false,
          error: `Mistral API error: ${mistralResponse.statusText}`,
        },
        { status: mistralResponse.status }
      )
    }

    const mistralData = await mistralResponse.json()

    logger.info(`[${requestId}] Mistral parse successful`)

    return NextResponse.json({
      success: true,
      output: mistralData,
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
      return NextResponse.json(
        {
          success: false,
          error: 'Invalid request data',
          details: error.errors,
        },
        { status: 400 }
      )
    }

    logger.error(`[${requestId}] Error in Mistral parse:`, error)

    return NextResponse.json(
      {
        success: false,
        error: error instanceof Error ? error.message : 'Internal server error',
      },
      { status: 500 }
    )
  }
}
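To make the filePath resolution concrete, a hedged example request; the storage key and API key are placeholders. A `/api/files/serve/...` path is swapped for a 5-minute presigned URL server-side, other relative paths get the deployment's base URL prefixed, and absolute URLs pass through untouched:

```ts
await fetch('/api/tools/mistral/parse', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    apiKey: '<mistral-api-key>',
    filePath: '/api/files/serve/s3/workspace%2Fws1%2Fcontract.pdf', // workspace file
    pages: [0, 1],             // optional: OCR only the first two pages
    includeImageBase64: false, // optional: skip inline image payloads
  }),
})
```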
201  apps/sim/app/api/tools/onedrive/upload/route.ts  Normal file
@@ -0,0 +1,201 @@
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { createLogger } from '@/lib/logs/console/logger'
import { downloadFileFromStorage, processSingleFileToUserFile } from '@/lib/uploads/file-processing'
import { generateRequestId } from '@/lib/utils'

export const dynamic = 'force-dynamic'

const logger = createLogger('OneDriveUploadAPI')

const MICROSOFT_GRAPH_BASE = 'https://graph.microsoft.com/v1.0'

const OneDriveUploadSchema = z.object({
  accessToken: z.string().min(1, 'Access token is required'),
  fileName: z.string().min(1, 'File name is required'),
  file: z.any(), // UserFile object
  folderId: z.string().optional().nullable(),
})

export async function POST(request: NextRequest) {
  const requestId = generateRequestId()

  try {
    const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

    if (!authResult.success) {
      logger.warn(`[${requestId}] Unauthorized OneDrive upload attempt: ${authResult.error}`)
      return NextResponse.json(
        {
          success: false,
          error: authResult.error || 'Authentication required',
        },
        { status: 401 }
      )
    }

    logger.info(`[${requestId}] Authenticated OneDrive upload request via ${authResult.authType}`, {
      userId: authResult.userId,
    })

    const body = await request.json()
    const validatedData = OneDriveUploadSchema.parse(body)

    logger.info(`[${requestId}] Uploading file to OneDrive`, {
      fileName: validatedData.fileName,
      folderId: validatedData.folderId || 'root',
    })

    // Handle array or single file
    const rawFile = validatedData.file
    let fileToProcess

    if (Array.isArray(rawFile)) {
      if (rawFile.length === 0) {
        return NextResponse.json(
          {
            success: false,
            error: 'No file provided',
          },
          { status: 400 }
        )
      }
      fileToProcess = rawFile[0]
    } else {
      fileToProcess = rawFile
    }

    // Convert to UserFile format
    let userFile
    try {
      userFile = processSingleFileToUserFile(fileToProcess, requestId, logger)
    } catch (error) {
      return NextResponse.json(
        {
          success: false,
          error: error instanceof Error ? error.message : 'Failed to process file',
        },
        { status: 400 }
      )
    }

    logger.info(`[${requestId}] Downloading file from storage: ${userFile.key}`)

    let fileBuffer: Buffer

    try {
      fileBuffer = await downloadFileFromStorage(userFile, requestId, logger)
    } catch (error) {
      logger.error(`[${requestId}] Failed to download file from storage:`, error)
      return NextResponse.json(
        {
          success: false,
          error: `Failed to download file: ${error instanceof Error ? error.message : 'Unknown error'}`,
        },
        { status: 500 }
      )
    }

    const maxSize = 250 * 1024 * 1024 // 250MB
    if (fileBuffer.length > maxSize) {
      const sizeMB = (fileBuffer.length / (1024 * 1024)).toFixed(2)
      logger.warn(`[${requestId}] File too large: ${sizeMB}MB`)
      return NextResponse.json(
        {
          success: false,
          error: `File size (${sizeMB}MB) exceeds OneDrive's limit of 250MB for simple uploads. Use chunked upload for larger files.`,
        },
        { status: 400 }
      )
    }

    const fileName = validatedData.fileName || userFile.name

    let uploadUrl: string
    const folderId = validatedData.folderId?.trim()

    if (folderId && folderId !== '') {
      uploadUrl = `${MICROSOFT_GRAPH_BASE}/me/drive/items/${encodeURIComponent(folderId)}:/${encodeURIComponent(fileName)}:/content`
    } else {
      uploadUrl = `${MICROSOFT_GRAPH_BASE}/me/drive/root:/${encodeURIComponent(fileName)}:/content`
    }

    logger.info(`[${requestId}] Uploading to OneDrive: ${uploadUrl}`)

    const mimeType = userFile.type || 'application/octet-stream'

    const uploadResponse = await fetch(uploadUrl, {
      method: 'PUT',
      headers: {
        Authorization: `Bearer ${validatedData.accessToken}`,
        'Content-Type': mimeType,
      },
      body: new Uint8Array(fileBuffer),
    })

    if (!uploadResponse.ok) {
      const errorText = await uploadResponse.text()
      logger.error(`[${requestId}] OneDrive upload failed:`, {
        status: uploadResponse.status,
        statusText: uploadResponse.statusText,
        error: errorText,
      })
      return NextResponse.json(
        {
          success: false,
          error: `OneDrive upload failed: ${uploadResponse.statusText}`,
          details: errorText,
        },
        { status: uploadResponse.status }
      )
    }

    const fileData = await uploadResponse.json()

    logger.info(`[${requestId}] File uploaded successfully to OneDrive`, {
      fileId: fileData.id,
      fileName: fileData.name,
      size: fileData.size,
    })

    return NextResponse.json({
      success: true,
      output: {
        file: {
          id: fileData.id,
          name: fileData.name,
          mimeType: fileData.file?.mimeType || mimeType,
          webViewLink: fileData.webUrl,
          webContentLink: fileData['@microsoft.graph.downloadUrl'],
          size: fileData.size,
          createdTime: fileData.createdDateTime,
          modifiedTime: fileData.lastModifiedDateTime,
          parentReference: fileData.parentReference,
        },
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
      return NextResponse.json(
        {
          success: false,
          error: 'Invalid request data',
          details: error.errors,
        },
        { status: 400 }
      )
    }

    logger.error(`[${requestId}] Error uploading file to OneDrive:`, error)

    return NextResponse.json(
      {
        success: false,
        error: error instanceof Error ? error.message : 'Internal server error',
      },
      { status: 500 }
    )
  }
}
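The 250MB guard points callers at chunked uploads for bigger files. A rough sketch of that alternative via a Graph upload session, which this route does not implement; `fileName`, `accessToken`, and `fileBuffer` are assumed to be in scope:

```ts
// Sketch only: create an upload session, then PUT the buffer in chunks.
const session = await fetch(
  `https://graph.microsoft.com/v1.0/me/drive/root:/${encodeURIComponent(fileName)}:/createUploadSession`,
  { method: 'POST', headers: { Authorization: `Bearer ${accessToken}` } }
).then((r) => r.json())

const chunkSize = 10 * 1024 * 1024 // chunk sizes should be multiples of 320 KiB
for (let start = 0; start < fileBuffer.length; start += chunkSize) {
  const end = Math.min(start + chunkSize, fileBuffer.length)
  await fetch(session.uploadUrl, {
    method: 'PUT',
    headers: {
      'Content-Length': String(end - start),
      'Content-Range': `bytes ${start}-${end - 1}/${fileBuffer.length}`,
    },
    body: new Uint8Array(fileBuffer.subarray(start, end)),
  })
}
```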
185  apps/sim/app/api/tools/outlook/draft/route.ts  Normal file
@@ -0,0 +1,185 @@
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { createLogger } from '@/lib/logs/console/logger'
import { downloadFileFromStorage, processFilesToUserFiles } from '@/lib/uploads/file-processing'
import { generateRequestId } from '@/lib/utils'

export const dynamic = 'force-dynamic'

const logger = createLogger('OutlookDraftAPI')

const OutlookDraftSchema = z.object({
  accessToken: z.string().min(1, 'Access token is required'),
  to: z.string().min(1, 'Recipient email is required'),
  subject: z.string().min(1, 'Subject is required'),
  body: z.string().min(1, 'Email body is required'),
  cc: z.string().optional().nullable(),
  bcc: z.string().optional().nullable(),
  attachments: z.array(z.any()).optional().nullable(),
})

export async function POST(request: NextRequest) {
  const requestId = generateRequestId()

  try {
    const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

    if (!authResult.success) {
      logger.warn(`[${requestId}] Unauthorized Outlook draft attempt: ${authResult.error}`)
      return NextResponse.json(
        {
          success: false,
          error: authResult.error || 'Authentication required',
        },
        { status: 401 }
      )
    }

    logger.info(`[${requestId}] Authenticated Outlook draft request via ${authResult.authType}`, {
      userId: authResult.userId,
    })

    const body = await request.json()
    const validatedData = OutlookDraftSchema.parse(body)

    logger.info(`[${requestId}] Creating Outlook draft`, {
      to: validatedData.to,
      subject: validatedData.subject,
      hasAttachments: !!(validatedData.attachments && validatedData.attachments.length > 0),
      attachmentCount: validatedData.attachments?.length || 0,
    })

    const toRecipients = validatedData.to.split(',').map((email) => ({
      emailAddress: { address: email.trim() },
    }))

    const ccRecipients = validatedData.cc
      ? validatedData.cc.split(',').map((email) => ({
          emailAddress: { address: email.trim() },
        }))
      : undefined

    const bccRecipients = validatedData.bcc
      ? validatedData.bcc.split(',').map((email) => ({
          emailAddress: { address: email.trim() },
        }))
      : undefined

    const message: any = {
      subject: validatedData.subject,
      body: {
        contentType: 'Text',
        content: validatedData.body,
      },
      toRecipients,
    }

    if (ccRecipients) {
      message.ccRecipients = ccRecipients
    }

    if (bccRecipients) {
      message.bccRecipients = bccRecipients
    }

    if (validatedData.attachments && validatedData.attachments.length > 0) {
      const rawAttachments = validatedData.attachments
      logger.info(`[${requestId}] Processing ${rawAttachments.length} attachment(s)`)

      const attachments = processFilesToUserFiles(rawAttachments, requestId, logger)

      if (attachments.length > 0) {
        const totalSize = attachments.reduce((sum, file) => sum + file.size, 0)
        const maxSize = 4 * 1024 * 1024 // 4MB

        if (totalSize > maxSize) {
          const sizeMB = (totalSize / (1024 * 1024)).toFixed(2)
          return NextResponse.json(
            {
              success: false,
              error: `Total attachment size (${sizeMB}MB) exceeds Outlook's limit of 4MB per request`,
            },
            { status: 400 }
          )
        }

        const attachmentObjects = await Promise.all(
          attachments.map(async (file) => {
            try {
              logger.info(
                `[${requestId}] Downloading attachment: ${file.name} (${file.size} bytes)`
              )

              const buffer = await downloadFileFromStorage(file, requestId, logger)

              const base64Content = buffer.toString('base64')

              return {
                '@odata.type': '#microsoft.graph.fileAttachment',
                name: file.name,
                contentType: file.type || 'application/octet-stream',
                contentBytes: base64Content,
              }
            } catch (error) {
              logger.error(`[${requestId}] Failed to download attachment ${file.name}:`, error)
              throw new Error(
                `Failed to download attachment "${file.name}": ${error instanceof Error ? error.message : 'Unknown error'}`
              )
            }
          })
        )

        logger.info(`[${requestId}] Converted ${attachmentObjects.length} attachments to base64`)
        message.attachments = attachmentObjects
      }
    }

    const graphEndpoint = 'https://graph.microsoft.com/v1.0/me/messages'

    logger.info(`[${requestId}] Creating draft via Microsoft Graph API`)

    const graphResponse = await fetch(graphEndpoint, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        Authorization: `Bearer ${validatedData.accessToken}`,
      },
      body: JSON.stringify(message),
    })

    if (!graphResponse.ok) {
      const errorData = await graphResponse.json().catch(() => ({}))
      logger.error(`[${requestId}] Microsoft Graph API error:`, errorData)
      return NextResponse.json(
        {
          success: false,
          error: errorData.error?.message || 'Failed to create draft',
        },
        { status: graphResponse.status }
      )
    }

    const responseData = await graphResponse.json()
    logger.info(`[${requestId}] Draft created successfully, ID: ${responseData.id}`)

    return NextResponse.json({
      success: true,
      output: {
        message: 'Draft created successfully',
        messageId: responseData.id,
        subject: responseData.subject,
        attachmentCount: message.attachments?.length || 0,
      },
    })
  } catch (error) {
    logger.error(`[${requestId}] Error creating Outlook draft:`, error)
    return NextResponse.json(
      {
        success: false,
        error: error instanceof Error ? error.message : 'Unknown error occurred',
      },
      { status: 500 }
    )
  }
}
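For reference, one fileAttachment object as built above (name and bytes invented). Note the 4MB check runs on raw byte sizes, while base64 encoding grows the JSON payload by roughly a third, so the effective ceiling leaves some headroom under Graph's request limit:

```ts
// Illustrative shape only; contentBytes is the file's base64 encoding, truncated here.
const exampleAttachment = {
  '@odata.type': '#microsoft.graph.fileAttachment',
  name: 'invoice.pdf',
  contentType: 'application/pdf',
  contentBytes: 'JVBERi0xLjcKJeLjz9...',
}
```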
198  apps/sim/app/api/tools/outlook/send/route.ts  Normal file
@@ -0,0 +1,198 @@
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { createLogger } from '@/lib/logs/console/logger'
import { downloadFileFromStorage, processFilesToUserFiles } from '@/lib/uploads/file-processing'
import { generateRequestId } from '@/lib/utils'

export const dynamic = 'force-dynamic'

const logger = createLogger('OutlookSendAPI')

const OutlookSendSchema = z.object({
  accessToken: z.string().min(1, 'Access token is required'),
  to: z.string().min(1, 'Recipient email is required'),
  subject: z.string().min(1, 'Subject is required'),
  body: z.string().min(1, 'Email body is required'),
  cc: z.string().optional().nullable(),
  bcc: z.string().optional().nullable(),
  replyToMessageId: z.string().optional().nullable(),
  conversationId: z.string().optional().nullable(),
  attachments: z.array(z.any()).optional().nullable(),
})

export async function POST(request: NextRequest) {
  const requestId = generateRequestId()

  try {
    const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

    if (!authResult.success) {
      logger.warn(`[${requestId}] Unauthorized Outlook send attempt: ${authResult.error}`)
      return NextResponse.json(
        {
          success: false,
          error: authResult.error || 'Authentication required',
        },
        { status: 401 }
      )
    }

    logger.info(`[${requestId}] Authenticated Outlook send request via ${authResult.authType}`, {
      userId: authResult.userId,
    })

    const body = await request.json()
    const validatedData = OutlookSendSchema.parse(body)

    logger.info(`[${requestId}] Sending Outlook email`, {
      to: validatedData.to,
      subject: validatedData.subject,
      hasAttachments: !!(validatedData.attachments && validatedData.attachments.length > 0),
      attachmentCount: validatedData.attachments?.length || 0,
    })

    const toRecipients = validatedData.to.split(',').map((email) => ({
      emailAddress: { address: email.trim() },
    }))

    const ccRecipients = validatedData.cc
      ? validatedData.cc.split(',').map((email) => ({
          emailAddress: { address: email.trim() },
        }))
      : undefined

    const bccRecipients = validatedData.bcc
      ? validatedData.bcc.split(',').map((email) => ({
          emailAddress: { address: email.trim() },
        }))
      : undefined

    const message: any = {
      subject: validatedData.subject,
      body: {
        contentType: 'Text',
        content: validatedData.body,
      },
      toRecipients,
    }

    if (ccRecipients) {
      message.ccRecipients = ccRecipients
    }

    if (bccRecipients) {
      message.bccRecipients = bccRecipients
    }

    if (validatedData.attachments && validatedData.attachments.length > 0) {
      const rawAttachments = validatedData.attachments
      logger.info(`[${requestId}] Processing ${rawAttachments.length} attachment(s)`)

      const attachments = processFilesToUserFiles(rawAttachments, requestId, logger)

      if (attachments.length > 0) {
        const totalSize = attachments.reduce((sum, file) => sum + file.size, 0)
        const maxSize = 4 * 1024 * 1024 // 4MB

        if (totalSize > maxSize) {
          const sizeMB = (totalSize / (1024 * 1024)).toFixed(2)
          return NextResponse.json(
            {
              success: false,
              error: `Total attachment size (${sizeMB}MB) exceeds Outlook's limit of 4MB per request`,
            },
            { status: 400 }
          )
        }

        const attachmentObjects = await Promise.all(
          attachments.map(async (file) => {
            try {
              logger.info(
                `[${requestId}] Downloading attachment: ${file.name} (${file.size} bytes)`
              )

              const buffer = await downloadFileFromStorage(file, requestId, logger)

              const base64Content = buffer.toString('base64')

              return {
                '@odata.type': '#microsoft.graph.fileAttachment',
                name: file.name,
                contentType: file.type || 'application/octet-stream',
                contentBytes: base64Content,
              }
            } catch (error) {
              logger.error(`[${requestId}] Failed to download attachment ${file.name}:`, error)
              throw new Error(
                `Failed to download attachment "${file.name}": ${error instanceof Error ? error.message : 'Unknown error'}`
              )
            }
          })
        )

        logger.info(`[${requestId}] Converted ${attachmentObjects.length} attachments to base64`)
        message.attachments = attachmentObjects
      }
    }

    const graphEndpoint = validatedData.replyToMessageId
      ? `https://graph.microsoft.com/v1.0/me/messages/${validatedData.replyToMessageId}/reply`
      : 'https://graph.microsoft.com/v1.0/me/sendMail'

    logger.info(`[${requestId}] Sending to Microsoft Graph API: ${graphEndpoint}`)

    const graphResponse = await fetch(graphEndpoint, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        Authorization: `Bearer ${validatedData.accessToken}`,
      },
      body: JSON.stringify(
        validatedData.replyToMessageId
          ? {
              comment: validatedData.body,
              message: message,
            }
          : {
              message: message,
              saveToSentItems: true,
            }
      ),
    })

    if (!graphResponse.ok) {
      const errorData = await graphResponse.json().catch(() => ({}))
      logger.error(`[${requestId}] Microsoft Graph API error:`, errorData)
      return NextResponse.json(
        {
          success: false,
          error: errorData.error?.message || 'Failed to send email',
        },
        { status: graphResponse.status }
      )
    }

    logger.info(`[${requestId}] Email sent successfully`)

    return NextResponse.json({
      success: true,
      output: {
        message: 'Email sent successfully',
        status: 'sent',
        timestamp: new Date().toISOString(),
        attachmentCount: message.attachments?.length || 0,
      },
    })
  } catch (error) {
    logger.error(`[${requestId}] Error sending Outlook email:`, error)
    return NextResponse.json(
      {
        success: false,
        error: error instanceof Error ? error.message : 'Unknown error occurred',
      },
      { status: 500 }
    )
  }
}
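A minimal sketch of calling this route as a reply; all IDs and addresses are placeholders. Supplying replyToMessageId routes the request to `/me/messages/{id}/reply`, while omitting it sends fresh mail through `/me/sendMail`:

```ts
await fetch('/api/tools/outlook/send', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    accessToken: '<graph-access-token>',
    to: 'alice@example.com, bob@example.com', // comma-separated; split server-side
    subject: 'Re: Q3 numbers',
    body: 'Updated figures below.',
    replyToMessageId: 'AAMkAGI2...', // placeholder Graph message ID
  }),
})
```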
115  apps/sim/app/api/tools/s3/copy-object/route.ts  Normal file
@@ -0,0 +1,115 @@
import { CopyObjectCommand, type ObjectCannedACL, S3Client } from '@aws-sdk/client-s3'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { createLogger } from '@/lib/logs/console/logger'
import { generateRequestId } from '@/lib/utils'

export const dynamic = 'force-dynamic'

const logger = createLogger('S3CopyObjectAPI')

const S3CopyObjectSchema = z.object({
  accessKeyId: z.string().min(1, 'Access Key ID is required'),
  secretAccessKey: z.string().min(1, 'Secret Access Key is required'),
  region: z.string().min(1, 'Region is required'),
  sourceBucket: z.string().min(1, 'Source bucket name is required'),
  sourceKey: z.string().min(1, 'Source object key is required'),
  destinationBucket: z.string().min(1, 'Destination bucket name is required'),
  destinationKey: z.string().min(1, 'Destination object key is required'),
  acl: z.string().optional().nullable(),
})

export async function POST(request: NextRequest) {
  const requestId = generateRequestId()

  try {
    const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

    if (!authResult.success) {
      logger.warn(`[${requestId}] Unauthorized S3 copy object attempt: ${authResult.error}`)
      return NextResponse.json(
        {
          success: false,
          error: authResult.error || 'Authentication required',
        },
        { status: 401 }
      )
    }

    logger.info(`[${requestId}] Authenticated S3 copy object request via ${authResult.authType}`, {
      userId: authResult.userId,
    })

    const body = await request.json()
    const validatedData = S3CopyObjectSchema.parse(body)

    logger.info(`[${requestId}] Copying S3 object`, {
      source: `${validatedData.sourceBucket}/${validatedData.sourceKey}`,
      destination: `${validatedData.destinationBucket}/${validatedData.destinationKey}`,
    })

    // Initialize S3 client
    const s3Client = new S3Client({
      region: validatedData.region,
      credentials: {
        accessKeyId: validatedData.accessKeyId,
        secretAccessKey: validatedData.secretAccessKey,
      },
    })

    // Copy object (properly encode the source key for CopySource parameter)
    const encodedSourceKey = validatedData.sourceKey.split('/').map(encodeURIComponent).join('/')
    const copySource = `${validatedData.sourceBucket}/${encodedSourceKey}`
    const copyCommand = new CopyObjectCommand({
      Bucket: validatedData.destinationBucket,
      Key: validatedData.destinationKey,
      CopySource: copySource,
      ACL: validatedData.acl as ObjectCannedACL | undefined,
    })

    const result = await s3Client.send(copyCommand)

    logger.info(`[${requestId}] Object copied successfully`, {
      source: copySource,
      destination: `${validatedData.destinationBucket}/${validatedData.destinationKey}`,
      etag: result.CopyObjectResult?.ETag,
    })

    // Generate public URL for destination (properly encode the destination key)
    const encodedDestKey = validatedData.destinationKey.split('/').map(encodeURIComponent).join('/')
    const url = `https://${validatedData.destinationBucket}.s3.${validatedData.region}.amazonaws.com/${encodedDestKey}`

    return NextResponse.json({
      success: true,
      output: {
        url,
        copySourceVersionId: result.CopySourceVersionId,
        versionId: result.VersionId,
        etag: result.CopyObjectResult?.ETag,
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
      return NextResponse.json(
        {
          success: false,
          error: 'Invalid request data',
          details: error.errors,
        },
        { status: 400 }
      )
    }

    logger.error(`[${requestId}] Error copying S3 object:`, error)

    return NextResponse.json(
      {
        success: false,
        error: error instanceof Error ? error.message : 'Internal server error',
      },
      { status: 500 }
    )
  }
}
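The per-segment encoding matters once a source key contains spaces or non-ASCII characters; a quick illustration with invented names:

```ts
const sourceKey = 'reports/Q3 2024/summary.pdf'
const encodedSourceKey = sourceKey.split('/').map(encodeURIComponent).join('/')
// -> 'reports/Q3%202024/summary.pdf' — slashes between segments stay literal
const copySource = `my-source-bucket/${encodedSourceKey}`
```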
106  apps/sim/app/api/tools/s3/delete-object/route.ts  Normal file
@@ -0,0 +1,106 @@
import { DeleteObjectCommand, S3Client } from '@aws-sdk/client-s3'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { createLogger } from '@/lib/logs/console/logger'
import { generateRequestId } from '@/lib/utils'

export const dynamic = 'force-dynamic'

const logger = createLogger('S3DeleteObjectAPI')

const S3DeleteObjectSchema = z.object({
  accessKeyId: z.string().min(1, 'Access Key ID is required'),
  secretAccessKey: z.string().min(1, 'Secret Access Key is required'),
  region: z.string().min(1, 'Region is required'),
  bucketName: z.string().min(1, 'Bucket name is required'),
  objectKey: z.string().min(1, 'Object key is required'),
})

export async function POST(request: NextRequest) {
  const requestId = generateRequestId()

  try {
    const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

    if (!authResult.success) {
      logger.warn(`[${requestId}] Unauthorized S3 delete object attempt: ${authResult.error}`)
      return NextResponse.json(
        {
          success: false,
          error: authResult.error || 'Authentication required',
        },
        { status: 401 }
      )
    }

    logger.info(
      `[${requestId}] Authenticated S3 delete object request via ${authResult.authType}`,
      {
        userId: authResult.userId,
      }
    )

    const body = await request.json()
    const validatedData = S3DeleteObjectSchema.parse(body)

    logger.info(`[${requestId}] Deleting S3 object`, {
      bucket: validatedData.bucketName,
      key: validatedData.objectKey,
    })

    // Initialize S3 client
    const s3Client = new S3Client({
      region: validatedData.region,
      credentials: {
        accessKeyId: validatedData.accessKeyId,
        secretAccessKey: validatedData.secretAccessKey,
      },
    })

    // Delete object
    const deleteCommand = new DeleteObjectCommand({
      Bucket: validatedData.bucketName,
      Key: validatedData.objectKey,
    })

    const result = await s3Client.send(deleteCommand)

    logger.info(`[${requestId}] Object deleted successfully`, {
      bucket: validatedData.bucketName,
      key: validatedData.objectKey,
      deleteMarker: result.DeleteMarker,
    })

    return NextResponse.json({
      success: true,
      output: {
        key: validatedData.objectKey,
        deleteMarker: result.DeleteMarker,
        versionId: result.VersionId,
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
      return NextResponse.json(
        {
          success: false,
          error: 'Invalid request data',
          details: error.errors,
        },
        { status: 400 }
      )
    }

    logger.error(`[${requestId}] Error deleting S3 object:`, error)

    return NextResponse.json(
      {
        success: false,
        error: error instanceof Error ? error.message : 'Internal server error',
      },
      { status: 500 }
    )
  }
}
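One caveat worth knowing: on a bucket with versioning enabled, a DeleteObject call without a VersionId typically creates a delete marker rather than erasing data, which is why the route surfaces deleteMarker and versionId. A sketch of reading that from the route's response (`res` is assumed to hold the fetch result):

```ts
const { output } = await res.json()
if (output.deleteMarker) {
  // Versioned bucket: older object versions still exist behind the marker.
  console.log(`Delete marker created, versionId: ${output.versionId}`)
}
```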
116  apps/sim/app/api/tools/s3/list-objects/route.ts  Normal file
@@ -0,0 +1,116 @@
import { ListObjectsV2Command, S3Client } from '@aws-sdk/client-s3'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { createLogger } from '@/lib/logs/console/logger'
import { generateRequestId } from '@/lib/utils'

export const dynamic = 'force-dynamic'

const logger = createLogger('S3ListObjectsAPI')

const S3ListObjectsSchema = z.object({
  accessKeyId: z.string().min(1, 'Access Key ID is required'),
  secretAccessKey: z.string().min(1, 'Secret Access Key is required'),
  region: z.string().min(1, 'Region is required'),
  bucketName: z.string().min(1, 'Bucket name is required'),
  prefix: z.string().optional().nullable(),
  maxKeys: z.number().optional().nullable(),
  continuationToken: z.string().optional().nullable(),
})

export async function POST(request: NextRequest) {
  const requestId = generateRequestId()

  try {
    const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

    if (!authResult.success) {
      logger.warn(`[${requestId}] Unauthorized S3 list objects attempt: ${authResult.error}`)
      return NextResponse.json(
        {
          success: false,
          error: authResult.error || 'Authentication required',
        },
        { status: 401 }
      )
    }

    logger.info(`[${requestId}] Authenticated S3 list objects request via ${authResult.authType}`, {
      userId: authResult.userId,
    })

    const body = await request.json()
    const validatedData = S3ListObjectsSchema.parse(body)

    logger.info(`[${requestId}] Listing S3 objects`, {
      bucket: validatedData.bucketName,
      prefix: validatedData.prefix || '(none)',
      maxKeys: validatedData.maxKeys || 1000,
    })

    // Initialize S3 client
    const s3Client = new S3Client({
      region: validatedData.region,
      credentials: {
        accessKeyId: validatedData.accessKeyId,
        secretAccessKey: validatedData.secretAccessKey,
      },
    })

    // List objects
    const listCommand = new ListObjectsV2Command({
      Bucket: validatedData.bucketName,
      Prefix: validatedData.prefix || undefined,
      MaxKeys: validatedData.maxKeys || undefined,
      ContinuationToken: validatedData.continuationToken || undefined,
    })

    const result = await s3Client.send(listCommand)

    const objects = (result.Contents || []).map((obj) => ({
      key: obj.Key || '',
      size: obj.Size || 0,
      lastModified: obj.LastModified?.toISOString() || '',
      etag: obj.ETag || '',
    }))

    logger.info(`[${requestId}] Listed ${objects.length} objects`, {
      bucket: validatedData.bucketName,
      isTruncated: result.IsTruncated,
    })

    return NextResponse.json({
      success: true,
      output: {
        objects,
        isTruncated: result.IsTruncated,
        nextContinuationToken: result.NextContinuationToken,
        keyCount: result.KeyCount,
        prefix: validatedData.prefix,
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
      return NextResponse.json(
        {
          success: false,
          error: 'Invalid request data',
          details: error.errors,
        },
        { status: 400 }
      )
    }

    logger.error(`[${requestId}] Error listing S3 objects:`, error)

    return NextResponse.json(
      {
        success: false,
        error: error instanceof Error ? error.message : 'Internal server error',
      },
      { status: 500 }
    )
  }
}
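ListObjectsV2 returns at most 1,000 keys per call, so exhaustive listings page with the continuation token. A sketch of draining a prefix through this route; `credentials` stands in for an object holding accessKeyId, secretAccessKey, and region:

```ts
let continuationToken: string | undefined
const allKeys: string[] = []
do {
  const res = await fetch('/api/tools/s3/list-objects', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ ...credentials, bucketName: 'my-bucket', prefix: 'logs/', continuationToken }),
  })
  const { output } = await res.json()
  allKeys.push(...output.objects.map((o: { key: string }) => o.key))
  continuationToken = output.isTruncated ? output.nextContinuationToken : undefined
} while (continuationToken)
```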
153  apps/sim/app/api/tools/s3/put-object/route.ts  Normal file
@@ -0,0 +1,153 @@
import { type ObjectCannedACL, PutObjectCommand, S3Client } from '@aws-sdk/client-s3'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { createLogger } from '@/lib/logs/console/logger'
import { downloadFileFromStorage, processSingleFileToUserFile } from '@/lib/uploads/file-processing'
import { generateRequestId } from '@/lib/utils'

export const dynamic = 'force-dynamic'

const logger = createLogger('S3PutObjectAPI')

const S3PutObjectSchema = z.object({
  accessKeyId: z.string().min(1, 'Access Key ID is required'),
  secretAccessKey: z.string().min(1, 'Secret Access Key is required'),
  region: z.string().min(1, 'Region is required'),
  bucketName: z.string().min(1, 'Bucket name is required'),
  objectKey: z.string().min(1, 'Object key is required'),
  file: z.any().optional().nullable(),
  content: z.string().optional().nullable(),
  contentType: z.string().optional().nullable(),
  acl: z.string().optional().nullable(),
})

export async function POST(request: NextRequest) {
  const requestId = generateRequestId()

  try {
    const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

    if (!authResult.success) {
      logger.warn(`[${requestId}] Unauthorized S3 put object attempt: ${authResult.error}`)
      return NextResponse.json(
        {
          success: false,
          error: authResult.error || 'Authentication required',
        },
        { status: 401 }
      )
    }

    logger.info(`[${requestId}] Authenticated S3 put object request via ${authResult.authType}`, {
      userId: authResult.userId,
    })

    const body = await request.json()
    const validatedData = S3PutObjectSchema.parse(body)

    logger.info(`[${requestId}] Uploading to S3`, {
      bucket: validatedData.bucketName,
      key: validatedData.objectKey,
      hasFile: !!validatedData.file,
      hasContent: !!validatedData.content,
    })

    const s3Client = new S3Client({
      region: validatedData.region,
      credentials: {
        accessKeyId: validatedData.accessKeyId,
        secretAccessKey: validatedData.secretAccessKey,
      },
    })

    let uploadBody: Buffer | string
    let uploadContentType: string | undefined

    if (validatedData.file) {
      const rawFile = validatedData.file
      logger.info(`[${requestId}] Processing file upload: ${rawFile.name}`)

      let userFile
      try {
        userFile = processSingleFileToUserFile(rawFile, requestId, logger)
      } catch (error) {
        return NextResponse.json(
          {
            success: false,
            error: error instanceof Error ? error.message : 'Failed to process file',
          },
          { status: 400 }
        )
      }

      const buffer = await downloadFileFromStorage(userFile, requestId, logger)

      uploadBody = buffer
      uploadContentType = validatedData.contentType || userFile.type || 'application/octet-stream'
    } else if (validatedData.content) {
      uploadBody = Buffer.from(validatedData.content, 'utf-8')
      uploadContentType = validatedData.contentType || 'text/plain'
    } else {
      return NextResponse.json(
        {
          success: false,
          error: 'Either file or content must be provided',
        },
        { status: 400 }
      )
    }

    const putCommand = new PutObjectCommand({
      Bucket: validatedData.bucketName,
      Key: validatedData.objectKey,
      Body: uploadBody,
      ContentType: uploadContentType,
      ACL: validatedData.acl as ObjectCannedACL | undefined,
    })

    const result = await s3Client.send(putCommand)

    logger.info(`[${requestId}] File uploaded successfully`, {
      etag: result.ETag,
      bucket: validatedData.bucketName,
      key: validatedData.objectKey,
    })

    const encodedKey = validatedData.objectKey.split('/').map(encodeURIComponent).join('/')
    const url = `https://${validatedData.bucketName}.s3.${validatedData.region}.amazonaws.com/${encodedKey}`

    return NextResponse.json({
      success: true,
      output: {
        url,
        etag: result.ETag,
        location: url,
        key: validatedData.objectKey,
        bucket: validatedData.bucketName,
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
      return NextResponse.json(
        {
          success: false,
          error: 'Invalid request data',
          details: error.errors,
        },
        { status: 400 }
      )
    }

    logger.error(`[${requestId}] Error uploading to S3:`, error)

    return NextResponse.json(
      {
        success: false,
        error: error instanceof Error ? error.message : 'Internal server error',
      },
      { status: 500 }
    )
  }
}
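The route accepts either a stored file reference or inline text. A sketch of the text-content branch with placeholder credentials; contentType falls back to text/plain when omitted:

```ts
await fetch('/api/tools/s3/put-object', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    accessKeyId: '<aws-access-key-id>',
    secretAccessKey: '<aws-secret-access-key>',
    region: 'us-east-1',
    bucketName: 'my-bucket',
    objectKey: 'notes/hello.txt',
    content: 'Hello from Sim', // uploaded as UTF-8
  }),
})
```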
216  apps/sim/app/api/tools/sharepoint/upload/route.ts  Normal file
@@ -0,0 +1,216 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { downloadFileFromStorage, processFilesToUserFiles } from '@/lib/uploads/file-processing'
|
||||
import { generateRequestId } from '@/lib/utils'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
const logger = createLogger('SharepointUploadAPI')
|
||||
|
||||
const SharepointUploadSchema = z.object({
|
||||
accessToken: z.string().min(1, 'Access token is required'),
|
||||
siteId: z.string().default('root'),
|
||||
driveId: z.string().optional().nullable(),
|
||||
folderPath: z.string().optional().nullable(),
|
||||
fileName: z.string().optional().nullable(),
|
||||
files: z.array(z.any()).optional().nullable(),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = generateRequestId()
|
||||
|
||||
try {
|
||||
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })
|
||||
|
||||
if (!authResult.success) {
|
||||
logger.warn(`[${requestId}] Unauthorized SharePoint upload attempt: ${authResult.error}`)
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: authResult.error || 'Authentication required',
|
||||
},
|
||||
{ status: 401 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Authenticated SharePoint upload request via ${authResult.authType}`,
|
||||
{
|
||||
userId: authResult.userId,
|
||||
}
|
||||
)
|
||||
|
||||
const body = await request.json()
|
||||
const validatedData = SharepointUploadSchema.parse(body)
|
||||
|
||||
logger.info(`[${requestId}] Uploading files to SharePoint`, {
|
||||
siteId: validatedData.siteId,
|
||||
driveId: validatedData.driveId,
|
||||
folderPath: validatedData.folderPath,
|
||||
hasFiles: !!(validatedData.files && validatedData.files.length > 0),
|
||||
fileCount: validatedData.files?.length || 0,
|
||||
})
|
||||
|
||||
if (!validatedData.files || validatedData.files.length === 0) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: 'At least one file is required for upload',
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const userFiles = processFilesToUserFiles(validatedData.files, requestId, logger)
|
||||
|
||||
if (userFiles.length === 0) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: 'No valid files to upload',
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
let effectiveDriveId = validatedData.driveId
|
||||
if (!effectiveDriveId) {
|
||||
      logger.info(`[${requestId}] No driveId provided, fetching default drive for site`)
      const driveResponse = await fetch(
        `https://graph.microsoft.com/v1.0/sites/${validatedData.siteId}/drive`,
        {
          headers: {
            Authorization: `Bearer ${validatedData.accessToken}`,
            Accept: 'application/json',
          },
        }
      )

      if (!driveResponse.ok) {
        const errorData = await driveResponse.json().catch(() => ({}))
        logger.error(`[${requestId}] Failed to get default drive:`, errorData)
        return NextResponse.json(
          {
            success: false,
            error: errorData.error?.message || 'Failed to get default document library',
          },
          { status: driveResponse.status }
        )
      }

      const driveData = await driveResponse.json()
      effectiveDriveId = driveData.id
      logger.info(`[${requestId}] Using default drive: ${effectiveDriveId}`)
    }

    const uploadedFiles: any[] = []

    for (const userFile of userFiles) {
      logger.info(`[${requestId}] Uploading file: ${userFile.name}`)

      const buffer = await downloadFileFromStorage(userFile, requestId, logger)

      const fileName = validatedData.fileName || userFile.name
      const folderPath = validatedData.folderPath?.trim() || ''

      const fileSizeMB = buffer.length / (1024 * 1024)

      if (fileSizeMB > 250) {
        logger.warn(
          `[${requestId}] File ${fileName} is ${fileSizeMB.toFixed(2)}MB, exceeds 250MB limit`
        )
        continue
      }

      let uploadPath = ''
      if (folderPath) {
        const normalizedPath = folderPath.startsWith('/') ? folderPath : `/${folderPath}`
        const cleanPath = normalizedPath.endsWith('/')
          ? normalizedPath.slice(0, -1)
          : normalizedPath
        uploadPath = `${cleanPath}/${fileName}`
      } else {
        uploadPath = `/${fileName}`
      }

      const encodedPath = uploadPath
        .split('/')
        .map((segment) => (segment ? encodeURIComponent(segment) : ''))
        .join('/')

      const uploadUrl = `https://graph.microsoft.com/v1.0/sites/${validatedData.siteId}/drives/${effectiveDriveId}/root:${encodedPath}:/content`

      logger.info(`[${requestId}] Uploading to: ${uploadUrl}`)

      const uploadResponse = await fetch(uploadUrl, {
        method: 'PUT',
        headers: {
          Authorization: `Bearer ${validatedData.accessToken}`,
          'Content-Type': userFile.type || 'application/octet-stream',
        },
        body: new Uint8Array(buffer),
      })

      if (!uploadResponse.ok) {
        const errorData = await uploadResponse.json().catch(() => ({}))
        logger.error(`[${requestId}] Failed to upload file ${fileName}:`, errorData)

        if (uploadResponse.status === 409) {
          // Conflict: a file with this name already exists; it is skipped, not replaced
          logger.warn(`[${requestId}] File ${fileName} already exists, skipping it`)
          continue
        }

        return NextResponse.json(
          {
            success: false,
            error: errorData.error?.message || `Failed to upload file: ${fileName}`,
          },
          { status: uploadResponse.status }
        )
      }

      const uploadData = await uploadResponse.json()
      logger.info(`[${requestId}] File uploaded successfully: ${fileName}`)

      uploadedFiles.push({
        id: uploadData.id,
        name: uploadData.name,
        webUrl: uploadData.webUrl,
        size: uploadData.size,
        createdDateTime: uploadData.createdDateTime,
        lastModifiedDateTime: uploadData.lastModifiedDateTime,
      })
    }

    if (uploadedFiles.length === 0) {
      return NextResponse.json(
        {
          success: false,
          error: 'No files were uploaded successfully',
        },
        { status: 400 }
      )
    }

    logger.info(`[${requestId}] Successfully uploaded ${uploadedFiles.length} file(s)`)

    return NextResponse.json({
      success: true,
      output: {
        uploadedFiles,
        fileCount: uploadedFiles.length,
      },
    })
  } catch (error) {
    logger.error(`[${requestId}] Error uploading files to SharePoint:`, error)
    return NextResponse.json(
      {
        success: false,
        error: error instanceof Error ? error.message : 'Unknown error occurred',
      },
      { status: 500 }
    )
  }
}
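The handler above resolves a default drive, then drives every upload through a single PUT to Graph's path-addressed content endpoint, encoding each path segment separately. As a standalone illustration, a minimal sketch of that call; `siteId`, `driveId`, `token`, and `buffer` are hypothetical placeholders, not values from the commit:

// Sketch: upload one buffer to SharePoint via Microsoft Graph, mirroring the
// route's per-segment path encoding. Run inside an async function.
const segments = '/reports/Q3 summary.pdf'
  .split('/')
  .map((s) => (s ? encodeURIComponent(s) : ''))
const url = `https://graph.microsoft.com/v1.0/sites/${siteId}/drives/${driveId}/root:${segments.join('/')}:/content`
const res = await fetch(url, {
  method: 'PUT',
  headers: { Authorization: `Bearer ${token}`, 'Content-Type': 'application/pdf' },
  body: new Uint8Array(buffer), // same Buffer-to-body conversion the route uses
})
if (!res.ok) throw new Error(`Graph upload failed: ${res.status}`)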
apps/sim/app/api/tools/slack/send-message/route.ts (new file, 227 lines)
@@ -0,0 +1,227 @@
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { createLogger } from '@/lib/logs/console/logger'
import { downloadFileFromStorage, processFilesToUserFiles } from '@/lib/uploads/file-processing'
import { generateRequestId } from '@/lib/utils'

export const dynamic = 'force-dynamic'

const logger = createLogger('SlackSendMessageAPI')

const SlackSendMessageSchema = z.object({
  accessToken: z.string().min(1, 'Access token is required'),
  channel: z.string().min(1, 'Channel is required'),
  text: z.string().min(1, 'Message text is required'),
  files: z.array(z.any()).optional().nullable(),
})

export async function POST(request: NextRequest) {
  const requestId = generateRequestId()

  try {
    const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

    if (!authResult.success) {
      logger.warn(`[${requestId}] Unauthorized Slack send attempt: ${authResult.error}`)
      return NextResponse.json(
        {
          success: false,
          error: authResult.error || 'Authentication required',
        },
        { status: 401 }
      )
    }

    logger.info(`[${requestId}] Authenticated Slack send request via ${authResult.authType}`, {
      userId: authResult.userId,
    })

    const body = await request.json()
    const validatedData = SlackSendMessageSchema.parse(body)

    logger.info(`[${requestId}] Sending Slack message`, {
      channel: validatedData.channel,
      hasFiles: !!(validatedData.files && validatedData.files.length > 0),
      fileCount: validatedData.files?.length || 0,
    })

    if (!validatedData.files || validatedData.files.length === 0) {
      logger.info(`[${requestId}] No files, using chat.postMessage`)

      const response = await fetch('https://slack.com/api/chat.postMessage', {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
          Authorization: `Bearer ${validatedData.accessToken}`,
        },
        body: JSON.stringify({
          channel: validatedData.channel,
          text: validatedData.text,
        }),
      })

      const data = await response.json()

      if (!data.ok) {
        logger.error(`[${requestId}] Slack API error:`, data.error)
        return NextResponse.json(
          {
            success: false,
            error: data.error || 'Failed to send message',
          },
          { status: 400 }
        )
      }

      logger.info(`[${requestId}] Message sent successfully`)
      return NextResponse.json({
        success: true,
        output: {
          ts: data.ts,
          channel: data.channel,
        },
      })
    }

    logger.info(`[${requestId}] Processing ${validatedData.files.length} file(s)`)

    const userFiles = processFilesToUserFiles(validatedData.files, requestId, logger)

    if (userFiles.length === 0) {
      logger.warn(`[${requestId}] No valid files to upload`)
      const response = await fetch('https://slack.com/api/chat.postMessage', {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
          Authorization: `Bearer ${validatedData.accessToken}`,
        },
        body: JSON.stringify({
          channel: validatedData.channel,
          text: validatedData.text,
        }),
      })

      const data = await response.json()
      return NextResponse.json({
        success: true,
        output: {
          ts: data.ts,
          channel: data.channel,
        },
      })
    }

    const uploadedFileIds: string[] = []

    for (const userFile of userFiles) {
      logger.info(`[${requestId}] Uploading file: ${userFile.name}`)

      const buffer = await downloadFileFromStorage(userFile, requestId, logger)

      const getUrlResponse = await fetch('https://slack.com/api/files.getUploadURLExternal', {
        method: 'POST',
        headers: {
          'Content-Type': 'application/x-www-form-urlencoded',
          Authorization: `Bearer ${validatedData.accessToken}`,
        },
        body: new URLSearchParams({
          filename: userFile.name,
          length: buffer.length.toString(),
        }),
      })

      const urlData = await getUrlResponse.json()

      if (!urlData.ok) {
        logger.error(`[${requestId}] Failed to get upload URL:`, urlData.error)
        continue
      }

      logger.info(`[${requestId}] Got upload URL for ${userFile.name}, file_id: ${urlData.file_id}`)

      const uploadResponse = await fetch(urlData.upload_url, {
        method: 'POST',
        body: new Uint8Array(buffer),
      })

      if (!uploadResponse.ok) {
        logger.error(`[${requestId}] Failed to upload file data: ${uploadResponse.status}`)
        continue
      }

      logger.info(`[${requestId}] File data uploaded successfully`)
      uploadedFileIds.push(urlData.file_id)
    }

    if (uploadedFileIds.length === 0) {
      logger.warn(`[${requestId}] No files uploaded successfully, sending text-only message`)
      const response = await fetch('https://slack.com/api/chat.postMessage', {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
          Authorization: `Bearer ${validatedData.accessToken}`,
        },
        body: JSON.stringify({
          channel: validatedData.channel,
          text: validatedData.text,
        }),
      })

      const data = await response.json()
      return NextResponse.json({
        success: true,
        output: {
          ts: data.ts,
          channel: data.channel,
        },
      })
    }

    const completeResponse = await fetch('https://slack.com/api/files.completeUploadExternal', {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        Authorization: `Bearer ${validatedData.accessToken}`,
      },
      body: JSON.stringify({
        files: uploadedFileIds.map((id) => ({ id })),
        channel_id: validatedData.channel,
        initial_comment: validatedData.text,
      }),
    })

    const completeData = await completeResponse.json()

    if (!completeData.ok) {
      logger.error(`[${requestId}] Failed to complete upload:`, completeData.error)
      return NextResponse.json(
        {
          success: false,
          error: completeData.error || 'Failed to complete file upload',
        },
        { status: 400 }
      )
    }

    logger.info(`[${requestId}] Files uploaded and shared successfully`)

    return NextResponse.json({
      success: true,
      output: {
        ts: completeData.files?.[0]?.created || Date.now() / 1000,
        channel: validatedData.channel,
        fileCount: uploadedFileIds.length,
      },
    })
  } catch (error) {
    logger.error(`[${requestId}] Error sending Slack message:`, error)
    return NextResponse.json(
      {
        success: false,
        error: error instanceof Error ? error.message : 'Unknown error occurred',
      },
      { status: 500 }
    )
  }
}
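For orientation, a minimal sketch of invoking this route from a client. The endpoint path follows from the file location above; `slackBotToken` and `someUserFile` are illustrative placeholders, and the request is assumed to carry whatever session cookie or API key checkHybridAuth accepts:

// Sketch: send a Slack message with one attached workflow file (async context).
const res = await fetch('/api/tools/slack/send-message', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    accessToken: slackBotToken, // placeholder for a Slack bot token
    channel: 'C0123456789',
    text: 'Here is the report',
    files: [someUserFile], // optional; same file shape the workflow passes around
  }),
})
const { success, output } = await res.json()
if (success) console.log(`Posted at ts=${output.ts} in ${output.channel}`)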
apps/sim/app/api/tools/telegram/send-document/route.ts (new file, 149 lines)
@@ -0,0 +1,149 @@
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { createLogger } from '@/lib/logs/console/logger'
import { downloadFileFromStorage, processFilesToUserFiles } from '@/lib/uploads/file-processing'
import { generateRequestId } from '@/lib/utils'
import { convertMarkdownToHTML } from '@/tools/telegram/utils'

export const dynamic = 'force-dynamic'

const logger = createLogger('TelegramSendDocumentAPI')

const TelegramSendDocumentSchema = z.object({
  botToken: z.string().min(1, 'Bot token is required'),
  chatId: z.string().min(1, 'Chat ID is required'),
  files: z.array(z.any()).optional().nullable(),
  caption: z.string().optional().nullable(),
})

export async function POST(request: NextRequest) {
  const requestId = generateRequestId()

  try {
    const authResult = await checkHybridAuth(request, {
      requireWorkflowId: false,
    })

    if (!authResult.success) {
      logger.warn(`[${requestId}] Unauthorized Telegram send attempt: ${authResult.error}`)
      return NextResponse.json(
        {
          success: false,
          error: authResult.error || 'Authentication required',
        },
        { status: 401 }
      )
    }

    logger.info(`[${requestId}] Authenticated Telegram send request via ${authResult.authType}`, {
      userId: authResult.userId,
    })

    const body = await request.json()
    const validatedData = TelegramSendDocumentSchema.parse(body)

    logger.info(`[${requestId}] Sending Telegram document`, {
      chatId: validatedData.chatId,
      hasFiles: !!(validatedData.files && validatedData.files.length > 0),
      fileCount: validatedData.files?.length || 0,
    })

    if (!validatedData.files || validatedData.files.length === 0) {
      return NextResponse.json(
        {
          success: false,
          error: 'At least one document file is required for sendDocument operation',
        },
        { status: 400 }
      )
    }

    const userFiles = processFilesToUserFiles(validatedData.files, requestId, logger)

    if (userFiles.length === 0) {
      logger.warn(`[${requestId}] No valid files to upload`)
      return NextResponse.json(
        {
          success: false,
          error: 'No valid files provided for upload',
        },
        { status: 400 }
      )
    }

    const maxSize = 50 * 1024 * 1024 // 50MB
    const tooLargeFiles = userFiles.filter((file) => file.size > maxSize)

    if (tooLargeFiles.length > 0) {
      const filesInfo = tooLargeFiles
        .map((f) => `${f.name} (${(f.size / (1024 * 1024)).toFixed(2)}MB)`)
        .join(', ')
      return NextResponse.json(
        {
          success: false,
          error: `The following files exceed Telegram's 50MB limit: ${filesInfo}`,
        },
        { status: 400 }
      )
    }

    // Telegram's sendDocument takes a single document per call; only the first
    // valid file is sent here
    const userFile = userFiles[0]
    logger.info(`[${requestId}] Uploading document: ${userFile.name}`)

    const buffer = await downloadFileFromStorage(userFile, requestId, logger)

    logger.info(`[${requestId}] Downloaded file: ${buffer.length} bytes`)

    const formData = new FormData()
    formData.append('chat_id', validatedData.chatId)

    const blob = new Blob([new Uint8Array(buffer)], { type: userFile.type })
    formData.append('document', blob, userFile.name)

    if (validatedData.caption) {
      formData.append('caption', convertMarkdownToHTML(validatedData.caption))
      formData.append('parse_mode', 'HTML')
    }

    const telegramApiUrl = `https://api.telegram.org/bot${validatedData.botToken}/sendDocument`
    logger.info(`[${requestId}] Sending request to Telegram API`)

    const response = await fetch(telegramApiUrl, {
      method: 'POST',
      body: formData,
    })

    const data = await response.json()

    if (!data.ok) {
      logger.error(`[${requestId}] Telegram API error:`, data)
      return NextResponse.json(
        {
          success: false,
          error: data.description || 'Failed to send document to Telegram',
        },
        { status: response.status }
      )
    }

    logger.info(`[${requestId}] Document sent successfully`)

    return NextResponse.json({
      success: true,
      output: {
        message: 'Document sent successfully',
        data: data.result,
      },
    })
  } catch (error) {
    logger.error(`[${requestId}] Error sending Telegram document:`, error)
    return NextResponse.json(
      {
        success: false,
        error: error instanceof Error ? error.message : 'Unknown error occurred',
      },
      { status: 500 }
    )
  }
}
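A minimal client sketch against this route. The path follows from the file location; `telegramBotToken` and `someUserFile` are placeholders, and only the first valid file under Telegram's 50MB bot limit is sent per call:

// Sketch: send one document through the route (async context assumed).
const res = await fetch('/api/tools/telegram/send-document', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    botToken: telegramBotToken, // placeholder bot token
    chatId: '123456789',
    files: [someUserFile], // placeholder for the workflow file shape
    caption: '*Quarterly report*', // Markdown; the route converts it to HTML
  }),
})
const { success, output } = await res.json()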
apps/sim/app/api/tools/vision/analyze/route.ts (new file, 231 lines)
@@ -0,0 +1,231 @@
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { createLogger } from '@/lib/logs/console/logger'
import { downloadFileFromStorage, processSingleFileToUserFile } from '@/lib/uploads/file-processing'
import { generateRequestId } from '@/lib/utils'

export const dynamic = 'force-dynamic'

const logger = createLogger('VisionAnalyzeAPI')

const VisionAnalyzeSchema = z.object({
  apiKey: z.string().min(1, 'API key is required'),
  imageUrl: z.string().optional().nullable(),
  imageFile: z.any().optional().nullable(),
  model: z.string().optional().default('gpt-4o'),
  prompt: z.string().optional().nullable(),
})

export async function POST(request: NextRequest) {
  const requestId = generateRequestId()

  try {
    const authResult = await checkHybridAuth(request, { requireWorkflowId: false })

    if (!authResult.success) {
      logger.warn(`[${requestId}] Unauthorized Vision analyze attempt: ${authResult.error}`)
      return NextResponse.json(
        {
          success: false,
          error: authResult.error || 'Authentication required',
        },
        { status: 401 }
      )
    }

    logger.info(`[${requestId}] Authenticated Vision analyze request via ${authResult.authType}`, {
      userId: authResult.userId,
    })

    const body = await request.json()
    const validatedData = VisionAnalyzeSchema.parse(body)

    if (!validatedData.imageUrl && !validatedData.imageFile) {
      return NextResponse.json(
        {
          success: false,
          error: 'Either imageUrl or imageFile is required',
        },
        { status: 400 }
      )
    }

    logger.info(`[${requestId}] Analyzing image`, {
      hasFile: !!validatedData.imageFile,
      hasUrl: !!validatedData.imageUrl,
      model: validatedData.model,
    })

    let imageSource: string = validatedData.imageUrl || ''

    if (validatedData.imageFile) {
      const rawFile = validatedData.imageFile
      logger.info(`[${requestId}] Processing image file: ${rawFile.name}`)

      let userFile
      try {
        userFile = processSingleFileToUserFile(rawFile, requestId, logger)
      } catch (error) {
        return NextResponse.json(
          {
            success: false,
            error: error instanceof Error ? error.message : 'Failed to process image file',
          },
          { status: 400 }
        )
      }

      const buffer = await downloadFileFromStorage(userFile, requestId, logger)

      const base64 = buffer.toString('base64')
      const mimeType = userFile.type || 'image/jpeg'
      imageSource = `data:${mimeType};base64,${base64}`
      logger.info(`[${requestId}] Converted image to base64 (${buffer.length} bytes)`)
    }

    const defaultPrompt = 'Please analyze this image and describe what you see in detail.'
    const prompt = validatedData.prompt || defaultPrompt

    // Only model names prefixed 'claude-3' are routed to Anthropic; everything
    // else goes to the OpenAI chat completions endpoint
    const isClaude = validatedData.model.startsWith('claude-3')
    const apiUrl = isClaude
      ? 'https://api.anthropic.com/v1/messages'
      : 'https://api.openai.com/v1/chat/completions'

    const headers: Record<string, string> = {
      'Content-Type': 'application/json',
    }

    if (isClaude) {
      headers['x-api-key'] = validatedData.apiKey
      headers['anthropic-version'] = '2023-06-01'
    } else {
      headers.Authorization = `Bearer ${validatedData.apiKey}`
    }

    let requestBody: any

    if (isClaude) {
      if (imageSource.startsWith('data:')) {
        const base64Match = imageSource.match(/^data:([^;]+);base64,(.+)$/)
        if (!base64Match) {
          return NextResponse.json(
            { success: false, error: 'Invalid base64 image format' },
            { status: 400 }
          )
        }
        const [, mediaType, base64Data] = base64Match

        requestBody = {
          model: validatedData.model,
          max_tokens: 1024,
          messages: [
            {
              role: 'user',
              content: [
                { type: 'text', text: prompt },
                {
                  type: 'image',
                  source: {
                    type: 'base64',
                    media_type: mediaType,
                    data: base64Data,
                  },
                },
              ],
            },
          ],
        }
      } else {
        requestBody = {
          model: validatedData.model,
          max_tokens: 1024,
          messages: [
            {
              role: 'user',
              content: [
                { type: 'text', text: prompt },
                {
                  type: 'image',
                  source: { type: 'url', url: imageSource },
                },
              ],
            },
          ],
        }
      }
    } else {
      requestBody = {
        model: validatedData.model,
        messages: [
          {
            role: 'user',
            content: [
              { type: 'text', text: prompt },
              {
                type: 'image_url',
                image_url: {
                  url: imageSource,
                },
              },
            ],
          },
        ],
        max_tokens: 1000,
      }
    }

    logger.info(`[${requestId}] Sending request to ${isClaude ? 'Anthropic' : 'OpenAI'} API`)
    const response = await fetch(apiUrl, {
      method: 'POST',
      headers,
      body: JSON.stringify(requestBody),
    })

    if (!response.ok) {
      const errorData = await response.json().catch(() => ({}))
      logger.error(`[${requestId}] Vision API error:`, errorData)
      return NextResponse.json(
        {
          success: false,
          error: errorData.error?.message || errorData.message || 'Failed to analyze image',
        },
        { status: response.status }
      )
    }

    const data = await response.json()
    const result = data.content?.[0]?.text || data.choices?.[0]?.message?.content

    logger.info(`[${requestId}] Image analyzed successfully`)

    return NextResponse.json({
      success: true,
      output: {
        content: result,
        model: data.model,
        tokens: data.content
          ? (data.usage?.input_tokens || 0) + (data.usage?.output_tokens || 0)
          : data.usage?.total_tokens,
        usage: data.usage
          ? {
              input_tokens: data.usage.input_tokens,
              output_tokens: data.usage.output_tokens,
              total_tokens:
                data.usage.total_tokens ||
                (data.usage.input_tokens || 0) + (data.usage.output_tokens || 0),
            }
          : undefined,
      },
    })
  } catch (error) {
    logger.error(`[${requestId}] Error analyzing image:`, error)
    return NextResponse.json(
      {
        success: false,
        error: error instanceof Error ? error.message : 'Unknown error occurred',
      },
      { status: 500 }
    )
  }
}
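A minimal client sketch for the analyze route; `providerApiKey` and the image URL are placeholders. Passing a model whose name starts with 'claude-3' would route the same request to Anthropic instead of OpenAI:

// Sketch: analyze a public image URL via the default OpenAI path (async context).
const res = await fetch('/api/tools/vision/analyze', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    apiKey: providerApiKey, // placeholder for an OpenAI or Anthropic key
    imageUrl: 'https://example.com/photo.jpg',
    model: 'gpt-4o',
    prompt: 'List the objects visible in this image.',
  }),
})
const { output } = await res.json()
console.log(output.content, output.usage)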
apps/sim/app/api/workspaces/[id]/files/[fileId]/download/route.ts (new file, 91 lines; path inferred from the handler's doc comment)
@@ -0,0 +1,91 @@
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console/logger'
import { getPresignedUrlWithConfig, USE_BLOB_STORAGE, USE_S3_STORAGE } from '@/lib/uploads'
import { BLOB_CONFIG, S3_CONFIG } from '@/lib/uploads/setup'
import { getWorkspaceFile } from '@/lib/uploads/workspace-files'
import { generateRequestId } from '@/lib/utils'
import { verifyWorkspaceMembership } from '@/app/api/workflows/utils'

export const dynamic = 'force-dynamic'

const logger = createLogger('WorkspaceFileDownloadAPI')

/**
 * POST /api/workspaces/[id]/files/[fileId]/download
 * Generate presigned download URL (requires read permission)
 * Reuses execution file helper pattern for 5-minute presigned URLs
 */
export async function POST(
  request: NextRequest,
  { params }: { params: Promise<{ id: string; fileId: string }> }
) {
  const requestId = generateRequestId()
  const { id: workspaceId, fileId } = await params

  try {
    const session = await getSession()
    if (!session?.user?.id) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    // Check workspace permissions (requires read)
    const userPermission = await verifyWorkspaceMembership(session.user.id, workspaceId)
    if (!userPermission) {
      logger.warn(
        `[${requestId}] User ${session.user.id} lacks permission for workspace ${workspaceId}`
      )
      return NextResponse.json({ error: 'Insufficient permissions' }, { status: 403 })
    }

    const fileRecord = await getWorkspaceFile(workspaceId, fileId)
    if (!fileRecord) {
      return NextResponse.json({ error: 'File not found' }, { status: 404 })
    }

    // Generate 5-minute presigned URL (same pattern as execution files)
    let downloadUrl: string

    if (USE_S3_STORAGE) {
      downloadUrl = await getPresignedUrlWithConfig(
        fileRecord.key,
        {
          bucket: S3_CONFIG.bucket,
          region: S3_CONFIG.region,
        },
        5 * 60 // 5 minutes
      )
    } else if (USE_BLOB_STORAGE) {
      downloadUrl = await getPresignedUrlWithConfig(
        fileRecord.key,
        {
          accountName: BLOB_CONFIG.accountName,
          accountKey: BLOB_CONFIG.accountKey,
          connectionString: BLOB_CONFIG.connectionString,
          containerName: BLOB_CONFIG.containerName,
        },
        5 * 60 // 5 minutes
      )
    } else {
      throw new Error('No cloud storage configured')
    }

    logger.info(`[${requestId}] Generated download URL for workspace file: ${fileRecord.name}`)

    return NextResponse.json({
      success: true,
      downloadUrl,
      fileName: fileRecord.name,
      expiresIn: 300, // 5 minutes
    })
  } catch (error) {
    logger.error(`[${requestId}] Error generating download URL:`, error)
    return NextResponse.json(
      {
        success: false,
        error: error instanceof Error ? error.message : 'Failed to generate download URL',
      },
      { status: 500 }
    )
  }
}
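A minimal client sketch, assuming an authenticated browser session. Note the route is a POST even though it only reads, matching the handler above:

// Sketch: fetch a short-lived download URL for a workspace file, then GET it.
const res = await fetch(`/api/workspaces/${workspaceId}/files/${fileId}/download`, {
  method: 'POST',
})
const { downloadUrl, fileName, expiresIn } = await res.json()
// downloadUrl is valid for `expiresIn` seconds (300 here), so use it promptly.
const blob = await (await fetch(downloadUrl)).blob()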
apps/sim/app/api/workspaces/[id]/files/[fileId]/route.ts (new file, 55 lines)
@@ -0,0 +1,55 @@
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console/logger'
import { getUserEntityPermissions } from '@/lib/permissions/utils'
import { deleteWorkspaceFile } from '@/lib/uploads/workspace-files'
import { generateRequestId } from '@/lib/utils'

export const dynamic = 'force-dynamic'

const logger = createLogger('WorkspaceFileAPI')

/**
 * DELETE /api/workspaces/[id]/files/[fileId]
 * Delete a workspace file (requires write permission)
 */
export async function DELETE(
  request: NextRequest,
  { params }: { params: Promise<{ id: string; fileId: string }> }
) {
  const requestId = generateRequestId()
  const { id: workspaceId, fileId } = await params

  try {
    const session = await getSession()
    if (!session?.user?.id) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    // Check workspace permissions (requires write)
    const userPermission = await getUserEntityPermissions(session.user.id, 'workspace', workspaceId)
    if (userPermission !== 'admin' && userPermission !== 'write') {
      logger.warn(
        `[${requestId}] User ${session.user.id} lacks write permission for workspace ${workspaceId}`
      )
      return NextResponse.json({ error: 'Insufficient permissions' }, { status: 403 })
    }

    await deleteWorkspaceFile(workspaceId, fileId)

    logger.info(`[${requestId}] Deleted workspace file: ${fileId}`)

    return NextResponse.json({
      success: true,
    })
  } catch (error) {
    logger.error(`[${requestId}] Error deleting workspace file:`, error)
    return NextResponse.json(
      {
        success: false,
        error: error instanceof Error ? error.message : 'Failed to delete file',
      },
      { status: 500 }
    )
  }
}
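And the matching delete call from a client, which succeeds only for members with admin or write permission (sketch, session auth assumed):

// Sketch: remove a workspace file by id.
await fetch(`/api/workspaces/${workspaceId}/files/${fileId}`, { method: 'DELETE' })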
apps/sim/app/api/workspaces/[id]/files/route.ts (new file, 127 lines)
@@ -0,0 +1,127 @@
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console/logger'
import { getUserEntityPermissions } from '@/lib/permissions/utils'
import { listWorkspaceFiles, uploadWorkspaceFile } from '@/lib/uploads/workspace-files'
import { generateRequestId } from '@/lib/utils'
import { verifyWorkspaceMembership } from '@/app/api/workflows/utils'

export const dynamic = 'force-dynamic'

const logger = createLogger('WorkspaceFilesAPI')

/**
 * GET /api/workspaces/[id]/files
 * List all files for a workspace (requires read permission)
 */
export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
  const requestId = generateRequestId()
  const { id: workspaceId } = await params

  try {
    const session = await getSession()
    if (!session?.user?.id) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    // Check workspace permissions (requires read)
    const userPermission = await verifyWorkspaceMembership(session.user.id, workspaceId)
    if (!userPermission) {
      logger.warn(
        `[${requestId}] User ${session.user.id} lacks permission for workspace ${workspaceId}`
      )
      return NextResponse.json({ error: 'Insufficient permissions' }, { status: 403 })
    }

    const files = await listWorkspaceFiles(workspaceId)

    logger.info(`[${requestId}] Listed ${files.length} files for workspace ${workspaceId}`)

    return NextResponse.json({
      success: true,
      files,
    })
  } catch (error) {
    logger.error(`[${requestId}] Error listing workspace files:`, error)
    return NextResponse.json(
      {
        success: false,
        error: error instanceof Error ? error.message : 'Failed to list files',
      },
      { status: 500 }
    )
  }
}

/**
 * POST /api/workspaces/[id]/files
 * Upload a new file to workspace storage (requires write permission)
 */
export async function POST(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
  const requestId = generateRequestId()
  const { id: workspaceId } = await params

  try {
    const session = await getSession()
    if (!session?.user?.id) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    // Check workspace permissions (requires write)
    const userPermission = await getUserEntityPermissions(session.user.id, 'workspace', workspaceId)
    if (userPermission !== 'admin' && userPermission !== 'write') {
      logger.warn(
        `[${requestId}] User ${session.user.id} lacks write permission for workspace ${workspaceId}`
      )
      return NextResponse.json({ error: 'Insufficient permissions' }, { status: 403 })
    }

    const formData = await request.formData()
    const file = formData.get('file') as File

    if (!file) {
      return NextResponse.json({ error: 'No file provided' }, { status: 400 })
    }

    // Validate file size (100MB limit)
    const maxSize = 100 * 1024 * 1024
    if (file.size > maxSize) {
      return NextResponse.json(
        { error: `File size exceeds 100MB limit (${(file.size / (1024 * 1024)).toFixed(2)}MB)` },
        { status: 400 }
      )
    }

    const buffer = Buffer.from(await file.arrayBuffer())

    const userFile = await uploadWorkspaceFile(
      workspaceId,
      session.user.id,
      buffer,
      file.name,
      file.type || 'application/octet-stream'
    )

    logger.info(`[${requestId}] Uploaded workspace file: ${file.name}`)

    return NextResponse.json({
      success: true,
      file: userFile,
    })
  } catch (error) {
    logger.error(`[${requestId}] Error uploading workspace file:`, error)

    // Check if it's a duplicate file error
    const errorMessage = error instanceof Error ? error.message : 'Failed to upload file'
    const isDuplicate = errorMessage.includes('already exists')

    return NextResponse.json(
      {
        success: false,
        error: errorMessage,
        isDuplicate,
      },
      { status: isDuplicate ? 409 : 500 }
    )
  }
}
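A minimal client sketch against these two handlers, assuming an authenticated session; `file` stands in for a browser File object under the 100MB cap:

// Sketch: upload a file into workspace storage, then list what's there.
const formData = new FormData()
formData.append('file', file)
const upload = await fetch(`/api/workspaces/${workspaceId}/files`, {
  method: 'POST',
  body: formData,
})
const { success, file: record, isDuplicate } = await upload.json()
// A 409 with isDuplicate=true means a file with this name already exists.

const listing = await (await fetch(`/api/workspaces/${workspaceId}/files`)).json()
console.log(listing.files.map((f: { name: string }) => f.name))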
@@ -1,7 +1,7 @@
 'use client'

 import { useRef, useState } from 'react'
-import { Check, Loader2, X } from 'lucide-react'
+import { AlertCircle, Check, Loader2, X } from 'lucide-react'
 import { Button } from '@/components/ui/button'
 import { Dialog, DialogContent, DialogHeader, DialogTitle } from '@/components/ui/dialog'
 import { Label } from '@/components/ui/label'
@@ -42,7 +42,7 @@ export function UploadModal({
   const [fileError, setFileError] = useState<string | null>(null)
   const [isDragging, setIsDragging] = useState(false)

-  const { isUploading, uploadProgress, uploadFiles } = useKnowledgeUpload({
+  const { isUploading, uploadProgress, uploadError, uploadFiles, clearError } = useKnowledgeUpload({
     onUploadComplete: () => {
       logger.info(`Successfully uploaded ${files.length} files`)
       onUploadComplete?.()
@@ -55,6 +55,7 @@

     setFiles([])
     setFileError(null)
+    clearError()
     setIsDragging(false)
     onOpenChange(false)
   }
@@ -276,7 +277,20 @@
           </div>
         )}

-        {fileError && <p className='text-destructive text-sm'>{fileError}</p>}
+        {fileError && (
+          <div className='rounded-md border border-destructive/50 bg-destructive/10 px-3 py-2 text-destructive text-sm'>
+            {fileError}
+          </div>
+        )}
+
+        {uploadError && (
+          <div className='rounded-md border border-destructive/50 bg-destructive/10 px-3 py-2'>
+            <div className='flex items-start gap-2'>
+              <AlertCircle className='mt-0.5 h-4 w-4 shrink-0 text-destructive' />
+              <div className='flex-1 text-destructive text-sm'>{uploadError.message}</div>
+            </div>
+          </div>
+        )}
       </div>
     </div>
@@ -79,13 +79,20 @@ export function CreateModal({ open, onOpenChange, onKnowledgeBaseCreated }: Crea
   const scrollContainerRef = useRef<HTMLDivElement>(null)
   const dropZoneRef = useRef<HTMLDivElement>(null)

-  const { uploadFiles, isUploading, uploadProgress } = useKnowledgeUpload({
+  const { uploadFiles, isUploading, uploadProgress, uploadError, clearError } = useKnowledgeUpload({
     onUploadComplete: (uploadedFiles) => {
       logger.info(`Successfully uploaded ${uploadedFiles.length} files`)
       // Files uploaded and document records created - processing will continue in background
     },
   })

+  const handleClose = (open: boolean) => {
+    if (!open) {
+      clearError()
+    }
+    onOpenChange(open)
+  }
+
   // Cleanup file preview URLs when component unmounts to prevent memory leaks
   useEffect(() => {
     return () => {
@@ -319,7 +326,7 @@ export function CreateModal({ open, onOpenChange, onKnowledgeBaseCreated }: Crea
       files.forEach((file) => URL.revokeObjectURL(file.preview))
       setFiles([])

-      onOpenChange(false)
+      handleClose(false)
     } catch (error) {
       logger.error('Error creating knowledge base:', error)
       setSubmitStatus({
@@ -332,7 +339,7 @@ export function CreateModal({ open, onOpenChange, onKnowledgeBaseCreated }: Crea
   }

   return (
-    <Dialog open={open} onOpenChange={onOpenChange}>
+    <Dialog open={open} onOpenChange={handleClose}>
       <DialogContent
         className='flex h-[74vh] flex-col gap-0 overflow-hidden p-0 sm:max-w-[600px]'
         hideCloseButton
@@ -344,7 +351,7 @@ export function CreateModal({ open, onOpenChange, onKnowledgeBaseCreated }: Crea
             variant='ghost'
             size='icon'
             className='h-8 w-8 p-0'
-            onClick={() => onOpenChange(false)}
+            onClick={() => handleClose(false)}
           >
             <X className='h-4 w-4' />
             <span className='sr-only'>Close</span>
@@ -368,6 +375,14 @@ export function CreateModal({ open, onOpenChange, onKnowledgeBaseCreated }: Crea
           </Alert>
         )}

+        {uploadError && (
+          <Alert variant='destructive' className='mb-6'>
+            <AlertCircle className='h-4 w-4' />
+            <AlertTitle>Upload Error</AlertTitle>
+            <AlertDescription>{uploadError.message}</AlertDescription>
+          </Alert>
+        )}
+
         {/* Form Fields Section - Fixed at top */}
         <div className='flex-shrink-0 space-y-4'>
           <div className='space-y-2'>
@@ -621,7 +636,7 @@ export function CreateModal({ open, onOpenChange, onKnowledgeBaseCreated }: Crea
         {/* Footer */}
         <div className='mt-auto border-t px-6 pt-4 pb-6'>
           <div className='flex justify-between'>
-            <Button variant='outline' onClick={() => onOpenChange(false)} type='button'>
+            <Button variant='outline' onClick={() => handleClose(false)} type='button'>
               Cancel
             </Button>
             <Button
@@ -1,10 +1,23 @@
 'use client'

 import { useRef, useState } from 'react'
-import { X } from 'lucide-react'
+import { ChevronDown, X } from 'lucide-react'
+import { useParams } from 'next/navigation'
+import {
+  Command,
+  CommandEmpty,
+  CommandGroup,
+  CommandInput,
+  CommandItem,
+  CommandList,
+  Popover,
+  PopoverContent,
+  PopoverTrigger,
+} from '@/components/ui'
 import { Button } from '@/components/ui/button'
 import { Progress } from '@/components/ui/progress'
 import { createLogger } from '@/lib/logs/console/logger'
+import type { WorkspaceFileRecord } from '@/lib/uploads/workspace-files'
 import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/components/sub-block/hooks/use-sub-block-value'
 import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
 import { useWorkflowStore } from '@/stores/workflows/workflow/store'
@@ -20,11 +33,13 @@ interface FileUploadProps {
   isPreview?: boolean
   previewValue?: any | null
   disabled?: boolean
+  isWide?: boolean
 }

 interface UploadedFile {
   name: string
   path: string
+  key?: string
   size: number
   type: string
 }
@@ -44,11 +59,17 @@ export function FileUpload({
   isPreview = false,
   previewValue,
   disabled = false,
+  isWide = false,
 }: FileUploadProps) {
   // State management - handle both single file and array of files
   const [storeValue, setStoreValue] = useSubBlockValue(blockId, subBlockId)
   const [uploadingFiles, setUploadingFiles] = useState<UploadingFile[]>([])
   const [uploadProgress, setUploadProgress] = useState(0)
+  const [workspaceFiles, setWorkspaceFiles] = useState<WorkspaceFileRecord[]>([])
+  const [loadingWorkspaceFiles, setLoadingWorkspaceFiles] = useState(false)
+  const [uploadError, setUploadError] = useState<string | null>(null)
+  const [addMoreOpen, setAddMoreOpen] = useState(false)
+  const [pickerOpen, setPickerOpen] = useState(false)

   // For file deletion status
   const [deletingFiles, setDeletingFiles] = useState<Record<string, boolean>>({})
@@ -58,10 +79,43 @@ export function FileUpload({
   // Stores
   const { activeWorkflowId } = useWorkflowRegistry()
+  const params = useParams()
+  const workspaceId = params?.workspaceId as string

   // Use preview value when in preview mode, otherwise use store value
   const value = isPreview ? previewValue : storeValue

+  // Load workspace files function
+  const loadWorkspaceFiles = async () => {
+    if (!workspaceId || isPreview) return
+
+    try {
+      setLoadingWorkspaceFiles(true)
+      const response = await fetch(`/api/workspaces/${workspaceId}/files`)
+      const data = await response.json()
+
+      if (data.success) {
+        setWorkspaceFiles(data.files || [])
+      }
+    } catch (error) {
+      logger.error('Error loading workspace files:', error)
+    } finally {
+      setLoadingWorkspaceFiles(false)
+    }
+  }
+
+  // Filter out already selected files
+  const availableWorkspaceFiles = workspaceFiles.filter((workspaceFile) => {
+    const existingFiles = Array.isArray(value) ? value : value ? [value] : []
+    // Check if this workspace file is already added (match by name or key)
+    return !existingFiles.some(
+      (existing) =>
+        existing.name === workspaceFile.name ||
+        existing.path?.includes(workspaceFile.key) ||
+        existing.key === workspaceFile.key
+    )
+  })
+
   /**
    * Opens file dialog
    * Prevents event propagation to avoid ReactFlow capturing the event
@@ -87,6 +141,15 @@ export function FileUpload({
     return `${(bytes / (1024 * 1024)).toFixed(1)} MB`
   }

+  /**
+   * Truncate long file names keeping both start and end segments.
+   */
+  const truncateMiddle = (text: string, start = 28, end = 18) => {
+    if (!text) return ''
+    if (text.length <= start + end + 3) return text
+    return `${text.slice(0, start)}...${text.slice(-end)}`
+  }
+
   /**
    * Handles file upload when new file(s) are selected
    */
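For a sense of the helper's defaults (start = 28, end = 18), an illustrative call with a made-up file name:

// Example of truncateMiddle behavior (illustrative input, not from the commit):
truncateMiddle('quarterly-financial-report-2024-final-revision.xlsx')
// => 'quarterly-financial-report-2...inal-revision.xlsx'
truncateMiddle('notes.md') // short names pass through unchanged => 'notes.md'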
@@ -137,6 +200,8 @@
     let progressInterval: NodeJS.Timeout | null = null

     try {
+      setUploadError(null) // Clear previous errors
+
       // Simulate upload progress
       progressInterval = setInterval(() => {
         setUploadProgress((prev) => {
@@ -148,92 +213,64 @@
       const uploadedFiles: UploadedFile[] = []
       const uploadErrors: string[] = []

-      // Try to get pre-signed URLs first for direct upload
-      let useDirectUpload = false
-
-      // Upload each file separately
+      // Upload each file via server (workspace files need DB records)
       for (const file of validFiles) {
         try {
-          // First, try to get a pre-signed URL for direct upload
-          const presignedResponse = await fetch('/api/files/presigned', {
-            method: 'POST',
-            headers: {
-              'Content-Type': 'application/json',
-            },
-            body: JSON.stringify({
-              fileName: file.name,
-              contentType: file.type,
-              fileSize: file.size,
-            }),
-          })
-
-          const presignedData = await presignedResponse.json()
-
-          if (presignedResponse.ok && presignedData.directUploadSupported) {
-            // Use direct upload method
-            useDirectUpload = true
-
-            const uploadHeaders: Record<string, string> = {
-              'Content-Type': file.type,
-            }
-
-            // Add Azure-specific headers if provided
-            if (presignedData.uploadHeaders) {
-              Object.assign(uploadHeaders, presignedData.uploadHeaders)
-            }
-
-            // Upload directly to cloud storage using the pre-signed URL
-            const uploadResponse = await fetch(presignedData.presignedUrl, {
-              method: 'PUT',
-              headers: uploadHeaders, // Use the merged headers
-              body: file,
-            })
-
-            if (!uploadResponse.ok) {
-              throw new Error(
-                `Direct upload failed: ${uploadResponse.status} ${uploadResponse.statusText}`
-              )
-            }
-
-            // Use the file info returned from the presigned URL endpoint
-            uploadedFiles.push({
-              name: presignedData.fileInfo.name,
-              path: presignedData.fileInfo.path,
-              size: presignedData.fileInfo.size,
-              type: presignedData.fileInfo.type,
-            })
-          } else {
-            // Fallback to traditional upload through API route
-            useDirectUpload = false
-
-            // Create FormData for upload
-            const formData = new FormData()
-            formData.append('file', file)
-
-            // Upload the file via server
-            const response = await fetch('/api/files/upload', {
-              method: 'POST',
-              body: formData,
-            })
-
-            // Handle error response
-            if (!response.ok) {
-              const errorData = await response.json().catch(() => ({ error: response.statusText }))
-              const errorMessage = errorData.error || `Failed to upload file: ${response.status}`
-              uploadErrors.push(`${file.name}: ${errorMessage}`)
-              continue
-            }
-
-            // Process successful upload
-            const data = await response.json()
-
-            uploadedFiles.push({
-              name: file.name,
-              path: data.url || data.path, // Use url or path from upload response
-              size: file.size,
-              type: file.type,
-            })
-          }
+          // Create FormData for upload
+          const formData = new FormData()
+          formData.append('file', file)
+
+          // Add workspace ID for workspace-scoped storage
+          if (workspaceId) {
+            formData.append('workspaceId', workspaceId)
+          }
+
+          // Upload the file via server
+          const response = await fetch('/api/files/upload', {
+            method: 'POST',
+            body: formData,
+          })
+
+          const data = await response.json()
+
+          // Handle error response
+          if (!response.ok) {
+            const errorMessage = data.error || `Failed to upload file: ${response.status}`
+            uploadErrors.push(`${file.name}: ${errorMessage}`)
+
+            // Set error message with conditional auto-dismiss
+            setUploadError(errorMessage)
+
+            // Only auto-dismiss duplicate errors, keep other errors (like storage limits) visible
+            if (data.isDuplicate || response.status === 409) {
+              setTimeout(() => setUploadError(null), 5000)
+            }
+
+            continue
+          }
+
+          // Check if response has error even with 200 status
+          if (data.success === false) {
+            const errorMessage = data.error || 'Upload failed'
+            uploadErrors.push(`${file.name}: ${errorMessage}`)
+
+            // Set error message with conditional auto-dismiss
+            setUploadError(errorMessage)
+
+            // Only auto-dismiss duplicate errors, keep other errors (like storage limits) visible
+            if (data.isDuplicate) {
+              setTimeout(() => setUploadError(null), 5000)
+            }
+            continue
+          }
+
+          // Process successful upload - handle both workspace and regular uploads
+          uploadedFiles.push({
+            name: file.name,
+            path: data.file?.url || data.url, // Workspace: data.file.url, Non-workspace: data.url
+            key: data.file?.key || data.key, // Storage key for proper file access
+            size: file.size,
+            type: file.type,
+          })
         } catch (error) {
           logger.error(`Error uploading ${file.name}:`, error)
           const errorMessage = error instanceof Error ? error.message : 'Unknown error'
@@ -251,15 +288,18 @@

       // Send consolidated notification about uploaded files
       if (uploadedFiles.length > 0) {
-        const uploadMethod = useDirectUpload ? 'direct' : 'server'
+        setUploadError(null) // Clear error on successful upload
+
+        // Refresh workspace files list to keep dropdown up to date
+        if (workspaceId) {
+          void loadWorkspaceFiles()
+        }
+
         if (uploadedFiles.length === 1) {
-          logger.info(
-            `${uploadedFiles[0].name} was uploaded successfully (${uploadMethod} upload)`,
-            activeWorkflowId
-          )
+          logger.info(`${uploadedFiles[0].name} was uploaded successfully`, activeWorkflowId)
         } else {
           logger.info(
-            `Uploaded ${uploadedFiles.length} files successfully: ${uploadedFiles.map((f) => f.name).join(', ')} (${uploadMethod} upload)`,
+            `Uploaded ${uploadedFiles.length} files successfully: ${uploadedFiles.map((f) => f.name).join(', ')}`,
             activeWorkflowId
           )
         }
@@ -322,6 +362,44 @@
     }
   }

+  /**
+   * Handle selecting an existing workspace file
+   */
+  const handleSelectWorkspaceFile = (fileId: string) => {
+    const selectedFile = workspaceFiles.find((f) => f.id === fileId)
+    if (!selectedFile) return
+
+    // Convert workspace file record to uploaded file format
+    // Path will be converted to presigned URL during execution if needed
+    const uploadedFile: UploadedFile = {
+      name: selectedFile.name,
+      path: selectedFile.path,
+      size: selectedFile.size,
+      type: selectedFile.type,
+    }
+
+    if (multiple) {
+      // For multiple files: Append to existing
+      const existingFiles = Array.isArray(value) ? value : value ? [value] : []
+      const uniqueFiles = new Map()
+
+      existingFiles.forEach((file) => {
+        uniqueFiles.set(file.url || file.path, file)
+      })
+
+      uniqueFiles.set(uploadedFile.path, uploadedFile)
+      const newFiles = Array.from(uniqueFiles.values())
+
+      setStoreValue(newFiles)
+    } else {
+      // For single file: Replace
+      setStoreValue(uploadedFile)
+    }
+
+    useWorkflowStore.getState().triggerUpdate()
+    logger.info(`Selected workspace file: ${selectedFile.name}`, activeWorkflowId)
+  }
+
   /**
    * Handles deletion of a single file
    */
@@ -335,22 +413,31 @@
     setDeletingFiles((prev) => ({ ...prev, [file.path || '']: true }))

     try {
-      // Call API to delete the file from server
-      const response = await fetch('/api/files/delete', {
-        method: 'POST',
-        headers: {
-          'Content-Type': 'application/json',
-        },
-        body: JSON.stringify({ filePath: file.path }),
-      })
+      // Check if this is a workspace file (decoded path contains workspaceId pattern)
+      const decodedPath = file.path ? decodeURIComponent(file.path) : ''
+      const isWorkspaceFile =
+        workspaceId &&
+        (decodedPath.includes(`/${workspaceId}/`) || decodedPath.includes(`${workspaceId}/`))

-      if (!response.ok) {
-        const errorData = await response.json().catch(() => ({ error: response.statusText }))
-        const errorMessage = errorData.error || `Failed to delete file: ${response.status}`
-        throw new Error(errorMessage)
+      if (!isWorkspaceFile) {
+        // Only delete from storage if it's NOT a workspace file
+        // Workspace files are permanent and managed through Settings
+        const response = await fetch('/api/files/delete', {
+          method: 'POST',
+          headers: {
+            'Content-Type': 'application/json',
+          },
+          body: JSON.stringify({ filePath: file.path }),
+        })
+
+        if (!response.ok) {
+          const errorData = await response.json().catch(() => ({ error: response.statusText }))
+          const errorMessage = errorData.error || `Failed to delete file: ${response.status}`
+          throw new Error(errorMessage)
+        }
       }

-      // Update the UI state
+      // Update the UI state (remove from selection)
       if (multiple) {
         // For multiple files: Remove the specific file
         const filesArray = Array.isArray(value) ? value : value ? [value] : []
@@ -364,7 +451,7 @@
       useWorkflowStore.getState().triggerUpdate()
     } catch (error) {
       logger.error(
-        error instanceof Error ? error.message : 'Failed to delete file from server',
+        error instanceof Error ? error.message : 'Failed to remove file',
         activeWorkflowId
       )
     } finally {
@@ -461,7 +548,9 @@
             className='flex items-center justify-between rounded border border-border bg-background px-3 py-2'
           >
             <div className='flex-1 truncate pr-2'>
-              <div className='truncate font-normal text-sm'>{file.name}</div>
+              <div className='truncate font-normal text-sm' title={file.name}>
+                {truncateMiddle(file.name)}
+              </div>
               <div className='text-muted-foreground text-xs'>{formatFileSize(file.size)}</div>
             </div>
             <Button
@@ -517,7 +606,10 @@
             data-testid='file-input-element'
           />

-          <div className='bg-card'>
+          {/* Error message */}
+          {uploadError && <div className='mb-2 text-red-600 text-sm'>{uploadError}</div>}
+
+          <div>
             {/* File list with consistent spacing */}
             {(hasFiles || isUploading) && (
               <div className='mb-2 space-y-2'>
@@ -547,49 +639,152 @@
             </div>
           )}

-          {/* Action buttons */}
-          {(hasFiles || isUploading) && (
-            <div className='flex space-x-2 bg-card'>
-              <Button
-                type='button'
-                variant='outline'
-                size='sm'
-                className='h-10 flex-1 font-normal text-sm'
-                onClick={handleRemoveAllFiles}
-                disabled={isUploading}
-              >
-                Remove All
-              </Button>
-              {multiple && !isUploading && (
-                <Button
-                  type='button'
-                  variant='outline'
-                  size='sm'
-                  className='h-10 flex-1 font-normal text-sm'
-                  onClick={handleOpenFileDialog}
-                >
-                  Add More
-                </Button>
-              )}
+          {/* Add More dropdown for multiple files */}
+          {hasFiles && multiple && !isUploading && (
+            <div>
+              <Popover
+                open={addMoreOpen}
+                onOpenChange={(open) => {
+                  setAddMoreOpen(open)
+                  if (open) void loadWorkspaceFiles()
+                }}
+              >
+                <PopoverTrigger asChild>
+                  <Button
+                    variant='outline'
+                    role='combobox'
+                    aria-expanded={addMoreOpen}
+                    className='relative w-full justify-between'
+                    disabled={disabled || loadingWorkspaceFiles}
+                  >
+                    <span className='truncate font-normal'>+ Add More</span>
+                    <ChevronDown className='absolute right-3 h-4 w-4 shrink-0 opacity-50' />
+                  </Button>
+                </PopoverTrigger>
+                <PopoverContent className={isWide ? 'w-[420px] p-0' : 'w-[320px] p-0'} align='start'>
+                  <Command>
+                    <CommandInput
+                      placeholder='Search files...'
+                      className='text-foreground placeholder:text-muted-foreground'
+                    />
+                    <CommandList onWheel={(e) => e.stopPropagation()}>
+                      <CommandGroup>
+                        <CommandItem
+                          value='upload_new'
+                          onSelect={() => {
+                            setAddMoreOpen(false)
+                            handleOpenFileDialog({
+                              preventDefault: () => {},
+                              stopPropagation: () => {},
+                            } as React.MouseEvent)
+                          }}
+                        >
+                          Upload New File
+                        </CommandItem>
+                      </CommandGroup>
+                      <CommandEmpty>
+                        {availableWorkspaceFiles.length === 0
+                          ? 'No files available.'
+                          : 'No files found.'}
+                      </CommandEmpty>
+                      {availableWorkspaceFiles.length > 0 && (
+                        <CommandGroup heading='Workspace Files'>
+                          {availableWorkspaceFiles.map((file) => (
+                            <CommandItem
+                              key={file.id}
+                              value={file.name}
+                              onSelect={() => {
+                                handleSelectWorkspaceFile(file.id)
+                                setAddMoreOpen(false)
+                              }}
+                            >
+                              <span className='truncate' title={file.name}>
+                                {truncateMiddle(file.name)}
+                              </span>
+                            </CommandItem>
+                          ))}
+                        </CommandGroup>
+                      )}
+                    </CommandList>
+                  </Command>
+                </PopoverContent>
+              </Popover>
             </div>
           )}
         </div>

-        {/* Show upload button if no files and not uploading */}
+        {/* Show dropdown selector if no files and not uploading */}
         {!hasFiles && !isUploading && (
           <div className='flex items-center'>
-            <Button
-              type='button'
-              variant='outline'
-              className='h-10 w-full justify-center bg-card text-center font-normal text-sm'
-              onClick={handleOpenFileDialog}
-            >
-              <div className='flex w-full items-center justify-center gap-2'>
-                {/* <Upload className="h-4 w-4" /> */}
-                <span>{multiple ? 'Upload Files' : 'Upload File'}</span>
-                <span className='text-muted-foreground text-xs'>({maxSize}MB max)</span>
-              </div>
-            </Button>
+            <Popover
+              open={pickerOpen}
+              onOpenChange={(open) => {
+                setPickerOpen(open)
+                if (open) void loadWorkspaceFiles()
+              }}
+            >
+              <PopoverTrigger asChild>
+                <Button
+                  variant='outline'
+                  role='combobox'
+                  aria-expanded={pickerOpen}
+                  className='relative w-full justify-between'
+                  disabled={disabled || loadingWorkspaceFiles}
+                >
+                  <span className='truncate font-normal'>
+                    {loadingWorkspaceFiles ? 'Loading files...' : 'Select or upload file'}
+                  </span>
+                  <ChevronDown className='absolute right-3 h-4 w-4 shrink-0 opacity-50' />
+                </Button>
+              </PopoverTrigger>
+              <PopoverContent className={isWide ? 'w-[420px] p-0' : 'w-[320px] p-0'} align='start'>
+                <Command>
+                  <CommandInput
+                    placeholder='Search files...'
+                    className='text-foreground placeholder:text-muted-foreground'
+                  />
+                  <CommandList onWheel={(e) => e.stopPropagation()}>
+                    <CommandGroup>
+                      <CommandItem
+                        value='upload_new'
+                        onSelect={() => {
+                          setPickerOpen(false)
+                          handleOpenFileDialog({
+                            preventDefault: () => {},
+                            stopPropagation: () => {},
+                          } as React.MouseEvent)
+                        }}
+                      >
+                        Upload New File
+                      </CommandItem>
+                    </CommandGroup>
+                    <CommandEmpty>
+                      {availableWorkspaceFiles.length === 0
+                        ? 'No files available.'
+                        : 'No files found.'}
+                    </CommandEmpty>
+                    {availableWorkspaceFiles.length > 0 && (
+                      <CommandGroup heading='Workspace Files'>
+                        {availableWorkspaceFiles.map((file) => (
+                          <CommandItem
+                            key={file.id}
+                            value={file.name}
+                            onSelect={() => {
+                              handleSelectWorkspaceFile(file.id)
+                              setPickerOpen(false)
+                            }}
+                          >
+                            <span className='truncate' title={file.name}>
+                              {truncateMiddle(file.name)}
+                            </span>
+                          </CommandItem>
+                        ))}
+                      </CommandGroup>
+                    )}
+                  </CommandList>
+                </Command>
+              </PopoverContent>
+            </Popover>
           </div>
         )}
       </div>
@@ -306,6 +306,7 @@ export const SubBlock = memo(
             isPreview={isPreview}
             previewValue={previewValue}
             disabled={isDisabled}
+            isWide={isWide}
           />
         )
       case 'webhook-config': {
@@ -0,0 +1,319 @@
'use client'

import { useEffect, useMemo, useRef, useState } from 'react'
import { Download, Search, Trash2, Upload } from 'lucide-react'
import { useParams } from 'next/navigation'
import { Input } from '@/components/ui'
import { Button } from '@/components/ui/button'
import {
  Table,
  TableBody,
  TableCell,
  TableHead,
  TableHeader,
  TableRow,
} from '@/components/ui/table'
import { createLogger } from '@/lib/logs/console/logger'
import { getFileExtension } from '@/lib/uploads/file-utils'
import type { WorkspaceFileRecord } from '@/lib/uploads/workspace-files'
import { getDocumentIcon } from '@/app/workspace/[workspaceId]/knowledge/components'
import { useUserPermissions } from '@/hooks/use-user-permissions'
import { useWorkspacePermissions } from '@/hooks/use-workspace-permissions'

const logger = createLogger('FileUploadsSettings')

const SUPPORTED_EXTENSIONS = [
  'pdf',
  'csv',
  'doc',
  'docx',
  'txt',
  'md',
  'xlsx',
  'xls',
  'html',
  'htm',
  'pptx',
  'ppt',
] as const
const ACCEPT_ATTR = '.pdf,.csv,.doc,.docx,.txt,.md,.xlsx,.xls,.html,.htm,.pptx,.ppt'

export function FileUploads() {
  const params = useParams()
  const workspaceId = params?.workspaceId as string
  const [files, setFiles] = useState<WorkspaceFileRecord[]>([])
  const [loading, setLoading] = useState(true)
  const [uploading, setUploading] = useState(false)
  const [deletingFileId, setDeletingFileId] = useState<string | null>(null)
  const [uploadError, setUploadError] = useState<string | null>(null)
  const fileInputRef = useRef<HTMLInputElement>(null)

  const { permissions: workspacePermissions, loading: permissionsLoading } =
    useWorkspacePermissions(workspaceId)
  const userPermissions = useUserPermissions(workspacePermissions, permissionsLoading)

  const loadFiles = async () => {
    if (!workspaceId) return

    try {
      setLoading(true)
      const response = await fetch(`/api/workspaces/${workspaceId}/files`)
      const data = await response.json()

      if (data.success) {
        setFiles(data.files)
      }
    } catch (error) {
      logger.error('Error loading workspace files:', error)
    } finally {
      setLoading(false)
    }
  }

  useEffect(() => {
    void loadFiles()
  }, [workspaceId])

  const handleUploadClick = () => {
    fileInputRef.current?.click()
  }

  const handleFileChange = async (e: React.ChangeEvent<HTMLInputElement>) => {
    const list = e.target.files
    if (!list || list.length === 0 || !workspaceId) return

    try {
      setUploading(true)
      setUploadError(null)

      const filesToUpload = Array.from(list)
      const unsupported: string[] = []
      const allowedFiles = filesToUpload.filter((f) => {
        const ext = getFileExtension(f.name)
        const ok = SUPPORTED_EXTENSIONS.includes(ext as (typeof SUPPORTED_EXTENSIONS)[number])
        if (!ok) unsupported.push(f.name)
        return ok
      })
      let lastError: string | null = null

      for (const selectedFile of allowedFiles) {
        try {
          const formData = new FormData()
          formData.append('file', selectedFile)

          const response = await fetch(`/api/workspaces/${workspaceId}/files`, {
            method: 'POST',
            body: formData,
          })

          const data = await response.json()
          if (!data.success) {
            lastError = data.error || 'Upload failed'
          }
        } catch (err) {
          logger.error('Error uploading file:', err)
          lastError = 'Upload failed'
        }
      }

      await loadFiles()
      if (unsupported.length) {
        lastError = `Unsupported file type: ${unsupported.join(', ')}`
      }
      if (lastError) setUploadError(lastError)
    } catch (error) {
      logger.error('Error uploading file:', error)
      setUploadError('Upload failed')
      setTimeout(() => setUploadError(null), 5000)
    } finally {
      setUploading(false)
      if (fileInputRef.current) {
        fileInputRef.current.value = ''
      }
    }
  }

  const handleDownload = async (file: WorkspaceFileRecord) => {
    if (!workspaceId) return

    try {
      const response = await fetch(`/api/workspaces/${workspaceId}/files/${file.id}/download`, {
        method: 'POST',
      })
      const data = await response.json()

      if (data.success && data.downloadUrl) {
        window.open(data.downloadUrl, '_blank')
      }
    } catch (error) {
      logger.error('Error downloading file:', error)
    }
  }

  const handleDelete = async (file: WorkspaceFileRecord) => {
    if (!workspaceId) return

    try {
      setDeletingFileId(file.id)

      const response = await fetch(`/api/workspaces/${workspaceId}/files/${file.id}`, {
        method: 'DELETE',
      })

      const data = await response.json()

      if (data.success) {
        await loadFiles()
      }
    } catch (error) {
      logger.error('Error deleting file:', error)
    } finally {
      setDeletingFileId(null)
    }
  }

  const formatFileSize = (bytes: number): string => {
    if (bytes < 1024) return `${bytes} B`
    if (bytes < 1024 * 1024) return `${(bytes / 1024).toFixed(1)} KB`
    return `${(bytes / (1024 * 1024)).toFixed(1)} MB`
  }

  const formatDate = (date: Date | string): string => {
    const d = new Date(date)
    const mm = String(d.getMonth() + 1).padStart(2, '0')
    const dd = String(d.getDate()).padStart(2, '0')
    const yy = String(d.getFullYear()).slice(2)
    return `${mm}/${dd}/${yy}`
  }

  const [search, setSearch] = useState('')
  const filteredFiles = useMemo(() => {
    if (!search) return files
    const q = search.toLowerCase()
    return files.filter((f) => f.name.toLowerCase().includes(q))
  }, [files, search])

  const truncateMiddle = (text: string, start = 24, end = 12) => {
    if (!text) return ''
    if (text.length <= start + end + 3) return text
    return `${text.slice(0, start)}...${text.slice(-end)}`
  }

  return (
    <div className='relative flex h-full flex-col'>
      {/* Header: search left, file count + Upload right */}
      <div className='flex items-center justify-between px-6 pt-4 pb-2'>
        <div className='flex h-9 w-56 items-center gap-2 rounded-[8px] border bg-transparent pr-2 pl-3'>
          <Search className='h-4 w-4 flex-shrink-0 text-muted-foreground' strokeWidth={2} />
          <Input
            placeholder='Search files...'
            value={search}
            onChange={(e) => setSearch(e.target.value)}
            className='flex-1 border-0 bg-transparent px-0 font-[380] font-sans text-base text-foreground leading-none placeholder:text-muted-foreground focus-visible:ring-0 focus-visible:ring-offset-0'
          />
        </div>
        <div className='flex items-center gap-3'>
          <div className='text-muted-foreground text-sm'>
            {files.length} {files.length === 1 ? 'file' : 'files'}
          </div>
          {userPermissions.canEdit && (
            <div className='flex items-center'>
              <input
                ref={fileInputRef}
                type='file'
                className='hidden'
                onChange={handleFileChange}
                disabled={uploading}
                accept={ACCEPT_ATTR}
              />
              <Button
                onClick={handleUploadClick}
                disabled={uploading}
                variant='ghost'
                className='h-9 rounded-[8px] border bg-background px-3 shadow-xs hover:bg-muted focus:outline-none focus-visible:ring-0 focus-visible:ring-offset-0'
              >
                <Upload className='mr-2 h-4 w-4 stroke-[2px]' />
                {uploading ? 'Uploading...' : 'Upload File'}
              </Button>
            </div>
          )}
        </div>
      </div>

      {/* Error message */}
      {uploadError && <div className='px-6 pb-2 text-red-600 text-sm'>{uploadError}</div>}

      {/* Files Table */}
      <div className='scrollbar-thin scrollbar-thumb-muted scrollbar-track-transparent min-h-0 flex-1 overflow-y-auto px-6'>
        {loading ? (
          <div className='py-8 text-center text-muted-foreground text-sm'>Loading files...</div>
        ) : files.length === 0 ? (
          <div className='py-8 text-center text-muted-foreground text-sm'>
            No files uploaded yet
          </div>
        ) : (
          <Table className='table-auto text-[13px]'>
            <TableHeader>
              <TableRow>
                <TableHead className='w-[56%] px-3 text-xs'>Name</TableHead>
                <TableHead className='w-[14%] px-3 text-left text-xs'>Size</TableHead>
                <TableHead className='w-[15%] px-3 text-left text-xs'>Uploaded</TableHead>
                <TableHead className='w-[15%] px-3 text-left text-xs'>Actions</TableHead>
              </TableRow>
            </TableHeader>
            <TableBody>
              {filteredFiles.map((file) => {
                const Icon = getDocumentIcon(file.type || '', file.name)
                return (
                  <TableRow key={file.id} className='hover:bg-muted/50'>
                    <TableCell className='px-3'>
                      <div className='flex min-w-0 items-center gap-2'>
                        <Icon className='h-3.5 w-3.5 shrink-0 text-muted-foreground' />
                        <span className='min-w-0 truncate font-normal' title={file.name}>
                          {truncateMiddle(file.name)}
                        </span>
                      </div>
                    </TableCell>
                    <TableCell className='whitespace-nowrap px-3 text-[12px] text-muted-foreground'>
                      {formatFileSize(file.size)}
                    </TableCell>
                    <TableCell className='whitespace-nowrap px-3 text-[12px] text-muted-foreground'>
                      {formatDate(file.uploadedAt)}
                    </TableCell>
                    <TableCell className='px-3'>
                      <div className='flex items-center gap-1'>
                        <Button
                          variant='ghost'
                          size='icon'
                          onClick={() => handleDownload(file)}
                          title='Download'
                          className='h-6 w-6'
                          aria-label={`Download ${file.name}`}
                        >
                          <Download className='h-3.5 w-3.5 text-muted-foreground' />
                        </Button>
                        {userPermissions.canEdit && (
                          <Button
                            variant='ghost'
                            size='icon'
                            onClick={() => handleDelete(file)}
                            className='h-6 w-6 text-destructive hover:text-destructive'
                            disabled={deletingFileId === file.id}
                            title='Delete'
                            aria-label={`Delete ${file.name}`}
                          >
                            <Trash2 className='h-3.5 w-3.5' />
                          </Button>
                        )}
                      </div>
                    </TableCell>
                  </TableRow>
                )
              })}
            </TableBody>
          </Table>
        )}
      </div>
    </div>
  )
}
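The settings panel above drives four REST routes. A minimal standalone sketch of the same calls follows — the endpoint paths, the `file` multipart field, and the `{ success, files | downloadUrl | error }` response envelope are all taken from the component's handlers above, not from a published API, so treat the shapes as inferred:

```ts
// Hypothetical client for the workspace-files routes used by FileUploads.
type WorkspaceFile = { id: string; name: string; size: number; uploadedAt: string }

async function listWorkspaceFiles(workspaceId: string): Promise<WorkspaceFile[]> {
  const res = await fetch(`/api/workspaces/${workspaceId}/files`)
  const data = await res.json()
  return data.success ? data.files : []
}

async function uploadWorkspaceFile(workspaceId: string, file: File): Promise<void> {
  const formData = new FormData()
  formData.append('file', file) // same multipart field name the component uses
  const res = await fetch(`/api/workspaces/${workspaceId}/files`, {
    method: 'POST',
    body: formData,
  })
  const data = await res.json()
  if (!data.success) throw new Error(data.error || 'Upload failed')
}

async function getDownloadUrl(workspaceId: string, fileId: string): Promise<string | null> {
  // Download is a POST that returns a short-lived URL rather than streaming bytes.
  const res = await fetch(`/api/workspaces/${workspaceId}/files/${fileId}/download`, {
    method: 'POST',
  })
  const data = await res.json()
  return data.success ? data.downloadUrl : null
}

async function deleteWorkspaceFile(workspaceId: string, fileId: string): Promise<boolean> {
  const res = await fetch(`/api/workspaces/${workspaceId}/files/${fileId}`, { method: 'DELETE' })
  const data = await res.json()
  return Boolean(data.success)
}
```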
@@ -3,6 +3,7 @@ export { ApiKeys } from './api-keys/api-keys'
export { Copilot } from './copilot/copilot'
export { Credentials } from './credentials/credentials'
export { EnvironmentVariables } from './environment/environment'
export { FileUploads } from './file-uploads/file-uploads'
export { General } from './general/general'
export { MCP } from './mcp/mcp'
export { Privacy } from './privacy/privacy'

@@ -3,6 +3,7 @@ import {
  Bot,
  CreditCard,
  FileCode,
  Files,
  Home,
  Key,
  LogIn,
@@ -32,6 +33,7 @@ interface SettingsNavigationProps {
    | 'account'
    | 'credentials'
    | 'apikeys'
    | 'files'
    | 'subscription'
    | 'team'
    | 'sso'
@@ -49,6 +51,7 @@ type NavigationItem = {
    | 'account'
    | 'credentials'
    | 'apikeys'
    | 'files'
    | 'subscription'
    | 'team'
    | 'sso'
@@ -94,6 +97,11 @@ const allNavigationItems: NavigationItem[] = [
    label: 'API Keys',
    icon: Key,
  },
  {
    id: 'files',
    label: 'File Uploads',
    icon: Files,
  },
  {
    id: 'copilot',
    label: 'Copilot',

@@ -2,6 +2,7 @@ import {
  Building2,
  Clock,
  Database,
  HardDrive,
  HeadphonesIcon,
  Infinity as InfinityIcon,
  MessageSquare,
@@ -15,6 +16,7 @@ import type { PlanFeature } from './components/plan-card'
export const PRO_PLAN_FEATURES: PlanFeature[] = [
  { icon: Zap, text: '25 runs per minute (sync)' },
  { icon: Clock, text: '200 runs per minute (async)' },
  { icon: HardDrive, text: '50GB file storage' },
  { icon: Building2, text: 'Unlimited workspaces' },
  { icon: Workflow, text: 'Unlimited workflows' },
  { icon: Users, text: 'Unlimited invites' },
@@ -24,12 +26,14 @@ export const PRO_PLAN_FEATURES: PlanFeature[] = [
export const TEAM_PLAN_FEATURES: PlanFeature[] = [
  { icon: Zap, text: '75 runs per minute (sync)' },
  { icon: Clock, text: '500 runs per minute (async)' },
  { icon: HardDrive, text: '500GB file storage (pooled)' },
  { icon: InfinityIcon, text: 'Everything in Pro' },
  { icon: MessageSquare, text: 'Dedicated Slack channel' },
]

export const ENTERPRISE_PLAN_FEATURES: PlanFeature[] = [
  { icon: Zap, text: 'Custom rate limits' },
  { icon: HardDrive, text: 'Custom file storage limits' },
  { icon: Server, text: 'Enterprise hosting' },
  { icon: HeadphonesIcon, text: 'Dedicated support' },
]

@@ -10,6 +10,7 @@ import {
  Copilot,
  Credentials,
  EnvironmentVariables,
  FileUploads,
  General,
  MCP,
  Privacy,
@@ -36,6 +37,7 @@ type SettingsSection =
  | 'account'
  | 'credentials'
  | 'apikeys'
  | 'files'
  | 'subscription'
  | 'team'
  | 'sso'
@@ -165,6 +167,11 @@ export function SettingsModal({ open, onOpenChange }: SettingsModalProps) {
              <ApiKeys onOpenChange={onOpenChange} />
            </div>
          )}
          {activeSection === 'files' && (
            <div className='h-full'>
              <FileUploads />
            </div>
          )}
          {isSubscriptionEnabled && activeSection === 'subscription' && (
            <div className='h-full'>
              <Subscription onOpenChange={onOpenChange} />

@@ -7,6 +7,7 @@ import {
  Clock,
  Database,
  DollarSign,
  HardDrive,
  HeadphonesIcon,
  Infinity as InfinityIcon,
  MessageSquare,
@@ -82,6 +83,7 @@ export function SubscriptionModal({ open, onOpenChange }: SubscriptionModalProps
        { text: '$10 free inference credit', included: true, icon: DollarSign },
        { text: '10 runs per minute (sync)', included: true, icon: Zap },
        { text: '50 runs per minute (async)', included: true, icon: Clock },
        { text: '5GB file storage', included: true, icon: HardDrive },
        { text: '7-day log retention', included: true, icon: Database },
      ],
      isActive: subscription.isFree,
@@ -94,6 +96,7 @@ export function SubscriptionModal({ open, onOpenChange }: SubscriptionModalProps
      features: [
        { text: '25 runs per minute (sync)', included: true, icon: Zap },
        { text: '200 runs per minute (async)', included: true, icon: Clock },
        { text: '50GB file storage', included: true, icon: HardDrive },
        { text: 'Unlimited workspaces', included: true, icon: Building2 },
        { text: 'Unlimited workflows', included: true, icon: Workflow },
        { text: 'Unlimited invites', included: true, icon: Users },
@@ -109,6 +112,7 @@ export function SubscriptionModal({ open, onOpenChange }: SubscriptionModalProps
      features: [
        { text: '75 runs per minute (sync)', included: true, icon: Zap },
        { text: '500 runs per minute (async)', included: true, icon: Clock },
        { text: '500GB file storage (pooled)', included: true, icon: HardDrive },
        { text: 'Everything in Pro', included: true, icon: InfinityIcon },
        { text: 'Dedicated Slack channel', included: true, icon: MessageSquare },
      ],
@@ -121,6 +125,7 @@ export function SubscriptionModal({ open, onOpenChange }: SubscriptionModalProps
      description: '',
      features: [
        { text: 'Custom rate limits', included: true, icon: Zap },
        { text: 'Custom file storage', included: true, icon: HardDrive },
        { text: 'Enterprise hosting license', included: true, icon: Server },
        { text: 'Custom enterprise support', included: true, icon: HeadphonesIcon },
      ],

@@ -86,6 +86,31 @@ export const DiscordBlock: BlockConfig<DiscordResponse> = {
      placeholder: 'Enter message content...',
      condition: { field: 'operation', value: 'discord_send_message' },
    },
    // File upload (basic mode)
    {
      id: 'attachmentFiles',
      title: 'Attachments',
      type: 'file-upload',
      layout: 'full',
      canonicalParamId: 'files',
      placeholder: 'Upload files to attach',
      condition: { field: 'operation', value: 'discord_send_message' },
      mode: 'basic',
      multiple: true,
      required: false,
    },
    // Variable reference (advanced mode)
    {
      id: 'files',
      title: 'File Attachments',
      type: 'short-input',
      layout: 'full',
      canonicalParamId: 'files',
      placeholder: 'Reference files from previous blocks',
      condition: { field: 'operation', value: 'discord_send_message' },
      mode: 'advanced',
      required: false,
    },
  ],
  tools: {
    access: [
@@ -120,19 +145,22 @@ export const DiscordBlock: BlockConfig<DiscordResponse> = {
        const channelId = (params.channelId || '').trim()

        switch (params.operation) {
          case 'discord_send_message':
          case 'discord_send_message': {
            if (!serverId) {
              throw new Error('Server ID is required.')
            }
            if (!channelId) {
              throw new Error('Channel ID is required.')
            }
            const fileParam = params.attachmentFiles || params.files
            return {
              ...commonParams,
              serverId,
              channelId,
              content: params.content,
              ...(fileParam && { files: fileParam }),
            }
          }
          case 'discord_get_messages':
            if (!serverId) {
              throw new Error('Server ID is required.')
@@ -171,6 +199,8 @@ export const DiscordBlock: BlockConfig<DiscordResponse> = {
    serverId: { type: 'string', description: 'Discord server identifier' },
    channelId: { type: 'string', description: 'Discord channel identifier' },
    content: { type: 'string', description: 'Message content' },
    attachmentFiles: { type: 'json', description: 'Files to attach (UI upload)' },
    files: { type: 'json', description: 'Files to attach (UserFile array)' },
    limit: { type: 'number', description: 'Message limit' },
    userId: { type: 'string', description: 'Discord user identifier' },
  },
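The Discord hunk introduces the pattern every block in this commit repeats: a `file-upload` sub-block for basic mode and a `short-input` reference for advanced mode, both mapped onto one canonical parameter via `canonicalParamId`, then collapsed with `params.attachmentFiles || params.files` in the params builder. The braces added to `case 'discord_send_message': {` are what make the new `const fileParam` legal — a lexical declaration directly inside a `case` without its own block is a TypeScript/ESLint error. A minimal sketch of the resolution step, with the `UserFile` shape left opaque since this diff never defines it:

```ts
// Sketch of the basic/advanced canonical-param resolution used across these blocks.
// UserFile stands in for the platform's file descriptor; its fields are not shown here.
type UserFile = unknown

interface FileParams {
  attachmentFiles?: UserFile[] // populated by the basic-mode file-upload sub-block
  files?: UserFile[] // populated by the advanced-mode variable reference
}

function resolveFiles(params: FileParams): UserFile[] | undefined {
  // Basic-mode uploads win when both are set; otherwise fall back to the reference.
  return params.attachmentFiles || params.files
}

// Usage mirrors the Discord case above: spread into the tool call only when present.
function buildSendParams(common: Record<string, unknown>, params: FileParams) {
  const fileParam = resolveFiles(params)
  return { ...common, ...(fileParam && { files: fileParam }) }
}
```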
@@ -25,7 +25,7 @@ export const FileBlock: BlockConfig<FileParserOutput> = {
      layout: 'full' as SubBlockLayout,
      options: [
        { id: 'url', label: 'File URL' },
        { id: 'upload', label: 'Upload Files' },
        { id: 'upload', label: 'Uploaded Files' },
      ],
    },
    {
@@ -42,7 +42,7 @@ export const FileBlock: BlockConfig<FileParserOutput> = {

    {
      id: 'file',
      title: 'Upload Files',
      title: 'Process Files',
      type: 'file-upload' as SubBlockType,
      layout: 'full' as SubBlockLayout,
      acceptedTypes: '.pdf,.csv,.doc,.docx,.txt,.md,.xlsx,.xls,.html,.htm,.pptx,.ppt',
@@ -73,6 +73,7 @@ export const FileBlock: BlockConfig<FileParserOutput> = {
        return {
          filePath: fileUrl,
          fileType: params.fileType || 'auto',
          workspaceId: params._context?.workspaceId,
        }
      }


@@ -75,6 +75,31 @@ export const GmailBlock: BlockConfig<GmailToolResponse> = {
      condition: { field: 'operation', value: ['send_gmail', 'draft_gmail'] },
      required: true,
    },
    // File upload (basic mode)
    {
      id: 'attachmentFiles',
      title: 'Attachments',
      type: 'file-upload',
      layout: 'full',
      canonicalParamId: 'attachments',
      placeholder: 'Upload files to attach',
      condition: { field: 'operation', value: ['send_gmail', 'draft_gmail'] },
      mode: 'basic',
      multiple: true,
      required: false,
    },
    // Variable reference (advanced mode)
    {
      id: 'attachments',
      title: 'Attachments',
      type: 'short-input',
      layout: 'full',
      canonicalParamId: 'attachments',
      placeholder: 'Reference files from previous blocks',
      condition: { field: 'operation', value: ['send_gmail', 'draft_gmail'] },
      mode: 'advanced',
      required: false,
    },
    // Advanced Settings - Additional Recipients
    {
      id: 'cc',
@@ -225,6 +250,7 @@ export const GmailBlock: BlockConfig<GmailToolResponse> = {
    body: { type: 'string', description: 'Email content' },
    cc: { type: 'string', description: 'CC recipients (comma-separated)' },
    bcc: { type: 'string', description: 'BCC recipients (comma-separated)' },
    attachments: { type: 'json', description: 'Files to attach (UserFile array)' },
    // Read operation inputs
    folder: { type: 'string', description: 'Gmail folder' },
    manualFolder: { type: 'string', description: 'Manual folder name' },

@@ -22,6 +22,7 @@ export const GoogleDriveBlock: BlockConfig<GoogleDriveResponse> = {
      layout: 'full',
      options: [
        { label: 'Create Folder', id: 'create_folder' },
        { label: 'Create File', id: 'create_file' },
        { label: 'Upload File', id: 'upload' },
        { label: 'List Files', id: 'list' },
      ],
@@ -39,23 +40,48 @@ export const GoogleDriveBlock: BlockConfig<GoogleDriveResponse> = {
      requiredScopes: ['https://www.googleapis.com/auth/drive.file'],
      placeholder: 'Select Google Drive account',
    },
    // Upload Fields
    // Create/Upload File Fields
    {
      id: 'fileName',
      title: 'File Name',
      type: 'short-input',
      layout: 'full',
      placeholder: 'Name of the file',
      condition: { field: 'operation', value: 'upload' },
      placeholder: 'Name of the file (e.g., document.txt)',
      condition: { field: 'operation', value: ['create_file', 'upload'] },
      required: true,
    },
    // File upload (basic mode) - binary files
    {
      id: 'fileUpload',
      title: 'Upload File',
      type: 'file-upload',
      layout: 'full',
      canonicalParamId: 'file',
      placeholder: 'Upload a file to Google Drive',
      condition: { field: 'operation', value: 'upload' },
      mode: 'basic',
      multiple: false,
      required: false,
    },
    // Variable reference (advanced mode) - for referencing files from previous blocks
    {
      id: 'file',
      title: 'File Reference',
      type: 'short-input',
      layout: 'full',
      canonicalParamId: 'file',
      placeholder: 'Reference file from previous block (e.g., {{block_name.file}})',
      condition: { field: 'operation', value: 'upload' },
      mode: 'advanced',
      required: false,
    },
    {
      id: 'content',
      title: 'Content',
      title: 'Text Content',
      type: 'long-input',
      layout: 'full',
      placeholder: 'Content to upload to the file',
      condition: { field: 'operation', value: 'upload' },
      placeholder: 'Text content for the file',
      condition: { field: 'operation', value: 'create_file' },
      required: true,
    },
    {
@@ -64,13 +90,18 @@ export const GoogleDriveBlock: BlockConfig<GoogleDriveResponse> = {
      type: 'dropdown',
      layout: 'full',
      options: [
        { label: 'Auto-detect from file', id: '' },
        { label: 'Google Doc', id: 'application/vnd.google-apps.document' },
        { label: 'Google Sheet', id: 'application/vnd.google-apps.spreadsheet' },
        { label: 'Google Slides', id: 'application/vnd.google-apps.presentation' },
        { label: 'PDF (application/pdf)', id: 'application/pdf' },
        { label: 'Plain Text (text/plain)', id: 'text/plain' },
        { label: 'HTML (text/html)', id: 'text/html' },
        { label: 'CSV (text/csv)', id: 'text/csv' },
      ],
      placeholder: 'Select a file type',
      condition: { field: 'operation', value: 'upload' },
      placeholder: 'Select a file type (optional)',
      condition: { field: 'operation', value: ['create_file', 'upload'] },
      required: false,
    },
    {
      id: 'folderSelector',
@@ -85,7 +116,7 @@ export const GoogleDriveBlock: BlockConfig<GoogleDriveResponse> = {
      placeholder: 'Select a parent folder',
      mode: 'basic',
      dependsOn: ['credential'],
      condition: { field: 'operation', value: 'upload' },
      condition: { field: 'operation', value: ['create_file', 'upload'] },
    },
    {
      id: 'manualFolderId',
@@ -95,7 +126,7 @@ export const GoogleDriveBlock: BlockConfig<GoogleDriveResponse> = {
      canonicalParamId: 'folderId',
      placeholder: 'Enter parent folder ID (leave empty for root folder)',
      mode: 'advanced',
      condition: { field: 'operation', value: 'upload' },
      condition: { field: 'operation', value: ['create_file', 'upload'] },
    },
    // Get Content Fields
    // {
@@ -223,6 +254,7 @@ export const GoogleDriveBlock: BlockConfig<GoogleDriveResponse> = {
    config: {
      tool: (params) => {
        switch (params.operation) {
          case 'create_file':
          case 'upload':
            return 'google_drive_upload'
          case 'create_folder':
@@ -254,7 +286,8 @@ export const GoogleDriveBlock: BlockConfig<GoogleDriveResponse> = {
    credential: { type: 'string', description: 'Google Drive access token' },
    // Upload and Create Folder operation inputs
    fileName: { type: 'string', description: 'File or folder name' },
    content: { type: 'string', description: 'File content' },
    file: { type: 'json', description: 'File to upload (UserFile object)' },
    content: { type: 'string', description: 'Text content to upload' },
    mimeType: { type: 'string', description: 'File MIME type' },
    // List operation inputs
    folderSelector: { type: 'string', description: 'Selected folder' },

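Both Drive operations funnel into the same `google_drive_upload` tool; only the canonical parameter carrying the payload differs — `content` for `create_file`, `file` for `upload` (resolved from `fileUpload` or the advanced reference). A hedged sketch of that routing, with the surrounding `BlockConfig` machinery stubbed out and the other cases omitted since their tool names are not shown in this hunk:

```ts
// Sketch only: mirrors the tool router in the Google Drive hunk above.
type DriveParams = {
  operation: 'create_file' | 'upload' | 'create_folder' | 'list'
  fileUpload?: unknown // basic-mode upload (canonicalParamId: 'file')
  file?: unknown // advanced-mode reference (canonicalParamId: 'file')
  content?: string // text payload, create_file only
}

function driveTool(params: DriveParams): string {
  switch (params.operation) {
    case 'create_file':
    case 'upload':
      return 'google_drive_upload' // one tool serves both operations
    default:
      // create_folder / list route to their own tools, not covered by this sketch
      throw new Error(`Not covered by this sketch: ${params.operation}`)
  }
}
```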
@@ -138,6 +138,31 @@ export const MicrosoftTeamsBlock: BlockConfig<MicrosoftTeamsResponse> = {
      condition: { field: 'operation', value: ['write_chat', 'write_channel'] },
      required: true,
    },
    // File upload (basic mode)
    {
      id: 'attachmentFiles',
      title: 'Attachments',
      type: 'file-upload',
      layout: 'full',
      canonicalParamId: 'files',
      placeholder: 'Upload files to attach',
      condition: { field: 'operation', value: ['write_chat', 'write_channel'] },
      mode: 'basic',
      multiple: true,
      required: false,
    },
    // Variable reference (advanced mode)
    {
      id: 'files',
      title: 'File Attachments',
      type: 'short-input',
      layout: 'full',
      canonicalParamId: 'files',
      placeholder: 'Reference files from previous blocks',
      condition: { field: 'operation', value: ['write_chat', 'write_channel'] },
      mode: 'advanced',
      required: false,
    },
    {
      id: 'triggerConfig',
      title: 'Trigger Configuration',
@@ -179,6 +204,8 @@ export const MicrosoftTeamsBlock: BlockConfig<MicrosoftTeamsResponse> = {
          manualChatId,
          channelId,
          manualChannelId,
          attachmentFiles,
          files,
          ...rest
        } = params

@@ -186,11 +213,17 @@ export const MicrosoftTeamsBlock: BlockConfig<MicrosoftTeamsResponse> = {
        const effectiveChatId = (chatId || manualChatId || '').trim()
        const effectiveChannelId = (channelId || manualChannelId || '').trim()

        const baseParams = {
        const baseParams: Record<string, any> = {
          ...rest,
          credential,
        }

        // Add files if provided
        const fileParam = attachmentFiles || files
        if (fileParam && (operation === 'write_chat' || operation === 'write_channel')) {
          baseParams.files = fileParam
        }

        if (operation === 'read_chat' || operation === 'write_chat') {
          if (!effectiveChatId) {
            throw new Error('Chat ID is required. Please select a chat or enter a chat ID.')
@@ -223,6 +256,8 @@ export const MicrosoftTeamsBlock: BlockConfig<MicrosoftTeamsResponse> = {
    teamId: { type: 'string', description: 'Team identifier' },
    manualTeamId: { type: 'string', description: 'Manual team identifier' },
    content: { type: 'string', description: 'Message content' },
    attachmentFiles: { type: 'json', description: 'Files to attach (UI upload)' },
    files: { type: 'json', description: 'Files to attach (UserFile array)' },
  },
  outputs: {
    content: { type: 'string', description: 'Formatted message content from chat/channel' },

@@ -22,6 +22,7 @@ export const OneDriveBlock: BlockConfig<OneDriveResponse> = {
      layout: 'full',
      options: [
        { label: 'Create Folder', id: 'create_folder' },
        { label: 'Create File', id: 'create_file' },
        { label: 'Upload File', id: 'upload' },
        { label: 'List Files', id: 'list' },
      ],
@@ -44,22 +45,49 @@ export const OneDriveBlock: BlockConfig<OneDriveResponse> = {
      ],
      placeholder: 'Select Microsoft account',
    },
    // Upload Fields
    // Create File Fields
    {
      id: 'fileName',
      title: 'File Name',
      type: 'short-input',
      layout: 'full',
      placeholder: 'Name of the file',
      placeholder: 'Name of the file (e.g., document.txt)',
      condition: { field: 'operation', value: ['create_file', 'upload'] },
      required: true,
    },
    // File upload (basic mode)
    {
      id: 'file',
      title: 'File',
      type: 'file-upload',
      layout: 'full',
      canonicalParamId: 'file',
      placeholder: 'Upload a file',
      condition: { field: 'operation', value: 'upload' },
      mode: 'basic',
      multiple: false,
      required: false,
    },
    // Variable reference (advanced mode)
    {
      id: 'fileReference',
      title: 'File',
      type: 'short-input',
      layout: 'full',
      canonicalParamId: 'file',
      placeholder: 'Reference file from previous block (e.g., {{block_1.file}})',
      condition: { field: 'operation', value: 'upload' },
      mode: 'advanced',
      required: false,
    },
    {
      id: 'content',
      title: 'Content',
      title: 'Text Content',
      type: 'long-input',
      layout: 'full',
      placeholder: 'Content to upload to the file',
      condition: { field: 'operation', value: 'upload' },
      placeholder: 'Text content for the file',
      condition: { field: 'operation', value: 'create_file' },
      required: true,
    },

    {
@@ -82,7 +110,7 @@ export const OneDriveBlock: BlockConfig<OneDriveResponse> = {
      placeholder: 'Select a parent folder',
      dependsOn: ['credential'],
      mode: 'basic',
      condition: { field: 'operation', value: 'upload' },
      condition: { field: 'operation', value: ['create_file', 'upload'] },
    },
    {
      id: 'manualFolderId',
@@ -93,7 +121,7 @@ export const OneDriveBlock: BlockConfig<OneDriveResponse> = {
      placeholder: 'Enter parent folder ID (leave empty for root folder)',
      dependsOn: ['credential'],
      mode: 'advanced',
      condition: { field: 'operation', value: 'upload' },
      condition: { field: 'operation', value: ['create_file', 'upload'] },
    },
    {
      id: 'folderName',
@@ -194,6 +222,7 @@ export const OneDriveBlock: BlockConfig<OneDriveResponse> = {
    config: {
      tool: (params) => {
        switch (params.operation) {
          case 'create_file':
          case 'upload':
            return 'onedrive_upload'
          case 'create_folder':
@@ -225,7 +254,9 @@ export const OneDriveBlock: BlockConfig<OneDriveResponse> = {
    credential: { type: 'string', description: 'Microsoft account credential' },
    // Upload and Create Folder operation inputs
    fileName: { type: 'string', description: 'File name' },
    content: { type: 'string', description: 'File content' },
    file: { type: 'json', description: 'File to upload (UserFile object)' },
    fileReference: { type: 'json', description: 'File reference from previous block' },
    content: { type: 'string', description: 'Text content to upload' },
    // Get Content operation inputs
    // fileId: { type: 'string', required: false },
    // List operation inputs

@@ -97,6 +97,31 @@ export const OutlookBlock: BlockConfig<OutlookResponse> = {
      condition: { field: 'operation', value: ['send_outlook', 'draft_outlook'] },
      required: true,
    },
    // File upload (basic mode)
    {
      id: 'attachmentFiles',
      title: 'Attachments',
      type: 'file-upload',
      layout: 'full',
      canonicalParamId: 'attachments',
      placeholder: 'Upload files to attach',
      condition: { field: 'operation', value: ['send_outlook', 'draft_outlook'] },
      mode: 'basic',
      multiple: true,
      required: false,
    },
    // Variable reference (advanced mode)
    {
      id: 'attachments',
      title: 'Attachments',
      type: 'short-input',
      layout: 'full',
      canonicalParamId: 'attachments',
      placeholder: 'Reference files from previous blocks',
      condition: { field: 'operation', value: ['send_outlook', 'draft_outlook'] },
      mode: 'advanced',
      required: false,
    },
    // Advanced Settings - Threading
    {
      id: 'replyToMessageId',
@@ -231,6 +256,8 @@ export const OutlookBlock: BlockConfig<OutlookResponse> = {
    to: { type: 'string', description: 'Recipient email address' },
    subject: { type: 'string', description: 'Email subject' },
    body: { type: 'string', description: 'Email content' },
    attachmentFiles: { type: 'json', description: 'Files to attach (UI upload)' },
    attachments: { type: 'json', description: 'Files to attach (UserFile array)' },
    // Forward operation inputs
    messageId: { type: 'string', description: 'Message ID to forward' },
    comment: { type: 'string', description: 'Optional comment for forwarding' },

@@ -6,15 +6,31 @@ import type { S3Response } from '@/tools/s3/types'
export const S3Block: BlockConfig<S3Response> = {
  type: 's3',
  name: 'S3',
  description: 'View S3 files',
  description: 'Upload, download, list, and manage S3 files',
  authMode: AuthMode.ApiKey,
  longDescription:
    'Integrate S3 into the workflow. Can get presigned URLs for S3 objects. Requires access key and secret access key.',
    'Integrate S3 into the workflow. Upload files, download objects, list bucket contents, delete objects, and copy objects between buckets. Requires AWS access key and secret access key.',
  docsLink: 'https://docs.sim.ai/tools/s3',
  category: 'tools',
  bgColor: '#E0E0E0',
  icon: S3Icon,
  subBlocks: [
    // Operation selector
    {
      id: 'operation',
      title: 'Operation',
      type: 'dropdown',
      layout: 'full',
      options: [
        { label: 'Download File', id: 'get_object' },
        { label: 'Upload File', id: 'put_object' },
        { label: 'List Objects', id: 'list_objects' },
        { label: 'Delete Object', id: 'delete_object' },
        { label: 'Copy Object', id: 'copy_object' },
      ],
      value: () => 'get_object',
    },
    // AWS Credentials
    {
      id: 'accessKeyId',
      title: 'Access Key ID',
@@ -33,76 +49,394 @@ export const S3Block: BlockConfig<S3Response> = {
      password: true,
      required: true,
    },
    {
      id: 'region',
      title: 'AWS Region',
      type: 'short-input',
      layout: 'full',
      placeholder: 'e.g., us-east-1, us-west-2',
      condition: {
        field: 'operation',
        value: ['put_object', 'list_objects', 'delete_object', 'copy_object'],
      },
      required: true,
    },
    {
      id: 'bucketName',
      title: 'Bucket Name',
      type: 'short-input',
      layout: 'full',
      placeholder: 'Enter S3 bucket name',
      condition: { field: 'operation', value: ['put_object', 'list_objects', 'delete_object'] },
      required: true,
    },

    // ===== UPLOAD (PUT OBJECT) FIELDS =====
    {
      id: 'objectKey',
      title: 'Object Key/Path',
      type: 'short-input',
      layout: 'full',
      placeholder: 'e.g., myfile.pdf or documents/report.pdf',
      condition: { field: 'operation', value: 'put_object' },
      required: true,
    },
    {
      id: 'uploadFile',
      title: 'File to Upload',
      type: 'file-upload',
      layout: 'full',
      canonicalParamId: 'file',
      placeholder: 'Upload a file',
      condition: { field: 'operation', value: 'put_object' },
      mode: 'basic',
      multiple: false,
    },
    {
      id: 'file',
      title: 'File Reference',
      type: 'short-input',
      layout: 'full',
      canonicalParamId: 'file',
      placeholder: 'Reference a file from previous blocks',
      condition: { field: 'operation', value: 'put_object' },
      mode: 'advanced',
    },
    {
      id: 'content',
      title: 'Text Content',
      type: 'long-input',
      layout: 'full',
      placeholder: 'Or enter text content to upload',
      condition: { field: 'operation', value: 'put_object' },
    },
    {
      id: 'contentType',
      title: 'Content Type',
      type: 'short-input',
      layout: 'full',
      placeholder: 'e.g., text/plain, application/json (auto-detected if not provided)',
      condition: { field: 'operation', value: 'put_object' },
      mode: 'advanced',
    },
    {
      id: 'acl',
      title: 'Access Control',
      type: 'dropdown',
      layout: 'full',
      options: [
        { label: 'Private', id: 'private' },
        { label: 'Public Read', id: 'public-read' },
        { label: 'Public Read/Write', id: 'public-read-write' },
        { label: 'Authenticated Read', id: 'authenticated-read' },
      ],
      placeholder: 'Select ACL (default: private)',
      condition: { field: 'operation', value: 'put_object' },
      mode: 'advanced',
    },

    // ===== DOWNLOAD (GET OBJECT) FIELDS =====
    {
      id: 's3Uri',
      title: 'S3 Object URL',
      type: 'short-input',
      layout: 'full',
      placeholder: 'e.g., https://bucket-name.s3.region.amazonaws.com/path/to/file',
      condition: { field: 'operation', value: 'get_object' },
      required: true,
    },

    // ===== LIST OBJECTS FIELDS =====
    {
      id: 'prefix',
      title: 'Prefix/Folder',
      type: 'short-input',
      layout: 'full',
      placeholder: 'Filter by prefix (e.g., folder/ or leave empty for all)',
      condition: { field: 'operation', value: 'list_objects' },
    },
    {
      id: 'maxKeys',
      title: 'Max Results',
      type: 'short-input',
      layout: 'full',
      placeholder: 'Maximum number of objects to return (default: 1000)',
      condition: { field: 'operation', value: 'list_objects' },
      mode: 'advanced',
    },
    {
      id: 'continuationToken',
      title: 'Continuation Token',
      type: 'short-input',
      layout: 'full',
      placeholder: 'Token for pagination (from previous response)',
      condition: { field: 'operation', value: 'list_objects' },
      mode: 'advanced',
    },

    // ===== DELETE OBJECT FIELDS =====
    {
      id: 'objectKey',
      title: 'Object Key/Path',
      type: 'short-input',
      layout: 'full',
      placeholder: 'e.g., myfile.pdf or documents/report.pdf',
      condition: { field: 'operation', value: 'delete_object' },
      required: true,
    },

    // ===== COPY OBJECT FIELDS =====
    {
      id: 'sourceBucket',
      title: 'Source Bucket',
      type: 'short-input',
      layout: 'full',
      placeholder: 'Source bucket name',
      condition: { field: 'operation', value: 'copy_object' },
      required: true,
    },
    {
      id: 'sourceKey',
      title: 'Source Object Key',
      type: 'short-input',
      layout: 'full',
      placeholder: 'e.g., oldfile.pdf or folder/file.pdf',
      condition: { field: 'operation', value: 'copy_object' },
      required: true,
    },
    {
      id: 'destinationBucket',
      title: 'Destination Bucket',
      type: 'short-input',
      layout: 'full',
      placeholder: 'Destination bucket name (can be same as source)',
      condition: { field: 'operation', value: 'copy_object' },
      required: true,
    },
    {
      id: 'destinationKey',
      title: 'Destination Object Key',
      type: 'short-input',
      layout: 'full',
      placeholder: 'e.g., newfile.pdf or backup/file.pdf',
      condition: { field: 'operation', value: 'copy_object' },
      required: true,
    },
    {
      id: 'copyAcl',
      title: 'Access Control',
      type: 'dropdown',
      layout: 'full',
      options: [
        { label: 'Private', id: 'private' },
        { label: 'Public Read', id: 'public-read' },
        { label: 'Public Read/Write', id: 'public-read-write' },
        { label: 'Authenticated Read', id: 'authenticated-read' },
      ],
      placeholder: 'Select ACL for copied object (default: private)',
      condition: { field: 'operation', value: 'copy_object' },
      mode: 'advanced',
      canonicalParamId: 'acl',
    },
  ],
  tools: {
    access: ['s3_get_object'],
    access: [
      's3_put_object',
      's3_get_object',
      's3_list_objects',
      's3_delete_object',
      's3_copy_object',
    ],
    config: {
      tool: () => 's3_get_object',
      tool: (params) => {
        // Default to get_object for backward compatibility with existing workflows
        const operation = params.operation || 'get_object'

        switch (operation) {
          case 'put_object':
            return 's3_put_object'
          case 'get_object':
            return 's3_get_object'
          case 'list_objects':
            return 's3_list_objects'
          case 'delete_object':
            return 's3_delete_object'
          case 'copy_object':
            return 's3_copy_object'
          default:
            throw new Error(`Invalid S3 operation: ${operation}`)
        }
      },
      params: (params) => {
        // Validate required fields
        // Validate required fields (common to all operations)
        if (!params.accessKeyId) {
          throw new Error('Access Key ID is required')
        }
        if (!params.secretAccessKey) {
          throw new Error('Secret Access Key is required')
        }
        if (!params.s3Uri) {
          throw new Error('S3 Object URL is required')
        }

        // Parse S3 URI
        try {
          const url = new URL(params.s3Uri)
          const hostname = url.hostname
        // Default to get_object for backward compatibility with existing workflows
        const operation = params.operation || 'get_object'

          // Extract bucket name from hostname
          const bucketName = hostname.split('.')[0]
        // Operation-specific parameters
        switch (operation) {
          case 'put_object': {
            if (!params.region) {
              throw new Error('AWS Region is required')
            }
            if (!params.bucketName) {
              throw new Error('Bucket Name is required')
            }
            if (!params.objectKey) {
              throw new Error('Object Key is required for upload')
            }
            // Use file from uploadFile if in basic mode, otherwise use file reference
            const fileParam = params.uploadFile || params.file

          // Extract region from hostname
          const regionMatch = hostname.match(/s3[.-]([^.]+)\.amazonaws\.com/)
          const region = regionMatch ? regionMatch[1] : 'us-east-1'

          // Extract object key from pathname (remove leading slash)
          const objectKey = url.pathname.startsWith('/') ? url.pathname.substring(1) : url.pathname

          if (!bucketName) {
            throw new Error('Could not extract bucket name from URL')
            return {
              accessKeyId: params.accessKeyId,
              secretAccessKey: params.secretAccessKey,
              region: params.region,
              bucketName: params.bucketName,
              objectKey: params.objectKey,
              file: fileParam,
              content: params.content,
              contentType: params.contentType,
              acl: params.acl,
            }
          }

          if (!objectKey) {
            throw new Error('No object key found in URL')
          case 'get_object': {
            if (!params.s3Uri) {
              throw new Error('S3 Object URL is required')
            }

            // Parse S3 URI for get_object
            try {
              const url = new URL(params.s3Uri)
              const hostname = url.hostname
              const bucketName = hostname.split('.')[0]
              const regionMatch = hostname.match(/s3[.-]([^.]+)\.amazonaws\.com/)
              const region = regionMatch ? regionMatch[1] : params.region
              const objectKey = url.pathname.startsWith('/')
                ? url.pathname.substring(1)
                : url.pathname

              if (!bucketName || !objectKey) {
                throw new Error('Could not parse S3 URL')
              }

              return {
                accessKeyId: params.accessKeyId,
                secretAccessKey: params.secretAccessKey,
                region,
                bucketName,
                objectKey,
                s3Uri: params.s3Uri,
              }
            } catch (_error) {
              throw new Error(
                'Invalid S3 Object URL format. Expected: https://bucket-name.s3.region.amazonaws.com/path/to/file'
              )
            }
          }

          return {
            accessKeyId: params.accessKeyId,
            secretAccessKey: params.secretAccessKey,
            region,
            bucketName,
            objectKey,
          case 'list_objects':
            if (!params.region) {
              throw new Error('AWS Region is required')
            }
            if (!params.bucketName) {
              throw new Error('Bucket Name is required')
            }
            return {
              accessKeyId: params.accessKeyId,
              secretAccessKey: params.secretAccessKey,
              region: params.region,
              bucketName: params.bucketName,
              prefix: params.prefix,
              maxKeys: params.maxKeys ? Number.parseInt(params.maxKeys as string, 10) : undefined,
              continuationToken: params.continuationToken,
            }

          case 'delete_object':
            if (!params.region) {
              throw new Error('AWS Region is required')
            }
            if (!params.bucketName) {
              throw new Error('Bucket Name is required')
            }
            if (!params.objectKey) {
              throw new Error('Object Key is required for deletion')
            }
            return {
              accessKeyId: params.accessKeyId,
              secretAccessKey: params.secretAccessKey,
              region: params.region,
              bucketName: params.bucketName,
              objectKey: params.objectKey,
            }

          case 'copy_object': {
            if (!params.region) {
              throw new Error('AWS Region is required')
            }
            if (!params.sourceBucket || !params.sourceKey) {
              throw new Error('Source bucket and key are required')
            }
            if (!params.destinationBucket || !params.destinationKey) {
              throw new Error('Destination bucket and key are required')
            }
            // Use copyAcl if provided, map to acl parameter
            const acl = params.copyAcl || params.acl
            return {
              accessKeyId: params.accessKeyId,
              secretAccessKey: params.secretAccessKey,
              region: params.region,
              sourceBucket: params.sourceBucket,
              sourceKey: params.sourceKey,
              destinationBucket: params.destinationBucket,
              destinationKey: params.destinationKey,
              acl: acl,
            }
          }
        } catch (_error) {
          throw new Error(
            'Invalid S3 Object URL format. Expected format: https://bucket-name.s3.region.amazonaws.com/path/to/file'
          )

          default:
            throw new Error(`Unknown operation: ${operation}`)
        }
      },
    },
  },
  inputs: {
    operation: { type: 'string', description: 'Operation to perform' },
    accessKeyId: { type: 'string', description: 'AWS access key ID' },
    secretAccessKey: { type: 'string', description: 'AWS secret access key' },
    region: { type: 'string', description: 'AWS region' },
    bucketName: { type: 'string', description: 'S3 bucket name' },
    // Upload inputs
    objectKey: { type: 'string', description: 'Object key/path in S3' },
    uploadFile: { type: 'json', description: 'File to upload (UI)' },
    file: { type: 'json', description: 'File to upload (reference)' },
    content: { type: 'string', description: 'Text content to upload' },
    contentType: { type: 'string', description: 'Content-Type header' },
    acl: { type: 'string', description: 'Access control list' },
    // Download inputs
    s3Uri: { type: 'string', description: 'S3 object URL' },
    // List inputs
    prefix: { type: 'string', description: 'Prefix filter' },
    maxKeys: { type: 'number', description: 'Maximum results' },
    continuationToken: { type: 'string', description: 'Pagination token' },
    // Copy inputs
    sourceBucket: { type: 'string', description: 'Source bucket name' },
    sourceKey: { type: 'string', description: 'Source object key' },
    destinationBucket: { type: 'string', description: 'Destination bucket name' },
    destinationKey: { type: 'string', description: 'Destination object key' },
    copyAcl: { type: 'string', description: 'ACL for copied object' },
  },
  outputs: {
    url: { type: 'string', description: 'Presigned URL' },
    metadata: { type: 'json', description: 'Object metadata' },
    url: { type: 'string', description: 'URL of S3 object' },
    objects: { type: 'json', description: 'List of objects (for list operation)' },
    deleted: { type: 'boolean', description: 'Deletion status' },
    metadata: { type: 'json', description: 'Operation metadata' },
  },
}
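For `get_object` the block still derives bucket, region, and key from a single virtual-hosted-style URL rather than asking for them separately. A standalone sketch of that parsing, using the same regex as the hunk above; like the block, it assumes virtual-hosted-style URLs (`https://bucket.s3.region.amazonaws.com/key`) and would misread path-style URLs (`https://s3.region.amazonaws.com/bucket/key`), where the first hostname label is `s3` rather than a bucket name:

```ts
// Sketch of the URL parsing used by the get_object branch above.
interface ParsedS3Url {
  bucketName: string
  region: string | undefined
  objectKey: string
}

function parseS3ObjectUrl(s3Uri: string, fallbackRegion?: string): ParsedS3Url {
  const url = new URL(s3Uri) // throws on malformed input, like the block's try/catch path
  const hostname = url.hostname

  // Virtual-hosted style: the bucket is the first hostname label.
  const bucketName = hostname.split('.')[0]

  // Same regex as the block: matches both s3.us-east-1.amazonaws.com and s3-us-west-2 forms.
  const regionMatch = hostname.match(/s3[.-]([^.]+)\.amazonaws\.com/)
  const region = regionMatch ? regionMatch[1] : fallbackRegion

  // Strip the leading slash from the pathname to get the object key.
  const objectKey = url.pathname.startsWith('/') ? url.pathname.substring(1) : url.pathname

  if (!bucketName || !objectKey) {
    throw new Error('Could not parse S3 URL')
  }
  return { bucketName, region, objectKey }
}

// Example:
// parseS3ObjectUrl('https://my-bucket.s3.us-east-1.amazonaws.com/docs/report.pdf')
//   -> { bucketName: 'my-bucket', region: 'us-east-1', objectKey: 'docs/report.pdf' }
```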
@@ -31,6 +31,7 @@ export const SharepointBlock: BlockConfig<SharepointResponse> = {
        { label: 'Read List', id: 'read_list' },
        { label: 'Update List', id: 'update_list' },
        { label: 'Add List Items', id: 'add_list_items' },
        { label: 'Upload File', id: 'upload_file' },
      ],
    },
    {
@@ -83,6 +84,7 @@ export const SharepointBlock: BlockConfig<SharepointResponse> = {
          'read_list',
          'update_list',
          'add_list_items',
          'upload_file',
        ],
      },
    },
@@ -182,6 +184,62 @@ export const SharepointBlock: BlockConfig<SharepointResponse> = {
      canonicalParamId: 'listItemFields',
      condition: { field: 'operation', value: ['update_list', 'add_list_items'] },
    },

    // Upload File operation fields
    {
      id: 'driveId',
      title: 'Document Library ID',
      type: 'short-input',
      layout: 'full',
      placeholder: 'Enter document library (drive) ID',
      canonicalParamId: 'driveId',
      condition: { field: 'operation', value: 'upload_file' },
      mode: 'advanced',
    },
    {
      id: 'folderPath',
      title: 'Folder Path',
      type: 'short-input',
      layout: 'full',
      placeholder: 'Optional folder path (e.g., /Documents/Subfolder)',
      condition: { field: 'operation', value: 'upload_file' },
      required: false,
    },
    {
      id: 'fileName',
      title: 'File Name',
      type: 'short-input',
      layout: 'full',
      placeholder: 'Optional: override uploaded file name',
      condition: { field: 'operation', value: 'upload_file' },
      mode: 'advanced',
      required: false,
    },
    // File upload (basic mode)
    {
      id: 'uploadFiles',
      title: 'Files',
      type: 'file-upload',
      layout: 'full',
      canonicalParamId: 'files',
      placeholder: 'Upload files to SharePoint',
      condition: { field: 'operation', value: 'upload_file' },
      mode: 'basic',
      multiple: true,
      required: false,
    },
    // Variable reference (advanced mode)
    {
      id: 'files',
      title: 'Files',
      type: 'short-input',
      layout: 'full',
      canonicalParamId: 'files',
      placeholder: 'Reference files from previous blocks',
      condition: { field: 'operation', value: 'upload_file' },
      mode: 'advanced',
      required: false,
    },
  ],
  tools: {
    access: [
@@ -192,6 +250,7 @@ export const SharepointBlock: BlockConfig<SharepointResponse> = {
      'sharepoint_get_list',
      'sharepoint_update_list',
      'sharepoint_add_list_items',
      'sharepoint_upload_file',
    ],
    config: {
      tool: (params) => {
@@ -210,6 +269,8 @@ export const SharepointBlock: BlockConfig<SharepointResponse> = {
            return 'sharepoint_update_list'
          case 'add_list_items':
            return 'sharepoint_add_list_items'
          case 'upload_file':
            return 'sharepoint_upload_file'
          default:
            throw new Error(`Invalid Sharepoint operation: ${params.operation}`)
        }
@@ -225,6 +286,8 @@ export const SharepointBlock: BlockConfig<SharepointResponse> = {
          listItemFields,
          includeColumns,
          includeItems,
          uploadFiles,
          files,
          ...others
        } = rest as any

@@ -270,7 +333,9 @@ export const SharepointBlock: BlockConfig<SharepointResponse> = {
          } catch {}
        }

        return {
        // Handle file upload files parameter
        const fileParam = uploadFiles || files
        const baseParams = {
          credential,
          siteId: effectiveSiteId || undefined,
          pageSize: others.pageSize ? Number.parseInt(others.pageSize as string, 10) : undefined,
@@ -281,6 +346,13 @@ export const SharepointBlock: BlockConfig<SharepointResponse> = {
          includeColumns: coerceBoolean(includeColumns),
          includeItems: coerceBoolean(includeItems),
        }

        // Add files if provided
        if (fileParam) {
          baseParams.files = fileParam
        }

        return baseParams
      },
    },
  },
@@ -303,6 +375,11 @@ export const SharepointBlock: BlockConfig<SharepointResponse> = {
    includeItems: { type: 'boolean', description: 'Include items in response' },
    listItemId: { type: 'string', description: 'List item ID' },
    listItemFields: { type: 'string', description: 'List item fields' },
    driveId: { type: 'string', description: 'Document library (drive) ID' },
    folderPath: { type: 'string', description: 'Folder path for file upload' },
    fileName: { type: 'string', description: 'File name override' },
    uploadFiles: { type: 'json', description: 'Files to upload (UI upload)' },
    files: { type: 'json', description: 'Files to upload (UserFile array)' },
  },
  outputs: {
    sites: {
@@ -322,6 +399,14 @@ export const SharepointBlock: BlockConfig<SharepointResponse> = {
      type: 'json',
      description: 'Array of SharePoint list items with fields',
    },
    uploadedFiles: {
      type: 'json',
      description: 'Array of uploaded file objects with id, name, webUrl, size',
    },
    fileCount: {
      type: 'number',
      description: 'Number of files uploaded',
    },
    success: {
      type: 'boolean',
      description: 'Success status',
@@ -109,6 +109,31 @@ export const SlackBlock: BlockConfig<SlackResponse> = {
},
required: true,
},
// File upload (basic mode)
{
id: 'attachmentFiles',
title: 'Attachments',
type: 'file-upload',
layout: 'full',
canonicalParamId: 'files',
placeholder: 'Upload files to attach',
condition: { field: 'operation', value: 'send' },
mode: 'basic',
multiple: true,
required: false,
},
// Variable reference (advanced mode)
{
id: 'files',
title: 'File Attachments',
type: 'short-input',
layout: 'full',
canonicalParamId: 'files',
placeholder: 'Reference files from previous blocks',
condition: { field: 'operation', value: 'send' },
mode: 'advanced',
required: false,
},
// Canvas specific fields
{
id: 'title',
@@ -194,6 +219,8 @@ export const SlackBlock: BlockConfig<SlackResponse> = {
content,
limit,
oldest,
attachmentFiles,
files,
...rest
} = params

@@ -224,12 +251,18 @@ export const SlackBlock: BlockConfig<SlackResponse> = {

// Handle operation-specific params
switch (operation) {
case 'send':
case 'send': {
if (!rest.text) {
throw new Error('Message text is required for send operation')
}
baseParams.text = rest.text
// Add files if provided
const fileParam = attachmentFiles || files
if (fileParam) {
baseParams.files = fileParam
}
break
}

case 'canvas':
if (!title || !content) {
@@ -264,6 +297,8 @@ export const SlackBlock: BlockConfig<SlackResponse> = {
channel: { type: 'string', description: 'Channel identifier' },
manualChannel: { type: 'string', description: 'Manual channel identifier' },
text: { type: 'string', description: 'Message text' },
attachmentFiles: { type: 'json', description: 'Files to attach (UI upload)' },
files: { type: 'json', description: 'Files to attach (UserFile array)' },
title: { type: 'string', description: 'Canvas title' },
content: { type: 'string', description: 'Canvas content' },
limit: { type: 'string', description: 'Message limit' },
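The Slack, Telegram, and SharePoint changes above all follow the same dual-mode pattern: a basic `file-upload` subblock and an advanced variable-reference subblock that share one `canonicalParamId`, so the tool sees a single canonical `files` value. A minimal sketch of that resolution (the interface here is illustrative, not the block framework's):

```ts
// Sketch only: both subblocks map onto the canonical 'files' parameter.
// Basic mode fills `attachmentFiles` from the upload UI; advanced mode fills
// `files` from a variable reference. Whichever is present wins.
interface FileParams {
  attachmentFiles?: unknown[] // UI upload (basic mode)
  files?: unknown[] // UserFile array reference (advanced mode)
}

function resolveCanonicalFiles(params: FileParams): unknown[] | undefined {
  return params.attachmentFiles || params.files
}
```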
@@ -27,6 +27,7 @@ export const TelegramBlock: BlockConfig<TelegramResponse> = {
{ label: 'Send Video', id: 'telegram_send_video' },
{ label: 'Send Audio', id: 'telegram_send_audio' },
{ label: 'Send Animation', id: 'telegram_send_animation' },
{ label: 'Send Document', id: 'telegram_send_document' },
{ label: 'Delete Message', id: 'telegram_delete_message' },
],
value: () => 'telegram_message',
@@ -107,6 +108,33 @@ export const TelegramBlock: BlockConfig<TelegramResponse> = {
required: true,
condition: { field: 'operation', value: 'telegram_send_animation' },
},
// File upload (basic mode) for Send Document
{
id: 'attachmentFiles',
title: 'Document',
type: 'file-upload',
layout: 'full',
canonicalParamId: 'files',
placeholder: 'Upload document file',
condition: { field: 'operation', value: 'telegram_send_document' },
mode: 'basic',
multiple: false,
required: false,
description: 'Document file to send (PDF, ZIP, DOC, etc.). Max size: 50MB',
},
// Variable reference (advanced mode) for Send Document
{
id: 'files',
title: 'Document',
type: 'short-input',
layout: 'full',
canonicalParamId: 'files',
placeholder: 'Reference document from previous blocks',
condition: { field: 'operation', value: 'telegram_send_document' },
mode: 'advanced',
required: false,
description: 'Reference a document file from a previous block',
},
{
id: 'caption',
title: 'Caption',
@@ -121,6 +149,7 @@ export const TelegramBlock: BlockConfig<TelegramResponse> = {
'telegram_send_video',
'telegram_send_audio',
'telegram_send_animation',
'telegram_send_document',
],
},
},
@@ -152,6 +181,7 @@ export const TelegramBlock: BlockConfig<TelegramResponse> = {
'telegram_send_video',
'telegram_send_audio',
'telegram_send_animation',
'telegram_send_document',
],
config: {
tool: (params) => {
@@ -168,6 +198,8 @@ export const TelegramBlock: BlockConfig<TelegramResponse> = {
return 'telegram_send_audio'
case 'telegram_send_animation':
return 'telegram_send_animation'
case 'telegram_send_document':
return 'telegram_send_document'
default:
return 'telegram_message'
}
@@ -238,6 +270,15 @@ export const TelegramBlock: BlockConfig<TelegramResponse> = {
animation: params.animation,
caption: params.caption,
}
case 'telegram_send_document': {
// Handle file upload
const fileParam = params.attachmentFiles || params.files
return {
...commonParams,
files: fileParam,
caption: params.caption,
}
}
default:
return {
...commonParams,
@@ -256,6 +297,11 @@ export const TelegramBlock: BlockConfig<TelegramResponse> = {
video: { type: 'string', description: 'Video URL or file_id' },
audio: { type: 'string', description: 'Audio URL or file_id' },
animation: { type: 'string', description: 'Animation URL or file_id' },
attachmentFiles: {
type: 'json',
description: 'Files to attach (UI upload)',
},
files: { type: 'json', description: 'Files to attach (UserFile array)' },
caption: { type: 'string', description: 'Caption for media' },
messageId: { type: 'string', description: 'Message ID to delete' },
},
@@ -14,13 +14,37 @@ export const VisionBlock: BlockConfig<VisionResponse> = {
bgColor: '#4D5FFF',
icon: EyeIcon,
subBlocks: [
// Image file upload (basic mode)
{
id: 'imageUrl',
title: 'Image URL',
id: 'imageFile',
title: 'Image File',
type: 'file-upload',
layout: 'full',
canonicalParamId: 'imageFile',
placeholder: 'Upload an image file',
mode: 'basic',
multiple: false,
required: false,
acceptedTypes: '.jpg,.jpeg,.png,.gif,.webp',
},
// Image file reference (advanced mode)
{
id: 'imageFileReference',
title: 'Image File Reference',
type: 'short-input',
layout: 'full',
placeholder: 'Enter publicly accessible image URL',
required: true,
canonicalParamId: 'imageFile',
placeholder: 'Reference an image from previous blocks',
mode: 'advanced',
required: false,
},
{
id: 'imageUrl',
title: 'Image URL (alternative)',
type: 'short-input',
layout: 'full',
placeholder: 'Or enter publicly accessible image URL',
required: false,
},
{
id: 'model',
@@ -58,6 +82,8 @@ export const VisionBlock: BlockConfig<VisionResponse> = {
inputs: {
apiKey: { type: 'string', description: 'Provider API key' },
imageUrl: { type: 'string', description: 'Image URL' },
imageFile: { type: 'json', description: 'Image file (UserFile)' },
imageFileReference: { type: 'json', description: 'Image file reference' },
model: { type: 'string', description: 'Vision model' },
prompt: { type: 'string', description: 'Analysis prompt' },
},
2  apps/sim/lib/billing/storage/index.ts  Normal file
@@ -0,0 +1,2 @@
export { checkStorageQuota, getUserStorageLimit, getUserStorageUsage } from './limits'
export { decrementStorageUsage, incrementStorageUsage } from './tracking'
190  apps/sim/lib/billing/storage/limits.ts  Normal file
@@ -0,0 +1,190 @@
/**
 * Storage limit management
 * Similar to cost limits but for file storage quotas
 */

import { db } from '@sim/db'
import {
  DEFAULT_ENTERPRISE_STORAGE_LIMIT_GB,
  DEFAULT_FREE_STORAGE_LIMIT_GB,
  DEFAULT_PRO_STORAGE_LIMIT_GB,
  DEFAULT_TEAM_STORAGE_LIMIT_GB,
} from '@sim/db/consts'
import { organization, subscription, userStats } from '@sim/db/schema'
import { eq } from 'drizzle-orm'
import { getEnv } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'

const logger = createLogger('StorageLimits')

/**
 * Convert GB to bytes
 */
function gbToBytes(gb: number): number {
  return gb * 1024 * 1024 * 1024
}

/**
 * Get storage limits from environment variables with fallback to constants
 * Returns limits in bytes
 */
export function getStorageLimits() {
  return {
    free: gbToBytes(
      Number.parseInt(getEnv('FREE_STORAGE_LIMIT_GB') || String(DEFAULT_FREE_STORAGE_LIMIT_GB))
    ),
    pro: gbToBytes(
      Number.parseInt(getEnv('PRO_STORAGE_LIMIT_GB') || String(DEFAULT_PRO_STORAGE_LIMIT_GB))
    ),
    team: gbToBytes(
      Number.parseInt(getEnv('TEAM_STORAGE_LIMIT_GB') || String(DEFAULT_TEAM_STORAGE_LIMIT_GB))
    ),
    enterpriseDefault: gbToBytes(
      Number.parseInt(
        getEnv('ENTERPRISE_STORAGE_LIMIT_GB') || String(DEFAULT_ENTERPRISE_STORAGE_LIMIT_GB)
      )
    ),
  }
}

/**
 * Get storage limit for a specific plan
 * Returns limit in bytes
 */
export function getStorageLimitForPlan(plan: string, metadata?: any): number {
  const limits = getStorageLimits()

  switch (plan) {
    case 'free':
      return limits.free
    case 'pro':
      return limits.pro
    case 'team':
      return limits.team
    case 'enterprise':
      // Check for custom limit in metadata (stored in GB)
      if (metadata?.storageLimitGB) {
        return gbToBytes(Number.parseInt(metadata.storageLimitGB))
      }
      return limits.enterpriseDefault
    default:
      return limits.free
  }
}

/**
 * Get storage limit for a user based on their subscription
 * Returns limit in bytes
 */
export async function getUserStorageLimit(userId: string): Promise<number> {
  try {
    // Check if user is in a team/enterprise org
    const { getHighestPrioritySubscription } = await import('@/lib/billing/core/subscription')
    const sub = await getHighestPrioritySubscription(userId)

    const limits = getStorageLimits()

    if (!sub || sub.plan === 'free') {
      return limits.free
    }

    if (sub.plan === 'pro') {
      return limits.pro
    }

    // Team/Enterprise: Use organization limit
    if (sub.plan === 'team' || sub.plan === 'enterprise') {
      // Get organization storage limit
      const orgRecord = await db
        .select({ metadata: subscription.metadata })
        .from(subscription)
        .where(eq(subscription.id, sub.id))
        .limit(1)

      if (orgRecord.length > 0 && orgRecord[0].metadata) {
        const metadata = orgRecord[0].metadata as any
        if (metadata.customStorageLimitGB) {
          return metadata.customStorageLimitGB * 1024 * 1024 * 1024
        }
      }

      // Default for team/enterprise
      return sub.plan === 'enterprise' ? limits.enterpriseDefault : limits.team
    }

    return limits.free
  } catch (error) {
    logger.error('Error getting user storage limit:', error)
    return getStorageLimits().free
  }
}

/**
 * Get current storage usage for a user
 * Returns usage in bytes
 */
export async function getUserStorageUsage(userId: string): Promise<number> {
  try {
    // Check if user is in a team/enterprise org
    const { getHighestPrioritySubscription } = await import('@/lib/billing/core/subscription')
    const sub = await getHighestPrioritySubscription(userId)

    if (sub && (sub.plan === 'team' || sub.plan === 'enterprise')) {
      // Use organization storage
      const orgRecord = await db
        .select({ storageUsedBytes: organization.storageUsedBytes })
        .from(organization)
        .where(eq(organization.id, sub.referenceId))
        .limit(1)

      return orgRecord.length > 0 ? orgRecord[0].storageUsedBytes || 0 : 0
    }

    // Free/Pro: Use user stats
    const stats = await db
      .select({ storageUsedBytes: userStats.storageUsedBytes })
      .from(userStats)
      .where(eq(userStats.userId, userId))
      .limit(1)

    return stats.length > 0 ? stats[0].storageUsedBytes || 0 : 0
  } catch (error) {
    logger.error('Error getting user storage usage:', error)
    return 0
  }
}

/**
 * Check if user has storage quota available
 */
export async function checkStorageQuota(
  userId: string,
  additionalBytes: number
): Promise<{ allowed: boolean; currentUsage: number; limit: number; error?: string }> {
  try {
    const [currentUsage, limit] = await Promise.all([
      getUserStorageUsage(userId),
      getUserStorageLimit(userId),
    ])

    const newUsage = currentUsage + additionalBytes
    const allowed = newUsage <= limit

    return {
      allowed,
      currentUsage,
      limit,
      error: allowed
        ? undefined
        : `Storage limit exceeded. Used: ${(newUsage / (1024 * 1024 * 1024)).toFixed(2)}GB, Limit: ${(limit / (1024 * 1024 * 1024)).toFixed(0)}GB`,
    }
  } catch (error) {
    logger.error('Error checking storage quota:', error)
    return {
      allowed: false,
      currentUsage: 0,
      limit: 0,
      error: 'Failed to check storage quota',
    }
  }
}
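A minimal usage sketch of the quota check above as a pre-upload gate (the helper and its call site are hypothetical, not part of this commit):

```ts
import { checkStorageQuota } from '@/lib/billing/storage'

// Hypothetical pre-upload gate: reject the request before any bytes are
// written if the new total would exceed the user's plan limit.
async function assertUploadAllowed(userId: string, fileBuffer: Buffer): Promise<void> {
  const quota = await checkStorageQuota(userId, fileBuffer.length)
  if (!quota.allowed) {
    // quota.error already carries a human-readable "Used/Limit" message
    throw new Error(quota.error || 'Storage limit exceeded')
  }
}
```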
83  apps/sim/lib/billing/storage/tracking.ts  Normal file
@@ -0,0 +1,83 @@
/**
 * Storage usage tracking
 * Updates storage_used_bytes for users and organizations
 */

import { db } from '@sim/db'
import { organization, userStats } from '@sim/db/schema'
import { eq, sql } from 'drizzle-orm'
import { createLogger } from '@/lib/logs/console/logger'

const logger = createLogger('StorageTracking')

/**
 * Increment storage usage after successful file upload
 */
export async function incrementStorageUsage(userId: string, bytes: number): Promise<void> {
  try {
    // Check if user is in a team/enterprise org
    const { getHighestPrioritySubscription } = await import('@/lib/billing/core/subscription')
    const sub = await getHighestPrioritySubscription(userId)

    if (sub && (sub.plan === 'team' || sub.plan === 'enterprise')) {
      // Update organization storage
      await db
        .update(organization)
        .set({
          storageUsedBytes: sql`${organization.storageUsedBytes} + ${bytes}`,
        })
        .where(eq(organization.id, sub.referenceId))

      logger.info(`Incremented org storage: ${bytes} bytes for org ${sub.referenceId}`)
    } else {
      // Update user stats storage
      await db
        .update(userStats)
        .set({
          storageUsedBytes: sql`${userStats.storageUsedBytes} + ${bytes}`,
        })
        .where(eq(userStats.userId, userId))

      logger.info(`Incremented user storage: ${bytes} bytes for user ${userId}`)
    }
  } catch (error) {
    logger.error('Error incrementing storage usage:', error)
    throw error
  }
}

/**
 * Decrement storage usage after file deletion
 */
export async function decrementStorageUsage(userId: string, bytes: number): Promise<void> {
  try {
    // Check if user is in a team/enterprise org
    const { getHighestPrioritySubscription } = await import('@/lib/billing/core/subscription')
    const sub = await getHighestPrioritySubscription(userId)

    if (sub && (sub.plan === 'team' || sub.plan === 'enterprise')) {
      // Update organization storage
      await db
        .update(organization)
        .set({
          storageUsedBytes: sql`GREATEST(0, ${organization.storageUsedBytes} - ${bytes})`,
        })
        .where(eq(organization.id, sub.referenceId))

      logger.info(`Decremented org storage: ${bytes} bytes for org ${sub.referenceId}`)
    } else {
      // Update user stats storage
      await db
        .update(userStats)
        .set({
          storageUsedBytes: sql`GREATEST(0, ${userStats.storageUsedBytes} - ${bytes})`,
        })
        .where(eq(userStats.userId, userId))

      logger.info(`Decremented user storage: ${bytes} bytes for user ${userId}`)
    }
  } catch (error) {
    logger.error('Error decrementing storage usage:', error)
    throw error
  }
}
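The pair above updates counters with SQL expressions rather than read-modify-write, so concurrent uploads cannot lose increments, and `GREATEST(0, ...)` keeps a repeated delete from driving usage negative. A sketch of the intended call pattern (the wrapper functions are hypothetical):

```ts
import { decrementStorageUsage, incrementStorageUsage } from '@/lib/billing/storage'

// Hypothetical call sites: track only after the storage write/delete has
// succeeded, so the counter never includes bytes that were not persisted.
async function afterUpload(userId: string, bytesWritten: number) {
  await incrementStorageUsage(userId, bytesWritten)
}

async function afterDelete(userId: string, bytesFreed: number) {
  // GREATEST(0, ...) in the SQL makes a duplicate call safe (clamped at 0).
  await decrementStorageUsage(userId, bytesFreed)
}
```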
@@ -42,12 +42,16 @@ export const env = createEnv({
STRIPE_WEBHOOK_SECRET: z.string().min(1).optional(), // General Stripe webhook secret
STRIPE_FREE_PRICE_ID: z.string().min(1).optional(), // Stripe price ID for free tier
FREE_TIER_COST_LIMIT: z.number().optional(), // Cost limit for free tier users
FREE_STORAGE_LIMIT_GB: z.number().optional(), // Storage limit in GB for free tier users (default: 5GB)
STRIPE_PRO_PRICE_ID: z.string().min(1).optional(), // Stripe price ID for pro tier
PRO_TIER_COST_LIMIT: z.number().optional(), // Cost limit for pro tier users
PRO_STORAGE_LIMIT_GB: z.number().optional(), // Storage limit in GB for pro tier users (default: 50GB)
STRIPE_TEAM_PRICE_ID: z.string().min(1).optional(), // Stripe price ID for team tier
TEAM_TIER_COST_LIMIT: z.number().optional(), // Cost limit for team tier users
TEAM_STORAGE_LIMIT_GB: z.number().optional(), // Storage limit in GB for team tier organizations (default: 500GB, pooled)
STRIPE_ENTERPRISE_PRICE_ID: z.string().min(1).optional(), // Stripe price ID for enterprise tier
ENTERPRISE_TIER_COST_LIMIT: z.number().optional(), // Cost limit for enterprise tier users
ENTERPRISE_STORAGE_LIMIT_GB: z.number().optional(), // Default storage limit in GB for enterprise tier (default: 500GB, can be overridden per org)
BILLING_ENABLED: z.boolean().optional(), // Enable billing enforcement and usage tracking
OVERAGE_THRESHOLD_DOLLARS: z.number().optional().default(50), // Dollar threshold for incremental overage billing (default: $50)
@@ -38,8 +38,11 @@ export class PdfParser implements FileParser {
pdfData.text.length
)

// Remove null bytes from content (PostgreSQL JSONB doesn't allow them)
const cleanContent = pdfData.text.replace(/\u0000/g, '')

return {
content: pdfData.text,
content: cleanContent,
metadata: {
pageCount: pdfData.numpages,
info: pdfData.info,
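The null-byte strip above matters because PostgreSQL rejects `\u0000` inside JSONB values; a one-line check of the regex's behavior (values are illustrative):

```ts
// '\u0000' is the only character PostgreSQL JSONB refuses outright.
const raw = 'page one\u0000page two'
const clean = raw.replace(/\u0000/g, '')
console.log(clean) // "page onepage two" — text preserved, null bytes gone
```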
@@ -1,8 +1,13 @@
import crypto, { randomUUID } from 'crypto'
import { db } from '@sim/db'
import { document, embedding, knowledgeBaseTagDefinitions } from '@sim/db/schema'
import { document, embedding, knowledgeBase, knowledgeBaseTagDefinitions } from '@sim/db/schema'
import { tasks } from '@trigger.dev/sdk'
import { and, asc, desc, eq, inArray, isNull, sql } from 'drizzle-orm'
import {
checkStorageQuota,
decrementStorageUsage,
incrementStorageUsage,
} from '@/lib/billing/storage'
import { generateEmbeddings } from '@/lib/embeddings/utils'
import { env } from '@/lib/env'
import { getSlotsForFieldType, type TAG_SLOT_CONFIG } from '@/lib/knowledge/consts'
@@ -659,8 +664,32 @@ export async function createDocumentRecords(
tag7?: string
}>,
knowledgeBaseId: string,
requestId: string
requestId: string,
userId?: string
): Promise<DocumentData[]> {
// Check storage limits before creating documents
if (userId) {
const totalSize = documents.reduce((sum, doc) => sum + doc.fileSize, 0)

// Get knowledge base owner
const kb = await db
.select({ userId: knowledgeBase.userId })
.from(knowledgeBase)
.where(eq(knowledgeBase.id, knowledgeBaseId))
.limit(1)

if (kb.length === 0) {
throw new Error('Knowledge base not found')
}

// Always meter the knowledge base owner
const quotaCheck = await checkStorageQuota(kb[0].userId, totalSize)

if (!quotaCheck.allowed) {
throw new Error(quotaCheck.error || 'Storage limit exceeded')
}
}

return await db.transaction(async (tx) => {
const now = new Date()
const documentRecords = []
@@ -728,6 +757,33 @@ export async function createDocumentRecords(
logger.info(
`[${requestId}] Bulk created ${documentRecords.length} document records in knowledge base ${knowledgeBaseId}`
)

// Increment storage usage tracking
if (userId) {
const totalSize = documents.reduce((sum, doc) => sum + doc.fileSize, 0)

// Get knowledge base owner
const kb = await db
.select({ userId: knowledgeBase.userId })
.from(knowledgeBase)
.where(eq(knowledgeBase.id, knowledgeBaseId))
.limit(1)

if (kb.length > 0) {
// Always meter the knowledge base owner
try {
await incrementStorageUsage(kb[0].userId, totalSize)
logger.info(
`[${requestId}] Updated knowledge base owner storage usage for ${totalSize} bytes`
)
} catch (error) {
logger.error(
`[${requestId}] Failed to update knowledge base owner storage usage:`,
error
)
}
}
}
}

return returnData
@@ -928,7 +984,8 @@ export async function createSingleDocument(
tag7?: string
},
knowledgeBaseId: string,
requestId: string
requestId: string,
userId?: string
): Promise<{
id: string
knowledgeBaseId: string
@@ -949,6 +1006,27 @@ export async function createSingleDocument(
tag6: string | null
tag7: string | null
}> {
// Check storage limits before creating document
if (userId) {
// Get knowledge base owner
const kb = await db
.select({ userId: knowledgeBase.userId })
.from(knowledgeBase)
.where(eq(knowledgeBase.id, knowledgeBaseId))
.limit(1)

if (kb.length === 0) {
throw new Error('Knowledge base not found')
}

// Always meter the knowledge base owner
const quotaCheck = await checkStorageQuota(kb[0].userId, documentData.fileSize)

if (!quotaCheck.allowed) {
throw new Error(quotaCheck.error || 'Storage limit exceeded')
}
}

const documentId = randomUUID()
const now = new Date()

@@ -994,6 +1072,28 @@ export async function createSingleDocument(

logger.info(`[${requestId}] Document created: ${documentId} in knowledge base ${knowledgeBaseId}`)

// Increment storage usage tracking
if (userId) {
// Get knowledge base owner
const kb = await db
.select({ userId: knowledgeBase.userId })
.from(knowledgeBase)
.where(eq(knowledgeBase.id, knowledgeBaseId))
.limit(1)

if (kb.length > 0) {
// Always meter the knowledge base owner
try {
await incrementStorageUsage(kb[0].userId, documentData.fileSize)
logger.info(
`[${requestId}] Updated knowledge base owner storage usage for ${documentData.fileSize} bytes`
)
} catch (error) {
logger.error(`[${requestId}] Failed to update knowledge base owner storage usage:`, error)
}
}
}

return newDocument as {
id: string
knowledgeBaseId: string
@@ -1023,7 +1123,8 @@ export async function bulkDocumentOperation(
knowledgeBaseId: string,
operation: 'enable' | 'disable' | 'delete',
documentIds: string[],
requestId: string
requestId: string,
userId?: string
): Promise<{
success: boolean
successCount: number
@@ -1071,6 +1172,23 @@ export async function bulkDocumentOperation(
}>

if (operation === 'delete') {
// Get file sizes before deletion for storage tracking
let totalSize = 0
if (userId) {
const documentsToDelete = await db
.select({ fileSize: document.fileSize })
.from(document)
.where(
and(
eq(document.knowledgeBaseId, knowledgeBaseId),
inArray(document.id, documentIds),
isNull(document.deletedAt)
)
)

totalSize = documentsToDelete.reduce((sum, doc) => sum + doc.fileSize, 0)
}

// Handle bulk soft delete
updateResult = await db
.update(document)
@@ -1085,6 +1203,28 @@ export async function bulkDocumentOperation(
)
)
.returning({ id: document.id, deletedAt: document.deletedAt })

// Decrement storage usage tracking
if (userId && totalSize > 0) {
// Get knowledge base owner
const kb = await db
.select({ userId: knowledgeBase.userId })
.from(knowledgeBase)
.where(eq(knowledgeBase.id, knowledgeBaseId))
.limit(1)

if (kb.length > 0) {
// Always meter the knowledge base owner
try {
await decrementStorageUsage(kb[0].userId, totalSize)
logger.info(
`[${requestId}] Updated knowledge base owner storage usage for -${totalSize} bytes`
)
} catch (error) {
logger.error(`[${requestId}] Failed to update knowledge base owner storage usage:`, error)
}
}
}
} else {
// Handle bulk enable/disable
const enabled = operation === 'enable'
103  apps/sim/lib/uploads/file-processing.ts  Normal file
@@ -0,0 +1,103 @@
import type { Logger } from '@/lib/logs/console/logger'
import { extractStorageKey } from '@/lib/uploads/file-utils'
import { downloadFile } from '@/lib/uploads/storage-client'
import { downloadExecutionFile } from '@/lib/workflows/execution-file-storage'
import { isExecutionFile } from '@/lib/workflows/execution-files'
import type { UserFile } from '@/executor/types'

/**
 * Converts a single raw file object to UserFile format
 * @param file - Raw file object
 * @param requestId - Request ID for logging
 * @param logger - Logger instance
 * @returns UserFile object
 * @throws Error if file has no storage key
 */
export function processSingleFileToUserFile(
  file: any,
  requestId: string,
  logger: Logger
): UserFile {
  // Already a UserFile (from variable reference)
  if (file.id && file.key && file.uploadedAt) {
    return file as UserFile
  }

  // Extract storage key from path or key property
  const storageKey = file.key || (file.path ? extractStorageKey(file.path) : null)

  if (!storageKey) {
    logger.warn(`[${requestId}] File has no storage key: ${file.name || 'unknown'}`)
    throw new Error(`File has no storage key: ${file.name || 'unknown'}`)
  }

  const userFile: UserFile = {
    id: file.id || `file-${Date.now()}`,
    name: file.name,
    url: file.url || file.path,
    size: file.size,
    type: file.type || 'application/octet-stream',
    key: storageKey,
    uploadedAt: file.uploadedAt || new Date().toISOString(),
    expiresAt: file.expiresAt || new Date(Date.now() + 24 * 60 * 60 * 1000).toISOString(),
  }

  logger.info(`[${requestId}] Converted file to UserFile: ${userFile.name} (key: ${userFile.key})`)
  return userFile
}

/**
 * Converts raw file objects (from file-upload or variable references) to UserFile format
 * @param files - Array of raw file objects
 * @param requestId - Request ID for logging
 * @param logger - Logger instance
 * @returns Array of UserFile objects
 */
export function processFilesToUserFiles(
  files: any[],
  requestId: string,
  logger: Logger
): UserFile[] {
  const userFiles: UserFile[] = []

  for (const file of files) {
    try {
      const userFile = processSingleFileToUserFile(file, requestId, logger)
      userFiles.push(userFile)
    } catch (error) {
      // Log and skip files that can't be processed
      logger.warn(
        `[${requestId}] Skipping file: ${error instanceof Error ? error.message : 'Unknown error'}`
      )
    }
  }

  return userFiles
}

/**
 * Downloads a file from storage (execution or regular)
 * @param userFile - UserFile object
 * @param requestId - Request ID for logging
 * @param logger - Logger instance
 * @returns Buffer containing file data
 */
export async function downloadFileFromStorage(
  userFile: UserFile,
  requestId: string,
  logger: Logger
): Promise<Buffer> {
  let buffer: Buffer

  if (isExecutionFile(userFile)) {
    logger.info(`[${requestId}] Downloading from execution storage: ${userFile.key}`)
    buffer = await downloadExecutionFile(userFile)
  } else if (userFile.key) {
    logger.info(`[${requestId}] Downloading from regular storage: ${userFile.key}`)
    buffer = await downloadFile(userFile.key)
  } else {
    throw new Error('File has no key - cannot download')
  }

  return buffer
}
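A minimal sketch of how the two helpers above compose in a tool route (the wrapper function is hypothetical; `createLogger` and the helpers are from this commit):

```ts
import { createLogger } from '@/lib/logs/console/logger'
import { downloadFileFromStorage, processFilesToUserFiles } from '@/lib/uploads/file-processing'

const logger = createLogger('ExampleRoute')

// Hypothetical route fragment: normalize whatever the block passed in
// (UI uploads or UserFile references), then fetch the raw bytes for each.
async function loadAttachmentBuffers(rawFiles: any[], requestId: string): Promise<Buffer[]> {
  const userFiles = processFilesToUserFiles(rawFiles, requestId, logger)
  return Promise.all(userFiles.map((f) => downloadFileFromStorage(f, requestId, logger)))
}
```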
@@ -142,3 +142,20 @@ export function getMimeTypeFromExtension(extension: string): string {

  return extensionMimeMap[extension.toLowerCase()] || 'application/octet-stream'
}

/**
 * Extract storage key from a file path
 * Handles various path formats: /api/files/serve/xyz, /api/files/serve/s3/xyz, etc.
 */
export function extractStorageKey(filePath: string): string {
  if (filePath.includes('/api/files/serve/s3/')) {
    return decodeURIComponent(filePath.split('/api/files/serve/s3/')[1])
  }
  if (filePath.includes('/api/files/serve/blob/')) {
    return decodeURIComponent(filePath.split('/api/files/serve/blob/')[1])
  }
  if (filePath.startsWith('/api/files/serve/')) {
    return decodeURIComponent(filePath.substring('/api/files/serve/'.length))
  }
  return filePath
}
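A quick check of the path shapes the helper above accepts (the path values are illustrative):

```ts
import { extractStorageKey } from '@/lib/uploads/file-utils'

// Storage-specific serve paths strip the provider prefix and decode:
extractStorageKey('/api/files/serve/s3/ws1%2Freport.pdf')   // -> "ws1/report.pdf"
extractStorageKey('/api/files/serve/blob/ws1%2Freport.pdf') // -> "ws1/report.pdf"
// The generic serve path strips only the route prefix:
extractStorageKey('/api/files/serve/report.pdf')            // -> "report.pdf"
// Anything else is assumed to already be a raw storage key:
extractStorageKey('1699999999-abc-report.pdf')              // -> unchanged
```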
303  apps/sim/lib/uploads/workspace-files.ts  Normal file
@@ -0,0 +1,303 @@
/**
 * Workspace file storage system
 * Files uploaded at workspace level persist indefinitely and are accessible across all workflows
 */

import { db } from '@sim/db'
import { workspaceFile } from '@sim/db/schema'
import { and, eq } from 'drizzle-orm'
import {
  checkStorageQuota,
  decrementStorageUsage,
  incrementStorageUsage,
} from '@/lib/billing/storage'
import { createLogger } from '@/lib/logs/console/logger'
import { deleteFile, downloadFile } from '@/lib/uploads/storage-client'
import type { UserFile } from '@/executor/types'

const logger = createLogger('WorkspaceFileStorage')

export interface WorkspaceFileRecord {
  id: string
  workspaceId: string
  name: string
  key: string
  path: string // Full serve path including storage type
  url?: string // Presigned URL for external access (optional, regenerated as needed)
  size: number
  type: string
  uploadedBy: string
  uploadedAt: Date
}

/**
 * Generate workspace-scoped storage key
 * Pattern: {workspaceId}/{timestamp}-{filename}
 */
export function generateWorkspaceFileKey(workspaceId: string, fileName: string): string {
  const timestamp = Date.now()
  const random = Math.random().toString(36).substring(2, 9)
  const safeFileName = fileName.replace(/\s+/g, '-').replace(/[^a-zA-Z0-9.-]/g, '_')
  return `${workspaceId}/${timestamp}-${random}-${safeFileName}`
}

/**
 * Upload a file to workspace-scoped storage
 */
export async function uploadWorkspaceFile(
  workspaceId: string,
  userId: string,
  fileBuffer: Buffer,
  fileName: string,
  contentType: string
): Promise<UserFile> {
  logger.info(`Uploading workspace file: ${fileName} for workspace ${workspaceId}`)

  // Check for duplicates
  const exists = await fileExistsInWorkspace(workspaceId, fileName)
  if (exists) {
    throw new Error(`A file named "${fileName}" already exists in this workspace`)
  }

  // Check storage quota
  const quotaCheck = await checkStorageQuota(userId, fileBuffer.length)

  if (!quotaCheck.allowed) {
    throw new Error(quotaCheck.error || 'Storage limit exceeded')
  }

  // Generate workspace-scoped storage key
  const storageKey = generateWorkspaceFileKey(workspaceId, fileName)
  const fileId = `wf_${Date.now()}_${Math.random().toString(36).substring(2, 9)}`

  try {
    let uploadResult: any

    logger.info(`Generated storage key: ${storageKey}`)

    // Upload to storage with skipTimestampPrefix to use exact key
    const { USE_S3_STORAGE, USE_BLOB_STORAGE, S3_CONFIG, BLOB_CONFIG } = await import(
      '@/lib/uploads/setup'
    )

    if (USE_S3_STORAGE) {
      const { uploadToS3 } = await import('@/lib/uploads/s3/s3-client')
      // Use custom config overload with skipTimestampPrefix
      uploadResult = await uploadToS3(
        fileBuffer,
        storageKey,
        contentType,
        {
          bucket: S3_CONFIG.bucket,
          region: S3_CONFIG.region,
        },
        fileBuffer.length,
        true // skipTimestampPrefix = true
      )
    } else if (USE_BLOB_STORAGE) {
      const { uploadToBlob } = await import('@/lib/uploads/blob/blob-client')
      // Blob doesn't have skipTimestampPrefix, but we pass the full key
      uploadResult = await uploadToBlob(
        fileBuffer,
        storageKey,
        contentType,
        {
          accountName: BLOB_CONFIG.accountName,
          accountKey: BLOB_CONFIG.accountKey,
          connectionString: BLOB_CONFIG.connectionString,
          containerName: BLOB_CONFIG.containerName,
        },
        fileBuffer.length
      )
    } else {
      throw new Error('No storage provider configured')
    }

    logger.info(`S3/Blob upload returned key: ${uploadResult.key}`)
    logger.info(`Keys match: ${uploadResult.key === storageKey}`)

    // Store metadata in database - use the EXACT key from upload result
    await db.insert(workspaceFile).values({
      id: fileId,
      workspaceId,
      name: fileName,
      key: uploadResult.key, // This is what actually got stored in S3
      size: fileBuffer.length,
      type: contentType,
      uploadedBy: userId,
      uploadedAt: new Date(),
    })

    logger.info(`Successfully uploaded workspace file: ${fileName} with key: ${uploadResult.key}`)

    // Increment storage usage tracking
    try {
      await incrementStorageUsage(userId, fileBuffer.length)
    } catch (storageError) {
      logger.error(`Failed to update storage tracking:`, storageError)
      // Continue - don't fail upload if tracking fails
    }

    // Generate presigned URL (valid for 24 hours for initial access)
    const { getPresignedUrl } = await import('@/lib/uploads')
    let presignedUrl: string | undefined

    try {
      presignedUrl = await getPresignedUrl(uploadResult.key, 24 * 60 * 60) // 24 hours
    } catch (error) {
      logger.warn(`Failed to generate presigned URL for ${fileName}:`, error)
    }

    // Return UserFile format (no expiry for workspace files)
    return {
      id: fileId,
      name: fileName,
      size: fileBuffer.length,
      type: contentType,
      url: presignedUrl || uploadResult.path, // Use presigned URL for external access
      key: uploadResult.key,
      uploadedAt: new Date().toISOString(),
      expiresAt: new Date(Date.now() + 365 * 24 * 60 * 60 * 1000).toISOString(), // 1 year
    }
  } catch (error) {
    logger.error(`Failed to upload workspace file ${fileName}:`, error)
    throw new Error(
      `Failed to upload file: ${error instanceof Error ? error.message : 'Unknown error'}`
    )
  }
}

/**
 * Check if a file with the same name already exists in workspace
 */
export async function fileExistsInWorkspace(
  workspaceId: string,
  fileName: string
): Promise<boolean> {
  try {
    const existing = await db
      .select()
      .from(workspaceFile)
      .where(and(eq(workspaceFile.workspaceId, workspaceId), eq(workspaceFile.name, fileName)))
      .limit(1)

    return existing.length > 0
  } catch (error) {
    logger.error(`Failed to check file existence for ${fileName}:`, error)
    return false
  }
}

/**
 * List all files for a workspace
 */
export async function listWorkspaceFiles(workspaceId: string): Promise<WorkspaceFileRecord[]> {
  try {
    const files = await db
      .select()
      .from(workspaceFile)
      .where(eq(workspaceFile.workspaceId, workspaceId))
      .orderBy(workspaceFile.uploadedAt)

    // Add full serve path for each file (don't generate presigned URLs here)
    const { getServePathPrefix } = await import('@/lib/uploads')
    const pathPrefix = getServePathPrefix()

    return files.map((file) => ({
      ...file,
      path: `${pathPrefix}${encodeURIComponent(file.key)}`,
      // url will be generated on-demand during execution for external APIs
    }))
  } catch (error) {
    logger.error(`Failed to list workspace files for ${workspaceId}:`, error)
    return []
  }
}

/**
 * Get a specific workspace file
 */
export async function getWorkspaceFile(
  workspaceId: string,
  fileId: string
): Promise<WorkspaceFileRecord | null> {
  try {
    const files = await db
      .select()
      .from(workspaceFile)
      .where(and(eq(workspaceFile.id, fileId), eq(workspaceFile.workspaceId, workspaceId)))
      .limit(1)

    if (files.length === 0) return null

    // Add full serve path
    const { getServePathPrefix } = await import('@/lib/uploads')
    const pathPrefix = getServePathPrefix()

    return {
      ...files[0],
      path: `${pathPrefix}${encodeURIComponent(files[0].key)}`,
    }
  } catch (error) {
    logger.error(`Failed to get workspace file ${fileId}:`, error)
    return null
  }
}

/**
 * Download workspace file content
 */
export async function downloadWorkspaceFile(fileRecord: WorkspaceFileRecord): Promise<Buffer> {
  logger.info(`Downloading workspace file: ${fileRecord.name}`)

  try {
    const buffer = await downloadFile(fileRecord.key)
    logger.info(
      `Successfully downloaded workspace file: ${fileRecord.name} (${buffer.length} bytes)`
    )
    return buffer
  } catch (error) {
    logger.error(`Failed to download workspace file ${fileRecord.name}:`, error)
    throw new Error(
      `Failed to download file: ${error instanceof Error ? error.message : 'Unknown error'}`
    )
  }
}

/**
 * Delete a workspace file (both from storage and database)
 */
export async function deleteWorkspaceFile(workspaceId: string, fileId: string): Promise<void> {
  logger.info(`Deleting workspace file: ${fileId}`)

  try {
    // Get file record first
    const fileRecord = await getWorkspaceFile(workspaceId, fileId)
    if (!fileRecord) {
      throw new Error('File not found')
    }

    // Delete from storage
    await deleteFile(fileRecord.key)

    // Delete from database
    await db
      .delete(workspaceFile)
      .where(and(eq(workspaceFile.id, fileId), eq(workspaceFile.workspaceId, workspaceId)))

    // Decrement storage usage tracking
    try {
      await decrementStorageUsage(fileRecord.uploadedBy, fileRecord.size)
    } catch (storageError) {
      logger.error(`Failed to update storage tracking:`, storageError)
      // Continue - don't fail deletion if tracking fails
    }

    logger.info(`Successfully deleted workspace file: ${fileRecord.name}`)
  } catch (error) {
    logger.error(`Failed to delete workspace file ${fileId}:`, error)
    throw new Error(
      `Failed to delete file: ${error instanceof Error ? error.message : 'Unknown error'}`
    )
  }
}
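A lifecycle sketch tying the workspace-file helpers above together (the IDs and buffer content are placeholders, not real values):

```ts
import {
  deleteWorkspaceFile,
  listWorkspaceFiles,
  uploadWorkspaceFile,
} from '@/lib/uploads/workspace-files'

// Hypothetical workspace/user IDs and file content, for illustration only.
async function demo() {
  const uploaded = await uploadWorkspaceFile(
    'ws_123',
    'user_456',
    Buffer.from('hello workspace'),
    'notes.txt',
    'text/plain'
  )
  console.log(uploaded.key) // "ws_123/<timestamp>-<random>-notes.txt"

  const files = await listWorkspaceFiles('ws_123') // each record carries a serve `path`
  console.log(files.length)

  await deleteWorkspaceFile('ws_123', uploaded.id) // also decrements storage usage
}
```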
@@ -15,6 +15,7 @@ interface GmailWebhookConfig {
labelIds: string[]
labelFilterBehavior: 'INCLUDE' | 'EXCLUDE'
markAsRead: boolean
searchQuery?: string
maxEmailsPerPoll?: number
lastCheckedTimestamp?: string
historyId?: string
@@ -308,13 +309,53 @@ async function fetchNewEmails(accessToken: string, config: GmailWebhookConfig, r
}
}

/**
 * Builds a Gmail search query from label and search configuration
 */
function buildGmailSearchQuery(config: {
  labelIds?: string[]
  labelFilterBehavior?: 'INCLUDE' | 'EXCLUDE'
  searchQuery?: string
}): string {
  let labelQuery = ''
  if (config.labelIds && config.labelIds.length > 0) {
    const labelParts = config.labelIds.map((label) => `label:${label}`).join(' OR ')
    labelQuery =
      config.labelFilterBehavior === 'INCLUDE'
        ? config.labelIds.length > 1
          ? `(${labelParts})`
          : labelParts
        : config.labelIds.length > 1
          ? `-(${labelParts})`
          : `-${labelParts}`
  }

  let searchQueryPart = ''
  if (config.searchQuery?.trim()) {
    searchQueryPart = config.searchQuery.trim()
    if (searchQueryPart.includes(' OR ') || searchQueryPart.includes(' AND ')) {
      searchQueryPart = `(${searchQueryPart})`
    }
  }

  let baseQuery = ''
  if (labelQuery && searchQueryPart) {
    baseQuery = `${labelQuery} ${searchQueryPart}`
  } else if (searchQueryPart) {
    baseQuery = searchQueryPart
  } else if (labelQuery) {
    baseQuery = labelQuery
  } else {
    baseQuery = 'in:inbox'
  }

  return baseQuery
}

async function searchEmails(accessToken: string, config: GmailWebhookConfig, requestId: string) {
  try {
    // Build query parameters for label filtering
    const labelQuery =
      config.labelIds && config.labelIds.length > 0
        ? config.labelIds.map((label) => `label:${label}`).join(' ')
        : 'in:inbox'
    const baseQuery = buildGmailSearchQuery(config)
    logger.debug(`[${requestId}] Gmail search query: ${baseQuery}`)

    // Improved time-based filtering with dynamic buffer
    let timeConstraint = ''
@@ -363,11 +404,8 @@ async function searchEmails(accessToken: string, config: GmailWebhookConfig, req
logger.debug(`[${requestId}] No last check time, using default: newer_than:1d`)
}

// Combine label and time constraints
const query =
config.labelFilterBehavior === 'INCLUDE'
? `${labelQuery}${timeConstraint}`
: `-${labelQuery}${timeConstraint}`
// Combine base query and time constraints
const query = `${baseQuery}${timeConstraint}`

logger.info(`[${requestId}] Searching for emails with query: ${query}`)
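A few illustrative inputs and the queries the builder above produces (the function is file-local; the calls are shown for illustration, with example values):

```ts
// EXCLUDE two labels, plus a user search term containing OR:
buildGmailSearchQuery({
  labelIds: ['SPAM', 'PROMOTIONS'],
  labelFilterBehavior: 'EXCLUDE',
  searchQuery: 'from:alice@example.com OR from:bob@example.com',
})
// -> '-(label:SPAM OR label:PROMOTIONS) (from:alice@example.com OR from:bob@example.com)'

// No labels and no search term falls back to the inbox:
buildGmailSearchQuery({}) // -> 'in:inbox'
```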
@@ -58,3 +58,19 @@ export function isFileExpired(userFile: UserFile): boolean {
export function getFileExpirationDate(): string {
  return new Date(Date.now() + 5 * 60 * 1000).toISOString()
}

/**
 * Check if a file is from execution storage based on its key pattern
 * Execution files have keys in format: workspaceId/workflowId/executionId/filename
 * Regular files have keys in format: timestamp-random-filename or just filename
 */
export function isExecutionFile(file: UserFile): boolean {
  if (!file.key) {
    return false
  }

  // Execution files have at least 3 slashes in their key (4 parts)
  // e.g., "workspace123/workflow456/execution789/document.pdf"
  const parts = file.key.split('/')
  return parts.length >= 4 && !file.key.startsWith('/api/') && !file.key.startsWith('http')
}
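Spot-checking the key heuristic above with representative keys (the key values are examples):

```ts
// Four path segments and no URL/route prefix -> execution storage:
isExecutionFile({ key: 'ws1/wf2/exec3/report.pdf' } as UserFile)      // true
// Flat upload keys have no segments:
isExecutionFile({ key: '1699999999-abc-report.pdf' } as UserFile)     // false
// Serve routes are excluded even though they contain enough slashes:
isExecutionFile({ key: '/api/files/serve/a/b/c/d.pdf' } as UserFile)  // false
```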
@@ -1,8 +1,4 @@
import type {
DiscordMessage,
DiscordSendMessageParams,
DiscordSendMessageResponse,
} from '@/tools/discord/types'
import type { DiscordSendMessageParams, DiscordSendMessageResponse } from '@/tools/discord/types'
import type { ToolConfig } from '@/tools/types'

export const discordSendMessageTool: ToolConfig<
@@ -39,46 +35,38 @@ export const discordSendMessageTool: ToolConfig<
visibility: 'user-only',
description: 'The Discord server ID (guild ID)',
},
files: {
type: 'file[]',
required: false,
visibility: 'user-only',
description: 'Files to attach to the message',
},
},

request: {
url: (params: DiscordSendMessageParams) =>
`https://discord.com/api/v10/channels/${params.channelId}/messages`,
url: '/api/tools/discord/send-message',
method: 'POST',
headers: (params: DiscordSendMessageParams) => {
const headers: Record<string, string> = {
'Content-Type': 'application/json',
}

if (params.botToken) {
headers.Authorization = `Bot ${params.botToken}`
}

return headers
},
headers: () => ({
'Content-Type': 'application/json',
}),
body: (params: DiscordSendMessageParams) => {
const body: Record<string, any> = {}

if (params.content) {
body.content = params.content
return {
botToken: params.botToken,
channelId: params.channelId,
content: params.content || 'Message sent from Sim',
files: params.files || null,
}

if (!body.content) {
body.content = 'Message sent from Sim'
}

return body
},
},

transformResponse: async (response) => {
const data = (await response.json()) as DiscordMessage
const data = await response.json()
if (!data.success) {
throw new Error(data.error || 'Failed to send Discord message')
}
return {
success: true,
output: {
message: 'Discord message sent successfully',
data,
},
output: data.output,
}
},
@@ -58,6 +58,7 @@ export interface DiscordSendMessageParams extends DiscordAuthParams {
description?: string
color?: string | number
}
files?: any[]
}

export interface DiscordGetMessagesParams extends DiscordAuthParams {
@@ -85,6 +85,7 @@ export const fileParserTool: ToolConfig<FileParserInput, FileParserOutput> = {
return {
filePath: determinedFilePath,
fileType: determinedFileType,
workspaceId: params.workspaceId || params._context?.workspaceId,
}
},
},
@@ -119,11 +120,6 @@ export const fileParserTool: ToolConfig<FileParserInput, FileParserOutput> = {
combinedContent,
}

// Add named properties for each file for dropdown access
fileResults.forEach((file: FileParseResult, index: number) => {
output[`file${index + 1}`] = file
})

return {
success: true,
output,
@@ -133,11 +129,10 @@ export const fileParserTool: ToolConfig<FileParserInput, FileParserOutput> = {
// Handle single file response
logger.info('Successfully parsed file:', result.output?.name || 'unknown')

// For a single file, create the output with both array and named property
// For a single file, create the output with just array format
const output: FileParserOutputData = {
files: [result.output || result],
combinedContent: result.output?.content || result.content || '',
file1: result.output || result,
}

return {
@@ -1,8 +1,6 @@
import type { GmailSendParams, GmailToolResponse } from '@/tools/gmail/types'
import type { ToolConfig } from '@/tools/types'

const GMAIL_API_BASE = 'https://gmail.googleapis.com/gmail/v1/users/me'

export const gmailDraftTool: ToolConfig<GmailSendParams, GmailToolResponse> = {
id: 'gmail_draft',
name: 'Gmail Draft',
@@ -52,55 +50,50 @@ export const gmailDraftTool: ToolConfig<GmailSendParams, GmailToolResponse> = {
visibility: 'user-or-llm',
description: 'BCC recipients (comma-separated)',
},
attachments: {
type: 'file[]',
required: false,
visibility: 'user-only',
description: 'Files to attach to the email draft',
},
},

request: {
url: () => `${GMAIL_API_BASE}/drafts`,
url: '/api/tools/gmail/draft',
method: 'POST',
headers: (params: GmailSendParams) => ({
Authorization: `Bearer ${params.accessToken}`,
headers: () => ({
'Content-Type': 'application/json',
}),
body: (params: GmailSendParams): Record<string, any> => {
const emailHeaders = [
'Content-Type: text/plain; charset="UTF-8"',
'MIME-Version: 1.0',
`To: ${params.to}`,
]

if (params.cc) {
emailHeaders.push(`Cc: ${params.cc}`)
}
if (params.bcc) {
emailHeaders.push(`Bcc: ${params.bcc}`)
}

emailHeaders.push(`Subject: ${params.subject}`, '', params.body)
const email = emailHeaders.join('\n')

return {
message: {
raw: Buffer.from(email).toString('base64url'),
},
}
},
body: (params: GmailSendParams) => ({
accessToken: params.accessToken,
to: params.to,
subject: params.subject,
body: params.body,
cc: params.cc,
bcc: params.bcc,
attachments: params.attachments,
}),
},

transformResponse: async (response) => {
const data = await response.json()

if (!data.success) {
return {
success: false,
output: {
content: data.error || 'Failed to create draft',
metadata: {},
},
error: data.error,
}
}

return {
success: true,
output: {
content: 'Email drafted successfully',
metadata: {
id: data.id,
message: {
id: data.message?.id,
threadId: data.message?.threadId,
labelIds: data.message?.labelIds,
},
},
content: data.output.content,
metadata: data.output.metadata,
},
}
},
@@ -1,5 +1,4 @@
import type { GmailSendParams, GmailToolResponse } from '@/tools/gmail/types'
import { GMAIL_API_BASE } from '@/tools/gmail/utils'
import type { ToolConfig } from '@/tools/types'

export const gmailSendTool: ToolConfig<GmailSendParams, GmailToolResponse> = {
@@ -51,50 +50,50 @@ export const gmailSendTool: ToolConfig<GmailSendParams, GmailToolResponse> = {
visibility: 'user-or-llm',
description: 'BCC recipients (comma-separated)',
},
attachments: {
type: 'file[]',
required: false,
visibility: 'user-only',
description: 'Files to attach to the email',
},
},

request: {
url: () => `${GMAIL_API_BASE}/messages/send`,
url: '/api/tools/gmail/send',
method: 'POST',
headers: (params: GmailSendParams) => ({
Authorization: `Bearer ${params.accessToken}`,
headers: () => ({
'Content-Type': 'application/json',
}),
body: (params: GmailSendParams): Record<string, any> => {
const emailHeaders = [
'Content-Type: text/plain; charset="UTF-8"',
'MIME-Version: 1.0',
`To: ${params.to}`,
]

if (params.cc) {
emailHeaders.push(`Cc: ${params.cc}`)
}
if (params.bcc) {
emailHeaders.push(`Bcc: ${params.bcc}`)
}

emailHeaders.push(`Subject: ${params.subject}`, '', params.body)
const email = emailHeaders.join('\n')

return {
raw: Buffer.from(email).toString('base64url'),
}
},
body: (params: GmailSendParams) => ({
accessToken: params.accessToken,
to: params.to,
subject: params.subject,
body: params.body,
cc: params.cc,
bcc: params.bcc,
attachments: params.attachments,
}),
},

transformResponse: async (response) => {
const data = await response.json()

if (!data.success) {
return {
success: false,
output: {
content: data.error || 'Failed to send email',
metadata: {},
},
error: data.error,
}
}

return {
success: true,
output: {
content: 'Email sent successfully',
metadata: {
id: data.id,
threadId: data.threadId,
labelIds: data.labelIds,
},
content: data.output.content,
metadata: data.output.metadata,
},
}
},
@@ -1,3 +1,4 @@
import type { UserFile } from '@/executor/types'
import type { ToolResponse } from '@/tools/types'

// Base parameters shared by all operations
@@ -12,6 +13,7 @@ export interface GmailSendParams extends BaseGmailParams {
bcc?: string
subject: string
body: string
attachments?: UserFile[]
}

// Read operation parameters
@@ -238,3 +238,91 @@ export function createMessagesSummary(messages: any[]): string {

  return summary
}

/**
 * Generate a unique MIME boundary string
 */
function generateBoundary(): string {
  return `----=_Part_${Date.now()}_${Math.random().toString(36).substring(2, 15)}`
}

/**
 * Encode string or buffer to base64url format (URL-safe base64)
 * Gmail API requires base64url encoding for the raw message field
 */
export function base64UrlEncode(data: string | Buffer): string {
  const base64 = Buffer.from(data).toString('base64')
  return base64.replace(/\+/g, '-').replace(/\//g, '_').replace(/=+$/, '')
}

/**
 * Build a MIME multipart message with optional attachments
 * @param params Message parameters including recipients, subject, body, and attachments
 * @returns Complete MIME message string ready to be base64url encoded
 */
export interface BuildMimeMessageParams {
  to: string
  cc?: string
  bcc?: string
  subject: string
  body: string
  attachments?: Array<{
    filename: string
    mimeType: string
    content: Buffer
  }>
}

export function buildMimeMessage(params: BuildMimeMessageParams): string {
  const { to, cc, bcc, subject, body, attachments } = params
  const boundary = generateBoundary()
  const messageParts: string[] = []

  // Add headers
  messageParts.push(`To: ${to}`)
  if (cc) {
    messageParts.push(`Cc: ${cc}`)
  }
  if (bcc) {
    messageParts.push(`Bcc: ${bcc}`)
  }
  messageParts.push(`Subject: ${subject}`)
  messageParts.push('MIME-Version: 1.0')

  if (attachments && attachments.length > 0) {
    // Multipart message with attachments
    messageParts.push(`Content-Type: multipart/mixed; boundary="${boundary}"`)
    messageParts.push('')
    messageParts.push(`--${boundary}`)
    messageParts.push('Content-Type: text/plain; charset="UTF-8"')
    messageParts.push('Content-Transfer-Encoding: 7bit')
    messageParts.push('')
    messageParts.push(body)
    messageParts.push('')

    // Add each attachment
    for (const attachment of attachments) {
      messageParts.push(`--${boundary}`)
      messageParts.push(`Content-Type: ${attachment.mimeType}`)
      messageParts.push(`Content-Disposition: attachment; filename="${attachment.filename}"`)
      messageParts.push('Content-Transfer-Encoding: base64')
      messageParts.push('')

      // Split base64 content into 76-character lines (MIME standard)
      const base64Content = attachment.content.toString('base64')
      const lines = base64Content.match(/.{1,76}/g) || []
      messageParts.push(...lines)
      messageParts.push('')
    }

    messageParts.push(`--${boundary}--`)
  } else {
    // Simple text message without attachments
    messageParts.push('Content-Type: text/plain; charset="UTF-8"')
    messageParts.push('MIME-Version: 1.0')
    messageParts.push('')
    messageParts.push(body)
  }

  return messageParts.join('\n')
}
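Taken together, the two exported helpers above produce the `raw` payload that Gmail's send endpoint expects. A minimal usage sketch — the import path and all sample values are assumptions for illustration, not part of the diff:

```typescript
import { base64UrlEncode, buildMimeMessage } from '@/tools/gmail/utils' // assumed path

// Illustrative values only; a real attachment's bytes would come from a UserFile upload.
const mime = buildMimeMessage({
  to: 'recipient@example.com',
  subject: 'Quarterly report',
  body: 'Report attached.',
  attachments: [
    {
      filename: 'report.pdf',
      mimeType: 'application/pdf',
      content: Buffer.from('...pdf bytes...'),
    },
  ],
})

// Gmail's messages.send API requires the `raw` field in URL-safe base64.
const raw = base64UrlEncode(mime)
```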
@@ -38,6 +38,7 @@ export interface GoogleDriveToolParams {
  folderSelector?: string
  fileId?: string
  fileName?: string
  file?: any // UserFile object
  content?: string
  mimeType?: string
  query?: string
@@ -34,17 +34,23 @@ export const uploadTool: ToolConfig<GoogleDriveToolParams, GoogleDriveUploadResp
      visibility: 'user-or-llm',
      description: 'The name of the file to upload',
    },
    file: {
      type: 'file',
      required: false,
      visibility: 'user-only',
      description: 'Binary file to upload (UserFile object)',
    },
    content: {
      type: 'string',
      required: true,
      required: false,
      visibility: 'user-or-llm',
      description: 'The content of the file to upload',
      description: 'Text content to upload (use this OR file, not both)',
    },
    mimeType: {
      type: 'string',
      required: false,
      visibility: 'hidden',
      description: 'The MIME type of the file to upload',
      description: 'The MIME type of the file to upload (auto-detected from file if not provided)',
    },
    folderSelector: {
      type: 'string',
@@ -61,13 +67,37 @@ export const uploadTool: ToolConfig<GoogleDriveToolParams, GoogleDriveUploadResp
  },

  request: {
    url: 'https://www.googleapis.com/drive/v3/files?supportsAllDrives=true',
    url: (params) => {
      // Use custom API route if file is provided, otherwise use Google Drive API directly
      if (params.file) {
        return '/api/tools/google_drive/upload'
      }
      return 'https://www.googleapis.com/drive/v3/files?supportsAllDrives=true'
    },
    method: 'POST',
    headers: (params) => ({
      Authorization: `Bearer ${params.accessToken}`,
      'Content-Type': 'application/json',
    }),
    headers: (params) => {
      const headers: Record<string, string> = {
        'Content-Type': 'application/json',
      }
      // Google Drive API for text-only uploads needs Authorization
      if (!params.file) {
        headers.Authorization = `Bearer ${params.accessToken}`
      }
      return headers
    },
    body: (params) => {
      // Custom route handles file uploads
      if (params.file) {
        return {
          accessToken: params.accessToken,
          fileName: params.fileName,
          file: params.file,
          mimeType: params.mimeType,
          folderId: params.folderSelector || params.folderId,
        }
      }

      // Original text-only upload logic
      const metadata: {
        name: string | undefined
        mimeType: string
@@ -91,6 +121,23 @@ export const uploadTool: ToolConfig<GoogleDriveToolParams, GoogleDriveUploadResp
    try {
      const data = await response.json()

      // Handle custom API route response (for file uploads)
      if (params?.file && data.success !== undefined) {
        if (!data.success) {
          logger.error('Failed to upload file via custom API route', {
            error: data.error,
          })
          throw new Error(data.error || 'Failed to upload file to Google Drive')
        }
        return {
          success: true,
          output: {
            file: data.output.file,
          },
        }
      }

      // Handle Google Drive API response (for text-only uploads)
      if (!response.ok) {
        logger.error('Failed to create file in Google Drive', {
          status: response.status,
@@ -71,6 +71,7 @@ export interface MicrosoftTeamsToolParams {
  teamId?: string
  content?: string
  includeAttachments?: boolean
  files?: any[] // UserFile array for attachments
}

export type MicrosoftTeamsResponse = MicrosoftTeamsReadResponse | MicrosoftTeamsWriteResponse
@@ -38,6 +38,12 @@ export const writeChannelTool: ToolConfig<MicrosoftTeamsToolParams, MicrosoftTea
      visibility: 'user-or-llm',
      description: 'The content to write to the channel',
    },
    files: {
      type: 'file[]',
      required: false,
      visibility: 'user-only',
      description: 'Files to attach to the message',
    },
  },

  outputs: {
@@ -62,6 +68,11 @@ export const writeChannelTool: ToolConfig<MicrosoftTeamsToolParams, MicrosoftTea
        throw new Error('Channel ID is required')
      }

      // If files are provided, use custom API route for attachment handling
      if (params.files && params.files.length > 0) {
        return '/api/tools/microsoft_teams/write_channel'
      }

      const encodedTeamId = encodeURIComponent(teamId)
      const encodedChannelId = encodeURIComponent(channelId)

@@ -87,6 +98,17 @@ export const writeChannelTool: ToolConfig<MicrosoftTeamsToolParams, MicrosoftTea
        throw new Error('Content is required')
      }

      // If using custom API route (with files), pass all params
      if (params.files && params.files.length > 0) {
        return {
          accessToken: params.accessToken,
          teamId: params.teamId,
          channelId: params.channelId,
          content: params.content,
          files: params.files,
        }
      }

      // Microsoft Teams API expects this specific format for channel messages
      const requestBody = {
        body: {
@@ -101,7 +123,12 @@ export const writeChannelTool: ToolConfig<MicrosoftTeamsToolParams, MicrosoftTea
  transformResponse: async (response: Response, params?: MicrosoftTeamsToolParams) => {
    const data = await response.json()

    // Create document metadata from the response
    // Handle custom API route response format
    if (data.success !== undefined && data.output) {
      return data
    }

    // Handle direct Graph API response format
    const metadata = {
      messageId: data.id || '',
      teamId: data.channelIdentity?.teamId || '',
@@ -32,6 +32,12 @@ export const writeChatTool: ToolConfig<MicrosoftTeamsToolParams, MicrosoftTeamsW
      visibility: 'user-or-llm',
      description: 'The content to write to the message',
    },
    files: {
      type: 'file[]',
      required: false,
      visibility: 'user-only',
      description: 'Files to attach to the message',
    },
  },

  outputs: {
@@ -51,6 +57,11 @@ export const writeChatTool: ToolConfig<MicrosoftTeamsToolParams, MicrosoftTeamsW
        throw new Error('Chat ID is required')
      }

      // If files are provided, use custom API route for attachment handling
      if (params.files && params.files.length > 0) {
        return '/api/tools/microsoft_teams/write_chat'
      }

      return `https://graph.microsoft.com/v1.0/chats/${encodeURIComponent(chatId)}/messages`
    },
    method: 'POST',
@@ -71,6 +82,16 @@ export const writeChatTool: ToolConfig<MicrosoftTeamsToolParams, MicrosoftTeamsW
        throw new Error('Content is required')
      }

      // If using custom API route (with files), pass all params
      if (params.files && params.files.length > 0) {
        return {
          accessToken: params.accessToken,
          chatId: params.chatId,
          content: params.content,
          files: params.files,
        }
      }

      // Microsoft Teams API expects this specific format
      const requestBody = {
        body: {
@@ -85,7 +106,12 @@ export const writeChatTool: ToolConfig<MicrosoftTeamsToolParams, MicrosoftTeamsW
  transformResponse: async (response: Response, params?: MicrosoftTeamsToolParams) => {
    const data = await response.json()

    // Create document metadata from the response
    // Handle custom API route response format
    if (data.success !== undefined && data.output) {
      return data
    }

    // Handle direct Graph API response format
    const metadata = {
      messageId: data.id || '',
      chatId: data.chatId || '',
@@ -65,7 +65,7 @@ export const mistralParserTool: ToolConfig<MistralParserInput, MistralParserOutp
  },

  request: {
    url: 'https://api.mistral.ai/v1/ocr',
    url: '/api/tools/mistral/parse',
    method: 'POST',
    headers: (params) => {
      return {
@@ -168,11 +168,14 @@ export const mistralParserTool: ToolConfig<MistralParserInput, MistralParserOutp

      // Create the request body with required parameters
      const requestBody: Record<string, any> = {
        model: 'mistral-ocr-latest',
        document: {
          type: 'document_url',
          document_url: url.toString(),
        },
        apiKey: params.apiKey,
        filePath: url.toString(),
      }

      // Check if this is an internal workspace file path
      if (params.fileUpload?.path?.startsWith('/api/files/serve/')) {
        // Update filePath to the internal path for workspace files
        requestBody.filePath = params.fileUpload.path
      }

      // Add optional parameters with proper validation
@@ -181,7 +184,7 @@ export const mistralParserTool: ToolConfig<MistralParserInput, MistralParserOutp
        if (typeof params.includeImageBase64 !== 'boolean') {
          logger.warn('includeImageBase64 parameter should be a boolean, using default (false)')
        } else {
          requestBody.include_image_base64 = params.includeImageBase64
          requestBody.includeImageBase64 = params.includeImageBase64
        }
      }

@@ -213,7 +216,7 @@ export const mistralParserTool: ToolConfig<MistralParserInput, MistralParserOutp
      if (params.imageLimit !== undefined && params.imageLimit !== null) {
        const imageLimit = Number(params.imageLimit)
        if (Number.isInteger(imageLimit) && imageLimit > 0) {
          requestBody.image_limit = imageLimit
          requestBody.imageLimit = imageLimit
        } else {
          logger.warn('imageLimit must be a positive integer, ignoring this parameter')
        }
@@ -223,7 +226,7 @@ export const mistralParserTool: ToolConfig<MistralParserInput, MistralParserOutp
      if (params.imageMinSize !== undefined && params.imageMinSize !== null) {
        const imageMinSize = Number(params.imageMinSize)
        if (Number.isInteger(imageMinSize) && imageMinSize > 0) {
          requestBody.image_min_size = imageMinSize
          requestBody.imageMinSize = imageMinSize
        } else {
          logger.warn('imageMinSize must be a positive integer, ignoring this parameter')
        }

@@ -10,6 +10,9 @@ export interface MistralParserInput {
  /** File upload data (from file-upload component) */
  fileUpload?: any

  /** Internal file path flag (for presigned URL conversion) */
  _internalFilePath?: string

  /** Mistral API key for authentication */
  apiKey: string
@@ -53,6 +53,7 @@ export interface OneDriveToolParams {
  folderName?: string
  fileId?: string
  fileName?: string
  file?: unknown // UserFile or UserFile array
  content?: string
  mimeType?: string
  query?: string
@@ -36,11 +36,17 @@ export const uploadTool: ToolConfig<OneDriveToolParams, OneDriveUploadResponse>
      visibility: 'user-or-llm',
      description: 'The name of the file to upload',
    },
    file: {
      type: 'file',
      required: false,
      visibility: 'user-only',
      description: 'The file to upload (binary)',
    },
    content: {
      type: 'string',
      required: true,
      required: false,
      visibility: 'user-or-llm',
      description: 'The content of the file to upload',
      description: 'The text content to upload (if no file is provided)',
    },
    folderSelector: {
      type: 'string',
@@ -58,6 +64,12 @@ export const uploadTool: ToolConfig<OneDriveToolParams, OneDriveUploadResponse>

  request: {
    url: (params) => {
      // If file is provided, use custom API route for binary upload
      if (params.file) {
        return '/api/tools/onedrive/upload'
      }

      // Text-only upload - use direct Microsoft Graph API
      let fileName = params.fileName || 'untitled'

      // Always create .txt files for text content
@@ -74,17 +86,59 @@ export const uploadTool: ToolConfig<OneDriveToolParams, OneDriveUploadResponse>
      // Default to root folder
      return `https://graph.microsoft.com/v1.0/me/drive/root:/${fileName}:/content`
    },
    method: 'PUT',
    headers: (params) => ({
      Authorization: `Bearer ${params.accessToken}`,
      'Content-Type': 'text/plain',
    }),
    body: (params) => (params.content || '') as unknown as Record<string, unknown>,
    method: (params) => {
      // Use POST for custom API route, PUT for direct upload
      return params.file ? 'POST' : 'PUT'
    },
    headers: (params) => {
      const headers: Record<string, string> = {}
      // For file uploads via custom API, send JSON
      if (params.file) {
        headers['Content-Type'] = 'application/json'
      } else {
        // For text-only uploads, use direct PUT with access token
        headers.Authorization = `Bearer ${params.accessToken}`
        headers['Content-Type'] = 'text/plain'
      }
      return headers
    },
    body: (params) => {
      // For file uploads, send all params as JSON to custom API route
      if (params.file) {
        return {
          accessToken: params.accessToken,
          fileName: params.fileName,
          file: params.file,
          folderId: params.manualFolderId || params.folderSelector,
        }
      }
      // For text-only uploads, send content directly
      return (params.content || '') as unknown as Record<string, unknown>
    },
  },

  transformResponse: async (response: Response, params?: OneDriveToolParams) => {
    // Microsoft Graph API returns the file metadata directly
    const fileData = await response.json()
    const data = await response.json()

    // Handle response from custom API route (for file uploads)
    if (params?.file && data.success !== undefined) {
      if (!data.success) {
        throw new Error(data.error || 'Failed to upload file')
      }

      logger.info('Successfully uploaded file to OneDrive via custom API', {
        fileId: data.output?.file?.id,
        fileName: data.output?.file?.name,
      })

      return {
        success: true,
        output: data.output,
      }
    }

    // Handle response from direct Microsoft Graph API (for text-only uploads)
    const fileData = data

    logger.info('Successfully uploaded file to OneDrive', {
      fileId: fileData.id,
@@ -49,72 +49,48 @@ export const outlookDraftTool: ToolConfig<OutlookDraftParams, OutlookDraftRespon
      visibility: 'user-or-llm',
      description: 'BCC recipients (comma-separated)',
    },
    attachments: {
      type: 'file[]',
      required: false,
      visibility: 'user-only',
      description: 'Files to attach to the email draft',
    },
  },

  request: {
    url: (params) => {
      return `https://graph.microsoft.com/v1.0/me/messages`
    },
    url: '/api/tools/outlook/draft',
    method: 'POST',
    headers: (params) => {
      // Validate access token
      if (!params.accessToken) {
        throw new Error('Access token is required')
      }

    headers: () => ({
      'Content-Type': 'application/json',
    }),
    body: (params: OutlookDraftParams) => {
      return {
        Authorization: `Bearer ${params.accessToken}`,
        'Content-Type': 'application/json',
      }
    },
    body: (params: OutlookDraftParams): Record<string, any> => {
      // Helper function to parse comma-separated emails
      const parseEmails = (emailString?: string) => {
        if (!emailString) return []
        return emailString
          .split(',')
          .map((email) => email.trim())
          .filter((email) => email.length > 0)
          .map((email) => ({ emailAddress: { address: email } }))
      }

      const message: any = {
        accessToken: params.accessToken,
        to: params.to,
        subject: params.subject,
        body: {
          contentType: 'Text',
          content: params.body,
        },
        toRecipients: parseEmails(params.to),
        body: params.body,
        cc: params.cc || null,
        bcc: params.bcc || null,
        attachments: params.attachments || null,
      }

      // Add CC if provided
      const ccRecipients = parseEmails(params.cc)
      if (ccRecipients.length > 0) {
        message.ccRecipients = ccRecipients
      }

      // Add BCC if provided
      const bccRecipients = parseEmails(params.bcc)
      if (bccRecipients.length > 0) {
        message.bccRecipients = bccRecipients
      }

      return message
    },
  },
  transformResponse: async (response) => {
    // Outlook draft API returns the created message object
    const data = await response.json()

  transformResponse: async (response) => {
    const data = await response.json()
    if (!data.success) {
      throw new Error(data.error || 'Failed to create Outlook draft')
    }
    return {
      success: true,
      output: {
        message: 'Email drafted successfully',
        message: data.output.message,
        results: {
          id: data.id,
          subject: data.subject,
          id: data.output.messageId,
          subject: data.output.subject,
          status: 'drafted',
          timestamp: new Date().toISOString(),
          attachmentCount: data.output.attachmentCount || 0,
        },
      },
    }
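For reference, the rewritten `body` function now posts a flat JSON payload to the internal route, leaving recipient parsing and attachment handling to the server. A sketch with placeholder values (the payload keys come from the diff; the values are illustrative):

```typescript
// Hypothetical payload produced by body(params) for a draft with no CC/BCC.
const payload = {
  accessToken: '<oauth-access-token>',
  to: 'a@example.com, b@example.com', // still comma-separated; parsing moved server-side
  subject: 'Draft with attachment',
  body: 'See attached.',
  cc: null,
  bcc: null,
  attachments: null, // or an array of UserFile objects
}

await fetch('/api/tools/outlook/draft', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify(payload),
})
```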
@@ -61,108 +61,48 @@ export const outlookSendTool: ToolConfig<OutlookSendParams, OutlookSendResponse>
      visibility: 'user-or-llm',
      description: 'BCC recipients (comma-separated)',
    },
    attachments: {
      type: 'file[]',
      required: false,
      visibility: 'user-only',
      description: 'Files to attach to the email',
    },
  },

  request: {
    url: (params) => {
      // If replying to a specific message, use the reply endpoint
      if (params.replyToMessageId) {
        return `https://graph.microsoft.com/v1.0/me/messages/${params.replyToMessageId}/reply`
      }
      // Otherwise use the regular send mail endpoint
      return `https://graph.microsoft.com/v1.0/me/sendMail`
    },
    url: '/api/tools/outlook/send',
    method: 'POST',
    headers: (params) => {
      // Validate access token
      if (!params.accessToken) {
        throw new Error('Access token is required')
      }

    headers: () => ({
      'Content-Type': 'application/json',
    }),
    body: (params: OutlookSendParams) => {
      return {
        Authorization: `Bearer ${params.accessToken}`,
        'Content-Type': 'application/json',
      }
    },
    body: (params: OutlookSendParams): Record<string, any> => {
      // Helper function to parse comma-separated emails
      const parseEmails = (emailString?: string) => {
        if (!emailString) return []
        return emailString
          .split(',')
          .map((email) => email.trim())
          .filter((email) => email.length > 0)
          .map((email) => ({ emailAddress: { address: email } }))
      }

      // If replying to a message, use the reply format
      if (params.replyToMessageId) {
        const replyBody: any = {
          message: {
            body: {
              contentType: 'Text',
              content: params.body,
            },
          },
        }

        // Add CC/BCC if provided
        const ccRecipients = parseEmails(params.cc)
        const bccRecipients = parseEmails(params.bcc)

        if (ccRecipients.length > 0) {
          replyBody.message.ccRecipients = ccRecipients
        }
        if (bccRecipients.length > 0) {
          replyBody.message.bccRecipients = bccRecipients
        }

        return replyBody
      }

      // Regular send mail format
      const toRecipients = parseEmails(params.to)
      const ccRecipients = parseEmails(params.cc)
      const bccRecipients = parseEmails(params.bcc)

      const message: any = {
        accessToken: params.accessToken,
        to: params.to,
        subject: params.subject,
        body: {
          contentType: 'Text',
          content: params.body,
        },
        toRecipients,
      }

      // Add CC/BCC if provided
      if (ccRecipients.length > 0) {
        message.ccRecipients = ccRecipients
      }
      if (bccRecipients.length > 0) {
        message.bccRecipients = bccRecipients
      }

      // Add conversation ID for threading if provided
      if (params.conversationId) {
        message.conversationId = params.conversationId
      }

      return {
        message,
        saveToSentItems: true,
        body: params.body,
        cc: params.cc || null,
        bcc: params.bcc || null,
        replyToMessageId: params.replyToMessageId || null,
        conversationId: params.conversationId || null,
        attachments: params.attachments || null,
      }
    },
  },

  transformResponse: async (response) => {
    // Outlook sendMail API returns empty body on success
    const data = await response.json()
    if (!data.success) {
      throw new Error(data.error || 'Failed to send Outlook email')
    }
    return {
      success: true,
      output: {
        message: 'Email sent successfully',
        message: data.output.message,
        results: {
          status: 'sent',
          timestamp: new Date().toISOString(),
          status: data.output.status,
          timestamp: data.output.timestamp,
          attachmentCount: data.output.attachmentCount || 0,
        },
      },
    }
@@ -10,6 +10,7 @@ export interface OutlookSendParams {
  conversationId?: string
  cc?: string
  bcc?: string
  attachments?: any[]
}

export interface OutlookSendResponse extends ToolResponse {
@@ -41,6 +42,7 @@ export interface OutlookDraftParams {
  bcc?: string
  subject: string
  body: string
  attachments?: any[]
}

export interface OutlookDraftResponse extends ToolResponse {
@@ -152,7 +152,13 @@ import {
import { qdrantFetchTool, qdrantSearchTool, qdrantUpsertTool } from '@/tools/qdrant'
import { redditGetCommentsTool, redditGetPostsTool, redditHotPostsTool } from '@/tools/reddit'
import { mailSendTool } from '@/tools/resend'
import { s3GetObjectTool } from '@/tools/s3'
import {
  s3CopyObjectTool,
  s3DeleteObjectTool,
  s3GetObjectTool,
  s3ListObjectsTool,
  s3PutObjectTool,
} from '@/tools/s3'
import { searchTool as serperSearch } from '@/tools/serper'
import {
  sharepointAddListItemTool,
@@ -162,6 +168,7 @@ import {
  sharepointListSitesTool,
  sharepointReadPageTool,
  sharepointUpdateListItemTool,
  sharepointUploadFileTool,
} from '@/tools/sharepoint'
import { slackCanvasTool, slackMessageReaderTool, slackMessageTool } from '@/tools/slack'
import { smsSendTool } from '@/tools/sms'
@@ -180,6 +187,7 @@ import {
  telegramMessageTool,
  telegramSendAnimationTool,
  telegramSendAudioTool,
  telegramSendDocumentTool,
  telegramSendPhotoTool,
  telegramSendVideoTool,
} from '@/tools/telegram'
@@ -362,12 +370,17 @@ export const tools: Record<string, ToolConfig> = {
  knowledge_create_document: knowledgeCreateDocumentTool,
  elevenlabs_tts: elevenLabsTtsTool,
  s3_get_object: s3GetObjectTool,
  s3_put_object: s3PutObjectTool,
  s3_list_objects: s3ListObjectsTool,
  s3_delete_object: s3DeleteObjectTool,
  s3_copy_object: s3CopyObjectTool,
  telegram_message: telegramMessageTool,
  telegram_delete_message: telegramDeleteMessageTool,
  telegram_send_audio: telegramSendAudioTool,
  telegram_send_animation: telegramSendAnimationTool,
  telegram_send_photo: telegramSendPhotoTool,
  telegram_send_video: telegramSendVideoTool,
  telegram_send_document: telegramSendDocumentTool,
  clay_populate: clayPopulateTool,
  discord_send_message: discordSendMessageTool,
  discord_get_messages: discordGetMessagesTool,
@@ -432,6 +445,5 @@ export const tools: Record<string, ToolConfig> = {
  sharepoint_create_list: sharepointCreateListTool,
  sharepoint_update_list: sharepointUpdateListItemTool,
  sharepoint_add_list_items: sharepointAddListItemTool,
  // Provider chat tools
  // Provider chat tools - handled separately in agent blocks
  sharepoint_upload_file: sharepointUploadFileTool,
}
117  apps/sim/tools/s3/copy_object.ts  Normal file
@@ -0,0 +1,117 @@
import type { ToolConfig } from '@/tools/types'

export const s3CopyObjectTool: ToolConfig = {
  id: 's3_copy_object',
  name: 'S3 Copy Object',
  description: 'Copy an object within or between AWS S3 buckets',
  version: '1.0.0',

  params: {
    accessKeyId: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'Your AWS Access Key ID',
    },
    secretAccessKey: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'Your AWS Secret Access Key',
    },
    region: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'AWS region (e.g., us-east-1)',
    },
    sourceBucket: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'Source bucket name',
    },
    sourceKey: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'Source object key/path',
    },
    destinationBucket: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'Destination bucket name',
    },
    destinationKey: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'Destination object key/path',
    },
    acl: {
      type: 'string',
      required: false,
      visibility: 'user-only',
      description: 'Access control list for the copied object (e.g., private, public-read)',
    },
  },

  request: {
    url: '/api/tools/s3/copy-object',
    method: 'POST',
    headers: () => ({
      'Content-Type': 'application/json',
    }),
    body: (params) => ({
      accessKeyId: params.accessKeyId,
      secretAccessKey: params.secretAccessKey,
      region: params.region,
      sourceBucket: params.sourceBucket,
      sourceKey: params.sourceKey,
      destinationBucket: params.destinationBucket,
      destinationKey: params.destinationKey,
      acl: params.acl,
    }),
  },

  transformResponse: async (response) => {
    const data = await response.json()

    if (!data.success) {
      return {
        success: false,
        output: {
          url: '',
          metadata: {
            error: data.error || 'Failed to copy object',
          },
        },
        error: data.error,
      }
    }

    return {
      success: true,
      output: {
        url: data.output.url,
        metadata: {
          copySourceVersionId: data.output.copySourceVersionId,
          versionId: data.output.versionId,
          etag: data.output.etag,
        },
      },
    }
  },

  outputs: {
    url: {
      type: 'string',
      description: 'URL of the copied S3 object',
    },
    metadata: {
      type: 'object',
      description: 'Copy operation metadata',
    },
  },
}
96  apps/sim/tools/s3/delete_object.ts  Normal file
@@ -0,0 +1,96 @@
import type { ToolConfig } from '@/tools/types'

export const s3DeleteObjectTool: ToolConfig = {
  id: 's3_delete_object',
  name: 'S3 Delete Object',
  description: 'Delete an object from an AWS S3 bucket',
  version: '1.0.0',

  params: {
    accessKeyId: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'Your AWS Access Key ID',
    },
    secretAccessKey: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'Your AWS Secret Access Key',
    },
    region: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'AWS region (e.g., us-east-1)',
    },
    bucketName: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'S3 bucket name',
    },
    objectKey: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'Object key/path to delete',
    },
  },

  request: {
    url: '/api/tools/s3/delete-object',
    method: 'POST',
    headers: () => ({
      'Content-Type': 'application/json',
    }),
    body: (params) => ({
      accessKeyId: params.accessKeyId,
      secretAccessKey: params.secretAccessKey,
      region: params.region,
      bucketName: params.bucketName,
      objectKey: params.objectKey,
    }),
  },

  transformResponse: async (response) => {
    const data = await response.json()

    if (!data.success) {
      return {
        success: false,
        output: {
          deleted: false,
          metadata: {
            error: data.error || 'Failed to delete object',
          },
        },
        error: data.error,
      }
    }

    return {
      success: true,
      output: {
        deleted: true,
        metadata: {
          key: data.output.key,
          deleteMarker: data.output.deleteMarker,
          versionId: data.output.versionId,
        },
      },
    }
  },

  outputs: {
    deleted: {
      type: 'boolean',
      description: 'Whether the object was successfully deleted',
    },
    metadata: {
      type: 'object',
      description: 'Deletion metadata',
    },
  },
}
@@ -1,3 +1,7 @@
import { s3CopyObjectTool } from '@/tools/s3/copy_object'
import { s3DeleteObjectTool } from '@/tools/s3/delete_object'
import { s3GetObjectTool } from '@/tools/s3/get_object'
import { s3ListObjectsTool } from '@/tools/s3/list_objects'
import { s3PutObjectTool } from '@/tools/s3/put_object'

export { s3GetObjectTool }
export { s3GetObjectTool, s3PutObjectTool, s3ListObjectsTool, s3DeleteObjectTool, s3CopyObjectTool }
120  apps/sim/tools/s3/list_objects.ts  Normal file
@@ -0,0 +1,120 @@
import type { ToolConfig } from '@/tools/types'

export const s3ListObjectsTool: ToolConfig = {
  id: 's3_list_objects',
  name: 'S3 List Objects',
  description: 'List objects in an AWS S3 bucket',
  version: '1.0.0',

  params: {
    accessKeyId: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'Your AWS Access Key ID',
    },
    secretAccessKey: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'Your AWS Secret Access Key',
    },
    region: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'AWS region (e.g., us-east-1)',
    },
    bucketName: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'S3 bucket name',
    },
    prefix: {
      type: 'string',
      required: false,
      visibility: 'user-only',
      description: 'Prefix to filter objects (e.g., folder/)',
    },
    maxKeys: {
      type: 'number',
      required: false,
      visibility: 'user-only',
      description: 'Maximum number of objects to return (default: 1000)',
    },
    continuationToken: {
      type: 'string',
      required: false,
      visibility: 'user-only',
      description: 'Token for pagination',
    },
  },

  request: {
    url: '/api/tools/s3/list-objects',
    method: 'POST',
    headers: () => ({
      'Content-Type': 'application/json',
    }),
    body: (params) => ({
      accessKeyId: params.accessKeyId,
      secretAccessKey: params.secretAccessKey,
      region: params.region,
      bucketName: params.bucketName,
      prefix: params.prefix,
      maxKeys: params.maxKeys,
      continuationToken: params.continuationToken,
    }),
  },

  transformResponse: async (response) => {
    const data = await response.json()

    if (!data.success) {
      return {
        success: false,
        output: {
          objects: [],
          metadata: {
            error: data.error || 'Failed to list objects',
          },
        },
        error: data.error,
      }
    }

    return {
      success: true,
      output: {
        objects: data.output.objects || [],
        metadata: {
          isTruncated: data.output.isTruncated,
          nextContinuationToken: data.output.nextContinuationToken,
          keyCount: data.output.keyCount,
          prefix: data.output.prefix,
        },
      },
    }
  },

  outputs: {
    objects: {
      type: 'array',
      description: 'List of S3 objects',
      items: {
        type: 'object',
        properties: {
          key: { type: 'string', description: 'Object key' },
          size: { type: 'number', description: 'Object size in bytes' },
          lastModified: { type: 'string', description: 'Last modified timestamp' },
          etag: { type: 'string', description: 'Entity tag' },
        },
      },
    },
    metadata: {
      type: 'object',
      description: 'Listing metadata including pagination info',
    },
  },
}
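Because listings are capped by `maxKeys`, callers page with the continuation token until `isTruncated` is false. A minimal sketch of driving the internal route directly — the bucket name and credentials are placeholders, and calling the route by hand like this is illustrative only:

```typescript
// Page through every key in a bucket via /api/tools/s3/list-objects.
async function listAllKeys(creds: {
  accessKeyId: string
  secretAccessKey: string
  region: string
}): Promise<string[]> {
  const keys: string[] = []
  let continuationToken: string | undefined

  do {
    const res = await fetch('/api/tools/s3/list-objects', {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({
        ...creds,
        bucketName: 'my-bucket', // placeholder
        maxKeys: 1000,
        continuationToken,
      }),
    })
    const data = await res.json()
    if (!data.success) throw new Error(data.error)

    keys.push(...data.output.objects.map((o: { key: string }) => o.key))
    // The route surfaces S3's NextContinuationToken; undefined ends the loop.
    continuationToken = data.output.nextContinuationToken
  } while (continuationToken)

  return keys
}
```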
125  apps/sim/tools/s3/put_object.ts  Normal file
@@ -0,0 +1,125 @@
import type { ToolConfig } from '@/tools/types'

export const s3PutObjectTool: ToolConfig = {
  id: 's3_put_object',
  name: 'S3 Put Object',
  description: 'Upload a file to an AWS S3 bucket',
  version: '1.0.0',

  params: {
    accessKeyId: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'Your AWS Access Key ID',
    },
    secretAccessKey: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'Your AWS Secret Access Key',
    },
    region: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'AWS region (e.g., us-east-1)',
    },
    bucketName: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'S3 bucket name',
    },
    objectKey: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'Object key/path in S3 (e.g., folder/filename.ext)',
    },
    file: {
      type: 'file',
      required: false,
      visibility: 'user-only',
      description: 'File to upload',
    },
    content: {
      type: 'string',
      required: false,
      visibility: 'user-only',
      description: 'Text content to upload (alternative to file)',
    },
    contentType: {
      type: 'string',
      required: false,
      visibility: 'user-only',
      description: 'Content-Type header (auto-detected from file if not provided)',
    },
    acl: {
      type: 'string',
      required: false,
      visibility: 'user-only',
      description: 'Access control list (e.g., private, public-read)',
    },
  },

  request: {
    url: '/api/tools/s3/put-object',
    method: 'POST',
    headers: () => ({
      'Content-Type': 'application/json',
    }),
    body: (params) => ({
      accessKeyId: params.accessKeyId,
      secretAccessKey: params.secretAccessKey,
      region: params.region,
      bucketName: params.bucketName,
      objectKey: params.objectKey,
      file: params.file,
      content: params.content,
      contentType: params.contentType,
      acl: params.acl,
    }),
  },

  transformResponse: async (response) => {
    const data = await response.json()

    if (!data.success) {
      return {
        success: false,
        output: {
          url: '',
          metadata: {
            error: data.error || 'Failed to upload object',
          },
        },
        error: data.error,
      }
    }

    return {
      success: true,
      output: {
        url: data.output.url,
        metadata: {
          etag: data.output.etag,
          location: data.output.location,
          key: data.output.key,
          bucket: data.output.bucket,
        },
      },
    }
  },

  outputs: {
    url: {
      type: 'string',
      description: 'URL of the uploaded S3 object',
    },
    metadata: {
      type: 'object',
      description: 'Upload metadata including ETag and location',
    },
  },
}
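The tool accepts either a `file` (UserFile) or inline `content`; the text path is the simpler of the two. A sketch of the JSON the route receives for a text upload — every value below is a placeholder:

```typescript
// Uploading inline text through the internal route; credentials are placeholders.
const res = await fetch('/api/tools/s3/put-object', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    accessKeyId: '<AWS_ACCESS_KEY_ID>',
    secretAccessKey: '<AWS_SECRET_ACCESS_KEY>',
    region: 'us-east-1',
    bucketName: 'my-bucket',
    objectKey: 'notes/hello.txt',
    content: 'Hello from Sim', // text path; pass `file` instead for binary uploads
    contentType: 'text/plain',
  }),
})
const data = await res.json() // on success: data.output.url, data.output.etag, ...
```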
@@ -2,12 +2,30 @@ import type { ToolResponse } from '@/tools/types'

export interface S3Response extends ToolResponse {
  output: {
    url: string
    metadata: {
      fileType: string
    url?: string
    objects?: Array<{
      key: string
      size: number
      name: string
      lastModified: string
      etag: string
    }>
    deleted?: boolean
    metadata: {
      fileType?: string
      size?: number
      name?: string
      lastModified?: string
      etag?: string
      location?: string
      key?: string
      bucket?: string
      isTruncated?: boolean
      nextContinuationToken?: string
      keyCount?: number
      prefix?: string
      deleteMarker?: boolean
      versionId?: string
      copySourceVersionId?: string
      error?: string
    }
  }
}
@@ -5,6 +5,7 @@ import { getListTool } from '@/tools/sharepoint/get_list'
import { listSitesTool } from '@/tools/sharepoint/list_sites'
import { readPageTool } from '@/tools/sharepoint/read_page'
import { updateListItemTool } from '@/tools/sharepoint/update_list'
import { uploadFileTool } from '@/tools/sharepoint/upload_file'

export const sharepointCreatePageTool = createPageTool
export const sharepointCreateListTool = createListTool
@@ -13,3 +14,4 @@ export const sharepointListSitesTool = listSitesTool
export const sharepointReadPageTool = readPageTool
export const sharepointUpdateListItemTool = updateListItemTool
export const sharepointAddListItemTool = addListItemTool
export const sharepointUploadFileTool = uploadFileTool
@@ -176,6 +176,11 @@ export interface SharepointToolParams {
  // Update List Item
  itemId?: string
  listItemFields?: Record<string, unknown>
  // Upload File
  driveId?: string
  folderPath?: string
  fileName?: string
  files?: any[]
}

export interface GraphApiResponse {
@@ -260,6 +265,7 @@ export type SharepointResponse =
  | SharepointCreateListResponse
  | SharepointUpdateListItemResponse
  | SharepointAddListItemResponse
  | SharepointUploadFileResponse

export interface SharepointGetListResponse extends ToolResponse {
  output: {
@@ -292,3 +298,19 @@ export interface SharepointAddListItemResponse extends ToolResponse {
    }
  }
}

export interface SharepointUploadedFile {
  id: string
  name: string
  webUrl: string
  size: number
  createdDateTime?: string
  lastModifiedDateTime?: string
}

export interface SharepointUploadFileResponse extends ToolResponse {
  output: {
    uploadedFiles: SharepointUploadedFile[]
    fileCount: number
  }
}
115  apps/sim/tools/sharepoint/upload_file.ts  Normal file
@@ -0,0 +1,115 @@
import type { SharepointToolParams, SharepointUploadFileResponse } from '@/tools/sharepoint/types'
import type { ToolConfig } from '@/tools/types'

export const uploadFileTool: ToolConfig<SharepointToolParams, SharepointUploadFileResponse> = {
  id: 'sharepoint_upload_file',
  name: 'Upload File to SharePoint',
  description: 'Upload files to a SharePoint document library',
  version: '1.0',

  oauth: {
    required: true,
    provider: 'sharepoint',
    additionalScopes: [
      'openid',
      'profile',
      'email',
      'Files.ReadWrite',
      'Sites.ReadWrite.All',
      'offline_access',
    ],
  },

  params: {
    accessToken: {
      type: 'string',
      required: true,
      visibility: 'hidden',
      description: 'The access token for the SharePoint API',
    },
    siteId: {
      type: 'string',
      required: false,
      visibility: 'hidden',
      description: 'The ID of the SharePoint site',
    },
    driveId: {
      type: 'string',
      required: false,
      visibility: 'user-only',
      description: 'The ID of the document library (drive). If not provided, uses the default drive.',
    },
    folderPath: {
      type: 'string',
      required: false,
      visibility: 'user-only',
      description: 'Optional folder path within the document library (e.g., /Documents/Subfolder)',
    },
    fileName: {
      type: 'string',
      required: false,
      visibility: 'user-only',
      description: 'Optional: override the uploaded file name',
    },
    files: {
      type: 'file[]',
      required: false,
      visibility: 'user-only',
      description: 'Files to upload to SharePoint',
    },
  },

  request: {
    url: '/api/tools/sharepoint/upload',
    method: 'POST',
    headers: () => ({
      'Content-Type': 'application/json',
    }),
    body: (params: SharepointToolParams) => {
      return {
        accessToken: params.accessToken,
        siteId: params.siteId || 'root',
        driveId: params.driveId || null,
        folderPath: params.folderPath || null,
        fileName: params.fileName || null,
        files: params.files || null,
      }
    },
  },

  transformResponse: async (response: Response) => {
    const data = await response.json()
    if (!data.success) {
      throw new Error(data.error || 'Failed to upload files to SharePoint')
    }
    return {
      success: true,
      output: {
        uploadedFiles: data.output.uploadedFiles,
        fileCount: data.output.fileCount,
      },
    }
  },

  outputs: {
    uploadedFiles: {
      type: 'array',
      description: 'Array of uploaded file objects',
      items: {
        type: 'object',
        properties: {
          id: { type: 'string', description: 'The unique ID of the uploaded file' },
          name: { type: 'string', description: 'The name of the uploaded file' },
          webUrl: { type: 'string', description: 'The URL to access the file' },
          size: { type: 'number', description: 'The size of the file in bytes' },
          createdDateTime: { type: 'string', description: 'When the file was created' },
          lastModifiedDateTime: { type: 'string', description: 'When the file was last modified' },
        },
      },
    },
    fileCount: {
      type: 'number',
      description: 'Number of files uploaded',
    },
  },
}
@@ -51,33 +51,38 @@ export const slackMessageTool: ToolConfig<SlackMessageParams, SlackMessageRespon
      visibility: 'user-or-llm',
      description: 'Message text to send (supports Slack mrkdwn formatting)',
    },
    files: {
      type: 'file[]',
      required: false,
      visibility: 'user-only',
      description: 'Files to attach to the message',
    },
  },

  request: {
    url: 'https://slack.com/api/chat.postMessage',
    url: '/api/tools/slack/send-message',
    method: 'POST',
    headers: (params: SlackMessageParams) => ({
    headers: () => ({
      'Content-Type': 'application/json',
      Authorization: `Bearer ${params.accessToken || params.botToken}`,
    }),
    body: (params: SlackMessageParams) => {
      const body: any = {
      return {
        accessToken: params.accessToken || params.botToken,
        channel: params.channel,
        markdown_text: params.text,
        text: params.text,
        files: params.files || null,
      }

      return body
    },
  },

  transformResponse: async (response: Response) => {
    const data = await response.json()
    if (!data.success) {
      throw new Error(data.error || 'Failed to send Slack message')
    }
    return {
      success: true,
      output: {
        ts: data.ts,
        channel: data.channel,
      },
      output: data.output,
    }
  },
@@ -10,6 +10,7 @@ export interface SlackMessageParams extends SlackBaseParams {
  channel: string
  text: string
  thread_ts?: string
  files?: any[]
}

export interface SlackCanvasParams extends SlackBaseParams {
@@ -2,6 +2,7 @@ import { telegramDeleteMessageTool } from '@/tools/telegram/delete_message'
import { telegramMessageTool } from '@/tools/telegram/message'
import { telegramSendAnimationTool } from '@/tools/telegram/send_animation'
import { telegramSendAudioTool } from '@/tools/telegram/send_audio'
import { telegramSendDocumentTool } from '@/tools/telegram/send_document'
import { telegramSendPhotoTool } from '@/tools/telegram/send_photo'
import { telegramSendVideoTool } from '@/tools/telegram/send_video'

@@ -10,6 +11,7 @@ export {
  telegramSendAudioTool,
  telegramDeleteMessageTool,
  telegramMessageTool,
  telegramSendDocumentTool,
  telegramSendPhotoTool,
  telegramSendVideoTool,
}
143  apps/sim/tools/telegram/send_document.ts  Normal file
@@ -0,0 +1,143 @@
import type {
  TelegramSendDocumentParams,
  TelegramSendDocumentResponse,
} from '@/tools/telegram/types'
import type { ToolConfig } from '@/tools/types'

export const telegramSendDocumentTool: ToolConfig<
  TelegramSendDocumentParams,
  TelegramSendDocumentResponse
> = {
  id: 'telegram_send_document',
  name: 'Telegram Send Document',
  description:
    'Send documents (PDF, ZIP, DOC, etc.) to Telegram channels or users through the Telegram Bot API.',
  version: '1.0.0',

  params: {
    botToken: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'Your Telegram Bot API Token',
    },
    chatId: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'Target Telegram chat ID',
    },
    files: {
      type: 'file[]',
      required: false,
      visibility: 'user-only',
      description: 'Document file to send (PDF, ZIP, DOC, etc.). Max size: 50MB',
    },
    caption: {
      type: 'string',
      required: false,
      visibility: 'user-or-llm',
      description: 'Document caption (optional)',
    },
  },

  request: {
    url: '/api/tools/telegram/send-document',
    method: 'POST',
    headers: () => ({
      'Content-Type': 'application/json',
    }),
    body: (params: TelegramSendDocumentParams) => {
      return {
        botToken: params.botToken,
        chatId: params.chatId,
        files: params.files || null,
        caption: params.caption,
      }
    },
  },

  transformResponse: async (response: Response) => {
    const data = await response.json()
    if (!data.success) {
      throw new Error(data.error || 'Failed to send Telegram document')
    }
    return {
      success: true,
      output: data.output,
    }
  },

  outputs: {
    message: { type: 'string', description: 'Success or error message' },
    data: {
      type: 'object',
      description: 'Telegram message data including document',
      properties: {
        message_id: {
          type: 'number',
          description: 'Unique Telegram message identifier',
        },
        from: {
          type: 'object',
          description: 'Information about the sender',
          properties: {
            id: { type: 'number', description: 'Sender ID' },
            is_bot: {
              type: 'boolean',
              description: 'Whether the sender is a bot',
            },
            first_name: {
              type: 'string',
              description: "Sender's first name (if available)",
            },
            username: {
              type: 'string',
              description: "Sender's username (if available)",
            },
          },
        },
        chat: {
          type: 'object',
          description: 'Information about the chat where the message was sent',
          properties: {
            id: { type: 'number', description: 'Chat ID' },
            first_name: {
              type: 'string',
              description: 'Chat first name (if private chat)',
            },
            username: {
              type: 'string',
              description: 'Chat username (for private chats or channels)',
            },
            type: {
              type: 'string',
              description: 'Type of chat (private, group, supergroup, or channel)',
            },
          },
        },
        date: {
          type: 'number',
          description: 'Unix timestamp when the message was sent',
        },
        document: {
          type: 'object',
          description: 'Document file details',
          properties: {
            file_name: { type: 'string', description: 'Document file name' },
            mime_type: { type: 'string', description: 'Document MIME type' },
            file_id: { type: 'string', description: 'Document file ID' },
            file_unique_id: {
              type: 'string',
              description: 'Unique document file identifier',
            },
            file_size: {
              type: 'number',
              description: 'Size of the document file in bytes',
            },
          },
        },
      },
    },
  },
}
@@ -115,6 +115,11 @@ export interface TelegramSendAnimationParams extends TelegramAuthParams {
  caption?: string
}

export interface TelegramSendDocumentParams extends TelegramAuthParams {
  files?: any
  caption?: string
}

export interface TelegramDeleteMessageParams extends TelegramAuthParams {
  messageId: number
}
@@ -157,11 +162,19 @@ export interface TelegramSendPhotoResponse extends ToolResponse {
  }
}

export interface TelegramSendDocumentResponse extends ToolResponse {
  output: {
    message: string
    data?: TelegramMedia
  }
}

export type TelegramResponse =
  | TelegramSendMessageResponse
  | TelegramSendPhotoResponse
  | TelegramSendAudioResponse
  | TelegramSendMediaResponse
  | TelegramSendDocumentResponse
  | TelegramDeleteMessageResponse

// Legacy type for backwards compatibility
@@ -17,10 +17,16 @@ export const visionTool: ToolConfig<VisionParams, VisionResponse> = {
    },
    imageUrl: {
      type: 'string',
      required: true,
      required: false,
      visibility: 'user-only',
      description: 'Publicly accessible image URL',
    },
    imageFile: {
      type: 'file',
      required: false,
      visibility: 'user-only',
      description: 'Image file to analyze',
    },
    model: {
      type: 'string',
      required: false,
@@ -37,93 +43,29 @@ export const visionTool: ToolConfig<VisionParams, VisionResponse> = {

  request: {
    method: 'POST',
    url: (params) => {
      if (params.model?.startsWith('claude-3')) {
        return 'https://api.anthropic.com/v1/messages'
      }
      return 'https://api.openai.com/v1/chat/completions'
    },
    headers: (params) => {
      const headers = {
        'Content-Type': 'application/json',
      }

      return params.model?.startsWith('claude-3')
        ? {
            ...headers,
            'x-api-key': params.apiKey,
            'anthropic-version': '2023-06-01',
          }
        : {
            ...headers,
            Authorization: `Bearer ${params.apiKey}`,
          }
    },
    url: '/api/tools/vision/analyze',
    headers: () => ({
      'Content-Type': 'application/json',
    }),
    body: (params) => {
      const defaultPrompt = 'Please analyze this image and describe what you see in detail.'
      const prompt = params.prompt || defaultPrompt

      if (params.model?.startsWith('claude-3')) {
        return {
          model: params.model,
          messages: [
            {
              role: 'user',
              content: [
                { type: 'text', text: prompt },
                {
                  type: 'image',
                  source: { type: 'url', url: params.imageUrl },
                },
              ],
            },
          ],
        }
      }

      return {
        model: 'gpt-4o',
        messages: [
          {
            role: 'user',
            content: [
              { type: 'text', text: prompt },
              {
                type: 'image_url',
                image_url: {
                  url: params.imageUrl,
                },
              },
            ],
          },
        ],
        max_tokens: 1000,
        apiKey: params.apiKey,
        imageUrl: params.imageUrl || null,
        imageFile: params.imageFile || null,
        model: params.model || 'gpt-4o',
        prompt: params.prompt || null,
      }
    },
  },

  transformResponse: async (response: Response) => {
    const data = await response.json()

    const result = data.content?.[0]?.text || data.choices?.[0]?.message?.content

    if (!data.success) {
      throw new Error(data.error || 'Failed to analyze image')
    }
    return {
      success: true,
      output: {
        content: result,
        model: data.model,
        tokens: data.content
          ? data.usage?.input_tokens + data.usage?.output_tokens
          : data.usage?.total_tokens,
        usage: data.usage
          ? {
              input_tokens: data.usage.input_tokens,
              output_tokens: data.usage.output_tokens,
              total_tokens:
                data.usage.total_tokens || data.usage.input_tokens + data.usage.output_tokens,
            }
          : undefined,
      },
      output: data.output,
    }
  },
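With the provider-specific branching moved server-side, the client payload collapses to one shape regardless of model. A sketch of calling the new proxy route directly — the image URL and API key are placeholders:

```typescript
// Analyzing a public image through the proxy route; all values are placeholders.
const res = await fetch('/api/tools/vision/analyze', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    apiKey: '<provider-api-key>',
    imageUrl: 'https://example.com/photo.jpg',
    imageFile: null, // or a UserFile object instead of imageUrl
    model: 'gpt-4o',
    prompt: 'Describe this image.',
  }),
})
const data = await res.json() // data.output carries the provider-agnostic result
```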
@@ -2,7 +2,8 @@ import type { ToolResponse } from '@/tools/types'

export interface VisionParams {
  apiKey: string
  imageUrl: string
  imageUrl?: string
  imageFile?: any
  model?: string
  prompt?: string
}
@@ -31,6 +31,14 @@ export const gmailPollingTrigger: TriggerConfig = {
        'Include only emails with selected labels, or exclude emails with selected labels',
      required: true,
    },
    searchQuery: {
      type: 'string',
      label: 'Gmail Search Query',
      placeholder: 'subject:report OR from:important@example.com',
      description:
        'Optional Gmail search query to filter emails. Use the same format as the Gmail search box (e.g., "subject:invoice", "from:boss@company.com", "has:attachment"). Leave empty to search all emails.',
      required: false,
    },
    markAsRead: {
      type: 'boolean',
      label: 'Mark as Read',
@@ -8,6 +8,15 @@
 */
export const DEFAULT_FREE_CREDITS = 10

/**
 * Storage limit constants (in GB)
 * Can be overridden via environment variables
 */
export const DEFAULT_FREE_STORAGE_LIMIT_GB = 5
export const DEFAULT_PRO_STORAGE_LIMIT_GB = 50
export const DEFAULT_TEAM_STORAGE_LIMIT_GB = 500
export const DEFAULT_ENTERPRISE_STORAGE_LIMIT_GB = 500

/**
 * Tag slots available for knowledge base documents and embeddings
 */
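Since the comment says the storage limits can be overridden via environment variables, resolution presumably looks something like the sketch below. The variable naming and the resolver function itself are assumptions, not part of the diff:

```typescript
// Hypothetical resolver: an env var override wins, the exported constant is the fallback.
function storageLimitGb(plan: 'free' | 'pro' | 'team' | 'enterprise'): number {
  const defaults = {
    free: DEFAULT_FREE_STORAGE_LIMIT_GB,
    pro: DEFAULT_PRO_STORAGE_LIMIT_GB,
    team: DEFAULT_TEAM_STORAGE_LIMIT_GB,
    enterprise: DEFAULT_ENTERPRISE_STORAGE_LIMIT_GB,
  }
  const override = process.env[`${plan.toUpperCase()}_STORAGE_LIMIT_GB`] // assumed naming
  return override ? Number(override) : defaults[plan]
}
```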
18  packages/db/migrations/0100_public_black_cat.sql  Normal file
@@ -0,0 +1,18 @@
CREATE TABLE "workspace_file" (
  "id" text PRIMARY KEY NOT NULL,
  "workspace_id" text NOT NULL,
  "name" text NOT NULL,
  "key" text NOT NULL,
  "size" integer NOT NULL,
  "type" text NOT NULL,
  "uploaded_by" text NOT NULL,
  "uploaded_at" timestamp DEFAULT now() NOT NULL,
  CONSTRAINT "workspace_file_key_unique" UNIQUE("key")
);
--> statement-breakpoint
ALTER TABLE "organization" ADD COLUMN "storage_used_bytes" bigint DEFAULT 0 NOT NULL;--> statement-breakpoint
ALTER TABLE "user_stats" ADD COLUMN "storage_used_bytes" bigint DEFAULT 0 NOT NULL;--> statement-breakpoint
ALTER TABLE "workspace_file" ADD CONSTRAINT "workspace_file_workspace_id_workspace_id_fk" FOREIGN KEY ("workspace_id") REFERENCES "public"."workspace"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "workspace_file" ADD CONSTRAINT "workspace_file_uploaded_by_user_id_fk" FOREIGN KEY ("uploaded_by") REFERENCES "public"."user"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
CREATE INDEX "workspace_file_workspace_id_idx" ON "workspace_file" USING btree ("workspace_id");--> statement-breakpoint
CREATE INDEX "workspace_file_key_idx" ON "workspace_file" USING btree ("key");
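A sketch of how the new table and the `storage_used_bytes` counter fit together when recording an upload. The SQL follows the schema above; the helper itself is hypothetical (it uses the generic `pg` client, and the `user_stats.user_id` column name is an assumption):

```typescript
import { Client } from 'pg'

// Hypothetical helper: insert a workspace file row and bump the uploader's
// storage usage in one transaction so the counter never drifts from the rows.
async function recordUpload(
  db: Client,
  f: { id: string; workspaceId: string; name: string; key: string; size: number; type: string; userId: string }
): Promise<void> {
  await db.query('BEGIN')
  try {
    await db.query(
      `INSERT INTO workspace_file (id, workspace_id, name, key, size, type, uploaded_by)
       VALUES ($1, $2, $3, $4, $5, $6, $7)`,
      [f.id, f.workspaceId, f.name, f.key, f.size, f.type, f.userId]
    )
    await db.query(
      // Column name user_id is assumed; adjust to the actual user_stats schema.
      'UPDATE user_stats SET storage_used_bytes = storage_used_bytes + $1 WHERE user_id = $2',
      [f.size, f.userId]
    )
    await db.query('COMMIT')
  } catch (err) {
    await db.query('ROLLBACK')
    throw err
  }
}
```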