Compare commits


40 Commits

| Author | SHA1 | Message | Date |
|--------|------|---------|------|
| Vikhyath Mondreti | 1282befcf1 | update skills | 2026-02-03 16:04:16 -08:00 |
| Vikhyath Mondreti | b7ccbc8cb9 | fix | 2026-02-03 15:58:51 -08:00 |
| Vikhyath Mondreti | 5a78b55b1e | fix | 2026-02-03 15:53:24 -08:00 |
| Vikhyath Mondreti | aa1b158b26 | fix dropbox | 2026-02-03 15:50:50 -08:00 |
| Vikhyath Mondreti | 2d96ac55db | fix sendgrid | 2026-02-03 15:44:39 -08:00 |
| Vikhyath Mondreti | bd5866ed6b | fix dropbox upload file | 2026-02-03 15:35:15 -08:00 |
| Vikhyath Mondreti | cfc360404a | fix slack to include successful execs | 2026-02-03 13:19:58 -08:00 |
| Vikhyath Mondreti | b0457bc7c1 | improve docs | 2026-02-03 13:06:16 -08:00 |
| Vikhyath Mondreti | 4669ec9823 | Merge branch 'improvement/double-fetch' of github.com:simstudioai/sim into improvement/double-fetch | 2026-02-03 12:53:41 -08:00 |
| Vikhyath Mondreti | ed1ca6e861 | user files should be passed through | 2026-02-03 12:53:28 -08:00 |
| waleed | a529f06adb | added wand to ssh block | 2026-02-03 12:30:51 -08:00 |
| Vikhyath Mondreti | 4f2b5a5ec6 | accept all types | 2026-02-03 12:29:55 -08:00 |
| Vikhyath Mondreti | dc3d449d99 | remove file only annotation | 2026-02-03 12:24:45 -08:00 |
| Vikhyath Mondreti | fa81609a92 | cleanup fireflies | 2026-02-03 12:17:55 -08:00 |
| Vikhyath Mondreti | f256a9fa8c | make interface simpler | 2026-02-03 12:15:26 -08:00 |
| Vikhyath Mondreti | 47c9604577 | update single file blocks | 2026-02-03 12:09:55 -08:00 |
| Vikhyath Mondreti | ff0753a298 | fix more v2 blocks | 2026-02-03 11:55:51 -08:00 |
| Vikhyath Mondreti | 3b747086bf | fix for v2 versions | 2026-02-03 11:53:57 -08:00 |
| Vikhyath Mondreti | 285490666f | fix v2 blocmks for ocr | 2026-02-03 11:50:26 -08:00 |
| Vikhyath Mondreti | c230e1aae2 | normalize file input | 2026-02-03 11:38:14 -08:00 |
| Vikhyath Mondreti | 6e5e8debc5 | fix | 2026-02-03 11:23:54 -08:00 |
| Vikhyath Mondreti | 66b954d15d | fix file block adv mode | 2026-02-03 11:00:05 -08:00 |
| Vikhyath Mondreti | 4169a25e29 | address bugbot comment | 2026-02-03 09:49:07 -08:00 |
| Vikhyath Mondreti | 0aeaf6faee | remove leftover type | 2026-02-03 01:22:49 -08:00 |
| Vikhyath Mondreti | a6ec6a0e6c | fix typing | 2026-02-03 01:22:21 -08:00 |
| Vikhyath Mondreti | cbe0f8aed2 | fix ocr integrations | 2026-02-03 01:18:13 -08:00 |
| Vikhyath Mondreti | 6e642fc705 | address more bugbot comments | 2026-02-03 00:58:58 -08:00 |
| Vikhyath Mondreti | 1c857cdcda | fix circular impport | 2026-02-03 00:52:12 -08:00 |
| Vikhyath Mondreti | 7570e509ff | Merge branch 'staging' into improvement/double-fetch | 2026-02-02 20:24:34 -08:00 |
| Vikhyath Mondreti | 1ff35405fa | fix type check | 2026-02-02 20:14:46 -08:00 |
| Vikhyath Mondreti | 3ceabbb816 | fix more bugbot comments | 2026-02-02 20:12:31 -08:00 |
| Vikhyath Mondreti | a65f3b8e6b | fix tests | 2026-02-02 17:26:57 -08:00 |
| Vikhyath Mondreti | 5ecbf6cf4a | consolidate more code | 2026-02-02 17:21:22 -08:00 |
| Vikhyath Mondreti | 42767fc4f4 | fix types | 2026-02-02 17:13:23 -08:00 |
| Vikhyath Mondreti | 5a0becf76f | fix integrations | 2026-02-02 17:04:17 -08:00 |
| Vikhyath Mondreti | f4a3c94f87 | consolidate more code | 2026-02-02 15:11:32 -08:00 |
| Vikhyath Mondreti | 9ec0c8f3f5 | separate server and client logic | 2026-02-02 15:00:53 -08:00 |
| Vikhyath Mondreti | 39ca1f61c7 | more integrations | 2026-02-02 01:08:38 -08:00 |
| Vikhyath Mondreti | 1da3407f41 | progress on files | 2026-02-01 11:14:32 -08:00 |
| Vikhyath Mondreti | bea0a685ae | improvement(collab): do not refetch active workflow id | 2026-01-31 18:50:14 -08:00 |
255 changed files with 7872 additions and 3126 deletions

View File

@@ -183,6 +183,109 @@ export const {ServiceName}Block: BlockConfig = {
}
```
## File Input Handling
When your block accepts file uploads, use the basic/advanced mode pattern with `normalizeFileInput`.
### Basic/Advanced File Pattern
```typescript
// Basic mode: Visual file upload
{
id: 'uploadFile',
title: 'File',
type: 'file-upload',
canonicalParamId: 'file', // Both map to 'file' param
placeholder: 'Upload file',
mode: 'basic',
multiple: false,
required: true,
condition: { field: 'operation', value: 'upload' },
},
// Advanced mode: Reference from other blocks
{
id: 'fileRef',
title: 'File',
type: 'short-input',
canonicalParamId: 'file', // Both map to 'file' param
placeholder: 'Reference file (e.g., {{file_block.output}})',
mode: 'advanced',
required: true,
condition: { field: 'operation', value: 'upload' },
},
```
**Critical constraints:**
- `canonicalParamId` must NOT match any subblock's `id` in the same block (see the sketch below)
- Values are stored under subblock `id`, not `canonicalParamId`
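To make the first constraint concrete, here is a minimal sketch (the ids are illustrative):
```typescript
// Invalid: canonicalParamId collides with the subblock's own id
{ id: 'file', title: 'File', type: 'file-upload', canonicalParamId: 'file' },

// Valid: distinct ids, shared canonical param across basic/advanced variants
{ id: 'uploadFile', title: 'File', type: 'file-upload', canonicalParamId: 'file' },
{ id: 'fileRef', title: 'File', type: 'short-input', canonicalParamId: 'file' },
```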
### Normalizing File Input in tools.config
Use `normalizeFileInput` to handle all input variants:
```typescript
import { normalizeFileInput } from '@/blocks/utils'
tools: {
access: ['service_upload'],
config: {
tool: (params) => {
// Check all field IDs: uploadFile (basic), fileRef (advanced), fileContent (legacy)
const normalizedFile = normalizeFileInput(
params.uploadFile || params.fileRef || params.fileContent,
{ single: true }
)
if (normalizedFile) {
params.file = normalizedFile
}
return `service_${params.operation}`
},
},
}
```
**Why this pattern?**
- Values come through as `params.uploadFile` or `params.fileRef` (the subblock IDs)
- `canonicalParamId` only controls UI/schema mapping, not runtime values
- `normalizeFileInput` handles JSON strings from advanced mode template resolution (see the sketch below)
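As a rough sketch of the last point (the JSON string below is illustrative, and the exact return shape of `normalizeFileInput` is assumed):
```typescript
// Advanced mode may resolve {{file_block.output}} into a JSON string rather than an object
const raw = '{"name":"report.pdf","url":"https://example.com/report.pdf","type":"application/pdf","size":245678}'

// normalizeFileInput parses and normalizes it into a single file value with { single: true }
const file = normalizeFileInput(raw, { single: true })
if (file) {
  params.file = file
}
```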
### File Input Types in `inputs`
Use `type: 'json'` for file inputs:
```typescript
inputs: {
uploadFile: { type: 'json', description: 'Uploaded file (UserFile)' },
fileRef: { type: 'json', description: 'File reference from previous block' },
// Legacy field for backwards compatibility
fileContent: { type: 'string', description: 'Legacy: base64 encoded content' },
}
```
### Multiple Files
For multiple file uploads:
```typescript
{
id: 'attachments',
title: 'Attachments',
type: 'file-upload',
multiple: true, // Allow multiple files
maxSize: 25, // Max size in MB per file
acceptedTypes: 'image/*,application/pdf,.doc,.docx',
}
// In tools.config:
const normalizedFiles = normalizeFileInput(
params.attachments || params.attachmentRefs,
// No { single: true } - returns array
)
if (normalizedFiles) {
params.files = normalizedFiles
}
```
## Condition Syntax
Controls when a field is shown based on other field values.

View File

@@ -457,7 +457,230 @@ You can usually find this in the service's brand/press kit page, or copy it from
Paste the SVG code here and I'll convert it to a React component.
```
## File Handling
When your integration handles file uploads or downloads, follow these patterns to work with `UserFile` objects consistently.
### What is a UserFile?
A `UserFile` is the standard file representation in Sim:
```typescript
interface UserFile {
id: string // Unique identifier
name: string // Original filename
url: string // Presigned URL for download
size: number // File size in bytes
type: string // MIME type (e.g., 'application/pdf')
base64?: string // Optional base64 content (if small file)
key?: string // Internal storage key
context?: object // Storage context metadata
}
```
### File Input Pattern (Uploads)
For tools that accept file uploads, **always route through an internal API endpoint** rather than calling external APIs directly. This ensures proper file content retrieval.
#### 1. Block SubBlocks for File Input
Use the basic/advanced mode pattern:
```typescript
// Basic mode: File upload UI
{
id: 'uploadFile',
title: 'File',
type: 'file-upload',
canonicalParamId: 'file', // Maps to 'file' param
placeholder: 'Upload file',
mode: 'basic',
multiple: false,
required: true,
condition: { field: 'operation', value: 'upload' },
},
// Advanced mode: Reference from previous block
{
id: 'fileRef',
title: 'File',
type: 'short-input',
canonicalParamId: 'file', // Same canonical param
placeholder: 'Reference file (e.g., {{file_block.output}})',
mode: 'advanced',
required: true,
condition: { field: 'operation', value: 'upload' },
},
```
**Critical:** `canonicalParamId` must NOT match any subblock `id`.
#### 2. Normalize File Input in Block Config
In `tools.config.tool`, use `normalizeFileInput` to handle all input variants:
```typescript
import { normalizeFileInput } from '@/blocks/utils'
tools: {
config: {
tool: (params) => {
// Normalize file from basic (uploadFile), advanced (fileRef), or legacy (fileContent)
const normalizedFile = normalizeFileInput(
params.uploadFile || params.fileRef || params.fileContent,
{ single: true }
)
if (normalizedFile) {
params.file = normalizedFile
}
return `{service}_${params.operation}`
},
},
}
```
#### 3. Create Internal API Route
Create `apps/sim/app/api/tools/{service}/{action}/route.ts`:
```typescript
import { createLogger } from '@sim/logger'
import { NextResponse, type NextRequest } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { FileInputSchema, type RawFileInput } from '@/lib/uploads/utils/file-schemas'
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
const logger = createLogger('{Service}UploadAPI')
const RequestSchema = z.object({
accessToken: z.string(),
file: FileInputSchema.optional().nullable(),
// Legacy field for backwards compatibility
fileContent: z.string().optional().nullable(),
// ... other params
})
export async function POST(request: NextRequest) {
const requestId = generateRequestId()
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
if (!authResult.success) {
return NextResponse.json({ success: false, error: 'Unauthorized' }, { status: 401 })
}
const body = await request.json()
const data = RequestSchema.parse(body)
let fileBuffer: Buffer
let fileName: string
// Prefer UserFile input, fall back to legacy base64
if (data.file) {
const userFiles = processFilesToUserFiles([data.file as RawFileInput], requestId, logger)
if (userFiles.length === 0) {
return NextResponse.json({ success: false, error: 'Invalid file' }, { status: 400 })
}
const userFile = userFiles[0]
fileBuffer = await downloadFileFromStorage(userFile, requestId, logger)
fileName = userFile.name
} else if (data.fileContent) {
// Legacy: base64 string (backwards compatibility)
fileBuffer = Buffer.from(data.fileContent, 'base64')
fileName = 'file'
} else {
return NextResponse.json({ success: false, error: 'File required' }, { status: 400 })
}
// Now call external API with fileBuffer
const response = await fetch('https://api.{service}.com/upload', {
method: 'POST',
headers: { Authorization: `Bearer ${data.accessToken}` },
body: new Uint8Array(fileBuffer), // Convert Buffer for fetch
})
// ... handle response
}
```
#### 4. Update Tool to Use Internal Route
```typescript
export const {service}UploadTool: ToolConfig<Params, Response> = {
id: '{service}_upload',
// ...
params: {
file: { type: 'file', required: false, visibility: 'user-or-llm' },
fileContent: { type: 'string', required: false, visibility: 'hidden' }, // Legacy
},
request: {
url: '/api/tools/{service}/upload', // Internal route
method: 'POST',
body: (params) => ({
accessToken: params.accessToken,
file: params.file,
fileContent: params.fileContent,
}),
},
}
```
### File Output Pattern (Downloads)
For tools that return files, use `FileToolProcessor` to store files and return `UserFile` objects.
#### In Tool transformResponse
```typescript
import { FileToolProcessor } from '@/executor/utils/file-tool-processor'
transformResponse: async (response, context) => {
const data = await response.json()
// Process file outputs to UserFile objects
const fileProcessor = new FileToolProcessor(context)
const file = await fileProcessor.processFileData({
data: data.content, // base64 or buffer
mimeType: data.mimeType,
filename: data.filename,
})
return {
success: true,
output: { file },
}
}
```
#### In API Route (for complex file handling)
```typescript
// Return file data that FileToolProcessor can handle
return NextResponse.json({
success: true,
output: {
file: {
data: base64Content,
mimeType: 'application/pdf',
filename: 'document.pdf',
},
},
})
```
### Key Helpers Reference
| Helper | Location | Purpose |
|--------|----------|---------|
| `normalizeFileInput` | `@/blocks/utils` | Normalize file params in block config |
| `processFilesToUserFiles` | `@/lib/uploads/utils/file-utils` | Convert raw inputs to UserFile[] |
| `downloadFileFromStorage` | `@/lib/uploads/utils/file-utils.server` | Get file Buffer from UserFile |
| `FileToolProcessor` | `@/executor/utils/file-tool-processor` | Process tool output files |
| `isUserFile` | `@/lib/core/utils/user-file` | Type guard for UserFile objects (see sketch below) |
| `FileInputSchema` | `@/lib/uploads/utils/file-schemas` | Zod schema for file validation |
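Most of these helpers appear in the examples above; `isUserFile` does not, so here is a minimal sketch assuming the usual type-guard signature (`(value: unknown) => value is UserFile`):
```typescript
import { isUserFile } from '@/lib/core/utils/user-file'

// Narrow an unknown tool param before touching file-specific fields
function describeFile(input: unknown): string {
  if (isUserFile(input)) {
    return `${input.name} (${input.size} bytes, ${input.type})`
  }
  return 'not a UserFile'
}
```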
### Common Gotchas
1. **OAuth serviceId must match** - The `serviceId` in oauth-input must match the OAuth provider configuration
2. **Tool IDs are snake_case** - `stripe_create_payment`, not `stripeCreatePayment`
@@ -465,3 +688,5 @@ Paste the SVG code here and I'll convert it to a React component.
4. **Alphabetical ordering** - Keep imports and registry entries alphabetically sorted
5. **Required can be conditional** - Use `required: { field: 'op', value: 'create' }` instead of always true
6. **DependsOn clears options** - When a dependency changes, selector options are refetched
7. **Never pass Buffer directly to fetch** - Convert to `new Uint8Array(buffer)` for TypeScript compatibility (see the sketch below)
8. **Always handle legacy file params** - Keep hidden `fileContent` params for backwards compatibility
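A minimal sketch of gotcha 7 (the endpoint URL and payload are illustrative):
```typescript
const base64Content = 'JVBERi0xLjQK' // example payload
const fileBuffer: Buffer = Buffer.from(base64Content, 'base64')

// Buffer is not accepted as fetch's BodyInit under strict TypeScript settings;
// wrap it in a Uint8Array before sending
await fetch('https://api.example.com/upload', {
  method: 'POST',
  body: new Uint8Array(fileBuffer),
})
```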

View File

@@ -195,6 +195,52 @@ import { {service}WebhookTrigger } from '@/triggers/{service}'
{service}_webhook: {service}WebhookTrigger,
```
## File Handling
When integrations handle file uploads/downloads, use `UserFile` objects consistently.
### File Input (Uploads)
1. **Block subBlocks:** Use basic/advanced mode pattern with `canonicalParamId`
2. **Normalize in block config:** Use `normalizeFileInput` from `@/blocks/utils`
3. **Internal API route:** Create route that uses `downloadFileFromStorage` to get file content
4. **Tool routes to internal API:** Don't call external APIs directly with files
```typescript
// In block tools.config:
import { normalizeFileInput } from '@/blocks/utils'
const normalizedFile = normalizeFileInput(
params.uploadFile || params.fileRef || params.fileContent,
{ single: true }
)
if (normalizedFile) params.file = normalizedFile
```
### File Output (Downloads)
Use `FileToolProcessor` in tool `transformResponse` to store files:
```typescript
import { FileToolProcessor } from '@/executor/utils/file-tool-processor'
const processor = new FileToolProcessor(context)
const file = await processor.processFileData({
data: base64Content,
mimeType: 'application/pdf',
filename: 'doc.pdf',
})
```
### Key Helpers
| Helper | Location | Purpose |
|--------|----------|---------|
| `normalizeFileInput` | `@/blocks/utils` | Normalize file params in block config |
| `processFilesToUserFiles` | `@/lib/uploads/utils/file-utils` | Convert raw inputs to UserFile[] |
| `downloadFileFromStorage` | `@/lib/uploads/utils/file-utils.server` | Get Buffer from UserFile |
| `FileToolProcessor` | `@/executor/utils/file-tool-processor` | Process tool output files |
## Checklist
- [ ] Look up API docs for the service
@@ -207,3 +253,5 @@ import { {service}WebhookTrigger } from '@/triggers/{service}'
- [ ] Register block in `blocks/registry.ts`
- [ ] (Optional) Create triggers in `triggers/{service}/`
- [ ] (Optional) Register triggers in `triggers/registry.ts`
- [ ] (If file uploads) Create internal API route with `downloadFileFromStorage`
- [ ] (If file uploads) Use `normalizeFileInput` in block config

View File

@@ -193,6 +193,52 @@ import { {service}WebhookTrigger } from '@/triggers/{service}'
{service}_webhook: {service}WebhookTrigger,
```
## File Handling
When integrations handle file uploads/downloads, use `UserFile` objects consistently.
### File Input (Uploads)
1. **Block subBlocks:** Use basic/advanced mode pattern with `canonicalParamId`
2. **Normalize in block config:** Use `normalizeFileInput` from `@/blocks/utils`
3. **Internal API route:** Create route that uses `downloadFileFromStorage` to get file content
4. **Tool routes to internal API:** Don't call external APIs directly with files
```typescript
// In block tools.config:
import { normalizeFileInput } from '@/blocks/utils'
const normalizedFile = normalizeFileInput(
params.uploadFile || params.fileRef || params.fileContent,
{ single: true }
)
if (normalizedFile) params.file = normalizedFile
```
### File Output (Downloads)
Use `FileToolProcessor` in tool `transformResponse` to store files:
```typescript
import { FileToolProcessor } from '@/executor/utils/file-tool-processor'
const processor = new FileToolProcessor(context)
const file = await processor.processFileData({
data: base64Content,
mimeType: 'application/pdf',
filename: 'doc.pdf',
})
```
### Key Helpers
| Helper | Location | Purpose |
|--------|----------|---------|
| `normalizeFileInput` | `@/blocks/utils` | Normalize file params in block config |
| `processFilesToUserFiles` | `@/lib/uploads/utils/file-utils` | Convert raw inputs to UserFile[] |
| `downloadFileFromStorage` | `@/lib/uploads/utils/file-utils.server` | Get Buffer from UserFile |
| `FileToolProcessor` | `@/executor/utils/file-tool-processor` | Process tool output files |
## Checklist
- [ ] Look up API docs for the service
@@ -205,3 +251,5 @@ import { {service}WebhookTrigger } from '@/triggers/{service}'
- [ ] Register block in `blocks/registry.ts`
- [ ] (Optional) Create triggers in `triggers/{service}/`
- [ ] (Optional) Register triggers in `triggers/registry.ts`
- [ ] (If file uploads) Create internal API route with `downloadFileFromStorage`
- [ ] (If file uploads) Use `normalizeFileInput` in block config

View File

@@ -265,6 +265,23 @@ Register in `blocks/registry.ts` (alphabetically).
**dependsOn:** `['field']` or `{ all: ['a'], any: ['b', 'c'] }`
**File Input Pattern (basic/advanced mode):**
```typescript
// Basic: file-upload UI
{ id: 'uploadFile', type: 'file-upload', canonicalParamId: 'file', mode: 'basic' },
// Advanced: reference from other blocks
{ id: 'fileRef', type: 'short-input', canonicalParamId: 'file', mode: 'advanced' },
```
In `tools.config.tool`, normalize with:
```typescript
import { normalizeFileInput } from '@/blocks/utils'
const file = normalizeFileInput(params.uploadFile || params.fileRef, { single: true })
if (file) params.file = file
```
For file uploads, create an internal API route (`/api/tools/{service}/upload`) that uses `downloadFileFromStorage` to get file content from `UserFile` objects.
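A condensed sketch of such a route, following the fuller example in the integration skill (the `{service}` pieces and the external URL are placeholders):
```typescript
// apps/sim/app/api/tools/{service}/upload/route.ts
import { createLogger } from '@sim/logger'
import { NextResponse, type NextRequest } from 'next/server'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'

const logger = createLogger('{Service}UploadAPI')

export async function POST(request: NextRequest) {
  const requestId = generateRequestId()
  const auth = await checkInternalAuth(request, { requireWorkflowId: false })
  if (!auth.success) {
    return NextResponse.json({ success: false, error: 'Unauthorized' }, { status: 401 })
  }

  const { accessToken, file } = await request.json()
  const userFiles = processFilesToUserFiles([file], requestId, logger)
  if (userFiles.length === 0) {
    return NextResponse.json({ success: false, error: 'File required' }, { status: 400 })
  }

  // Resolve the UserFile to raw bytes, then call the external API with the buffer
  const buffer = await downloadFileFromStorage(userFiles[0], requestId, logger)
  const response = await fetch('https://api.example.com/upload', {
    method: 'POST',
    headers: { Authorization: `Bearer ${accessToken}` },
    body: new Uint8Array(buffer),
  })
  return NextResponse.json({ success: response.ok, output: await response.json() })
}
```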
### 3. Icon (`components/icons.tsx`)
```typescript
@@ -293,3 +310,5 @@ Register in `triggers/registry.ts`.
- [ ] Create block in `blocks/blocks/{service}.ts`
- [ ] Register block in `blocks/registry.ts`
- [ ] (Optional) Create and register triggers
- [ ] (If file uploads) Create internal API route with `downloadFileFromStorage`
- [ ] (If file uploads) Use `normalizeFileInput` in block config

View File

@@ -213,25 +213,6 @@ Different subscription plans have different usage limits:
| **Team** | $40/seat (pooled, adjustable) | 300 sync, 2,500 async |
| **Enterprise** | Custom | Custom |
## Execution Time Limits
Workflows have maximum execution time limits based on your subscription plan:
| Plan | Sync Execution Limit |
|------|---------------------|
| **Free** | 5 minutes |
| **Pro** | 60 minutes |
| **Team** | 60 minutes |
| **Enterprise** | 60 minutes |
**Sync executions** run immediately and return results directly. These are triggered via the API with `async: false` (default) or through the UI.
**Async executions** (triggered via API with `async: true`, webhooks, or schedules) run in the background with a 90-minute time limit for all plans.
<Callout type="info">
If a workflow exceeds its time limit, it will be terminated and marked as failed with a timeout error. Design long-running workflows to use async execution or break them into smaller workflows.
</Callout>
## Billing Model
Sim uses a **base subscription + overage** billing model:

View File

@@ -0,0 +1,168 @@
---
title: Passing Files
---
import { Callout } from 'fumadocs-ui/components/callout'
import { Tab, Tabs } from 'fumadocs-ui/components/tabs'
Sim makes it easy to work with files throughout your workflows. Blocks can receive files, process them, and pass them to other blocks seamlessly.
## File Objects
When blocks output files (like Gmail attachments, generated images, or parsed documents), they return a standardized file object:
```json
{
"name": "report.pdf",
"url": "https://...",
"base64": "JVBERi0xLjQK...",
"type": "application/pdf",
"size": 245678
}
```
You can access any of these properties when referencing files from previous blocks.
## The File Block
The **File block** is the universal entry point for files in your workflows. It accepts files from any source and outputs standardized file objects that work with all integrations.
**Inputs:**
- **Uploaded files** - Drag and drop or select files directly
- **External URLs** - Any publicly accessible file URL
- **Files from other blocks** - Pass files from Gmail attachments, Slack downloads, etc.
**Outputs:**
- A list of `UserFile` objects with consistent structure (`name`, `url`, `base64`, `type`, `size`)
- `combinedContent` - Extracted text content from all files (for documents)
**Example usage:**
```
// Get all files from the File block
<file.files>
// Get the first file
<file.files[0]>
// Get combined text content from parsed documents
<file.combinedContent>
```
The File block automatically:
- Detects file types from URLs and extensions
- Extracts text from PDFs, CSVs, and documents
- Generates base64 encoding for binary files
- Creates presigned URLs for secure access
Use the File block when you need to normalize files from different sources before passing them to other blocks like Vision, STT, or email integrations.
## Passing Files Between Blocks
Reference files from previous blocks using the tag dropdown. Click in any file input field and type `<` to see available outputs.
**Common patterns:**
```
// Single file from a block
<gmail.attachments[0]>
// Pass the whole file object
<file_parser.files[0]>
// Access specific properties
<gmail.attachments[0].name>
<gmail.attachments[0].base64>
```
Most blocks accept the full file object and extract what they need automatically. You don't need to manually extract `base64` or `url` in most cases.
## Triggering Workflows with Files
When calling a workflow via API that expects file input, include files in your request:
<Tabs items={['Base64', 'URL']}>
<Tab value="Base64">
```bash
curl -X POST "https://sim.ai/api/workflows/YOUR_WORKFLOW_ID/execute" \
-H "Content-Type: application/json" \
-H "x-api-key: YOUR_API_KEY" \
-d '{
"document": {
"name": "report.pdf",
"base64": "JVBERi0xLjQK...",
"type": "application/pdf"
}
}'
```
</Tab>
<Tab value="URL">
```bash
curl -X POST "https://sim.ai/api/workflows/YOUR_WORKFLOW_ID/execute" \
-H "Content-Type: application/json" \
-H "x-api-key: YOUR_API_KEY" \
-d '{
"document": {
"name": "report.pdf",
"url": "https://example.com/report.pdf",
"type": "application/pdf"
}
}'
```
</Tab>
</Tabs>
The workflow's Start block should have an input field configured to receive the file parameter.
## Receiving Files in API Responses
When a workflow outputs files, they're included in the response:
```json
{
"success": true,
"output": {
"generatedFile": {
"name": "output.png",
"url": "https://...",
"base64": "iVBORw0KGgo...",
"type": "image/png",
"size": 34567
}
}
}
```
Use `url` for direct downloads or `base64` for inline processing.
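For example, a minimal sketch of consuming that response in Node (the workflow ID, API key, input payload, and output field name are illustrative):
```typescript
import { writeFile } from 'node:fs/promises'

const res = await fetch('https://sim.ai/api/workflows/YOUR_WORKFLOW_ID/execute', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json', 'x-api-key': 'YOUR_API_KEY' },
  body: JSON.stringify({ prompt: 'Generate a chart' }),
})
const { output } = await res.json()

// Inline processing: decode the base64 payload
await writeFile('output.png', Buffer.from(output.generatedFile.base64, 'base64'))

// Direct download: fetch the presigned URL instead
const bytes = await (await fetch(output.generatedFile.url)).arrayBuffer()
await writeFile('output-from-url.png', Buffer.from(bytes))
```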
## Blocks That Work with Files
**File inputs:**
- **File** - Parse documents, images, and text files
- **Vision** - Analyze images with AI models
- **Mistral Parser** - Extract text from PDFs
**File outputs:**
- **Gmail** - Email attachments
- **Slack** - Downloaded files
- **TTS** - Generated audio files
- **Video Generator** - Generated videos
- **Image Generator** - Generated images
**File storage:**
- **Supabase** - Upload/download from storage
- **S3** - AWS S3 operations
- **Google Drive** - Drive file operations
- **Dropbox** - Dropbox file operations
<Callout type="info">
Files are automatically available to downstream blocks. The execution engine handles all file transfer and format conversion.
</Callout>
## Best Practices
1. **Use file objects directly** - Pass the full file object rather than extracting individual properties. Blocks handle the conversion automatically.
2. **Check file types** - Ensure the file type matches what the receiving block expects. The Vision block needs images; the File block handles documents.
3. **Consider file size** - Large files increase execution time. For very large files, consider using storage blocks (S3, Supabase) for intermediate storage.

View File

@@ -1,3 +1,3 @@
{
"pages": ["index", "basics", "api", "logging", "costs"]
"pages": ["index", "basics", "files", "api", "logging", "costs"]
}

View File

@@ -11,7 +11,7 @@ import {
Database,
DollarSign,
HardDrive,
Timer,
Workflow,
} from 'lucide-react'
import { useRouter } from 'next/navigation'
import { cn } from '@/lib/core/utils/cn'
@@ -44,7 +44,7 @@ interface PricingTier {
const FREE_PLAN_FEATURES: PricingFeature[] = [
{ icon: DollarSign, text: '$20 usage limit' },
{ icon: HardDrive, text: '5GB file storage' },
{ icon: Timer, text: '5 min execution limit' },
{ icon: Workflow, text: 'Public template access' },
{ icon: Database, text: 'Limited log retention' },
{ icon: Code2, text: 'CLI/SDK Access' },
]

View File

@@ -16,7 +16,7 @@ import {
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { getBrandConfig } from '@/lib/branding/branding'
import { acquireLock, getRedisClient, releaseLock } from '@/lib/core/config/redis'
import { validateExternalUrl } from '@/lib/core/security/input-validation'
import { validateUrlWithDNS } from '@/lib/core/security/input-validation.server'
import { SSE_HEADERS } from '@/lib/core/utils/sse'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { markExecutionCancelled } from '@/lib/execution/cancellation'
@@ -1119,7 +1119,7 @@ async function handlePushNotificationSet(
)
}
const urlValidation = validateExternalUrl(
const urlValidation = await validateUrlWithDNS(
params.pushNotificationConfig.url,
'Push notification URL'
)

View File

@@ -4,12 +4,10 @@ import { createLogger } from '@sim/logger'
import { and, eq, lt, sql } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { verifyCronAuth } from '@/lib/auth/internal'
import { getMaxExecutionTimeout } from '@/lib/core/execution-limits'
const logger = createLogger('CleanupStaleExecutions')
const STALE_THRESHOLD_MS = getMaxExecutionTimeout() + 5 * 60 * 1000
const STALE_THRESHOLD_MINUTES = 30
const STALE_THRESHOLD_MINUTES = Math.ceil(STALE_THRESHOLD_MS / 60000)
const MAX_INT32 = 2_147_483_647
export async function GET(request: NextRequest) {

View File

@@ -6,7 +6,11 @@ import { createLogger } from '@sim/logger'
import binaryExtensionsList from 'binary-extensions'
import { type NextRequest, NextResponse } from 'next/server'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { secureFetchWithPinnedIP, validateUrlWithDNS } from '@/lib/core/security/input-validation'
import {
secureFetchWithPinnedIP,
validateUrlWithDNS,
} from '@/lib/core/security/input-validation.server'
import { sanitizeUrlForLog } from '@/lib/core/utils/logging'
import { isSupportedFileType, parseFile } from '@/lib/file-parsers'
import { isUsingCloudStorage, type StorageContext, StorageService } from '@/lib/uploads'
import { uploadExecutionFile } from '@/lib/uploads/contexts/execution'
@@ -19,6 +23,7 @@ import {
getMimeTypeFromExtension,
getViewerUrl,
inferContextFromKey,
isInternalFileUrl,
} from '@/lib/uploads/utils/file-utils'
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
import { verifyFileAccess } from '@/app/api/files/authorization'
@@ -215,7 +220,7 @@ async function parseFileSingle(
}
}
if (filePath.includes('/api/files/serve/')) {
if (isInternalFileUrl(filePath)) {
return handleCloudFile(filePath, fileType, undefined, userId, executionContext)
}
@@ -246,7 +251,7 @@ function validateFilePath(filePath: string): { isValid: boolean; error?: string
return { isValid: false, error: 'Invalid path: tilde character not allowed' }
}
if (filePath.startsWith('/') && !filePath.startsWith('/api/files/serve/')) {
if (filePath.startsWith('/') && !isInternalFileUrl(filePath)) {
return { isValid: false, error: 'Path outside allowed directory' }
}
@@ -420,7 +425,7 @@ async function handleExternalUrl(
return parseResult
} catch (error) {
logger.error(`Error handling external URL ${url}:`, error)
logger.error(`Error handling external URL ${sanitizeUrlForLog(url)}:`, error)
return {
success: false,
error: `Error fetching URL: ${(error as Error).message}`,

View File

@@ -21,7 +21,6 @@ import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateInternalToken } from '@/lib/auth/internal'
import { getMaxExecutionTimeout } from '@/lib/core/execution-limits'
import { getBaseUrl } from '@/lib/core/utils/urls'
const logger = createLogger('WorkflowMcpServeAPI')
@@ -265,7 +264,7 @@ async function handleToolsCall(
method: 'POST',
headers,
body: JSON.stringify({ input: params.arguments || {}, triggerType: 'mcp' }),
signal: AbortSignal.timeout(getMaxExecutionTimeout()),
signal: AbortSignal.timeout(600000), // 10 minute timeout
})
const executeResult = await response.json()

View File

@@ -1,8 +1,5 @@
import { createLogger } from '@sim/logger'
import type { NextRequest } from 'next/server'
import { getHighestPrioritySubscription } from '@/lib/billing/core/plan'
import { getExecutionTimeout } from '@/lib/core/execution-limits'
import type { SubscriptionPlan } from '@/lib/core/rate-limiter/types'
import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware'
import { mcpService } from '@/lib/mcp/service'
import type { McpTool, McpToolCall, McpToolResult } from '@/lib/mcp/types'
@@ -10,6 +7,7 @@ import {
categorizeError,
createMcpErrorResponse,
createMcpSuccessResponse,
MCP_CONSTANTS,
validateStringParam,
} from '@/lib/mcp/utils'
@@ -173,16 +171,13 @@ export const POST = withMcpAuth('read')(
arguments: args,
}
const userSubscription = await getHighestPrioritySubscription(userId)
const executionTimeout = getExecutionTimeout(
userSubscription?.plan as SubscriptionPlan | undefined,
'sync'
)
const result = await Promise.race([
mcpService.executeTool(userId, serverId, toolCall, workspaceId),
new Promise<never>((_, reject) =>
setTimeout(() => reject(new Error('Tool execution timeout')), executionTimeout)
setTimeout(
() => reject(new Error('Tool execution timeout')),
MCP_CONSTANTS.EXECUTION_TIMEOUT
)
),
])

View File

@@ -4,6 +4,7 @@ import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { createA2AClient, extractTextContent, isTerminalState } from '@/lib/a2a/utils'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { validateUrlWithDNS } from '@/lib/core/security/input-validation.server'
import { generateRequestId } from '@/lib/core/utils/request'
export const dynamic = 'force-dynamic'
@@ -95,6 +96,14 @@ export async function POST(request: NextRequest) {
if (validatedData.files && validatedData.files.length > 0) {
for (const file of validatedData.files) {
if (file.type === 'url') {
const urlValidation = await validateUrlWithDNS(file.data, 'fileUrl')
if (!urlValidation.isValid) {
return NextResponse.json(
{ success: false, error: urlValidation.error },
{ status: 400 }
)
}
const filePart: FilePart = {
kind: 'file',
file: {

View File

@@ -3,7 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { createA2AClient } from '@/lib/a2a/utils'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { validateExternalUrl } from '@/lib/core/security/input-validation'
import { validateUrlWithDNS } from '@/lib/core/security/input-validation.server'
import { generateRequestId } from '@/lib/core/utils/request'
export const dynamic = 'force-dynamic'
@@ -40,7 +40,7 @@ export async function POST(request: NextRequest) {
const body = await request.json()
const validatedData = A2ASetPushNotificationSchema.parse(body)
const urlValidation = validateExternalUrl(validatedData.webhookUrl, 'Webhook URL')
const urlValidation = await validateUrlWithDNS(validatedData.webhookUrl, 'Webhook URL')
if (!urlValidation.isValid) {
logger.warn(`[${requestId}] Invalid webhook URL`, { error: urlValidation.error })
return NextResponse.json(

View File

@@ -92,6 +92,9 @@ export async function POST(request: NextRequest) {
formData.append('comment', comment)
}
// Add minorEdit field as required by Confluence API
formData.append('minorEdit', 'false')
const response = await fetch(url, {
method: 'POST',
headers: {

View File

@@ -4,6 +4,7 @@ import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { validateNumericId } from '@/lib/core/security/input-validation'
import { generateRequestId } from '@/lib/core/utils/request'
import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
@@ -15,7 +16,7 @@ const DiscordSendMessageSchema = z.object({
botToken: z.string().min(1, 'Bot token is required'),
channelId: z.string().min(1, 'Channel ID is required'),
content: z.string().optional().nullable(),
files: z.array(z.any()).optional().nullable(),
files: RawFileInputArraySchema.optional().nullable(),
})
export async function POST(request: NextRequest) {
@@ -101,6 +102,12 @@ export async function POST(request: NextRequest) {
logger.info(`[${requestId}] Processing ${validatedData.files.length} file(s)`)
const userFiles = processFilesToUserFiles(validatedData.files, requestId, logger)
const filesOutput: Array<{
name: string
mimeType: string
data: string
size: number
}> = []
if (userFiles.length === 0) {
logger.warn(`[${requestId}] No valid files to upload, falling back to text-only`)
@@ -137,6 +144,12 @@ export async function POST(request: NextRequest) {
logger.info(`[${requestId}] Downloading file ${i}: ${userFile.name}`)
const buffer = await downloadFileFromStorage(userFile, requestId, logger)
filesOutput.push({
name: userFile.name,
mimeType: userFile.type || 'application/octet-stream',
data: buffer.toString('base64'),
size: buffer.length,
})
const blob = new Blob([new Uint8Array(buffer)], { type: userFile.type })
formData.append(`files[${i}]`, blob, userFile.name)
@@ -173,6 +186,7 @@ export async function POST(request: NextRequest) {
message: data.content,
data: data,
fileCount: userFiles.length,
files: filesOutput,
},
})
} catch (error) {

View File

@@ -0,0 +1,141 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { FileInputSchema } from '@/lib/uploads/utils/file-schemas'
import { processFilesToUserFiles, type RawFileInput } from '@/lib/uploads/utils/file-utils'
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
export const dynamic = 'force-dynamic'
const logger = createLogger('DropboxUploadAPI')
/**
* Escapes non-ASCII characters in JSON string for HTTP header safety.
* Dropbox API requires characters 0x7F and all non-ASCII to be escaped as \uXXXX.
*/
function httpHeaderSafeJson(value: object): string {
return JSON.stringify(value).replace(/[\u007f-\uffff]/g, (c) => {
return '\\u' + ('0000' + c.charCodeAt(0).toString(16)).slice(-4)
})
}
const DropboxUploadSchema = z.object({
accessToken: z.string().min(1, 'Access token is required'),
path: z.string().min(1, 'Destination path is required'),
file: FileInputSchema.optional().nullable(),
// Legacy field for backwards compatibility
fileContent: z.string().optional().nullable(),
fileName: z.string().optional().nullable(),
mode: z.enum(['add', 'overwrite']).optional().nullable(),
autorename: z.boolean().optional().nullable(),
mute: z.boolean().optional().nullable(),
})
export async function POST(request: NextRequest) {
const requestId = generateRequestId()
try {
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
if (!authResult.success) {
logger.warn(`[${requestId}] Unauthorized Dropbox upload attempt: ${authResult.error}`)
return NextResponse.json(
{ success: false, error: authResult.error || 'Authentication required' },
{ status: 401 }
)
}
logger.info(`[${requestId}] Authenticated Dropbox upload request via ${authResult.authType}`)
const body = await request.json()
const validatedData = DropboxUploadSchema.parse(body)
let fileBuffer: Buffer
let fileName: string
// Prefer UserFile input, fall back to legacy base64 string
if (validatedData.file) {
// Process UserFile input
const userFiles = processFilesToUserFiles(
[validatedData.file as RawFileInput],
requestId,
logger
)
if (userFiles.length === 0) {
return NextResponse.json({ success: false, error: 'Invalid file input' }, { status: 400 })
}
const userFile = userFiles[0]
logger.info(`[${requestId}] Downloading file: ${userFile.name} (${userFile.size} bytes)`)
fileBuffer = await downloadFileFromStorage(userFile, requestId, logger)
fileName = userFile.name
} else if (validatedData.fileContent) {
// Legacy: base64 string input (backwards compatibility)
logger.info(`[${requestId}] Using legacy base64 content input`)
fileBuffer = Buffer.from(validatedData.fileContent, 'base64')
fileName = validatedData.fileName || 'file'
} else {
return NextResponse.json({ success: false, error: 'File is required' }, { status: 400 })
}
// Determine final path
let finalPath = validatedData.path
if (finalPath.endsWith('/')) {
finalPath = `${finalPath}${fileName}`
}
logger.info(`[${requestId}] Uploading to Dropbox: ${finalPath} (${fileBuffer.length} bytes)`)
const dropboxApiArg = {
path: finalPath,
mode: validatedData.mode || 'add',
autorename: validatedData.autorename ?? true,
mute: validatedData.mute ?? false,
}
const response = await fetch('https://content.dropboxapi.com/2/files/upload', {
method: 'POST',
headers: {
Authorization: `Bearer ${validatedData.accessToken}`,
'Content-Type': 'application/octet-stream',
'Dropbox-API-Arg': httpHeaderSafeJson(dropboxApiArg),
},
body: new Uint8Array(fileBuffer),
})
const data = await response.json()
if (!response.ok) {
const errorMessage = data.error_summary || data.error?.message || 'Failed to upload file'
logger.error(`[${requestId}] Dropbox API error:`, { status: response.status, data })
return NextResponse.json({ success: false, error: errorMessage }, { status: response.status })
}
logger.info(`[${requestId}] File uploaded successfully to ${data.path_display}`)
return NextResponse.json({
success: true,
output: {
file: data,
},
})
} catch (error) {
if (error instanceof z.ZodError) {
logger.warn(`[${requestId}] Validation error:`, error.errors)
return NextResponse.json(
{ success: false, error: error.errors[0]?.message || 'Validation failed' },
{ status: 400 }
)
}
logger.error(`[${requestId}] Unexpected error:`, error)
return NextResponse.json(
{ success: false, error: error instanceof Error ? error.message : 'Unknown error' },
{ status: 500 }
)
}
}

View File

@@ -0,0 +1,195 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import {
secureFetchWithPinnedIP,
validateUrlWithDNS,
} from '@/lib/core/security/input-validation.server'
import { generateRequestId } from '@/lib/core/utils/request'
export const dynamic = 'force-dynamic'
const logger = createLogger('GitHubLatestCommitAPI')
interface GitHubErrorResponse {
message?: string
}
interface GitHubCommitResponse {
sha: string
html_url: string
commit: {
message: string
author: { name: string; email: string; date: string }
committer: { name: string; email: string; date: string }
}
author?: { login: string; avatar_url: string; html_url: string }
committer?: { login: string; avatar_url: string; html_url: string }
stats?: { additions: number; deletions: number; total: number }
files?: Array<{
filename: string
status: string
additions: number
deletions: number
changes: number
patch?: string
raw_url?: string
blob_url?: string
}>
}
const GitHubLatestCommitSchema = z.object({
owner: z.string().min(1, 'Owner is required'),
repo: z.string().min(1, 'Repo is required'),
branch: z.string().optional().nullable(),
apiKey: z.string().min(1, 'API key is required'),
})
export async function POST(request: NextRequest) {
const requestId = generateRequestId()
try {
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
if (!authResult.success) {
logger.warn(`[${requestId}] Unauthorized GitHub latest commit attempt: ${authResult.error}`)
return NextResponse.json(
{
success: false,
error: authResult.error || 'Authentication required',
},
{ status: 401 }
)
}
const body = await request.json()
const validatedData = GitHubLatestCommitSchema.parse(body)
const { owner, repo, branch, apiKey } = validatedData
const baseUrl = `https://api.github.com/repos/${owner}/${repo}`
const commitUrl = branch ? `${baseUrl}/commits/${branch}` : `${baseUrl}/commits/HEAD`
logger.info(`[${requestId}] Fetching latest commit from GitHub`, { owner, repo, branch })
const urlValidation = await validateUrlWithDNS(commitUrl, 'commitUrl')
if (!urlValidation.isValid) {
return NextResponse.json({ success: false, error: urlValidation.error }, { status: 400 })
}
const response = await secureFetchWithPinnedIP(commitUrl, urlValidation.resolvedIP!, {
method: 'GET',
headers: {
Accept: 'application/vnd.github.v3+json',
Authorization: `Bearer ${apiKey}`,
'X-GitHub-Api-Version': '2022-11-28',
},
})
if (!response.ok) {
const errorData = (await response.json().catch(() => ({}))) as GitHubErrorResponse
logger.error(`[${requestId}] GitHub API error`, {
status: response.status,
error: errorData,
})
return NextResponse.json(
{ success: false, error: errorData.message || `GitHub API error: ${response.status}` },
{ status: 400 }
)
}
const data = (await response.json()) as GitHubCommitResponse
const content = `Latest commit: "${data.commit.message}" by ${data.commit.author.name} on ${data.commit.author.date}. SHA: ${data.sha}`
const files = data.files || []
const fileDetailsWithContent = []
for (const file of files) {
const fileDetail: Record<string, any> = {
filename: file.filename,
additions: file.additions,
deletions: file.deletions,
changes: file.changes,
status: file.status,
raw_url: file.raw_url,
blob_url: file.blob_url,
patch: file.patch,
content: undefined,
}
if (file.status !== 'removed' && file.raw_url) {
try {
const rawUrlValidation = await validateUrlWithDNS(file.raw_url, 'rawUrl')
if (rawUrlValidation.isValid) {
const contentResponse = await secureFetchWithPinnedIP(
file.raw_url,
rawUrlValidation.resolvedIP!,
{
headers: {
Authorization: `Bearer ${apiKey}`,
'X-GitHub-Api-Version': '2022-11-28',
},
}
)
if (contentResponse.ok) {
fileDetail.content = await contentResponse.text()
}
}
} catch (error) {
logger.warn(`[${requestId}] Failed to fetch content for ${file.filename}:`, error)
}
}
fileDetailsWithContent.push(fileDetail)
}
logger.info(`[${requestId}] Latest commit fetched successfully`, {
sha: data.sha,
fileCount: files.length,
})
return NextResponse.json({
success: true,
output: {
content,
metadata: {
sha: data.sha,
html_url: data.html_url,
commit_message: data.commit.message,
author: {
name: data.commit.author.name,
login: data.author?.login || 'Unknown',
avatar_url: data.author?.avatar_url || '',
html_url: data.author?.html_url || '',
},
committer: {
name: data.commit.committer.name,
login: data.committer?.login || 'Unknown',
avatar_url: data.committer?.avatar_url || '',
html_url: data.committer?.html_url || '',
},
stats: data.stats
? {
additions: data.stats.additions,
deletions: data.stats.deletions,
total: data.stats.total,
}
: undefined,
files: fileDetailsWithContent.length > 0 ? fileDetailsWithContent : undefined,
},
},
})
} catch (error) {
logger.error(`[${requestId}] Error fetching GitHub latest commit:`, error)
return NextResponse.json(
{
success: false,
error: error instanceof Error ? error.message : 'Unknown error occurred',
},
{ status: 500 }
)
}
}

View File

@@ -3,6 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
import {
@@ -28,7 +29,7 @@ const GmailDraftSchema = z.object({
replyToMessageId: z.string().optional().nullable(),
cc: z.string().optional().nullable(),
bcc: z.string().optional().nullable(),
attachments: z.array(z.any()).optional().nullable(),
attachments: RawFileInputArraySchema.optional().nullable(),
})
export async function POST(request: NextRequest) {

View File

@@ -3,6 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
import {
@@ -28,7 +29,7 @@ const GmailSendSchema = z.object({
replyToMessageId: z.string().optional().nullable(),
cc: z.string().optional().nullable(),
bcc: z.string().optional().nullable(),
attachments: z.array(z.any()).optional().nullable(),
attachments: RawFileInputArraySchema.optional().nullable(),
})
export async function POST(request: NextRequest) {

View File

@@ -0,0 +1,252 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import {
secureFetchWithPinnedIP,
validateUrlWithDNS,
} from '@/lib/core/security/input-validation.server'
import { generateRequestId } from '@/lib/core/utils/request'
import type { GoogleDriveFile, GoogleDriveRevision } from '@/tools/google_drive/types'
import {
ALL_FILE_FIELDS,
ALL_REVISION_FIELDS,
DEFAULT_EXPORT_FORMATS,
GOOGLE_WORKSPACE_MIME_TYPES,
} from '@/tools/google_drive/utils'
export const dynamic = 'force-dynamic'
const logger = createLogger('GoogleDriveDownloadAPI')
/** Google API error response structure */
interface GoogleApiErrorResponse {
error?: {
message?: string
code?: number
status?: string
}
}
/** Google Drive revisions list response */
interface GoogleDriveRevisionsResponse {
revisions?: GoogleDriveRevision[]
nextPageToken?: string
}
const GoogleDriveDownloadSchema = z.object({
accessToken: z.string().min(1, 'Access token is required'),
fileId: z.string().min(1, 'File ID is required'),
mimeType: z.string().optional().nullable(),
fileName: z.string().optional().nullable(),
includeRevisions: z.boolean().optional().default(true),
})
export async function POST(request: NextRequest) {
const requestId = generateRequestId()
try {
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
if (!authResult.success) {
logger.warn(`[${requestId}] Unauthorized Google Drive download attempt: ${authResult.error}`)
return NextResponse.json(
{
success: false,
error: authResult.error || 'Authentication required',
},
{ status: 401 }
)
}
const body = await request.json()
const validatedData = GoogleDriveDownloadSchema.parse(body)
const {
accessToken,
fileId,
mimeType: exportMimeType,
fileName,
includeRevisions,
} = validatedData
const authHeader = `Bearer ${accessToken}`
logger.info(`[${requestId}] Getting file metadata from Google Drive`, { fileId })
const metadataUrl = `https://www.googleapis.com/drive/v3/files/${fileId}?fields=${ALL_FILE_FIELDS}&supportsAllDrives=true`
const metadataUrlValidation = await validateUrlWithDNS(metadataUrl, 'metadataUrl')
if (!metadataUrlValidation.isValid) {
return NextResponse.json(
{ success: false, error: metadataUrlValidation.error },
{ status: 400 }
)
}
const metadataResponse = await secureFetchWithPinnedIP(
metadataUrl,
metadataUrlValidation.resolvedIP!,
{
headers: { Authorization: authHeader },
}
)
if (!metadataResponse.ok) {
const errorDetails = (await metadataResponse
.json()
.catch(() => ({}))) as GoogleApiErrorResponse
logger.error(`[${requestId}] Failed to get file metadata`, {
status: metadataResponse.status,
error: errorDetails,
})
return NextResponse.json(
{ success: false, error: errorDetails.error?.message || 'Failed to get file metadata' },
{ status: 400 }
)
}
const metadata = (await metadataResponse.json()) as GoogleDriveFile
const fileMimeType = metadata.mimeType
let fileBuffer: Buffer
let finalMimeType = fileMimeType
if (GOOGLE_WORKSPACE_MIME_TYPES.includes(fileMimeType)) {
const exportFormat = exportMimeType || DEFAULT_EXPORT_FORMATS[fileMimeType] || 'text/plain'
finalMimeType = exportFormat
logger.info(`[${requestId}] Exporting Google Workspace file`, {
fileId,
mimeType: fileMimeType,
exportFormat,
})
const exportUrl = `https://www.googleapis.com/drive/v3/files/${fileId}/export?mimeType=${encodeURIComponent(exportFormat)}&supportsAllDrives=true`
const exportUrlValidation = await validateUrlWithDNS(exportUrl, 'exportUrl')
if (!exportUrlValidation.isValid) {
return NextResponse.json(
{ success: false, error: exportUrlValidation.error },
{ status: 400 }
)
}
const exportResponse = await secureFetchWithPinnedIP(
exportUrl,
exportUrlValidation.resolvedIP!,
{ headers: { Authorization: authHeader } }
)
if (!exportResponse.ok) {
const exportError = (await exportResponse
.json()
.catch(() => ({}))) as GoogleApiErrorResponse
logger.error(`[${requestId}] Failed to export file`, {
status: exportResponse.status,
error: exportError,
})
return NextResponse.json(
{
success: false,
error: exportError.error?.message || 'Failed to export Google Workspace file',
},
{ status: 400 }
)
}
const arrayBuffer = await exportResponse.arrayBuffer()
fileBuffer = Buffer.from(arrayBuffer)
} else {
logger.info(`[${requestId}] Downloading regular file`, { fileId, mimeType: fileMimeType })
const downloadUrl = `https://www.googleapis.com/drive/v3/files/${fileId}?alt=media&supportsAllDrives=true`
const downloadUrlValidation = await validateUrlWithDNS(downloadUrl, 'downloadUrl')
if (!downloadUrlValidation.isValid) {
return NextResponse.json(
{ success: false, error: downloadUrlValidation.error },
{ status: 400 }
)
}
const downloadResponse = await secureFetchWithPinnedIP(
downloadUrl,
downloadUrlValidation.resolvedIP!,
{ headers: { Authorization: authHeader } }
)
if (!downloadResponse.ok) {
const downloadError = (await downloadResponse
.json()
.catch(() => ({}))) as GoogleApiErrorResponse
logger.error(`[${requestId}] Failed to download file`, {
status: downloadResponse.status,
error: downloadError,
})
return NextResponse.json(
{ success: false, error: downloadError.error?.message || 'Failed to download file' },
{ status: 400 }
)
}
const arrayBuffer = await downloadResponse.arrayBuffer()
fileBuffer = Buffer.from(arrayBuffer)
}
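    // Revision history is fetched best-effort: it requires canReadRevisions, and any failure only logs a warning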
const canReadRevisions = metadata.capabilities?.canReadRevisions === true
if (includeRevisions && canReadRevisions) {
try {
const revisionsUrl = `https://www.googleapis.com/drive/v3/files/${fileId}/revisions?fields=revisions(${ALL_REVISION_FIELDS})&pageSize=100`
const revisionsUrlValidation = await validateUrlWithDNS(revisionsUrl, 'revisionsUrl')
if (revisionsUrlValidation.isValid) {
const revisionsResponse = await secureFetchWithPinnedIP(
revisionsUrl,
revisionsUrlValidation.resolvedIP!,
{ headers: { Authorization: authHeader } }
)
if (revisionsResponse.ok) {
const revisionsData = (await revisionsResponse.json()) as GoogleDriveRevisionsResponse
metadata.revisions = revisionsData.revisions
logger.info(`[${requestId}] Fetched file revisions`, {
fileId,
revisionCount: metadata.revisions?.length || 0,
})
}
}
} catch (error) {
logger.warn(`[${requestId}] Error fetching revisions, continuing without them`, { error })
}
}
const resolvedName = fileName || metadata.name || 'download'
logger.info(`[${requestId}] File downloaded successfully`, {
fileId,
name: resolvedName,
size: fileBuffer.length,
mimeType: finalMimeType,
})
const base64Data = fileBuffer.toString('base64')
return NextResponse.json({
success: true,
output: {
file: {
name: resolvedName,
mimeType: finalMimeType,
data: base64Data,
size: fileBuffer.length,
},
metadata,
},
})
} catch (error) {
logger.error(`[${requestId}] Error downloading Google Drive file:`, error)
return NextResponse.json(
{
success: false,
error: error instanceof Error ? error.message : 'Unknown error occurred',
},
{ status: 500 }
)
}
}

View File

@@ -3,6 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
+import { RawFileInputSchema } from '@/lib/uploads/utils/file-schemas'
import { processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
import {
@@ -20,7 +21,7 @@ const GOOGLE_DRIVE_API_BASE = 'https://www.googleapis.com/upload/drive/v3/files'
const GoogleDriveUploadSchema = z.object({
  accessToken: z.string().min(1, 'Access token is required'),
  fileName: z.string().min(1, 'File name is required'),
-  file: z.any().optional().nullable(),
+  file: RawFileInputSchema.optional().nullable(),
  mimeType: z.string().optional().nullable(),
  folderId: z.string().optional().nullable(),
})

View File

@@ -0,0 +1,131 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import {
secureFetchWithPinnedIP,
validateUrlWithDNS,
} from '@/lib/core/security/input-validation.server'
import { generateRequestId } from '@/lib/core/utils/request'
import { enhanceGoogleVaultError } from '@/tools/google_vault/utils'
export const dynamic = 'force-dynamic'
const logger = createLogger('GoogleVaultDownloadExportFileAPI')
const GoogleVaultDownloadExportFileSchema = z.object({
accessToken: z.string().min(1, 'Access token is required'),
bucketName: z.string().min(1, 'Bucket name is required'),
objectName: z.string().min(1, 'Object name is required'),
fileName: z.string().optional().nullable(),
})
export async function POST(request: NextRequest) {
const requestId = generateRequestId()
try {
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
if (!authResult.success) {
logger.warn(`[${requestId}] Unauthorized Google Vault download attempt: ${authResult.error}`)
return NextResponse.json(
{
success: false,
error: authResult.error || 'Authentication required',
},
{ status: 401 }
)
}
const body = await request.json()
const validatedData = GoogleVaultDownloadExportFileSchema.parse(body)
const { accessToken, bucketName, objectName, fileName } = validatedData
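    // Vault exports are delivered as objects in Google Cloud Storage; download via the JSON API with alt=media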
const bucket = encodeURIComponent(bucketName)
const object = encodeURIComponent(objectName)
const downloadUrl = `https://storage.googleapis.com/storage/v1/b/${bucket}/o/${object}?alt=media`
logger.info(`[${requestId}] Downloading file from Google Vault`, { bucketName, objectName })
const urlValidation = await validateUrlWithDNS(downloadUrl, 'downloadUrl')
if (!urlValidation.isValid) {
return NextResponse.json(
{ success: false, error: enhanceGoogleVaultError(urlValidation.error || 'Invalid URL') },
{ status: 400 }
)
}
const downloadResponse = await secureFetchWithPinnedIP(downloadUrl, urlValidation.resolvedIP!, {
method: 'GET',
headers: {
Authorization: `Bearer ${accessToken}`,
},
})
if (!downloadResponse.ok) {
const errorText = await downloadResponse.text().catch(() => '')
const errorMessage = `Failed to download file: ${errorText || downloadResponse.statusText}`
logger.error(`[${requestId}] Failed to download Vault export file`, {
status: downloadResponse.status,
error: errorText,
})
return NextResponse.json(
{ success: false, error: enhanceGoogleVaultError(errorMessage) },
{ status: 400 }
)
}
const contentType = downloadResponse.headers.get('content-type') || 'application/octet-stream'
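    // Resolve a filename: explicit input first, then the Content-Disposition header, then the object name, then a generic fallback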
const disposition = downloadResponse.headers.get('content-disposition') || ''
const match = disposition.match(/filename\*=UTF-8''([^;]+)|filename="([^"]+)"/)
let resolvedName = fileName
if (!resolvedName) {
if (match?.[1]) {
try {
resolvedName = decodeURIComponent(match[1])
} catch {
resolvedName = match[1]
}
} else if (match?.[2]) {
resolvedName = match[2]
} else if (objectName) {
const parts = objectName.split('/')
resolvedName = parts[parts.length - 1] || 'vault-export.bin'
} else {
resolvedName = 'vault-export.bin'
}
}
const arrayBuffer = await downloadResponse.arrayBuffer()
const buffer = Buffer.from(arrayBuffer)
logger.info(`[${requestId}] Vault export file downloaded successfully`, {
name: resolvedName,
size: buffer.length,
mimeType: contentType,
})
return NextResponse.json({
success: true,
output: {
file: {
name: resolvedName,
mimeType: contentType,
data: buffer.toString('base64'),
size: buffer.length,
},
},
})
} catch (error) {
logger.error(`[${requestId}] Error downloading Google Vault export file:`, error)
return NextResponse.json(
{
success: false,
error: error instanceof Error ? error.message : 'Unknown error occurred',
},
{ status: 500 }
)
}
}

View File

@@ -1,7 +1,10 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { checkInternalAuth } from '@/lib/auth/hybrid'
-import { validateImageUrl } from '@/lib/core/security/input-validation'
+import {
+  secureFetchWithPinnedIP,
+  validateUrlWithDNS,
+} from '@/lib/core/security/input-validation.server'
import { generateRequestId } from '@/lib/core/utils/request'
const logger = createLogger('ImageProxyAPI')
@@ -26,7 +29,7 @@ export async function GET(request: NextRequest) {
    return new NextResponse('Missing URL parameter', { status: 400 })
  }
-  const urlValidation = validateImageUrl(imageUrl)
+  const urlValidation = await validateUrlWithDNS(imageUrl, 'imageUrl')
  if (!urlValidation.isValid) {
    logger.warn(`[${requestId}] Blocked image proxy request`, {
      url: imageUrl.substring(0, 100),
@@ -38,7 +41,8 @@
  logger.info(`[${requestId}] Proxying image request for: ${imageUrl}`)
  try {
-    const imageResponse = await fetch(imageUrl, {
+    const imageResponse = await secureFetchWithPinnedIP(imageUrl, urlValidation.resolvedIP!, {
+      method: 'GET',
      headers: {
        'User-Agent':
          'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.36',
@@ -64,14 +68,14 @@
    const contentType = imageResponse.headers.get('content-type') || 'image/jpeg'
-    const imageBlob = await imageResponse.blob()
-    if (imageBlob.size === 0) {
-      logger.error(`[${requestId}] Empty image blob received`)
+    const imageArrayBuffer = await imageResponse.arrayBuffer()
+    if (imageArrayBuffer.byteLength === 0) {
+      logger.error(`[${requestId}] Empty image received`)
      return new NextResponse('Empty image received', { status: 404 })
    }
-    return new NextResponse(imageBlob, {
+    return new NextResponse(imageArrayBuffer, {
      headers: {
        'Content-Type': contentType,
        'Access-Control-Allow-Origin': '*',

View File

@@ -0,0 +1,121 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
import { getJiraCloudId } from '@/tools/jira/utils'
const logger = createLogger('JiraAddAttachmentAPI')
export const dynamic = 'force-dynamic'
const JiraAddAttachmentSchema = z.object({
accessToken: z.string().min(1, 'Access token is required'),
domain: z.string().min(1, 'Domain is required'),
issueKey: z.string().min(1, 'Issue key is required'),
files: RawFileInputArraySchema,
cloudId: z.string().optional().nullable(),
})
export async function POST(request: NextRequest) {
const requestId = `jira-attach-${Date.now()}`
try {
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
if (!authResult.success) {
return NextResponse.json(
{ success: false, error: authResult.error || 'Unauthorized' },
{ status: 401 }
)
}
const body = await request.json()
const validatedData = JiraAddAttachmentSchema.parse(body)
const userFiles = processFilesToUserFiles(validatedData.files, requestId, logger)
if (userFiles.length === 0) {
return NextResponse.json(
{ success: false, error: 'No valid files provided for upload' },
{ status: 400 }
)
}
const cloudId =
validatedData.cloudId ||
(await getJiraCloudId(validatedData.domain, validatedData.accessToken))
const formData = new FormData()
const filesOutput: Array<{ name: string; mimeType: string; data: string; size: number }> = []
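    // Download each file from storage, keep a base64 copy for the response output, and append it to the multipart form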
for (const file of userFiles) {
const buffer = await downloadFileFromStorage(file, requestId, logger)
filesOutput.push({
name: file.name,
mimeType: file.type || 'application/octet-stream',
data: buffer.toString('base64'),
size: buffer.length,
})
const blob = new Blob([new Uint8Array(buffer)], {
type: file.type || 'application/octet-stream',
})
formData.append('file', blob, file.name)
}
const url = `https://api.atlassian.com/ex/jira/${cloudId}/rest/api/3/issue/${validatedData.issueKey}/attachments`
const response = await fetch(url, {
method: 'POST',
headers: {
Authorization: `Bearer ${validatedData.accessToken}`,
'X-Atlassian-Token': 'no-check',
},
body: formData,
})
if (!response.ok) {
const errorText = await response.text()
logger.error(`[${requestId}] Jira attachment upload failed`, {
status: response.status,
statusText: response.statusText,
error: errorText,
})
return NextResponse.json(
{
success: false,
error: `Failed to upload attachments: ${response.statusText}`,
},
{ status: response.status }
)
}
const attachments = await response.json()
const attachmentIds = Array.isArray(attachments)
? attachments.map((attachment) => attachment.id).filter(Boolean)
: []
return NextResponse.json({
success: true,
output: {
ts: new Date().toISOString(),
issueKey: validatedData.issueKey,
attachmentIds,
files: filesOutput,
},
})
} catch (error) {
if (error instanceof z.ZodError) {
return NextResponse.json(
{ success: false, error: 'Invalid request data', details: error.errors },
{ status: 400 }
)
}
logger.error(`[${requestId}] Jira attachment upload error`, error)
return NextResponse.json(
{ success: false, error: error instanceof Error ? error.message : 'Internal server error' },
{ status: 500 }
)
}
}

View File

@@ -2,9 +2,11 @@ import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
+import { secureFetchWithValidation } from '@/lib/core/security/input-validation.server'
import { generateRequestId } from '@/lib/core/utils/request'
-import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
-import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
+import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
+import { uploadFilesForTeamsMessage } from '@/tools/microsoft_teams/server-utils'
+import type { GraphApiErrorResponse, GraphChatMessage } from '@/tools/microsoft_teams/types'
import { resolveMentionsForChannel, type TeamsMention } from '@/tools/microsoft_teams/utils'
export const dynamic = 'force-dynamic'
@@ -16,7 +18,7 @@ const TeamsWriteChannelSchema = z.object({
  teamId: z.string().min(1, 'Team ID is required'),
  channelId: z.string().min(1, 'Channel ID is required'),
  content: z.string().min(1, 'Message content is required'),
-  files: z.array(z.any()).optional().nullable(),
+  files: RawFileInputArraySchema.optional().nullable(),
})
export async function POST(request: NextRequest) {
@@ -53,93 +55,12 @@ export async function POST(request: NextRequest) {
    fileCount: validatedData.files?.length || 0,
  })
+  const { attachments, filesOutput } = await uploadFilesForTeamsMessage({
+    rawFiles: validatedData.files || [],
+    accessToken: validatedData.accessToken,
+    requestId,
+    logger,
+  })
-  const attachments: any[] = []
-  if (validatedData.files && validatedData.files.length > 0) {
-    const rawFiles = validatedData.files
-    logger.info(`[${requestId}] Processing ${rawFiles.length} file(s) for upload to OneDrive`)
-    const userFiles = processFilesToUserFiles(rawFiles, requestId, logger)
for (const file of userFiles) {
try {
logger.info(`[${requestId}] Uploading file to Teams: ${file.name} (${file.size} bytes)`)
const buffer = await downloadFileFromStorage(file, requestId, logger)
const uploadUrl =
'https://graph.microsoft.com/v1.0/me/drive/root:/TeamsAttachments/' +
encodeURIComponent(file.name) +
':/content'
logger.info(`[${requestId}] Uploading to Teams: ${uploadUrl}`)
const uploadResponse = await fetch(uploadUrl, {
method: 'PUT',
headers: {
Authorization: `Bearer ${validatedData.accessToken}`,
'Content-Type': file.type || 'application/octet-stream',
},
body: new Uint8Array(buffer),
})
if (!uploadResponse.ok) {
const errorData = await uploadResponse.json().catch(() => ({}))
logger.error(`[${requestId}] Teams upload failed:`, errorData)
throw new Error(
`Failed to upload file to Teams: ${errorData.error?.message || 'Unknown error'}`
)
}
const uploadedFile = await uploadResponse.json()
logger.info(`[${requestId}] File uploaded to Teams successfully`, {
id: uploadedFile.id,
webUrl: uploadedFile.webUrl,
})
const fileDetailsUrl = `https://graph.microsoft.com/v1.0/me/drive/items/${uploadedFile.id}?$select=id,name,webDavUrl,eTag,size`
const fileDetailsResponse = await fetch(fileDetailsUrl, {
headers: {
Authorization: `Bearer ${validatedData.accessToken}`,
},
})
if (!fileDetailsResponse.ok) {
const errorData = await fileDetailsResponse.json().catch(() => ({}))
logger.error(`[${requestId}] Failed to get file details:`, errorData)
throw new Error(
`Failed to get file details: ${errorData.error?.message || 'Unknown error'}`
)
}
const fileDetails = await fileDetailsResponse.json()
logger.info(`[${requestId}] Got file details`, {
webDavUrl: fileDetails.webDavUrl,
eTag: fileDetails.eTag,
})
const attachmentId = fileDetails.eTag?.match(/\{([a-f0-9-]+)\}/i)?.[1] || fileDetails.id
attachments.push({
id: attachmentId,
contentType: 'reference',
contentUrl: fileDetails.webDavUrl,
name: file.name,
})
logger.info(`[${requestId}] Created attachment reference for ${file.name}`)
} catch (error) {
logger.error(`[${requestId}] Failed to process file ${file.name}:`, error)
throw new Error(
`Failed to process file "${file.name}": ${error instanceof Error ? error.message : 'Unknown error'}`
)
}
}
logger.info(
`[${requestId}] All ${attachments.length} file(s) uploaded and attachment references created`
)
}
  let messageContent = validatedData.content
  let contentType: 'text' | 'html' = 'text'
@@ -197,17 +118,21 @@
    const teamsUrl = `https://graph.microsoft.com/v1.0/teams/${encodeURIComponent(validatedData.teamId)}/channels/${encodeURIComponent(validatedData.channelId)}/messages`
-    const teamsResponse = await fetch(teamsUrl, {
-      method: 'POST',
-      headers: {
-        'Content-Type': 'application/json',
-        Authorization: `Bearer ${validatedData.accessToken}`,
-      },
-      body: JSON.stringify(messageBody),
-    })
+    const teamsResponse = await secureFetchWithValidation(
+      teamsUrl,
+      {
+        method: 'POST',
+        headers: {
+          'Content-Type': 'application/json',
+          Authorization: `Bearer ${validatedData.accessToken}`,
+        },
+        body: JSON.stringify(messageBody),
+      },
+      'teamsUrl'
+    )
    if (!teamsResponse.ok) {
-      const errorData = await teamsResponse.json().catch(() => ({}))
+      const errorData = (await teamsResponse.json().catch(() => ({}))) as GraphApiErrorResponse
      logger.error(`[${requestId}] Microsoft Teams API error:`, errorData)
      return NextResponse.json(
        {
@@ -218,7 +143,7 @@
      )
    }
-    const responseData = await teamsResponse.json()
+    const responseData = (await teamsResponse.json()) as GraphChatMessage
    logger.info(`[${requestId}] Teams channel message sent successfully`, {
      messageId: responseData.id,
      attachmentCount: attachments.length,
@@ -237,6 +162,7 @@
        url: responseData.webUrl || '',
        attachmentCount: attachments.length,
      },
+      files: filesOutput,
    },
  })
} catch (error) {

View File

@@ -2,9 +2,11 @@ import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
+import { secureFetchWithValidation } from '@/lib/core/security/input-validation.server'
import { generateRequestId } from '@/lib/core/utils/request'
-import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
-import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
+import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
+import { uploadFilesForTeamsMessage } from '@/tools/microsoft_teams/server-utils'
+import type { GraphApiErrorResponse, GraphChatMessage } from '@/tools/microsoft_teams/types'
import { resolveMentionsForChat, type TeamsMention } from '@/tools/microsoft_teams/utils'
export const dynamic = 'force-dynamic'
@@ -15,7 +17,7 @@ const TeamsWriteChatSchema = z.object({
  accessToken: z.string().min(1, 'Access token is required'),
  chatId: z.string().min(1, 'Chat ID is required'),
  content: z.string().min(1, 'Message content is required'),
-  files: z.array(z.any()).optional().nullable(),
+  files: RawFileInputArraySchema.optional().nullable(),
})
export async function POST(request: NextRequest) {
@@ -51,93 +53,12 @@ export async function POST(request: NextRequest) {
    fileCount: validatedData.files?.length || 0,
  })
+  const { attachments, filesOutput } = await uploadFilesForTeamsMessage({
+    rawFiles: validatedData.files || [],
+    accessToken: validatedData.accessToken,
+    requestId,
+    logger,
+  })
-  const attachments: any[] = []
-  if (validatedData.files && validatedData.files.length > 0) {
-    const rawFiles = validatedData.files
-    logger.info(`[${requestId}] Processing ${rawFiles.length} file(s) for upload to Teams`)
-    const userFiles = processFilesToUserFiles(rawFiles, requestId, logger)
for (const file of userFiles) {
try {
logger.info(`[${requestId}] Uploading file to Teams: ${file.name} (${file.size} bytes)`)
const buffer = await downloadFileFromStorage(file, requestId, logger)
const uploadUrl =
'https://graph.microsoft.com/v1.0/me/drive/root:/TeamsAttachments/' +
encodeURIComponent(file.name) +
':/content'
logger.info(`[${requestId}] Uploading to Teams: ${uploadUrl}`)
const uploadResponse = await fetch(uploadUrl, {
method: 'PUT',
headers: {
Authorization: `Bearer ${validatedData.accessToken}`,
'Content-Type': file.type || 'application/octet-stream',
},
body: new Uint8Array(buffer),
})
if (!uploadResponse.ok) {
const errorData = await uploadResponse.json().catch(() => ({}))
logger.error(`[${requestId}] Teams upload failed:`, errorData)
throw new Error(
`Failed to upload file to Teams: ${errorData.error?.message || 'Unknown error'}`
)
}
const uploadedFile = await uploadResponse.json()
logger.info(`[${requestId}] File uploaded to Teams successfully`, {
id: uploadedFile.id,
webUrl: uploadedFile.webUrl,
})
const fileDetailsUrl = `https://graph.microsoft.com/v1.0/me/drive/items/${uploadedFile.id}?$select=id,name,webDavUrl,eTag,size`
const fileDetailsResponse = await fetch(fileDetailsUrl, {
headers: {
Authorization: `Bearer ${validatedData.accessToken}`,
},
})
if (!fileDetailsResponse.ok) {
const errorData = await fileDetailsResponse.json().catch(() => ({}))
logger.error(`[${requestId}] Failed to get file details:`, errorData)
throw new Error(
`Failed to get file details: ${errorData.error?.message || 'Unknown error'}`
)
}
const fileDetails = await fileDetailsResponse.json()
logger.info(`[${requestId}] Got file details`, {
webDavUrl: fileDetails.webDavUrl,
eTag: fileDetails.eTag,
})
const attachmentId = fileDetails.eTag?.match(/\{([a-f0-9-]+)\}/i)?.[1] || fileDetails.id
attachments.push({
id: attachmentId,
contentType: 'reference',
contentUrl: fileDetails.webDavUrl,
name: file.name,
})
logger.info(`[${requestId}] Created attachment reference for ${file.name}`)
} catch (error) {
logger.error(`[${requestId}] Failed to process file ${file.name}:`, error)
throw new Error(
`Failed to process file "${file.name}": ${error instanceof Error ? error.message : 'Unknown error'}`
)
}
}
logger.info(
`[${requestId}] All ${attachments.length} file(s) uploaded and attachment references created`
)
}
  let messageContent = validatedData.content
  let contentType: 'text' | 'html' = 'text'
@@ -194,17 +115,21 @@
    const teamsUrl = `https://graph.microsoft.com/v1.0/chats/${encodeURIComponent(validatedData.chatId)}/messages`
-    const teamsResponse = await fetch(teamsUrl, {
-      method: 'POST',
-      headers: {
-        'Content-Type': 'application/json',
-        Authorization: `Bearer ${validatedData.accessToken}`,
-      },
-      body: JSON.stringify(messageBody),
-    })
+    const teamsResponse = await secureFetchWithValidation(
+      teamsUrl,
+      {
+        method: 'POST',
+        headers: {
+          'Content-Type': 'application/json',
+          Authorization: `Bearer ${validatedData.accessToken}`,
+        },
+        body: JSON.stringify(messageBody),
+      },
+      'teamsUrl'
+    )
    if (!teamsResponse.ok) {
-      const errorData = await teamsResponse.json().catch(() => ({}))
+      const errorData = (await teamsResponse.json().catch(() => ({}))) as GraphApiErrorResponse
      logger.error(`[${requestId}] Microsoft Teams API error:`, errorData)
      return NextResponse.json(
        {
@@ -215,7 +140,7 @@
      )
    }
-    const responseData = await teamsResponse.json()
+    const responseData = (await teamsResponse.json()) as GraphChatMessage
    logger.info(`[${requestId}] Teams message sent successfully`, {
      messageId: responseData.id,
      attachmentCount: attachments.length,
@@ -233,6 +158,7 @@
        url: responseData.webUrl || '',
        attachmentCount: attachments.length,
      },
+      files: filesOutput,
    },
  })
} catch (error) {

View File

@@ -2,15 +2,17 @@ import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
-import { generateRequestId } from '@/lib/core/utils/request'
-import { getBaseUrl } from '@/lib/core/utils/urls'
-import { StorageService } from '@/lib/uploads'
-import {
-  extractStorageKey,
-  inferContextFromKey,
-  isInternalFileUrl,
-} from '@/lib/uploads/utils/file-utils'
-import { verifyFileAccess } from '@/app/api/files/authorization'
+import {
+  secureFetchWithPinnedIP,
+  validateUrlWithDNS,
+} from '@/lib/core/security/input-validation.server'
+import { generateRequestId } from '@/lib/core/utils/request'
+import { FileInputSchema } from '@/lib/uploads/utils/file-schemas'
+import { isInternalFileUrl, processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
+import {
+  downloadFileFromStorage,
+  resolveInternalFileUrl,
+} from '@/lib/uploads/utils/file-utils.server'
export const dynamic = 'force-dynamic'
@@ -18,7 +20,9 @@ const logger = createLogger('MistralParseAPI')
const MistralParseSchema = z.object({
  apiKey: z.string().min(1, 'API key is required'),
-  filePath: z.string().min(1, 'File path is required'),
+  filePath: z.string().min(1, 'File path is required').optional(),
+  fileData: FileInputSchema.optional(),
+  file: FileInputSchema.optional(),
  resultType: z.string().optional(),
  pages: z.array(z.number()).optional(),
  includeImageBase64: z.boolean().optional(),
@@ -49,66 +53,140 @@ export async function POST(request: NextRequest) {
    const body = await request.json()
    const validatedData = MistralParseSchema.parse(body)
+    const fileData = validatedData.file || validatedData.fileData
+    const filePath = typeof fileData === 'string' ? fileData : validatedData.filePath
+    if (!fileData && (!filePath || filePath.trim() === '')) {
+      return NextResponse.json(
+        {
+          success: false,
+          error: 'File input is required',
+        },
+        { status: 400 }
+      )
+    }
    logger.info(`[${requestId}] Mistral parse request`, {
-      filePath: validatedData.filePath,
-      isWorkspaceFile: isInternalFileUrl(validatedData.filePath),
+      hasFileData: Boolean(fileData),
+      filePath,
+      isWorkspaceFile: filePath ? isInternalFileUrl(filePath) : false,
      userId,
    })
+    const mistralBody: any = {
+      model: 'mistral-ocr-latest',
+    }
-    let fileUrl = validatedData.filePath
-    if (isInternalFileUrl(validatedData.filePath)) {
+    if (fileData && typeof fileData === 'object') {
+      const rawFile = fileData
+      let userFile
      try {
+        userFile = processSingleFileToUserFile(rawFile, requestId, logger)
-        const storageKey = extractStorageKey(validatedData.filePath)
const context = inferContextFromKey(storageKey)
const hasAccess = await verifyFileAccess(
storageKey,
userId,
undefined, // customConfig
context, // context
false // isLocal
)
if (!hasAccess) {
logger.warn(`[${requestId}] Unauthorized presigned URL generation attempt`, {
userId,
key: storageKey,
context,
})
return NextResponse.json(
{
success: false,
error: 'File not found',
},
{ status: 404 }
)
}
fileUrl = await StorageService.generatePresignedDownloadUrl(storageKey, context, 5 * 60)
logger.info(`[${requestId}] Generated presigned URL for ${context} file`)
      } catch (error) {
-        logger.error(`[${requestId}] Failed to generate presigned URL:`, error)
        return NextResponse.json(
          {
            success: false,
-            error: 'Failed to generate file access URL',
+            error: error instanceof Error ? error.message : 'Failed to process file',
          },
-          { status: 500 }
+          { status: 400 }
        )
      }
-    } else if (validatedData.filePath?.startsWith('/')) {
-      const baseUrl = getBaseUrl()
-      fileUrl = `${baseUrl}${validatedData.filePath}`
-    }
-    const mistralBody: any = {
-      model: 'mistral-ocr-latest',
-      document: {
-        type: 'document_url',
-        document_url: fileUrl,
-      },
+      let mimeType = userFile.type
+      if (!mimeType || mimeType === 'application/octet-stream') {
+        const filename = userFile.name?.toLowerCase() || ''
+        if (filename.endsWith('.pdf')) {
+          mimeType = 'application/pdf'
+        } else if (filename.endsWith('.png')) {
mimeType = 'image/png'
} else if (filename.endsWith('.jpg') || filename.endsWith('.jpeg')) {
mimeType = 'image/jpeg'
} else if (filename.endsWith('.gif')) {
mimeType = 'image/gif'
} else if (filename.endsWith('.webp')) {
mimeType = 'image/webp'
} else {
mimeType = 'application/pdf'
}
}
let base64 = userFile.base64
if (!base64) {
const buffer = await downloadFileFromStorage(userFile, requestId, logger)
base64 = buffer.toString('base64')
}
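        // Mistral accepts data URLs; wrap raw base64 with the resolved MIME type unless it is already a data URL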
const base64Payload = base64.startsWith('data:')
? base64
: `data:${mimeType};base64,${base64}`
// Mistral API uses different document types for images vs documents
const isImage = mimeType.startsWith('image/')
if (isImage) {
mistralBody.document = {
type: 'image_url',
image_url: base64Payload,
}
} else {
mistralBody.document = {
type: 'document_url',
document_url: base64Payload,
}
}
} else if (filePath) {
let fileUrl = filePath
const isInternalFilePath = isInternalFileUrl(filePath)
if (isInternalFilePath) {
const resolution = await resolveInternalFileUrl(filePath, userId, requestId, logger)
if (resolution.error) {
return NextResponse.json(
{
success: false,
error: resolution.error.message,
},
{ status: resolution.error.status }
)
}
fileUrl = resolution.fileUrl || fileUrl
} else if (filePath.startsWith('/')) {
logger.warn(`[${requestId}] Invalid internal path`, {
userId,
path: filePath.substring(0, 50),
})
return NextResponse.json(
{
success: false,
error: 'Invalid file path. Only uploaded files are supported for internal paths.',
},
{ status: 400 }
)
} else {
const urlValidation = await validateUrlWithDNS(fileUrl, 'filePath')
if (!urlValidation.isValid) {
return NextResponse.json(
{
success: false,
error: urlValidation.error,
},
{ status: 400 }
)
}
}
const imageExtensions = ['.png', '.jpg', '.jpeg', '.gif', '.webp', '.avif']
const pathname = new URL(fileUrl).pathname.toLowerCase()
const isImageUrl = imageExtensions.some((ext) => pathname.endsWith(ext))
if (isImageUrl) {
mistralBody.document = {
type: 'image_url',
image_url: fileUrl,
}
} else {
mistralBody.document = {
type: 'document_url',
document_url: fileUrl,
}
}
} }
    if (validatedData.pages) {
@@ -124,15 +202,34 @@
      mistralBody.image_min_size = validatedData.imageMinSize
    }
-    const mistralResponse = await fetch('https://api.mistral.ai/v1/ocr', {
-      method: 'POST',
-      headers: {
-        'Content-Type': 'application/json',
-        Accept: 'application/json',
-        Authorization: `Bearer ${validatedData.apiKey}`,
-      },
-      body: JSON.stringify(mistralBody),
-    })
+    const mistralEndpoint = 'https://api.mistral.ai/v1/ocr'
+    const mistralValidation = await validateUrlWithDNS(mistralEndpoint, 'Mistral API URL')
+    if (!mistralValidation.isValid) {
+      logger.error(`[${requestId}] Mistral API URL validation failed`, {
+        error: mistralValidation.error,
+      })
+      return NextResponse.json(
+        {
+          success: false,
+          error: 'Failed to reach Mistral API',
+        },
+        { status: 502 }
+      )
+    }
+    const mistralResponse = await secureFetchWithPinnedIP(
+      mistralEndpoint,
+      mistralValidation.resolvedIP!,
+      {
+        method: 'POST',
+        headers: {
+          'Content-Type': 'application/json',
+          Accept: 'application/json',
+          Authorization: `Bearer ${validatedData.apiKey}`,
+        },
+        body: JSON.stringify(mistralBody),
+      }
+    )
    if (!mistralResponse.ok) {
      const errorText = await mistralResponse.text()

View File

@@ -0,0 +1,177 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import {
secureFetchWithPinnedIP,
validateUrlWithDNS,
} from '@/lib/core/security/input-validation.server'
import { generateRequestId } from '@/lib/core/utils/request'
export const dynamic = 'force-dynamic'
/** Microsoft Graph API error response structure */
interface GraphApiError {
error?: {
code?: string
message?: string
}
}
/** Microsoft Graph API drive item metadata response */
interface DriveItemMetadata {
id?: string
name?: string
folder?: Record<string, unknown>
file?: {
mimeType?: string
}
}
const logger = createLogger('OneDriveDownloadAPI')
const OneDriveDownloadSchema = z.object({
accessToken: z.string().min(1, 'Access token is required'),
fileId: z.string().min(1, 'File ID is required'),
fileName: z.string().optional().nullable(),
})
export async function POST(request: NextRequest) {
const requestId = generateRequestId()
try {
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
if (!authResult.success) {
logger.warn(`[${requestId}] Unauthorized OneDrive download attempt: ${authResult.error}`)
return NextResponse.json(
{
success: false,
error: authResult.error || 'Authentication required',
},
{ status: 401 }
)
}
const body = await request.json()
const validatedData = OneDriveDownloadSchema.parse(body)
const { accessToken, fileId, fileName } = validatedData
const authHeader = `Bearer ${accessToken}`
logger.info(`[${requestId}] Getting file metadata from OneDrive`, { fileId })
const metadataUrl = `https://graph.microsoft.com/v1.0/me/drive/items/${fileId}`
const metadataUrlValidation = await validateUrlWithDNS(metadataUrl, 'metadataUrl')
if (!metadataUrlValidation.isValid) {
return NextResponse.json(
{ success: false, error: metadataUrlValidation.error },
{ status: 400 }
)
}
const metadataResponse = await secureFetchWithPinnedIP(
metadataUrl,
metadataUrlValidation.resolvedIP!,
{
headers: { Authorization: authHeader },
}
)
if (!metadataResponse.ok) {
const errorDetails = (await metadataResponse.json().catch(() => ({}))) as GraphApiError
logger.error(`[${requestId}] Failed to get file metadata`, {
status: metadataResponse.status,
error: errorDetails,
})
return NextResponse.json(
{ success: false, error: errorDetails.error?.message || 'Failed to get file metadata' },
{ status: 400 }
)
}
const metadata = (await metadataResponse.json()) as DriveItemMetadata
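    // A drive item can be a folder; only items with a file facet have content to download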
if (metadata.folder && !metadata.file) {
logger.error(`[${requestId}] Attempted to download a folder`, {
itemId: metadata.id,
itemName: metadata.name,
})
return NextResponse.json(
{
success: false,
error: `Cannot download folder "${metadata.name}". Please select a file instead.`,
},
{ status: 400 }
)
}
const mimeType = metadata.file?.mimeType || 'application/octet-stream'
logger.info(`[${requestId}] Downloading file from OneDrive`, { fileId, mimeType })
const downloadUrl = `https://graph.microsoft.com/v1.0/me/drive/items/${fileId}/content`
const downloadUrlValidation = await validateUrlWithDNS(downloadUrl, 'downloadUrl')
if (!downloadUrlValidation.isValid) {
return NextResponse.json(
{ success: false, error: downloadUrlValidation.error },
{ status: 400 }
)
}
const downloadResponse = await secureFetchWithPinnedIP(
downloadUrl,
downloadUrlValidation.resolvedIP!,
{
headers: { Authorization: authHeader },
}
)
if (!downloadResponse.ok) {
const downloadError = (await downloadResponse.json().catch(() => ({}))) as GraphApiError
logger.error(`[${requestId}] Failed to download file`, {
status: downloadResponse.status,
error: downloadError,
})
return NextResponse.json(
{ success: false, error: downloadError.error?.message || 'Failed to download file' },
{ status: 400 }
)
}
const arrayBuffer = await downloadResponse.arrayBuffer()
const fileBuffer = Buffer.from(arrayBuffer)
const resolvedName = fileName || metadata.name || 'download'
logger.info(`[${requestId}] File downloaded successfully`, {
fileId,
name: resolvedName,
size: fileBuffer.length,
mimeType,
})
const base64Data = fileBuffer.toString('base64')
return NextResponse.json({
success: true,
output: {
file: {
name: resolvedName,
mimeType,
data: base64Data,
size: fileBuffer.length,
},
},
})
} catch (error) {
logger.error(`[${requestId}] Error downloading OneDrive file:`, error)
return NextResponse.json(
{
success: false,
error: error instanceof Error ? error.message : 'Unknown error occurred',
},
{ status: 500 }
)
}
}

View File

@@ -4,7 +4,9 @@ import * as XLSX from 'xlsx'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { validateMicrosoftGraphId } from '@/lib/core/security/input-validation'
+import { secureFetchWithValidation } from '@/lib/core/security/input-validation.server'
import { generateRequestId } from '@/lib/core/utils/request'
+import { RawFileInputSchema } from '@/lib/uploads/utils/file-schemas'
import {
  getExtensionFromMimeType,
  processSingleFileToUserFile,
@@ -29,12 +31,33 @@ const ExcelValuesSchema = z.union([
const OneDriveUploadSchema = z.object({
  accessToken: z.string().min(1, 'Access token is required'),
  fileName: z.string().min(1, 'File name is required'),
-  file: z.any().optional(),
+  file: RawFileInputSchema.optional(),
  folderId: z.string().optional().nullable(),
  mimeType: z.string().nullish(),
  values: ExcelValuesSchema.optional().nullable(),
+  conflictBehavior: z.enum(['fail', 'replace', 'rename']).optional().nullable(),
})
/** Microsoft Graph DriveItem response */
interface OneDriveFileData {
id: string
name: string
size: number
webUrl: string
createdDateTime: string
lastModifiedDateTime: string
file?: { mimeType: string }
parentReference?: { id: string; path: string }
'@microsoft.graph.downloadUrl'?: string
}
/** Microsoft Graph Excel range response */
interface ExcelRangeData {
address?: string
addressLocal?: string
values?: unknown[][]
}
export async function POST(request: NextRequest) { export async function POST(request: NextRequest) {
const requestId = generateRequestId() const requestId = generateRequestId()
@@ -88,25 +111,9 @@ export async function POST(request: NextRequest) {
) )
} }
let fileToProcess
if (Array.isArray(rawFile)) {
if (rawFile.length === 0) {
return NextResponse.json(
{
success: false,
error: 'No file provided',
},
{ status: 400 }
)
}
fileToProcess = rawFile[0]
} else {
fileToProcess = rawFile
}
    let userFile
    try {
-      userFile = processSingleFileToUserFile(fileToProcess, requestId, logger)
+      userFile = processSingleFileToUserFile(rawFile, requestId, logger)
    } catch (error) {
      return NextResponse.json(
        {
@@ -179,14 +186,23 @@ export async function POST(request: NextRequest) {
      uploadUrl = `${MICROSOFT_GRAPH_BASE}/me/drive/root:/${encodeURIComponent(fileName)}:/content`
    }
-    const uploadResponse = await fetch(uploadUrl, {
-      method: 'PUT',
-      headers: {
-        Authorization: `Bearer ${validatedData.accessToken}`,
-        'Content-Type': mimeType,
-      },
-      body: new Uint8Array(fileBuffer),
-    })
+    // Add conflict behavior if specified (defaults to replace by Microsoft Graph API)
+    if (validatedData.conflictBehavior) {
+      uploadUrl += `?@microsoft.graph.conflictBehavior=${validatedData.conflictBehavior}`
+    }
+    const uploadResponse = await secureFetchWithValidation(
+      uploadUrl,
+      {
+        method: 'PUT',
+        headers: {
+          Authorization: `Bearer ${validatedData.accessToken}`,
+          'Content-Type': mimeType,
+        },
+        body: fileBuffer,
+      },
+      'uploadUrl'
+    )
    if (!uploadResponse.ok) {
      const errorText = await uploadResponse.text()
@@ -200,7 +216,7 @@
      )
    }
-    const fileData = await uploadResponse.json()
+    const fileData = (await uploadResponse.json()) as OneDriveFileData
    let excelWriteResult: any | undefined
    const shouldWriteExcelContent =
@@ -209,8 +225,11 @@ export async function POST(request: NextRequest) {
if (shouldWriteExcelContent) { if (shouldWriteExcelContent) {
try { try {
let workbookSessionId: string | undefined let workbookSessionId: string | undefined
const sessionResp = await fetch( const sessionUrl = `${MICROSOFT_GRAPH_BASE}/me/drive/items/${encodeURIComponent(
`${MICROSOFT_GRAPH_BASE}/me/drive/items/${encodeURIComponent(fileData.id)}/workbook/createSession`, fileData.id
)}/workbook/createSession`
const sessionResp = await secureFetchWithValidation(
sessionUrl,
{ {
method: 'POST', method: 'POST',
headers: { headers: {
@@ -218,11 +237,12 @@ export async function POST(request: NextRequest) {
'Content-Type': 'application/json', 'Content-Type': 'application/json',
}, },
body: JSON.stringify({ persistChanges: true }), body: JSON.stringify({ persistChanges: true }),
} },
'sessionUrl'
) )
if (sessionResp.ok) { if (sessionResp.ok) {
const sessionData = await sessionResp.json() const sessionData = (await sessionResp.json()) as { id?: string }
workbookSessionId = sessionData?.id workbookSessionId = sessionData?.id
} }
@@ -231,14 +251,19 @@ export async function POST(request: NextRequest) {
const listUrl = `${MICROSOFT_GRAPH_BASE}/me/drive/items/${encodeURIComponent( const listUrl = `${MICROSOFT_GRAPH_BASE}/me/drive/items/${encodeURIComponent(
fileData.id fileData.id
)}/workbook/worksheets?$select=name&$orderby=position&$top=1` )}/workbook/worksheets?$select=name&$orderby=position&$top=1`
const listResp = await fetch(listUrl, { const listResp = await secureFetchWithValidation(
headers: { listUrl,
Authorization: `Bearer ${validatedData.accessToken}`, {
...(workbookSessionId ? { 'workbook-session-id': workbookSessionId } : {}), method: 'GET',
headers: {
Authorization: `Bearer ${validatedData.accessToken}`,
...(workbookSessionId ? { 'workbook-session-id': workbookSessionId } : {}),
},
}, },
}) 'listUrl'
)
if (listResp.ok) { if (listResp.ok) {
const listData = await listResp.json() const listData = (await listResp.json()) as { value?: Array<{ name?: string }> }
const firstSheetName = listData?.value?.[0]?.name const firstSheetName = listData?.value?.[0]?.name
if (firstSheetName) { if (firstSheetName) {
sheetName = firstSheetName sheetName = firstSheetName
@@ -297,15 +322,19 @@ export async function POST(request: NextRequest) {
)}')/range(address='${encodeURIComponent(computedRangeAddress)}')` )}')/range(address='${encodeURIComponent(computedRangeAddress)}')`
) )
const excelWriteResponse = await fetch(url.toString(), { const excelWriteResponse = await secureFetchWithValidation(
method: 'PATCH', url.toString(),
headers: { {
Authorization: `Bearer ${validatedData.accessToken}`, method: 'PATCH',
'Content-Type': 'application/json', headers: {
...(workbookSessionId ? { 'workbook-session-id': workbookSessionId } : {}), Authorization: `Bearer ${validatedData.accessToken}`,
'Content-Type': 'application/json',
...(workbookSessionId ? { 'workbook-session-id': workbookSessionId } : {}),
},
body: JSON.stringify({ values: processedValues }),
}, },
body: JSON.stringify({ values: processedValues }), 'excelWriteUrl'
}) )
if (!excelWriteResponse || !excelWriteResponse.ok) { if (!excelWriteResponse || !excelWriteResponse.ok) {
const errorText = excelWriteResponse ? await excelWriteResponse.text() : 'no response' const errorText = excelWriteResponse ? await excelWriteResponse.text() : 'no response'
@@ -320,7 +349,7 @@ export async function POST(request: NextRequest) {
details: errorText, details: errorText,
} }
} else { } else {
const writeData = await excelWriteResponse.json() const writeData = (await excelWriteResponse.json()) as ExcelRangeData
const addr = writeData.address || writeData.addressLocal const addr = writeData.address || writeData.addressLocal
const v = writeData.values || [] const v = writeData.values || []
excelWriteResult = { excelWriteResult = {
@@ -328,21 +357,25 @@ export async function POST(request: NextRequest) {
updatedRange: addr, updatedRange: addr,
updatedRows: Array.isArray(v) ? v.length : undefined, updatedRows: Array.isArray(v) ? v.length : undefined,
updatedColumns: Array.isArray(v) && v[0] ? v[0].length : undefined, updatedColumns: Array.isArray(v) && v[0] ? v[0].length : undefined,
updatedCells: Array.isArray(v) && v[0] ? v.length * (v[0] as any[]).length : undefined, updatedCells: Array.isArray(v) && v[0] ? v.length * v[0].length : undefined,
} }
} }
if (workbookSessionId) { if (workbookSessionId) {
try { try {
const closeResp = await fetch( const closeUrl = `${MICROSOFT_GRAPH_BASE}/me/drive/items/${encodeURIComponent(
`${MICROSOFT_GRAPH_BASE}/me/drive/items/${encodeURIComponent(fileData.id)}/workbook/closeSession`, fileData.id
)}/workbook/closeSession`
const closeResp = await secureFetchWithValidation(
closeUrl,
{ {
method: 'POST', method: 'POST',
headers: { headers: {
Authorization: `Bearer ${validatedData.accessToken}`, Authorization: `Bearer ${validatedData.accessToken}`,
'workbook-session-id': workbookSessionId, 'workbook-session-id': workbookSessionId,
}, },
} },
'closeSessionUrl'
) )
if (!closeResp.ok) { if (!closeResp.ok) {
const closeText = await closeResp.text() const closeText = await closeResp.text()

View File

@@ -3,6 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
+import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
@@ -18,7 +19,7 @@ const OutlookDraftSchema = z.object({
  contentType: z.enum(['text', 'html']).optional().nullable(),
  cc: z.string().optional().nullable(),
  bcc: z.string().optional().nullable(),
-  attachments: z.array(z.any()).optional().nullable(),
+  attachments: RawFileInputArraySchema.optional().nullable(),
})
export async function POST(request: NextRequest) {

View File

@@ -3,6 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
+import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
@@ -20,7 +21,7 @@ const OutlookSendSchema = z.object({
  bcc: z.string().optional().nullable(),
  replyToMessageId: z.string().optional().nullable(),
  conversationId: z.string().optional().nullable(),
-  attachments: z.array(z.any()).optional().nullable(),
+  attachments: RawFileInputArraySchema.optional().nullable(),
})
export async function POST(request: NextRequest) {
@@ -95,14 +96,14 @@
    if (attachments.length > 0) {
      const totalSize = attachments.reduce((sum, file) => sum + file.size, 0)
-      const maxSize = 4 * 1024 * 1024 // 4MB
+      const maxSize = 3 * 1024 * 1024 // 3MB - Microsoft Graph API limit for inline attachments
      if (totalSize > maxSize) {
        const sizeMB = (totalSize / (1024 * 1024)).toFixed(2)
        return NextResponse.json(
          {
            success: false,
-            error: `Total attachment size (${sizeMB}MB) exceeds Outlook's limit of 4MB per request`,
+            error: `Total attachment size (${sizeMB}MB) exceeds Microsoft Graph API limit of 3MB per request`,
          },
          { status: 400 }
        )

View File

@@ -0,0 +1,165 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import {
secureFetchWithPinnedIP,
validateUrlWithDNS,
} from '@/lib/core/security/input-validation.server'
import { generateRequestId } from '@/lib/core/utils/request'
import { getFileExtension, getMimeTypeFromExtension } from '@/lib/uploads/utils/file-utils'
export const dynamic = 'force-dynamic'
const logger = createLogger('PipedriveGetFilesAPI')
interface PipedriveFile {
id?: number
name?: string
url?: string
}
interface PipedriveApiResponse {
success: boolean
data?: PipedriveFile[]
error?: string
}
const PipedriveGetFilesSchema = z.object({
accessToken: z.string().min(1, 'Access token is required'),
deal_id: z.string().optional().nullable(),
person_id: z.string().optional().nullable(),
org_id: z.string().optional().nullable(),
limit: z.string().optional().nullable(),
downloadFiles: z.boolean().optional().default(false),
})
export async function POST(request: NextRequest) {
const requestId = generateRequestId()
try {
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
if (!authResult.success) {
logger.warn(`[${requestId}] Unauthorized Pipedrive get files attempt: ${authResult.error}`)
return NextResponse.json(
{
success: false,
error: authResult.error || 'Authentication required',
},
{ status: 401 }
)
}
const body = await request.json()
const validatedData = PipedriveGetFilesSchema.parse(body)
const { accessToken, deal_id, person_id, org_id, limit, downloadFiles } = validatedData
const baseUrl = 'https://api.pipedrive.com/v1/files'
const queryParams = new URLSearchParams()
if (deal_id) queryParams.append('deal_id', deal_id)
if (person_id) queryParams.append('person_id', person_id)
if (org_id) queryParams.append('org_id', org_id)
if (limit) queryParams.append('limit', limit)
const queryString = queryParams.toString()
const apiUrl = queryString ? `${baseUrl}?${queryString}` : baseUrl
logger.info(`[${requestId}] Fetching files from Pipedrive`, { deal_id, person_id, org_id })
const urlValidation = await validateUrlWithDNS(apiUrl, 'apiUrl')
if (!urlValidation.isValid) {
return NextResponse.json({ success: false, error: urlValidation.error }, { status: 400 })
}
const response = await secureFetchWithPinnedIP(apiUrl, urlValidation.resolvedIP!, {
method: 'GET',
headers: {
Authorization: `Bearer ${accessToken}`,
Accept: 'application/json',
},
})
const data = (await response.json()) as PipedriveApiResponse
if (!data.success) {
logger.error(`[${requestId}] Pipedrive API request failed`, { data })
return NextResponse.json(
{ success: false, error: data.error || 'Failed to fetch files from Pipedrive' },
{ status: 400 }
)
}
const files = data.data || []
const downloadedFiles: Array<{
name: string
mimeType: string
data: string
size: number
}> = []
if (downloadFiles) {
for (const file of files) {
if (!file?.url) continue
try {
const fileUrlValidation = await validateUrlWithDNS(file.url, 'fileUrl')
if (!fileUrlValidation.isValid) continue
const downloadResponse = await secureFetchWithPinnedIP(
file.url,
fileUrlValidation.resolvedIP!,
{
method: 'GET',
headers: { Authorization: `Bearer ${accessToken}` },
}
)
if (!downloadResponse.ok) continue
const arrayBuffer = await downloadResponse.arrayBuffer()
const buffer = Buffer.from(arrayBuffer)
const extension = getFileExtension(file.name || '')
const mimeType =
downloadResponse.headers.get('content-type') || getMimeTypeFromExtension(extension)
const fileName = file.name || `pipedrive-file-${file.id || Date.now()}`
downloadedFiles.push({
name: fileName,
mimeType,
data: buffer.toString('base64'),
size: buffer.length,
})
} catch (error) {
logger.warn(`[${requestId}] Failed to download file ${file.id}:`, error)
}
}
}
logger.info(`[${requestId}] Pipedrive files fetched successfully`, {
fileCount: files.length,
downloadedCount: downloadedFiles.length,
})
return NextResponse.json({
success: true,
output: {
files,
downloadedFiles: downloadedFiles.length > 0 ? downloadedFiles : undefined,
total_items: files.length,
success: true,
},
})
} catch (error) {
logger.error(`[${requestId}] Error fetching Pipedrive files:`, error)
return NextResponse.json(
{
success: false,
error: error instanceof Error ? error.message : 'Unknown error occurred',
},
{ status: 500 }
)
}
}
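
The per-file download loop above follows the validate-then-pin pattern used throughout this change: `validateUrlWithDNS` resolves and checks the target, then `secureFetchWithPinnedIP` performs the request against the resolved IP. A condensed sketch of that pattern as a standalone helper (the helper name and its skip-on-failure behavior are illustrative, not part of the route):

```typescript
import {
  secureFetchWithPinnedIP,
  validateUrlWithDNS,
} from '@/lib/core/security/input-validation.server'

// Hypothetical helper mirroring the Pipedrive download loop: validate the URL
// (DNS resolution + SSRF checks), fetch against the pinned IP, return base64.
async function downloadAsBase64(url: string, accessToken: string): Promise<string | null> {
  const validation = await validateUrlWithDNS(url, 'fileUrl')
  if (!validation.isValid) return null // skip unresolvable or disallowed URLs

  const response = await secureFetchWithPinnedIP(url, validation.resolvedIP!, {
    method: 'GET',
    headers: { Authorization: `Bearer ${accessToken}` },
  })
  if (!response.ok) return null

  return Buffer.from(await response.arrayBuffer()).toString('base64')
}
```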

View File

@@ -2,15 +2,14 @@ import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server' import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod' import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid' import { checkInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { StorageService } from '@/lib/uploads'
import { import {
extractStorageKey, secureFetchWithPinnedIP,
inferContextFromKey, validateUrlWithDNS,
isInternalFileUrl, } from '@/lib/core/security/input-validation.server'
} from '@/lib/uploads/utils/file-utils' import { generateRequestId } from '@/lib/core/utils/request'
import { verifyFileAccess } from '@/app/api/files/authorization' import { RawFileInputSchema } from '@/lib/uploads/utils/file-schemas'
import { isInternalFileUrl } from '@/lib/uploads/utils/file-utils'
import { resolveFileInputToUrl } from '@/lib/uploads/utils/file-utils.server'
export const dynamic = 'force-dynamic' export const dynamic = 'force-dynamic'
@@ -18,7 +17,8 @@ const logger = createLogger('PulseParseAPI')
const PulseParseSchema = z.object({ const PulseParseSchema = z.object({
apiKey: z.string().min(1, 'API key is required'), apiKey: z.string().min(1, 'API key is required'),
filePath: z.string().min(1, 'File path is required'), filePath: z.string().optional(),
file: RawFileInputSchema.optional(),
pages: z.string().optional(), pages: z.string().optional(),
extractFigure: z.boolean().optional(), extractFigure: z.boolean().optional(),
figureDescription: z.boolean().optional(), figureDescription: z.boolean().optional(),
@@ -51,50 +51,30 @@ export async function POST(request: NextRequest) {
const validatedData = PulseParseSchema.parse(body) const validatedData = PulseParseSchema.parse(body)
logger.info(`[${requestId}] Pulse parse request`, { logger.info(`[${requestId}] Pulse parse request`, {
fileName: validatedData.file?.name,
filePath: validatedData.filePath, filePath: validatedData.filePath,
isWorkspaceFile: isInternalFileUrl(validatedData.filePath), isWorkspaceFile: validatedData.filePath ? isInternalFileUrl(validatedData.filePath) : false,
userId, userId,
}) })
let fileUrl = validatedData.filePath const resolution = await resolveFileInputToUrl({
file: validatedData.file,
filePath: validatedData.filePath,
userId,
requestId,
logger,
})
if (isInternalFileUrl(validatedData.filePath)) { if (resolution.error) {
try { return NextResponse.json(
const storageKey = extractStorageKey(validatedData.filePath) { success: false, error: resolution.error.message },
const context = inferContextFromKey(storageKey) { status: resolution.error.status }
)
}
const hasAccess = await verifyFileAccess(storageKey, userId, undefined, context, false) const fileUrl = resolution.fileUrl
if (!fileUrl) {
if (!hasAccess) { return NextResponse.json({ success: false, error: 'File input is required' }, { status: 400 })
logger.warn(`[${requestId}] Unauthorized presigned URL generation attempt`, {
userId,
key: storageKey,
context,
})
return NextResponse.json(
{
success: false,
error: 'File not found',
},
{ status: 404 }
)
}
fileUrl = await StorageService.generatePresignedDownloadUrl(storageKey, context, 5 * 60)
logger.info(`[${requestId}] Generated presigned URL for ${context} file`)
} catch (error) {
logger.error(`[${requestId}] Failed to generate presigned URL:`, error)
return NextResponse.json(
{
success: false,
error: 'Failed to generate file access URL',
},
{ status: 500 }
)
}
} else if (validatedData.filePath?.startsWith('/')) {
const baseUrl = getBaseUrl()
fileUrl = `${baseUrl}${validatedData.filePath}`
} }
const formData = new FormData() const formData = new FormData()
@@ -119,13 +99,36 @@ export async function POST(request: NextRequest) {
formData.append('chunk_size', String(validatedData.chunkSize)) formData.append('chunk_size', String(validatedData.chunkSize))
} }
const pulseResponse = await fetch('https://api.runpulse.com/extract', { const pulseEndpoint = 'https://api.runpulse.com/extract'
method: 'POST', const pulseValidation = await validateUrlWithDNS(pulseEndpoint, 'Pulse API URL')
headers: { if (!pulseValidation.isValid) {
'x-api-key': validatedData.apiKey, logger.error(`[${requestId}] Pulse API URL validation failed`, {
}, error: pulseValidation.error,
body: formData, })
}) return NextResponse.json(
{
success: false,
error: 'Failed to reach Pulse API',
},
{ status: 502 }
)
}
const pulsePayload = new Response(formData)
const contentType = pulsePayload.headers.get('content-type') || 'multipart/form-data'
const bodyBuffer = Buffer.from(await pulsePayload.arrayBuffer())
const pulseResponse = await secureFetchWithPinnedIP(
pulseEndpoint,
pulseValidation.resolvedIP!,
{
method: 'POST',
headers: {
'x-api-key': validatedData.apiKey,
'Content-Type': contentType,
},
body: bodyBuffer,
}
)
if (!pulseResponse.ok) { if (!pulseResponse.ok) {
const errorText = await pulseResponse.text() const errorText = await pulseResponse.text()
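
One detail worth calling out in the hunk above: `secureFetchWithPinnedIP` takes a plain body and explicit headers, so the multipart `FormData` is first wrapped in a `Response` to let the runtime produce both the encoded body and its boundary-bearing `Content-Type`. A minimal sketch of just that conversion step, assuming the pinned-IP fetch accepts a `Buffer` body:

```typescript
// Serialize a FormData into a Buffer plus the boundary-bearing Content-Type,
// so it can be forwarded through secureFetchWithPinnedIP.
async function serializeFormData(form: FormData): Promise<{ contentType: string; body: Buffer }> {
  const wrapped = new Response(form) // the runtime builds the multipart encoding
  return {
    contentType: wrapped.headers.get('content-type') || 'multipart/form-data',
    body: Buffer.from(await wrapped.arrayBuffer()),
  }
}
```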

View File

@@ -2,15 +2,14 @@ import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server' import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod' import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid' import { checkInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { StorageService } from '@/lib/uploads'
import { import {
extractStorageKey, secureFetchWithPinnedIP,
inferContextFromKey, validateUrlWithDNS,
isInternalFileUrl, } from '@/lib/core/security/input-validation.server'
} from '@/lib/uploads/utils/file-utils' import { generateRequestId } from '@/lib/core/utils/request'
import { verifyFileAccess } from '@/app/api/files/authorization' import { RawFileInputSchema } from '@/lib/uploads/utils/file-schemas'
import { isInternalFileUrl } from '@/lib/uploads/utils/file-utils'
import { resolveFileInputToUrl } from '@/lib/uploads/utils/file-utils.server'
export const dynamic = 'force-dynamic' export const dynamic = 'force-dynamic'
@@ -18,7 +17,8 @@ const logger = createLogger('ReductoParseAPI')
const ReductoParseSchema = z.object({ const ReductoParseSchema = z.object({
apiKey: z.string().min(1, 'API key is required'), apiKey: z.string().min(1, 'API key is required'),
filePath: z.string().min(1, 'File path is required'), filePath: z.string().optional(),
file: RawFileInputSchema.optional(),
pages: z.array(z.number()).optional(), pages: z.array(z.number()).optional(),
tableOutputFormat: z.enum(['html', 'md']).optional(), tableOutputFormat: z.enum(['html', 'md']).optional(),
}) })
@@ -47,56 +47,30 @@ export async function POST(request: NextRequest) {
const validatedData = ReductoParseSchema.parse(body) const validatedData = ReductoParseSchema.parse(body)
logger.info(`[${requestId}] Reducto parse request`, { logger.info(`[${requestId}] Reducto parse request`, {
fileName: validatedData.file?.name,
filePath: validatedData.filePath, filePath: validatedData.filePath,
isWorkspaceFile: isInternalFileUrl(validatedData.filePath), isWorkspaceFile: validatedData.filePath ? isInternalFileUrl(validatedData.filePath) : false,
userId, userId,
}) })
let fileUrl = validatedData.filePath const resolution = await resolveFileInputToUrl({
file: validatedData.file,
filePath: validatedData.filePath,
userId,
requestId,
logger,
})
if (isInternalFileUrl(validatedData.filePath)) { if (resolution.error) {
try { return NextResponse.json(
const storageKey = extractStorageKey(validatedData.filePath) { success: false, error: resolution.error.message },
const context = inferContextFromKey(storageKey) { status: resolution.error.status }
)
}
const hasAccess = await verifyFileAccess( const fileUrl = resolution.fileUrl
storageKey, if (!fileUrl) {
userId, return NextResponse.json({ success: false, error: 'File input is required' }, { status: 400 })
undefined, // customConfig
context, // context
false // isLocal
)
if (!hasAccess) {
logger.warn(`[${requestId}] Unauthorized presigned URL generation attempt`, {
userId,
key: storageKey,
context,
})
return NextResponse.json(
{
success: false,
error: 'File not found',
},
{ status: 404 }
)
}
fileUrl = await StorageService.generatePresignedDownloadUrl(storageKey, context, 5 * 60)
logger.info(`[${requestId}] Generated presigned URL for ${context} file`)
} catch (error) {
logger.error(`[${requestId}] Failed to generate presigned URL:`, error)
return NextResponse.json(
{
success: false,
error: 'Failed to generate file access URL',
},
{ status: 500 }
)
}
} else if (validatedData.filePath?.startsWith('/')) {
const baseUrl = getBaseUrl()
fileUrl = `${baseUrl}${validatedData.filePath}`
} }
const reductoBody: Record<string, unknown> = { const reductoBody: Record<string, unknown> = {
@@ -104,8 +78,13 @@ export async function POST(request: NextRequest) {
} }
if (validatedData.pages && validatedData.pages.length > 0) { if (validatedData.pages && validatedData.pages.length > 0) {
// Reducto API expects page_range as an object with start/end, not an array
const pages = validatedData.pages
reductoBody.settings = { reductoBody.settings = {
page_range: validatedData.pages, page_range: {
start: Math.min(...pages),
end: Math.max(...pages),
},
} }
} }
@@ -115,15 +94,34 @@ export async function POST(request: NextRequest) {
} }
} }
const reductoResponse = await fetch('https://platform.reducto.ai/parse', { const reductoEndpoint = 'https://platform.reducto.ai/parse'
method: 'POST', const reductoValidation = await validateUrlWithDNS(reductoEndpoint, 'Reducto API URL')
headers: { if (!reductoValidation.isValid) {
'Content-Type': 'application/json', logger.error(`[${requestId}] Reducto API URL validation failed`, {
Accept: 'application/json', error: reductoValidation.error,
Authorization: `Bearer ${validatedData.apiKey}`, })
}, return NextResponse.json(
body: JSON.stringify(reductoBody), {
}) success: false,
error: 'Failed to reach Reducto API',
},
{ status: 502 }
)
}
const reductoResponse = await secureFetchWithPinnedIP(
reductoEndpoint,
reductoValidation.resolvedIP!,
{
method: 'POST',
headers: {
'Content-Type': 'application/json',
Accept: 'application/json',
Authorization: `Bearer ${validatedData.apiKey}`,
},
body: JSON.stringify(reductoBody),
}
)
if (!reductoResponse.ok) { if (!reductoResponse.ok) {
const errorText = await reductoResponse.text() const errorText = await reductoResponse.text()
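
Both this route and the Pulse route above now delegate file access to `resolveFileInputToUrl`, which accepts either an uploaded `file` object or a `filePath` string and returns `{ fileUrl, error }`. Condensed from the hunks above into a single helper for readability; the wrapper function itself and its simplified parameter types are illustrative, not part of the diff.

```typescript
import type { Logger } from '@sim/logger'
import { NextResponse } from 'next/server'
import { resolveFileInputToUrl } from '@/lib/uploads/utils/file-utils.server'

// Resolve either an uploaded file object or a filePath string to a fetchable
// URL, or answer with the resolver's own error status (condensed sketch).
async function resolveOrRespond(
  input: { file?: any; filePath?: string }, // loose typing for the sketch
  userId: string,
  requestId: string,
  logger: Logger
): Promise<string | NextResponse> {
  const resolution = await resolveFileInputToUrl({
    file: input.file,
    filePath: input.filePath,
    userId,
    requestId,
    logger,
  })
  if (resolution.error) {
    return NextResponse.json(
      { success: false, error: resolution.error.message },
      { status: resolution.error.status }
    )
  }
  if (!resolution.fileUrl) {
    return NextResponse.json({ success: false, error: 'File input is required' }, { status: 400 })
  }
  return resolution.fileUrl
}
```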

View File

@@ -4,6 +4,7 @@ import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod' import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid' import { checkInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request' import { generateRequestId } from '@/lib/core/utils/request'
import { RawFileInputSchema } from '@/lib/uploads/utils/file-schemas'
import { processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils' import { processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server' import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
@@ -17,7 +18,7 @@ const S3PutObjectSchema = z.object({
region: z.string().min(1, 'Region is required'), region: z.string().min(1, 'Region is required'),
bucketName: z.string().min(1, 'Bucket name is required'), bucketName: z.string().min(1, 'Bucket name is required'),
objectKey: z.string().min(1, 'Object key is required'), objectKey: z.string().min(1, 'Object key is required'),
file: z.any().optional().nullable(), file: RawFileInputSchema.optional().nullable(),
content: z.string().optional().nullable(), content: z.string().optional().nullable(),
contentType: z.string().optional().nullable(), contentType: z.string().optional().nullable(),
acl: z.string().optional().nullable(), acl: z.string().optional().nullable(),

View File

@@ -0,0 +1,188 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
export const dynamic = 'force-dynamic'
const logger = createLogger('SendGridSendMailAPI')
const SendGridSendMailSchema = z.object({
apiKey: z.string().min(1, 'API key is required'),
from: z.string().min(1, 'From email is required'),
fromName: z.string().optional().nullable(),
to: z.string().min(1, 'To email is required'),
toName: z.string().optional().nullable(),
subject: z.string().optional().nullable(),
content: z.string().optional().nullable(),
contentType: z.string().optional().nullable(),
cc: z.string().optional().nullable(),
bcc: z.string().optional().nullable(),
replyTo: z.string().optional().nullable(),
replyToName: z.string().optional().nullable(),
templateId: z.string().optional().nullable(),
dynamicTemplateData: z.any().optional().nullable(),
attachments: RawFileInputArraySchema.optional().nullable(),
})
export async function POST(request: NextRequest) {
const requestId = generateRequestId()
try {
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
if (!authResult.success) {
logger.warn(`[${requestId}] Unauthorized SendGrid send attempt: ${authResult.error}`)
return NextResponse.json(
{ success: false, error: authResult.error || 'Authentication required' },
{ status: 401 }
)
}
logger.info(`[${requestId}] Authenticated SendGrid send request via ${authResult.authType}`)
const body = await request.json()
const validatedData = SendGridSendMailSchema.parse(body)
logger.info(`[${requestId}] Sending SendGrid email`, {
to: validatedData.to,
subject: validatedData.subject || '(template)',
hasAttachments: !!(validatedData.attachments && validatedData.attachments.length > 0),
attachmentCount: validatedData.attachments?.length || 0,
})
// Build personalizations
const personalizations: Record<string, unknown> = {
to: [
{ email: validatedData.to, ...(validatedData.toName && { name: validatedData.toName }) },
],
}
if (validatedData.cc) {
personalizations.cc = [{ email: validatedData.cc }]
}
if (validatedData.bcc) {
personalizations.bcc = [{ email: validatedData.bcc }]
}
if (validatedData.templateId && validatedData.dynamicTemplateData) {
personalizations.dynamic_template_data =
typeof validatedData.dynamicTemplateData === 'string'
? JSON.parse(validatedData.dynamicTemplateData)
: validatedData.dynamicTemplateData
}
// Build mail body
const mailBody: Record<string, unknown> = {
personalizations: [personalizations],
from: {
email: validatedData.from,
...(validatedData.fromName && { name: validatedData.fromName }),
},
subject: validatedData.subject,
}
if (validatedData.templateId) {
mailBody.template_id = validatedData.templateId
} else {
mailBody.content = [
{
type: validatedData.contentType || 'text/plain',
value: validatedData.content,
},
]
}
if (validatedData.replyTo) {
mailBody.reply_to = {
email: validatedData.replyTo,
...(validatedData.replyToName && { name: validatedData.replyToName }),
}
}
// Process attachments from UserFile objects
if (validatedData.attachments && validatedData.attachments.length > 0) {
const rawAttachments = validatedData.attachments
logger.info(`[${requestId}] Processing ${rawAttachments.length} attachment(s)`)
const userFiles = processFilesToUserFiles(rawAttachments, requestId, logger)
if (userFiles.length > 0) {
const sendGridAttachments = await Promise.all(
userFiles.map(async (file) => {
try {
logger.info(
`[${requestId}] Downloading attachment: ${file.name} (${file.size} bytes)`
)
const buffer = await downloadFileFromStorage(file, requestId, logger)
return {
content: buffer.toString('base64'),
filename: file.name,
type: file.type || 'application/octet-stream',
disposition: 'attachment',
}
} catch (error) {
logger.error(`[${requestId}] Failed to download attachment ${file.name}:`, error)
throw new Error(
`Failed to download attachment "${file.name}": ${error instanceof Error ? error.message : 'Unknown error'}`
)
}
})
)
mailBody.attachments = sendGridAttachments
}
}
// Send to SendGrid
const response = await fetch('https://api.sendgrid.com/v3/mail/send', {
method: 'POST',
headers: {
Authorization: `Bearer ${validatedData.apiKey}`,
'Content-Type': 'application/json',
},
body: JSON.stringify(mailBody),
})
if (!response.ok) {
const errorData = await response.json().catch(() => ({}))
const errorMessage =
errorData.errors?.[0]?.message || errorData.message || 'Failed to send email'
logger.error(`[${requestId}] SendGrid API error:`, { status: response.status, errorData })
return NextResponse.json({ success: false, error: errorMessage }, { status: response.status })
}
const messageId = response.headers.get('X-Message-Id')
logger.info(`[${requestId}] Email sent successfully`, { messageId })
return NextResponse.json({
success: true,
output: {
success: true,
messageId: messageId || undefined,
to: validatedData.to,
subject: validatedData.subject || '',
},
})
} catch (error) {
if (error instanceof z.ZodError) {
logger.warn(`[${requestId}] Validation error:`, error.errors)
return NextResponse.json(
{ success: false, error: error.errors[0]?.message || 'Validation failed' },
{ status: 400 }
)
}
logger.error(`[${requestId}] Unexpected error:`, error)
return NextResponse.json(
{ success: false, error: error instanceof Error ? error.message : 'Unknown error' },
{ status: 500 }
)
}
}

View File

@@ -4,6 +4,7 @@ import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod' import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid' import { checkInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request' import { generateRequestId } from '@/lib/core/utils/request'
import { getFileExtension, getMimeTypeFromExtension } from '@/lib/uploads/utils/file-utils'
import { createSftpConnection, getSftp, isPathSafe, sanitizePath } from '@/app/api/tools/sftp/utils' import { createSftpConnection, getSftp, isPathSafe, sanitizePath } from '@/app/api/tools/sftp/utils'
export const dynamic = 'force-dynamic' export const dynamic = 'force-dynamic'
@@ -111,6 +112,8 @@ export async function POST(request: NextRequest) {
const buffer = Buffer.concat(chunks) const buffer = Buffer.concat(chunks)
const fileName = path.basename(remotePath) const fileName = path.basename(remotePath)
const extension = getFileExtension(fileName)
const mimeType = getMimeTypeFromExtension(extension)
let content: string let content: string
if (params.encoding === 'base64') { if (params.encoding === 'base64') {
@@ -124,6 +127,12 @@ export async function POST(request: NextRequest) {
return NextResponse.json({ return NextResponse.json({
success: true, success: true,
fileName, fileName,
file: {
name: fileName,
mimeType,
data: buffer.toString('base64'),
size: buffer.length,
},
content, content,
size: buffer.length, size: buffer.length,
encoding: params.encoding, encoding: params.encoding,
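
The SFTP and SSH download responses now include a structured `file` object alongside the raw `content`, using the same `{ name, mimeType, data, size }` shape the Slack and Telegram routes later emit (typed as `ToolFileData` in `@/tools/types`). A small sketch of assembling that shape from a downloaded buffer, reusing the MIME helpers imported above:

```typescript
import { getFileExtension, getMimeTypeFromExtension } from '@/lib/uploads/utils/file-utils'
import type { ToolFileData } from '@/tools/types'

// Build the structured file output used across the download routes in this
// change; `data` carries the payload as base64 for binary safety.
function toToolFileData(fileName: string, buffer: Buffer): ToolFileData {
  const mimeType = getMimeTypeFromExtension(getFileExtension(fileName))
  return {
    name: fileName,
    mimeType,
    data: buffer.toString('base64'),
    size: buffer.length,
  }
}
```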

View File

@@ -3,6 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod' import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid' import { checkInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request' import { generateRequestId } from '@/lib/core/utils/request'
import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils' import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server' import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
import { import {
@@ -26,14 +27,7 @@ const UploadSchema = z.object({
privateKey: z.string().nullish(), privateKey: z.string().nullish(),
passphrase: z.string().nullish(), passphrase: z.string().nullish(),
remotePath: z.string().min(1, 'Remote path is required'), remotePath: z.string().min(1, 'Remote path is required'),
files: z files: RawFileInputArraySchema.optional().nullable(),
.union([z.array(z.any()), z.string(), z.number(), z.null(), z.undefined()])
.transform((val) => {
if (Array.isArray(val)) return val
if (val === null || val === undefined || val === '') return undefined
return undefined
})
.nullish(),
fileContent: z.string().nullish(), fileContent: z.string().nullish(),
fileName: z.string().nullish(), fileName: z.string().nullish(),
overwrite: z.boolean().default(true), overwrite: z.boolean().default(true),

View File

@@ -2,9 +2,12 @@ import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server' import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod' import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid' import { checkInternalAuth } from '@/lib/auth/hybrid'
import { secureFetchWithValidation } from '@/lib/core/security/input-validation.server'
import { generateRequestId } from '@/lib/core/utils/request' import { generateRequestId } from '@/lib/core/utils/request'
import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils' import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server' import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
import type { MicrosoftGraphDriveItem } from '@/tools/onedrive/types'
export const dynamic = 'force-dynamic' export const dynamic = 'force-dynamic'
@@ -16,7 +19,7 @@ const SharepointUploadSchema = z.object({
driveId: z.string().optional().nullable(), driveId: z.string().optional().nullable(),
folderPath: z.string().optional().nullable(), folderPath: z.string().optional().nullable(),
fileName: z.string().optional().nullable(), fileName: z.string().optional().nullable(),
files: z.array(z.any()).optional().nullable(), files: RawFileInputArraySchema.optional().nullable(),
}) })
export async function POST(request: NextRequest) { export async function POST(request: NextRequest) {
@@ -79,18 +82,23 @@ export async function POST(request: NextRequest) {
let effectiveDriveId = validatedData.driveId let effectiveDriveId = validatedData.driveId
if (!effectiveDriveId) { if (!effectiveDriveId) {
logger.info(`[${requestId}] No driveId provided, fetching default drive for site`) logger.info(`[${requestId}] No driveId provided, fetching default drive for site`)
const driveResponse = await fetch( const driveUrl = `https://graph.microsoft.com/v1.0/sites/${validatedData.siteId}/drive`
`https://graph.microsoft.com/v1.0/sites/${validatedData.siteId}/drive`, const driveResponse = await secureFetchWithValidation(
driveUrl,
{ {
method: 'GET',
headers: { headers: {
Authorization: `Bearer ${validatedData.accessToken}`, Authorization: `Bearer ${validatedData.accessToken}`,
Accept: 'application/json', Accept: 'application/json',
}, },
} },
'driveUrl'
) )
if (!driveResponse.ok) { if (!driveResponse.ok) {
const errorData = await driveResponse.json().catch(() => ({})) const errorData = (await driveResponse.json().catch(() => ({}))) as {
error?: { message?: string }
}
logger.error(`[${requestId}] Failed to get default drive:`, errorData) logger.error(`[${requestId}] Failed to get default drive:`, errorData)
return NextResponse.json( return NextResponse.json(
{ {
@@ -101,7 +109,7 @@ export async function POST(request: NextRequest) {
) )
} }
const driveData = await driveResponse.json() const driveData = (await driveResponse.json()) as { id: string }
effectiveDriveId = driveData.id effectiveDriveId = driveData.id
logger.info(`[${requestId}] Using default drive: ${effectiveDriveId}`) logger.info(`[${requestId}] Using default drive: ${effectiveDriveId}`)
} }
@@ -145,34 +153,87 @@ export async function POST(request: NextRequest) {
logger.info(`[${requestId}] Uploading to: ${uploadUrl}`) logger.info(`[${requestId}] Uploading to: ${uploadUrl}`)
const uploadResponse = await fetch(uploadUrl, { const uploadResponse = await secureFetchWithValidation(
method: 'PUT', uploadUrl,
headers: { {
Authorization: `Bearer ${validatedData.accessToken}`, method: 'PUT',
'Content-Type': userFile.type || 'application/octet-stream', headers: {
Authorization: `Bearer ${validatedData.accessToken}`,
'Content-Type': userFile.type || 'application/octet-stream',
},
body: buffer,
}, },
body: new Uint8Array(buffer), 'uploadUrl'
}) )
if (!uploadResponse.ok) { if (!uploadResponse.ok) {
const errorData = await uploadResponse.json().catch(() => ({})) const errorData = await uploadResponse.json().catch(() => ({}))
logger.error(`[${requestId}] Failed to upload file ${fileName}:`, errorData) logger.error(`[${requestId}] Failed to upload file ${fileName}:`, errorData)
if (uploadResponse.status === 409) { if (uploadResponse.status === 409) {
logger.warn(`[${requestId}] File ${fileName} already exists, attempting to replace`) // File exists - retry with conflict behavior set to replace
logger.warn(`[${requestId}] File ${fileName} already exists, retrying with replace`)
const replaceUrl = `${uploadUrl}?@microsoft.graph.conflictBehavior=replace`
const replaceResponse = await secureFetchWithValidation(
replaceUrl,
{
method: 'PUT',
headers: {
Authorization: `Bearer ${validatedData.accessToken}`,
'Content-Type': userFile.type || 'application/octet-stream',
},
body: buffer,
},
'replaceUrl'
)
if (!replaceResponse.ok) {
const replaceErrorData = (await replaceResponse.json().catch(() => ({}))) as {
error?: { message?: string }
}
logger.error(`[${requestId}] Failed to replace file ${fileName}:`, replaceErrorData)
return NextResponse.json(
{
success: false,
error: replaceErrorData.error?.message || `Failed to replace file: ${fileName}`,
},
{ status: replaceResponse.status }
)
}
const replaceData = (await replaceResponse.json()) as {
id: string
name: string
webUrl: string
size: number
createdDateTime: string
lastModifiedDateTime: string
}
logger.info(`[${requestId}] File replaced successfully: ${fileName}`)
uploadedFiles.push({
id: replaceData.id,
name: replaceData.name,
webUrl: replaceData.webUrl,
size: replaceData.size,
createdDateTime: replaceData.createdDateTime,
lastModifiedDateTime: replaceData.lastModifiedDateTime,
})
continue continue
} }
return NextResponse.json( return NextResponse.json(
{ {
success: false, success: false,
error: errorData.error?.message || `Failed to upload file: ${fileName}`, error:
(errorData as { error?: { message?: string } }).error?.message ||
`Failed to upload file: ${fileName}`,
}, },
{ status: uploadResponse.status } { status: uploadResponse.status }
) )
} }
const uploadData = await uploadResponse.json() const uploadData = (await uploadResponse.json()) as MicrosoftGraphDriveItem
logger.info(`[${requestId}] File uploaded successfully: ${fileName}`) logger.info(`[${requestId}] File uploaded successfully: ${fileName}`)
uploadedFiles.push({ uploadedFiles.push({

View File

@@ -0,0 +1,170 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import {
secureFetchWithPinnedIP,
validateUrlWithDNS,
} from '@/lib/core/security/input-validation.server'
import { generateRequestId } from '@/lib/core/utils/request'
export const dynamic = 'force-dynamic'
const logger = createLogger('SlackDownloadAPI')
const SlackDownloadSchema = z.object({
accessToken: z.string().min(1, 'Access token is required'),
fileId: z.string().min(1, 'File ID is required'),
fileName: z.string().optional().nullable(),
})
export async function POST(request: NextRequest) {
const requestId = generateRequestId()
try {
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
if (!authResult.success) {
logger.warn(`[${requestId}] Unauthorized Slack download attempt: ${authResult.error}`)
return NextResponse.json(
{
success: false,
error: authResult.error || 'Authentication required',
},
{ status: 401 }
)
}
logger.info(`[${requestId}] Authenticated Slack download request via ${authResult.authType}`, {
userId: authResult.userId,
})
const body = await request.json()
const validatedData = SlackDownloadSchema.parse(body)
const { accessToken, fileId, fileName } = validatedData
logger.info(`[${requestId}] Getting file info from Slack`, { fileId })
const infoResponse = await fetch(`https://slack.com/api/files.info?file=${fileId}`, {
method: 'GET',
headers: {
Authorization: `Bearer ${accessToken}`,
},
})
if (!infoResponse.ok) {
const errorDetails = await infoResponse.json().catch(() => ({}))
logger.error(`[${requestId}] Failed to get file info from Slack`, {
status: infoResponse.status,
statusText: infoResponse.statusText,
error: errorDetails,
})
return NextResponse.json(
{
success: false,
error: errorDetails.error || 'Failed to get file info',
},
{ status: 400 }
)
}
const data = await infoResponse.json()
if (!data.ok) {
logger.error(`[${requestId}] Slack API returned error`, { error: data.error })
return NextResponse.json(
{
success: false,
error: data.error || 'Slack API error',
},
{ status: 400 }
)
}
const file = data.file
const resolvedFileName = fileName || file.name || 'download'
const mimeType = file.mimetype || 'application/octet-stream'
const urlPrivate = file.url_private
if (!urlPrivate) {
return NextResponse.json(
{
success: false,
error: 'File does not have a download URL',
},
{ status: 400 }
)
}
const urlValidation = await validateUrlWithDNS(urlPrivate, 'urlPrivate')
if (!urlValidation.isValid) {
return NextResponse.json(
{
success: false,
error: urlValidation.error,
},
{ status: 400 }
)
}
logger.info(`[${requestId}] Downloading file from Slack`, {
fileId,
fileName: resolvedFileName,
mimeType,
})
const downloadResponse = await secureFetchWithPinnedIP(urlPrivate, urlValidation.resolvedIP!, {
headers: {
Authorization: `Bearer ${accessToken}`,
},
})
if (!downloadResponse.ok) {
logger.error(`[${requestId}] Failed to download file content`, {
status: downloadResponse.status,
statusText: downloadResponse.statusText,
})
return NextResponse.json(
{
success: false,
error: 'Failed to download file content',
},
{ status: 400 }
)
}
const arrayBuffer = await downloadResponse.arrayBuffer()
const fileBuffer = Buffer.from(arrayBuffer)
logger.info(`[${requestId}] File downloaded successfully`, {
fileId,
name: resolvedFileName,
size: fileBuffer.length,
mimeType,
})
const base64Data = fileBuffer.toString('base64')
return NextResponse.json({
success: true,
output: {
file: {
name: resolvedFileName,
mimeType,
data: base64Data,
size: fileBuffer.length,
},
},
})
} catch (error) {
logger.error(`[${requestId}] Error downloading Slack file:`, error)
return NextResponse.json(
{
success: false,
error: error instanceof Error ? error.message : 'Unknown error occurred',
},
{ status: 500 }
)
}
}

View File

@@ -3,6 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod' import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid' import { checkInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request' import { generateRequestId } from '@/lib/core/utils/request'
import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
import { sendSlackMessage } from '../utils' import { sendSlackMessage } from '../utils'
export const dynamic = 'force-dynamic' export const dynamic = 'force-dynamic'
@@ -16,7 +17,7 @@ const SlackSendMessageSchema = z
userId: z.string().optional().nullable(), userId: z.string().optional().nullable(),
text: z.string().min(1, 'Message text is required'), text: z.string().min(1, 'Message text is required'),
thread_ts: z.string().optional().nullable(), thread_ts: z.string().optional().nullable(),
files: z.array(z.any()).optional().nullable(), files: RawFileInputArraySchema.optional().nullable(),
}) })
.refine((data) => data.channel || data.userId, { .refine((data) => data.channel || data.userId, {
message: 'Either channel or userId is required', message: 'Either channel or userId is required',

View File

@@ -1,6 +1,8 @@
import type { Logger } from '@sim/logger' import type { Logger } from '@sim/logger'
import { secureFetchWithValidation } from '@/lib/core/security/input-validation.server'
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils' import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server' import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
import type { ToolFileData } from '@/tools/types'
/** /**
* Sends a message to a Slack channel using chat.postMessage * Sends a message to a Slack channel using chat.postMessage
@@ -70,9 +72,10 @@ export async function uploadFilesToSlack(
accessToken: string, accessToken: string,
requestId: string, requestId: string,
logger: Logger logger: Logger
): Promise<string[]> { ): Promise<{ fileIds: string[]; files: ToolFileData[] }> {
const userFiles = processFilesToUserFiles(files, requestId, logger) const userFiles = processFilesToUserFiles(files, requestId, logger)
const uploadedFileIds: string[] = [] const uploadedFileIds: string[] = []
const uploadedFiles: ToolFileData[] = []
for (const userFile of userFiles) { for (const userFile of userFiles) {
logger.info(`[${requestId}] Uploading file: ${userFile.name}`) logger.info(`[${requestId}] Uploading file: ${userFile.name}`)
@@ -100,10 +103,14 @@ export async function uploadFilesToSlack(
logger.info(`[${requestId}] Got upload URL for ${userFile.name}, file_id: ${urlData.file_id}`) logger.info(`[${requestId}] Got upload URL for ${userFile.name}, file_id: ${urlData.file_id}`)
const uploadResponse = await fetch(urlData.upload_url, { const uploadResponse = await secureFetchWithValidation(
method: 'POST', urlData.upload_url,
body: new Uint8Array(buffer), {
}) method: 'POST',
body: buffer,
},
'uploadUrl'
)
if (!uploadResponse.ok) { if (!uploadResponse.ok) {
logger.error(`[${requestId}] Failed to upload file data: ${uploadResponse.status}`) logger.error(`[${requestId}] Failed to upload file data: ${uploadResponse.status}`)
@@ -112,9 +119,16 @@ export async function uploadFilesToSlack(
logger.info(`[${requestId}] File data uploaded successfully`) logger.info(`[${requestId}] File data uploaded successfully`)
uploadedFileIds.push(urlData.file_id) uploadedFileIds.push(urlData.file_id)
// Only add to uploadedFiles after successful upload to keep arrays in sync
uploadedFiles.push({
name: userFile.name,
mimeType: userFile.type || 'application/octet-stream',
data: buffer.toString('base64'),
size: buffer.length,
})
} }
return uploadedFileIds return { fileIds: uploadedFileIds, files: uploadedFiles }
} }
/** /**
@@ -124,7 +138,8 @@ export async function completeSlackFileUpload(
uploadedFileIds: string[], uploadedFileIds: string[],
channel: string, channel: string,
text: string, text: string,
accessToken: string accessToken: string,
threadTs?: string | null
): Promise<{ ok: boolean; files?: any[]; error?: string }> { ): Promise<{ ok: boolean; files?: any[]; error?: string }> {
const response = await fetch('https://slack.com/api/files.completeUploadExternal', { const response = await fetch('https://slack.com/api/files.completeUploadExternal', {
method: 'POST', method: 'POST',
@@ -136,6 +151,7 @@ export async function completeSlackFileUpload(
files: uploadedFileIds.map((id) => ({ id })), files: uploadedFileIds.map((id) => ({ id })),
channel_id: channel, channel_id: channel,
initial_comment: text, initial_comment: text,
...(threadTs && { thread_ts: threadTs }),
}), }),
}) })
@@ -217,7 +233,13 @@ export async function sendSlackMessage(
logger: Logger logger: Logger
): Promise<{ ): Promise<{
success: boolean success: boolean
output?: { message: any; ts: string; channel: string; fileCount?: number } output?: {
message: any
ts: string
channel: string
fileCount?: number
files?: ToolFileData[]
}
error?: string error?: string
}> { }> {
const { accessToken, text, threadTs, files } = params const { accessToken, text, threadTs, files } = params
@@ -249,10 +271,15 @@ export async function sendSlackMessage(
// Process files // Process files
logger.info(`[${requestId}] Processing ${files.length} file(s)`) logger.info(`[${requestId}] Processing ${files.length} file(s)`)
const uploadedFileIds = await uploadFilesToSlack(files, accessToken, requestId, logger) const { fileIds, files: uploadedFiles } = await uploadFilesToSlack(
files,
accessToken,
requestId,
logger
)
// No valid files uploaded - send text-only // No valid files uploaded - send text-only
if (uploadedFileIds.length === 0) { if (fileIds.length === 0) {
logger.warn(`[${requestId}] No valid files to upload, sending text-only message`) logger.warn(`[${requestId}] No valid files to upload, sending text-only message`)
const data = await postSlackMessage(accessToken, channel, text, threadTs) const data = await postSlackMessage(accessToken, channel, text, threadTs)
@@ -264,8 +291,8 @@ export async function sendSlackMessage(
return { success: true, output: formatMessageSuccessResponse(data, text) } return { success: true, output: formatMessageSuccessResponse(data, text) }
} }
// Complete file upload // Complete file upload with thread support
const completeData = await completeSlackFileUpload(uploadedFileIds, channel, text, accessToken) const completeData = await completeSlackFileUpload(fileIds, channel, text, accessToken, threadTs)
if (!completeData.ok) { if (!completeData.ok) {
logger.error(`[${requestId}] Failed to complete upload:`, completeData.error) logger.error(`[${requestId}] Failed to complete upload:`, completeData.error)
@@ -282,7 +309,8 @@ export async function sendSlackMessage(
message: fileMessage, message: fileMessage,
ts: fileMessage.ts, ts: fileMessage.ts,
channel, channel,
fileCount: uploadedFileIds.length, fileCount: fileIds.length,
files: uploadedFiles,
}, },
} }
} }
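
The upload helper above now returns the uploaded bytes alongside the Slack file IDs, and `completeSlackFileUpload` gained a `threadTs` parameter so file posts can land in a thread. For orientation, a compressed sketch of the three-step external upload flow these helpers implement; the first call isn't visible in the hunks (only its `upload_url` / `file_id` result is), and the initial comment, SSRF wrappers, and error handling are omitted here:

```typescript
// Compressed sketch of Slack's external upload flow as used above:
// 1) ask for an upload URL, 2) POST the bytes, 3) complete the upload
// into a channel (optionally threaded).
async function uploadOneFileToSlack(
  accessToken: string,
  channel: string,
  fileName: string,
  buffer: Buffer,
  threadTs?: string
): Promise<string> {
  const urlRes = await fetch(
    `https://slack.com/api/files.getUploadURLExternal?filename=${encodeURIComponent(fileName)}&length=${buffer.length}`,
    { headers: { Authorization: `Bearer ${accessToken}` } }
  )
  const urlData = await urlRes.json() // { ok, upload_url, file_id }

  await fetch(urlData.upload_url, { method: 'POST', body: new Uint8Array(buffer) })

  await fetch('https://slack.com/api/files.completeUploadExternal', {
    method: 'POST',
    headers: { Authorization: `Bearer ${accessToken}`, 'Content-Type': 'application/json' },
    body: JSON.stringify({
      files: [{ id: urlData.file_id }],
      channel_id: channel,
      ...(threadTs && { thread_ts: threadTs }),
    }),
  })

  return urlData.file_id
}
```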

View File

@@ -4,6 +4,7 @@ import nodemailer from 'nodemailer'
import { z } from 'zod' import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid' import { checkInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request' import { generateRequestId } from '@/lib/core/utils/request'
import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils' import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server' import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
@@ -28,7 +29,7 @@ const SmtpSendSchema = z.object({
cc: z.string().optional().nullable(), cc: z.string().optional().nullable(),
bcc: z.string().optional().nullable(), bcc: z.string().optional().nullable(),
replyTo: z.string().optional().nullable(), replyTo: z.string().optional().nullable(),
attachments: z.array(z.any()).optional().nullable(), attachments: RawFileInputArraySchema.optional().nullable(),
}) })
export async function POST(request: NextRequest) { export async function POST(request: NextRequest) {

View File

@@ -5,6 +5,7 @@ import { type NextRequest, NextResponse } from 'next/server'
import type { Client, SFTPWrapper } from 'ssh2' import type { Client, SFTPWrapper } from 'ssh2'
import { z } from 'zod' import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid' import { checkInternalAuth } from '@/lib/auth/hybrid'
import { getFileExtension, getMimeTypeFromExtension } from '@/lib/uploads/utils/file-utils'
import { createSSHConnection, sanitizePath } from '@/app/api/tools/ssh/utils' import { createSSHConnection, sanitizePath } from '@/app/api/tools/ssh/utils'
const logger = createLogger('SSHDownloadFileAPI') const logger = createLogger('SSHDownloadFileAPI')
@@ -79,6 +80,16 @@ export async function POST(request: NextRequest) {
}) })
}) })
// Check file size limit (50MB to prevent memory exhaustion)
const maxSize = 50 * 1024 * 1024
if (stats.size > maxSize) {
const sizeMB = (stats.size / (1024 * 1024)).toFixed(2)
return NextResponse.json(
{ error: `File size (${sizeMB}MB) exceeds download limit of 50MB` },
{ status: 400 }
)
}
// Read file content // Read file content
const content = await new Promise<Buffer>((resolve, reject) => { const content = await new Promise<Buffer>((resolve, reject) => {
const chunks: Buffer[] = [] const chunks: Buffer[] = []
@@ -96,6 +107,8 @@ export async function POST(request: NextRequest) {
}) })
const fileName = path.basename(remotePath) const fileName = path.basename(remotePath)
const extension = getFileExtension(fileName)
const mimeType = getMimeTypeFromExtension(extension)
// Encode content as base64 for binary safety // Encode content as base64 for binary safety
const base64Content = content.toString('base64') const base64Content = content.toString('base64')
@@ -104,6 +117,12 @@ export async function POST(request: NextRequest) {
return NextResponse.json({ return NextResponse.json({
downloaded: true, downloaded: true,
file: {
name: fileName,
mimeType,
data: base64Content,
size: stats.size,
},
content: base64Content, content: base64Content,
fileName: fileName, fileName: fileName,
remotePath: remotePath, remotePath: remotePath,

View File

@@ -3,6 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod' import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid' import { checkInternalAuth } from '@/lib/auth/hybrid'
import { env } from '@/lib/core/config/env' import { env } from '@/lib/core/config/env'
import { validateUrlWithDNS } from '@/lib/core/security/input-validation.server'
import { isSensitiveKey, REDACTED_MARKER } from '@/lib/core/security/redaction' import { isSensitiveKey, REDACTED_MARKER } from '@/lib/core/security/redaction'
import { ensureZodObject, normalizeUrl } from '@/app/api/tools/stagehand/utils' import { ensureZodObject, normalizeUrl } from '@/app/api/tools/stagehand/utils'
@@ -123,6 +124,10 @@ export async function POST(request: NextRequest) {
const variablesObject = processVariables(params.variables) const variablesObject = processVariables(params.variables)
const startUrl = normalizeUrl(rawStartUrl) const startUrl = normalizeUrl(rawStartUrl)
const urlValidation = await validateUrlWithDNS(startUrl, 'startUrl')
if (!urlValidation.isValid) {
return NextResponse.json({ error: urlValidation.error }, { status: 400 })
}
logger.info('Starting Stagehand agent process', { logger.info('Starting Stagehand agent process', {
rawStartUrl, rawStartUrl,

View File

@@ -3,6 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod' import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid' import { checkInternalAuth } from '@/lib/auth/hybrid'
import { env } from '@/lib/core/config/env' import { env } from '@/lib/core/config/env'
import { validateUrlWithDNS } from '@/lib/core/security/input-validation.server'
import { ensureZodObject, normalizeUrl } from '@/app/api/tools/stagehand/utils' import { ensureZodObject, normalizeUrl } from '@/app/api/tools/stagehand/utils'
const logger = createLogger('StagehandExtractAPI') const logger = createLogger('StagehandExtractAPI')
@@ -51,6 +52,10 @@ export async function POST(request: NextRequest) {
const params = validationResult.data const params = validationResult.data
const { url: rawUrl, instruction, selector, provider, apiKey, schema } = params const { url: rawUrl, instruction, selector, provider, apiKey, schema } = params
const url = normalizeUrl(rawUrl) const url = normalizeUrl(rawUrl)
const urlValidation = await validateUrlWithDNS(url, 'url')
if (!urlValidation.isValid) {
return NextResponse.json({ error: urlValidation.error }, { status: 400 })
}
logger.info('Starting Stagehand extraction process', { logger.info('Starting Stagehand extraction process', {
rawUrl, rawUrl,

View File

@@ -2,7 +2,15 @@ import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server' import { type NextRequest, NextResponse } from 'next/server'
import { extractAudioFromVideo, isVideoFile } from '@/lib/audio/extractor' import { extractAudioFromVideo, isVideoFile } from '@/lib/audio/extractor'
import { checkInternalAuth } from '@/lib/auth/hybrid' import { checkInternalAuth } from '@/lib/auth/hybrid'
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server' import {
secureFetchWithPinnedIP,
validateUrlWithDNS,
} from '@/lib/core/security/input-validation.server'
import { getMimeTypeFromExtension, isInternalFileUrl } from '@/lib/uploads/utils/file-utils'
import {
downloadFileFromStorage,
resolveInternalFileUrl,
} from '@/lib/uploads/utils/file-utils.server'
import type { UserFile } from '@/executor/types' import type { UserFile } from '@/executor/types'
import type { TranscriptSegment } from '@/tools/stt/types' import type { TranscriptSegment } from '@/tools/stt/types'
@@ -45,6 +53,7 @@ export async function POST(request: NextRequest) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
} }
const userId = authResult.userId
const body: SttRequestBody = await request.json() const body: SttRequestBody = await request.json()
const { const {
provider, provider,
@@ -72,13 +81,25 @@ export async function POST(request: NextRequest) {
let audioMimeType: string let audioMimeType: string
if (body.audioFile) { if (body.audioFile) {
if (Array.isArray(body.audioFile) && body.audioFile.length !== 1) {
return NextResponse.json({ error: 'audioFile must be a single file' }, { status: 400 })
}
const file = Array.isArray(body.audioFile) ? body.audioFile[0] : body.audioFile const file = Array.isArray(body.audioFile) ? body.audioFile[0] : body.audioFile
logger.info(`[${requestId}] Processing uploaded file: ${file.name}`) logger.info(`[${requestId}] Processing uploaded file: ${file.name}`)
audioBuffer = await downloadFileFromStorage(file, requestId, logger) audioBuffer = await downloadFileFromStorage(file, requestId, logger)
audioFileName = file.name audioFileName = file.name
audioMimeType = file.type // file.type may be missing if the file came from a block that doesn't preserve it
// Infer from filename extension as fallback
const ext = file.name.split('.').pop()?.toLowerCase() || ''
audioMimeType = file.type || getMimeTypeFromExtension(ext)
} else if (body.audioFileReference) { } else if (body.audioFileReference) {
if (Array.isArray(body.audioFileReference) && body.audioFileReference.length !== 1) {
return NextResponse.json(
{ error: 'audioFileReference must be a single file' },
{ status: 400 }
)
}
const file = Array.isArray(body.audioFileReference) const file = Array.isArray(body.audioFileReference)
? body.audioFileReference[0] ? body.audioFileReference[0]
: body.audioFileReference : body.audioFileReference
@@ -86,18 +107,54 @@ export async function POST(request: NextRequest) {
audioBuffer = await downloadFileFromStorage(file, requestId, logger) audioBuffer = await downloadFileFromStorage(file, requestId, logger)
audioFileName = file.name audioFileName = file.name
audioMimeType = file.type
const ext = file.name.split('.').pop()?.toLowerCase() || ''
audioMimeType = file.type || getMimeTypeFromExtension(ext)
} else if (body.audioUrl) { } else if (body.audioUrl) {
logger.info(`[${requestId}] Downloading from URL: ${body.audioUrl}`) logger.info(`[${requestId}] Downloading from URL: ${body.audioUrl}`)
const response = await fetch(body.audioUrl) let audioUrl = body.audioUrl.trim()
if (audioUrl.startsWith('/') && !isInternalFileUrl(audioUrl)) {
return NextResponse.json(
{
error: 'Invalid file path. Only uploaded files are supported for internal paths.',
},
{ status: 400 }
)
}
if (isInternalFileUrl(audioUrl)) {
if (!userId) {
return NextResponse.json(
{ error: 'Authentication required for internal file access' },
{ status: 401 }
)
}
const resolution = await resolveInternalFileUrl(audioUrl, userId, requestId, logger)
if (resolution.error) {
return NextResponse.json(
{ error: resolution.error.message },
{ status: resolution.error.status }
)
}
audioUrl = resolution.fileUrl || audioUrl
}
const urlValidation = await validateUrlWithDNS(audioUrl, 'audioUrl')
if (!urlValidation.isValid) {
return NextResponse.json({ error: urlValidation.error }, { status: 400 })
}
const response = await secureFetchWithPinnedIP(audioUrl, urlValidation.resolvedIP!, {
method: 'GET',
})
if (!response.ok) { if (!response.ok) {
throw new Error(`Failed to download audio from URL: ${response.statusText}`) throw new Error(`Failed to download audio from URL: ${response.statusText}`)
} }
const arrayBuffer = await response.arrayBuffer() const arrayBuffer = await response.arrayBuffer()
audioBuffer = Buffer.from(arrayBuffer) audioBuffer = Buffer.from(arrayBuffer)
audioFileName = body.audioUrl.split('/').pop() || 'audio_file' audioFileName = audioUrl.split('/').pop() || 'audio_file'
audioMimeType = response.headers.get('content-type') || 'audio/mpeg' audioMimeType = response.headers.get('content-type') || 'audio/mpeg'
} else { } else {
return NextResponse.json( return NextResponse.json(
@@ -149,7 +206,9 @@ export async function POST(request: NextRequest) {
translateToEnglish, translateToEnglish,
model, model,
body.prompt, body.prompt,
body.temperature body.temperature,
audioMimeType,
audioFileName
) )
transcript = result.transcript transcript = result.transcript
segments = result.segments segments = result.segments
@@ -162,7 +221,8 @@ export async function POST(request: NextRequest) {
language, language,
timestamps, timestamps,
diarization, diarization,
model model,
audioMimeType
) )
transcript = result.transcript transcript = result.transcript
segments = result.segments segments = result.segments
@@ -252,7 +312,9 @@ async function transcribeWithWhisper(
translate?: boolean, translate?: boolean,
model?: string, model?: string,
prompt?: string, prompt?: string,
temperature?: number temperature?: number,
mimeType?: string,
fileName?: string
): Promise<{ ): Promise<{
transcript: string transcript: string
segments?: TranscriptSegment[] segments?: TranscriptSegment[]
@@ -261,8 +323,11 @@ async function transcribeWithWhisper(
}> { }> {
const formData = new FormData() const formData = new FormData()
const blob = new Blob([new Uint8Array(audioBuffer)], { type: 'audio/mpeg' }) // Use actual MIME type and filename if provided
formData.append('file', blob, 'audio.mp3') const actualMimeType = mimeType || 'audio/mpeg'
const actualFileName = fileName || 'audio.mp3'
const blob = new Blob([new Uint8Array(audioBuffer)], { type: actualMimeType })
formData.append('file', blob, actualFileName)
formData.append('model', model || 'whisper-1') formData.append('model', model || 'whisper-1')
if (language && language !== 'auto') { if (language && language !== 'auto') {
@@ -279,10 +344,11 @@ async function transcribeWithWhisper(
formData.append('response_format', 'verbose_json') formData.append('response_format', 'verbose_json')
// OpenAI API uses array notation for timestamp_granularities
if (timestamps === 'word') { if (timestamps === 'word') {
formData.append('timestamp_granularities', 'word') formData.append('timestamp_granularities[]', 'word')
} else if (timestamps === 'sentence') { } else if (timestamps === 'sentence') {
formData.append('timestamp_granularities', 'segment') formData.append('timestamp_granularities[]', 'segment')
} }
const endpoint = translate ? 'translations' : 'transcriptions' const endpoint = translate ? 'translations' : 'transcriptions'
@@ -325,7 +391,8 @@ async function transcribeWithDeepgram(
language?: string, language?: string,
timestamps?: 'none' | 'sentence' | 'word', timestamps?: 'none' | 'sentence' | 'word',
diarization?: boolean, diarization?: boolean,
model?: string model?: string,
mimeType?: string
): Promise<{ ): Promise<{
transcript: string transcript: string
segments?: TranscriptSegment[] segments?: TranscriptSegment[]
@@ -357,7 +424,7 @@ async function transcribeWithDeepgram(
method: 'POST', method: 'POST',
headers: { headers: {
Authorization: `Token ${apiKey}`, Authorization: `Token ${apiKey}`,
'Content-Type': 'audio/mpeg', 'Content-Type': mimeType || 'audio/mpeg',
}, },
body: new Uint8Array(audioBuffer), body: new Uint8Array(audioBuffer),
}) })
@@ -513,7 +580,8 @@ async function transcribeWithAssemblyAI(
audio_url: upload_url, audio_url: upload_url,
} }
if (model === 'best' || model === 'nano') { // AssemblyAI supports 'best', 'slam-1', or 'universal' for speech_model
if (model === 'best' || model === 'slam-1' || model === 'universal') {
transcriptRequest.speech_model = model transcriptRequest.speech_model = model
} }
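
Two small Whisper-related fixes in this hunk are easy to miss: the upload now carries the caller's real MIME type and filename instead of hard-coded `audio/mpeg` / `audio.mp3`, and the verbose-JSON timestamp option uses OpenAI's array-style `timestamp_granularities[]` key. A minimal sketch of building that request body (endpoint and field names as in the OpenAI audio API; the helper itself is illustrative):

```typescript
// Build the multipart body for OpenAI's /v1/audio/transcriptions endpoint,
// preserving the original MIME type and filename and requesting word-level
// timestamps with the array-style key.
function buildWhisperForm(audio: Buffer, fileName: string, mimeType: string): FormData {
  const form = new FormData()
  form.append('file', new Blob([new Uint8Array(audio)], { type: mimeType }), fileName)
  form.append('model', 'whisper-1')
  form.append('response_format', 'verbose_json')
  form.append('timestamp_granularities[]', 'word')
  return form
}
```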

View File

@@ -3,6 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod' import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid' import { checkInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request' import { generateRequestId } from '@/lib/core/utils/request'
import { FileInputSchema } from '@/lib/uploads/utils/file-schemas'
import { processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils' import { processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server' import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
@@ -16,7 +17,7 @@ const SupabaseStorageUploadSchema = z.object({
bucket: z.string().min(1, 'Bucket name is required'), bucket: z.string().min(1, 'Bucket name is required'),
fileName: z.string().min(1, 'File name is required'), fileName: z.string().min(1, 'File name is required'),
path: z.string().optional().nullable(), path: z.string().optional().nullable(),
fileData: z.any(), fileData: FileInputSchema,
contentType: z.string().optional().nullable(), contentType: z.string().optional().nullable(),
upsert: z.boolean().optional().default(false), upsert: z.boolean().optional().default(false),
}) })

View File

@@ -3,6 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
 import { checkInternalAuth } from '@/lib/auth/hybrid'
 import { generateRequestId } from '@/lib/core/utils/request'
+import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
 import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
 import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
 import { convertMarkdownToHTML } from '@/tools/telegram/utils'
@@ -14,7 +15,7 @@ const logger = createLogger('TelegramSendDocumentAPI')
 const TelegramSendDocumentSchema = z.object({
   botToken: z.string().min(1, 'Bot token is required'),
   chatId: z.string().min(1, 'Chat ID is required'),
-  files: z.array(z.any()).optional().nullable(),
+  files: RawFileInputArraySchema.optional().nullable(),
   caption: z.string().optional().nullable(),
 })
@@ -93,6 +94,14 @@ export async function POST(request: NextRequest) {
     logger.info(`[${requestId}] Uploading document: ${userFile.name}`)
     const buffer = await downloadFileFromStorage(userFile, requestId, logger)
+    const filesOutput = [
+      {
+        name: userFile.name,
+        mimeType: userFile.type || 'application/octet-stream',
+        data: buffer.toString('base64'),
+        size: buffer.length,
+      },
+    ]
     logger.info(`[${requestId}] Downloaded file: ${buffer.length} bytes`)
@@ -135,6 +144,7 @@ export async function POST(request: NextRequest) {
       output: {
         message: 'Document sent successfully',
         data: data.result,
+        files: filesOutput,
       },
     })
   } catch (error) {
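Downstream blocks receive the sent attachment through the new `files` output; a brief sketch of consuming one entry (`output` stands in for the block's result and is assumed):

```typescript
// Each entry mirrors the filesOutput shape above: name, mimeType, base64 data, size.
const [doc] = output.files ?? []
if (doc) {
  const buffer = Buffer.from(doc.data, 'base64')
  console.log(`${doc.name} (${doc.mimeType}): ${buffer.length} bytes`)
}
```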

View File

@@ -3,19 +3,18 @@ import { createLogger } from '@sim/logger'
 import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
 import { checkInternalAuth } from '@/lib/auth/hybrid'
-import {
-  validateAwsRegion,
-  validateExternalUrl,
-  validateS3BucketName,
-} from '@/lib/core/security/input-validation'
+import { validateAwsRegion, validateS3BucketName } from '@/lib/core/security/input-validation'
+import {
+  secureFetchWithPinnedIP,
+  validateUrlWithDNS,
+} from '@/lib/core/security/input-validation.server'
 import { generateRequestId } from '@/lib/core/utils/request'
-import { StorageService } from '@/lib/uploads'
-import {
-  extractStorageKey,
-  inferContextFromKey,
-  isInternalFileUrl,
-} from '@/lib/uploads/utils/file-utils'
-import { verifyFileAccess } from '@/app/api/files/authorization'
+import { RawFileInputSchema } from '@/lib/uploads/utils/file-schemas'
+import { isInternalFileUrl, processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
+import {
+  downloadFileFromStorage,
+  resolveInternalFileUrl,
+} from '@/lib/uploads/utils/file-utils.server'

 export const dynamic = 'force-dynamic'
 export const maxDuration = 300 // 5 minutes for large multi-page PDF processing
@@ -35,6 +34,7 @@ const TextractParseSchema = z
     region: z.string().min(1, 'AWS region is required'),
     processingMode: z.enum(['sync', 'async']).optional().default('sync'),
     filePath: z.string().optional(),
+    file: RawFileInputSchema.optional(),
     s3Uri: z.string().optional(),
     featureTypes: z
       .array(z.enum(['TABLES', 'FORMS', 'QUERIES', 'SIGNATURES', 'LAYOUT']))
@@ -50,6 +50,20 @@ const TextractParseSchema = z
         path: ['region'],
       })
     }
+    if (data.processingMode === 'async' && !data.s3Uri) {
+      ctx.addIssue({
+        code: z.ZodIssueCode.custom,
+        message: 'S3 URI is required for multi-page processing (s3://bucket/key)',
+        path: ['s3Uri'],
+      })
+    }
+    if (data.processingMode !== 'async' && !data.file && !data.filePath) {
+      ctx.addIssue({
+        code: z.ZodIssueCode.custom,
+        message: 'File input is required for single-page processing',
+        path: ['filePath'],
+      })
+    }
   })

 function getSignatureKey(
@@ -111,7 +125,14 @@ function signAwsRequest(
 }

 async function fetchDocumentBytes(url: string): Promise<{ bytes: string; contentType: string }> {
-  const response = await fetch(url)
+  const urlValidation = await validateUrlWithDNS(url, 'Document URL')
+  if (!urlValidation.isValid) {
+    throw new Error(urlValidation.error || 'Invalid document URL')
+  }
+  const response = await secureFetchWithPinnedIP(url, urlValidation.resolvedIP!, {
+    method: 'GET',
+  })
   if (!response.ok) {
     throw new Error(`Failed to fetch document: ${response.statusText}`)
   }
@@ -318,8 +339,8 @@ export async function POST(request: NextRequest) {
     logger.info(`[${requestId}] Textract parse request`, {
       processingMode,
-      filePath: validatedData.filePath?.substring(0, 50),
-      s3Uri: validatedData.s3Uri?.substring(0, 50),
+      hasFile: Boolean(validatedData.file),
+      hasS3Uri: Boolean(validatedData.s3Uri),
       featureTypes,
       userId,
     })
@@ -414,90 +435,89 @@ export async function POST(request: NextRequest) {
}) })
} }
if (!validatedData.filePath) { let bytes = ''
return NextResponse.json( let contentType = 'application/octet-stream'
{ let isPdf = false
success: false,
error: 'File path is required for single-page processing',
},
{ status: 400 }
)
}
let fileUrl = validatedData.filePath if (validatedData.file) {
let userFile
const isInternalFilePath = validatedData.filePath && isInternalFileUrl(validatedData.filePath)
if (isInternalFilePath) {
try { try {
const storageKey = extractStorageKey(validatedData.filePath) userFile = processSingleFileToUserFile(validatedData.file, requestId, logger)
const context = inferContextFromKey(storageKey)
const hasAccess = await verifyFileAccess(storageKey, userId, undefined, context, false)
if (!hasAccess) {
logger.warn(`[${requestId}] Unauthorized presigned URL generation attempt`, {
userId,
key: storageKey,
context,
})
return NextResponse.json(
{
success: false,
error: 'File not found',
},
{ status: 404 }
)
}
fileUrl = await StorageService.generatePresignedDownloadUrl(storageKey, context, 5 * 60)
logger.info(`[${requestId}] Generated presigned URL for ${context} file`)
} catch (error) { } catch (error) {
logger.error(`[${requestId}] Failed to generate presigned URL:`, error)
return NextResponse.json( return NextResponse.json(
{ {
success: false, success: false,
error: 'Failed to generate file access URL', error: error instanceof Error ? error.message : 'Failed to process file',
},
{ status: 500 }
)
}
} else if (validatedData.filePath?.startsWith('/')) {
// Reject arbitrary absolute paths that don't contain /api/files/serve/
logger.warn(`[${requestId}] Invalid internal path`, {
userId,
path: validatedData.filePath.substring(0, 50),
})
return NextResponse.json(
{
success: false,
error: 'Invalid file path. Only uploaded files are supported for internal paths.',
},
{ status: 400 }
)
} else {
const urlValidation = validateExternalUrl(fileUrl, 'Document URL')
if (!urlValidation.isValid) {
logger.warn(`[${requestId}] SSRF attempt blocked`, {
userId,
url: fileUrl.substring(0, 100),
error: urlValidation.error,
})
return NextResponse.json(
{
success: false,
error: urlValidation.error,
}, },
{ status: 400 } { status: 400 }
) )
} }
const buffer = await downloadFileFromStorage(userFile, requestId, logger)
bytes = buffer.toString('base64')
contentType = userFile.type || 'application/octet-stream'
isPdf = contentType.includes('pdf') || userFile.name?.toLowerCase().endsWith('.pdf')
} else if (validatedData.filePath) {
let fileUrl = validatedData.filePath
const isInternalFilePath = isInternalFileUrl(fileUrl)
if (isInternalFilePath) {
const resolution = await resolveInternalFileUrl(fileUrl, userId, requestId, logger)
if (resolution.error) {
return NextResponse.json(
{
success: false,
error: resolution.error.message,
},
{ status: resolution.error.status }
)
}
fileUrl = resolution.fileUrl || fileUrl
} else if (fileUrl.startsWith('/')) {
logger.warn(`[${requestId}] Invalid internal path`, {
userId,
path: fileUrl.substring(0, 50),
})
return NextResponse.json(
{
success: false,
error: 'Invalid file path. Only uploaded files are supported for internal paths.',
},
{ status: 400 }
)
} else {
const urlValidation = await validateUrlWithDNS(fileUrl, 'Document URL')
if (!urlValidation.isValid) {
logger.warn(`[${requestId}] SSRF attempt blocked`, {
userId,
url: fileUrl.substring(0, 100),
error: urlValidation.error,
})
return NextResponse.json(
{
success: false,
error: urlValidation.error,
},
{ status: 400 }
)
}
}
const fetched = await fetchDocumentBytes(fileUrl)
bytes = fetched.bytes
contentType = fetched.contentType
isPdf = contentType.includes('pdf') || fileUrl.toLowerCase().endsWith('.pdf')
} else {
return NextResponse.json(
{
success: false,
error: 'File input is required for single-page processing',
},
{ status: 400 }
)
} }
const { bytes, contentType } = await fetchDocumentBytes(fileUrl)
// Track if this is a PDF for better error messaging
const isPdf = contentType.includes('pdf') || fileUrl.toLowerCase().endsWith('.pdf')
const uri = '/' const uri = '/'
let textractBody: Record<string, unknown> let textractBody: Record<string, unknown>
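The validate-then-pin fetch pattern introduced above recurs across the routes below; a condensed sketch using only the helpers imported in this diff (error handling trimmed, function name is a placeholder):

```typescript
import {
  secureFetchWithPinnedIP,
  validateUrlWithDNS,
} from '@/lib/core/security/input-validation.server'

// Resolve and validate the URL (including DNS) first, then pin the resolved IP
// so the request cannot be re-routed to an internal address after validation.
async function fetchExternalDocument(url: string): Promise<Response> {
  const validation = await validateUrlWithDNS(url, 'Document URL')
  if (!validation.isValid) {
    throw new Error(validation.error || 'Invalid document URL')
  }
  return secureFetchWithPinnedIP(url, validation.resolvedIP!, { method: 'GET' })
}
```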

View File

@@ -0,0 +1,250 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import {
secureFetchWithPinnedIP,
validateUrlWithDNS,
} from '@/lib/core/security/input-validation.server'
import { generateRequestId } from '@/lib/core/utils/request'
import { getExtensionFromMimeType } from '@/lib/uploads/utils/file-utils'
export const dynamic = 'force-dynamic'
const logger = createLogger('TwilioGetRecordingAPI')
interface TwilioRecordingResponse {
sid?: string
call_sid?: string
duration?: string
status?: string
channels?: number
source?: string
price?: string
price_unit?: string
uri?: string
error_code?: number
message?: string
error_message?: string
}
interface TwilioErrorResponse {
message?: string
}
interface TwilioTranscription {
transcription_text?: string
status?: string
price?: string
price_unit?: string
}
interface TwilioTranscriptionsResponse {
transcriptions?: TwilioTranscription[]
}
const TwilioGetRecordingSchema = z.object({
accountSid: z.string().min(1, 'Account SID is required'),
authToken: z.string().min(1, 'Auth token is required'),
recordingSid: z.string().min(1, 'Recording SID is required'),
})
export async function POST(request: NextRequest) {
const requestId = generateRequestId()
try {
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
if (!authResult.success) {
logger.warn(`[${requestId}] Unauthorized Twilio get recording attempt: ${authResult.error}`)
return NextResponse.json(
{
success: false,
error: authResult.error || 'Authentication required',
},
{ status: 401 }
)
}
const body = await request.json()
const validatedData = TwilioGetRecordingSchema.parse(body)
const { accountSid, authToken, recordingSid } = validatedData
if (!accountSid.startsWith('AC')) {
return NextResponse.json(
{
success: false,
error: `Invalid Account SID format. Account SID must start with "AC" (you provided: ${accountSid.substring(0, 2)}...)`,
},
{ status: 400 }
)
}
const twilioAuth = Buffer.from(`${accountSid}:${authToken}`).toString('base64')
logger.info(`[${requestId}] Getting recording info from Twilio`, { recordingSid })
const infoUrl = `https://api.twilio.com/2010-04-01/Accounts/${accountSid}/Recordings/${recordingSid}.json`
const infoUrlValidation = await validateUrlWithDNS(infoUrl, 'infoUrl')
if (!infoUrlValidation.isValid) {
return NextResponse.json({ success: false, error: infoUrlValidation.error }, { status: 400 })
}
const infoResponse = await secureFetchWithPinnedIP(infoUrl, infoUrlValidation.resolvedIP!, {
method: 'GET',
headers: { Authorization: `Basic ${twilioAuth}` },
})
if (!infoResponse.ok) {
const errorData = (await infoResponse.json().catch(() => ({}))) as TwilioErrorResponse
logger.error(`[${requestId}] Twilio API error`, {
status: infoResponse.status,
error: errorData,
})
return NextResponse.json(
{ success: false, error: errorData.message || `Twilio API error: ${infoResponse.status}` },
{ status: 400 }
)
}
const data = (await infoResponse.json()) as TwilioRecordingResponse
if (data.error_code) {
return NextResponse.json({
success: false,
output: {
success: false,
error: data.message || data.error_message || 'Failed to retrieve recording',
},
error: data.message || data.error_message || 'Failed to retrieve recording',
})
}
const baseUrl = 'https://api.twilio.com'
const mediaUrl = data.uri ? `${baseUrl}${data.uri.replace('.json', '')}` : undefined
let transcriptionText: string | undefined
let transcriptionStatus: string | undefined
let transcriptionPrice: string | undefined
let transcriptionPriceUnit: string | undefined
let file:
| {
name: string
mimeType: string
data: string
size: number
}
| undefined
try {
const transcriptionUrl = `https://api.twilio.com/2010-04-01/Accounts/${accountSid}/Transcriptions.json?RecordingSid=${data.sid}`
logger.info(`[${requestId}] Checking for transcriptions`)
const transcriptionUrlValidation = await validateUrlWithDNS(
transcriptionUrl,
'transcriptionUrl'
)
if (transcriptionUrlValidation.isValid) {
const transcriptionResponse = await secureFetchWithPinnedIP(
transcriptionUrl,
transcriptionUrlValidation.resolvedIP!,
{
method: 'GET',
headers: { Authorization: `Basic ${twilioAuth}` },
}
)
if (transcriptionResponse.ok) {
const transcriptionData =
(await transcriptionResponse.json()) as TwilioTranscriptionsResponse
if (transcriptionData.transcriptions && transcriptionData.transcriptions.length > 0) {
const transcription = transcriptionData.transcriptions[0]
transcriptionText = transcription.transcription_text
transcriptionStatus = transcription.status
transcriptionPrice = transcription.price
transcriptionPriceUnit = transcription.price_unit
logger.info(`[${requestId}] Transcription found`, {
status: transcriptionStatus,
textLength: transcriptionText?.length,
})
}
}
}
} catch (error) {
logger.warn(`[${requestId}] Failed to fetch transcription:`, error)
}
if (mediaUrl) {
try {
const mediaUrlValidation = await validateUrlWithDNS(mediaUrl, 'mediaUrl')
if (mediaUrlValidation.isValid) {
const mediaResponse = await secureFetchWithPinnedIP(
mediaUrl,
mediaUrlValidation.resolvedIP!,
{
method: 'GET',
headers: { Authorization: `Basic ${twilioAuth}` },
}
)
if (mediaResponse.ok) {
const contentType =
mediaResponse.headers.get('content-type') || 'application/octet-stream'
const extension = getExtensionFromMimeType(contentType) || 'dat'
const arrayBuffer = await mediaResponse.arrayBuffer()
const buffer = Buffer.from(arrayBuffer)
const fileName = `${data.sid || recordingSid}.${extension}`
file = {
name: fileName,
mimeType: contentType,
data: buffer.toString('base64'),
size: buffer.length,
}
}
}
} catch (error) {
logger.warn(`[${requestId}] Failed to download recording media:`, error)
}
}
logger.info(`[${requestId}] Twilio recording fetched successfully`, {
recordingSid: data.sid,
hasFile: !!file,
hasTranscription: !!transcriptionText,
})
return NextResponse.json({
success: true,
output: {
success: true,
recordingSid: data.sid,
callSid: data.call_sid,
duration: data.duration ? Number.parseInt(data.duration, 10) : undefined,
status: data.status,
channels: data.channels,
source: data.source,
mediaUrl,
file,
price: data.price,
priceUnit: data.price_unit,
uri: data.uri,
transcriptionText,
transcriptionStatus,
transcriptionPrice,
transcriptionPriceUnit,
},
})
} catch (error) {
logger.error(`[${requestId}] Error fetching Twilio recording:`, error)
return NextResponse.json(
{
success: false,
error: error instanceof Error ? error.message : 'Unknown error occurred',
},
{ status: 500 }
)
}
}
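A short sketch of consuming this route's output; `output.file.data` is base64, as built above (everything outside the response shape is assumed):

```typescript
import { writeFile } from 'node:fs/promises'

// Persist the downloaded Twilio recording locally for inspection.
async function saveRecording(output: { file?: { name: string; data: string } }) {
  if (!output.file) return
  await writeFile(output.file.name, Buffer.from(output.file.data, 'base64'))
}
```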

View File

@@ -1,10 +1,20 @@
+import { GoogleGenAI } from '@google/genai'
 import { createLogger } from '@sim/logger'
 import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
 import { checkInternalAuth } from '@/lib/auth/hybrid'
+import {
+  secureFetchWithPinnedIP,
+  validateUrlWithDNS,
+} from '@/lib/core/security/input-validation.server'
 import { generateRequestId } from '@/lib/core/utils/request'
-import { processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
-import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
+import { RawFileInputSchema } from '@/lib/uploads/utils/file-schemas'
+import { isInternalFileUrl, processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
+import {
+  downloadFileFromStorage,
+  resolveInternalFileUrl,
+} from '@/lib/uploads/utils/file-utils.server'
+import { convertUsageMetadata, extractTextContent } from '@/providers/google/utils'

 export const dynamic = 'force-dynamic'
const VisionAnalyzeSchema = z.object({ const VisionAnalyzeSchema = z.object({
apiKey: z.string().min(1, 'API key is required'), apiKey: z.string().min(1, 'API key is required'),
imageUrl: z.string().optional().nullable(), imageUrl: z.string().optional().nullable(),
imageFile: z.any().optional().nullable(), imageFile: RawFileInputSchema.optional().nullable(),
model: z.string().optional().default('gpt-4o'), model: z.string().optional().default('gpt-5.2'),
prompt: z.string().optional().nullable(), prompt: z.string().optional().nullable(),
}) })
@@ -39,6 +49,7 @@ export async function POST(request: NextRequest) {
userId: authResult.userId, userId: authResult.userId,
}) })
const userId = authResult.userId
const body = await request.json() const body = await request.json()
const validatedData = VisionAnalyzeSchema.parse(body) const validatedData = VisionAnalyzeSchema.parse(body)
@@ -77,18 +88,72 @@ export async function POST(request: NextRequest) {
) )
} }
-      const buffer = await downloadFileFromStorage(userFile, requestId, logger)
-      const base64 = buffer.toString('base64')
+      let base64 = userFile.base64
+      let bufferLength = 0
+      if (!base64) {
+        const buffer = await downloadFileFromStorage(userFile, requestId, logger)
+        base64 = buffer.toString('base64')
+        bufferLength = buffer.length
+      }
       const mimeType = userFile.type || 'image/jpeg'
       imageSource = `data:${mimeType};base64,${base64}`
-      logger.info(`[${requestId}] Converted image to base64 (${buffer.length} bytes)`)
+      if (bufferLength > 0) {
+        logger.info(`[${requestId}] Converted image to base64 (${bufferLength} bytes)`)
+      }
     }
let imageUrlValidation: Awaited<ReturnType<typeof validateUrlWithDNS>> | null = null
if (imageSource && !imageSource.startsWith('data:')) {
if (imageSource.startsWith('/') && !isInternalFileUrl(imageSource)) {
return NextResponse.json(
{
success: false,
error: 'Invalid file path. Only uploaded files are supported for internal paths.',
},
{ status: 400 }
)
}
if (isInternalFileUrl(imageSource)) {
if (!userId) {
return NextResponse.json(
{
success: false,
error: 'Authentication required for internal file access',
},
{ status: 401 }
)
}
const resolution = await resolveInternalFileUrl(imageSource, userId, requestId, logger)
if (resolution.error) {
return NextResponse.json(
{
success: false,
error: resolution.error.message,
},
{ status: resolution.error.status }
)
}
imageSource = resolution.fileUrl || imageSource
}
imageUrlValidation = await validateUrlWithDNS(imageSource, 'imageUrl')
if (!imageUrlValidation.isValid) {
return NextResponse.json(
{
success: false,
error: imageUrlValidation.error,
},
{ status: 400 }
)
}
} }
     const defaultPrompt = 'Please analyze this image and describe what you see in detail.'
     const prompt = validatedData.prompt || defaultPrompt
-    const isClaude = validatedData.model.startsWith('claude-3')
+    const isClaude = validatedData.model.startsWith('claude-')
+    const isGemini = validatedData.model.startsWith('gemini-')
     const apiUrl = isClaude
       ? 'https://api.anthropic.com/v1/messages'
       : 'https://api.openai.com/v1/chat/completions'
@@ -106,6 +171,72 @@ export async function POST(request: NextRequest) {
let requestBody: any let requestBody: any
if (isGemini) {
let base64Payload = imageSource
if (!base64Payload.startsWith('data:')) {
const urlValidation =
imageUrlValidation || (await validateUrlWithDNS(base64Payload, 'imageUrl'))
if (!urlValidation.isValid) {
return NextResponse.json({ success: false, error: urlValidation.error }, { status: 400 })
}
const response = await secureFetchWithPinnedIP(base64Payload, urlValidation.resolvedIP!, {
method: 'GET',
})
if (!response.ok) {
return NextResponse.json(
{ success: false, error: 'Failed to fetch image for Gemini' },
{ status: 400 }
)
}
const contentType =
response.headers.get('content-type') || validatedData.imageFile?.type || 'image/jpeg'
const arrayBuffer = await response.arrayBuffer()
const base64 = Buffer.from(arrayBuffer).toString('base64')
base64Payload = `data:${contentType};base64,${base64}`
}
const base64Marker = ';base64,'
const markerIndex = base64Payload.indexOf(base64Marker)
if (!base64Payload.startsWith('data:') || markerIndex === -1) {
return NextResponse.json(
{ success: false, error: 'Invalid base64 image format' },
{ status: 400 }
)
}
const rawMimeType = base64Payload.slice('data:'.length, markerIndex)
const mediaType = rawMimeType.split(';')[0] || 'image/jpeg'
const base64Data = base64Payload.slice(markerIndex + base64Marker.length)
if (!base64Data) {
return NextResponse.json(
{ success: false, error: 'Invalid base64 image format' },
{ status: 400 }
)
}
const ai = new GoogleGenAI({ apiKey: validatedData.apiKey })
const geminiResponse = await ai.models.generateContent({
model: validatedData.model,
contents: [
{
role: 'user',
parts: [{ text: prompt }, { inlineData: { mimeType: mediaType, data: base64Data } }],
},
],
})
const content = extractTextContent(geminiResponse.candidates?.[0])
const usage = convertUsageMetadata(geminiResponse.usageMetadata)
return NextResponse.json({
success: true,
output: {
content,
model: validatedData.model,
tokens: usage.totalTokenCount || undefined,
},
})
}
if (isClaude) { if (isClaude) {
if (imageSource.startsWith('data:')) { if (imageSource.startsWith('data:')) {
const base64Match = imageSource.match(/^data:([^;]+);base64,(.+)$/) const base64Match = imageSource.match(/^data:([^;]+);base64,(.+)$/)
@@ -172,7 +303,7 @@ export async function POST(request: NextRequest) {
           ],
         },
       ],
-      max_tokens: 1000,
+      max_completion_tokens: 1000,
     }
   }
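For the non-Claude, non-Gemini branch, the resulting OpenAI request body looks roughly like this sketch; the message shape follows OpenAI's chat-completions vision format, which is an assumption since only the token-limit line appears in this hunk:

```typescript
// Sketch of the OpenAI vision request after the parameter rename.
const requestBody = {
  model: validatedData.model, // defaults to 'gpt-5.2' per the schema change above
  messages: [
    {
      role: 'user',
      content: [
        { type: 'text', text: prompt },
        { type: 'image_url', image_url: { url: imageSource } },
      ],
    },
  ],
  // Newer OpenAI models reject `max_tokens`; `max_completion_tokens` is the replacement.
  max_completion_tokens: 1000,
}
```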

View File

@@ -3,6 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
 import { checkInternalAuth } from '@/lib/auth/hybrid'
 import { generateRequestId } from '@/lib/core/utils/request'
+import { RawFileInputSchema } from '@/lib/uploads/utils/file-schemas'
 import {
   getFileExtension,
   getMimeTypeFromExtension,
@@ -19,7 +20,7 @@ const WORDPRESS_COM_API_BASE = 'https://public-api.wordpress.com/wp/v2/sites'
 const WordPressUploadSchema = z.object({
   accessToken: z.string().min(1, 'Access token is required'),
   siteId: z.string().min(1, 'Site ID is required'),
-  file: z.any().optional().nullable(),
+  file: RawFileInputSchema.optional().nullable(),
   filename: z.string().optional().nullable(),
   title: z.string().optional().nullable(),
   caption: z.string().optional().nullable(),

View File

@@ -0,0 +1,216 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import {
secureFetchWithPinnedIP,
validateUrlWithDNS,
} from '@/lib/core/security/input-validation.server'
import { generateRequestId } from '@/lib/core/utils/request'
import { getExtensionFromMimeType } from '@/lib/uploads/utils/file-utils'
export const dynamic = 'force-dynamic'
const logger = createLogger('ZoomGetRecordingsAPI')
interface ZoomRecordingFile {
id?: string
meeting_id?: string
recording_start?: string
recording_end?: string
file_type?: string
file_extension?: string
file_size?: number
play_url?: string
download_url?: string
status?: string
recording_type?: string
}
interface ZoomRecordingsResponse {
uuid?: string
id?: string | number
account_id?: string
host_id?: string
topic?: string
type?: number
start_time?: string
duration?: number
total_size?: number
recording_count?: number
share_url?: string
recording_files?: ZoomRecordingFile[]
}
interface ZoomErrorResponse {
message?: string
code?: number
}
const ZoomGetRecordingsSchema = z.object({
accessToken: z.string().min(1, 'Access token is required'),
meetingId: z.string().min(1, 'Meeting ID is required'),
includeFolderItems: z.boolean().optional(),
ttl: z.number().optional(),
downloadFiles: z.boolean().optional().default(false),
})
export async function POST(request: NextRequest) {
const requestId = generateRequestId()
try {
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
if (!authResult.success) {
logger.warn(`[${requestId}] Unauthorized Zoom get recordings attempt: ${authResult.error}`)
return NextResponse.json(
{
success: false,
error: authResult.error || 'Authentication required',
},
{ status: 401 }
)
}
const body = await request.json()
const validatedData = ZoomGetRecordingsSchema.parse(body)
const { accessToken, meetingId, includeFolderItems, ttl, downloadFiles } = validatedData
const baseUrl = `https://api.zoom.us/v2/meetings/${encodeURIComponent(meetingId)}/recordings`
const queryParams = new URLSearchParams()
if (includeFolderItems != null) {
queryParams.append('include_folder_items', String(includeFolderItems))
}
if (ttl) {
queryParams.append('ttl', String(ttl))
}
const queryString = queryParams.toString()
const apiUrl = queryString ? `${baseUrl}?${queryString}` : baseUrl
logger.info(`[${requestId}] Fetching recordings from Zoom`, { meetingId })
const urlValidation = await validateUrlWithDNS(apiUrl, 'apiUrl')
if (!urlValidation.isValid) {
return NextResponse.json({ success: false, error: urlValidation.error }, { status: 400 })
}
const response = await secureFetchWithPinnedIP(apiUrl, urlValidation.resolvedIP!, {
method: 'GET',
headers: {
'Content-Type': 'application/json',
Authorization: `Bearer ${accessToken}`,
},
})
if (!response.ok) {
const errorData = (await response.json().catch(() => ({}))) as ZoomErrorResponse
logger.error(`[${requestId}] Zoom API error`, {
status: response.status,
error: errorData,
})
return NextResponse.json(
{ success: false, error: errorData.message || `Zoom API error: ${response.status}` },
{ status: 400 }
)
}
const data = (await response.json()) as ZoomRecordingsResponse
const files: Array<{
name: string
mimeType: string
data: string
size: number
}> = []
if (downloadFiles && Array.isArray(data.recording_files)) {
for (const file of data.recording_files) {
if (!file?.download_url) continue
try {
const fileUrlValidation = await validateUrlWithDNS(file.download_url, 'downloadUrl')
if (!fileUrlValidation.isValid) continue
const downloadResponse = await secureFetchWithPinnedIP(
file.download_url,
fileUrlValidation.resolvedIP!,
{
method: 'GET',
headers: { Authorization: `Bearer ${accessToken}` },
}
)
if (!downloadResponse.ok) continue
const contentType =
downloadResponse.headers.get('content-type') || 'application/octet-stream'
const arrayBuffer = await downloadResponse.arrayBuffer()
const buffer = Buffer.from(arrayBuffer)
const extension =
file.file_extension?.toString().toLowerCase() ||
getExtensionFromMimeType(contentType) ||
'dat'
const fileName = `zoom-recording-${file.id || file.recording_start || Date.now()}.${extension}`
files.push({
name: fileName,
mimeType: contentType,
data: buffer.toString('base64'),
size: buffer.length,
})
} catch (error) {
logger.warn(`[${requestId}] Failed to download recording file:`, error)
}
}
}
logger.info(`[${requestId}] Zoom recordings fetched successfully`, {
recordingCount: data.recording_files?.length || 0,
downloadedCount: files.length,
})
return NextResponse.json({
success: true,
output: {
recording: {
uuid: data.uuid,
id: data.id,
account_id: data.account_id,
host_id: data.host_id,
topic: data.topic,
type: data.type,
start_time: data.start_time,
duration: data.duration,
total_size: data.total_size,
recording_count: data.recording_count,
share_url: data.share_url,
recording_files: (data.recording_files || []).map((file: ZoomRecordingFile) => ({
id: file.id,
meeting_id: file.meeting_id,
recording_start: file.recording_start,
recording_end: file.recording_end,
file_type: file.file_type,
file_extension: file.file_extension,
file_size: file.file_size,
play_url: file.play_url,
download_url: file.download_url,
status: file.status,
recording_type: file.recording_type,
})),
},
files: files.length > 0 ? files : undefined,
},
})
} catch (error) {
logger.error(`[${requestId}] Error fetching Zoom recordings:`, error)
return NextResponse.json(
{
success: false,
error: error instanceof Error ? error.message : 'Unknown error occurred',
},
{ status: 500 }
)
}
}
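A minimal usage sketch for this route: request `downloadFiles: true` and decode the returned recordings (the meeting ID, caller context, and `output` variable are assumptions):

```typescript
// Request body for the Zoom get-recordings route.
const body = { accessToken, meetingId: '85012345678', downloadFiles: true }

// Each entry in output.files is a base64-encoded recording artifact.
for (const recording of output.files ?? []) {
  const buffer = Buffer.from(recording.data, 'base64')
  console.log(`${recording.name} (${recording.mimeType}): ${buffer.length} bytes`)
}
```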

View File

@@ -3,7 +3,6 @@ import { type NextRequest, NextResponse } from 'next/server'
import { v4 as uuidv4 } from 'uuid' import { v4 as uuidv4 } from 'uuid'
import { z } from 'zod' import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid' import { checkHybridAuth } from '@/lib/auth/hybrid'
import { getTimeoutErrorMessage, isTimeoutError } from '@/lib/core/execution-limits'
import { generateRequestId } from '@/lib/core/utils/request' import { generateRequestId } from '@/lib/core/utils/request'
import { SSE_HEADERS } from '@/lib/core/utils/sse' import { SSE_HEADERS } from '@/lib/core/utils/sse'
import { markExecutionCancelled } from '@/lib/execution/cancellation' import { markExecutionCancelled } from '@/lib/execution/cancellation'
@@ -117,16 +116,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
const loggingSession = new LoggingSession(workflowId, executionId, 'manual', requestId) const loggingSession = new LoggingSession(workflowId, executionId, 'manual', requestId)
const abortController = new AbortController() const abortController = new AbortController()
let isStreamClosed = false let isStreamClosed = false
let isTimedOut = false
const syncTimeout = preprocessResult.executionTimeout?.sync
let timeoutId: NodeJS.Timeout | undefined
if (syncTimeout) {
timeoutId = setTimeout(() => {
isTimedOut = true
abortController.abort()
}, syncTimeout)
}
const stream = new ReadableStream<Uint8Array>({ const stream = new ReadableStream<Uint8Array>({
async start(controller) { async start(controller) {
@@ -178,33 +167,13 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
}) })
if (result.status === 'cancelled') { if (result.status === 'cancelled') {
if (isTimedOut && syncTimeout) { sendEvent({
const timeoutErrorMessage = getTimeoutErrorMessage(null, syncTimeout) type: 'execution:cancelled',
logger.info(`[${requestId}] Run-from-block execution timed out`, { timestamp: new Date().toISOString(),
timeoutMs: syncTimeout, executionId,
}) workflowId,
data: { duration: result.metadata?.duration || 0 },
await loggingSession.markAsFailed(timeoutErrorMessage) })
sendEvent({
type: 'execution:error',
timestamp: new Date().toISOString(),
executionId,
workflowId,
data: {
error: timeoutErrorMessage,
duration: result.metadata?.duration || 0,
},
})
} else {
sendEvent({
type: 'execution:cancelled',
timestamp: new Date().toISOString(),
executionId,
workflowId,
data: { duration: result.metadata?.duration || 0 },
})
}
} else { } else {
sendEvent({ sendEvent({
type: 'execution:completed', type: 'execution:completed',
@@ -221,25 +190,11 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
}) })
} }
} catch (error: unknown) { } catch (error: unknown) {
const isTimeout = isTimeoutError(error) || isTimedOut const errorMessage = error instanceof Error ? error.message : 'Unknown error'
const errorMessage = isTimeout logger.error(`[${requestId}] Run-from-block execution failed: ${errorMessage}`)
? getTimeoutErrorMessage(error, syncTimeout)
: error instanceof Error
? error.message
: 'Unknown error'
logger.error(`[${requestId}] Run-from-block execution failed: ${errorMessage}`, {
isTimeout,
})
const executionResult = hasExecutionResult(error) ? error.executionResult : undefined const executionResult = hasExecutionResult(error) ? error.executionResult : undefined
await loggingSession.safeCompleteWithError({
totalDurationMs: executionResult?.metadata?.duration,
error: { message: errorMessage },
traceSpans: executionResult?.logs as any,
})
sendEvent({ sendEvent({
type: 'execution:error', type: 'execution:error',
timestamp: new Date().toISOString(), timestamp: new Date().toISOString(),
@@ -251,7 +206,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
}, },
}) })
} finally { } finally {
if (timeoutId) clearTimeout(timeoutId)
if (!isStreamClosed) { if (!isStreamClosed) {
try { try {
controller.enqueue(new TextEncoder().encode('data: [DONE]\n\n')) controller.enqueue(new TextEncoder().encode('data: [DONE]\n\n'))
@@ -262,7 +216,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
}, },
cancel() { cancel() {
isStreamClosed = true isStreamClosed = true
if (timeoutId) clearTimeout(timeoutId)
abortController.abort() abortController.abort()
markExecutionCancelled(executionId).catch(() => {}) markExecutionCancelled(executionId).catch(() => {})
}, },

View File

@@ -5,7 +5,6 @@ import { validate as uuidValidate, v4 as uuidv4 } from 'uuid'
import { z } from 'zod' import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid' import { checkHybridAuth } from '@/lib/auth/hybrid'
import { isTriggerDevEnabled } from '@/lib/core/config/feature-flags' import { isTriggerDevEnabled } from '@/lib/core/config/feature-flags'
import { getTimeoutErrorMessage, isTimeoutError } from '@/lib/core/execution-limits'
import { generateRequestId } from '@/lib/core/utils/request' import { generateRequestId } from '@/lib/core/utils/request'
import { SSE_HEADERS } from '@/lib/core/utils/sse' import { SSE_HEADERS } from '@/lib/core/utils/sse'
import { getBaseUrl } from '@/lib/core/utils/urls' import { getBaseUrl } from '@/lib/core/utils/urls'
@@ -121,6 +120,10 @@ type AsyncExecutionParams = {
triggerType: CoreTriggerType triggerType: CoreTriggerType
} }
/**
* Handles async workflow execution by queueing a background job.
* Returns immediately with a 202 Accepted response containing the job ID.
*/
async function handleAsyncExecution(params: AsyncExecutionParams): Promise<NextResponse> { async function handleAsyncExecution(params: AsyncExecutionParams): Promise<NextResponse> {
const { requestId, workflowId, userId, input, triggerType } = params const { requestId, workflowId, userId, input, triggerType } = params
@@ -402,7 +405,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
if (!enableSSE) { if (!enableSSE) {
logger.info(`[${requestId}] Using non-SSE execution (direct JSON response)`) logger.info(`[${requestId}] Using non-SSE execution (direct JSON response)`)
const syncTimeout = preprocessResult.executionTimeout?.sync
try { try {
const metadata: ExecutionMetadata = { const metadata: ExecutionMetadata = {
requestId, requestId,
@@ -436,7 +438,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
includeFileBase64, includeFileBase64,
base64MaxBytes, base64MaxBytes,
stopAfterBlockId, stopAfterBlockId,
abortSignal: syncTimeout ? AbortSignal.timeout(syncTimeout) : undefined,
}) })
const outputWithBase64 = includeFileBase64 const outputWithBase64 = includeFileBase64
@@ -472,23 +473,11 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
return NextResponse.json(filteredResult) return NextResponse.json(filteredResult)
} catch (error: unknown) { } catch (error: unknown) {
const isTimeout = isTimeoutError(error) const errorMessage = error instanceof Error ? error.message : 'Unknown error'
const errorMessage = isTimeout logger.error(`[${requestId}] Non-SSE execution failed: ${errorMessage}`)
? getTimeoutErrorMessage(error, syncTimeout)
: error instanceof Error
? error.message
: 'Unknown error'
logger.error(`[${requestId}] Non-SSE execution failed: ${errorMessage}`, { isTimeout })
const executionResult = hasExecutionResult(error) ? error.executionResult : undefined const executionResult = hasExecutionResult(error) ? error.executionResult : undefined
await loggingSession.safeCompleteWithError({
totalDurationMs: executionResult?.metadata?.duration,
error: { message: errorMessage },
traceSpans: executionResult?.logs as any,
})
return NextResponse.json( return NextResponse.json(
{ {
success: false, success: false,
@@ -502,7 +491,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
} }
: undefined, : undefined,
}, },
{ status: isTimeout ? 408 : 500 } { status: 500 }
) )
} }
} }
@@ -548,16 +537,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
const encoder = new TextEncoder() const encoder = new TextEncoder()
const abortController = new AbortController() const abortController = new AbortController()
let isStreamClosed = false let isStreamClosed = false
let isTimedOut = false
const syncTimeout = preprocessResult.executionTimeout?.sync
let timeoutId: NodeJS.Timeout | undefined
if (syncTimeout) {
timeoutId = setTimeout(() => {
isTimedOut = true
abortController.abort()
}, syncTimeout)
}
const stream = new ReadableStream<Uint8Array>({ const stream = new ReadableStream<Uint8Array>({
async start(controller) { async start(controller) {
@@ -784,35 +763,16 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
} }
if (result.status === 'cancelled') { if (result.status === 'cancelled') {
if (isTimedOut && syncTimeout) { logger.info(`[${requestId}] Workflow execution was cancelled`)
const timeoutErrorMessage = getTimeoutErrorMessage(null, syncTimeout) sendEvent({
logger.info(`[${requestId}] Workflow execution timed out`, { timeoutMs: syncTimeout }) type: 'execution:cancelled',
timestamp: new Date().toISOString(),
await loggingSession.markAsFailed(timeoutErrorMessage) executionId,
workflowId,
sendEvent({ data: {
type: 'execution:error', duration: result.metadata?.duration || 0,
timestamp: new Date().toISOString(), },
executionId, })
workflowId,
data: {
error: timeoutErrorMessage,
duration: result.metadata?.duration || 0,
},
})
} else {
logger.info(`[${requestId}] Workflow execution was cancelled`)
sendEvent({
type: 'execution:cancelled',
timestamp: new Date().toISOString(),
executionId,
workflowId,
data: {
duration: result.metadata?.duration || 0,
},
})
}
return return
} }
@@ -839,23 +799,11 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
// Cleanup base64 cache for this execution // Cleanup base64 cache for this execution
await cleanupExecutionBase64Cache(executionId) await cleanupExecutionBase64Cache(executionId)
} catch (error: unknown) { } catch (error: unknown) {
const isTimeout = isTimeoutError(error) || isTimedOut const errorMessage = error instanceof Error ? error.message : 'Unknown error'
const errorMessage = isTimeout logger.error(`[${requestId}] SSE execution failed: ${errorMessage}`)
? getTimeoutErrorMessage(error, syncTimeout)
: error instanceof Error
? error.message
: 'Unknown error'
logger.error(`[${requestId}] SSE execution failed: ${errorMessage}`, { isTimeout })
const executionResult = hasExecutionResult(error) ? error.executionResult : undefined const executionResult = hasExecutionResult(error) ? error.executionResult : undefined
await loggingSession.safeCompleteWithError({
totalDurationMs: executionResult?.metadata?.duration,
error: { message: errorMessage },
traceSpans: executionResult?.logs as any,
})
sendEvent({ sendEvent({
type: 'execution:error', type: 'execution:error',
timestamp: new Date().toISOString(), timestamp: new Date().toISOString(),
@@ -867,18 +815,18 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
}, },
}) })
} finally { } finally {
if (timeoutId) clearTimeout(timeoutId)
if (!isStreamClosed) { if (!isStreamClosed) {
try { try {
controller.enqueue(encoder.encode('data: [DONE]\n\n')) controller.enqueue(encoder.encode('data: [DONE]\n\n'))
controller.close() controller.close()
} catch {} } catch {
// Stream already closed - nothing to do
}
} }
} }
}, },
cancel() { cancel() {
isStreamClosed = true isStreamClosed = true
if (timeoutId) clearTimeout(timeoutId)
logger.info(`[${requestId}] Client aborted SSE stream, signalling cancellation`) logger.info(`[${requestId}] Client aborted SSE stream, signalling cancellation`)
abortController.abort() abortController.abort()
markExecutionCancelled(executionId).catch(() => {}) markExecutionCancelled(executionId).catch(() => {})

View File

@@ -807,7 +807,7 @@ export function Chat() {
       const newReservedFields: StartInputFormatField[] = missingStartReservedFields.map(
         (fieldName) => {
-          const defaultType = fieldName === 'files' ? 'files' : 'string'
+          const defaultType = fieldName === 'files' ? 'file[]' : 'string'
           return {
             id: crypto.randomUUID(),

View File

@@ -179,7 +179,7 @@ export function A2aDeploy({
       newFields.push({
         id: crypto.randomUUID(),
         name: 'files',
-        type: 'files',
+        type: 'file[]',
         value: '',
         collapsed: false,
       })

View File

@@ -26,7 +26,7 @@ import { useAccessibleReferencePrefixes } from '@/app/workspace/[workspaceId]/w/
 interface Field {
   id: string
   name: string
-  type?: 'string' | 'number' | 'boolean' | 'object' | 'array' | 'files'
+  type?: 'string' | 'number' | 'boolean' | 'object' | 'array' | 'file[]'
   value?: string
   description?: string
   collapsed?: boolean
@@ -57,7 +57,7 @@ const TYPE_OPTIONS: ComboboxOption[] = [
   { label: 'Boolean', value: 'boolean' },
   { label: 'Object', value: 'object' },
   { label: 'Array', value: 'array' },
-  { label: 'Files', value: 'files' },
+  { label: 'Files', value: 'file[]' },
 ]
 /**
@@ -448,7 +448,7 @@ export function FieldFormat({
     )
   }
-  if (field.type === 'files') {
+  if (field.type === 'file[]') {
     const lineCount = fieldValue.split('\n').length
     const gutterWidth = calculateGutterWidth(lineCount)

View File

@@ -225,7 +225,7 @@ const getOutputTypeForPath = (
   const chatModeTypes: Record<string, string> = {
     input: 'string',
     conversationId: 'string',
-    files: 'files',
+    files: 'file[]',
   }
   return chatModeTypes[outputPath] || 'any'
 }
@@ -1563,16 +1563,11 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
     blockTagGroups.sort((a, b) => a.distance - b.distance)
     finalBlockTagGroups.push(...blockTagGroups)
-    const contextualTags: string[] = []
-    if (loopBlockGroup) {
-      contextualTags.push(...loopBlockGroup.tags)
-    }
-    if (parallelBlockGroup) {
-      contextualTags.push(...parallelBlockGroup.tags)
-    }
+    const groupTags = finalBlockTagGroups.flatMap((group) => group.tags)
+    const tags = [...groupTags, ...variableTags]
     return {
-      tags: [...allBlockTags, ...variableTags, ...contextualTags],
+      tags,
       variableInfoMap,
       blockTagGroups: finalBlockTagGroups,
     }
@@ -1746,7 +1741,7 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
         mergedSubBlocks
       )
-      if (fieldType === 'files' || fieldType === 'file[]' || fieldType === 'array') {
+      if (fieldType === 'file' || fieldType === 'file[]' || fieldType === 'array') {
         const blockName = parts[0]
         const remainingPath = parts.slice(2).join('.')
         processedTag = `${blockName}.${arrayFieldName}[0].${remainingPath}`

View File

@@ -188,7 +188,7 @@ export function useBlockOutputFields({
       baseOutputs = {
         input: { type: 'string', description: 'User message' },
         conversationId: { type: 'string', description: 'Conversation ID' },
-        files: { type: 'files', description: 'Uploaded files' },
+        files: { type: 'file[]', description: 'Uploaded files' },
       }
     } else {
       const inputFormatValue = mergedSubBlocks?.inputFormat?.value

View File

@@ -27,7 +27,7 @@ import { useExecutionStore } from '@/stores/execution'
import { useNotificationStore } from '@/stores/notifications' import { useNotificationStore } from '@/stores/notifications'
import { useVariablesStore } from '@/stores/panel' import { useVariablesStore } from '@/stores/panel'
import { useEnvironmentStore } from '@/stores/settings/environment' import { useEnvironmentStore } from '@/stores/settings/environment'
import { useTerminalConsoleStore } from '@/stores/terminal' import { type ConsoleEntry, useTerminalConsoleStore } from '@/stores/terminal'
import { useWorkflowDiffStore } from '@/stores/workflow-diff' import { useWorkflowDiffStore } from '@/stores/workflow-diff'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store' import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { mergeSubblockState } from '@/stores/workflows/utils' import { mergeSubblockState } from '@/stores/workflows/utils'
@@ -1153,29 +1153,30 @@ export function useWorkflowExecution() {
logs: accumulatedBlockLogs, logs: accumulatedBlockLogs,
} }
if (activeWorkflowId) { // Only add workflow-level error if no blocks have executed yet
cancelRunningEntries(activeWorkflowId) // This catches pre-execution errors (validation, serialization, etc.)
} // Block execution errors are already logged via onBlockError callback
const { entries } = useTerminalConsoleStore.getState()
const existingLogs = entries.filter(
(log: ConsoleEntry) => log.executionId === executionId
)
addConsole({ if (existingLogs.length === 0) {
input: {}, // No blocks executed yet - this is a pre-execution error
output: {}, addConsole({
success: false, input: {},
error: data.error, output: {},
durationMs: data.duration || 0, success: false,
startedAt: new Date(Date.now() - (data.duration || 0)).toISOString(), error: data.error,
endedAt: new Date().toISOString(), durationMs: data.duration || 0,
workflowId: activeWorkflowId, startedAt: new Date(Date.now() - (data.duration || 0)).toISOString(),
blockId: 'workflow-error', endedAt: new Date().toISOString(),
executionId, workflowId: activeWorkflowId,
blockName: 'Workflow Error', blockId: 'validation',
blockType: 'error', executionId,
}) blockName: 'Workflow Validation',
}, blockType: 'validation',
})
onExecutionCancelled: () => {
if (activeWorkflowId) {
cancelRunningEntries(activeWorkflowId)
} }
}, },
}, },
@@ -1717,28 +1718,13 @@ export function useWorkflowExecution() {
'Workflow was modified. Run the workflow again to enable running from block.', 'Workflow was modified. Run the workflow again to enable running from block.',
workflowId, workflowId,
}) })
} else {
addNotification({
level: 'error',
message: data.error || 'Run from block failed',
workflowId,
})
} }
cancelRunningEntries(workflowId)
addConsole({
input: {},
output: {},
success: false,
error: data.error,
durationMs: data.duration || 0,
startedAt: new Date(Date.now() - (data.duration || 0)).toISOString(),
endedAt: new Date().toISOString(),
workflowId,
blockId: 'workflow-error',
executionId,
blockName: 'Workflow Error',
blockType: 'error',
})
},
onExecutionCancelled: () => {
cancelRunningEntries(workflowId)
}, },
}, },
}) })

View File

@@ -1,11 +1,11 @@
import { import {
Building2,
Clock, Clock,
Database, Database,
HardDrive, HardDrive,
HeadphonesIcon, HeadphonesIcon,
Server, Server,
ShieldCheck, ShieldCheck,
Timer,
Users, Users,
Zap, Zap,
} from 'lucide-react' } from 'lucide-react'
@@ -15,8 +15,8 @@ import type { PlanFeature } from '@/app/workspace/[workspaceId]/w/components/sid
export const PRO_PLAN_FEATURES: PlanFeature[] = [ export const PRO_PLAN_FEATURES: PlanFeature[] = [
{ icon: Zap, text: '150 runs per minute (sync)' }, { icon: Zap, text: '150 runs per minute (sync)' },
{ icon: Clock, text: '1,000 runs per minute (async)' }, { icon: Clock, text: '1,000 runs per minute (async)' },
{ icon: Timer, text: '60 min sync execution limit' },
{ icon: HardDrive, text: '50GB file storage' }, { icon: HardDrive, text: '50GB file storage' },
{ icon: Building2, text: 'Unlimited workspaces' },
{ icon: Users, text: 'Unlimited invites' }, { icon: Users, text: 'Unlimited invites' },
{ icon: Database, text: 'Unlimited log retention' }, { icon: Database, text: 'Unlimited log retention' },
] ]
@@ -24,8 +24,8 @@ export const PRO_PLAN_FEATURES: PlanFeature[] = [
export const TEAM_PLAN_FEATURES: PlanFeature[] = [ export const TEAM_PLAN_FEATURES: PlanFeature[] = [
{ icon: Zap, text: '300 runs per minute (sync)' }, { icon: Zap, text: '300 runs per minute (sync)' },
{ icon: Clock, text: '2,500 runs per minute (async)' }, { icon: Clock, text: '2,500 runs per minute (async)' },
{ icon: Timer, text: '60 min sync execution limit' },
{ icon: HardDrive, text: '500GB file storage (pooled)' }, { icon: HardDrive, text: '500GB file storage (pooled)' },
{ icon: Building2, text: 'Unlimited workspaces' },
{ icon: Users, text: 'Unlimited invites' }, { icon: Users, text: 'Unlimited invites' },
{ icon: Database, text: 'Unlimited log retention' }, { icon: Database, text: 'Unlimited log retention' },
{ icon: SlackMonoIcon, text: 'Dedicated Slack channel' }, { icon: SlackMonoIcon, text: 'Dedicated Slack channel' },

View File

@@ -417,11 +417,11 @@ async function executeWebhookJobInternal(
     if (triggerBlock?.subBlocks?.inputFormat?.value) {
       const inputFormat = triggerBlock.subBlocks.inputFormat.value as unknown as Array<{
         name: string
-        type: 'string' | 'number' | 'boolean' | 'object' | 'array' | 'files'
+        type: 'string' | 'number' | 'boolean' | 'object' | 'array' | 'file[]'
       }>
       logger.debug(`[${requestId}] Processing generic webhook files from inputFormat`)
-      const fileFields = inputFormat.filter((field) => field.type === 'files')
+      const fileFields = inputFormat.filter((field) => field.type === 'file[]')
       if (fileFields.length > 0 && typeof input === 'object' && input !== null) {
         const executionContext = {

View File

@@ -442,7 +442,16 @@ describe('Blocks Module', () => {
}) })
it('should have valid output types', () => { it('should have valid output types', () => {
const validPrimitiveTypes = ['string', 'number', 'boolean', 'json', 'array', 'files', 'any'] const validPrimitiveTypes = [
'string',
'number',
'boolean',
'json',
'array',
'file',
'file[]',
'any',
]
const blocks = getAllBlocks() const blocks = getAllBlocks()
for (const block of blocks) { for (const block of blocks) {
for (const [key, outputConfig] of Object.entries(block.outputs)) { for (const [key, outputConfig] of Object.entries(block.outputs)) {

View File

@@ -1,5 +1,6 @@
 import { A2AIcon } from '@/components/icons'
 import type { BlockConfig } from '@/blocks/types'
+import { normalizeFileInput } from '@/blocks/utils'
 import type { ToolResponse } from '@/tools/types'

 export interface A2AResponse extends ToolResponse {
@@ -214,6 +215,14 @@ export const A2ABlock: BlockConfig<A2AResponse> = {
     ],
     config: {
       tool: (params) => params.operation as string,
+      params: (params) => {
+        const { fileUpload, fileReference, ...rest } = params
+        const normalizedFiles = normalizeFileInput(fileUpload || fileReference || params.files)
+        return {
+          ...rest,
+          ...(normalizedFiles && { files: normalizedFiles }),
+        }
+      },
     },
   },
   inputs: {
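The same normalization shows up in the block configs that follow (Confluence, Discord, Dropbox); as a condensed illustration of the pattern with placeholder field names, not a real block:

```typescript
import { normalizeFileInput } from '@/blocks/utils'

// Placeholder params mapper: prefer the basic-mode upload, fall back to the
// advanced-mode reference, and emit a single canonical `file` param.
const params = (raw: Record<string, any>) => {
  const { fileUpload, fileRef, ...rest } = raw
  const file = normalizeFileInput(fileUpload || fileRef, { single: true })
  return { ...rest, ...(file && { file }) }
}
```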

View File

@@ -26,7 +26,7 @@ export const ChatTriggerBlock: BlockConfig = {
   outputs: {
     input: { type: 'string', description: 'User message' },
     conversationId: { type: 'string', description: 'Conversation ID' },
-    files: { type: 'files', description: 'Uploaded files' },
+    files: { type: 'file[]', description: 'Uploaded files' },
   },
   triggers: {
     enabled: true,

View File

@@ -1,6 +1,7 @@
 import { ConfluenceIcon } from '@/components/icons'
 import type { BlockConfig } from '@/blocks/types'
 import { AuthMode } from '@/blocks/types'
+import { normalizeFileInput } from '@/blocks/utils'
 import type { ConfluenceResponse } from '@/tools/confluence/types'

 export const ConfluenceBlock: BlockConfig<ConfluenceResponse> = {
@@ -651,14 +652,15 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
         if (operation === 'upload_attachment') {
           const fileInput = attachmentFileUpload || attachmentFileReference || attachmentFile
-          if (!fileInput) {
+          const normalizedFile = normalizeFileInput(fileInput, { single: true })
+          if (!normalizedFile) {
             throw new Error('File is required for upload attachment operation.')
           }
           return {
             credential,
             pageId: effectivePageId,
             operation,
-            file: fileInput,
+            file: normalizedFile,
             fileName: attachmentFileName,
             comment: attachmentComment,
             ...rest,

View File

@@ -1,6 +1,7 @@
 import { DiscordIcon } from '@/components/icons'
 import type { BlockConfig } from '@/blocks/types'
 import { AuthMode } from '@/blocks/types'
+import { normalizeFileInput } from '@/blocks/utils'
 import type { DiscordResponse } from '@/tools/discord/types'

 export const DiscordBlock: BlockConfig<DiscordResponse> = {
@@ -578,13 +579,14 @@ export const DiscordBlock: BlockConfig<DiscordResponse> = {
         if (!params.serverId) throw new Error('Server ID is required')
         switch (params.operation) {
-          case 'discord_send_message':
+          case 'discord_send_message': {
             return {
               ...commonParams,
               channelId: params.channelId,
               content: params.content,
-              files: params.attachmentFiles || params.files,
+              files: normalizeFileInput(params.attachmentFiles || params.files),
             }
+          }
           case 'discord_get_messages':
             return {
               ...commonParams,
@@ -789,6 +791,7 @@ export const DiscordBlock: BlockConfig<DiscordResponse> = {
   },
   outputs: {
     message: { type: 'string', description: 'Status message' },
+    files: { type: 'file[]', description: 'Files attached to the message' },
     data: { type: 'json', description: 'Response data' },
   },
 }
@@ -1,6 +1,7 @@
import { DropboxIcon } from '@/components/icons' import { DropboxIcon } from '@/components/icons'
import type { BlockConfig } from '@/blocks/types' import type { BlockConfig } from '@/blocks/types'
import { AuthMode } from '@/blocks/types' import { AuthMode } from '@/blocks/types'
import { normalizeFileInput } from '@/blocks/utils'
import type { DropboxResponse } from '@/tools/dropbox/types' import type { DropboxResponse } from '@/tools/dropbox/types'
export const DropboxBlock: BlockConfig<DropboxResponse> = { export const DropboxBlock: BlockConfig<DropboxResponse> = {
@@ -60,12 +61,25 @@ export const DropboxBlock: BlockConfig<DropboxResponse> = {
required: true, required: true,
}, },
{ {
id: 'fileContent', id: 'uploadFile',
title: 'File Content', title: 'File',
type: 'long-input', type: 'file-upload',
placeholder: 'Base64 encoded file content or file reference', canonicalParamId: 'file',
condition: { field: 'operation', value: 'dropbox_upload' }, placeholder: 'Upload file to send to Dropbox',
mode: 'basic',
multiple: false,
required: true, required: true,
condition: { field: 'operation', value: 'dropbox_upload' },
},
{
id: 'fileRef',
title: 'File',
type: 'short-input',
canonicalParamId: 'file',
placeholder: 'Reference file from previous blocks',
mode: 'advanced',
required: true,
condition: { field: 'operation', value: 'dropbox_upload' },
}, },
{ {
id: 'mode', id: 'mode',
@@ -303,6 +317,16 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
params.maxResults = Number(params.maxResults) params.maxResults = Number(params.maxResults)
} }
// Normalize file input for upload operation
// Check all possible field IDs: uploadFile (basic), fileRef (advanced), fileContent (legacy)
const normalizedFile = normalizeFileInput(
params.uploadFile || params.fileRef || params.fileContent,
{ single: true }
)
if (normalizedFile) {
params.file = normalizedFile
}
switch (params.operation) { switch (params.operation) {
case 'dropbox_upload': case 'dropbox_upload':
return 'dropbox_upload' return 'dropbox_upload'
@@ -337,7 +361,10 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
path: { type: 'string', description: 'Path in Dropbox' }, path: { type: 'string', description: 'Path in Dropbox' },
autorename: { type: 'boolean', description: 'Auto-rename on conflict' }, autorename: { type: 'boolean', description: 'Auto-rename on conflict' },
// Upload inputs // Upload inputs
fileContent: { type: 'string', description: 'Base64 encoded file content' }, uploadFile: { type: 'json', description: 'Uploaded file (UserFile)' },
file: { type: 'json', description: 'File to upload (UserFile object)' },
fileRef: { type: 'json', description: 'File reference from previous block' },
fileContent: { type: 'string', description: 'Legacy: base64 encoded file content' },
fileName: { type: 'string', description: 'Optional filename' }, fileName: { type: 'string', description: 'Optional filename' },
mode: { type: 'string', description: 'Write mode: add or overwrite' }, mode: { type: 'string', description: 'Write mode: add or overwrite' },
mute: { type: 'boolean', description: 'Mute notifications' }, mute: { type: 'boolean', description: 'Mute notifications' },
@@ -360,7 +387,7 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
}, },
outputs: { outputs: {
// Upload/Download outputs // Upload/Download outputs
file: { type: 'json', description: 'File metadata' }, file: { type: 'file', description: 'Downloaded file stored in execution files' },
content: { type: 'string', description: 'File content (base64)' }, content: { type: 'string', description: 'File content (base64)' },
temporaryLink: { type: 'string', description: 'Temporary download link' }, temporaryLink: { type: 'string', description: 'Temporary download link' },
// List folder outputs // List folder outputs
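The Dropbox upload field above illustrates the basic/advanced pairing used throughout this change set: two subBlocks with different widgets but the same `canonicalParamId`, so both feed the single `file` parameter that the params transformer later normalizes. A trimmed sketch keeping only the fields that appear in the diff (other SubBlockConfig options are omitted, not implied):

```typescript
const dropboxUploadFields = [
  {
    id: 'uploadFile',
    title: 'File',
    type: 'file-upload',      // basic mode: visual upload widget
    canonicalParamId: 'file',
    mode: 'basic',
    multiple: false,
    required: true,
    condition: { field: 'operation', value: 'dropbox_upload' },
  },
  {
    id: 'fileRef',
    title: 'File',
    type: 'short-input',      // advanced mode: reference a file from a previous block
    canonicalParamId: 'file',
    mode: 'advanced',
    required: true,
    condition: { field: 'operation', value: 'dropbox_upload' },
  },
]
```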
@@ -73,5 +73,6 @@ export const ElevenLabsBlock: BlockConfig<ElevenLabsBlockResponse> = {
outputs: { outputs: {
audioUrl: { type: 'string', description: 'Generated audio URL' }, audioUrl: { type: 'string', description: 'Generated audio URL' },
audioFile: { type: 'file', description: 'Generated audio file' },
}, },
} }
@@ -1,11 +1,48 @@
import { createLogger } from '@sim/logger' import { createLogger } from '@sim/logger'
import { DocumentIcon } from '@/components/icons' import { DocumentIcon } from '@/components/icons'
import { inferContextFromKey } from '@/lib/uploads/utils/file-utils'
import type { BlockConfig, SubBlockType } from '@/blocks/types' import type { BlockConfig, SubBlockType } from '@/blocks/types'
import { createVersionedToolSelector } from '@/blocks/utils' import { createVersionedToolSelector, normalizeFileInput } from '@/blocks/utils'
import type { FileParserOutput } from '@/tools/file/types' import type { FileParserOutput, FileParserV3Output } from '@/tools/file/types'
const logger = createLogger('FileBlock') const logger = createLogger('FileBlock')
const resolveFilePathFromInput = (fileInput: unknown): string | null => {
if (!fileInput || typeof fileInput !== 'object') {
return null
}
const record = fileInput as Record<string, unknown>
if (typeof record.path === 'string' && record.path.trim() !== '') {
return record.path
}
if (typeof record.url === 'string' && record.url.trim() !== '') {
return record.url
}
if (typeof record.key === 'string' && record.key.trim() !== '') {
const key = record.key.trim()
const context = typeof record.context === 'string' ? record.context : inferContextFromKey(key)
return `/api/files/serve/${encodeURIComponent(key)}?context=${context}`
}
return null
}
const resolveFilePathsFromInput = (fileInput: unknown): string[] => {
if (!fileInput) {
return []
}
if (Array.isArray(fileInput)) {
return fileInput
.map((file) => resolveFilePathFromInput(file))
.filter((path): path is string => Boolean(path))
}
const resolved = resolveFilePathFromInput(fileInput)
return resolved ? [resolved] : []
}
export const FileBlock: BlockConfig<FileParserOutput> = { export const FileBlock: BlockConfig<FileParserOutput> = {
type: 'file', type: 'file',
name: 'File (Legacy)', name: 'File (Legacy)',
@@ -79,24 +116,14 @@ export const FileBlock: BlockConfig<FileParserOutput> = {
// Handle file upload input // Handle file upload input
if (inputMethod === 'upload') { if (inputMethod === 'upload') {
// Handle case where 'file' is an array (multiple files) const filePaths = resolveFilePathsFromInput(params.file)
if (params.file && Array.isArray(params.file) && params.file.length > 0) { if (filePaths.length > 0) {
const filePaths = params.file.map((file) => file.path)
return { return {
filePath: filePaths.length === 1 ? filePaths[0] : filePaths, filePath: filePaths.length === 1 ? filePaths[0] : filePaths,
fileType: params.fileType || 'auto', fileType: params.fileType || 'auto',
} }
} }
// Handle case where 'file' is a single file object
if (params.file?.path) {
return {
filePath: params.file.path,
fileType: params.fileType || 'auto',
}
}
// If no files, return error // If no files, return error
logger.error('No files provided for upload method') logger.error('No files provided for upload method')
throw new Error('Please upload a file') throw new Error('Please upload a file')
@@ -116,7 +143,7 @@ export const FileBlock: BlockConfig<FileParserOutput> = {
}, },
outputs: { outputs: {
files: { files: {
type: 'json', type: 'file[]',
description: 'Array of parsed file objects with content, metadata, and file properties', description: 'Array of parsed file objects with content, metadata, and file properties',
}, },
combinedContent: { combinedContent: {
@@ -124,7 +151,7 @@ export const FileBlock: BlockConfig<FileParserOutput> = {
description: 'All file contents merged into a single text string', description: 'All file contents merged into a single text string',
}, },
processedFiles: { processedFiles: {
type: 'files', type: 'file[]',
description: 'Array of UserFile objects for downstream use (attachments, uploads, etc.)', description: 'Array of UserFile objects for downstream use (attachments, uploads, etc.)',
}, },
}, },
@@ -133,9 +160,9 @@ export const FileBlock: BlockConfig<FileParserOutput> = {
export const FileV2Block: BlockConfig<FileParserOutput> = { export const FileV2Block: BlockConfig<FileParserOutput> = {
...FileBlock, ...FileBlock,
type: 'file_v2', type: 'file_v2',
name: 'File', name: 'File (Legacy)',
description: 'Read and parse multiple files', description: 'Read and parse multiple files',
hideFromToolbar: false, hideFromToolbar: true,
subBlocks: [ subBlocks: [
{ {
id: 'file', id: 'file',
@@ -173,7 +200,21 @@ export const FileV2Block: BlockConfig<FileParserOutput> = {
throw new Error('File is required') throw new Error('File is required')
} }
if (typeof fileInput === 'string') { // First, try to normalize as file objects (handles JSON strings from advanced mode)
const normalizedFiles = normalizeFileInput(fileInput)
if (normalizedFiles) {
const filePaths = resolveFilePathsFromInput(normalizedFiles)
if (filePaths.length > 0) {
return {
filePath: filePaths.length === 1 ? filePaths[0] : filePaths,
fileType: params.fileType || 'auto',
workspaceId: params._context?.workspaceId,
}
}
}
// If normalization fails, treat as direct URL string
if (typeof fileInput === 'string' && fileInput.trim()) {
return { return {
filePath: fileInput.trim(), filePath: fileInput.trim(),
fileType: params.fileType || 'auto', fileType: params.fileType || 'auto',
@@ -181,21 +222,6 @@ export const FileV2Block: BlockConfig<FileParserOutput> = {
} }
} }
if (Array.isArray(fileInput) && fileInput.length > 0) {
const filePaths = fileInput.map((file) => file.path)
return {
filePath: filePaths.length === 1 ? filePaths[0] : filePaths,
fileType: params.fileType || 'auto',
}
}
if (fileInput?.path) {
return {
filePath: fileInput.path,
fileType: params.fileType || 'auto',
}
}
logger.error('Invalid file input format') logger.error('Invalid file input format')
throw new Error('Invalid file input') throw new Error('Invalid file input')
}, },
@@ -209,7 +235,7 @@ export const FileV2Block: BlockConfig<FileParserOutput> = {
}, },
outputs: { outputs: {
files: { files: {
type: 'json', type: 'file[]',
description: 'Array of parsed file objects with content, metadata, and file properties', description: 'Array of parsed file objects with content, metadata, and file properties',
}, },
combinedContent: { combinedContent: {
@@ -218,3 +244,96 @@ export const FileV2Block: BlockConfig<FileParserOutput> = {
}, },
}, },
} }
export const FileV3Block: BlockConfig<FileParserV3Output> = {
type: 'file_v3',
name: 'File',
description: 'Read and parse multiple files',
longDescription:
'Upload files directly or import from external URLs to get UserFile objects for use in other blocks.',
docsLink: 'https://docs.sim.ai/tools/file',
category: 'tools',
bgColor: '#40916C',
icon: DocumentIcon,
subBlocks: [
{
id: 'file',
title: 'Files',
type: 'file-upload' as SubBlockType,
canonicalParamId: 'fileInput',
acceptedTypes: '*',
placeholder: 'Upload files to process',
multiple: true,
mode: 'basic',
maxSize: 100,
required: true,
},
{
id: 'fileUrl',
title: 'File URL',
type: 'short-input' as SubBlockType,
canonicalParamId: 'fileInput',
placeholder: 'https://example.com/document.pdf',
mode: 'advanced',
required: true,
},
],
tools: {
access: ['file_parser_v3'],
config: {
tool: () => 'file_parser_v3',
params: (params) => {
const fileInput = params.fileInput ?? params.file ?? params.fileUrl ?? params.filePath
if (!fileInput) {
logger.error('No file input provided')
throw new Error('File input is required')
}
// First, try to normalize as file objects (handles JSON strings from advanced mode)
const normalizedFiles = normalizeFileInput(fileInput)
if (normalizedFiles) {
const filePaths = resolveFilePathsFromInput(normalizedFiles)
if (filePaths.length > 0) {
return {
filePath: filePaths.length === 1 ? filePaths[0] : filePaths,
fileType: params.fileType || 'auto',
workspaceId: params._context?.workspaceId,
workflowId: params._context?.workflowId,
executionId: params._context?.executionId,
}
}
}
// If normalization fails, treat as direct URL string
if (typeof fileInput === 'string' && fileInput.trim()) {
return {
filePath: fileInput.trim(),
fileType: params.fileType || 'auto',
workspaceId: params._context?.workspaceId,
workflowId: params._context?.workflowId,
executionId: params._context?.executionId,
}
}
logger.error('Invalid file input format')
throw new Error('File input is required')
},
},
},
inputs: {
fileInput: { type: 'json', description: 'File input (upload or URL)' },
fileUrl: { type: 'string', description: 'External file URL (advanced mode)' },
file: { type: 'json', description: 'Uploaded file data (basic mode)' },
fileType: { type: 'string', description: 'File type' },
},
outputs: {
files: {
type: 'file[]',
description: 'Parsed files as UserFile objects',
},
combinedContent: {
type: 'string',
description: 'All file contents merged into a single text string',
},
},
}
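`resolveFilePathFromInput`, added above, prefers `path`, then `url`, then rebuilds a serve URL from `key`. A few hypothetical inputs to show the fallback order (the values are illustrative only; the URL format comes straight from the helper's key branch):

```typescript
// path wins when present
resolveFilePathFromInput({ path: '/api/files/serve/abc?context=workspace' })
// => '/api/files/serve/abc?context=workspace'

// otherwise fall back to url
resolveFilePathFromInput({ url: 'https://example.com/report.pdf' })
// => 'https://example.com/report.pdf'

// otherwise rebuild a serve URL from the storage key (plus supplied or inferred context)
resolveFilePathFromInput({ key: 'uploads/report.pdf', context: 'execution' })
// => '/api/files/serve/uploads%2Freport.pdf?context=execution'
```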
@@ -1,13 +1,16 @@
import { FirefliesIcon } from '@/components/icons' import { FirefliesIcon } from '@/components/icons'
import { resolveHttpsUrlFromFileInput } from '@/lib/uploads/utils/file-utils'
import type { BlockConfig } from '@/blocks/types' import type { BlockConfig } from '@/blocks/types'
import { AuthMode } from '@/blocks/types' import { AuthMode } from '@/blocks/types'
import { normalizeFileInput } from '@/blocks/utils'
import type { FirefliesResponse } from '@/tools/fireflies/types' import type { FirefliesResponse } from '@/tools/fireflies/types'
import { getTrigger } from '@/triggers' import { getTrigger } from '@/triggers'
export const FirefliesBlock: BlockConfig<FirefliesResponse> = { export const FirefliesBlock: BlockConfig<FirefliesResponse> = {
type: 'fireflies', type: 'fireflies',
name: 'Fireflies', name: 'Fireflies (Legacy)',
description: 'Interact with Fireflies.ai meeting transcripts and recordings', description: 'Interact with Fireflies.ai meeting transcripts and recordings',
hideFromToolbar: true,
authMode: AuthMode.ApiKey, authMode: AuthMode.ApiKey,
triggerAllowed: true, triggerAllowed: true,
longDescription: longDescription:
@@ -587,3 +590,61 @@ Return ONLY the summary text - no quotes, no labels.`,
available: ['fireflies_transcription_complete'], available: ['fireflies_transcription_complete'],
}, },
} }
const firefliesV2SubBlocks = (FirefliesBlock.subBlocks || []).filter(
(subBlock) => subBlock.id !== 'audioUrl'
)
const firefliesV2Inputs = FirefliesBlock.inputs
? Object.fromEntries(Object.entries(FirefliesBlock.inputs).filter(([key]) => key !== 'audioUrl'))
: {}
export const FirefliesV2Block: BlockConfig<FirefliesResponse> = {
...FirefliesBlock,
type: 'fireflies_v2',
name: 'Fireflies',
description: 'Interact with Fireflies.ai meeting transcripts and recordings',
hideFromToolbar: false,
subBlocks: firefliesV2SubBlocks,
tools: {
...FirefliesBlock.tools,
config: {
...FirefliesBlock.tools?.config,
tool: (params) =>
FirefliesBlock.tools?.config?.tool
? FirefliesBlock.tools.config.tool(params)
: params.operation || 'fireflies_list_transcripts',
params: (params) => {
const baseParams = FirefliesBlock.tools?.config?.params
if (!baseParams) {
return params
}
if (params.operation === 'fireflies_upload_audio') {
const audioFile = normalizeFileInput(params.audioFile || params.audioFileReference, {
single: true,
})
if (!audioFile) {
throw new Error('Audio file is required.')
}
const audioUrl = resolveHttpsUrlFromFileInput(audioFile)
if (!audioUrl) {
throw new Error('Audio file must include a https URL.')
}
return baseParams({
...params,
audioUrl,
audioFile: undefined,
audioFileReference: undefined,
})
}
return baseParams(params)
},
},
},
inputs: {
...firefliesV2Inputs,
audioFileReference: { type: 'json', description: 'Audio/video file reference' },
},
}
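Fireflies' upload endpoint only accepts a URL, so the V2 wrapper above normalizes the file first and then insists on an https URL via `resolveHttpsUrlFromFileInput`. A condensed sketch of that gate (only the helper's usage is shown in this diff; its internals are assumed, not reproduced):

```typescript
const audioFile = normalizeFileInput(params.audioFile || params.audioFileReference, {
  single: true,
})
if (!audioFile) throw new Error('Audio file is required.')

// A file without a publicly reachable https URL cannot be handed to the API.
const audioUrl = resolveHttpsUrlFromFileInput(audioFile)
if (!audioUrl) throw new Error('Audio file must include a https URL.')

// Delegate to the legacy params builder with the resolved URL.
return baseParams({ ...params, audioUrl, audioFile: undefined, audioFileReference: undefined })
```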
@@ -1,7 +1,7 @@
import { GmailIcon } from '@/components/icons' import { GmailIcon } from '@/components/icons'
import type { BlockConfig } from '@/blocks/types' import type { BlockConfig } from '@/blocks/types'
import { AuthMode } from '@/blocks/types' import { AuthMode } from '@/blocks/types'
import { createVersionedToolSelector } from '@/blocks/utils' import { createVersionedToolSelector, normalizeFileInput } from '@/blocks/utils'
import type { GmailToolResponse } from '@/tools/gmail/types' import type { GmailToolResponse } from '@/tools/gmail/types'
import { getTrigger } from '@/triggers' import { getTrigger } from '@/triggers'
@@ -418,6 +418,8 @@ Return ONLY the search query - no explanations, no extra text.`,
labelActionMessageId, labelActionMessageId,
labelManagement, labelManagement,
manualLabelManagement, manualLabelManagement,
attachmentFiles,
attachments,
...rest ...rest
} = params } = params
@@ -465,9 +467,13 @@ Return ONLY the search query - no explanations, no extra text.`,
} }
} }
// Normalize attachments for send/draft operations
const normalizedAttachments = normalizeFileInput(attachmentFiles || attachments)
return { return {
...rest, ...rest,
credential, credential,
...(normalizedAttachments && { attachments: normalizedAttachments }),
} }
}, },
}, },
@@ -516,7 +522,7 @@ Return ONLY the search query - no explanations, no extra text.`,
// Tool outputs // Tool outputs
content: { type: 'string', description: 'Response content' }, content: { type: 'string', description: 'Response content' },
metadata: { type: 'json', description: 'Email metadata' }, metadata: { type: 'json', description: 'Email metadata' },
attachments: { type: 'json', description: 'Email attachments array' }, attachments: { type: 'file[]', description: 'Email attachments array' },
// Trigger outputs // Trigger outputs
email_id: { type: 'string', description: 'Gmail message ID' }, email_id: { type: 'string', description: 'Gmail message ID' },
thread_id: { type: 'string', description: 'Gmail thread ID' }, thread_id: { type: 'string', description: 'Gmail thread ID' },
@@ -579,7 +585,7 @@ export const GmailV2Block: BlockConfig<GmailToolResponse> = {
date: { type: 'string', description: 'Date' }, date: { type: 'string', description: 'Date' },
body: { type: 'string', description: 'Email body text (best-effort)' }, body: { type: 'string', description: 'Email body text (best-effort)' },
results: { type: 'json', description: 'Search/read summary results' }, results: { type: 'json', description: 'Search/read summary results' },
attachments: { type: 'json', description: 'Downloaded attachments (if enabled)' }, attachments: { type: 'file[]', description: 'Downloaded attachments (if enabled)' },
// Draft-specific outputs // Draft-specific outputs
draftId: { draftId: {
@@ -1,6 +1,7 @@
import { GoogleDriveIcon } from '@/components/icons' import { GoogleDriveIcon } from '@/components/icons'
import type { BlockConfig } from '@/blocks/types' import type { BlockConfig } from '@/blocks/types'
import { AuthMode } from '@/blocks/types' import { AuthMode } from '@/blocks/types'
import { normalizeFileInput } from '@/blocks/utils'
import type { GoogleDriveResponse } from '@/tools/google_drive/types' import type { GoogleDriveResponse } from '@/tools/google_drive/types'
export const GoogleDriveBlock: BlockConfig<GoogleDriveResponse> = { export const GoogleDriveBlock: BlockConfig<GoogleDriveResponse> = {
@@ -782,6 +783,8 @@ Return ONLY the message text - no subject line, no greetings/signatures, no extr
manualDestinationFolderId, manualDestinationFolderId,
fileSelector, fileSelector,
manualFileId, manualFileId,
file,
fileUpload,
mimeType, mimeType,
shareType, shareType,
starred, starred,
@@ -789,6 +792,9 @@ Return ONLY the message text - no subject line, no greetings/signatures, no extr
...rest ...rest
} = params } = params
// Normalize file input - handles both basic (file-upload) and advanced (short-input) modes
const normalizedFile = normalizeFileInput(file ?? fileUpload, { single: true })
// Use folderSelector if provided, otherwise use manualFolderId // Use folderSelector if provided, otherwise use manualFolderId
const effectiveFolderId = (folderSelector || manualFolderId || '').trim() const effectiveFolderId = (folderSelector || manualFolderId || '').trim()
@@ -813,6 +819,7 @@ Return ONLY the message text - no subject line, no greetings/signatures, no extr
folderId: effectiveFolderId || undefined, folderId: effectiveFolderId || undefined,
fileId: effectiveFileId || undefined, fileId: effectiveFileId || undefined,
destinationFolderId: effectiveDestinationFolderId || undefined, destinationFolderId: effectiveDestinationFolderId || undefined,
file: normalizedFile,
pageSize: rest.pageSize ? Number.parseInt(rest.pageSize as string, 10) : undefined, pageSize: rest.pageSize ? Number.parseInt(rest.pageSize as string, 10) : undefined,
mimeType: mimeType, mimeType: mimeType,
type: shareType, // Map shareType to type for share tool type: shareType, // Map shareType to type for share tool
@@ -861,7 +868,7 @@ Return ONLY the message text - no subject line, no greetings/signatures, no extr
permissionId: { type: 'string', description: 'Permission ID to remove' }, permissionId: { type: 'string', description: 'Permission ID to remove' },
}, },
outputs: { outputs: {
file: { type: 'json', description: 'File metadata or downloaded file data' }, file: { type: 'file', description: 'Downloaded file stored in execution files' },
files: { type: 'json', description: 'List of files' }, files: { type: 'json', description: 'List of files' },
metadata: { type: 'json', description: 'Complete file metadata (from download)' }, metadata: { type: 'json', description: 'Complete file metadata (from download)' },
content: { type: 'string', description: 'File content as text' }, content: { type: 'string', description: 'File content as text' },
@@ -1,6 +1,7 @@
import { GoogleSheetsIcon } from '@/components/icons' import { GoogleSheetsIcon } from '@/components/icons'
import type { BlockConfig } from '@/blocks/types' import type { BlockConfig } from '@/blocks/types'
import { AuthMode } from '@/blocks/types' import { AuthMode } from '@/blocks/types'
import { createVersionedToolSelector } from '@/blocks/utils'
import type { GoogleSheetsResponse, GoogleSheetsV2Response } from '@/tools/google_sheets/types' import type { GoogleSheetsResponse, GoogleSheetsV2Response } from '@/tools/google_sheets/types'
// Legacy block - hidden from toolbar // Legacy block - hidden from toolbar
@@ -681,34 +682,38 @@ Return ONLY the JSON array - no explanations, no markdown, no extra text.`,
'google_sheets_copy_sheet_v2', 'google_sheets_copy_sheet_v2',
], ],
config: { config: {
tool: (params) => { tool: createVersionedToolSelector({
switch (params.operation) { baseToolSelector: (params) => {
case 'read': switch (params.operation) {
return 'google_sheets_read_v2' case 'read':
case 'write': return 'google_sheets_read'
return 'google_sheets_write_v2' case 'write':
case 'update': return 'google_sheets_write'
return 'google_sheets_update_v2' case 'update':
case 'append': return 'google_sheets_update'
return 'google_sheets_append_v2' case 'append':
case 'clear': return 'google_sheets_append'
return 'google_sheets_clear_v2' case 'clear':
case 'get_info': return 'google_sheets_clear'
return 'google_sheets_get_spreadsheet_v2' case 'get_info':
case 'create': return 'google_sheets_get_spreadsheet'
return 'google_sheets_create_spreadsheet_v2' case 'create':
case 'batch_get': return 'google_sheets_create_spreadsheet'
return 'google_sheets_batch_get_v2' case 'batch_get':
case 'batch_update': return 'google_sheets_batch_get'
return 'google_sheets_batch_update_v2' case 'batch_update':
case 'batch_clear': return 'google_sheets_batch_update'
return 'google_sheets_batch_clear_v2' case 'batch_clear':
case 'copy_sheet': return 'google_sheets_batch_clear'
return 'google_sheets_copy_sheet_v2' case 'copy_sheet':
default: return 'google_sheets_copy_sheet'
throw new Error(`Invalid Google Sheets V2 operation: ${params.operation}`) default:
} throw new Error(`Invalid Google Sheets operation: ${params.operation}`)
}, }
},
suffix: '_v2',
fallbackToolId: 'google_sheets_read_v2',
}),
params: (params) => { params: (params) => {
const { const {
credential, credential,

import { GoogleSlidesIcon } from '@/components/icons' import { GoogleSlidesIcon } from '@/components/icons'
import { resolveHttpsUrlFromFileInput } from '@/lib/uploads/utils/file-utils'
import type { BlockConfig } from '@/blocks/types' import type { BlockConfig } from '@/blocks/types'
import { AuthMode } from '@/blocks/types' import { AuthMode } from '@/blocks/types'
import { normalizeFileInput } from '@/blocks/utils'
import type { GoogleSlidesResponse } from '@/tools/google_slides/types' import type { GoogleSlidesResponse } from '@/tools/google_slides/types'
export const GoogleSlidesBlock: BlockConfig<GoogleSlidesResponse> = { export const GoogleSlidesBlock: BlockConfig<GoogleSlidesResponse> = {
type: 'google_slides', type: 'google_slides',
name: 'Google Slides', name: 'Google Slides (Legacy)',
description: 'Read, write, and create presentations', description: 'Read, write, and create presentations',
hideFromToolbar: true,
authMode: AuthMode.OAuth, authMode: AuthMode.OAuth,
longDescription: longDescription:
'Integrate Google Slides into the workflow. Can read, write, create presentations, replace text, add slides, add images, get thumbnails, get page details, delete objects, duplicate objects, reorder slides, create tables, create shapes, and insert text.', 'Integrate Google Slides into the workflow. Can read, write, create presentations, replace text, add slides, add images, get thumbnails, get page details, delete objects, duplicate objects, reorder slides, create tables, create shapes, and insert text.',
@@ -315,12 +318,26 @@ Return ONLY the JSON array - no explanations, no markdown, no extra text.`,
required: true, required: true,
}, },
{ {
id: 'imageUrl', id: 'imageFile',
title: 'Image URL', title: 'Image',
type: 'short-input', type: 'file-upload',
placeholder: 'Public URL of the image (PNG, JPEG, or GIF)', canonicalParamId: 'imageSource',
condition: { field: 'operation', value: 'add_image' }, placeholder: 'Upload image (PNG, JPEG, or GIF)',
mode: 'basic',
multiple: false,
required: true, required: true,
acceptedTypes: '.png,.jpg,.jpeg,.gif',
condition: { field: 'operation', value: 'add_image' },
},
{
id: 'imageUrl',
title: 'Image',
type: 'short-input',
canonicalParamId: 'imageSource',
placeholder: 'Reference image from previous blocks or enter URL',
mode: 'advanced',
required: true,
condition: { field: 'operation', value: 'add_image' },
}, },
{ {
id: 'imageWidth', id: 'imageWidth',
@@ -809,7 +826,9 @@ Return ONLY the text content - no explanations, no markdown formatting markers,
placeholderIdMappings: { type: 'string', description: 'JSON array of placeholder ID mappings' }, placeholderIdMappings: { type: 'string', description: 'JSON array of placeholder ID mappings' },
// Add image operation // Add image operation
pageObjectId: { type: 'string', description: 'Slide object ID for image' }, pageObjectId: { type: 'string', description: 'Slide object ID for image' },
imageUrl: { type: 'string', description: 'Image URL' }, imageFile: { type: 'json', description: 'Uploaded image (UserFile)' },
imageUrl: { type: 'string', description: 'Image URL or reference' },
imageSource: { type: 'json', description: 'Image source (file or URL)' },
imageWidth: { type: 'number', description: 'Image width in points' }, imageWidth: { type: 'number', description: 'Image width in points' },
imageHeight: { type: 'number', description: 'Image height in points' }, imageHeight: { type: 'number', description: 'Image height in points' },
positionX: { type: 'number', description: 'X position in points' }, positionX: { type: 'number', description: 'X position in points' },
@@ -887,3 +906,85 @@ Return ONLY the text content - no explanations, no markdown formatting markers,
text: { type: 'string', description: 'Text that was inserted' }, text: { type: 'string', description: 'Text that was inserted' },
}, },
} }
const googleSlidesV2SubBlocks = (GoogleSlidesBlock.subBlocks || []).flatMap((subBlock) => {
if (subBlock.id === 'imageFile') {
return [
{
...subBlock,
canonicalParamId: 'imageFile',
},
]
}
if (subBlock.id !== 'imageUrl') {
return [subBlock]
}
return [
{
id: 'imageFileReference',
title: 'Image',
type: 'short-input' as const,
canonicalParamId: 'imageFile',
placeholder: 'Reference image from previous blocks',
mode: 'advanced' as const,
required: true,
condition: { field: 'operation', value: 'add_image' },
},
]
})
const googleSlidesV2Inputs = GoogleSlidesBlock.inputs
? Object.fromEntries(
Object.entries(GoogleSlidesBlock.inputs).filter(
([key]) => key !== 'imageUrl' && key !== 'imageSource'
)
)
: {}
export const GoogleSlidesV2Block: BlockConfig<GoogleSlidesResponse> = {
...GoogleSlidesBlock,
type: 'google_slides_v2',
name: 'Google Slides',
description: 'Read, write, and create presentations',
hideFromToolbar: false,
subBlocks: googleSlidesV2SubBlocks,
tools: {
access: GoogleSlidesBlock.tools!.access,
config: {
tool: GoogleSlidesBlock.tools!.config!.tool,
params: (params) => {
const baseParams = GoogleSlidesBlock.tools?.config?.params
if (!baseParams) {
return params
}
if (params.operation === 'add_image') {
const imageInput = params.imageFile || params.imageFileReference || params.imageSource
const fileObject = normalizeFileInput(imageInput, { single: true })
if (!fileObject) {
throw new Error('Image file is required.')
}
const imageUrl = resolveHttpsUrlFromFileInput(fileObject)
if (!imageUrl) {
throw new Error('Image file must include a https URL.')
}
return baseParams({
...params,
imageUrl,
imageFileReference: undefined,
imageSource: undefined,
})
}
return baseParams(params)
},
},
},
inputs: {
...googleSlidesV2Inputs,
imageFileReference: { type: 'json', description: 'Image file reference' },
},
}
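Both the Fireflies and Google Slides V2 blocks above follow the same derivation recipe: spread the legacy block, swap in filtered subBlocks and inputs, and wrap the legacy `params` builder so new file inputs are normalized before delegation. A compressed sketch of that recipe, where `ExampleBlock`, `ExampleV2Block`, and `exampleV2SubBlocks` are placeholders for any legacy block and its derived fields:

```typescript
import type { BlockConfig } from '@/blocks/types'
import { normalizeFileInput } from '@/blocks/utils'

export const ExampleV2Block: BlockConfig = {
  ...ExampleBlock,                 // start from the legacy definition
  type: 'example_v2',
  hideFromToolbar: false,          // the legacy block is hidden instead
  subBlocks: exampleV2SubBlocks,   // legacy subBlocks with URL fields replaced by file fields
  tools: {
    ...ExampleBlock.tools,
    config: {
      ...ExampleBlock.tools?.config,
      params: (params) => {
        const baseParams = ExampleBlock.tools?.config?.params
        if (!baseParams) return params
        // Normalize new-style file inputs, then delegate to the legacy builder.
        const file = normalizeFileInput(params.fileUpload || params.fileReference, { single: true })
        return baseParams({ ...params, file })
      },
    },
  },
}
```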
@@ -526,7 +526,7 @@ Return ONLY the description text - no explanations, no quotes, no extra text.`,
description: description:
'Single hold object (for create_matters_holds or list_matters_holds with holdId)', 'Single hold object (for create_matters_holds or list_matters_holds with holdId)',
}, },
file: { type: 'json', description: 'Downloaded export file (UserFile) from execution files' }, file: { type: 'file', description: 'Downloaded export file (UserFile) from execution files' },
nextPageToken: { nextPageToken: {
type: 'string', type: 'string',
description: 'Token for fetching next page of results (for list operations)', description: 'Token for fetching next page of results (for list operations)',
@@ -149,7 +149,7 @@ export const ImageGeneratorBlock: BlockConfig<DalleResponse> = {
}, },
outputs: { outputs: {
content: { type: 'string', description: 'Generation response' }, content: { type: 'string', description: 'Generation response' },
image: { type: 'string', description: 'Generated image URL' }, image: { type: 'file', description: 'Generated image file (UserFile)' },
metadata: { type: 'json', description: 'Generation metadata' }, metadata: { type: 'json', description: 'Generation metadata' },
}, },
} }
@@ -44,7 +44,7 @@ export const ImapBlock: BlockConfig = {
bodyHtml: { type: 'string', description: 'HTML email body' }, bodyHtml: { type: 'string', description: 'HTML email body' },
mailbox: { type: 'string', description: 'Mailbox/folder where email was received' }, mailbox: { type: 'string', description: 'Mailbox/folder where email was received' },
hasAttachments: { type: 'boolean', description: 'Whether email has attachments' }, hasAttachments: { type: 'boolean', description: 'Whether email has attachments' },
attachments: { type: 'json', description: 'Array of email attachments' }, attachments: { type: 'file[]', description: 'Array of email attachments' },
timestamp: { type: 'string', description: 'Event timestamp' }, timestamp: { type: 'string', description: 'Event timestamp' },
}, },
triggers: { triggers: {
@@ -1,6 +1,7 @@
import { JiraIcon } from '@/components/icons' import { JiraIcon } from '@/components/icons'
import type { BlockConfig } from '@/blocks/types' import type { BlockConfig } from '@/blocks/types'
import { AuthMode } from '@/blocks/types' import { AuthMode } from '@/blocks/types'
import { normalizeFileInput } from '@/blocks/utils'
import type { JiraResponse } from '@/tools/jira/types' import type { JiraResponse } from '@/tools/jira/types'
import { getTrigger } from '@/triggers' import { getTrigger } from '@/triggers'
@@ -34,6 +35,7 @@ export const JiraBlock: BlockConfig<JiraResponse> = {
{ label: 'Update Comment', id: 'update_comment' }, { label: 'Update Comment', id: 'update_comment' },
{ label: 'Delete Comment', id: 'delete_comment' }, { label: 'Delete Comment', id: 'delete_comment' },
{ label: 'Get Attachments', id: 'get_attachments' }, { label: 'Get Attachments', id: 'get_attachments' },
{ label: 'Add Attachment', id: 'add_attachment' },
{ label: 'Delete Attachment', id: 'delete_attachment' }, { label: 'Delete Attachment', id: 'delete_attachment' },
{ label: 'Add Worklog', id: 'add_worklog' }, { label: 'Add Worklog', id: 'add_worklog' },
{ label: 'Get Worklogs', id: 'get_worklogs' }, { label: 'Get Worklogs', id: 'get_worklogs' },
@@ -137,6 +139,7 @@ export const JiraBlock: BlockConfig<JiraResponse> = {
'update_comment', 'update_comment',
'delete_comment', 'delete_comment',
'get_attachments', 'get_attachments',
'add_attachment',
'add_worklog', 'add_worklog',
'get_worklogs', 'get_worklogs',
'update_worklog', 'update_worklog',
@@ -168,6 +171,7 @@ export const JiraBlock: BlockConfig<JiraResponse> = {
'update_comment', 'update_comment',
'delete_comment', 'delete_comment',
'get_attachments', 'get_attachments',
'add_attachment',
'add_worklog', 'add_worklog',
'get_worklogs', 'get_worklogs',
'update_worklog', 'update_worklog',
@@ -407,6 +411,27 @@ Return ONLY the comment text - no explanations.`,
condition: { field: 'operation', value: ['update_comment', 'delete_comment'] }, condition: { field: 'operation', value: ['update_comment', 'delete_comment'] },
}, },
// Attachment fields // Attachment fields
{
id: 'attachmentFiles',
title: 'Attachments',
type: 'file-upload',
canonicalParamId: 'files',
placeholder: 'Upload files',
condition: { field: 'operation', value: 'add_attachment' },
mode: 'basic',
multiple: true,
required: true,
},
{
id: 'files',
title: 'File References',
type: 'short-input',
canonicalParamId: 'files',
placeholder: 'File reference from previous block',
condition: { field: 'operation', value: 'add_attachment' },
mode: 'advanced',
required: true,
},
{ {
id: 'attachmentId', id: 'attachmentId',
title: 'Attachment ID', title: 'Attachment ID',
@@ -576,6 +601,7 @@ Return ONLY the comment text - no explanations.`,
'jira_update_comment', 'jira_update_comment',
'jira_delete_comment', 'jira_delete_comment',
'jira_get_attachments', 'jira_get_attachments',
'jira_add_attachment',
'jira_delete_attachment', 'jira_delete_attachment',
'jira_add_worklog', 'jira_add_worklog',
'jira_get_worklogs', 'jira_get_worklogs',
@@ -623,6 +649,8 @@ Return ONLY the comment text - no explanations.`,
return 'jira_delete_comment' return 'jira_delete_comment'
case 'get_attachments': case 'get_attachments':
return 'jira_get_attachments' return 'jira_get_attachments'
case 'add_attachment':
return 'jira_add_attachment'
case 'delete_attachment': case 'delete_attachment':
return 'jira_delete_attachment' return 'jira_delete_attachment'
case 'add_worklog': case 'add_worklog':
@@ -838,6 +866,20 @@ Return ONLY the comment text - no explanations.`,
issueKey: effectiveIssueKey, issueKey: effectiveIssueKey,
} }
} }
case 'add_attachment': {
if (!effectiveIssueKey) {
throw new Error('Issue Key is required to add attachments.')
}
const normalizedFiles = normalizeFileInput(params.attachmentFiles || params.files)
if (!normalizedFiles || normalizedFiles.length === 0) {
throw new Error('At least one attachment file is required.')
}
return {
...baseParams,
issueKey: effectiveIssueKey,
files: normalizedFiles,
}
}
case 'delete_attachment': { case 'delete_attachment': {
return { return {
...baseParams, ...baseParams,
@@ -982,6 +1024,8 @@ Return ONLY the comment text - no explanations.`,
commentBody: { type: 'string', description: 'Text content for comment operations' }, commentBody: { type: 'string', description: 'Text content for comment operations' },
commentId: { type: 'string', description: 'Comment ID for update/delete operations' }, commentId: { type: 'string', description: 'Comment ID for update/delete operations' },
// Attachment operation inputs // Attachment operation inputs
attachmentFiles: { type: 'json', description: 'Files to attach (UI upload)' },
files: { type: 'array', description: 'Files to attach (UserFile array)' },
attachmentId: { type: 'string', description: 'Attachment ID for delete operation' }, attachmentId: { type: 'string', description: 'Attachment ID for delete operation' },
// Worklog operation inputs // Worklog operation inputs
timeSpentSeconds: { timeSpentSeconds: {
@@ -1052,6 +1096,8 @@ Return ONLY the comment text - no explanations.`,
type: 'json', type: 'json',
description: 'Array of attachments with id, filename, size, mimeType, created, author', description: 'Array of attachments with id, filename, size, mimeType, created, author',
}, },
files: { type: 'file[]', description: 'Uploaded attachment files' },
attachmentIds: { type: 'json', description: 'Uploaded attachment IDs' },
// jira_delete_attachment, jira_delete_comment, jira_delete_issue, jira_delete_worklog, jira_delete_issue_link outputs // jira_delete_attachment, jira_delete_comment, jira_delete_issue, jira_delete_worklog, jira_delete_issue_link outputs
attachmentId: { type: 'string', description: 'Deleted attachment ID' }, attachmentId: { type: 'string', description: 'Deleted attachment ID' },
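The Jira attachment branch above is the multi-file counterpart of the single-file calls earlier in this diff; the only differences are the dropped `{ single: true }` option and the non-empty-array check. Side by side, under the same assumed return shapes (a single UserFile vs. a UserFile array, or `undefined`):

```typescript
// Single-file parameter (e.g. Dropbox upload, Confluence attachment)
const file = normalizeFileInput(params.uploadFile || params.fileRef, { single: true })
if (!file) throw new Error('File is required.')

// Multi-file parameter (e.g. Jira add_attachment, Gmail attachments)
const files = normalizeFileInput(params.attachmentFiles || params.files)
if (!files || files.length === 0) throw new Error('At least one file is required.')
```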
@@ -1,6 +1,7 @@
import { LinearIcon } from '@/components/icons' import { LinearIcon } from '@/components/icons'
import type { BlockConfig } from '@/blocks/types' import type { BlockConfig } from '@/blocks/types'
import { AuthMode } from '@/blocks/types' import { AuthMode } from '@/blocks/types'
import { normalizeFileInput } from '@/blocks/utils'
import type { LinearResponse } from '@/tools/linear/types' import type { LinearResponse } from '@/tools/linear/types'
import { getTrigger } from '@/triggers' import { getTrigger } from '@/triggers'
@@ -668,17 +669,44 @@ Return ONLY the date string in YYYY-MM-DD format - no explanations, no quotes, n
generationType: 'timestamp', generationType: 'timestamp',
}, },
}, },
// Attachment file
{
id: 'attachmentFileUpload',
title: 'Attachment',
type: 'file-upload',
canonicalParamId: 'file',
placeholder: 'Upload attachment',
condition: {
field: 'operation',
value: ['linear_create_attachment'],
},
mode: 'basic',
multiple: false,
},
{
id: 'file',
title: 'File Reference',
type: 'short-input',
canonicalParamId: 'file',
placeholder: 'File reference from previous block',
condition: {
field: 'operation',
value: ['linear_create_attachment'],
},
mode: 'advanced',
},
// Attachment URL // Attachment URL
{ {
id: 'url', id: 'url',
title: 'URL', title: 'URL',
type: 'short-input', type: 'short-input',
placeholder: 'Enter URL', placeholder: 'Enter URL',
required: true, required: false,
condition: { condition: {
field: 'operation', field: 'operation',
value: ['linear_create_attachment'], value: ['linear_create_attachment'],
}, },
mode: 'advanced',
}, },
// Attachment title // Attachment title
{ {
@@ -1742,16 +1770,34 @@ Return ONLY the date string in YYYY-MM-DD format - no explanations, no quotes, n
teamId: effectiveTeamId, teamId: effectiveTeamId,
} }
case 'linear_create_attachment': case 'linear_create_attachment': {
if (!params.issueId?.trim() || !params.url?.trim()) { if (!params.issueId?.trim()) {
throw new Error('Issue ID and URL are required.') throw new Error('Issue ID is required.')
}
// Normalize file inputs - handles JSON stringified values from advanced mode
const attachmentFile =
normalizeFileInput(params.attachmentFileUpload, {
single: true,
errorMessage: 'Attachment file must be a single file.',
}) ||
normalizeFileInput(params.file, {
single: true,
errorMessage: 'Attachment file must be a single file.',
})
const attachmentUrl =
params.url?.trim() ||
(attachmentFile ? (attachmentFile as { url?: string }).url : undefined)
if (!attachmentUrl) {
throw new Error('URL or file is required.')
} }
return { return {
...baseParams, ...baseParams,
issueId: params.issueId.trim(), issueId: params.issueId.trim(),
url: params.url.trim(), url: attachmentUrl,
file: attachmentFile,
title: params.attachmentTitle, title: params.attachmentTitle,
} }
}
case 'linear_list_attachments': case 'linear_list_attachments':
if (!params.issueId?.trim()) { if (!params.issueId?.trim()) {
@@ -2248,6 +2294,8 @@ Return ONLY the date string in YYYY-MM-DD format - no explanations, no quotes, n
endDate: { type: 'string', description: 'End date' }, endDate: { type: 'string', description: 'End date' },
targetDate: { type: 'string', description: 'Target date' }, targetDate: { type: 'string', description: 'Target date' },
url: { type: 'string', description: 'URL' }, url: { type: 'string', description: 'URL' },
attachmentFileUpload: { type: 'json', description: 'File to attach (UI upload)' },
file: { type: 'json', description: 'File to attach (UserFile)' },
attachmentTitle: { type: 'string', description: 'Attachment title' }, attachmentTitle: { type: 'string', description: 'Attachment title' },
attachmentId: { type: 'string', description: 'Attachment identifier' }, attachmentId: { type: 'string', description: 'Attachment identifier' },
relationType: { type: 'string', description: 'Relation type' }, relationType: { type: 'string', description: 'Relation type' },
@@ -1,6 +1,7 @@
import { MicrosoftExcelIcon } from '@/components/icons' import { MicrosoftExcelIcon } from '@/components/icons'
import type { BlockConfig } from '@/blocks/types' import type { BlockConfig } from '@/blocks/types'
import { AuthMode } from '@/blocks/types' import { AuthMode } from '@/blocks/types'
import { createVersionedToolSelector } from '@/blocks/utils'
import type { import type {
MicrosoftExcelResponse, MicrosoftExcelResponse,
MicrosoftExcelV2Response, MicrosoftExcelV2Response,
@@ -489,16 +490,20 @@ Return ONLY the JSON array - no explanations, no markdown, no extra text.`,
tools: { tools: {
access: ['microsoft_excel_read_v2', 'microsoft_excel_write_v2'], access: ['microsoft_excel_read_v2', 'microsoft_excel_write_v2'],
config: { config: {
tool: (params) => { tool: createVersionedToolSelector({
switch (params.operation) { baseToolSelector: (params) => {
case 'read': switch (params.operation) {
return 'microsoft_excel_read_v2' case 'read':
case 'write': return 'microsoft_excel_read'
return 'microsoft_excel_write_v2' case 'write':
default: return 'microsoft_excel_write'
throw new Error(`Invalid Microsoft Excel V2 operation: ${params.operation}`) default:
} throw new Error(`Invalid Microsoft Excel operation: ${params.operation}`)
}, }
},
suffix: '_v2',
fallbackToolId: 'microsoft_excel_read_v2',
}),
params: (params) => { params: (params) => {
const { const {
credential, credential,
@@ -1,6 +1,7 @@
import { MicrosoftTeamsIcon } from '@/components/icons' import { MicrosoftTeamsIcon } from '@/components/icons'
import type { BlockConfig } from '@/blocks/types' import type { BlockConfig } from '@/blocks/types'
import { AuthMode } from '@/blocks/types' import { AuthMode } from '@/blocks/types'
import { normalizeFileInput } from '@/blocks/utils'
import type { MicrosoftTeamsResponse } from '@/tools/microsoft_teams/types' import type { MicrosoftTeamsResponse } from '@/tools/microsoft_teams/types'
import { getTrigger } from '@/triggers' import { getTrigger } from '@/triggers'
@@ -344,9 +345,11 @@ export const MicrosoftTeamsBlock: BlockConfig<MicrosoftTeamsResponse> = {
} }
// Add files if provided // Add files if provided
const fileParam = attachmentFiles || files if (operation === 'write_chat' || operation === 'write_channel') {
if (fileParam && (operation === 'write_chat' || operation === 'write_channel')) { const normalizedFiles = normalizeFileInput(attachmentFiles || files)
baseParams.files = fileParam if (normalizedFiles) {
baseParams.files = normalizedFiles
}
} }
// Add messageId if provided // Add messageId if provided
@@ -462,7 +465,8 @@ export const MicrosoftTeamsBlock: BlockConfig<MicrosoftTeamsResponse> = {
messages: { type: 'json', description: 'Array of message objects' }, messages: { type: 'json', description: 'Array of message objects' },
totalAttachments: { type: 'number', description: 'Total number of attachments' }, totalAttachments: { type: 'number', description: 'Total number of attachments' },
attachmentTypes: { type: 'json', description: 'Array of attachment content types' }, attachmentTypes: { type: 'json', description: 'Array of attachment content types' },
attachments: { type: 'array', description: 'Downloaded message attachments' }, attachments: { type: 'file[]', description: 'Downloaded message attachments' },
files: { type: 'file[]', description: 'Files attached to the message' },
updatedContent: { updatedContent: {
type: 'boolean', type: 'boolean',
description: 'Whether content was successfully updated/sent', description: 'Whether content was successfully updated/sent',
@@ -1,6 +1,6 @@
import { MistralIcon } from '@/components/icons' import { MistralIcon } from '@/components/icons'
import { AuthMode, type BlockConfig, type SubBlockType } from '@/blocks/types' import { AuthMode, type BlockConfig, type SubBlockType } from '@/blocks/types'
import { createVersionedToolSelector } from '@/blocks/utils' import { createVersionedToolSelector, normalizeFileInput } from '@/blocks/utils'
import type { MistralParserOutput } from '@/tools/mistral/types' import type { MistralParserOutput } from '@/tools/mistral/types'
export const MistralParseBlock: BlockConfig<MistralParserOutput> = { export const MistralParseBlock: BlockConfig<MistralParserOutput> = {
@@ -94,7 +94,7 @@ export const MistralParseBlock: BlockConfig<MistralParserOutput> = {
if (!params.fileUpload) { if (!params.fileUpload) {
throw new Error('Please upload a PDF document') throw new Error('Please upload a PDF document')
} }
parameters.fileUpload = params.fileUpload parameters.file = params.fileUpload
} }
let pagesArray: number[] | undefined let pagesArray: number[] | undefined
@@ -159,14 +159,16 @@ export const MistralParseV2Block: BlockConfig<MistralParserOutput> = {
placeholder: 'Upload a PDF document', placeholder: 'Upload a PDF document',
mode: 'basic', mode: 'basic',
maxSize: 50, maxSize: 50,
required: true,
}, },
{ {
id: 'filePath', id: 'fileReference',
title: 'PDF Document', title: 'File Reference',
type: 'short-input' as SubBlockType, type: 'short-input' as SubBlockType,
canonicalParamId: 'document', canonicalParamId: 'document',
placeholder: 'Document URL', placeholder: 'File reference from previous block',
mode: 'advanced', mode: 'advanced',
required: true,
}, },
{ {
id: 'resultType', id: 'resultType',
@@ -211,15 +213,14 @@ export const MistralParseV2Block: BlockConfig<MistralParserOutput> = {
resultType: params.resultType || 'markdown', resultType: params.resultType || 'markdown',
} }
const documentInput = params.fileUpload || params.filePath || params.document const documentInput = normalizeFileInput(
params.fileUpload || params.fileReference || params.document,
{ single: true }
)
if (!documentInput) { if (!documentInput) {
throw new Error('PDF document is required') throw new Error('PDF document is required')
} }
if (typeof documentInput === 'object') { parameters.file = documentInput
parameters.fileUpload = documentInput
} else if (typeof documentInput === 'string') {
parameters.filePath = documentInput.trim()
}
let pagesArray: number[] | undefined let pagesArray: number[] | undefined
if (params.pages && params.pages.trim() !== '') { if (params.pages && params.pages.trim() !== '') {
@@ -254,8 +255,8 @@ export const MistralParseV2Block: BlockConfig<MistralParserOutput> = {
}, },
}, },
inputs: { inputs: {
document: { type: 'json', description: 'Document input (file upload or URL reference)' }, document: { type: 'json', description: 'Document input (file upload or file reference)' },
filePath: { type: 'string', description: 'PDF document URL (advanced mode)' }, fileReference: { type: 'json', description: 'File reference (advanced mode)' },
fileUpload: { type: 'json', description: 'Uploaded PDF file (basic mode)' }, fileUpload: { type: 'json', description: 'Uploaded PDF file (basic mode)' },
apiKey: { type: 'string', description: 'Mistral API key' }, apiKey: { type: 'string', description: 'Mistral API key' },
resultType: { type: 'string', description: 'Output format type' }, resultType: { type: 'string', description: 'Output format type' },
@@ -412,6 +412,7 @@ export const NotionV2Block: BlockConfig<any> = {
'notion_read_database_v2', 'notion_read_database_v2',
'notion_write_v2', 'notion_write_v2',
'notion_create_page_v2', 'notion_create_page_v2',
'notion_update_page_v2',
'notion_query_database_v2', 'notion_query_database_v2',
'notion_search_v2', 'notion_search_v2',
'notion_create_database_v2', 'notion_create_database_v2',
@@ -2,6 +2,7 @@ import { createLogger } from '@sim/logger'
import { MicrosoftOneDriveIcon } from '@/components/icons' import { MicrosoftOneDriveIcon } from '@/components/icons'
import type { BlockConfig } from '@/blocks/types' import type { BlockConfig } from '@/blocks/types'
import { AuthMode } from '@/blocks/types' import { AuthMode } from '@/blocks/types'
import { normalizeFileInput } from '@/blocks/utils'
import type { OneDriveResponse } from '@/tools/onedrive/types' import type { OneDriveResponse } from '@/tools/onedrive/types'
import { normalizeExcelValuesForToolParams } from '@/tools/onedrive/utils' import { normalizeExcelValuesForToolParams } from '@/tools/onedrive/utils'
@@ -352,17 +353,31 @@ export const OneDriveBlock: BlockConfig<OneDriveResponse> = {
} }
}, },
params: (params) => { params: (params) => {
const { credential, folderId, fileId, mimeType, values, downloadFileName, ...rest } = params const {
credential,
folderId,
fileId,
mimeType,
values,
downloadFileName,
file,
fileReference,
...rest
} = params
let normalizedValues: ReturnType<typeof normalizeExcelValuesForToolParams> let normalizedValues: ReturnType<typeof normalizeExcelValuesForToolParams>
if (values !== undefined) { if (values !== undefined) {
normalizedValues = normalizeExcelValuesForToolParams(values) normalizedValues = normalizeExcelValuesForToolParams(values)
} }
// Normalize file input from both basic (file-upload) and advanced (short-input) modes
const normalizedFile = normalizeFileInput(file || fileReference, { single: true })
return { return {
credential, credential,
...rest, ...rest,
values: normalizedValues, values: normalizedValues,
file: normalizedFile,
folderId: folderId || undefined, folderId: folderId || undefined,
fileId: fileId || undefined, fileId: fileId || undefined,
pageSize: rest.pageSize ? Number.parseInt(rest.pageSize as string, 10) : undefined, pageSize: rest.pageSize ? Number.parseInt(rest.pageSize as string, 10) : undefined,
@@ -393,7 +408,7 @@ export const OneDriveBlock: BlockConfig<OneDriveResponse> = {
deleted: { type: 'boolean', description: 'Whether the file was deleted' }, deleted: { type: 'boolean', description: 'Whether the file was deleted' },
fileId: { type: 'string', description: 'The ID of the deleted file' }, fileId: { type: 'string', description: 'The ID of the deleted file' },
file: { file: {
type: 'json', type: 'file',
description: 'The OneDrive file object, including details such as id, name, size, and more.', description: 'The OneDrive file object, including details such as id, name, size, and more.',
}, },
files: { files: {
@@ -1,6 +1,7 @@
import { OutlookIcon } from '@/components/icons' import { OutlookIcon } from '@/components/icons'
import type { BlockConfig } from '@/blocks/types' import type { BlockConfig } from '@/blocks/types'
import { AuthMode } from '@/blocks/types' import { AuthMode } from '@/blocks/types'
import { normalizeFileInput } from '@/blocks/utils'
import type { OutlookResponse } from '@/tools/outlook/types' import type { OutlookResponse } from '@/tools/outlook/types'
import { getTrigger } from '@/triggers' import { getTrigger } from '@/triggers'
@@ -335,12 +336,20 @@ export const OutlookBlock: BlockConfig<OutlookResponse> = {
copyMessageId, copyMessageId,
copyDestinationFolder, copyDestinationFolder,
manualCopyDestinationFolder, manualCopyDestinationFolder,
attachmentFiles,
attachments,
...rest ...rest
} = params } = params
// Handle both selector and manual folder input // Handle both selector and manual folder input
const effectiveFolder = (folder || manualFolder || '').trim() const effectiveFolder = (folder || manualFolder || '').trim()
// Normalize file attachments from either basic (file-upload) or advanced (short-input) mode
const normalizedAttachments = normalizeFileInput(attachmentFiles || attachments)
if (normalizedAttachments) {
rest.attachments = normalizedAttachments
}
if (rest.operation === 'read_outlook') { if (rest.operation === 'read_outlook') {
rest.folder = effectiveFolder || 'INBOX' rest.folder = effectiveFolder || 'INBOX'
} }
@@ -440,7 +449,7 @@ export const OutlookBlock: BlockConfig<OutlookResponse> = {
sentDateTime: { type: 'string', description: 'Email sent timestamp' }, sentDateTime: { type: 'string', description: 'Email sent timestamp' },
hasAttachments: { type: 'boolean', description: 'Whether email has attachments' }, hasAttachments: { type: 'boolean', description: 'Whether email has attachments' },
attachments: { attachments: {
type: 'json', type: 'file[]',
description: 'Email attachments (if includeAttachments is enabled)', description: 'Email attachments (if includeAttachments is enabled)',
}, },
isRead: { type: 'boolean', description: 'Whether email is read' }, isRead: { type: 'boolean', description: 'Whether email is read' },
@@ -804,6 +804,7 @@ Return ONLY the date string in YYYY-MM-DD format - no explanations, no quotes, n
deals: { type: 'json', description: 'Array of deal objects' }, deals: { type: 'json', description: 'Array of deal objects' },
deal: { type: 'json', description: 'Single deal object' }, deal: { type: 'json', description: 'Single deal object' },
files: { type: 'json', description: 'Array of file objects' }, files: { type: 'json', description: 'Array of file objects' },
downloadedFiles: { type: 'file[]', description: 'Downloaded files from Pipedrive' },
messages: { type: 'json', description: 'Array of mail message objects' }, messages: { type: 'json', description: 'Array of mail message objects' },
pipelines: { type: 'json', description: 'Array of pipeline objects' }, pipelines: { type: 'json', description: 'Array of pipeline objects' },
projects: { type: 'json', description: 'Array of project objects' }, projects: { type: 'json', description: 'Array of project objects' },
@@ -1,11 +1,13 @@
import { PulseIcon } from '@/components/icons' import { PulseIcon } from '@/components/icons'
import { AuthMode, type BlockConfig, type SubBlockType } from '@/blocks/types' import { AuthMode, type BlockConfig, type SubBlockType } from '@/blocks/types'
import { createVersionedToolSelector, normalizeFileInput } from '@/blocks/utils'
import type { PulseParserOutput } from '@/tools/pulse/types' import type { PulseParserOutput } from '@/tools/pulse/types'
export const PulseBlock: BlockConfig<PulseParserOutput> = { export const PulseBlock: BlockConfig<PulseParserOutput> = {
type: 'pulse', type: 'pulse',
name: 'Pulse', name: 'Pulse',
description: 'Extract text from documents using Pulse OCR', description: 'Extract text from documents using Pulse OCR',
hideFromToolbar: true,
authMode: AuthMode.ApiKey,
longDescription:
'Integrate Pulse into the workflow. Extract text from PDF documents, images, and Office files via URL or upload.',
@@ -77,7 +79,7 @@ export const PulseBlock: BlockConfig<PulseParserOutput> = {
throw new Error('Document is required')
}
if (typeof documentInput === 'object') {
- parameters.fileUpload = documentInput
+ parameters.file = documentInput
} else if (typeof documentInput === 'string') {
parameters.filePath = documentInput.trim()
}
@@ -126,3 +128,88 @@ export const PulseBlock: BlockConfig<PulseParserOutput> = {
figures: { type: 'json', description: 'Extracted figures if figure extraction was enabled' },
},
}
const pulseV2Inputs = PulseBlock.inputs
? {
...Object.fromEntries(
Object.entries(PulseBlock.inputs).filter(([key]) => key !== 'filePath')
),
fileReference: { type: 'json', description: 'File reference (advanced mode)' },
}
: {}
const pulseV2SubBlocks = (PulseBlock.subBlocks || []).flatMap((subBlock) => {
if (subBlock.id === 'filePath') {
return [] // Remove the old filePath subblock
}
if (subBlock.id === 'fileUpload') {
// Insert fileReference right after fileUpload
return [
subBlock,
{
id: 'fileReference',
title: 'Document',
type: 'short-input' as SubBlockType,
canonicalParamId: 'document',
placeholder: 'File reference',
mode: 'advanced' as const,
},
]
}
return [subBlock]
})
export const PulseV2Block: BlockConfig<PulseParserOutput> = {
...PulseBlock,
type: 'pulse_v2',
name: 'Pulse',
hideFromToolbar: false,
longDescription:
'Integrate Pulse into the workflow. Extract text from PDF documents, images, and Office files via upload or file references.',
subBlocks: pulseV2SubBlocks,
tools: {
access: ['pulse_parser_v2'],
config: {
tool: createVersionedToolSelector({
baseToolSelector: () => 'pulse_parser',
suffix: '_v2',
fallbackToolId: 'pulse_parser_v2',
}),
params: (params) => {
if (!params || !params.apiKey || params.apiKey.trim() === '') {
throw new Error('Pulse API key is required')
}
const parameters: Record<string, unknown> = {
apiKey: params.apiKey.trim(),
}
const normalizedFile = normalizeFileInput(
params.fileUpload || params.fileReference || params.document,
{ single: true }
)
if (!normalizedFile) {
throw new Error('Document file is required')
}
parameters.file = normalizedFile
if (params.pages && params.pages.trim() !== '') {
parameters.pages = params.pages.trim()
}
if (params.chunking && params.chunking.trim() !== '') {
parameters.chunking = params.chunking.trim()
}
if (params.chunkSize && params.chunkSize.trim() !== '') {
const size = Number.parseInt(params.chunkSize.trim(), 10)
if (!Number.isNaN(size) && size > 0) {
parameters.chunkSize = size
}
}
return parameters
},
},
},
inputs: pulseV2Inputs,
}

View File

@@ -1,11 +1,13 @@
import { ReductoIcon } from '@/components/icons'
import { AuthMode, type BlockConfig, type SubBlockType } from '@/blocks/types'
import { createVersionedToolSelector, normalizeFileInput } from '@/blocks/utils'
import type { ReductoParserOutput } from '@/tools/reducto/types'
export const ReductoBlock: BlockConfig<ReductoParserOutput> = {
type: 'reducto',
name: 'Reducto',
description: 'Extract text from PDF documents',
hideFromToolbar: true,
authMode: AuthMode.ApiKey,
longDescription: `Integrate Reducto Parse into the workflow. Can extract text from uploaded PDF documents, or from a URL.`,
docsLink: 'https://docs.sim.ai/tools/reducto',
@@ -74,7 +76,7 @@ export const ReductoBlock: BlockConfig<ReductoParserOutput> = {
}
if (typeof documentInput === 'object') {
- parameters.fileUpload = documentInput
+ parameters.file = documentInput
} else if (typeof documentInput === 'string') {
parameters.filePath = documentInput.trim()
}
@@ -132,3 +134,103 @@ export const ReductoBlock: BlockConfig<ReductoParserOutput> = {
studio_link: { type: 'string', description: 'Link to Reducto studio interface' },
},
}
const reductoV2Inputs = ReductoBlock.inputs
? {
...Object.fromEntries(
Object.entries(ReductoBlock.inputs).filter(([key]) => key !== 'filePath')
),
fileReference: { type: 'json', description: 'File reference (advanced mode)' },
}
: {}
const reductoV2SubBlocks = (ReductoBlock.subBlocks || []).flatMap((subBlock) => {
if (subBlock.id === 'filePath') {
return []
}
if (subBlock.id === 'fileUpload') {
return [
subBlock,
{
id: 'fileReference',
title: 'PDF Document',
type: 'short-input' as SubBlockType,
canonicalParamId: 'document',
placeholder: 'File reference',
mode: 'advanced' as const,
},
]
}
return [subBlock]
})
export const ReductoV2Block: BlockConfig<ReductoParserOutput> = {
...ReductoBlock,
type: 'reducto_v2',
name: 'Reducto',
hideFromToolbar: false,
longDescription: `Integrate Reducto Parse into the workflow. Can extract text from uploaded PDF documents or file references.`,
subBlocks: reductoV2SubBlocks,
tools: {
access: ['reducto_parser_v2'],
config: {
tool: createVersionedToolSelector({
baseToolSelector: () => 'reducto_parser',
suffix: '_v2',
fallbackToolId: 'reducto_parser_v2',
}),
params: (params) => {
if (!params || !params.apiKey || params.apiKey.trim() === '') {
throw new Error('Reducto API key is required')
}
const parameters: Record<string, unknown> = {
apiKey: params.apiKey.trim(),
}
const documentInput = normalizeFileInput(
params.fileUpload || params.fileReference || params.document,
{ single: true }
)
if (!documentInput) {
throw new Error('PDF document file is required')
}
parameters.file = documentInput
let pagesArray: number[] | undefined
if (params.pages && params.pages.trim() !== '') {
try {
pagesArray = params.pages
.split(',')
.map((p: string) => p.trim())
.filter((p: string) => p.length > 0)
.map((p: string) => {
const num = Number.parseInt(p, 10)
if (Number.isNaN(num) || num < 0) {
throw new Error(`Invalid page number: ${p}`)
}
return num
})
if (pagesArray && pagesArray.length === 0) {
pagesArray = undefined
}
} catch (error: unknown) {
const errorMessage = error instanceof Error ? error.message : String(error)
throw new Error(`Page number format error: ${errorMessage}`)
}
}
if (pagesArray && pagesArray.length > 0) {
parameters.pages = pagesArray
}
if (params.tableOutputFormat) {
parameters.tableOutputFormat = params.tableOutputFormat
}
return parameters
},
},
},
inputs: reductoV2Inputs,
}

View File

@@ -1,6 +1,7 @@
import { S3Icon } from '@/components/icons'
import type { BlockConfig } from '@/blocks/types'
import { AuthMode } from '@/blocks/types'
import { normalizeFileInput } from '@/blocks/utils'
import type { S3Response } from '@/tools/s3/types'
export const S3Block: BlockConfig<S3Response> = {
@@ -271,7 +272,8 @@ export const S3Block: BlockConfig<S3Response> = {
throw new Error('Object Key is required for upload')
}
// Use file from uploadFile if in basic mode, otherwise use file reference
- const fileParam = params.uploadFile || params.file
+ // normalizeFileInput handles JSON stringified values from advanced mode
+ const fileParam = normalizeFileInput(params.uploadFile || params.file, { single: true })
return {
accessKeyId: params.accessKeyId,
@@ -418,6 +420,7 @@ export const S3Block: BlockConfig<S3Response> = {
type: 'string',
description: 'S3 URI (s3://bucket/key) for use with other AWS services',
},
file: { type: 'file', description: 'Downloaded file stored in execution files' },
objects: { type: 'json', description: 'List of objects (for list operation)' },
deleted: { type: 'boolean', description: 'Deletion status' },
metadata: { type: 'json', description: 'Operation metadata' },

View File

@@ -1,5 +1,6 @@
import { SendgridIcon } from '@/components/icons'
import type { BlockConfig } from '@/blocks/types'
import { normalizeFileInput } from '@/blocks/utils'
import type { SendMailResult } from '@/tools/sendgrid/types'
export const SendGridBlock: BlockConfig<SendMailResult> = {
@@ -561,9 +562,14 @@ Return ONLY the HTML content.`,
templateGenerations,
listPageSize,
templatePageSize,
attachmentFiles,
attachments,
...rest
} = params
// Normalize attachments for send_mail operation
const normalizedAttachments = normalizeFileInput(attachmentFiles || attachments)
// Map renamed fields back to tool parameter names
return {
...rest,
@@ -577,6 +583,7 @@ Return ONLY the HTML content.`,
...(templateGenerations && { generations: templateGenerations }),
...(listPageSize && { pageSize: listPageSize }),
...(templatePageSize && { pageSize: templatePageSize }),
...(normalizedAttachments && { attachments: normalizedAttachments }),
}
},
},

View File

@@ -1,6 +1,7 @@
import { SftpIcon } from '@/components/icons'
import type { BlockConfig } from '@/blocks/types'
import { AuthMode } from '@/blocks/types'
import { normalizeFileInput } from '@/blocks/utils'
import type { SftpUploadResult } from '@/tools/sftp/types'
export const SftpBlock: BlockConfig<SftpUploadResult> = {
@@ -222,7 +223,7 @@ export const SftpBlock: BlockConfig<SftpUploadResult> = {
return {
...connectionConfig,
remotePath: params.remotePath,
- files: params.files,
+ files: normalizeFileInput(params.uploadFiles || params.files),
overwrite: params.overwrite !== false,
permissions: params.permissions,
}
@@ -293,6 +294,7 @@ export const SftpBlock: BlockConfig<SftpUploadResult> = {
outputs: {
success: { type: 'boolean', description: 'Whether the operation was successful' },
uploadedFiles: { type: 'json', description: 'Array of uploaded file details' },
file: { type: 'file', description: 'Downloaded file stored in execution files' },
fileName: { type: 'string', description: 'Downloaded file name' },
content: { type: 'string', description: 'Downloaded file content' },
size: { type: 'number', description: 'File size in bytes' },

View File

@@ -2,6 +2,7 @@ import { createLogger } from '@sim/logger'
import { MicrosoftSharepointIcon } from '@/components/icons'
import type { BlockConfig } from '@/blocks/types'
import { AuthMode } from '@/blocks/types'
import { normalizeFileInput } from '@/blocks/utils'
import type { SharepointResponse } from '@/tools/sharepoint/types'
const logger = createLogger('SharepointBlock')
@@ -449,7 +450,7 @@ Return ONLY the JSON object - no explanations, no markdown, no extra text.`,
}
// Handle file upload files parameter
- const fileParam = uploadFiles || files
+ const normalizedFiles = normalizeFileInput(uploadFiles || files)
const baseParams: Record<string, any> = {
credential,
siteId: effectiveSiteId || undefined,
@@ -463,8 +464,8 @@ Return ONLY the JSON object - no explanations, no markdown, no extra text.`,
}
// Add files if provided
- if (fileParam) {
-   baseParams.files = fileParam
+ if (normalizedFiles) {
+   baseParams.files = normalizedFiles
}
if (columnDefinitions) {

View File

@@ -1,6 +1,7 @@
import { SlackIcon } from '@/components/icons'
import type { BlockConfig } from '@/blocks/types'
import { AuthMode } from '@/blocks/types'
import { normalizeFileInput } from '@/blocks/utils'
import type { SlackResponse } from '@/tools/slack/types'
import { getTrigger } from '@/triggers'
@@ -620,9 +621,9 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
if (threadTs) {
baseParams.thread_ts = threadTs
}
- const fileParam = attachmentFiles || files
- if (fileParam) {
-   baseParams.files = fileParam
+ const normalizedFiles = normalizeFileInput(attachmentFiles || files)
+ if (normalizedFiles) {
+   baseParams.files = normalizedFiles
}
break
}
@@ -796,6 +797,7 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
type: 'number',
description: 'Number of files uploaded (when files are attached)',
},
files: { type: 'file[]', description: 'Files attached to the message' },
// slack_canvas outputs
canvas_id: { type: 'string', description: 'Canvas identifier for created canvases' },
@@ -859,7 +861,7 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
// slack_download outputs
file: {
- type: 'json',
+ type: 'file',
description: 'Downloaded file stored in execution files',
},

View File

@@ -1,6 +1,7 @@
import { SmtpIcon } from '@/components/icons'
import type { BlockConfig } from '@/blocks/types'
import { AuthMode } from '@/blocks/types'
import { normalizeFileInput } from '@/blocks/utils'
import type { SmtpSendMailResult } from '@/tools/smtp/types'
export const SmtpBlock: BlockConfig<SmtpSendMailResult> = {
@@ -176,7 +177,7 @@ export const SmtpBlock: BlockConfig<SmtpSendMailResult> = {
cc: params.cc,
bcc: params.bcc,
replyTo: params.replyTo,
- attachments: params.attachments,
+ attachments: normalizeFileInput(params.attachmentFiles || params.attachments),
}),
},
},

Some files were not shown because too many files have changed in this diff.