mirror of
https://github.com/simstudioai/sim.git
synced 2026-02-04 19:55:08 -05:00
Compare commits
3 Commits
fix/restor
...
cursor/run
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
f3d7673331 | ||
|
|
40983b4b99 | ||
|
|
68bdf50684 |
@@ -183,109 +183,6 @@ export const {ServiceName}Block: BlockConfig = {
|
||||
}
|
||||
```
|
||||
|
||||
## File Input Handling
|
||||
|
||||
When your block accepts file uploads, use the basic/advanced mode pattern with `normalizeFileInput`.
|
||||
|
||||
### Basic/Advanced File Pattern
|
||||
|
||||
```typescript
|
||||
// Basic mode: Visual file upload
|
||||
{
|
||||
id: 'uploadFile',
|
||||
title: 'File',
|
||||
type: 'file-upload',
|
||||
canonicalParamId: 'file', // Both map to 'file' param
|
||||
placeholder: 'Upload file',
|
||||
mode: 'basic',
|
||||
multiple: false,
|
||||
required: true,
|
||||
condition: { field: 'operation', value: 'upload' },
|
||||
},
|
||||
// Advanced mode: Reference from other blocks
|
||||
{
|
||||
id: 'fileRef',
|
||||
title: 'File',
|
||||
type: 'short-input',
|
||||
canonicalParamId: 'file', // Both map to 'file' param
|
||||
placeholder: 'Reference file (e.g., {{file_block.output}})',
|
||||
mode: 'advanced',
|
||||
required: true,
|
||||
condition: { field: 'operation', value: 'upload' },
|
||||
},
|
||||
```
|
||||
|
||||
**Critical constraints:**
|
||||
- `canonicalParamId` must NOT match any subblock's `id` in the same block
|
||||
- Values are stored under subblock `id`, not `canonicalParamId`
|
||||
|
||||
### Normalizing File Input in tools.config
|
||||
|
||||
Use `normalizeFileInput` to handle all input variants:
|
||||
|
||||
```typescript
|
||||
import { normalizeFileInput } from '@/blocks/utils'
|
||||
|
||||
tools: {
|
||||
access: ['service_upload'],
|
||||
config: {
|
||||
tool: (params) => {
|
||||
// Check all field IDs: uploadFile (basic), fileRef (advanced), fileContent (legacy)
|
||||
const normalizedFile = normalizeFileInput(
|
||||
params.uploadFile || params.fileRef || params.fileContent,
|
||||
{ single: true }
|
||||
)
|
||||
if (normalizedFile) {
|
||||
params.file = normalizedFile
|
||||
}
|
||||
return `service_${params.operation}`
|
||||
},
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
**Why this pattern?**
|
||||
- Values come through as `params.uploadFile` or `params.fileRef` (the subblock IDs)
|
||||
- `canonicalParamId` only controls UI/schema mapping, not runtime values
|
||||
- `normalizeFileInput` handles JSON strings from advanced mode template resolution
|
||||
|
||||
### File Input Types in `inputs`
|
||||
|
||||
Use `type: 'json'` for file inputs:
|
||||
|
||||
```typescript
|
||||
inputs: {
|
||||
uploadFile: { type: 'json', description: 'Uploaded file (UserFile)' },
|
||||
fileRef: { type: 'json', description: 'File reference from previous block' },
|
||||
// Legacy field for backwards compatibility
|
||||
fileContent: { type: 'string', description: 'Legacy: base64 encoded content' },
|
||||
}
|
||||
```
|
||||
|
||||
### Multiple Files
|
||||
|
||||
For multiple file uploads:
|
||||
|
||||
```typescript
|
||||
{
|
||||
id: 'attachments',
|
||||
title: 'Attachments',
|
||||
type: 'file-upload',
|
||||
multiple: true, // Allow multiple files
|
||||
maxSize: 25, // Max size in MB per file
|
||||
acceptedTypes: 'image/*,application/pdf,.doc,.docx',
|
||||
}
|
||||
|
||||
// In tools.config:
|
||||
const normalizedFiles = normalizeFileInput(
|
||||
params.attachments || params.attachmentRefs,
|
||||
// No { single: true } - returns array
|
||||
)
|
||||
if (normalizedFiles) {
|
||||
params.files = normalizedFiles
|
||||
}
|
||||
```
|
||||
|
||||
## Condition Syntax
|
||||
|
||||
Controls when a field is shown based on other field values.
|
||||
|
||||
@@ -457,230 +457,7 @@ You can usually find this in the service's brand/press kit page, or copy it from
|
||||
Paste the SVG code here and I'll convert it to a React component.
|
||||
```
|
||||
|
||||
## File Handling
|
||||
|
||||
When your integration handles file uploads or downloads, follow these patterns to work with `UserFile` objects consistently.
|
||||
|
||||
### What is a UserFile?
|
||||
|
||||
A `UserFile` is the standard file representation in Sim:
|
||||
|
||||
```typescript
|
||||
interface UserFile {
|
||||
id: string // Unique identifier
|
||||
name: string // Original filename
|
||||
url: string // Presigned URL for download
|
||||
size: number // File size in bytes
|
||||
type: string // MIME type (e.g., 'application/pdf')
|
||||
base64?: string // Optional base64 content (if small file)
|
||||
key?: string // Internal storage key
|
||||
context?: object // Storage context metadata
|
||||
}
|
||||
```
|
||||
|
||||
### File Input Pattern (Uploads)
|
||||
|
||||
For tools that accept file uploads, **always route through an internal API endpoint** rather than calling external APIs directly. This ensures proper file content retrieval.
|
||||
|
||||
#### 1. Block SubBlocks for File Input
|
||||
|
||||
Use the basic/advanced mode pattern:
|
||||
|
||||
```typescript
|
||||
// Basic mode: File upload UI
|
||||
{
|
||||
id: 'uploadFile',
|
||||
title: 'File',
|
||||
type: 'file-upload',
|
||||
canonicalParamId: 'file', // Maps to 'file' param
|
||||
placeholder: 'Upload file',
|
||||
mode: 'basic',
|
||||
multiple: false,
|
||||
required: true,
|
||||
condition: { field: 'operation', value: 'upload' },
|
||||
},
|
||||
// Advanced mode: Reference from previous block
|
||||
{
|
||||
id: 'fileRef',
|
||||
title: 'File',
|
||||
type: 'short-input',
|
||||
canonicalParamId: 'file', // Same canonical param
|
||||
placeholder: 'Reference file (e.g., {{file_block.output}})',
|
||||
mode: 'advanced',
|
||||
required: true,
|
||||
condition: { field: 'operation', value: 'upload' },
|
||||
},
|
||||
```
|
||||
|
||||
**Critical:** `canonicalParamId` must NOT match any subblock `id`.
|
||||
|
||||
#### 2. Normalize File Input in Block Config
|
||||
|
||||
In `tools.config.tool`, use `normalizeFileInput` to handle all input variants:
|
||||
|
||||
```typescript
|
||||
import { normalizeFileInput } from '@/blocks/utils'
|
||||
|
||||
tools: {
|
||||
config: {
|
||||
tool: (params) => {
|
||||
// Normalize file from basic (uploadFile), advanced (fileRef), or legacy (fileContent)
|
||||
const normalizedFile = normalizeFileInput(
|
||||
params.uploadFile || params.fileRef || params.fileContent,
|
||||
{ single: true }
|
||||
)
|
||||
if (normalizedFile) {
|
||||
params.file = normalizedFile
|
||||
}
|
||||
return `{service}_${params.operation}`
|
||||
},
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
#### 3. Create Internal API Route
|
||||
|
||||
Create `apps/sim/app/api/tools/{service}/{action}/route.ts`:
|
||||
|
||||
```typescript
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { NextResponse, type NextRequest } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { FileInputSchema, type RawFileInput } from '@/lib/uploads/utils/file-schemas'
|
||||
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
|
||||
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
||||
|
||||
const logger = createLogger('{Service}UploadAPI')
|
||||
|
||||
const RequestSchema = z.object({
|
||||
accessToken: z.string(),
|
||||
file: FileInputSchema.optional().nullable(),
|
||||
// Legacy field for backwards compatibility
|
||||
fileContent: z.string().optional().nullable(),
|
||||
// ... other params
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = generateRequestId()
|
||||
|
||||
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
|
||||
if (!authResult.success) {
|
||||
return NextResponse.json({ success: false, error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const body = await request.json()
|
||||
const data = RequestSchema.parse(body)
|
||||
|
||||
let fileBuffer: Buffer
|
||||
let fileName: string
|
||||
|
||||
// Prefer UserFile input, fall back to legacy base64
|
||||
if (data.file) {
|
||||
const userFiles = processFilesToUserFiles([data.file as RawFileInput], requestId, logger)
|
||||
if (userFiles.length === 0) {
|
||||
return NextResponse.json({ success: false, error: 'Invalid file' }, { status: 400 })
|
||||
}
|
||||
const userFile = userFiles[0]
|
||||
fileBuffer = await downloadFileFromStorage(userFile, requestId, logger)
|
||||
fileName = userFile.name
|
||||
} else if (data.fileContent) {
|
||||
// Legacy: base64 string (backwards compatibility)
|
||||
fileBuffer = Buffer.from(data.fileContent, 'base64')
|
||||
fileName = 'file'
|
||||
} else {
|
||||
return NextResponse.json({ success: false, error: 'File required' }, { status: 400 })
|
||||
}
|
||||
|
||||
// Now call external API with fileBuffer
|
||||
const response = await fetch('https://api.{service}.com/upload', {
|
||||
method: 'POST',
|
||||
headers: { Authorization: `Bearer ${data.accessToken}` },
|
||||
body: new Uint8Array(fileBuffer), // Convert Buffer for fetch
|
||||
})
|
||||
|
||||
// ... handle response
|
||||
}
|
||||
```
|
||||
|
||||
#### 4. Update Tool to Use Internal Route
|
||||
|
||||
```typescript
|
||||
export const {service}UploadTool: ToolConfig<Params, Response> = {
|
||||
id: '{service}_upload',
|
||||
// ...
|
||||
params: {
|
||||
file: { type: 'file', required: false, visibility: 'user-or-llm' },
|
||||
fileContent: { type: 'string', required: false, visibility: 'hidden' }, // Legacy
|
||||
},
|
||||
request: {
|
||||
url: '/api/tools/{service}/upload', // Internal route
|
||||
method: 'POST',
|
||||
body: (params) => ({
|
||||
accessToken: params.accessToken,
|
||||
file: params.file,
|
||||
fileContent: params.fileContent,
|
||||
}),
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
### File Output Pattern (Downloads)
|
||||
|
||||
For tools that return files, use `FileToolProcessor` to store files and return `UserFile` objects.
|
||||
|
||||
#### In Tool transformResponse
|
||||
|
||||
```typescript
|
||||
import { FileToolProcessor } from '@/executor/utils/file-tool-processor'
|
||||
|
||||
transformResponse: async (response, context) => {
|
||||
const data = await response.json()
|
||||
|
||||
// Process file outputs to UserFile objects
|
||||
const fileProcessor = new FileToolProcessor(context)
|
||||
const file = await fileProcessor.processFileData({
|
||||
data: data.content, // base64 or buffer
|
||||
mimeType: data.mimeType,
|
||||
filename: data.filename,
|
||||
})
|
||||
|
||||
return {
|
||||
success: true,
|
||||
output: { file },
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
#### In API Route (for complex file handling)
|
||||
|
||||
```typescript
|
||||
// Return file data that FileToolProcessor can handle
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
output: {
|
||||
file: {
|
||||
data: base64Content,
|
||||
mimeType: 'application/pdf',
|
||||
filename: 'document.pdf',
|
||||
},
|
||||
},
|
||||
})
|
||||
```
|
||||
|
||||
### Key Helpers Reference
|
||||
|
||||
| Helper | Location | Purpose |
|
||||
|--------|----------|---------|
|
||||
| `normalizeFileInput` | `@/blocks/utils` | Normalize file params in block config |
|
||||
| `processFilesToUserFiles` | `@/lib/uploads/utils/file-utils` | Convert raw inputs to UserFile[] |
|
||||
| `downloadFileFromStorage` | `@/lib/uploads/utils/file-utils.server` | Get file Buffer from UserFile |
|
||||
| `FileToolProcessor` | `@/executor/utils/file-tool-processor` | Process tool output files |
|
||||
| `isUserFile` | `@/lib/core/utils/user-file` | Type guard for UserFile objects |
|
||||
| `FileInputSchema` | `@/lib/uploads/utils/file-schemas` | Zod schema for file validation |
|
||||
|
||||
### Common Gotchas
|
||||
## Common Gotchas
|
||||
|
||||
1. **OAuth serviceId must match** - The `serviceId` in oauth-input must match the OAuth provider configuration
|
||||
2. **Tool IDs are snake_case** - `stripe_create_payment`, not `stripeCreatePayment`
|
||||
@@ -688,5 +465,3 @@ return NextResponse.json({
|
||||
4. **Alphabetical ordering** - Keep imports and registry entries alphabetically sorted
|
||||
5. **Required can be conditional** - Use `required: { field: 'op', value: 'create' }` instead of always true
|
||||
6. **DependsOn clears options** - When a dependency changes, selector options are refetched
|
||||
7. **Never pass Buffer directly to fetch** - Convert to `new Uint8Array(buffer)` for TypeScript compatibility
|
||||
8. **Always handle legacy file params** - Keep hidden `fileContent` params for backwards compatibility
|
||||
|
||||
@@ -195,52 +195,6 @@ import { {service}WebhookTrigger } from '@/triggers/{service}'
|
||||
{service}_webhook: {service}WebhookTrigger,
|
||||
```
|
||||
|
||||
## File Handling
|
||||
|
||||
When integrations handle file uploads/downloads, use `UserFile` objects consistently.
|
||||
|
||||
### File Input (Uploads)
|
||||
|
||||
1. **Block subBlocks:** Use basic/advanced mode pattern with `canonicalParamId`
|
||||
2. **Normalize in block config:** Use `normalizeFileInput` from `@/blocks/utils`
|
||||
3. **Internal API route:** Create route that uses `downloadFileFromStorage` to get file content
|
||||
4. **Tool routes to internal API:** Don't call external APIs directly with files
|
||||
|
||||
```typescript
|
||||
// In block tools.config:
|
||||
import { normalizeFileInput } from '@/blocks/utils'
|
||||
|
||||
const normalizedFile = normalizeFileInput(
|
||||
params.uploadFile || params.fileRef || params.fileContent,
|
||||
{ single: true }
|
||||
)
|
||||
if (normalizedFile) params.file = normalizedFile
|
||||
```
|
||||
|
||||
### File Output (Downloads)
|
||||
|
||||
Use `FileToolProcessor` in tool `transformResponse` to store files:
|
||||
|
||||
```typescript
|
||||
import { FileToolProcessor } from '@/executor/utils/file-tool-processor'
|
||||
|
||||
const processor = new FileToolProcessor(context)
|
||||
const file = await processor.processFileData({
|
||||
data: base64Content,
|
||||
mimeType: 'application/pdf',
|
||||
filename: 'doc.pdf',
|
||||
})
|
||||
```
|
||||
|
||||
### Key Helpers
|
||||
|
||||
| Helper | Location | Purpose |
|
||||
|--------|----------|---------|
|
||||
| `normalizeFileInput` | `@/blocks/utils` | Normalize file params in block config |
|
||||
| `processFilesToUserFiles` | `@/lib/uploads/utils/file-utils` | Convert raw inputs to UserFile[] |
|
||||
| `downloadFileFromStorage` | `@/lib/uploads/utils/file-utils.server` | Get Buffer from UserFile |
|
||||
| `FileToolProcessor` | `@/executor/utils/file-tool-processor` | Process tool output files |
|
||||
|
||||
## Checklist
|
||||
|
||||
- [ ] Look up API docs for the service
|
||||
@@ -253,5 +207,3 @@ const file = await processor.processFileData({
|
||||
- [ ] Register block in `blocks/registry.ts`
|
||||
- [ ] (Optional) Create triggers in `triggers/{service}/`
|
||||
- [ ] (Optional) Register triggers in `triggers/registry.ts`
|
||||
- [ ] (If file uploads) Create internal API route with `downloadFileFromStorage`
|
||||
- [ ] (If file uploads) Use `normalizeFileInput` in block config
|
||||
|
||||
@@ -193,52 +193,6 @@ import { {service}WebhookTrigger } from '@/triggers/{service}'
|
||||
{service}_webhook: {service}WebhookTrigger,
|
||||
```
|
||||
|
||||
## File Handling
|
||||
|
||||
When integrations handle file uploads/downloads, use `UserFile` objects consistently.
|
||||
|
||||
### File Input (Uploads)
|
||||
|
||||
1. **Block subBlocks:** Use basic/advanced mode pattern with `canonicalParamId`
|
||||
2. **Normalize in block config:** Use `normalizeFileInput` from `@/blocks/utils`
|
||||
3. **Internal API route:** Create route that uses `downloadFileFromStorage` to get file content
|
||||
4. **Tool routes to internal API:** Don't call external APIs directly with files
|
||||
|
||||
```typescript
|
||||
// In block tools.config:
|
||||
import { normalizeFileInput } from '@/blocks/utils'
|
||||
|
||||
const normalizedFile = normalizeFileInput(
|
||||
params.uploadFile || params.fileRef || params.fileContent,
|
||||
{ single: true }
|
||||
)
|
||||
if (normalizedFile) params.file = normalizedFile
|
||||
```
|
||||
|
||||
### File Output (Downloads)
|
||||
|
||||
Use `FileToolProcessor` in tool `transformResponse` to store files:
|
||||
|
||||
```typescript
|
||||
import { FileToolProcessor } from '@/executor/utils/file-tool-processor'
|
||||
|
||||
const processor = new FileToolProcessor(context)
|
||||
const file = await processor.processFileData({
|
||||
data: base64Content,
|
||||
mimeType: 'application/pdf',
|
||||
filename: 'doc.pdf',
|
||||
})
|
||||
```
|
||||
|
||||
### Key Helpers
|
||||
|
||||
| Helper | Location | Purpose |
|
||||
|--------|----------|---------|
|
||||
| `normalizeFileInput` | `@/blocks/utils` | Normalize file params in block config |
|
||||
| `processFilesToUserFiles` | `@/lib/uploads/utils/file-utils` | Convert raw inputs to UserFile[] |
|
||||
| `downloadFileFromStorage` | `@/lib/uploads/utils/file-utils.server` | Get Buffer from UserFile |
|
||||
| `FileToolProcessor` | `@/executor/utils/file-tool-processor` | Process tool output files |
|
||||
|
||||
## Checklist
|
||||
|
||||
- [ ] Look up API docs for the service
|
||||
@@ -251,5 +205,3 @@ const file = await processor.processFileData({
|
||||
- [ ] Register block in `blocks/registry.ts`
|
||||
- [ ] (Optional) Create triggers in `triggers/{service}/`
|
||||
- [ ] (Optional) Register triggers in `triggers/registry.ts`
|
||||
- [ ] (If file uploads) Create internal API route with `downloadFileFromStorage`
|
||||
- [ ] (If file uploads) Use `normalizeFileInput` in block config
|
||||
|
||||
19
CLAUDE.md
19
CLAUDE.md
@@ -265,23 +265,6 @@ Register in `blocks/registry.ts` (alphabetically).
|
||||
|
||||
**dependsOn:** `['field']` or `{ all: ['a'], any: ['b', 'c'] }`
|
||||
|
||||
**File Input Pattern (basic/advanced mode):**
|
||||
```typescript
|
||||
// Basic: file-upload UI
|
||||
{ id: 'uploadFile', type: 'file-upload', canonicalParamId: 'file', mode: 'basic' },
|
||||
// Advanced: reference from other blocks
|
||||
{ id: 'fileRef', type: 'short-input', canonicalParamId: 'file', mode: 'advanced' },
|
||||
```
|
||||
|
||||
In `tools.config.tool`, normalize with:
|
||||
```typescript
|
||||
import { normalizeFileInput } from '@/blocks/utils'
|
||||
const file = normalizeFileInput(params.uploadFile || params.fileRef, { single: true })
|
||||
if (file) params.file = file
|
||||
```
|
||||
|
||||
For file uploads, create an internal API route (`/api/tools/{service}/upload`) that uses `downloadFileFromStorage` to get file content from `UserFile` objects.
|
||||
|
||||
### 3. Icon (`components/icons.tsx`)
|
||||
|
||||
```typescript
|
||||
@@ -310,5 +293,3 @@ Register in `triggers/registry.ts`.
|
||||
- [ ] Create block in `blocks/blocks/{service}.ts`
|
||||
- [ ] Register block in `blocks/registry.ts`
|
||||
- [ ] (Optional) Create and register triggers
|
||||
- [ ] (If file uploads) Create internal API route with `downloadFileFromStorage`
|
||||
- [ ] (If file uploads) Use `normalizeFileInput` in block config
|
||||
|
||||
@@ -213,25 +213,6 @@ Different subscription plans have different usage limits:
|
||||
| **Team** | $40/seat (pooled, adjustable) | 300 sync, 2,500 async |
|
||||
| **Enterprise** | Custom | Custom |
|
||||
|
||||
## Execution Time Limits
|
||||
|
||||
Workflows have maximum execution time limits based on your subscription plan:
|
||||
|
||||
| Plan | Sync Execution | Async Execution |
|
||||
|------|----------------|-----------------|
|
||||
| **Free** | 5 minutes | 10 minutes |
|
||||
| **Pro** | 50 minutes | 90 minutes |
|
||||
| **Team** | 50 minutes | 90 minutes |
|
||||
| **Enterprise** | 50 minutes | 90 minutes |
|
||||
|
||||
**Sync executions** run immediately and return results directly. These are triggered via the API with `async: false` (default) or through the UI.
|
||||
**Async executions** (triggered via API with `async: true`, webhooks, or schedules) run in the background. Async time limits are up to 2x the sync limit, capped at 90 minutes.
|
||||
|
||||
|
||||
<Callout type="info">
|
||||
If a workflow exceeds its time limit, it will be terminated and marked as failed with a timeout error. Design long-running workflows to use async execution or break them into smaller workflows.
|
||||
</Callout>
|
||||
|
||||
## Billing Model
|
||||
|
||||
Sim uses a **base subscription + overage** billing model:
|
||||
|
||||
@@ -1,168 +0,0 @@
|
||||
---
|
||||
title: Passing Files
|
||||
---
|
||||
|
||||
import { Callout } from 'fumadocs-ui/components/callout'
|
||||
import { Tab, Tabs } from 'fumadocs-ui/components/tabs'
|
||||
|
||||
Sim makes it easy to work with files throughout your workflows. Blocks can receive files, process them, and pass them to other blocks seamlessly.
|
||||
|
||||
## File Objects
|
||||
|
||||
When blocks output files (like Gmail attachments, generated images, or parsed documents), they return a standardized file object:
|
||||
|
||||
```json
|
||||
{
|
||||
"name": "report.pdf",
|
||||
"url": "https://...",
|
||||
"base64": "JVBERi0xLjQK...",
|
||||
"type": "application/pdf",
|
||||
"size": 245678
|
||||
}
|
||||
```
|
||||
|
||||
You can access any of these properties when referencing files from previous blocks.
|
||||
|
||||
## The File Block
|
||||
|
||||
The **File block** is the universal entry point for files in your workflows. It accepts files from any source and outputs standardized file objects that work with all integrations.
|
||||
|
||||
**Inputs:**
|
||||
- **Uploaded files** - Drag and drop or select files directly
|
||||
- **External URLs** - Any publicly accessible file URL
|
||||
- **Files from other blocks** - Pass files from Gmail attachments, Slack downloads, etc.
|
||||
|
||||
**Outputs:**
|
||||
- A list of `UserFile` objects with consistent structure (`name`, `url`, `base64`, `type`, `size`)
|
||||
- `combinedContent` - Extracted text content from all files (for documents)
|
||||
|
||||
**Example usage:**
|
||||
|
||||
```
|
||||
// Get all files from the File block
|
||||
<file.files>
|
||||
|
||||
// Get the first file
|
||||
<file.files[0]>
|
||||
|
||||
// Get combined text content from parsed documents
|
||||
<file.combinedContent>
|
||||
```
|
||||
|
||||
The File block automatically:
|
||||
- Detects file types from URLs and extensions
|
||||
- Extracts text from PDFs, CSVs, and documents
|
||||
- Generates base64 encoding for binary files
|
||||
- Creates presigned URLs for secure access
|
||||
|
||||
Use the File block when you need to normalize files from different sources before passing them to other blocks like Vision, STT, or email integrations.
|
||||
|
||||
## Passing Files Between Blocks
|
||||
|
||||
Reference files from previous blocks using the tag dropdown. Click in any file input field and type `<` to see available outputs.
|
||||
|
||||
**Common patterns:**
|
||||
|
||||
```
|
||||
// Single file from a block
|
||||
<gmail.attachments[0]>
|
||||
|
||||
// Pass the whole file object
|
||||
<file_parser.files[0]>
|
||||
|
||||
// Access specific properties
|
||||
<gmail.attachments[0].name>
|
||||
<gmail.attachments[0].base64>
|
||||
```
|
||||
|
||||
Most blocks accept the full file object and extract what they need automatically. You don't need to manually extract `base64` or `url` in most cases.
|
||||
|
||||
## Triggering Workflows with Files
|
||||
|
||||
When calling a workflow via API that expects file input, include files in your request:
|
||||
|
||||
<Tabs items={['Base64', 'URL']}>
|
||||
<Tab value="Base64">
|
||||
```bash
|
||||
curl -X POST "https://sim.ai/api/workflows/YOUR_WORKFLOW_ID/execute" \
|
||||
-H "Content-Type: application/json" \
|
||||
-H "x-api-key: YOUR_API_KEY" \
|
||||
-d '{
|
||||
"document": {
|
||||
"name": "report.pdf",
|
||||
"base64": "JVBERi0xLjQK...",
|
||||
"type": "application/pdf"
|
||||
}
|
||||
}'
|
||||
```
|
||||
</Tab>
|
||||
<Tab value="URL">
|
||||
```bash
|
||||
curl -X POST "https://sim.ai/api/workflows/YOUR_WORKFLOW_ID/execute" \
|
||||
-H "Content-Type: application/json" \
|
||||
-H "x-api-key: YOUR_API_KEY" \
|
||||
-d '{
|
||||
"document": {
|
||||
"name": "report.pdf",
|
||||
"url": "https://example.com/report.pdf",
|
||||
"type": "application/pdf"
|
||||
}
|
||||
}'
|
||||
```
|
||||
</Tab>
|
||||
</Tabs>
|
||||
|
||||
The workflow's Start block should have an input field configured to receive the file parameter.
|
||||
|
||||
## Receiving Files in API Responses
|
||||
|
||||
When a workflow outputs files, they're included in the response:
|
||||
|
||||
```json
|
||||
{
|
||||
"success": true,
|
||||
"output": {
|
||||
"generatedFile": {
|
||||
"name": "output.png",
|
||||
"url": "https://...",
|
||||
"base64": "iVBORw0KGgo...",
|
||||
"type": "image/png",
|
||||
"size": 34567
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Use `url` for direct downloads or `base64` for inline processing.
|
||||
|
||||
## Blocks That Work with Files
|
||||
|
||||
**File inputs:**
|
||||
- **File** - Parse documents, images, and text files
|
||||
- **Vision** - Analyze images with AI models
|
||||
- **Mistral Parser** - Extract text from PDFs
|
||||
|
||||
**File outputs:**
|
||||
- **Gmail** - Email attachments
|
||||
- **Slack** - Downloaded files
|
||||
- **TTS** - Generated audio files
|
||||
- **Video Generator** - Generated videos
|
||||
- **Image Generator** - Generated images
|
||||
|
||||
**File storage:**
|
||||
- **Supabase** - Upload/download from storage
|
||||
- **S3** - AWS S3 operations
|
||||
- **Google Drive** - Drive file operations
|
||||
- **Dropbox** - Dropbox file operations
|
||||
|
||||
<Callout type="info">
|
||||
Files are automatically available to downstream blocks. The execution engine handles all file transfer and format conversion.
|
||||
</Callout>
|
||||
|
||||
## Best Practices
|
||||
|
||||
1. **Use file objects directly** - Pass the full file object rather than extracting individual properties. Blocks handle the conversion automatically.
|
||||
|
||||
2. **Check file types** - Ensure the file type matches what the receiving block expects. The Vision block needs images, the File block handles documents.
|
||||
|
||||
3. **Consider file size** - Large files increase execution time. For very large files, consider using storage blocks (S3, Supabase) for intermediate storage.
|
||||
@@ -1,3 +1,3 @@
|
||||
{
|
||||
"pages": ["index", "basics", "files", "api", "logging", "costs"]
|
||||
"pages": ["index", "basics", "api", "logging", "costs"]
|
||||
}
|
||||
|
||||
@@ -11,7 +11,7 @@ import {
|
||||
Database,
|
||||
DollarSign,
|
||||
HardDrive,
|
||||
Timer,
|
||||
Workflow,
|
||||
} from 'lucide-react'
|
||||
import { useRouter } from 'next/navigation'
|
||||
import { cn } from '@/lib/core/utils/cn'
|
||||
@@ -44,7 +44,7 @@ interface PricingTier {
|
||||
const FREE_PLAN_FEATURES: PricingFeature[] = [
|
||||
{ icon: DollarSign, text: '$20 usage limit' },
|
||||
{ icon: HardDrive, text: '5GB file storage' },
|
||||
{ icon: Timer, text: '5 min execution limit' },
|
||||
{ icon: Workflow, text: 'Public template access' },
|
||||
{ icon: Database, text: 'Limited log retention' },
|
||||
{ icon: Code2, text: 'CLI/SDK Access' },
|
||||
]
|
||||
|
||||
@@ -16,7 +16,7 @@ import {
|
||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||
import { getBrandConfig } from '@/lib/branding/branding'
|
||||
import { acquireLock, getRedisClient, releaseLock } from '@/lib/core/config/redis'
|
||||
import { validateUrlWithDNS } from '@/lib/core/security/input-validation.server'
|
||||
import { validateExternalUrl } from '@/lib/core/security/input-validation'
|
||||
import { SSE_HEADERS } from '@/lib/core/utils/sse'
|
||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||
import { markExecutionCancelled } from '@/lib/execution/cancellation'
|
||||
@@ -1119,7 +1119,7 @@ async function handlePushNotificationSet(
|
||||
)
|
||||
}
|
||||
|
||||
const urlValidation = await validateUrlWithDNS(
|
||||
const urlValidation = validateExternalUrl(
|
||||
params.pushNotificationConfig.url,
|
||||
'Push notification URL'
|
||||
)
|
||||
|
||||
@@ -21,7 +21,6 @@ const UpdateCreatorProfileSchema = z.object({
|
||||
name: z.string().min(1, 'Name is required').max(100, 'Max 100 characters').optional(),
|
||||
profileImageUrl: z.string().optional().or(z.literal('')),
|
||||
details: CreatorProfileDetailsSchema.optional(),
|
||||
verified: z.boolean().optional(), // Verification status (super users only)
|
||||
})
|
||||
|
||||
// Helper to check if user has permission to manage profile
|
||||
@@ -98,29 +97,11 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
|
||||
return NextResponse.json({ error: 'Profile not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
// Verification changes require super user permission
|
||||
if (data.verified !== undefined) {
|
||||
const { verifyEffectiveSuperUser } = await import('@/lib/templates/permissions')
|
||||
const { effectiveSuperUser } = await verifyEffectiveSuperUser(session.user.id)
|
||||
if (!effectiveSuperUser) {
|
||||
logger.warn(`[${requestId}] Non-super user attempted to change creator verification: ${id}`)
|
||||
return NextResponse.json(
|
||||
{ error: 'Only super users can change verification status' },
|
||||
{ status: 403 }
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
// For non-verified updates, check regular permissions
|
||||
const hasNonVerifiedUpdates =
|
||||
data.name !== undefined || data.profileImageUrl !== undefined || data.details !== undefined
|
||||
|
||||
if (hasNonVerifiedUpdates) {
|
||||
const canEdit = await hasPermission(session.user.id, existing[0])
|
||||
if (!canEdit) {
|
||||
logger.warn(`[${requestId}] User denied permission to update profile: ${id}`)
|
||||
return NextResponse.json({ error: 'Access denied' }, { status: 403 })
|
||||
}
|
||||
// Check permissions
|
||||
const canEdit = await hasPermission(session.user.id, existing[0])
|
||||
if (!canEdit) {
|
||||
logger.warn(`[${requestId}] User denied permission to update profile: ${id}`)
|
||||
return NextResponse.json({ error: 'Access denied' }, { status: 403 })
|
||||
}
|
||||
|
||||
const updateData: any = {
|
||||
@@ -130,7 +111,6 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
|
||||
if (data.name !== undefined) updateData.name = data.name
|
||||
if (data.profileImageUrl !== undefined) updateData.profileImageUrl = data.profileImageUrl
|
||||
if (data.details !== undefined) updateData.details = data.details
|
||||
if (data.verified !== undefined) updateData.verified = data.verified
|
||||
|
||||
const updated = await db
|
||||
.update(templateCreators)
|
||||
|
||||
113
apps/sim/app/api/creators/[id]/verify/route.ts
Normal file
113
apps/sim/app/api/creators/[id]/verify/route.ts
Normal file
@@ -0,0 +1,113 @@
|
||||
import { db } from '@sim/db'
|
||||
import { templateCreators } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { verifyEffectiveSuperUser } from '@/lib/templates/permissions'
|
||||
|
||||
const logger = createLogger('CreatorVerificationAPI')
|
||||
|
||||
export const revalidate = 0
|
||||
|
||||
// POST /api/creators/[id]/verify - Verify a creator (super users only)
|
||||
export async function POST(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
|
||||
const requestId = generateRequestId()
|
||||
const { id } = await params
|
||||
|
||||
try {
|
||||
const session = await getSession()
|
||||
if (!session?.user?.id) {
|
||||
logger.warn(`[${requestId}] Unauthorized verification attempt for creator: ${id}`)
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
// Check if user is a super user
|
||||
const { effectiveSuperUser } = await verifyEffectiveSuperUser(session.user.id)
|
||||
if (!effectiveSuperUser) {
|
||||
logger.warn(`[${requestId}] Non-super user attempted to verify creator: ${id}`)
|
||||
return NextResponse.json({ error: 'Only super users can verify creators' }, { status: 403 })
|
||||
}
|
||||
|
||||
// Check if creator exists
|
||||
const existingCreator = await db
|
||||
.select()
|
||||
.from(templateCreators)
|
||||
.where(eq(templateCreators.id, id))
|
||||
.limit(1)
|
||||
|
||||
if (existingCreator.length === 0) {
|
||||
logger.warn(`[${requestId}] Creator not found for verification: ${id}`)
|
||||
return NextResponse.json({ error: 'Creator not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
// Update creator verified status to true
|
||||
await db
|
||||
.update(templateCreators)
|
||||
.set({ verified: true, updatedAt: new Date() })
|
||||
.where(eq(templateCreators.id, id))
|
||||
|
||||
logger.info(`[${requestId}] Creator verified: ${id} by super user: ${session.user.id}`)
|
||||
|
||||
return NextResponse.json({
|
||||
message: 'Creator verified successfully',
|
||||
creatorId: id,
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error(`[${requestId}] Error verifying creator ${id}`, error)
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
|
||||
// DELETE /api/creators/[id]/verify - Unverify a creator (super users only)
|
||||
export async function DELETE(
|
||||
request: NextRequest,
|
||||
{ params }: { params: Promise<{ id: string }> }
|
||||
) {
|
||||
const requestId = generateRequestId()
|
||||
const { id } = await params
|
||||
|
||||
try {
|
||||
const session = await getSession()
|
||||
if (!session?.user?.id) {
|
||||
logger.warn(`[${requestId}] Unauthorized unverification attempt for creator: ${id}`)
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
// Check if user is a super user
|
||||
const { effectiveSuperUser } = await verifyEffectiveSuperUser(session.user.id)
|
||||
if (!effectiveSuperUser) {
|
||||
logger.warn(`[${requestId}] Non-super user attempted to unverify creator: ${id}`)
|
||||
return NextResponse.json({ error: 'Only super users can unverify creators' }, { status: 403 })
|
||||
}
|
||||
|
||||
// Check if creator exists
|
||||
const existingCreator = await db
|
||||
.select()
|
||||
.from(templateCreators)
|
||||
.where(eq(templateCreators.id, id))
|
||||
.limit(1)
|
||||
|
||||
if (existingCreator.length === 0) {
|
||||
logger.warn(`[${requestId}] Creator not found for unverification: ${id}`)
|
||||
return NextResponse.json({ error: 'Creator not found' }, { status: 404 })
|
||||
}
|
||||
|
||||
// Update creator verified status to false
|
||||
await db
|
||||
.update(templateCreators)
|
||||
.set({ verified: false, updatedAt: new Date() })
|
||||
.where(eq(templateCreators.id, id))
|
||||
|
||||
logger.info(`[${requestId}] Creator unverified: ${id} by super user: ${session.user.id}`)
|
||||
|
||||
return NextResponse.json({
|
||||
message: 'Creator unverified successfully',
|
||||
creatorId: id,
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error(`[${requestId}] Error unverifying creator ${id}`, error)
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
@@ -1,16 +1,13 @@
|
||||
import { asyncJobs, db } from '@sim/db'
|
||||
import { db } from '@sim/db'
|
||||
import { workflowExecutionLogs } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, eq, inArray, lt, sql } from 'drizzle-orm'
|
||||
import { and, eq, lt, sql } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { verifyCronAuth } from '@/lib/auth/internal'
|
||||
import { JOB_RETENTION_HOURS, JOB_STATUS } from '@/lib/core/async-jobs'
|
||||
import { getMaxExecutionTimeout } from '@/lib/core/execution-limits'
|
||||
|
||||
const logger = createLogger('CleanupStaleExecutions')
|
||||
|
||||
const STALE_THRESHOLD_MS = getMaxExecutionTimeout() + 5 * 60 * 1000
|
||||
const STALE_THRESHOLD_MINUTES = Math.ceil(STALE_THRESHOLD_MS / 60000)
|
||||
const STALE_THRESHOLD_MINUTES = 30
|
||||
const MAX_INT32 = 2_147_483_647
|
||||
|
||||
export async function GET(request: NextRequest) {
|
||||
@@ -81,102 +78,12 @@ export async function GET(request: NextRequest) {
|
||||
|
||||
logger.info(`Stale execution cleanup completed. Cleaned: ${cleaned}, Failed: ${failed}`)
|
||||
|
||||
// Clean up stale async jobs (stuck in processing)
|
||||
let asyncJobsMarkedFailed = 0
|
||||
|
||||
try {
|
||||
const staleAsyncJobs = await db
|
||||
.update(asyncJobs)
|
||||
.set({
|
||||
status: JOB_STATUS.FAILED,
|
||||
completedAt: new Date(),
|
||||
error: `Job terminated: stuck in processing for more than ${STALE_THRESHOLD_MINUTES} minutes`,
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where(
|
||||
and(eq(asyncJobs.status, JOB_STATUS.PROCESSING), lt(asyncJobs.startedAt, staleThreshold))
|
||||
)
|
||||
.returning({ id: asyncJobs.id })
|
||||
|
||||
asyncJobsMarkedFailed = staleAsyncJobs.length
|
||||
if (asyncJobsMarkedFailed > 0) {
|
||||
logger.info(`Marked ${asyncJobsMarkedFailed} stale async jobs as failed`)
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error('Failed to clean up stale async jobs:', {
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
})
|
||||
}
|
||||
|
||||
// Clean up stale pending jobs (never started, e.g., due to server crash before startJob())
|
||||
let stalePendingJobsMarkedFailed = 0
|
||||
|
||||
try {
|
||||
const stalePendingJobs = await db
|
||||
.update(asyncJobs)
|
||||
.set({
|
||||
status: JOB_STATUS.FAILED,
|
||||
completedAt: new Date(),
|
||||
error: `Job terminated: stuck in pending state for more than ${STALE_THRESHOLD_MINUTES} minutes (never started)`,
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where(
|
||||
and(eq(asyncJobs.status, JOB_STATUS.PENDING), lt(asyncJobs.createdAt, staleThreshold))
|
||||
)
|
||||
.returning({ id: asyncJobs.id })
|
||||
|
||||
stalePendingJobsMarkedFailed = stalePendingJobs.length
|
||||
if (stalePendingJobsMarkedFailed > 0) {
|
||||
logger.info(`Marked ${stalePendingJobsMarkedFailed} stale pending jobs as failed`)
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error('Failed to clean up stale pending jobs:', {
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
})
|
||||
}
|
||||
|
||||
// Delete completed/failed jobs older than retention period
|
||||
const retentionThreshold = new Date(Date.now() - JOB_RETENTION_HOURS * 60 * 60 * 1000)
|
||||
let asyncJobsDeleted = 0
|
||||
|
||||
try {
|
||||
const deletedJobs = await db
|
||||
.delete(asyncJobs)
|
||||
.where(
|
||||
and(
|
||||
inArray(asyncJobs.status, [JOB_STATUS.COMPLETED, JOB_STATUS.FAILED]),
|
||||
lt(asyncJobs.completedAt, retentionThreshold)
|
||||
)
|
||||
)
|
||||
.returning({ id: asyncJobs.id })
|
||||
|
||||
asyncJobsDeleted = deletedJobs.length
|
||||
if (asyncJobsDeleted > 0) {
|
||||
logger.info(
|
||||
`Deleted ${asyncJobsDeleted} old async jobs (retention: ${JOB_RETENTION_HOURS}h)`
|
||||
)
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error('Failed to delete old async jobs:', {
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
})
|
||||
}
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
executions: {
|
||||
found: staleExecutions.length,
|
||||
cleaned,
|
||||
failed,
|
||||
thresholdMinutes: STALE_THRESHOLD_MINUTES,
|
||||
},
|
||||
asyncJobs: {
|
||||
staleProcessingMarkedFailed: asyncJobsMarkedFailed,
|
||||
stalePendingMarkedFailed: stalePendingJobsMarkedFailed,
|
||||
oldDeleted: asyncJobsDeleted,
|
||||
staleThresholdMinutes: STALE_THRESHOLD_MINUTES,
|
||||
retentionHours: JOB_RETENTION_HOURS,
|
||||
},
|
||||
found: staleExecutions.length,
|
||||
cleaned,
|
||||
failed,
|
||||
thresholdMinutes: STALE_THRESHOLD_MINUTES,
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error('Error in stale execution cleanup job:', error)
|
||||
|
||||
@@ -6,11 +6,7 @@ import { createLogger } from '@sim/logger'
|
||||
import binaryExtensionsList from 'binary-extensions'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||
import {
|
||||
secureFetchWithPinnedIP,
|
||||
validateUrlWithDNS,
|
||||
} from '@/lib/core/security/input-validation.server'
|
||||
import { sanitizeUrlForLog } from '@/lib/core/utils/logging'
|
||||
import { secureFetchWithPinnedIP, validateUrlWithDNS } from '@/lib/core/security/input-validation'
|
||||
import { isSupportedFileType, parseFile } from '@/lib/file-parsers'
|
||||
import { isUsingCloudStorage, type StorageContext, StorageService } from '@/lib/uploads'
|
||||
import { uploadExecutionFile } from '@/lib/uploads/contexts/execution'
|
||||
@@ -23,7 +19,6 @@ import {
|
||||
getMimeTypeFromExtension,
|
||||
getViewerUrl,
|
||||
inferContextFromKey,
|
||||
isInternalFileUrl,
|
||||
} from '@/lib/uploads/utils/file-utils'
|
||||
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
|
||||
import { verifyFileAccess } from '@/app/api/files/authorization'
|
||||
@@ -220,7 +215,7 @@ async function parseFileSingle(
|
||||
}
|
||||
}
|
||||
|
||||
if (isInternalFileUrl(filePath)) {
|
||||
if (filePath.includes('/api/files/serve/')) {
|
||||
return handleCloudFile(filePath, fileType, undefined, userId, executionContext)
|
||||
}
|
||||
|
||||
@@ -251,7 +246,7 @@ function validateFilePath(filePath: string): { isValid: boolean; error?: string
|
||||
return { isValid: false, error: 'Invalid path: tilde character not allowed' }
|
||||
}
|
||||
|
||||
if (filePath.startsWith('/') && !isInternalFileUrl(filePath)) {
|
||||
if (filePath.startsWith('/') && !filePath.startsWith('/api/files/serve/')) {
|
||||
return { isValid: false, error: 'Path outside allowed directory' }
|
||||
}
|
||||
|
||||
@@ -425,7 +420,7 @@ async function handleExternalUrl(
|
||||
|
||||
return parseResult
|
||||
} catch (error) {
|
||||
logger.error(`Error handling external URL ${sanitizeUrlForLog(url)}:`, error)
|
||||
logger.error(`Error handling external URL ${url}:`, error)
|
||||
return {
|
||||
success: false,
|
||||
error: `Error fetching URL: ${(error as Error).message}`,
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { runs } from '@trigger.dev/sdk'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||
import { getJobQueue, JOB_STATUS } from '@/lib/core/async-jobs'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { createErrorResponse } from '@/app/api/workflows/utils'
|
||||
|
||||
@@ -15,6 +15,8 @@ export async function GET(
|
||||
const requestId = generateRequestId()
|
||||
|
||||
try {
|
||||
logger.debug(`[${requestId}] Getting status for task: ${taskId}`)
|
||||
|
||||
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })
|
||||
if (!authResult.success || !authResult.userId) {
|
||||
logger.warn(`[${requestId}] Unauthorized task status request`)
|
||||
@@ -23,60 +25,76 @@ export async function GET(
|
||||
|
||||
const authenticatedUserId = authResult.userId
|
||||
|
||||
const jobQueue = await getJobQueue()
|
||||
const job = await jobQueue.getJob(taskId)
|
||||
const run = await runs.retrieve(taskId)
|
||||
|
||||
if (!job) {
|
||||
return createErrorResponse('Task not found', 404)
|
||||
}
|
||||
logger.debug(`[${requestId}] Task ${taskId} status: ${run.status}`)
|
||||
|
||||
if (job.metadata?.workflowId) {
|
||||
const payload = run.payload as any
|
||||
if (payload?.workflowId) {
|
||||
const { verifyWorkflowAccess } = await import('@/socket/middleware/permissions')
|
||||
const accessCheck = await verifyWorkflowAccess(
|
||||
authenticatedUserId,
|
||||
job.metadata.workflowId as string
|
||||
)
|
||||
const accessCheck = await verifyWorkflowAccess(authenticatedUserId, payload.workflowId)
|
||||
if (!accessCheck.hasAccess) {
|
||||
logger.warn(`[${requestId}] Access denied to workflow ${job.metadata.workflowId}`)
|
||||
logger.warn(`[${requestId}] User ${authenticatedUserId} denied access to task ${taskId}`, {
|
||||
workflowId: payload.workflowId,
|
||||
})
|
||||
return createErrorResponse('Access denied', 403)
|
||||
}
|
||||
logger.debug(`[${requestId}] User ${authenticatedUserId} has access to task ${taskId}`)
|
||||
} else {
|
||||
if (payload?.userId && payload.userId !== authenticatedUserId) {
|
||||
logger.warn(
|
||||
`[${requestId}] User ${authenticatedUserId} attempted to access task ${taskId} owned by ${payload.userId}`
|
||||
)
|
||||
return createErrorResponse('Access denied', 403)
|
||||
}
|
||||
if (!payload?.userId) {
|
||||
logger.warn(
|
||||
`[${requestId}] Task ${taskId} has no ownership information in payload. Denying access for security.`
|
||||
)
|
||||
return createErrorResponse('Access denied', 403)
|
||||
}
|
||||
} else if (job.metadata?.userId && job.metadata.userId !== authenticatedUserId) {
|
||||
logger.warn(`[${requestId}] Access denied to user ${job.metadata.userId}`)
|
||||
return createErrorResponse('Access denied', 403)
|
||||
} else if (!job.metadata?.userId && !job.metadata?.workflowId) {
|
||||
logger.warn(`[${requestId}] Access denied to job ${taskId}`)
|
||||
return createErrorResponse('Access denied', 403)
|
||||
}
|
||||
|
||||
const mappedStatus = job.status === JOB_STATUS.PENDING ? 'queued' : job.status
|
||||
const statusMap = {
|
||||
QUEUED: 'queued',
|
||||
WAITING_FOR_DEPLOY: 'queued',
|
||||
EXECUTING: 'processing',
|
||||
RESCHEDULED: 'processing',
|
||||
FROZEN: 'processing',
|
||||
COMPLETED: 'completed',
|
||||
CANCELED: 'cancelled',
|
||||
FAILED: 'failed',
|
||||
CRASHED: 'failed',
|
||||
INTERRUPTED: 'failed',
|
||||
SYSTEM_FAILURE: 'failed',
|
||||
EXPIRED: 'failed',
|
||||
} as const
|
||||
|
||||
const mappedStatus = statusMap[run.status as keyof typeof statusMap] || 'unknown'
|
||||
|
||||
const response: any = {
|
||||
success: true,
|
||||
taskId,
|
||||
status: mappedStatus,
|
||||
metadata: {
|
||||
startedAt: job.startedAt,
|
||||
startedAt: run.startedAt,
|
||||
},
|
||||
}
|
||||
|
||||
if (job.status === JOB_STATUS.COMPLETED) {
|
||||
response.output = job.output
|
||||
response.metadata.completedAt = job.completedAt
|
||||
if (job.startedAt && job.completedAt) {
|
||||
response.metadata.duration = job.completedAt.getTime() - job.startedAt.getTime()
|
||||
}
|
||||
if (mappedStatus === 'completed') {
|
||||
response.output = run.output // This contains the workflow execution results
|
||||
response.metadata.completedAt = run.finishedAt
|
||||
response.metadata.duration = run.durationMs
|
||||
}
|
||||
|
||||
if (job.status === JOB_STATUS.FAILED) {
|
||||
response.error = job.error
|
||||
response.metadata.completedAt = job.completedAt
|
||||
if (job.startedAt && job.completedAt) {
|
||||
response.metadata.duration = job.completedAt.getTime() - job.startedAt.getTime()
|
||||
}
|
||||
if (mappedStatus === 'failed') {
|
||||
response.error = run.error
|
||||
response.metadata.completedAt = run.finishedAt
|
||||
response.metadata.duration = run.durationMs
|
||||
}
|
||||
|
||||
if (job.status === JOB_STATUS.PROCESSING || job.status === JOB_STATUS.PENDING) {
|
||||
response.estimatedDuration = 180000
|
||||
if (mappedStatus === 'processing' || mappedStatus === 'queued') {
|
||||
response.estimatedDuration = 180000 // 3 minutes max from our config
|
||||
}
|
||||
|
||||
return NextResponse.json(response)
|
||||
|
||||
@@ -21,7 +21,6 @@ import { and, eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||
import { generateInternalToken } from '@/lib/auth/internal'
|
||||
import { getMaxExecutionTimeout } from '@/lib/core/execution-limits'
|
||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||
|
||||
const logger = createLogger('WorkflowMcpServeAPI')
|
||||
@@ -265,7 +264,7 @@ async function handleToolsCall(
|
||||
method: 'POST',
|
||||
headers,
|
||||
body: JSON.stringify({ input: params.arguments || {}, triggerType: 'mcp' }),
|
||||
signal: AbortSignal.timeout(getMaxExecutionTimeout()),
|
||||
signal: AbortSignal.timeout(600000), // 10 minute timeout
|
||||
})
|
||||
|
||||
const executeResult = await response.json()
|
||||
|
||||
@@ -1,8 +1,5 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import type { NextRequest } from 'next/server'
|
||||
import { getHighestPrioritySubscription } from '@/lib/billing/core/plan'
|
||||
import { getExecutionTimeout } from '@/lib/core/execution-limits'
|
||||
import type { SubscriptionPlan } from '@/lib/core/rate-limiter/types'
|
||||
import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware'
|
||||
import { mcpService } from '@/lib/mcp/service'
|
||||
import type { McpTool, McpToolCall, McpToolResult } from '@/lib/mcp/types'
|
||||
@@ -10,6 +7,7 @@ import {
|
||||
categorizeError,
|
||||
createMcpErrorResponse,
|
||||
createMcpSuccessResponse,
|
||||
MCP_CONSTANTS,
|
||||
validateStringParam,
|
||||
} from '@/lib/mcp/utils'
|
||||
|
||||
@@ -173,16 +171,13 @@ export const POST = withMcpAuth('read')(
|
||||
arguments: args,
|
||||
}
|
||||
|
||||
const userSubscription = await getHighestPrioritySubscription(userId)
|
||||
const executionTimeout = getExecutionTimeout(
|
||||
userSubscription?.plan as SubscriptionPlan | undefined,
|
||||
'sync'
|
||||
)
|
||||
|
||||
const result = await Promise.race([
|
||||
mcpService.executeTool(userId, serverId, toolCall, workspaceId),
|
||||
new Promise<never>((_, reject) =>
|
||||
setTimeout(() => reject(new Error('Tool execution timeout')), executionTimeout)
|
||||
setTimeout(
|
||||
() => reject(new Error('Tool execution timeout')),
|
||||
MCP_CONSTANTS.EXECUTION_TIMEOUT
|
||||
)
|
||||
),
|
||||
])
|
||||
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
import { db, workflowDeploymentVersion, workflowSchedule } from '@sim/db'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { tasks } from '@trigger.dev/sdk'
|
||||
import { and, eq, isNull, lt, lte, not, or, sql } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { verifyCronAuth } from '@/lib/auth/internal'
|
||||
import { getJobQueue, shouldExecuteInline } from '@/lib/core/async-jobs'
|
||||
import { isTriggerDevEnabled } from '@/lib/core/config/feature-flags'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { executeScheduleJob } from '@/background/schedule-execution'
|
||||
|
||||
@@ -54,67 +55,72 @@ export async function GET(request: NextRequest) {
|
||||
logger.debug(`[${requestId}] Successfully queried schedules: ${dueSchedules.length} found`)
|
||||
logger.info(`[${requestId}] Processing ${dueSchedules.length} due scheduled workflows`)
|
||||
|
||||
const jobQueue = await getJobQueue()
|
||||
if (isTriggerDevEnabled) {
|
||||
const triggerPromises = dueSchedules.map(async (schedule) => {
|
||||
const queueTime = schedule.lastQueuedAt ?? queuedAt
|
||||
|
||||
const queuePromises = dueSchedules.map(async (schedule) => {
|
||||
const queueTime = schedule.lastQueuedAt ?? queuedAt
|
||||
try {
|
||||
const payload = {
|
||||
scheduleId: schedule.id,
|
||||
workflowId: schedule.workflowId,
|
||||
blockId: schedule.blockId || undefined,
|
||||
cronExpression: schedule.cronExpression || undefined,
|
||||
lastRanAt: schedule.lastRanAt?.toISOString(),
|
||||
failedCount: schedule.failedCount || 0,
|
||||
now: queueTime.toISOString(),
|
||||
scheduledFor: schedule.nextRunAt?.toISOString(),
|
||||
}
|
||||
|
||||
const payload = {
|
||||
scheduleId: schedule.id,
|
||||
workflowId: schedule.workflowId,
|
||||
blockId: schedule.blockId || undefined,
|
||||
cronExpression: schedule.cronExpression || undefined,
|
||||
lastRanAt: schedule.lastRanAt?.toISOString(),
|
||||
failedCount: schedule.failedCount || 0,
|
||||
now: queueTime.toISOString(),
|
||||
scheduledFor: schedule.nextRunAt?.toISOString(),
|
||||
}
|
||||
|
||||
try {
|
||||
const jobId = await jobQueue.enqueue('schedule-execution', payload, {
|
||||
metadata: { workflowId: schedule.workflowId },
|
||||
})
|
||||
logger.info(
|
||||
`[${requestId}] Queued schedule execution task ${jobId} for workflow ${schedule.workflowId}`
|
||||
)
|
||||
|
||||
if (shouldExecuteInline()) {
|
||||
void (async () => {
|
||||
try {
|
||||
await jobQueue.startJob(jobId)
|
||||
const output = await executeScheduleJob(payload)
|
||||
await jobQueue.completeJob(jobId, output)
|
||||
} catch (error) {
|
||||
const errorMessage = error instanceof Error ? error.message : String(error)
|
||||
logger.error(
|
||||
`[${requestId}] Schedule execution failed for workflow ${schedule.workflowId}`,
|
||||
{ jobId, error: errorMessage }
|
||||
)
|
||||
try {
|
||||
await jobQueue.markJobFailed(jobId, errorMessage)
|
||||
} catch (markFailedError) {
|
||||
logger.error(`[${requestId}] Failed to mark job as failed`, {
|
||||
jobId,
|
||||
error:
|
||||
markFailedError instanceof Error
|
||||
? markFailedError.message
|
||||
: String(markFailedError),
|
||||
})
|
||||
}
|
||||
}
|
||||
})()
|
||||
const handle = await tasks.trigger('schedule-execution', payload)
|
||||
logger.info(
|
||||
`[${requestId}] Queued schedule execution task ${handle.id} for workflow ${schedule.workflowId}`
|
||||
)
|
||||
return handle
|
||||
} catch (error) {
|
||||
logger.error(
|
||||
`[${requestId}] Failed to trigger schedule execution for workflow ${schedule.workflowId}`,
|
||||
error
|
||||
)
|
||||
return null
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error(
|
||||
`[${requestId}] Failed to queue schedule execution for workflow ${schedule.workflowId}`,
|
||||
error
|
||||
})
|
||||
|
||||
await Promise.allSettled(triggerPromises)
|
||||
|
||||
logger.info(`[${requestId}] Queued ${dueSchedules.length} schedule executions to Trigger.dev`)
|
||||
} else {
|
||||
const directExecutionPromises = dueSchedules.map(async (schedule) => {
|
||||
const queueTime = schedule.lastQueuedAt ?? queuedAt
|
||||
|
||||
const payload = {
|
||||
scheduleId: schedule.id,
|
||||
workflowId: schedule.workflowId,
|
||||
blockId: schedule.blockId || undefined,
|
||||
cronExpression: schedule.cronExpression || undefined,
|
||||
lastRanAt: schedule.lastRanAt?.toISOString(),
|
||||
failedCount: schedule.failedCount || 0,
|
||||
now: queueTime.toISOString(),
|
||||
scheduledFor: schedule.nextRunAt?.toISOString(),
|
||||
}
|
||||
|
||||
void executeScheduleJob(payload).catch((error) => {
|
||||
logger.error(
|
||||
`[${requestId}] Direct schedule execution failed for workflow ${schedule.workflowId}`,
|
||||
error
|
||||
)
|
||||
})
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Queued direct schedule execution for workflow ${schedule.workflowId} (Trigger.dev disabled)`
|
||||
)
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
await Promise.allSettled(queuePromises)
|
||||
await Promise.allSettled(directExecutionPromises)
|
||||
|
||||
logger.info(`[${requestId}] Queued ${dueSchedules.length} schedule executions`)
|
||||
logger.info(
|
||||
`[${requestId}] Queued ${dueSchedules.length} direct schedule executions (Trigger.dev disabled)`
|
||||
)
|
||||
}
|
||||
|
||||
return NextResponse.json({
|
||||
message: 'Scheduled workflow executions processed',
|
||||
|
||||
101
apps/sim/app/api/templates/[id]/approve/route.ts
Normal file
101
apps/sim/app/api/templates/[id]/approve/route.ts
Normal file
@@ -0,0 +1,101 @@
import { db } from '@sim/db'
import { templates } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { generateRequestId } from '@/lib/core/utils/request'
import { verifyEffectiveSuperUser } from '@/lib/templates/permissions'

const logger = createLogger('TemplateApprovalAPI')

export const revalidate = 0

/**
 * POST /api/templates/[id]/approve - Approve a template (super users only)
 */
export async function POST(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
  const requestId = generateRequestId()
  const { id } = await params

  try {
    const session = await getSession()
    if (!session?.user?.id) {
      logger.warn(`[${requestId}] Unauthorized template approval attempt for ID: ${id}`)
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const { effectiveSuperUser } = await verifyEffectiveSuperUser(session.user.id)
    if (!effectiveSuperUser) {
      logger.warn(`[${requestId}] Non-super user attempted to approve template: ${id}`)
      return NextResponse.json({ error: 'Only super users can approve templates' }, { status: 403 })
    }

    const existingTemplate = await db.select().from(templates).where(eq(templates.id, id)).limit(1)
    if (existingTemplate.length === 0) {
      logger.warn(`[${requestId}] Template not found for approval: ${id}`)
      return NextResponse.json({ error: 'Template not found' }, { status: 404 })
    }

    await db
      .update(templates)
      .set({ status: 'approved', updatedAt: new Date() })
      .where(eq(templates.id, id))

    logger.info(`[${requestId}] Template approved: ${id} by super user: ${session.user.id}`)

    return NextResponse.json({
      message: 'Template approved successfully',
      templateId: id,
    })
  } catch (error) {
    logger.error(`[${requestId}] Error approving template ${id}`, error)
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}

/**
 * DELETE /api/templates/[id]/approve - Unapprove a template (super users only)
 */
export async function DELETE(
  _request: NextRequest,
  { params }: { params: Promise<{ id: string }> }
) {
  const requestId = generateRequestId()
  const { id } = await params

  try {
    const session = await getSession()
    if (!session?.user?.id) {
      logger.warn(`[${requestId}] Unauthorized template rejection attempt for ID: ${id}`)
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const { effectiveSuperUser } = await verifyEffectiveSuperUser(session.user.id)
    if (!effectiveSuperUser) {
      logger.warn(`[${requestId}] Non-super user attempted to reject template: ${id}`)
      return NextResponse.json({ error: 'Only super users can reject templates' }, { status: 403 })
    }

    const existingTemplate = await db.select().from(templates).where(eq(templates.id, id)).limit(1)
    if (existingTemplate.length === 0) {
      logger.warn(`[${requestId}] Template not found for rejection: ${id}`)
      return NextResponse.json({ error: 'Template not found' }, { status: 404 })
    }

    await db
      .update(templates)
      .set({ status: 'rejected', updatedAt: new Date() })
      .where(eq(templates.id, id))

    logger.info(`[${requestId}] Template rejected: ${id} by super user: ${session.user.id}`)

    return NextResponse.json({
      message: 'Template rejected successfully',
      templateId: id,
    })
  } catch (error) {
    logger.error(`[${requestId}] Error rejecting template ${id}`, error)
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}

apps/sim/app/api/templates/[id]/reject/route.ts (new file, 55 lines)
@@ -0,0 +1,55 @@
import { db } from '@sim/db'
import { templates } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { generateRequestId } from '@/lib/core/utils/request'
import { verifyEffectiveSuperUser } from '@/lib/templates/permissions'

const logger = createLogger('TemplateRejectionAPI')

export const revalidate = 0

/**
 * POST /api/templates/[id]/reject - Reject a template (super users only)
 */
export async function POST(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
  const requestId = generateRequestId()
  const { id } = await params

  try {
    const session = await getSession()
    if (!session?.user?.id) {
      logger.warn(`[${requestId}] Unauthorized template rejection attempt for ID: ${id}`)
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const { effectiveSuperUser } = await verifyEffectiveSuperUser(session.user.id)
    if (!effectiveSuperUser) {
      logger.warn(`[${requestId}] Non-super user attempted to reject template: ${id}`)
      return NextResponse.json({ error: 'Only super users can reject templates' }, { status: 403 })
    }

    const existingTemplate = await db.select().from(templates).where(eq(templates.id, id)).limit(1)
    if (existingTemplate.length === 0) {
      logger.warn(`[${requestId}] Template not found for rejection: ${id}`)
      return NextResponse.json({ error: 'Template not found' }, { status: 404 })
    }

    await db
      .update(templates)
      .set({ status: 'rejected', updatedAt: new Date() })
      .where(eq(templates.id, id))

    logger.info(`[${requestId}] Template rejected: ${id} by super user: ${session.user.id}`)

    return NextResponse.json({
      message: 'Template rejected successfully',
      templateId: id,
    })
  } catch (error) {
    logger.error(`[${requestId}] Error rejecting template ${id}`, error)
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}

@@ -106,7 +106,6 @@ const updateTemplateSchema = z.object({
creatorId: z.string().optional(), // Creator profile ID
tags: z.array(z.string()).max(10, 'Maximum 10 tags allowed').optional(),
updateState: z.boolean().optional(), // Explicitly request state update from current workflow
status: z.enum(['approved', 'rejected', 'pending']).optional(), // Status change (super users only)
})

// PUT /api/templates/[id] - Update a template
@@ -132,7 +131,7 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
)
}

const { name, details, creatorId, tags, updateState, status } = validationResult.data
const { name, details, creatorId, tags, updateState } = validationResult.data

const existingTemplate = await db.select().from(templates).where(eq(templates.id, id)).limit(1)

@@ -143,44 +142,21 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{

const template = existingTemplate[0]

// Status changes require super user permission
if (status !== undefined) {
const { verifyEffectiveSuperUser } = await import('@/lib/templates/permissions')
const { effectiveSuperUser } = await verifyEffectiveSuperUser(session.user.id)
if (!effectiveSuperUser) {
logger.warn(`[${requestId}] Non-super user attempted to change template status: ${id}`)
return NextResponse.json(
{ error: 'Only super users can change template status' },
{ status: 403 }
)
}
if (!template.creatorId) {
logger.warn(`[${requestId}] Template ${id} has no creator, denying update`)
return NextResponse.json({ error: 'Access denied' }, { status: 403 })
}

// For non-status updates, verify creator permission
const hasNonStatusUpdates =
name !== undefined ||
details !== undefined ||
creatorId !== undefined ||
tags !== undefined ||
updateState
const { verifyCreatorPermission } = await import('@/lib/templates/permissions')
const { hasPermission, error: permissionError } = await verifyCreatorPermission(
session.user.id,
template.creatorId,
'admin'
)

if (hasNonStatusUpdates) {
if (!template.creatorId) {
logger.warn(`[${requestId}] Template ${id} has no creator, denying update`)
return NextResponse.json({ error: 'Access denied' }, { status: 403 })
}

const { verifyCreatorPermission } = await import('@/lib/templates/permissions')
const { hasPermission, error: permissionError } = await verifyCreatorPermission(
session.user.id,
template.creatorId,
'admin'
)

if (!hasPermission) {
logger.warn(`[${requestId}] User denied permission to update template ${id}`)
return NextResponse.json({ error: permissionError || 'Access denied' }, { status: 403 })
}
if (!hasPermission) {
logger.warn(`[${requestId}] User denied permission to update template ${id}`)
return NextResponse.json({ error: permissionError || 'Access denied' }, { status: 403 })
}

const updateData: any = {
@@ -191,7 +167,6 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
if (details !== undefined) updateData.details = details
if (tags !== undefined) updateData.tags = tags
if (creatorId !== undefined) updateData.creatorId = creatorId
if (status !== undefined) updateData.status = status

if (updateState && template.workflowId) {
const { verifyWorkflowAccess } = await import('@/socket/middleware/permissions')

@@ -4,7 +4,6 @@ import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { createA2AClient, extractTextContent, isTerminalState } from '@/lib/a2a/utils'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { validateUrlWithDNS } from '@/lib/core/security/input-validation.server'
import { generateRequestId } from '@/lib/core/utils/request'

export const dynamic = 'force-dynamic'
@@ -96,14 +95,6 @@ export async function POST(request: NextRequest) {
if (validatedData.files && validatedData.files.length > 0) {
for (const file of validatedData.files) {
if (file.type === 'url') {
const urlValidation = await validateUrlWithDNS(file.data, 'fileUrl')
if (!urlValidation.isValid) {
return NextResponse.json(
{ success: false, error: urlValidation.error },
{ status: 400 }
)
}

const filePart: FilePart = {
kind: 'file',
file: {

@@ -3,7 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { createA2AClient } from '@/lib/a2a/utils'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { validateUrlWithDNS } from '@/lib/core/security/input-validation.server'
import { validateExternalUrl } from '@/lib/core/security/input-validation'
import { generateRequestId } from '@/lib/core/utils/request'

export const dynamic = 'force-dynamic'
@@ -40,7 +40,7 @@ export async function POST(request: NextRequest) {
const body = await request.json()
const validatedData = A2ASetPushNotificationSchema.parse(body)

const urlValidation = await validateUrlWithDNS(validatedData.webhookUrl, 'Webhook URL')
const urlValidation = validateExternalUrl(validatedData.webhookUrl, 'Webhook URL')
if (!urlValidation.isValid) {
logger.warn(`[${requestId}] Invalid webhook URL`, { error: urlValidation.error })
return NextResponse.json(

@@ -92,9 +92,6 @@ export async function POST(request: NextRequest) {
formData.append('comment', comment)
}

// Add minorEdit field as required by Confluence API
formData.append('minorEdit', 'false')

const response = await fetch(url, {
method: 'POST',
headers: {

@@ -4,7 +4,6 @@ import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { validateNumericId } from '@/lib/core/security/input-validation'
import { generateRequestId } from '@/lib/core/utils/request'
import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'

@@ -16,7 +15,7 @@ const DiscordSendMessageSchema = z.object({
botToken: z.string().min(1, 'Bot token is required'),
channelId: z.string().min(1, 'Channel ID is required'),
content: z.string().optional().nullable(),
files: RawFileInputArraySchema.optional().nullable(),
files: z.array(z.any()).optional().nullable(),
})

export async function POST(request: NextRequest) {
@@ -102,12 +101,6 @@ export async function POST(request: NextRequest) {
logger.info(`[${requestId}] Processing ${validatedData.files.length} file(s)`)

const userFiles = processFilesToUserFiles(validatedData.files, requestId, logger)
const filesOutput: Array<{
name: string
mimeType: string
data: string
size: number
}> = []

if (userFiles.length === 0) {
logger.warn(`[${requestId}] No valid files to upload, falling back to text-only`)
@@ -144,12 +137,6 @@ export async function POST(request: NextRequest) {
logger.info(`[${requestId}] Downloading file ${i}: ${userFile.name}`)

const buffer = await downloadFileFromStorage(userFile, requestId, logger)
filesOutput.push({
name: userFile.name,
mimeType: userFile.type || 'application/octet-stream',
data: buffer.toString('base64'),
size: buffer.length,
})

const blob = new Blob([new Uint8Array(buffer)], { type: userFile.type })
formData.append(`files[${i}]`, blob, userFile.name)
@@ -186,7 +173,6 @@ export async function POST(request: NextRequest) {
message: data.content,
data: data,
fileCount: userFiles.length,
files: filesOutput,
},
})
} catch (error) {

@@ -1,132 +0,0 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { httpHeaderSafeJson } from '@/lib/core/utils/validation'
|
||||
import { FileInputSchema } from '@/lib/uploads/utils/file-schemas'
|
||||
import { processFilesToUserFiles, type RawFileInput } from '@/lib/uploads/utils/file-utils'
|
||||
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
const logger = createLogger('DropboxUploadAPI')
|
||||
|
||||
const DropboxUploadSchema = z.object({
|
||||
accessToken: z.string().min(1, 'Access token is required'),
|
||||
path: z.string().min(1, 'Destination path is required'),
|
||||
file: FileInputSchema.optional().nullable(),
|
||||
// Legacy field for backwards compatibility
|
||||
fileContent: z.string().optional().nullable(),
|
||||
fileName: z.string().optional().nullable(),
|
||||
mode: z.enum(['add', 'overwrite']).optional().nullable(),
|
||||
autorename: z.boolean().optional().nullable(),
|
||||
mute: z.boolean().optional().nullable(),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = generateRequestId()
|
||||
|
||||
try {
|
||||
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
|
||||
|
||||
if (!authResult.success) {
|
||||
logger.warn(`[${requestId}] Unauthorized Dropbox upload attempt: ${authResult.error}`)
|
||||
return NextResponse.json(
|
||||
{ success: false, error: authResult.error || 'Authentication required' },
|
||||
{ status: 401 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Authenticated Dropbox upload request via ${authResult.authType}`)
|
||||
|
||||
const body = await request.json()
|
||||
const validatedData = DropboxUploadSchema.parse(body)
|
||||
|
||||
let fileBuffer: Buffer
|
||||
let fileName: string
|
||||
|
||||
// Prefer UserFile input, fall back to legacy base64 string
|
||||
if (validatedData.file) {
|
||||
// Process UserFile input
|
||||
const userFiles = processFilesToUserFiles(
|
||||
[validatedData.file as RawFileInput],
|
||||
requestId,
|
||||
logger
|
||||
)
|
||||
|
||||
if (userFiles.length === 0) {
|
||||
return NextResponse.json({ success: false, error: 'Invalid file input' }, { status: 400 })
|
||||
}
|
||||
|
||||
const userFile = userFiles[0]
|
||||
logger.info(`[${requestId}] Downloading file: ${userFile.name} (${userFile.size} bytes)`)
|
||||
|
||||
fileBuffer = await downloadFileFromStorage(userFile, requestId, logger)
|
||||
fileName = userFile.name
|
||||
} else if (validatedData.fileContent) {
|
||||
// Legacy: base64 string input (backwards compatibility)
|
||||
logger.info(`[${requestId}] Using legacy base64 content input`)
|
||||
fileBuffer = Buffer.from(validatedData.fileContent, 'base64')
|
||||
fileName = validatedData.fileName || 'file'
|
||||
} else {
|
||||
return NextResponse.json({ success: false, error: 'File is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
// Determine final path
|
||||
let finalPath = validatedData.path
|
||||
if (finalPath.endsWith('/')) {
|
||||
finalPath = `${finalPath}${fileName}`
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Uploading to Dropbox: ${finalPath} (${fileBuffer.length} bytes)`)
|
||||
|
||||
const dropboxApiArg = {
|
||||
path: finalPath,
|
||||
mode: validatedData.mode || 'add',
|
||||
autorename: validatedData.autorename ?? true,
|
||||
mute: validatedData.mute ?? false,
|
||||
}
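// The upload arguments travel to Dropbox in the Dropbox-API-Arg header as JSON; httpHeaderSafeJson presumably escapes non-ASCII characters so the value stays HTTP-header safe.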
|
||||
|
||||
const response = await fetch('https://content.dropboxapi.com/2/files/upload', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
Authorization: `Bearer ${validatedData.accessToken}`,
|
||||
'Content-Type': 'application/octet-stream',
|
||||
'Dropbox-API-Arg': httpHeaderSafeJson(dropboxApiArg),
|
||||
},
|
||||
body: new Uint8Array(fileBuffer),
|
||||
})
|
||||
|
||||
const data = await response.json()
|
||||
|
||||
if (!response.ok) {
|
||||
const errorMessage = data.error_summary || data.error?.message || 'Failed to upload file'
|
||||
logger.error(`[${requestId}] Dropbox API error:`, { status: response.status, data })
|
||||
return NextResponse.json({ success: false, error: errorMessage }, { status: response.status })
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] File uploaded successfully to ${data.path_display}`)
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
output: {
|
||||
file: data,
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.warn(`[${requestId}] Validation error:`, error.errors)
|
||||
return NextResponse.json(
|
||||
{ success: false, error: error.errors[0]?.message || 'Validation failed' },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.error(`[${requestId}] Unexpected error:`, error)
|
||||
return NextResponse.json(
|
||||
{ success: false, error: error instanceof Error ? error.message : 'Unknown error' },
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -1,195 +0,0 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
import {
|
||||
secureFetchWithPinnedIP,
|
||||
validateUrlWithDNS,
|
||||
} from '@/lib/core/security/input-validation.server'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
const logger = createLogger('GitHubLatestCommitAPI')
|
||||
|
||||
interface GitHubErrorResponse {
|
||||
message?: string
|
||||
}
|
||||
|
||||
interface GitHubCommitResponse {
|
||||
sha: string
|
||||
html_url: string
|
||||
commit: {
|
||||
message: string
|
||||
author: { name: string; email: string; date: string }
|
||||
committer: { name: string; email: string; date: string }
|
||||
}
|
||||
author?: { login: string; avatar_url: string; html_url: string }
|
||||
committer?: { login: string; avatar_url: string; html_url: string }
|
||||
stats?: { additions: number; deletions: number; total: number }
|
||||
files?: Array<{
|
||||
filename: string
|
||||
status: string
|
||||
additions: number
|
||||
deletions: number
|
||||
changes: number
|
||||
patch?: string
|
||||
raw_url?: string
|
||||
blob_url?: string
|
||||
}>
|
||||
}
|
||||
|
||||
const GitHubLatestCommitSchema = z.object({
|
||||
owner: z.string().min(1, 'Owner is required'),
|
||||
repo: z.string().min(1, 'Repo is required'),
|
||||
branch: z.string().optional().nullable(),
|
||||
apiKey: z.string().min(1, 'API key is required'),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = generateRequestId()
|
||||
|
||||
try {
|
||||
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
|
||||
|
||||
if (!authResult.success) {
|
||||
logger.warn(`[${requestId}] Unauthorized GitHub latest commit attempt: ${authResult.error}`)
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: authResult.error || 'Authentication required',
|
||||
},
|
||||
{ status: 401 }
|
||||
)
|
||||
}
|
||||
|
||||
const body = await request.json()
|
||||
const validatedData = GitHubLatestCommitSchema.parse(body)
|
||||
|
||||
const { owner, repo, branch, apiKey } = validatedData
|
||||
|
||||
const baseUrl = `https://api.github.com/repos/${owner}/${repo}`
|
||||
const commitUrl = branch ? `${baseUrl}/commits/${branch}` : `${baseUrl}/commits/HEAD`
|
||||
|
||||
logger.info(`[${requestId}] Fetching latest commit from GitHub`, { owner, repo, branch })
|
||||
|
||||
const urlValidation = await validateUrlWithDNS(commitUrl, 'commitUrl')
|
||||
if (!urlValidation.isValid) {
|
||||
return NextResponse.json({ success: false, error: urlValidation.error }, { status: 400 })
|
||||
}
|
||||
|
||||
const response = await secureFetchWithPinnedIP(commitUrl, urlValidation.resolvedIP!, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
Accept: 'application/vnd.github.v3+json',
|
||||
Authorization: `Bearer ${apiKey}`,
|
||||
'X-GitHub-Api-Version': '2022-11-28',
|
||||
},
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const errorData = (await response.json().catch(() => ({}))) as GitHubErrorResponse
|
||||
logger.error(`[${requestId}] GitHub API error`, {
|
||||
status: response.status,
|
||||
error: errorData,
|
||||
})
|
||||
return NextResponse.json(
|
||||
{ success: false, error: errorData.message || `GitHub API error: ${response.status}` },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const data = (await response.json()) as GitHubCommitResponse
|
||||
|
||||
const content = `Latest commit: "${data.commit.message}" by ${data.commit.author.name} on ${data.commit.author.date}. SHA: ${data.sha}`
|
||||
|
||||
const files = data.files || []
|
||||
const fileDetailsWithContent = []
|
||||
|
||||
for (const file of files) {
|
||||
const fileDetail: Record<string, any> = {
|
||||
filename: file.filename,
|
||||
additions: file.additions,
|
||||
deletions: file.deletions,
|
||||
changes: file.changes,
|
||||
status: file.status,
|
||||
raw_url: file.raw_url,
|
||||
blob_url: file.blob_url,
|
||||
patch: file.patch,
|
||||
content: undefined,
|
||||
}
|
||||
|
||||
if (file.status !== 'removed' && file.raw_url) {
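// For each changed file that still exists, fetch its raw contents, reusing the DNS validation and IP-pinned fetch to reduce SSRF risk.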
|
||||
try {
|
||||
const rawUrlValidation = await validateUrlWithDNS(file.raw_url, 'rawUrl')
|
||||
if (rawUrlValidation.isValid) {
|
||||
const contentResponse = await secureFetchWithPinnedIP(
|
||||
file.raw_url,
|
||||
rawUrlValidation.resolvedIP!,
|
||||
{
|
||||
headers: {
|
||||
Authorization: `Bearer ${apiKey}`,
|
||||
'X-GitHub-Api-Version': '2022-11-28',
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
if (contentResponse.ok) {
|
||||
fileDetail.content = await contentResponse.text()
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
logger.warn(`[${requestId}] Failed to fetch content for ${file.filename}:`, error)
|
||||
}
|
||||
}
|
||||
|
||||
fileDetailsWithContent.push(fileDetail)
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Latest commit fetched successfully`, {
|
||||
sha: data.sha,
|
||||
fileCount: files.length,
|
||||
})
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
output: {
|
||||
content,
|
||||
metadata: {
|
||||
sha: data.sha,
|
||||
html_url: data.html_url,
|
||||
commit_message: data.commit.message,
|
||||
author: {
|
||||
name: data.commit.author.name,
|
||||
login: data.author?.login || 'Unknown',
|
||||
avatar_url: data.author?.avatar_url || '',
|
||||
html_url: data.author?.html_url || '',
|
||||
},
|
||||
committer: {
|
||||
name: data.commit.committer.name,
|
||||
login: data.committer?.login || 'Unknown',
|
||||
avatar_url: data.committer?.avatar_url || '',
|
||||
html_url: data.committer?.html_url || '',
|
||||
},
|
||||
stats: data.stats
|
||||
? {
|
||||
additions: data.stats.additions,
|
||||
deletions: data.stats.deletions,
|
||||
total: data.stats.total,
|
||||
}
|
||||
: undefined,
|
||||
files: fileDetailsWithContent.length > 0 ? fileDetailsWithContent : undefined,
|
||||
},
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error(`[${requestId}] Error fetching GitHub latest commit:`, error)
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: error instanceof Error ? error.message : 'Unknown error occurred',
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -3,7 +3,6 @@ import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
|
||||
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
|
||||
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
||||
import {
|
||||
@@ -29,7 +28,7 @@ const GmailDraftSchema = z.object({
|
||||
replyToMessageId: z.string().optional().nullable(),
|
||||
cc: z.string().optional().nullable(),
|
||||
bcc: z.string().optional().nullable(),
|
||||
attachments: RawFileInputArraySchema.optional().nullable(),
|
||||
attachments: z.array(z.any()).optional().nullable(),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
|
||||
@@ -3,7 +3,6 @@ import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
|
||||
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
|
||||
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
||||
import {
|
||||
@@ -29,7 +28,7 @@ const GmailSendSchema = z.object({
|
||||
replyToMessageId: z.string().optional().nullable(),
|
||||
cc: z.string().optional().nullable(),
|
||||
bcc: z.string().optional().nullable(),
|
||||
attachments: RawFileInputArraySchema.optional().nullable(),
|
||||
attachments: z.array(z.any()).optional().nullable(),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
|
||||
@@ -1,252 +0,0 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
import {
|
||||
secureFetchWithPinnedIP,
|
||||
validateUrlWithDNS,
|
||||
} from '@/lib/core/security/input-validation.server'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import type { GoogleDriveFile, GoogleDriveRevision } from '@/tools/google_drive/types'
|
||||
import {
|
||||
ALL_FILE_FIELDS,
|
||||
ALL_REVISION_FIELDS,
|
||||
DEFAULT_EXPORT_FORMATS,
|
||||
GOOGLE_WORKSPACE_MIME_TYPES,
|
||||
} from '@/tools/google_drive/utils'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
const logger = createLogger('GoogleDriveDownloadAPI')
|
||||
|
||||
/** Google API error response structure */
|
||||
interface GoogleApiErrorResponse {
|
||||
error?: {
|
||||
message?: string
|
||||
code?: number
|
||||
status?: string
|
||||
}
|
||||
}
|
||||
|
||||
/** Google Drive revisions list response */
|
||||
interface GoogleDriveRevisionsResponse {
|
||||
revisions?: GoogleDriveRevision[]
|
||||
nextPageToken?: string
|
||||
}
|
||||
|
||||
const GoogleDriveDownloadSchema = z.object({
|
||||
accessToken: z.string().min(1, 'Access token is required'),
|
||||
fileId: z.string().min(1, 'File ID is required'),
|
||||
mimeType: z.string().optional().nullable(),
|
||||
fileName: z.string().optional().nullable(),
|
||||
includeRevisions: z.boolean().optional().default(true),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = generateRequestId()
|
||||
|
||||
try {
|
||||
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
|
||||
|
||||
if (!authResult.success) {
|
||||
logger.warn(`[${requestId}] Unauthorized Google Drive download attempt: ${authResult.error}`)
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: authResult.error || 'Authentication required',
|
||||
},
|
||||
{ status: 401 }
|
||||
)
|
||||
}
|
||||
|
||||
const body = await request.json()
|
||||
const validatedData = GoogleDriveDownloadSchema.parse(body)
|
||||
|
||||
const {
|
||||
accessToken,
|
||||
fileId,
|
||||
mimeType: exportMimeType,
|
||||
fileName,
|
||||
includeRevisions,
|
||||
} = validatedData
|
||||
const authHeader = `Bearer ${accessToken}`
|
||||
|
||||
logger.info(`[${requestId}] Getting file metadata from Google Drive`, { fileId })
|
||||
|
||||
const metadataUrl = `https://www.googleapis.com/drive/v3/files/${fileId}?fields=${ALL_FILE_FIELDS}&supportsAllDrives=true`
|
||||
const metadataUrlValidation = await validateUrlWithDNS(metadataUrl, 'metadataUrl')
|
||||
if (!metadataUrlValidation.isValid) {
|
||||
return NextResponse.json(
|
||||
{ success: false, error: metadataUrlValidation.error },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const metadataResponse = await secureFetchWithPinnedIP(
|
||||
metadataUrl,
|
||||
metadataUrlValidation.resolvedIP!,
|
||||
{
|
||||
headers: { Authorization: authHeader },
|
||||
}
|
||||
)
|
||||
|
||||
if (!metadataResponse.ok) {
|
||||
const errorDetails = (await metadataResponse
|
||||
.json()
|
||||
.catch(() => ({}))) as GoogleApiErrorResponse
|
||||
logger.error(`[${requestId}] Failed to get file metadata`, {
|
||||
status: metadataResponse.status,
|
||||
error: errorDetails,
|
||||
})
|
||||
return NextResponse.json(
|
||||
{ success: false, error: errorDetails.error?.message || 'Failed to get file metadata' },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const metadata = (await metadataResponse.json()) as GoogleDriveFile
|
||||
const fileMimeType = metadata.mimeType
|
||||
|
||||
let fileBuffer: Buffer
|
||||
let finalMimeType = fileMimeType
|
||||
|
||||
if (GOOGLE_WORKSPACE_MIME_TYPES.includes(fileMimeType)) {
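// Google Workspace files (Docs, Sheets, Slides) have no binary content to download directly, so they are exported to a concrete format instead.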
|
||||
const exportFormat = exportMimeType || DEFAULT_EXPORT_FORMATS[fileMimeType] || 'text/plain'
|
||||
finalMimeType = exportFormat
|
||||
|
||||
logger.info(`[${requestId}] Exporting Google Workspace file`, {
|
||||
fileId,
|
||||
mimeType: fileMimeType,
|
||||
exportFormat,
|
||||
})
|
||||
|
||||
const exportUrl = `https://www.googleapis.com/drive/v3/files/${fileId}/export?mimeType=${encodeURIComponent(exportFormat)}&supportsAllDrives=true`
|
||||
const exportUrlValidation = await validateUrlWithDNS(exportUrl, 'exportUrl')
|
||||
if (!exportUrlValidation.isValid) {
|
||||
return NextResponse.json(
|
||||
{ success: false, error: exportUrlValidation.error },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const exportResponse = await secureFetchWithPinnedIP(
|
||||
exportUrl,
|
||||
exportUrlValidation.resolvedIP!,
|
||||
{ headers: { Authorization: authHeader } }
|
||||
)
|
||||
|
||||
if (!exportResponse.ok) {
|
||||
const exportError = (await exportResponse
|
||||
.json()
|
||||
.catch(() => ({}))) as GoogleApiErrorResponse
|
||||
logger.error(`[${requestId}] Failed to export file`, {
|
||||
status: exportResponse.status,
|
||||
error: exportError,
|
||||
})
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: exportError.error?.message || 'Failed to export Google Workspace file',
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const arrayBuffer = await exportResponse.arrayBuffer()
|
||||
fileBuffer = Buffer.from(arrayBuffer)
|
||||
} else {
|
||||
logger.info(`[${requestId}] Downloading regular file`, { fileId, mimeType: fileMimeType })
|
||||
|
||||
const downloadUrl = `https://www.googleapis.com/drive/v3/files/${fileId}?alt=media&supportsAllDrives=true`
|
||||
const downloadUrlValidation = await validateUrlWithDNS(downloadUrl, 'downloadUrl')
|
||||
if (!downloadUrlValidation.isValid) {
|
||||
return NextResponse.json(
|
||||
{ success: false, error: downloadUrlValidation.error },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const downloadResponse = await secureFetchWithPinnedIP(
|
||||
downloadUrl,
|
||||
downloadUrlValidation.resolvedIP!,
|
||||
{ headers: { Authorization: authHeader } }
|
||||
)
|
||||
|
||||
if (!downloadResponse.ok) {
|
||||
const downloadError = (await downloadResponse
|
||||
.json()
|
||||
.catch(() => ({}))) as GoogleApiErrorResponse
|
||||
logger.error(`[${requestId}] Failed to download file`, {
|
||||
status: downloadResponse.status,
|
||||
error: downloadError,
|
||||
})
|
||||
return NextResponse.json(
|
||||
{ success: false, error: downloadError.error?.message || 'Failed to download file' },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const arrayBuffer = await downloadResponse.arrayBuffer()
|
||||
fileBuffer = Buffer.from(arrayBuffer)
|
||||
}
|
||||
|
||||
const canReadRevisions = metadata.capabilities?.canReadRevisions === true
|
||||
if (includeRevisions && canReadRevisions) {
|
||||
try {
|
||||
const revisionsUrl = `https://www.googleapis.com/drive/v3/files/${fileId}/revisions?fields=revisions(${ALL_REVISION_FIELDS})&pageSize=100`
|
||||
const revisionsUrlValidation = await validateUrlWithDNS(revisionsUrl, 'revisionsUrl')
|
||||
if (revisionsUrlValidation.isValid) {
|
||||
const revisionsResponse = await secureFetchWithPinnedIP(
|
||||
revisionsUrl,
|
||||
revisionsUrlValidation.resolvedIP!,
|
||||
{ headers: { Authorization: authHeader } }
|
||||
)
|
||||
|
||||
if (revisionsResponse.ok) {
|
||||
const revisionsData = (await revisionsResponse.json()) as GoogleDriveRevisionsResponse
|
||||
metadata.revisions = revisionsData.revisions
|
||||
logger.info(`[${requestId}] Fetched file revisions`, {
|
||||
fileId,
|
||||
revisionCount: metadata.revisions?.length || 0,
|
||||
})
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
logger.warn(`[${requestId}] Error fetching revisions, continuing without them`, { error })
|
||||
}
|
||||
}
|
||||
|
||||
const resolvedName = fileName || metadata.name || 'download'
|
||||
|
||||
logger.info(`[${requestId}] File downloaded successfully`, {
|
||||
fileId,
|
||||
name: resolvedName,
|
||||
size: fileBuffer.length,
|
||||
mimeType: finalMimeType,
|
||||
})
|
||||
|
||||
const base64Data = fileBuffer.toString('base64')
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
output: {
|
||||
file: {
|
||||
name: resolvedName,
|
||||
mimeType: finalMimeType,
|
||||
data: base64Data,
|
||||
size: fileBuffer.length,
|
||||
},
|
||||
metadata,
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error(`[${requestId}] Error downloading Google Drive file:`, error)
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: error instanceof Error ? error.message : 'Unknown error occurred',
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -3,7 +3,6 @@ import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { RawFileInputSchema } from '@/lib/uploads/utils/file-schemas'
|
||||
import { processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
|
||||
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
||||
import {
|
||||
@@ -21,7 +20,7 @@ const GOOGLE_DRIVE_API_BASE = 'https://www.googleapis.com/upload/drive/v3/files'
|
||||
const GoogleDriveUploadSchema = z.object({
|
||||
accessToken: z.string().min(1, 'Access token is required'),
|
||||
fileName: z.string().min(1, 'File name is required'),
|
||||
file: RawFileInputSchema.optional().nullable(),
|
||||
file: z.any().optional().nullable(),
|
||||
mimeType: z.string().optional().nullable(),
|
||||
folderId: z.string().optional().nullable(),
|
||||
})
|
||||
|
||||
@@ -1,131 +0,0 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
import {
|
||||
secureFetchWithPinnedIP,
|
||||
validateUrlWithDNS,
|
||||
} from '@/lib/core/security/input-validation.server'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { enhanceGoogleVaultError } from '@/tools/google_vault/utils'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
const logger = createLogger('GoogleVaultDownloadExportFileAPI')
|
||||
|
||||
const GoogleVaultDownloadExportFileSchema = z.object({
|
||||
accessToken: z.string().min(1, 'Access token is required'),
|
||||
bucketName: z.string().min(1, 'Bucket name is required'),
|
||||
objectName: z.string().min(1, 'Object name is required'),
|
||||
fileName: z.string().optional().nullable(),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = generateRequestId()
|
||||
|
||||
try {
|
||||
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
|
||||
|
||||
if (!authResult.success) {
|
||||
logger.warn(`[${requestId}] Unauthorized Google Vault download attempt: ${authResult.error}`)
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: authResult.error || 'Authentication required',
|
||||
},
|
||||
{ status: 401 }
|
||||
)
|
||||
}
|
||||
|
||||
const body = await request.json()
|
||||
const validatedData = GoogleVaultDownloadExportFileSchema.parse(body)
|
||||
|
||||
const { accessToken, bucketName, objectName, fileName } = validatedData
|
||||
|
||||
const bucket = encodeURIComponent(bucketName)
|
||||
const object = encodeURIComponent(objectName)
|
||||
const downloadUrl = `https://storage.googleapis.com/storage/v1/b/${bucket}/o/${object}?alt=media`
|
||||
|
||||
logger.info(`[${requestId}] Downloading file from Google Vault`, { bucketName, objectName })
|
||||
|
||||
const urlValidation = await validateUrlWithDNS(downloadUrl, 'downloadUrl')
|
||||
if (!urlValidation.isValid) {
|
||||
return NextResponse.json(
|
||||
{ success: false, error: enhanceGoogleVaultError(urlValidation.error || 'Invalid URL') },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const downloadResponse = await secureFetchWithPinnedIP(downloadUrl, urlValidation.resolvedIP!, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
},
|
||||
})
|
||||
|
||||
if (!downloadResponse.ok) {
|
||||
const errorText = await downloadResponse.text().catch(() => '')
|
||||
const errorMessage = `Failed to download file: ${errorText || downloadResponse.statusText}`
|
||||
logger.error(`[${requestId}] Failed to download Vault export file`, {
|
||||
status: downloadResponse.status,
|
||||
error: errorText,
|
||||
})
|
||||
return NextResponse.json(
|
||||
{ success: false, error: enhanceGoogleVaultError(errorMessage) },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const contentType = downloadResponse.headers.get('content-type') || 'application/octet-stream'
|
||||
const disposition = downloadResponse.headers.get('content-disposition') || ''
|
||||
const match = disposition.match(/filename\*=UTF-8''([^;]+)|filename="([^"]+)"/)
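// Extract the filename from Content-Disposition: the first alternative matches the RFC 5987 form (filename*=UTF-8''...), the second a plain quoted filename.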
|
||||
|
||||
let resolvedName = fileName
|
||||
if (!resolvedName) {
|
||||
if (match?.[1]) {
|
||||
try {
|
||||
resolvedName = decodeURIComponent(match[1])
|
||||
} catch {
|
||||
resolvedName = match[1]
|
||||
}
|
||||
} else if (match?.[2]) {
|
||||
resolvedName = match[2]
|
||||
} else if (objectName) {
|
||||
const parts = objectName.split('/')
|
||||
resolvedName = parts[parts.length - 1] || 'vault-export.bin'
|
||||
} else {
|
||||
resolvedName = 'vault-export.bin'
|
||||
}
|
||||
}
|
||||
|
||||
const arrayBuffer = await downloadResponse.arrayBuffer()
|
||||
const buffer = Buffer.from(arrayBuffer)
|
||||
|
||||
logger.info(`[${requestId}] Vault export file downloaded successfully`, {
|
||||
name: resolvedName,
|
||||
size: buffer.length,
|
||||
mimeType: contentType,
|
||||
})
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
output: {
|
||||
file: {
|
||||
name: resolvedName,
|
||||
mimeType: contentType,
|
||||
data: buffer.toString('base64'),
|
||||
size: buffer.length,
|
||||
},
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error(`[${requestId}] Error downloading Google Vault export file:`, error)
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: error instanceof Error ? error.message : 'Unknown error occurred',
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -1,10 +1,7 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
import {
|
||||
secureFetchWithPinnedIP,
|
||||
validateUrlWithDNS,
|
||||
} from '@/lib/core/security/input-validation.server'
|
||||
import { validateImageUrl } from '@/lib/core/security/input-validation'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
|
||||
const logger = createLogger('ImageProxyAPI')
|
||||
@@ -29,7 +26,7 @@ export async function GET(request: NextRequest) {
|
||||
return new NextResponse('Missing URL parameter', { status: 400 })
|
||||
}
|
||||
|
||||
const urlValidation = await validateUrlWithDNS(imageUrl, 'imageUrl')
|
||||
const urlValidation = validateImageUrl(imageUrl)
|
||||
if (!urlValidation.isValid) {
|
||||
logger.warn(`[${requestId}] Blocked image proxy request`, {
|
||||
url: imageUrl.substring(0, 100),
|
||||
@@ -41,8 +38,7 @@ export async function GET(request: NextRequest) {
|
||||
logger.info(`[${requestId}] Proxying image request for: ${imageUrl}`)
|
||||
|
||||
try {
|
||||
const imageResponse = await secureFetchWithPinnedIP(imageUrl, urlValidation.resolvedIP!, {
|
||||
method: 'GET',
|
||||
const imageResponse = await fetch(imageUrl, {
|
||||
headers: {
|
||||
'User-Agent':
|
||||
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.36',
|
||||
@@ -68,14 +64,14 @@ export async function GET(request: NextRequest) {
|
||||
|
||||
const contentType = imageResponse.headers.get('content-type') || 'image/jpeg'
|
||||
|
||||
const imageArrayBuffer = await imageResponse.arrayBuffer()
|
||||
const imageBlob = await imageResponse.blob()
|
||||
|
||||
if (imageArrayBuffer.byteLength === 0) {
|
||||
logger.error(`[${requestId}] Empty image received`)
|
||||
if (imageBlob.size === 0) {
|
||||
logger.error(`[${requestId}] Empty image blob received`)
|
||||
return new NextResponse('Empty image received', { status: 404 })
|
||||
}
|
||||
|
||||
return new NextResponse(imageArrayBuffer, {
|
||||
return new NextResponse(imageBlob, {
|
||||
headers: {
|
||||
'Content-Type': contentType,
|
||||
'Access-Control-Allow-Origin': '*',
|
||||
|
||||
@@ -1,121 +0,0 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
|
||||
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
|
||||
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
||||
import { getJiraCloudId } from '@/tools/jira/utils'
|
||||
|
||||
const logger = createLogger('JiraAddAttachmentAPI')
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
const JiraAddAttachmentSchema = z.object({
|
||||
accessToken: z.string().min(1, 'Access token is required'),
|
||||
domain: z.string().min(1, 'Domain is required'),
|
||||
issueKey: z.string().min(1, 'Issue key is required'),
|
||||
files: RawFileInputArraySchema,
|
||||
cloudId: z.string().optional().nullable(),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = `jira-attach-${Date.now()}`
|
||||
|
||||
try {
|
||||
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
|
||||
if (!authResult.success) {
|
||||
return NextResponse.json(
|
||||
{ success: false, error: authResult.error || 'Unauthorized' },
|
||||
{ status: 401 }
|
||||
)
|
||||
}
|
||||
|
||||
const body = await request.json()
|
||||
const validatedData = JiraAddAttachmentSchema.parse(body)
|
||||
|
||||
const userFiles = processFilesToUserFiles(validatedData.files, requestId, logger)
|
||||
if (userFiles.length === 0) {
|
||||
return NextResponse.json(
|
||||
{ success: false, error: 'No valid files provided for upload' },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const cloudId =
|
||||
validatedData.cloudId ||
|
||||
(await getJiraCloudId(validatedData.domain, validatedData.accessToken))
|
||||
|
||||
const formData = new FormData()
|
||||
const filesOutput: Array<{ name: string; mimeType: string; data: string; size: number }> = []
|
||||
|
||||
for (const file of userFiles) {
|
||||
const buffer = await downloadFileFromStorage(file, requestId, logger)
|
||||
filesOutput.push({
|
||||
name: file.name,
|
||||
mimeType: file.type || 'application/octet-stream',
|
||||
data: buffer.toString('base64'),
|
||||
size: buffer.length,
|
||||
})
|
||||
const blob = new Blob([new Uint8Array(buffer)], {
|
||||
type: file.type || 'application/octet-stream',
|
||||
})
|
||||
formData.append('file', blob, file.name)
|
||||
}
|
||||
|
||||
const url = `https://api.atlassian.com/ex/jira/${cloudId}/rest/api/3/issue/${validatedData.issueKey}/attachments`
|
||||
|
||||
const response = await fetch(url, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
Authorization: `Bearer ${validatedData.accessToken}`,
|
||||
'X-Atlassian-Token': 'no-check',
|
||||
},
|
||||
body: formData,
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const errorText = await response.text()
|
||||
logger.error(`[${requestId}] Jira attachment upload failed`, {
|
||||
status: response.status,
|
||||
statusText: response.statusText,
|
||||
error: errorText,
|
||||
})
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: `Failed to upload attachments: ${response.statusText}`,
|
||||
},
|
||||
{ status: response.status }
|
||||
)
|
||||
}
|
||||
|
||||
const attachments = await response.json()
|
||||
const attachmentIds = Array.isArray(attachments)
|
||||
? attachments.map((attachment) => attachment.id).filter(Boolean)
|
||||
: []
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
output: {
|
||||
ts: new Date().toISOString(),
|
||||
issueKey: validatedData.issueKey,
|
||||
attachmentIds,
|
||||
files: filesOutput,
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
return NextResponse.json(
|
||||
{ success: false, error: 'Invalid request data', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.error(`[${requestId}] Jira attachment upload error`, error)
|
||||
return NextResponse.json(
|
||||
{ success: false, error: error instanceof Error ? error.message : 'Internal server error' },
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -2,11 +2,9 @@ import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { secureFetchWithValidation } from '@/lib/core/security/input-validation.server'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
|
||||
import { uploadFilesForTeamsMessage } from '@/tools/microsoft_teams/server-utils'
|
||||
import type { GraphApiErrorResponse, GraphChatMessage } from '@/tools/microsoft_teams/types'
|
||||
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
|
||||
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
||||
import { resolveMentionsForChannel, type TeamsMention } from '@/tools/microsoft_teams/utils'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
@@ -18,7 +16,7 @@ const TeamsWriteChannelSchema = z.object({
|
||||
teamId: z.string().min(1, 'Team ID is required'),
|
||||
channelId: z.string().min(1, 'Channel ID is required'),
|
||||
content: z.string().min(1, 'Message content is required'),
|
||||
files: RawFileInputArraySchema.optional().nullable(),
|
||||
files: z.array(z.any()).optional().nullable(),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
@@ -55,12 +53,93 @@ export async function POST(request: NextRequest) {
|
||||
fileCount: validatedData.files?.length || 0,
|
||||
})
|
||||
|
||||
const { attachments, filesOutput } = await uploadFilesForTeamsMessage({
|
||||
rawFiles: validatedData.files || [],
|
||||
accessToken: validatedData.accessToken,
|
||||
requestId,
|
||||
logger,
|
||||
})
|
||||
const attachments: any[] = []
|
||||
if (validatedData.files && validatedData.files.length > 0) {
|
||||
const rawFiles = validatedData.files
|
||||
logger.info(`[${requestId}] Processing ${rawFiles.length} file(s) for upload to OneDrive`)
|
||||
|
||||
const userFiles = processFilesToUserFiles(rawFiles, requestId, logger)
|
||||
|
||||
for (const file of userFiles) {
|
||||
try {
|
||||
logger.info(`[${requestId}] Uploading file to Teams: ${file.name} (${file.size} bytes)`)
|
||||
|
||||
const buffer = await downloadFileFromStorage(file, requestId, logger)
|
||||
|
||||
const uploadUrl =
|
||||
'https://graph.microsoft.com/v1.0/me/drive/root:/TeamsAttachments/' +
|
||||
encodeURIComponent(file.name) +
|
||||
':/content'
|
||||
|
||||
logger.info(`[${requestId}] Uploading to Teams: ${uploadUrl}`)
|
||||
|
||||
const uploadResponse = await fetch(uploadUrl, {
|
||||
method: 'PUT',
|
||||
headers: {
|
||||
Authorization: `Bearer ${validatedData.accessToken}`,
|
||||
'Content-Type': file.type || 'application/octet-stream',
|
||||
},
|
||||
body: new Uint8Array(buffer),
|
||||
})
|
||||
|
||||
if (!uploadResponse.ok) {
|
||||
const errorData = await uploadResponse.json().catch(() => ({}))
|
||||
logger.error(`[${requestId}] Teams upload failed:`, errorData)
|
||||
throw new Error(
|
||||
`Failed to upload file to Teams: ${errorData.error?.message || 'Unknown error'}`
|
||||
)
|
||||
}
|
||||
|
||||
const uploadedFile = await uploadResponse.json()
|
||||
logger.info(`[${requestId}] File uploaded to Teams successfully`, {
|
||||
id: uploadedFile.id,
|
||||
webUrl: uploadedFile.webUrl,
|
||||
})
|
||||
|
||||
const fileDetailsUrl = `https://graph.microsoft.com/v1.0/me/drive/items/${uploadedFile.id}?$select=id,name,webDavUrl,eTag,size`
|
||||
|
||||
const fileDetailsResponse = await fetch(fileDetailsUrl, {
|
||||
headers: {
|
||||
Authorization: `Bearer ${validatedData.accessToken}`,
|
||||
},
|
||||
})
|
||||
|
||||
if (!fileDetailsResponse.ok) {
|
||||
const errorData = await fileDetailsResponse.json().catch(() => ({}))
|
||||
logger.error(`[${requestId}] Failed to get file details:`, errorData)
|
||||
throw new Error(
|
||||
`Failed to get file details: ${errorData.error?.message || 'Unknown error'}`
|
||||
)
|
||||
}
|
||||
|
||||
const fileDetails = await fileDetailsResponse.json()
|
||||
logger.info(`[${requestId}] Got file details`, {
|
||||
webDavUrl: fileDetails.webDavUrl,
|
||||
eTag: fileDetails.eTag,
|
||||
})
|
||||
|
||||
const attachmentId = fileDetails.eTag?.match(/\{([a-f0-9-]+)\}/i)?.[1] || fileDetails.id
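// Graph item eTags wrap a GUID in braces; that GUID (falling back to the raw item id) is used as the id of the Teams attachment reference built below.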
|
||||
|
||||
attachments.push({
|
||||
id: attachmentId,
|
||||
contentType: 'reference',
|
||||
contentUrl: fileDetails.webDavUrl,
|
||||
name: file.name,
|
||||
})
|
||||
|
||||
logger.info(`[${requestId}] Created attachment reference for ${file.name}`)
|
||||
} catch (error) {
|
||||
logger.error(`[${requestId}] Failed to process file ${file.name}:`, error)
|
||||
throw new Error(
|
||||
`Failed to process file "${file.name}": ${error instanceof Error ? error.message : 'Unknown error'}`
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] All ${attachments.length} file(s) uploaded and attachment references created`
|
||||
)
|
||||
}
|
||||
|
||||
let messageContent = validatedData.content
|
||||
let contentType: 'text' | 'html' = 'text'
|
||||
@@ -118,21 +197,17 @@ export async function POST(request: NextRequest) {
|
||||
|
||||
const teamsUrl = `https://graph.microsoft.com/v1.0/teams/${encodeURIComponent(validatedData.teamId)}/channels/${encodeURIComponent(validatedData.channelId)}/messages`
|
||||
|
||||
const teamsResponse = await secureFetchWithValidation(
|
||||
teamsUrl,
|
||||
{
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: `Bearer ${validatedData.accessToken}`,
|
||||
},
|
||||
body: JSON.stringify(messageBody),
|
||||
const teamsResponse = await fetch(teamsUrl, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: `Bearer ${validatedData.accessToken}`,
|
||||
},
|
||||
'teamsUrl'
|
||||
)
|
||||
body: JSON.stringify(messageBody),
|
||||
})
|
||||
|
||||
if (!teamsResponse.ok) {
|
||||
const errorData = (await teamsResponse.json().catch(() => ({}))) as GraphApiErrorResponse
|
||||
const errorData = await teamsResponse.json().catch(() => ({}))
|
||||
logger.error(`[${requestId}] Microsoft Teams API error:`, errorData)
|
||||
return NextResponse.json(
|
||||
{
|
||||
@@ -143,7 +218,7 @@ export async function POST(request: NextRequest) {
|
||||
)
|
||||
}
|
||||
|
||||
const responseData = (await teamsResponse.json()) as GraphChatMessage
|
||||
const responseData = await teamsResponse.json()
|
||||
logger.info(`[${requestId}] Teams channel message sent successfully`, {
|
||||
messageId: responseData.id,
|
||||
attachmentCount: attachments.length,
|
||||
@@ -162,7 +237,6 @@ export async function POST(request: NextRequest) {
|
||||
url: responseData.webUrl || '',
|
||||
attachmentCount: attachments.length,
|
||||
},
|
||||
files: filesOutput,
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
|
||||
@@ -2,11 +2,9 @@ import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { secureFetchWithValidation } from '@/lib/core/security/input-validation.server'
import { generateRequestId } from '@/lib/core/utils/request'
import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
import { uploadFilesForTeamsMessage } from '@/tools/microsoft_teams/server-utils'
import type { GraphApiErrorResponse, GraphChatMessage } from '@/tools/microsoft_teams/types'
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
import { resolveMentionsForChat, type TeamsMention } from '@/tools/microsoft_teams/utils'

export const dynamic = 'force-dynamic'

@@ -17,7 +15,7 @@ const TeamsWriteChatSchema = z.object({
  accessToken: z.string().min(1, 'Access token is required'),
  chatId: z.string().min(1, 'Chat ID is required'),
  content: z.string().min(1, 'Message content is required'),
  files: RawFileInputArraySchema.optional().nullable(),
  files: z.array(z.any()).optional().nullable(),
})

export async function POST(request: NextRequest) {
@@ -53,12 +51,93 @@ export async function POST(request: NextRequest) {
      fileCount: validatedData.files?.length || 0,
    })

    const { attachments, filesOutput } = await uploadFilesForTeamsMessage({
      rawFiles: validatedData.files || [],
      accessToken: validatedData.accessToken,
      requestId,
      logger,
    })
    const attachments: any[] = []
    if (validatedData.files && validatedData.files.length > 0) {
      const rawFiles = validatedData.files
      logger.info(`[${requestId}] Processing ${rawFiles.length} file(s) for upload to Teams`)

      const userFiles = processFilesToUserFiles(rawFiles, requestId, logger)

      for (const file of userFiles) {
        try {
          logger.info(`[${requestId}] Uploading file to Teams: ${file.name} (${file.size} bytes)`)

          const buffer = await downloadFileFromStorage(file, requestId, logger)

          const uploadUrl =
            'https://graph.microsoft.com/v1.0/me/drive/root:/TeamsAttachments/' +
            encodeURIComponent(file.name) +
            ':/content'

          logger.info(`[${requestId}] Uploading to Teams: ${uploadUrl}`)

          const uploadResponse = await fetch(uploadUrl, {
            method: 'PUT',
            headers: {
              Authorization: `Bearer ${validatedData.accessToken}`,
              'Content-Type': file.type || 'application/octet-stream',
            },
            body: new Uint8Array(buffer),
          })

          if (!uploadResponse.ok) {
            const errorData = await uploadResponse.json().catch(() => ({}))
            logger.error(`[${requestId}] Teams upload failed:`, errorData)
            throw new Error(
              `Failed to upload file to Teams: ${errorData.error?.message || 'Unknown error'}`
            )
          }

          const uploadedFile = await uploadResponse.json()
          logger.info(`[${requestId}] File uploaded to Teams successfully`, {
            id: uploadedFile.id,
            webUrl: uploadedFile.webUrl,
          })

          const fileDetailsUrl = `https://graph.microsoft.com/v1.0/me/drive/items/${uploadedFile.id}?$select=id,name,webDavUrl,eTag,size`

          const fileDetailsResponse = await fetch(fileDetailsUrl, {
            headers: { Authorization: `Bearer ${validatedData.accessToken}` },
          })

          if (!fileDetailsResponse.ok) {
            const errorData = await fileDetailsResponse.json().catch(() => ({}))
            logger.error(`[${requestId}] Failed to get file details:`, errorData)
            throw new Error(
              `Failed to get file details: ${errorData.error?.message || 'Unknown error'}`
            )
          }

          const fileDetails = await fileDetailsResponse.json()
          logger.info(`[${requestId}] Got file details`, {
            webDavUrl: fileDetails.webDavUrl,
            eTag: fileDetails.eTag,
          })

          const attachmentId = fileDetails.eTag?.match(/\{([a-f0-9-]+)\}/i)?.[1] || fileDetails.id

          attachments.push({
            id: attachmentId,
            contentType: 'reference',
            contentUrl: fileDetails.webDavUrl,
            name: file.name,
          })

          logger.info(`[${requestId}] Created attachment reference for ${file.name}`)
        } catch (error) {
          logger.error(`[${requestId}] Failed to process file ${file.name}:`, error)
          throw new Error(
            `Failed to process file "${file.name}": ${error instanceof Error ? error.message : 'Unknown error'}`
          )
        }
      }

      logger.info(
        `[${requestId}] All ${attachments.length} file(s) uploaded and attachment references created`
      )
    }
    let messageContent = validatedData.content
    let contentType: 'text' | 'html' = 'text'
@@ -115,21 +194,17 @@ export async function POST(request: NextRequest) {

    const teamsUrl = `https://graph.microsoft.com/v1.0/chats/${encodeURIComponent(validatedData.chatId)}/messages`

    const teamsResponse = await secureFetchWithValidation(
      teamsUrl,
      {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
          Authorization: `Bearer ${validatedData.accessToken}`,
        },
        body: JSON.stringify(messageBody),
    const teamsResponse = await fetch(teamsUrl, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        Authorization: `Bearer ${validatedData.accessToken}`,
      },
      'teamsUrl'
    )
      body: JSON.stringify(messageBody),
    })

    if (!teamsResponse.ok) {
      const errorData = (await teamsResponse.json().catch(() => ({}))) as GraphApiErrorResponse
      const errorData = await teamsResponse.json().catch(() => ({}))
      logger.error(`[${requestId}] Microsoft Teams API error:`, errorData)
      return NextResponse.json(
        {
@@ -140,7 +215,7 @@ export async function POST(request: NextRequest) {
      )
    }

    const responseData = (await teamsResponse.json()) as GraphChatMessage
    const responseData = await teamsResponse.json()
    logger.info(`[${requestId}] Teams message sent successfully`, {
      messageId: responseData.id,
      attachmentCount: attachments.length,
@@ -158,7 +233,6 @@ export async function POST(request: NextRequest) {
        url: responseData.webUrl || '',
        attachmentCount: attachments.length,
      },
      files: filesOutput,
      },
    })
  } catch (error) {
@@ -2,17 +2,15 @@ import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import {
  secureFetchWithPinnedIP,
  validateUrlWithDNS,
} from '@/lib/core/security/input-validation.server'
import { generateRequestId } from '@/lib/core/utils/request'
import { FileInputSchema } from '@/lib/uploads/utils/file-schemas'
import { isInternalFileUrl, processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { StorageService } from '@/lib/uploads'
import {
  downloadFileFromStorage,
  resolveInternalFileUrl,
} from '@/lib/uploads/utils/file-utils.server'
  extractStorageKey,
  inferContextFromKey,
  isInternalFileUrl,
} from '@/lib/uploads/utils/file-utils'
import { verifyFileAccess } from '@/app/api/files/authorization'

export const dynamic = 'force-dynamic'

@@ -20,9 +18,7 @@ const logger = createLogger('MistralParseAPI')

const MistralParseSchema = z.object({
  apiKey: z.string().min(1, 'API key is required'),
  filePath: z.string().min(1, 'File path is required').optional(),
  fileData: FileInputSchema.optional(),
  file: FileInputSchema.optional(),
  filePath: z.string().min(1, 'File path is required'),
  resultType: z.string().optional(),
  pages: z.array(z.number()).optional(),
  includeImageBase64: z.boolean().optional(),
@@ -53,140 +49,66 @@ export async function POST(request: NextRequest) {
    const body = await request.json()
    const validatedData = MistralParseSchema.parse(body)

    const fileData = validatedData.file || validatedData.fileData
    const filePath = typeof fileData === 'string' ? fileData : validatedData.filePath

    if (!fileData && (!filePath || filePath.trim() === '')) {
      return NextResponse.json({ success: false, error: 'File input is required' }, { status: 400 })
    }

    logger.info(`[${requestId}] Mistral parse request`, {
      hasFileData: Boolean(fileData),
      filePath,
      isWorkspaceFile: filePath ? isInternalFileUrl(filePath) : false,
      filePath: validatedData.filePath,
      isWorkspaceFile: isInternalFileUrl(validatedData.filePath),
      userId,
    })

    const mistralBody: any = {
      model: 'mistral-ocr-latest',
    let fileUrl = validatedData.filePath

    if (isInternalFileUrl(validatedData.filePath)) {
      try {
        const storageKey = extractStorageKey(validatedData.filePath)
        const context = inferContextFromKey(storageKey)

        const hasAccess = await verifyFileAccess(
          storageKey,
          userId,
          undefined, // customConfig
          context, // context
          false // isLocal
        )

        if (!hasAccess) {
          logger.warn(`[${requestId}] Unauthorized presigned URL generation attempt`, {
            userId,
            key: storageKey,
            context,
          })
          return NextResponse.json({ success: false, error: 'File not found' }, { status: 404 })
        }

        fileUrl = await StorageService.generatePresignedDownloadUrl(storageKey, context, 5 * 60)
        logger.info(`[${requestId}] Generated presigned URL for ${context} file`)
      } catch (error) {
        logger.error(`[${requestId}] Failed to generate presigned URL:`, error)
        return NextResponse.json(
          { success: false, error: 'Failed to generate file access URL' },
          { status: 500 }
        )
      }
    } else if (validatedData.filePath?.startsWith('/')) {
      const baseUrl = getBaseUrl()
      fileUrl = `${baseUrl}${validatedData.filePath}`
    }
    if (fileData && typeof fileData === 'object') {
      const rawFile = fileData
      let userFile
      try {
        userFile = processSingleFileToUserFile(rawFile, requestId, logger)
      } catch (error) {
        return NextResponse.json(
          { success: false, error: error instanceof Error ? error.message : 'Failed to process file' },
          { status: 400 }
        )
      }

      let mimeType = userFile.type
      if (!mimeType || mimeType === 'application/octet-stream') {
        const filename = userFile.name?.toLowerCase() || ''
        if (filename.endsWith('.pdf')) {
          mimeType = 'application/pdf'
        } else if (filename.endsWith('.png')) {
          mimeType = 'image/png'
        } else if (filename.endsWith('.jpg') || filename.endsWith('.jpeg')) {
          mimeType = 'image/jpeg'
        } else if (filename.endsWith('.gif')) {
          mimeType = 'image/gif'
        } else if (filename.endsWith('.webp')) {
          mimeType = 'image/webp'
        } else {
          mimeType = 'application/pdf'
        }
      }
      let base64 = userFile.base64
      if (!base64) {
        const buffer = await downloadFileFromStorage(userFile, requestId, logger)
        base64 = buffer.toString('base64')
      }
      const base64Payload = base64.startsWith('data:')
        ? base64
        : `data:${mimeType};base64,${base64}`

      // Mistral API uses different document types for images vs documents
      const isImage = mimeType.startsWith('image/')
      if (isImage) {
        mistralBody.document = { type: 'image_url', image_url: base64Payload }
      } else {
        mistralBody.document = { type: 'document_url', document_url: base64Payload }
      }
    } else if (filePath) {
      let fileUrl = filePath

      const isInternalFilePath = isInternalFileUrl(filePath)
      if (isInternalFilePath) {
        const resolution = await resolveInternalFileUrl(filePath, userId, requestId, logger)
        if (resolution.error) {
          return NextResponse.json(
            { success: false, error: resolution.error.message },
            { status: resolution.error.status }
          )
        }
        fileUrl = resolution.fileUrl || fileUrl
      } else if (filePath.startsWith('/')) {
        logger.warn(`[${requestId}] Invalid internal path`, {
          userId,
          path: filePath.substring(0, 50),
        })
        return NextResponse.json(
          { success: false, error: 'Invalid file path. Only uploaded files are supported for internal paths.' },
          { status: 400 }
        )
      } else {
        const urlValidation = await validateUrlWithDNS(fileUrl, 'filePath')
        if (!urlValidation.isValid) {
          return NextResponse.json({ success: false, error: urlValidation.error }, { status: 400 })
        }
      }

      const imageExtensions = ['.png', '.jpg', '.jpeg', '.gif', '.webp', '.avif']
      const pathname = new URL(fileUrl).pathname.toLowerCase()
      const isImageUrl = imageExtensions.some((ext) => pathname.endsWith(ext))

      if (isImageUrl) {
        mistralBody.document = { type: 'image_url', image_url: fileUrl }
      } else {
        mistralBody.document = { type: 'document_url', document_url: fileUrl }
      }
    }
    const mistralBody: any = {
      model: 'mistral-ocr-latest',
      document: { type: 'document_url', document_url: fileUrl },
    }
    if (validatedData.pages) {
@@ -202,34 +124,15 @@ export async function POST(request: NextRequest) {
      mistralBody.image_min_size = validatedData.imageMinSize
    }

    const mistralEndpoint = 'https://api.mistral.ai/v1/ocr'
    const mistralValidation = await validateUrlWithDNS(mistralEndpoint, 'Mistral API URL')
    if (!mistralValidation.isValid) {
      logger.error(`[${requestId}] Mistral API URL validation failed`, {
        error: mistralValidation.error,
      })
      return NextResponse.json({ success: false, error: 'Failed to reach Mistral API' }, { status: 502 })
    }

    const mistralResponse = await secureFetchWithPinnedIP(
      mistralEndpoint,
      mistralValidation.resolvedIP!,
      {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
          Accept: 'application/json',
          Authorization: `Bearer ${validatedData.apiKey}`,
        },
        body: JSON.stringify(mistralBody),
      }
    )
    const mistralResponse = await fetch('https://api.mistral.ai/v1/ocr', {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        Accept: 'application/json',
        Authorization: `Bearer ${validatedData.apiKey}`,
      },
      body: JSON.stringify(mistralBody),
    })

    if (!mistralResponse.ok) {
      const errorText = await mistralResponse.text()
@@ -1,177 +0,0 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import {
  secureFetchWithPinnedIP,
  validateUrlWithDNS,
} from '@/lib/core/security/input-validation.server'
import { generateRequestId } from '@/lib/core/utils/request'

export const dynamic = 'force-dynamic'

/** Microsoft Graph API error response structure */
interface GraphApiError {
  error?: {
    code?: string
    message?: string
  }
}

/** Microsoft Graph API drive item metadata response */
interface DriveItemMetadata {
  id?: string
  name?: string
  folder?: Record<string, unknown>
  file?: {
    mimeType?: string
  }
}

const logger = createLogger('OneDriveDownloadAPI')

const OneDriveDownloadSchema = z.object({
  accessToken: z.string().min(1, 'Access token is required'),
  fileId: z.string().min(1, 'File ID is required'),
  fileName: z.string().optional().nullable(),
})

export async function POST(request: NextRequest) {
  const requestId = generateRequestId()

  try {
    const authResult = await checkInternalAuth(request, { requireWorkflowId: false })

    if (!authResult.success) {
      logger.warn(`[${requestId}] Unauthorized OneDrive download attempt: ${authResult.error}`)
      return NextResponse.json(
        { success: false, error: authResult.error || 'Authentication required' },
        { status: 401 }
      )
    }

    const body = await request.json()
    const validatedData = OneDriveDownloadSchema.parse(body)

    const { accessToken, fileId, fileName } = validatedData
    const authHeader = `Bearer ${accessToken}`

    logger.info(`[${requestId}] Getting file metadata from OneDrive`, { fileId })

    const metadataUrl = `https://graph.microsoft.com/v1.0/me/drive/items/${fileId}`
    const metadataUrlValidation = await validateUrlWithDNS(metadataUrl, 'metadataUrl')
    if (!metadataUrlValidation.isValid) {
      return NextResponse.json(
        { success: false, error: metadataUrlValidation.error },
        { status: 400 }
      )
    }

    const metadataResponse = await secureFetchWithPinnedIP(
      metadataUrl,
      metadataUrlValidation.resolvedIP!,
      { headers: { Authorization: authHeader } }
    )

    if (!metadataResponse.ok) {
      const errorDetails = (await metadataResponse.json().catch(() => ({}))) as GraphApiError
      logger.error(`[${requestId}] Failed to get file metadata`, {
        status: metadataResponse.status,
        error: errorDetails,
      })
      return NextResponse.json(
        { success: false, error: errorDetails.error?.message || 'Failed to get file metadata' },
        { status: 400 }
      )
    }

    const metadata = (await metadataResponse.json()) as DriveItemMetadata

    if (metadata.folder && !metadata.file) {
      logger.error(`[${requestId}] Attempted to download a folder`, {
        itemId: metadata.id,
        itemName: metadata.name,
      })
      return NextResponse.json(
        { success: false, error: `Cannot download folder "${metadata.name}". Please select a file instead.` },
        { status: 400 }
      )
    }

    const mimeType = metadata.file?.mimeType || 'application/octet-stream'

    logger.info(`[${requestId}] Downloading file from OneDrive`, { fileId, mimeType })

    const downloadUrl = `https://graph.microsoft.com/v1.0/me/drive/items/${fileId}/content`
    const downloadUrlValidation = await validateUrlWithDNS(downloadUrl, 'downloadUrl')
    if (!downloadUrlValidation.isValid) {
      return NextResponse.json(
        { success: false, error: downloadUrlValidation.error },
        { status: 400 }
      )
    }

    const downloadResponse = await secureFetchWithPinnedIP(
      downloadUrl,
      downloadUrlValidation.resolvedIP!,
      { headers: { Authorization: authHeader } }
    )

    if (!downloadResponse.ok) {
      const downloadError = (await downloadResponse.json().catch(() => ({}))) as GraphApiError
      logger.error(`[${requestId}] Failed to download file`, {
        status: downloadResponse.status,
        error: downloadError,
      })
      return NextResponse.json(
        { success: false, error: downloadError.error?.message || 'Failed to download file' },
        { status: 400 }
      )
    }

    const arrayBuffer = await downloadResponse.arrayBuffer()
    const fileBuffer = Buffer.from(arrayBuffer)

    const resolvedName = fileName || metadata.name || 'download'

    logger.info(`[${requestId}] File downloaded successfully`, {
      fileId,
      name: resolvedName,
      size: fileBuffer.length,
      mimeType,
    })

    const base64Data = fileBuffer.toString('base64')

    return NextResponse.json({
      success: true,
      output: {
        file: {
          name: resolvedName,
          mimeType,
          data: base64Data,
          size: fileBuffer.length,
        },
      },
    })
  } catch (error) {
    logger.error(`[${requestId}] Error downloading OneDrive file:`, error)
    return NextResponse.json(
      { success: false, error: error instanceof Error ? error.message : 'Unknown error occurred' },
      { status: 500 }
    )
  }
}
@@ -4,9 +4,7 @@ import * as XLSX from 'xlsx'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { validateMicrosoftGraphId } from '@/lib/core/security/input-validation'
import { secureFetchWithValidation } from '@/lib/core/security/input-validation.server'
import { generateRequestId } from '@/lib/core/utils/request'
import { RawFileInputSchema } from '@/lib/uploads/utils/file-schemas'
import {
  getExtensionFromMimeType,
  processSingleFileToUserFile,
@@ -31,33 +29,12 @@ const ExcelValuesSchema = z.union([
const OneDriveUploadSchema = z.object({
  accessToken: z.string().min(1, 'Access token is required'),
  fileName: z.string().min(1, 'File name is required'),
  file: RawFileInputSchema.optional(),
  file: z.any().optional(),
  folderId: z.string().optional().nullable(),
  mimeType: z.string().nullish(),
  values: ExcelValuesSchema.optional().nullable(),
  conflictBehavior: z.enum(['fail', 'replace', 'rename']).optional().nullable(),
})

/** Microsoft Graph DriveItem response */
interface OneDriveFileData {
  id: string
  name: string
  size: number
  webUrl: string
  createdDateTime: string
  lastModifiedDateTime: string
  file?: { mimeType: string }
  parentReference?: { id: string; path: string }
  '@microsoft.graph.downloadUrl'?: string
}

/** Microsoft Graph Excel range response */
interface ExcelRangeData {
  address?: string
  addressLocal?: string
  values?: unknown[][]
}

export async function POST(request: NextRequest) {
  const requestId = generateRequestId()
@@ -111,9 +88,25 @@ export async function POST(request: NextRequest) {
      )
    }

    let fileToProcess
    if (Array.isArray(rawFile)) {
      if (rawFile.length === 0) {
        return NextResponse.json({ success: false, error: 'No file provided' }, { status: 400 })
      }
      fileToProcess = rawFile[0]
    } else {
      fileToProcess = rawFile
    }

    let userFile
    try {
      userFile = processSingleFileToUserFile(rawFile, requestId, logger)
      userFile = processSingleFileToUserFile(fileToProcess, requestId, logger)
    } catch (error) {
      return NextResponse.json(
        {
@@ -186,23 +179,14 @@ export async function POST(request: NextRequest) {
      uploadUrl = `${MICROSOFT_GRAPH_BASE}/me/drive/root:/${encodeURIComponent(fileName)}:/content`
    }

    // Add conflict behavior if specified (defaults to replace by Microsoft Graph API)
    if (validatedData.conflictBehavior) {
      uploadUrl += `?@microsoft.graph.conflictBehavior=${validatedData.conflictBehavior}`
    }

    const uploadResponse = await secureFetchWithValidation(
      uploadUrl,
      {
        method: 'PUT',
        headers: {
          Authorization: `Bearer ${validatedData.accessToken}`,
          'Content-Type': mimeType,
        },
        body: fileBuffer,
    const uploadResponse = await fetch(uploadUrl, {
      method: 'PUT',
      headers: {
        Authorization: `Bearer ${validatedData.accessToken}`,
        'Content-Type': mimeType,
      },
      'uploadUrl'
    )
      body: new Uint8Array(fileBuffer),
    })

    if (!uploadResponse.ok) {
      const errorText = await uploadResponse.text()
    const fileData = (await uploadResponse.json()) as OneDriveFileData
    const fileData = await uploadResponse.json()

    let excelWriteResult: any | undefined
    const shouldWriteExcelContent =
@@ -225,11 +209,8 @@ export async function POST(request: NextRequest) {
    if (shouldWriteExcelContent) {
      try {
        let workbookSessionId: string | undefined
        const sessionUrl = `${MICROSOFT_GRAPH_BASE}/me/drive/items/${encodeURIComponent(
          fileData.id
        )}/workbook/createSession`
        const sessionResp = await secureFetchWithValidation(
          sessionUrl,
        const sessionResp = await fetch(
          `${MICROSOFT_GRAPH_BASE}/me/drive/items/${encodeURIComponent(fileData.id)}/workbook/createSession`,
          {
            method: 'POST',
            headers: {
@@ -237,12 +218,11 @@ export async function POST(request: NextRequest) {
              'Content-Type': 'application/json',
            },
            body: JSON.stringify({ persistChanges: true }),
          },
          'sessionUrl'
          }
        )

        if (sessionResp.ok) {
          const sessionData = (await sessionResp.json()) as { id?: string }
          const sessionData = await sessionResp.json()
          workbookSessionId = sessionData?.id
        }

@@ -251,19 +231,14 @@ export async function POST(request: NextRequest) {
        const listUrl = `${MICROSOFT_GRAPH_BASE}/me/drive/items/${encodeURIComponent(
          fileData.id
        )}/workbook/worksheets?$select=name&$orderby=position&$top=1`
        const listResp = await secureFetchWithValidation(
          listUrl,
          {
            method: 'GET',
            headers: {
              Authorization: `Bearer ${validatedData.accessToken}`,
              ...(workbookSessionId ? { 'workbook-session-id': workbookSessionId } : {}),
            },
        const listResp = await fetch(listUrl, {
          headers: {
            Authorization: `Bearer ${validatedData.accessToken}`,
            ...(workbookSessionId ? { 'workbook-session-id': workbookSessionId } : {}),
          },
          'listUrl'
        )
        })
        if (listResp.ok) {
          const listData = (await listResp.json()) as { value?: Array<{ name?: string }> }
          const listData = await listResp.json()
          const firstSheetName = listData?.value?.[0]?.name
          if (firstSheetName) {
            sheetName = firstSheetName
@@ -322,19 +297,15 @@ export async function POST(request: NextRequest) {
        )}')/range(address='${encodeURIComponent(computedRangeAddress)}')`
        )

        const excelWriteResponse = await secureFetchWithValidation(
          url.toString(),
          {
            method: 'PATCH',
            headers: {
              Authorization: `Bearer ${validatedData.accessToken}`,
              'Content-Type': 'application/json',
              ...(workbookSessionId ? { 'workbook-session-id': workbookSessionId } : {}),
            },
            body: JSON.stringify({ values: processedValues }),
        const excelWriteResponse = await fetch(url.toString(), {
          method: 'PATCH',
          headers: {
            Authorization: `Bearer ${validatedData.accessToken}`,
            'Content-Type': 'application/json',
            ...(workbookSessionId ? { 'workbook-session-id': workbookSessionId } : {}),
          },
          'excelWriteUrl'
        )
          body: JSON.stringify({ values: processedValues }),
        })

        if (!excelWriteResponse || !excelWriteResponse.ok) {
          const errorText = excelWriteResponse ? await excelWriteResponse.text() : 'no response'
@@ -349,7 +320,7 @@ export async function POST(request: NextRequest) {
            details: errorText,
          }
        } else {
          const writeData = (await excelWriteResponse.json()) as ExcelRangeData
          const writeData = await excelWriteResponse.json()
          const addr = writeData.address || writeData.addressLocal
          const v = writeData.values || []
          excelWriteResult = {
@@ -357,25 +328,21 @@ export async function POST(request: NextRequest) {
            updatedRange: addr,
            updatedRows: Array.isArray(v) ? v.length : undefined,
            updatedColumns: Array.isArray(v) && v[0] ? v[0].length : undefined,
            updatedCells: Array.isArray(v) && v[0] ? v.length * v[0].length : undefined,
            updatedCells: Array.isArray(v) && v[0] ? v.length * (v[0] as any[]).length : undefined,
          }
        }

        if (workbookSessionId) {
          try {
            const closeUrl = `${MICROSOFT_GRAPH_BASE}/me/drive/items/${encodeURIComponent(
              fileData.id
            )}/workbook/closeSession`
            const closeResp = await secureFetchWithValidation(
              closeUrl,
            const closeResp = await fetch(
              `${MICROSOFT_GRAPH_BASE}/me/drive/items/${encodeURIComponent(fileData.id)}/workbook/closeSession`,
              {
                method: 'POST',
                headers: {
                  Authorization: `Bearer ${validatedData.accessToken}`,
                  'workbook-session-id': workbookSessionId,
                },
              },
              'closeSessionUrl'
              }
            )
            if (!closeResp.ok) {
              const closeText = await closeResp.text()
@@ -3,7 +3,6 @@ import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'

@@ -19,7 +18,7 @@ const OutlookDraftSchema = z.object({
  contentType: z.enum(['text', 'html']).optional().nullable(),
  cc: z.string().optional().nullable(),
  bcc: z.string().optional().nullable(),
  attachments: RawFileInputArraySchema.optional().nullable(),
  attachments: z.array(z.any()).optional().nullable(),
})

export async function POST(request: NextRequest) {

@@ -3,7 +3,6 @@ import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'

@@ -21,7 +20,7 @@ const OutlookSendSchema = z.object({
  bcc: z.string().optional().nullable(),
  replyToMessageId: z.string().optional().nullable(),
  conversationId: z.string().optional().nullable(),
  attachments: RawFileInputArraySchema.optional().nullable(),
  attachments: z.array(z.any()).optional().nullable(),
})

export async function POST(request: NextRequest) {
@@ -96,14 +95,14 @@ export async function POST(request: NextRequest) {

    if (attachments.length > 0) {
      const totalSize = attachments.reduce((sum, file) => sum + file.size, 0)
      const maxSize = 3 * 1024 * 1024 // 3MB - Microsoft Graph API limit for inline attachments
      const maxSize = 4 * 1024 * 1024 // 4MB

      if (totalSize > maxSize) {
        const sizeMB = (totalSize / (1024 * 1024)).toFixed(2)
        return NextResponse.json(
          {
            success: false,
            error: `Total attachment size (${sizeMB}MB) exceeds Microsoft Graph API limit of 3MB per request`,
            error: `Total attachment size (${sizeMB}MB) exceeds Outlook's limit of 4MB per request`,
          },
          { status: 400 }
        )
@@ -1,165 +0,0 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import {
  secureFetchWithPinnedIP,
  validateUrlWithDNS,
} from '@/lib/core/security/input-validation.server'
import { generateRequestId } from '@/lib/core/utils/request'
import { getFileExtension, getMimeTypeFromExtension } from '@/lib/uploads/utils/file-utils'

export const dynamic = 'force-dynamic'

const logger = createLogger('PipedriveGetFilesAPI')

interface PipedriveFile {
  id?: number
  name?: string
  url?: string
}

interface PipedriveApiResponse {
  success: boolean
  data?: PipedriveFile[]
  error?: string
}

const PipedriveGetFilesSchema = z.object({
  accessToken: z.string().min(1, 'Access token is required'),
  deal_id: z.string().optional().nullable(),
  person_id: z.string().optional().nullable(),
  org_id: z.string().optional().nullable(),
  limit: z.string().optional().nullable(),
  downloadFiles: z.boolean().optional().default(false),
})

export async function POST(request: NextRequest) {
  const requestId = generateRequestId()

  try {
    const authResult = await checkInternalAuth(request, { requireWorkflowId: false })

    if (!authResult.success) {
      logger.warn(`[${requestId}] Unauthorized Pipedrive get files attempt: ${authResult.error}`)
      return NextResponse.json(
        { success: false, error: authResult.error || 'Authentication required' },
        { status: 401 }
      )
    }

    const body = await request.json()
    const validatedData = PipedriveGetFilesSchema.parse(body)

    const { accessToken, deal_id, person_id, org_id, limit, downloadFiles } = validatedData

    const baseUrl = 'https://api.pipedrive.com/v1/files'
    const queryParams = new URLSearchParams()

    if (deal_id) queryParams.append('deal_id', deal_id)
    if (person_id) queryParams.append('person_id', person_id)
    if (org_id) queryParams.append('org_id', org_id)
    if (limit) queryParams.append('limit', limit)

    const queryString = queryParams.toString()
    const apiUrl = queryString ? `${baseUrl}?${queryString}` : baseUrl

    logger.info(`[${requestId}] Fetching files from Pipedrive`, { deal_id, person_id, org_id })

    const urlValidation = await validateUrlWithDNS(apiUrl, 'apiUrl')
    if (!urlValidation.isValid) {
      return NextResponse.json({ success: false, error: urlValidation.error }, { status: 400 })
    }

    const response = await secureFetchWithPinnedIP(apiUrl, urlValidation.resolvedIP!, {
      method: 'GET',
      headers: {
        Authorization: `Bearer ${accessToken}`,
        Accept: 'application/json',
      },
    })

    const data = (await response.json()) as PipedriveApiResponse

    if (!data.success) {
      logger.error(`[${requestId}] Pipedrive API request failed`, { data })
      return NextResponse.json(
        { success: false, error: data.error || 'Failed to fetch files from Pipedrive' },
        { status: 400 }
      )
    }

    const files = data.data || []
    const downloadedFiles: Array<{
      name: string
      mimeType: string
      data: string
      size: number
    }> = []

    if (downloadFiles) {
      for (const file of files) {
        if (!file?.url) continue

        try {
          const fileUrlValidation = await validateUrlWithDNS(file.url, 'fileUrl')
          if (!fileUrlValidation.isValid) continue

          const downloadResponse = await secureFetchWithPinnedIP(
            file.url,
            fileUrlValidation.resolvedIP!,
            {
              method: 'GET',
              headers: { Authorization: `Bearer ${accessToken}` },
            }
          )

          if (!downloadResponse.ok) continue

          const arrayBuffer = await downloadResponse.arrayBuffer()
          const buffer = Buffer.from(arrayBuffer)
          const extension = getFileExtension(file.name || '')
          const mimeType =
            downloadResponse.headers.get('content-type') || getMimeTypeFromExtension(extension)
          const fileName = file.name || `pipedrive-file-${file.id || Date.now()}`

          downloadedFiles.push({
            name: fileName,
            mimeType,
            data: buffer.toString('base64'),
            size: buffer.length,
          })
        } catch (error) {
          logger.warn(`[${requestId}] Failed to download file ${file.id}:`, error)
        }
      }
    }

    logger.info(`[${requestId}] Pipedrive files fetched successfully`, {
      fileCount: files.length,
      downloadedCount: downloadedFiles.length,
    })

    return NextResponse.json({
      success: true,
      output: {
        files,
        downloadedFiles: downloadedFiles.length > 0 ? downloadedFiles : undefined,
        total_items: files.length,
        success: true,
      },
    })
  } catch (error) {
    logger.error(`[${requestId}] Error fetching Pipedrive files:`, error)
    return NextResponse.json(
      { success: false, error: error instanceof Error ? error.message : 'Unknown error occurred' },
      { status: 500 }
    )
  }
}
@@ -2,14 +2,15 @@ import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import {
  secureFetchWithPinnedIP,
  validateUrlWithDNS,
} from '@/lib/core/security/input-validation.server'
import { generateRequestId } from '@/lib/core/utils/request'
import { RawFileInputSchema } from '@/lib/uploads/utils/file-schemas'
import { isInternalFileUrl } from '@/lib/uploads/utils/file-utils'
import { resolveFileInputToUrl } from '@/lib/uploads/utils/file-utils.server'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { StorageService } from '@/lib/uploads'
import {
  extractStorageKey,
  inferContextFromKey,
  isInternalFileUrl,
} from '@/lib/uploads/utils/file-utils'
import { verifyFileAccess } from '@/app/api/files/authorization'

export const dynamic = 'force-dynamic'

@@ -17,8 +18,7 @@ const logger = createLogger('PulseParseAPI')

const PulseParseSchema = z.object({
  apiKey: z.string().min(1, 'API key is required'),
  filePath: z.string().optional(),
  file: RawFileInputSchema.optional(),
  filePath: z.string().min(1, 'File path is required'),
  pages: z.string().optional(),
  extractFigure: z.boolean().optional(),
  figureDescription: z.boolean().optional(),
@@ -51,30 +51,50 @@ export async function POST(request: NextRequest) {
    const validatedData = PulseParseSchema.parse(body)

    logger.info(`[${requestId}] Pulse parse request`, {
      fileName: validatedData.file?.name,
      filePath: validatedData.filePath,
      isWorkspaceFile: validatedData.filePath ? isInternalFileUrl(validatedData.filePath) : false,
      isWorkspaceFile: isInternalFileUrl(validatedData.filePath),
      userId,
    })

    const resolution = await resolveFileInputToUrl({
      file: validatedData.file,
      filePath: validatedData.filePath,
      userId,
      requestId,
      logger,
    })
    let fileUrl = validatedData.filePath

    if (resolution.error) {
      return NextResponse.json(
        { success: false, error: resolution.error.message },
        { status: resolution.error.status }
      )
    }
    if (isInternalFileUrl(validatedData.filePath)) {
      try {
        const storageKey = extractStorageKey(validatedData.filePath)
        const context = inferContextFromKey(storageKey)

    const fileUrl = resolution.fileUrl
    if (!fileUrl) {
      return NextResponse.json({ success: false, error: 'File input is required' }, { status: 400 })
        const hasAccess = await verifyFileAccess(storageKey, userId, undefined, context, false)

        if (!hasAccess) {
          logger.warn(`[${requestId}] Unauthorized presigned URL generation attempt`, {
            userId,
            key: storageKey,
            context,
          })
          return NextResponse.json({ success: false, error: 'File not found' }, { status: 404 })
        }

        fileUrl = await StorageService.generatePresignedDownloadUrl(storageKey, context, 5 * 60)
        logger.info(`[${requestId}] Generated presigned URL for ${context} file`)
      } catch (error) {
        logger.error(`[${requestId}] Failed to generate presigned URL:`, error)
        return NextResponse.json(
          { success: false, error: 'Failed to generate file access URL' },
          { status: 500 }
        )
      }
    } else if (validatedData.filePath?.startsWith('/')) {
      const baseUrl = getBaseUrl()
      fileUrl = `${baseUrl}${validatedData.filePath}`
    }

    const formData = new FormData()
@@ -99,36 +119,13 @@ export async function POST(request: NextRequest) {
      formData.append('chunk_size', String(validatedData.chunkSize))
    }

    const pulseEndpoint = 'https://api.runpulse.com/extract'
    const pulseValidation = await validateUrlWithDNS(pulseEndpoint, 'Pulse API URL')
    if (!pulseValidation.isValid) {
      logger.error(`[${requestId}] Pulse API URL validation failed`, {
        error: pulseValidation.error,
      })
      return NextResponse.json({ success: false, error: 'Failed to reach Pulse API' }, { status: 502 })
    }

    const pulsePayload = new Response(formData)
    const contentType = pulsePayload.headers.get('content-type') || 'multipart/form-data'
    const bodyBuffer = Buffer.from(await pulsePayload.arrayBuffer())
    const pulseResponse = await secureFetchWithPinnedIP(
      pulseEndpoint,
      pulseValidation.resolvedIP!,
      {
        method: 'POST',
        headers: {
          'x-api-key': validatedData.apiKey,
          'Content-Type': contentType,
        },
        body: bodyBuffer,
      }
    )
    const pulseResponse = await fetch('https://api.runpulse.com/extract', {
      method: 'POST',
      headers: {
        'x-api-key': validatedData.apiKey,
      },
      body: formData,
    })

    if (!pulseResponse.ok) {
      const errorText = await pulseResponse.text()
@@ -2,14 +2,15 @@ import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import {
  secureFetchWithPinnedIP,
  validateUrlWithDNS,
} from '@/lib/core/security/input-validation.server'
import { generateRequestId } from '@/lib/core/utils/request'
import { RawFileInputSchema } from '@/lib/uploads/utils/file-schemas'
import { isInternalFileUrl } from '@/lib/uploads/utils/file-utils'
import { resolveFileInputToUrl } from '@/lib/uploads/utils/file-utils.server'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { StorageService } from '@/lib/uploads'
import {
  extractStorageKey,
  inferContextFromKey,
  isInternalFileUrl,
} from '@/lib/uploads/utils/file-utils'
import { verifyFileAccess } from '@/app/api/files/authorization'

export const dynamic = 'force-dynamic'

@@ -17,8 +18,7 @@ const logger = createLogger('ReductoParseAPI')

const ReductoParseSchema = z.object({
  apiKey: z.string().min(1, 'API key is required'),
  filePath: z.string().optional(),
  file: RawFileInputSchema.optional(),
  filePath: z.string().min(1, 'File path is required'),
  pages: z.array(z.number()).optional(),
  tableOutputFormat: z.enum(['html', 'md']).optional(),
})
@@ -47,30 +47,56 @@ export async function POST(request: NextRequest) {
    const validatedData = ReductoParseSchema.parse(body)

    logger.info(`[${requestId}] Reducto parse request`, {
      fileName: validatedData.file?.name,
      filePath: validatedData.filePath,
      isWorkspaceFile: validatedData.filePath ? isInternalFileUrl(validatedData.filePath) : false,
      isWorkspaceFile: isInternalFileUrl(validatedData.filePath),
      userId,
    })

    const resolution = await resolveFileInputToUrl({
      file: validatedData.file,
      filePath: validatedData.filePath,
      userId,
      requestId,
      logger,
    })
    let fileUrl = validatedData.filePath

    if (resolution.error) {
      return NextResponse.json(
        { success: false, error: resolution.error.message },
        { status: resolution.error.status }
      )
    }
    if (isInternalFileUrl(validatedData.filePath)) {
      try {
        const storageKey = extractStorageKey(validatedData.filePath)
        const context = inferContextFromKey(storageKey)

    const fileUrl = resolution.fileUrl
    if (!fileUrl) {
      return NextResponse.json({ success: false, error: 'File input is required' }, { status: 400 })
        const hasAccess = await verifyFileAccess(
          storageKey,
          userId,
          undefined, // customConfig
          context, // context
          false // isLocal
        )

        if (!hasAccess) {
          logger.warn(`[${requestId}] Unauthorized presigned URL generation attempt`, {
            userId,
            key: storageKey,
            context,
          })
          return NextResponse.json({ success: false, error: 'File not found' }, { status: 404 })
        }

        fileUrl = await StorageService.generatePresignedDownloadUrl(storageKey, context, 5 * 60)
        logger.info(`[${requestId}] Generated presigned URL for ${context} file`)
      } catch (error) {
        logger.error(`[${requestId}] Failed to generate presigned URL:`, error)
        return NextResponse.json(
          { success: false, error: 'Failed to generate file access URL' },
          { status: 500 }
        )
      }
    } else if (validatedData.filePath?.startsWith('/')) {
      const baseUrl = getBaseUrl()
      fileUrl = `${baseUrl}${validatedData.filePath}`
    }

    const reductoBody: Record<string, unknown> = {
@@ -78,13 +104,8 @@ export async function POST(request: NextRequest) {
    }

    if (validatedData.pages && validatedData.pages.length > 0) {
      // Reducto API expects page_range as an object with start/end, not an array
      const pages = validatedData.pages
      reductoBody.settings = {
        page_range: {
          start: Math.min(...pages),
          end: Math.max(...pages),
        },
        page_range: validatedData.pages,
      }
    }

@@ -94,34 +115,15 @@ export async function POST(request: NextRequest) {
      }
    }

    const reductoEndpoint = 'https://platform.reducto.ai/parse'
    const reductoValidation = await validateUrlWithDNS(reductoEndpoint, 'Reducto API URL')
    if (!reductoValidation.isValid) {
      logger.error(`[${requestId}] Reducto API URL validation failed`, {
        error: reductoValidation.error,
      })
      return NextResponse.json({ success: false, error: 'Failed to reach Reducto API' }, { status: 502 })
    }

    const reductoResponse = await secureFetchWithPinnedIP(
      reductoEndpoint,
      reductoValidation.resolvedIP!,
      {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
          Accept: 'application/json',
          Authorization: `Bearer ${validatedData.apiKey}`,
        },
        body: JSON.stringify(reductoBody),
      }
    )
    const reductoResponse = await fetch('https://platform.reducto.ai/parse', {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        Accept: 'application/json',
        Authorization: `Bearer ${validatedData.apiKey}`,
      },
      body: JSON.stringify(reductoBody),
    })

    if (!reductoResponse.ok) {
      const errorText = await reductoResponse.text()
@@ -4,7 +4,6 @@ import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { RawFileInputSchema } from '@/lib/uploads/utils/file-schemas'
import { processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'

@@ -18,7 +17,7 @@ const S3PutObjectSchema = z.object({
  region: z.string().min(1, 'Region is required'),
  bucketName: z.string().min(1, 'Bucket name is required'),
  objectKey: z.string().min(1, 'Object key is required'),
  file: RawFileInputSchema.optional().nullable(),
  file: z.any().optional().nullable(),
  content: z.string().optional().nullable(),
  contentType: z.string().optional().nullable(),
  acl: z.string().optional().nullable(),
@@ -1,188 +0,0 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'

export const dynamic = 'force-dynamic'

const logger = createLogger('SendGridSendMailAPI')

const SendGridSendMailSchema = z.object({
  apiKey: z.string().min(1, 'API key is required'),
  from: z.string().min(1, 'From email is required'),
  fromName: z.string().optional().nullable(),
  to: z.string().min(1, 'To email is required'),
  toName: z.string().optional().nullable(),
  subject: z.string().optional().nullable(),
  content: z.string().optional().nullable(),
  contentType: z.string().optional().nullable(),
  cc: z.string().optional().nullable(),
  bcc: z.string().optional().nullable(),
  replyTo: z.string().optional().nullable(),
  replyToName: z.string().optional().nullable(),
  templateId: z.string().optional().nullable(),
  dynamicTemplateData: z.any().optional().nullable(),
  attachments: RawFileInputArraySchema.optional().nullable(),
})

export async function POST(request: NextRequest) {
  const requestId = generateRequestId()

  try {
    const authResult = await checkInternalAuth(request, { requireWorkflowId: false })

    if (!authResult.success) {
      logger.warn(`[${requestId}] Unauthorized SendGrid send attempt: ${authResult.error}`)
      return NextResponse.json(
        { success: false, error: authResult.error || 'Authentication required' },
        { status: 401 }
      )
    }

    logger.info(`[${requestId}] Authenticated SendGrid send request via ${authResult.authType}`)

    const body = await request.json()
    const validatedData = SendGridSendMailSchema.parse(body)

    logger.info(`[${requestId}] Sending SendGrid email`, {
      to: validatedData.to,
      subject: validatedData.subject || '(template)',
      hasAttachments: !!(validatedData.attachments && validatedData.attachments.length > 0),
      attachmentCount: validatedData.attachments?.length || 0,
    })

    // Build personalizations
    const personalizations: Record<string, unknown> = {
      to: [
        { email: validatedData.to, ...(validatedData.toName && { name: validatedData.toName }) },
      ],
    }

    if (validatedData.cc) {
      personalizations.cc = [{ email: validatedData.cc }]
    }

    if (validatedData.bcc) {
      personalizations.bcc = [{ email: validatedData.bcc }]
    }

    if (validatedData.templateId && validatedData.dynamicTemplateData) {
      personalizations.dynamic_template_data =
        typeof validatedData.dynamicTemplateData === 'string'
          ? JSON.parse(validatedData.dynamicTemplateData)
          : validatedData.dynamicTemplateData
    }

    // Build mail body
    const mailBody: Record<string, unknown> = {
      personalizations: [personalizations],
      from: {
        email: validatedData.from,
        ...(validatedData.fromName && { name: validatedData.fromName }),
      },
      subject: validatedData.subject,
    }

    if (validatedData.templateId) {
      mailBody.template_id = validatedData.templateId
    } else {
      mailBody.content = [
        {
          type: validatedData.contentType || 'text/plain',
          value: validatedData.content,
        },
      ]
    }

    if (validatedData.replyTo) {
      mailBody.reply_to = {
        email: validatedData.replyTo,
        ...(validatedData.replyToName && { name: validatedData.replyToName }),
      }
    }

    // Process attachments from UserFile objects
    if (validatedData.attachments && validatedData.attachments.length > 0) {
      const rawAttachments = validatedData.attachments
      logger.info(`[${requestId}] Processing ${rawAttachments.length} attachment(s)`)

      const userFiles = processFilesToUserFiles(rawAttachments, requestId, logger)

      if (userFiles.length > 0) {
        const sendGridAttachments = await Promise.all(
          userFiles.map(async (file) => {
            try {
              logger.info(
                `[${requestId}] Downloading attachment: ${file.name} (${file.size} bytes)`
              )
              const buffer = await downloadFileFromStorage(file, requestId, logger)

              return {
                content: buffer.toString('base64'),
                filename: file.name,
                type: file.type || 'application/octet-stream',
                disposition: 'attachment',
              }
            } catch (error) {
              logger.error(`[${requestId}] Failed to download attachment ${file.name}:`, error)
              throw new Error(
                `Failed to download attachment "${file.name}": ${error instanceof Error ? error.message : 'Unknown error'}`
              )
            }
          })
        )

        mailBody.attachments = sendGridAttachments
      }
    }

    // Send to SendGrid
    const response = await fetch('https://api.sendgrid.com/v3/mail/send', {
      method: 'POST',
      headers: {
        Authorization: `Bearer ${validatedData.apiKey}`,
        'Content-Type': 'application/json',
      },
      body: JSON.stringify(mailBody),
    })

    if (!response.ok) {
      const errorData = await response.json().catch(() => ({}))
      const errorMessage =
        errorData.errors?.[0]?.message || errorData.message || 'Failed to send email'
      logger.error(`[${requestId}] SendGrid API error:`, { status: response.status, errorData })
      return NextResponse.json({ success: false, error: errorMessage }, { status: response.status })
    }

    const messageId = response.headers.get('X-Message-Id')
    logger.info(`[${requestId}] Email sent successfully`, { messageId })

    return NextResponse.json({
      success: true,
      output: {
        success: true,
        messageId: messageId || undefined,
        to: validatedData.to,
        subject: validatedData.subject || '',
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      logger.warn(`[${requestId}] Validation error:`, error.errors)
      return NextResponse.json(
        { success: false, error: error.errors[0]?.message || 'Validation failed' },
        { status: 400 }
      )
    }

    logger.error(`[${requestId}] Unexpected error:`, error)
    return NextResponse.json(
      { success: false, error: error instanceof Error ? error.message : 'Unknown error' },
      { status: 500 }
    )
  }
}
@@ -4,7 +4,6 @@ import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { getFileExtension, getMimeTypeFromExtension } from '@/lib/uploads/utils/file-utils'
import { createSftpConnection, getSftp, isPathSafe, sanitizePath } from '@/app/api/tools/sftp/utils'

export const dynamic = 'force-dynamic'
@@ -112,8 +111,6 @@ export async function POST(request: NextRequest) {

    const buffer = Buffer.concat(chunks)
    const fileName = path.basename(remotePath)
    const extension = getFileExtension(fileName)
    const mimeType = getMimeTypeFromExtension(extension)

    let content: string
    if (params.encoding === 'base64') {
@@ -127,12 +124,6 @@ export async function POST(request: NextRequest) {
    return NextResponse.json({
      success: true,
      fileName,
      file: {
        name: fileName,
        mimeType,
        data: buffer.toString('base64'),
        size: buffer.length,
      },
      content,
      size: buffer.length,
      encoding: params.encoding,

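Written out as a type, the `file` object in the SFTP download response above looks like the following. The type name is illustrative only; the route returns a plain object, and whether downstream consumers treat it as the ToolFileData shape used elsewhere is an assumption.

```typescript
// Sketch of the `file` field returned by the SFTP download route above.
interface SftpDownloadedFile {
  name: string // basename of remotePath
  mimeType: string // inferred via getMimeTypeFromExtension(getFileExtension(name))
  data: string // base64-encoded file content
  size: number // byte length of the downloaded buffer
}
```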
@@ -3,7 +3,6 @@ import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
import {
@@ -27,7 +26,14 @@ const UploadSchema = z.object({
  privateKey: z.string().nullish(),
  passphrase: z.string().nullish(),
  remotePath: z.string().min(1, 'Remote path is required'),
  files: RawFileInputArraySchema.optional().nullable(),
  files: z
    .union([z.array(z.any()), z.string(), z.number(), z.null(), z.undefined()])
    .transform((val) => {
      if (Array.isArray(val)) return val
      if (val === null || val === undefined || val === '') return undefined
      return undefined
    })
    .nullish(),
  fileContent: z.string().nullish(),
  fileName: z.string().nullish(),
  overwrite: z.boolean().default(true),

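A minimal sketch of how the looser `files` field introduced above behaves, assuming standard zod semantics: arrays pass through unchanged, while the other accepted primitives collapse to `undefined`.

```typescript
import { z } from 'zod'

// Simplified version of the permissive `files` schema from the hunk above.
const FilesField = z
  .union([z.array(z.any()), z.string(), z.number(), z.null(), z.undefined()])
  .transform((val) => (Array.isArray(val) ? val : undefined))
  .nullish()

FilesField.parse([{ name: 'a.txt' }]) // => [{ name: 'a.txt' }]
FilesField.parse('') // => undefined (empty template resolution)
FilesField.parse(undefined) // => undefined
```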
@@ -2,12 +2,9 @@ import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { secureFetchWithValidation } from '@/lib/core/security/input-validation.server'
import { generateRequestId } from '@/lib/core/utils/request'
import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
import type { MicrosoftGraphDriveItem } from '@/tools/onedrive/types'

export const dynamic = 'force-dynamic'

@@ -19,7 +16,7 @@ const SharepointUploadSchema = z.object({
  driveId: z.string().optional().nullable(),
  folderPath: z.string().optional().nullable(),
  fileName: z.string().optional().nullable(),
  files: RawFileInputArraySchema.optional().nullable(),
  files: z.array(z.any()).optional().nullable(),
})

export async function POST(request: NextRequest) {
@@ -82,23 +79,18 @@ export async function POST(request: NextRequest) {
    let effectiveDriveId = validatedData.driveId
    if (!effectiveDriveId) {
      logger.info(`[${requestId}] No driveId provided, fetching default drive for site`)
      const driveUrl = `https://graph.microsoft.com/v1.0/sites/${validatedData.siteId}/drive`
      const driveResponse = await secureFetchWithValidation(
        driveUrl,
      const driveResponse = await fetch(
        `https://graph.microsoft.com/v1.0/sites/${validatedData.siteId}/drive`,
        {
          method: 'GET',
          headers: {
            Authorization: `Bearer ${validatedData.accessToken}`,
            Accept: 'application/json',
          },
        },
        'driveUrl'
        }
      )

      if (!driveResponse.ok) {
        const errorData = (await driveResponse.json().catch(() => ({}))) as {
          error?: { message?: string }
        }
        const errorData = await driveResponse.json().catch(() => ({}))
        logger.error(`[${requestId}] Failed to get default drive:`, errorData)
        return NextResponse.json(
          {
@@ -109,7 +101,7 @@ export async function POST(request: NextRequest) {
        )
      }

      const driveData = (await driveResponse.json()) as { id: string }
      const driveData = await driveResponse.json()
      effectiveDriveId = driveData.id
      logger.info(`[${requestId}] Using default drive: ${effectiveDriveId}`)
    }
@@ -153,87 +145,34 @@ export async function POST(request: NextRequest) {
|
||||
|
||||
logger.info(`[${requestId}] Uploading to: ${uploadUrl}`)
|
||||
|
||||
const uploadResponse = await secureFetchWithValidation(
|
||||
uploadUrl,
|
||||
{
|
||||
method: 'PUT',
|
||||
headers: {
|
||||
Authorization: `Bearer ${validatedData.accessToken}`,
|
||||
'Content-Type': userFile.type || 'application/octet-stream',
|
||||
},
|
||||
body: buffer,
|
||||
const uploadResponse = await fetch(uploadUrl, {
|
||||
method: 'PUT',
|
||||
headers: {
|
||||
Authorization: `Bearer ${validatedData.accessToken}`,
|
||||
'Content-Type': userFile.type || 'application/octet-stream',
|
||||
},
|
||||
'uploadUrl'
|
||||
)
|
||||
body: new Uint8Array(buffer),
|
||||
})
|
||||
|
||||
if (!uploadResponse.ok) {
|
||||
const errorData = await uploadResponse.json().catch(() => ({}))
|
||||
logger.error(`[${requestId}] Failed to upload file ${fileName}:`, errorData)
|
||||
|
||||
if (uploadResponse.status === 409) {
|
||||
// File exists - retry with conflict behavior set to replace
|
||||
logger.warn(`[${requestId}] File ${fileName} already exists, retrying with replace`)
|
||||
const replaceUrl = `${uploadUrl}?@microsoft.graph.conflictBehavior=replace`
|
||||
const replaceResponse = await secureFetchWithValidation(
|
||||
replaceUrl,
|
||||
{
|
||||
method: 'PUT',
|
||||
headers: {
|
||||
Authorization: `Bearer ${validatedData.accessToken}`,
|
||||
'Content-Type': userFile.type || 'application/octet-stream',
|
||||
},
|
||||
body: buffer,
|
||||
},
|
||||
'replaceUrl'
|
||||
)
|
||||
|
||||
if (!replaceResponse.ok) {
|
||||
const replaceErrorData = (await replaceResponse.json().catch(() => ({}))) as {
|
||||
error?: { message?: string }
|
||||
}
|
||||
logger.error(`[${requestId}] Failed to replace file ${fileName}:`, replaceErrorData)
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: replaceErrorData.error?.message || `Failed to replace file: ${fileName}`,
|
||||
},
|
||||
{ status: replaceResponse.status }
|
||||
)
|
||||
}
|
||||
|
||||
const replaceData = (await replaceResponse.json()) as {
|
||||
id: string
|
||||
name: string
|
||||
webUrl: string
|
||||
size: number
|
||||
createdDateTime: string
|
||||
lastModifiedDateTime: string
|
||||
}
|
||||
logger.info(`[${requestId}] File replaced successfully: ${fileName}`)
|
||||
|
||||
uploadedFiles.push({
|
||||
id: replaceData.id,
|
||||
name: replaceData.name,
|
||||
webUrl: replaceData.webUrl,
|
||||
size: replaceData.size,
|
||||
createdDateTime: replaceData.createdDateTime,
|
||||
lastModifiedDateTime: replaceData.lastModifiedDateTime,
|
||||
})
|
||||
logger.warn(`[${requestId}] File ${fileName} already exists, attempting to replace`)
|
||||
continue
|
||||
}
|
||||
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error:
|
||||
(errorData as { error?: { message?: string } }).error?.message ||
|
||||
`Failed to upload file: ${fileName}`,
|
||||
error: errorData.error?.message || `Failed to upload file: ${fileName}`,
|
||||
},
|
||||
{ status: uploadResponse.status }
|
||||
)
|
||||
}
|
||||
|
||||
const uploadData = (await uploadResponse.json()) as MicrosoftGraphDriveItem
|
||||
const uploadData = await uploadResponse.json()
|
||||
logger.info(`[${requestId}] File uploaded successfully: ${fileName}`)
|
||||
|
||||
uploadedFiles.push({
|
||||
|
||||
@@ -1,170 +0,0 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
import {
|
||||
secureFetchWithPinnedIP,
|
||||
validateUrlWithDNS,
|
||||
} from '@/lib/core/security/input-validation.server'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
const logger = createLogger('SlackDownloadAPI')
|
||||
|
||||
const SlackDownloadSchema = z.object({
|
||||
accessToken: z.string().min(1, 'Access token is required'),
|
||||
fileId: z.string().min(1, 'File ID is required'),
|
||||
fileName: z.string().optional().nullable(),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = generateRequestId()
|
||||
|
||||
try {
|
||||
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
|
||||
|
||||
if (!authResult.success) {
|
||||
logger.warn(`[${requestId}] Unauthorized Slack download attempt: ${authResult.error}`)
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: authResult.error || 'Authentication required',
|
||||
},
|
||||
{ status: 401 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Authenticated Slack download request via ${authResult.authType}`, {
|
||||
userId: authResult.userId,
|
||||
})
|
||||
|
||||
const body = await request.json()
|
||||
const validatedData = SlackDownloadSchema.parse(body)
|
||||
|
||||
const { accessToken, fileId, fileName } = validatedData
|
||||
|
||||
logger.info(`[${requestId}] Getting file info from Slack`, { fileId })
|
||||
|
||||
const infoResponse = await fetch(`https://slack.com/api/files.info?file=${fileId}`, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
},
|
||||
})
|
||||
|
||||
if (!infoResponse.ok) {
|
||||
const errorDetails = await infoResponse.json().catch(() => ({}))
|
||||
logger.error(`[${requestId}] Failed to get file info from Slack`, {
|
||||
status: infoResponse.status,
|
||||
statusText: infoResponse.statusText,
|
||||
error: errorDetails,
|
||||
})
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: errorDetails.error || 'Failed to get file info',
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const data = await infoResponse.json()
|
||||
|
||||
if (!data.ok) {
|
||||
logger.error(`[${requestId}] Slack API returned error`, { error: data.error })
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: data.error || 'Slack API error',
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const file = data.file
|
||||
const resolvedFileName = fileName || file.name || 'download'
|
||||
const mimeType = file.mimetype || 'application/octet-stream'
|
||||
const urlPrivate = file.url_private
|
||||
|
||||
if (!urlPrivate) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: 'File does not have a download URL',
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const urlValidation = await validateUrlWithDNS(urlPrivate, 'urlPrivate')
|
||||
if (!urlValidation.isValid) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: urlValidation.error,
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Downloading file from Slack`, {
|
||||
fileId,
|
||||
fileName: resolvedFileName,
|
||||
mimeType,
|
||||
})
|
||||
|
||||
const downloadResponse = await secureFetchWithPinnedIP(urlPrivate, urlValidation.resolvedIP!, {
|
||||
headers: {
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
},
|
||||
})
|
||||
|
||||
if (!downloadResponse.ok) {
|
||||
logger.error(`[${requestId}] Failed to download file content`, {
|
||||
status: downloadResponse.status,
|
||||
statusText: downloadResponse.statusText,
|
||||
})
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: 'Failed to download file content',
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const arrayBuffer = await downloadResponse.arrayBuffer()
|
||||
const fileBuffer = Buffer.from(arrayBuffer)
|
||||
|
||||
logger.info(`[${requestId}] File downloaded successfully`, {
|
||||
fileId,
|
||||
name: resolvedFileName,
|
||||
size: fileBuffer.length,
|
||||
mimeType,
|
||||
})
|
||||
|
||||
const base64Data = fileBuffer.toString('base64')
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
output: {
|
||||
file: {
|
||||
name: resolvedFileName,
|
||||
mimeType,
|
||||
data: base64Data,
|
||||
size: fileBuffer.length,
|
||||
},
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error(`[${requestId}] Error downloading Slack file:`, error)
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: error instanceof Error ? error.message : 'Unknown error occurred',
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -3,7 +3,6 @@ import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
import { sendSlackMessage } from '../utils'

export const dynamic = 'force-dynamic'
@@ -17,7 +16,7 @@ const SlackSendMessageSchema = z
    userId: z.string().optional().nullable(),
    text: z.string().min(1, 'Message text is required'),
    thread_ts: z.string().optional().nullable(),
    files: RawFileInputArraySchema.optional().nullable(),
    files: z.array(z.any()).optional().nullable(),
  })
  .refine((data) => data.channel || data.userId, {
    message: 'Either channel or userId is required',

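A quick illustration of the `.refine` above: at least one of `channel` or `userId` must be present, otherwise parsing fails with the message shown. The `accessToken` field here is an assumption, since only part of the schema appears in this hunk.

```typescript
// Illustrative only; assumes the rest of SlackSendMessageSchema accepts these fields.
const ok = SlackSendMessageSchema.safeParse({
  accessToken: 'xoxb-...', // assumed field, not shown in this hunk
  channel: 'C0123456789',
  text: 'hello',
})

const bad = SlackSendMessageSchema.safeParse({ accessToken: 'xoxb-...', text: 'hello' })
// bad.success === false, with issue message 'Either channel or userId is required'
```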
@@ -1,8 +1,6 @@
|
||||
import type { Logger } from '@sim/logger'
|
||||
import { secureFetchWithValidation } from '@/lib/core/security/input-validation.server'
|
||||
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
|
||||
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
||||
import type { ToolFileData } from '@/tools/types'
|
||||
|
||||
/**
|
||||
* Sends a message to a Slack channel using chat.postMessage
|
||||
@@ -72,10 +70,9 @@ export async function uploadFilesToSlack(
|
||||
accessToken: string,
|
||||
requestId: string,
|
||||
logger: Logger
|
||||
): Promise<{ fileIds: string[]; files: ToolFileData[] }> {
|
||||
): Promise<string[]> {
|
||||
const userFiles = processFilesToUserFiles(files, requestId, logger)
|
||||
const uploadedFileIds: string[] = []
|
||||
const uploadedFiles: ToolFileData[] = []
|
||||
|
||||
for (const userFile of userFiles) {
|
||||
logger.info(`[${requestId}] Uploading file: ${userFile.name}`)
|
||||
@@ -103,14 +100,10 @@ export async function uploadFilesToSlack(
|
||||
|
||||
logger.info(`[${requestId}] Got upload URL for ${userFile.name}, file_id: ${urlData.file_id}`)
|
||||
|
||||
const uploadResponse = await secureFetchWithValidation(
|
||||
urlData.upload_url,
|
||||
{
|
||||
method: 'POST',
|
||||
body: buffer,
|
||||
},
|
||||
'uploadUrl'
|
||||
)
|
||||
const uploadResponse = await fetch(urlData.upload_url, {
|
||||
method: 'POST',
|
||||
body: new Uint8Array(buffer),
|
||||
})
|
||||
|
||||
if (!uploadResponse.ok) {
|
||||
logger.error(`[${requestId}] Failed to upload file data: ${uploadResponse.status}`)
|
||||
@@ -119,16 +112,9 @@ export async function uploadFilesToSlack(
|
||||
|
||||
logger.info(`[${requestId}] File data uploaded successfully`)
|
||||
uploadedFileIds.push(urlData.file_id)
|
||||
// Only add to uploadedFiles after successful upload to keep arrays in sync
|
||||
uploadedFiles.push({
|
||||
name: userFile.name,
|
||||
mimeType: userFile.type || 'application/octet-stream',
|
||||
data: buffer.toString('base64'),
|
||||
size: buffer.length,
|
||||
})
|
||||
}
|
||||
|
||||
return { fileIds: uploadedFileIds, files: uploadedFiles }
|
||||
return uploadedFileIds
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -138,8 +124,7 @@ export async function completeSlackFileUpload(
|
||||
uploadedFileIds: string[],
|
||||
channel: string,
|
||||
text: string,
|
||||
accessToken: string,
|
||||
threadTs?: string | null
|
||||
accessToken: string
|
||||
): Promise<{ ok: boolean; files?: any[]; error?: string }> {
|
||||
const response = await fetch('https://slack.com/api/files.completeUploadExternal', {
|
||||
method: 'POST',
|
||||
@@ -151,7 +136,6 @@ export async function completeSlackFileUpload(
|
||||
files: uploadedFileIds.map((id) => ({ id })),
|
||||
channel_id: channel,
|
||||
initial_comment: text,
|
||||
...(threadTs && { thread_ts: threadTs }),
|
||||
}),
|
||||
})
|
||||
|
||||
@@ -233,13 +217,7 @@ export async function sendSlackMessage(
|
||||
logger: Logger
|
||||
): Promise<{
|
||||
success: boolean
|
||||
output?: {
|
||||
message: any
|
||||
ts: string
|
||||
channel: string
|
||||
fileCount?: number
|
||||
files?: ToolFileData[]
|
||||
}
|
||||
output?: { message: any; ts: string; channel: string; fileCount?: number }
|
||||
error?: string
|
||||
}> {
|
||||
const { accessToken, text, threadTs, files } = params
|
||||
@@ -271,15 +249,10 @@ export async function sendSlackMessage(
|
||||
|
||||
// Process files
|
||||
logger.info(`[${requestId}] Processing ${files.length} file(s)`)
|
||||
const { fileIds, files: uploadedFiles } = await uploadFilesToSlack(
|
||||
files,
|
||||
accessToken,
|
||||
requestId,
|
||||
logger
|
||||
)
|
||||
const uploadedFileIds = await uploadFilesToSlack(files, accessToken, requestId, logger)
|
||||
|
||||
// No valid files uploaded - send text-only
|
||||
if (fileIds.length === 0) {
|
||||
if (uploadedFileIds.length === 0) {
|
||||
logger.warn(`[${requestId}] No valid files to upload, sending text-only message`)
|
||||
|
||||
const data = await postSlackMessage(accessToken, channel, text, threadTs)
|
||||
@@ -291,8 +264,8 @@ export async function sendSlackMessage(
|
||||
return { success: true, output: formatMessageSuccessResponse(data, text) }
|
||||
}
|
||||
|
||||
// Complete file upload with thread support
|
||||
const completeData = await completeSlackFileUpload(fileIds, channel, text, accessToken, threadTs)
|
||||
// Complete file upload
|
||||
const completeData = await completeSlackFileUpload(uploadedFileIds, channel, text, accessToken)
|
||||
|
||||
if (!completeData.ok) {
|
||||
logger.error(`[${requestId}] Failed to complete upload:`, completeData.error)
|
||||
@@ -309,8 +282,7 @@ export async function sendSlackMessage(
|
||||
message: fileMessage,
|
||||
ts: fileMessage.ts,
|
||||
channel,
|
||||
fileCount: fileIds.length,
|
||||
files: uploadedFiles,
|
||||
fileCount: uploadedFileIds.length,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4,7 +4,6 @@ import nodemailer from 'nodemailer'
|
||||
import { z } from 'zod'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
|
||||
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
|
||||
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
||||
|
||||
@@ -29,7 +28,7 @@ const SmtpSendSchema = z.object({
|
||||
cc: z.string().optional().nullable(),
|
||||
bcc: z.string().optional().nullable(),
|
||||
replyTo: z.string().optional().nullable(),
|
||||
attachments: RawFileInputArraySchema.optional().nullable(),
|
||||
attachments: z.array(z.any()).optional().nullable(),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
|
||||
@@ -5,7 +5,6 @@ import { type NextRequest, NextResponse } from 'next/server'
|
||||
import type { Client, SFTPWrapper } from 'ssh2'
|
||||
import { z } from 'zod'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { getFileExtension, getMimeTypeFromExtension } from '@/lib/uploads/utils/file-utils'
|
||||
import { createSSHConnection, sanitizePath } from '@/app/api/tools/ssh/utils'
|
||||
|
||||
const logger = createLogger('SSHDownloadFileAPI')
|
||||
@@ -80,16 +79,6 @@ export async function POST(request: NextRequest) {
|
||||
})
|
||||
})
|
||||
|
||||
// Check file size limit (50MB to prevent memory exhaustion)
|
||||
const maxSize = 50 * 1024 * 1024
|
||||
if (stats.size > maxSize) {
|
||||
const sizeMB = (stats.size / (1024 * 1024)).toFixed(2)
|
||||
return NextResponse.json(
|
||||
{ error: `File size (${sizeMB}MB) exceeds download limit of 50MB` },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
// Read file content
|
||||
const content = await new Promise<Buffer>((resolve, reject) => {
|
||||
const chunks: Buffer[] = []
|
||||
@@ -107,8 +96,6 @@ export async function POST(request: NextRequest) {
|
||||
})
|
||||
|
||||
const fileName = path.basename(remotePath)
|
||||
const extension = getFileExtension(fileName)
|
||||
const mimeType = getMimeTypeFromExtension(extension)
|
||||
|
||||
// Encode content as base64 for binary safety
|
||||
const base64Content = content.toString('base64')
|
||||
@@ -117,12 +104,6 @@ export async function POST(request: NextRequest) {
|
||||
|
||||
return NextResponse.json({
|
||||
downloaded: true,
|
||||
file: {
|
||||
name: fileName,
|
||||
mimeType,
|
||||
data: base64Content,
|
||||
size: stats.size,
|
||||
},
|
||||
content: base64Content,
|
||||
fileName: fileName,
|
||||
remotePath: remotePath,
|
||||
|
||||
@@ -3,7 +3,6 @@ import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { env } from '@/lib/core/config/env'
|
||||
import { validateUrlWithDNS } from '@/lib/core/security/input-validation.server'
|
||||
import { isSensitiveKey, REDACTED_MARKER } from '@/lib/core/security/redaction'
|
||||
import { ensureZodObject, normalizeUrl } from '@/app/api/tools/stagehand/utils'
|
||||
|
||||
@@ -124,10 +123,6 @@ export async function POST(request: NextRequest) {
|
||||
const variablesObject = processVariables(params.variables)
|
||||
|
||||
const startUrl = normalizeUrl(rawStartUrl)
|
||||
const urlValidation = await validateUrlWithDNS(startUrl, 'startUrl')
|
||||
if (!urlValidation.isValid) {
|
||||
return NextResponse.json({ error: urlValidation.error }, { status: 400 })
|
||||
}
|
||||
|
||||
logger.info('Starting Stagehand agent process', {
|
||||
rawStartUrl,
|
||||
|
||||
@@ -3,7 +3,6 @@ import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { env } from '@/lib/core/config/env'
|
||||
import { validateUrlWithDNS } from '@/lib/core/security/input-validation.server'
|
||||
import { ensureZodObject, normalizeUrl } from '@/app/api/tools/stagehand/utils'
|
||||
|
||||
const logger = createLogger('StagehandExtractAPI')
|
||||
@@ -52,10 +51,6 @@ export async function POST(request: NextRequest) {
|
||||
const params = validationResult.data
|
||||
const { url: rawUrl, instruction, selector, provider, apiKey, schema } = params
|
||||
const url = normalizeUrl(rawUrl)
|
||||
const urlValidation = await validateUrlWithDNS(url, 'url')
|
||||
if (!urlValidation.isValid) {
|
||||
return NextResponse.json({ error: urlValidation.error }, { status: 400 })
|
||||
}
|
||||
|
||||
logger.info('Starting Stagehand extraction process', {
|
||||
rawUrl,
|
||||
|
||||
@@ -2,16 +2,7 @@ import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { extractAudioFromVideo, isVideoFile } from '@/lib/audio/extractor'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { DEFAULT_EXECUTION_TIMEOUT_MS } from '@/lib/core/execution-limits'
|
||||
import {
|
||||
secureFetchWithPinnedIP,
|
||||
validateUrlWithDNS,
|
||||
} from '@/lib/core/security/input-validation.server'
|
||||
import { getMimeTypeFromExtension, isInternalFileUrl } from '@/lib/uploads/utils/file-utils'
|
||||
import {
|
||||
downloadFileFromStorage,
|
||||
resolveInternalFileUrl,
|
||||
} from '@/lib/uploads/utils/file-utils.server'
|
||||
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
||||
import type { UserFile } from '@/executor/types'
|
||||
import type { TranscriptSegment } from '@/tools/stt/types'
|
||||
|
||||
@@ -54,7 +45,6 @@ export async function POST(request: NextRequest) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const userId = authResult.userId
|
||||
const body: SttRequestBody = await request.json()
|
||||
const {
|
||||
provider,
|
||||
@@ -82,25 +72,13 @@ export async function POST(request: NextRequest) {
|
||||
let audioMimeType: string
|
||||
|
||||
if (body.audioFile) {
|
||||
if (Array.isArray(body.audioFile) && body.audioFile.length !== 1) {
|
||||
return NextResponse.json({ error: 'audioFile must be a single file' }, { status: 400 })
|
||||
}
|
||||
const file = Array.isArray(body.audioFile) ? body.audioFile[0] : body.audioFile
|
||||
logger.info(`[${requestId}] Processing uploaded file: ${file.name}`)
|
||||
|
||||
audioBuffer = await downloadFileFromStorage(file, requestId, logger)
|
||||
audioFileName = file.name
|
||||
// file.type may be missing if the file came from a block that doesn't preserve it
|
||||
// Infer from filename extension as fallback
|
||||
const ext = file.name.split('.').pop()?.toLowerCase() || ''
|
||||
audioMimeType = file.type || getMimeTypeFromExtension(ext)
|
||||
audioMimeType = file.type
|
||||
} else if (body.audioFileReference) {
|
||||
if (Array.isArray(body.audioFileReference) && body.audioFileReference.length !== 1) {
|
||||
return NextResponse.json(
|
||||
{ error: 'audioFileReference must be a single file' },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
const file = Array.isArray(body.audioFileReference)
|
||||
? body.audioFileReference[0]
|
||||
: body.audioFileReference
|
||||
@@ -108,54 +86,18 @@ export async function POST(request: NextRequest) {
|
||||
|
||||
audioBuffer = await downloadFileFromStorage(file, requestId, logger)
|
||||
audioFileName = file.name
|
||||
|
||||
const ext = file.name.split('.').pop()?.toLowerCase() || ''
|
||||
audioMimeType = file.type || getMimeTypeFromExtension(ext)
|
||||
audioMimeType = file.type
|
||||
} else if (body.audioUrl) {
|
||||
logger.info(`[${requestId}] Downloading from URL: ${body.audioUrl}`)
|
||||
|
||||
let audioUrl = body.audioUrl.trim()
|
||||
if (audioUrl.startsWith('/') && !isInternalFileUrl(audioUrl)) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: 'Invalid file path. Only uploaded files are supported for internal paths.',
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
if (isInternalFileUrl(audioUrl)) {
|
||||
if (!userId) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Authentication required for internal file access' },
|
||||
{ status: 401 }
|
||||
)
|
||||
}
|
||||
const resolution = await resolveInternalFileUrl(audioUrl, userId, requestId, logger)
|
||||
if (resolution.error) {
|
||||
return NextResponse.json(
|
||||
{ error: resolution.error.message },
|
||||
{ status: resolution.error.status }
|
||||
)
|
||||
}
|
||||
audioUrl = resolution.fileUrl || audioUrl
|
||||
}
|
||||
|
||||
const urlValidation = await validateUrlWithDNS(audioUrl, 'audioUrl')
|
||||
if (!urlValidation.isValid) {
|
||||
return NextResponse.json({ error: urlValidation.error }, { status: 400 })
|
||||
}
|
||||
|
||||
const response = await secureFetchWithPinnedIP(audioUrl, urlValidation.resolvedIP!, {
|
||||
method: 'GET',
|
||||
})
|
||||
const response = await fetch(body.audioUrl)
|
||||
if (!response.ok) {
|
||||
throw new Error(`Failed to download audio from URL: ${response.statusText}`)
|
||||
}
|
||||
|
||||
const arrayBuffer = await response.arrayBuffer()
|
||||
audioBuffer = Buffer.from(arrayBuffer)
|
||||
audioFileName = audioUrl.split('/').pop() || 'audio_file'
|
||||
audioFileName = body.audioUrl.split('/').pop() || 'audio_file'
|
||||
audioMimeType = response.headers.get('content-type') || 'audio/mpeg'
|
||||
} else {
|
||||
return NextResponse.json(
|
||||
@@ -207,9 +149,7 @@ export async function POST(request: NextRequest) {
|
||||
translateToEnglish,
|
||||
model,
|
||||
body.prompt,
|
||||
body.temperature,
|
||||
audioMimeType,
|
||||
audioFileName
|
||||
body.temperature
|
||||
)
|
||||
transcript = result.transcript
|
||||
segments = result.segments
|
||||
@@ -222,8 +162,7 @@ export async function POST(request: NextRequest) {
|
||||
language,
|
||||
timestamps,
|
||||
diarization,
|
||||
model,
|
||||
audioMimeType
|
||||
model
|
||||
)
|
||||
transcript = result.transcript
|
||||
segments = result.segments
|
||||
@@ -313,9 +252,7 @@ async function transcribeWithWhisper(
|
||||
translate?: boolean,
|
||||
model?: string,
|
||||
prompt?: string,
|
||||
temperature?: number,
|
||||
mimeType?: string,
|
||||
fileName?: string
|
||||
temperature?: number
|
||||
): Promise<{
|
||||
transcript: string
|
||||
segments?: TranscriptSegment[]
|
||||
@@ -324,11 +261,8 @@ async function transcribeWithWhisper(
|
||||
}> {
|
||||
const formData = new FormData()
|
||||
|
||||
// Use actual MIME type and filename if provided
|
||||
const actualMimeType = mimeType || 'audio/mpeg'
|
||||
const actualFileName = fileName || 'audio.mp3'
|
||||
const blob = new Blob([new Uint8Array(audioBuffer)], { type: actualMimeType })
|
||||
formData.append('file', blob, actualFileName)
|
||||
const blob = new Blob([new Uint8Array(audioBuffer)], { type: 'audio/mpeg' })
|
||||
formData.append('file', blob, 'audio.mp3')
|
||||
formData.append('model', model || 'whisper-1')
|
||||
|
||||
if (language && language !== 'auto') {
|
||||
@@ -345,11 +279,10 @@ async function transcribeWithWhisper(
|
||||
|
||||
formData.append('response_format', 'verbose_json')
|
||||
|
||||
// OpenAI API uses array notation for timestamp_granularities
|
||||
if (timestamps === 'word') {
|
||||
formData.append('timestamp_granularities[]', 'word')
|
||||
formData.append('timestamp_granularities', 'word')
|
||||
} else if (timestamps === 'sentence') {
|
||||
formData.append('timestamp_granularities[]', 'segment')
|
||||
formData.append('timestamp_granularities', 'segment')
|
||||
}
|
||||
|
||||
const endpoint = translate ? 'translations' : 'transcriptions'
|
||||
@@ -392,8 +325,7 @@ async function transcribeWithDeepgram(
|
||||
language?: string,
|
||||
timestamps?: 'none' | 'sentence' | 'word',
|
||||
diarization?: boolean,
|
||||
model?: string,
|
||||
mimeType?: string
|
||||
model?: string
|
||||
): Promise<{
|
||||
transcript: string
|
||||
segments?: TranscriptSegment[]
|
||||
@@ -425,7 +357,7 @@ async function transcribeWithDeepgram(
|
||||
method: 'POST',
|
||||
headers: {
|
||||
Authorization: `Token ${apiKey}`,
|
||||
'Content-Type': mimeType || 'audio/mpeg',
|
||||
'Content-Type': 'audio/mpeg',
|
||||
},
|
||||
body: new Uint8Array(audioBuffer),
|
||||
})
|
||||
@@ -581,8 +513,7 @@ async function transcribeWithAssemblyAI(
|
||||
audio_url: upload_url,
|
||||
}
|
||||
|
||||
// AssemblyAI supports 'best', 'slam-1', or 'universal' for speech_model
|
||||
if (model === 'best' || model === 'slam-1' || model === 'universal') {
|
||||
if (model === 'best' || model === 'nano') {
|
||||
transcriptRequest.speech_model = model
|
||||
}
|
||||
|
||||
@@ -637,8 +568,7 @@ async function transcribeWithAssemblyAI(
|
||||
|
||||
let transcript: any
|
||||
let attempts = 0
|
||||
const pollIntervalMs = 5000
|
||||
const maxAttempts = Math.ceil(DEFAULT_EXECUTION_TIMEOUT_MS / pollIntervalMs)
|
||||
const maxAttempts = 60 // 5 minutes with 5-second intervals
|
||||
|
||||
while (attempts < maxAttempts) {
|
||||
const statusResponse = await fetch(`https://api.assemblyai.com/v2/transcript/${id}`, {
|
||||
|
||||
@@ -3,7 +3,6 @@ import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { FileInputSchema } from '@/lib/uploads/utils/file-schemas'
|
||||
import { processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
|
||||
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
||||
|
||||
@@ -17,7 +16,7 @@ const SupabaseStorageUploadSchema = z.object({
|
||||
bucket: z.string().min(1, 'Bucket name is required'),
|
||||
fileName: z.string().min(1, 'File name is required'),
|
||||
path: z.string().optional().nullable(),
|
||||
fileData: FileInputSchema,
|
||||
fileData: z.any(),
|
||||
contentType: z.string().optional().nullable(),
|
||||
upsert: z.boolean().optional().default(false),
|
||||
})
|
||||
|
||||
@@ -3,7 +3,6 @@ import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
|
||||
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
|
||||
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
||||
import { convertMarkdownToHTML } from '@/tools/telegram/utils'
|
||||
@@ -15,7 +14,7 @@ const logger = createLogger('TelegramSendDocumentAPI')
|
||||
const TelegramSendDocumentSchema = z.object({
|
||||
botToken: z.string().min(1, 'Bot token is required'),
|
||||
chatId: z.string().min(1, 'Chat ID is required'),
|
||||
files: RawFileInputArraySchema.optional().nullable(),
|
||||
files: z.array(z.any()).optional().nullable(),
|
||||
caption: z.string().optional().nullable(),
|
||||
})
|
||||
|
||||
@@ -94,14 +93,6 @@ export async function POST(request: NextRequest) {
|
||||
logger.info(`[${requestId}] Uploading document: ${userFile.name}`)
|
||||
|
||||
const buffer = await downloadFileFromStorage(userFile, requestId, logger)
|
||||
const filesOutput = [
|
||||
{
|
||||
name: userFile.name,
|
||||
mimeType: userFile.type || 'application/octet-stream',
|
||||
data: buffer.toString('base64'),
|
||||
size: buffer.length,
|
||||
},
|
||||
]
|
||||
|
||||
logger.info(`[${requestId}] Downloaded file: ${buffer.length} bytes`)
|
||||
|
||||
@@ -144,7 +135,6 @@ export async function POST(request: NextRequest) {
|
||||
output: {
|
||||
message: 'Document sent successfully',
|
||||
data: data.result,
|
||||
files: filesOutput,
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
|
||||
@@ -3,19 +3,19 @@ import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { DEFAULT_EXECUTION_TIMEOUT_MS } from '@/lib/core/execution-limits'
|
||||
import { validateAwsRegion, validateS3BucketName } from '@/lib/core/security/input-validation'
|
||||
import {
|
||||
secureFetchWithPinnedIP,
|
||||
validateUrlWithDNS,
|
||||
} from '@/lib/core/security/input-validation.server'
|
||||
validateAwsRegion,
|
||||
validateExternalUrl,
|
||||
validateS3BucketName,
|
||||
} from '@/lib/core/security/input-validation'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { RawFileInputSchema } from '@/lib/uploads/utils/file-schemas'
|
||||
import { isInternalFileUrl, processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
|
||||
import { StorageService } from '@/lib/uploads'
|
||||
import {
|
||||
downloadFileFromStorage,
|
||||
resolveInternalFileUrl,
|
||||
} from '@/lib/uploads/utils/file-utils.server'
|
||||
extractStorageKey,
|
||||
inferContextFromKey,
|
||||
isInternalFileUrl,
|
||||
} from '@/lib/uploads/utils/file-utils'
|
||||
import { verifyFileAccess } from '@/app/api/files/authorization'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
export const maxDuration = 300 // 5 minutes for large multi-page PDF processing
|
||||
@@ -35,7 +35,6 @@ const TextractParseSchema = z
|
||||
region: z.string().min(1, 'AWS region is required'),
|
||||
processingMode: z.enum(['sync', 'async']).optional().default('sync'),
|
||||
filePath: z.string().optional(),
|
||||
file: RawFileInputSchema.optional(),
|
||||
s3Uri: z.string().optional(),
|
||||
featureTypes: z
|
||||
.array(z.enum(['TABLES', 'FORMS', 'QUERIES', 'SIGNATURES', 'LAYOUT']))
|
||||
@@ -51,20 +50,6 @@ const TextractParseSchema = z
|
||||
path: ['region'],
|
||||
})
|
||||
}
|
||||
if (data.processingMode === 'async' && !data.s3Uri) {
|
||||
ctx.addIssue({
|
||||
code: z.ZodIssueCode.custom,
|
||||
message: 'S3 URI is required for multi-page processing (s3://bucket/key)',
|
||||
path: ['s3Uri'],
|
||||
})
|
||||
}
|
||||
if (data.processingMode !== 'async' && !data.file && !data.filePath) {
|
||||
ctx.addIssue({
|
||||
code: z.ZodIssueCode.custom,
|
||||
message: 'File input is required for single-page processing',
|
||||
path: ['filePath'],
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
function getSignatureKey(
|
||||
@@ -126,14 +111,7 @@ function signAwsRequest(
|
||||
}
|
||||
|
||||
async function fetchDocumentBytes(url: string): Promise<{ bytes: string; contentType: string }> {
|
||||
const urlValidation = await validateUrlWithDNS(url, 'Document URL')
|
||||
if (!urlValidation.isValid) {
|
||||
throw new Error(urlValidation.error || 'Invalid document URL')
|
||||
}
|
||||
|
||||
const response = await secureFetchWithPinnedIP(url, urlValidation.resolvedIP!, {
|
||||
method: 'GET',
|
||||
})
|
||||
const response = await fetch(url)
|
||||
if (!response.ok) {
|
||||
throw new Error(`Failed to fetch document: ${response.statusText}`)
|
||||
}
|
||||
@@ -227,8 +205,8 @@ async function pollForJobCompletion(
|
||||
useAnalyzeDocument: boolean,
|
||||
requestId: string
|
||||
): Promise<Record<string, unknown>> {
|
||||
const pollIntervalMs = 5000
|
||||
const maxPollTimeMs = DEFAULT_EXECUTION_TIMEOUT_MS
|
||||
const pollIntervalMs = 5000 // 5 seconds between polls
|
||||
const maxPollTimeMs = 180000 // 3 minutes maximum polling time
|
||||
const maxAttempts = Math.ceil(maxPollTimeMs / pollIntervalMs)
|
||||
|
||||
const getTarget = useAnalyzeDocument
|
||||
@@ -340,8 +318,8 @@ export async function POST(request: NextRequest) {
|
||||
|
||||
logger.info(`[${requestId}] Textract parse request`, {
|
||||
processingMode,
|
||||
hasFile: Boolean(validatedData.file),
|
||||
hasS3Uri: Boolean(validatedData.s3Uri),
|
||||
filePath: validatedData.filePath?.substring(0, 50),
|
||||
s3Uri: validatedData.s3Uri?.substring(0, 50),
|
||||
featureTypes,
|
||||
userId,
|
||||
})
|
||||
@@ -436,89 +414,90 @@ export async function POST(request: NextRequest) {
|
||||
})
|
||||
}
|
||||
|
||||
let bytes = ''
|
||||
let contentType = 'application/octet-stream'
|
||||
let isPdf = false
|
||||
|
||||
if (validatedData.file) {
|
||||
let userFile
|
||||
try {
|
||||
userFile = processSingleFileToUserFile(validatedData.file, requestId, logger)
|
||||
} catch (error) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: error instanceof Error ? error.message : 'Failed to process file',
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const buffer = await downloadFileFromStorage(userFile, requestId, logger)
|
||||
bytes = buffer.toString('base64')
|
||||
contentType = userFile.type || 'application/octet-stream'
|
||||
isPdf = contentType.includes('pdf') || userFile.name?.toLowerCase().endsWith('.pdf')
|
||||
} else if (validatedData.filePath) {
|
||||
let fileUrl = validatedData.filePath
|
||||
|
||||
const isInternalFilePath = isInternalFileUrl(fileUrl)
|
||||
|
||||
if (isInternalFilePath) {
|
||||
const resolution = await resolveInternalFileUrl(fileUrl, userId, requestId, logger)
|
||||
if (resolution.error) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: resolution.error.message,
|
||||
},
|
||||
{ status: resolution.error.status }
|
||||
)
|
||||
}
|
||||
fileUrl = resolution.fileUrl || fileUrl
|
||||
} else if (fileUrl.startsWith('/')) {
|
||||
logger.warn(`[${requestId}] Invalid internal path`, {
|
||||
userId,
|
||||
path: fileUrl.substring(0, 50),
|
||||
})
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: 'Invalid file path. Only uploaded files are supported for internal paths.',
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
} else {
|
||||
const urlValidation = await validateUrlWithDNS(fileUrl, 'Document URL')
|
||||
if (!urlValidation.isValid) {
|
||||
logger.warn(`[${requestId}] SSRF attempt blocked`, {
|
||||
userId,
|
||||
url: fileUrl.substring(0, 100),
|
||||
error: urlValidation.error,
|
||||
})
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: urlValidation.error,
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
const fetched = await fetchDocumentBytes(fileUrl)
|
||||
bytes = fetched.bytes
|
||||
contentType = fetched.contentType
|
||||
isPdf = contentType.includes('pdf') || fileUrl.toLowerCase().endsWith('.pdf')
|
||||
} else {
|
||||
if (!validatedData.filePath) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: 'File input is required for single-page processing',
|
||||
error: 'File path is required for single-page processing',
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
let fileUrl = validatedData.filePath
|
||||
|
||||
const isInternalFilePath = validatedData.filePath && isInternalFileUrl(validatedData.filePath)
|
||||
|
||||
if (isInternalFilePath) {
|
||||
try {
|
||||
const storageKey = extractStorageKey(validatedData.filePath)
|
||||
const context = inferContextFromKey(storageKey)
|
||||
|
||||
const hasAccess = await verifyFileAccess(storageKey, userId, undefined, context, false)
|
||||
|
||||
if (!hasAccess) {
|
||||
logger.warn(`[${requestId}] Unauthorized presigned URL generation attempt`, {
|
||||
userId,
|
||||
key: storageKey,
|
||||
context,
|
||||
})
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: 'File not found',
|
||||
},
|
||||
{ status: 404 }
|
||||
)
|
||||
}
|
||||
|
||||
fileUrl = await StorageService.generatePresignedDownloadUrl(storageKey, context, 5 * 60)
|
||||
logger.info(`[${requestId}] Generated presigned URL for ${context} file`)
|
||||
} catch (error) {
|
||||
logger.error(`[${requestId}] Failed to generate presigned URL:`, error)
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: 'Failed to generate file access URL',
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
} else if (validatedData.filePath?.startsWith('/')) {
|
||||
// Reject arbitrary absolute paths that don't contain /api/files/serve/
|
||||
logger.warn(`[${requestId}] Invalid internal path`, {
|
||||
userId,
|
||||
path: validatedData.filePath.substring(0, 50),
|
||||
})
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: 'Invalid file path. Only uploaded files are supported for internal paths.',
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
} else {
|
||||
const urlValidation = validateExternalUrl(fileUrl, 'Document URL')
|
||||
if (!urlValidation.isValid) {
|
||||
logger.warn(`[${requestId}] SSRF attempt blocked`, {
|
||||
userId,
|
||||
url: fileUrl.substring(0, 100),
|
||||
error: urlValidation.error,
|
||||
})
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: urlValidation.error,
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
const { bytes, contentType } = await fetchDocumentBytes(fileUrl)
|
||||
|
||||
// Track if this is a PDF for better error messaging
|
||||
const isPdf = contentType.includes('pdf') || fileUrl.toLowerCase().endsWith('.pdf')
|
||||
|
||||
const uri = '/'
|
||||
|
||||
let textractBody: Record<string, unknown>
|
||||
|
||||
@@ -2,7 +2,6 @@ import { createLogger } from '@sim/logger'
|
||||
import type { NextRequest } from 'next/server'
|
||||
import { NextResponse } from 'next/server'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { DEFAULT_EXECUTION_TIMEOUT_MS } from '@/lib/core/execution-limits'
|
||||
import { validateAlphanumericId } from '@/lib/core/security/input-validation'
|
||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||
import { StorageService } from '@/lib/uploads'
|
||||
@@ -61,7 +60,7 @@ export async function POST(request: NextRequest) {
|
||||
text,
|
||||
model_id: modelId,
|
||||
}),
|
||||
signal: AbortSignal.timeout(DEFAULT_EXECUTION_TIMEOUT_MS),
|
||||
signal: AbortSignal.timeout(60000),
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
|
||||
@@ -1,250 +0,0 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
import {
|
||||
secureFetchWithPinnedIP,
|
||||
validateUrlWithDNS,
|
||||
} from '@/lib/core/security/input-validation.server'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { getExtensionFromMimeType } from '@/lib/uploads/utils/file-utils'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
const logger = createLogger('TwilioGetRecordingAPI')
|
||||
|
||||
interface TwilioRecordingResponse {
|
||||
sid?: string
|
||||
call_sid?: string
|
||||
duration?: string
|
||||
status?: string
|
||||
channels?: number
|
||||
source?: string
|
||||
price?: string
|
||||
price_unit?: string
|
||||
uri?: string
|
||||
error_code?: number
|
||||
message?: string
|
||||
error_message?: string
|
||||
}
|
||||
|
||||
interface TwilioErrorResponse {
|
||||
message?: string
|
||||
}
|
||||
|
||||
interface TwilioTranscription {
|
||||
transcription_text?: string
|
||||
status?: string
|
||||
price?: string
|
||||
price_unit?: string
|
||||
}
|
||||
|
||||
interface TwilioTranscriptionsResponse {
|
||||
transcriptions?: TwilioTranscription[]
|
||||
}
|
||||
|
||||
const TwilioGetRecordingSchema = z.object({
|
||||
accountSid: z.string().min(1, 'Account SID is required'),
|
||||
authToken: z.string().min(1, 'Auth token is required'),
|
||||
recordingSid: z.string().min(1, 'Recording SID is required'),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = generateRequestId()
|
||||
|
||||
try {
|
||||
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
|
||||
|
||||
if (!authResult.success) {
|
||||
logger.warn(`[${requestId}] Unauthorized Twilio get recording attempt: ${authResult.error}`)
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: authResult.error || 'Authentication required',
|
||||
},
|
||||
{ status: 401 }
|
||||
)
|
||||
}
|
||||
|
||||
const body = await request.json()
|
||||
const validatedData = TwilioGetRecordingSchema.parse(body)
|
||||
|
||||
const { accountSid, authToken, recordingSid } = validatedData
|
||||
|
||||
if (!accountSid.startsWith('AC')) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: `Invalid Account SID format. Account SID must start with "AC" (you provided: ${accountSid.substring(0, 2)}...)`,
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const twilioAuth = Buffer.from(`${accountSid}:${authToken}`).toString('base64')
|
||||
|
||||
logger.info(`[${requestId}] Getting recording info from Twilio`, { recordingSid })
|
||||
|
||||
const infoUrl = `https://api.twilio.com/2010-04-01/Accounts/${accountSid}/Recordings/${recordingSid}.json`
|
||||
const infoUrlValidation = await validateUrlWithDNS(infoUrl, 'infoUrl')
|
||||
if (!infoUrlValidation.isValid) {
|
||||
return NextResponse.json({ success: false, error: infoUrlValidation.error }, { status: 400 })
|
||||
}
|
||||
|
||||
const infoResponse = await secureFetchWithPinnedIP(infoUrl, infoUrlValidation.resolvedIP!, {
|
||||
method: 'GET',
|
||||
headers: { Authorization: `Basic ${twilioAuth}` },
|
||||
})
|
||||
|
||||
if (!infoResponse.ok) {
|
||||
const errorData = (await infoResponse.json().catch(() => ({}))) as TwilioErrorResponse
|
||||
logger.error(`[${requestId}] Twilio API error`, {
|
||||
status: infoResponse.status,
|
||||
error: errorData,
|
||||
})
|
||||
return NextResponse.json(
|
||||
{ success: false, error: errorData.message || `Twilio API error: ${infoResponse.status}` },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const data = (await infoResponse.json()) as TwilioRecordingResponse
|
||||
|
||||
if (data.error_code) {
|
||||
return NextResponse.json({
|
||||
success: false,
|
||||
output: {
|
||||
success: false,
|
||||
error: data.message || data.error_message || 'Failed to retrieve recording',
|
||||
},
|
||||
error: data.message || data.error_message || 'Failed to retrieve recording',
|
||||
})
|
||||
}
|
||||
|
||||
const baseUrl = 'https://api.twilio.com'
|
||||
const mediaUrl = data.uri ? `${baseUrl}${data.uri.replace('.json', '')}` : undefined
|
||||
|
||||
let transcriptionText: string | undefined
|
||||
let transcriptionStatus: string | undefined
|
||||
let transcriptionPrice: string | undefined
|
||||
let transcriptionPriceUnit: string | undefined
|
||||
let file:
|
||||
| {
|
||||
name: string
|
||||
mimeType: string
|
||||
data: string
|
||||
size: number
|
||||
}
|
||||
| undefined
|
||||
|
||||
try {
|
||||
const transcriptionUrl = `https://api.twilio.com/2010-04-01/Accounts/${accountSid}/Transcriptions.json?RecordingSid=${data.sid}`
|
||||
logger.info(`[${requestId}] Checking for transcriptions`)
|
||||
|
||||
const transcriptionUrlValidation = await validateUrlWithDNS(
|
||||
transcriptionUrl,
|
||||
'transcriptionUrl'
|
||||
)
|
||||
if (transcriptionUrlValidation.isValid) {
|
||||
const transcriptionResponse = await secureFetchWithPinnedIP(
|
||||
transcriptionUrl,
|
||||
transcriptionUrlValidation.resolvedIP!,
|
||||
{
|
||||
method: 'GET',
|
||||
headers: { Authorization: `Basic ${twilioAuth}` },
|
||||
}
|
||||
)
|
||||
|
||||
if (transcriptionResponse.ok) {
|
||||
const transcriptionData =
|
||||
(await transcriptionResponse.json()) as TwilioTranscriptionsResponse
|
||||
|
||||
if (transcriptionData.transcriptions && transcriptionData.transcriptions.length > 0) {
|
||||
const transcription = transcriptionData.transcriptions[0]
|
||||
transcriptionText = transcription.transcription_text
|
||||
transcriptionStatus = transcription.status
|
||||
transcriptionPrice = transcription.price
|
||||
transcriptionPriceUnit = transcription.price_unit
|
||||
logger.info(`[${requestId}] Transcription found`, {
|
||||
status: transcriptionStatus,
|
||||
textLength: transcriptionText?.length,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
logger.warn(`[${requestId}] Failed to fetch transcription:`, error)
|
||||
}
|
||||
|
||||
if (mediaUrl) {
|
||||
try {
|
||||
const mediaUrlValidation = await validateUrlWithDNS(mediaUrl, 'mediaUrl')
|
||||
if (mediaUrlValidation.isValid) {
|
||||
const mediaResponse = await secureFetchWithPinnedIP(
|
||||
mediaUrl,
|
||||
mediaUrlValidation.resolvedIP!,
|
||||
{
|
||||
method: 'GET',
|
||||
headers: { Authorization: `Basic ${twilioAuth}` },
|
||||
}
|
||||
)
|
||||
|
||||
if (mediaResponse.ok) {
|
||||
const contentType =
|
||||
mediaResponse.headers.get('content-type') || 'application/octet-stream'
|
||||
const extension = getExtensionFromMimeType(contentType) || 'dat'
|
||||
const arrayBuffer = await mediaResponse.arrayBuffer()
|
||||
const buffer = Buffer.from(arrayBuffer)
|
||||
const fileName = `${data.sid || recordingSid}.${extension}`
|
||||
|
||||
file = {
|
||||
name: fileName,
|
||||
mimeType: contentType,
|
||||
data: buffer.toString('base64'),
|
||||
size: buffer.length,
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
logger.warn(`[${requestId}] Failed to download recording media:`, error)
|
||||
}
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Twilio recording fetched successfully`, {
|
||||
recordingSid: data.sid,
|
||||
hasFile: !!file,
|
||||
hasTranscription: !!transcriptionText,
|
||||
})
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
output: {
|
||||
success: true,
|
||||
recordingSid: data.sid,
|
||||
callSid: data.call_sid,
|
||||
duration: data.duration ? Number.parseInt(data.duration, 10) : undefined,
|
||||
status: data.status,
|
||||
channels: data.channels,
|
||||
source: data.source,
|
||||
mediaUrl,
|
||||
file,
|
||||
price: data.price,
|
||||
priceUnit: data.price_unit,
|
||||
uri: data.uri,
|
||||
transcriptionText,
|
||||
transcriptionStatus,
|
||||
transcriptionPrice,
|
||||
transcriptionPriceUnit,
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error(`[${requestId}] Error fetching Twilio recording:`, error)
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: error instanceof Error ? error.message : 'Unknown error occurred',
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -1,7 +1,6 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { getMaxExecutionTimeout } from '@/lib/core/execution-limits'
|
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
import type { UserFile } from '@/executor/types'
import type { VideoRequestBody } from '@/tools/video/types'

@@ -327,12 +326,11 @@ async function generateWithRunway(
logger.info(`[${requestId}] Runway task created: ${taskId}`)
const pollIntervalMs = 5000
const maxAttempts = Math.ceil(getMaxExecutionTimeout() / pollIntervalMs)
const maxAttempts = 120 // 10 minutes with 5-second intervals
let attempts = 0
while (attempts < maxAttempts) {
await sleep(pollIntervalMs)
await sleep(5000) // Poll every 5 seconds
const statusResponse = await fetch(`https://api.dev.runwayml.com/v1/tasks/${taskId}`, {
headers: {

@@ -372,7 +370,7 @@ async function generateWithRunway(
attempts++
}
throw new Error('Runway generation timed out')
throw new Error('Runway generation timed out after 10 minutes')
}

async function generateWithVeo(

@@ -431,12 +429,11 @@ async function generateWithVeo(
logger.info(`[${requestId}] Veo operation created: ${operationName}`)
const pollIntervalMs = 5000
const maxAttempts = Math.ceil(getMaxExecutionTimeout() / pollIntervalMs)
const maxAttempts = 60 // 5 minutes with 5-second intervals
let attempts = 0
while (attempts < maxAttempts) {
await sleep(pollIntervalMs)
await sleep(5000)
const statusResponse = await fetch(
`https://generativelanguage.googleapis.com/v1beta/${operationName}`,

@@ -488,7 +485,7 @@ async function generateWithVeo(
attempts++
}
throw new Error('Veo generation timed out')
throw new Error('Veo generation timed out after 5 minutes')
}

async function generateWithLuma(

@@ -544,12 +541,11 @@ async function generateWithLuma(
logger.info(`[${requestId}] Luma generation created: ${generationId}`)
const pollIntervalMs = 5000
const maxAttempts = Math.ceil(getMaxExecutionTimeout() / pollIntervalMs)
const maxAttempts = 120 // 10 minutes
let attempts = 0
while (attempts < maxAttempts) {
await sleep(pollIntervalMs)
await sleep(5000)
const statusResponse = await fetch(
`https://api.lumalabs.ai/dream-machine/v1/generations/${generationId}`,

@@ -596,7 +592,7 @@ async function generateWithLuma(
attempts++
}
throw new Error('Luma generation timed out')
throw new Error('Luma generation timed out after 10 minutes')
}

async function generateWithMiniMax(

@@ -662,13 +658,14 @@ async function generateWithMiniMax(
logger.info(`[${requestId}] MiniMax task created: ${taskId}`)
const pollIntervalMs = 5000
const maxAttempts = Math.ceil(getMaxExecutionTimeout() / pollIntervalMs)
// Poll for completion (6-10 minutes typical)
const maxAttempts = 120 // 10 minutes with 5-second intervals
let attempts = 0
while (attempts < maxAttempts) {
await sleep(pollIntervalMs)
await sleep(5000)
// Query task status
const statusResponse = await fetch(
`https://api.minimax.io/v1/query/video_generation?task_id=${taskId}`,
{

@@ -746,7 +743,7 @@ async function generateWithMiniMax(
attempts++
}
throw new Error('MiniMax generation timed out')
throw new Error('MiniMax generation timed out after 10 minutes')
}

// Helper function to strip subpaths from Fal.ai model IDs for status/result endpoints

@@ -864,12 +861,11 @@ async function generateWithFalAI(
// Get base model ID (without subpath) for status and result endpoints
const baseModelId = getBaseModelId(falModelId)
const pollIntervalMs = 5000
const maxAttempts = Math.ceil(getMaxExecutionTimeout() / pollIntervalMs)
const maxAttempts = 96 // 8 minutes with 5-second intervals
let attempts = 0
while (attempts < maxAttempts) {
await sleep(pollIntervalMs)
await sleep(5000)
const statusResponse = await fetch(
`https://queue.fal.run/${baseModelId}/requests/${requestIdFal}/status`,

@@ -942,7 +938,7 @@ async function generateWithFalAI(
attempts++
}
throw new Error('Fal.ai generation timed out')
throw new Error('Fal.ai generation timed out after 8 minutes')
}

function getVideoDimensions(
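The hunks above make the same change in every provider helper (Runway, Veo, Luma, MiniMax, Fal.ai): the hardcoded attempt caps (120, 60, 96) and literal `sleep(5000)` calls give way to a cap derived from `getMaxExecutionTimeout()`. A minimal sketch of that pattern, assuming a generic `checkStatus` callback; the provider clients and result shape here are illustrative, not the actual route code:

```typescript
// Sketch of the timeout-derived polling loop used across the provider helpers above.
// `getMaxExecutionTimeout` is the helper named in the diff; `checkStatus` and
// PollResult are illustrative assumptions for this example.
type PollResult<T> = { done: boolean; value?: T }

const sleep = (ms: number) => new Promise<void>((resolve) => setTimeout(resolve, ms))

async function pollUntilComplete<T>(
  providerName: string,
  getMaxExecutionTimeout: () => number, // total polling budget in milliseconds
  checkStatus: () => Promise<PollResult<T>>
): Promise<T> {
  const pollIntervalMs = 5000
  // Derive the attempt cap from the execution timeout instead of hardcoding 60/96/120.
  const maxAttempts = Math.ceil(getMaxExecutionTimeout() / pollIntervalMs)
  let attempts = 0

  while (attempts < maxAttempts) {
    await sleep(pollIntervalMs)
    const status = await checkStatus()
    if (status.done) return status.value as T
    attempts++
  }

  throw new Error(`${providerName} generation timed out`)
}
```

Deriving `maxAttempts` from the timeout keeps the polling budget in one place: raising the execution timeout automatically extends how long every provider is polled, and the error messages no longer need to restate a hardcoded duration.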
@@ -1,20 +1,10 @@
import { GoogleGenAI } from '@google/genai'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import {
secureFetchWithPinnedIP,
validateUrlWithDNS,
} from '@/lib/core/security/input-validation.server'
import { generateRequestId } from '@/lib/core/utils/request'
import { RawFileInputSchema } from '@/lib/uploads/utils/file-schemas'
import { isInternalFileUrl, processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
import {
downloadFileFromStorage,
resolveInternalFileUrl,
} from '@/lib/uploads/utils/file-utils.server'
import { convertUsageMetadata, extractTextContent } from '@/providers/google/utils'
import { processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'

export const dynamic = 'force-dynamic'

@@ -23,8 +13,8 @@ const logger = createLogger('VisionAnalyzeAPI')
const VisionAnalyzeSchema = z.object({
apiKey: z.string().min(1, 'API key is required'),
imageUrl: z.string().optional().nullable(),
imageFile: RawFileInputSchema.optional().nullable(),
model: z.string().optional().default('gpt-5.2'),
imageFile: z.any().optional().nullable(),
model: z.string().optional().default('gpt-4o'),
prompt: z.string().optional().nullable(),
})
@@ -49,7 +39,6 @@ export async function POST(request: NextRequest) {
|
||||
userId: authResult.userId,
|
||||
})
|
||||
|
||||
const userId = authResult.userId
|
||||
const body = await request.json()
|
||||
const validatedData = VisionAnalyzeSchema.parse(body)
|
||||
|
||||
@@ -88,72 +77,18 @@ export async function POST(request: NextRequest) {
|
||||
)
|
||||
}
|
||||
|
||||
let base64 = userFile.base64
|
||||
let bufferLength = 0
|
||||
if (!base64) {
|
||||
const buffer = await downloadFileFromStorage(userFile, requestId, logger)
|
||||
base64 = buffer.toString('base64')
|
||||
bufferLength = buffer.length
|
||||
}
|
||||
const buffer = await downloadFileFromStorage(userFile, requestId, logger)
|
||||
|
||||
const base64 = buffer.toString('base64')
|
||||
const mimeType = userFile.type || 'image/jpeg'
|
||||
imageSource = `data:${mimeType};base64,${base64}`
|
||||
if (bufferLength > 0) {
|
||||
logger.info(`[${requestId}] Converted image to base64 (${bufferLength} bytes)`)
|
||||
}
|
||||
}
|
||||
|
||||
let imageUrlValidation: Awaited<ReturnType<typeof validateUrlWithDNS>> | null = null
|
||||
if (imageSource && !imageSource.startsWith('data:')) {
|
||||
if (imageSource.startsWith('/') && !isInternalFileUrl(imageSource)) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: 'Invalid file path. Only uploaded files are supported for internal paths.',
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
if (isInternalFileUrl(imageSource)) {
|
||||
if (!userId) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: 'Authentication required for internal file access',
|
||||
},
|
||||
{ status: 401 }
|
||||
)
|
||||
}
|
||||
const resolution = await resolveInternalFileUrl(imageSource, userId, requestId, logger)
|
||||
if (resolution.error) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: resolution.error.message,
|
||||
},
|
||||
{ status: resolution.error.status }
|
||||
)
|
||||
}
|
||||
imageSource = resolution.fileUrl || imageSource
|
||||
}
|
||||
|
||||
imageUrlValidation = await validateUrlWithDNS(imageSource, 'imageUrl')
|
||||
if (!imageUrlValidation.isValid) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: imageUrlValidation.error,
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
logger.info(`[${requestId}] Converted image to base64 (${buffer.length} bytes)`)
|
||||
}
|
||||
|
||||
const defaultPrompt = 'Please analyze this image and describe what you see in detail.'
|
||||
const prompt = validatedData.prompt || defaultPrompt
|
||||
|
||||
const isClaude = validatedData.model.startsWith('claude-')
|
||||
const isGemini = validatedData.model.startsWith('gemini-')
|
||||
const isClaude = validatedData.model.startsWith('claude-3')
|
||||
const apiUrl = isClaude
|
||||
? 'https://api.anthropic.com/v1/messages'
|
||||
: 'https://api.openai.com/v1/chat/completions'
|
||||
@@ -171,72 +106,6 @@ export async function POST(request: NextRequest) {
|
||||
|
||||
let requestBody: any
|
||||
|
||||
if (isGemini) {
|
||||
let base64Payload = imageSource
|
||||
if (!base64Payload.startsWith('data:')) {
|
||||
const urlValidation =
|
||||
imageUrlValidation || (await validateUrlWithDNS(base64Payload, 'imageUrl'))
|
||||
if (!urlValidation.isValid) {
|
||||
return NextResponse.json({ success: false, error: urlValidation.error }, { status: 400 })
|
||||
}
|
||||
|
||||
const response = await secureFetchWithPinnedIP(base64Payload, urlValidation.resolvedIP!, {
|
||||
method: 'GET',
|
||||
})
|
||||
if (!response.ok) {
|
||||
return NextResponse.json(
|
||||
{ success: false, error: 'Failed to fetch image for Gemini' },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
const contentType =
|
||||
response.headers.get('content-type') || validatedData.imageFile?.type || 'image/jpeg'
|
||||
const arrayBuffer = await response.arrayBuffer()
|
||||
const base64 = Buffer.from(arrayBuffer).toString('base64')
|
||||
base64Payload = `data:${contentType};base64,${base64}`
|
||||
}
|
||||
const base64Marker = ';base64,'
|
||||
const markerIndex = base64Payload.indexOf(base64Marker)
|
||||
if (!base64Payload.startsWith('data:') || markerIndex === -1) {
|
||||
return NextResponse.json(
|
||||
{ success: false, error: 'Invalid base64 image format' },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
const rawMimeType = base64Payload.slice('data:'.length, markerIndex)
|
||||
const mediaType = rawMimeType.split(';')[0] || 'image/jpeg'
|
||||
const base64Data = base64Payload.slice(markerIndex + base64Marker.length)
|
||||
if (!base64Data) {
|
||||
return NextResponse.json(
|
||||
{ success: false, error: 'Invalid base64 image format' },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const ai = new GoogleGenAI({ apiKey: validatedData.apiKey })
|
||||
const geminiResponse = await ai.models.generateContent({
|
||||
model: validatedData.model,
|
||||
contents: [
|
||||
{
|
||||
role: 'user',
|
||||
parts: [{ text: prompt }, { inlineData: { mimeType: mediaType, data: base64Data } }],
|
||||
},
|
||||
],
|
||||
})
|
||||
|
||||
const content = extractTextContent(geminiResponse.candidates?.[0])
|
||||
const usage = convertUsageMetadata(geminiResponse.usageMetadata)
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
output: {
|
||||
content,
|
||||
model: validatedData.model,
|
||||
tokens: usage.totalTokenCount || undefined,
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
if (isClaude) {
|
||||
if (imageSource.startsWith('data:')) {
|
||||
const base64Match = imageSource.match(/^data:([^;]+);base64,(.+)$/)
|
||||
@@ -303,7 +172,7 @@ export async function POST(request: NextRequest) {
|
||||
],
|
||||
},
|
||||
],
|
||||
max_completion_tokens: 1000,
|
||||
max_tokens: 1000,
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -3,7 +3,6 @@ import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { RawFileInputSchema } from '@/lib/uploads/utils/file-schemas'
import {
getFileExtension,
getMimeTypeFromExtension,

@@ -20,7 +19,7 @@ const WORDPRESS_COM_API_BASE = 'https://public-api.wordpress.com/wp/v2/sites'
const WordPressUploadSchema = z.object({
accessToken: z.string().min(1, 'Access token is required'),
siteId: z.string().min(1, 'Site ID is required'),
file: RawFileInputSchema.optional().nullable(),
file: z.any().optional().nullable(),
filename: z.string().optional().nullable(),
title: z.string().optional().nullable(),
caption: z.string().optional().nullable(),
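Both this WordPress upload schema and the vision schema earlier replace `z.any()` with `RawFileInputSchema` for their file parameters. A rough sketch of what that buys, using a hypothetical stand-in shape; the real `RawFileInputSchema` lives in '@/lib/uploads/utils/file-schemas' and is not shown in this diff:

```typescript
import { z } from 'zod'

// Hypothetical stand-in for RawFileInputSchema; the actual field list is an assumption.
const RawFileInputSketch = z.object({
  name: z.string(),
  type: z.string(),
  size: z.number().optional(),
  url: z.string().optional(),
  base64: z.string().optional(),
})

// With z.any() the file parameter accepted anything; a structured schema rejects
// malformed payloads before the route handler touches storage or an external API.
const UploadSchemaSketch = z.object({
  accessToken: z.string().min(1, 'Access token is required'),
  siteId: z.string().min(1, 'Site ID is required'),
  file: RawFileInputSketch.optional().nullable(),
})

// Example: parsing fails fast on a malformed file payload instead of failing later.
const parsed = UploadSchemaSketch.safeParse({ accessToken: 't', siteId: '1', file: 42 })
console.log(parsed.success) // false
```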
@@ -1,216 +0,0 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
import {
|
||||
secureFetchWithPinnedIP,
|
||||
validateUrlWithDNS,
|
||||
} from '@/lib/core/security/input-validation.server'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { getExtensionFromMimeType } from '@/lib/uploads/utils/file-utils'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
const logger = createLogger('ZoomGetRecordingsAPI')
|
||||
|
||||
interface ZoomRecordingFile {
|
||||
id?: string
|
||||
meeting_id?: string
|
||||
recording_start?: string
|
||||
recording_end?: string
|
||||
file_type?: string
|
||||
file_extension?: string
|
||||
file_size?: number
|
||||
play_url?: string
|
||||
download_url?: string
|
||||
status?: string
|
||||
recording_type?: string
|
||||
}
|
||||
|
||||
interface ZoomRecordingsResponse {
|
||||
uuid?: string
|
||||
id?: string | number
|
||||
account_id?: string
|
||||
host_id?: string
|
||||
topic?: string
|
||||
type?: number
|
||||
start_time?: string
|
||||
duration?: number
|
||||
total_size?: number
|
||||
recording_count?: number
|
||||
share_url?: string
|
||||
recording_files?: ZoomRecordingFile[]
|
||||
}
|
||||
|
||||
interface ZoomErrorResponse {
|
||||
message?: string
|
||||
code?: number
|
||||
}
|
||||
|
||||
const ZoomGetRecordingsSchema = z.object({
|
||||
accessToken: z.string().min(1, 'Access token is required'),
|
||||
meetingId: z.string().min(1, 'Meeting ID is required'),
|
||||
includeFolderItems: z.boolean().optional(),
|
||||
ttl: z.number().optional(),
|
||||
downloadFiles: z.boolean().optional().default(false),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = generateRequestId()
|
||||
|
||||
try {
|
||||
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
|
||||
|
||||
if (!authResult.success) {
|
||||
logger.warn(`[${requestId}] Unauthorized Zoom get recordings attempt: ${authResult.error}`)
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: authResult.error || 'Authentication required',
|
||||
},
|
||||
{ status: 401 }
|
||||
)
|
||||
}
|
||||
|
||||
const body = await request.json()
|
||||
const validatedData = ZoomGetRecordingsSchema.parse(body)
|
||||
|
||||
const { accessToken, meetingId, includeFolderItems, ttl, downloadFiles } = validatedData
|
||||
|
||||
const baseUrl = `https://api.zoom.us/v2/meetings/${encodeURIComponent(meetingId)}/recordings`
|
||||
const queryParams = new URLSearchParams()
|
||||
|
||||
if (includeFolderItems != null) {
|
||||
queryParams.append('include_folder_items', String(includeFolderItems))
|
||||
}
|
||||
if (ttl) {
|
||||
queryParams.append('ttl', String(ttl))
|
||||
}
|
||||
|
||||
const queryString = queryParams.toString()
|
||||
const apiUrl = queryString ? `${baseUrl}?${queryString}` : baseUrl
|
||||
|
||||
logger.info(`[${requestId}] Fetching recordings from Zoom`, { meetingId })
|
||||
|
||||
const urlValidation = await validateUrlWithDNS(apiUrl, 'apiUrl')
|
||||
if (!urlValidation.isValid) {
|
||||
return NextResponse.json({ success: false, error: urlValidation.error }, { status: 400 })
|
||||
}
|
||||
|
||||
const response = await secureFetchWithPinnedIP(apiUrl, urlValidation.resolvedIP!, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: `Bearer ${accessToken}`,
|
||||
},
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
const errorData = (await response.json().catch(() => ({}))) as ZoomErrorResponse
|
||||
logger.error(`[${requestId}] Zoom API error`, {
|
||||
status: response.status,
|
||||
error: errorData,
|
||||
})
|
||||
return NextResponse.json(
|
||||
{ success: false, error: errorData.message || `Zoom API error: ${response.status}` },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const data = (await response.json()) as ZoomRecordingsResponse
|
||||
const files: Array<{
|
||||
name: string
|
||||
mimeType: string
|
||||
data: string
|
||||
size: number
|
||||
}> = []
|
||||
|
||||
if (downloadFiles && Array.isArray(data.recording_files)) {
|
||||
for (const file of data.recording_files) {
|
||||
if (!file?.download_url) continue
|
||||
|
||||
try {
|
||||
const fileUrlValidation = await validateUrlWithDNS(file.download_url, 'downloadUrl')
|
||||
if (!fileUrlValidation.isValid) continue
|
||||
|
||||
const downloadResponse = await secureFetchWithPinnedIP(
|
||||
file.download_url,
|
||||
fileUrlValidation.resolvedIP!,
|
||||
{
|
||||
method: 'GET',
|
||||
headers: { Authorization: `Bearer ${accessToken}` },
|
||||
}
|
||||
)
|
||||
|
||||
if (!downloadResponse.ok) continue
|
||||
|
||||
const contentType =
|
||||
downloadResponse.headers.get('content-type') || 'application/octet-stream'
|
||||
const arrayBuffer = await downloadResponse.arrayBuffer()
|
||||
const buffer = Buffer.from(arrayBuffer)
|
||||
const extension =
|
||||
file.file_extension?.toString().toLowerCase() ||
|
||||
getExtensionFromMimeType(contentType) ||
|
||||
'dat'
|
||||
const fileName = `zoom-recording-${file.id || file.recording_start || Date.now()}.${extension}`
|
||||
|
||||
files.push({
|
||||
name: fileName,
|
||||
mimeType: contentType,
|
||||
data: buffer.toString('base64'),
|
||||
size: buffer.length,
|
||||
})
|
||||
} catch (error) {
|
||||
logger.warn(`[${requestId}] Failed to download recording file:`, error)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Zoom recordings fetched successfully`, {
|
||||
recordingCount: data.recording_files?.length || 0,
|
||||
downloadedCount: files.length,
|
||||
})
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
output: {
|
||||
recording: {
|
||||
uuid: data.uuid,
|
||||
id: data.id,
|
||||
account_id: data.account_id,
|
||||
host_id: data.host_id,
|
||||
topic: data.topic,
|
||||
type: data.type,
|
||||
start_time: data.start_time,
|
||||
duration: data.duration,
|
||||
total_size: data.total_size,
|
||||
recording_count: data.recording_count,
|
||||
share_url: data.share_url,
|
||||
recording_files: (data.recording_files || []).map((file: ZoomRecordingFile) => ({
|
||||
id: file.id,
|
||||
meeting_id: file.meeting_id,
|
||||
recording_start: file.recording_start,
|
||||
recording_end: file.recording_end,
|
||||
file_type: file.file_type,
|
||||
file_extension: file.file_extension,
|
||||
file_size: file.file_size,
|
||||
play_url: file.play_url,
|
||||
download_url: file.download_url,
|
||||
status: file.status,
|
||||
recording_type: file.recording_type,
|
||||
})),
|
||||
},
|
||||
files: files.length > 0 ? files : undefined,
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error(`[${requestId}] Error fetching Zoom recordings:`, error)
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: error instanceof Error ? error.message : 'Unknown error occurred',
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,190 @@
|
||||
import { db, workflowDeploymentVersion } from '@sim/db'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import type { NextRequest } from 'next/server'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { syncMcpToolsForWorkflow } from '@/lib/mcp/workflow-mcp-sync'
|
||||
import { restorePreviousVersionWebhooks, saveTriggerWebhooksForDeploy } from '@/lib/webhooks/deploy'
|
||||
import { activateWorkflowVersion } from '@/lib/workflows/persistence/utils'
|
||||
import {
|
||||
cleanupDeploymentVersion,
|
||||
createSchedulesForDeploy,
|
||||
validateWorkflowSchedules,
|
||||
} from '@/lib/workflows/schedules'
|
||||
import { validateWorkflowPermissions } from '@/lib/workflows/utils'
|
||||
import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'
|
||||
import type { BlockState } from '@/stores/workflows/workflow/types'
|
||||
|
||||
const logger = createLogger('WorkflowActivateDeploymentAPI')
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
export const runtime = 'nodejs'
|
||||
|
||||
export async function POST(
|
||||
request: NextRequest,
|
||||
{ params }: { params: Promise<{ id: string; version: string }> }
|
||||
) {
|
||||
const requestId = generateRequestId()
|
||||
const { id, version } = await params
|
||||
|
||||
try {
|
||||
const {
|
||||
error,
|
||||
session,
|
||||
workflow: workflowData,
|
||||
} = await validateWorkflowPermissions(id, requestId, 'admin')
|
||||
if (error) {
|
||||
return createErrorResponse(error.message, error.status)
|
||||
}
|
||||
|
||||
const actorUserId = session?.user?.id
|
||||
if (!actorUserId) {
|
||||
logger.warn(`[${requestId}] Unable to resolve actor user for deployment activation: ${id}`)
|
||||
return createErrorResponse('Unable to determine activating user', 400)
|
||||
}
|
||||
|
||||
const versionNum = Number(version)
|
||||
if (!Number.isFinite(versionNum)) {
|
||||
return createErrorResponse('Invalid version number', 400)
|
||||
}
|
||||
|
||||
const [versionRow] = await db
|
||||
.select({
|
||||
id: workflowDeploymentVersion.id,
|
||||
state: workflowDeploymentVersion.state,
|
||||
})
|
||||
.from(workflowDeploymentVersion)
|
||||
.where(
|
||||
and(
|
||||
eq(workflowDeploymentVersion.workflowId, id),
|
||||
eq(workflowDeploymentVersion.version, versionNum)
|
||||
)
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
if (!versionRow?.state) {
|
||||
return createErrorResponse('Deployment version not found', 404)
|
||||
}
|
||||
|
||||
const [currentActiveVersion] = await db
|
||||
.select({ id: workflowDeploymentVersion.id })
|
||||
.from(workflowDeploymentVersion)
|
||||
.where(
|
||||
and(
|
||||
eq(workflowDeploymentVersion.workflowId, id),
|
||||
eq(workflowDeploymentVersion.isActive, true)
|
||||
)
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
const previousVersionId = currentActiveVersion?.id
|
||||
|
||||
const deployedState = versionRow.state as { blocks?: Record<string, BlockState> }
|
||||
const blocks = deployedState.blocks
|
||||
if (!blocks || typeof blocks !== 'object') {
|
||||
return createErrorResponse('Invalid deployed state structure', 500)
|
||||
}
|
||||
|
||||
const scheduleValidation = validateWorkflowSchedules(blocks)
|
||||
if (!scheduleValidation.isValid) {
|
||||
return createErrorResponse(`Invalid schedule configuration: ${scheduleValidation.error}`, 400)
|
||||
}
|
||||
|
||||
const triggerSaveResult = await saveTriggerWebhooksForDeploy({
|
||||
request,
|
||||
workflowId: id,
|
||||
workflow: workflowData as Record<string, unknown>,
|
||||
userId: actorUserId,
|
||||
blocks,
|
||||
requestId,
|
||||
deploymentVersionId: versionRow.id,
|
||||
previousVersionId,
|
||||
forceRecreateSubscriptions: true,
|
||||
})
|
||||
|
||||
if (!triggerSaveResult.success) {
|
||||
return createErrorResponse(
|
||||
triggerSaveResult.error?.message || 'Failed to sync trigger configuration',
|
||||
triggerSaveResult.error?.status || 500
|
||||
)
|
||||
}
|
||||
|
||||
const scheduleResult = await createSchedulesForDeploy(id, blocks, db, versionRow.id)
|
||||
|
||||
if (!scheduleResult.success) {
|
||||
await cleanupDeploymentVersion({
|
||||
workflowId: id,
|
||||
workflow: workflowData as Record<string, unknown>,
|
||||
requestId,
|
||||
deploymentVersionId: versionRow.id,
|
||||
})
|
||||
if (previousVersionId) {
|
||||
await restorePreviousVersionWebhooks({
|
||||
request,
|
||||
workflow: workflowData as Record<string, unknown>,
|
||||
userId: actorUserId,
|
||||
previousVersionId,
|
||||
requestId,
|
||||
})
|
||||
}
|
||||
return createErrorResponse(scheduleResult.error || 'Failed to sync schedules', 500)
|
||||
}
|
||||
|
||||
const result = await activateWorkflowVersion({ workflowId: id, version: versionNum })
|
||||
if (!result.success) {
|
||||
await cleanupDeploymentVersion({
|
||||
workflowId: id,
|
||||
workflow: workflowData as Record<string, unknown>,
|
||||
requestId,
|
||||
deploymentVersionId: versionRow.id,
|
||||
})
|
||||
if (previousVersionId) {
|
||||
await restorePreviousVersionWebhooks({
|
||||
request,
|
||||
workflow: workflowData as Record<string, unknown>,
|
||||
userId: actorUserId,
|
||||
previousVersionId,
|
||||
requestId,
|
||||
})
|
||||
}
|
||||
return createErrorResponse(result.error || 'Failed to activate deployment', 400)
|
||||
}
|
||||
|
||||
if (previousVersionId && previousVersionId !== versionRow.id) {
|
||||
try {
|
||||
logger.info(
|
||||
`[${requestId}] Cleaning up previous version ${previousVersionId} webhooks/schedules`
|
||||
)
|
||||
await cleanupDeploymentVersion({
|
||||
workflowId: id,
|
||||
workflow: workflowData as Record<string, unknown>,
|
||||
requestId,
|
||||
deploymentVersionId: previousVersionId,
|
||||
skipExternalCleanup: true,
|
||||
})
|
||||
logger.info(`[${requestId}] Previous version cleanup completed`)
|
||||
} catch (cleanupError) {
|
||||
logger.error(
|
||||
`[${requestId}] Failed to clean up previous version ${previousVersionId}`,
|
||||
cleanupError
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
await syncMcpToolsForWorkflow({
|
||||
workflowId: id,
|
||||
requestId,
|
||||
state: versionRow.state,
|
||||
context: 'activate',
|
||||
})
|
||||
|
||||
return createSuccessResponse({
|
||||
success: true,
|
||||
deployedAt: result.deployedAt,
|
||||
warnings: triggerSaveResult.warnings,
|
||||
})
|
||||
} catch (error: any) {
|
||||
logger.error(`[${requestId}] Error activating deployment for workflow: ${id}`, error)
|
||||
return createErrorResponse(error.message || 'Failed to activate deployment', 500)
|
||||
}
|
||||
}
|
||||
@@ -4,17 +4,8 @@ import { and, eq } from 'drizzle-orm'
|
||||
import type { NextRequest } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { syncMcpToolsForWorkflow } from '@/lib/mcp/workflow-mcp-sync'
|
||||
import { restorePreviousVersionWebhooks, saveTriggerWebhooksForDeploy } from '@/lib/webhooks/deploy'
|
||||
import { activateWorkflowVersion } from '@/lib/workflows/persistence/utils'
|
||||
import {
|
||||
cleanupDeploymentVersion,
|
||||
createSchedulesForDeploy,
|
||||
validateWorkflowSchedules,
|
||||
} from '@/lib/workflows/schedules'
|
||||
import { validateWorkflowPermissions } from '@/lib/workflows/utils'
|
||||
import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'
|
||||
import type { BlockState } from '@/stores/workflows/workflow/types'
|
||||
|
||||
const logger = createLogger('WorkflowDeploymentVersionAPI')
|
||||
|
||||
@@ -32,14 +23,10 @@ const patchBodySchema = z
|
||||
.max(500, 'Description must be 500 characters or less')
|
||||
.nullable()
|
||||
.optional(),
|
||||
isActive: z.literal(true).optional(), // Set to true to activate this version
|
||||
})
|
||||
.refine(
|
||||
(data) => data.name !== undefined || data.description !== undefined || data.isActive === true,
|
||||
{
|
||||
message: 'At least one of name, description, or isActive must be provided',
|
||||
}
|
||||
)
|
||||
.refine((data) => data.name !== undefined || data.description !== undefined, {
|
||||
message: 'At least one of name or description must be provided',
|
||||
})
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
export const runtime = 'nodejs'
|
||||
@@ -95,22 +82,7 @@ export async function PATCH(
|
||||
const { id, version } = await params
|
||||
|
||||
try {
|
||||
const body = await request.json()
|
||||
const validation = patchBodySchema.safeParse(body)
|
||||
|
||||
if (!validation.success) {
|
||||
return createErrorResponse(validation.error.errors[0]?.message || 'Invalid request body', 400)
|
||||
}
|
||||
|
||||
const { name, description, isActive } = validation.data
|
||||
|
||||
// Activation requires admin permission, other updates require write
|
||||
const requiredPermission = isActive ? 'admin' : 'write'
|
||||
const {
|
||||
error,
|
||||
session,
|
||||
workflow: workflowData,
|
||||
} = await validateWorkflowPermissions(id, requestId, requiredPermission)
|
||||
const { error } = await validateWorkflowPermissions(id, requestId, 'write')
|
||||
if (error) {
|
||||
return createErrorResponse(error.message, error.status)
|
||||
}
|
||||
@@ -120,193 +92,15 @@ export async function PATCH(
|
||||
return createErrorResponse('Invalid version', 400)
|
||||
}
|
||||
|
||||
// Handle activation
|
||||
if (isActive) {
|
||||
const actorUserId = session?.user?.id
|
||||
if (!actorUserId) {
|
||||
logger.warn(`[${requestId}] Unable to resolve actor user for deployment activation: ${id}`)
|
||||
return createErrorResponse('Unable to determine activating user', 400)
|
||||
}
|
||||
const body = await request.json()
|
||||
const validation = patchBodySchema.safeParse(body)
|
||||
|
||||
const [versionRow] = await db
|
||||
.select({
|
||||
id: workflowDeploymentVersion.id,
|
||||
state: workflowDeploymentVersion.state,
|
||||
})
|
||||
.from(workflowDeploymentVersion)
|
||||
.where(
|
||||
and(
|
||||
eq(workflowDeploymentVersion.workflowId, id),
|
||||
eq(workflowDeploymentVersion.version, versionNum)
|
||||
)
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
if (!versionRow?.state) {
|
||||
return createErrorResponse('Deployment version not found', 404)
|
||||
}
|
||||
|
||||
const [currentActiveVersion] = await db
|
||||
.select({ id: workflowDeploymentVersion.id })
|
||||
.from(workflowDeploymentVersion)
|
||||
.where(
|
||||
and(
|
||||
eq(workflowDeploymentVersion.workflowId, id),
|
||||
eq(workflowDeploymentVersion.isActive, true)
|
||||
)
|
||||
)
|
||||
.limit(1)
|
||||
|
||||
const previousVersionId = currentActiveVersion?.id
|
||||
|
||||
const deployedState = versionRow.state as { blocks?: Record<string, BlockState> }
|
||||
const blocks = deployedState.blocks
|
||||
if (!blocks || typeof blocks !== 'object') {
|
||||
return createErrorResponse('Invalid deployed state structure', 500)
|
||||
}
|
||||
|
||||
const scheduleValidation = validateWorkflowSchedules(blocks)
|
||||
if (!scheduleValidation.isValid) {
|
||||
return createErrorResponse(
|
||||
`Invalid schedule configuration: ${scheduleValidation.error}`,
|
||||
400
|
||||
)
|
||||
}
|
||||
|
||||
const triggerSaveResult = await saveTriggerWebhooksForDeploy({
|
||||
request,
|
||||
workflowId: id,
|
||||
workflow: workflowData as Record<string, unknown>,
|
||||
userId: actorUserId,
|
||||
blocks,
|
||||
requestId,
|
||||
deploymentVersionId: versionRow.id,
|
||||
previousVersionId,
|
||||
forceRecreateSubscriptions: true,
|
||||
})
|
||||
|
||||
if (!triggerSaveResult.success) {
|
||||
return createErrorResponse(
|
||||
triggerSaveResult.error?.message || 'Failed to sync trigger configuration',
|
||||
triggerSaveResult.error?.status || 500
|
||||
)
|
||||
}
|
||||
|
||||
const scheduleResult = await createSchedulesForDeploy(id, blocks, db, versionRow.id)
|
||||
|
||||
if (!scheduleResult.success) {
|
||||
await cleanupDeploymentVersion({
|
||||
workflowId: id,
|
||||
workflow: workflowData as Record<string, unknown>,
|
||||
requestId,
|
||||
deploymentVersionId: versionRow.id,
|
||||
})
|
||||
if (previousVersionId) {
|
||||
await restorePreviousVersionWebhooks({
|
||||
request,
|
||||
workflow: workflowData as Record<string, unknown>,
|
||||
userId: actorUserId,
|
||||
previousVersionId,
|
||||
requestId,
|
||||
})
|
||||
}
|
||||
return createErrorResponse(scheduleResult.error || 'Failed to sync schedules', 500)
|
||||
}
|
||||
|
||||
const result = await activateWorkflowVersion({ workflowId: id, version: versionNum })
|
||||
if (!result.success) {
|
||||
await cleanupDeploymentVersion({
|
||||
workflowId: id,
|
||||
workflow: workflowData as Record<string, unknown>,
|
||||
requestId,
|
||||
deploymentVersionId: versionRow.id,
|
||||
})
|
||||
if (previousVersionId) {
|
||||
await restorePreviousVersionWebhooks({
|
||||
request,
|
||||
workflow: workflowData as Record<string, unknown>,
|
||||
userId: actorUserId,
|
||||
previousVersionId,
|
||||
requestId,
|
||||
})
|
||||
}
|
||||
return createErrorResponse(result.error || 'Failed to activate deployment', 400)
|
||||
}
|
||||
|
||||
if (previousVersionId && previousVersionId !== versionRow.id) {
|
||||
try {
|
||||
logger.info(
|
||||
`[${requestId}] Cleaning up previous version ${previousVersionId} webhooks/schedules`
|
||||
)
|
||||
await cleanupDeploymentVersion({
|
||||
workflowId: id,
|
||||
workflow: workflowData as Record<string, unknown>,
|
||||
requestId,
|
||||
deploymentVersionId: previousVersionId,
|
||||
skipExternalCleanup: true,
|
||||
})
|
||||
logger.info(`[${requestId}] Previous version cleanup completed`)
|
||||
} catch (cleanupError) {
|
||||
logger.error(
|
||||
`[${requestId}] Failed to clean up previous version ${previousVersionId}`,
|
||||
cleanupError
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
await syncMcpToolsForWorkflow({
|
||||
workflowId: id,
|
||||
requestId,
|
||||
state: versionRow.state,
|
||||
context: 'activate',
|
||||
})
|
||||
|
||||
// Apply name/description updates if provided alongside activation
|
||||
let updatedName: string | null | undefined
|
||||
let updatedDescription: string | null | undefined
|
||||
if (name !== undefined || description !== undefined) {
|
||||
const activationUpdateData: { name?: string; description?: string | null } = {}
|
||||
if (name !== undefined) {
|
||||
activationUpdateData.name = name
|
||||
}
|
||||
if (description !== undefined) {
|
||||
activationUpdateData.description = description
|
||||
}
|
||||
|
||||
const [updated] = await db
|
||||
.update(workflowDeploymentVersion)
|
||||
.set(activationUpdateData)
|
||||
.where(
|
||||
and(
|
||||
eq(workflowDeploymentVersion.workflowId, id),
|
||||
eq(workflowDeploymentVersion.version, versionNum)
|
||||
)
|
||||
)
|
||||
.returning({
|
||||
name: workflowDeploymentVersion.name,
|
||||
description: workflowDeploymentVersion.description,
|
||||
})
|
||||
|
||||
if (updated) {
|
||||
updatedName = updated.name
|
||||
updatedDescription = updated.description
|
||||
logger.info(
|
||||
`[${requestId}] Updated deployment version ${version} metadata during activation`,
|
||||
{ name: activationUpdateData.name, description: activationUpdateData.description }
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
return createSuccessResponse({
|
||||
success: true,
|
||||
deployedAt: result.deployedAt,
|
||||
warnings: triggerSaveResult.warnings,
|
||||
...(updatedName !== undefined && { name: updatedName }),
|
||||
...(updatedDescription !== undefined && { description: updatedDescription }),
|
||||
})
|
||||
if (!validation.success) {
|
||||
return createErrorResponse(validation.error.errors[0]?.message || 'Invalid request body', 400)
|
||||
}
|
||||
|
||||
// Handle name/description updates
|
||||
const { name, description } = validation.data
|
||||
|
||||
const updateData: { name?: string; description?: string | null } = {}
|
||||
if (name !== undefined) {
|
||||
updateData.name = name
|
||||
|
||||
apps/sim/app/api/workflows/[id]/execute-from-block/route.ts (new file, 235 lines)
@@ -0,0 +1,235 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { v4 as uuidv4 } from 'uuid'
|
||||
import { z } from 'zod'
|
||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { SSE_HEADERS } from '@/lib/core/utils/sse'
|
||||
import { markExecutionCancelled } from '@/lib/execution/cancellation'
|
||||
import { preprocessExecution } from '@/lib/execution/preprocessing'
|
||||
import { LoggingSession } from '@/lib/logs/execution/logging-session'
|
||||
import { executeWorkflowCore } from '@/lib/workflows/executor/execution-core'
|
||||
import { createSSECallbacks } from '@/lib/workflows/executor/execution-events'
|
||||
import { ExecutionSnapshot } from '@/executor/execution/snapshot'
|
||||
import type { ExecutionMetadata, SerializableExecutionState } from '@/executor/execution/types'
|
||||
import { hasExecutionResult } from '@/executor/utils/errors'
|
||||
|
||||
const logger = createLogger('ExecuteFromBlockAPI')
|
||||
|
||||
const ExecuteFromBlockSchema = z.object({
|
||||
startBlockId: z.string().min(1, 'Start block ID is required'),
|
||||
sourceSnapshot: z.object({
|
||||
blockStates: z.record(z.any()),
|
||||
executedBlocks: z.array(z.string()),
|
||||
blockLogs: z.array(z.any()),
|
||||
decisions: z.object({
|
||||
router: z.record(z.string()),
|
||||
condition: z.record(z.string()),
|
||||
}),
|
||||
completedLoops: z.array(z.string()),
|
||||
loopExecutions: z.record(z.any()).optional(),
|
||||
parallelExecutions: z.record(z.any()).optional(),
|
||||
parallelBlockMapping: z.record(z.any()).optional(),
|
||||
activeExecutionPath: z.array(z.string()),
|
||||
}),
|
||||
input: z.any().optional(),
|
||||
})
|
||||
|
||||
export const runtime = 'nodejs'
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
export async function POST(req: NextRequest, { params }: { params: Promise<{ id: string }> }) {
|
||||
const requestId = generateRequestId()
|
||||
const { id: workflowId } = await params
|
||||
|
||||
try {
|
||||
const auth = await checkHybridAuth(req, { requireWorkflowId: false })
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
const userId = auth.userId
|
||||
|
||||
let body: unknown
|
||||
try {
|
||||
body = await req.json()
|
||||
} catch {
|
||||
return NextResponse.json({ error: 'Invalid JSON body' }, { status: 400 })
|
||||
}
|
||||
|
||||
const validation = ExecuteFromBlockSchema.safeParse(body)
|
||||
if (!validation.success) {
|
||||
logger.warn(`[${requestId}] Invalid request body:`, validation.error.errors)
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: 'Invalid request body',
|
||||
details: validation.error.errors.map((e) => ({
|
||||
path: e.path.join('.'),
|
||||
message: e.message,
|
||||
})),
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const { startBlockId, sourceSnapshot, input } = validation.data
|
||||
const executionId = uuidv4()
|
||||
|
||||
// Run preprocessing checks (billing, rate limits, usage limits)
|
||||
const preprocessResult = await preprocessExecution({
|
||||
workflowId,
|
||||
userId,
|
||||
triggerType: 'manual',
|
||||
executionId,
|
||||
requestId,
|
||||
checkRateLimit: false, // Manual executions don't rate limit
|
||||
checkDeployment: false, // Run-from-block doesn't require deployment
|
||||
})
|
||||
|
||||
if (!preprocessResult.success) {
|
||||
const { error } = preprocessResult
|
||||
logger.warn(`[${requestId}] Preprocessing failed for run-from-block`, {
|
||||
workflowId,
|
||||
error: error?.message,
|
||||
statusCode: error?.statusCode,
|
||||
})
|
||||
return NextResponse.json(
|
||||
{ error: error?.message || 'Execution blocked' },
|
||||
{ status: error?.statusCode || 500 }
|
||||
)
|
||||
}
|
||||
|
||||
const workflowRecord = preprocessResult.workflowRecord
|
||||
if (!workflowRecord?.workspaceId) {
|
||||
return NextResponse.json({ error: 'Workflow not found or has no workspace' }, { status: 404 })
|
||||
}
|
||||
|
||||
const workspaceId = workflowRecord.workspaceId
|
||||
const workflowUserId = workflowRecord.userId
|
||||
|
||||
logger.info(`[${requestId}] Starting run-from-block execution`, {
|
||||
workflowId,
|
||||
startBlockId,
|
||||
executedBlocksCount: sourceSnapshot.executedBlocks.length,
|
||||
billingActorUserId: preprocessResult.actorUserId,
|
||||
})
|
||||
|
||||
const loggingSession = new LoggingSession(workflowId, executionId, 'manual', requestId)
|
||||
const abortController = new AbortController()
|
||||
let isStreamClosed = false
|
||||
|
||||
const stream = new ReadableStream<Uint8Array>({
|
||||
async start(controller) {
|
||||
const { sendEvent, onBlockStart, onBlockComplete, onStream } = createSSECallbacks({
|
||||
executionId,
|
||||
workflowId,
|
||||
controller,
|
||||
isStreamClosed: () => isStreamClosed,
|
||||
setStreamClosed: () => {
|
||||
isStreamClosed = true
|
||||
},
|
||||
})
|
||||
|
||||
const metadata: ExecutionMetadata = {
|
||||
requestId,
|
||||
workflowId,
|
||||
userId,
|
||||
executionId,
|
||||
triggerType: 'manual',
|
||||
workspaceId,
|
||||
workflowUserId,
|
||||
useDraftState: true,
|
||||
isClientSession: true,
|
||||
startTime: new Date().toISOString(),
|
||||
}
|
||||
|
||||
const snapshot = new ExecutionSnapshot(metadata, {}, input || {}, {})
|
||||
|
||||
try {
|
||||
const startTime = new Date()
|
||||
|
||||
sendEvent({
|
||||
type: 'execution:started',
|
||||
timestamp: startTime.toISOString(),
|
||||
executionId,
|
||||
workflowId,
|
||||
data: { startTime: startTime.toISOString() },
|
||||
})
|
||||
|
||||
const result = await executeWorkflowCore({
|
||||
snapshot,
|
||||
loggingSession,
|
||||
abortSignal: abortController.signal,
|
||||
runFromBlock: {
|
||||
startBlockId,
|
||||
sourceSnapshot: sourceSnapshot as SerializableExecutionState,
|
||||
},
|
||||
callbacks: { onBlockStart, onBlockComplete, onStream },
|
||||
})
|
||||
|
||||
if (result.status === 'cancelled') {
|
||||
sendEvent({
|
||||
type: 'execution:cancelled',
|
||||
timestamp: new Date().toISOString(),
|
||||
executionId,
|
||||
workflowId,
|
||||
data: { duration: result.metadata?.duration || 0 },
|
||||
})
|
||||
} else {
|
||||
sendEvent({
|
||||
type: 'execution:completed',
|
||||
timestamp: new Date().toISOString(),
|
||||
executionId,
|
||||
workflowId,
|
||||
data: {
|
||||
success: result.success,
|
||||
output: result.output,
|
||||
duration: result.metadata?.duration || 0,
|
||||
startTime: result.metadata?.startTime || startTime.toISOString(),
|
||||
endTime: result.metadata?.endTime || new Date().toISOString(),
|
||||
},
|
||||
})
|
||||
}
|
||||
} catch (error: unknown) {
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error'
|
||||
logger.error(`[${requestId}] Run-from-block execution failed: ${errorMessage}`)
|
||||
|
||||
const executionResult = hasExecutionResult(error) ? error.executionResult : undefined
|
||||
|
||||
sendEvent({
|
||||
type: 'execution:error',
|
||||
timestamp: new Date().toISOString(),
|
||||
executionId,
|
||||
workflowId,
|
||||
data: {
|
||||
error: executionResult?.error || errorMessage,
|
||||
duration: executionResult?.metadata?.duration || 0,
|
||||
},
|
||||
})
|
||||
} finally {
|
||||
if (!isStreamClosed) {
|
||||
try {
|
||||
controller.enqueue(new TextEncoder().encode('data: [DONE]\n\n'))
|
||||
controller.close()
|
||||
} catch {}
|
||||
}
|
||||
}
|
||||
},
|
||||
cancel() {
|
||||
isStreamClosed = true
|
||||
abortController.abort()
|
||||
markExecutionCancelled(executionId).catch(() => {})
|
||||
},
|
||||
})
|
||||
|
||||
return new NextResponse(stream, {
|
||||
headers: { ...SSE_HEADERS, 'X-Execution-Id': executionId },
|
||||
})
|
||||
} catch (error: unknown) {
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error'
|
||||
logger.error(`[${requestId}] Failed to start run-from-block execution:`, error)
|
||||
return NextResponse.json(
|
||||
{ error: errorMessage || 'Failed to start execution' },
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -1,14 +1,10 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { tasks } from '@trigger.dev/sdk'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { validate as uuidValidate, v4 as uuidv4 } from 'uuid'
|
||||
import { z } from 'zod'
|
||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||
import { getJobQueue, shouldExecuteInline } from '@/lib/core/async-jobs'
|
||||
import {
|
||||
createTimeoutAbortController,
|
||||
getTimeoutErrorMessage,
|
||||
isTimeoutError,
|
||||
} from '@/lib/core/execution-limits'
|
||||
import { isTriggerDevEnabled } from '@/lib/core/config/feature-flags'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { SSE_HEADERS } from '@/lib/core/utils/sse'
|
||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||
@@ -16,7 +12,6 @@ import { markExecutionCancelled } from '@/lib/execution/cancellation'
|
||||
import { processInputFileFields } from '@/lib/execution/files'
|
||||
import { preprocessExecution } from '@/lib/execution/preprocessing'
|
||||
import { LoggingSession } from '@/lib/logs/execution/logging-session'
|
||||
import { buildTraceSpans } from '@/lib/logs/execution/trace-spans/trace-spans'
|
||||
import {
|
||||
cleanupExecutionBase64Cache,
|
||||
hydrateUserFilesWithBase64,
|
||||
@@ -30,7 +25,7 @@ import {
|
||||
} from '@/lib/workflows/persistence/utils'
|
||||
import { createStreamingResponse } from '@/lib/workflows/streaming/streaming'
|
||||
import { createHttpResponseFromBlock, workflowHasResponseBlock } from '@/lib/workflows/utils'
|
||||
import { executeWorkflowJob, type WorkflowExecutionPayload } from '@/background/workflow-execution'
|
||||
import type { WorkflowExecutionPayload } from '@/background/workflow-execution'
|
||||
import { normalizeName } from '@/executor/constants'
|
||||
import { ExecutionSnapshot } from '@/executor/execution/snapshot'
|
||||
import type { ExecutionMetadata, IterationContext } from '@/executor/execution/types'
|
||||
@@ -59,25 +54,6 @@ const ExecuteWorkflowSchema = z.object({
|
||||
})
|
||||
.optional(),
|
||||
stopAfterBlockId: z.string().optional(),
|
||||
runFromBlock: z
|
||||
.object({
|
||||
startBlockId: z.string().min(1, 'Start block ID is required'),
|
||||
sourceSnapshot: z.object({
|
||||
blockStates: z.record(z.any()),
|
||||
executedBlocks: z.array(z.string()),
|
||||
blockLogs: z.array(z.any()),
|
||||
decisions: z.object({
|
||||
router: z.record(z.string()),
|
||||
condition: z.record(z.string()),
|
||||
}),
|
||||
completedLoops: z.array(z.string()),
|
||||
loopExecutions: z.record(z.any()).optional(),
|
||||
parallelExecutions: z.record(z.any()).optional(),
|
||||
parallelBlockMapping: z.record(z.any()).optional(),
|
||||
activeExecutionPath: z.array(z.string()),
|
||||
}),
|
||||
})
|
||||
.optional(),
|
||||
})
|
||||
|
||||
export const runtime = 'nodejs'
|
||||
@@ -142,66 +118,45 @@ type AsyncExecutionParams = {
|
||||
userId: string
|
||||
input: any
|
||||
triggerType: CoreTriggerType
|
||||
executionId: string
|
||||
}
|
||||
|
||||
/**
|
||||
* Handles async workflow execution by queueing a background job.
|
||||
* Returns immediately with a 202 Accepted response containing the job ID.
|
||||
*/
|
||||
async function handleAsyncExecution(params: AsyncExecutionParams): Promise<NextResponse> {
|
||||
const { requestId, workflowId, userId, input, triggerType, executionId } = params
|
||||
const { requestId, workflowId, userId, input, triggerType } = params
|
||||
|
||||
if (!isTriggerDevEnabled) {
|
||||
logger.warn(`[${requestId}] Async mode requested but TRIGGER_DEV_ENABLED is false`)
|
||||
return NextResponse.json(
|
||||
{ error: 'Async execution is not enabled. Set TRIGGER_DEV_ENABLED=true to use async mode.' },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const payload: WorkflowExecutionPayload = {
|
||||
workflowId,
|
||||
userId,
|
||||
input,
|
||||
triggerType,
|
||||
executionId,
|
||||
}
|
||||
|
||||
try {
|
||||
const jobQueue = await getJobQueue()
|
||||
const jobId = await jobQueue.enqueue('workflow-execution', payload, {
|
||||
metadata: { workflowId, userId },
|
||||
})
|
||||
const handle = await tasks.trigger('workflow-execution', payload)
|
||||
|
||||
logger.info(`[${requestId}] Queued async workflow execution`, {
|
||||
workflowId,
|
||||
jobId,
|
||||
jobId: handle.id,
|
||||
})
|
||||
|
||||
if (shouldExecuteInline()) {
|
||||
void (async () => {
|
||||
try {
|
||||
await jobQueue.startJob(jobId)
|
||||
const output = await executeWorkflowJob(payload)
|
||||
await jobQueue.completeJob(jobId, output)
|
||||
} catch (error) {
|
||||
const errorMessage = error instanceof Error ? error.message : String(error)
|
||||
logger.error(`[${requestId}] Async workflow execution failed`, {
|
||||
jobId,
|
||||
error: errorMessage,
|
||||
})
|
||||
try {
|
||||
await jobQueue.markJobFailed(jobId, errorMessage)
|
||||
} catch (markFailedError) {
|
||||
logger.error(`[${requestId}] Failed to mark job as failed`, {
|
||||
jobId,
|
||||
error:
|
||||
markFailedError instanceof Error
|
||||
? markFailedError.message
|
||||
: String(markFailedError),
|
||||
})
|
||||
}
|
||||
}
|
||||
})()
|
||||
}
|
||||
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: true,
|
||||
async: true,
|
||||
jobId,
|
||||
executionId,
|
||||
jobId: handle.id,
|
||||
message: 'Workflow execution queued',
|
||||
statusUrl: `${getBaseUrl()}/api/jobs/${jobId}`,
|
||||
statusUrl: `${getBaseUrl()}/api/jobs/${handle.id}`,
|
||||
},
|
||||
{ status: 202 }
|
||||
)
|
||||
@@ -269,7 +224,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
base64MaxBytes,
|
||||
workflowStateOverride,
|
||||
stopAfterBlockId,
|
||||
runFromBlock,
|
||||
} = validation.data
|
||||
|
||||
// For API key and internal JWT auth, the entire body is the input (except for our control fields)
|
||||
@@ -286,7 +240,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
base64MaxBytes,
|
||||
workflowStateOverride,
|
||||
stopAfterBlockId: _stopAfterBlockId,
|
||||
runFromBlock: _runFromBlock,
|
||||
workflowId: _workflowId, // Also exclude workflowId used for internal JWT auth
|
||||
...rest
|
||||
} = body
|
||||
@@ -365,7 +318,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
userId: actorUserId,
|
||||
input,
|
||||
triggerType: loggingTriggerType,
|
||||
executionId,
|
||||
})
|
||||
}
|
||||
|
||||
@@ -453,10 +405,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
|
||||
if (!enableSSE) {
|
||||
logger.info(`[${requestId}] Using non-SSE execution (direct JSON response)`)
|
||||
const timeoutController = createTimeoutAbortController(
|
||||
preprocessResult.executionTimeout?.sync
|
||||
)
|
||||
|
||||
try {
|
||||
const metadata: ExecutionMetadata = {
|
||||
requestId,
|
||||
@@ -490,38 +438,8 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
includeFileBase64,
|
||||
base64MaxBytes,
|
||||
stopAfterBlockId,
|
||||
runFromBlock,
|
||||
abortSignal: timeoutController.signal,
|
||||
})
|
||||
|
||||
if (
|
||||
result.status === 'cancelled' &&
|
||||
timeoutController.isTimedOut() &&
|
||||
timeoutController.timeoutMs
|
||||
) {
|
||||
const timeoutErrorMessage = getTimeoutErrorMessage(null, timeoutController.timeoutMs)
|
||||
logger.info(`[${requestId}] Non-SSE execution timed out`, {
|
||||
timeoutMs: timeoutController.timeoutMs,
|
||||
})
|
||||
await loggingSession.markAsFailed(timeoutErrorMessage)
|
||||
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
output: result.output,
|
||||
error: timeoutErrorMessage,
|
||||
metadata: result.metadata
|
||||
? {
|
||||
duration: result.metadata.duration,
|
||||
startTime: result.metadata.startTime,
|
||||
endTime: result.metadata.endTime,
|
||||
}
|
||||
: undefined,
|
||||
},
|
||||
{ status: 408 }
|
||||
)
|
||||
}
|
||||
|
||||
const outputWithBase64 = includeFileBase64
|
||||
? ((await hydrateUserFilesWithBase64(result.output, {
|
||||
requestId,
|
||||
@@ -532,6 +450,9 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
|
||||
const resultWithBase64 = { ...result, output: outputWithBase64 }
|
||||
|
||||
// Cleanup base64 cache for this execution
|
||||
await cleanupExecutionBase64Cache(executionId)
|
||||
|
||||
const hasResponseBlock = workflowHasResponseBlock(resultWithBase64)
|
||||
if (hasResponseBlock) {
|
||||
return createHttpResponseFromBlock(resultWithBase64)
|
||||
@@ -539,7 +460,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
|
||||
const filteredResult = {
|
||||
success: result.success,
|
||||
executionId,
|
||||
output: outputWithBase64,
|
||||
error: result.error,
|
||||
metadata: result.metadata
|
||||
@@ -554,17 +474,10 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
return NextResponse.json(filteredResult)
|
||||
} catch (error: unknown) {
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error'
|
||||
|
||||
logger.error(`[${requestId}] Non-SSE execution failed: ${errorMessage}`)
|
||||
|
||||
const executionResult = hasExecutionResult(error) ? error.executionResult : undefined
|
||||
|
||||
await loggingSession.safeCompleteWithError({
|
||||
totalDurationMs: executionResult?.metadata?.duration,
|
||||
error: { message: errorMessage },
|
||||
traceSpans: executionResult?.logs as any,
|
||||
})
|
||||
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
@@ -580,15 +493,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
} finally {
|
||||
timeoutController.cleanup()
|
||||
if (executionId) {
|
||||
try {
|
||||
await cleanupExecutionBase64Cache(executionId)
|
||||
} catch (error) {
|
||||
logger.error(`[${requestId}] Failed to cleanup base64 cache`, { error })
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -602,6 +506,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
cachedWorkflowData?.blocks || {}
|
||||
)
|
||||
const streamVariables = cachedWorkflowData?.variables ?? (workflow as any).variables
|
||||
|
||||
const stream = await createStreamingResponse({
|
||||
requestId,
|
||||
workflow: {
|
||||
@@ -619,7 +524,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
workflowTriggerType: triggerType === 'chat' ? 'chat' : 'api',
|
||||
includeFileBase64,
|
||||
base64MaxBytes,
|
||||
timeoutMs: preprocessResult.executionTimeout?.sync,
|
||||
},
|
||||
executionId,
|
||||
})
|
||||
@@ -631,7 +535,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
}
|
||||
|
||||
const encoder = new TextEncoder()
|
||||
const timeoutController = createTimeoutAbortController(preprocessResult.executionTimeout?.sync)
|
||||
const abortController = new AbortController()
|
||||
let isStreamClosed = false
|
||||
|
||||
const stream = new ReadableStream<Uint8Array>({
|
||||
@@ -827,11 +731,10 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
onStream,
|
||||
},
|
||||
loggingSession,
|
||||
abortSignal: timeoutController.signal,
|
||||
abortSignal: abortController.signal,
|
||||
includeFileBase64,
|
||||
base64MaxBytes,
|
||||
stopAfterBlockId,
|
||||
runFromBlock,
|
||||
})
|
||||
|
||||
if (result.status === 'paused') {
|
||||
@@ -864,37 +767,16 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
}
|
||||
|
||||
if (result.status === 'cancelled') {
|
||||
if (timeoutController.isTimedOut() && timeoutController.timeoutMs) {
|
||||
const timeoutErrorMessage = getTimeoutErrorMessage(null, timeoutController.timeoutMs)
|
||||
logger.info(`[${requestId}] Workflow execution timed out`, {
|
||||
timeoutMs: timeoutController.timeoutMs,
|
||||
})
|
||||
|
||||
await loggingSession.markAsFailed(timeoutErrorMessage)
|
||||
|
||||
sendEvent({
|
||||
type: 'execution:error',
|
||||
timestamp: new Date().toISOString(),
|
||||
executionId,
|
||||
workflowId,
|
||||
data: {
|
||||
error: timeoutErrorMessage,
|
||||
duration: result.metadata?.duration || 0,
|
||||
},
|
||||
})
|
||||
} else {
|
||||
logger.info(`[${requestId}] Workflow execution was cancelled`)
|
||||
|
||||
sendEvent({
|
||||
type: 'execution:cancelled',
|
||||
timestamp: new Date().toISOString(),
|
||||
executionId,
|
||||
workflowId,
|
||||
data: {
|
||||
duration: result.metadata?.duration || 0,
|
||||
},
|
||||
})
|
||||
}
|
||||
logger.info(`[${requestId}] Workflow execution was cancelled`)
|
||||
sendEvent({
|
||||
type: 'execution:cancelled',
|
||||
timestamp: new Date().toISOString(),
|
||||
executionId,
|
||||
workflowId,
|
||||
data: {
|
||||
duration: result.metadata?.duration || 0,
|
||||
},
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
@@ -917,26 +799,14 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
endTime: result.metadata?.endTime || new Date().toISOString(),
|
||||
},
|
||||
})
|
||||
} catch (error: unknown) {
|
||||
const isTimeout = isTimeoutError(error) || timeoutController.isTimedOut()
|
||||
const errorMessage = isTimeout
|
||||
? getTimeoutErrorMessage(error, timeoutController.timeoutMs)
|
||||
: error instanceof Error
|
||||
? error.message
|
||||
: 'Unknown error'
|
||||
|
||||
logger.error(`[${requestId}] SSE execution failed: ${errorMessage}`, { isTimeout })
|
||||
// Cleanup base64 cache for this execution
|
||||
await cleanupExecutionBase64Cache(executionId)
|
||||
} catch (error: unknown) {
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error'
|
||||
logger.error(`[${requestId}] SSE execution failed: ${errorMessage}`)
|
||||
|
||||
const executionResult = hasExecutionResult(error) ? error.executionResult : undefined
|
||||
const { traceSpans, totalDuration } = executionResult
|
||||
? buildTraceSpans(executionResult)
|
||||
: { traceSpans: [], totalDuration: 0 }
|
||||
|
||||
await loggingSession.safeCompleteWithError({
|
||||
totalDurationMs: totalDuration || executionResult?.metadata?.duration,
|
||||
error: { message: errorMessage },
|
||||
traceSpans,
|
||||
})
|
||||
|
||||
sendEvent({
|
||||
type: 'execution:error',
|
||||
@@ -949,23 +819,20 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
||||
},
|
||||
})
|
||||
} finally {
|
||||
timeoutController.cleanup()
|
||||
if (executionId) {
|
||||
await cleanupExecutionBase64Cache(executionId)
|
||||
}
|
||||
if (!isStreamClosed) {
|
||||
try {
|
||||
controller.enqueue(encoder.encode('data: [DONE]\n\n'))
|
||||
controller.close()
|
||||
} catch {}
|
||||
} catch {
|
||||
// Stream already closed - nothing to do
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
cancel() {
|
||||
isStreamClosed = true
|
||||
timeoutController.cleanup()
|
||||
logger.info(`[${requestId}] Client aborted SSE stream, signalling cancellation`)
|
||||
timeoutController.abort()
|
||||
abortController.abort()
|
||||
markExecutionCancelled(executionId).catch(() => {})
|
||||
},
|
||||
})
|
||||
|
||||
@@ -508,10 +508,8 @@ export default function TemplateDetails({ isWorkspaceContext = false }: Template
setIsApproving(true)
try {
const response = await fetch(`/api/templates/${template.id}`, {
method: 'PUT',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ status: 'approved' }),
const response = await fetch(`/api/templates/${template.id}/approve`, {
method: 'POST',
})

if (response.ok) {

@@ -533,10 +531,8 @@ export default function TemplateDetails({ isWorkspaceContext = false }: Template
setIsRejecting(true)
try {
const response = await fetch(`/api/templates/${template.id}`, {
method: 'PUT',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ status: 'rejected' }),
const response = await fetch(`/api/templates/${template.id}/reject`, {
method: 'POST',
})

if (response.ok) {

@@ -558,11 +554,10 @@ export default function TemplateDetails({ isWorkspaceContext = false }: Template
setIsVerifying(true)
try {
const response = await fetch(`/api/creators/${template.creator.id}`, {
method: 'PUT',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ verified: !template.creator.verified }),
})
const endpoint = `/api/creators/${template.creator.id}/verify`
const method = template.creator.verified ? 'DELETE' : 'POST'
const response = await fetch(endpoint, { method })

if (response.ok) {
// Refresh page to show updated verification status
@@ -192,10 +192,10 @@ export const ActionBar = memo(
</Tooltip.Trigger>
<Tooltip.Content side='top'>
{(() => {
if (disabled) return getTooltipMessage('Run from block')
if (disabled) return getTooltipMessage('Run')
if (isExecuting) return 'Execution in progress'
if (!dependenciesSatisfied) return 'Run previous blocks first'
return 'Run from block'
if (!dependenciesSatisfied) return 'Disabled: Run Blocks Before'
return 'Run'
})()}
</Tooltip.Content>
</Tooltip.Root>

@@ -273,7 +273,7 @@ export function BlockMenu({
}
}}
>
Run from block
Run
</PopoverItem>
{/* Hide "Run until" for triggers - they're always at the start */}
{!hasTriggerBlock && (

@@ -807,7 +807,7 @@ export function Chat() {

const newReservedFields: StartInputFormatField[] = missingStartReservedFields.map(
(fieldName) => {
const defaultType = fieldName === 'files' ? 'file[]' : 'string'
const defaultType = fieldName === 'files' ? 'files' : 'string'

return {
id: crypto.randomUUID(),

@@ -1,7 +1,6 @@
import { memo, useCallback, useMemo } from 'react'
import ReactMarkdown from 'react-markdown'
import type { NodeProps } from 'reactflow'
import remarkBreaks from 'remark-breaks'
import remarkGfm from 'remark-gfm'
import { cn } from '@/lib/core/utils/cn'
import { BLOCK_DIMENSIONS } from '@/lib/workflows/blocks/block-dimensions'
@@ -306,7 +305,7 @@ function getEmbedInfo(url: string): EmbedInfo | null {
const NoteMarkdown = memo(function NoteMarkdown({ content }: { content: string }) {
return (
<ReactMarkdown
remarkPlugins={[remarkGfm, remarkBreaks]}
remarkPlugins={[remarkGfm]}
components={{
p: ({ children }: any) => (
<p className='mb-1 break-words text-[var(--text-primary)] text-sm leading-[1.25rem] last:mb-0'>

@@ -179,7 +179,7 @@ export function A2aDeploy({
newFields.push({
id: crypto.randomUUID(),
name: 'files',
type: 'file[]',
type: 'files',
value: '',
collapsed: false,
})

@@ -12,6 +12,7 @@ import {
Tooltip,
} from '@/components/emcn'
import { Skeleton } from '@/components/ui'
import { getEnv, isTruthy } from '@/lib/core/config/env'
import { OutputSelect } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/chat/components/output-select/output-select'

interface WorkflowDeploymentInfo {
@@ -77,6 +78,7 @@ export function ApiDeploy({
async: false,
})

const isAsyncEnabled = isTruthy(getEnv('NEXT_PUBLIC_TRIGGER_DEV_ENABLED'))
const info = deploymentInfo ? { ...deploymentInfo, needsRedeployment } : null

const getBaseEndpoint = () => {
@@ -270,7 +272,7 @@ response = requests.post(
)

job = response.json()
print(job) # Contains jobId and executionId`
print(job) # Contains job_id for status checking`

case 'javascript':
return `const response = await fetch("${endpoint}", {
@@ -284,7 +286,7 @@ print(job) # Contains jobId and executionId`
});

const job = await response.json();
console.log(job); // Contains jobId and executionId`
console.log(job); // Contains job_id for status checking`

case 'typescript':
return `const response = await fetch("${endpoint}", {
@@ -297,8 +299,8 @@ console.log(job); // Contains jobId and executionId`
body: JSON.stringify(${JSON.stringify(payload)})
});

const job: { jobId: string; executionId: string } = await response.json();
console.log(job); // Contains jobId and executionId`
const job: { job_id: string } = await response.json();
console.log(job); // Contains job_id for status checking`

default:
return ''
@@ -537,49 +539,55 @@ console.log(limits);`
/>
</div>

<div>
<div className='mb-[6.5px] flex items-center justify-between'>
<Label className='block pl-[2px] font-medium text-[13px] text-[var(--text-primary)]'>
Run workflow (async)
</Label>
<div className='flex items-center gap-[6px]'>
<Tooltip.Root>
<Tooltip.Trigger asChild>
<Button
variant='ghost'
onClick={() => handleCopy('async', getAsyncCommand())}
aria-label='Copy command'
className='!p-1.5 -my-1.5'
>
{copied.async ? <Check className='h-3 w-3' /> : <Clipboard className='h-3 w-3' />}
</Button>
</Tooltip.Trigger>
<Tooltip.Content>
<span>{copied.async ? 'Copied' : 'Copy'}</span>
</Tooltip.Content>
</Tooltip.Root>
<Combobox
size='sm'
className='!w-fit !py-[2px] min-w-[100px] rounded-[6px] px-[9px]'
options={[
{ label: 'Execute Job', value: 'execute' },
{ label: 'Check Status', value: 'status' },
{ label: 'Rate Limits', value: 'rate-limits' },
]}
value={asyncExampleType}
onChange={(value) => setAsyncExampleType(value as AsyncExampleType)}
align='end'
dropdownWidth={160}
/>
{isAsyncEnabled && (
<div>
<div className='mb-[6.5px] flex items-center justify-between'>
<Label className='block pl-[2px] font-medium text-[13px] text-[var(--text-primary)]'>
Run workflow (async)
</Label>
<div className='flex items-center gap-[6px]'>
<Tooltip.Root>
<Tooltip.Trigger asChild>
<Button
variant='ghost'
onClick={() => handleCopy('async', getAsyncCommand())}
aria-label='Copy command'
className='!p-1.5 -my-1.5'
>
{copied.async ? (
<Check className='h-3 w-3' />
) : (
<Clipboard className='h-3 w-3' />
)}
</Button>
</Tooltip.Trigger>
<Tooltip.Content>
<span>{copied.async ? 'Copied' : 'Copy'}</span>
</Tooltip.Content>
</Tooltip.Root>
<Combobox
size='sm'
className='!w-fit !py-[2px] min-w-[100px] rounded-[6px] px-[9px]'
options={[
{ label: 'Execute Job', value: 'execute' },
{ label: 'Check Status', value: 'status' },
{ label: 'Rate Limits', value: 'rate-limits' },
]}
value={asyncExampleType}
onChange={(value) => setAsyncExampleType(value as AsyncExampleType)}
align='end'
dropdownWidth={160}
/>
</div>
</div>
<Code.Viewer
code={getAsyncCommand()}
language={LANGUAGE_SYNTAX[language]}
wrapText
className='!min-h-0 rounded-[4px] border border-[var(--border-1)]'
/>
</div>
<Code.Viewer
code={getAsyncCommand()}
language={LANGUAGE_SYNTAX[language]}
wrapText
className='!min-h-0 rounded-[4px] border border-[var(--border-1)]'
/>
</div>
)}
</div>
)
}

@@ -368,7 +368,6 @@ export function FileUpload({
const uploadedFile: UploadedFile = {
name: selectedFile.name,
path: selectedFile.path,
key: selectedFile.key,
size: selectedFile.size,
type: selectedFile.type,
}

@@ -26,7 +26,7 @@ import { useAccessibleReferencePrefixes } from '@/app/workspace/[workspaceId]/w/
interface Field {
id: string
name: string
type?: 'string' | 'number' | 'boolean' | 'object' | 'array' | 'file[]'
type?: 'string' | 'number' | 'boolean' | 'object' | 'array' | 'files'
value?: string
description?: string
collapsed?: boolean
@@ -57,7 +57,7 @@ const TYPE_OPTIONS: ComboboxOption[] = [
{ label: 'Boolean', value: 'boolean' },
{ label: 'Object', value: 'object' },
{ label: 'Array', value: 'array' },
{ label: 'Files', value: 'file[]' },
{ label: 'Files', value: 'files' },
]

/**
@@ -448,7 +448,7 @@ export function FieldFormat({
)
}

if (field.type === 'file[]') {
if (field.type === 'files') {
const lineCount = fieldValue.split('\n').length
const gutterWidth = calculateGutterWidth(lineCount)

@@ -225,7 +225,7 @@ const getOutputTypeForPath = (
const chatModeTypes: Record<string, string> = {
input: 'string',
conversationId: 'string',
files: 'file[]',
files: 'files',
}
return chatModeTypes[outputPath] || 'any'
}
@@ -1568,11 +1568,16 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
blockTagGroups.sort((a, b) => a.distance - b.distance)
finalBlockTagGroups.push(...blockTagGroups)

const groupTags = finalBlockTagGroups.flatMap((group) => group.tags)
const tags = [...groupTags, ...variableTags]
const contextualTags: string[] = []
if (loopBlockGroup) {
contextualTags.push(...loopBlockGroup.tags)
}
if (parallelBlockGroup) {
contextualTags.push(...parallelBlockGroup.tags)
}

return {
tags,
tags: [...allBlockTags, ...variableTags, ...contextualTags],
variableInfoMap,
blockTagGroups: finalBlockTagGroups,
}
@@ -1746,7 +1751,7 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
mergedSubBlocks
)

if (fieldType === 'file' || fieldType === 'file[]' || fieldType === 'array') {
if (fieldType === 'files' || fieldType === 'file[]' || fieldType === 'array') {
const blockName = parts[0]
const remainingPath = parts.slice(2).join('.')
processedTag = `${blockName}.${arrayFieldName}[0].${remainingPath}`

@@ -78,15 +78,21 @@ const IconComponent = ({ icon: Icon, className }: { icon: any; className?: strin
* @returns Editor panel content
*/
export function Editor() {
const { currentBlockId, connectionsHeight, toggleConnectionsCollapsed, registerRenameCallback } =
usePanelEditorStore(
useShallow((state) => ({
currentBlockId: state.currentBlockId,
connectionsHeight: state.connectionsHeight,
toggleConnectionsCollapsed: state.toggleConnectionsCollapsed,
registerRenameCallback: state.registerRenameCallback,
}))
)
const {
currentBlockId,
connectionsHeight,
toggleConnectionsCollapsed,
shouldFocusRename,
setShouldFocusRename,
} = usePanelEditorStore(
useShallow((state) => ({
currentBlockId: state.currentBlockId,
connectionsHeight: state.connectionsHeight,
toggleConnectionsCollapsed: state.toggleConnectionsCollapsed,
shouldFocusRename: state.shouldFocusRename,
setShouldFocusRename: state.setShouldFocusRename,
}))
)
const currentWorkflow = useCurrentWorkflow()
const currentBlock = currentBlockId ? currentWorkflow.getBlockById(currentBlockId) : null
const blockConfig = currentBlock ? getBlock(currentBlock.type) : null
@@ -223,7 +229,6 @@ export function Editor() {

const [isRenaming, setIsRenaming] = useState(false)
const [editedName, setEditedName] = useState('')
const renamingBlockIdRef = useRef<string | null>(null)

/**
* Ref callback that auto-selects the input text when mounted.
@@ -235,62 +240,44 @@ export function Editor() {
}, [])

/**
* Starts the rename process for the current block.
* Reads from stores directly to avoid stale closures when called via registered callback.
* Captures the block ID in a ref to ensure the correct block is renamed even if selection changes.
* Handles starting the rename process.
*/
const handleStartRename = useCallback(() => {
const blockId = usePanelEditorStore.getState().currentBlockId
if (!blockId) return

const blocks = useWorkflowStore.getState().blocks
const block = blocks[blockId]
if (!block) return

const parentId = block.data?.parentId as string | undefined
const isParentLocked = parentId ? (blocks[parentId]?.locked ?? false) : false
const isLocked = (block.locked ?? false) || isParentLocked
if (!userPermissions.canEdit || isLocked) return

renamingBlockIdRef.current = blockId
setEditedName(block.name || '')
if (!canEditBlock || !currentBlock) return
setEditedName(currentBlock.name || '')
setIsRenaming(true)
}, [userPermissions.canEdit])
}, [canEditBlock, currentBlock])

/**
* Saves the renamed block using the captured block ID from when rename started.
* Handles saving the renamed block.
*/
const handleSaveRename = useCallback(() => {
const blockIdToRename = renamingBlockIdRef.current
if (!blockIdToRename || !isRenaming) return

const blocks = useWorkflowStore.getState().blocks
const blockToRename = blocks[blockIdToRename]
if (!currentBlockId || !isRenaming) return

const trimmedName = editedName.trim()
if (trimmedName && blockToRename && trimmedName !== blockToRename.name) {
const result = collaborativeUpdateBlockName(blockIdToRename, trimmedName)
if (trimmedName && trimmedName !== currentBlock?.name) {
const result = collaborativeUpdateBlockName(currentBlockId, trimmedName)
if (!result.success) {
return
}
}
renamingBlockIdRef.current = null
setIsRenaming(false)
}, [isRenaming, editedName, collaborativeUpdateBlockName])
}, [currentBlockId, isRenaming, editedName, currentBlock?.name, collaborativeUpdateBlockName])

/**
* Handles canceling the rename process.
*/
const handleCancelRename = useCallback(() => {
renamingBlockIdRef.current = null
setIsRenaming(false)
setEditedName('')
}, [])

useEffect(() => {
registerRenameCallback(handleStartRename)
return () => registerRenameCallback(null)
}, [registerRenameCallback, handleStartRename])
if (shouldFocusRename && currentBlock) {
handleStartRename()
setShouldFocusRename(false)
}
}, [shouldFocusRename, currentBlock, handleStartRename, setShouldFocusRename])

/**
* Handles opening documentation link in a new secure tab.

@@ -1,5 +1,5 @@
import type React from 'react'
import { AlertTriangleIcon, BanIcon, RepeatIcon, SplitIcon, XCircleIcon } from 'lucide-react'
import { RepeatIcon, SplitIcon } from 'lucide-react'
import { getBlock } from '@/blocks'
import { TERMINAL_BLOCK_COLUMN_WIDTH } from '@/stores/constants'
import type { ConsoleEntry } from '@/stores/terminal'
@@ -12,15 +12,6 @@ const SUBFLOW_COLORS = {
parallel: '#FEE12B',
} as const

/**
* Special block type colors for errors and system messages
*/
const SPECIAL_BLOCK_COLORS = {
error: '#ef4444',
validation: '#f59e0b',
cancelled: '#6b7280',
} as const

/**
* Retrieves the icon component for a given block type
*/
@@ -41,18 +32,6 @@ export function getBlockIcon(
return SplitIcon
}

if (blockType === 'error') {
return XCircleIcon
}

if (blockType === 'validation') {
return AlertTriangleIcon
}

if (blockType === 'cancelled') {
return BanIcon
}

return null
}

@@ -71,16 +50,6 @@ export function getBlockColor(blockType: string): string {
if (blockType === 'parallel') {
return SUBFLOW_COLORS.parallel
}
// Special block types for errors and system messages
if (blockType === 'error') {
return SPECIAL_BLOCK_COLORS.error
}
if (blockType === 'validation') {
return SPECIAL_BLOCK_COLORS.validation
}
if (blockType === 'cancelled') {
return SPECIAL_BLOCK_COLORS.cancelled
}
return '#6b7280'
}

@@ -188,7 +188,7 @@ export function useBlockOutputFields({
baseOutputs = {
input: { type: 'string', description: 'User message' },
conversationId: { type: 'string', description: 'Conversation ID' },
files: { type: 'file[]', description: 'Uploaded files' },
files: { type: 'files', description: 'Uploaded files' },
}
} else {
const inputFormatValue = mergedSubBlocks?.inputFormat?.value

@@ -4,11 +4,6 @@ import { useQueryClient } from '@tanstack/react-query'
import { v4 as uuidv4 } from 'uuid'
import { buildTraceSpans } from '@/lib/logs/execution/trace-spans/trace-spans'
import { processStreamingBlockLogs } from '@/lib/tokenization'
import type {
BlockCompletedData,
BlockErrorData,
BlockStartedData,
} from '@/lib/workflows/executor/execution-events'
import {
extractTriggerMockPayload,
selectBestTrigger,
@@ -22,13 +17,7 @@ import {
import { useCurrentWorkflow } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-current-workflow'
import { getBlock } from '@/blocks'
import type { SerializableExecutionState } from '@/executor/execution/types'
import type {
BlockLog,
BlockState,
ExecutionResult,
NormalizedBlockOutput,
StreamingExecution,
} from '@/executor/types'
import type { BlockLog, BlockState, ExecutionResult, StreamingExecution } from '@/executor/types'
import { hasExecutionResult } from '@/executor/utils/errors'
import { coerceValue } from '@/executor/utils/start-block'
import { subscriptionKeys } from '@/hooks/queries/subscription'
@@ -38,7 +27,7 @@ import { useExecutionStore } from '@/stores/execution'
import { useNotificationStore } from '@/stores/notifications'
import { useVariablesStore } from '@/stores/panel'
import { useEnvironmentStore } from '@/stores/settings/environment'
import { useTerminalConsoleStore } from '@/stores/terminal'
import { type ConsoleEntry, useTerminalConsoleStore } from '@/stores/terminal'
import { useWorkflowDiffStore } from '@/stores/workflow-diff'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { mergeSubblockState } from '@/stores/workflows/utils'
@@ -52,19 +41,6 @@ interface DebugValidationResult {
error?: string
}

interface BlockEventHandlerConfig {
workflowId?: string
executionId?: string
workflowEdges: Array<{ id: string; target: string }>
activeBlocksSet: Set<string>
accumulatedBlockLogs: BlockLog[]
accumulatedBlockStates: Map<string, BlockState>
executedBlockIds: Set<string>
consoleMode: 'update' | 'add'
includeStartConsoleEntry: boolean
onBlockCompleteCallback?: (blockId: string, output: unknown) => Promise<void>
}

const WORKFLOW_EXECUTION_FAILURE_MESSAGE = 'Workflow execution failed'

function isRecord(value: unknown): value is Record<string, unknown> {
@@ -173,340 +149,6 @@ export function useWorkflowExecution() {
setActiveBlocks,
])

/**
* Builds timing fields for execution-level console entries.
*/
const buildExecutionTiming = useCallback((durationMs?: number) => {
const normalizedDuration = durationMs || 0
return {
durationMs: normalizedDuration,
startedAt: new Date(Date.now() - normalizedDuration).toISOString(),
endedAt: new Date().toISOString(),
}
}, [])

/**
* Adds an execution-level error entry to the console when appropriate.
*/
const addExecutionErrorConsoleEntry = useCallback(
(params: {
workflowId?: string
executionId?: string
error?: string
durationMs?: number
blockLogs: BlockLog[]
isPreExecutionError?: boolean
}) => {
if (!params.workflowId) return

const hasBlockError = params.blockLogs.some((log) => log.error)
const isPreExecutionError = params.isPreExecutionError ?? false
if (!isPreExecutionError && hasBlockError) {
return
}

const errorMessage = params.error || 'Execution failed'
const isTimeout = errorMessage.toLowerCase().includes('timed out')
const timing = buildExecutionTiming(params.durationMs)

addConsole({
input: {},
output: {},
success: false,
error: errorMessage,
durationMs: timing.durationMs,
startedAt: timing.startedAt,
executionOrder: isPreExecutionError ? 0 : Number.MAX_SAFE_INTEGER,
endedAt: timing.endedAt,
workflowId: params.workflowId,
blockId: isPreExecutionError
? 'validation'
: isTimeout
? 'timeout-error'
: 'execution-error',
executionId: params.executionId,
blockName: isPreExecutionError
? 'Workflow Validation'
: isTimeout
? 'Timeout Error'
: 'Execution Error',
blockType: isPreExecutionError ? 'validation' : 'error',
})
},
[addConsole, buildExecutionTiming]
)

/**
* Adds an execution-level cancellation entry to the console.
*/
const addExecutionCancelledConsoleEntry = useCallback(
(params: { workflowId?: string; executionId?: string; durationMs?: number }) => {
if (!params.workflowId) return

const timing = buildExecutionTiming(params.durationMs)
addConsole({
input: {},
output: {},
success: false,
error: 'Execution was cancelled',
durationMs: timing.durationMs,
startedAt: timing.startedAt,
executionOrder: Number.MAX_SAFE_INTEGER,
endedAt: timing.endedAt,
workflowId: params.workflowId,
blockId: 'cancelled',
executionId: params.executionId,
blockName: 'Execution Cancelled',
blockType: 'cancelled',
})
},
[addConsole, buildExecutionTiming]
)

/**
* Handles workflow-level execution errors for console output.
*/
const handleExecutionErrorConsole = useCallback(
(params: {
workflowId?: string
executionId?: string
error?: string
durationMs?: number
blockLogs: BlockLog[]
isPreExecutionError?: boolean
}) => {
if (params.workflowId) {
cancelRunningEntries(params.workflowId)
}
addExecutionErrorConsoleEntry(params)
},
[addExecutionErrorConsoleEntry, cancelRunningEntries]
)

/**
* Handles workflow-level execution cancellations for console output.
*/
const handleExecutionCancelledConsole = useCallback(
(params: { workflowId?: string; executionId?: string; durationMs?: number }) => {
if (params.workflowId) {
cancelRunningEntries(params.workflowId)
}
addExecutionCancelledConsoleEntry(params)
},
[addExecutionCancelledConsoleEntry, cancelRunningEntries]
)

const buildBlockEventHandlers = useCallback(
(config: BlockEventHandlerConfig) => {
const {
workflowId,
executionId,
workflowEdges,
activeBlocksSet,
accumulatedBlockLogs,
accumulatedBlockStates,
executedBlockIds,
consoleMode,
includeStartConsoleEntry,
onBlockCompleteCallback,
} = config

const updateActiveBlocks = (blockId: string, isActive: boolean) => {
if (isActive) {
activeBlocksSet.add(blockId)
} else {
activeBlocksSet.delete(blockId)
}
setActiveBlocks(new Set(activeBlocksSet))
}

const markIncomingEdges = (blockId: string) => {
const incomingEdges = workflowEdges.filter((edge) => edge.target === blockId)
incomingEdges.forEach((edge) => {
setEdgeRunStatus(edge.id, 'success')
})
}

const isContainerBlockType = (blockType?: string) => {
return blockType === 'loop' || blockType === 'parallel'
}

const createBlockLogEntry = (
data: BlockCompletedData | BlockErrorData,
options: { success: boolean; output?: unknown; error?: string }
): BlockLog => ({
blockId: data.blockId,
blockName: data.blockName || 'Unknown Block',
blockType: data.blockType || 'unknown',
input: data.input || {},
output: options.output ?? {},
success: options.success,
error: options.error,
durationMs: data.durationMs,
startedAt: data.startedAt,
executionOrder: data.executionOrder,
endedAt: data.endedAt,
})

const addConsoleEntry = (data: BlockCompletedData, output: NormalizedBlockOutput) => {
if (!workflowId) return
addConsole({
input: data.input || {},
output,
success: true,
durationMs: data.durationMs,
startedAt: data.startedAt,
executionOrder: data.executionOrder,
endedAt: data.endedAt,
workflowId,
blockId: data.blockId,
executionId,
blockName: data.blockName || 'Unknown Block',
blockType: data.blockType || 'unknown',
iterationCurrent: data.iterationCurrent,
iterationTotal: data.iterationTotal,
iterationType: data.iterationType,
})
}

const addConsoleErrorEntry = (data: BlockErrorData) => {
if (!workflowId) return
addConsole({
input: data.input || {},
output: {},
success: false,
error: data.error,
durationMs: data.durationMs,
startedAt: data.startedAt,
executionOrder: data.executionOrder,
endedAt: data.endedAt,
workflowId,
blockId: data.blockId,
executionId,
blockName: data.blockName || 'Unknown Block',
blockType: data.blockType || 'unknown',
iterationCurrent: data.iterationCurrent,
iterationTotal: data.iterationTotal,
iterationType: data.iterationType,
})
}

const updateConsoleEntry = (data: BlockCompletedData) => {
updateConsole(
data.blockId,
{
input: data.input || {},
replaceOutput: data.output,
success: true,
durationMs: data.durationMs,
startedAt: data.startedAt,
endedAt: data.endedAt,
isRunning: false,
iterationCurrent: data.iterationCurrent,
iterationTotal: data.iterationTotal,
iterationType: data.iterationType,
},
executionId
)
}

const updateConsoleErrorEntry = (data: BlockErrorData) => {
updateConsole(
data.blockId,
{
input: data.input || {},
replaceOutput: {},
success: false,
error: data.error,
durationMs: data.durationMs,
startedAt: data.startedAt,
endedAt: data.endedAt,
isRunning: false,
iterationCurrent: data.iterationCurrent,
iterationTotal: data.iterationTotal,
iterationType: data.iterationType,
},
executionId
)
}

const onBlockStarted = (data: BlockStartedData) => {
updateActiveBlocks(data.blockId, true)
markIncomingEdges(data.blockId)

if (!includeStartConsoleEntry || !workflowId) return

const startedAt = new Date().toISOString()
addConsole({
input: {},
output: undefined,
success: undefined,
durationMs: undefined,
startedAt,
executionOrder: data.executionOrder,
endedAt: undefined,
workflowId,
blockId: data.blockId,
executionId,
blockName: data.blockName || 'Unknown Block',
blockType: data.blockType || 'unknown',
isRunning: true,
iterationCurrent: data.iterationCurrent,
iterationTotal: data.iterationTotal,
iterationType: data.iterationType,
})
}

const onBlockCompleted = (data: BlockCompletedData) => {
updateActiveBlocks(data.blockId, false)
setBlockRunStatus(data.blockId, 'success')

executedBlockIds.add(data.blockId)
accumulatedBlockStates.set(data.blockId, {
output: data.output,
executed: true,
executionTime: data.durationMs,
})

if (isContainerBlockType(data.blockType)) {
return
}

accumulatedBlockLogs.push(createBlockLogEntry(data, { success: true, output: data.output }))

if (consoleMode === 'update') {
updateConsoleEntry(data)
} else {
addConsoleEntry(data, data.output as NormalizedBlockOutput)
}

if (onBlockCompleteCallback) {
onBlockCompleteCallback(data.blockId, data.output).catch((error) => {
logger.error('Error in onBlockComplete callback:', error)
})
}
}

const onBlockError = (data: BlockErrorData) => {
updateActiveBlocks(data.blockId, false)
setBlockRunStatus(data.blockId, 'error')

accumulatedBlockLogs.push(
createBlockLogEntry(data, { success: false, output: {}, error: data.error })
)

if (consoleMode === 'update') {
updateConsoleErrorEntry(data)
} else {
addConsoleErrorEntry(data)
}
}

return { onBlockStarted, onBlockCompleted, onBlockError }
},
[addConsole, setActiveBlocks, setBlockRunStatus, setEdgeRunStatus, updateConsole]
)

/**
* Checks if debug session is complete based on execution result
*/
@@ -1147,12 +789,7 @@ export function useWorkflowExecution() {
const startBlock = TriggerUtils.findStartBlock(filteredStates, 'chat')

if (!startBlock) {
throw new WorkflowValidationError(
TriggerUtils.getTriggerValidationMessage('chat', 'missing'),
'validation',
'validation',
'Workflow Validation'
)
throw new Error(TriggerUtils.getTriggerValidationMessage('chat', 'missing'))
}

startBlockId = startBlock.blockId
@@ -1163,12 +800,7 @@ export function useWorkflowExecution() {
})

if (candidates.length === 0) {
const error = new WorkflowValidationError(
'Workflow requires at least one trigger block to execute',
'validation',
'validation',
'Workflow Validation'
)
const error = new Error('Workflow requires at least one trigger block to execute')
logger.error('No trigger blocks found for manual run', {
allBlockTypes: Object.values(filteredStates).map((b) => b.type),
})
@@ -1181,12 +813,7 @@ export function useWorkflowExecution() {
(candidate) => candidate.path === StartBlockPath.SPLIT_API
)
if (apiCandidates.length > 1) {
const error = new WorkflowValidationError(
'Multiple API Trigger blocks found. Keep only one.',
'validation',
'validation',
'Workflow Validation'
)
const error = new Error('Multiple API Trigger blocks found. Keep only one.')
logger.error('Multiple API triggers found')
setIsExecuting(false)
throw error
@@ -1206,12 +833,7 @@ export function useWorkflowExecution() {
const outgoingConnections = workflowEdges.filter((edge) => edge.source === startBlockId)
if (outgoingConnections.length === 0) {
const triggerName = selectedTrigger.name || selectedTrigger.type
const error = new WorkflowValidationError(
`${triggerName} must be connected to other blocks to execute`,
'validation',
'validation',
'Workflow Validation'
)
const error = new Error(`${triggerName} must be connected to other blocks to execute`)
logger.error('Trigger has no outgoing connections', { triggerName, startBlockId })
setIsExecuting(false)
throw error
@@ -1237,12 +859,7 @@ export function useWorkflowExecution() {

// If we don't have a valid startBlockId at this point, throw an error
if (!startBlockId) {
const error = new WorkflowValidationError(
'No valid trigger block found to start execution',
'validation',
'validation',
'Workflow Validation'
)
const error = new Error('No valid trigger block found to start execution')
logger.error('No startBlockId found after trigger search')
setIsExecuting(false)
throw error
@@ -1275,19 +892,6 @@ export function useWorkflowExecution() {

// Execute the workflow
try {
const blockHandlers = buildBlockEventHandlers({
workflowId: activeWorkflowId,
executionId,
workflowEdges,
activeBlocksSet,
accumulatedBlockLogs,
accumulatedBlockStates,
executedBlockIds,
consoleMode: 'update',
includeStartConsoleEntry: true,
onBlockCompleteCallback: onBlockComplete,
})

await executionStream.execute({
workflowId: activeWorkflowId,
input: finalWorkflowInput,
@@ -1310,9 +914,145 @@ export function useWorkflowExecution() {
logger.info('Server execution started:', data)
},

onBlockStarted: blockHandlers.onBlockStarted,
onBlockCompleted: blockHandlers.onBlockCompleted,
onBlockError: blockHandlers.onBlockError,
onBlockStarted: (data) => {
activeBlocksSet.add(data.blockId)
// Create a new Set to trigger React re-render
setActiveBlocks(new Set(activeBlocksSet))

// Track edges that led to this block as soon as execution starts
const incomingEdges = workflowEdges.filter((edge) => edge.target === data.blockId)
incomingEdges.forEach((edge) => {
setEdgeRunStatus(edge.id, 'success')
})

// Add entry to terminal immediately with isRunning=true
// Use server-provided executionOrder to ensure correct sort order
const startedAt = new Date().toISOString()
addConsole({
input: {},
output: undefined,
success: undefined,
durationMs: undefined,
startedAt,
executionOrder: data.executionOrder,
endedAt: undefined,
workflowId: activeWorkflowId,
blockId: data.blockId,
executionId,
blockName: data.blockName || 'Unknown Block',
blockType: data.blockType || 'unknown',
isRunning: true,
// Pass through iteration context for subflow grouping
iterationCurrent: data.iterationCurrent,
iterationTotal: data.iterationTotal,
iterationType: data.iterationType,
})
},

onBlockCompleted: (data) => {
activeBlocksSet.delete(data.blockId)
setActiveBlocks(new Set(activeBlocksSet))
setBlockRunStatus(data.blockId, 'success')

executedBlockIds.add(data.blockId)
accumulatedBlockStates.set(data.blockId, {
output: data.output,
executed: true,
executionTime: data.durationMs,
})

const isContainerBlock = data.blockType === 'loop' || data.blockType === 'parallel'
if (isContainerBlock) return

const startedAt = data.startedAt
const endedAt = data.endedAt

accumulatedBlockLogs.push({
blockId: data.blockId,
blockName: data.blockName || 'Unknown Block',
blockType: data.blockType || 'unknown',
input: data.input || {},
output: data.output,
success: true,
durationMs: data.durationMs,
startedAt,
executionOrder: data.executionOrder,
endedAt,
})

// Update existing console entry (created in onBlockStarted) with completion data
updateConsole(
data.blockId,
{
input: data.input || {},
replaceOutput: data.output,
success: true,
durationMs: data.durationMs,
startedAt,
endedAt,
isRunning: false,
// Pass through iteration context for subflow grouping
iterationCurrent: data.iterationCurrent,
iterationTotal: data.iterationTotal,
iterationType: data.iterationType,
},
executionId
)

// Call onBlockComplete callback if provided
if (onBlockComplete) {
onBlockComplete(data.blockId, data.output).catch((error) => {
logger.error('Error in onBlockComplete callback:', error)
})
}
},

onBlockError: (data) => {
activeBlocksSet.delete(data.blockId)
// Create a new Set to trigger React re-render
setActiveBlocks(new Set(activeBlocksSet))

// Track failed block execution in run path
setBlockRunStatus(data.blockId, 'error')

const startedAt = data.startedAt
const endedAt = data.endedAt

// Accumulate block error log for the execution result
accumulatedBlockLogs.push({
blockId: data.blockId,
blockName: data.blockName || 'Unknown Block',
blockType: data.blockType || 'unknown',
input: data.input || {},
output: {},
success: false,
error: data.error,
durationMs: data.durationMs,
startedAt,
executionOrder: data.executionOrder,
endedAt,
})

// Update existing console entry (created in onBlockStarted) with error data
updateConsole(
data.blockId,
{
input: data.input || {},
replaceOutput: {},
success: false,
error: data.error,
durationMs: data.durationMs,
startedAt,
endedAt,
isRunning: false,
// Pass through iteration context for subflow grouping
iterationCurrent: data.iterationCurrent,
iterationTotal: data.iterationTotal,
iterationType: data.iterationType,
},
executionId
)
},

onStreamChunk: (data) => {
const existing = streamedContent.get(data.blockId) || ''
@@ -1417,23 +1157,33 @@ export function useWorkflowExecution() {
logs: accumulatedBlockLogs,
}

const isPreExecutionError = accumulatedBlockLogs.length === 0
handleExecutionErrorConsole({
workflowId: activeWorkflowId,
executionId,
error: data.error,
durationMs: data.duration,
blockLogs: accumulatedBlockLogs,
isPreExecutionError,
})
},
// Only add workflow-level error if no blocks have executed yet
// This catches pre-execution errors (validation, serialization, etc.)
// Block execution errors are already logged via onBlockError callback
const { entries } = useTerminalConsoleStore.getState()
const existingLogs = entries.filter(
(log: ConsoleEntry) => log.executionId === executionId
)

onExecutionCancelled: (data) => {
handleExecutionCancelledConsole({
workflowId: activeWorkflowId,
executionId,
durationMs: data?.duration,
})
if (existingLogs.length === 0) {
// No blocks executed yet - this is a pre-execution error
// Use 0 for executionOrder so validation errors appear first
addConsole({
input: {},
output: {},
success: false,
error: data.error,
durationMs: data.duration || 0,
startedAt: new Date(Date.now() - (data.duration || 0)).toISOString(),
executionOrder: 0,
endedAt: new Date().toISOString(),
workflowId: activeWorkflowId,
blockId: 'validation',
executionId,
blockName: 'Workflow Validation',
blockType: 'validation',
})
}
},
},
})
@@ -1829,27 +1579,115 @@ export function useWorkflowExecution() {
const activeBlocksSet = new Set<string>()

try {
const blockHandlers = buildBlockEventHandlers({
workflowId,
executionId,
workflowEdges,
activeBlocksSet,
accumulatedBlockLogs,
accumulatedBlockStates,
executedBlockIds,
consoleMode: 'add',
includeStartConsoleEntry: false,
})

await executionStream.executeFromBlock({
workflowId,
startBlockId: blockId,
sourceSnapshot: effectiveSnapshot,
input: workflowInput,
callbacks: {
onBlockStarted: blockHandlers.onBlockStarted,
onBlockCompleted: blockHandlers.onBlockCompleted,
onBlockError: blockHandlers.onBlockError,
onBlockStarted: (data) => {
activeBlocksSet.add(data.blockId)
setActiveBlocks(new Set(activeBlocksSet))

const incomingEdges = workflowEdges.filter((edge) => edge.target === data.blockId)
incomingEdges.forEach((edge) => {
setEdgeRunStatus(edge.id, 'success')
})
},

onBlockCompleted: (data) => {
activeBlocksSet.delete(data.blockId)
setActiveBlocks(new Set(activeBlocksSet))

setBlockRunStatus(data.blockId, 'success')

executedBlockIds.add(data.blockId)
accumulatedBlockStates.set(data.blockId, {
output: data.output,
executed: true,
executionTime: data.durationMs,
})

const isContainerBlock = data.blockType === 'loop' || data.blockType === 'parallel'
if (isContainerBlock) return

const startedAt = data.startedAt
const endedAt = data.endedAt

accumulatedBlockLogs.push({
blockId: data.blockId,
blockName: data.blockName || 'Unknown Block',
blockType: data.blockType || 'unknown',
input: data.input || {},
output: data.output,
success: true,
durationMs: data.durationMs,
startedAt,
executionOrder: data.executionOrder,
endedAt,
})

addConsole({
input: data.input || {},
output: data.output,
success: true,
durationMs: data.durationMs,
startedAt,
executionOrder: data.executionOrder,
endedAt,
workflowId,
blockId: data.blockId,
executionId,
blockName: data.blockName || 'Unknown Block',
blockType: data.blockType || 'unknown',
iterationCurrent: data.iterationCurrent,
iterationTotal: data.iterationTotal,
iterationType: data.iterationType,
})
},

onBlockError: (data) => {
activeBlocksSet.delete(data.blockId)
setActiveBlocks(new Set(activeBlocksSet))

setBlockRunStatus(data.blockId, 'error')

const startedAt = data.startedAt
const endedAt = data.endedAt

accumulatedBlockLogs.push({
blockId: data.blockId,
blockName: data.blockName || 'Unknown Block',
blockType: data.blockType || 'unknown',
input: data.input || {},
output: {},
success: false,
error: data.error,
executionOrder: data.executionOrder,
durationMs: data.durationMs,
startedAt,
endedAt,
})

addConsole({
input: data.input || {},
output: {},
success: false,
error: data.error,
durationMs: data.durationMs,
startedAt,
executionOrder: data.executionOrder,
endedAt,
workflowId,
blockId: data.blockId,
executionId,
blockName: data.blockName,
blockType: data.blockType,
iterationCurrent: data.iterationCurrent,
iterationTotal: data.iterationTotal,
iterationType: data.iterationType,
})
},

onExecutionCompleted: (data) => {
if (data.success) {
@@ -1892,23 +1730,13 @@ export function useWorkflowExecution() {
'Workflow was modified. Run the workflow again to enable running from block.',
workflowId,
})
} else {
addNotification({
level: 'error',
message: data.error || 'Run from block failed',
workflowId,
})
}

handleExecutionErrorConsole({
workflowId,
executionId,
error: data.error,
durationMs: data.duration,
blockLogs: accumulatedBlockLogs,
})
},

onExecutionCancelled: (data) => {
handleExecutionCancelledConsole({
workflowId,
executionId,
durationMs: data?.duration,
})
},
},
})
@@ -1930,9 +1758,7 @@ export function useWorkflowExecution() {
setBlockRunStatus,
setEdgeRunStatus,
addNotification,
buildBlockEventHandlers,
handleExecutionErrorConsole,
handleExecutionCancelledConsole,
addConsole,
executionStream,
]
)

@@ -1132,7 +1132,7 @@ const WorkflowContent = React.memo(() => {
const handleContextRename = useCallback(() => {
if (contextMenuBlocks.length === 1) {
usePanelEditorStore.getState().setCurrentBlockId(contextMenuBlocks[0].id)
usePanelEditorStore.getState().triggerRename()
usePanelEditorStore.getState().setShouldFocusRename(true)
}
}, [contextMenuBlocks])

@@ -1173,7 +1173,7 @@ const WorkflowContent = React.memo(() => {
block.parentId && (block.parentType === 'loop' || block.parentType === 'parallel')

if (isInsideSubflow) return { canRun: false, reason: 'Cannot run from inside subflow' }
if (!dependenciesSatisfied) return { canRun: false, reason: 'Run previous blocks first' }
if (!dependenciesSatisfied) return { canRun: false, reason: 'Disabled: Run Blocks Before' }
if (isNoteBlock) return { canRun: false, reason: undefined }
if (isExecuting) return { canRun: false, reason: undefined }

@@ -1,11 +1,11 @@
import {
Building2,
Clock,
Database,
HardDrive,
HeadphonesIcon,
Server,
ShieldCheck,
Timer,
Users,
Zap,
} from 'lucide-react'
@@ -15,8 +15,8 @@ import type { PlanFeature } from '@/app/workspace/[workspaceId]/w/components/sid
export const PRO_PLAN_FEATURES: PlanFeature[] = [
{ icon: Zap, text: '150 runs per minute (sync)' },
{ icon: Clock, text: '1,000 runs per minute (async)' },
{ icon: Timer, text: '50 min sync execution limit' },
{ icon: HardDrive, text: '50GB file storage' },
{ icon: Building2, text: 'Unlimited workspaces' },
{ icon: Users, text: 'Unlimited invites' },
{ icon: Database, text: 'Unlimited log retention' },
]
@@ -24,8 +24,8 @@ export const PRO_PLAN_FEATURES: PlanFeature[] = [
export const TEAM_PLAN_FEATURES: PlanFeature[] = [
{ icon: Zap, text: '300 runs per minute (sync)' },
{ icon: Clock, text: '2,500 runs per minute (async)' },
{ icon: Timer, text: '50 min sync execution limit' },
{ icon: HardDrive, text: '500GB file storage (pooled)' },
{ icon: Building2, text: 'Unlimited workspaces' },
{ icon: Users, text: 'Unlimited invites' },
{ icon: Database, text: 'Unlimited log retention' },
{ icon: SlackMonoIcon, text: 'Dedicated Slack channel' },

@@ -52,8 +52,9 @@ export function useImportWorkflow({ workspaceId }: UseImportWorkflowProps) {
const workflowName = extractWorkflowName(content, filename)
clearDiff()

const parsedContent = JSON.parse(content)
const workflowColor =
(workflowData.metadata as { color?: string } | undefined)?.color || '#3972F6'
parsedContent.state?.metadata?.color || parsedContent.metadata?.color || '#3972F6'

const result = await createWorkflowMutation.mutateAsync({
name: workflowName,
@@ -61,20 +62,23 @@ export function useImportWorkflow({ workspaceId }: UseImportWorkflowProps) {
workspaceId,
folderId: folderId || undefined,
sortOrder,
color: workflowColor,
})
const newWorkflowId = result.id

const stateResponse = await fetch(`/api/workflows/${newWorkflowId}/state`, {
if (workflowColor !== '#3972F6') {
await fetch(`/api/workflows/${newWorkflowId}`, {
method: 'PATCH',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ color: workflowColor }),
})
}

await fetch(`/api/workflows/${newWorkflowId}/state`, {
method: 'PUT',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(workflowData),
})

if (!stateResponse.ok) {
logger.error(`Failed to save workflow state for ${newWorkflowId}`)
}

if (workflowData.variables) {
const variablesArray = Array.isArray(workflowData.variables)
? workflowData.variables
@@ -97,15 +101,11 @@ export function useImportWorkflow({ workspaceId }: UseImportWorkflowProps) {
}
}

const variablesResponse = await fetch(`/api/workflows/${newWorkflowId}/variables`, {
await fetch(`/api/workflows/${newWorkflowId}/variables`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ variables: variablesRecord }),
})

if (!variablesResponse.ok) {
logger.error(`Failed to save variables for ${newWorkflowId}`)
}
}
}

@@ -160,8 +160,9 @@ export function useImportWorkspace({ onSuccess }: UseImportWorkspaceProps = {})
const workflowName = extractWorkflowName(workflow.content, workflow.name)
clearDiff()

const parsedContent = JSON.parse(workflow.content)
const workflowColor =
(workflowData.metadata as { color?: string } | undefined)?.color || '#3972F6'
parsedContent.state?.metadata?.color || parsedContent.metadata?.color || '#3972F6'

const createWorkflowResponse = await fetch('/api/workflows', {
method: 'POST',
@@ -215,18 +216,11 @@ export function useImportWorkspace({ onSuccess }: UseImportWorkspaceProps = {})
}
}

const variablesResponse = await fetch(
`/api/workflows/${newWorkflow.id}/variables`,
{
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ variables: variablesRecord }),
}
)

if (!variablesResponse.ok) {
logger.error(`Failed to save variables for ${newWorkflow.id}`)
}
await fetch(`/api/workflows/${newWorkflow.id}/variables`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ variables: variablesRecord }),
})
}
}

@@ -14,7 +14,6 @@ import { createLogger } from '@sim/logger'
import { useParams } from 'next/navigation'
import { io, type Socket } from 'socket.io-client'
import { getEnv } from '@/lib/core/config/env'
import { useOperationQueueStore } from '@/stores/operation-queue/store'

const logger = createLogger('SocketContext')

@@ -139,7 +138,6 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
const [authFailed, setAuthFailed] = useState(false)
const initializedRef = useRef(false)
const socketRef = useRef<Socket | null>(null)
const triggerOfflineMode = useOperationQueueStore((state) => state.triggerOfflineMode)

const params = useParams()
const urlWorkflowId = params?.workflowId as string | undefined
@@ -343,12 +341,9 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
})
})

socketInstance.on('join-workflow-error', ({ error, code }) => {
socketInstance.on('join-workflow-error', ({ error }) => {
isRejoiningRef.current = false
logger.error('Failed to join workflow:', { error, code })
if (code === 'ROOM_MANAGER_UNAVAILABLE') {
triggerOfflineMode()
}
logger.error('Failed to join workflow:', error)
})

socketInstance.on('workflow-operation', (data) => {

@@ -4,7 +4,6 @@ import { task } from '@trigger.dev/sdk'
import { Cron } from 'croner'
import { eq } from 'drizzle-orm'
import { v4 as uuidv4 } from 'uuid'
import { createTimeoutAbortController, getTimeoutErrorMessage } from '@/lib/core/execution-limits'
import { preprocessExecution } from '@/lib/execution/preprocessing'
import { LoggingSession } from '@/lib/logs/execution/logging-session'
import { buildTraceSpans } from '@/lib/logs/execution/trace-spans/trace-spans'
@@ -121,7 +120,6 @@ async function runWorkflowExecution({
loggingSession,
requestId,
executionId,
asyncTimeout,
}: {
payload: ScheduleExecutionPayload
workflowRecord: WorkflowRecord
@@ -129,7 +127,6 @@ async function runWorkflowExecution({
loggingSession: LoggingSession
requestId: string
executionId: string
asyncTimeout?: number
}): Promise<RunWorkflowResult> {
try {
logger.debug(`[${requestId}] Loading deployed workflow ${payload.workflowId}`)
@@ -184,33 +181,15 @@ async function runWorkflowExecution({
[]
)

const timeoutController = createTimeoutAbortController(asyncTimeout)
const executionResult = await executeWorkflowCore({
snapshot,
callbacks: {},
loggingSession,
includeFileBase64: true,
base64MaxBytes: undefined,
})

let executionResult
try {
executionResult = await executeWorkflowCore({
snapshot,
callbacks: {},
loggingSession,
includeFileBase64: true,
base64MaxBytes: undefined,
abortSignal: timeoutController.signal,
})
} finally {
timeoutController.cleanup()
}

if (
executionResult.status === 'cancelled' &&
timeoutController.isTimedOut() &&
timeoutController.timeoutMs
) {
const timeoutErrorMessage = getTimeoutErrorMessage(null, timeoutController.timeoutMs)
logger.info(`[${requestId}] Scheduled workflow execution timed out`, {
timeoutMs: timeoutController.timeoutMs,
})
await loggingSession.markAsFailed(timeoutErrorMessage)
} else if (executionResult.status === 'paused') {
if (executionResult.status === 'paused') {
if (!executionResult.snapshotSeed) {
logger.error(`[${requestId}] Missing snapshot seed for paused execution`, {
executionId,
@@ -474,7 +453,6 @@ export async function executeScheduleJob(payload: ScheduleExecutionPayload) {
loggingSession,
requestId,
executionId,
asyncTimeout: preprocessResult.executionTimeout?.async,
})

if (executionResult.status === 'skip') {

@@ -4,14 +4,7 @@ import { createLogger } from '@sim/logger'
import { task } from '@trigger.dev/sdk'
import { eq } from 'drizzle-orm'
import { v4 as uuidv4 } from 'uuid'
import { getHighestPrioritySubscription } from '@/lib/billing'
import {
createTimeoutAbortController,
getExecutionTimeout,
getTimeoutErrorMessage,
} from '@/lib/core/execution-limits'
import { IdempotencyService, webhookIdempotency } from '@/lib/core/idempotency'
import type { SubscriptionPlan } from '@/lib/core/rate-limiter/types'
import { processExecutionFiles } from '@/lib/execution/files'
import { LoggingSession } from '@/lib/logs/execution/logging-session'
import { buildTraceSpans } from '@/lib/logs/execution/trace-spans/trace-spans'
@@ -141,13 +134,7 @@ async function executeWebhookJobInternal(
requestId
)

const userSubscription = await getHighestPrioritySubscription(payload.userId)
const asyncTimeout = getExecutionTimeout(
userSubscription?.plan as SubscriptionPlan | undefined,
'async'
)
const timeoutController = createTimeoutAbortController(asyncTimeout)

// Track deploymentVersionId at function scope so it's available in catch block
let deploymentVersionId: string | undefined

try {
@@ -254,22 +241,11 @@ async function executeWebhookJobInternal(
snapshot,
callbacks: {},
loggingSession,
includeFileBase64: true,
base64MaxBytes: undefined,
abortSignal: timeoutController.signal,
includeFileBase64: true, // Enable base64 hydration
base64MaxBytes: undefined, // Use default limit
})

if (
executionResult.status === 'cancelled' &&
timeoutController.isTimedOut() &&
timeoutController.timeoutMs
) {
const timeoutErrorMessage = getTimeoutErrorMessage(null, timeoutController.timeoutMs)
logger.info(`[${requestId}] Airtable webhook execution timed out`, {
timeoutMs: timeoutController.timeoutMs,
})
await loggingSession.markAsFailed(timeoutErrorMessage)
} else if (executionResult.status === 'paused') {
if (executionResult.status === 'paused') {
if (!executionResult.snapshotSeed) {
logger.error(`[${requestId}] Missing snapshot seed for paused execution`, {
executionId,
@@ -441,11 +417,11 @@ async function executeWebhookJobInternal(
if (triggerBlock?.subBlocks?.inputFormat?.value) {
const inputFormat = triggerBlock.subBlocks.inputFormat.value as unknown as Array<{
name: string
type: 'string' | 'number' | 'boolean' | 'object' | 'array' | 'file[]'
type: 'string' | 'number' | 'boolean' | 'object' | 'array' | 'files'
}>
logger.debug(`[${requestId}] Processing generic webhook files from inputFormat`)

const fileFields = inputFormat.filter((field) => field.type === 'file[]')
const fileFields = inputFormat.filter((field) => field.type === 'files')

if (fileFields.length > 0 && typeof input === 'object' && input !== null) {
const executionContext = {
@@ -521,20 +497,9 @@ async function executeWebhookJobInternal(
callbacks: {},
loggingSession,
includeFileBase64: true,
abortSignal: timeoutController.signal,
})

if (
executionResult.status === 'cancelled' &&
timeoutController.isTimedOut() &&
timeoutController.timeoutMs
) {
const timeoutErrorMessage = getTimeoutErrorMessage(null, timeoutController.timeoutMs)
logger.info(`[${requestId}] Webhook execution timed out`, {
timeoutMs: timeoutController.timeoutMs,
})
await loggingSession.markAsFailed(timeoutErrorMessage)
} else if (executionResult.status === 'paused') {
if (executionResult.status === 'paused') {
if (!executionResult.snapshotSeed) {
logger.error(`[${requestId}] Missing snapshot seed for paused execution`, {
executionId,
@@ -636,8 +601,6 @@ async function executeWebhookJobInternal(
}

throw error
} finally {
timeoutController.cleanup()
}
}


@@ -1,7 +1,6 @@
import { createLogger } from '@sim/logger'
import { task } from '@trigger.dev/sdk'
import { v4 as uuidv4 } from 'uuid'
import { createTimeoutAbortController, getTimeoutErrorMessage } from '@/lib/core/execution-limits'
import { preprocessExecution } from '@/lib/execution/preprocessing'
import { LoggingSession } from '@/lib/logs/execution/logging-session'
import { buildTraceSpans } from '@/lib/logs/execution/trace-spans/trace-spans'
@@ -20,7 +19,6 @@ export type WorkflowExecutionPayload = {
userId: string
input?: any
triggerType?: CoreTriggerType
executionId?: string
metadata?: Record<string, any>
}

@@ -31,7 +29,7 @@ export type WorkflowExecutionPayload = {
*/
export async function executeWorkflowJob(payload: WorkflowExecutionPayload) {
const workflowId = payload.workflowId
const executionId = payload.executionId || uuidv4()
const executionId = uuidv4()
const requestId = executionId.slice(0, 8)

logger.info(`[${requestId}] Starting workflow execution job: ${workflowId}`, {
@@ -105,33 +103,15 @@ export async function executeWorkflowJob(payload: WorkflowExecutionPayload) {
[]
)

const timeoutController = createTimeoutAbortController(preprocessResult.executionTimeout?.async)
const result = await executeWorkflowCore({
snapshot,
callbacks: {},
loggingSession,
includeFileBase64: true,
base64MaxBytes: undefined,
})

let result
try {
result = await executeWorkflowCore({
snapshot,
callbacks: {},
loggingSession,
includeFileBase64: true,
base64MaxBytes: undefined,
abortSignal: timeoutController.signal,
})
} finally {
timeoutController.cleanup()
}

if (
result.status === 'cancelled' &&
timeoutController.isTimedOut() &&
timeoutController.timeoutMs
) {
const timeoutErrorMessage = getTimeoutErrorMessage(null, timeoutController.timeoutMs)
logger.info(`[${requestId}] Workflow execution timed out`, {
timeoutMs: timeoutController.timeoutMs,
})
await loggingSession.markAsFailed(timeoutErrorMessage)
} else if (result.status === 'paused') {
if (result.status === 'paused') {
if (!result.snapshotSeed) {
logger.error(`[${requestId}] Missing snapshot seed for paused execution`, {
executionId,

@@ -442,16 +442,7 @@ describe('Blocks Module', () => {
})

it('should have valid output types', () => {
const validPrimitiveTypes = [
'string',
'number',
'boolean',
'json',
'array',
'file',
'file[]',
'any',
]
const validPrimitiveTypes = ['string', 'number', 'boolean', 'json', 'array', 'files', 'any']
const blocks = getAllBlocks()
for (const block of blocks) {
for (const [key, outputConfig] of Object.entries(block.outputs)) {

@@ -1,6 +1,5 @@
import { A2AIcon } from '@/components/icons'
import type { BlockConfig } from '@/blocks/types'
import { normalizeFileInput } from '@/blocks/utils'
import type { ToolResponse } from '@/tools/types'

export interface A2AResponse extends ToolResponse {
@@ -215,14 +214,6 @@ export const A2ABlock: BlockConfig<A2AResponse> = {
],
config: {
tool: (params) => params.operation as string,
params: (params) => {
const { fileUpload, fileReference, ...rest } = params
const normalizedFiles = normalizeFileInput(fileUpload || fileReference || params.files)
return {
...rest,
...(normalizedFiles && { files: normalizedFiles }),
}
},
},
},
inputs: {

@@ -26,7 +26,7 @@ export const ChatTriggerBlock: BlockConfig = {
outputs: {
input: { type: 'string', description: 'User message' },
conversationId: { type: 'string', description: 'Conversation ID' },
files: { type: 'file[]', description: 'Uploaded files' },
files: { type: 'files', description: 'Uploaded files' },
},
triggers: {
enabled: true,

@@ -1,7 +1,6 @@
import { ConfluenceIcon } from '@/components/icons'
import type { BlockConfig } from '@/blocks/types'
import { AuthMode } from '@/blocks/types'
import { normalizeFileInput } from '@/blocks/utils'
import type { ConfluenceResponse } from '@/tools/confluence/types'

export const ConfluenceBlock: BlockConfig<ConfluenceResponse> = {
@@ -652,15 +651,14 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {

if (operation === 'upload_attachment') {
const fileInput = attachmentFileUpload || attachmentFileReference || attachmentFile
const normalizedFile = normalizeFileInput(fileInput, { single: true })
if (!normalizedFile) {
if (!fileInput) {
throw new Error('File is required for upload attachment operation.')
}
return {
credential,
pageId: effectivePageId,
operation,
file: normalizedFile,
file: fileInput,
fileName: attachmentFileName,
comment: attachmentComment,
...rest,

@@ -1,7 +1,6 @@
import { DiscordIcon } from '@/components/icons'
import type { BlockConfig } from '@/blocks/types'
import { AuthMode } from '@/blocks/types'
import { normalizeFileInput } from '@/blocks/utils'
import type { DiscordResponse } from '@/tools/discord/types'

export const DiscordBlock: BlockConfig<DiscordResponse> = {
@@ -579,14 +578,13 @@ export const DiscordBlock: BlockConfig<DiscordResponse> = {
if (!params.serverId) throw new Error('Server ID is required')

switch (params.operation) {
case 'discord_send_message': {
case 'discord_send_message':
return {
...commonParams,
channelId: params.channelId,
content: params.content,
files: normalizeFileInput(params.attachmentFiles || params.files),
files: params.attachmentFiles || params.files,
}
}
case 'discord_get_messages':
return {
...commonParams,
@@ -791,7 +789,6 @@ export const DiscordBlock: BlockConfig<DiscordResponse> = {
},
outputs: {
message: { type: 'string', description: 'Status message' },
files: { type: 'file[]', description: 'Files attached to the message' },
data: { type: 'json', description: 'Response data' },
},
}

@@ -1,7 +1,6 @@
import { DropboxIcon } from '@/components/icons'
import type { BlockConfig } from '@/blocks/types'
import { AuthMode } from '@/blocks/types'
import { normalizeFileInput } from '@/blocks/utils'
import type { DropboxResponse } from '@/tools/dropbox/types'

export const DropboxBlock: BlockConfig<DropboxResponse> = {
@@ -61,25 +60,12 @@ export const DropboxBlock: BlockConfig<DropboxResponse> = {
required: true,
},
{
id: 'uploadFile',
title: 'File',
type: 'file-upload',
canonicalParamId: 'file',
placeholder: 'Upload file to send to Dropbox',
mode: 'basic',
multiple: false,
required: true,
id: 'fileContent',
title: 'File Content',
type: 'long-input',
placeholder: 'Base64 encoded file content or file reference',
condition: { field: 'operation', value: 'dropbox_upload' },
},
{
id: 'fileRef',
title: 'File',
type: 'short-input',
canonicalParamId: 'file',
placeholder: 'Reference file from previous blocks',
mode: 'advanced',
required: true,
condition: { field: 'operation', value: 'dropbox_upload' },
},
{
id: 'mode',
@@ -317,16 +303,6 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
params.maxResults = Number(params.maxResults)
}

// Normalize file input for upload operation
// Check all possible field IDs: uploadFile (basic), fileRef (advanced), fileContent (legacy)
const normalizedFile = normalizeFileInput(
params.uploadFile || params.fileRef || params.fileContent,
{ single: true }
)
if (normalizedFile) {
params.file = normalizedFile
}

switch (params.operation) {
case 'dropbox_upload':
return 'dropbox_upload'
@@ -361,10 +337,7 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
path: { type: 'string', description: 'Path in Dropbox' },
autorename: { type: 'boolean', description: 'Auto-rename on conflict' },
// Upload inputs
uploadFile: { type: 'json', description: 'Uploaded file (UserFile)' },
file: { type: 'json', description: 'File to upload (UserFile object)' },
fileRef: { type: 'json', description: 'File reference from previous block' },
fileContent: { type: 'string', description: 'Legacy: base64 encoded file content' },
fileContent: { type: 'string', description: 'Base64 encoded file content' },
fileName: { type: 'string', description: 'Optional filename' },
mode: { type: 'string', description: 'Write mode: add or overwrite' },
mute: { type: 'boolean', description: 'Mute notifications' },
@@ -387,7 +360,7 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
},
outputs: {
// Upload/Download outputs
file: { type: 'file', description: 'Downloaded file stored in execution files' },
file: { type: 'json', description: 'File metadata' },
content: { type: 'string', description: 'File content (base64)' },
temporaryLink: { type: 'string', description: 'Temporary download link' },
// List folder outputs

@@ -73,6 +73,5 @@ export const ElevenLabsBlock: BlockConfig<ElevenLabsBlockResponse> = {

outputs: {
audioUrl: { type: 'string', description: 'Generated audio URL' },
audioFile: { type: 'file', description: 'Generated audio file' },
},
}

@@ -1,48 +1,11 @@
import { createLogger } from '@sim/logger'
import { DocumentIcon } from '@/components/icons'
import { inferContextFromKey } from '@/lib/uploads/utils/file-utils'
import type { BlockConfig, SubBlockType } from '@/blocks/types'
import { createVersionedToolSelector, normalizeFileInput } from '@/blocks/utils'
import type { FileParserOutput, FileParserV3Output } from '@/tools/file/types'
import { createVersionedToolSelector } from '@/blocks/utils'
import type { FileParserOutput } from '@/tools/file/types'

const logger = createLogger('FileBlock')

const resolveFilePathFromInput = (fileInput: unknown): string | null => {
if (!fileInput || typeof fileInput !== 'object') {
return null
}

const record = fileInput as Record<string, unknown>
if (typeof record.path === 'string' && record.path.trim() !== '') {
return record.path
}
if (typeof record.url === 'string' && record.url.trim() !== '') {
return record.url
}
if (typeof record.key === 'string' && record.key.trim() !== '') {
const key = record.key.trim()
const context = typeof record.context === 'string' ? record.context : inferContextFromKey(key)
return `/api/files/serve/${encodeURIComponent(key)}?context=${context}`
}

return null
}

const resolveFilePathsFromInput = (fileInput: unknown): string[] => {
if (!fileInput) {
return []
}

if (Array.isArray(fileInput)) {
return fileInput
.map((file) => resolveFilePathFromInput(file))
.filter((path): path is string => Boolean(path))
}

const resolved = resolveFilePathFromInput(fileInput)
return resolved ? [resolved] : []
}

export const FileBlock: BlockConfig<FileParserOutput> = {
type: 'file',
name: 'File (Legacy)',
@@ -116,14 +79,24 @@ export const FileBlock: BlockConfig<FileParserOutput> = {

// Handle file upload input
if (inputMethod === 'upload') {
const filePaths = resolveFilePathsFromInput(params.file)
if (filePaths.length > 0) {
// Handle case where 'file' is an array (multiple files)
if (params.file && Array.isArray(params.file) && params.file.length > 0) {
const filePaths = params.file.map((file) => file.path)

return {
filePath: filePaths.length === 1 ? filePaths[0] : filePaths,
fileType: params.fileType || 'auto',
}
}

// Handle case where 'file' is a single file object
if (params.file?.path) {
return {
filePath: params.file.path,
fileType: params.fileType || 'auto',
}
}

// If no files, return error
logger.error('No files provided for upload method')
throw new Error('Please upload a file')
@@ -143,7 +116,7 @@ export const FileBlock: BlockConfig<FileParserOutput> = {
},
outputs: {
files: {
type: 'file[]',
type: 'json',
description: 'Array of parsed file objects with content, metadata, and file properties',
},
combinedContent: {
@@ -151,7 +124,7 @@ export const FileBlock: BlockConfig<FileParserOutput> = {
description: 'All file contents merged into a single text string',
},
processedFiles: {
type: 'file[]',
type: 'files',
description: 'Array of UserFile objects for downstream use (attachments, uploads, etc.)',
},
},
@@ -160,9 +133,9 @@ export const FileBlock: BlockConfig<FileParserOutput> = {
export const FileV2Block: BlockConfig<FileParserOutput> = {
...FileBlock,
type: 'file_v2',
name: 'File (Legacy)',
name: 'File',
description: 'Read and parse multiple files',
hideFromToolbar: true,
hideFromToolbar: false,
subBlocks: [
{
id: 'file',
@@ -200,21 +173,7 @@ export const FileV2Block: BlockConfig<FileParserOutput> = {
throw new Error('File is required')
}

// First, try to normalize as file objects (handles JSON strings from advanced mode)
const normalizedFiles = normalizeFileInput(fileInput)
if (normalizedFiles) {
const filePaths = resolveFilePathsFromInput(normalizedFiles)
if (filePaths.length > 0) {
return {
filePath: filePaths.length === 1 ? filePaths[0] : filePaths,
fileType: params.fileType || 'auto',
workspaceId: params._context?.workspaceId,
}
}
}

// If normalization fails, treat as direct URL string
if (typeof fileInput === 'string' && fileInput.trim()) {
if (typeof fileInput === 'string') {
return {
filePath: fileInput.trim(),
fileType: params.fileType || 'auto',
@@ -222,6 +181,21 @@ export const FileV2Block: BlockConfig<FileParserOutput> = {
}
}

if (Array.isArray(fileInput) && fileInput.length > 0) {
const filePaths = fileInput.map((file) => file.path)
return {
filePath: filePaths.length === 1 ? filePaths[0] : filePaths,
fileType: params.fileType || 'auto',
}
}

if (fileInput?.path) {
return {
filePath: fileInput.path,
fileType: params.fileType || 'auto',
}
}

logger.error('Invalid file input format')
throw new Error('Invalid file input')
},
@@ -235,7 +209,7 @@ export const FileV2Block: BlockConfig<FileParserOutput> = {
},
outputs: {
files: {
type: 'file[]',
type: 'json',
description: 'Array of parsed file objects with content, metadata, and file properties',
},
combinedContent: {
@@ -244,96 +218,3 @@ export const FileV2Block: BlockConfig<FileParserOutput> = {
},
},
}

export const FileV3Block: BlockConfig<FileParserV3Output> = {
type: 'file_v3',
name: 'File',
description: 'Read and parse multiple files',
longDescription:
'Upload files directly or import from external URLs to get UserFile objects for use in other blocks.',
docsLink: 'https://docs.sim.ai/tools/file',
category: 'tools',
bgColor: '#40916C',
icon: DocumentIcon,
subBlocks: [
{
id: 'file',
title: 'Files',
type: 'file-upload' as SubBlockType,
canonicalParamId: 'fileInput',
acceptedTypes: '*',
placeholder: 'Upload files to process',
multiple: true,
mode: 'basic',
maxSize: 100,
required: true,
},
{
id: 'fileUrl',
title: 'File URL',
type: 'short-input' as SubBlockType,
canonicalParamId: 'fileInput',
placeholder: 'https://example.com/document.pdf',
mode: 'advanced',
required: true,
},
],
tools: {
access: ['file_parser_v3'],
config: {
tool: () => 'file_parser_v3',
params: (params) => {
const fileInput = params.fileInput ?? params.file ?? params.fileUrl ?? params.filePath
if (!fileInput) {
logger.error('No file input provided')
throw new Error('File input is required')
}

// First, try to normalize as file objects (handles JSON strings from advanced mode)
const normalizedFiles = normalizeFileInput(fileInput)
if (normalizedFiles) {
const filePaths = resolveFilePathsFromInput(normalizedFiles)
if (filePaths.length > 0) {
return {
filePath: filePaths.length === 1 ? filePaths[0] : filePaths,
fileType: params.fileType || 'auto',
workspaceId: params._context?.workspaceId,
workflowId: params._context?.workflowId,
executionId: params._context?.executionId,
}
}
}

// If normalization fails, treat as direct URL string
if (typeof fileInput === 'string' && fileInput.trim()) {
return {
filePath: fileInput.trim(),
fileType: params.fileType || 'auto',
workspaceId: params._context?.workspaceId,
workflowId: params._context?.workflowId,
executionId: params._context?.executionId,
}
}

logger.error('Invalid file input format')
throw new Error('File input is required')
},
},
},
inputs: {
fileInput: { type: 'json', description: 'File input (upload or URL)' },
fileUrl: { type: 'string', description: 'External file URL (advanced mode)' },
file: { type: 'json', description: 'Uploaded file data (basic mode)' },
fileType: { type: 'string', description: 'File type' },
},
outputs: {
files: {
type: 'file[]',
description: 'Parsed files as UserFile objects',
},
combinedContent: {
type: 'string',
description: 'All file contents merged into a single text string',
},
},
}

Some files were not shown because too many files have changed in this diff