Mirror of https://github.com/simstudioai/sim.git (synced 2026-02-05 04:05:14 -05:00)

Comparing commits: `cursor/run...main` (60 commits)
@@ -183,6 +183,109 @@ export const {ServiceName}Block: BlockConfig = {
  }
}
```

## File Input Handling

When your block accepts file uploads, use the basic/advanced mode pattern with `normalizeFileInput`.

### Basic/Advanced File Pattern

```typescript
// Basic mode: Visual file upload
{
  id: 'uploadFile',
  title: 'File',
  type: 'file-upload',
  canonicalParamId: 'file', // Both map to 'file' param
  placeholder: 'Upload file',
  mode: 'basic',
  multiple: false,
  required: true,
  condition: { field: 'operation', value: 'upload' },
},
// Advanced mode: Reference from other blocks
{
  id: 'fileRef',
  title: 'File',
  type: 'short-input',
  canonicalParamId: 'file', // Both map to 'file' param
  placeholder: 'Reference file (e.g., {{file_block.output}})',
  mode: 'advanced',
  required: true,
  condition: { field: 'operation', value: 'upload' },
},
```

**Critical constraints:**

- `canonicalParamId` must NOT match any subblock's `id` in the same block
- Values are stored under the subblock `id`, not `canonicalParamId`

### Normalizing File Input in tools.config

Use `normalizeFileInput` to handle all input variants:

```typescript
import { normalizeFileInput } from '@/blocks/utils'

tools: {
  access: ['service_upload'],
  config: {
    tool: (params) => {
      // Check all field IDs: uploadFile (basic), fileRef (advanced), fileContent (legacy)
      const normalizedFile = normalizeFileInput(
        params.uploadFile || params.fileRef || params.fileContent,
        { single: true }
      )
      if (normalizedFile) {
        params.file = normalizedFile
      }
      return `service_${params.operation}`
    },
  },
}
```

**Why this pattern?**

- Values come through as `params.uploadFile` or `params.fileRef` (the subblock IDs)
- `canonicalParamId` only controls UI/schema mapping, not runtime values
- `normalizeFileInput` handles JSON strings from advanced mode template resolution
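To make those bullets concrete, here is a sketch of the shapes the same logical input can arrive in; the exact runtime values are illustrative assumptions, not captured output:

```typescript
// Illustrative values only (assumption): the same canonical 'file' param in three shapes.
// Basic mode: the file-upload UI yields a UserFile object
const fromBasic = { id: 'f1', name: 'report.pdf', url: 'https://...', size: 2048, type: 'application/pdf' }
// Advanced mode: template resolution can yield the same object as a JSON *string*
const fromAdvanced = '{"id":"f1","name":"report.pdf","url":"https://...","size":2048,"type":"application/pdf"}'
// Legacy: a bare base64 string
const fromLegacy = 'JVBERi0xLjQK...'

// All three variants normalize to a single file value (or nothing) with { single: true }
const file = normalizeFileInput(fromBasic ?? fromAdvanced ?? fromLegacy, { single: true })
```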
### File Input Types in `inputs`

Use `type: 'json'` for file inputs:

```typescript
inputs: {
  uploadFile: { type: 'json', description: 'Uploaded file (UserFile)' },
  fileRef: { type: 'json', description: 'File reference from previous block' },
  // Legacy field for backwards compatibility
  fileContent: { type: 'string', description: 'Legacy: base64 encoded content' },
}
```

### Multiple Files

For multiple file uploads:

```typescript
{
  id: 'attachments',
  title: 'Attachments',
  type: 'file-upload',
  multiple: true, // Allow multiple files
  maxSize: 25, // Max size in MB per file
  acceptedTypes: 'image/*,application/pdf,.doc,.docx',
}

// In tools.config:
const normalizedFiles = normalizeFileInput(
  params.attachments || params.attachmentRefs,
  // No { single: true } - returns an array
)
if (normalizedFiles) {
  params.files = normalizedFiles
}
```

## Condition Syntax

Controls when a field is shown based on other field values.
@@ -457,7 +457,230 @@ You can usually find this in the service's brand/press kit page, or copy it from
Paste the SVG code here and I'll convert it to a React component.
```

-## Common Gotchas
+## File Handling

When your integration handles file uploads or downloads, follow these patterns to work with `UserFile` objects consistently.

### What is a UserFile?

A `UserFile` is the standard file representation in Sim:

```typescript
interface UserFile {
  id: string // Unique identifier
  name: string // Original filename
  url: string // Presigned URL for download
  size: number // File size in bytes
  type: string // MIME type (e.g., 'application/pdf')
  base64?: string // Optional base64 content (if small file)
  key?: string // Internal storage key
  context?: object // Storage context metadata
}
```
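Because tool params arrive as loosely typed JSON, it helps to narrow values before reading `UserFile` fields. A minimal sketch using the `isUserFile` type guard from the helpers table later in this section (its exact semantics are assumed from the name):

```typescript
import { isUserFile } from '@/lib/core/utils/user-file'

// Narrow an unknown param to UserFile before reading its fields
function describeFileParam(value: unknown): string {
  if (isUserFile(value)) {
    return `${value.name} (${value.type}, ${value.size} bytes)`
  }
  return 'not a UserFile'
}
```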
### File Input Pattern (Uploads)

For tools that accept file uploads, **always route through an internal API endpoint** rather than calling external APIs directly. This ensures proper file content retrieval.

#### 1. Block SubBlocks for File Input

Use the basic/advanced mode pattern:

```typescript
// Basic mode: File upload UI
{
  id: 'uploadFile',
  title: 'File',
  type: 'file-upload',
  canonicalParamId: 'file', // Maps to 'file' param
  placeholder: 'Upload file',
  mode: 'basic',
  multiple: false,
  required: true,
  condition: { field: 'operation', value: 'upload' },
},
// Advanced mode: Reference from previous block
{
  id: 'fileRef',
  title: 'File',
  type: 'short-input',
  canonicalParamId: 'file', // Same canonical param
  placeholder: 'Reference file (e.g., {{file_block.output}})',
  mode: 'advanced',
  required: true,
  condition: { field: 'operation', value: 'upload' },
},
```

**Critical:** `canonicalParamId` must NOT match any subblock `id`.

#### 2. Normalize File Input in Block Config

In `tools.config.tool`, use `normalizeFileInput` to handle all input variants:

```typescript
import { normalizeFileInput } from '@/blocks/utils'

tools: {
  config: {
    tool: (params) => {
      // Normalize file from basic (uploadFile), advanced (fileRef), or legacy (fileContent)
      const normalizedFile = normalizeFileInput(
        params.uploadFile || params.fileRef || params.fileContent,
        { single: true }
      )
      if (normalizedFile) {
        params.file = normalizedFile
      }
      return `{service}_${params.operation}`
    },
  },
}
```

#### 3. Create Internal API Route

Create `apps/sim/app/api/tools/{service}/{action}/route.ts`:

```typescript
import { createLogger } from '@sim/logger'
import { NextResponse, type NextRequest } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { FileInputSchema, type RawFileInput } from '@/lib/uploads/utils/file-schemas'
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'

const logger = createLogger('{Service}UploadAPI')

const RequestSchema = z.object({
  accessToken: z.string(),
  file: FileInputSchema.optional().nullable(),
  // Legacy field for backwards compatibility
  fileContent: z.string().optional().nullable(),
  // ... other params
})

export async function POST(request: NextRequest) {
  const requestId = generateRequestId()

  const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
  if (!authResult.success) {
    return NextResponse.json({ success: false, error: 'Unauthorized' }, { status: 401 })
  }

  const body = await request.json()
  const data = RequestSchema.parse(body)

  let fileBuffer: Buffer
  let fileName: string

  // Prefer UserFile input, fall back to legacy base64
  if (data.file) {
    const userFiles = processFilesToUserFiles([data.file as RawFileInput], requestId, logger)
    if (userFiles.length === 0) {
      return NextResponse.json({ success: false, error: 'Invalid file' }, { status: 400 })
    }
    const userFile = userFiles[0]
    fileBuffer = await downloadFileFromStorage(userFile, requestId, logger)
    fileName = userFile.name
  } else if (data.fileContent) {
    // Legacy: base64 string (backwards compatibility)
    fileBuffer = Buffer.from(data.fileContent, 'base64')
    fileName = 'file'
  } else {
    return NextResponse.json({ success: false, error: 'File required' }, { status: 400 })
  }

  // Now call external API with fileBuffer
  const response = await fetch('https://api.{service}.com/upload', {
    method: 'POST',
    headers: { Authorization: `Bearer ${data.accessToken}` },
    body: new Uint8Array(fileBuffer), // Convert Buffer for fetch
  })

  // ... handle response
}
```

#### 4. Update Tool to Use Internal Route

```typescript
export const {service}UploadTool: ToolConfig<Params, Response> = {
  id: '{service}_upload',
  // ...
  params: {
    file: { type: 'file', required: false, visibility: 'user-or-llm' },
    fileContent: { type: 'string', required: false, visibility: 'hidden' }, // Legacy
  },
  request: {
    url: '/api/tools/{service}/upload', // Internal route
    method: 'POST',
    body: (params) => ({
      accessToken: params.accessToken,
      file: params.file,
      fileContent: params.fileContent,
    }),
  },
}
```

### File Output Pattern (Downloads)

For tools that return files, use `FileToolProcessor` to store files and return `UserFile` objects.

#### In Tool transformResponse

```typescript
import { FileToolProcessor } from '@/executor/utils/file-tool-processor'

transformResponse: async (response, context) => {
  const data = await response.json()

  // Process file outputs to UserFile objects
  const fileProcessor = new FileToolProcessor(context)
  const file = await fileProcessor.processFileData({
    data: data.content, // base64 or buffer
    mimeType: data.mimeType,
    filename: data.filename,
  })

  return {
    success: true,
    output: { file },
  }
}
```

#### In API Route (for complex file handling)

```typescript
// Return file data that FileToolProcessor can handle
return NextResponse.json({
  success: true,
  output: {
    file: {
      data: base64Content,
      mimeType: 'application/pdf',
      filename: 'document.pdf',
    },
  },
})
```

### Key Helpers Reference

| Helper | Location | Purpose |
|--------|----------|---------|
| `normalizeFileInput` | `@/blocks/utils` | Normalize file params in block config |
| `processFilesToUserFiles` | `@/lib/uploads/utils/file-utils` | Convert raw inputs to UserFile[] |
| `downloadFileFromStorage` | `@/lib/uploads/utils/file-utils.server` | Get file Buffer from UserFile |
| `FileToolProcessor` | `@/executor/utils/file-tool-processor` | Process tool output files |
| `isUserFile` | `@/lib/core/utils/user-file` | Type guard for UserFile objects |
| `FileInputSchema` | `@/lib/uploads/utils/file-schemas` | Zod schema for file validation |

### Common Gotchas

1. **OAuth serviceId must match** - The `serviceId` in oauth-input must match the OAuth provider configuration
2. **Tool IDs are snake_case** - `stripe_create_payment`, not `stripeCreatePayment`

@@ -465,3 +688,5 @@ Paste the SVG code here and I'll convert it to a React component.

4. **Alphabetical ordering** - Keep imports and registry entries alphabetically sorted
5. **Required can be conditional** - Use `required: { field: 'op', value: 'create' }` instead of always true
6. **DependsOn clears options** - When a dependency changes, selector options are refetched
7. **Never pass Buffer directly to fetch** - Convert to `new Uint8Array(buffer)` for TypeScript compatibility (see the sketch after this list)
8. **Always handle legacy file params** - Keep hidden `fileContent` params for backwards compatibility
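A minimal sketch of gotcha 7 in isolation, using the same conversion as the upload route above (`uploadUrl` and `fileBuffer` are placeholders):

```typescript
declare const uploadUrl: string  // placeholder endpoint
declare const fileBuffer: Buffer // e.g. from downloadFileFromStorage(...)

// Passing the Buffer itself can fail TypeScript's BodyInit check in some configs:
//   body: fileBuffer            // may not type-check
// Wrapping it in a plain Uint8Array (note: this copies the bytes) type-checks cleanly:
await fetch(uploadUrl, {
  method: 'POST',
  body: new Uint8Array(fileBuffer),
})
```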
@@ -195,6 +195,52 @@ import { {service}WebhookTrigger } from '@/triggers/{service}'
{service}_webhook: {service}WebhookTrigger,
```

## File Handling

When integrations handle file uploads/downloads, use `UserFile` objects consistently.

### File Input (Uploads)

1. **Block subBlocks:** Use basic/advanced mode pattern with `canonicalParamId`
2. **Normalize in block config:** Use `normalizeFileInput` from `@/blocks/utils`
3. **Internal API route:** Create route that uses `downloadFileFromStorage` to get file content
4. **Tool routes to internal API:** Don't call external APIs directly with files

```typescript
// In block tools.config:
import { normalizeFileInput } from '@/blocks/utils'

const normalizedFile = normalizeFileInput(
  params.uploadFile || params.fileRef || params.fileContent,
  { single: true }
)
if (normalizedFile) params.file = normalizedFile
```

### File Output (Downloads)

Use `FileToolProcessor` in tool `transformResponse` to store files:

```typescript
import { FileToolProcessor } from '@/executor/utils/file-tool-processor'

const processor = new FileToolProcessor(context)
const file = await processor.processFileData({
  data: base64Content,
  mimeType: 'application/pdf',
  filename: 'doc.pdf',
})
```

### Key Helpers

| Helper | Location | Purpose |
|--------|----------|---------|
| `normalizeFileInput` | `@/blocks/utils` | Normalize file params in block config |
| `processFilesToUserFiles` | `@/lib/uploads/utils/file-utils` | Convert raw inputs to UserFile[] |
| `downloadFileFromStorage` | `@/lib/uploads/utils/file-utils.server` | Get Buffer from UserFile |
| `FileToolProcessor` | `@/executor/utils/file-tool-processor` | Process tool output files |

## Checklist

- [ ] Look up API docs for the service

@@ -207,3 +253,5 @@ import { {service}WebhookTrigger } from '@/triggers/{service}'

- [ ] Register block in `blocks/registry.ts`
- [ ] (Optional) Create triggers in `triggers/{service}/`
- [ ] (Optional) Register triggers in `triggers/registry.ts`
- [ ] (If file uploads) Create internal API route with `downloadFileFromStorage`
- [ ] (If file uploads) Use `normalizeFileInput` in block config
@@ -193,6 +193,52 @@ import { {service}WebhookTrigger } from '@/triggers/{service}'
{service}_webhook: {service}WebhookTrigger,
```

## File Handling

When integrations handle file uploads/downloads, use `UserFile` objects consistently.

### File Input (Uploads)

1. **Block subBlocks:** Use basic/advanced mode pattern with `canonicalParamId`
2. **Normalize in block config:** Use `normalizeFileInput` from `@/blocks/utils`
3. **Internal API route:** Create route that uses `downloadFileFromStorage` to get file content
4. **Tool routes to internal API:** Don't call external APIs directly with files

```typescript
// In block tools.config:
import { normalizeFileInput } from '@/blocks/utils'

const normalizedFile = normalizeFileInput(
  params.uploadFile || params.fileRef || params.fileContent,
  { single: true }
)
if (normalizedFile) params.file = normalizedFile
```

### File Output (Downloads)

Use `FileToolProcessor` in tool `transformResponse` to store files:

```typescript
import { FileToolProcessor } from '@/executor/utils/file-tool-processor'

const processor = new FileToolProcessor(context)
const file = await processor.processFileData({
  data: base64Content,
  mimeType: 'application/pdf',
  filename: 'doc.pdf',
})
```

### Key Helpers

| Helper | Location | Purpose |
|--------|----------|---------|
| `normalizeFileInput` | `@/blocks/utils` | Normalize file params in block config |
| `processFilesToUserFiles` | `@/lib/uploads/utils/file-utils` | Convert raw inputs to UserFile[] |
| `downloadFileFromStorage` | `@/lib/uploads/utils/file-utils.server` | Get Buffer from UserFile |
| `FileToolProcessor` | `@/executor/utils/file-tool-processor` | Process tool output files |

## Checklist

- [ ] Look up API docs for the service

@@ -205,3 +251,5 @@ import { {service}WebhookTrigger } from '@/triggers/{service}'

- [ ] Register block in `blocks/registry.ts`
- [ ] (Optional) Create triggers in `triggers/{service}/`
- [ ] (Optional) Register triggers in `triggers/registry.ts`
- [ ] (If file uploads) Create internal API route with `downloadFileFromStorage`
- [ ] (If file uploads) Use `normalizeFileInput` in block config
CLAUDE.md (19 lines changed)
@@ -265,6 +265,23 @@ Register in `blocks/registry.ts` (alphabetically).
**dependsOn:** `['field']` or `{ all: ['a'], any: ['b', 'c'] }`

**File Input Pattern (basic/advanced mode):**

```typescript
// Basic: file-upload UI
{ id: 'uploadFile', type: 'file-upload', canonicalParamId: 'file', mode: 'basic' },
// Advanced: reference from other blocks
{ id: 'fileRef', type: 'short-input', canonicalParamId: 'file', mode: 'advanced' },
```

In `tools.config.tool`, normalize with:

```typescript
import { normalizeFileInput } from '@/blocks/utils'

const file = normalizeFileInput(params.uploadFile || params.fileRef, { single: true })
if (file) params.file = file
```

For file uploads, create an internal API route (`/api/tools/{service}/upload`) that uses `downloadFileFromStorage` to get file content from `UserFile` objects.

### 3. Icon (`components/icons.tsx`)

```typescript

@@ -293,3 +310,5 @@ Register in triggers/registry.ts.

- [ ] Create block in `blocks/blocks/{service}.ts`
- [ ] Register block in `blocks/registry.ts`
- [ ] (Optional) Create and register triggers
- [ ] (If file uploads) Create internal API route with `downloadFileFromStorage`
- [ ] (If file uploads) Use `normalizeFileInput` in block config
@@ -213,6 +213,25 @@ Different subscription plans have different usage limits:
| **Team** | $40/seat (pooled, adjustable) | 300 sync, 2,500 async |
| **Enterprise** | Custom | Custom |

## Execution Time Limits

Workflows have maximum execution time limits based on your subscription plan:

| Plan | Sync Execution | Async Execution |
|------|----------------|-----------------|
| **Free** | 5 minutes | 10 minutes |
| **Pro** | 50 minutes | 90 minutes |
| **Team** | 50 minutes | 90 minutes |
| **Enterprise** | 50 minutes | 90 minutes |

**Sync executions** run immediately and return results directly. These are triggered via the API with `async: false` (default) or through the UI.

**Async executions** (triggered via API with `async: true`, webhooks, or schedules) run in the background. Async time limits are up to 2x the sync limit, capped at 90 minutes.
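The async column in the table follows mechanically from that rule; a quick sketch of the arithmetic (the function name here is illustrative, not an actual helper):

```typescript
// Async limit = min(2 * sync limit, 90 minutes)
function asyncLimitMinutes(syncLimitMinutes: number): number {
  return Math.min(2 * syncLimitMinutes, 90)
}

asyncLimitMinutes(5)  // 10 (Free)
asyncLimitMinutes(50) // 90 (Pro/Team/Enterprise: 100 capped to 90)
```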
<Callout type="info">
If a workflow exceeds its time limit, it will be terminated and marked as failed with a timeout error. Design long-running workflows to use async execution or break them into smaller workflows.
</Callout>

## Billing Model

Sim uses a **base subscription + overage** billing model:
apps/docs/content/docs/en/execution/files.mdx (new file, 168 lines)
@@ -0,0 +1,168 @@
---
title: Passing Files
---

import { Callout } from 'fumadocs-ui/components/callout'
import { Tab, Tabs } from 'fumadocs-ui/components/tabs'

Sim makes it easy to work with files throughout your workflows. Blocks can receive files, process them, and pass them to other blocks seamlessly.

## File Objects

When blocks output files (like Gmail attachments, generated images, or parsed documents), they return a standardized file object:

```json
{
  "name": "report.pdf",
  "url": "https://...",
  "base64": "JVBERi0xLjQK...",
  "type": "application/pdf",
  "size": 245678
}
```

You can access any of these properties when referencing files from previous blocks.

## The File Block

The **File block** is the universal entry point for files in your workflows. It accepts files from any source and outputs standardized file objects that work with all integrations.

**Inputs:**
- **Uploaded files** - Drag and drop or select files directly
- **External URLs** - Any publicly accessible file URL
- **Files from other blocks** - Pass files from Gmail attachments, Slack downloads, etc.

**Outputs:**
- A list of `UserFile` objects with consistent structure (`name`, `url`, `base64`, `type`, `size`)
- `combinedContent` - Extracted text content from all files (for documents)

**Example usage:**

```
// Get all files from the File block
<file.files>

// Get the first file
<file.files[0]>

// Get combined text content from parsed documents
<file.combinedContent>
```

The File block automatically:
- Detects file types from URLs and extensions
- Extracts text from PDFs, CSVs, and documents
- Generates base64 encoding for binary files
- Creates presigned URLs for secure access

Use the File block when you need to normalize files from different sources before passing them to other blocks like Vision, STT, or email integrations.

## Passing Files Between Blocks

Reference files from previous blocks using the tag dropdown. Click in any file input field and type `<` to see available outputs.

**Common patterns:**

```
// Single file from a block
<gmail.attachments[0]>

// Pass the whole file object
<file_parser.files[0]>

// Access specific properties
<gmail.attachments[0].name>
<gmail.attachments[0].base64>
```

Most blocks accept the full file object and extract what they need automatically. You don't need to manually extract `base64` or `url` in most cases.

## Triggering Workflows with Files

When calling a workflow via API that expects file input, include files in your request:

<Tabs items={['Base64', 'URL']}>
<Tab value="Base64">
```bash
curl -X POST "https://sim.ai/api/workflows/YOUR_WORKFLOW_ID/execute" \
  -H "Content-Type: application/json" \
  -H "x-api-key: YOUR_API_KEY" \
  -d '{
    "document": {
      "name": "report.pdf",
      "base64": "JVBERi0xLjQK...",
      "type": "application/pdf"
    }
  }'
```
</Tab>
<Tab value="URL">
```bash
curl -X POST "https://sim.ai/api/workflows/YOUR_WORKFLOW_ID/execute" \
  -H "Content-Type: application/json" \
  -H "x-api-key: YOUR_API_KEY" \
  -d '{
    "document": {
      "name": "report.pdf",
      "url": "https://example.com/report.pdf",
      "type": "application/pdf"
    }
  }'
```
</Tab>
</Tabs>

The workflow's Start block should have an input field configured to receive the file parameter.

## Receiving Files in API Responses

When a workflow outputs files, they're included in the response:

```json
{
  "success": true,
  "output": {
    "generatedFile": {
      "name": "output.png",
      "url": "https://...",
      "base64": "iVBORw0KGgo...",
      "type": "image/png",
      "size": 34567
    }
  }
}
```

Use `url` for direct downloads or `base64` for inline processing.
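For instance, a small script consuming the response above might save the generated file locally (a sketch; endpoint and key placeholders as in the earlier curl examples):

```typescript
import { writeFileSync } from 'node:fs'

// Execute the workflow, then persist the returned file (field names from the example above)
const res = await fetch('https://sim.ai/api/workflows/YOUR_WORKFLOW_ID/execute', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json', 'x-api-key': 'YOUR_API_KEY' },
  body: JSON.stringify({}), // workflow inputs go here
})
const result = await res.json()

const file = result.output.generatedFile
writeFileSync(file.name, Buffer.from(file.base64, 'base64')) // or download file.url instead
```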
## Blocks That Work with Files

**File inputs:**
- **File** - Parse documents, images, and text files
- **Vision** - Analyze images with AI models
- **Mistral Parser** - Extract text from PDFs

**File outputs:**
- **Gmail** - Email attachments
- **Slack** - Downloaded files
- **TTS** - Generated audio files
- **Video Generator** - Generated videos
- **Image Generator** - Generated images

**File storage:**
- **Supabase** - Upload/download from storage
- **S3** - AWS S3 operations
- **Google Drive** - Drive file operations
- **Dropbox** - Dropbox file operations

<Callout type="info">
Files are automatically available to downstream blocks. The execution engine handles all file transfer and format conversion.
</Callout>

## Best Practices

1. **Use file objects directly** - Pass the full file object rather than extracting individual properties. Blocks handle the conversion automatically.

2. **Check file types** - Ensure the file type matches what the receiving block expects. The Vision block needs images, the File block handles documents.

3. **Consider file size** - Large files increase execution time. For very large files, consider using storage blocks (S3, Supabase) for intermediate storage.
@@ -1,3 +1,3 @@
 {
-  "pages": ["index", "basics", "api", "logging", "costs"]
+  "pages": ["index", "basics", "files", "api", "logging", "costs"]
 }
@@ -11,7 +11,7 @@ import {
   Database,
   DollarSign,
   HardDrive,
-  Workflow,
+  Timer,
 } from 'lucide-react'
 import { useRouter } from 'next/navigation'
 import { cn } from '@/lib/core/utils/cn'

@@ -44,7 +44,7 @@ interface PricingTier {

 const FREE_PLAN_FEATURES: PricingFeature[] = [
   { icon: DollarSign, text: '$20 usage limit' },
   { icon: HardDrive, text: '5GB file storage' },
-  { icon: Workflow, text: 'Public template access' },
+  { icon: Timer, text: '5 min execution limit' },
   { icon: Database, text: 'Limited log retention' },
   { icon: Code2, text: 'CLI/SDK Access' },
 ]
@@ -16,7 +16,7 @@ import {
 import { checkHybridAuth } from '@/lib/auth/hybrid'
 import { getBrandConfig } from '@/lib/branding/branding'
 import { acquireLock, getRedisClient, releaseLock } from '@/lib/core/config/redis'
-import { validateExternalUrl } from '@/lib/core/security/input-validation'
+import { validateUrlWithDNS } from '@/lib/core/security/input-validation.server'
 import { SSE_HEADERS } from '@/lib/core/utils/sse'
 import { getBaseUrl } from '@/lib/core/utils/urls'
 import { markExecutionCancelled } from '@/lib/execution/cancellation'

@@ -1119,7 +1119,7 @@ async function handlePushNotificationSet(

     )
   }

-  const urlValidation = validateExternalUrl(
+  const urlValidation = await validateUrlWithDNS(
     params.pushNotificationConfig.url,
     'Push notification URL'
   )
@@ -21,6 +21,7 @@ const UpdateCreatorProfileSchema = z.object({
   name: z.string().min(1, 'Name is required').max(100, 'Max 100 characters').optional(),
   profileImageUrl: z.string().optional().or(z.literal('')),
   details: CreatorProfileDetailsSchema.optional(),
+  verified: z.boolean().optional(), // Verification status (super users only)
 })

 // Helper to check if user has permission to manage profile

@@ -97,12 +98,30 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{

     return NextResponse.json({ error: 'Profile not found' }, { status: 404 })
   }

-  // Check permissions
+  // Verification changes require super user permission
+  if (data.verified !== undefined) {
+    const { verifyEffectiveSuperUser } = await import('@/lib/templates/permissions')
+    const { effectiveSuperUser } = await verifyEffectiveSuperUser(session.user.id)
+    if (!effectiveSuperUser) {
+      logger.warn(`[${requestId}] Non-super user attempted to change creator verification: ${id}`)
+      return NextResponse.json(
+        { error: 'Only super users can change verification status' },
+        { status: 403 }
+      )
+    }
+  }
+
+  // For non-verified updates, check regular permissions
+  const hasNonVerifiedUpdates =
+    data.name !== undefined || data.profileImageUrl !== undefined || data.details !== undefined
+
+  if (hasNonVerifiedUpdates) {
     const canEdit = await hasPermission(session.user.id, existing[0])
     if (!canEdit) {
       logger.warn(`[${requestId}] User denied permission to update profile: ${id}`)
       return NextResponse.json({ error: 'Access denied' }, { status: 403 })
     }
+  }

   const updateData: any = {
     updatedAt: new Date(),

@@ -111,6 +130,7 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{

   if (data.name !== undefined) updateData.name = data.name
   if (data.profileImageUrl !== undefined) updateData.profileImageUrl = data.profileImageUrl
   if (data.details !== undefined) updateData.details = data.details
+  if (data.verified !== undefined) updateData.verified = data.verified

   const updated = await db
     .update(templateCreators)
@@ -1,113 +0,0 @@
import { db } from '@sim/db'
import { templateCreators } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { generateRequestId } from '@/lib/core/utils/request'
import { verifyEffectiveSuperUser } from '@/lib/templates/permissions'

const logger = createLogger('CreatorVerificationAPI')

export const revalidate = 0

// POST /api/creators/[id]/verify - Verify a creator (super users only)
export async function POST(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
  const requestId = generateRequestId()
  const { id } = await params

  try {
    const session = await getSession()
    if (!session?.user?.id) {
      logger.warn(`[${requestId}] Unauthorized verification attempt for creator: ${id}`)
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    // Check if user is a super user
    const { effectiveSuperUser } = await verifyEffectiveSuperUser(session.user.id)
    if (!effectiveSuperUser) {
      logger.warn(`[${requestId}] Non-super user attempted to verify creator: ${id}`)
      return NextResponse.json({ error: 'Only super users can verify creators' }, { status: 403 })
    }

    // Check if creator exists
    const existingCreator = await db
      .select()
      .from(templateCreators)
      .where(eq(templateCreators.id, id))
      .limit(1)

    if (existingCreator.length === 0) {
      logger.warn(`[${requestId}] Creator not found for verification: ${id}`)
      return NextResponse.json({ error: 'Creator not found' }, { status: 404 })
    }

    // Update creator verified status to true
    await db
      .update(templateCreators)
      .set({ verified: true, updatedAt: new Date() })
      .where(eq(templateCreators.id, id))

    logger.info(`[${requestId}] Creator verified: ${id} by super user: ${session.user.id}`)

    return NextResponse.json({
      message: 'Creator verified successfully',
      creatorId: id,
    })
  } catch (error) {
    logger.error(`[${requestId}] Error verifying creator ${id}`, error)
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}

// DELETE /api/creators/[id]/verify - Unverify a creator (super users only)
export async function DELETE(
  request: NextRequest,
  { params }: { params: Promise<{ id: string }> }
) {
  const requestId = generateRequestId()
  const { id } = await params

  try {
    const session = await getSession()
    if (!session?.user?.id) {
      logger.warn(`[${requestId}] Unauthorized unverification attempt for creator: ${id}`)
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    // Check if user is a super user
    const { effectiveSuperUser } = await verifyEffectiveSuperUser(session.user.id)
    if (!effectiveSuperUser) {
      logger.warn(`[${requestId}] Non-super user attempted to unverify creator: ${id}`)
      return NextResponse.json({ error: 'Only super users can unverify creators' }, { status: 403 })
    }

    // Check if creator exists
    const existingCreator = await db
      .select()
      .from(templateCreators)
      .where(eq(templateCreators.id, id))
      .limit(1)

    if (existingCreator.length === 0) {
      logger.warn(`[${requestId}] Creator not found for unverification: ${id}`)
      return NextResponse.json({ error: 'Creator not found' }, { status: 404 })
    }

    // Update creator verified status to false
    await db
      .update(templateCreators)
      .set({ verified: false, updatedAt: new Date() })
      .where(eq(templateCreators.id, id))

    logger.info(`[${requestId}] Creator unverified: ${id} by super user: ${session.user.id}`)

    return NextResponse.json({
      message: 'Creator unverified successfully',
      creatorId: id,
    })
  } catch (error) {
    logger.error(`[${requestId}] Error unverifying creator ${id}`, error)
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}
@@ -1,13 +1,16 @@
-import { db } from '@sim/db'
+import { asyncJobs, db } from '@sim/db'
 import { workflowExecutionLogs } from '@sim/db/schema'
 import { createLogger } from '@sim/logger'
-import { and, eq, lt, sql } from 'drizzle-orm'
+import { and, eq, inArray, lt, sql } from 'drizzle-orm'
 import { type NextRequest, NextResponse } from 'next/server'
 import { verifyCronAuth } from '@/lib/auth/internal'
+import { JOB_RETENTION_HOURS, JOB_STATUS } from '@/lib/core/async-jobs'
+import { getMaxExecutionTimeout } from '@/lib/core/execution-limits'

 const logger = createLogger('CleanupStaleExecutions')

-const STALE_THRESHOLD_MINUTES = 30
+const STALE_THRESHOLD_MS = getMaxExecutionTimeout() + 5 * 60 * 1000
+const STALE_THRESHOLD_MINUTES = Math.ceil(STALE_THRESHOLD_MS / 60000)
 const MAX_INT32 = 2_147_483_647

 export async function GET(request: NextRequest) {
@@ -78,12 +81,102 @@ export async function GET(request: NextRequest) {
     logger.info(`Stale execution cleanup completed. Cleaned: ${cleaned}, Failed: ${failed}`)

+    // Clean up stale async jobs (stuck in processing)
+    let asyncJobsMarkedFailed = 0
+
+    try {
+      const staleAsyncJobs = await db
+        .update(asyncJobs)
+        .set({
+          status: JOB_STATUS.FAILED,
+          completedAt: new Date(),
+          error: `Job terminated: stuck in processing for more than ${STALE_THRESHOLD_MINUTES} minutes`,
+          updatedAt: new Date(),
+        })
+        .where(
+          and(eq(asyncJobs.status, JOB_STATUS.PROCESSING), lt(asyncJobs.startedAt, staleThreshold))
+        )
+        .returning({ id: asyncJobs.id })
+
+      asyncJobsMarkedFailed = staleAsyncJobs.length
+      if (asyncJobsMarkedFailed > 0) {
+        logger.info(`Marked ${asyncJobsMarkedFailed} stale async jobs as failed`)
+      }
+    } catch (error) {
+      logger.error('Failed to clean up stale async jobs:', {
+        error: error instanceof Error ? error.message : String(error),
+      })
+    }
+
+    // Clean up stale pending jobs (never started, e.g., due to server crash before startJob())
+    let stalePendingJobsMarkedFailed = 0
+
+    try {
+      const stalePendingJobs = await db
+        .update(asyncJobs)
+        .set({
+          status: JOB_STATUS.FAILED,
+          completedAt: new Date(),
+          error: `Job terminated: stuck in pending state for more than ${STALE_THRESHOLD_MINUTES} minutes (never started)`,
+          updatedAt: new Date(),
+        })
+        .where(
+          and(eq(asyncJobs.status, JOB_STATUS.PENDING), lt(asyncJobs.createdAt, staleThreshold))
+        )
+        .returning({ id: asyncJobs.id })
+
+      stalePendingJobsMarkedFailed = stalePendingJobs.length
+      if (stalePendingJobsMarkedFailed > 0) {
+        logger.info(`Marked ${stalePendingJobsMarkedFailed} stale pending jobs as failed`)
+      }
+    } catch (error) {
+      logger.error('Failed to clean up stale pending jobs:', {
+        error: error instanceof Error ? error.message : String(error),
+      })
+    }
+
+    // Delete completed/failed jobs older than retention period
+    const retentionThreshold = new Date(Date.now() - JOB_RETENTION_HOURS * 60 * 60 * 1000)
+    let asyncJobsDeleted = 0
+
+    try {
+      const deletedJobs = await db
+        .delete(asyncJobs)
+        .where(
+          and(
+            inArray(asyncJobs.status, [JOB_STATUS.COMPLETED, JOB_STATUS.FAILED]),
+            lt(asyncJobs.completedAt, retentionThreshold)
+          )
+        )
+        .returning({ id: asyncJobs.id })
+
+      asyncJobsDeleted = deletedJobs.length
+      if (asyncJobsDeleted > 0) {
+        logger.info(
+          `Deleted ${asyncJobsDeleted} old async jobs (retention: ${JOB_RETENTION_HOURS}h)`
+        )
+      }
+    } catch (error) {
+      logger.error('Failed to delete old async jobs:', {
+        error: error instanceof Error ? error.message : String(error),
+      })
+    }
+
     return NextResponse.json({
       success: true,
+      executions: {
         found: staleExecutions.length,
         cleaned,
         failed,
         thresholdMinutes: STALE_THRESHOLD_MINUTES,
+      },
+      asyncJobs: {
+        staleProcessingMarkedFailed: asyncJobsMarkedFailed,
+        stalePendingMarkedFailed: stalePendingJobsMarkedFailed,
+        oldDeleted: asyncJobsDeleted,
+        staleThresholdMinutes: STALE_THRESHOLD_MINUTES,
+        retentionHours: JOB_RETENTION_HOURS,
+      },
     })
   } catch (error) {
     logger.error('Error in stale execution cleanup job:', error)
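For reference, the new stale threshold scales with the execution cap instead of the old fixed 30 minutes. A sketch of the arithmetic, assuming `getMaxExecutionTimeout()` returns the 90-minute async cap in milliseconds (an assumption based on the execution-limits docs above):

```typescript
// Assumption: getMaxExecutionTimeout() === 90 * 60 * 1000 (the documented async cap)
const maxTimeoutMs = 90 * 60 * 1000                   // 5_400_000
const staleThresholdMs = maxTimeoutMs + 5 * 60 * 1000 // 5_700_000: the cap plus 5 minutes of grace
const staleThresholdMinutes = Math.ceil(staleThresholdMs / 60_000) // 95
```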
@@ -6,7 +6,11 @@ import { createLogger } from '@sim/logger'
 import binaryExtensionsList from 'binary-extensions'
 import { type NextRequest, NextResponse } from 'next/server'
 import { checkHybridAuth } from '@/lib/auth/hybrid'
-import { secureFetchWithPinnedIP, validateUrlWithDNS } from '@/lib/core/security/input-validation'
+import {
+  secureFetchWithPinnedIP,
+  validateUrlWithDNS,
+} from '@/lib/core/security/input-validation.server'
+import { sanitizeUrlForLog } from '@/lib/core/utils/logging'
 import { isSupportedFileType, parseFile } from '@/lib/file-parsers'
 import { isUsingCloudStorage, type StorageContext, StorageService } from '@/lib/uploads'
 import { uploadExecutionFile } from '@/lib/uploads/contexts/execution'

@@ -19,6 +23,7 @@ import {

   getMimeTypeFromExtension,
   getViewerUrl,
   inferContextFromKey,
+  isInternalFileUrl,
 } from '@/lib/uploads/utils/file-utils'
 import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
 import { verifyFileAccess } from '@/app/api/files/authorization'

@@ -215,7 +220,7 @@ async function parseFileSingle(

     }
   }

-  if (filePath.includes('/api/files/serve/')) {
+  if (isInternalFileUrl(filePath)) {
     return handleCloudFile(filePath, fileType, undefined, userId, executionContext)
   }

@@ -246,7 +251,7 @@ function validateFilePath(filePath: string): { isValid: boolean; error?: string

     return { isValid: false, error: 'Invalid path: tilde character not allowed' }
   }

-  if (filePath.startsWith('/') && !filePath.startsWith('/api/files/serve/')) {
+  if (filePath.startsWith('/') && !isInternalFileUrl(filePath)) {
     return { isValid: false, error: 'Path outside allowed directory' }
   }

@@ -420,7 +425,7 @@ async function handleExternalUrl(

     return parseResult
   } catch (error) {
-    logger.error(`Error handling external URL ${url}:`, error)
+    logger.error(`Error handling external URL ${sanitizeUrlForLog(url)}:`, error)
     return {
       success: false,
       error: `Error fetching URL: ${(error as Error).message}`,
@@ -1,7 +1,7 @@
|
|||||||
import { createLogger } from '@sim/logger'
|
import { createLogger } from '@sim/logger'
|
||||||
import { runs } from '@trigger.dev/sdk'
|
|
||||||
import { type NextRequest, NextResponse } from 'next/server'
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||||
|
import { getJobQueue, JOB_STATUS } from '@/lib/core/async-jobs'
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
import { createErrorResponse } from '@/app/api/workflows/utils'
|
import { createErrorResponse } from '@/app/api/workflows/utils'
|
||||||
|
|
||||||
@@ -15,8 +15,6 @@ export async function GET(
|
|||||||
const requestId = generateRequestId()
|
const requestId = generateRequestId()
|
||||||
|
|
||||||
try {
|
try {
|
||||||
Task status endpoint — Trigger.dev `runs.retrieve` replaced with the async job queue:

```diff
-    logger.debug(`[${requestId}] Getting status for task: ${taskId}`)

     const authResult = await checkHybridAuth(request, { requireWorkflowId: false })
     if (!authResult.success || !authResult.userId) {
       logger.warn(`[${requestId}] Unauthorized task status request`)
@@ -25,76 +23,60 @@ export async function GET(

     const authenticatedUserId = authResult.userId

-    const run = await runs.retrieve(taskId)
+    const jobQueue = await getJobQueue()
+    const job = await jobQueue.getJob(taskId)

-    logger.debug(`[${requestId}] Task ${taskId} status: ${run.status}`)
+    if (!job) {
+      return createErrorResponse('Task not found', 404)
+    }

-    const payload = run.payload as any
-    if (payload?.workflowId) {
+    if (job.metadata?.workflowId) {
       const { verifyWorkflowAccess } = await import('@/socket/middleware/permissions')
-      const accessCheck = await verifyWorkflowAccess(authenticatedUserId, payload.workflowId)
+      const accessCheck = await verifyWorkflowAccess(
+        authenticatedUserId,
+        job.metadata.workflowId as string
+      )
       if (!accessCheck.hasAccess) {
-        logger.warn(`[${requestId}] User ${authenticatedUserId} denied access to task ${taskId}`, {
-          workflowId: payload.workflowId,
-        })
+        logger.warn(`[${requestId}] Access denied to workflow ${job.metadata.workflowId}`)
         return createErrorResponse('Access denied', 403)
       }
-      logger.debug(`[${requestId}] User ${authenticatedUserId} has access to task ${taskId}`)
-    } else {
-      if (payload?.userId && payload.userId !== authenticatedUserId) {
-        logger.warn(
-          `[${requestId}] User ${authenticatedUserId} attempted to access task ${taskId} owned by ${payload.userId}`
-        )
+    } else if (job.metadata?.userId && job.metadata.userId !== authenticatedUserId) {
+      logger.warn(`[${requestId}] Access denied to user ${job.metadata.userId}`)
       return createErrorResponse('Access denied', 403)
-      }
-      if (!payload?.userId) {
-        logger.warn(
-          `[${requestId}] Task ${taskId} has no ownership information in payload. Denying access for security.`
-        )
+    } else if (!job.metadata?.userId && !job.metadata?.workflowId) {
+      logger.warn(`[${requestId}] Access denied to job ${taskId}`)
       return createErrorResponse('Access denied', 403)
     }
-    }

-    const statusMap = {
-      QUEUED: 'queued',
-      WAITING_FOR_DEPLOY: 'queued',
-      EXECUTING: 'processing',
-      RESCHEDULED: 'processing',
-      FROZEN: 'processing',
-      COMPLETED: 'completed',
-      CANCELED: 'cancelled',
-      FAILED: 'failed',
-      CRASHED: 'failed',
-      INTERRUPTED: 'failed',
-      SYSTEM_FAILURE: 'failed',
-      EXPIRED: 'failed',
-    } as const
-
-    const mappedStatus = statusMap[run.status as keyof typeof statusMap] || 'unknown'
+    const mappedStatus = job.status === JOB_STATUS.PENDING ? 'queued' : job.status

     const response: any = {
       success: true,
       taskId,
       status: mappedStatus,
       metadata: {
-        startedAt: run.startedAt,
+        startedAt: job.startedAt,
       },
     }

-    if (mappedStatus === 'completed') {
-      response.output = run.output // This contains the workflow execution results
-      response.metadata.completedAt = run.finishedAt
-      response.metadata.duration = run.durationMs
+    if (job.status === JOB_STATUS.COMPLETED) {
+      response.output = job.output
+      response.metadata.completedAt = job.completedAt
+      if (job.startedAt && job.completedAt) {
+        response.metadata.duration = job.completedAt.getTime() - job.startedAt.getTime()
+      }
     }

-    if (mappedStatus === 'failed') {
-      response.error = run.error
-      response.metadata.completedAt = run.finishedAt
-      response.metadata.duration = run.durationMs
+    if (job.status === JOB_STATUS.FAILED) {
+      response.error = job.error
+      response.metadata.completedAt = job.completedAt
+      if (job.startedAt && job.completedAt) {
+        response.metadata.duration = job.completedAt.getTime() - job.startedAt.getTime()
+      }
     }

-    if (mappedStatus === 'processing' || mappedStatus === 'queued') {
-      response.estimatedDuration = 180000 // 3 minutes max from our config
+    if (job.status === JOB_STATUS.PROCESSING || job.status === JOB_STATUS.PENDING) {
+      response.estimatedDuration = 180000
     }

     return NextResponse.json(response)
```
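For downstream consumers the visible contract is unchanged: poll until `status` leaves `queued`/`processing`. A minimal polling sketch; the status URL is a placeholder, since the route's path is not shown in this diff:

```typescript
type TaskStatusResponse = {
  success: boolean
  taskId: string
  status: string // 'queued' | 'processing' | 'completed' | 'failed' | ...
  output?: unknown
  error?: string
}

// Poll the status endpoint until the task reaches a terminal state.
async function pollTaskStatus(
  statusUrl: string,
  intervalMs = 2000,
  maxAttempts = 90
): Promise<TaskStatusResponse> {
  for (let attempt = 0; attempt < maxAttempts; attempt++) {
    const response = await fetch(statusUrl)
    if (!response.ok) throw new Error(`Status check failed: ${response.status}`)
    const result = (await response.json()) as TaskStatusResponse
    if (result.status === 'completed' || result.status === 'failed') return result
    await new Promise((resolve) => setTimeout(resolve, intervalMs))
  }
  throw new Error('Task did not finish within the polling window')
}
```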
Workflow MCP serve route (`WorkflowMcpServeAPI`) — hard-coded execution timeout replaced with a configured limit:

```diff
@@ -21,6 +21,7 @@ import { and, eq } from 'drizzle-orm'
 import { type NextRequest, NextResponse } from 'next/server'
 import { checkHybridAuth } from '@/lib/auth/hybrid'
 import { generateInternalToken } from '@/lib/auth/internal'
+import { getMaxExecutionTimeout } from '@/lib/core/execution-limits'
 import { getBaseUrl } from '@/lib/core/utils/urls'

 const logger = createLogger('WorkflowMcpServeAPI')
@@ -264,7 +265,7 @@ async function handleToolsCall(
       method: 'POST',
       headers,
       body: JSON.stringify({ input: params.arguments || {}, triggerType: 'mcp' }),
-      signal: AbortSignal.timeout(600000), // 10 minute timeout
+      signal: AbortSignal.timeout(getMaxExecutionTimeout()),
     })

     const executeResult = await response.json()
```
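The 10-minute abort is now driven by `getMaxExecutionTimeout()`. For reference, a minimal sketch of the same abort pattern in isolation (the URL is a placeholder):

```typescript
// Abort a fetch once a configurable deadline passes.
async function callWithDeadline(url: string, timeoutMs: number): Promise<unknown> {
  try {
    const response = await fetch(url, {
      method: 'POST',
      signal: AbortSignal.timeout(timeoutMs), // rejects the fetch when the timer fires
    })
    return await response.json()
  } catch (error) {
    // In Node 18+ and modern browsers the rejection is a 'TimeoutError' DOMException.
    if (error instanceof Error && error.name === 'TimeoutError') {
      throw new Error(`Request exceeded ${timeoutMs}ms`)
    }
    throw error
  }
}
```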
MCP tool execution route — plan-based timeout replaces the fixed `MCP_CONSTANTS.EXECUTION_TIMEOUT`:

```diff
@@ -1,5 +1,8 @@
 import { createLogger } from '@sim/logger'
 import type { NextRequest } from 'next/server'
+import { getHighestPrioritySubscription } from '@/lib/billing/core/plan'
+import { getExecutionTimeout } from '@/lib/core/execution-limits'
+import type { SubscriptionPlan } from '@/lib/core/rate-limiter/types'
 import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware'
 import { mcpService } from '@/lib/mcp/service'
 import type { McpTool, McpToolCall, McpToolResult } from '@/lib/mcp/types'
@@ -7,7 +10,6 @@ import {
   categorizeError,
   createMcpErrorResponse,
   createMcpSuccessResponse,
-  MCP_CONSTANTS,
   validateStringParam,
 } from '@/lib/mcp/utils'

@@ -171,13 +173,16 @@ export const POST = withMcpAuth('read')(
       arguments: args,
     }

+    const userSubscription = await getHighestPrioritySubscription(userId)
+    const executionTimeout = getExecutionTimeout(
+      userSubscription?.plan as SubscriptionPlan | undefined,
+      'sync'
+    )

     const result = await Promise.race([
       mcpService.executeTool(userId, serverId, toolCall, workspaceId),
       new Promise<never>((_, reject) =>
-        setTimeout(
-          () => reject(new Error('Tool execution timeout')),
-          MCP_CONSTANTS.EXECUTION_TIMEOUT
-        )
+        setTimeout(() => reject(new Error('Tool execution timeout')), executionTimeout)
       ),
     ])
```
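One caveat of the `Promise.race` pattern above is that the losing timer keeps running after the tool finishes. A generic sketch that clears it on either outcome (the helper name and shape are mine, not the repo's):

```typescript
// Race a promise against a deadline, clearing the timer once either side settles.
async function withTimeout<T>(work: Promise<T>, ms: number, label = 'operation'): Promise<T> {
  let timer: ReturnType<typeof setTimeout> | undefined
  const deadline = new Promise<never>((_, reject) => {
    timer = setTimeout(() => reject(new Error(`${label} timed out after ${ms}ms`)), ms)
  })
  try {
    return await Promise.race([work, deadline])
  } finally {
    clearTimeout(timer) // avoid keeping the event loop alive after the work settles
  }
}
```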
Schedule cron route — Trigger.dev tasks replaced with the job queue plus optional inline execution; the old Trigger.dev/direct-execution branch collapses into one path:

```diff
@@ -1,10 +1,9 @@
 import { db, workflowDeploymentVersion, workflowSchedule } from '@sim/db'
 import { createLogger } from '@sim/logger'
-import { tasks } from '@trigger.dev/sdk'
 import { and, eq, isNull, lt, lte, not, or, sql } from 'drizzle-orm'
 import { type NextRequest, NextResponse } from 'next/server'
 import { verifyCronAuth } from '@/lib/auth/internal'
-import { isTriggerDevEnabled } from '@/lib/core/config/feature-flags'
+import { getJobQueue, shouldExecuteInline } from '@/lib/core/async-jobs'
 import { generateRequestId } from '@/lib/core/utils/request'
 import { executeScheduleJob } from '@/background/schedule-execution'

@@ -55,72 +54,67 @@ export async function GET(request: NextRequest) {
     logger.debug(`[${requestId}] Successfully queried schedules: ${dueSchedules.length} found`)
     logger.info(`[${requestId}] Processing ${dueSchedules.length} due scheduled workflows`)

-    if (isTriggerDevEnabled) {
-      const triggerPromises = dueSchedules.map(async (schedule) => {
+    const jobQueue = await getJobQueue()
+    const queuePromises = dueSchedules.map(async (schedule) => {
       const queueTime = schedule.lastQueuedAt ?? queuedAt

+      const payload = {
+        scheduleId: schedule.id,
+        workflowId: schedule.workflowId,
+        blockId: schedule.blockId || undefined,
+        cronExpression: schedule.cronExpression || undefined,
+        lastRanAt: schedule.lastRanAt?.toISOString(),
+        failedCount: schedule.failedCount || 0,
+        now: queueTime.toISOString(),
+        scheduledFor: schedule.nextRunAt?.toISOString(),
+      }

       try {
-        const payload = {
-          scheduleId: schedule.id,
-          workflowId: schedule.workflowId,
-          blockId: schedule.blockId || undefined,
-          cronExpression: schedule.cronExpression || undefined,
-          lastRanAt: schedule.lastRanAt?.toISOString(),
-          failedCount: schedule.failedCount || 0,
-          now: queueTime.toISOString(),
-          scheduledFor: schedule.nextRunAt?.toISOString(),
-        }
-
-        const handle = await tasks.trigger('schedule-execution', payload)
+        const jobId = await jobQueue.enqueue('schedule-execution', payload, {
+          metadata: { workflowId: schedule.workflowId },
+        })
         logger.info(
-          `[${requestId}] Queued schedule execution task ${handle.id} for workflow ${schedule.workflowId}`
+          `[${requestId}] Queued schedule execution task ${jobId} for workflow ${schedule.workflowId}`
         )
-        return handle
+
+        if (shouldExecuteInline()) {
+          void (async () => {
+            try {
+              await jobQueue.startJob(jobId)
+              const output = await executeScheduleJob(payload)
+              await jobQueue.completeJob(jobId, output)
+            } catch (error) {
+              const errorMessage = error instanceof Error ? error.message : String(error)
+              logger.error(
+                `[${requestId}] Schedule execution failed for workflow ${schedule.workflowId}`,
+                { jobId, error: errorMessage }
+              )
+              try {
+                await jobQueue.markJobFailed(jobId, errorMessage)
+              } catch (markFailedError) {
+                logger.error(`[${requestId}] Failed to mark job as failed`, {
+                  jobId,
+                  error:
+                    markFailedError instanceof Error
+                      ? markFailedError.message
+                      : String(markFailedError),
+                })
+              }
+            }
+          })()
+        }
       } catch (error) {
         logger.error(
-          `[${requestId}] Failed to trigger schedule execution for workflow ${schedule.workflowId}`,
+          `[${requestId}] Failed to queue schedule execution for workflow ${schedule.workflowId}`,
           error
         )
-        return null
       }
     })

-    await Promise.allSettled(triggerPromises)
+    await Promise.allSettled(queuePromises)

-    logger.info(`[${requestId}] Queued ${dueSchedules.length} schedule executions to Trigger.dev`)
-    } else {
-      const directExecutionPromises = dueSchedules.map(async (schedule) => {
-        const queueTime = schedule.lastQueuedAt ?? queuedAt
-
-        const payload = {
-          scheduleId: schedule.id,
-          workflowId: schedule.workflowId,
-          blockId: schedule.blockId || undefined,
-          cronExpression: schedule.cronExpression || undefined,
-          lastRanAt: schedule.lastRanAt?.toISOString(),
-          failedCount: schedule.failedCount || 0,
-          now: queueTime.toISOString(),
-          scheduledFor: schedule.nextRunAt?.toISOString(),
-        }
-
-        void executeScheduleJob(payload).catch((error) => {
-          logger.error(
-            `[${requestId}] Direct schedule execution failed for workflow ${schedule.workflowId}`,
-            error
-          )
-        })
-
-        logger.info(
-          `[${requestId}] Queued direct schedule execution for workflow ${schedule.workflowId} (Trigger.dev disabled)`
-        )
-      })
-
-      await Promise.allSettled(directExecutionPromises)
-
-      logger.info(
-        `[${requestId}] Queued ${dueSchedules.length} direct schedule executions (Trigger.dev disabled)`
-      )
-    }
+    logger.info(`[${requestId}] Queued ${dueSchedules.length} schedule executions`)

     return NextResponse.json({
       message: 'Scheduled workflow executions processed',
```
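Condensed, the new flow is: build one payload, enqueue it, then optionally run it inline against the same job record. A self-contained sketch with an assumed minimal job-queue interface mirroring the calls in the diff above:

```typescript
interface JobQueue {
  enqueue(
    kind: string,
    payload: unknown,
    opts?: { metadata?: Record<string, unknown> }
  ): Promise<string>
  startJob(jobId: string): Promise<void>
  completeJob(jobId: string, output: unknown): Promise<void>
  markJobFailed(jobId: string, reason: string): Promise<void>
}

// Enqueue a job, then execute it inline when no external worker is available.
async function enqueueAndMaybeRun(
  queue: JobQueue,
  payload: { workflowId: string },
  run: (p: { workflowId: string }) => Promise<unknown>,
  inline: boolean
): Promise<string> {
  const jobId = await queue.enqueue('schedule-execution', payload, {
    metadata: { workflowId: payload.workflowId },
  })
  if (inline) {
    // Fire-and-forget: the cron response does not wait for the execution itself.
    void (async () => {
      try {
        await queue.startJob(jobId)
        await queue.completeJob(jobId, await run(payload))
      } catch (error) {
        await queue.markJobFailed(jobId, error instanceof Error ? error.message : String(error))
      }
    })()
  }
  return jobId
}
```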
Template approve route — deleted (101 lines):

```diff
@@ -1,101 +0,0 @@
-import { db } from '@sim/db'
-import { templates } from '@sim/db/schema'
-import { createLogger } from '@sim/logger'
-import { eq } from 'drizzle-orm'
-import { type NextRequest, NextResponse } from 'next/server'
-import { getSession } from '@/lib/auth'
-import { generateRequestId } from '@/lib/core/utils/request'
-import { verifyEffectiveSuperUser } from '@/lib/templates/permissions'
-
-const logger = createLogger('TemplateApprovalAPI')
-
-export const revalidate = 0
-
-/**
- * POST /api/templates/[id]/approve - Approve a template (super users only)
- */
-export async function POST(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
-  const requestId = generateRequestId()
-  const { id } = await params
-
-  try {
-    const session = await getSession()
-    if (!session?.user?.id) {
-      logger.warn(`[${requestId}] Unauthorized template approval attempt for ID: ${id}`)
-      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
-    }
-
-    const { effectiveSuperUser } = await verifyEffectiveSuperUser(session.user.id)
-    if (!effectiveSuperUser) {
-      logger.warn(`[${requestId}] Non-super user attempted to approve template: ${id}`)
-      return NextResponse.json({ error: 'Only super users can approve templates' }, { status: 403 })
-    }
-
-    const existingTemplate = await db.select().from(templates).where(eq(templates.id, id)).limit(1)
-    if (existingTemplate.length === 0) {
-      logger.warn(`[${requestId}] Template not found for approval: ${id}`)
-      return NextResponse.json({ error: 'Template not found' }, { status: 404 })
-    }
-
-    await db
-      .update(templates)
-      .set({ status: 'approved', updatedAt: new Date() })
-      .where(eq(templates.id, id))
-
-    logger.info(`[${requestId}] Template approved: ${id} by super user: ${session.user.id}`)
-
-    return NextResponse.json({
-      message: 'Template approved successfully',
-      templateId: id,
-    })
-  } catch (error) {
-    logger.error(`[${requestId}] Error approving template ${id}`, error)
-    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
-  }
-}
-
-/**
- * DELETE /api/templates/[id]/approve - Unapprove a template (super users only)
- */
-export async function DELETE(
-  _request: NextRequest,
-  { params }: { params: Promise<{ id: string }> }
-) {
-  const requestId = generateRequestId()
-  const { id } = await params
-
-  try {
-    const session = await getSession()
-    if (!session?.user?.id) {
-      logger.warn(`[${requestId}] Unauthorized template rejection attempt for ID: ${id}`)
-      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
-    }
-
-    const { effectiveSuperUser } = await verifyEffectiveSuperUser(session.user.id)
-    if (!effectiveSuperUser) {
-      logger.warn(`[${requestId}] Non-super user attempted to reject template: ${id}`)
-      return NextResponse.json({ error: 'Only super users can reject templates' }, { status: 403 })
-    }
-
-    const existingTemplate = await db.select().from(templates).where(eq(templates.id, id)).limit(1)
-    if (existingTemplate.length === 0) {
-      logger.warn(`[${requestId}] Template not found for rejection: ${id}`)
-      return NextResponse.json({ error: 'Template not found' }, { status: 404 })
-    }
-
-    await db
-      .update(templates)
-      .set({ status: 'rejected', updatedAt: new Date() })
-      .where(eq(templates.id, id))
-
-    logger.info(`[${requestId}] Template rejected: ${id} by super user: ${session.user.id}`)
-
-    return NextResponse.json({
-      message: 'Template rejected successfully',
-      templateId: id,
-    })
-  } catch (error) {
-    logger.error(`[${requestId}] Error rejecting template ${id}`, error)
-    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
-  }
-}
```
Template reject route — deleted (55 lines):

```diff
@@ -1,55 +0,0 @@
-import { db } from '@sim/db'
-import { templates } from '@sim/db/schema'
-import { createLogger } from '@sim/logger'
-import { eq } from 'drizzle-orm'
-import { type NextRequest, NextResponse } from 'next/server'
-import { getSession } from '@/lib/auth'
-import { generateRequestId } from '@/lib/core/utils/request'
-import { verifyEffectiveSuperUser } from '@/lib/templates/permissions'
-
-const logger = createLogger('TemplateRejectionAPI')
-
-export const revalidate = 0
-
-/**
- * POST /api/templates/[id]/reject - Reject a template (super users only)
- */
-export async function POST(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
-  const requestId = generateRequestId()
-  const { id } = await params
-
-  try {
-    const session = await getSession()
-    if (!session?.user?.id) {
-      logger.warn(`[${requestId}] Unauthorized template rejection attempt for ID: ${id}`)
-      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
-    }
-
-    const { effectiveSuperUser } = await verifyEffectiveSuperUser(session.user.id)
-    if (!effectiveSuperUser) {
-      logger.warn(`[${requestId}] Non-super user attempted to reject template: ${id}`)
-      return NextResponse.json({ error: 'Only super users can reject templates' }, { status: 403 })
-    }
-
-    const existingTemplate = await db.select().from(templates).where(eq(templates.id, id)).limit(1)
-    if (existingTemplate.length === 0) {
-      logger.warn(`[${requestId}] Template not found for rejection: ${id}`)
-      return NextResponse.json({ error: 'Template not found' }, { status: 404 })
-    }
-
-    await db
-      .update(templates)
-      .set({ status: 'rejected', updatedAt: new Date() })
-      .where(eq(templates.id, id))
-
-    logger.info(`[${requestId}] Template rejected: ${id} by super user: ${session.user.id}`)
-
-    return NextResponse.json({
-      message: 'Template rejected successfully',
-      templateId: id,
-    })
-  } catch (error) {
-    logger.error(`[${requestId}] Error rejecting template ${id}`, error)
-    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
-  }
-}
```
Template update route (`PUT /api/templates/[id]`) — absorbs status changes from the deleted approve/reject routes:

```diff
@@ -106,6 +106,7 @@ const updateTemplateSchema = z.object({
   creatorId: z.string().optional(), // Creator profile ID
   tags: z.array(z.string()).max(10, 'Maximum 10 tags allowed').optional(),
   updateState: z.boolean().optional(), // Explicitly request state update from current workflow
+  status: z.enum(['approved', 'rejected', 'pending']).optional(), // Status change (super users only)
 })

 // PUT /api/templates/[id] - Update a template
@@ -131,7 +132,7 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
       )
     }

-    const { name, details, creatorId, tags, updateState } = validationResult.data
+    const { name, details, creatorId, tags, updateState, status } = validationResult.data

     const existingTemplate = await db.select().from(templates).where(eq(templates.id, id)).limit(1)

@@ -142,6 +143,28 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{

     const template = existingTemplate[0]

+    // Status changes require super user permission
+    if (status !== undefined) {
+      const { verifyEffectiveSuperUser } = await import('@/lib/templates/permissions')
+      const { effectiveSuperUser } = await verifyEffectiveSuperUser(session.user.id)
+      if (!effectiveSuperUser) {
+        logger.warn(`[${requestId}] Non-super user attempted to change template status: ${id}`)
+        return NextResponse.json(
+          { error: 'Only super users can change template status' },
+          { status: 403 }
+        )
+      }
+    }
+
+    // For non-status updates, verify creator permission
+    const hasNonStatusUpdates =
+      name !== undefined ||
+      details !== undefined ||
+      creatorId !== undefined ||
+      tags !== undefined ||
+      updateState
+
+    if (hasNonStatusUpdates) {
     if (!template.creatorId) {
       logger.warn(`[${requestId}] Template ${id} has no creator, denying update`)
       return NextResponse.json({ error: 'Access denied' }, { status: 403 })
@@ -158,6 +181,7 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
       logger.warn(`[${requestId}] User denied permission to update template ${id}`)
       return NextResponse.json({ error: permissionError || 'Access denied' }, { status: 403 })
     }
+    }

     const updateData: any = {
       updatedAt: new Date(),
@@ -167,6 +191,7 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
     if (details !== undefined) updateData.details = details
     if (tags !== undefined) updateData.tags = tags
     if (creatorId !== undefined) updateData.creatorId = creatorId
+    if (status !== undefined) updateData.status = status

     if (updateState && template.workflowId) {
       const { verifyWorkflowAccess } = await import('@/socket/middleware/permissions')
```
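With the dedicated approve/reject routes gone, a super-user client would now change status through the general update endpoint. A minimal sketch (relative URL and session-cookie auth are assumptions):

```typescript
// Approve a template via the consolidated PUT endpoint (super users only).
async function approveTemplate(templateId: string): Promise<void> {
  const response = await fetch(`/api/templates/${templateId}`, {
    method: 'PUT',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ status: 'approved' }), // 'approved' | 'rejected' | 'pending'
  })
  if (!response.ok) {
    const { error } = await response.json()
    throw new Error(error ?? `Status change failed (${response.status})`)
  }
}
```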
A2A message route — file URLs are now DNS-validated before use:

```diff
@@ -4,6 +4,7 @@ import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
 import { createA2AClient, extractTextContent, isTerminalState } from '@/lib/a2a/utils'
 import { checkHybridAuth } from '@/lib/auth/hybrid'
+import { validateUrlWithDNS } from '@/lib/core/security/input-validation.server'
 import { generateRequestId } from '@/lib/core/utils/request'

 export const dynamic = 'force-dynamic'
@@ -95,6 +96,14 @@ export async function POST(request: NextRequest) {
     if (validatedData.files && validatedData.files.length > 0) {
       for (const file of validatedData.files) {
         if (file.type === 'url') {
+          const urlValidation = await validateUrlWithDNS(file.data, 'fileUrl')
+          if (!urlValidation.isValid) {
+            return NextResponse.json(
+              { success: false, error: urlValidation.error },
+              { status: 400 }
+            )
+          }
+
           const filePart: FilePart = {
             kind: 'file',
             file: {
```
A2A push notification route — webhook URL validation upgraded from a static check to DNS resolution:

```diff
@@ -3,7 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
 import { createA2AClient } from '@/lib/a2a/utils'
 import { checkHybridAuth } from '@/lib/auth/hybrid'
-import { validateExternalUrl } from '@/lib/core/security/input-validation'
+import { validateUrlWithDNS } from '@/lib/core/security/input-validation.server'
 import { generateRequestId } from '@/lib/core/utils/request'

 export const dynamic = 'force-dynamic'
@@ -40,7 +40,7 @@ export async function POST(request: NextRequest) {
     const body = await request.json()
     const validatedData = A2ASetPushNotificationSchema.parse(body)

-    const urlValidation = validateExternalUrl(validatedData.webhookUrl, 'Webhook URL')
+    const urlValidation = await validateUrlWithDNS(validatedData.webhookUrl, 'Webhook URL')
     if (!urlValidation.isValid) {
       logger.warn(`[${requestId}] Invalid webhook URL`, { error: urlValidation.error })
       return NextResponse.json(
```
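Resolving the hostname before accepting a URL closes the gap where a public-looking name points at an internal address. An illustrative validator along those lines (a sketch of the idea, not the repo's `validateUrlWithDNS`):

```typescript
import { lookup } from 'node:dns/promises'
import { isIP } from 'node:net'

// Reject URLs whose host resolves to loopback/private/link-local space (SSRF guard).
async function validateUrlAgainstPrivateRanges(
  rawUrl: string
): Promise<{ isValid: boolean; error?: string }> {
  let url: URL
  try {
    url = new URL(rawUrl)
  } catch {
    return { isValid: false, error: 'Malformed URL' }
  }
  if (url.protocol !== 'https:' && url.protocol !== 'http:') {
    return { isValid: false, error: 'Only HTTP(S) URLs are allowed' }
  }
  // Resolve the hostname unless it is already a literal IP.
  const address = isIP(url.hostname) ? url.hostname : (await lookup(url.hostname)).address
  const privatePatterns = [
    /^127\./, // loopback
    /^10\./, // RFC 1918
    /^192\.168\./, // RFC 1918
    /^172\.(1[6-9]|2\d|3[01])\./, // RFC 1918
    /^169\.254\./, // link-local
    /^::1$/, // IPv6 loopback
    /^f[cd]/i, // IPv6 unique-local
  ]
  if (privatePatterns.some((pattern) => pattern.test(address))) {
    return { isValid: false, error: `Host resolves to a private address (${address})` }
  }
  return { isValid: true }
}
```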
Confluence attachment route — explicit `minorEdit` form field:

```diff
@@ -92,6 +92,9 @@ export async function POST(request: NextRequest) {
       formData.append('comment', comment)
     }

+    // Add minorEdit field as required by Confluence API
+    formData.append('minorEdit', 'false')
+
     const response = await fetch(url, {
       method: 'POST',
       headers: {
```
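For context, the multipart body this route assembles ends up shaped roughly like the sketch below (the `file`/`comment`/`minorEdit` field names follow Confluence's attachment API; the blob contents are placeholders):

```typescript
// Assemble a Confluence attachment upload body with an explicit minorEdit flag.
function buildAttachmentForm(fileBytes: Uint8Array, fileName: string, comment?: string): FormData {
  const formData = new FormData()
  formData.append('file', new Blob([fileBytes]), fileName)
  if (comment) {
    formData.append('comment', comment)
  }
  formData.append('minorEdit', 'false') // record the upload as a full (non-minor) edit
  return formData
}
```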
Discord send message route — typed file schema and base64 file output:

```diff
@@ -4,6 +4,7 @@ import { z } from 'zod'
 import { checkInternalAuth } from '@/lib/auth/hybrid'
 import { validateNumericId } from '@/lib/core/security/input-validation'
 import { generateRequestId } from '@/lib/core/utils/request'
+import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
 import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
 import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'

@@ -15,7 +16,7 @@ const DiscordSendMessageSchema = z.object({
   botToken: z.string().min(1, 'Bot token is required'),
   channelId: z.string().min(1, 'Channel ID is required'),
   content: z.string().optional().nullable(),
-  files: z.array(z.any()).optional().nullable(),
+  files: RawFileInputArraySchema.optional().nullable(),
 })

 export async function POST(request: NextRequest) {
@@ -101,6 +102,12 @@ export async function POST(request: NextRequest) {
     logger.info(`[${requestId}] Processing ${validatedData.files.length} file(s)`)

     const userFiles = processFilesToUserFiles(validatedData.files, requestId, logger)
+    const filesOutput: Array<{
+      name: string
+      mimeType: string
+      data: string
+      size: number
+    }> = []

     if (userFiles.length === 0) {
       logger.warn(`[${requestId}] No valid files to upload, falling back to text-only`)
@@ -137,6 +144,12 @@ export async function POST(request: NextRequest) {
       logger.info(`[${requestId}] Downloading file ${i}: ${userFile.name}`)

       const buffer = await downloadFileFromStorage(userFile, requestId, logger)
+      filesOutput.push({
+        name: userFile.name,
+        mimeType: userFile.type || 'application/octet-stream',
+        data: buffer.toString('base64'),
+        size: buffer.length,
+      })

       const blob = new Blob([new Uint8Array(buffer)], { type: userFile.type })
       formData.append(`files[${i}]`, blob, userFile.name)
@@ -173,6 +186,7 @@ export async function POST(request: NextRequest) {
         message: data.content,
         data: data,
         fileCount: userFiles.length,
+        files: filesOutput,
       },
     })
   } catch (error) {
```
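Downstream consumers of this response now get each attachment back as base64. A small sketch of turning those entries back into binary (the `files` shape matches the `filesOutput` type above):

```typescript
interface DiscordFileOutput {
  name: string
  mimeType: string
  data: string // base64-encoded bytes
  size: number
}

// Decode the base64 payloads from the Discord route's response back into Buffers.
function decodeFileOutputs(
  files: DiscordFileOutput[]
): Array<{ name: string; mimeType: string; bytes: Buffer }> {
  return files.map((file) => ({
    name: file.name,
    mimeType: file.mimeType,
    bytes: Buffer.from(file.data, 'base64'),
  }))
}
```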
apps/sim/app/api/tools/dropbox/upload/route.ts — new file (132 lines):

```diff
@@ -0,0 +1,132 @@
+import { createLogger } from '@sim/logger'
+import { type NextRequest, NextResponse } from 'next/server'
+import { z } from 'zod'
+import { checkInternalAuth } from '@/lib/auth/hybrid'
+import { generateRequestId } from '@/lib/core/utils/request'
+import { httpHeaderSafeJson } from '@/lib/core/utils/validation'
+import { FileInputSchema } from '@/lib/uploads/utils/file-schemas'
+import { processFilesToUserFiles, type RawFileInput } from '@/lib/uploads/utils/file-utils'
+import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
+
+export const dynamic = 'force-dynamic'
+
+const logger = createLogger('DropboxUploadAPI')
+
+const DropboxUploadSchema = z.object({
+  accessToken: z.string().min(1, 'Access token is required'),
+  path: z.string().min(1, 'Destination path is required'),
+  file: FileInputSchema.optional().nullable(),
+  // Legacy field for backwards compatibility
+  fileContent: z.string().optional().nullable(),
+  fileName: z.string().optional().nullable(),
+  mode: z.enum(['add', 'overwrite']).optional().nullable(),
+  autorename: z.boolean().optional().nullable(),
+  mute: z.boolean().optional().nullable(),
+})
+
+export async function POST(request: NextRequest) {
+  const requestId = generateRequestId()
+
+  try {
+    const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
+
+    if (!authResult.success) {
+      logger.warn(`[${requestId}] Unauthorized Dropbox upload attempt: ${authResult.error}`)
+      return NextResponse.json(
+        { success: false, error: authResult.error || 'Authentication required' },
+        { status: 401 }
+      )
+    }
+
+    logger.info(`[${requestId}] Authenticated Dropbox upload request via ${authResult.authType}`)
+
+    const body = await request.json()
+    const validatedData = DropboxUploadSchema.parse(body)
+
+    let fileBuffer: Buffer
+    let fileName: string
+
+    // Prefer UserFile input, fall back to legacy base64 string
+    if (validatedData.file) {
+      // Process UserFile input
+      const userFiles = processFilesToUserFiles(
+        [validatedData.file as RawFileInput],
+        requestId,
+        logger
+      )
+
+      if (userFiles.length === 0) {
+        return NextResponse.json({ success: false, error: 'Invalid file input' }, { status: 400 })
+      }
+
+      const userFile = userFiles[0]
+      logger.info(`[${requestId}] Downloading file: ${userFile.name} (${userFile.size} bytes)`)
+
+      fileBuffer = await downloadFileFromStorage(userFile, requestId, logger)
+      fileName = userFile.name
+    } else if (validatedData.fileContent) {
+      // Legacy: base64 string input (backwards compatibility)
+      logger.info(`[${requestId}] Using legacy base64 content input`)
+      fileBuffer = Buffer.from(validatedData.fileContent, 'base64')
+      fileName = validatedData.fileName || 'file'
+    } else {
+      return NextResponse.json({ success: false, error: 'File is required' }, { status: 400 })
+    }
+
+    // Determine final path
+    let finalPath = validatedData.path
+    if (finalPath.endsWith('/')) {
+      finalPath = `${finalPath}${fileName}`
+    }
+
+    logger.info(`[${requestId}] Uploading to Dropbox: ${finalPath} (${fileBuffer.length} bytes)`)
+
+    const dropboxApiArg = {
+      path: finalPath,
+      mode: validatedData.mode || 'add',
+      autorename: validatedData.autorename ?? true,
+      mute: validatedData.mute ?? false,
+    }
+
+    const response = await fetch('https://content.dropboxapi.com/2/files/upload', {
+      method: 'POST',
+      headers: {
+        Authorization: `Bearer ${validatedData.accessToken}`,
+        'Content-Type': 'application/octet-stream',
+        'Dropbox-API-Arg': httpHeaderSafeJson(dropboxApiArg),
+      },
+      body: new Uint8Array(fileBuffer),
+    })
+
+    const data = await response.json()
+
+    if (!response.ok) {
+      const errorMessage = data.error_summary || data.error?.message || 'Failed to upload file'
+      logger.error(`[${requestId}] Dropbox API error:`, { status: response.status, data })
+      return NextResponse.json({ success: false, error: errorMessage }, { status: response.status })
+    }
+
+    logger.info(`[${requestId}] File uploaded successfully to ${data.path_display}`)
+
+    return NextResponse.json({
+      success: true,
+      output: {
+        file: data,
+      },
+    })
+  } catch (error) {
+    if (error instanceof z.ZodError) {
+      logger.warn(`[${requestId}] Validation error:`, error.errors)
+      return NextResponse.json(
+        { success: false, error: error.errors[0]?.message || 'Validation failed' },
+        { status: 400 }
+      )
+    }
+
+    logger.error(`[${requestId}] Unexpected error:`, error)
+    return NextResponse.json(
+      { success: false, error: error instanceof Error ? error.message : 'Unknown error' },
+      { status: 500 }
+    )
+  }
+}
```
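`Dropbox-API-Arg` must be a single HTTP-header-safe line of JSON, which is why the route runs it through `httpHeaderSafeJson`. An illustrative version of what such a helper typically does (a sketch of the idea, not the repo's implementation):

```typescript
// Serialize a value to JSON that is safe to place in an HTTP header:
// Dropbox requires non-ASCII characters to be \uXXXX-escaped.
function headerSafeJson(value: unknown): string {
  return JSON.stringify(value).replace(
    /[\u007f-\uffff]/g,
    (char) => `\\u${char.charCodeAt(0).toString(16).padStart(4, '0')}`
  )
}

// Example: a path with non-ASCII characters stays header-safe.
// headerSafeJson({ path: '/reports/übersicht.pdf', mode: 'add' })
// => '{"path":"/reports/\\u00fcbersicht.pdf","mode":"add"}'
```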
apps/sim/app/api/tools/github/latest-commit/route.ts — new file (195 lines):

```diff
@@ -0,0 +1,195 @@
+import { createLogger } from '@sim/logger'
+import { type NextRequest, NextResponse } from 'next/server'
+import { z } from 'zod'
+import { checkInternalAuth } from '@/lib/auth/hybrid'
+import {
+  secureFetchWithPinnedIP,
+  validateUrlWithDNS,
+} from '@/lib/core/security/input-validation.server'
+import { generateRequestId } from '@/lib/core/utils/request'
+
+export const dynamic = 'force-dynamic'
+
+const logger = createLogger('GitHubLatestCommitAPI')
+
+interface GitHubErrorResponse {
+  message?: string
+}
+
+interface GitHubCommitResponse {
+  sha: string
+  html_url: string
+  commit: {
+    message: string
+    author: { name: string; email: string; date: string }
+    committer: { name: string; email: string; date: string }
+  }
+  author?: { login: string; avatar_url: string; html_url: string }
+  committer?: { login: string; avatar_url: string; html_url: string }
+  stats?: { additions: number; deletions: number; total: number }
+  files?: Array<{
+    filename: string
+    status: string
+    additions: number
+    deletions: number
+    changes: number
+    patch?: string
+    raw_url?: string
+    blob_url?: string
+  }>
+}
+
+const GitHubLatestCommitSchema = z.object({
+  owner: z.string().min(1, 'Owner is required'),
+  repo: z.string().min(1, 'Repo is required'),
+  branch: z.string().optional().nullable(),
+  apiKey: z.string().min(1, 'API key is required'),
+})
+
+export async function POST(request: NextRequest) {
+  const requestId = generateRequestId()
+
+  try {
+    const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
+
+    if (!authResult.success) {
+      logger.warn(`[${requestId}] Unauthorized GitHub latest commit attempt: ${authResult.error}`)
+      return NextResponse.json(
+        {
+          success: false,
+          error: authResult.error || 'Authentication required',
+        },
+        { status: 401 }
+      )
+    }
+
+    const body = await request.json()
+    const validatedData = GitHubLatestCommitSchema.parse(body)
+
+    const { owner, repo, branch, apiKey } = validatedData
+
+    const baseUrl = `https://api.github.com/repos/${owner}/${repo}`
+    const commitUrl = branch ? `${baseUrl}/commits/${branch}` : `${baseUrl}/commits/HEAD`
+
+    logger.info(`[${requestId}] Fetching latest commit from GitHub`, { owner, repo, branch })
+
+    const urlValidation = await validateUrlWithDNS(commitUrl, 'commitUrl')
+    if (!urlValidation.isValid) {
+      return NextResponse.json({ success: false, error: urlValidation.error }, { status: 400 })
+    }
+
+    const response = await secureFetchWithPinnedIP(commitUrl, urlValidation.resolvedIP!, {
+      method: 'GET',
+      headers: {
+        Accept: 'application/vnd.github.v3+json',
+        Authorization: `Bearer ${apiKey}`,
+        'X-GitHub-Api-Version': '2022-11-28',
+      },
+    })
+
+    if (!response.ok) {
+      const errorData = (await response.json().catch(() => ({}))) as GitHubErrorResponse
+      logger.error(`[${requestId}] GitHub API error`, {
+        status: response.status,
+        error: errorData,
+      })
+      return NextResponse.json(
+        { success: false, error: errorData.message || `GitHub API error: ${response.status}` },
+        { status: 400 }
+      )
+    }
+
+    const data = (await response.json()) as GitHubCommitResponse
+
+    const content = `Latest commit: "${data.commit.message}" by ${data.commit.author.name} on ${data.commit.author.date}. SHA: ${data.sha}`
+
+    const files = data.files || []
+    const fileDetailsWithContent = []
+
+    for (const file of files) {
+      const fileDetail: Record<string, any> = {
+        filename: file.filename,
+        additions: file.additions,
+        deletions: file.deletions,
+        changes: file.changes,
+        status: file.status,
+        raw_url: file.raw_url,
+        blob_url: file.blob_url,
+        patch: file.patch,
+        content: undefined,
+      }
+
+      if (file.status !== 'removed' && file.raw_url) {
+        try {
+          const rawUrlValidation = await validateUrlWithDNS(file.raw_url, 'rawUrl')
+          if (rawUrlValidation.isValid) {
+            const contentResponse = await secureFetchWithPinnedIP(
+              file.raw_url,
+              rawUrlValidation.resolvedIP!,
+              {
+                headers: {
+                  Authorization: `Bearer ${apiKey}`,
+                  'X-GitHub-Api-Version': '2022-11-28',
+                },
+              }
+            )
+
+            if (contentResponse.ok) {
+              fileDetail.content = await contentResponse.text()
+            }
+          }
+        } catch (error) {
+          logger.warn(`[${requestId}] Failed to fetch content for ${file.filename}:`, error)
+        }
+      }
+
+      fileDetailsWithContent.push(fileDetail)
+    }
+
+    logger.info(`[${requestId}] Latest commit fetched successfully`, {
+      sha: data.sha,
+      fileCount: files.length,
+    })
+
+    return NextResponse.json({
+      success: true,
+      output: {
+        content,
+        metadata: {
+          sha: data.sha,
+          html_url: data.html_url,
+          commit_message: data.commit.message,
+          author: {
+            name: data.commit.author.name,
+            login: data.author?.login || 'Unknown',
+            avatar_url: data.author?.avatar_url || '',
+            html_url: data.author?.html_url || '',
+          },
+          committer: {
+            name: data.commit.committer.name,
+            login: data.committer?.login || 'Unknown',
+            avatar_url: data.committer?.avatar_url || '',
+            html_url: data.committer?.html_url || '',
+          },
+          stats: data.stats
+            ? {
+                additions: data.stats.additions,
+                deletions: data.stats.deletions,
+                total: data.stats.total,
+              }
+            : undefined,
+          files: fileDetailsWithContent.length > 0 ? fileDetailsWithContent : undefined,
+        },
+      },
+    })
+  } catch (error) {
+    logger.error(`[${requestId}] Error fetching GitHub latest commit:`, error)
+    return NextResponse.json(
+      {
+        success: false,
+        error: error instanceof Error ? error.message : 'Unknown error occurred',
+      },
+      { status: 500 }
+    )
+  }
+}
```
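Given the file path, the route should be reachable at `/api/tools/github/latest-commit`; a minimal caller might look like this (the relative base URL and the auth mechanics behind `checkInternalAuth` are assumptions):

```typescript
// Ask the internal tool route for the latest commit on a branch.
async function fetchLatestCommit(owner: string, repo: string, apiKey: string, branch?: string) {
  const response = await fetch('/api/tools/github/latest-commit', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ owner, repo, branch, apiKey }),
  })
  const result = await response.json()
  if (!result.success) {
    throw new Error(result.error)
  }
  return result.output // { content, metadata: { sha, html_url, ... } }
}
```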
Gmail draft route — attachments schema tightened from `z.any()` to the typed file schema:

```diff
@@ -3,6 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
 import { checkInternalAuth } from '@/lib/auth/hybrid'
 import { generateRequestId } from '@/lib/core/utils/request'
+import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
 import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
 import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
 import {
@@ -28,7 +29,7 @@ const GmailDraftSchema = z.object({
   replyToMessageId: z.string().optional().nullable(),
   cc: z.string().optional().nullable(),
   bcc: z.string().optional().nullable(),
-  attachments: z.array(z.any()).optional().nullable(),
+  attachments: RawFileInputArraySchema.optional().nullable(),
 })

 export async function POST(request: NextRequest) {
```
Gmail send route — same schema change:

```diff
@@ -3,6 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
 import { checkInternalAuth } from '@/lib/auth/hybrid'
 import { generateRequestId } from '@/lib/core/utils/request'
+import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
 import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
 import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
 import {
@@ -28,7 +29,7 @@ const GmailSendSchema = z.object({
   replyToMessageId: z.string().optional().nullable(),
   cc: z.string().optional().nullable(),
   bcc: z.string().optional().nullable(),
-  attachments: z.array(z.any()).optional().nullable(),
+  attachments: RawFileInputArraySchema.optional().nullable(),
 })

 export async function POST(request: NextRequest) {
```
apps/sim/app/api/tools/google_drive/download/route.ts — new file (252 lines):

```diff
@@ -0,0 +1,252 @@
+import { createLogger } from '@sim/logger'
+import { type NextRequest, NextResponse } from 'next/server'
+import { z } from 'zod'
+import { checkInternalAuth } from '@/lib/auth/hybrid'
+import {
+  secureFetchWithPinnedIP,
+  validateUrlWithDNS,
+} from '@/lib/core/security/input-validation.server'
+import { generateRequestId } from '@/lib/core/utils/request'
+import type { GoogleDriveFile, GoogleDriveRevision } from '@/tools/google_drive/types'
+import {
+  ALL_FILE_FIELDS,
+  ALL_REVISION_FIELDS,
+  DEFAULT_EXPORT_FORMATS,
+  GOOGLE_WORKSPACE_MIME_TYPES,
+} from '@/tools/google_drive/utils'
+
+export const dynamic = 'force-dynamic'
+
+const logger = createLogger('GoogleDriveDownloadAPI')
+
+/** Google API error response structure */
+interface GoogleApiErrorResponse {
+  error?: {
+    message?: string
+    code?: number
+    status?: string
+  }
+}
+
+/** Google Drive revisions list response */
+interface GoogleDriveRevisionsResponse {
+  revisions?: GoogleDriveRevision[]
+  nextPageToken?: string
+}
+
+const GoogleDriveDownloadSchema = z.object({
+  accessToken: z.string().min(1, 'Access token is required'),
+  fileId: z.string().min(1, 'File ID is required'),
+  mimeType: z.string().optional().nullable(),
+  fileName: z.string().optional().nullable(),
+  includeRevisions: z.boolean().optional().default(true),
+})
+
+export async function POST(request: NextRequest) {
+  const requestId = generateRequestId()
+
+  try {
+    const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
+
+    if (!authResult.success) {
+      logger.warn(`[${requestId}] Unauthorized Google Drive download attempt: ${authResult.error}`)
+      return NextResponse.json(
+        {
+          success: false,
+          error: authResult.error || 'Authentication required',
+        },
+        { status: 401 }
+      )
+    }
+
+    const body = await request.json()
+    const validatedData = GoogleDriveDownloadSchema.parse(body)
+
+    const {
+      accessToken,
+      fileId,
+      mimeType: exportMimeType,
+      fileName,
+      includeRevisions,
+    } = validatedData
+    const authHeader = `Bearer ${accessToken}`
+
+    logger.info(`[${requestId}] Getting file metadata from Google Drive`, { fileId })
+
+    const metadataUrl = `https://www.googleapis.com/drive/v3/files/${fileId}?fields=${ALL_FILE_FIELDS}&supportsAllDrives=true`
+    const metadataUrlValidation = await validateUrlWithDNS(metadataUrl, 'metadataUrl')
+    if (!metadataUrlValidation.isValid) {
+      return NextResponse.json(
+        { success: false, error: metadataUrlValidation.error },
+        { status: 400 }
+      )
+    }
+
+    const metadataResponse = await secureFetchWithPinnedIP(
+      metadataUrl,
+      metadataUrlValidation.resolvedIP!,
+      {
+        headers: { Authorization: authHeader },
+      }
+    )
+
+    if (!metadataResponse.ok) {
+      const errorDetails = (await metadataResponse
+        .json()
+        .catch(() => ({}))) as GoogleApiErrorResponse
+      logger.error(`[${requestId}] Failed to get file metadata`, {
+        status: metadataResponse.status,
+        error: errorDetails,
+      })
+      return NextResponse.json(
+        { success: false, error: errorDetails.error?.message || 'Failed to get file metadata' },
+        { status: 400 }
+      )
+    }
+
+    const metadata = (await metadataResponse.json()) as GoogleDriveFile
+    const fileMimeType = metadata.mimeType
+
+    let fileBuffer: Buffer
+    let finalMimeType = fileMimeType
+
+    if (GOOGLE_WORKSPACE_MIME_TYPES.includes(fileMimeType)) {
+      const exportFormat = exportMimeType || DEFAULT_EXPORT_FORMATS[fileMimeType] || 'text/plain'
+      finalMimeType = exportFormat
+
+      logger.info(`[${requestId}] Exporting Google Workspace file`, {
+        fileId,
+        mimeType: fileMimeType,
+        exportFormat,
+      })
+
+      const exportUrl = `https://www.googleapis.com/drive/v3/files/${fileId}/export?mimeType=${encodeURIComponent(exportFormat)}&supportsAllDrives=true`
+      const exportUrlValidation = await validateUrlWithDNS(exportUrl, 'exportUrl')
+      if (!exportUrlValidation.isValid) {
+        return NextResponse.json(
+          { success: false, error: exportUrlValidation.error },
+          { status: 400 }
+        )
+      }
+
+      const exportResponse = await secureFetchWithPinnedIP(
+        exportUrl,
+        exportUrlValidation.resolvedIP!,
+        { headers: { Authorization: authHeader } }
+      )
+
+      if (!exportResponse.ok) {
+        const exportError = (await exportResponse
+          .json()
+          .catch(() => ({}))) as GoogleApiErrorResponse
+        logger.error(`[${requestId}] Failed to export file`, {
+          status: exportResponse.status,
+          error: exportError,
+        })
+        return NextResponse.json(
+          {
+            success: false,
+            error: exportError.error?.message || 'Failed to export Google Workspace file',
+          },
+          { status: 400 }
+        )
+      }
+
+      const arrayBuffer = await exportResponse.arrayBuffer()
+      fileBuffer = Buffer.from(arrayBuffer)
+    } else {
+      logger.info(`[${requestId}] Downloading regular file`, { fileId, mimeType: fileMimeType })
+
+      const downloadUrl = `https://www.googleapis.com/drive/v3/files/${fileId}?alt=media&supportsAllDrives=true`
+      const downloadUrlValidation = await validateUrlWithDNS(downloadUrl, 'downloadUrl')
+      if (!downloadUrlValidation.isValid) {
+        return NextResponse.json(
+          { success: false, error: downloadUrlValidation.error },
+          { status: 400 }
+        )
+      }
+
+      const downloadResponse = await secureFetchWithPinnedIP(
+        downloadUrl,
+        downloadUrlValidation.resolvedIP!,
+        { headers: { Authorization: authHeader } }
+      )
+
+      if (!downloadResponse.ok) {
+        const downloadError = (await downloadResponse
+          .json()
+          .catch(() => ({}))) as GoogleApiErrorResponse
+        logger.error(`[${requestId}] Failed to download file`, {
+          status: downloadResponse.status,
+          error: downloadError,
+        })
+        return NextResponse.json(
+          { success: false, error: downloadError.error?.message || 'Failed to download file' },
+          { status: 400 }
+        )
+      }
+
+      const arrayBuffer = await downloadResponse.arrayBuffer()
+      fileBuffer = Buffer.from(arrayBuffer)
+    }
+
+    const canReadRevisions = metadata.capabilities?.canReadRevisions === true
+    if (includeRevisions && canReadRevisions) {
+      try {
+        const revisionsUrl = `https://www.googleapis.com/drive/v3/files/${fileId}/revisions?fields=revisions(${ALL_REVISION_FIELDS})&pageSize=100`
+        const revisionsUrlValidation = await validateUrlWithDNS(revisionsUrl, 'revisionsUrl')
+        if (revisionsUrlValidation.isValid) {
+          const revisionsResponse = await secureFetchWithPinnedIP(
+            revisionsUrl,
+            revisionsUrlValidation.resolvedIP!,
+            { headers: { Authorization: authHeader } }
+          )
+
+          if (revisionsResponse.ok) {
+            const revisionsData = (await revisionsResponse.json()) as GoogleDriveRevisionsResponse
+            metadata.revisions = revisionsData.revisions
+            logger.info(`[${requestId}] Fetched file revisions`, {
+              fileId,
+              revisionCount: metadata.revisions?.length || 0,
+            })
+          }
+        }
+      } catch (error) {
+        logger.warn(`[${requestId}] Error fetching revisions, continuing without them`, { error })
+      }
+    }
+
+    const resolvedName = fileName || metadata.name || 'download'
+
+    logger.info(`[${requestId}] File downloaded successfully`, {
+      fileId,
+      name: resolvedName,
+      size: fileBuffer.length,
+      mimeType: finalMimeType,
+    })
+
+    const base64Data = fileBuffer.toString('base64')
+
+    return NextResponse.json({
+      success: true,
+      output: {
+        file: {
+          name: resolvedName,
+          mimeType: finalMimeType,
+          data: base64Data,
+          size: fileBuffer.length,
+        },
+        metadata,
+      },
+    })
+  } catch (error) {
+    logger.error(`[${requestId}] Error downloading Google Drive file:`, error)
+    return NextResponse.json(
+      {
+        success: false,
+        error: error instanceof Error ? error.message : 'Unknown error occurred',
+      },
+      { status: 500 }
+    )
+  }
+}
```
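The branch point in this route is whether the file is a Google Workspace document (which must be exported to a concrete format) or a regular binary (fetched with `alt=media`). A condensed sketch of that decision, with stand-in constants where the route uses `GOOGLE_WORKSPACE_MIME_TYPES` and `DEFAULT_EXPORT_FORMATS`:

```typescript
// Illustrative stand-ins for the route's imported constants; the real lists live in
// '@/tools/google_drive/utils' and may differ.
const WORKSPACE_MIME_TYPES = [
  'application/vnd.google-apps.document',
  'application/vnd.google-apps.spreadsheet',
  'application/vnd.google-apps.presentation',
]
const EXPORT_FORMATS: Record<string, string> = {
  'application/vnd.google-apps.document': 'application/pdf',
  'application/vnd.google-apps.spreadsheet': 'text/csv',
}

// Pick the Drive v3 URL: export for Workspace docs, direct media download otherwise.
function resolveDriveDownloadUrl(fileId: string, fileMimeType: string, requested?: string): string {
  const base = `https://www.googleapis.com/drive/v3/files/${fileId}`
  if (WORKSPACE_MIME_TYPES.includes(fileMimeType)) {
    const exportFormat = requested || EXPORT_FORMATS[fileMimeType] || 'text/plain'
    return `${base}/export?mimeType=${encodeURIComponent(exportFormat)}&supportsAllDrives=true`
  }
  return `${base}?alt=media&supportsAllDrives=true`
}
```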
@@ -3,6 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
 import { checkInternalAuth } from '@/lib/auth/hybrid'
 import { generateRequestId } from '@/lib/core/utils/request'
+import { RawFileInputSchema } from '@/lib/uploads/utils/file-schemas'
 import { processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
 import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
 import {
@@ -20,7 +21,7 @@ const GOOGLE_DRIVE_API_BASE = 'https://www.googleapis.com/upload/drive/v3/files'
 const GoogleDriveUploadSchema = z.object({
   accessToken: z.string().min(1, 'Access token is required'),
   fileName: z.string().min(1, 'File name is required'),
-  file: z.any().optional().nullable(),
+  file: RawFileInputSchema.optional().nullable(),
   mimeType: z.string().optional().nullable(),
   folderId: z.string().optional().nullable(),
 })
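Swapping `z.any()` for `RawFileInputSchema` rejects malformed file payloads at the request boundary instead of deep inside the handler. A rough sketch of the kind of shape such a schema validates; the real definition lives in `@/lib/uploads/utils/file-schemas` and may differ:

```typescript
import { z } from 'zod'

// Illustrative only — not the actual RawFileInputSchema definition.
// A raw file input typically arrives either as inline base64 or as a
// reference to a previously uploaded object in storage.
const RawFileInputSketch = z.object({
  name: z.string(),
  type: z.string().optional(),
  size: z.number().optional(),
  base64: z.string().optional(), // inline content
  key: z.string().optional(), // storage reference
})
```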
@@ -0,0 +1,131 @@
+import { createLogger } from '@sim/logger'
+import { type NextRequest, NextResponse } from 'next/server'
+import { z } from 'zod'
+import { checkInternalAuth } from '@/lib/auth/hybrid'
+import {
+  secureFetchWithPinnedIP,
+  validateUrlWithDNS,
+} from '@/lib/core/security/input-validation.server'
+import { generateRequestId } from '@/lib/core/utils/request'
+import { enhanceGoogleVaultError } from '@/tools/google_vault/utils'
+
+export const dynamic = 'force-dynamic'
+
+const logger = createLogger('GoogleVaultDownloadExportFileAPI')
+
+const GoogleVaultDownloadExportFileSchema = z.object({
+  accessToken: z.string().min(1, 'Access token is required'),
+  bucketName: z.string().min(1, 'Bucket name is required'),
+  objectName: z.string().min(1, 'Object name is required'),
+  fileName: z.string().optional().nullable(),
+})
+
+export async function POST(request: NextRequest) {
+  const requestId = generateRequestId()
+
+  try {
+    const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
+
+    if (!authResult.success) {
+      logger.warn(`[${requestId}] Unauthorized Google Vault download attempt: ${authResult.error}`)
+      return NextResponse.json(
+        {
+          success: false,
+          error: authResult.error || 'Authentication required',
+        },
+        { status: 401 }
+      )
+    }
+
+    const body = await request.json()
+    const validatedData = GoogleVaultDownloadExportFileSchema.parse(body)
+
+    const { accessToken, bucketName, objectName, fileName } = validatedData
+
+    const bucket = encodeURIComponent(bucketName)
+    const object = encodeURIComponent(objectName)
+    const downloadUrl = `https://storage.googleapis.com/storage/v1/b/${bucket}/o/${object}?alt=media`
+
+    logger.info(`[${requestId}] Downloading file from Google Vault`, { bucketName, objectName })
+
+    const urlValidation = await validateUrlWithDNS(downloadUrl, 'downloadUrl')
+    if (!urlValidation.isValid) {
+      return NextResponse.json(
+        { success: false, error: enhanceGoogleVaultError(urlValidation.error || 'Invalid URL') },
+        { status: 400 }
+      )
+    }
+
+    const downloadResponse = await secureFetchWithPinnedIP(downloadUrl, urlValidation.resolvedIP!, {
+      method: 'GET',
+      headers: {
+        Authorization: `Bearer ${accessToken}`,
+      },
+    })
+
+    if (!downloadResponse.ok) {
+      const errorText = await downloadResponse.text().catch(() => '')
+      const errorMessage = `Failed to download file: ${errorText || downloadResponse.statusText}`
+      logger.error(`[${requestId}] Failed to download Vault export file`, {
+        status: downloadResponse.status,
+        error: errorText,
+      })
+      return NextResponse.json(
+        { success: false, error: enhanceGoogleVaultError(errorMessage) },
+        { status: 400 }
+      )
+    }
+
+    const contentType = downloadResponse.headers.get('content-type') || 'application/octet-stream'
+    const disposition = downloadResponse.headers.get('content-disposition') || ''
+    const match = disposition.match(/filename\*=UTF-8''([^;]+)|filename="([^"]+)"/)
+
+    let resolvedName = fileName
+    if (!resolvedName) {
+      if (match?.[1]) {
+        try {
+          resolvedName = decodeURIComponent(match[1])
+        } catch {
+          resolvedName = match[1]
+        }
+      } else if (match?.[2]) {
+        resolvedName = match[2]
+      } else if (objectName) {
+        const parts = objectName.split('/')
+        resolvedName = parts[parts.length - 1] || 'vault-export.bin'
+      } else {
+        resolvedName = 'vault-export.bin'
+      }
+    }
+
+    const arrayBuffer = await downloadResponse.arrayBuffer()
+    const buffer = Buffer.from(arrayBuffer)
+
+    logger.info(`[${requestId}] Vault export file downloaded successfully`, {
+      name: resolvedName,
+      size: buffer.length,
+      mimeType: contentType,
+    })
+
+    return NextResponse.json({
+      success: true,
+      output: {
+        file: {
+          name: resolvedName,
+          mimeType: contentType,
+          data: buffer.toString('base64'),
+          size: buffer.length,
+        },
+      },
+    })
+  } catch (error) {
+    logger.error(`[${requestId}] Error downloading Google Vault export file:`, error)
+    return NextResponse.json(
+      {
+        success: false,
+        error: error instanceof Error ? error.message : 'Unknown error occurred',
+      },
+      { status: 500 }
+    )
+  }
+}
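The filename resolution above prefers the RFC 5987 `filename*` form of `Content-Disposition`, then the quoted `filename`, then the last segment of the object key. The same logic as a standalone function, using the exact regex from the route:

```typescript
// Sketch of the fallback chain used by the handler above.
function resolveFileName(disposition: string, objectName: string): string {
  const match = disposition.match(/filename\*=UTF-8''([^;]+)|filename="([^"]+)"/)
  if (match?.[1]) {
    try {
      return decodeURIComponent(match[1]) // filename*=UTF-8''export%201.zip
    } catch {
      return match[1] // malformed percent-encoding: use as-is
    }
  }
  if (match?.[2]) return match[2] // filename="export.zip"
  const parts = objectName.split('/')
  return parts[parts.length - 1] || 'vault-export.bin'
}

// resolveFileName(`attachment; filename*=UTF-8''export%201.zip`, 'a/b.zip') === 'export 1.zip'
```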
@@ -1,7 +1,10 @@
 import { createLogger } from '@sim/logger'
 import { type NextRequest, NextResponse } from 'next/server'
 import { checkInternalAuth } from '@/lib/auth/hybrid'
-import { validateImageUrl } from '@/lib/core/security/input-validation'
+import {
+  secureFetchWithPinnedIP,
+  validateUrlWithDNS,
+} from '@/lib/core/security/input-validation.server'
 import { generateRequestId } from '@/lib/core/utils/request'

 const logger = createLogger('ImageProxyAPI')
@@ -26,7 +29,7 @@ export async function GET(request: NextRequest) {
     return new NextResponse('Missing URL parameter', { status: 400 })
   }

-  const urlValidation = validateImageUrl(imageUrl)
+  const urlValidation = await validateUrlWithDNS(imageUrl, 'imageUrl')
   if (!urlValidation.isValid) {
     logger.warn(`[${requestId}] Blocked image proxy request`, {
       url: imageUrl.substring(0, 100),
@@ -38,7 +41,8 @@ export async function GET(request: NextRequest) {
   logger.info(`[${requestId}] Proxying image request for: ${imageUrl}`)

   try {
-    const imageResponse = await fetch(imageUrl, {
+    const imageResponse = await secureFetchWithPinnedIP(imageUrl, urlValidation.resolvedIP!, {
+      method: 'GET',
       headers: {
         'User-Agent':
           'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.36',
@@ -64,14 +68,14 @@ export async function GET(request: NextRequest) {

     const contentType = imageResponse.headers.get('content-type') || 'image/jpeg'

-    const imageBlob = await imageResponse.blob()
+    const imageArrayBuffer = await imageResponse.arrayBuffer()

-    if (imageBlob.size === 0) {
-      logger.error(`[${requestId}] Empty image blob received`)
+    if (imageArrayBuffer.byteLength === 0) {
+      logger.error(`[${requestId}] Empty image received`)
       return new NextResponse('Empty image received', { status: 404 })
     }

-    return new NextResponse(imageBlob, {
+    return new NextResponse(imageArrayBuffer, {
       headers: {
         'Content-Type': contentType,
         'Access-Control-Allow-Origin': '*',
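The proxy now resolves DNS once during validation and pins the subsequent connection to the resolved IP, closing the DNS-rebinding window between "check the URL" and "fetch the URL". The general shape of the pattern, using the helpers' signatures as they appear throughout these diffs:

```typescript
import {
  secureFetchWithPinnedIP,
  validateUrlWithDNS,
} from '@/lib/core/security/input-validation.server'

// Sketch: validate-then-pin. If the hostname were re-resolved at fetch time,
// an attacker-controlled DNS record could swap in a private IP between the
// validation step and the request (classic DNS rebinding).
async function safeFetch(url: string): Promise<Response> {
  const validation = await validateUrlWithDNS(url, 'url')
  if (!validation.isValid) {
    throw new Error(validation.error || 'URL failed validation')
  }
  // resolvedIP is populated whenever isValid is true, hence the non-null assertion.
  return secureFetchWithPinnedIP(url, validation.resolvedIP!, { method: 'GET' })
}
```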
apps/sim/app/api/tools/jira/add-attachment/route.ts (new file, 121 lines)
@@ -0,0 +1,121 @@
+import { createLogger } from '@sim/logger'
+import { type NextRequest, NextResponse } from 'next/server'
+import { z } from 'zod'
+import { checkInternalAuth } from '@/lib/auth/hybrid'
+import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
+import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
+import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
+import { getJiraCloudId } from '@/tools/jira/utils'
+
+const logger = createLogger('JiraAddAttachmentAPI')
+
+export const dynamic = 'force-dynamic'
+
+const JiraAddAttachmentSchema = z.object({
+  accessToken: z.string().min(1, 'Access token is required'),
+  domain: z.string().min(1, 'Domain is required'),
+  issueKey: z.string().min(1, 'Issue key is required'),
+  files: RawFileInputArraySchema,
+  cloudId: z.string().optional().nullable(),
+})
+
+export async function POST(request: NextRequest) {
+  const requestId = `jira-attach-${Date.now()}`
+
+  try {
+    const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
+    if (!authResult.success) {
+      return NextResponse.json(
+        { success: false, error: authResult.error || 'Unauthorized' },
+        { status: 401 }
+      )
+    }
+
+    const body = await request.json()
+    const validatedData = JiraAddAttachmentSchema.parse(body)
+
+    const userFiles = processFilesToUserFiles(validatedData.files, requestId, logger)
+    if (userFiles.length === 0) {
+      return NextResponse.json(
+        { success: false, error: 'No valid files provided for upload' },
+        { status: 400 }
+      )
+    }
+
+    const cloudId =
+      validatedData.cloudId ||
+      (await getJiraCloudId(validatedData.domain, validatedData.accessToken))
+
+    const formData = new FormData()
+    const filesOutput: Array<{ name: string; mimeType: string; data: string; size: number }> = []
+
+    for (const file of userFiles) {
+      const buffer = await downloadFileFromStorage(file, requestId, logger)
+      filesOutput.push({
+        name: file.name,
+        mimeType: file.type || 'application/octet-stream',
+        data: buffer.toString('base64'),
+        size: buffer.length,
+      })
+      const blob = new Blob([new Uint8Array(buffer)], {
+        type: file.type || 'application/octet-stream',
+      })
+      formData.append('file', blob, file.name)
+    }
+
+    const url = `https://api.atlassian.com/ex/jira/${cloudId}/rest/api/3/issue/${validatedData.issueKey}/attachments`
+
+    const response = await fetch(url, {
+      method: 'POST',
+      headers: {
+        Authorization: `Bearer ${validatedData.accessToken}`,
+        'X-Atlassian-Token': 'no-check',
+      },
+      body: formData,
+    })
+
+    if (!response.ok) {
+      const errorText = await response.text()
+      logger.error(`[${requestId}] Jira attachment upload failed`, {
+        status: response.status,
+        statusText: response.statusText,
+        error: errorText,
+      })
+      return NextResponse.json(
+        {
+          success: false,
+          error: `Failed to upload attachments: ${response.statusText}`,
+        },
+        { status: response.status }
+      )
+    }
+
+    const attachments = await response.json()
+    const attachmentIds = Array.isArray(attachments)
+      ? attachments.map((attachment) => attachment.id).filter(Boolean)
+      : []
+
+    return NextResponse.json({
+      success: true,
+      output: {
+        ts: new Date().toISOString(),
+        issueKey: validatedData.issueKey,
+        attachmentIds,
+        files: filesOutput,
+      },
+    })
+  } catch (error) {
+    if (error instanceof z.ZodError) {
+      return NextResponse.json(
+        { success: false, error: 'Invalid request data', details: error.errors },
+        { status: 400 }
+      )
+    }
+
+    logger.error(`[${requestId}] Jira attachment upload error`, error)
+    return NextResponse.json(
+      { success: false, error: error instanceof Error ? error.message : 'Internal server error' },
+      { status: 500 }
+    )
+  }
+}
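Jira's attachment endpoint requires `multipart/form-data` plus the `X-Atlassian-Token: no-check` header to pass its XSRF check. A trimmed sketch of the upload core; the parameters are placeholders:

```typescript
// Sketch of the multipart upload the route performs. fetch() sets the
// multipart boundary automatically when handed a FormData body.
async function attachToJira(
  cloudId: string,
  issueKey: string,
  accessToken: string,
  fileName: string,
  bytes: Buffer
): Promise<void> {
  const formData = new FormData()
  formData.append(
    'file',
    new Blob([new Uint8Array(bytes)], { type: 'application/octet-stream' }),
    fileName
  )
  const res = await fetch(
    `https://api.atlassian.com/ex/jira/${cloudId}/rest/api/3/issue/${issueKey}/attachments`,
    {
      method: 'POST',
      headers: {
        Authorization: `Bearer ${accessToken}`,
        'X-Atlassian-Token': 'no-check', // Jira rejects attachment posts without it
      },
      body: formData, // do NOT set Content-Type manually; the boundary would be lost
    }
  )
  if (!res.ok) throw new Error(`Jira attachment upload failed: ${res.statusText}`)
}
```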
@@ -2,9 +2,11 @@ import { createLogger } from '@sim/logger'
 import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
 import { checkInternalAuth } from '@/lib/auth/hybrid'
+import { secureFetchWithValidation } from '@/lib/core/security/input-validation.server'
 import { generateRequestId } from '@/lib/core/utils/request'
-import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
-import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
+import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
+import { uploadFilesForTeamsMessage } from '@/tools/microsoft_teams/server-utils'
+import type { GraphApiErrorResponse, GraphChatMessage } from '@/tools/microsoft_teams/types'
 import { resolveMentionsForChannel, type TeamsMention } from '@/tools/microsoft_teams/utils'

 export const dynamic = 'force-dynamic'
@@ -16,7 +18,7 @@ const TeamsWriteChannelSchema = z.object({
   teamId: z.string().min(1, 'Team ID is required'),
   channelId: z.string().min(1, 'Channel ID is required'),
   content: z.string().min(1, 'Message content is required'),
-  files: z.array(z.any()).optional().nullable(),
+  files: RawFileInputArraySchema.optional().nullable(),
 })

 export async function POST(request: NextRequest) {
@@ -53,94 +55,13 @@ export async function POST(request: NextRequest) {
       fileCount: validatedData.files?.length || 0,
     })

-    const attachments: any[] = []
-    if (validatedData.files && validatedData.files.length > 0) {
-      const rawFiles = validatedData.files
-      logger.info(`[${requestId}] Processing ${rawFiles.length} file(s) for upload to OneDrive`)
-      const userFiles = processFilesToUserFiles(rawFiles, requestId, logger)
-
-      for (const file of userFiles) {
-        try {
-          logger.info(`[${requestId}] Uploading file to Teams: ${file.name} (${file.size} bytes)`)
-
-          const buffer = await downloadFileFromStorage(file, requestId, logger)
-
-          const uploadUrl =
-            'https://graph.microsoft.com/v1.0/me/drive/root:/TeamsAttachments/' +
-            encodeURIComponent(file.name) +
-            ':/content'
-
-          logger.info(`[${requestId}] Uploading to Teams: ${uploadUrl}`)
-
-          const uploadResponse = await fetch(uploadUrl, {
-            method: 'PUT',
-            headers: {
-              Authorization: `Bearer ${validatedData.accessToken}`,
-              'Content-Type': file.type || 'application/octet-stream',
-            },
-            body: new Uint8Array(buffer),
-          })
-
-          if (!uploadResponse.ok) {
-            const errorData = await uploadResponse.json().catch(() => ({}))
-            logger.error(`[${requestId}] Teams upload failed:`, errorData)
-            throw new Error(
-              `Failed to upload file to Teams: ${errorData.error?.message || 'Unknown error'}`
-            )
-          }
-
-          const uploadedFile = await uploadResponse.json()
-          logger.info(`[${requestId}] File uploaded to Teams successfully`, {
-            id: uploadedFile.id,
-            webUrl: uploadedFile.webUrl,
-          })
-
-          const fileDetailsUrl = `https://graph.microsoft.com/v1.0/me/drive/items/${uploadedFile.id}?$select=id,name,webDavUrl,eTag,size`
-
-          const fileDetailsResponse = await fetch(fileDetailsUrl, {
-            headers: {
-              Authorization: `Bearer ${validatedData.accessToken}`,
-            },
-          })
-
-          if (!fileDetailsResponse.ok) {
-            const errorData = await fileDetailsResponse.json().catch(() => ({}))
-            logger.error(`[${requestId}] Failed to get file details:`, errorData)
-            throw new Error(
-              `Failed to get file details: ${errorData.error?.message || 'Unknown error'}`
-            )
-          }
-
-          const fileDetails = await fileDetailsResponse.json()
-          logger.info(`[${requestId}] Got file details`, {
-            webDavUrl: fileDetails.webDavUrl,
-            eTag: fileDetails.eTag,
-          })
-
-          const attachmentId = fileDetails.eTag?.match(/\{([a-f0-9-]+)\}/i)?.[1] || fileDetails.id
-
-          attachments.push({
-            id: attachmentId,
-            contentType: 'reference',
-            contentUrl: fileDetails.webDavUrl,
-            name: file.name,
-          })
-
-          logger.info(`[${requestId}] Created attachment reference for ${file.name}`)
-        } catch (error) {
-          logger.error(`[${requestId}] Failed to process file ${file.name}:`, error)
-          throw new Error(
-            `Failed to process file "${file.name}": ${error instanceof Error ? error.message : 'Unknown error'}`
-          )
-        }
-      }
-
-      logger.info(
-        `[${requestId}] All ${attachments.length} file(s) uploaded and attachment references created`
-      )
-    }
-
+    const { attachments, filesOutput } = await uploadFilesForTeamsMessage({
+      rawFiles: validatedData.files || [],
+      accessToken: validatedData.accessToken,
+      requestId,
+      logger,
+    })
+
     let messageContent = validatedData.content
     let contentType: 'text' | 'html' = 'text'
     const mentionEntities: TeamsMention[] = []
@@ -197,17 +118,21 @@ export async function POST(request: NextRequest) {

     const teamsUrl = `https://graph.microsoft.com/v1.0/teams/${encodeURIComponent(validatedData.teamId)}/channels/${encodeURIComponent(validatedData.channelId)}/messages`

-    const teamsResponse = await fetch(teamsUrl, {
+    const teamsResponse = await secureFetchWithValidation(
+      teamsUrl,
+      {
       method: 'POST',
       headers: {
         'Content-Type': 'application/json',
         Authorization: `Bearer ${validatedData.accessToken}`,
       },
       body: JSON.stringify(messageBody),
-    })
+      },
+      'teamsUrl'
+    )

     if (!teamsResponse.ok) {
-      const errorData = await teamsResponse.json().catch(() => ({}))
+      const errorData = (await teamsResponse.json().catch(() => ({}))) as GraphApiErrorResponse
       logger.error(`[${requestId}] Microsoft Teams API error:`, errorData)
       return NextResponse.json(
         {
@@ -218,7 +143,7 @@ export async function POST(request: NextRequest) {
       )
     }

-    const responseData = await teamsResponse.json()
+    const responseData = (await teamsResponse.json()) as GraphChatMessage
     logger.info(`[${requestId}] Teams channel message sent successfully`, {
       messageId: responseData.id,
       attachmentCount: attachments.length,
@@ -237,6 +162,7 @@ export async function POST(request: NextRequest) {
           url: responseData.webUrl || '',
           attachmentCount: attachments.length,
         },
+        files: filesOutput,
       },
     })
   } catch (error) {
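Both Teams routes now delegate the OneDrive upload and attachment-reference dance to a shared `uploadFilesForTeamsMessage` helper (the chat route below gets the identical treatment). The key trick, visible in the removed inline code, is that a Teams "reference" attachment must carry the GUID embedded in the DriveItem eTag, not the item id. A condensed sketch of that shape, inferred from the removed lines rather than from the helper's source:

```typescript
interface DriveItemDetails {
  id: string
  eTag?: string // e.g. "{01234567-89ab-cdef-0123-456789abcdef},1"
  webDavUrl?: string
}

// A Teams "reference" attachment points at a file already uploaded to
// OneDrive; the message HTML must contain an <attachment> tag with this id.
function toReferenceAttachment(fileDetails: DriveItemDetails, fileName: string) {
  const attachmentId = fileDetails.eTag?.match(/\{([a-f0-9-]+)\}/i)?.[1] || fileDetails.id
  return {
    id: attachmentId,
    contentType: 'reference',
    contentUrl: fileDetails.webDavUrl,
    name: fileName,
  }
}
```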
@@ -2,9 +2,11 @@ import { createLogger } from '@sim/logger'
 import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
 import { checkInternalAuth } from '@/lib/auth/hybrid'
+import { secureFetchWithValidation } from '@/lib/core/security/input-validation.server'
 import { generateRequestId } from '@/lib/core/utils/request'
-import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
-import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
+import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
+import { uploadFilesForTeamsMessage } from '@/tools/microsoft_teams/server-utils'
+import type { GraphApiErrorResponse, GraphChatMessage } from '@/tools/microsoft_teams/types'
 import { resolveMentionsForChat, type TeamsMention } from '@/tools/microsoft_teams/utils'

 export const dynamic = 'force-dynamic'
@@ -15,7 +17,7 @@ const TeamsWriteChatSchema = z.object({
   accessToken: z.string().min(1, 'Access token is required'),
   chatId: z.string().min(1, 'Chat ID is required'),
   content: z.string().min(1, 'Message content is required'),
-  files: z.array(z.any()).optional().nullable(),
+  files: RawFileInputArraySchema.optional().nullable(),
 })

 export async function POST(request: NextRequest) {
@@ -51,94 +53,13 @@ export async function POST(request: NextRequest) {
       fileCount: validatedData.files?.length || 0,
     })

-    const attachments: any[] = []
-    if (validatedData.files && validatedData.files.length > 0) {
-      const rawFiles = validatedData.files
-      logger.info(`[${requestId}] Processing ${rawFiles.length} file(s) for upload to Teams`)
-      const userFiles = processFilesToUserFiles(rawFiles, requestId, logger)
-
-      for (const file of userFiles) {
-        try {
-          logger.info(`[${requestId}] Uploading file to Teams: ${file.name} (${file.size} bytes)`)
-
-          const buffer = await downloadFileFromStorage(file, requestId, logger)
-
-          const uploadUrl =
-            'https://graph.microsoft.com/v1.0/me/drive/root:/TeamsAttachments/' +
-            encodeURIComponent(file.name) +
-            ':/content'
-
-          logger.info(`[${requestId}] Uploading to Teams: ${uploadUrl}`)
-
-          const uploadResponse = await fetch(uploadUrl, {
-            method: 'PUT',
-            headers: {
-              Authorization: `Bearer ${validatedData.accessToken}`,
-              'Content-Type': file.type || 'application/octet-stream',
-            },
-            body: new Uint8Array(buffer),
-          })
-
-          if (!uploadResponse.ok) {
-            const errorData = await uploadResponse.json().catch(() => ({}))
-            logger.error(`[${requestId}] Teams upload failed:`, errorData)
-            throw new Error(
-              `Failed to upload file to Teams: ${errorData.error?.message || 'Unknown error'}`
-            )
-          }
-
-          const uploadedFile = await uploadResponse.json()
-          logger.info(`[${requestId}] File uploaded to Teams successfully`, {
-            id: uploadedFile.id,
-            webUrl: uploadedFile.webUrl,
-          })
-
-          const fileDetailsUrl = `https://graph.microsoft.com/v1.0/me/drive/items/${uploadedFile.id}?$select=id,name,webDavUrl,eTag,size`
-
-          const fileDetailsResponse = await fetch(fileDetailsUrl, {
-            headers: {
-              Authorization: `Bearer ${validatedData.accessToken}`,
-            },
-          })
-
-          if (!fileDetailsResponse.ok) {
-            const errorData = await fileDetailsResponse.json().catch(() => ({}))
-            logger.error(`[${requestId}] Failed to get file details:`, errorData)
-            throw new Error(
-              `Failed to get file details: ${errorData.error?.message || 'Unknown error'}`
-            )
-          }
-
-          const fileDetails = await fileDetailsResponse.json()
-          logger.info(`[${requestId}] Got file details`, {
-            webDavUrl: fileDetails.webDavUrl,
-            eTag: fileDetails.eTag,
-          })
-
-          const attachmentId = fileDetails.eTag?.match(/\{([a-f0-9-]+)\}/i)?.[1] || fileDetails.id
-
-          attachments.push({
-            id: attachmentId,
-            contentType: 'reference',
-            contentUrl: fileDetails.webDavUrl,
-            name: file.name,
-          })
-
-          logger.info(`[${requestId}] Created attachment reference for ${file.name}`)
-        } catch (error) {
-          logger.error(`[${requestId}] Failed to process file ${file.name}:`, error)
-          throw new Error(
-            `Failed to process file "${file.name}": ${error instanceof Error ? error.message : 'Unknown error'}`
-          )
-        }
-      }
-
-      logger.info(
-        `[${requestId}] All ${attachments.length} file(s) uploaded and attachment references created`
-      )
-    }
-
+    const { attachments, filesOutput } = await uploadFilesForTeamsMessage({
+      rawFiles: validatedData.files || [],
+      accessToken: validatedData.accessToken,
+      requestId,
+      logger,
+    })
+
     let messageContent = validatedData.content
     let contentType: 'text' | 'html' = 'text'
     const mentionEntities: TeamsMention[] = []
@@ -194,17 +115,21 @@ export async function POST(request: NextRequest) {

     const teamsUrl = `https://graph.microsoft.com/v1.0/chats/${encodeURIComponent(validatedData.chatId)}/messages`

-    const teamsResponse = await fetch(teamsUrl, {
+    const teamsResponse = await secureFetchWithValidation(
+      teamsUrl,
+      {
       method: 'POST',
       headers: {
         'Content-Type': 'application/json',
         Authorization: `Bearer ${validatedData.accessToken}`,
       },
       body: JSON.stringify(messageBody),
-    })
+      },
+      'teamsUrl'
+    )

     if (!teamsResponse.ok) {
-      const errorData = await teamsResponse.json().catch(() => ({}))
+      const errorData = (await teamsResponse.json().catch(() => ({}))) as GraphApiErrorResponse
       logger.error(`[${requestId}] Microsoft Teams API error:`, errorData)
       return NextResponse.json(
         {
@@ -215,7 +140,7 @@ export async function POST(request: NextRequest) {
       )
     }

-    const responseData = await teamsResponse.json()
+    const responseData = (await teamsResponse.json()) as GraphChatMessage
     logger.info(`[${requestId}] Teams message sent successfully`, {
       messageId: responseData.id,
       attachmentCount: attachments.length,
@@ -233,6 +158,7 @@ export async function POST(request: NextRequest) {
           url: responseData.webUrl || '',
           attachmentCount: attachments.length,
         },
+        files: filesOutput,
       },
     })
   } catch (error) {
@@ -2,15 +2,17 @@ import { createLogger } from '@sim/logger'
 import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
 import { checkInternalAuth } from '@/lib/auth/hybrid'
-import { generateRequestId } from '@/lib/core/utils/request'
-import { getBaseUrl } from '@/lib/core/utils/urls'
-import { StorageService } from '@/lib/uploads'
 import {
-  extractStorageKey,
-  inferContextFromKey,
-  isInternalFileUrl,
-} from '@/lib/uploads/utils/file-utils'
-import { verifyFileAccess } from '@/app/api/files/authorization'
+  secureFetchWithPinnedIP,
+  validateUrlWithDNS,
+} from '@/lib/core/security/input-validation.server'
+import { generateRequestId } from '@/lib/core/utils/request'
+import { FileInputSchema } from '@/lib/uploads/utils/file-schemas'
+import { isInternalFileUrl, processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
+import {
+  downloadFileFromStorage,
+  resolveInternalFileUrl,
+} from '@/lib/uploads/utils/file-utils.server'

 export const dynamic = 'force-dynamic'
@@ -18,7 +20,9 @@ const logger = createLogger('MistralParseAPI')

 const MistralParseSchema = z.object({
   apiKey: z.string().min(1, 'API key is required'),
-  filePath: z.string().min(1, 'File path is required'),
+  filePath: z.string().min(1, 'File path is required').optional(),
+  fileData: FileInputSchema.optional(),
+  file: FileInputSchema.optional(),
   resultType: z.string().optional(),
   pages: z.array(z.number()).optional(),
   includeImageBase64: z.boolean().optional(),
@@ -49,66 +53,140 @@ export async function POST(request: NextRequest) {
     const body = await request.json()
     const validatedData = MistralParseSchema.parse(body)

+    const fileData = validatedData.file || validatedData.fileData
+    const filePath = typeof fileData === 'string' ? fileData : validatedData.filePath
+
+    if (!fileData && (!filePath || filePath.trim() === '')) {
+      return NextResponse.json(
+        {
+          success: false,
+          error: 'File input is required',
+        },
+        { status: 400 }
+      )
+    }
+
     logger.info(`[${requestId}] Mistral parse request`, {
-      filePath: validatedData.filePath,
-      isWorkspaceFile: isInternalFileUrl(validatedData.filePath),
+      hasFileData: Boolean(fileData),
+      filePath,
+      isWorkspaceFile: filePath ? isInternalFileUrl(filePath) : false,
       userId,
     })

-    let fileUrl = validatedData.filePath
-
-    if (isInternalFileUrl(validatedData.filePath)) {
-      try {
-        const storageKey = extractStorageKey(validatedData.filePath)
-
-        const context = inferContextFromKey(storageKey)
-
-        const hasAccess = await verifyFileAccess(
-          storageKey,
-          userId,
-          undefined, // customConfig
-          context, // context
-          false // isLocal
-        )
-
-        if (!hasAccess) {
-          logger.warn(`[${requestId}] Unauthorized presigned URL generation attempt`, {
-            userId,
-            key: storageKey,
-            context,
-          })
-          return NextResponse.json(
-            {
-              success: false,
-              error: 'File not found',
-            },
-            { status: 404 }
-          )
-        }
-
-        fileUrl = await StorageService.generatePresignedDownloadUrl(storageKey, context, 5 * 60)
-        logger.info(`[${requestId}] Generated presigned URL for ${context} file`)
-      } catch (error) {
-        logger.error(`[${requestId}] Failed to generate presigned URL:`, error)
-        return NextResponse.json(
-          {
-            success: false,
-            error: 'Failed to generate file access URL',
-          },
-          { status: 500 }
-        )
-      }
-    } else if (validatedData.filePath?.startsWith('/')) {
-      const baseUrl = getBaseUrl()
-      fileUrl = `${baseUrl}${validatedData.filePath}`
-    }
-
     const mistralBody: any = {
       model: 'mistral-ocr-latest',
-      document: {
+    }
+
+    if (fileData && typeof fileData === 'object') {
+      const rawFile = fileData
+      let userFile
+      try {
+        userFile = processSingleFileToUserFile(rawFile, requestId, logger)
+      } catch (error) {
+        return NextResponse.json(
+          {
+            success: false,
+            error: error instanceof Error ? error.message : 'Failed to process file',
+          },
+          { status: 400 }
+        )
+      }
+
+      let mimeType = userFile.type
+      if (!mimeType || mimeType === 'application/octet-stream') {
+        const filename = userFile.name?.toLowerCase() || ''
+        if (filename.endsWith('.pdf')) {
+          mimeType = 'application/pdf'
+        } else if (filename.endsWith('.png')) {
+          mimeType = 'image/png'
+        } else if (filename.endsWith('.jpg') || filename.endsWith('.jpeg')) {
+          mimeType = 'image/jpeg'
+        } else if (filename.endsWith('.gif')) {
+          mimeType = 'image/gif'
+        } else if (filename.endsWith('.webp')) {
+          mimeType = 'image/webp'
+        } else {
+          mimeType = 'application/pdf'
+        }
+      }
+      let base64 = userFile.base64
+      if (!base64) {
+        const buffer = await downloadFileFromStorage(userFile, requestId, logger)
+        base64 = buffer.toString('base64')
+      }
+      const base64Payload = base64.startsWith('data:')
+        ? base64
+        : `data:${mimeType};base64,${base64}`
+
+      // Mistral API uses different document types for images vs documents
+      const isImage = mimeType.startsWith('image/')
+      if (isImage) {
+        mistralBody.document = {
+          type: 'image_url',
+          image_url: base64Payload,
+        }
+      } else {
+        mistralBody.document = {
+          type: 'document_url',
+          document_url: base64Payload,
+        }
+      }
+    } else if (filePath) {
+      let fileUrl = filePath
+
+      const isInternalFilePath = isInternalFileUrl(filePath)
+      if (isInternalFilePath) {
+        const resolution = await resolveInternalFileUrl(filePath, userId, requestId, logger)
+        if (resolution.error) {
+          return NextResponse.json(
+            {
+              success: false,
+              error: resolution.error.message,
+            },
+            { status: resolution.error.status }
+          )
+        }
+        fileUrl = resolution.fileUrl || fileUrl
+      } else if (filePath.startsWith('/')) {
+        logger.warn(`[${requestId}] Invalid internal path`, {
+          userId,
+          path: filePath.substring(0, 50),
+        })
+        return NextResponse.json(
+          {
+            success: false,
+            error: 'Invalid file path. Only uploaded files are supported for internal paths.',
+          },
+          { status: 400 }
+        )
+      } else {
+        const urlValidation = await validateUrlWithDNS(fileUrl, 'filePath')
+        if (!urlValidation.isValid) {
+          return NextResponse.json(
+            {
+              success: false,
+              error: urlValidation.error,
+            },
+            { status: 400 }
+          )
+        }
+      }
+
+      const imageExtensions = ['.png', '.jpg', '.jpeg', '.gif', '.webp', '.avif']
+      const pathname = new URL(fileUrl).pathname.toLowerCase()
+      const isImageUrl = imageExtensions.some((ext) => pathname.endsWith(ext))
+
+      if (isImageUrl) {
+        mistralBody.document = {
+          type: 'image_url',
+          image_url: fileUrl,
+        }
+      } else {
+        mistralBody.document = {
         type: 'document_url',
         document_url: fileUrl,
-      },
+        }
+      }
     }

     if (validatedData.pages) {
@@ -124,7 +202,25 @@ export async function POST(request: NextRequest) {
       mistralBody.image_min_size = validatedData.imageMinSize
     }

-    const mistralResponse = await fetch('https://api.mistral.ai/v1/ocr', {
+    const mistralEndpoint = 'https://api.mistral.ai/v1/ocr'
+    const mistralValidation = await validateUrlWithDNS(mistralEndpoint, 'Mistral API URL')
+    if (!mistralValidation.isValid) {
+      logger.error(`[${requestId}] Mistral API URL validation failed`, {
+        error: mistralValidation.error,
+      })
+      return NextResponse.json(
+        {
+          success: false,
+          error: 'Failed to reach Mistral API',
+        },
+        { status: 502 }
+      )
+    }
+
+    const mistralResponse = await secureFetchWithPinnedIP(
+      mistralEndpoint,
+      mistralValidation.resolvedIP!,
+      {
       method: 'POST',
       headers: {
         'Content-Type': 'application/json',
@@ -132,7 +228,8 @@ export async function POST(request: NextRequest) {
       Authorization: `Bearer ${validatedData.apiKey}`,
       },
       body: JSON.stringify(mistralBody),
-    })
+      }
+    )

     if (!mistralResponse.ok) {
       const errorText = await mistralResponse.text()
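Mistral's OCR endpoint accepts either an `image_url` or a `document_url` document, and both take data URLs, so inline uploads are encoded as `data:<mime>;base64,<payload>`. The branch logic above in miniature:

```typescript
// Minimal sketch of the payload construction used by the route.
function buildOcrDocument(mimeType: string, base64: string) {
  // Accept pre-formed data URLs; otherwise wrap the raw base64.
  const payload = base64.startsWith('data:') ? base64 : `data:${mimeType};base64,${base64}`
  return mimeType.startsWith('image/')
    ? { type: 'image_url', image_url: payload }
    : { type: 'document_url', document_url: payload }
}

// Usage: { model: 'mistral-ocr-latest', document: buildOcrDocument('application/pdf', b64) }
```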
apps/sim/app/api/tools/onedrive/download/route.ts (new file, 177 lines)
@@ -0,0 +1,177 @@
+import { createLogger } from '@sim/logger'
+import { type NextRequest, NextResponse } from 'next/server'
+import { z } from 'zod'
+import { checkInternalAuth } from '@/lib/auth/hybrid'
+import {
+  secureFetchWithPinnedIP,
+  validateUrlWithDNS,
+} from '@/lib/core/security/input-validation.server'
+import { generateRequestId } from '@/lib/core/utils/request'
+
+export const dynamic = 'force-dynamic'
+
+/** Microsoft Graph API error response structure */
+interface GraphApiError {
+  error?: {
+    code?: string
+    message?: string
+  }
+}
+
+/** Microsoft Graph API drive item metadata response */
+interface DriveItemMetadata {
+  id?: string
+  name?: string
+  folder?: Record<string, unknown>
+  file?: {
+    mimeType?: string
+  }
+}
+
+const logger = createLogger('OneDriveDownloadAPI')
+
+const OneDriveDownloadSchema = z.object({
+  accessToken: z.string().min(1, 'Access token is required'),
+  fileId: z.string().min(1, 'File ID is required'),
+  fileName: z.string().optional().nullable(),
+})
+
+export async function POST(request: NextRequest) {
+  const requestId = generateRequestId()
+
+  try {
+    const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
+
+    if (!authResult.success) {
+      logger.warn(`[${requestId}] Unauthorized OneDrive download attempt: ${authResult.error}`)
+      return NextResponse.json(
+        {
+          success: false,
+          error: authResult.error || 'Authentication required',
+        },
+        { status: 401 }
+      )
+    }
+
+    const body = await request.json()
+    const validatedData = OneDriveDownloadSchema.parse(body)
+
+    const { accessToken, fileId, fileName } = validatedData
+    const authHeader = `Bearer ${accessToken}`
+
+    logger.info(`[${requestId}] Getting file metadata from OneDrive`, { fileId })
+
+    const metadataUrl = `https://graph.microsoft.com/v1.0/me/drive/items/${fileId}`
+    const metadataUrlValidation = await validateUrlWithDNS(metadataUrl, 'metadataUrl')
+    if (!metadataUrlValidation.isValid) {
+      return NextResponse.json(
+        { success: false, error: metadataUrlValidation.error },
+        { status: 400 }
+      )
+    }
+
+    const metadataResponse = await secureFetchWithPinnedIP(
+      metadataUrl,
+      metadataUrlValidation.resolvedIP!,
+      {
+        headers: { Authorization: authHeader },
+      }
+    )
+
+    if (!metadataResponse.ok) {
+      const errorDetails = (await metadataResponse.json().catch(() => ({}))) as GraphApiError
+      logger.error(`[${requestId}] Failed to get file metadata`, {
+        status: metadataResponse.status,
+        error: errorDetails,
+      })
+      return NextResponse.json(
+        { success: false, error: errorDetails.error?.message || 'Failed to get file metadata' },
+        { status: 400 }
+      )
+    }
+
+    const metadata = (await metadataResponse.json()) as DriveItemMetadata
+
+    if (metadata.folder && !metadata.file) {
+      logger.error(`[${requestId}] Attempted to download a folder`, {
+        itemId: metadata.id,
+        itemName: metadata.name,
+      })
+      return NextResponse.json(
+        {
+          success: false,
+          error: `Cannot download folder "${metadata.name}". Please select a file instead.`,
+        },
+        { status: 400 }
+      )
+    }
+
+    const mimeType = metadata.file?.mimeType || 'application/octet-stream'
+
+    logger.info(`[${requestId}] Downloading file from OneDrive`, { fileId, mimeType })
+
+    const downloadUrl = `https://graph.microsoft.com/v1.0/me/drive/items/${fileId}/content`
+    const downloadUrlValidation = await validateUrlWithDNS(downloadUrl, 'downloadUrl')
+    if (!downloadUrlValidation.isValid) {
+      return NextResponse.json(
+        { success: false, error: downloadUrlValidation.error },
+        { status: 400 }
+      )
+    }
+
+    const downloadResponse = await secureFetchWithPinnedIP(
+      downloadUrl,
+      downloadUrlValidation.resolvedIP!,
+      {
+        headers: { Authorization: authHeader },
+      }
+    )
+
+    if (!downloadResponse.ok) {
+      const downloadError = (await downloadResponse.json().catch(() => ({}))) as GraphApiError
+      logger.error(`[${requestId}] Failed to download file`, {
+        status: downloadResponse.status,
+        error: downloadError,
+      })
+      return NextResponse.json(
+        { success: false, error: downloadError.error?.message || 'Failed to download file' },
+        { status: 400 }
+      )
+    }
+
+    const arrayBuffer = await downloadResponse.arrayBuffer()
+    const fileBuffer = Buffer.from(arrayBuffer)
+
+    const resolvedName = fileName || metadata.name || 'download'
+
+    logger.info(`[${requestId}] File downloaded successfully`, {
+      fileId,
+      name: resolvedName,
+      size: fileBuffer.length,
+      mimeType,
+    })
+
+    const base64Data = fileBuffer.toString('base64')
+
+    return NextResponse.json({
+      success: true,
+      output: {
+        file: {
+          name: resolvedName,
+          mimeType,
+          data: base64Data,
+          size: fileBuffer.length,
+        },
+      },
+    })
+  } catch (error) {
+    logger.error(`[${requestId}] Error downloading OneDrive file:`, error)
+    return NextResponse.json(
+      {
+        success: false,
+        error: error instanceof Error ? error.message : 'Unknown error occurred',
+      },
+      { status: 500 }
+    )
+  }
+}
@@ -4,7 +4,9 @@ import * as XLSX from 'xlsx'
 import { z } from 'zod'
 import { checkInternalAuth } from '@/lib/auth/hybrid'
 import { validateMicrosoftGraphId } from '@/lib/core/security/input-validation'
+import { secureFetchWithValidation } from '@/lib/core/security/input-validation.server'
 import { generateRequestId } from '@/lib/core/utils/request'
+import { RawFileInputSchema } from '@/lib/uploads/utils/file-schemas'
 import {
   getExtensionFromMimeType,
   processSingleFileToUserFile,
@@ -29,12 +31,33 @@ const ExcelValuesSchema = z.union([
 const OneDriveUploadSchema = z.object({
   accessToken: z.string().min(1, 'Access token is required'),
   fileName: z.string().min(1, 'File name is required'),
-  file: z.any().optional(),
+  file: RawFileInputSchema.optional(),
   folderId: z.string().optional().nullable(),
   mimeType: z.string().nullish(),
   values: ExcelValuesSchema.optional().nullable(),
+  conflictBehavior: z.enum(['fail', 'replace', 'rename']).optional().nullable(),
 })

+/** Microsoft Graph DriveItem response */
+interface OneDriveFileData {
+  id: string
+  name: string
+  size: number
+  webUrl: string
+  createdDateTime: string
+  lastModifiedDateTime: string
+  file?: { mimeType: string }
+  parentReference?: { id: string; path: string }
+  '@microsoft.graph.downloadUrl'?: string
+}
+
+/** Microsoft Graph Excel range response */
+interface ExcelRangeData {
+  address?: string
+  addressLocal?: string
+  values?: unknown[][]
+}
+
 export async function POST(request: NextRequest) {
   const requestId = generateRequestId()

@@ -88,25 +111,9 @@ export async function POST(request: NextRequest) {
       )
     }

-    let fileToProcess
-    if (Array.isArray(rawFile)) {
-      if (rawFile.length === 0) {
-        return NextResponse.json(
-          {
-            success: false,
-            error: 'No file provided',
-          },
-          { status: 400 }
-        )
-      }
-      fileToProcess = rawFile[0]
-    } else {
-      fileToProcess = rawFile
-    }
-
     let userFile
     try {
-      userFile = processSingleFileToUserFile(fileToProcess, requestId, logger)
+      userFile = processSingleFileToUserFile(rawFile, requestId, logger)
     } catch (error) {
       return NextResponse.json(
         {
@@ -179,14 +186,23 @@ export async function POST(request: NextRequest) {
       uploadUrl = `${MICROSOFT_GRAPH_BASE}/me/drive/root:/${encodeURIComponent(fileName)}:/content`
     }

-    const uploadResponse = await fetch(uploadUrl, {
+    // Add conflict behavior if specified (defaults to replace by Microsoft Graph API)
+    if (validatedData.conflictBehavior) {
+      uploadUrl += `?@microsoft.graph.conflictBehavior=${validatedData.conflictBehavior}`
+    }
+
+    const uploadResponse = await secureFetchWithValidation(
+      uploadUrl,
+      {
       method: 'PUT',
       headers: {
         Authorization: `Bearer ${validatedData.accessToken}`,
         'Content-Type': mimeType,
       },
-      body: new Uint8Array(fileBuffer),
-    })
+      body: fileBuffer,
+      },
+      'uploadUrl'
+    )

     if (!uploadResponse.ok) {
       const errorText = await uploadResponse.text()
@@ -200,7 +216,7 @@ export async function POST(request: NextRequest) {
       )
     }

-    const fileData = await uploadResponse.json()
+    const fileData = (await uploadResponse.json()) as OneDriveFileData

     let excelWriteResult: any | undefined
     const shouldWriteExcelContent =
@@ -209,8 +225,11 @@ export async function POST(request: NextRequest) {
     if (shouldWriteExcelContent) {
       try {
         let workbookSessionId: string | undefined
-        const sessionResp = await fetch(
-          `${MICROSOFT_GRAPH_BASE}/me/drive/items/${encodeURIComponent(fileData.id)}/workbook/createSession`,
+        const sessionUrl = `${MICROSOFT_GRAPH_BASE}/me/drive/items/${encodeURIComponent(
+          fileData.id
+        )}/workbook/createSession`
+        const sessionResp = await secureFetchWithValidation(
+          sessionUrl,
           {
             method: 'POST',
             headers: {
@@ -218,11 +237,12 @@ export async function POST(request: NextRequest) {
             'Content-Type': 'application/json',
             },
             body: JSON.stringify({ persistChanges: true }),
-          }
+          },
+          'sessionUrl'
         )

         if (sessionResp.ok) {
-          const sessionData = await sessionResp.json()
+          const sessionData = (await sessionResp.json()) as { id?: string }
           workbookSessionId = sessionData?.id
         }

@@ -231,14 +251,19 @@ export async function POST(request: NextRequest) {
         const listUrl = `${MICROSOFT_GRAPH_BASE}/me/drive/items/${encodeURIComponent(
           fileData.id
         )}/workbook/worksheets?$select=name&$orderby=position&$top=1`
-        const listResp = await fetch(listUrl, {
+        const listResp = await secureFetchWithValidation(
+          listUrl,
+          {
+            method: 'GET',
           headers: {
             Authorization: `Bearer ${validatedData.accessToken}`,
             ...(workbookSessionId ? { 'workbook-session-id': workbookSessionId } : {}),
           },
-        })
+          },
+          'listUrl'
+        )
         if (listResp.ok) {
-          const listData = await listResp.json()
+          const listData = (await listResp.json()) as { value?: Array<{ name?: string }> }
           const firstSheetName = listData?.value?.[0]?.name
           if (firstSheetName) {
             sheetName = firstSheetName
@@ -297,7 +322,9 @@ export async function POST(request: NextRequest) {
           )}')/range(address='${encodeURIComponent(computedRangeAddress)}')`
         )

-        const excelWriteResponse = await fetch(url.toString(), {
+        const excelWriteResponse = await secureFetchWithValidation(
+          url.toString(),
+          {
           method: 'PATCH',
           headers: {
             Authorization: `Bearer ${validatedData.accessToken}`,
@@ -305,7 +332,9 @@ export async function POST(request: NextRequest) {
             ...(workbookSessionId ? { 'workbook-session-id': workbookSessionId } : {}),
           },
           body: JSON.stringify({ values: processedValues }),
-        })
+          },
+          'excelWriteUrl'
+        )

         if (!excelWriteResponse || !excelWriteResponse.ok) {
           const errorText = excelWriteResponse ? await excelWriteResponse.text() : 'no response'
@@ -320,7 +349,7 @@ export async function POST(request: NextRequest) {
             details: errorText,
           }
         } else {
-          const writeData = await excelWriteResponse.json()
+          const writeData = (await excelWriteResponse.json()) as ExcelRangeData
           const addr = writeData.address || writeData.addressLocal
           const v = writeData.values || []
           excelWriteResult = {
@@ -328,21 +357,25 @@ export async function POST(request: NextRequest) {
             updatedRange: addr,
             updatedRows: Array.isArray(v) ? v.length : undefined,
             updatedColumns: Array.isArray(v) && v[0] ? v[0].length : undefined,
-            updatedCells: Array.isArray(v) && v[0] ? v.length * (v[0] as any[]).length : undefined,
+            updatedCells: Array.isArray(v) && v[0] ? v.length * v[0].length : undefined,
           }
         }

         if (workbookSessionId) {
           try {
-            const closeResp = await fetch(
-              `${MICROSOFT_GRAPH_BASE}/me/drive/items/${encodeURIComponent(fileData.id)}/workbook/closeSession`,
+            const closeUrl = `${MICROSOFT_GRAPH_BASE}/me/drive/items/${encodeURIComponent(
+              fileData.id
+            )}/workbook/closeSession`
+            const closeResp = await secureFetchWithValidation(
+              closeUrl,
               {
|
{
|
||||||
method: 'POST',
|
method: 'POST',
|
||||||
headers: {
|
headers: {
|
||||||
Authorization: `Bearer ${validatedData.accessToken}`,
|
Authorization: `Bearer ${validatedData.accessToken}`,
|
||||||
'workbook-session-id': workbookSessionId,
|
'workbook-session-id': workbookSessionId,
|
||||||
},
|
},
|
||||||
}
|
},
|
||||||
|
'closeSessionUrl'
|
||||||
)
|
)
|
||||||
if (!closeResp.ok) {
|
if (!closeResp.ok) {
|
||||||
const closeText = await closeResp.text()
|
const closeText = await closeResp.text()
|
||||||
|
|||||||
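A note on the pattern repeated throughout these hunks: raw `fetch` calls are swapped for `secureFetchWithValidation(url, init, paramName)`. The helper's implementation is not shown in this diff; as a rough mental model only, it presumably validates the URL (including DNS resolution, to block SSRF toward private-range hosts) before performing the request. A minimal sketch, assuming the three-argument signature seen at the call sites and composing the two validators that do appear elsewhere in this diff:

```typescript
// Hypothetical sketch inferred from call sites in this diff; the real
// implementation lives in '@/lib/core/security/input-validation.server'
// and may differ.
async function secureFetchWithValidation(
  url: string,
  init: RequestInit,
  paramName: string // label used in validation error messages
): Promise<Response> {
  const validation = await validateUrlWithDNS(url, paramName) // resolve + SSRF checks
  if (!validation.isValid) {
    throw new Error(validation.error ?? `Invalid URL for ${paramName}`)
  }
  // Pin the request to the resolved IP so the host can't re-resolve mid-request
  return secureFetchWithPinnedIP(url, validation.resolvedIP!, init)
}
```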
@@ -3,6 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
 import { checkInternalAuth } from '@/lib/auth/hybrid'
 import { generateRequestId } from '@/lib/core/utils/request'
+import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
 import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
 import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'

@@ -18,7 +19,7 @@ const OutlookDraftSchema = z.object({
   contentType: z.enum(['text', 'html']).optional().nullable(),
   cc: z.string().optional().nullable(),
   bcc: z.string().optional().nullable(),
-  attachments: z.array(z.any()).optional().nullable(),
+  attachments: RawFileInputArraySchema.optional().nullable(),
 })

 export async function POST(request: NextRequest) {
@@ -3,6 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
 import { checkInternalAuth } from '@/lib/auth/hybrid'
 import { generateRequestId } from '@/lib/core/utils/request'
+import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
 import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
 import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'

@@ -20,7 +21,7 @@ const OutlookSendSchema = z.object({
   bcc: z.string().optional().nullable(),
   replyToMessageId: z.string().optional().nullable(),
   conversationId: z.string().optional().nullable(),
-  attachments: z.array(z.any()).optional().nullable(),
+  attachments: RawFileInputArraySchema.optional().nullable(),
 })

 export async function POST(request: NextRequest) {

@@ -95,14 +96,14 @@ export async function POST(request: NextRequest) {

     if (attachments.length > 0) {
       const totalSize = attachments.reduce((sum, file) => sum + file.size, 0)
-      const maxSize = 4 * 1024 * 1024 // 4MB
+      const maxSize = 3 * 1024 * 1024 // 3MB - Microsoft Graph API limit for inline attachments

       if (totalSize > maxSize) {
         const sizeMB = (totalSize / (1024 * 1024)).toFixed(2)
         return NextResponse.json(
           {
             success: false,
-            error: `Total attachment size (${sizeMB}MB) exceeds Outlook's limit of 4MB per request`,
+            error: `Total attachment size (${sizeMB}MB) exceeds Microsoft Graph API limit of 3MB per request`,
           },
           { status: 400 }
         )
apps/sim/app/api/tools/pipedrive/get-files/route.ts (new file, 165 lines)
@@ -0,0 +1,165 @@
+import { createLogger } from '@sim/logger'
+import { type NextRequest, NextResponse } from 'next/server'
+import { z } from 'zod'
+import { checkInternalAuth } from '@/lib/auth/hybrid'
+import {
+  secureFetchWithPinnedIP,
+  validateUrlWithDNS,
+} from '@/lib/core/security/input-validation.server'
+import { generateRequestId } from '@/lib/core/utils/request'
+import { getFileExtension, getMimeTypeFromExtension } from '@/lib/uploads/utils/file-utils'
+
+export const dynamic = 'force-dynamic'
+
+const logger = createLogger('PipedriveGetFilesAPI')
+
+interface PipedriveFile {
+  id?: number
+  name?: string
+  url?: string
+}
+
+interface PipedriveApiResponse {
+  success: boolean
+  data?: PipedriveFile[]
+  error?: string
+}
+
+const PipedriveGetFilesSchema = z.object({
+  accessToken: z.string().min(1, 'Access token is required'),
+  deal_id: z.string().optional().nullable(),
+  person_id: z.string().optional().nullable(),
+  org_id: z.string().optional().nullable(),
+  limit: z.string().optional().nullable(),
+  downloadFiles: z.boolean().optional().default(false),
+})
+
+export async function POST(request: NextRequest) {
+  const requestId = generateRequestId()
+
+  try {
+    const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
+
+    if (!authResult.success) {
+      logger.warn(`[${requestId}] Unauthorized Pipedrive get files attempt: ${authResult.error}`)
+      return NextResponse.json(
+        {
+          success: false,
+          error: authResult.error || 'Authentication required',
+        },
+        { status: 401 }
+      )
+    }
+
+    const body = await request.json()
+    const validatedData = PipedriveGetFilesSchema.parse(body)
+
+    const { accessToken, deal_id, person_id, org_id, limit, downloadFiles } = validatedData
+
+    const baseUrl = 'https://api.pipedrive.com/v1/files'
+    const queryParams = new URLSearchParams()
+
+    if (deal_id) queryParams.append('deal_id', deal_id)
+    if (person_id) queryParams.append('person_id', person_id)
+    if (org_id) queryParams.append('org_id', org_id)
+    if (limit) queryParams.append('limit', limit)
+
+    const queryString = queryParams.toString()
+    const apiUrl = queryString ? `${baseUrl}?${queryString}` : baseUrl
+
+    logger.info(`[${requestId}] Fetching files from Pipedrive`, { deal_id, person_id, org_id })
+
+    const urlValidation = await validateUrlWithDNS(apiUrl, 'apiUrl')
+    if (!urlValidation.isValid) {
+      return NextResponse.json({ success: false, error: urlValidation.error }, { status: 400 })
+    }
+
+    const response = await secureFetchWithPinnedIP(apiUrl, urlValidation.resolvedIP!, {
+      method: 'GET',
+      headers: {
+        Authorization: `Bearer ${accessToken}`,
+        Accept: 'application/json',
+      },
+    })
+
+    const data = (await response.json()) as PipedriveApiResponse
+
+    if (!data.success) {
+      logger.error(`[${requestId}] Pipedrive API request failed`, { data })
+      return NextResponse.json(
+        { success: false, error: data.error || 'Failed to fetch files from Pipedrive' },
+        { status: 400 }
+      )
+    }
+
+    const files = data.data || []
+    const downloadedFiles: Array<{
+      name: string
+      mimeType: string
+      data: string
+      size: number
+    }> = []
+
+    if (downloadFiles) {
+      for (const file of files) {
+        if (!file?.url) continue
+
+        try {
+          const fileUrlValidation = await validateUrlWithDNS(file.url, 'fileUrl')
+          if (!fileUrlValidation.isValid) continue
+
+          const downloadResponse = await secureFetchWithPinnedIP(
+            file.url,
+            fileUrlValidation.resolvedIP!,
+            {
+              method: 'GET',
+              headers: { Authorization: `Bearer ${accessToken}` },
+            }
+          )
+
+          if (!downloadResponse.ok) continue
+
+          const arrayBuffer = await downloadResponse.arrayBuffer()
+          const buffer = Buffer.from(arrayBuffer)
+          const extension = getFileExtension(file.name || '')
+          const mimeType =
+            downloadResponse.headers.get('content-type') || getMimeTypeFromExtension(extension)
+          const fileName = file.name || `pipedrive-file-${file.id || Date.now()}`
+
+          downloadedFiles.push({
+            name: fileName,
+            mimeType,
+            data: buffer.toString('base64'),
+            size: buffer.length,
+          })
+        } catch (error) {
+          logger.warn(`[${requestId}] Failed to download file ${file.id}:`, error)
+        }
+      }
+    }
+
+    logger.info(`[${requestId}] Pipedrive files fetched successfully`, {
+      fileCount: files.length,
+      downloadedCount: downloadedFiles.length,
+    })
+
+    return NextResponse.json({
+      success: true,
+      output: {
+        files,
+        downloadedFiles: downloadedFiles.length > 0 ? downloadedFiles : undefined,
+        total_items: files.length,
+        success: true,
+      },
+    })
+  } catch (error) {
+    logger.error(`[${requestId}] Error fetching Pipedrive files:`, error)
+    return NextResponse.json(
+      {
+        success: false,
+        error: error instanceof Error ? error.message : 'Unknown error occurred',
+      },
+      { status: 500 }
+    )
+  }
+}
@@ -2,15 +2,14 @@ import { createLogger } from '@sim/logger'
 import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
 import { checkInternalAuth } from '@/lib/auth/hybrid'
-import { generateRequestId } from '@/lib/core/utils/request'
-import { getBaseUrl } from '@/lib/core/utils/urls'
-import { StorageService } from '@/lib/uploads'
 import {
-  extractStorageKey,
-  inferContextFromKey,
-  isInternalFileUrl,
-} from '@/lib/uploads/utils/file-utils'
-import { verifyFileAccess } from '@/app/api/files/authorization'
+  secureFetchWithPinnedIP,
+  validateUrlWithDNS,
+} from '@/lib/core/security/input-validation.server'
+import { generateRequestId } from '@/lib/core/utils/request'
+import { RawFileInputSchema } from '@/lib/uploads/utils/file-schemas'
+import { isInternalFileUrl } from '@/lib/uploads/utils/file-utils'
+import { resolveFileInputToUrl } from '@/lib/uploads/utils/file-utils.server'

 export const dynamic = 'force-dynamic'

@@ -18,7 +17,8 @@ const logger = createLogger('PulseParseAPI')

 const PulseParseSchema = z.object({
   apiKey: z.string().min(1, 'API key is required'),
-  filePath: z.string().min(1, 'File path is required'),
+  filePath: z.string().optional(),
+  file: RawFileInputSchema.optional(),
   pages: z.string().optional(),
   extractFigure: z.boolean().optional(),
   figureDescription: z.boolean().optional(),

@@ -51,50 +51,30 @@ export async function POST(request: NextRequest) {
     const validatedData = PulseParseSchema.parse(body)

     logger.info(`[${requestId}] Pulse parse request`, {
+      fileName: validatedData.file?.name,
       filePath: validatedData.filePath,
-      isWorkspaceFile: isInternalFileUrl(validatedData.filePath),
+      isWorkspaceFile: validatedData.filePath ? isInternalFileUrl(validatedData.filePath) : false,
       userId,
     })

-    let fileUrl = validatedData.filePath
-
-    if (isInternalFileUrl(validatedData.filePath)) {
-      try {
-        const storageKey = extractStorageKey(validatedData.filePath)
-        const context = inferContextFromKey(storageKey)
-
-        const hasAccess = await verifyFileAccess(storageKey, userId, undefined, context, false)
-
-        if (!hasAccess) {
-          logger.warn(`[${requestId}] Unauthorized presigned URL generation attempt`, {
-            userId,
-            key: storageKey,
-            context,
-          })
-          return NextResponse.json(
-            {
-              success: false,
-              error: 'File not found',
-            },
-            { status: 404 }
-          )
-        }
-
-        fileUrl = await StorageService.generatePresignedDownloadUrl(storageKey, context, 5 * 60)
-        logger.info(`[${requestId}] Generated presigned URL for ${context} file`)
-      } catch (error) {
-        logger.error(`[${requestId}] Failed to generate presigned URL:`, error)
-        return NextResponse.json(
-          {
-            success: false,
-            error: 'Failed to generate file access URL',
-          },
-          { status: 500 }
-        )
-      }
-    } else if (validatedData.filePath?.startsWith('/')) {
-      const baseUrl = getBaseUrl()
-      fileUrl = `${baseUrl}${validatedData.filePath}`
+    const resolution = await resolveFileInputToUrl({
+      file: validatedData.file,
+      filePath: validatedData.filePath,
+      userId,
+      requestId,
+      logger,
+    })
+
+    if (resolution.error) {
+      return NextResponse.json(
+        { success: false, error: resolution.error.message },
+        { status: resolution.error.status }
+      )
+    }
+
+    const fileUrl = resolution.fileUrl
+    if (!fileUrl) {
+      return NextResponse.json({ success: false, error: 'File input is required' }, { status: 400 })
     }

     const formData = new FormData()

@@ -119,13 +99,36 @@ export async function POST(request: NextRequest) {
       formData.append('chunk_size', String(validatedData.chunkSize))
     }

-    const pulseResponse = await fetch('https://api.runpulse.com/extract', {
+    const pulseEndpoint = 'https://api.runpulse.com/extract'
+    const pulseValidation = await validateUrlWithDNS(pulseEndpoint, 'Pulse API URL')
+    if (!pulseValidation.isValid) {
+      logger.error(`[${requestId}] Pulse API URL validation failed`, {
+        error: pulseValidation.error,
+      })
+      return NextResponse.json(
+        {
+          success: false,
+          error: 'Failed to reach Pulse API',
+        },
+        { status: 502 }
+      )
+    }
+
+    const pulsePayload = new Response(formData)
+    const contentType = pulsePayload.headers.get('content-type') || 'multipart/form-data'
+    const bodyBuffer = Buffer.from(await pulsePayload.arrayBuffer())
+    const pulseResponse = await secureFetchWithPinnedIP(
+      pulseEndpoint,
+      pulseValidation.resolvedIP!,
+      {
       method: 'POST',
       headers: {
         'x-api-key': validatedData.apiKey,
+        'Content-Type': contentType,
       },
-      body: formData,
-    })
+        body: bodyBuffer,
+      }
+    )

     if (!pulseResponse.ok) {
       const errorText = await pulseResponse.text()
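One detail worth noting in the hunk above: the pinned-IP fetch needs a concrete byte body rather than a streaming `FormData`, so the new code round-trips the form through `new Response(formData)`. Per the WHATWG Fetch spec (implemented in Node 18+ via undici), constructing a `Response` from a `FormData` serializes the multipart body and sets a `content-type` header carrying the generated boundary. A rough sketch of the same trick in isolation, assuming a Node 18+ runtime:

```typescript
// Serialize a FormData into (contentType, bytes) so it can be sent through a
// lower-level HTTP client that only accepts a byte body.
async function serializeFormData(form: FormData): Promise<{ contentType: string; body: Buffer }> {
  const carrier = new Response(form)
  // e.g. 'multipart/form-data; boundary=----formdata-...'
  const contentType = carrier.headers.get('content-type') ?? 'multipart/form-data'
  const body = Buffer.from(await carrier.arrayBuffer())
  return { contentType, body }
}
```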
@@ -2,15 +2,14 @@ import { createLogger } from '@sim/logger'
 import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
 import { checkInternalAuth } from '@/lib/auth/hybrid'
-import { generateRequestId } from '@/lib/core/utils/request'
-import { getBaseUrl } from '@/lib/core/utils/urls'
-import { StorageService } from '@/lib/uploads'
 import {
-  extractStorageKey,
-  inferContextFromKey,
-  isInternalFileUrl,
-} from '@/lib/uploads/utils/file-utils'
-import { verifyFileAccess } from '@/app/api/files/authorization'
+  secureFetchWithPinnedIP,
+  validateUrlWithDNS,
+} from '@/lib/core/security/input-validation.server'
+import { generateRequestId } from '@/lib/core/utils/request'
+import { RawFileInputSchema } from '@/lib/uploads/utils/file-schemas'
+import { isInternalFileUrl } from '@/lib/uploads/utils/file-utils'
+import { resolveFileInputToUrl } from '@/lib/uploads/utils/file-utils.server'

 export const dynamic = 'force-dynamic'

@@ -18,7 +17,8 @@ const logger = createLogger('ReductoParseAPI')

 const ReductoParseSchema = z.object({
   apiKey: z.string().min(1, 'API key is required'),
-  filePath: z.string().min(1, 'File path is required'),
+  filePath: z.string().optional(),
+  file: RawFileInputSchema.optional(),
   pages: z.array(z.number()).optional(),
   tableOutputFormat: z.enum(['html', 'md']).optional(),
 })

@@ -47,56 +47,30 @@ export async function POST(request: NextRequest) {
     const validatedData = ReductoParseSchema.parse(body)

     logger.info(`[${requestId}] Reducto parse request`, {
+      fileName: validatedData.file?.name,
       filePath: validatedData.filePath,
-      isWorkspaceFile: isInternalFileUrl(validatedData.filePath),
+      isWorkspaceFile: validatedData.filePath ? isInternalFileUrl(validatedData.filePath) : false,
       userId,
     })

-    let fileUrl = validatedData.filePath
-
-    if (isInternalFileUrl(validatedData.filePath)) {
-      try {
-        const storageKey = extractStorageKey(validatedData.filePath)
-        const context = inferContextFromKey(storageKey)
-
-        const hasAccess = await verifyFileAccess(
-          storageKey,
-          userId,
-          undefined, // customConfig
-          context, // context
-          false // isLocal
-        )
-
-        if (!hasAccess) {
-          logger.warn(`[${requestId}] Unauthorized presigned URL generation attempt`, {
-            userId,
-            key: storageKey,
-            context,
-          })
-          return NextResponse.json(
-            {
-              success: false,
-              error: 'File not found',
-            },
-            { status: 404 }
-          )
-        }
-
-        fileUrl = await StorageService.generatePresignedDownloadUrl(storageKey, context, 5 * 60)
-        logger.info(`[${requestId}] Generated presigned URL for ${context} file`)
-      } catch (error) {
-        logger.error(`[${requestId}] Failed to generate presigned URL:`, error)
-        return NextResponse.json(
-          {
-            success: false,
-            error: 'Failed to generate file access URL',
-          },
-          { status: 500 }
-        )
-      }
-    } else if (validatedData.filePath?.startsWith('/')) {
-      const baseUrl = getBaseUrl()
-      fileUrl = `${baseUrl}${validatedData.filePath}`
+    const resolution = await resolveFileInputToUrl({
+      file: validatedData.file,
+      filePath: validatedData.filePath,
+      userId,
+      requestId,
+      logger,
+    })
+
+    if (resolution.error) {
+      return NextResponse.json(
+        { success: false, error: resolution.error.message },
+        { status: resolution.error.status }
+      )
+    }
+
+    const fileUrl = resolution.fileUrl
+    if (!fileUrl) {
+      return NextResponse.json({ success: false, error: 'File input is required' }, { status: 400 })
     }

     const reductoBody: Record<string, unknown> = {

@@ -104,8 +78,13 @@ export async function POST(request: NextRequest) {
     }

     if (validatedData.pages && validatedData.pages.length > 0) {
+      // Reducto API expects page_range as an object with start/end, not an array
+      const pages = validatedData.pages
       reductoBody.settings = {
-        page_range: validatedData.pages,
+        page_range: {
+          start: Math.min(...pages),
+          end: Math.max(...pages),
+        },
       }
     }

@@ -115,7 +94,25 @@ export async function POST(request: NextRequest) {
       }
     }

-    const reductoResponse = await fetch('https://platform.reducto.ai/parse', {
+    const reductoEndpoint = 'https://platform.reducto.ai/parse'
+    const reductoValidation = await validateUrlWithDNS(reductoEndpoint, 'Reducto API URL')
+    if (!reductoValidation.isValid) {
+      logger.error(`[${requestId}] Reducto API URL validation failed`, {
+        error: reductoValidation.error,
+      })
+      return NextResponse.json(
+        {
+          success: false,
+          error: 'Failed to reach Reducto API',
+        },
+        { status: 502 }
+      )
+    }
+
+    const reductoResponse = await secureFetchWithPinnedIP(
+      reductoEndpoint,
+      reductoValidation.resolvedIP!,
+      {
       method: 'POST',
       headers: {
         'Content-Type': 'application/json',

@@ -123,7 +120,8 @@ export async function POST(request: NextRequest) {
         Authorization: `Bearer ${validatedData.apiKey}`,
       },
       body: JSON.stringify(reductoBody),
-    })
+      }
+    )

     if (!reductoResponse.ok) {
       const errorText = await reductoResponse.text()
@@ -4,6 +4,7 @@ import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
 import { checkInternalAuth } from '@/lib/auth/hybrid'
 import { generateRequestId } from '@/lib/core/utils/request'
+import { RawFileInputSchema } from '@/lib/uploads/utils/file-schemas'
 import { processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
 import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'

@@ -17,7 +18,7 @@ const S3PutObjectSchema = z.object({
   region: z.string().min(1, 'Region is required'),
   bucketName: z.string().min(1, 'Bucket name is required'),
   objectKey: z.string().min(1, 'Object key is required'),
-  file: z.any().optional().nullable(),
+  file: RawFileInputSchema.optional().nullable(),
   content: z.string().optional().nullable(),
   contentType: z.string().optional().nullable(),
   acl: z.string().optional().nullable(),
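A recurring change across these routes is replacing loose `z.any()` file params with `RawFileInputSchema` / `RawFileInputArraySchema` from `@/lib/uploads/utils/file-schemas`. The diff never shows those schemas' definitions; judging only by how the parsed values are consumed (`file.name`, `file.size`, `file.type`, then `downloadFileFromStorage`), a plausible shape might look like the sketch below. The field set here is illustrative, not the actual schema:

```typescript
import { z } from 'zod'

// Illustrative only - the real schemas live in '@/lib/uploads/utils/file-schemas'
// and likely carry more fields (storage keys, URLs, inline base64 data, etc.).
const RawFileInputSchema = z.object({
  name: z.string(),
  type: z.string().optional(),
  size: z.number().optional(),
})

const RawFileInputArraySchema = z.array(RawFileInputSchema)
```

The practical win over `z.array(z.any())` is that malformed file references get rejected at the schema boundary instead of failing deep inside the download/upload logic.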
apps/sim/app/api/tools/sendgrid/send-mail/route.ts (new file, 188 lines)
@@ -0,0 +1,188 @@
+import { createLogger } from '@sim/logger'
+import { type NextRequest, NextResponse } from 'next/server'
+import { z } from 'zod'
+import { checkInternalAuth } from '@/lib/auth/hybrid'
+import { generateRequestId } from '@/lib/core/utils/request'
+import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
+import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
+import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
+
+export const dynamic = 'force-dynamic'
+
+const logger = createLogger('SendGridSendMailAPI')
+
+const SendGridSendMailSchema = z.object({
+  apiKey: z.string().min(1, 'API key is required'),
+  from: z.string().min(1, 'From email is required'),
+  fromName: z.string().optional().nullable(),
+  to: z.string().min(1, 'To email is required'),
+  toName: z.string().optional().nullable(),
+  subject: z.string().optional().nullable(),
+  content: z.string().optional().nullable(),
+  contentType: z.string().optional().nullable(),
+  cc: z.string().optional().nullable(),
+  bcc: z.string().optional().nullable(),
+  replyTo: z.string().optional().nullable(),
+  replyToName: z.string().optional().nullable(),
+  templateId: z.string().optional().nullable(),
+  dynamicTemplateData: z.any().optional().nullable(),
+  attachments: RawFileInputArraySchema.optional().nullable(),
+})
+
+export async function POST(request: NextRequest) {
+  const requestId = generateRequestId()
+
+  try {
+    const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
+
+    if (!authResult.success) {
+      logger.warn(`[${requestId}] Unauthorized SendGrid send attempt: ${authResult.error}`)
+      return NextResponse.json(
+        { success: false, error: authResult.error || 'Authentication required' },
+        { status: 401 }
+      )
+    }
+
+    logger.info(`[${requestId}] Authenticated SendGrid send request via ${authResult.authType}`)
+
+    const body = await request.json()
+    const validatedData = SendGridSendMailSchema.parse(body)
+
+    logger.info(`[${requestId}] Sending SendGrid email`, {
+      to: validatedData.to,
+      subject: validatedData.subject || '(template)',
+      hasAttachments: !!(validatedData.attachments && validatedData.attachments.length > 0),
+      attachmentCount: validatedData.attachments?.length || 0,
+    })
+
+    // Build personalizations
+    const personalizations: Record<string, unknown> = {
+      to: [
+        { email: validatedData.to, ...(validatedData.toName && { name: validatedData.toName }) },
+      ],
+    }
+
+    if (validatedData.cc) {
+      personalizations.cc = [{ email: validatedData.cc }]
+    }
+
+    if (validatedData.bcc) {
+      personalizations.bcc = [{ email: validatedData.bcc }]
+    }
+
+    if (validatedData.templateId && validatedData.dynamicTemplateData) {
+      personalizations.dynamic_template_data =
+        typeof validatedData.dynamicTemplateData === 'string'
+          ? JSON.parse(validatedData.dynamicTemplateData)
+          : validatedData.dynamicTemplateData
+    }
+
+    // Build mail body
+    const mailBody: Record<string, unknown> = {
+      personalizations: [personalizations],
+      from: {
+        email: validatedData.from,
+        ...(validatedData.fromName && { name: validatedData.fromName }),
+      },
+      subject: validatedData.subject,
+    }
+
+    if (validatedData.templateId) {
+      mailBody.template_id = validatedData.templateId
+    } else {
+      mailBody.content = [
+        {
+          type: validatedData.contentType || 'text/plain',
+          value: validatedData.content,
+        },
+      ]
+    }
+
+    if (validatedData.replyTo) {
+      mailBody.reply_to = {
+        email: validatedData.replyTo,
+        ...(validatedData.replyToName && { name: validatedData.replyToName }),
+      }
+    }
+
+    // Process attachments from UserFile objects
+    if (validatedData.attachments && validatedData.attachments.length > 0) {
+      const rawAttachments = validatedData.attachments
+      logger.info(`[${requestId}] Processing ${rawAttachments.length} attachment(s)`)
+
+      const userFiles = processFilesToUserFiles(rawAttachments, requestId, logger)
+
+      if (userFiles.length > 0) {
+        const sendGridAttachments = await Promise.all(
+          userFiles.map(async (file) => {
+            try {
+              logger.info(
+                `[${requestId}] Downloading attachment: ${file.name} (${file.size} bytes)`
+              )
+              const buffer = await downloadFileFromStorage(file, requestId, logger)
+
+              return {
+                content: buffer.toString('base64'),
+                filename: file.name,
+                type: file.type || 'application/octet-stream',
+                disposition: 'attachment',
+              }
+            } catch (error) {
+              logger.error(`[${requestId}] Failed to download attachment ${file.name}:`, error)
+              throw new Error(
+                `Failed to download attachment "${file.name}": ${error instanceof Error ? error.message : 'Unknown error'}`
+              )
+            }
+          })
+        )
+
+        mailBody.attachments = sendGridAttachments
+      }
+    }
+
+    // Send to SendGrid
+    const response = await fetch('https://api.sendgrid.com/v3/mail/send', {
+      method: 'POST',
+      headers: {
+        Authorization: `Bearer ${validatedData.apiKey}`,
+        'Content-Type': 'application/json',
+      },
+      body: JSON.stringify(mailBody),
+    })
+
+    if (!response.ok) {
+      const errorData = await response.json().catch(() => ({}))
+      const errorMessage =
+        errorData.errors?.[0]?.message || errorData.message || 'Failed to send email'
+      logger.error(`[${requestId}] SendGrid API error:`, { status: response.status, errorData })
+      return NextResponse.json({ success: false, error: errorMessage }, { status: response.status })
+    }
+
+    const messageId = response.headers.get('X-Message-Id')
+    logger.info(`[${requestId}] Email sent successfully`, { messageId })
+
+    return NextResponse.json({
+      success: true,
+      output: {
+        success: true,
+        messageId: messageId || undefined,
+        to: validatedData.to,
+        subject: validatedData.subject || '',
+      },
+    })
+  } catch (error) {
+    if (error instanceof z.ZodError) {
+      logger.warn(`[${requestId}] Validation error:`, error.errors)
+      return NextResponse.json(
+        { success: false, error: error.errors[0]?.message || 'Validation failed' },
+        { status: 400 }
+      )
+    }
+
+    logger.error(`[${requestId}] Unexpected error:`, error)
+    return NextResponse.json(
+      { success: false, error: error instanceof Error ? error.message : 'Unknown error' },
+      { status: 500 }
+    )
+  }
+}
@@ -4,6 +4,7 @@ import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
 import { checkInternalAuth } from '@/lib/auth/hybrid'
 import { generateRequestId } from '@/lib/core/utils/request'
+import { getFileExtension, getMimeTypeFromExtension } from '@/lib/uploads/utils/file-utils'
 import { createSftpConnection, getSftp, isPathSafe, sanitizePath } from '@/app/api/tools/sftp/utils'

 export const dynamic = 'force-dynamic'

@@ -111,6 +112,8 @@ export async function POST(request: NextRequest) {

     const buffer = Buffer.concat(chunks)
     const fileName = path.basename(remotePath)
+    const extension = getFileExtension(fileName)
+    const mimeType = getMimeTypeFromExtension(extension)

     let content: string
     if (params.encoding === 'base64') {

@@ -124,6 +127,12 @@ export async function POST(request: NextRequest) {
     return NextResponse.json({
       success: true,
       fileName,
+      file: {
+        name: fileName,
+        mimeType,
+        data: buffer.toString('base64'),
+        size: buffer.length,
+      },
       content,
       size: buffer.length,
       encoding: params.encoding,
@@ -3,6 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
 import { checkInternalAuth } from '@/lib/auth/hybrid'
 import { generateRequestId } from '@/lib/core/utils/request'
+import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
 import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
 import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
 import {

@@ -26,14 +27,7 @@ const UploadSchema = z.object({
   privateKey: z.string().nullish(),
   passphrase: z.string().nullish(),
   remotePath: z.string().min(1, 'Remote path is required'),
-  files: z
-    .union([z.array(z.any()), z.string(), z.number(), z.null(), z.undefined()])
-    .transform((val) => {
-      if (Array.isArray(val)) return val
-      if (val === null || val === undefined || val === '') return undefined
-      return undefined
-    })
-    .nullish(),
+  files: RawFileInputArraySchema.optional().nullable(),
   fileContent: z.string().nullish(),
   fileName: z.string().nullish(),
   overwrite: z.boolean().default(true),
@@ -2,9 +2,12 @@ import { createLogger } from '@sim/logger'
 import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
 import { checkInternalAuth } from '@/lib/auth/hybrid'
+import { secureFetchWithValidation } from '@/lib/core/security/input-validation.server'
 import { generateRequestId } from '@/lib/core/utils/request'
+import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
 import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
 import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
+import type { MicrosoftGraphDriveItem } from '@/tools/onedrive/types'

 export const dynamic = 'force-dynamic'

@@ -16,7 +19,7 @@ const SharepointUploadSchema = z.object({
   driveId: z.string().optional().nullable(),
   folderPath: z.string().optional().nullable(),
   fileName: z.string().optional().nullable(),
-  files: z.array(z.any()).optional().nullable(),
+  files: RawFileInputArraySchema.optional().nullable(),
 })

 export async function POST(request: NextRequest) {

@@ -79,18 +82,23 @@ export async function POST(request: NextRequest) {
     let effectiveDriveId = validatedData.driveId
     if (!effectiveDriveId) {
       logger.info(`[${requestId}] No driveId provided, fetching default drive for site`)
-      const driveResponse = await fetch(
-        `https://graph.microsoft.com/v1.0/sites/${validatedData.siteId}/drive`,
+      const driveUrl = `https://graph.microsoft.com/v1.0/sites/${validatedData.siteId}/drive`
+      const driveResponse = await secureFetchWithValidation(
+        driveUrl,
         {
+          method: 'GET',
           headers: {
             Authorization: `Bearer ${validatedData.accessToken}`,
             Accept: 'application/json',
           },
-        }
+        },
+        'driveUrl'
       )

       if (!driveResponse.ok) {
-        const errorData = await driveResponse.json().catch(() => ({}))
+        const errorData = (await driveResponse.json().catch(() => ({}))) as {
+          error?: { message?: string }
+        }
         logger.error(`[${requestId}] Failed to get default drive:`, errorData)
         return NextResponse.json(
           {

@@ -101,7 +109,7 @@ export async function POST(request: NextRequest) {
         )
       }

-      const driveData = await driveResponse.json()
+      const driveData = (await driveResponse.json()) as { id: string }
       effectiveDriveId = driveData.id
       logger.info(`[${requestId}] Using default drive: ${effectiveDriveId}`)
     }

@@ -145,34 +153,87 @@ export async function POST(request: NextRequest) {

       logger.info(`[${requestId}] Uploading to: ${uploadUrl}`)

-      const uploadResponse = await fetch(uploadUrl, {
+      const uploadResponse = await secureFetchWithValidation(
+        uploadUrl,
+        {
           method: 'PUT',
           headers: {
             Authorization: `Bearer ${validatedData.accessToken}`,
             'Content-Type': userFile.type || 'application/octet-stream',
           },
-        body: new Uint8Array(buffer),
-      })
+          body: buffer,
+        },
+        'uploadUrl'
+      )

       if (!uploadResponse.ok) {
         const errorData = await uploadResponse.json().catch(() => ({}))
         logger.error(`[${requestId}] Failed to upload file ${fileName}:`, errorData)

         if (uploadResponse.status === 409) {
-          logger.warn(`[${requestId}] File ${fileName} already exists, attempting to replace`)
+          // File exists - retry with conflict behavior set to replace
+          logger.warn(`[${requestId}] File ${fileName} already exists, retrying with replace`)
+          const replaceUrl = `${uploadUrl}?@microsoft.graph.conflictBehavior=replace`
+          const replaceResponse = await secureFetchWithValidation(
+            replaceUrl,
+            {
+              method: 'PUT',
+              headers: {
+                Authorization: `Bearer ${validatedData.accessToken}`,
+                'Content-Type': userFile.type || 'application/octet-stream',
+              },
+              body: buffer,
+            },
+            'replaceUrl'
+          )
+
+          if (!replaceResponse.ok) {
+            const replaceErrorData = (await replaceResponse.json().catch(() => ({}))) as {
+              error?: { message?: string }
+            }
+            logger.error(`[${requestId}] Failed to replace file ${fileName}:`, replaceErrorData)
+            return NextResponse.json(
+              {
+                success: false,
+                error: replaceErrorData.error?.message || `Failed to replace file: ${fileName}`,
+              },
+              { status: replaceResponse.status }
+            )
+          }
+
+          const replaceData = (await replaceResponse.json()) as {
+            id: string
+            name: string
+            webUrl: string
+            size: number
+            createdDateTime: string
+            lastModifiedDateTime: string
+          }
+          logger.info(`[${requestId}] File replaced successfully: ${fileName}`)
+
+          uploadedFiles.push({
+            id: replaceData.id,
+            name: replaceData.name,
+            webUrl: replaceData.webUrl,
+            size: replaceData.size,
+            createdDateTime: replaceData.createdDateTime,
+            lastModifiedDateTime: replaceData.lastModifiedDateTime,
+          })
           continue
         }

         return NextResponse.json(
           {
             success: false,
-            error: errorData.error?.message || `Failed to upload file: ${fileName}`,
+            error:
+              (errorData as { error?: { message?: string } }).error?.message ||
+              `Failed to upload file: ${fileName}`,
           },
           { status: uploadResponse.status }
         )
       }

-      const uploadData = await uploadResponse.json()
+      const uploadData = (await uploadResponse.json()) as MicrosoftGraphDriveItem
       logger.info(`[${requestId}] File uploaded successfully: ${fileName}`)

       uploadedFiles.push({
apps/sim/app/api/tools/slack/download/route.ts (new file, 170 lines)
@@ -0,0 +1,170 @@
+import { createLogger } from '@sim/logger'
+import { type NextRequest, NextResponse } from 'next/server'
+import { z } from 'zod'
+import { checkInternalAuth } from '@/lib/auth/hybrid'
+import {
+  secureFetchWithPinnedIP,
+  validateUrlWithDNS,
+} from '@/lib/core/security/input-validation.server'
+import { generateRequestId } from '@/lib/core/utils/request'
+
+export const dynamic = 'force-dynamic'
+
+const logger = createLogger('SlackDownloadAPI')
+
+const SlackDownloadSchema = z.object({
+  accessToken: z.string().min(1, 'Access token is required'),
+  fileId: z.string().min(1, 'File ID is required'),
+  fileName: z.string().optional().nullable(),
+})
+
+export async function POST(request: NextRequest) {
+  const requestId = generateRequestId()
+
+  try {
+    const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
+
+    if (!authResult.success) {
+      logger.warn(`[${requestId}] Unauthorized Slack download attempt: ${authResult.error}`)
+      return NextResponse.json(
+        {
+          success: false,
+          error: authResult.error || 'Authentication required',
+        },
+        { status: 401 }
+      )
+    }
+
+    logger.info(`[${requestId}] Authenticated Slack download request via ${authResult.authType}`, {
+      userId: authResult.userId,
+    })
+
+    const body = await request.json()
+    const validatedData = SlackDownloadSchema.parse(body)
+
+    const { accessToken, fileId, fileName } = validatedData
+
+    logger.info(`[${requestId}] Getting file info from Slack`, { fileId })
+
+    const infoResponse = await fetch(`https://slack.com/api/files.info?file=${fileId}`, {
+      method: 'GET',
+      headers: {
+        Authorization: `Bearer ${accessToken}`,
+      },
+    })
+
+    if (!infoResponse.ok) {
+      const errorDetails = await infoResponse.json().catch(() => ({}))
+      logger.error(`[${requestId}] Failed to get file info from Slack`, {
+        status: infoResponse.status,
+        statusText: infoResponse.statusText,
+        error: errorDetails,
+      })
+      return NextResponse.json(
+        {
+          success: false,
+          error: errorDetails.error || 'Failed to get file info',
+        },
+        { status: 400 }
+      )
+    }
+
+    const data = await infoResponse.json()
+
+    if (!data.ok) {
+      logger.error(`[${requestId}] Slack API returned error`, { error: data.error })
+      return NextResponse.json(
+        {
+          success: false,
+          error: data.error || 'Slack API error',
+        },
+        { status: 400 }
+      )
+    }
+
+    const file = data.file
+    const resolvedFileName = fileName || file.name || 'download'
+    const mimeType = file.mimetype || 'application/octet-stream'
+    const urlPrivate = file.url_private
+
+    if (!urlPrivate) {
+      return NextResponse.json(
+        {
+          success: false,
+          error: 'File does not have a download URL',
+        },
+        { status: 400 }
+      )
+    }
+
+    const urlValidation = await validateUrlWithDNS(urlPrivate, 'urlPrivate')
+    if (!urlValidation.isValid) {
+      return NextResponse.json(
+        {
+          success: false,
+          error: urlValidation.error,
+        },
+        { status: 400 }
+      )
+    }
+
+    logger.info(`[${requestId}] Downloading file from Slack`, {
+      fileId,
+      fileName: resolvedFileName,
+      mimeType,
+    })
+
+    const downloadResponse = await secureFetchWithPinnedIP(urlPrivate, urlValidation.resolvedIP!, {
+      headers: {
+        Authorization: `Bearer ${accessToken}`,
+      },
+    })
+
+    if (!downloadResponse.ok) {
+      logger.error(`[${requestId}] Failed to download file content`, {
+        status: downloadResponse.status,
+        statusText: downloadResponse.statusText,
+      })
+      return NextResponse.json(
+        {
+          success: false,
+          error: 'Failed to download file content',
+        },
+        { status: 400 }
+      )
+    }
+
+    const arrayBuffer = await downloadResponse.arrayBuffer()
+    const fileBuffer = Buffer.from(arrayBuffer)
+
+    logger.info(`[${requestId}] File downloaded successfully`, {
+      fileId,
+      name: resolvedFileName,
+      size: fileBuffer.length,
+      mimeType,
+    })
+
+    const base64Data = fileBuffer.toString('base64')
+
+    return NextResponse.json({
+      success: true,
+      output: {
+        file: {
+          name: resolvedFileName,
+          mimeType,
+          data: base64Data,
+          size: fileBuffer.length,
+        },
+      },
+    })
+  } catch (error) {
+    logger.error(`[${requestId}] Error downloading Slack file:`, error)
+    return NextResponse.json(
+      {
+        success: false,
+        error: error instanceof Error ? error.message : 'Unknown error occurred',
+      },
+      { status: 500 }
+    )
+  }
+}
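This route returns the downloaded file inline as base64 rather than streaming it, which keeps the payload JSON-serializable for workflow blocks. A hypothetical consumer (not part of this diff) would decode it back into bytes; the interface below just mirrors the `output.file` shape shown above:

```typescript
// Hypothetical consumer of the download route's response shape shown above.
interface SlackDownloadOutput {
  file: { name: string; mimeType: string; data: string; size: number }
}

function toBuffer(output: SlackDownloadOutput): Buffer {
  const bytes = Buffer.from(output.file.data, 'base64')
  // Sanity check: decoded length should match the reported size
  if (bytes.length !== output.file.size) {
    throw new Error(`Size mismatch for ${output.file.name}`)
  }
  return bytes
}
```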
@@ -3,6 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
 import { checkInternalAuth } from '@/lib/auth/hybrid'
 import { generateRequestId } from '@/lib/core/utils/request'
+import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
 import { sendSlackMessage } from '../utils'

 export const dynamic = 'force-dynamic'

@@ -16,7 +17,7 @@ const SlackSendMessageSchema = z
     userId: z.string().optional().nullable(),
     text: z.string().min(1, 'Message text is required'),
     thread_ts: z.string().optional().nullable(),
-    files: z.array(z.any()).optional().nullable(),
+    files: RawFileInputArraySchema.optional().nullable(),
   })
   .refine((data) => data.channel || data.userId, {
     message: 'Either channel or userId is required',
@@ -1,6 +1,8 @@
 import type { Logger } from '@sim/logger'
+import { secureFetchWithValidation } from '@/lib/core/security/input-validation.server'
 import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
 import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
+import type { ToolFileData } from '@/tools/types'
 
 /**
  * Sends a message to a Slack channel using chat.postMessage
@@ -70,9 +72,10 @@ export async function uploadFilesToSlack(
   accessToken: string,
   requestId: string,
   logger: Logger
-): Promise<string[]> {
+): Promise<{ fileIds: string[]; files: ToolFileData[] }> {
   const userFiles = processFilesToUserFiles(files, requestId, logger)
   const uploadedFileIds: string[] = []
+  const uploadedFiles: ToolFileData[] = []
 
   for (const userFile of userFiles) {
     logger.info(`[${requestId}] Uploading file: ${userFile.name}`)
@@ -100,10 +103,14 @@ export async function uploadFilesToSlack(
 
     logger.info(`[${requestId}] Got upload URL for ${userFile.name}, file_id: ${urlData.file_id}`)
 
-    const uploadResponse = await fetch(urlData.upload_url, {
-      method: 'POST',
-      body: new Uint8Array(buffer),
-    })
+    const uploadResponse = await secureFetchWithValidation(
+      urlData.upload_url,
+      {
+        method: 'POST',
+        body: buffer,
+      },
+      'uploadUrl'
+    )
 
     if (!uploadResponse.ok) {
       logger.error(`[${requestId}] Failed to upload file data: ${uploadResponse.status}`)
@@ -112,9 +119,16 @@ export async function uploadFilesToSlack(
 
     logger.info(`[${requestId}] File data uploaded successfully`)
     uploadedFileIds.push(urlData.file_id)
+    // Only add to uploadedFiles after successful upload to keep arrays in sync
+    uploadedFiles.push({
+      name: userFile.name,
+      mimeType: userFile.type || 'application/octet-stream',
+      data: buffer.toString('base64'),
+      size: buffer.length,
+    })
   }
 
-  return uploadedFileIds
+  return { fileIds: uploadedFileIds, files: uploadedFiles }
 }
 
 /**
@@ -124,7 +138,8 @@ export async function completeSlackFileUpload(
   uploadedFileIds: string[],
   channel: string,
   text: string,
-  accessToken: string
+  accessToken: string,
+  threadTs?: string | null
 ): Promise<{ ok: boolean; files?: any[]; error?: string }> {
   const response = await fetch('https://slack.com/api/files.completeUploadExternal', {
     method: 'POST',
@@ -136,6 +151,7 @@ export async function completeSlackFileUpload(
       files: uploadedFileIds.map((id) => ({ id })),
       channel_id: channel,
       initial_comment: text,
+      ...(threadTs && { thread_ts: threadTs }),
     }),
   })
 
@@ -217,7 +233,13 @@ export async function sendSlackMessage(
   logger: Logger
 ): Promise<{
   success: boolean
-  output?: { message: any; ts: string; channel: string; fileCount?: number }
+  output?: {
+    message: any
+    ts: string
+    channel: string
+    fileCount?: number
+    files?: ToolFileData[]
+  }
   error?: string
 }> {
   const { accessToken, text, threadTs, files } = params
@@ -249,10 +271,15 @@ export async function sendSlackMessage(
 
     // Process files
     logger.info(`[${requestId}] Processing ${files.length} file(s)`)
-    const uploadedFileIds = await uploadFilesToSlack(files, accessToken, requestId, logger)
+    const { fileIds, files: uploadedFiles } = await uploadFilesToSlack(
+      files,
+      accessToken,
+      requestId,
+      logger
+    )
 
     // No valid files uploaded - send text-only
-    if (uploadedFileIds.length === 0) {
+    if (fileIds.length === 0) {
       logger.warn(`[${requestId}] No valid files to upload, sending text-only message`)
 
       const data = await postSlackMessage(accessToken, channel, text, threadTs)
@@ -264,8 +291,8 @@ export async function sendSlackMessage(
       return { success: true, output: formatMessageSuccessResponse(data, text) }
     }
 
-    // Complete file upload
-    const completeData = await completeSlackFileUpload(uploadedFileIds, channel, text, accessToken)
+    // Complete file upload with thread support
+    const completeData = await completeSlackFileUpload(fileIds, channel, text, accessToken, threadTs)
 
     if (!completeData.ok) {
       logger.error(`[${requestId}] Failed to complete upload:`, completeData.error)
@@ -282,7 +309,8 @@ export async function sendSlackMessage(
         message: fileMessage,
         ts: fileMessage.ts,
         channel,
-        fileCount: uploadedFileIds.length,
+        fileCount: fileIds.length,
+        files: uploadedFiles,
       },
     }
   }
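Two details in the utils diff are easy to miss. First, `uploadedFiles.push` sits in the same success branch as `uploadedFileIds.push`, so `fileIds[i]` always describes `files[i]`. Second, `completeSlackFileUpload` attaches the thread timestamp with a conditional spread; a self-contained illustration of that idiom (the channel id is hypothetical):

```typescript
// Conditional spread, as in completeSlackFileUpload above: when threadTs is
// null or undefined, spreading the falsy operand adds nothing, so thread_ts
// is omitted from the payload entirely rather than sent as null.
function buildPayload(channel: string, text: string, threadTs?: string | null) {
  return {
    channel_id: channel, // hypothetical channel id in the examples below
    initial_comment: text,
    ...(threadTs && { thread_ts: threadTs }),
  }
}

console.log('thread_ts' in buildPayload('C012345', 'hi')) // false
console.log('thread_ts' in buildPayload('C012345', 'hi', '1712345678.000100')) // true
```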
@@ -4,6 +4,7 @@ import nodemailer from 'nodemailer'
 import { z } from 'zod'
 import { checkInternalAuth } from '@/lib/auth/hybrid'
 import { generateRequestId } from '@/lib/core/utils/request'
+import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
 import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
 import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
 
@@ -28,7 +29,7 @@ const SmtpSendSchema = z.object({
   cc: z.string().optional().nullable(),
   bcc: z.string().optional().nullable(),
   replyTo: z.string().optional().nullable(),
-  attachments: z.array(z.any()).optional().nullable(),
+  attachments: RawFileInputArraySchema.optional().nullable(),
 })
 
 export async function POST(request: NextRequest) {
@@ -5,6 +5,7 @@ import { type NextRequest, NextResponse } from 'next/server'
 import type { Client, SFTPWrapper } from 'ssh2'
 import { z } from 'zod'
 import { checkInternalAuth } from '@/lib/auth/hybrid'
+import { getFileExtension, getMimeTypeFromExtension } from '@/lib/uploads/utils/file-utils'
 import { createSSHConnection, sanitizePath } from '@/app/api/tools/ssh/utils'
 
 const logger = createLogger('SSHDownloadFileAPI')
@@ -79,6 +80,16 @@ export async function POST(request: NextRequest) {
       })
     })
 
+    // Check file size limit (50MB to prevent memory exhaustion)
+    const maxSize = 50 * 1024 * 1024
+    if (stats.size > maxSize) {
+      const sizeMB = (stats.size / (1024 * 1024)).toFixed(2)
+      return NextResponse.json(
+        { error: `File size (${sizeMB}MB) exceeds download limit of 50MB` },
+        { status: 400 }
+      )
+    }
+
     // Read file content
     const content = await new Promise<Buffer>((resolve, reject) => {
       const chunks: Buffer[] = []
@@ -96,6 +107,8 @@ export async function POST(request: NextRequest) {
     })
 
     const fileName = path.basename(remotePath)
+    const extension = getFileExtension(fileName)
+    const mimeType = getMimeTypeFromExtension(extension)
 
     // Encode content as base64 for binary safety
     const base64Content = content.toString('base64')
@@ -104,6 +117,12 @@ export async function POST(request: NextRequest) {
 
     return NextResponse.json({
       downloaded: true,
+      file: {
+        name: fileName,
+        mimeType,
+        data: base64Content,
+        size: stats.size,
+      },
       content: base64Content,
       fileName: fileName,
       remotePath: remotePath,
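The size guard added above runs on the SFTP `stat` result before any bytes are buffered, so an oversized file is rejected without allocating memory for it. The same check as a standalone helper, a sketch assuming a byte count obtained from `stat`:

```typescript
// Reject oversized downloads up front, using the size reported by stat
// rather than counting bytes after the fact. 50MB mirrors the route above.
const MAX_DOWNLOAD_BYTES = 50 * 1024 * 1024

function assertDownloadable(sizeBytes: number): void {
  if (sizeBytes > MAX_DOWNLOAD_BYTES) {
    const sizeMB = (sizeBytes / (1024 * 1024)).toFixed(2)
    throw new Error(`File size (${sizeMB}MB) exceeds download limit of 50MB`)
  }
}
```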
@@ -3,6 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
 import { checkInternalAuth } from '@/lib/auth/hybrid'
 import { env } from '@/lib/core/config/env'
+import { validateUrlWithDNS } from '@/lib/core/security/input-validation.server'
 import { isSensitiveKey, REDACTED_MARKER } from '@/lib/core/security/redaction'
 import { ensureZodObject, normalizeUrl } from '@/app/api/tools/stagehand/utils'
 
@@ -123,6 +124,10 @@ export async function POST(request: NextRequest) {
     const variablesObject = processVariables(params.variables)
 
     const startUrl = normalizeUrl(rawStartUrl)
+    const urlValidation = await validateUrlWithDNS(startUrl, 'startUrl')
+    if (!urlValidation.isValid) {
+      return NextResponse.json({ error: urlValidation.error }, { status: 400 })
+    }
 
     logger.info('Starting Stagehand agent process', {
       rawStartUrl,
@@ -3,6 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
 import { checkInternalAuth } from '@/lib/auth/hybrid'
 import { env } from '@/lib/core/config/env'
+import { validateUrlWithDNS } from '@/lib/core/security/input-validation.server'
 import { ensureZodObject, normalizeUrl } from '@/app/api/tools/stagehand/utils'
 
 const logger = createLogger('StagehandExtractAPI')
@@ -51,6 +52,10 @@ export async function POST(request: NextRequest) {
     const params = validationResult.data
     const { url: rawUrl, instruction, selector, provider, apiKey, schema } = params
     const url = normalizeUrl(rawUrl)
+    const urlValidation = await validateUrlWithDNS(url, 'url')
+    if (!urlValidation.isValid) {
+      return NextResponse.json({ error: urlValidation.error }, { status: 400 })
+    }
 
     logger.info('Starting Stagehand extraction process', {
       rawUrl,
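Both Stagehand routes, and the STT, Textract, Twilio, and vision changes below, apply the same two-step SSRF guard: validate and DNS-resolve the URL first, then fetch against the pinned IP so a DNS rebind between check and use cannot redirect the request. The idiom, with signatures inferred from the call sites in these diffs:

```typescript
// Two-step SSRF guard as used across these routes. validateUrlWithDNS
// resolves the hostname (and, presumably, rejects private and loopback
// targets); the fetch is then pinned to the IP that passed validation.
// Signatures are inferred from the call sites above and below.
const urlValidation = await validateUrlWithDNS(url, 'url')
if (!urlValidation.isValid) {
  return NextResponse.json({ error: urlValidation.error }, { status: 400 })
}
const response = await secureFetchWithPinnedIP(url, urlValidation.resolvedIP!, {
  method: 'GET',
})
```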
@@ -2,7 +2,16 @@ import { createLogger } from '@sim/logger'
 import { type NextRequest, NextResponse } from 'next/server'
 import { extractAudioFromVideo, isVideoFile } from '@/lib/audio/extractor'
 import { checkInternalAuth } from '@/lib/auth/hybrid'
-import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
+import { DEFAULT_EXECUTION_TIMEOUT_MS } from '@/lib/core/execution-limits'
+import {
+  secureFetchWithPinnedIP,
+  validateUrlWithDNS,
+} from '@/lib/core/security/input-validation.server'
+import { getMimeTypeFromExtension, isInternalFileUrl } from '@/lib/uploads/utils/file-utils'
+import {
+  downloadFileFromStorage,
+  resolveInternalFileUrl,
+} from '@/lib/uploads/utils/file-utils.server'
 import type { UserFile } from '@/executor/types'
 import type { TranscriptSegment } from '@/tools/stt/types'
 
@@ -45,6 +54,7 @@ export async function POST(request: NextRequest) {
       return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
     }
 
+    const userId = authResult.userId
     const body: SttRequestBody = await request.json()
     const {
       provider,
@@ -72,13 +82,25 @@ export async function POST(request: NextRequest) {
     let audioMimeType: string
 
     if (body.audioFile) {
+      if (Array.isArray(body.audioFile) && body.audioFile.length !== 1) {
+        return NextResponse.json({ error: 'audioFile must be a single file' }, { status: 400 })
+      }
       const file = Array.isArray(body.audioFile) ? body.audioFile[0] : body.audioFile
       logger.info(`[${requestId}] Processing uploaded file: ${file.name}`)
 
       audioBuffer = await downloadFileFromStorage(file, requestId, logger)
       audioFileName = file.name
-      audioMimeType = file.type
+      // file.type may be missing if the file came from a block that doesn't preserve it
+      // Infer from filename extension as fallback
+      const ext = file.name.split('.').pop()?.toLowerCase() || ''
+      audioMimeType = file.type || getMimeTypeFromExtension(ext)
     } else if (body.audioFileReference) {
+      if (Array.isArray(body.audioFileReference) && body.audioFileReference.length !== 1) {
+        return NextResponse.json(
+          { error: 'audioFileReference must be a single file' },
+          { status: 400 }
+        )
+      }
       const file = Array.isArray(body.audioFileReference)
         ? body.audioFileReference[0]
         : body.audioFileReference
@@ -86,18 +108,54 @@ export async function POST(request: NextRequest) {
 
       audioBuffer = await downloadFileFromStorage(file, requestId, logger)
       audioFileName = file.name
-      audioMimeType = file.type
+
+      const ext = file.name.split('.').pop()?.toLowerCase() || ''
+      audioMimeType = file.type || getMimeTypeFromExtension(ext)
     } else if (body.audioUrl) {
       logger.info(`[${requestId}] Downloading from URL: ${body.audioUrl}`)
 
-      const response = await fetch(body.audioUrl)
+      let audioUrl = body.audioUrl.trim()
+      if (audioUrl.startsWith('/') && !isInternalFileUrl(audioUrl)) {
+        return NextResponse.json(
+          {
+            error: 'Invalid file path. Only uploaded files are supported for internal paths.',
+          },
+          { status: 400 }
+        )
+      }
+
+      if (isInternalFileUrl(audioUrl)) {
+        if (!userId) {
+          return NextResponse.json(
+            { error: 'Authentication required for internal file access' },
+            { status: 401 }
+          )
+        }
+        const resolution = await resolveInternalFileUrl(audioUrl, userId, requestId, logger)
+        if (resolution.error) {
+          return NextResponse.json(
+            { error: resolution.error.message },
+            { status: resolution.error.status }
+          )
+        }
+        audioUrl = resolution.fileUrl || audioUrl
+      }
+
+      const urlValidation = await validateUrlWithDNS(audioUrl, 'audioUrl')
+      if (!urlValidation.isValid) {
+        return NextResponse.json({ error: urlValidation.error }, { status: 400 })
+      }
+
+      const response = await secureFetchWithPinnedIP(audioUrl, urlValidation.resolvedIP!, {
+        method: 'GET',
+      })
       if (!response.ok) {
        throw new Error(`Failed to download audio from URL: ${response.statusText}`)
       }
 
       const arrayBuffer = await response.arrayBuffer()
       audioBuffer = Buffer.from(arrayBuffer)
-      audioFileName = body.audioUrl.split('/').pop() || 'audio_file'
+      audioFileName = audioUrl.split('/').pop() || 'audio_file'
       audioMimeType = response.headers.get('content-type') || 'audio/mpeg'
     } else {
       return NextResponse.json(
@@ -149,7 +207,9 @@ export async function POST(request: NextRequest) {
         translateToEnglish,
         model,
         body.prompt,
-        body.temperature
+        body.temperature,
+        audioMimeType,
+        audioFileName
       )
       transcript = result.transcript
       segments = result.segments
@@ -162,7 +222,8 @@ export async function POST(request: NextRequest) {
         language,
         timestamps,
         diarization,
-        model
+        model,
+        audioMimeType
       )
       transcript = result.transcript
       segments = result.segments
@@ -252,7 +313,9 @@ async function transcribeWithWhisper(
   translate?: boolean,
   model?: string,
   prompt?: string,
-  temperature?: number
+  temperature?: number,
+  mimeType?: string,
+  fileName?: string
 ): Promise<{
   transcript: string
   segments?: TranscriptSegment[]
@@ -261,8 +324,11 @@ async function transcribeWithWhisper(
 }> {
   const formData = new FormData()
 
-  const blob = new Blob([new Uint8Array(audioBuffer)], { type: 'audio/mpeg' })
-  formData.append('file', blob, 'audio.mp3')
+  // Use actual MIME type and filename if provided
+  const actualMimeType = mimeType || 'audio/mpeg'
+  const actualFileName = fileName || 'audio.mp3'
+  const blob = new Blob([new Uint8Array(audioBuffer)], { type: actualMimeType })
+  formData.append('file', blob, actualFileName)
   formData.append('model', model || 'whisper-1')
 
   if (language && language !== 'auto') {
@@ -279,10 +345,11 @@ async function transcribeWithWhisper(
 
   formData.append('response_format', 'verbose_json')
 
+  // OpenAI API uses array notation for timestamp_granularities
   if (timestamps === 'word') {
-    formData.append('timestamp_granularities', 'word')
+    formData.append('timestamp_granularities[]', 'word')
   } else if (timestamps === 'sentence') {
-    formData.append('timestamp_granularities', 'segment')
+    formData.append('timestamp_granularities[]', 'segment')
  }
 
   const endpoint = translate ? 'translations' : 'transcriptions'
@@ -325,7 +392,8 @@ async function transcribeWithDeepgram(
   language?: string,
   timestamps?: 'none' | 'sentence' | 'word',
   diarization?: boolean,
-  model?: string
+  model?: string,
+  mimeType?: string
 ): Promise<{
   transcript: string
   segments?: TranscriptSegment[]
@@ -357,7 +425,7 @@ async function transcribeWithDeepgram(
     method: 'POST',
     headers: {
       Authorization: `Token ${apiKey}`,
-      'Content-Type': 'audio/mpeg',
+      'Content-Type': mimeType || 'audio/mpeg',
     },
     body: new Uint8Array(audioBuffer),
   })
@@ -513,7 +581,8 @@ async function transcribeWithAssemblyAI(
     audio_url: upload_url,
   }
 
-  if (model === 'best' || model === 'nano') {
+  // AssemblyAI supports 'best', 'slam-1', or 'universal' for speech_model
+  if (model === 'best' || model === 'slam-1' || model === 'universal') {
     transcriptRequest.speech_model = model
   }
 
@@ -568,7 +637,8 @@ async function transcribeWithAssemblyAI(
 
   let transcript: any
   let attempts = 0
-  const maxAttempts = 60 // 5 minutes with 5-second intervals
+  const pollIntervalMs = 5000
+  const maxAttempts = Math.ceil(DEFAULT_EXECUTION_TIMEOUT_MS / pollIntervalMs)
 
   while (attempts < maxAttempts) {
     const statusResponse = await fetch(`https://api.assemblyai.com/v2/transcript/${id}`, {
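The AssemblyAI polling change belongs to a broader pattern in this compare (the video route below does the same with `getMaxExecutionTimeout()`): hard-coded attempt counts are replaced by a budget derived from the shared execution timeout. The arithmetic, as a sketch:

```typescript
// Derive poll attempts from the shared execution budget instead of
// hard-coding "60 attempts = 5 minutes". With a 5s interval and, say, a
// 10-minute budget this yields ceil(600000 / 5000) = 120 attempts.
const pollIntervalMs = 5000
const maxAttempts = Math.ceil(DEFAULT_EXECUTION_TIMEOUT_MS / pollIntervalMs)

let attempts = 0
while (attempts < maxAttempts) {
  // ...fetch job status; break (or return) once the job completes...
  attempts++
}
```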
@@ -3,6 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
 import { checkInternalAuth } from '@/lib/auth/hybrid'
 import { generateRequestId } from '@/lib/core/utils/request'
+import { FileInputSchema } from '@/lib/uploads/utils/file-schemas'
 import { processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
 import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
 
@@ -16,7 +17,7 @@ const SupabaseStorageUploadSchema = z.object({
   bucket: z.string().min(1, 'Bucket name is required'),
   fileName: z.string().min(1, 'File name is required'),
   path: z.string().optional().nullable(),
-  fileData: z.any(),
+  fileData: FileInputSchema,
   contentType: z.string().optional().nullable(),
   upsert: z.boolean().optional().default(false),
 })
@@ -3,6 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
 import { checkInternalAuth } from '@/lib/auth/hybrid'
 import { generateRequestId } from '@/lib/core/utils/request'
+import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
 import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
 import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
 import { convertMarkdownToHTML } from '@/tools/telegram/utils'
@@ -14,7 +15,7 @@ const logger = createLogger('TelegramSendDocumentAPI')
 const TelegramSendDocumentSchema = z.object({
   botToken: z.string().min(1, 'Bot token is required'),
   chatId: z.string().min(1, 'Chat ID is required'),
-  files: z.array(z.any()).optional().nullable(),
+  files: RawFileInputArraySchema.optional().nullable(),
   caption: z.string().optional().nullable(),
 })
 
@@ -93,6 +94,14 @@ export async function POST(request: NextRequest) {
     logger.info(`[${requestId}] Uploading document: ${userFile.name}`)
 
     const buffer = await downloadFileFromStorage(userFile, requestId, logger)
+    const filesOutput = [
+      {
+        name: userFile.name,
+        mimeType: userFile.type || 'application/octet-stream',
+        data: buffer.toString('base64'),
+        size: buffer.length,
+      },
+    ]
 
     logger.info(`[${requestId}] Downloaded file: ${buffer.length} bytes`)
 
@@ -135,6 +144,7 @@ export async function POST(request: NextRequest) {
       output: {
         message: 'Document sent successfully',
         data: data.result,
+        files: filesOutput,
       },
     })
   } catch (error) {
@@ -3,19 +3,19 @@ import { createLogger } from '@sim/logger'
 import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
 import { checkInternalAuth } from '@/lib/auth/hybrid'
+import { DEFAULT_EXECUTION_TIMEOUT_MS } from '@/lib/core/execution-limits'
+import { validateAwsRegion, validateS3BucketName } from '@/lib/core/security/input-validation'
 import {
-  validateAwsRegion,
-  validateExternalUrl,
-  validateS3BucketName,
-} from '@/lib/core/security/input-validation'
+  secureFetchWithPinnedIP,
+  validateUrlWithDNS,
+} from '@/lib/core/security/input-validation.server'
 import { generateRequestId } from '@/lib/core/utils/request'
-import { StorageService } from '@/lib/uploads'
+import { RawFileInputSchema } from '@/lib/uploads/utils/file-schemas'
+import { isInternalFileUrl, processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
 import {
-  extractStorageKey,
-  inferContextFromKey,
-  isInternalFileUrl,
-} from '@/lib/uploads/utils/file-utils'
-import { verifyFileAccess } from '@/app/api/files/authorization'
+  downloadFileFromStorage,
+  resolveInternalFileUrl,
+} from '@/lib/uploads/utils/file-utils.server'
 
 export const dynamic = 'force-dynamic'
 export const maxDuration = 300 // 5 minutes for large multi-page PDF processing
@@ -35,6 +35,7 @@ const TextractParseSchema = z
   region: z.string().min(1, 'AWS region is required'),
   processingMode: z.enum(['sync', 'async']).optional().default('sync'),
   filePath: z.string().optional(),
+  file: RawFileInputSchema.optional(),
   s3Uri: z.string().optional(),
   featureTypes: z
     .array(z.enum(['TABLES', 'FORMS', 'QUERIES', 'SIGNATURES', 'LAYOUT']))
@@ -50,6 +51,20 @@ const TextractParseSchema = z
         path: ['region'],
       })
     }
+    if (data.processingMode === 'async' && !data.s3Uri) {
+      ctx.addIssue({
+        code: z.ZodIssueCode.custom,
+        message: 'S3 URI is required for multi-page processing (s3://bucket/key)',
+        path: ['s3Uri'],
+      })
+    }
+    if (data.processingMode !== 'async' && !data.file && !data.filePath) {
+      ctx.addIssue({
+        code: z.ZodIssueCode.custom,
+        message: 'File input is required for single-page processing',
+        path: ['filePath'],
+      })
+    }
   })
 
 function getSignatureKey(
@@ -111,7 +126,14 @@ function signAwsRequest(
 }
 
 async function fetchDocumentBytes(url: string): Promise<{ bytes: string; contentType: string }> {
-  const response = await fetch(url)
+  const urlValidation = await validateUrlWithDNS(url, 'Document URL')
+  if (!urlValidation.isValid) {
+    throw new Error(urlValidation.error || 'Invalid document URL')
+  }
+
+  const response = await secureFetchWithPinnedIP(url, urlValidation.resolvedIP!, {
+    method: 'GET',
+  })
   if (!response.ok) {
     throw new Error(`Failed to fetch document: ${response.statusText}`)
   }
@@ -205,8 +227,8 @@ async function pollForJobCompletion(
   useAnalyzeDocument: boolean,
   requestId: string
 ): Promise<Record<string, unknown>> {
-  const pollIntervalMs = 5000 // 5 seconds between polls
-  const maxPollTimeMs = 180000 // 3 minutes maximum polling time
+  const pollIntervalMs = 5000
+  const maxPollTimeMs = DEFAULT_EXECUTION_TIMEOUT_MS
   const maxAttempts = Math.ceil(maxPollTimeMs / pollIntervalMs)
 
   const getTarget = useAnalyzeDocument
@@ -318,8 +340,8 @@ export async function POST(request: NextRequest) {
 
     logger.info(`[${requestId}] Textract parse request`, {
       processingMode,
-      filePath: validatedData.filePath?.substring(0, 50),
-      s3Uri: validatedData.s3Uri?.substring(0, 50),
+      hasFile: Boolean(validatedData.file),
+      hasS3Uri: Boolean(validatedData.s3Uri),
       featureTypes,
       userId,
     })
@@ -414,59 +436,49 @@ export async function POST(request: NextRequest) {
       })
     }
 
-    if (!validatedData.filePath) {
-      return NextResponse.json(
-        {
-          success: false,
-          error: 'File path is required for single-page processing',
-        },
-        { status: 400 }
-      )
-    }
-
-    let fileUrl = validatedData.filePath
-
-    const isInternalFilePath = validatedData.filePath && isInternalFileUrl(validatedData.filePath)
-
-    if (isInternalFilePath) {
-      try {
-        const storageKey = extractStorageKey(validatedData.filePath)
-        const context = inferContextFromKey(storageKey)
-
-        const hasAccess = await verifyFileAccess(storageKey, userId, undefined, context, false)
-
-        if (!hasAccess) {
-          logger.warn(`[${requestId}] Unauthorized presigned URL generation attempt`, {
-            userId,
-            key: storageKey,
-            context,
-          })
-          return NextResponse.json(
-            {
-              success: false,
-              error: 'File not found',
-            },
-            { status: 404 }
-          )
-        }
-
-        fileUrl = await StorageService.generatePresignedDownloadUrl(storageKey, context, 5 * 60)
-        logger.info(`[${requestId}] Generated presigned URL for ${context} file`)
-      } catch (error) {
-        logger.error(`[${requestId}] Failed to generate presigned URL:`, error)
-        return NextResponse.json(
-          {
-            success: false,
-            error: 'Failed to generate file access URL',
-          },
-          { status: 500 }
-        )
-      }
-    } else if (validatedData.filePath?.startsWith('/')) {
-      // Reject arbitrary absolute paths that don't contain /api/files/serve/
+    let bytes = ''
+    let contentType = 'application/octet-stream'
+    let isPdf = false
+
+    if (validatedData.file) {
+      let userFile
+      try {
+        userFile = processSingleFileToUserFile(validatedData.file, requestId, logger)
+      } catch (error) {
+        return NextResponse.json(
+          {
+            success: false,
+            error: error instanceof Error ? error.message : 'Failed to process file',
+          },
+          { status: 400 }
+        )
+      }
+
+      const buffer = await downloadFileFromStorage(userFile, requestId, logger)
+      bytes = buffer.toString('base64')
+      contentType = userFile.type || 'application/octet-stream'
+      isPdf = contentType.includes('pdf') || userFile.name?.toLowerCase().endsWith('.pdf')
+    } else if (validatedData.filePath) {
+      let fileUrl = validatedData.filePath
+
+      const isInternalFilePath = isInternalFileUrl(fileUrl)
+
+      if (isInternalFilePath) {
+        const resolution = await resolveInternalFileUrl(fileUrl, userId, requestId, logger)
+        if (resolution.error) {
+          return NextResponse.json(
+            {
+              success: false,
+              error: resolution.error.message,
+            },
+            { status: resolution.error.status }
+          )
+        }
+        fileUrl = resolution.fileUrl || fileUrl
+      } else if (fileUrl.startsWith('/')) {
       logger.warn(`[${requestId}] Invalid internal path`, {
         userId,
-        path: validatedData.filePath.substring(0, 50),
+        path: fileUrl.substring(0, 50),
       })
       return NextResponse.json(
         {
@@ -476,7 +488,7 @@ export async function POST(request: NextRequest) {
         { status: 400 }
       )
     } else {
-      const urlValidation = validateExternalUrl(fileUrl, 'Document URL')
+      const urlValidation = await validateUrlWithDNS(fileUrl, 'Document URL')
       if (!urlValidation.isValid) {
         logger.warn(`[${requestId}] SSRF attempt blocked`, {
           userId,
@@ -493,10 +505,19 @@ export async function POST(request: NextRequest) {
       }
     }
 
-    const { bytes, contentType } = await fetchDocumentBytes(fileUrl)
-
-    // Track if this is a PDF for better error messaging
-    const isPdf = contentType.includes('pdf') || fileUrl.toLowerCase().endsWith('.pdf')
+      const fetched = await fetchDocumentBytes(fileUrl)
+      bytes = fetched.bytes
+      contentType = fetched.contentType
+      isPdf = contentType.includes('pdf') || fileUrl.toLowerCase().endsWith('.pdf')
+    } else {
+      return NextResponse.json(
+        {
+          success: false,
+          error: 'File input is required for single-page processing',
+        },
+        { status: 400 }
+      )
+    }
 
     const uri = '/'
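The schema additions near the top of the Textract diff use zod's `superRefine` with `addIssue` to express cross-field requirements: async mode needs an S3 URI, sync mode needs a file or file path. A self-contained sketch of that pattern, trimmed to the two checks added here:

```typescript
import { z } from 'zod'

// Cross-field validation with superRefine, mirroring the checks added to
// TextractParseSchema above: which input is required depends on the mode.
const Schema = z
  .object({
    processingMode: z.enum(['sync', 'async']).optional().default('sync'),
    filePath: z.string().optional(),
    s3Uri: z.string().optional(),
  })
  .superRefine((data, ctx) => {
    if (data.processingMode === 'async' && !data.s3Uri) {
      ctx.addIssue({
        code: z.ZodIssueCode.custom,
        message: 'S3 URI is required for multi-page processing (s3://bucket/key)',
        path: ['s3Uri'],
      })
    }
    if (data.processingMode !== 'async' && !data.filePath) {
      ctx.addIssue({
        code: z.ZodIssueCode.custom,
        message: 'File input is required for single-page processing',
        path: ['filePath'],
      })
    }
  })
```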
@@ -2,6 +2,7 @@ import { createLogger } from '@sim/logger'
 import type { NextRequest } from 'next/server'
 import { NextResponse } from 'next/server'
 import { checkInternalAuth } from '@/lib/auth/hybrid'
+import { DEFAULT_EXECUTION_TIMEOUT_MS } from '@/lib/core/execution-limits'
 import { validateAlphanumericId } from '@/lib/core/security/input-validation'
 import { getBaseUrl } from '@/lib/core/utils/urls'
 import { StorageService } from '@/lib/uploads'
@@ -60,7 +61,7 @@ export async function POST(request: NextRequest) {
       text,
       model_id: modelId,
     }),
-    signal: AbortSignal.timeout(60000),
+    signal: AbortSignal.timeout(DEFAULT_EXECUTION_TIMEOUT_MS),
   })
 
   if (!response.ok) {
apps/sim/app/api/tools/twilio/get-recording/route.ts (new file, 250 lines)
@@ -0,0 +1,250 @@
+import { createLogger } from '@sim/logger'
+import { type NextRequest, NextResponse } from 'next/server'
+import { z } from 'zod'
+import { checkInternalAuth } from '@/lib/auth/hybrid'
+import {
+  secureFetchWithPinnedIP,
+  validateUrlWithDNS,
+} from '@/lib/core/security/input-validation.server'
+import { generateRequestId } from '@/lib/core/utils/request'
+import { getExtensionFromMimeType } from '@/lib/uploads/utils/file-utils'
+
+export const dynamic = 'force-dynamic'
+
+const logger = createLogger('TwilioGetRecordingAPI')
+
+interface TwilioRecordingResponse {
+  sid?: string
+  call_sid?: string
+  duration?: string
+  status?: string
+  channels?: number
+  source?: string
+  price?: string
+  price_unit?: string
+  uri?: string
+  error_code?: number
+  message?: string
+  error_message?: string
+}
+
+interface TwilioErrorResponse {
+  message?: string
+}
+
+interface TwilioTranscription {
+  transcription_text?: string
+  status?: string
+  price?: string
+  price_unit?: string
+}
+
+interface TwilioTranscriptionsResponse {
+  transcriptions?: TwilioTranscription[]
+}
+
+const TwilioGetRecordingSchema = z.object({
+  accountSid: z.string().min(1, 'Account SID is required'),
+  authToken: z.string().min(1, 'Auth token is required'),
+  recordingSid: z.string().min(1, 'Recording SID is required'),
+})
+
+export async function POST(request: NextRequest) {
+  const requestId = generateRequestId()
+
+  try {
+    const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
+
+    if (!authResult.success) {
+      logger.warn(`[${requestId}] Unauthorized Twilio get recording attempt: ${authResult.error}`)
+      return NextResponse.json(
+        {
+          success: false,
+          error: authResult.error || 'Authentication required',
+        },
+        { status: 401 }
+      )
+    }
+
+    const body = await request.json()
+    const validatedData = TwilioGetRecordingSchema.parse(body)
+
+    const { accountSid, authToken, recordingSid } = validatedData
+
+    if (!accountSid.startsWith('AC')) {
+      return NextResponse.json(
+        {
+          success: false,
+          error: `Invalid Account SID format. Account SID must start with "AC" (you provided: ${accountSid.substring(0, 2)}...)`,
+        },
+        { status: 400 }
+      )
+    }
+
+    const twilioAuth = Buffer.from(`${accountSid}:${authToken}`).toString('base64')
+
+    logger.info(`[${requestId}] Getting recording info from Twilio`, { recordingSid })
+
+    const infoUrl = `https://api.twilio.com/2010-04-01/Accounts/${accountSid}/Recordings/${recordingSid}.json`
+    const infoUrlValidation = await validateUrlWithDNS(infoUrl, 'infoUrl')
+    if (!infoUrlValidation.isValid) {
+      return NextResponse.json({ success: false, error: infoUrlValidation.error }, { status: 400 })
+    }
+
+    const infoResponse = await secureFetchWithPinnedIP(infoUrl, infoUrlValidation.resolvedIP!, {
+      method: 'GET',
+      headers: { Authorization: `Basic ${twilioAuth}` },
+    })
+
+    if (!infoResponse.ok) {
+      const errorData = (await infoResponse.json().catch(() => ({}))) as TwilioErrorResponse
+      logger.error(`[${requestId}] Twilio API error`, {
+        status: infoResponse.status,
+        error: errorData,
+      })
+      return NextResponse.json(
+        { success: false, error: errorData.message || `Twilio API error: ${infoResponse.status}` },
+        { status: 400 }
+      )
+    }
+
+    const data = (await infoResponse.json()) as TwilioRecordingResponse
+
+    if (data.error_code) {
+      return NextResponse.json({
+        success: false,
+        output: {
+          success: false,
+          error: data.message || data.error_message || 'Failed to retrieve recording',
+        },
+        error: data.message || data.error_message || 'Failed to retrieve recording',
+      })
+    }
+
+    const baseUrl = 'https://api.twilio.com'
+    const mediaUrl = data.uri ? `${baseUrl}${data.uri.replace('.json', '')}` : undefined
+
+    let transcriptionText: string | undefined
+    let transcriptionStatus: string | undefined
+    let transcriptionPrice: string | undefined
+    let transcriptionPriceUnit: string | undefined
+    let file:
+      | {
+          name: string
+          mimeType: string
+          data: string
+          size: number
+        }
+      | undefined
+
+    try {
+      const transcriptionUrl = `https://api.twilio.com/2010-04-01/Accounts/${accountSid}/Transcriptions.json?RecordingSid=${data.sid}`
+      logger.info(`[${requestId}] Checking for transcriptions`)
+
+      const transcriptionUrlValidation = await validateUrlWithDNS(
+        transcriptionUrl,
+        'transcriptionUrl'
+      )
+      if (transcriptionUrlValidation.isValid) {
+        const transcriptionResponse = await secureFetchWithPinnedIP(
+          transcriptionUrl,
+          transcriptionUrlValidation.resolvedIP!,
+          {
+            method: 'GET',
+            headers: { Authorization: `Basic ${twilioAuth}` },
+          }
+        )
+
+        if (transcriptionResponse.ok) {
+          const transcriptionData =
+            (await transcriptionResponse.json()) as TwilioTranscriptionsResponse
+
+          if (transcriptionData.transcriptions && transcriptionData.transcriptions.length > 0) {
+            const transcription = transcriptionData.transcriptions[0]
+            transcriptionText = transcription.transcription_text
+            transcriptionStatus = transcription.status
+            transcriptionPrice = transcription.price
+            transcriptionPriceUnit = transcription.price_unit
+            logger.info(`[${requestId}] Transcription found`, {
+              status: transcriptionStatus,
+              textLength: transcriptionText?.length,
+            })
+          }
+        }
+      }
+    } catch (error) {
+      logger.warn(`[${requestId}] Failed to fetch transcription:`, error)
+    }
+
+    if (mediaUrl) {
+      try {
+        const mediaUrlValidation = await validateUrlWithDNS(mediaUrl, 'mediaUrl')
+        if (mediaUrlValidation.isValid) {
+          const mediaResponse = await secureFetchWithPinnedIP(
+            mediaUrl,
+            mediaUrlValidation.resolvedIP!,
+            {
+              method: 'GET',
+              headers: { Authorization: `Basic ${twilioAuth}` },
+            }
+          )
+
+          if (mediaResponse.ok) {
+            const contentType =
+              mediaResponse.headers.get('content-type') || 'application/octet-stream'
+            const extension = getExtensionFromMimeType(contentType) || 'dat'
+            const arrayBuffer = await mediaResponse.arrayBuffer()
+            const buffer = Buffer.from(arrayBuffer)
+            const fileName = `${data.sid || recordingSid}.${extension}`
+
+            file = {
+              name: fileName,
+              mimeType: contentType,
+              data: buffer.toString('base64'),
+              size: buffer.length,
+            }
+          }
+        }
+      } catch (error) {
+        logger.warn(`[${requestId}] Failed to download recording media:`, error)
+      }
+    }
+
+    logger.info(`[${requestId}] Twilio recording fetched successfully`, {
+      recordingSid: data.sid,
+      hasFile: !!file,
+      hasTranscription: !!transcriptionText,
+    })
+
+    return NextResponse.json({
+      success: true,
+      output: {
+        success: true,
+        recordingSid: data.sid,
+        callSid: data.call_sid,
+        duration: data.duration ? Number.parseInt(data.duration, 10) : undefined,
+        status: data.status,
+        channels: data.channels,
+        source: data.source,
+        mediaUrl,
+        file,
+        price: data.price,
+        priceUnit: data.price_unit,
+        uri: data.uri,
+        transcriptionText,
+        transcriptionStatus,
+        transcriptionPrice,
+        transcriptionPriceUnit,
+      },
+    })
+  } catch (error) {
+    logger.error(`[${requestId}] Error fetching Twilio recording:`, error)
+    return NextResponse.json(
+      {
+        success: false,
+        error: error instanceof Error ? error.message : 'Unknown error occurred',
+      },
+      { status: 500 }
+    )
+  }
+}
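Note how the new route treats the transcription lookup and the media download as best-effort: each runs in its own `try`/`catch` that only logs a warning, so a failure degrades the response (no `file` or `transcriptionText` field) instead of failing the whole request. Distilled, with a hypothetical `fetchTranscription` standing in for the inline logic:

```typescript
// Hypothetical stand-in for the inline transcription lookup above.
declare function fetchTranscription(recordingSid: string): Promise<string | undefined>

// Best-effort enrichment, as in the Twilio route above: optional data is
// fetched inside its own try/catch so a failure only narrows the output.
async function enrich(recordingSid: string): Promise<{ transcriptionText?: string }> {
  let transcriptionText: string | undefined
  try {
    transcriptionText = await fetchTranscription(recordingSid)
  } catch (error) {
    console.warn(`Failed to fetch transcription for ${recordingSid}:`, error)
  }
  return { transcriptionText } // possibly undefined, never a thrown error
}
```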
@@ -1,6 +1,7 @@
|
|||||||
import { createLogger } from '@sim/logger'
|
import { createLogger } from '@sim/logger'
|
||||||
import { type NextRequest, NextResponse } from 'next/server'
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
|
import { getMaxExecutionTimeout } from '@/lib/core/execution-limits'
|
||||||
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
||||||
import type { UserFile } from '@/executor/types'
|
import type { UserFile } from '@/executor/types'
|
||||||
import type { VideoRequestBody } from '@/tools/video/types'
|
import type { VideoRequestBody } from '@/tools/video/types'
|
||||||
@@ -326,11 +327,12 @@ async function generateWithRunway(
|
|||||||
|
|
||||||
logger.info(`[${requestId}] Runway task created: ${taskId}`)
|
logger.info(`[${requestId}] Runway task created: ${taskId}`)
|
||||||
|
|
||||||
const maxAttempts = 120 // 10 minutes with 5-second intervals
|
const pollIntervalMs = 5000
|
||||||
|
const maxAttempts = Math.ceil(getMaxExecutionTimeout() / pollIntervalMs)
|
||||||
let attempts = 0
|
let attempts = 0
|
||||||
|
|
||||||
while (attempts < maxAttempts) {
|
while (attempts < maxAttempts) {
|
||||||
await sleep(5000) // Poll every 5 seconds
|
await sleep(pollIntervalMs)
|
||||||
|
|
||||||
const statusResponse = await fetch(`https://api.dev.runwayml.com/v1/tasks/${taskId}`, {
|
const statusResponse = await fetch(`https://api.dev.runwayml.com/v1/tasks/${taskId}`, {
|
||||||
headers: {
|
headers: {
|
||||||
@@ -370,7 +372,7 @@ async function generateWithRunway(
|
|||||||
attempts++
|
attempts++
|
||||||
}
|
}
|
||||||
|
|
||||||
throw new Error('Runway generation timed out after 10 minutes')
|
throw new Error('Runway generation timed out')
|
||||||
}
|
}
|
||||||
|
|
||||||
async function generateWithVeo(
|
async function generateWithVeo(
|
||||||
@@ -429,11 +431,12 @@ async function generateWithVeo(
|
|||||||
|
|
||||||
logger.info(`[${requestId}] Veo operation created: ${operationName}`)
|
logger.info(`[${requestId}] Veo operation created: ${operationName}`)
|
||||||
|
|
||||||
const maxAttempts = 60 // 5 minutes with 5-second intervals
|
const pollIntervalMs = 5000
|
||||||
|
const maxAttempts = Math.ceil(getMaxExecutionTimeout() / pollIntervalMs)
|
||||||
let attempts = 0
|
let attempts = 0
|
||||||
|
|
||||||
while (attempts < maxAttempts) {
|
while (attempts < maxAttempts) {
|
||||||
await sleep(5000)
|
await sleep(pollIntervalMs)
|
||||||
|
|
||||||
const statusResponse = await fetch(
|
const statusResponse = await fetch(
|
||||||
`https://generativelanguage.googleapis.com/v1beta/${operationName}`,
|
```diff
       `https://generativelanguage.googleapis.com/v1beta/${operationName}`,
@@ -485,7 +488,7 @@ async function generateWithVeo(
     attempts++
   }
 
-  throw new Error('Veo generation timed out after 5 minutes')
+  throw new Error('Veo generation timed out')
 }
 
 async function generateWithLuma(
@@ -541,11 +544,12 @@ async function generateWithLuma(
 
   logger.info(`[${requestId}] Luma generation created: ${generationId}`)
 
-  const maxAttempts = 120 // 10 minutes
+  const pollIntervalMs = 5000
+  const maxAttempts = Math.ceil(getMaxExecutionTimeout() / pollIntervalMs)
   let attempts = 0
 
   while (attempts < maxAttempts) {
-    await sleep(5000)
+    await sleep(pollIntervalMs)
 
     const statusResponse = await fetch(
       `https://api.lumalabs.ai/dream-machine/v1/generations/${generationId}`,
@@ -592,7 +596,7 @@ async function generateWithLuma(
     attempts++
   }
 
-  throw new Error('Luma generation timed out after 10 minutes')
+  throw new Error('Luma generation timed out')
 }
 
 async function generateWithMiniMax(
@@ -658,14 +662,13 @@ async function generateWithMiniMax(
 
   logger.info(`[${requestId}] MiniMax task created: ${taskId}`)
 
-  // Poll for completion (6-10 minutes typical)
-  const maxAttempts = 120 // 10 minutes with 5-second intervals
+  const pollIntervalMs = 5000
+  const maxAttempts = Math.ceil(getMaxExecutionTimeout() / pollIntervalMs)
   let attempts = 0
 
   while (attempts < maxAttempts) {
-    await sleep(5000)
+    await sleep(pollIntervalMs)
 
-    // Query task status
     const statusResponse = await fetch(
       `https://api.minimax.io/v1/query/video_generation?task_id=${taskId}`,
       {
@@ -743,7 +746,7 @@ async function generateWithMiniMax(
     attempts++
   }
 
-  throw new Error('MiniMax generation timed out after 10 minutes')
+  throw new Error('MiniMax generation timed out')
 }
 
 // Helper function to strip subpaths from Fal.ai model IDs for status/result endpoints
@@ -861,11 +864,12 @@ async function generateWithFalAI(
   // Get base model ID (without subpath) for status and result endpoints
   const baseModelId = getBaseModelId(falModelId)
 
-  const maxAttempts = 96 // 8 minutes with 5-second intervals
+  const pollIntervalMs = 5000
+  const maxAttempts = Math.ceil(getMaxExecutionTimeout() / pollIntervalMs)
   let attempts = 0
 
   while (attempts < maxAttempts) {
-    await sleep(5000)
+    await sleep(pollIntervalMs)
 
     const statusResponse = await fetch(
       `https://queue.fal.run/${baseModelId}/requests/${requestIdFal}/status`,
@@ -938,7 +942,7 @@ async function generateWithFalAI(
     attempts++
   }
 
-  throw new Error('Fal.ai generation timed out after 8 minutes')
+  throw new Error('Fal.ai generation timed out')
 }
 
 function getVideoDimensions(
```
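Every provider branch above replaces its hardcoded per-provider attempt cap with one derived from the shared execution timeout. A minimal sketch of that pattern, assuming `getMaxExecutionTimeout` (named in the diff, but its import path is not shown here) is supplied by the caller:

```typescript
// Hedged sketch: derive the poll budget from the platform timeout
// instead of a hardcoded "N minutes" constant. `check` should return
// null while the job is still pending.
const POLL_INTERVAL_MS = 5000

const sleep = (ms: number) => new Promise((resolve) => setTimeout(resolve, ms))

async function pollUntilDone<T>(
  check: () => Promise<T | null>,
  label: string,
  maxExecutionTimeoutMs: number // assumption: value comes from getMaxExecutionTimeout()
): Promise<T> {
  const maxAttempts = Math.ceil(maxExecutionTimeoutMs / POLL_INTERVAL_MS)
  let attempts = 0
  while (attempts < maxAttempts) {
    await sleep(POLL_INTERVAL_MS)
    const result = await check()
    if (result !== null) return result
    attempts++
  }
  throw new Error(`${label} generation timed out`)
}
```

The upside of this shape is that the generic timeout message no longer has to be kept in sync with a per-provider minute count.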
```diff
@@ -1,10 +1,20 @@
+import { GoogleGenAI } from '@google/genai'
 import { createLogger } from '@sim/logger'
 import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
 import { checkInternalAuth } from '@/lib/auth/hybrid'
+import {
+  secureFetchWithPinnedIP,
+  validateUrlWithDNS,
+} from '@/lib/core/security/input-validation.server'
 import { generateRequestId } from '@/lib/core/utils/request'
-import { processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
-import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
+import { RawFileInputSchema } from '@/lib/uploads/utils/file-schemas'
+import { isInternalFileUrl, processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
+import {
+  downloadFileFromStorage,
+  resolveInternalFileUrl,
+} from '@/lib/uploads/utils/file-utils.server'
+import { convertUsageMetadata, extractTextContent } from '@/providers/google/utils'
 
 export const dynamic = 'force-dynamic'
 
@@ -13,8 +23,8 @@ const logger = createLogger('VisionAnalyzeAPI')
 const VisionAnalyzeSchema = z.object({
   apiKey: z.string().min(1, 'API key is required'),
   imageUrl: z.string().optional().nullable(),
-  imageFile: z.any().optional().nullable(),
-  model: z.string().optional().default('gpt-4o'),
+  imageFile: RawFileInputSchema.optional().nullable(),
+  model: z.string().optional().default('gpt-5.2'),
   prompt: z.string().optional().nullable(),
 })
 
@@ -39,6 +49,7 @@ export async function POST(request: NextRequest) {
       userId: authResult.userId,
     })
 
+    const userId = authResult.userId
     const body = await request.json()
     const validatedData = VisionAnalyzeSchema.parse(body)
 
@@ -77,18 +88,72 @@ export async function POST(request: NextRequest) {
        )
      }
 
+      let base64 = userFile.base64
+      let bufferLength = 0
+      if (!base64) {
       const buffer = await downloadFileFromStorage(userFile, requestId, logger)
-      const base64 = buffer.toString('base64')
+        base64 = buffer.toString('base64')
+        bufferLength = buffer.length
+      }
       const mimeType = userFile.type || 'image/jpeg'
       imageSource = `data:${mimeType};base64,${base64}`
-      logger.info(`[${requestId}] Converted image to base64 (${buffer.length} bytes)`)
+      if (bufferLength > 0) {
+        logger.info(`[${requestId}] Converted image to base64 (${bufferLength} bytes)`)
+      }
+    }
 
+    let imageUrlValidation: Awaited<ReturnType<typeof validateUrlWithDNS>> | null = null
+    if (imageSource && !imageSource.startsWith('data:')) {
+      if (imageSource.startsWith('/') && !isInternalFileUrl(imageSource)) {
+        return NextResponse.json(
+          {
+            success: false,
+            error: 'Invalid file path. Only uploaded files are supported for internal paths.',
+          },
+          { status: 400 }
+        )
+      }
+
+      if (isInternalFileUrl(imageSource)) {
+        if (!userId) {
+          return NextResponse.json(
+            {
+              success: false,
+              error: 'Authentication required for internal file access',
+            },
+            { status: 401 }
+          )
+        }
+        const resolution = await resolveInternalFileUrl(imageSource, userId, requestId, logger)
+        if (resolution.error) {
+          return NextResponse.json(
+            {
+              success: false,
+              error: resolution.error.message,
+            },
+            { status: resolution.error.status }
+          )
+        }
+        imageSource = resolution.fileUrl || imageSource
+      }
+
+      imageUrlValidation = await validateUrlWithDNS(imageSource, 'imageUrl')
+      if (!imageUrlValidation.isValid) {
+        return NextResponse.json(
+          {
+            success: false,
+            error: imageUrlValidation.error,
+          },
+          { status: 400 }
+        )
+      }
     }
 
     const defaultPrompt = 'Please analyze this image and describe what you see in detail.'
     const prompt = validatedData.prompt || defaultPrompt
 
-    const isClaude = validatedData.model.startsWith('claude-3')
+    const isClaude = validatedData.model.startsWith('claude-')
+    const isGemini = validatedData.model.startsWith('gemini-')
     const apiUrl = isClaude
       ? 'https://api.anthropic.com/v1/messages'
       : 'https://api.openai.com/v1/chat/completions'
@@ -106,6 +171,72 @@ export async function POST(request: NextRequest) {
 
     let requestBody: any
 
+    if (isGemini) {
+      let base64Payload = imageSource
+      if (!base64Payload.startsWith('data:')) {
+        const urlValidation =
+          imageUrlValidation || (await validateUrlWithDNS(base64Payload, 'imageUrl'))
+        if (!urlValidation.isValid) {
+          return NextResponse.json({ success: false, error: urlValidation.error }, { status: 400 })
+        }
+
+        const response = await secureFetchWithPinnedIP(base64Payload, urlValidation.resolvedIP!, {
+          method: 'GET',
+        })
+        if (!response.ok) {
+          return NextResponse.json(
+            { success: false, error: 'Failed to fetch image for Gemini' },
+            { status: 400 }
+          )
+        }
+        const contentType =
+          response.headers.get('content-type') || validatedData.imageFile?.type || 'image/jpeg'
+        const arrayBuffer = await response.arrayBuffer()
+        const base64 = Buffer.from(arrayBuffer).toString('base64')
+        base64Payload = `data:${contentType};base64,${base64}`
+      }
+      const base64Marker = ';base64,'
+      const markerIndex = base64Payload.indexOf(base64Marker)
+      if (!base64Payload.startsWith('data:') || markerIndex === -1) {
+        return NextResponse.json(
+          { success: false, error: 'Invalid base64 image format' },
+          { status: 400 }
+        )
+      }
+      const rawMimeType = base64Payload.slice('data:'.length, markerIndex)
+      const mediaType = rawMimeType.split(';')[0] || 'image/jpeg'
+      const base64Data = base64Payload.slice(markerIndex + base64Marker.length)
+      if (!base64Data) {
+        return NextResponse.json(
+          { success: false, error: 'Invalid base64 image format' },
+          { status: 400 }
+        )
+      }
+
+      const ai = new GoogleGenAI({ apiKey: validatedData.apiKey })
+      const geminiResponse = await ai.models.generateContent({
+        model: validatedData.model,
+        contents: [
+          {
+            role: 'user',
+            parts: [{ text: prompt }, { inlineData: { mimeType: mediaType, data: base64Data } }],
+          },
+        ],
+      })
+
+      const content = extractTextContent(geminiResponse.candidates?.[0])
+      const usage = convertUsageMetadata(geminiResponse.usageMetadata)
+
+      return NextResponse.json({
+        success: true,
+        output: {
+          content,
+          model: validatedData.model,
+          tokens: usage.totalTokenCount || undefined,
+        },
+      })
+    }
+
     if (isClaude) {
       if (imageSource.startsWith('data:')) {
         const base64Match = imageSource.match(/^data:([^;]+);base64,(.+)$/)
@@ -172,7 +303,7 @@ export async function POST(request: NextRequest) {
           ],
         },
       ],
-      max_tokens: 1000,
+      max_completion_tokens: 1000,
     }
   }
 
```
|
|||||||
@@ -3,6 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
|
|||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
|
import { RawFileInputSchema } from '@/lib/uploads/utils/file-schemas'
|
||||||
import {
|
import {
|
||||||
getFileExtension,
|
getFileExtension,
|
||||||
getMimeTypeFromExtension,
|
getMimeTypeFromExtension,
|
||||||
@@ -19,7 +20,7 @@ const WORDPRESS_COM_API_BASE = 'https://public-api.wordpress.com/wp/v2/sites'
|
|||||||
const WordPressUploadSchema = z.object({
|
const WordPressUploadSchema = z.object({
|
||||||
accessToken: z.string().min(1, 'Access token is required'),
|
accessToken: z.string().min(1, 'Access token is required'),
|
||||||
siteId: z.string().min(1, 'Site ID is required'),
|
siteId: z.string().min(1, 'Site ID is required'),
|
||||||
file: z.any().optional().nullable(),
|
file: RawFileInputSchema.optional().nullable(),
|
||||||
filename: z.string().optional().nullable(),
|
filename: z.string().optional().nullable(),
|
||||||
title: z.string().optional().nullable(),
|
title: z.string().optional().nullable(),
|
||||||
caption: z.string().optional().nullable(),
|
caption: z.string().optional().nullable(),
|
||||||
|
|||||||
apps/sim/app/api/tools/zoom/get-recordings/route.ts (new file, 216 lines)

```diff
@@ -0,0 +1,216 @@
+import { createLogger } from '@sim/logger'
+import { type NextRequest, NextResponse } from 'next/server'
+import { z } from 'zod'
+import { checkInternalAuth } from '@/lib/auth/hybrid'
+import {
+  secureFetchWithPinnedIP,
+  validateUrlWithDNS,
+} from '@/lib/core/security/input-validation.server'
+import { generateRequestId } from '@/lib/core/utils/request'
+import { getExtensionFromMimeType } from '@/lib/uploads/utils/file-utils'
+
+export const dynamic = 'force-dynamic'
+
+const logger = createLogger('ZoomGetRecordingsAPI')
+
+interface ZoomRecordingFile {
+  id?: string
+  meeting_id?: string
+  recording_start?: string
+  recording_end?: string
+  file_type?: string
+  file_extension?: string
+  file_size?: number
+  play_url?: string
+  download_url?: string
+  status?: string
+  recording_type?: string
+}
+
+interface ZoomRecordingsResponse {
+  uuid?: string
+  id?: string | number
+  account_id?: string
+  host_id?: string
+  topic?: string
+  type?: number
+  start_time?: string
+  duration?: number
+  total_size?: number
+  recording_count?: number
+  share_url?: string
+  recording_files?: ZoomRecordingFile[]
+}
+
+interface ZoomErrorResponse {
+  message?: string
+  code?: number
+}
+
+const ZoomGetRecordingsSchema = z.object({
+  accessToken: z.string().min(1, 'Access token is required'),
+  meetingId: z.string().min(1, 'Meeting ID is required'),
+  includeFolderItems: z.boolean().optional(),
+  ttl: z.number().optional(),
+  downloadFiles: z.boolean().optional().default(false),
+})
+
+export async function POST(request: NextRequest) {
+  const requestId = generateRequestId()
+
+  try {
+    const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
+
+    if (!authResult.success) {
+      logger.warn(`[${requestId}] Unauthorized Zoom get recordings attempt: ${authResult.error}`)
+      return NextResponse.json(
+        {
+          success: false,
+          error: authResult.error || 'Authentication required',
+        },
+        { status: 401 }
+      )
+    }
+
+    const body = await request.json()
+    const validatedData = ZoomGetRecordingsSchema.parse(body)
+
+    const { accessToken, meetingId, includeFolderItems, ttl, downloadFiles } = validatedData
+
+    const baseUrl = `https://api.zoom.us/v2/meetings/${encodeURIComponent(meetingId)}/recordings`
+    const queryParams = new URLSearchParams()
+
+    if (includeFolderItems != null) {
+      queryParams.append('include_folder_items', String(includeFolderItems))
+    }
+    if (ttl) {
+      queryParams.append('ttl', String(ttl))
+    }
+
+    const queryString = queryParams.toString()
+    const apiUrl = queryString ? `${baseUrl}?${queryString}` : baseUrl
+
+    logger.info(`[${requestId}] Fetching recordings from Zoom`, { meetingId })
+
+    const urlValidation = await validateUrlWithDNS(apiUrl, 'apiUrl')
+    if (!urlValidation.isValid) {
+      return NextResponse.json({ success: false, error: urlValidation.error }, { status: 400 })
+    }
+
+    const response = await secureFetchWithPinnedIP(apiUrl, urlValidation.resolvedIP!, {
+      method: 'GET',
+      headers: {
+        'Content-Type': 'application/json',
+        Authorization: `Bearer ${accessToken}`,
+      },
+    })
+
+    if (!response.ok) {
+      const errorData = (await response.json().catch(() => ({}))) as ZoomErrorResponse
+      logger.error(`[${requestId}] Zoom API error`, {
+        status: response.status,
+        error: errorData,
+      })
+      return NextResponse.json(
+        { success: false, error: errorData.message || `Zoom API error: ${response.status}` },
+        { status: 400 }
+      )
+    }
+
+    const data = (await response.json()) as ZoomRecordingsResponse
+    const files: Array<{
+      name: string
+      mimeType: string
+      data: string
+      size: number
+    }> = []
+
+    if (downloadFiles && Array.isArray(data.recording_files)) {
+      for (const file of data.recording_files) {
+        if (!file?.download_url) continue
+
+        try {
+          const fileUrlValidation = await validateUrlWithDNS(file.download_url, 'downloadUrl')
+          if (!fileUrlValidation.isValid) continue
+
+          const downloadResponse = await secureFetchWithPinnedIP(
+            file.download_url,
+            fileUrlValidation.resolvedIP!,
+            {
+              method: 'GET',
+              headers: { Authorization: `Bearer ${accessToken}` },
+            }
+          )
+
+          if (!downloadResponse.ok) continue
+
+          const contentType =
+            downloadResponse.headers.get('content-type') || 'application/octet-stream'
+          const arrayBuffer = await downloadResponse.arrayBuffer()
+          const buffer = Buffer.from(arrayBuffer)
+          const extension =
+            file.file_extension?.toString().toLowerCase() ||
+            getExtensionFromMimeType(contentType) ||
+            'dat'
+          const fileName = `zoom-recording-${file.id || file.recording_start || Date.now()}.${extension}`
+
+          files.push({
+            name: fileName,
+            mimeType: contentType,
+            data: buffer.toString('base64'),
+            size: buffer.length,
+          })
+        } catch (error) {
+          logger.warn(`[${requestId}] Failed to download recording file:`, error)
+        }
+      }
+    }
+
+    logger.info(`[${requestId}] Zoom recordings fetched successfully`, {
+      recordingCount: data.recording_files?.length || 0,
+      downloadedCount: files.length,
+    })
+
+    return NextResponse.json({
+      success: true,
+      output: {
+        recording: {
+          uuid: data.uuid,
+          id: data.id,
+          account_id: data.account_id,
+          host_id: data.host_id,
+          topic: data.topic,
+          type: data.type,
+          start_time: data.start_time,
+          duration: data.duration,
+          total_size: data.total_size,
+          recording_count: data.recording_count,
+          share_url: data.share_url,
+          recording_files: (data.recording_files || []).map((file: ZoomRecordingFile) => ({
+            id: file.id,
+            meeting_id: file.meeting_id,
+            recording_start: file.recording_start,
+            recording_end: file.recording_end,
+            file_type: file.file_type,
+            file_extension: file.file_extension,
+            file_size: file.file_size,
+            play_url: file.play_url,
+            download_url: file.download_url,
+            status: file.status,
+            recording_type: file.recording_type,
+          })),
+        },
+        files: files.length > 0 ? files : undefined,
+      },
+    })
+  } catch (error) {
+    logger.error(`[${requestId}] Error fetching Zoom recordings:`, error)
+    return NextResponse.json(
+      {
+        success: false,
+        error: error instanceof Error ? error.message : 'Unknown error occurred',
+      },
+      { status: 500 }
+    )
+  }
+}
```
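A hypothetical client-side call against the new route, for orientation; the field names mirror `ZoomGetRecordingsSchema`, while the function wrapper and values are placeholders:

```typescript
// Hypothetical usage of the new Zoom recordings route above. Field
// names come from ZoomGetRecordingsSchema; everything else is illustrative.
async function fetchZoomRecordings(accessToken: string, meetingId: string) {
  const res = await fetch('/api/tools/zoom/get-recordings', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      accessToken,
      meetingId,
      downloadFiles: true, // also return recording files as base64 payloads
    }),
  })
  if (!res.ok) throw new Error(`Request failed: ${res.status}`)
  return res.json() // shape: { success, output: { recording, files? } }
}
```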
```diff
@@ -3,7 +3,6 @@ import { userStats, workflow } from '@sim/db/schema'
 import { createLogger } from '@sim/logger'
 import { eq, sql } from 'drizzle-orm'
 import { type NextRequest, NextResponse } from 'next/server'
-import OpenAI, { AzureOpenAI } from 'openai'
 import { getBYOKKey } from '@/lib/api-key/byok'
 import { getSession } from '@/lib/auth'
 import { logModelUsage } from '@/lib/billing/core/usage-log'
@@ -12,6 +11,7 @@ import { env } from '@/lib/core/config/env'
 import { getCostMultiplier, isBillingEnabled } from '@/lib/core/config/feature-flags'
 import { generateRequestId } from '@/lib/core/utils/request'
 import { verifyWorkspaceMembership } from '@/app/api/workflows/utils'
+import { extractResponseText, parseResponsesUsage } from '@/providers/openai/utils'
 import { getModelPricing } from '@/providers/utils'
 
 export const dynamic = 'force-dynamic'
@@ -28,18 +28,6 @@ const openaiApiKey = env.OPENAI_API_KEY
 
 const useWandAzure = azureApiKey && azureEndpoint && azureApiVersion
 
-const client = useWandAzure
-  ? new AzureOpenAI({
-      apiKey: azureApiKey,
-      apiVersion: azureApiVersion,
-      endpoint: azureEndpoint,
-    })
-  : openaiApiKey
-    ? new OpenAI({
-        apiKey: openaiApiKey,
-      })
-    : null
-
 if (!useWandAzure && !openaiApiKey) {
   logger.warn(
     'Neither Azure OpenAI nor OpenAI API key found. Wand generation API will not function.'
@@ -202,20 +190,18 @@ export async function POST(req: NextRequest) {
     }
 
     let isBYOK = false
-    let activeClient = client
-    let byokApiKey: string | null = null
+    let activeOpenAIKey = openaiApiKey
 
     if (workspaceId && !useWandAzure) {
       const byokResult = await getBYOKKey(workspaceId, 'openai')
       if (byokResult) {
         isBYOK = true
-        byokApiKey = byokResult.apiKey
-        activeClient = new OpenAI({ apiKey: byokResult.apiKey })
+        activeOpenAIKey = byokResult.apiKey
         logger.info(`[${requestId}] Using BYOK OpenAI key for wand generation`)
       }
     }
 
-    if (!activeClient) {
+    if (!useWandAzure && !activeOpenAIKey) {
       logger.error(`[${requestId}] AI client not initialized. Missing API key.`)
       return NextResponse.json(
         { success: false, error: 'Wand generation service is not configured.' },
@@ -276,17 +262,18 @@ Use this context to calculate relative dates like "yesterday", "last week", "beg
     )
 
     const apiUrl = useWandAzure
-      ? `${azureEndpoint}/openai/deployments/${wandModelName}/chat/completions?api-version=${azureApiVersion}`
-      : 'https://api.openai.com/v1/chat/completions'
+      ? `${azureEndpoint?.replace(/\/$/, '')}/openai/v1/responses?api-version=${azureApiVersion}`
+      : 'https://api.openai.com/v1/responses'
 
     const headers: Record<string, string> = {
       'Content-Type': 'application/json',
+      'OpenAI-Beta': 'responses=v1',
     }
 
     if (useWandAzure) {
       headers['api-key'] = azureApiKey!
     } else {
-      headers.Authorization = `Bearer ${byokApiKey || openaiApiKey}`
+      headers.Authorization = `Bearer ${activeOpenAIKey}`
     }
 
     logger.debug(`[${requestId}] Making streaming request to: ${apiUrl}`)
@@ -296,11 +283,10 @@ Use this context to calculate relative dates like "yesterday", "last week", "beg
       headers,
       body: JSON.stringify({
         model: useWandAzure ? wandModelName : 'gpt-4o',
-        messages: messages,
+        input: messages,
         temperature: 0.2,
-        max_tokens: 10000,
+        max_output_tokens: 10000,
         stream: true,
-        stream_options: { include_usage: true },
       }),
     })
 
@@ -327,16 +313,29 @@ Use this context to calculate relative dates like "yesterday", "last week", "beg
           return
         }
 
+        let finalUsage: any = null
+        let usageRecorded = false
+
+        const recordUsage = async () => {
+          if (usageRecorded || !finalUsage) {
+            return
+          }
+
+          usageRecorded = true
+          await updateUserStatsForWand(session.user.id, finalUsage, requestId, isBYOK)
+        }
+
         try {
           let buffer = ''
           let chunkCount = 0
-          let finalUsage: any = null
+          let activeEventType: string | undefined
 
           while (true) {
            const { done, value } = await reader.read()
 
            if (done) {
              logger.info(`[${requestId}] Stream completed. Total chunks: ${chunkCount}`)
+              await recordUsage()
              controller.enqueue(encoder.encode(`data: ${JSON.stringify({ done: true })}\n\n`))
              controller.close()
              break
@@ -348,15 +347,25 @@ Use this context to calculate relative dates like "yesterday", "last week", "beg
            buffer = lines.pop() || ''
 
            for (const line of lines) {
-              if (line.startsWith('data: ')) {
-                const data = line.slice(6).trim()
+              const trimmed = line.trim()
+              if (!trimmed) {
+                continue
+              }
+
+              if (trimmed.startsWith('event:')) {
+                activeEventType = trimmed.slice(6).trim()
+                continue
+              }
+
+              if (!trimmed.startsWith('data:')) {
+                continue
+              }
+
+              const data = trimmed.slice(5).trim()
              if (data === '[DONE]') {
                logger.info(`[${requestId}] Received [DONE] signal`)
 
-                if (finalUsage) {
-                  await updateUserStatsForWand(session.user.id, finalUsage, requestId, isBYOK)
-                }
+                await recordUsage()
 
                controller.enqueue(
                  encoder.encode(`data: ${JSON.stringify({ done: true })}\n\n`)
@@ -365,9 +374,40 @@ Use this context to calculate relative dates like "yesterday", "last week", "beg
                return
              }
 
+              let parsed: any
              try {
-                const parsed = JSON.parse(data)
-                const content = parsed.choices?.[0]?.delta?.content
+                parsed = JSON.parse(data)
+              } catch (parseError) {
+                logger.debug(`[${requestId}] Skipped non-JSON line: ${data.substring(0, 100)}`)
+                continue
+              }
+
+              const eventType = parsed?.type ?? activeEventType
+
+              if (
+                eventType === 'response.error' ||
+                eventType === 'error' ||
+                eventType === 'response.failed'
+              ) {
+                throw new Error(parsed?.error?.message || 'Responses stream error')
+              }
+
+              if (
+                eventType === 'response.output_text.delta' ||
+                eventType === 'response.output_json.delta'
+              ) {
+                let content = ''
+                if (typeof parsed.delta === 'string') {
+                  content = parsed.delta
+                } else if (parsed.delta && typeof parsed.delta.text === 'string') {
+                  content = parsed.delta.text
+                } else if (parsed.delta && parsed.delta.json !== undefined) {
+                  content = JSON.stringify(parsed.delta.json)
+                } else if (parsed.json !== undefined) {
+                  content = JSON.stringify(parsed.json)
+                } else if (typeof parsed.text === 'string') {
+                  content = parsed.text
+                }
+
                if (content) {
                  chunkCount++
@@ -379,16 +419,18 @@ Use this context to calculate relative dates like "yesterday", "last week", "beg
                    encoder.encode(`data: ${JSON.stringify({ chunk: content })}\n\n`)
                  )
                }
 
-                if (parsed.usage) {
-                  finalUsage = parsed.usage
-                  logger.info(
-                    `[${requestId}] Received usage data: ${JSON.stringify(parsed.usage)}`
-                  )
-                }
-              } catch (parseError) {
-                logger.debug(
-                  `[${requestId}] Skipped non-JSON line: ${data.substring(0, 100)}`
+              }
+
+              if (eventType === 'response.completed') {
+                const usage = parseResponsesUsage(parsed?.response?.usage ?? parsed?.usage)
+                if (usage) {
+                  finalUsage = {
+                    prompt_tokens: usage.promptTokens,
+                    completion_tokens: usage.completionTokens,
+                    total_tokens: usage.totalTokens,
+                  }
+                  logger.info(
+                    `[${requestId}] Received usage data: ${JSON.stringify(finalUsage)}`
                  )
                }
              }
@@ -401,6 +443,12 @@ Use this context to calculate relative dates like "yesterday", "last week", "beg
            stack: streamError?.stack,
          })
 
+          try {
+            await recordUsage()
+          } catch (usageError) {
+            logger.warn(`[${requestId}] Failed to record usage after stream error`, usageError)
+          }
+
          const errorData = `data: ${JSON.stringify({ error: 'Streaming failed', done: true })}\n\n`
          controller.enqueue(encoder.encode(errorData))
          controller.close()
@@ -424,8 +472,6 @@ Use this context to calculate relative dates like "yesterday", "last week", "beg
      message: error?.message || 'Unknown error',
      code: error?.code,
      status: error?.status,
-      responseStatus: error?.response?.status,
-      responseData: error?.response?.data ? safeStringify(error.response.data) : undefined,
      stack: error?.stack,
      useWandAzure,
      model: useWandAzure ? wandModelName : 'gpt-4o',
@@ -440,14 +486,43 @@ Use this context to calculate relative dates like "yesterday", "last week", "beg
      }
    }
 
-    const completion = await activeClient.chat.completions.create({
+    const apiUrl = useWandAzure
+      ? `${azureEndpoint?.replace(/\/$/, '')}/openai/v1/responses?api-version=${azureApiVersion}`
+      : 'https://api.openai.com/v1/responses'
+
+    const headers: Record<string, string> = {
+      'Content-Type': 'application/json',
+      'OpenAI-Beta': 'responses=v1',
+    }
+
+    if (useWandAzure) {
+      headers['api-key'] = azureApiKey!
+    } else {
+      headers.Authorization = `Bearer ${activeOpenAIKey}`
+    }
+
+    const response = await fetch(apiUrl, {
+      method: 'POST',
+      headers,
+      body: JSON.stringify({
        model: useWandAzure ? wandModelName : 'gpt-4o',
-      messages: messages,
-      temperature: 0.3,
-      max_tokens: 10000,
+        input: messages,
+        temperature: 0.2,
+        max_output_tokens: 10000,
+      }),
    })
 
-    const generatedContent = completion.choices[0]?.message?.content?.trim()
+    if (!response.ok) {
+      const errorText = await response.text()
+      const apiError = new Error(
+        `API request failed: ${response.status} ${response.statusText} - ${errorText}`
+      )
+      ;(apiError as any).status = response.status
+      throw apiError
+    }
+
+    const completion = await response.json()
+    const generatedContent = extractResponseText(completion.output)?.trim()
 
    if (!generatedContent) {
      logger.error(
@@ -461,8 +536,18 @@ Use this context to calculate relative dates like "yesterday", "last week", "beg
 
    logger.info(`[${requestId}] Wand generation successful`)
 
-    if (completion.usage) {
-      await updateUserStatsForWand(session.user.id, completion.usage, requestId, isBYOK)
+    const usage = parseResponsesUsage(completion.usage)
+    if (usage) {
+      await updateUserStatsForWand(
+        session.user.id,
+        {
+          prompt_tokens: usage.promptTokens,
+          completion_tokens: usage.completionTokens,
+          total_tokens: usage.totalTokens,
+        },
+        requestId,
+        isBYOK
+      )
    }
 
    return NextResponse.json({ success: true, content: generatedContent })
@@ -472,10 +557,6 @@ Use this context to calculate relative dates like "yesterday", "last week", "beg
      message: error?.message || 'Unknown error',
      code: error?.code,
      status: error?.status,
-      responseStatus: error instanceof OpenAI.APIError ? error.status : error?.response?.status,
-      responseData: (error as any)?.response?.data
-        ? safeStringify((error as any).response.data)
-        : undefined,
      stack: error?.stack,
      useWandAzure,
      model: useWandAzure ? wandModelName : 'gpt-4o',
@@ -484,15 +565,13 @@ Use this context to calculate relative dates like "yesterday", "last week", "beg
    })
 
    let clientErrorMessage = 'Wand generation failed. Please try again later.'
-    let status = 500
+    let status = typeof (error as any)?.status === 'number' ? (error as any).status : 500
 
-    if (error instanceof OpenAI.APIError) {
-      status = error.status || 500
-      logger.error(
-        `[${requestId}] ${useWandAzure ? 'Azure OpenAI' : 'OpenAI'} API Error: ${status} - ${error.message}`
-      )
-
-      if (status === 401) {
+    if (useWandAzure && error?.message?.includes('DeploymentNotFound')) {
+      clientErrorMessage =
+        'Azure OpenAI deployment not found. Please check your model deployment configuration.'
+      status = 404
+    } else if (status === 401) {
      clientErrorMessage = 'Authentication failed. Please check your API key configuration.'
    } else if (status === 429) {
      clientErrorMessage = 'Rate limit exceeded. Please try again later.'
@@ -500,11 +579,6 @@ Use this context to calculate relative dates like "yesterday", "last week", "beg
      clientErrorMessage =
        'The wand generation service is currently unavailable. Please try again later.'
    }
-    } else if (useWandAzure && error.message?.includes('DeploymentNotFound')) {
-      clientErrorMessage =
-        'Azure OpenAI deployment not found. Please check your model deployment configuration.'
-      status = 404
-    }
 
    return NextResponse.json(
      {
```
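The stream loop above moves from Chat Completions SSE, which emits only `data:` lines, to Responses-API SSE, which interleaves `event:` and `data:` lines. A reduced sketch of the line handling, using only the event names that actually appear in the hunks; the surrounding reader and controller plumbing is omitted:

```typescript
// Sketch of Responses-API SSE line handling. Event names are taken
// from the diff above; the wrapper function itself is illustrative.
function handleSSELine(line: string, state: { activeEventType?: string }): string | null {
  const trimmed = line.trim()
  if (!trimmed) return null
  if (trimmed.startsWith('event:')) {
    state.activeEventType = trimmed.slice(6).trim() // remember event type for the next data line
    return null
  }
  if (!trimmed.startsWith('data:')) return null
  const data = trimmed.slice(5).trim()
  if (data === '[DONE]') return null
  let parsed: any
  try {
    parsed = JSON.parse(data)
  } catch {
    return null // skip non-JSON keep-alive lines
  }
  const eventType = parsed?.type ?? state.activeEventType
  if (eventType === 'response.error' || eventType === 'error' || eventType === 'response.failed') {
    throw new Error(parsed?.error?.message || 'Responses stream error')
  }
  if (eventType === 'response.output_text.delta' && typeof parsed.delta === 'string') {
    return parsed.delta // user-visible text chunk
  }
  return null
}
```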
```diff
@@ -1,190 +0,0 @@
-import { db, workflowDeploymentVersion } from '@sim/db'
-import { createLogger } from '@sim/logger'
-import { and, eq } from 'drizzle-orm'
-import type { NextRequest } from 'next/server'
-import { generateRequestId } from '@/lib/core/utils/request'
-import { syncMcpToolsForWorkflow } from '@/lib/mcp/workflow-mcp-sync'
-import { restorePreviousVersionWebhooks, saveTriggerWebhooksForDeploy } from '@/lib/webhooks/deploy'
-import { activateWorkflowVersion } from '@/lib/workflows/persistence/utils'
-import {
-  cleanupDeploymentVersion,
-  createSchedulesForDeploy,
-  validateWorkflowSchedules,
-} from '@/lib/workflows/schedules'
-import { validateWorkflowPermissions } from '@/lib/workflows/utils'
-import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'
-import type { BlockState } from '@/stores/workflows/workflow/types'
-
-const logger = createLogger('WorkflowActivateDeploymentAPI')
-
-export const dynamic = 'force-dynamic'
-export const runtime = 'nodejs'
-
-export async function POST(
-  request: NextRequest,
-  { params }: { params: Promise<{ id: string; version: string }> }
-) {
-  const requestId = generateRequestId()
-  const { id, version } = await params
-
-  try {
-    const {
-      error,
-      session,
-      workflow: workflowData,
-    } = await validateWorkflowPermissions(id, requestId, 'admin')
-    if (error) {
-      return createErrorResponse(error.message, error.status)
-    }
-
-    const actorUserId = session?.user?.id
-    if (!actorUserId) {
-      logger.warn(`[${requestId}] Unable to resolve actor user for deployment activation: ${id}`)
-      return createErrorResponse('Unable to determine activating user', 400)
-    }
-
-    const versionNum = Number(version)
-    if (!Number.isFinite(versionNum)) {
-      return createErrorResponse('Invalid version number', 400)
-    }
-
-    const [versionRow] = await db
-      .select({
-        id: workflowDeploymentVersion.id,
-        state: workflowDeploymentVersion.state,
-      })
-      .from(workflowDeploymentVersion)
-      .where(
-        and(
-          eq(workflowDeploymentVersion.workflowId, id),
-          eq(workflowDeploymentVersion.version, versionNum)
-        )
-      )
-      .limit(1)
-
-    if (!versionRow?.state) {
-      return createErrorResponse('Deployment version not found', 404)
-    }
-
-    const [currentActiveVersion] = await db
-      .select({ id: workflowDeploymentVersion.id })
-      .from(workflowDeploymentVersion)
-      .where(
-        and(
-          eq(workflowDeploymentVersion.workflowId, id),
-          eq(workflowDeploymentVersion.isActive, true)
-        )
-      )
-      .limit(1)
-
-    const previousVersionId = currentActiveVersion?.id
-
-    const deployedState = versionRow.state as { blocks?: Record<string, BlockState> }
-    const blocks = deployedState.blocks
-    if (!blocks || typeof blocks !== 'object') {
-      return createErrorResponse('Invalid deployed state structure', 500)
-    }
-
-    const scheduleValidation = validateWorkflowSchedules(blocks)
-    if (!scheduleValidation.isValid) {
-      return createErrorResponse(`Invalid schedule configuration: ${scheduleValidation.error}`, 400)
-    }
-
-    const triggerSaveResult = await saveTriggerWebhooksForDeploy({
-      request,
-      workflowId: id,
-      workflow: workflowData as Record<string, unknown>,
-      userId: actorUserId,
-      blocks,
-      requestId,
-      deploymentVersionId: versionRow.id,
-      previousVersionId,
-      forceRecreateSubscriptions: true,
-    })
-
-    if (!triggerSaveResult.success) {
-      return createErrorResponse(
-        triggerSaveResult.error?.message || 'Failed to sync trigger configuration',
-        triggerSaveResult.error?.status || 500
-      )
-    }
-
-    const scheduleResult = await createSchedulesForDeploy(id, blocks, db, versionRow.id)
-
-    if (!scheduleResult.success) {
-      await cleanupDeploymentVersion({
-        workflowId: id,
-        workflow: workflowData as Record<string, unknown>,
-        requestId,
-        deploymentVersionId: versionRow.id,
-      })
-      if (previousVersionId) {
-        await restorePreviousVersionWebhooks({
-          request,
-          workflow: workflowData as Record<string, unknown>,
-          userId: actorUserId,
-          previousVersionId,
-          requestId,
-        })
-      }
-      return createErrorResponse(scheduleResult.error || 'Failed to sync schedules', 500)
-    }
-
-    const result = await activateWorkflowVersion({ workflowId: id, version: versionNum })
-    if (!result.success) {
-      await cleanupDeploymentVersion({
-        workflowId: id,
-        workflow: workflowData as Record<string, unknown>,
-        requestId,
-        deploymentVersionId: versionRow.id,
-      })
-      if (previousVersionId) {
-        await restorePreviousVersionWebhooks({
-          request,
-          workflow: workflowData as Record<string, unknown>,
-          userId: actorUserId,
-          previousVersionId,
-          requestId,
-        })
-      }
-      return createErrorResponse(result.error || 'Failed to activate deployment', 400)
-    }
-
-    if (previousVersionId && previousVersionId !== versionRow.id) {
-      try {
-        logger.info(
-          `[${requestId}] Cleaning up previous version ${previousVersionId} webhooks/schedules`
-        )
-        await cleanupDeploymentVersion({
-          workflowId: id,
-          workflow: workflowData as Record<string, unknown>,
-          requestId,
-          deploymentVersionId: previousVersionId,
-          skipExternalCleanup: true,
-        })
-        logger.info(`[${requestId}] Previous version cleanup completed`)
-      } catch (cleanupError) {
-        logger.error(
-          `[${requestId}] Failed to clean up previous version ${previousVersionId}`,
-          cleanupError
-        )
-      }
-    }
-
-    await syncMcpToolsForWorkflow({
-      workflowId: id,
-      requestId,
-      state: versionRow.state,
-      context: 'activate',
-    })
-
-    return createSuccessResponse({
-      success: true,
-      deployedAt: result.deployedAt,
-      warnings: triggerSaveResult.warnings,
-    })
-  } catch (error: any) {
-    logger.error(`[${requestId}] Error activating deployment for workflow: ${id}`, error)
-    return createErrorResponse(error.message || 'Failed to activate deployment', 500)
-  }
-}
```
```diff
@@ -4,8 +4,17 @@ import { and, eq } from 'drizzle-orm'
 import type { NextRequest } from 'next/server'
 import { z } from 'zod'
 import { generateRequestId } from '@/lib/core/utils/request'
+import { syncMcpToolsForWorkflow } from '@/lib/mcp/workflow-mcp-sync'
+import { restorePreviousVersionWebhooks, saveTriggerWebhooksForDeploy } from '@/lib/webhooks/deploy'
+import { activateWorkflowVersion } from '@/lib/workflows/persistence/utils'
+import {
+  cleanupDeploymentVersion,
+  createSchedulesForDeploy,
+  validateWorkflowSchedules,
+} from '@/lib/workflows/schedules'
 import { validateWorkflowPermissions } from '@/lib/workflows/utils'
 import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'
+import type { BlockState } from '@/stores/workflows/workflow/types'
 
 const logger = createLogger('WorkflowDeploymentVersionAPI')
 
@@ -23,10 +32,14 @@ const patchBodySchema = z
       .max(500, 'Description must be 500 characters or less')
       .nullable()
       .optional(),
+    isActive: z.literal(true).optional(), // Set to true to activate this version
   })
-  .refine((data) => data.name !== undefined || data.description !== undefined, {
-    message: 'At least one of name or description must be provided',
-  })
+  .refine(
+    (data) => data.name !== undefined || data.description !== undefined || data.isActive === true,
+    {
+      message: 'At least one of name, description, or isActive must be provided',
+    }
+  )
 
 export const dynamic = 'force-dynamic'
 export const runtime = 'nodejs'
@@ -82,7 +95,22 @@ export async function PATCH(
   const { id, version } = await params
 
   try {
-    const { error } = await validateWorkflowPermissions(id, requestId, 'write')
+    const body = await request.json()
+    const validation = patchBodySchema.safeParse(body)
+
+    if (!validation.success) {
+      return createErrorResponse(validation.error.errors[0]?.message || 'Invalid request body', 400)
+    }
+
+    const { name, description, isActive } = validation.data
+
+    // Activation requires admin permission, other updates require write
+    const requiredPermission = isActive ? 'admin' : 'write'
+    const {
+      error,
+      session,
+      workflow: workflowData,
+    } = await validateWorkflowPermissions(id, requestId, requiredPermission)
     if (error) {
       return createErrorResponse(error.message, error.status)
     }
@@ -92,15 +120,193 @@ export async function PATCH(
       return createErrorResponse('Invalid version', 400)
     }
 
-    const body = await request.json()
-    const validation = patchBodySchema.safeParse(body)
-
-    if (!validation.success) {
-      return createErrorResponse(validation.error.errors[0]?.message || 'Invalid request body', 400)
-    }
-
-    const { name, description } = validation.data
+    // Handle activation
+    if (isActive) {
+      const actorUserId = session?.user?.id
+      if (!actorUserId) {
+        logger.warn(`[${requestId}] Unable to resolve actor user for deployment activation: ${id}`)
+        return createErrorResponse('Unable to determine activating user', 400)
+      }
+
+      const [versionRow] = await db
+        .select({
+          id: workflowDeploymentVersion.id,
+          state: workflowDeploymentVersion.state,
+        })
+        .from(workflowDeploymentVersion)
+        .where(
+          and(
+            eq(workflowDeploymentVersion.workflowId, id),
+            eq(workflowDeploymentVersion.version, versionNum)
+          )
+        )
+        .limit(1)
+
+      if (!versionRow?.state) {
+        return createErrorResponse('Deployment version not found', 404)
+      }
+
+      const [currentActiveVersion] = await db
+        .select({ id: workflowDeploymentVersion.id })
+        .from(workflowDeploymentVersion)
+        .where(
+          and(
+            eq(workflowDeploymentVersion.workflowId, id),
+            eq(workflowDeploymentVersion.isActive, true)
+          )
+        )
+        .limit(1)
+
+      const previousVersionId = currentActiveVersion?.id
+
+      const deployedState = versionRow.state as { blocks?: Record<string, BlockState> }
+      const blocks = deployedState.blocks
+      if (!blocks || typeof blocks !== 'object') {
+        return createErrorResponse('Invalid deployed state structure', 500)
+      }
+
+      const scheduleValidation = validateWorkflowSchedules(blocks)
+      if (!scheduleValidation.isValid) {
+        return createErrorResponse(
+          `Invalid schedule configuration: ${scheduleValidation.error}`,
+          400
+        )
+      }
+
+      const triggerSaveResult = await saveTriggerWebhooksForDeploy({
+        request,
+        workflowId: id,
+        workflow: workflowData as Record<string, unknown>,
+        userId: actorUserId,
+        blocks,
+        requestId,
+        deploymentVersionId: versionRow.id,
+        previousVersionId,
+        forceRecreateSubscriptions: true,
+      })
+
+      if (!triggerSaveResult.success) {
+        return createErrorResponse(
+          triggerSaveResult.error?.message || 'Failed to sync trigger configuration',
+          triggerSaveResult.error?.status || 500
+        )
+      }
+
+      const scheduleResult = await createSchedulesForDeploy(id, blocks, db, versionRow.id)
+
+      if (!scheduleResult.success) {
+        await cleanupDeploymentVersion({
+          workflowId: id,
+          workflow: workflowData as Record<string, unknown>,
+          requestId,
+          deploymentVersionId: versionRow.id,
+        })
+        if (previousVersionId) {
+          await restorePreviousVersionWebhooks({
+            request,
+            workflow: workflowData as Record<string, unknown>,
+            userId: actorUserId,
+            previousVersionId,
+            requestId,
+          })
+        }
+        return createErrorResponse(scheduleResult.error || 'Failed to sync schedules', 500)
+      }
+
+      const result = await activateWorkflowVersion({ workflowId: id, version: versionNum })
+      if (!result.success) {
+        await cleanupDeploymentVersion({
+          workflowId: id,
+          workflow: workflowData as Record<string, unknown>,
+          requestId,
+          deploymentVersionId: versionRow.id,
+        })
+        if (previousVersionId) {
+          await restorePreviousVersionWebhooks({
+            request,
+            workflow: workflowData as Record<string, unknown>,
+            userId: actorUserId,
+            previousVersionId,
+            requestId,
+          })
+        }
+        return createErrorResponse(result.error || 'Failed to activate deployment', 400)
+      }
+
+      if (previousVersionId && previousVersionId !== versionRow.id) {
+        try {
+          logger.info(
+            `[${requestId}] Cleaning up previous version ${previousVersionId} webhooks/schedules`
+          )
+          await cleanupDeploymentVersion({
+            workflowId: id,
+            workflow: workflowData as Record<string, unknown>,
+            requestId,
+            deploymentVersionId: previousVersionId,
+            skipExternalCleanup: true,
+          })
+          logger.info(`[${requestId}] Previous version cleanup completed`)
+        } catch (cleanupError) {
+          logger.error(
+            `[${requestId}] Failed to clean up previous version ${previousVersionId}`,
+            cleanupError
+          )
+        }
+      }
+
+      await syncMcpToolsForWorkflow({
+        workflowId: id,
+        requestId,
+        state: versionRow.state,
+        context: 'activate',
+      })
+
+      // Apply name/description updates if provided alongside activation
+      let updatedName: string | null | undefined
+      let updatedDescription: string | null | undefined
+      if (name !== undefined || description !== undefined) {
+        const activationUpdateData: { name?: string; description?: string | null } = {}
+        if (name !== undefined) {
+          activationUpdateData.name = name
+        }
+        if (description !== undefined) {
+          activationUpdateData.description = description
+        }
+
+        const [updated] = await db
+          .update(workflowDeploymentVersion)
+          .set(activationUpdateData)
+          .where(
+            and(
+              eq(workflowDeploymentVersion.workflowId, id),
+              eq(workflowDeploymentVersion.version, versionNum)
+            )
+          )
+          .returning({
+            name: workflowDeploymentVersion.name,
+            description: workflowDeploymentVersion.description,
+          })
+
+        if (updated) {
+          updatedName = updated.name
+          updatedDescription = updated.description
+          logger.info(
+            `[${requestId}] Updated deployment version ${version} metadata during activation`,
+            { name: activationUpdateData.name, description: activationUpdateData.description }
+          )
+        }
+      }
+
+      return createSuccessResponse({
+        success: true,
+        deployedAt: result.deployedAt,
+        warnings: triggerSaveResult.warnings,
+        ...(updatedName !== undefined && { name: updatedName }),
+        ...(updatedDescription !== undefined && { description: updatedDescription }),
+      })
+    }
+
+    // Handle name/description updates
     const updateData: { name?: string; description?: string | null } = {}
     if (name !== undefined) {
       updateData.name = name
```
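The reworked `patchBodySchema` above admits `isActive: true` as a third valid update alongside name and description. The cross-field `refine()` in isolation, so the validation rule is easy to see:

```typescript
// The refine() pattern from patchBodySchema, reduced to a standalone
// sketch: at least one updatable field must be present in the body.
import { z } from 'zod'

const patchBody = z
  .object({
    name: z.string().optional(),
    description: z.string().max(500).nullable().optional(),
    isActive: z.literal(true).optional(),
  })
  .refine(
    (data) =>
      data.name !== undefined || data.description !== undefined || data.isActive === true,
    { message: 'At least one of name, description, or isActive must be provided' }
  )

// patchBody.parse({}) throws; patchBody.parse({ isActive: true }) succeeds.
```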
@@ -1,235 +0,0 @@
-import { createLogger } from '@sim/logger'
-import { type NextRequest, NextResponse } from 'next/server'
-import { v4 as uuidv4 } from 'uuid'
-import { z } from 'zod'
-import { checkHybridAuth } from '@/lib/auth/hybrid'
-import { generateRequestId } from '@/lib/core/utils/request'
-import { SSE_HEADERS } from '@/lib/core/utils/sse'
-import { markExecutionCancelled } from '@/lib/execution/cancellation'
-import { preprocessExecution } from '@/lib/execution/preprocessing'
-import { LoggingSession } from '@/lib/logs/execution/logging-session'
-import { executeWorkflowCore } from '@/lib/workflows/executor/execution-core'
-import { createSSECallbacks } from '@/lib/workflows/executor/execution-events'
-import { ExecutionSnapshot } from '@/executor/execution/snapshot'
-import type { ExecutionMetadata, SerializableExecutionState } from '@/executor/execution/types'
-import { hasExecutionResult } from '@/executor/utils/errors'
-
-const logger = createLogger('ExecuteFromBlockAPI')
-
-const ExecuteFromBlockSchema = z.object({
-  startBlockId: z.string().min(1, 'Start block ID is required'),
-  sourceSnapshot: z.object({
-    blockStates: z.record(z.any()),
-    executedBlocks: z.array(z.string()),
-    blockLogs: z.array(z.any()),
-    decisions: z.object({
-      router: z.record(z.string()),
-      condition: z.record(z.string()),
-    }),
-    completedLoops: z.array(z.string()),
-    loopExecutions: z.record(z.any()).optional(),
-    parallelExecutions: z.record(z.any()).optional(),
-    parallelBlockMapping: z.record(z.any()).optional(),
-    activeExecutionPath: z.array(z.string()),
-  }),
-  input: z.any().optional(),
-})
-
-export const runtime = 'nodejs'
-export const dynamic = 'force-dynamic'
-
-export async function POST(req: NextRequest, { params }: { params: Promise<{ id: string }> }) {
-  const requestId = generateRequestId()
-  const { id: workflowId } = await params
-
-  try {
-    const auth = await checkHybridAuth(req, { requireWorkflowId: false })
-    if (!auth.success || !auth.userId) {
-      return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
-    }
-    const userId = auth.userId
-
-    let body: unknown
-    try {
-      body = await req.json()
-    } catch {
-      return NextResponse.json({ error: 'Invalid JSON body' }, { status: 400 })
-    }
-
-    const validation = ExecuteFromBlockSchema.safeParse(body)
-    if (!validation.success) {
-      logger.warn(`[${requestId}] Invalid request body:`, validation.error.errors)
-      return NextResponse.json(
-        {
-          error: 'Invalid request body',
-          details: validation.error.errors.map((e) => ({
-            path: e.path.join('.'),
-            message: e.message,
-          })),
-        },
-        { status: 400 }
-      )
-    }
-
-    const { startBlockId, sourceSnapshot, input } = validation.data
-    const executionId = uuidv4()
-
-    // Run preprocessing checks (billing, rate limits, usage limits)
-    const preprocessResult = await preprocessExecution({
-      workflowId,
-      userId,
-      triggerType: 'manual',
-      executionId,
-      requestId,
-      checkRateLimit: false, // Manual executions don't rate limit
-      checkDeployment: false, // Run-from-block doesn't require deployment
-    })
-
-    if (!preprocessResult.success) {
-      const { error } = preprocessResult
-      logger.warn(`[${requestId}] Preprocessing failed for run-from-block`, {
-        workflowId,
-        error: error?.message,
-        statusCode: error?.statusCode,
-      })
-      return NextResponse.json(
-        { error: error?.message || 'Execution blocked' },
-        { status: error?.statusCode || 500 }
-      )
-    }
-
-    const workflowRecord = preprocessResult.workflowRecord
-    if (!workflowRecord?.workspaceId) {
-      return NextResponse.json({ error: 'Workflow not found or has no workspace' }, { status: 404 })
-    }
-
-    const workspaceId = workflowRecord.workspaceId
-    const workflowUserId = workflowRecord.userId
-
-    logger.info(`[${requestId}] Starting run-from-block execution`, {
-      workflowId,
-      startBlockId,
-      executedBlocksCount: sourceSnapshot.executedBlocks.length,
-      billingActorUserId: preprocessResult.actorUserId,
-    })
-
-    const loggingSession = new LoggingSession(workflowId, executionId, 'manual', requestId)
-    const abortController = new AbortController()
-    let isStreamClosed = false
-
-    const stream = new ReadableStream<Uint8Array>({
-      async start(controller) {
-        const { sendEvent, onBlockStart, onBlockComplete, onStream } = createSSECallbacks({
-          executionId,
-          workflowId,
-          controller,
-          isStreamClosed: () => isStreamClosed,
-          setStreamClosed: () => {
-            isStreamClosed = true
-          },
-        })
-
-        const metadata: ExecutionMetadata = {
-          requestId,
-          workflowId,
-          userId,
-          executionId,
-          triggerType: 'manual',
-          workspaceId,
-          workflowUserId,
-          useDraftState: true,
-          isClientSession: true,
-          startTime: new Date().toISOString(),
-        }
-
-        const snapshot = new ExecutionSnapshot(metadata, {}, input || {}, {})
-
-        try {
-          const startTime = new Date()
-
-          sendEvent({
-            type: 'execution:started',
-            timestamp: startTime.toISOString(),
-            executionId,
-            workflowId,
-            data: { startTime: startTime.toISOString() },
-          })
-
-          const result = await executeWorkflowCore({
-            snapshot,
-            loggingSession,
-            abortSignal: abortController.signal,
-            runFromBlock: {
-              startBlockId,
-              sourceSnapshot: sourceSnapshot as SerializableExecutionState,
-            },
-            callbacks: { onBlockStart, onBlockComplete, onStream },
-          })
-
-          if (result.status === 'cancelled') {
-            sendEvent({
-              type: 'execution:cancelled',
-              timestamp: new Date().toISOString(),
-              executionId,
-              workflowId,
-              data: { duration: result.metadata?.duration || 0 },
-            })
-          } else {
-            sendEvent({
-              type: 'execution:completed',
-              timestamp: new Date().toISOString(),
-              executionId,
-              workflowId,
-              data: {
-                success: result.success,
-                output: result.output,
-                duration: result.metadata?.duration || 0,
-                startTime: result.metadata?.startTime || startTime.toISOString(),
-                endTime: result.metadata?.endTime || new Date().toISOString(),
-              },
-            })
-          }
-        } catch (error: unknown) {
-          const errorMessage = error instanceof Error ? error.message : 'Unknown error'
-          logger.error(`[${requestId}] Run-from-block execution failed: ${errorMessage}`)
-
-          const executionResult = hasExecutionResult(error) ? error.executionResult : undefined
-
-          sendEvent({
-            type: 'execution:error',
-            timestamp: new Date().toISOString(),
-            executionId,
-            workflowId,
-            data: {
-              error: executionResult?.error || errorMessage,
-              duration: executionResult?.metadata?.duration || 0,
-            },
-          })
-        } finally {
-          if (!isStreamClosed) {
-            try {
-              controller.enqueue(new TextEncoder().encode('data: [DONE]\n\n'))
-              controller.close()
-            } catch {}
-          }
-        }
-      },
-      cancel() {
-        isStreamClosed = true
-        abortController.abort()
-        markExecutionCancelled(executionId).catch(() => {})
-      },
-    })
-
-    return new NextResponse(stream, {
-      headers: { ...SSE_HEADERS, 'X-Execution-Id': executionId },
-    })
-  } catch (error: unknown) {
-    const errorMessage = error instanceof Error ? error.message : 'Unknown error'
-    logger.error(`[${requestId}] Failed to start run-from-block execution:`, error)
-    return NextResponse.json(
-      { error: errorMessage || 'Failed to start execution' },
-      { status: 500 }
-    )
-  }
-}

@@ -1,10 +1,14 @@
 import { createLogger } from '@sim/logger'
-import { tasks } from '@trigger.dev/sdk'
 import { type NextRequest, NextResponse } from 'next/server'
 import { validate as uuidValidate, v4 as uuidv4 } from 'uuid'
 import { z } from 'zod'
 import { checkHybridAuth } from '@/lib/auth/hybrid'
-import { isTriggerDevEnabled } from '@/lib/core/config/feature-flags'
+import { getJobQueue, shouldExecuteInline } from '@/lib/core/async-jobs'
+import {
+  createTimeoutAbortController,
+  getTimeoutErrorMessage,
+  isTimeoutError,
+} from '@/lib/core/execution-limits'
 import { generateRequestId } from '@/lib/core/utils/request'
 import { SSE_HEADERS } from '@/lib/core/utils/sse'
 import { getBaseUrl } from '@/lib/core/utils/urls'
@@ -12,6 +16,7 @@ import { markExecutionCancelled } from '@/lib/execution/cancellation'
 import { processInputFileFields } from '@/lib/execution/files'
 import { preprocessExecution } from '@/lib/execution/preprocessing'
 import { LoggingSession } from '@/lib/logs/execution/logging-session'
+import { buildTraceSpans } from '@/lib/logs/execution/trace-spans/trace-spans'
 import {
   cleanupExecutionBase64Cache,
   hydrateUserFilesWithBase64,
@@ -25,7 +30,7 @@ import {
 } from '@/lib/workflows/persistence/utils'
 import { createStreamingResponse } from '@/lib/workflows/streaming/streaming'
 import { createHttpResponseFromBlock, workflowHasResponseBlock } from '@/lib/workflows/utils'
-import type { WorkflowExecutionPayload } from '@/background/workflow-execution'
+import { executeWorkflowJob, type WorkflowExecutionPayload } from '@/background/workflow-execution'
 import { normalizeName } from '@/executor/constants'
 import { ExecutionSnapshot } from '@/executor/execution/snapshot'
 import type { ExecutionMetadata, IterationContext } from '@/executor/execution/types'
@@ -54,6 +59,25 @@ const ExecuteWorkflowSchema = z.object({
     })
     .optional(),
   stopAfterBlockId: z.string().optional(),
+  runFromBlock: z
+    .object({
+      startBlockId: z.string().min(1, 'Start block ID is required'),
+      sourceSnapshot: z.object({
+        blockStates: z.record(z.any()),
+        executedBlocks: z.array(z.string()),
+        blockLogs: z.array(z.any()),
+        decisions: z.object({
+          router: z.record(z.string()),
+          condition: z.record(z.string()),
+        }),
+        completedLoops: z.array(z.string()),
+        loopExecutions: z.record(z.any()).optional(),
+        parallelExecutions: z.record(z.any()).optional(),
+        parallelBlockMapping: z.record(z.any()).optional(),
+        activeExecutionPath: z.array(z.string()),
+      }),
+    })
+    .optional(),
 })

 export const runtime = 'nodejs'

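Run-from-block is now an optional field on the main execute schema rather than a separate route. A minimal client sketch, assuming the execute endpoint lives at /api/workflows/:id/execute and accepts an API key header (both assumptions, not shown in this diff):

```typescript
// Hypothetical request resuming execution from a block. The endpoint path and
// auth header are assumptions; the body mirrors the runFromBlock schema above.
async function resumeFromBlock(workflowId: string, apiKey: string) {
  return fetch(`/api/workflows/${workflowId}/execute`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json', 'X-API-Key': apiKey },
    body: JSON.stringify({
      runFromBlock: {
        startBlockId: 'block-123', // placeholder ID
        sourceSnapshot: {
          blockStates: {},
          executedBlocks: ['start'],
          blockLogs: [],
          decisions: { router: {}, condition: {} },
          completedLoops: [],
          activeExecutionPath: ['block-123'],
        },
      },
    }),
  })
}
```
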
@@ -118,45 +142,66 @@ type AsyncExecutionParams = {
   userId: string
   input: any
   triggerType: CoreTriggerType
+  executionId: string
 }

-/**
- * Handles async workflow execution by queueing a background job.
- * Returns immediately with a 202 Accepted response containing the job ID.
- */
 async function handleAsyncExecution(params: AsyncExecutionParams): Promise<NextResponse> {
-  const { requestId, workflowId, userId, input, triggerType } = params
+  const { requestId, workflowId, userId, input, triggerType, executionId } = params

-  if (!isTriggerDevEnabled) {
-    logger.warn(`[${requestId}] Async mode requested but TRIGGER_DEV_ENABLED is false`)
-    return NextResponse.json(
-      { error: 'Async execution is not enabled. Set TRIGGER_DEV_ENABLED=true to use async mode.' },
-      { status: 400 }
-    )
-  }

   const payload: WorkflowExecutionPayload = {
     workflowId,
     userId,
     input,
     triggerType,
+    executionId,
   }

   try {
-    const handle = await tasks.trigger('workflow-execution', payload)
+    const jobQueue = await getJobQueue()
+    const jobId = await jobQueue.enqueue('workflow-execution', payload, {
+      metadata: { workflowId, userId },
+    })

     logger.info(`[${requestId}] Queued async workflow execution`, {
       workflowId,
-      jobId: handle.id,
+      jobId,
     })

+    if (shouldExecuteInline()) {
+      void (async () => {
+        try {
+          await jobQueue.startJob(jobId)
+          const output = await executeWorkflowJob(payload)
+          await jobQueue.completeJob(jobId, output)
+        } catch (error) {
+          const errorMessage = error instanceof Error ? error.message : String(error)
+          logger.error(`[${requestId}] Async workflow execution failed`, {
+            jobId,
+            error: errorMessage,
+          })
+          try {
+            await jobQueue.markJobFailed(jobId, errorMessage)
+          } catch (markFailedError) {
+            logger.error(`[${requestId}] Failed to mark job as failed`, {
+              jobId,
+              error:
+                markFailedError instanceof Error
+                  ? markFailedError.message
+                  : String(markFailedError),
+            })
+          }
+        }
+      })()
+    }
+
     return NextResponse.json(
       {
         success: true,
         async: true,
-        jobId: handle.id,
+        jobId,
+        executionId,
         message: 'Workflow execution queued',
-        statusUrl: `${getBaseUrl()}/api/jobs/${handle.id}`,
+        statusUrl: `${getBaseUrl()}/api/jobs/${jobId}`,
       },
       { status: 202 }
     )

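Async requests now return both jobId and executionId in the 202 body, and statusUrl points at /api/jobs/:jobId. A hedged polling sketch against that URL — the jobs endpoint's response shape is an assumption here:

```typescript
// Hypothetical polling loop for a queued execution. Assumes the 202 body shown
// above and that GET /api/jobs/:id returns at least { status, output? }.
async function waitForJob(statusUrl: string, intervalMs = 2000): Promise<unknown> {
  for (;;) {
    const res = await fetch(statusUrl)
    const job = (await res.json()) as { status: string; output?: unknown }
    if (job.status === 'completed') return job.output
    if (job.status === 'failed') throw new Error('Workflow job failed')
    await new Promise((resolve) => setTimeout(resolve, intervalMs))
  }
}
```
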
@@ -224,6 +269,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
   base64MaxBytes,
   workflowStateOverride,
   stopAfterBlockId,
+  runFromBlock,
 } = validation.data

 // For API key and internal JWT auth, the entire body is the input (except for our control fields)
@@ -240,6 +286,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
   base64MaxBytes,
   workflowStateOverride,
   stopAfterBlockId: _stopAfterBlockId,
+  runFromBlock: _runFromBlock,
   workflowId: _workflowId, // Also exclude workflowId used for internal JWT auth
   ...rest
 } = body
@@ -318,6 +365,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
       userId: actorUserId,
       input,
       triggerType: loggingTriggerType,
+      executionId,
     })
   }

@@ -405,6 +453,10 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:

   if (!enableSSE) {
     logger.info(`[${requestId}] Using non-SSE execution (direct JSON response)`)
+    const timeoutController = createTimeoutAbortController(
+      preprocessResult.executionTimeout?.sync
+    )
+
     try {
       const metadata: ExecutionMetadata = {
         requestId,
@@ -438,8 +490,38 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
         includeFileBase64,
         base64MaxBytes,
         stopAfterBlockId,
+        runFromBlock,
+        abortSignal: timeoutController.signal,
       })

+      if (
+        result.status === 'cancelled' &&
+        timeoutController.isTimedOut() &&
+        timeoutController.timeoutMs
+      ) {
+        const timeoutErrorMessage = getTimeoutErrorMessage(null, timeoutController.timeoutMs)
+        logger.info(`[${requestId}] Non-SSE execution timed out`, {
+          timeoutMs: timeoutController.timeoutMs,
+        })
+        await loggingSession.markAsFailed(timeoutErrorMessage)
+
+        return NextResponse.json(
+          {
+            success: false,
+            output: result.output,
+            error: timeoutErrorMessage,
+            metadata: result.metadata
+              ? {
+                  duration: result.metadata.duration,
+                  startTime: result.metadata.startTime,
+                  endTime: result.metadata.endTime,
+                }
+              : undefined,
+          },
+          { status: 408 }
+        )
+      }
+
       const outputWithBase64 = includeFileBase64
         ? ((await hydrateUserFilesWithBase64(result.output, {
             requestId,
@@ -450,9 +532,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:

       const resultWithBase64 = { ...result, output: outputWithBase64 }

-      // Cleanup base64 cache for this execution
-      await cleanupExecutionBase64Cache(executionId)
-
       const hasResponseBlock = workflowHasResponseBlock(resultWithBase64)
       if (hasResponseBlock) {
         return createHttpResponseFromBlock(resultWithBase64)
@@ -460,6 +539,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:

       const filteredResult = {
         success: result.success,
+        executionId,
         output: outputWithBase64,
         error: result.error,
         metadata: result.metadata
@@ -474,10 +554,17 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
       return NextResponse.json(filteredResult)
     } catch (error: unknown) {
       const errorMessage = error instanceof Error ? error.message : 'Unknown error'

       logger.error(`[${requestId}] Non-SSE execution failed: ${errorMessage}`)

       const executionResult = hasExecutionResult(error) ? error.executionResult : undefined

+      await loggingSession.safeCompleteWithError({
+        totalDurationMs: executionResult?.metadata?.duration,
+        error: { message: errorMessage },
+        traceSpans: executionResult?.logs as any,
+      })
+
       return NextResponse.json(
         {
           success: false,
@@ -493,6 +580,15 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
         },
         { status: 500 }
       )
+    } finally {
+      timeoutController.cleanup()
+      if (executionId) {
+        try {
+          await cleanupExecutionBase64Cache(executionId)
+        } catch (error) {
+          logger.error(`[${requestId}] Failed to cleanup base64 cache`, { error })
+        }
+      }
     }
   }

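Synchronous (non-SSE) executions that hit their time limit now come back as HTTP 408 with any partial output attached. A sketch of caller-side handling, with the endpoint and body left as placeholders:

```typescript
// Illustrative caller handling the new 408 timeout response; `executeUrl` and
// `requestBody` are placeholders for a real deployment's values.
async function runSync(executeUrl: string, requestBody: unknown) {
  const res = await fetch(executeUrl, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(requestBody),
  })
  if (res.status === 408) {
    const { error, output } = await res.json()
    console.warn(`Execution timed out: ${error}`, output) // partial output may be present
    return null
  }
  return res.json()
}
```
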
@@ -506,7 +602,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
     cachedWorkflowData?.blocks || {}
   )
   const streamVariables = cachedWorkflowData?.variables ?? (workflow as any).variables
-
   const stream = await createStreamingResponse({
     requestId,
     workflow: {
@@ -524,6 +619,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
       workflowTriggerType: triggerType === 'chat' ? 'chat' : 'api',
       includeFileBase64,
       base64MaxBytes,
+      timeoutMs: preprocessResult.executionTimeout?.sync,
     },
     executionId,
   })
@@ -535,7 +631,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
 }

 const encoder = new TextEncoder()
-const abortController = new AbortController()
+const timeoutController = createTimeoutAbortController(preprocessResult.executionTimeout?.sync)
 let isStreamClosed = false

 const stream = new ReadableStream<Uint8Array>({
@@ -731,10 +827,11 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
           onStream,
         },
         loggingSession,
-        abortSignal: abortController.signal,
+        abortSignal: timeoutController.signal,
         includeFileBase64,
         base64MaxBytes,
         stopAfterBlockId,
+        runFromBlock,
       })

       if (result.status === 'paused') {
@@ -767,7 +864,27 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
       }

       if (result.status === 'cancelled') {
+        if (timeoutController.isTimedOut() && timeoutController.timeoutMs) {
+          const timeoutErrorMessage = getTimeoutErrorMessage(null, timeoutController.timeoutMs)
+          logger.info(`[${requestId}] Workflow execution timed out`, {
+            timeoutMs: timeoutController.timeoutMs,
+          })
+
+          await loggingSession.markAsFailed(timeoutErrorMessage)
+
+          sendEvent({
+            type: 'execution:error',
+            timestamp: new Date().toISOString(),
+            executionId,
+            workflowId,
+            data: {
+              error: timeoutErrorMessage,
+              duration: result.metadata?.duration || 0,
+            },
+          })
+        } else {
           logger.info(`[${requestId}] Workflow execution was cancelled`)

           sendEvent({
             type: 'execution:cancelled',
             timestamp: new Date().toISOString(),
@@ -777,6 +894,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
               duration: result.metadata?.duration || 0,
             },
           })
+        }
         return
       }

@@ -799,14 +917,26 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
           endTime: result.metadata?.endTime || new Date().toISOString(),
         },
       })

-      // Cleanup base64 cache for this execution
-      await cleanupExecutionBase64Cache(executionId)
     } catch (error: unknown) {
-      const errorMessage = error instanceof Error ? error.message : 'Unknown error'
-      logger.error(`[${requestId}] SSE execution failed: ${errorMessage}`)
+      const isTimeout = isTimeoutError(error) || timeoutController.isTimedOut()
+      const errorMessage = isTimeout
+        ? getTimeoutErrorMessage(error, timeoutController.timeoutMs)
+        : error instanceof Error
+          ? error.message
+          : 'Unknown error'
+
+      logger.error(`[${requestId}] SSE execution failed: ${errorMessage}`, { isTimeout })

       const executionResult = hasExecutionResult(error) ? error.executionResult : undefined
+      const { traceSpans, totalDuration } = executionResult
+        ? buildTraceSpans(executionResult)
+        : { traceSpans: [], totalDuration: 0 }
+
+      await loggingSession.safeCompleteWithError({
+        totalDurationMs: totalDuration || executionResult?.metadata?.duration,
+        error: { message: errorMessage },
+        traceSpans,
+      })

       sendEvent({
         type: 'execution:error',
@@ -819,20 +949,23 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
         },
       })
     } finally {
+      timeoutController.cleanup()
+      if (executionId) {
+        await cleanupExecutionBase64Cache(executionId)
+      }
       if (!isStreamClosed) {
         try {
           controller.enqueue(encoder.encode('data: [DONE]\n\n'))
           controller.close()
-        } catch {
-          // Stream already closed - nothing to do
-        }
+        } catch {}
       }
     }
   },
   cancel() {
     isStreamClosed = true
+    timeoutController.cleanup()
     logger.info(`[${requestId}] Client aborted SSE stream, signalling cancellation`)
-    abortController.abort()
+    timeoutController.abort()
     markExecutionCancelled(executionId).catch(() => {})
   },
 })

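Both execution paths above swap a bare AbortController for createTimeoutAbortController from '@/lib/core/execution-limits'. A sketch of the controller contract the diff relies on — illustrative only; the real implementation may differ:

```typescript
// Minimal sketch of the timeout controller shape used above: a signal that
// aborts on timeout, plus isTimedOut/cleanup bookkeeping. Not the repo's code.
function createTimeoutAbortControllerSketch(timeoutMs?: number) {
  const controller = new AbortController()
  let timedOut = false
  const timer = timeoutMs
    ? setTimeout(() => {
        timedOut = true
        controller.abort() // fires the same signal a client abort would
      }, timeoutMs)
    : undefined
  return {
    signal: controller.signal,
    timeoutMs,
    isTimedOut: () => timedOut,
    abort: () => controller.abort(),
    cleanup: () => {
      if (timer) clearTimeout(timer)
    },
  }
}
```
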
@@ -508,8 +508,10 @@ export default function TemplateDetails({ isWorkspaceContext = false }: Template

   setIsApproving(true)
   try {
-    const response = await fetch(`/api/templates/${template.id}/approve`, {
-      method: 'POST',
+    const response = await fetch(`/api/templates/${template.id}`, {
+      method: 'PUT',
+      headers: { 'Content-Type': 'application/json' },
+      body: JSON.stringify({ status: 'approved' }),
     })

     if (response.ok) {
@@ -531,8 +533,10 @@ export default function TemplateDetails({ isWorkspaceContext = false }: Template

   setIsRejecting(true)
   try {
-    const response = await fetch(`/api/templates/${template.id}/reject`, {
-      method: 'POST',
+    const response = await fetch(`/api/templates/${template.id}`, {
+      method: 'PUT',
+      headers: { 'Content-Type': 'application/json' },
+      body: JSON.stringify({ status: 'rejected' }),
     })

     if (response.ok) {
@@ -554,10 +558,11 @@ export default function TemplateDetails({ isWorkspaceContext = false }: Template

   setIsVerifying(true)
   try {
-    const endpoint = `/api/creators/${template.creator.id}/verify`
-    const method = template.creator.verified ? 'DELETE' : 'POST'
-    const response = await fetch(endpoint, { method })
+    const response = await fetch(`/api/creators/${template.creator.id}`, {
+      method: 'PUT',
+      headers: { 'Content-Type': 'application/json' },
+      body: JSON.stringify({ verified: !template.creator.verified }),
+    })

     if (response.ok) {
       // Refresh page to show updated verification status

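Approve, reject, and creator verification now go through resource-style PUT updates instead of dedicated action routes. A small helper sketch of the template half; the helper name is illustrative, not from the repo:

```typescript
// Illustrative wrapper over the consolidated template moderation endpoint.
async function setTemplateStatus(
  templateId: string,
  status: 'approved' | 'rejected'
): Promise<boolean> {
  const response = await fetch(`/api/templates/${templateId}`, {
    method: 'PUT',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ status }),
  })
  return response.ok
}
```
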
@@ -192,10 +192,10 @@ export const ActionBar = memo(
 </Tooltip.Trigger>
 <Tooltip.Content side='top'>
   {(() => {
-    if (disabled) return getTooltipMessage('Run')
+    if (disabled) return getTooltipMessage('Run from block')
     if (isExecuting) return 'Execution in progress'
-    if (!dependenciesSatisfied) return 'Disabled: Run Blocks Before'
-    return 'Run'
+    if (!dependenciesSatisfied) return 'Run previous blocks first'
+    return 'Run from block'
   })()}
 </Tooltip.Content>
 </Tooltip.Root>

@@ -273,7 +273,7 @@ export function BlockMenu({
     }
   }}
 >
-  Run
+  Run from block
 </PopoverItem>
 {/* Hide "Run until" for triggers - they're always at the start */}
 {!hasTriggerBlock && (

@@ -807,7 +807,7 @@ export function Chat() {

 const newReservedFields: StartInputFormatField[] = missingStartReservedFields.map(
   (fieldName) => {
-    const defaultType = fieldName === 'files' ? 'files' : 'string'
+    const defaultType = fieldName === 'files' ? 'file[]' : 'string'

     return {
       id: crypto.randomUUID(),

@@ -1,6 +1,7 @@
 import { memo, useCallback, useMemo } from 'react'
 import ReactMarkdown from 'react-markdown'
 import type { NodeProps } from 'reactflow'
+import remarkBreaks from 'remark-breaks'
 import remarkGfm from 'remark-gfm'
 import { cn } from '@/lib/core/utils/cn'
 import { BLOCK_DIMENSIONS } from '@/lib/workflows/blocks/block-dimensions'
@@ -305,7 +306,7 @@ function getEmbedInfo(url: string): EmbedInfo | null {
 const NoteMarkdown = memo(function NoteMarkdown({ content }: { content: string }) {
   return (
     <ReactMarkdown
-      remarkPlugins={[remarkGfm]}
+      remarkPlugins={[remarkGfm, remarkBreaks]}
       components={{
         p: ({ children }: any) => (
           <p className='mb-1 break-words text-[var(--text-primary)] text-sm leading-[1.25rem] last:mb-0'>

@@ -179,7 +179,7 @@ export function A2aDeploy({
 newFields.push({
   id: crypto.randomUUID(),
   name: 'files',
-  type: 'files',
+  type: 'file[]',
   value: '',
   collapsed: false,
 })

@@ -12,7 +12,6 @@ import {
   Tooltip,
 } from '@/components/emcn'
 import { Skeleton } from '@/components/ui'
-import { getEnv, isTruthy } from '@/lib/core/config/env'
 import { OutputSelect } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/chat/components/output-select/output-select'

 interface WorkflowDeploymentInfo {
@@ -78,7 +77,6 @@ export function ApiDeploy({
   async: false,
 })

-const isAsyncEnabled = isTruthy(getEnv('NEXT_PUBLIC_TRIGGER_DEV_ENABLED'))
 const info = deploymentInfo ? { ...deploymentInfo, needsRedeployment } : null

 const getBaseEndpoint = () => {
@@ -272,7 +270,7 @@ response = requests.post(
 )

 job = response.json()
-print(job) # Contains job_id for status checking`
+print(job) # Contains jobId and executionId`

 case 'javascript':
   return `const response = await fetch("${endpoint}", {
@@ -286,7 +284,7 @@ print(job) # Contains job_id for status checking`
 });

 const job = await response.json();
-console.log(job); // Contains job_id for status checking`
+console.log(job); // Contains jobId and executionId`

 case 'typescript':
   return `const response = await fetch("${endpoint}", {
@@ -299,8 +297,8 @@ console.log(job); // Contains job_id for status checking`
   body: JSON.stringify(${JSON.stringify(payload)})
 });

-const job: { job_id: string } = await response.json();
-console.log(job); // Contains job_id for status checking`
+const job: { jobId: string; executionId: string } = await response.json();
+console.log(job); // Contains jobId and executionId`

 default:
   return ''
@@ -539,7 +537,6 @@ console.log(limits);`
   />
 </div>

-{isAsyncEnabled && (
 <div>
   <div className='mb-[6.5px] flex items-center justify-between'>
     <Label className='block pl-[2px] font-medium text-[13px] text-[var(--text-primary)]'>
@@ -554,11 +551,7 @@ console.log(limits);`
   aria-label='Copy command'
   className='!p-1.5 -my-1.5'
 >
-  {copied.async ? (
-    <Check className='h-3 w-3' />
-  ) : (
-    <Clipboard className='h-3 w-3' />
-  )}
+  {copied.async ? <Check className='h-3 w-3' /> : <Clipboard className='h-3 w-3' />}
 </Button>
 </Tooltip.Trigger>
 <Tooltip.Content>
@@ -587,7 +580,6 @@ console.log(limits);`
   className='!min-h-0 rounded-[4px] border border-[var(--border-1)]'
 />
 </div>
-)}
 </div>
 )
 }

@@ -368,6 +368,7 @@ export function FileUpload({
 const uploadedFile: UploadedFile = {
   name: selectedFile.name,
   path: selectedFile.path,
+  key: selectedFile.key,
   size: selectedFile.size,
   type: selectedFile.type,
 }

@@ -26,7 +26,7 @@ import { useAccessibleReferencePrefixes } from '@/app/workspace/[workspaceId]/w/
 interface Field {
   id: string
   name: string
-  type?: 'string' | 'number' | 'boolean' | 'object' | 'array' | 'files'
+  type?: 'string' | 'number' | 'boolean' | 'object' | 'array' | 'file[]'
   value?: string
   description?: string
   collapsed?: boolean
@@ -57,7 +57,7 @@ const TYPE_OPTIONS: ComboboxOption[] = [
   { label: 'Boolean', value: 'boolean' },
   { label: 'Object', value: 'object' },
   { label: 'Array', value: 'array' },
-  { label: 'Files', value: 'files' },
+  { label: 'Files', value: 'file[]' },
 ]

 /**
@@ -448,7 +448,7 @@ export function FieldFormat({
   )
 }

-if (field.type === 'files') {
+if (field.type === 'file[]') {
   const lineCount = fieldValue.split('\n').length
   const gutterWidth = calculateGutterWidth(lineCount)

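The canonical type value for file lists is now 'file[]' across the input-format UI and output typing. A field literal using the renamed type, following the Field interface above:

```typescript
// Field literal using the renamed 'file[]' type; the shape follows the Field
// interface shown in the diff above.
const filesField = {
  id: crypto.randomUUID(),
  name: 'files',
  type: 'file[]' as const,
  value: '',
  collapsed: false,
}
```
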
@@ -225,7 +225,7 @@ const getOutputTypeForPath = (
 const chatModeTypes: Record<string, string> = {
   input: 'string',
   conversationId: 'string',
-  files: 'files',
+  files: 'file[]',
 }
 return chatModeTypes[outputPath] || 'any'
 }
@@ -1568,16 +1568,11 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
 blockTagGroups.sort((a, b) => a.distance - b.distance)
 finalBlockTagGroups.push(...blockTagGroups)

-const contextualTags: string[] = []
-if (loopBlockGroup) {
-  contextualTags.push(...loopBlockGroup.tags)
-}
-if (parallelBlockGroup) {
-  contextualTags.push(...parallelBlockGroup.tags)
-}
+const groupTags = finalBlockTagGroups.flatMap((group) => group.tags)
+const tags = [...groupTags, ...variableTags]

 return {
-  tags: [...allBlockTags, ...variableTags, ...contextualTags],
+  tags,
   variableInfoMap,
   blockTagGroups: finalBlockTagGroups,
 }
@@ -1751,7 +1746,7 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
   mergedSubBlocks
 )

-if (fieldType === 'files' || fieldType === 'file[]' || fieldType === 'array') {
+if (fieldType === 'file' || fieldType === 'file[]' || fieldType === 'array') {
   const blockName = parts[0]
   const remainingPath = parts.slice(2).join('.')
   processedTag = `${blockName}.${arrayFieldName}[0].${remainingPath}`

@@ -78,19 +78,13 @@ const IconComponent = ({ icon: Icon, className }: { icon: any; className?: strin
  * @returns Editor panel content
  */
 export function Editor() {
-  const {
-    currentBlockId,
-    connectionsHeight,
-    toggleConnectionsCollapsed,
-    shouldFocusRename,
-    setShouldFocusRename,
-  } = usePanelEditorStore(
+  const { currentBlockId, connectionsHeight, toggleConnectionsCollapsed, registerRenameCallback } =
+    usePanelEditorStore(
     useShallow((state) => ({
       currentBlockId: state.currentBlockId,
       connectionsHeight: state.connectionsHeight,
       toggleConnectionsCollapsed: state.toggleConnectionsCollapsed,
-      shouldFocusRename: state.shouldFocusRename,
-      setShouldFocusRename: state.setShouldFocusRename,
+      registerRenameCallback: state.registerRenameCallback,
     }))
   )
   const currentWorkflow = useCurrentWorkflow()
@@ -229,6 +223,7 @@ export function Editor() {

 const [isRenaming, setIsRenaming] = useState(false)
 const [editedName, setEditedName] = useState('')
+const renamingBlockIdRef = useRef<string | null>(null)

 /**
  * Ref callback that auto-selects the input text when mounted.
@@ -240,44 +235,62 @@ export function Editor() {
 }, [])

 /**
- * Handles starting the rename process.
+ * Starts the rename process for the current block.
+ * Reads from stores directly to avoid stale closures when called via registered callback.
+ * Captures the block ID in a ref to ensure the correct block is renamed even if selection changes.
  */
 const handleStartRename = useCallback(() => {
-  if (!canEditBlock || !currentBlock) return
-  setEditedName(currentBlock.name || '')
+  const blockId = usePanelEditorStore.getState().currentBlockId
+  if (!blockId) return
+
+  const blocks = useWorkflowStore.getState().blocks
+  const block = blocks[blockId]
+  if (!block) return
+
+  const parentId = block.data?.parentId as string | undefined
+  const isParentLocked = parentId ? (blocks[parentId]?.locked ?? false) : false
+  const isLocked = (block.locked ?? false) || isParentLocked
+  if (!userPermissions.canEdit || isLocked) return
+
+  renamingBlockIdRef.current = blockId
+  setEditedName(block.name || '')
   setIsRenaming(true)
-}, [canEditBlock, currentBlock])
+}, [userPermissions.canEdit])

 /**
- * Handles saving the renamed block.
+ * Saves the renamed block using the captured block ID from when rename started.
  */
 const handleSaveRename = useCallback(() => {
-  if (!currentBlockId || !isRenaming) return
+  const blockIdToRename = renamingBlockIdRef.current
+  if (!blockIdToRename || !isRenaming) return
+
+  const blocks = useWorkflowStore.getState().blocks
+  const blockToRename = blocks[blockIdToRename]
+
   const trimmedName = editedName.trim()
-  if (trimmedName && trimmedName !== currentBlock?.name) {
-    const result = collaborativeUpdateBlockName(currentBlockId, trimmedName)
+  if (trimmedName && blockToRename && trimmedName !== blockToRename.name) {
+    const result = collaborativeUpdateBlockName(blockIdToRename, trimmedName)
     if (!result.success) {
       return
     }
   }
+  renamingBlockIdRef.current = null
   setIsRenaming(false)
-}, [currentBlockId, isRenaming, editedName, currentBlock?.name, collaborativeUpdateBlockName])
+}, [isRenaming, editedName, collaborativeUpdateBlockName])

 /**
  * Handles canceling the rename process.
  */
 const handleCancelRename = useCallback(() => {
+  renamingBlockIdRef.current = null
   setIsRenaming(false)
   setEditedName('')
 }, [])

 useEffect(() => {
-  if (shouldFocusRename && currentBlock) {
-    handleStartRename()
-    setShouldFocusRename(false)
-  }
-}, [shouldFocusRename, currentBlock, handleStartRename, setShouldFocusRename])
+  registerRenameCallback(handleStartRename)
+  return () => registerRenameCallback(null)
+}, [registerRenameCallback, handleStartRename])

 /**
  * Handles opening documentation link in a new secure tab.

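The rename flow swaps a shouldFocusRename boolean in the panel store for a registered callback, so any caller can trigger a rename without re-rendering on flag changes. A minimal sketch of that store pattern, assuming zustand (names below are illustrative, not the repo's store):

```typescript
import { create } from 'zustand'

// Illustrative register-a-callback store pattern, mirroring what the Editor
// now does with registerRenameCallback. Not the repo's actual store.
interface RenameStore {
  renameCallback: (() => void) | null
  registerRenameCallback: (cb: (() => void) | null) => void
  requestRename: () => void
}

const useRenameStore = create<RenameStore>((set, get) => ({
  renameCallback: null,
  registerRenameCallback: (cb) => set({ renameCallback: cb }),
  requestRename: () => get().renameCallback?.(),
}))
```
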
@@ -1,5 +1,5 @@
 import type React from 'react'
-import { RepeatIcon, SplitIcon } from 'lucide-react'
+import { AlertTriangleIcon, BanIcon, RepeatIcon, SplitIcon, XCircleIcon } from 'lucide-react'
 import { getBlock } from '@/blocks'
 import { TERMINAL_BLOCK_COLUMN_WIDTH } from '@/stores/constants'
 import type { ConsoleEntry } from '@/stores/terminal'
@@ -12,6 +12,15 @@ const SUBFLOW_COLORS = {
   parallel: '#FEE12B',
 } as const

+/**
+ * Special block type colors for errors and system messages
+ */
+const SPECIAL_BLOCK_COLORS = {
+  error: '#ef4444',
+  validation: '#f59e0b',
+  cancelled: '#6b7280',
+} as const
+
 /**
  * Retrieves the icon component for a given block type
  */
@@ -32,6 +41,18 @@ export function getBlockIcon(
   return SplitIcon
 }

+if (blockType === 'error') {
+  return XCircleIcon
+}
+
+if (blockType === 'validation') {
+  return AlertTriangleIcon
+}
+
+if (blockType === 'cancelled') {
+  return BanIcon
+}
+
 return null
 }

@@ -50,6 +71,16 @@ export function getBlockColor(blockType: string): string {
 if (blockType === 'parallel') {
   return SUBFLOW_COLORS.parallel
 }
+// Special block types for errors and system messages
+if (blockType === 'error') {
+  return SPECIAL_BLOCK_COLORS.error
+}
+if (blockType === 'validation') {
+  return SPECIAL_BLOCK_COLORS.validation
+}
+if (blockType === 'cancelled') {
+  return SPECIAL_BLOCK_COLORS.cancelled
+}
 return '#6b7280'
 }

@@ -188,7 +188,7 @@ export function useBlockOutputFields({
|
|||||||
baseOutputs = {
|
baseOutputs = {
|
||||||
input: { type: 'string', description: 'User message' },
|
input: { type: 'string', description: 'User message' },
|
||||||
conversationId: { type: 'string', description: 'Conversation ID' },
|
conversationId: { type: 'string', description: 'Conversation ID' },
|
||||||
files: { type: 'files', description: 'Uploaded files' },
|
files: { type: 'file[]', description: 'Uploaded files' },
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
const inputFormatValue = mergedSubBlocks?.inputFormat?.value
|
const inputFormatValue = mergedSubBlocks?.inputFormat?.value
|
||||||
|
|||||||
@@ -4,6 +4,11 @@ import { useQueryClient } from '@tanstack/react-query'
|
|||||||
import { v4 as uuidv4 } from 'uuid'
|
import { v4 as uuidv4 } from 'uuid'
|
||||||
import { buildTraceSpans } from '@/lib/logs/execution/trace-spans/trace-spans'
|
import { buildTraceSpans } from '@/lib/logs/execution/trace-spans/trace-spans'
|
||||||
import { processStreamingBlockLogs } from '@/lib/tokenization'
|
import { processStreamingBlockLogs } from '@/lib/tokenization'
|
||||||
|
import type {
|
||||||
|
BlockCompletedData,
|
||||||
|
BlockErrorData,
|
||||||
|
BlockStartedData,
|
||||||
|
} from '@/lib/workflows/executor/execution-events'
|
||||||
import {
|
import {
|
||||||
extractTriggerMockPayload,
|
extractTriggerMockPayload,
|
||||||
selectBestTrigger,
|
selectBestTrigger,
|
||||||
@@ -17,7 +22,13 @@ import {
|
|||||||
import { useCurrentWorkflow } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-current-workflow'
|
import { useCurrentWorkflow } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-current-workflow'
|
||||||
import { getBlock } from '@/blocks'
|
import { getBlock } from '@/blocks'
|
||||||
import type { SerializableExecutionState } from '@/executor/execution/types'
|
import type { SerializableExecutionState } from '@/executor/execution/types'
|
||||||
import type { BlockLog, BlockState, ExecutionResult, StreamingExecution } from '@/executor/types'
|
import type {
|
||||||
|
BlockLog,
|
||||||
|
BlockState,
|
||||||
|
ExecutionResult,
|
||||||
|
NormalizedBlockOutput,
|
||||||
|
StreamingExecution,
|
||||||
|
} from '@/executor/types'
|
||||||
import { hasExecutionResult } from '@/executor/utils/errors'
|
import { hasExecutionResult } from '@/executor/utils/errors'
|
||||||
import { coerceValue } from '@/executor/utils/start-block'
|
import { coerceValue } from '@/executor/utils/start-block'
|
||||||
import { subscriptionKeys } from '@/hooks/queries/subscription'
|
import { subscriptionKeys } from '@/hooks/queries/subscription'
|
||||||
@@ -27,7 +38,7 @@ import { useExecutionStore } from '@/stores/execution'
|
|||||||
import { useNotificationStore } from '@/stores/notifications'
|
import { useNotificationStore } from '@/stores/notifications'
|
||||||
import { useVariablesStore } from '@/stores/panel'
|
import { useVariablesStore } from '@/stores/panel'
|
||||||
import { useEnvironmentStore } from '@/stores/settings/environment'
|
import { useEnvironmentStore } from '@/stores/settings/environment'
|
||||||
import { type ConsoleEntry, useTerminalConsoleStore } from '@/stores/terminal'
|
import { useTerminalConsoleStore } from '@/stores/terminal'
|
||||||
import { useWorkflowDiffStore } from '@/stores/workflow-diff'
|
import { useWorkflowDiffStore } from '@/stores/workflow-diff'
|
||||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||||
import { mergeSubblockState } from '@/stores/workflows/utils'
|
import { mergeSubblockState } from '@/stores/workflows/utils'
|
||||||
@@ -41,6 +52,19 @@ interface DebugValidationResult {
|
|||||||
error?: string
|
error?: string
|
||||||
}
|
}
|
||||||
|
|
||||||
|
interface BlockEventHandlerConfig {
|
||||||
|
workflowId?: string
|
||||||
|
executionId?: string
|
||||||
|
workflowEdges: Array<{ id: string; target: string }>
|
||||||
|
activeBlocksSet: Set<string>
|
||||||
|
accumulatedBlockLogs: BlockLog[]
|
||||||
|
accumulatedBlockStates: Map<string, BlockState>
|
||||||
|
executedBlockIds: Set<string>
|
||||||
|
consoleMode: 'update' | 'add'
|
||||||
|
includeStartConsoleEntry: boolean
|
||||||
|
onBlockCompleteCallback?: (blockId: string, output: unknown) => Promise<void>
|
||||||
|
}
|
||||||
|
|
||||||
const WORKFLOW_EXECUTION_FAILURE_MESSAGE = 'Workflow execution failed'
|
const WORKFLOW_EXECUTION_FAILURE_MESSAGE = 'Workflow execution failed'
|
||||||
|
|
||||||
function isRecord(value: unknown): value is Record<string, unknown> {
|
function isRecord(value: unknown): value is Record<string, unknown> {
|
||||||
@@ -149,6 +173,340 @@ export function useWorkflowExecution() {
     setActiveBlocks,
   ])
 
+  /**
+   * Builds timing fields for execution-level console entries.
+   */
+  const buildExecutionTiming = useCallback((durationMs?: number) => {
+    const normalizedDuration = durationMs || 0
+    return {
+      durationMs: normalizedDuration,
+      startedAt: new Date(Date.now() - normalizedDuration).toISOString(),
+      endedAt: new Date().toISOString(),
+    }
+  }, [])
+
+  /**
+   * Adds an execution-level error entry to the console when appropriate.
+   */
+  const addExecutionErrorConsoleEntry = useCallback(
+    (params: {
+      workflowId?: string
+      executionId?: string
+      error?: string
+      durationMs?: number
+      blockLogs: BlockLog[]
+      isPreExecutionError?: boolean
+    }) => {
+      if (!params.workflowId) return
+
+      const hasBlockError = params.blockLogs.some((log) => log.error)
+      const isPreExecutionError = params.isPreExecutionError ?? false
+      if (!isPreExecutionError && hasBlockError) {
+        return
+      }
+
+      const errorMessage = params.error || 'Execution failed'
+      const isTimeout = errorMessage.toLowerCase().includes('timed out')
+      const timing = buildExecutionTiming(params.durationMs)
+
+      addConsole({
+        input: {},
+        output: {},
+        success: false,
+        error: errorMessage,
+        durationMs: timing.durationMs,
+        startedAt: timing.startedAt,
+        executionOrder: isPreExecutionError ? 0 : Number.MAX_SAFE_INTEGER,
+        endedAt: timing.endedAt,
+        workflowId: params.workflowId,
+        blockId: isPreExecutionError
+          ? 'validation'
+          : isTimeout
+            ? 'timeout-error'
+            : 'execution-error',
+        executionId: params.executionId,
+        blockName: isPreExecutionError
+          ? 'Workflow Validation'
+          : isTimeout
+            ? 'Timeout Error'
+            : 'Execution Error',
+        blockType: isPreExecutionError ? 'validation' : 'error',
+      })
+    },
+    [addConsole, buildExecutionTiming]
+  )
+
+  /**
+   * Adds an execution-level cancellation entry to the console.
+   */
+  const addExecutionCancelledConsoleEntry = useCallback(
+    (params: { workflowId?: string; executionId?: string; durationMs?: number }) => {
+      if (!params.workflowId) return
+
+      const timing = buildExecutionTiming(params.durationMs)
+      addConsole({
+        input: {},
+        output: {},
+        success: false,
+        error: 'Execution was cancelled',
+        durationMs: timing.durationMs,
+        startedAt: timing.startedAt,
+        executionOrder: Number.MAX_SAFE_INTEGER,
+        endedAt: timing.endedAt,
+        workflowId: params.workflowId,
+        blockId: 'cancelled',
+        executionId: params.executionId,
+        blockName: 'Execution Cancelled',
+        blockType: 'cancelled',
+      })
+    },
+    [addConsole, buildExecutionTiming]
+  )
+
+  /**
+   * Handles workflow-level execution errors for console output.
+   */
+  const handleExecutionErrorConsole = useCallback(
+    (params: {
+      workflowId?: string
+      executionId?: string
+      error?: string
+      durationMs?: number
+      blockLogs: BlockLog[]
+      isPreExecutionError?: boolean
+    }) => {
+      if (params.workflowId) {
+        cancelRunningEntries(params.workflowId)
+      }
+      addExecutionErrorConsoleEntry(params)
+    },
+    [addExecutionErrorConsoleEntry, cancelRunningEntries]
+  )
+
+  /**
+   * Handles workflow-level execution cancellations for console output.
+   */
+  const handleExecutionCancelledConsole = useCallback(
+    (params: { workflowId?: string; executionId?: string; durationMs?: number }) => {
+      if (params.workflowId) {
+        cancelRunningEntries(params.workflowId)
+      }
+      addExecutionCancelledConsoleEntry(params)
+    },
+    [addExecutionCancelledConsoleEntry, cancelRunningEntries]
+  )
+
+  const buildBlockEventHandlers = useCallback(
+    (config: BlockEventHandlerConfig) => {
+      const {
+        workflowId,
+        executionId,
+        workflowEdges,
+        activeBlocksSet,
+        accumulatedBlockLogs,
+        accumulatedBlockStates,
+        executedBlockIds,
+        consoleMode,
+        includeStartConsoleEntry,
+        onBlockCompleteCallback,
+      } = config
+
+      const updateActiveBlocks = (blockId: string, isActive: boolean) => {
+        if (isActive) {
+          activeBlocksSet.add(blockId)
+        } else {
+          activeBlocksSet.delete(blockId)
+        }
+        setActiveBlocks(new Set(activeBlocksSet))
+      }
+
+      const markIncomingEdges = (blockId: string) => {
+        const incomingEdges = workflowEdges.filter((edge) => edge.target === blockId)
+        incomingEdges.forEach((edge) => {
+          setEdgeRunStatus(edge.id, 'success')
+        })
+      }
+
+      const isContainerBlockType = (blockType?: string) => {
+        return blockType === 'loop' || blockType === 'parallel'
+      }
+
+      const createBlockLogEntry = (
+        data: BlockCompletedData | BlockErrorData,
+        options: { success: boolean; output?: unknown; error?: string }
+      ): BlockLog => ({
+        blockId: data.blockId,
+        blockName: data.blockName || 'Unknown Block',
+        blockType: data.blockType || 'unknown',
+        input: data.input || {},
+        output: options.output ?? {},
+        success: options.success,
+        error: options.error,
+        durationMs: data.durationMs,
+        startedAt: data.startedAt,
+        executionOrder: data.executionOrder,
+        endedAt: data.endedAt,
+      })
+
+      const addConsoleEntry = (data: BlockCompletedData, output: NormalizedBlockOutput) => {
+        if (!workflowId) return
+        addConsole({
+          input: data.input || {},
+          output,
+          success: true,
+          durationMs: data.durationMs,
+          startedAt: data.startedAt,
+          executionOrder: data.executionOrder,
+          endedAt: data.endedAt,
+          workflowId,
+          blockId: data.blockId,
+          executionId,
+          blockName: data.blockName || 'Unknown Block',
+          blockType: data.blockType || 'unknown',
+          iterationCurrent: data.iterationCurrent,
+          iterationTotal: data.iterationTotal,
+          iterationType: data.iterationType,
+        })
+      }
+
+      const addConsoleErrorEntry = (data: BlockErrorData) => {
+        if (!workflowId) return
+        addConsole({
+          input: data.input || {},
+          output: {},
+          success: false,
+          error: data.error,
+          durationMs: data.durationMs,
+          startedAt: data.startedAt,
+          executionOrder: data.executionOrder,
+          endedAt: data.endedAt,
+          workflowId,
+          blockId: data.blockId,
+          executionId,
+          blockName: data.blockName || 'Unknown Block',
+          blockType: data.blockType || 'unknown',
+          iterationCurrent: data.iterationCurrent,
+          iterationTotal: data.iterationTotal,
+          iterationType: data.iterationType,
+        })
+      }
+
+      const updateConsoleEntry = (data: BlockCompletedData) => {
+        updateConsole(
+          data.blockId,
+          {
+            input: data.input || {},
+            replaceOutput: data.output,
+            success: true,
+            durationMs: data.durationMs,
+            startedAt: data.startedAt,
+            endedAt: data.endedAt,
+            isRunning: false,
+            iterationCurrent: data.iterationCurrent,
+            iterationTotal: data.iterationTotal,
+            iterationType: data.iterationType,
+          },
+          executionId
+        )
+      }
+
+      const updateConsoleErrorEntry = (data: BlockErrorData) => {
+        updateConsole(
+          data.blockId,
+          {
+            input: data.input || {},
+            replaceOutput: {},
+            success: false,
+            error: data.error,
+            durationMs: data.durationMs,
+            startedAt: data.startedAt,
+            endedAt: data.endedAt,
+            isRunning: false,
+            iterationCurrent: data.iterationCurrent,
+            iterationTotal: data.iterationTotal,
+            iterationType: data.iterationType,
+          },
+          executionId
+        )
+      }
+
+      const onBlockStarted = (data: BlockStartedData) => {
+        updateActiveBlocks(data.blockId, true)
+        markIncomingEdges(data.blockId)
+
+        if (!includeStartConsoleEntry || !workflowId) return
+
+        const startedAt = new Date().toISOString()
+        addConsole({
+          input: {},
+          output: undefined,
+          success: undefined,
+          durationMs: undefined,
+          startedAt,
+          executionOrder: data.executionOrder,
+          endedAt: undefined,
+          workflowId,
+          blockId: data.blockId,
+          executionId,
+          blockName: data.blockName || 'Unknown Block',
+          blockType: data.blockType || 'unknown',
+          isRunning: true,
+          iterationCurrent: data.iterationCurrent,
+          iterationTotal: data.iterationTotal,
+          iterationType: data.iterationType,
+        })
+      }
+
+      const onBlockCompleted = (data: BlockCompletedData) => {
+        updateActiveBlocks(data.blockId, false)
+        setBlockRunStatus(data.blockId, 'success')
+
+        executedBlockIds.add(data.blockId)
+        accumulatedBlockStates.set(data.blockId, {
+          output: data.output,
+          executed: true,
+          executionTime: data.durationMs,
+        })
+
+        if (isContainerBlockType(data.blockType)) {
+          return
+        }
+
+        accumulatedBlockLogs.push(createBlockLogEntry(data, { success: true, output: data.output }))
+
+        if (consoleMode === 'update') {
+          updateConsoleEntry(data)
+        } else {
+          addConsoleEntry(data, data.output as NormalizedBlockOutput)
+        }
+
+        if (onBlockCompleteCallback) {
+          onBlockCompleteCallback(data.blockId, data.output).catch((error) => {
+            logger.error('Error in onBlockComplete callback:', error)
+          })
+        }
+      }
+
+      const onBlockError = (data: BlockErrorData) => {
+        updateActiveBlocks(data.blockId, false)
+        setBlockRunStatus(data.blockId, 'error')
+
+        accumulatedBlockLogs.push(
+          createBlockLogEntry(data, { success: false, output: {}, error: data.error })
+        )
+
+        if (consoleMode === 'update') {
+          updateConsoleErrorEntry(data)
+        } else {
+          addConsoleErrorEntry(data)
+        }
+      }
+
+      return { onBlockStarted, onBlockCompleted, onBlockError }
+    },
+    [addConsole, setActiveBlocks, setBlockRunStatus, setEdgeRunStatus, updateConsole]
+  )
+
 /**
  * Checks if debug session is complete based on execution result
  */
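Note: the `buildBlockEventHandlers` factory added above is what lets the two execution paths further down share one callback implementation. A hedged sketch of how the two call sites differ, matching the configs visible in the later hunks (`shared` is hypothetical shorthand for the common fields, not a name from the diff):

```typescript
// Sketch only: `shared` stands in for workflowId, executionId, workflowEdges,
// and the accumulator collections passed at both call sites.
const fullRunHandlers = buildBlockEventHandlers({
  ...shared,
  consoleMode: 'update', // update the isRunning entry created when a block starts
  includeStartConsoleEntry: true, // emit a console entry as soon as a block starts
  onBlockCompleteCallback: onBlockComplete,
})

const runFromBlockHandlers = buildBlockEventHandlers({
  ...shared,
  consoleMode: 'add', // no start entry exists, so append a finished entry instead
  includeStartConsoleEntry: false,
})
```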
@@ -789,7 +1147,12 @@ export function useWorkflowExecution() {
       const startBlock = TriggerUtils.findStartBlock(filteredStates, 'chat')
 
       if (!startBlock) {
-        throw new Error(TriggerUtils.getTriggerValidationMessage('chat', 'missing'))
+        throw new WorkflowValidationError(
+          TriggerUtils.getTriggerValidationMessage('chat', 'missing'),
+          'validation',
+          'validation',
+          'Workflow Validation'
+        )
       }
 
       startBlockId = startBlock.blockId
@@ -800,7 +1163,12 @@ export function useWorkflowExecution() {
       })
 
       if (candidates.length === 0) {
-        const error = new Error('Workflow requires at least one trigger block to execute')
+        const error = new WorkflowValidationError(
+          'Workflow requires at least one trigger block to execute',
+          'validation',
+          'validation',
+          'Workflow Validation'
+        )
         logger.error('No trigger blocks found for manual run', {
           allBlockTypes: Object.values(filteredStates).map((b) => b.type),
         })
@@ -813,7 +1181,12 @@ export function useWorkflowExecution() {
         (candidate) => candidate.path === StartBlockPath.SPLIT_API
       )
       if (apiCandidates.length > 1) {
-        const error = new Error('Multiple API Trigger blocks found. Keep only one.')
+        const error = new WorkflowValidationError(
+          'Multiple API Trigger blocks found. Keep only one.',
+          'validation',
+          'validation',
+          'Workflow Validation'
+        )
         logger.error('Multiple API triggers found')
         setIsExecuting(false)
         throw error
@@ -833,7 +1206,12 @@ export function useWorkflowExecution() {
       const outgoingConnections = workflowEdges.filter((edge) => edge.source === startBlockId)
       if (outgoingConnections.length === 0) {
         const triggerName = selectedTrigger.name || selectedTrigger.type
-        const error = new Error(`${triggerName} must be connected to other blocks to execute`)
+        const error = new WorkflowValidationError(
+          `${triggerName} must be connected to other blocks to execute`,
+          'validation',
+          'validation',
+          'Workflow Validation'
+        )
         logger.error('Trigger has no outgoing connections', { triggerName, startBlockId })
         setIsExecuting(false)
         throw error
@@ -859,7 +1237,12 @@ export function useWorkflowExecution() {
 
     // If we don't have a valid startBlockId at this point, throw an error
     if (!startBlockId) {
-      const error = new Error('No valid trigger block found to start execution')
+      const error = new WorkflowValidationError(
+        'No valid trigger block found to start execution',
+        'validation',
+        'validation',
+        'Workflow Validation'
+      )
       logger.error('No startBlockId found after trigger search')
       setIsExecuting(false)
       throw error
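All five validation sites above now throw `WorkflowValidationError` with the same trailing arguments. The class definition is not part of this diff; a plausible shape inferred only from these call sites (the parameter names are assumptions):

```typescript
// Hypothetical reconstruction from the call sites; the real class may differ.
class WorkflowValidationError extends Error {
  constructor(
    message: string,
    public blockId: string, // 'validation' at every site above
    public blockType: string, // 'validation'
    public blockName: string // 'Workflow Validation'
  ) {
    super(message)
    this.name = 'WorkflowValidationError'
  }
}
```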
@@ -892,6 +1275,19 @@ export function useWorkflowExecution() {
 
     // Execute the workflow
     try {
+      const blockHandlers = buildBlockEventHandlers({
+        workflowId: activeWorkflowId,
+        executionId,
+        workflowEdges,
+        activeBlocksSet,
+        accumulatedBlockLogs,
+        accumulatedBlockStates,
+        executedBlockIds,
+        consoleMode: 'update',
+        includeStartConsoleEntry: true,
+        onBlockCompleteCallback: onBlockComplete,
+      })
+
       await executionStream.execute({
         workflowId: activeWorkflowId,
         input: finalWorkflowInput,
@@ -914,145 +1310,9 @@ export function useWorkflowExecution() {
             logger.info('Server execution started:', data)
           },
 
-          onBlockStarted: (data) => {
-            activeBlocksSet.add(data.blockId)
-            // Create a new Set to trigger React re-render
-            setActiveBlocks(new Set(activeBlocksSet))
-
-            // Track edges that led to this block as soon as execution starts
-            const incomingEdges = workflowEdges.filter((edge) => edge.target === data.blockId)
-            incomingEdges.forEach((edge) => {
-              setEdgeRunStatus(edge.id, 'success')
-            })
-
-            // Add entry to terminal immediately with isRunning=true
-            // Use server-provided executionOrder to ensure correct sort order
-            const startedAt = new Date().toISOString()
-            addConsole({
-              input: {},
-              output: undefined,
-              success: undefined,
-              durationMs: undefined,
-              startedAt,
-              executionOrder: data.executionOrder,
-              endedAt: undefined,
-              workflowId: activeWorkflowId,
-              blockId: data.blockId,
-              executionId,
-              blockName: data.blockName || 'Unknown Block',
-              blockType: data.blockType || 'unknown',
-              isRunning: true,
-              // Pass through iteration context for subflow grouping
-              iterationCurrent: data.iterationCurrent,
-              iterationTotal: data.iterationTotal,
-              iterationType: data.iterationType,
-            })
-          },
-
-          onBlockCompleted: (data) => {
-            activeBlocksSet.delete(data.blockId)
-            setActiveBlocks(new Set(activeBlocksSet))
-            setBlockRunStatus(data.blockId, 'success')
-
-            executedBlockIds.add(data.blockId)
-            accumulatedBlockStates.set(data.blockId, {
-              output: data.output,
-              executed: true,
-              executionTime: data.durationMs,
-            })
-
-            const isContainerBlock = data.blockType === 'loop' || data.blockType === 'parallel'
-            if (isContainerBlock) return
-
-            const startedAt = data.startedAt
-            const endedAt = data.endedAt
-
-            accumulatedBlockLogs.push({
-              blockId: data.blockId,
-              blockName: data.blockName || 'Unknown Block',
-              blockType: data.blockType || 'unknown',
-              input: data.input || {},
-              output: data.output,
-              success: true,
-              durationMs: data.durationMs,
-              startedAt,
-              executionOrder: data.executionOrder,
-              endedAt,
-            })
-
-            // Update existing console entry (created in onBlockStarted) with completion data
-            updateConsole(
-              data.blockId,
-              {
-                input: data.input || {},
-                replaceOutput: data.output,
-                success: true,
-                durationMs: data.durationMs,
-                startedAt,
-                endedAt,
-                isRunning: false,
-                // Pass through iteration context for subflow grouping
-                iterationCurrent: data.iterationCurrent,
-                iterationTotal: data.iterationTotal,
-                iterationType: data.iterationType,
-              },
-              executionId
-            )
-
-            // Call onBlockComplete callback if provided
-            if (onBlockComplete) {
-              onBlockComplete(data.blockId, data.output).catch((error) => {
-                logger.error('Error in onBlockComplete callback:', error)
-              })
-            }
-          },
-
-          onBlockError: (data) => {
-            activeBlocksSet.delete(data.blockId)
-            // Create a new Set to trigger React re-render
-            setActiveBlocks(new Set(activeBlocksSet))
-
-            // Track failed block execution in run path
-            setBlockRunStatus(data.blockId, 'error')
-
-            const startedAt = data.startedAt
-            const endedAt = data.endedAt
-
-            // Accumulate block error log for the execution result
-            accumulatedBlockLogs.push({
-              blockId: data.blockId,
-              blockName: data.blockName || 'Unknown Block',
-              blockType: data.blockType || 'unknown',
-              input: data.input || {},
-              output: {},
-              success: false,
-              error: data.error,
-              durationMs: data.durationMs,
-              startedAt,
-              executionOrder: data.executionOrder,
-              endedAt,
-            })
-
-            // Update existing console entry (created in onBlockStarted) with error data
-            updateConsole(
-              data.blockId,
-              {
-                input: data.input || {},
-                replaceOutput: {},
-                success: false,
-                error: data.error,
-                durationMs: data.durationMs,
-                startedAt,
-                endedAt,
-                isRunning: false,
-                // Pass through iteration context for subflow grouping
-                iterationCurrent: data.iterationCurrent,
-                iterationTotal: data.iterationTotal,
-                iterationType: data.iterationType,
-              },
-              executionId
-            )
-          },
+          onBlockStarted: blockHandlers.onBlockStarted,
+          onBlockCompleted: blockHandlers.onBlockCompleted,
+          onBlockError: blockHandlers.onBlockError,
 
           onStreamChunk: (data) => {
             const existing = streamedContent.get(data.blockId) || ''
@@ -1157,33 +1417,23 @@ export function useWorkflowExecution() {
             logs: accumulatedBlockLogs,
           }
 
-          // Only add workflow-level error if no blocks have executed yet
-          // This catches pre-execution errors (validation, serialization, etc.)
-          // Block execution errors are already logged via onBlockError callback
-          const { entries } = useTerminalConsoleStore.getState()
-          const existingLogs = entries.filter(
-            (log: ConsoleEntry) => log.executionId === executionId
-          )
-
-          if (existingLogs.length === 0) {
-            // No blocks executed yet - this is a pre-execution error
-            // Use 0 for executionOrder so validation errors appear first
-            addConsole({
-              input: {},
-              output: {},
-              success: false,
-              error: data.error,
-              durationMs: data.duration || 0,
-              startedAt: new Date(Date.now() - (data.duration || 0)).toISOString(),
-              executionOrder: 0,
-              endedAt: new Date().toISOString(),
-              workflowId: activeWorkflowId,
-              blockId: 'validation',
-              executionId,
-              blockName: 'Workflow Validation',
-              blockType: 'validation',
-            })
-          }
+          const isPreExecutionError = accumulatedBlockLogs.length === 0
+          handleExecutionErrorConsole({
+            workflowId: activeWorkflowId,
+            executionId,
+            error: data.error,
+            durationMs: data.duration,
+            blockLogs: accumulatedBlockLogs,
+            isPreExecutionError,
+          })
+        },
+
+        onExecutionCancelled: (data) => {
+          handleExecutionCancelledConsole({
+            workflowId: activeWorkflowId,
+            executionId,
+            durationMs: data?.duration,
+          })
         },
       },
     })
@@ -1579,115 +1829,27 @@ export function useWorkflowExecution() {
       const activeBlocksSet = new Set<string>()
 
       try {
+        const blockHandlers = buildBlockEventHandlers({
+          workflowId,
+          executionId,
+          workflowEdges,
+          activeBlocksSet,
+          accumulatedBlockLogs,
+          accumulatedBlockStates,
+          executedBlockIds,
+          consoleMode: 'add',
+          includeStartConsoleEntry: false,
+        })
+
         await executionStream.executeFromBlock({
           workflowId,
           startBlockId: blockId,
           sourceSnapshot: effectiveSnapshot,
           input: workflowInput,
           callbacks: {
-            onBlockStarted: (data) => {
-              activeBlocksSet.add(data.blockId)
-              setActiveBlocks(new Set(activeBlocksSet))
-
-              const incomingEdges = workflowEdges.filter((edge) => edge.target === data.blockId)
-              incomingEdges.forEach((edge) => {
-                setEdgeRunStatus(edge.id, 'success')
-              })
-            },
-
-            onBlockCompleted: (data) => {
-              activeBlocksSet.delete(data.blockId)
-              setActiveBlocks(new Set(activeBlocksSet))
-
-              setBlockRunStatus(data.blockId, 'success')
-
-              executedBlockIds.add(data.blockId)
-              accumulatedBlockStates.set(data.blockId, {
-                output: data.output,
-                executed: true,
-                executionTime: data.durationMs,
-              })
-
-              const isContainerBlock = data.blockType === 'loop' || data.blockType === 'parallel'
-              if (isContainerBlock) return
-
-              const startedAt = data.startedAt
-              const endedAt = data.endedAt
-
-              accumulatedBlockLogs.push({
-                blockId: data.blockId,
-                blockName: data.blockName || 'Unknown Block',
-                blockType: data.blockType || 'unknown',
-                input: data.input || {},
-                output: data.output,
-                success: true,
-                durationMs: data.durationMs,
-                startedAt,
-                executionOrder: data.executionOrder,
-                endedAt,
-              })
-
-              addConsole({
-                input: data.input || {},
-                output: data.output,
-                success: true,
-                durationMs: data.durationMs,
-                startedAt,
-                executionOrder: data.executionOrder,
-                endedAt,
-                workflowId,
-                blockId: data.blockId,
-                executionId,
-                blockName: data.blockName || 'Unknown Block',
-                blockType: data.blockType || 'unknown',
-                iterationCurrent: data.iterationCurrent,
-                iterationTotal: data.iterationTotal,
-                iterationType: data.iterationType,
-              })
-            },
-
-            onBlockError: (data) => {
-              activeBlocksSet.delete(data.blockId)
-              setActiveBlocks(new Set(activeBlocksSet))
-
-              setBlockRunStatus(data.blockId, 'error')
-
-              const startedAt = data.startedAt
-              const endedAt = data.endedAt
-
-              accumulatedBlockLogs.push({
-                blockId: data.blockId,
-                blockName: data.blockName || 'Unknown Block',
-                blockType: data.blockType || 'unknown',
-                input: data.input || {},
-                output: {},
-                success: false,
-                error: data.error,
-                executionOrder: data.executionOrder,
-                durationMs: data.durationMs,
-                startedAt,
-                endedAt,
-              })
-
-              addConsole({
-                input: data.input || {},
-                output: {},
-                success: false,
-                error: data.error,
-                durationMs: data.durationMs,
-                startedAt,
-                executionOrder: data.executionOrder,
-                endedAt,
-                workflowId,
-                blockId: data.blockId,
-                executionId,
-                blockName: data.blockName,
-                blockType: data.blockType,
-                iterationCurrent: data.iterationCurrent,
-                iterationTotal: data.iterationTotal,
-                iterationType: data.iterationType,
-              })
-            },
+            onBlockStarted: blockHandlers.onBlockStarted,
+            onBlockCompleted: blockHandlers.onBlockCompleted,
+            onBlockError: blockHandlers.onBlockError,
 
             onExecutionCompleted: (data) => {
               if (data.success) {
@@ -1730,13 +1892,23 @@ export function useWorkflowExecution() {
                   'Workflow was modified. Run the workflow again to enable running from block.',
                   workflowId,
                 })
-              } else {
-                addNotification({
-                  level: 'error',
-                  message: data.error || 'Run from block failed',
-                  workflowId,
-                })
               }
+
+              handleExecutionErrorConsole({
+                workflowId,
+                executionId,
+                error: data.error,
+                durationMs: data.duration,
+                blockLogs: accumulatedBlockLogs,
+              })
+            },
+
+            onExecutionCancelled: (data) => {
+              handleExecutionCancelledConsole({
+                workflowId,
+                executionId,
+                durationMs: data?.duration,
+              })
             },
           },
         })
@@ -1758,7 +1930,9 @@ export function useWorkflowExecution() {
       setBlockRunStatus,
       setEdgeRunStatus,
       addNotification,
-      addConsole,
+      buildBlockEventHandlers,
+      handleExecutionErrorConsole,
+      handleExecutionCancelledConsole,
       executionStream,
     ]
   )
@@ -1132,7 +1132,7 @@ const WorkflowContent = React.memo(() => {
   const handleContextRename = useCallback(() => {
     if (contextMenuBlocks.length === 1) {
       usePanelEditorStore.getState().setCurrentBlockId(contextMenuBlocks[0].id)
-      usePanelEditorStore.getState().setShouldFocusRename(true)
+      usePanelEditorStore.getState().triggerRename()
    }
   }, [contextMenuBlocks])
 
@@ -1173,7 +1173,7 @@ const WorkflowContent = React.memo(() => {
       block.parentId && (block.parentType === 'loop' || block.parentType === 'parallel')
 
     if (isInsideSubflow) return { canRun: false, reason: 'Cannot run from inside subflow' }
-    if (!dependenciesSatisfied) return { canRun: false, reason: 'Disabled: Run Blocks Before' }
+    if (!dependenciesSatisfied) return { canRun: false, reason: 'Run previous blocks first' }
     if (isNoteBlock) return { canRun: false, reason: undefined }
     if (isExecuting) return { canRun: false, reason: undefined }
@@ -1,11 +1,11 @@
 import {
-  Building2,
   Clock,
   Database,
   HardDrive,
   HeadphonesIcon,
   Server,
   ShieldCheck,
+  Timer,
   Users,
   Zap,
 } from 'lucide-react'
 
@@ -15,8 +15,8 @@ import type { PlanFeature } from '@/app/workspace/[workspaceId]/w/components/sid
 export const PRO_PLAN_FEATURES: PlanFeature[] = [
   { icon: Zap, text: '150 runs per minute (sync)' },
   { icon: Clock, text: '1,000 runs per minute (async)' },
+  { icon: Timer, text: '50 min sync execution limit' },
   { icon: HardDrive, text: '50GB file storage' },
-  { icon: Building2, text: 'Unlimited workspaces' },
   { icon: Users, text: 'Unlimited invites' },
   { icon: Database, text: 'Unlimited log retention' },
 ]
 
@@ -24,8 +24,8 @@ export const PRO_PLAN_FEATURES: PlanFeature[] = [
 export const TEAM_PLAN_FEATURES: PlanFeature[] = [
   { icon: Zap, text: '300 runs per minute (sync)' },
   { icon: Clock, text: '2,500 runs per minute (async)' },
+  { icon: Timer, text: '50 min sync execution limit' },
   { icon: HardDrive, text: '500GB file storage (pooled)' },
-  { icon: Building2, text: 'Unlimited workspaces' },
   { icon: Users, text: 'Unlimited invites' },
   { icon: Database, text: 'Unlimited log retention' },
   { icon: SlackMonoIcon, text: 'Dedicated Slack channel' },
@@ -52,9 +52,8 @@ export function useImportWorkflow({ workspaceId }: UseImportWorkflowProps) {
       const workflowName = extractWorkflowName(content, filename)
       clearDiff()
 
-      const parsedContent = JSON.parse(content)
       const workflowColor =
-        parsedContent.state?.metadata?.color || parsedContent.metadata?.color || '#3972F6'
+        (workflowData.metadata as { color?: string } | undefined)?.color || '#3972F6'
 
       const result = await createWorkflowMutation.mutateAsync({
         name: workflowName,
 
@@ -62,23 +61,20 @@ export function useImportWorkflow({ workspaceId }: UseImportWorkflowProps) {
         workspaceId,
         folderId: folderId || undefined,
         sortOrder,
+        color: workflowColor,
       })
       const newWorkflowId = result.id
 
-      if (workflowColor !== '#3972F6') {
-        await fetch(`/api/workflows/${newWorkflowId}`, {
-          method: 'PATCH',
-          headers: { 'Content-Type': 'application/json' },
-          body: JSON.stringify({ color: workflowColor }),
-        })
-      }
-
-      await fetch(`/api/workflows/${newWorkflowId}/state`, {
+      const stateResponse = await fetch(`/api/workflows/${newWorkflowId}/state`, {
         method: 'PUT',
        headers: { 'Content-Type': 'application/json' },
         body: JSON.stringify(workflowData),
       })
 
+      if (!stateResponse.ok) {
+        logger.error(`Failed to save workflow state for ${newWorkflowId}`)
+      }
+
       if (workflowData.variables) {
         const variablesArray = Array.isArray(workflowData.variables)
           ? workflowData.variables
 
@@ -101,11 +97,15 @@ export function useImportWorkflow({ workspaceId }: UseImportWorkflowProps) {
           }
         }
 
-        await fetch(`/api/workflows/${newWorkflowId}/variables`, {
+        const variablesResponse = await fetch(`/api/workflows/${newWorkflowId}/variables`, {
           method: 'POST',
           headers: { 'Content-Type': 'application/json' },
           body: JSON.stringify({ variables: variablesRecord }),
         })
 
+        if (!variablesResponse.ok) {
+          logger.error(`Failed to save variables for ${newWorkflowId}`)
+        }
       }
     }
@@ -160,9 +160,8 @@ export function useImportWorkspace({ onSuccess }: UseImportWorkspaceProps = {})
         const workflowName = extractWorkflowName(workflow.content, workflow.name)
         clearDiff()
 
-        const parsedContent = JSON.parse(workflow.content)
         const workflowColor =
-          parsedContent.state?.metadata?.color || parsedContent.metadata?.color || '#3972F6'
+          (workflowData.metadata as { color?: string } | undefined)?.color || '#3972F6'
 
         const createWorkflowResponse = await fetch('/api/workflows', {
           method: 'POST',
 
@@ -216,11 +215,18 @@ export function useImportWorkspace({ onSuccess }: UseImportWorkspaceProps = {})
             }
           }
 
-          await fetch(`/api/workflows/${newWorkflow.id}/variables`, {
+          const variablesResponse = await fetch(
+            `/api/workflows/${newWorkflow.id}/variables`,
+            {
              method: 'POST',
              headers: { 'Content-Type': 'application/json' },
              body: JSON.stringify({ variables: variablesRecord }),
-          })
+            }
+          )
+
+          if (!variablesResponse.ok) {
+            logger.error(`Failed to save variables for ${newWorkflow.id}`)
+          }
         }
       }
@@ -14,6 +14,7 @@ import { createLogger } from '@sim/logger'
 import { useParams } from 'next/navigation'
 import { io, type Socket } from 'socket.io-client'
 import { getEnv } from '@/lib/core/config/env'
+import { useOperationQueueStore } from '@/stores/operation-queue/store'
 
 const logger = createLogger('SocketContext')
 
@@ -138,6 +139,7 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
   const [authFailed, setAuthFailed] = useState(false)
   const initializedRef = useRef(false)
   const socketRef = useRef<Socket | null>(null)
+  const triggerOfflineMode = useOperationQueueStore((state) => state.triggerOfflineMode)
 
   const params = useParams()
   const urlWorkflowId = params?.workflowId as string | undefined
 
@@ -341,9 +343,12 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
       })
     })
 
-    socketInstance.on('join-workflow-error', ({ error }) => {
+    socketInstance.on('join-workflow-error', ({ error, code }) => {
       isRejoiningRef.current = false
-      logger.error('Failed to join workflow:', error)
+      logger.error('Failed to join workflow:', { error, code })
+      if (code === 'ROOM_MANAGER_UNAVAILABLE') {
+        triggerOfflineMode()
+      }
     })
 
     socketInstance.on('workflow-operation', (data) => {
@@ -4,6 +4,7 @@ import { createLogger } from '@sim/logger'
 import { task } from '@trigger.dev/sdk'
 import { Cron } from 'croner'
 import { eq } from 'drizzle-orm'
 import { v4 as uuidv4 } from 'uuid'
+import { createTimeoutAbortController, getTimeoutErrorMessage } from '@/lib/core/execution-limits'
 import { preprocessExecution } from '@/lib/execution/preprocessing'
 import { LoggingSession } from '@/lib/logs/execution/logging-session'
 import { buildTraceSpans } from '@/lib/logs/execution/trace-spans/trace-spans'
 
@@ -120,6 +121,7 @@ async function runWorkflowExecution({
   loggingSession,
   requestId,
   executionId,
+  asyncTimeout,
 }: {
   payload: ScheduleExecutionPayload
   workflowRecord: WorkflowRecord
 
@@ -127,6 +129,7 @@ async function runWorkflowExecution({
   loggingSession: LoggingSession
   requestId: string
   executionId: string
+  asyncTimeout?: number
 }): Promise<RunWorkflowResult> {
   try {
     logger.debug(`[${requestId}] Loading deployed workflow ${payload.workflowId}`)
 
@@ -181,15 +184,33 @@ async function runWorkflowExecution({
       []
     )
 
-    const executionResult = await executeWorkflowCore({
+    const timeoutController = createTimeoutAbortController(asyncTimeout)
+
+    let executionResult
+    try {
+      executionResult = await executeWorkflowCore({
        snapshot,
        callbacks: {},
        loggingSession,
        includeFileBase64: true,
        base64MaxBytes: undefined,
+        abortSignal: timeoutController.signal,
      })
+    } finally {
+      timeoutController.cleanup()
+    }
 
-    if (executionResult.status === 'paused') {
+    if (
+      executionResult.status === 'cancelled' &&
+      timeoutController.isTimedOut() &&
+      timeoutController.timeoutMs
+    ) {
+      const timeoutErrorMessage = getTimeoutErrorMessage(null, timeoutController.timeoutMs)
+      logger.info(`[${requestId}] Scheduled workflow execution timed out`, {
+        timeoutMs: timeoutController.timeoutMs,
+      })
+      await loggingSession.markAsFailed(timeoutErrorMessage)
+    } else if (executionResult.status === 'paused') {
       if (!executionResult.snapshotSeed) {
         logger.error(`[${requestId}] Missing snapshot seed for paused execution`, {
           executionId,
 
@@ -453,6 +474,7 @@ export async function executeScheduleJob(payload: ScheduleExecutionPayload) {
     loggingSession,
     requestId,
     executionId,
+    asyncTimeout: preprocessResult.executionTimeout?.async,
   })
 
   if (executionResult.status === 'skip') {
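The schedule, webhook, and workflow jobs in this section all wrap `executeWorkflowCore` in the same timeout pattern. The contract implied by the call sites is a signal plus `cleanup()`, `isTimedOut()`, and `timeoutMs`; a minimal sketch of such a controller, assuming only what the call sites show (the real implementation in `@/lib/core/execution-limits` may differ):

```typescript
// Hedged sketch of the controller contract implied by the call sites.
function createTimeoutAbortController(timeoutMs?: number) {
  const controller = new AbortController()
  let timedOut = false
  // Only arm the timer when a timeout is configured for the plan.
  const timer = timeoutMs
    ? setTimeout(() => {
        timedOut = true
        controller.abort()
      }, timeoutMs)
    : undefined
  return {
    signal: controller.signal,
    timeoutMs,
    isTimedOut: () => timedOut,
    cleanup: () => {
      if (timer) clearTimeout(timer)
    },
  }
}
```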
@@ -4,7 +4,14 @@ import { createLogger } from '@sim/logger'
 import { task } from '@trigger.dev/sdk'
 import { eq } from 'drizzle-orm'
 import { v4 as uuidv4 } from 'uuid'
+import { getHighestPrioritySubscription } from '@/lib/billing'
+import {
+  createTimeoutAbortController,
+  getExecutionTimeout,
+  getTimeoutErrorMessage,
+} from '@/lib/core/execution-limits'
 import { IdempotencyService, webhookIdempotency } from '@/lib/core/idempotency'
+import type { SubscriptionPlan } from '@/lib/core/rate-limiter/types'
 import { processExecutionFiles } from '@/lib/execution/files'
 import { LoggingSession } from '@/lib/logs/execution/logging-session'
 import { buildTraceSpans } from '@/lib/logs/execution/trace-spans/trace-spans'
 
@@ -134,7 +141,13 @@ async function executeWebhookJobInternal(
     requestId
   )
 
-  // Track deploymentVersionId at function scope so it's available in catch block
+  const userSubscription = await getHighestPrioritySubscription(payload.userId)
+  const asyncTimeout = getExecutionTimeout(
+    userSubscription?.plan as SubscriptionPlan | undefined,
+    'async'
+  )
+  const timeoutController = createTimeoutAbortController(asyncTimeout)
+
   let deploymentVersionId: string | undefined
 
   try {
 
@@ -241,11 +254,22 @@ async function executeWebhookJobInternal(
         snapshot,
         callbacks: {},
         loggingSession,
-        includeFileBase64: true, // Enable base64 hydration
-        base64MaxBytes: undefined, // Use default limit
+        includeFileBase64: true,
+        base64MaxBytes: undefined,
+        abortSignal: timeoutController.signal,
       })
 
-      if (executionResult.status === 'paused') {
+      if (
+        executionResult.status === 'cancelled' &&
+        timeoutController.isTimedOut() &&
+        timeoutController.timeoutMs
+      ) {
+        const timeoutErrorMessage = getTimeoutErrorMessage(null, timeoutController.timeoutMs)
+        logger.info(`[${requestId}] Airtable webhook execution timed out`, {
+          timeoutMs: timeoutController.timeoutMs,
+        })
+        await loggingSession.markAsFailed(timeoutErrorMessage)
+      } else if (executionResult.status === 'paused') {
        if (!executionResult.snapshotSeed) {
          logger.error(`[${requestId}] Missing snapshot seed for paused execution`, {
            executionId,
 
@@ -417,11 +441,11 @@ async function executeWebhookJobInternal(
     if (triggerBlock?.subBlocks?.inputFormat?.value) {
       const inputFormat = triggerBlock.subBlocks.inputFormat.value as unknown as Array<{
         name: string
-        type: 'string' | 'number' | 'boolean' | 'object' | 'array' | 'files'
+        type: 'string' | 'number' | 'boolean' | 'object' | 'array' | 'file[]'
       }>
       logger.debug(`[${requestId}] Processing generic webhook files from inputFormat`)
 
-      const fileFields = inputFormat.filter((field) => field.type === 'files')
+      const fileFields = inputFormat.filter((field) => field.type === 'file[]')
 
       if (fileFields.length > 0 && typeof input === 'object' && input !== null) {
         const executionContext = {
 
@@ -497,9 +521,20 @@ async function executeWebhookJobInternal(
       callbacks: {},
       loggingSession,
       includeFileBase64: true,
+      abortSignal: timeoutController.signal,
     })
 
-    if (executionResult.status === 'paused') {
+    if (
+      executionResult.status === 'cancelled' &&
+      timeoutController.isTimedOut() &&
+      timeoutController.timeoutMs
+    ) {
+      const timeoutErrorMessage = getTimeoutErrorMessage(null, timeoutController.timeoutMs)
+      logger.info(`[${requestId}] Webhook execution timed out`, {
+        timeoutMs: timeoutController.timeoutMs,
+      })
+      await loggingSession.markAsFailed(timeoutErrorMessage)
+    } else if (executionResult.status === 'paused') {
      if (!executionResult.snapshotSeed) {
        logger.error(`[${requestId}] Missing snapshot seed for paused execution`, {
          executionId,
 
@@ -601,6 +636,8 @@ async function executeWebhookJobInternal(
     }
 
     throw error
+  } finally {
+    timeoutController.cleanup()
   }
 }
@@ -1,6 +1,7 @@
 import { createLogger } from '@sim/logger'
 import { task } from '@trigger.dev/sdk'
 import { v4 as uuidv4 } from 'uuid'
+import { createTimeoutAbortController, getTimeoutErrorMessage } from '@/lib/core/execution-limits'
 import { preprocessExecution } from '@/lib/execution/preprocessing'
 import { LoggingSession } from '@/lib/logs/execution/logging-session'
 import { buildTraceSpans } from '@/lib/logs/execution/trace-spans/trace-spans'
 
@@ -19,6 +20,7 @@ export type WorkflowExecutionPayload = {
   userId: string
   input?: any
   triggerType?: CoreTriggerType
+  executionId?: string
   metadata?: Record<string, any>
 }
 
@@ -29,7 +31,7 @@ export type WorkflowExecutionPayload = {
 */
 export async function executeWorkflowJob(payload: WorkflowExecutionPayload) {
   const workflowId = payload.workflowId
-  const executionId = uuidv4()
+  const executionId = payload.executionId || uuidv4()
   const requestId = executionId.slice(0, 8)
 
   logger.info(`[${requestId}] Starting workflow execution job: ${workflowId}`, {
 
@@ -103,15 +105,33 @@ export async function executeWorkflowJob(payload: WorkflowExecutionPayload) {
       []
     )
 
-    const result = await executeWorkflowCore({
+    const timeoutController = createTimeoutAbortController(preprocessResult.executionTimeout?.async)
+
+    let result
+    try {
+      result = await executeWorkflowCore({
        snapshot,
        callbacks: {},
        loggingSession,
        includeFileBase64: true,
        base64MaxBytes: undefined,
+        abortSignal: timeoutController.signal,
      })
+    } finally {
+      timeoutController.cleanup()
+    }
 
-    if (result.status === 'paused') {
+    if (
+      result.status === 'cancelled' &&
+      timeoutController.isTimedOut() &&
+      timeoutController.timeoutMs
+    ) {
+      const timeoutErrorMessage = getTimeoutErrorMessage(null, timeoutController.timeoutMs)
+      logger.info(`[${requestId}] Workflow execution timed out`, {
+        timeoutMs: timeoutController.timeoutMs,
+      })
+      await loggingSession.markAsFailed(timeoutErrorMessage)
+    } else if (result.status === 'paused') {
      if (!result.snapshotSeed) {
        logger.error(`[${requestId}] Missing snapshot seed for paused execution`, {
          executionId,
@@ -442,7 +442,16 @@ describe('Blocks Module', () => {
     })
 
     it('should have valid output types', () => {
-      const validPrimitiveTypes = ['string', 'number', 'boolean', 'json', 'array', 'files', 'any']
+      const validPrimitiveTypes = [
+        'string',
+        'number',
+        'boolean',
+        'json',
+        'array',
+        'file',
+        'file[]',
+        'any',
+      ]
       const blocks = getAllBlocks()
       for (const block of blocks) {
         for (const [key, outputConfig] of Object.entries(block.outputs)) {
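The test change above reflects the output-type split running through the block hunks below: the legacy `files` type becomes `file` (single) and `file[]` (list). A hypothetical block's outputs under the new names, for illustration only (`attachment`/`attachments` are made-up keys):

```typescript
import type { BlockConfig } from '@/blocks/types'

// Illustrative sketch; assumes BlockConfig exposes an outputs map as used above.
const outputs: BlockConfig['outputs'] = {
  attachment: { type: 'file', description: 'A single produced file' },
  attachments: { type: 'file[]', description: 'All produced files' },
}
```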
@@ -1,5 +1,6 @@
 import { A2AIcon } from '@/components/icons'
 import type { BlockConfig } from '@/blocks/types'
+import { normalizeFileInput } from '@/blocks/utils'
 import type { ToolResponse } from '@/tools/types'
 
 export interface A2AResponse extends ToolResponse {
 
@@ -214,6 +215,14 @@ export const A2ABlock: BlockConfig<A2AResponse> = {
     ],
     config: {
       tool: (params) => params.operation as string,
+      params: (params) => {
+        const { fileUpload, fileReference, ...rest } = params
+        const normalizedFiles = normalizeFileInput(fileUpload || fileReference || params.files)
+        return {
+          ...rest,
+          ...(normalizedFiles && { files: normalizedFiles }),
+        }
+      },
     },
   },
   inputs: {
@@ -26,7 +26,7 @@ export const ChatTriggerBlock: BlockConfig = {
   outputs: {
     input: { type: 'string', description: 'User message' },
     conversationId: { type: 'string', description: 'Conversation ID' },
-    files: { type: 'files', description: 'Uploaded files' },
+    files: { type: 'file[]', description: 'Uploaded files' },
   },
   triggers: {
     enabled: true,
@@ -1,6 +1,7 @@
 import { ConfluenceIcon } from '@/components/icons'
 import type { BlockConfig } from '@/blocks/types'
 import { AuthMode } from '@/blocks/types'
+import { normalizeFileInput } from '@/blocks/utils'
 import type { ConfluenceResponse } from '@/tools/confluence/types'
 
 export const ConfluenceBlock: BlockConfig<ConfluenceResponse> = {
 
@@ -651,14 +652,15 @@ export const ConfluenceV2Block: BlockConfig<ConfluenceResponse> = {
 
         if (operation === 'upload_attachment') {
           const fileInput = attachmentFileUpload || attachmentFileReference || attachmentFile
-          if (!fileInput) {
+          const normalizedFile = normalizeFileInput(fileInput, { single: true })
+          if (!normalizedFile) {
             throw new Error('File is required for upload attachment operation.')
           }
           return {
             credential,
             pageId: effectivePageId,
             operation,
-            file: fileInput,
+            file: normalizedFile,
             fileName: attachmentFileName,
             comment: attachmentComment,
             ...rest,
@@ -1,6 +1,7 @@
 import { DiscordIcon } from '@/components/icons'
 import type { BlockConfig } from '@/blocks/types'
 import { AuthMode } from '@/blocks/types'
+import { normalizeFileInput } from '@/blocks/utils'
 import type { DiscordResponse } from '@/tools/discord/types'
 
 export const DiscordBlock: BlockConfig<DiscordResponse> = {
@@ -578,12 +579,13 @@ export const DiscordBlock: BlockConfig<DiscordResponse> = {
         if (!params.serverId) throw new Error('Server ID is required')
 
         switch (params.operation) {
-          case 'discord_send_message':
+          case 'discord_send_message': {
             return {
               ...commonParams,
               channelId: params.channelId,
               content: params.content,
-              files: params.attachmentFiles || params.files,
+              files: normalizeFileInput(params.attachmentFiles || params.files),
             }
+          }
           case 'discord_get_messages':
             return {
@@ -789,6 +791,7 @@ export const DiscordBlock: BlockConfig<DiscordResponse> = {
   },
   outputs: {
     message: { type: 'string', description: 'Status message' },
+    files: { type: 'file[]', description: 'Files attached to the message' },
     data: { type: 'json', description: 'Response data' },
   },
 }
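Discord messages can carry several attachments, so this call site omits `{ single: true }` and keeps the normalized result as an array. A minimal sketch of just the send branch — `commonParams` stands in for whatever shared fields (credential, server ID, and so on) the real block assembles:

```typescript
import { normalizeFileInput } from '@/blocks/utils'

// Minimal sketch of the discord_send_message branch in isolation.
function buildSendMessageParams(
  params: Record<string, any>,
  commonParams: Record<string, any>
) {
  return {
    ...commonParams,
    channelId: params.channelId,
    content: params.content,
    // No { single: true }: the normalized value stays an array so a
    // message can ship multiple attachments
    files: normalizeFileInput(params.attachmentFiles || params.files),
  }
}
```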
@@ -1,6 +1,7 @@
 import { DropboxIcon } from '@/components/icons'
 import type { BlockConfig } from '@/blocks/types'
 import { AuthMode } from '@/blocks/types'
+import { normalizeFileInput } from '@/blocks/utils'
 import type { DropboxResponse } from '@/tools/dropbox/types'
 
 export const DropboxBlock: BlockConfig<DropboxResponse> = {
@@ -60,12 +61,25 @@ export const DropboxBlock: BlockConfig<DropboxResponse> = {
       required: true,
     },
     {
-      id: 'fileContent',
-      title: 'File Content',
-      type: 'long-input',
-      placeholder: 'Base64 encoded file content or file reference',
-      condition: { field: 'operation', value: 'dropbox_upload' },
+      id: 'uploadFile',
+      title: 'File',
+      type: 'file-upload',
+      canonicalParamId: 'file',
+      placeholder: 'Upload file to send to Dropbox',
+      mode: 'basic',
+      multiple: false,
       required: true,
+      condition: { field: 'operation', value: 'dropbox_upload' },
+    },
+    {
+      id: 'fileRef',
+      title: 'File',
+      type: 'short-input',
+      canonicalParamId: 'file',
+      placeholder: 'Reference file from previous blocks',
+      mode: 'advanced',
+      required: true,
+      condition: { field: 'operation', value: 'dropbox_upload' },
     },
     {
       id: 'mode',
@@ -303,6 +317,16 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
           params.maxResults = Number(params.maxResults)
         }
 
+        // Normalize file input for upload operation
+        // Check all possible field IDs: uploadFile (basic), fileRef (advanced), fileContent (legacy)
+        const normalizedFile = normalizeFileInput(
+          params.uploadFile || params.fileRef || params.fileContent,
+          { single: true }
+        )
+        if (normalizedFile) {
+          params.file = normalizedFile
+        }
+
         switch (params.operation) {
           case 'dropbox_upload':
             return 'dropbox_upload'
@@ -337,7 +361,10 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
     path: { type: 'string', description: 'Path in Dropbox' },
     autorename: { type: 'boolean', description: 'Auto-rename on conflict' },
     // Upload inputs
-    fileContent: { type: 'string', description: 'Base64 encoded file content' },
+    uploadFile: { type: 'json', description: 'Uploaded file (UserFile)' },
+    file: { type: 'json', description: 'File to upload (UserFile object)' },
+    fileRef: { type: 'json', description: 'File reference from previous block' },
+    fileContent: { type: 'string', description: 'Legacy: base64 encoded file content' },
     fileName: { type: 'string', description: 'Optional filename' },
     mode: { type: 'string', description: 'Write mode: add or overwrite' },
     mute: { type: 'boolean', description: 'Mute notifications' },
@@ -360,7 +387,7 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
   },
   outputs: {
     // Upload/Download outputs
-    file: { type: 'json', description: 'File metadata' },
+    file: { type: 'file', description: 'Downloaded file stored in execution files' },
     content: { type: 'string', description: 'File content (base64)' },
     temporaryLink: { type: 'string', description: 'Temporary download link' },
     // List folder outputs
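The Dropbox call sites make the helper's contract easy to infer: `{ single: true }` callers (Dropbox, Confluence) expect one file back, array callers (Discord, A2A) expect a list, and falsy input passes through. A purely illustrative sketch of that kind of normalization — the real `normalizeFileInput` lives in `@/blocks/utils` and may handle more shapes than shown here, and the `UserFile` fields are assumed:

```typescript
// Illustrative only; behavior inferred from the call sites in this diff.
interface UserFile {
  name: string
  url: string // assumed metadata field
}

type RawFileInput = UserFile | UserFile[] | null | undefined

function normalizeFileInputSketch(
  input: RawFileInput,
  options?: { single?: boolean }
): UserFile | UserFile[] | undefined {
  if (!input) return undefined
  // Collapse single objects and arrays into one array
  const files = Array.isArray(input) ? input : [input]
  if (files.length === 0) return undefined
  // Single-file callers get one file; the rest get the whole list
  return options?.single ? files[0] : files
}
```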
@@ -73,5 +73,6 @@ export const ElevenLabsBlock: BlockConfig<ElevenLabsBlockResponse> = {
 
   outputs: {
     audioUrl: { type: 'string', description: 'Generated audio URL' },
+    audioFile: { type: 'file', description: 'Generated audio file' },
   },
 }
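With `audioFile` typed as `file`, the generated audio can feed any block that accepts a file input, such as the Dropbox upload above via its advanced-mode `fileRef` field. A sketch of that wiring — the block reference name and resolution behavior are assumptions, not from the diff:

```typescript
// Hypothetical wiring: 'elevenlabs_1' is an illustrative block reference.
// Both sides now speak the 'file' type, so the reference can be passed
// straight into the advanced-mode field of a downstream upload.
const dropboxUploadFields = {
  operation: 'dropbox_upload',
  fileRef: '{{elevenlabs_1.audioFile}}', // assumed to resolve to a UserFile at runtime
  path: '/audio/output.mp3',
}
```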
Some files were not shown because too many files have changed in this diff.