mirror of
https://github.com/simstudioai/sim.git
synced 2026-02-04 03:35:04 -05:00
Compare commits
40 Commits
v0.5.78
...
feat/timeo
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
b890120afb | ||
|
|
c519034c8d | ||
|
|
17f02f8ed2 | ||
|
|
32a571a22a | ||
|
|
593bda7d0b | ||
|
|
0bc245b7a9 | ||
|
|
0a08ac03b9 | ||
|
|
7977ac88ca | ||
|
|
5565677f7c | ||
|
|
06ddd80ab4 | ||
|
|
5b0c2156e0 | ||
|
|
4db6e556b7 | ||
|
|
fe27adfb7c | ||
|
|
ee06ee34f6 | ||
|
|
39d75892a3 | ||
|
|
424b6e6a61 | ||
|
|
4ba22527b6 | ||
|
|
066850b65a | ||
|
|
c332efd1e4 | ||
|
|
d2e4afd15b | ||
|
|
bbf5c66abd | ||
|
|
f104659330 | ||
|
|
eac163cfd0 | ||
|
|
b53ed5dae0 | ||
|
|
d7259e304a | ||
|
|
c51f266ad7 | ||
|
|
4ca00810b2 | ||
|
|
710bf75bca | ||
|
|
f21fe2309c | ||
|
|
9c3fd1f7af | ||
|
|
501b44e05a | ||
|
|
7c1e7273de | ||
|
|
a9b7d75d87 | ||
|
|
0449804ffb | ||
|
|
c286f3ed24 | ||
|
|
b738550815 | ||
|
|
c6357f7438 | ||
|
|
b1118935f7 | ||
|
|
3e18b4186c | ||
|
|
e1ac201936 |
@@ -183,6 +183,109 @@ export const {ServiceName}Block: BlockConfig = {
|
|||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## File Input Handling
|
||||||
|
|
||||||
|
When your block accepts file uploads, use the basic/advanced mode pattern with `normalizeFileInput`.
|
||||||
|
|
||||||
|
### Basic/Advanced File Pattern
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// Basic mode: Visual file upload
|
||||||
|
{
|
||||||
|
id: 'uploadFile',
|
||||||
|
title: 'File',
|
||||||
|
type: 'file-upload',
|
||||||
|
canonicalParamId: 'file', // Both map to 'file' param
|
||||||
|
placeholder: 'Upload file',
|
||||||
|
mode: 'basic',
|
||||||
|
multiple: false,
|
||||||
|
required: true,
|
||||||
|
condition: { field: 'operation', value: 'upload' },
|
||||||
|
},
|
||||||
|
// Advanced mode: Reference from other blocks
|
||||||
|
{
|
||||||
|
id: 'fileRef',
|
||||||
|
title: 'File',
|
||||||
|
type: 'short-input',
|
||||||
|
canonicalParamId: 'file', // Both map to 'file' param
|
||||||
|
placeholder: 'Reference file (e.g., {{file_block.output}})',
|
||||||
|
mode: 'advanced',
|
||||||
|
required: true,
|
||||||
|
condition: { field: 'operation', value: 'upload' },
|
||||||
|
},
|
||||||
|
```
|
||||||
|
|
||||||
|
**Critical constraints:**
|
||||||
|
- `canonicalParamId` must NOT match any subblock's `id` in the same block
|
||||||
|
- Values are stored under subblock `id`, not `canonicalParamId`
|
||||||
|
|
||||||
|
### Normalizing File Input in tools.config
|
||||||
|
|
||||||
|
Use `normalizeFileInput` to handle all input variants:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { normalizeFileInput } from '@/blocks/utils'
|
||||||
|
|
||||||
|
tools: {
|
||||||
|
access: ['service_upload'],
|
||||||
|
config: {
|
||||||
|
tool: (params) => {
|
||||||
|
// Check all field IDs: uploadFile (basic), fileRef (advanced), fileContent (legacy)
|
||||||
|
const normalizedFile = normalizeFileInput(
|
||||||
|
params.uploadFile || params.fileRef || params.fileContent,
|
||||||
|
{ single: true }
|
||||||
|
)
|
||||||
|
if (normalizedFile) {
|
||||||
|
params.file = normalizedFile
|
||||||
|
}
|
||||||
|
return `service_${params.operation}`
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**Why this pattern?**
|
||||||
|
- Values come through as `params.uploadFile` or `params.fileRef` (the subblock IDs)
|
||||||
|
- `canonicalParamId` only controls UI/schema mapping, not runtime values
|
||||||
|
- `normalizeFileInput` handles JSON strings from advanced mode template resolution
|
||||||
|
|
||||||
|
### File Input Types in `inputs`
|
||||||
|
|
||||||
|
Use `type: 'json'` for file inputs:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
inputs: {
|
||||||
|
uploadFile: { type: 'json', description: 'Uploaded file (UserFile)' },
|
||||||
|
fileRef: { type: 'json', description: 'File reference from previous block' },
|
||||||
|
// Legacy field for backwards compatibility
|
||||||
|
fileContent: { type: 'string', description: 'Legacy: base64 encoded content' },
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Multiple Files
|
||||||
|
|
||||||
|
For multiple file uploads:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
{
|
||||||
|
id: 'attachments',
|
||||||
|
title: 'Attachments',
|
||||||
|
type: 'file-upload',
|
||||||
|
multiple: true, // Allow multiple files
|
||||||
|
maxSize: 25, // Max size in MB per file
|
||||||
|
acceptedTypes: 'image/*,application/pdf,.doc,.docx',
|
||||||
|
}
|
||||||
|
|
||||||
|
// In tools.config:
|
||||||
|
const normalizedFiles = normalizeFileInput(
|
||||||
|
params.attachments || params.attachmentRefs,
|
||||||
|
// No { single: true } - returns array
|
||||||
|
)
|
||||||
|
if (normalizedFiles) {
|
||||||
|
params.files = normalizedFiles
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
## Condition Syntax
|
## Condition Syntax
|
||||||
|
|
||||||
Controls when a field is shown based on other field values.
|
Controls when a field is shown based on other field values.
|
||||||
|
|||||||
@@ -457,7 +457,230 @@ You can usually find this in the service's brand/press kit page, or copy it from
|
|||||||
Paste the SVG code here and I'll convert it to a React component.
|
Paste the SVG code here and I'll convert it to a React component.
|
||||||
```
|
```
|
||||||
|
|
||||||
## Common Gotchas
|
## File Handling
|
||||||
|
|
||||||
|
When your integration handles file uploads or downloads, follow these patterns to work with `UserFile` objects consistently.
|
||||||
|
|
||||||
|
### What is a UserFile?
|
||||||
|
|
||||||
|
A `UserFile` is the standard file representation in Sim:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
interface UserFile {
|
||||||
|
id: string // Unique identifier
|
||||||
|
name: string // Original filename
|
||||||
|
url: string // Presigned URL for download
|
||||||
|
size: number // File size in bytes
|
||||||
|
type: string // MIME type (e.g., 'application/pdf')
|
||||||
|
base64?: string // Optional base64 content (if small file)
|
||||||
|
key?: string // Internal storage key
|
||||||
|
context?: object // Storage context metadata
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### File Input Pattern (Uploads)
|
||||||
|
|
||||||
|
For tools that accept file uploads, **always route through an internal API endpoint** rather than calling external APIs directly. This ensures proper file content retrieval.
|
||||||
|
|
||||||
|
#### 1. Block SubBlocks for File Input
|
||||||
|
|
||||||
|
Use the basic/advanced mode pattern:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// Basic mode: File upload UI
|
||||||
|
{
|
||||||
|
id: 'uploadFile',
|
||||||
|
title: 'File',
|
||||||
|
type: 'file-upload',
|
||||||
|
canonicalParamId: 'file', // Maps to 'file' param
|
||||||
|
placeholder: 'Upload file',
|
||||||
|
mode: 'basic',
|
||||||
|
multiple: false,
|
||||||
|
required: true,
|
||||||
|
condition: { field: 'operation', value: 'upload' },
|
||||||
|
},
|
||||||
|
// Advanced mode: Reference from previous block
|
||||||
|
{
|
||||||
|
id: 'fileRef',
|
||||||
|
title: 'File',
|
||||||
|
type: 'short-input',
|
||||||
|
canonicalParamId: 'file', // Same canonical param
|
||||||
|
placeholder: 'Reference file (e.g., {{file_block.output}})',
|
||||||
|
mode: 'advanced',
|
||||||
|
required: true,
|
||||||
|
condition: { field: 'operation', value: 'upload' },
|
||||||
|
},
|
||||||
|
```
|
||||||
|
|
||||||
|
**Critical:** `canonicalParamId` must NOT match any subblock `id`.
|
||||||
|
|
||||||
|
#### 2. Normalize File Input in Block Config
|
||||||
|
|
||||||
|
In `tools.config.tool`, use `normalizeFileInput` to handle all input variants:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { normalizeFileInput } from '@/blocks/utils'
|
||||||
|
|
||||||
|
tools: {
|
||||||
|
config: {
|
||||||
|
tool: (params) => {
|
||||||
|
// Normalize file from basic (uploadFile), advanced (fileRef), or legacy (fileContent)
|
||||||
|
const normalizedFile = normalizeFileInput(
|
||||||
|
params.uploadFile || params.fileRef || params.fileContent,
|
||||||
|
{ single: true }
|
||||||
|
)
|
||||||
|
if (normalizedFile) {
|
||||||
|
params.file = normalizedFile
|
||||||
|
}
|
||||||
|
return `{service}_${params.operation}`
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### 3. Create Internal API Route
|
||||||
|
|
||||||
|
Create `apps/sim/app/api/tools/{service}/{action}/route.ts`:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { NextResponse, type NextRequest } from 'next/server'
|
||||||
|
import { z } from 'zod'
|
||||||
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
|
import { FileInputSchema, type RawFileInput } from '@/lib/uploads/utils/file-schemas'
|
||||||
|
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
|
||||||
|
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
||||||
|
|
||||||
|
const logger = createLogger('{Service}UploadAPI')
|
||||||
|
|
||||||
|
const RequestSchema = z.object({
|
||||||
|
accessToken: z.string(),
|
||||||
|
file: FileInputSchema.optional().nullable(),
|
||||||
|
// Legacy field for backwards compatibility
|
||||||
|
fileContent: z.string().optional().nullable(),
|
||||||
|
// ... other params
|
||||||
|
})
|
||||||
|
|
||||||
|
export async function POST(request: NextRequest) {
|
||||||
|
const requestId = generateRequestId()
|
||||||
|
|
||||||
|
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
|
||||||
|
if (!authResult.success) {
|
||||||
|
return NextResponse.json({ success: false, error: 'Unauthorized' }, { status: 401 })
|
||||||
|
}
|
||||||
|
|
||||||
|
const body = await request.json()
|
||||||
|
const data = RequestSchema.parse(body)
|
||||||
|
|
||||||
|
let fileBuffer: Buffer
|
||||||
|
let fileName: string
|
||||||
|
|
||||||
|
// Prefer UserFile input, fall back to legacy base64
|
||||||
|
if (data.file) {
|
||||||
|
const userFiles = processFilesToUserFiles([data.file as RawFileInput], requestId, logger)
|
||||||
|
if (userFiles.length === 0) {
|
||||||
|
return NextResponse.json({ success: false, error: 'Invalid file' }, { status: 400 })
|
||||||
|
}
|
||||||
|
const userFile = userFiles[0]
|
||||||
|
fileBuffer = await downloadFileFromStorage(userFile, requestId, logger)
|
||||||
|
fileName = userFile.name
|
||||||
|
} else if (data.fileContent) {
|
||||||
|
// Legacy: base64 string (backwards compatibility)
|
||||||
|
fileBuffer = Buffer.from(data.fileContent, 'base64')
|
||||||
|
fileName = 'file'
|
||||||
|
} else {
|
||||||
|
return NextResponse.json({ success: false, error: 'File required' }, { status: 400 })
|
||||||
|
}
|
||||||
|
|
||||||
|
// Now call external API with fileBuffer
|
||||||
|
const response = await fetch('https://api.{service}.com/upload', {
|
||||||
|
method: 'POST',
|
||||||
|
headers: { Authorization: `Bearer ${data.accessToken}` },
|
||||||
|
body: new Uint8Array(fileBuffer), // Convert Buffer for fetch
|
||||||
|
})
|
||||||
|
|
||||||
|
// ... handle response
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### 4. Update Tool to Use Internal Route
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
export const {service}UploadTool: ToolConfig<Params, Response> = {
|
||||||
|
id: '{service}_upload',
|
||||||
|
// ...
|
||||||
|
params: {
|
||||||
|
file: { type: 'file', required: false, visibility: 'user-or-llm' },
|
||||||
|
fileContent: { type: 'string', required: false, visibility: 'hidden' }, // Legacy
|
||||||
|
},
|
||||||
|
request: {
|
||||||
|
url: '/api/tools/{service}/upload', // Internal route
|
||||||
|
method: 'POST',
|
||||||
|
body: (params) => ({
|
||||||
|
accessToken: params.accessToken,
|
||||||
|
file: params.file,
|
||||||
|
fileContent: params.fileContent,
|
||||||
|
}),
|
||||||
|
},
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### File Output Pattern (Downloads)
|
||||||
|
|
||||||
|
For tools that return files, use `FileToolProcessor` to store files and return `UserFile` objects.
|
||||||
|
|
||||||
|
#### In Tool transformResponse
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { FileToolProcessor } from '@/executor/utils/file-tool-processor'
|
||||||
|
|
||||||
|
transformResponse: async (response, context) => {
|
||||||
|
const data = await response.json()
|
||||||
|
|
||||||
|
// Process file outputs to UserFile objects
|
||||||
|
const fileProcessor = new FileToolProcessor(context)
|
||||||
|
const file = await fileProcessor.processFileData({
|
||||||
|
data: data.content, // base64 or buffer
|
||||||
|
mimeType: data.mimeType,
|
||||||
|
filename: data.filename,
|
||||||
|
})
|
||||||
|
|
||||||
|
return {
|
||||||
|
success: true,
|
||||||
|
output: { file },
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### In API Route (for complex file handling)
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// Return file data that FileToolProcessor can handle
|
||||||
|
return NextResponse.json({
|
||||||
|
success: true,
|
||||||
|
output: {
|
||||||
|
file: {
|
||||||
|
data: base64Content,
|
||||||
|
mimeType: 'application/pdf',
|
||||||
|
filename: 'document.pdf',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
|
### Key Helpers Reference
|
||||||
|
|
||||||
|
| Helper | Location | Purpose |
|
||||||
|
|--------|----------|---------|
|
||||||
|
| `normalizeFileInput` | `@/blocks/utils` | Normalize file params in block config |
|
||||||
|
| `processFilesToUserFiles` | `@/lib/uploads/utils/file-utils` | Convert raw inputs to UserFile[] |
|
||||||
|
| `downloadFileFromStorage` | `@/lib/uploads/utils/file-utils.server` | Get file Buffer from UserFile |
|
||||||
|
| `FileToolProcessor` | `@/executor/utils/file-tool-processor` | Process tool output files |
|
||||||
|
| `isUserFile` | `@/lib/core/utils/user-file` | Type guard for UserFile objects |
|
||||||
|
| `FileInputSchema` | `@/lib/uploads/utils/file-schemas` | Zod schema for file validation |
|
||||||
|
|
||||||
|
### Common Gotchas
|
||||||
|
|
||||||
1. **OAuth serviceId must match** - The `serviceId` in oauth-input must match the OAuth provider configuration
|
1. **OAuth serviceId must match** - The `serviceId` in oauth-input must match the OAuth provider configuration
|
||||||
2. **Tool IDs are snake_case** - `stripe_create_payment`, not `stripeCreatePayment`
|
2. **Tool IDs are snake_case** - `stripe_create_payment`, not `stripeCreatePayment`
|
||||||
@@ -465,3 +688,5 @@ Paste the SVG code here and I'll convert it to a React component.
|
|||||||
4. **Alphabetical ordering** - Keep imports and registry entries alphabetically sorted
|
4. **Alphabetical ordering** - Keep imports and registry entries alphabetically sorted
|
||||||
5. **Required can be conditional** - Use `required: { field: 'op', value: 'create' }` instead of always true
|
5. **Required can be conditional** - Use `required: { field: 'op', value: 'create' }` instead of always true
|
||||||
6. **DependsOn clears options** - When a dependency changes, selector options are refetched
|
6. **DependsOn clears options** - When a dependency changes, selector options are refetched
|
||||||
|
7. **Never pass Buffer directly to fetch** - Convert to `new Uint8Array(buffer)` for TypeScript compatibility
|
||||||
|
8. **Always handle legacy file params** - Keep hidden `fileContent` params for backwards compatibility
|
||||||
|
|||||||
@@ -195,6 +195,52 @@ import { {service}WebhookTrigger } from '@/triggers/{service}'
|
|||||||
{service}_webhook: {service}WebhookTrigger,
|
{service}_webhook: {service}WebhookTrigger,
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## File Handling
|
||||||
|
|
||||||
|
When integrations handle file uploads/downloads, use `UserFile` objects consistently.
|
||||||
|
|
||||||
|
### File Input (Uploads)
|
||||||
|
|
||||||
|
1. **Block subBlocks:** Use basic/advanced mode pattern with `canonicalParamId`
|
||||||
|
2. **Normalize in block config:** Use `normalizeFileInput` from `@/blocks/utils`
|
||||||
|
3. **Internal API route:** Create route that uses `downloadFileFromStorage` to get file content
|
||||||
|
4. **Tool routes to internal API:** Don't call external APIs directly with files
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// In block tools.config:
|
||||||
|
import { normalizeFileInput } from '@/blocks/utils'
|
||||||
|
|
||||||
|
const normalizedFile = normalizeFileInput(
|
||||||
|
params.uploadFile || params.fileRef || params.fileContent,
|
||||||
|
{ single: true }
|
||||||
|
)
|
||||||
|
if (normalizedFile) params.file = normalizedFile
|
||||||
|
```
|
||||||
|
|
||||||
|
### File Output (Downloads)
|
||||||
|
|
||||||
|
Use `FileToolProcessor` in tool `transformResponse` to store files:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { FileToolProcessor } from '@/executor/utils/file-tool-processor'
|
||||||
|
|
||||||
|
const processor = new FileToolProcessor(context)
|
||||||
|
const file = await processor.processFileData({
|
||||||
|
data: base64Content,
|
||||||
|
mimeType: 'application/pdf',
|
||||||
|
filename: 'doc.pdf',
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
|
### Key Helpers
|
||||||
|
|
||||||
|
| Helper | Location | Purpose |
|
||||||
|
|--------|----------|---------|
|
||||||
|
| `normalizeFileInput` | `@/blocks/utils` | Normalize file params in block config |
|
||||||
|
| `processFilesToUserFiles` | `@/lib/uploads/utils/file-utils` | Convert raw inputs to UserFile[] |
|
||||||
|
| `downloadFileFromStorage` | `@/lib/uploads/utils/file-utils.server` | Get Buffer from UserFile |
|
||||||
|
| `FileToolProcessor` | `@/executor/utils/file-tool-processor` | Process tool output files |
|
||||||
|
|
||||||
## Checklist
|
## Checklist
|
||||||
|
|
||||||
- [ ] Look up API docs for the service
|
- [ ] Look up API docs for the service
|
||||||
@@ -207,3 +253,5 @@ import { {service}WebhookTrigger } from '@/triggers/{service}'
|
|||||||
- [ ] Register block in `blocks/registry.ts`
|
- [ ] Register block in `blocks/registry.ts`
|
||||||
- [ ] (Optional) Create triggers in `triggers/{service}/`
|
- [ ] (Optional) Create triggers in `triggers/{service}/`
|
||||||
- [ ] (Optional) Register triggers in `triggers/registry.ts`
|
- [ ] (Optional) Register triggers in `triggers/registry.ts`
|
||||||
|
- [ ] (If file uploads) Create internal API route with `downloadFileFromStorage`
|
||||||
|
- [ ] (If file uploads) Use `normalizeFileInput` in block config
|
||||||
|
|||||||
@@ -193,6 +193,52 @@ import { {service}WebhookTrigger } from '@/triggers/{service}'
|
|||||||
{service}_webhook: {service}WebhookTrigger,
|
{service}_webhook: {service}WebhookTrigger,
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## File Handling
|
||||||
|
|
||||||
|
When integrations handle file uploads/downloads, use `UserFile` objects consistently.
|
||||||
|
|
||||||
|
### File Input (Uploads)
|
||||||
|
|
||||||
|
1. **Block subBlocks:** Use basic/advanced mode pattern with `canonicalParamId`
|
||||||
|
2. **Normalize in block config:** Use `normalizeFileInput` from `@/blocks/utils`
|
||||||
|
3. **Internal API route:** Create route that uses `downloadFileFromStorage` to get file content
|
||||||
|
4. **Tool routes to internal API:** Don't call external APIs directly with files
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// In block tools.config:
|
||||||
|
import { normalizeFileInput } from '@/blocks/utils'
|
||||||
|
|
||||||
|
const normalizedFile = normalizeFileInput(
|
||||||
|
params.uploadFile || params.fileRef || params.fileContent,
|
||||||
|
{ single: true }
|
||||||
|
)
|
||||||
|
if (normalizedFile) params.file = normalizedFile
|
||||||
|
```
|
||||||
|
|
||||||
|
### File Output (Downloads)
|
||||||
|
|
||||||
|
Use `FileToolProcessor` in tool `transformResponse` to store files:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { FileToolProcessor } from '@/executor/utils/file-tool-processor'
|
||||||
|
|
||||||
|
const processor = new FileToolProcessor(context)
|
||||||
|
const file = await processor.processFileData({
|
||||||
|
data: base64Content,
|
||||||
|
mimeType: 'application/pdf',
|
||||||
|
filename: 'doc.pdf',
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
|
### Key Helpers
|
||||||
|
|
||||||
|
| Helper | Location | Purpose |
|
||||||
|
|--------|----------|---------|
|
||||||
|
| `normalizeFileInput` | `@/blocks/utils` | Normalize file params in block config |
|
||||||
|
| `processFilesToUserFiles` | `@/lib/uploads/utils/file-utils` | Convert raw inputs to UserFile[] |
|
||||||
|
| `downloadFileFromStorage` | `@/lib/uploads/utils/file-utils.server` | Get Buffer from UserFile |
|
||||||
|
| `FileToolProcessor` | `@/executor/utils/file-tool-processor` | Process tool output files |
|
||||||
|
|
||||||
## Checklist
|
## Checklist
|
||||||
|
|
||||||
- [ ] Look up API docs for the service
|
- [ ] Look up API docs for the service
|
||||||
@@ -205,3 +251,5 @@ import { {service}WebhookTrigger } from '@/triggers/{service}'
|
|||||||
- [ ] Register block in `blocks/registry.ts`
|
- [ ] Register block in `blocks/registry.ts`
|
||||||
- [ ] (Optional) Create triggers in `triggers/{service}/`
|
- [ ] (Optional) Create triggers in `triggers/{service}/`
|
||||||
- [ ] (Optional) Register triggers in `triggers/registry.ts`
|
- [ ] (Optional) Register triggers in `triggers/registry.ts`
|
||||||
|
- [ ] (If file uploads) Create internal API route with `downloadFileFromStorage`
|
||||||
|
- [ ] (If file uploads) Use `normalizeFileInput` in block config
|
||||||
|
|||||||
19
CLAUDE.md
19
CLAUDE.md
@@ -265,6 +265,23 @@ Register in `blocks/registry.ts` (alphabetically).
|
|||||||
|
|
||||||
**dependsOn:** `['field']` or `{ all: ['a'], any: ['b', 'c'] }`
|
**dependsOn:** `['field']` or `{ all: ['a'], any: ['b', 'c'] }`
|
||||||
|
|
||||||
|
**File Input Pattern (basic/advanced mode):**
|
||||||
|
```typescript
|
||||||
|
// Basic: file-upload UI
|
||||||
|
{ id: 'uploadFile', type: 'file-upload', canonicalParamId: 'file', mode: 'basic' },
|
||||||
|
// Advanced: reference from other blocks
|
||||||
|
{ id: 'fileRef', type: 'short-input', canonicalParamId: 'file', mode: 'advanced' },
|
||||||
|
```
|
||||||
|
|
||||||
|
In `tools.config.tool`, normalize with:
|
||||||
|
```typescript
|
||||||
|
import { normalizeFileInput } from '@/blocks/utils'
|
||||||
|
const file = normalizeFileInput(params.uploadFile || params.fileRef, { single: true })
|
||||||
|
if (file) params.file = file
|
||||||
|
```
|
||||||
|
|
||||||
|
For file uploads, create an internal API route (`/api/tools/{service}/upload`) that uses `downloadFileFromStorage` to get file content from `UserFile` objects.
|
||||||
|
|
||||||
### 3. Icon (`components/icons.tsx`)
|
### 3. Icon (`components/icons.tsx`)
|
||||||
|
|
||||||
```typescript
|
```typescript
|
||||||
@@ -293,3 +310,5 @@ Register in `triggers/registry.ts`.
|
|||||||
- [ ] Create block in `blocks/blocks/{service}.ts`
|
- [ ] Create block in `blocks/blocks/{service}.ts`
|
||||||
- [ ] Register block in `blocks/registry.ts`
|
- [ ] Register block in `blocks/registry.ts`
|
||||||
- [ ] (Optional) Create and register triggers
|
- [ ] (Optional) Create and register triggers
|
||||||
|
- [ ] (If file uploads) Create internal API route with `downloadFileFromStorage`
|
||||||
|
- [ ] (If file uploads) Use `normalizeFileInput` in block config
|
||||||
|
|||||||
@@ -5421,3 +5421,18 @@ z'
|
|||||||
</svg>
|
</svg>
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export function EnrichSoIcon(props: SVGProps<SVGSVGElement>) {
|
||||||
|
return (
|
||||||
|
<svg {...props} xmlns='http://www.w3.org/2000/svg' viewBox='0 0 398 394' fill='none'>
|
||||||
|
<path
|
||||||
|
fill='#5A52F4'
|
||||||
|
d='M129.705566,319.705719 C127.553314,322.684906 125.651512,325.414673 123.657059,328.277466 C113.748466,318.440308 105.605003,310.395905 97.510834,302.302216 C93.625801,298.417419 89.990181,294.269318 85.949242,290.558868 C82.857994,287.720428 82.464081,285.757660 85.772888,282.551880 C104.068108,264.826202 122.146088,246.876312 140.285110,228.989670 C141.183945,228.103317 141.957443,227.089844 143.588837,225.218384 C140.691605,225.066116 138.820053,224.882874 136.948410,224.881958 C102.798264,224.865326 68.647453,224.765244 34.498699,224.983612 C29.315699,225.016739 27.990419,223.343155 28.090912,218.397430 C28.381887,204.076935 28.189890,189.746719 28.195684,175.420319 C28.198524,168.398178 28.319166,168.279541 35.590389,168.278687 C69.074188,168.274780 102.557991,168.281174 136.041794,168.266083 C137.968231,168.265213 139.894608,168.107101 141.821030,168.022171 C142.137955,167.513992 142.454895,167.005829 142.771820,166.497650 C122.842415,146.495621 102.913002,126.493591 83.261360,106.770348 C96.563828,93.471756 109.448814,80.590523 122.656265,67.386925 C123.522743,68.161835 124.785545,69.187096 125.930321,70.330513 C144.551819,88.930206 163.103683,107.600082 181.805267,126.118790 C186.713593,130.979126 189.085648,136.448059 189.055374,143.437057 C188.899490,179.418961 188.911179,215.402191 189.046661,251.384262 C189.072296,258.190796 186.742920,263.653717 181.982727,268.323273 C164.624405,285.351227 147.295807,302.409485 129.705566,319.705719z'
|
||||||
|
/>
|
||||||
|
<path
|
||||||
|
fill='#5A52F4'
|
||||||
|
d='M276.070923,246.906128 C288.284363,258.985870 300.156097,270.902100 312.235931,282.603485 C315.158752,285.434784 315.417542,287.246246 312.383484,290.248932 C301.143494,301.372498 290.168549,312.763733 279.075592,324.036255 C278.168030,324.958496 277.121307,325.743835 275.898315,326.801086 C274.628357,325.711792 273.460663,324.822968 272.422150,323.802673 C253.888397,305.594757 235.418701,287.321289 216.818268,269.181854 C211.508789,264.003937 208.872726,258.136688 208.914001,250.565842 C209.108337,214.917786 209.084808,179.267715 208.928864,143.619293 C208.898407,136.654907 211.130066,131.122162 216.052216,126.246094 C234.867538,107.606842 253.537521,88.820908 272.274780,70.102730 C273.313202,69.065353 274.468597,68.145027 275.264038,67.440727 C288.353516,80.579514 301.213470,93.487869 314.597534,106.922356 C295.163391,126.421753 275.214752,146.437363 255.266113,166.452972 C255.540176,166.940353 255.814240,167.427734 256.088318,167.915100 C257.983887,168.035736 259.879425,168.260345 261.775085,168.261551 C295.425201,168.282852 329.075287,168.273544 362.725403,168.279831 C369.598907,168.281113 369.776215,168.463593 369.778931,175.252213 C369.784882,189.911667 369.646088,204.573074 369.861206,219.229355 C369.925110,223.585022 368.554596,224.976288 364.148865,224.956406 C329.833130,224.801605 295.516388,224.869598 261.199951,224.868744 C259.297974,224.868698 257.396027,224.868744 254.866638,224.868744 C262.350708,232.658707 269.078217,239.661194 276.070923,246.906128z'
|
||||||
|
/>
|
||||||
|
</svg>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|||||||
@@ -29,6 +29,7 @@ import {
|
|||||||
DynamoDBIcon,
|
DynamoDBIcon,
|
||||||
ElasticsearchIcon,
|
ElasticsearchIcon,
|
||||||
ElevenLabsIcon,
|
ElevenLabsIcon,
|
||||||
|
EnrichSoIcon,
|
||||||
ExaAIIcon,
|
ExaAIIcon,
|
||||||
EyeIcon,
|
EyeIcon,
|
||||||
FirecrawlIcon,
|
FirecrawlIcon,
|
||||||
@@ -160,6 +161,7 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
|
|||||||
dynamodb: DynamoDBIcon,
|
dynamodb: DynamoDBIcon,
|
||||||
elasticsearch: ElasticsearchIcon,
|
elasticsearch: ElasticsearchIcon,
|
||||||
elevenlabs: ElevenLabsIcon,
|
elevenlabs: ElevenLabsIcon,
|
||||||
|
enrich: EnrichSoIcon,
|
||||||
exa: ExaAIIcon,
|
exa: ExaAIIcon,
|
||||||
file_v2: DocumentIcon,
|
file_v2: DocumentIcon,
|
||||||
firecrawl: FirecrawlIcon,
|
firecrawl: FirecrawlIcon,
|
||||||
|
|||||||
@@ -27,16 +27,16 @@ All API responses include information about your workflow execution limits and u
|
|||||||
"limits": {
|
"limits": {
|
||||||
"workflowExecutionRateLimit": {
|
"workflowExecutionRateLimit": {
|
||||||
"sync": {
|
"sync": {
|
||||||
"requestsPerMinute": 60, // Sustained rate limit per minute
|
"requestsPerMinute": 150, // Sustained rate limit per minute
|
||||||
"maxBurst": 120, // Maximum burst capacity
|
"maxBurst": 300, // Maximum burst capacity
|
||||||
"remaining": 118, // Current tokens available (up to maxBurst)
|
"remaining": 298, // Current tokens available (up to maxBurst)
|
||||||
"resetAt": "..." // When tokens next refill
|
"resetAt": "..." // When tokens next refill
|
||||||
},
|
},
|
||||||
"async": {
|
"async": {
|
||||||
"requestsPerMinute": 200, // Sustained rate limit per minute
|
"requestsPerMinute": 1000, // Sustained rate limit per minute
|
||||||
"maxBurst": 400, // Maximum burst capacity
|
"maxBurst": 2000, // Maximum burst capacity
|
||||||
"remaining": 398, // Current tokens available
|
"remaining": 1998, // Current tokens available
|
||||||
"resetAt": "..." // When tokens next refill
|
"resetAt": "..." // When tokens next refill
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"usage": {
|
"usage": {
|
||||||
@@ -107,28 +107,28 @@ Query workflow execution logs with extensive filtering options.
|
|||||||
}
|
}
|
||||||
],
|
],
|
||||||
"nextCursor": "eyJzIjoiMjAyNS0wMS0wMVQxMjozNDo1Ni43ODlaIiwiaWQiOiJsb2dfYWJjMTIzIn0",
|
"nextCursor": "eyJzIjoiMjAyNS0wMS0wMVQxMjozNDo1Ni43ODlaIiwiaWQiOiJsb2dfYWJjMTIzIn0",
|
||||||
"limits": {
|
"limits": {
|
||||||
"workflowExecutionRateLimit": {
|
"workflowExecutionRateLimit": {
|
||||||
"sync": {
|
"sync": {
|
||||||
"requestsPerMinute": 60,
|
"requestsPerMinute": 150,
|
||||||
"maxBurst": 120,
|
"maxBurst": 300,
|
||||||
"remaining": 118,
|
"remaining": 298,
|
||||||
"resetAt": "2025-01-01T12:35:56.789Z"
|
"resetAt": "2025-01-01T12:35:56.789Z"
|
||||||
|
},
|
||||||
|
"async": {
|
||||||
|
"requestsPerMinute": 1000,
|
||||||
|
"maxBurst": 2000,
|
||||||
|
"remaining": 1998,
|
||||||
|
"resetAt": "2025-01-01T12:35:56.789Z"
|
||||||
|
}
|
||||||
},
|
},
|
||||||
"async": {
|
"usage": {
|
||||||
"requestsPerMinute": 200,
|
"currentPeriodCost": 1.234,
|
||||||
"maxBurst": 400,
|
"limit": 10,
|
||||||
"remaining": 398,
|
"plan": "pro",
|
||||||
"resetAt": "2025-01-01T12:35:56.789Z"
|
"isExceeded": false
|
||||||
}
|
}
|
||||||
},
|
|
||||||
"usage": {
|
|
||||||
"currentPeriodCost": 1.234,
|
|
||||||
"limit": 10,
|
|
||||||
"plan": "pro",
|
|
||||||
"isExceeded": false
|
|
||||||
}
|
}
|
||||||
}
|
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
</Tab>
|
</Tab>
|
||||||
@@ -188,15 +188,15 @@ Retrieve detailed information about a specific log entry.
|
|||||||
"limits": {
|
"limits": {
|
||||||
"workflowExecutionRateLimit": {
|
"workflowExecutionRateLimit": {
|
||||||
"sync": {
|
"sync": {
|
||||||
"requestsPerMinute": 60,
|
"requestsPerMinute": 150,
|
||||||
"maxBurst": 120,
|
"maxBurst": 300,
|
||||||
"remaining": 118,
|
"remaining": 298,
|
||||||
"resetAt": "2025-01-01T12:35:56.789Z"
|
"resetAt": "2025-01-01T12:35:56.789Z"
|
||||||
},
|
},
|
||||||
"async": {
|
"async": {
|
||||||
"requestsPerMinute": 200,
|
"requestsPerMinute": 1000,
|
||||||
"maxBurst": 400,
|
"maxBurst": 2000,
|
||||||
"remaining": 398,
|
"remaining": 1998,
|
||||||
"resetAt": "2025-01-01T12:35:56.789Z"
|
"resetAt": "2025-01-01T12:35:56.789Z"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@@ -477,10 +477,10 @@ The API uses a **token bucket algorithm** for rate limiting, providing fair usag
|
|||||||
|
|
||||||
| Plan | Requests/Minute | Burst Capacity |
|
| Plan | Requests/Minute | Burst Capacity |
|
||||||
|------|-----------------|----------------|
|
|------|-----------------|----------------|
|
||||||
| Free | 10 | 20 |
|
| Free | 30 | 60 |
|
||||||
| Pro | 30 | 60 |
|
| Pro | 100 | 200 |
|
||||||
| Team | 60 | 120 |
|
| Team | 200 | 400 |
|
||||||
| Enterprise | 120 | 240 |
|
| Enterprise | 500 | 1000 |
|
||||||
|
|
||||||
**How it works:**
|
**How it works:**
|
||||||
- Tokens refill at `requestsPerMinute` rate
|
- Tokens refill at `requestsPerMinute` rate
|
||||||
|
|||||||
@@ -170,16 +170,16 @@ curl -X GET -H "X-API-Key: YOUR_API_KEY" -H "Content-Type: application/json" htt
|
|||||||
"rateLimit": {
|
"rateLimit": {
|
||||||
"sync": {
|
"sync": {
|
||||||
"isLimited": false,
|
"isLimited": false,
|
||||||
"requestsPerMinute": 25,
|
"requestsPerMinute": 150,
|
||||||
"maxBurst": 50,
|
"maxBurst": 300,
|
||||||
"remaining": 50,
|
"remaining": 300,
|
||||||
"resetAt": "2025-09-08T22:51:55.999Z"
|
"resetAt": "2025-09-08T22:51:55.999Z"
|
||||||
},
|
},
|
||||||
"async": {
|
"async": {
|
||||||
"isLimited": false,
|
"isLimited": false,
|
||||||
"requestsPerMinute": 200,
|
"requestsPerMinute": 1000,
|
||||||
"maxBurst": 400,
|
"maxBurst": 2000,
|
||||||
"remaining": 400,
|
"remaining": 2000,
|
||||||
"resetAt": "2025-09-08T22:51:56.155Z"
|
"resetAt": "2025-09-08T22:51:56.155Z"
|
||||||
},
|
},
|
||||||
"authType": "api"
|
"authType": "api"
|
||||||
@@ -206,13 +206,32 @@ curl -X GET -H "X-API-Key: YOUR_API_KEY" -H "Content-Type: application/json" htt
|
|||||||
|
|
||||||
Different subscription plans have different usage limits:
|
Different subscription plans have different usage limits:
|
||||||
|
|
||||||
| Plan | Monthly Usage Limit | Rate Limits (per minute) |
|
| Plan | Monthly Usage Included | Rate Limits (per minute) |
|
||||||
|------|-------------------|-------------------------|
|
|------|------------------------|-------------------------|
|
||||||
| **Free** | $20 | 5 sync, 10 async |
|
| **Free** | $20 | 50 sync, 200 async |
|
||||||
| **Pro** | $100 | 10 sync, 50 async |
|
| **Pro** | $20 (adjustable) | 150 sync, 1,000 async |
|
||||||
| **Team** | $500 (pooled) | 50 sync, 100 async |
|
| **Team** | $40/seat (pooled, adjustable) | 300 sync, 2,500 async |
|
||||||
| **Enterprise** | Custom | Custom |
|
| **Enterprise** | Custom | Custom |
|
||||||
|
|
||||||
|
## Execution Time Limits
|
||||||
|
|
||||||
|
Workflows have maximum execution time limits based on your subscription plan:
|
||||||
|
|
||||||
|
| Plan | Sync Execution | Async Execution |
|
||||||
|
|------|----------------|-----------------|
|
||||||
|
| **Free** | 5 minutes | 10 minutes |
|
||||||
|
| **Pro** | 60 minutes | 90 minutes |
|
||||||
|
| **Team** | 60 minutes | 90 minutes |
|
||||||
|
| **Enterprise** | 60 minutes | 90 minutes |
|
||||||
|
|
||||||
|
**Sync executions** run immediately and return results directly. These are triggered via the API with `async: false` (default) or through the UI.
|
||||||
|
|
||||||
|
**Async executions** (triggered via API with `async: true`, webhooks, or schedules) run in the background. Async time limits are 2x the sync limit, capped at 90 minutes.
|
||||||
|
|
||||||
|
<Callout type="info">
|
||||||
|
If a workflow exceeds its time limit, it will be terminated and marked as failed with a timeout error. Design long-running workflows to use async execution or break them into smaller workflows.
|
||||||
|
</Callout>
|
||||||
|
|
||||||
## Billing Model
|
## Billing Model
|
||||||
|
|
||||||
Sim uses a **base subscription + overage** billing model:
|
Sim uses a **base subscription + overage** billing model:
|
||||||
|
|||||||
168
apps/docs/content/docs/en/execution/files.mdx
Normal file
168
apps/docs/content/docs/en/execution/files.mdx
Normal file
@@ -0,0 +1,168 @@
|
|||||||
|
---
|
||||||
|
title: Passing Files
|
||||||
|
---
|
||||||
|
|
||||||
|
import { Callout } from 'fumadocs-ui/components/callout'
|
||||||
|
import { Tab, Tabs } from 'fumadocs-ui/components/tabs'
|
||||||
|
|
||||||
|
Sim makes it easy to work with files throughout your workflows. Blocks can receive files, process them, and pass them to other blocks seamlessly.
|
||||||
|
|
||||||
|
## File Objects
|
||||||
|
|
||||||
|
When blocks output files (like Gmail attachments, generated images, or parsed documents), they return a standardized file object:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"name": "report.pdf",
|
||||||
|
"url": "https://...",
|
||||||
|
"base64": "JVBERi0xLjQK...",
|
||||||
|
"type": "application/pdf",
|
||||||
|
"size": 245678
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
You can access any of these properties when referencing files from previous blocks.
|
||||||
|
|
||||||
|
## The File Block
|
||||||
|
|
||||||
|
The **File block** is the universal entry point for files in your workflows. It accepts files from any source and outputs standardized file objects that work with all integrations.
|
||||||
|
|
||||||
|
**Inputs:**
|
||||||
|
- **Uploaded files** - Drag and drop or select files directly
|
||||||
|
- **External URLs** - Any publicly accessible file URL
|
||||||
|
- **Files from other blocks** - Pass files from Gmail attachments, Slack downloads, etc.
|
||||||
|
|
||||||
|
**Outputs:**
|
||||||
|
- A list of `UserFile` objects with consistent structure (`name`, `url`, `base64`, `type`, `size`)
|
||||||
|
- `combinedContent` - Extracted text content from all files (for documents)
|
||||||
|
|
||||||
|
**Example usage:**
|
||||||
|
|
||||||
|
```
|
||||||
|
// Get all files from the File block
|
||||||
|
<file.files>
|
||||||
|
|
||||||
|
// Get the first file
|
||||||
|
<file.files[0]>
|
||||||
|
|
||||||
|
// Get combined text content from parsed documents
|
||||||
|
<file.combinedContent>
|
||||||
|
```
|
||||||
|
|
||||||
|
The File block automatically:
|
||||||
|
- Detects file types from URLs and extensions
|
||||||
|
- Extracts text from PDFs, CSVs, and documents
|
||||||
|
- Generates base64 encoding for binary files
|
||||||
|
- Creates presigned URLs for secure access
|
||||||
|
|
||||||
|
Use the File block when you need to normalize files from different sources before passing them to other blocks like Vision, STT, or email integrations.
|
||||||
|
|
||||||
|
## Passing Files Between Blocks
|
||||||
|
|
||||||
|
Reference files from previous blocks using the tag dropdown. Click in any file input field and type `<` to see available outputs.
|
||||||
|
|
||||||
|
**Common patterns:**
|
||||||
|
|
||||||
|
```
|
||||||
|
// Single file from a block
|
||||||
|
<gmail.attachments[0]>
|
||||||
|
|
||||||
|
// Pass the whole file object
|
||||||
|
<file_parser.files[0]>
|
||||||
|
|
||||||
|
// Access specific properties
|
||||||
|
<gmail.attachments[0].name>
|
||||||
|
<gmail.attachments[0].base64>
|
||||||
|
```
|
||||||
|
|
||||||
|
Most blocks accept the full file object and extract what they need automatically. You don't need to manually extract `base64` or `url` in most cases.
|
||||||
|
|
||||||
|
## Triggering Workflows with Files
|
||||||
|
|
||||||
|
When calling a workflow via API that expects file input, include files in your request:
|
||||||
|
|
||||||
|
<Tabs items={['Base64', 'URL']}>
|
||||||
|
<Tab value="Base64">
|
||||||
|
```bash
|
||||||
|
curl -X POST "https://sim.ai/api/workflows/YOUR_WORKFLOW_ID/execute" \
|
||||||
|
-H "Content-Type: application/json" \
|
||||||
|
-H "x-api-key: YOUR_API_KEY" \
|
||||||
|
-d '{
|
||||||
|
"document": {
|
||||||
|
"name": "report.pdf",
|
||||||
|
"base64": "JVBERi0xLjQK...",
|
||||||
|
"type": "application/pdf"
|
||||||
|
}
|
||||||
|
}'
|
||||||
|
```
|
||||||
|
</Tab>
|
||||||
|
<Tab value="URL">
|
||||||
|
```bash
|
||||||
|
curl -X POST "https://sim.ai/api/workflows/YOUR_WORKFLOW_ID/execute" \
|
||||||
|
-H "Content-Type: application/json" \
|
||||||
|
-H "x-api-key: YOUR_API_KEY" \
|
||||||
|
-d '{
|
||||||
|
"document": {
|
||||||
|
"name": "report.pdf",
|
||||||
|
"url": "https://example.com/report.pdf",
|
||||||
|
"type": "application/pdf"
|
||||||
|
}
|
||||||
|
}'
|
||||||
|
```
|
||||||
|
</Tab>
|
||||||
|
</Tabs>
|
||||||
|
|
||||||
|
The workflow's Start block should have an input field configured to receive the file parameter.
|
||||||
|
|
||||||
|
## Receiving Files in API Responses
|
||||||
|
|
||||||
|
When a workflow outputs files, they're included in the response:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"success": true,
|
||||||
|
"output": {
|
||||||
|
"generatedFile": {
|
||||||
|
"name": "output.png",
|
||||||
|
"url": "https://...",
|
||||||
|
"base64": "iVBORw0KGgo...",
|
||||||
|
"type": "image/png",
|
||||||
|
"size": 34567
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Use `url` for direct downloads or `base64` for inline processing.
|
||||||
|
|
||||||
|
## Blocks That Work with Files
|
||||||
|
|
||||||
|
**File inputs:**
|
||||||
|
- **File** - Parse documents, images, and text files
|
||||||
|
- **Vision** - Analyze images with AI models
|
||||||
|
- **Mistral Parser** - Extract text from PDFs
|
||||||
|
|
||||||
|
**File outputs:**
|
||||||
|
- **Gmail** - Email attachments
|
||||||
|
- **Slack** - Downloaded files
|
||||||
|
- **TTS** - Generated audio files
|
||||||
|
- **Video Generator** - Generated videos
|
||||||
|
- **Image Generator** - Generated images
|
||||||
|
|
||||||
|
**File storage:**
|
||||||
|
- **Supabase** - Upload/download from storage
|
||||||
|
- **S3** - AWS S3 operations
|
||||||
|
- **Google Drive** - Drive file operations
|
||||||
|
- **Dropbox** - Dropbox file operations
|
||||||
|
|
||||||
|
<Callout type="info">
|
||||||
|
Files are automatically available to downstream blocks. The execution engine handles all file transfer and format conversion.
|
||||||
|
</Callout>
|
||||||
|
|
||||||
|
## Best Practices
|
||||||
|
|
||||||
|
1. **Use file objects directly** - Pass the full file object rather than extracting individual properties. Blocks handle the conversion automatically.
|
||||||
|
|
||||||
|
2. **Check file types** - Ensure the file type matches what the receiving block expects. The Vision block needs images, the File block handles documents.
|
||||||
|
|
||||||
|
3. **Consider file size** - Large files increase execution time. For very large files, consider using storage blocks (S3, Supabase) for intermediate storage.
|
||||||
@@ -1,3 +1,3 @@
|
|||||||
{
|
{
|
||||||
"pages": ["index", "basics", "api", "logging", "costs"]
|
"pages": ["index", "basics", "files", "api", "logging", "costs"]
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -180,6 +180,11 @@ A quick lookup for everyday actions in the Sim workflow editor. For keyboard sho
|
|||||||
<td>Right-click → **Enable/Disable**</td>
|
<td>Right-click → **Enable/Disable**</td>
|
||||||
<td><ActionImage src="/static/quick-reference/disable-block.png" alt="Disable block" /></td>
|
<td><ActionImage src="/static/quick-reference/disable-block.png" alt="Disable block" /></td>
|
||||||
</tr>
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td>Lock/Unlock a block</td>
|
||||||
|
<td>Hover block → Click lock icon (Admin only)</td>
|
||||||
|
<td><ActionImage src="/static/quick-reference/lock-block.png" alt="Lock block" /></td>
|
||||||
|
</tr>
|
||||||
<tr>
|
<tr>
|
||||||
<td>Toggle handle orientation</td>
|
<td>Toggle handle orientation</td>
|
||||||
<td>Right-click → **Toggle Handles**</td>
|
<td>Right-click → **Toggle Handles**</td>
|
||||||
|
|||||||
930
apps/docs/content/docs/en/tools/enrich.mdx
Normal file
930
apps/docs/content/docs/en/tools/enrich.mdx
Normal file
@@ -0,0 +1,930 @@
|
|||||||
|
---
|
||||||
|
title: Enrich
|
||||||
|
description: B2B data enrichment and LinkedIn intelligence with Enrich.so
|
||||||
|
---
|
||||||
|
|
||||||
|
import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||||
|
|
||||||
|
<BlockInfoCard
|
||||||
|
type="enrich"
|
||||||
|
color="#E5E5E6"
|
||||||
|
/>
|
||||||
|
|
||||||
|
{/* MANUAL-CONTENT-START:intro */}
|
||||||
|
[Enrich.so](https://enrich.so/) delivers real-time, precision B2B data enrichment and LinkedIn intelligence. Its platform provides dynamic access to public and structured company, contact, and professional information, enabling teams to build richer profiles, improve lead quality, and drive more effective outreach.
|
||||||
|
|
||||||
|
With Enrich.so, you can:
|
||||||
|
|
||||||
|
- **Enrich contact and company profiles**: Instantly discover key data points for leads, prospects, and businesses using just an email or LinkedIn profile.
|
||||||
|
- **Verify email deliverability**: Check if emails are valid, deliverable, and safe to contact before sending.
|
||||||
|
- **Find work & personal emails**: Identify missing business emails from a LinkedIn profile or personal emails to expand your reach.
|
||||||
|
- **Reveal phone numbers and social profiles**: Surface additional communication channels for contacts through enrichment tools.
|
||||||
|
- **Analyze LinkedIn posts and engagement**: Extract insights on post reach, reactions, and audience from public LinkedIn content.
|
||||||
|
- **Conduct advanced people and company search**: Enable your agents to locate companies and professionals based on deep filters and real-time intelligence.
|
||||||
|
|
||||||
|
The Sim integration with Enrich.so empowers your agents and automations to instantly query, enrich, and validate B2B data, boosting productivity in workflows like sales prospecting, recruiting, marketing operations, and more. Combining Sim's orchestration capabilities with Enrich.so unlocks smarter, data-driven automation strategies powered by best-in-class B2B intelligence.
|
||||||
|
{/* MANUAL-CONTENT-END */}
|
||||||
|
|
||||||
|
|
||||||
|
## Usage Instructions
|
||||||
|
|
||||||
|
Access real-time B2B data intelligence with Enrich.so. Enrich profiles from email addresses, find work emails from LinkedIn, verify email deliverability, search for people and companies, and analyze LinkedIn post engagement.
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Tools
|
||||||
|
|
||||||
|
### `enrich_check_credits`
|
||||||
|
|
||||||
|
Check your Enrich API credit usage and remaining balance.
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `apiKey` | string | Yes | Enrich API key |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `totalCredits` | number | Total credits allocated to the account |
|
||||||
|
| `creditsUsed` | number | Credits consumed so far |
|
||||||
|
| `creditsRemaining` | number | Available credits remaining |
|
||||||
|
|
||||||
|
### `enrich_email_to_profile`
|
||||||
|
|
||||||
|
Retrieve detailed LinkedIn profile information using an email address including work history, education, and skills.
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `apiKey` | string | Yes | Enrich API key |
|
||||||
|
| `email` | string | Yes | Email address to look up \(e.g., john.doe@company.com\) |
|
||||||
|
| `inRealtime` | boolean | No | Set to true to retrieve fresh data, bypassing cached information |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `displayName` | string | Full display name |
|
||||||
|
| `firstName` | string | First name |
|
||||||
|
| `lastName` | string | Last name |
|
||||||
|
| `headline` | string | Professional headline |
|
||||||
|
| `occupation` | string | Current occupation |
|
||||||
|
| `summary` | string | Profile summary |
|
||||||
|
| `location` | string | Location |
|
||||||
|
| `country` | string | Country |
|
||||||
|
| `linkedInUrl` | string | LinkedIn profile URL |
|
||||||
|
| `photoUrl` | string | Profile photo URL |
|
||||||
|
| `connectionCount` | number | Number of connections |
|
||||||
|
| `isConnectionCountObfuscated` | boolean | Whether connection count is obfuscated \(500+\) |
|
||||||
|
| `positionHistory` | array | Work experience history |
|
||||||
|
| ↳ `title` | string | Job title |
|
||||||
|
| ↳ `company` | string | Company name |
|
||||||
|
| ↳ `startDate` | string | Start date |
|
||||||
|
| ↳ `endDate` | string | End date |
|
||||||
|
| ↳ `location` | string | Location |
|
||||||
|
| `education` | array | Education history |
|
||||||
|
| ↳ `school` | string | School name |
|
||||||
|
| ↳ `degree` | string | Degree |
|
||||||
|
| ↳ `fieldOfStudy` | string | Field of study |
|
||||||
|
| ↳ `startDate` | string | Start date |
|
||||||
|
| ↳ `endDate` | string | End date |
|
||||||
|
| `certifications` | array | Professional certifications |
|
||||||
|
| ↳ `name` | string | Certification name |
|
||||||
|
| ↳ `authority` | string | Issuing authority |
|
||||||
|
| ↳ `url` | string | Certification URL |
|
||||||
|
| `skills` | array | List of skills |
|
||||||
|
| `languages` | array | List of languages |
|
||||||
|
| `locale` | string | Profile locale \(e.g., en_US\) |
|
||||||
|
| `version` | number | Profile version number |
|
||||||
|
|
||||||
|
### `enrich_email_to_person_lite`
|
||||||
|
|
||||||
|
Retrieve basic LinkedIn profile information from an email address. A lighter version with essential data only.
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `apiKey` | string | Yes | Enrich API key |
|
||||||
|
| `email` | string | Yes | Email address to look up \(e.g., john.doe@company.com\) |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `name` | string | Full name |
|
||||||
|
| `firstName` | string | First name |
|
||||||
|
| `lastName` | string | Last name |
|
||||||
|
| `email` | string | Email address |
|
||||||
|
| `title` | string | Job title |
|
||||||
|
| `location` | string | Location |
|
||||||
|
| `company` | string | Current company |
|
||||||
|
| `companyLocation` | string | Company location |
|
||||||
|
| `companyLinkedIn` | string | Company LinkedIn URL |
|
||||||
|
| `profileId` | string | LinkedIn profile ID |
|
||||||
|
| `schoolName` | string | School name |
|
||||||
|
| `schoolUrl` | string | School URL |
|
||||||
|
| `linkedInUrl` | string | LinkedIn profile URL |
|
||||||
|
| `photoUrl` | string | Profile photo URL |
|
||||||
|
| `followerCount` | number | Number of followers |
|
||||||
|
| `connectionCount` | number | Number of connections |
|
||||||
|
| `languages` | array | Languages spoken |
|
||||||
|
| `projects` | array | Projects |
|
||||||
|
| `certifications` | array | Certifications |
|
||||||
|
| `volunteerExperience` | array | Volunteer experience |
|
||||||
|
|
||||||
|
### `enrich_linkedin_profile`
|
||||||
|
|
||||||
|
Enrich a LinkedIn profile URL with detailed information including positions, education, and social metrics.
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `apiKey` | string | Yes | Enrich API key |
|
||||||
|
| `url` | string | Yes | LinkedIn profile URL \(e.g., linkedin.com/in/williamhgates\) |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `profileId` | string | LinkedIn profile ID |
|
||||||
|
| `firstName` | string | First name |
|
||||||
|
| `lastName` | string | Last name |
|
||||||
|
| `subTitle` | string | Profile subtitle/headline |
|
||||||
|
| `profilePicture` | string | Profile picture URL |
|
||||||
|
| `backgroundImage` | string | Background image URL |
|
||||||
|
| `industry` | string | Industry |
|
||||||
|
| `location` | string | Location |
|
||||||
|
| `followersCount` | number | Number of followers |
|
||||||
|
| `connectionsCount` | number | Number of connections |
|
||||||
|
| `premium` | boolean | Whether the account is premium |
|
||||||
|
| `influencer` | boolean | Whether the account is an influencer |
|
||||||
|
| `positions` | array | Work positions |
|
||||||
|
| ↳ `title` | string | Job title |
|
||||||
|
| ↳ `company` | string | Company name |
|
||||||
|
| ↳ `companyLogo` | string | Company logo URL |
|
||||||
|
| ↳ `startDate` | string | Start date |
|
||||||
|
| ↳ `endDate` | string | End date |
|
||||||
|
| ↳ `location` | string | Location |
|
||||||
|
| `education` | array | Education history |
|
||||||
|
| ↳ `school` | string | School name |
|
||||||
|
| ↳ `degree` | string | Degree |
|
||||||
|
| ↳ `fieldOfStudy` | string | Field of study |
|
||||||
|
| ↳ `startDate` | string | Start date |
|
||||||
|
| ↳ `endDate` | string | End date |
|
||||||
|
| `websites` | array | Personal websites |
|
||||||
|
|
||||||
|
### `enrich_find_email`
|
||||||
|
|
||||||
|
Find a person
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `apiKey` | string | Yes | Enrich API key |
|
||||||
|
| `fullName` | string | Yes | Person's full name \(e.g., John Doe\) |
|
||||||
|
| `companyDomain` | string | Yes | Company domain \(e.g., example.com\) |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `email` | string | Found email address |
|
||||||
|
| `firstName` | string | First name |
|
||||||
|
| `lastName` | string | Last name |
|
||||||
|
| `domain` | string | Company domain |
|
||||||
|
| `found` | boolean | Whether an email was found |
|
||||||
|
| `acceptAll` | boolean | Whether the domain accepts all emails |
|
||||||
|
|
||||||
|
### `enrich_linkedin_to_work_email`
|
||||||
|
|
||||||
|
Find a work email address from a LinkedIn profile URL.
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `apiKey` | string | Yes | Enrich API key |
|
||||||
|
| `linkedinProfile` | string | Yes | LinkedIn profile URL \(e.g., https://www.linkedin.com/in/williamhgates\) |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `email` | string | Found work email address |
|
||||||
|
| `found` | boolean | Whether an email was found |
|
||||||
|
| `status` | string | Request status \(in_progress or completed\) |
|
||||||
|
|
||||||
|
### `enrich_linkedin_to_personal_email`
|
||||||
|
|
||||||
|
Find personal email address from a LinkedIn profile URL.
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `apiKey` | string | Yes | Enrich API key |
|
||||||
|
| `linkedinProfile` | string | Yes | LinkedIn profile URL \(e.g., linkedin.com/in/username\) |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `email` | string | Personal email address |
|
||||||
|
| `found` | boolean | Whether an email was found |
|
||||||
|
| `status` | string | Request status |
|
||||||
|
|
||||||
|
### `enrich_phone_finder`
|
||||||
|
|
||||||
|
Find a phone number from a LinkedIn profile URL.
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `apiKey` | string | Yes | Enrich API key |
|
||||||
|
| `linkedinProfile` | string | Yes | LinkedIn profile URL \(e.g., linkedin.com/in/williamhgates\) |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `profileUrl` | string | LinkedIn profile URL |
|
||||||
|
| `mobileNumber` | string | Found mobile phone number |
|
||||||
|
| `found` | boolean | Whether a phone number was found |
|
||||||
|
| `status` | string | Request status \(in_progress or completed\) |
|
||||||
|
|
||||||
|
### `enrich_email_to_phone`
|
||||||
|
|
||||||
|
Find a phone number associated with an email address.
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `apiKey` | string | Yes | Enrich API key |
|
||||||
|
| `email` | string | Yes | Email address to look up \(e.g., john.doe@example.com\) |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `email` | string | Email address looked up |
|
||||||
|
| `mobileNumber` | string | Found mobile phone number |
|
||||||
|
| `found` | boolean | Whether a phone number was found |
|
||||||
|
| `status` | string | Request status \(in_progress or completed\) |
|
||||||
|
|
||||||
|
### `enrich_verify_email`
|
||||||
|
|
||||||
|
Verify an email address for deliverability, including catch-all detection and provider identification.
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `apiKey` | string | Yes | Enrich API key |
|
||||||
|
| `email` | string | Yes | Email address to verify \(e.g., john.doe@example.com\) |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `email` | string | Email address verified |
|
||||||
|
| `status` | string | Verification status |
|
||||||
|
| `result` | string | Deliverability result \(deliverable, undeliverable, etc.\) |
|
||||||
|
| `confidenceScore` | number | Confidence score \(0-100\) |
|
||||||
|
| `smtpProvider` | string | Email service provider \(e.g., Google, Microsoft\) |
|
||||||
|
| `mailDisposable` | boolean | Whether the email is from a disposable provider |
|
||||||
|
| `mailAcceptAll` | boolean | Whether the domain is a catch-all domain |
|
||||||
|
| `free` | boolean | Whether the email uses a free email service |
|
||||||
|
|
||||||
|
### `enrich_disposable_email_check`
|
||||||
|
|
||||||
|
Check if an email address is from a disposable or temporary email provider. Returns a score and validation details.
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `apiKey` | string | Yes | Enrich API key |
|
||||||
|
| `email` | string | Yes | Email address to check \(e.g., john.doe@example.com\) |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `email` | string | Email address checked |
|
||||||
|
| `score` | number | Validation score \(0-100\) |
|
||||||
|
| `testsPassed` | string | Number of tests passed \(e.g., "3/3"\) |
|
||||||
|
| `passed` | boolean | Whether the email passed all validation tests |
|
||||||
|
| `reason` | string | Reason for failure if email did not pass |
|
||||||
|
| `mailServerIp` | string | Mail server IP address |
|
||||||
|
| `mxRecords` | array | MX records for the domain |
|
||||||
|
| ↳ `host` | string | MX record host |
|
||||||
|
| ↳ `pref` | number | MX record preference |
|
||||||
|
|
||||||
|
### `enrich_email_to_ip`
|
||||||
|
|
||||||
|
Discover an IP address associated with an email address.
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `apiKey` | string | Yes | Enrich API key |
|
||||||
|
| `email` | string | Yes | Email address to look up \(e.g., john.doe@example.com\) |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `email` | string | Email address looked up |
|
||||||
|
| `ip` | string | Associated IP address |
|
||||||
|
| `found` | boolean | Whether an IP address was found |
|
||||||
|
|
||||||
|
### `enrich_ip_to_company`
|
||||||
|
|
||||||
|
Identify a company from an IP address with detailed firmographic information.
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `apiKey` | string | Yes | Enrich API key |
|
||||||
|
| `ip` | string | Yes | IP address to look up \(e.g., 86.92.60.221\) |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `name` | string | Company name |
|
||||||
|
| `legalName` | string | Legal company name |
|
||||||
|
| `domain` | string | Primary domain |
|
||||||
|
| `domainAliases` | array | Domain aliases |
|
||||||
|
| `sector` | string | Business sector |
|
||||||
|
| `industry` | string | Industry |
|
||||||
|
| `phone` | string | Phone number |
|
||||||
|
| `employees` | number | Number of employees |
|
||||||
|
| `revenue` | string | Estimated revenue |
|
||||||
|
| `location` | json | Company location |
|
||||||
|
| ↳ `city` | string | City |
|
||||||
|
| ↳ `state` | string | State |
|
||||||
|
| ↳ `country` | string | Country |
|
||||||
|
| ↳ `timezone` | string | Timezone |
|
||||||
|
| `linkedInUrl` | string | LinkedIn company URL |
|
||||||
|
| `twitterUrl` | string | Twitter URL |
|
||||||
|
| `facebookUrl` | string | Facebook URL |
|
||||||
|
|
||||||
|
### `enrich_company_lookup`
|
||||||
|
|
||||||
|
Look up comprehensive company information by name or domain including funding, location, and social profiles.
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `apiKey` | string | Yes | Enrich API key |
|
||||||
|
| `name` | string | No | Company name \(e.g., Google\) |
|
||||||
|
| `domain` | string | No | Company domain \(e.g., google.com\) |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `name` | string | Company name |
|
||||||
|
| `universalName` | string | Universal company name |
|
||||||
|
| `companyId` | string | Company ID |
|
||||||
|
| `description` | string | Company description |
|
||||||
|
| `phone` | string | Phone number |
|
||||||
|
| `linkedInUrl` | string | LinkedIn company URL |
|
||||||
|
| `websiteUrl` | string | Company website |
|
||||||
|
| `followers` | number | Number of LinkedIn followers |
|
||||||
|
| `staffCount` | number | Number of employees |
|
||||||
|
| `foundedDate` | string | Date founded |
|
||||||
|
| `type` | string | Company type |
|
||||||
|
| `industries` | array | Industries |
|
||||||
|
| `specialties` | array | Company specialties |
|
||||||
|
| `headquarters` | json | Headquarters location |
|
||||||
|
| ↳ `city` | string | City |
|
||||||
|
| ↳ `country` | string | Country |
|
||||||
|
| ↳ `postalCode` | string | Postal code |
|
||||||
|
| ↳ `line1` | string | Address line 1 |
|
||||||
|
| `logo` | string | Company logo URL |
|
||||||
|
| `coverImage` | string | Cover image URL |
|
||||||
|
| `fundingRounds` | array | Funding history |
|
||||||
|
| ↳ `roundType` | string | Funding round type |
|
||||||
|
| ↳ `amount` | number | Amount raised |
|
||||||
|
| ↳ `currency` | string | Currency |
|
||||||
|
| ↳ `investors` | array | Investors |
|
||||||
|
|
||||||
|
### `enrich_company_funding`
|
||||||
|
|
||||||
|
Retrieve company funding history, traffic metrics, and executive information by domain.
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `apiKey` | string | Yes | Enrich API key |
|
||||||
|
| `domain` | string | Yes | Company domain \(e.g., example.com\) |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `legalName` | string | Legal company name |
|
||||||
|
| `employeeCount` | number | Number of employees |
|
||||||
|
| `headquarters` | string | Headquarters location |
|
||||||
|
| `industry` | string | Industry |
|
||||||
|
| `totalFundingRaised` | number | Total funding raised |
|
||||||
|
| `fundingRounds` | array | Funding rounds |
|
||||||
|
| ↳ `roundType` | string | Round type |
|
||||||
|
| ↳ `amount` | number | Amount raised |
|
||||||
|
| ↳ `date` | string | Date |
|
||||||
|
| ↳ `investors` | array | Investors |
|
||||||
|
| `monthlyVisits` | number | Monthly website visits |
|
||||||
|
| `trafficChange` | number | Traffic change percentage |
|
||||||
|
| `itSpending` | number | Estimated IT spending in USD |
|
||||||
|
| `executives` | array | Executive team |
|
||||||
|
| ↳ `name` | string | Name |
|
||||||
|
| ↳ `title` | string | Title |
|
||||||
|
|
||||||
|
### `enrich_company_revenue`
|
||||||
|
|
||||||
|
Retrieve company revenue data, CEO information, and competitive analysis by domain.
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `apiKey` | string | Yes | Enrich API key |
|
||||||
|
| `domain` | string | Yes | Company domain \(e.g., clay.io\) |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `companyName` | string | Company name |
|
||||||
|
| `shortDescription` | string | Short company description |
|
||||||
|
| `fullSummary` | string | Full company summary |
|
||||||
|
| `revenue` | string | Company revenue |
|
||||||
|
| `revenueMin` | number | Minimum revenue estimate |
|
||||||
|
| `revenueMax` | number | Maximum revenue estimate |
|
||||||
|
| `employeeCount` | number | Number of employees |
|
||||||
|
| `founded` | string | Year founded |
|
||||||
|
| `ownership` | string | Ownership type |
|
||||||
|
| `status` | string | Company status \(e.g., Active\) |
|
||||||
|
| `website` | string | Company website URL |
|
||||||
|
| `ceo` | json | CEO information |
|
||||||
|
| ↳ `name` | string | CEO name |
|
||||||
|
| ↳ `designation` | string | CEO designation/title |
|
||||||
|
| ↳ `rating` | number | CEO rating |
|
||||||
|
| `socialLinks` | json | Social media links |
|
||||||
|
| ↳ `linkedIn` | string | LinkedIn URL |
|
||||||
|
| ↳ `twitter` | string | Twitter URL |
|
||||||
|
| ↳ `facebook` | string | Facebook URL |
|
||||||
|
| `totalFunding` | string | Total funding raised |
|
||||||
|
| `fundingRounds` | number | Number of funding rounds |
|
||||||
|
| `competitors` | array | Competitors |
|
||||||
|
| ↳ `name` | string | Competitor name |
|
||||||
|
| ↳ `revenue` | string | Revenue |
|
||||||
|
| ↳ `employeeCount` | number | Employee count |
|
||||||
|
| ↳ `headquarters` | string | Headquarters |
|
||||||
|
|
||||||
|
### `enrich_search_people`
|
||||||
|
|
||||||
|
Search for professionals by various criteria including name, title, skills, education, and company.
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `apiKey` | string | Yes | Enrich API key |
|
||||||
|
| `firstName` | string | No | First name |
|
||||||
|
| `lastName` | string | No | Last name |
|
||||||
|
| `summary` | string | No | Professional summary keywords |
|
||||||
|
| `subTitle` | string | No | Job title/subtitle |
|
||||||
|
| `locationCountry` | string | No | Country |
|
||||||
|
| `locationCity` | string | No | City |
|
||||||
|
| `locationState` | string | No | State/province |
|
||||||
|
| `influencer` | boolean | No | Filter for influencers only |
|
||||||
|
| `premium` | boolean | No | Filter for premium accounts only |
|
||||||
|
| `language` | string | No | Primary language |
|
||||||
|
| `industry` | string | No | Industry |
|
||||||
|
| `currentJobTitles` | json | No | Current job titles \(array\) |
|
||||||
|
| `pastJobTitles` | json | No | Past job titles \(array\) |
|
||||||
|
| `skills` | json | No | Skills to search for \(array\) |
|
||||||
|
| `schoolNames` | json | No | School names \(array\) |
|
||||||
|
| `certifications` | json | No | Certifications to filter by \(array\) |
|
||||||
|
| `degreeNames` | json | No | Degree names to filter by \(array\) |
|
||||||
|
| `studyFields` | json | No | Fields of study to filter by \(array\) |
|
||||||
|
| `currentCompanies` | json | No | Current company IDs to filter by \(array of numbers\) |
|
||||||
|
| `pastCompanies` | json | No | Past company IDs to filter by \(array of numbers\) |
|
||||||
|
| `currentPage` | number | No | Page number \(default: 1\) |
|
||||||
|
| `pageSize` | number | No | Results per page \(default: 20\) |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `currentPage` | number | Current page number |
|
||||||
|
| `totalPage` | number | Total number of pages |
|
||||||
|
| `pageSize` | number | Results per page |
|
||||||
|
| `profiles` | array | Search results |
|
||||||
|
| ↳ `profileIdentifier` | string | Profile ID |
|
||||||
|
| ↳ `givenName` | string | First name |
|
||||||
|
| ↳ `familyName` | string | Last name |
|
||||||
|
| ↳ `currentPosition` | string | Current job title |
|
||||||
|
| ↳ `profileImage` | string | Profile image URL |
|
||||||
|
| ↳ `externalProfileUrl` | string | LinkedIn URL |
|
||||||
|
| ↳ `city` | string | City |
|
||||||
|
| ↳ `country` | string | Country |
|
||||||
|
| ↳ `expertSkills` | array | Skills |
|
||||||
|
|
||||||
|
### `enrich_search_company`
|
||||||
|
|
||||||
|
Search for companies by various criteria including name, industry, location, and size.
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `apiKey` | string | Yes | Enrich API key |
|
||||||
|
| `name` | string | No | Company name |
|
||||||
|
| `website` | string | No | Company website URL |
|
||||||
|
| `tagline` | string | No | Company tagline |
|
||||||
|
| `type` | string | No | Company type \(e.g., Private, Public\) |
|
||||||
|
| `description` | string | No | Company description keywords |
|
||||||
|
| `industries` | json | No | Industries to filter by \(array\) |
|
||||||
|
| `locationCountry` | string | No | Country |
|
||||||
|
| `locationCity` | string | No | City |
|
||||||
|
| `postalCode` | string | No | Postal code |
|
||||||
|
| `locationCountryList` | json | No | Multiple countries to filter by \(array\) |
|
||||||
|
| `locationCityList` | json | No | Multiple cities to filter by \(array\) |
|
||||||
|
| `specialities` | json | No | Company specialties \(array\) |
|
||||||
|
| `followers` | number | No | Minimum number of followers |
|
||||||
|
| `staffCount` | number | No | Staff count |
|
||||||
|
| `staffCountMin` | number | No | Minimum staff count |
|
||||||
|
| `staffCountMax` | number | No | Maximum staff count |
|
||||||
|
| `currentPage` | number | No | Page number \(default: 1\) |
|
||||||
|
| `pageSize` | number | No | Results per page \(default: 20\) |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `currentPage` | number | Current page number |
|
||||||
|
| `totalPage` | number | Total number of pages |
|
||||||
|
| `pageSize` | number | Results per page |
|
||||||
|
| `companies` | array | Search results |
|
||||||
|
| ↳ `companyName` | string | Company name |
|
||||||
|
| ↳ `tagline` | string | Company tagline |
|
||||||
|
| ↳ `webAddress` | string | Website URL |
|
||||||
|
| ↳ `industries` | array | Industries |
|
||||||
|
| ↳ `teamSize` | number | Team size |
|
||||||
|
| ↳ `linkedInProfile` | string | LinkedIn URL |
|
||||||
|
|
||||||
|
### `enrich_search_company_employees`
|
||||||
|
|
||||||
|
Search for employees within specific companies by location and job title.
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `apiKey` | string | Yes | Enrich API key |
|
||||||
|
| `companyIds` | json | No | Array of company IDs to search within |
|
||||||
|
| `country` | string | No | Country filter \(e.g., United States\) |
|
||||||
|
| `city` | string | No | City filter \(e.g., San Francisco\) |
|
||||||
|
| `state` | string | No | State filter \(e.g., California\) |
|
||||||
|
| `jobTitles` | json | No | Job titles to filter by \(array\) |
|
||||||
|
| `page` | number | No | Page number \(default: 1\) |
|
||||||
|
| `pageSize` | number | No | Results per page \(default: 10\) |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `currentPage` | number | Current page number |
|
||||||
|
| `totalPage` | number | Total number of pages |
|
||||||
|
| `pageSize` | number | Number of results per page |
|
||||||
|
| `profiles` | array | Employee profiles |
|
||||||
|
| ↳ `profileIdentifier` | string | Profile ID |
|
||||||
|
| ↳ `givenName` | string | First name |
|
||||||
|
| ↳ `familyName` | string | Last name |
|
||||||
|
| ↳ `currentPosition` | string | Current job title |
|
||||||
|
| ↳ `profileImage` | string | Profile image URL |
|
||||||
|
| ↳ `externalProfileUrl` | string | LinkedIn URL |
|
||||||
|
| ↳ `city` | string | City |
|
||||||
|
| ↳ `country` | string | Country |
|
||||||
|
| ↳ `expertSkills` | array | Skills |
|
||||||
|
|
||||||
|
### `enrich_search_similar_companies`
|
||||||
|
|
||||||
|
Find companies similar to a given company by LinkedIn URL with filters for location and size.
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `apiKey` | string | Yes | Enrich API key |
|
||||||
|
| `url` | string | Yes | LinkedIn company URL \(e.g., linkedin.com/company/google\) |
|
||||||
|
| `accountLocation` | json | No | Filter by locations \(array of country names\) |
|
||||||
|
| `employeeSizeType` | string | No | Employee size filter type \(e.g., RANGE\) |
|
||||||
|
| `employeeSizeRange` | json | No | Employee size ranges \(array of \{start, end\} objects\) |
|
||||||
|
| `page` | number | No | Page number \(default: 1\) |
|
||||||
|
| `num` | number | No | Number of results per page |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `companies` | array | Similar companies |
|
||||||
|
| ↳ `url` | string | LinkedIn URL |
|
||||||
|
| ↳ `name` | string | Company name |
|
||||||
|
| ↳ `universalName` | string | Universal name |
|
||||||
|
| ↳ `type` | string | Company type |
|
||||||
|
| ↳ `description` | string | Description |
|
||||||
|
| ↳ `phone` | string | Phone number |
|
||||||
|
| ↳ `website` | string | Website URL |
|
||||||
|
| ↳ `logo` | string | Logo URL |
|
||||||
|
| ↳ `foundedYear` | number | Year founded |
|
||||||
|
| ↳ `staffTotal` | number | Total staff |
|
||||||
|
| ↳ `industries` | array | Industries |
|
||||||
|
| ↳ `relevancyScore` | number | Relevancy score |
|
||||||
|
| ↳ `relevancyValue` | string | Relevancy value |
|
||||||
|
|
||||||
|
### `enrich_sales_pointer_people`
|
||||||
|
|
||||||
|
Advanced people search with complex filters for location, company size, seniority, experience, and more.
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `apiKey` | string | Yes | Enrich API key |
|
||||||
|
| `page` | number | Yes | Page number \(starts at 1\) |
|
||||||
|
| `filters` | json | Yes | Array of filter objects. Each filter has type \(e.g., POSTAL_CODE, COMPANY_HEADCOUNT\), values \(array with id, text, selectionType: INCLUDED/EXCLUDED\), and optional selectedSubFilter |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `data` | array | People results |
|
||||||
|
| ↳ `name` | string | Full name |
|
||||||
|
| ↳ `summary` | string | Professional summary |
|
||||||
|
| ↳ `location` | string | Location |
|
||||||
|
| ↳ `profilePicture` | string | Profile picture URL |
|
||||||
|
| ↳ `linkedInUrn` | string | LinkedIn URN |
|
||||||
|
| ↳ `positions` | array | Work positions |
|
||||||
|
| ↳ `education` | array | Education |
|
||||||
|
| `pagination` | json | Pagination info |
|
||||||
|
| ↳ `totalCount` | number | Total results |
|
||||||
|
| ↳ `returnedCount` | number | Returned count |
|
||||||
|
| ↳ `start` | number | Start position |
|
||||||
|
| ↳ `limit` | number | Limit |
|
||||||
|
|
||||||
|
### `enrich_search_posts`
|
||||||
|
|
||||||
|
Search LinkedIn posts by keywords with date filtering.
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `apiKey` | string | Yes | Enrich API key |
|
||||||
|
| `keywords` | string | Yes | Search keywords \(e.g., "AI automation"\) |
|
||||||
|
| `datePosted` | string | No | Time filter \(e.g., past_week, past_month\) |
|
||||||
|
| `page` | number | No | Page number \(default: 1\) |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `count` | number | Total number of results |
|
||||||
|
| `posts` | array | Search results |
|
||||||
|
| ↳ `url` | string | Post URL |
|
||||||
|
| ↳ `postId` | string | Post ID |
|
||||||
|
| ↳ `author` | object | Author information |
|
||||||
|
| ↳ `name` | string | Author name |
|
||||||
|
| ↳ `headline` | string | Author headline |
|
||||||
|
| ↳ `linkedInUrl` | string | Author LinkedIn URL |
|
||||||
|
| ↳ `profileImage` | string | Author profile image |
|
||||||
|
| ↳ `timestamp` | string | Post timestamp |
|
||||||
|
| ↳ `textContent` | string | Post text content |
|
||||||
|
| ↳ `hashtags` | array | Hashtags |
|
||||||
|
| ↳ `mediaUrls` | array | Media URLs |
|
||||||
|
| ↳ `reactions` | number | Number of reactions |
|
||||||
|
| ↳ `commentsCount` | number | Number of comments |
|
||||||
|
|
||||||
|
### `enrich_get_post_details`
|
||||||
|
|
||||||
|
Get detailed information about a LinkedIn post by URL.
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `apiKey` | string | Yes | Enrich API key |
|
||||||
|
| `url` | string | Yes | LinkedIn post URL |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `postId` | string | Post ID |
|
||||||
|
| `author` | json | Author information |
|
||||||
|
| ↳ `name` | string | Author name |
|
||||||
|
| ↳ `headline` | string | Author headline |
|
||||||
|
| ↳ `linkedInUrl` | string | Author LinkedIn URL |
|
||||||
|
| ↳ `profileImage` | string | Author profile image |
|
||||||
|
| `timestamp` | string | Post timestamp |
|
||||||
|
| `textContent` | string | Post text content |
|
||||||
|
| `hashtags` | array | Hashtags |
|
||||||
|
| `mediaUrls` | array | Media URLs |
|
||||||
|
| `reactions` | number | Number of reactions |
|
||||||
|
| `commentsCount` | number | Number of comments |
|
||||||
|
|
||||||
|
### `enrich_search_post_reactions`
|
||||||
|
|
||||||
|
Get reactions on a LinkedIn post with filtering by reaction type.
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `apiKey` | string | Yes | Enrich API key |
|
||||||
|
| `postUrn` | string | Yes | LinkedIn activity URN \(e.g., urn:li:activity:7231931952839196672\) |
|
||||||
|
| `reactionType` | string | Yes | Reaction type filter: all, like, love, celebrate, insightful, or funny \(default: all\) |
|
||||||
|
| `page` | number | Yes | Page number \(starts at 1\) |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `page` | number | Current page number |
|
||||||
|
| `totalPage` | number | Total number of pages |
|
||||||
|
| `count` | number | Number of reactions returned |
|
||||||
|
| `reactions` | array | Reactions |
|
||||||
|
| ↳ `reactionType` | string | Type of reaction |
|
||||||
|
| ↳ `reactor` | object | Person who reacted |
|
||||||
|
| ↳ `name` | string | Name |
|
||||||
|
| ↳ `subTitle` | string | Job title |
|
||||||
|
| ↳ `profileId` | string | Profile ID |
|
||||||
|
| ↳ `profilePicture` | string | Profile picture URL |
|
||||||
|
| ↳ `linkedInUrl` | string | LinkedIn URL |
|
||||||
|
|
||||||
|
### `enrich_search_post_comments`
|
||||||
|
|
||||||
|
Get comments on a LinkedIn post.
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `apiKey` | string | Yes | Enrich API key |
|
||||||
|
| `postUrn` | string | Yes | LinkedIn activity URN \(e.g., urn:li:activity:7191163324208705536\) |
|
||||||
|
| `page` | number | No | Page number \(starts at 1, default: 1\) |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `page` | number | Current page number |
|
||||||
|
| `totalPage` | number | Total number of pages |
|
||||||
|
| `count` | number | Number of comments returned |
|
||||||
|
| `comments` | array | Comments |
|
||||||
|
| ↳ `activityId` | string | Comment activity ID |
|
||||||
|
| ↳ `commentary` | string | Comment text |
|
||||||
|
| ↳ `linkedInUrl` | string | Link to comment |
|
||||||
|
| ↳ `commenter` | object | Commenter info |
|
||||||
|
| ↳ `profileId` | string | Profile ID |
|
||||||
|
| ↳ `firstName` | string | First name |
|
||||||
|
| ↳ `lastName` | string | Last name |
|
||||||
|
| ↳ `subTitle` | string | Subtitle/headline |
|
||||||
|
| ↳ `profilePicture` | string | Profile picture URL |
|
||||||
|
| ↳ `backgroundImage` | string | Background image URL |
|
||||||
|
| ↳ `entityUrn` | string | Entity URN |
|
||||||
|
| ↳ `objectUrn` | string | Object URN |
|
||||||
|
| ↳ `profileType` | string | Profile type |
|
||||||
|
| ↳ `reactionBreakdown` | object | Reactions on the comment |
|
||||||
|
| ↳ `likes` | number | Number of likes |
|
||||||
|
| ↳ `empathy` | number | Number of empathy reactions |
|
||||||
|
| ↳ `other` | number | Number of other reactions |
|
||||||
|
|
||||||
|
### `enrich_search_people_activities`
|
||||||
|
|
||||||
|
Get a person's LinkedIn activities (posts, comments, or articles) by profile ID.
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `apiKey` | string | Yes | Enrich API key |
|
||||||
|
| `profileId` | string | Yes | LinkedIn profile ID |
|
||||||
|
| `activityType` | string | Yes | Activity type: posts, comments, or articles |
|
||||||
|
| `paginationToken` | string | No | Pagination token for next page of results |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `paginationToken` | string | Token for fetching next page |
|
||||||
|
| `activityType` | string | Type of activities returned |
|
||||||
|
| `activities` | array | Activities |
|
||||||
|
| ↳ `activityId` | string | Activity ID |
|
||||||
|
| ↳ `commentary` | string | Activity text content |
|
||||||
|
| ↳ `linkedInUrl` | string | Link to activity |
|
||||||
|
| ↳ `timeElapsed` | string | Time elapsed since activity |
|
||||||
|
| ↳ `numReactions` | number | Total number of reactions |
|
||||||
|
| ↳ `author` | object | Activity author info |
|
||||||
|
| ↳ `name` | string | Author name |
|
||||||
|
| ↳ `profileId` | string | Profile ID |
|
||||||
|
| ↳ `profilePicture` | string | Profile picture URL |
|
||||||
|
| ↳ `reactionBreakdown` | object | Reactions |
|
||||||
|
| ↳ `likes` | number | Likes |
|
||||||
|
| ↳ `empathy` | number | Empathy reactions |
|
||||||
|
| ↳ `other` | number | Other reactions |
|
||||||
|
| ↳ `attachments` | array | Attachment URLs |
|
||||||
|
|
||||||
|
### `enrich_search_company_activities`
|
||||||
|
|
||||||
|
Get a company's LinkedIn activities (posts, comments, or articles) by company ID.
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `apiKey` | string | Yes | Enrich API key |
|
||||||
|
| `companyId` | string | Yes | LinkedIn company ID |
|
||||||
|
| `activityType` | string | Yes | Activity type: posts, comments, or articles |
|
||||||
|
| `paginationToken` | string | No | Pagination token for next page of results |
|
||||||
|
| `offset` | number | No | Number of records to skip \(default: 0\) |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `paginationToken` | string | Token for fetching next page |
|
||||||
|
| `activityType` | string | Type of activities returned |
|
||||||
|
| `activities` | array | Activities |
|
||||||
|
| ↳ `activityId` | string | Activity ID |
|
||||||
|
| ↳ `commentary` | string | Activity text content |
|
||||||
|
| ↳ `linkedInUrl` | string | Link to activity |
|
||||||
|
| ↳ `timeElapsed` | string | Time elapsed since activity |
|
||||||
|
| ↳ `numReactions` | number | Total number of reactions |
|
||||||
|
| ↳ `author` | object | Activity author info |
|
||||||
|
| ↳ `name` | string | Author name |
|
||||||
|
| ↳ `profileId` | string | Profile ID |
|
||||||
|
| ↳ `profilePicture` | string | Profile picture URL |
|
||||||
|
| ↳ `reactionBreakdown` | object | Reactions |
|
||||||
|
| ↳ `likes` | number | Likes |
|
||||||
|
| ↳ `empathy` | number | Empathy reactions |
|
||||||
|
| ↳ `other` | number | Other reactions |
|
||||||
|
| ↳ `attachments` | array | Attachments |
|
||||||
|
|
||||||
|
### `enrich_reverse_hash_lookup`
|
||||||
|
|
||||||
|
Convert an MD5 email hash back to the original email address and display name.
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `apiKey` | string | Yes | Enrich API key |
|
||||||
|
| `hash` | string | Yes | MD5 hash value to look up |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `hash` | string | MD5 hash that was looked up |
|
||||||
|
| `email` | string | Original email address |
|
||||||
|
| `displayName` | string | Display name associated with the email |
|
||||||
|
| `found` | boolean | Whether an email was found for the hash |
|
||||||
|
|
||||||
|
### `enrich_search_logo`
|
||||||
|
|
||||||
|
Get a company logo image URL by domain.
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `apiKey` | string | Yes | Enrich API key |
|
||||||
|
| `url` | string | Yes | Company domain \(e.g., google.com\) |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `logoUrl` | string | URL to fetch the company logo |
|
||||||
|
| `domain` | string | Domain that was looked up |
|
||||||
|
|
||||||
|
|
||||||
@@ -10,6 +10,23 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
|
|||||||
color="#181C1E"
|
color="#181C1E"
|
||||||
/>
|
/>
|
||||||
|
|
||||||
|
{/* MANUAL-CONTENT-START:intro */}
|
||||||
|
[GitHub](https://github.com/) is the world’s leading platform for hosting, collaborating on, and managing source code. GitHub offers powerful tools for version control, code review, branching strategies, and team collaboration within the rich Git ecosystem, underpinning both open source and enterprise development worldwide.
|
||||||
|
|
||||||
|
The GitHub integration in Sim allows your agents to seamlessly automate, interact with, and orchestrate workflows across your repositories. Using this integration, agents can perform an extended set of code and collaboration operations, enabling:
|
||||||
|
|
||||||
|
- **Fetch pull request details:** Retrieve a full overview of any pull request, including file diffs, branch information, metadata, approvals, and a summary of changes, for automation or review workflows.
|
||||||
|
- **Create pull request comments:** Automatically generate or post comments on PRs—such as reviews, suggestions, or status updates—enabling speedy feedback, documentation, or policy enforcement.
|
||||||
|
- **Get repository information:** Access comprehensive repository metadata, including descriptions, visibility, topics, default branches, and contributors. This supports intelligent project analysis, dynamic workflow routing, and organizational reporting.
|
||||||
|
- **Fetch the latest commit:** Quickly obtain details from the newest commit on any branch, including hashes, messages, authors, and timestamps. This is useful for monitoring development velocity, triggering downstream actions, or enforcing quality checks.
|
||||||
|
- **Trigger workflows from GitHub events:** Set up Sim workflows to start automatically from key GitHub events, including pull request creation, review comments, or when new commits are pushed, through easy webhook integration. Automate actions such as deployments, notifications, compliance checks, or documentation updates in real time.
|
||||||
|
- **Monitor and manage repository activity:** Programmatically track contributions, manage PR review states, analyze branch histories, and audit code changes. Empower agents to enforce requirements, coordinate releases, and respond dynamically to development patterns.
|
||||||
|
- **Support for advanced automations:** Combine these operations—for example, fetch PR data, leave context-aware comments, and kick off multi-step Sim workflows on code pushes or PR merges—to automate your team’s engineering processes from end to end.
|
||||||
|
|
||||||
|
By leveraging all of these capabilities, the Sim GitHub integration enables agents to engage deeply in the development lifecycle. Automate code reviews, streamline team feedback, synchronize project artifacts, accelerate CI/CD, and enforce best practices with ease. Bring security, speed, and reliability to your workflows—directly within your Sim-powered automation environment, with full integration into your organization’s GitHub strategy.
|
||||||
|
{/* MANUAL-CONTENT-END */}
|
||||||
|
|
||||||
|
|
||||||
## Usage Instructions
|
## Usage Instructions
|
||||||
|
|
||||||
Integrate Github into the workflow. Can get PR details, create PR comment, get repository info, and get latest commit. Can be used in trigger mode to trigger a workflow when a PR is created, commented on, or a commit is pushed.
|
Integrate Github into the workflow. Can get PR details, create PR comment, get repository info, and get latest commit. Can be used in trigger mode to trigger a workflow when a PR is created, commented on, or a commit is pushed.
|
||||||
|
|||||||
@@ -11,55 +11,17 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
|
|||||||
/>
|
/>
|
||||||
|
|
||||||
{/* MANUAL-CONTENT-START:intro */}
|
{/* MANUAL-CONTENT-START:intro */}
|
||||||
[Google Docs](https://docs.google.com) is a powerful cloud-based document creation and editing service that allows users to create, edit, and collaborate on documents in real-time. As part of Google's productivity suite, Google Docs offers a versatile platform for text documents with robust formatting, commenting, and sharing capabilities.
|
[Google Docs](https://docs.google.com) is Google’s collaborative, cloud-based document service, enabling users to create, edit, and share documents in real time. As an integral part of Google Workspace, Docs offers rich formatting tools, commenting, version history, and seamless integration with other Google productivity tools.
|
||||||
|
|
||||||
Learn how to integrate the Google Docs "Read" tool in Sim to effortlessly fetch data from your docs and to integrate into your workflows. This tutorial walks you through connecting Google Docs, setting up data reads, and using that information to automate processes in real-time. Perfect for syncing live data with your agents.
|
Google Docs empowers individuals and teams to:
|
||||||
|
|
||||||
<iframe
|
- **Create and format documents:** Develop rich text documents with advanced formatting, images, and tables.
|
||||||
width="100%"
|
- **Collaborate and comment:** Multiple users can edit and comment with suggestions instantly.
|
||||||
height="400"
|
- **Track changes and version history:** Review, revert, and manage revisions over time.
|
||||||
src="https://www.youtube.com/embed/f41gy9rBHhE"
|
- **Access from any device:** Work on documents from web, mobile, or desktop with full cloud synchronization.
|
||||||
title="Use the Google Docs Read tool in Sim"
|
- **Integrate across Google services:** Connect Docs with Drive, Sheets, Slides, and external platforms for powerful workflows.
|
||||||
frameBorder="0"
|
|
||||||
allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture"
|
|
||||||
allowFullScreen
|
|
||||||
></iframe>
|
|
||||||
|
|
||||||
Learn how to integrate the Google Docs "Update" tool in Sim to effortlessly add content in your docs through your workflows. This tutorial walks you through connecting Google Docs, configuring data writes, and using that information to automate document updates seamlessly. Perfect for maintaining dynamic, real-time documentation with minimal effort.
|
In Sim, the Google Docs integration allows your agents to read document content, write new content, and create documents programmatically as part of automated workflows. This integration unlocks automation such as document generation, report writing, content extraction, and collaborative editing—bridging the gap between AI-driven workflows and document management in your organization.
|
||||||
|
|
||||||
<iframe
|
|
||||||
width="100%"
|
|
||||||
height="400"
|
|
||||||
src="https://www.youtube.com/embed/L64ROHS2ivA"
|
|
||||||
title="Use the Google Docs Update tool in Sim"
|
|
||||||
frameBorder="0"
|
|
||||||
allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture"
|
|
||||||
allowFullScreen
|
|
||||||
></iframe>
|
|
||||||
|
|
||||||
Learn how to integrate the Google Docs "Create" tool in Sim to effortlessly generate new documents through your workflows. This tutorial walks you through connecting Google Docs, setting up document creation, and using workflow data to populate content automatically. Perfect for streamlining document generation and enhancing productivity.
|
|
||||||
|
|
||||||
<iframe
|
|
||||||
width="100%"
|
|
||||||
height="400"
|
|
||||||
src="https://www.youtube.com/embed/lWpHH4qddWk"
|
|
||||||
title="Use the Google Docs Create tool in Sim"
|
|
||||||
frameBorder="0"
|
|
||||||
allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture"
|
|
||||||
allowFullScreen
|
|
||||||
></iframe>
|
|
||||||
|
|
||||||
With Google Docs, you can:
|
|
||||||
|
|
||||||
- **Create and edit documents**: Develop text documents with comprehensive formatting options
|
|
||||||
- **Collaborate in real-time**: Work simultaneously with multiple users on the same document
|
|
||||||
- **Track changes**: View revision history and restore previous versions
|
|
||||||
- **Comment and suggest**: Provide feedback and propose edits without changing the original content
|
|
||||||
- **Access anywhere**: Use Google Docs across devices with automatic cloud synchronization
|
|
||||||
- **Work offline**: Continue working without internet connection with changes syncing when back online
|
|
||||||
- **Integrate with other services**: Connect with Google Drive, Sheets, Slides, and third-party applications
|
|
||||||
|
|
||||||
In Sim, the Google Docs integration enables your agents to interact directly with document content programmatically. This allows for powerful automation scenarios such as document creation, content extraction, collaborative editing, and document management. Your agents can read existing documents to extract information, write to documents to update content, and create new documents from scratch. This integration bridges the gap between your AI workflows and document management, enabling seamless interaction with one of the world's most widely used document platforms. By connecting Sim with Google Docs, you can automate document workflows, generate reports, extract insights from documents, and maintain documentation - all through your intelligent agents.
|
|
||||||
{/* MANUAL-CONTENT-END */}
|
{/* MANUAL-CONTENT-END */}
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -11,30 +11,18 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
|
|||||||
/>
|
/>
|
||||||
|
|
||||||
{/* MANUAL-CONTENT-START:intro */}
|
{/* MANUAL-CONTENT-START:intro */}
|
||||||
[Google Drive](https://drive.google.com) is Google's cloud storage and file synchronization service that allows users to store files, synchronize files across devices, and share files with others. As a core component of Google's productivity ecosystem, Google Drive offers robust storage, organization, and collaboration capabilities.
|
[Google Drive](https://drive.google.com) is Google’s cloud-based file storage and synchronization service, making it easy to store, manage, share, and access files securely across devices and platforms. As a core element of Google Workspace, Google Drive offers robust tools for file organization, collaboration, and seamless integration with the broader productivity suite.
|
||||||
|
|
||||||
Learn how to integrate the Google Drive tool in Sim to effortlessly pull information from your Drive through your workflows. This tutorial walks you through connecting Google Drive, setting up data retrieval, and using stored documents and files to enhance automation. Perfect for syncing important data with your agents in real-time.
|
Google Drive enables individuals and teams to:
|
||||||
|
|
||||||
<iframe
|
- **Store files in the cloud:** Access documents, images, videos, and more from anywhere with internet connectivity.
|
||||||
width="100%"
|
- **Organize and manage content:** Create and arrange folders, use naming conventions, and leverage search for fast retrieval.
|
||||||
height="400"
|
- **Share and collaborate:** Control file and folder permissions, share with individuals or groups, and collaborate in real time.
|
||||||
src="https://www.youtube.com/embed/cRoRr4b-EAs"
|
- **Leverage powerful search:** Quickly locate files using Google’s search technology.
|
||||||
title="Use the Google Drive tool in Sim"
|
- **Access across devices:** Work with your files on desktop, mobile, or web with full synchronization.
|
||||||
frameBorder="0"
|
- **Integrate deeply across Google services:** Connect with Google Docs, Sheets, Slides, and partner applications in your workflows.
|
||||||
allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture"
|
|
||||||
allowFullScreen
|
|
||||||
></iframe>
|
|
||||||
|
|
||||||
With Google Drive, you can:
|
In Sim, the Google Drive integration allows your agents to read, upload, download, list, and organize your Drive files programmatically. Agents can automate file management, streamline content workflows, and enable no-code automation around document storage and retrieval. By connecting Sim with Google Drive, you empower your agents to incorporate cloud file operations directly into intelligent business processes.
|
||||||
|
|
||||||
- **Store files in the cloud**: Upload and access your files from anywhere with internet access
|
|
||||||
- **Organize content**: Create folders, use color coding, and implement naming conventions
|
|
||||||
- **Share and collaborate**: Control access permissions and work simultaneously on files
|
|
||||||
- **Search efficiently**: Find files quickly with Google's powerful search technology
|
|
||||||
- **Access across devices**: Use Google Drive on desktop, mobile, and web platforms
|
|
||||||
- **Integrate with other services**: Connect with Google Docs, Sheets, Slides, and third-party applications
|
|
||||||
|
|
||||||
In Sim, the Google Drive integration enables your agents to interact directly with your cloud storage programmatically. This allows for powerful automation scenarios such as file management, content organization, and document workflows. Your agents can upload new files to specific folders, download existing files to process their contents, and list folder contents to navigate your storage structure. This integration bridges the gap between your AI workflows and your document management system, enabling seamless file operations without manual intervention. By connecting Sim with Google Drive, you can automate file-based workflows, manage documents intelligently, and incorporate cloud storage operations into your agent's capabilities.
|
|
||||||
{/* MANUAL-CONTENT-END */}
|
{/* MANUAL-CONTENT-END */}
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -11,29 +11,9 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
|
|||||||
/>
|
/>
|
||||||
|
|
||||||
{/* MANUAL-CONTENT-START:intro */}
|
{/* MANUAL-CONTENT-START:intro */}
|
||||||
[Google Search](https://www.google.com) is the world's most widely used search engine, providing access to billions of web pages and information sources. Google Search uses sophisticated algorithms to deliver relevant search results based on user queries, making it an essential tool for finding information on the internet.
|
[Google Search](https://www.google.com) is the world's most widely used web search engine, making it easy to find information, discover new content, and answer questions in real time. With advanced search algorithms, Google Search helps you quickly locate web pages, images, news, and more using simple or complex queries.
|
||||||
|
|
||||||
Learn how to integrate the Google Search tool in Sim to effortlessly fetch real-time search results through your workflows. This tutorial walks you through connecting Google Search, configuring search queries, and using live data to enhance automation. Perfect for powering your agents with up-to-date information and smarter decision-making.
|
In Sim, the Google Search integration allows your agents to search the web and retrieve live information as part of automated workflows. This enables powerful use cases such as automated research, fact-checking, knowledge synthesis, and dynamic content discovery. By connecting Sim with Google Search, your agents can perform queries, process and analyze web results, and incorporate the latest information into their decisions—without manual effort. Enhance your workflows with always up-to-date knowledge from across the internet.
|
||||||
|
|
||||||
<iframe
|
|
||||||
width="100%"
|
|
||||||
height="400"
|
|
||||||
src="https://www.youtube.com/embed/1B7hV9b5UMQ"
|
|
||||||
title="Use the Google Search tool in Sim"
|
|
||||||
frameBorder="0"
|
|
||||||
allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture"
|
|
||||||
allowFullScreen
|
|
||||||
></iframe>
|
|
||||||
|
|
||||||
With Google Search, you can:
|
|
||||||
|
|
||||||
- **Find relevant information**: Access billions of web pages with Google's powerful search algorithms
|
|
||||||
- **Get specific results**: Use search operators to refine and target your queries
|
|
||||||
- **Discover diverse content**: Find text, images, videos, news, and other content types
|
|
||||||
- **Access knowledge graphs**: Get structured information about people, places, and things
|
|
||||||
- **Utilize search features**: Take advantage of specialized search tools like calculators, unit converters, and more
|
|
||||||
|
|
||||||
In Sim, the Google Search integration enables your agents to search the web programmatically and incorporate search results into their workflows. This allows for powerful automation scenarios such as research, fact-checking, data gathering, and information synthesis. Your agents can formulate search queries, retrieve relevant results, and extract information from those results to make decisions or generate insights. This integration bridges the gap between your AI workflows and the vast information available on the web, enabling your agents to access up-to-date information from across the internet. By connecting Sim with Google Search, you can create agents that stay informed with the latest information, verify facts, conduct research, and provide users with relevant web content - all without leaving your workflow.
|
|
||||||
{/* MANUAL-CONTENT-END */}
|
{/* MANUAL-CONTENT-END */}
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -10,6 +10,20 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
|
|||||||
color="#F64F9E"
|
color="#F64F9E"
|
||||||
/>
|
/>
|
||||||
|
|
||||||
|
{/* MANUAL-CONTENT-START:intro */}
|
||||||
|
The Memory tool enables your agents to store, retrieve, and manage conversation memories across workflows. It acts as a persistent memory store that agents can access to maintain conversation context, recall facts, or track actions over time.
|
||||||
|
|
||||||
|
With the Memory tool, you can:
|
||||||
|
|
||||||
|
- **Add new memories**: Store relevant information, events, or conversation history by saving agent or user messages into a structured memory database
|
||||||
|
- **Retrieve memories**: Fetch specific memories or all memories tied to a conversation, helping agents recall previous interactions or facts
|
||||||
|
- **Delete memories**: Remove outdated or incorrect memories from the database to maintain accurate context
|
||||||
|
- **Append to existing conversations**: Update or expand on existing memory threads by appending new messages with the same conversation identifier
|
||||||
|
|
||||||
|
Sim’s Memory block is especially useful for building agents that require persistent state—helping them remember what was said earlier in a conversation, persist facts between tasks, or apply long-term history in decision-making. By integrating Memory, you enable richer, more contextual, and more dynamic workflows for your agents.
|
||||||
|
{/* MANUAL-CONTENT-END */}
|
||||||
|
|
||||||
|
|
||||||
## Usage Instructions
|
## Usage Instructions
|
||||||
|
|
||||||
Integrate Memory into the workflow. Can add, get a memory, get all memories, and delete memories.
|
Integrate Memory into the workflow. Can add, get a memory, get all memories, and delete memories.
|
||||||
|
|||||||
@@ -24,6 +24,7 @@
|
|||||||
"dynamodb",
|
"dynamodb",
|
||||||
"elasticsearch",
|
"elasticsearch",
|
||||||
"elevenlabs",
|
"elevenlabs",
|
||||||
|
"enrich",
|
||||||
"exa",
|
"exa",
|
||||||
"file",
|
"file",
|
||||||
"firecrawl",
|
"firecrawl",
|
||||||
|
|||||||
@@ -10,6 +10,21 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
|
|||||||
color="#181C1E"
|
color="#181C1E"
|
||||||
/>
|
/>
|
||||||
|
|
||||||
|
{/* MANUAL-CONTENT-START:intro */}
|
||||||
|
The Notion tool integration enables your agents to read, create, and manage Notion pages and databases directly within your workflows. This allows you to automate the retrieval and updating of structured content, notes, documents, and more from your Notion workspace.
|
||||||
|
|
||||||
|
With the Notion tool, you can:
|
||||||
|
|
||||||
|
- **Read pages or databases**: Extract rich content or metadata from specified Notion pages or entire databases
|
||||||
|
- **Create new content**: Programmatically create new pages or databases for dynamic content generation
|
||||||
|
- **Append content**: Add new blocks or properties to existing pages and databases
|
||||||
|
- **Query databases**: Run advanced filters and searches on structured Notion data for custom workflows
|
||||||
|
- **Search your workspace**: Locate pages and databases across your Notion workspace automatically
|
||||||
|
|
||||||
|
This tool is ideal for scenarios where agents need to synchronize information, generate reports, or maintain structured notes within Notion. By bringing Notion's capabilities into automated workflows, you empower your agents to interface with knowledge, documentation, and project management data programmatically and seamlessly.
|
||||||
|
{/* MANUAL-CONTENT-END */}
|
||||||
|
|
||||||
|
|
||||||
## Usage Instructions
|
## Usage Instructions
|
||||||
|
|
||||||
Integrate with Notion into the workflow. Can read page, read database, create page, create database, append content, query database, and search workspace.
|
Integrate with Notion into the workflow. Can read page, read database, create page, create database, append content, query database, and search workspace.
|
||||||
|
|||||||
@@ -11,7 +11,7 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
|
|||||||
/>
|
/>
|
||||||
|
|
||||||
{/* MANUAL-CONTENT-START:intro */}
|
{/* MANUAL-CONTENT-START:intro */}
|
||||||
The [Pulse](https://www.pulseapi.com/) tool enables seamless extraction of text and structured content from a wide variety of documents—including PDFs, images, and Office files—using state-of-the-art OCR (Optical Character Recognition) powered by Pulse. Designed for automated agentic workflows, Pulse Parser makes it easy to unlock valuable information trapped in unstructured documents and integrate the extracted content directly into your workflow.
|
The [Pulse](https://www.runpulse.com) tool enables seamless extraction of text and structured content from a wide variety of documents—including PDFs, images, and Office files—using state-of-the-art OCR (Optical Character Recognition) powered by Pulse. Designed for automated agentic workflows, Pulse Parser makes it easy to unlock valuable information trapped in unstructured documents and integrate the extracted content directly into your workflow.
|
||||||
|
|
||||||
With Pulse, you can:
|
With Pulse, you can:
|
||||||
|
|
||||||
|
|||||||
@@ -13,16 +13,6 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
|
|||||||
{/* MANUAL-CONTENT-START:intro */}
|
{/* MANUAL-CONTENT-START:intro */}
|
||||||
[Slack](https://www.slack.com/) is a business communication platform that offers teams a unified place for messaging, tools, and files.
|
[Slack](https://www.slack.com/) is a business communication platform that offers teams a unified place for messaging, tools, and files.
|
||||||
|
|
||||||
<iframe
|
|
||||||
width="100%"
|
|
||||||
height="400"
|
|
||||||
src="https://www.youtube.com/embed/J5jz3UaWmE8"
|
|
||||||
title="Slack Integration with Sim"
|
|
||||||
frameBorder="0"
|
|
||||||
allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture"
|
|
||||||
allowFullScreen
|
|
||||||
></iframe>
|
|
||||||
|
|
||||||
With Slack, you can:
|
With Slack, you can:
|
||||||
|
|
||||||
- **Automate agent notifications**: Send real-time updates from your Sim agents to any Slack channel
|
- **Automate agent notifications**: Send real-time updates from your Sim agents to any Slack channel
|
||||||
|
|||||||
BIN
apps/docs/public/static/quick-reference/lock-block.png
Normal file
BIN
apps/docs/public/static/quick-reference/lock-block.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 34 KiB |
@@ -1,6 +1,6 @@
|
|||||||
import { redirect } from 'next/navigation'
|
import { redirect } from 'next/navigation'
|
||||||
import { getEnv, isTruthy } from '@/lib/core/config/env'
|
import { getEnv, isTruthy } from '@/lib/core/config/env'
|
||||||
import SSOForm from '@/app/(auth)/sso/sso-form'
|
import SSOForm from '@/ee/sso/components/sso-form'
|
||||||
|
|
||||||
export const dynamic = 'force-dynamic'
|
export const dynamic = 'force-dynamic'
|
||||||
|
|
||||||
|
|||||||
@@ -11,7 +11,7 @@ import {
|
|||||||
Database,
|
Database,
|
||||||
DollarSign,
|
DollarSign,
|
||||||
HardDrive,
|
HardDrive,
|
||||||
Workflow,
|
Timer,
|
||||||
} from 'lucide-react'
|
} from 'lucide-react'
|
||||||
import { useRouter } from 'next/navigation'
|
import { useRouter } from 'next/navigation'
|
||||||
import { cn } from '@/lib/core/utils/cn'
|
import { cn } from '@/lib/core/utils/cn'
|
||||||
@@ -44,7 +44,7 @@ interface PricingTier {
|
|||||||
const FREE_PLAN_FEATURES: PricingFeature[] = [
|
const FREE_PLAN_FEATURES: PricingFeature[] = [
|
||||||
{ icon: DollarSign, text: '$20 usage limit' },
|
{ icon: DollarSign, text: '$20 usage limit' },
|
||||||
{ icon: HardDrive, text: '5GB file storage' },
|
{ icon: HardDrive, text: '5GB file storage' },
|
||||||
{ icon: Workflow, text: 'Public template access' },
|
{ icon: Timer, text: '5 min execution limit' },
|
||||||
{ icon: Database, text: 'Limited log retention' },
|
{ icon: Database, text: 'Limited log retention' },
|
||||||
{ icon: Code2, text: 'CLI/SDK Access' },
|
{ icon: Code2, text: 'CLI/SDK Access' },
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -16,7 +16,7 @@ import {
|
|||||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||||
import { getBrandConfig } from '@/lib/branding/branding'
|
import { getBrandConfig } from '@/lib/branding/branding'
|
||||||
import { acquireLock, getRedisClient, releaseLock } from '@/lib/core/config/redis'
|
import { acquireLock, getRedisClient, releaseLock } from '@/lib/core/config/redis'
|
||||||
import { validateExternalUrl } from '@/lib/core/security/input-validation'
|
import { validateUrlWithDNS } from '@/lib/core/security/input-validation.server'
|
||||||
import { SSE_HEADERS } from '@/lib/core/utils/sse'
|
import { SSE_HEADERS } from '@/lib/core/utils/sse'
|
||||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||||
import { markExecutionCancelled } from '@/lib/execution/cancellation'
|
import { markExecutionCancelled } from '@/lib/execution/cancellation'
|
||||||
@@ -1119,7 +1119,7 @@ async function handlePushNotificationSet(
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
const urlValidation = validateExternalUrl(
|
const urlValidation = await validateUrlWithDNS(
|
||||||
params.pushNotificationConfig.url,
|
params.pushNotificationConfig.url,
|
||||||
'Push notification URL'
|
'Push notification URL'
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -4,10 +4,13 @@ import { createLogger } from '@sim/logger'
|
|||||||
import { and, eq, lt, sql } from 'drizzle-orm'
|
import { and, eq, lt, sql } from 'drizzle-orm'
|
||||||
import { type NextRequest, NextResponse } from 'next/server'
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
import { verifyCronAuth } from '@/lib/auth/internal'
|
import { verifyCronAuth } from '@/lib/auth/internal'
|
||||||
|
import { getMaxExecutionTimeout } from '@/lib/core/execution-limits'
|
||||||
|
|
||||||
const logger = createLogger('CleanupStaleExecutions')
|
const logger = createLogger('CleanupStaleExecutions')
|
||||||
|
|
||||||
const STALE_THRESHOLD_MINUTES = 30
|
const STALE_THRESHOLD_MS = getMaxExecutionTimeout() + 5 * 60 * 1000
|
||||||
|
const STALE_THRESHOLD_MINUTES = Math.ceil(STALE_THRESHOLD_MS / 60000)
|
||||||
|
const MAX_INT32 = 2_147_483_647
|
||||||
|
|
||||||
export async function GET(request: NextRequest) {
|
export async function GET(request: NextRequest) {
|
||||||
try {
|
try {
|
||||||
@@ -45,13 +48,14 @@ export async function GET(request: NextRequest) {
|
|||||||
try {
|
try {
|
||||||
const staleDurationMs = Date.now() - new Date(execution.startedAt).getTime()
|
const staleDurationMs = Date.now() - new Date(execution.startedAt).getTime()
|
||||||
const staleDurationMinutes = Math.round(staleDurationMs / 60000)
|
const staleDurationMinutes = Math.round(staleDurationMs / 60000)
|
||||||
|
const totalDurationMs = Math.min(staleDurationMs, MAX_INT32)
|
||||||
|
|
||||||
await db
|
await db
|
||||||
.update(workflowExecutionLogs)
|
.update(workflowExecutionLogs)
|
||||||
.set({
|
.set({
|
||||||
status: 'failed',
|
status: 'failed',
|
||||||
endedAt: new Date(),
|
endedAt: new Date(),
|
||||||
totalDurationMs: staleDurationMs,
|
totalDurationMs,
|
||||||
executionData: sql`jsonb_set(
|
executionData: sql`jsonb_set(
|
||||||
COALESCE(execution_data, '{}'::jsonb),
|
COALESCE(execution_data, '{}'::jsonb),
|
||||||
ARRAY['error'],
|
ARRAY['error'],
|
||||||
|
|||||||
@@ -6,7 +6,11 @@ import { createLogger } from '@sim/logger'
|
|||||||
import binaryExtensionsList from 'binary-extensions'
|
import binaryExtensionsList from 'binary-extensions'
|
||||||
import { type NextRequest, NextResponse } from 'next/server'
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||||
import { secureFetchWithPinnedIP, validateUrlWithDNS } from '@/lib/core/security/input-validation'
|
import {
|
||||||
|
secureFetchWithPinnedIP,
|
||||||
|
validateUrlWithDNS,
|
||||||
|
} from '@/lib/core/security/input-validation.server'
|
||||||
|
import { sanitizeUrlForLog } from '@/lib/core/utils/logging'
|
||||||
import { isSupportedFileType, parseFile } from '@/lib/file-parsers'
|
import { isSupportedFileType, parseFile } from '@/lib/file-parsers'
|
||||||
import { isUsingCloudStorage, type StorageContext, StorageService } from '@/lib/uploads'
|
import { isUsingCloudStorage, type StorageContext, StorageService } from '@/lib/uploads'
|
||||||
import { uploadExecutionFile } from '@/lib/uploads/contexts/execution'
|
import { uploadExecutionFile } from '@/lib/uploads/contexts/execution'
|
||||||
@@ -19,6 +23,7 @@ import {
|
|||||||
getMimeTypeFromExtension,
|
getMimeTypeFromExtension,
|
||||||
getViewerUrl,
|
getViewerUrl,
|
||||||
inferContextFromKey,
|
inferContextFromKey,
|
||||||
|
isInternalFileUrl,
|
||||||
} from '@/lib/uploads/utils/file-utils'
|
} from '@/lib/uploads/utils/file-utils'
|
||||||
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
|
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
|
||||||
import { verifyFileAccess } from '@/app/api/files/authorization'
|
import { verifyFileAccess } from '@/app/api/files/authorization'
|
||||||
@@ -215,7 +220,7 @@ async function parseFileSingle(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if (filePath.includes('/api/files/serve/')) {
|
if (isInternalFileUrl(filePath)) {
|
||||||
return handleCloudFile(filePath, fileType, undefined, userId, executionContext)
|
return handleCloudFile(filePath, fileType, undefined, userId, executionContext)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -246,7 +251,7 @@ function validateFilePath(filePath: string): { isValid: boolean; error?: string
|
|||||||
return { isValid: false, error: 'Invalid path: tilde character not allowed' }
|
return { isValid: false, error: 'Invalid path: tilde character not allowed' }
|
||||||
}
|
}
|
||||||
|
|
||||||
if (filePath.startsWith('/') && !filePath.startsWith('/api/files/serve/')) {
|
if (filePath.startsWith('/') && !isInternalFileUrl(filePath)) {
|
||||||
return { isValid: false, error: 'Path outside allowed directory' }
|
return { isValid: false, error: 'Path outside allowed directory' }
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -420,7 +425,7 @@ async function handleExternalUrl(
|
|||||||
|
|
||||||
return parseResult
|
return parseResult
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error(`Error handling external URL ${url}:`, error)
|
logger.error(`Error handling external URL ${sanitizeUrlForLog(url)}:`, error)
|
||||||
return {
|
return {
|
||||||
success: false,
|
success: false,
|
||||||
error: `Error fetching URL: ${(error as Error).message}`,
|
error: `Error fetching URL: ${(error as Error).message}`,
|
||||||
|
|||||||
@@ -21,6 +21,7 @@ import { and, eq } from 'drizzle-orm'
|
|||||||
import { type NextRequest, NextResponse } from 'next/server'
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||||
import { generateInternalToken } from '@/lib/auth/internal'
|
import { generateInternalToken } from '@/lib/auth/internal'
|
||||||
|
import { getMaxExecutionTimeout } from '@/lib/core/execution-limits'
|
||||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||||
|
|
||||||
const logger = createLogger('WorkflowMcpServeAPI')
|
const logger = createLogger('WorkflowMcpServeAPI')
|
||||||
@@ -264,7 +265,7 @@ async function handleToolsCall(
|
|||||||
method: 'POST',
|
method: 'POST',
|
||||||
headers,
|
headers,
|
||||||
body: JSON.stringify({ input: params.arguments || {}, triggerType: 'mcp' }),
|
body: JSON.stringify({ input: params.arguments || {}, triggerType: 'mcp' }),
|
||||||
signal: AbortSignal.timeout(600000), // 10 minute timeout
|
signal: AbortSignal.timeout(getMaxExecutionTimeout()),
|
||||||
})
|
})
|
||||||
|
|
||||||
const executeResult = await response.json()
|
const executeResult = await response.json()
|
||||||
@@ -284,7 +285,7 @@ async function handleToolsCall(
|
|||||||
content: [
|
content: [
|
||||||
{ type: 'text', text: JSON.stringify(executeResult.output || executeResult, null, 2) },
|
{ type: 'text', text: JSON.stringify(executeResult.output || executeResult, null, 2) },
|
||||||
],
|
],
|
||||||
isError: !executeResult.success,
|
isError: executeResult.success === false,
|
||||||
}
|
}
|
||||||
|
|
||||||
return NextResponse.json(createResponse(id, result))
|
return NextResponse.json(createResponse(id, result))
|
||||||
|
|||||||
@@ -1,5 +1,8 @@
|
|||||||
import { createLogger } from '@sim/logger'
|
import { createLogger } from '@sim/logger'
|
||||||
import type { NextRequest } from 'next/server'
|
import type { NextRequest } from 'next/server'
|
||||||
|
import { getHighestPrioritySubscription } from '@/lib/billing/core/plan'
|
||||||
|
import { getExecutionTimeout } from '@/lib/core/execution-limits'
|
||||||
|
import type { SubscriptionPlan } from '@/lib/core/rate-limiter/types'
|
||||||
import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware'
|
import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware'
|
||||||
import { mcpService } from '@/lib/mcp/service'
|
import { mcpService } from '@/lib/mcp/service'
|
||||||
import type { McpTool, McpToolCall, McpToolResult } from '@/lib/mcp/types'
|
import type { McpTool, McpToolCall, McpToolResult } from '@/lib/mcp/types'
|
||||||
@@ -7,7 +10,6 @@ import {
|
|||||||
categorizeError,
|
categorizeError,
|
||||||
createMcpErrorResponse,
|
createMcpErrorResponse,
|
||||||
createMcpSuccessResponse,
|
createMcpSuccessResponse,
|
||||||
MCP_CONSTANTS,
|
|
||||||
validateStringParam,
|
validateStringParam,
|
||||||
} from '@/lib/mcp/utils'
|
} from '@/lib/mcp/utils'
|
||||||
|
|
||||||
@@ -171,13 +173,16 @@ export const POST = withMcpAuth('read')(
|
|||||||
arguments: args,
|
arguments: args,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const userSubscription = await getHighestPrioritySubscription(userId)
|
||||||
|
const executionTimeout = getExecutionTimeout(
|
||||||
|
userSubscription?.plan as SubscriptionPlan | undefined,
|
||||||
|
'sync'
|
||||||
|
)
|
||||||
|
|
||||||
const result = await Promise.race([
|
const result = await Promise.race([
|
||||||
mcpService.executeTool(userId, serverId, toolCall, workspaceId),
|
mcpService.executeTool(userId, serverId, toolCall, workspaceId),
|
||||||
new Promise<never>((_, reject) =>
|
new Promise<never>((_, reject) =>
|
||||||
setTimeout(
|
setTimeout(() => reject(new Error('Tool execution timeout')), executionTimeout)
|
||||||
() => reject(new Error('Tool execution timeout')),
|
|
||||||
MCP_CONSTANTS.EXECUTION_TIMEOUT
|
|
||||||
)
|
|
||||||
),
|
),
|
||||||
])
|
])
|
||||||
|
|
||||||
|
|||||||
@@ -20,6 +20,7 @@ import { z } from 'zod'
|
|||||||
import { getEmailSubject, renderInvitationEmail } from '@/components/emails'
|
import { getEmailSubject, renderInvitationEmail } from '@/components/emails'
|
||||||
import { getSession } from '@/lib/auth'
|
import { getSession } from '@/lib/auth'
|
||||||
import { hasAccessControlAccess } from '@/lib/billing'
|
import { hasAccessControlAccess } from '@/lib/billing'
|
||||||
|
import { syncUsageLimitsFromSubscription } from '@/lib/billing/core/usage'
|
||||||
import { requireStripeClient } from '@/lib/billing/stripe-client'
|
import { requireStripeClient } from '@/lib/billing/stripe-client'
|
||||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||||
import { sendEmail } from '@/lib/messaging/email/mailer'
|
import { sendEmail } from '@/lib/messaging/email/mailer'
|
||||||
@@ -501,6 +502,18 @@ export async function PUT(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (status === 'accepted') {
|
||||||
|
try {
|
||||||
|
await syncUsageLimitsFromSubscription(session.user.id)
|
||||||
|
} catch (syncError) {
|
||||||
|
logger.error('Failed to sync usage limits after joining org', {
|
||||||
|
userId: session.user.id,
|
||||||
|
organizationId,
|
||||||
|
error: syncError,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
logger.info(`Organization invitation ${status}`, {
|
logger.info(`Organization invitation ${status}`, {
|
||||||
organizationId,
|
organizationId,
|
||||||
invitationId,
|
invitationId,
|
||||||
|
|||||||
@@ -29,7 +29,7 @@ import { hasWorkspaceAdminAccess } from '@/lib/workspaces/permissions/utils'
|
|||||||
import {
|
import {
|
||||||
InvitationsNotAllowedError,
|
InvitationsNotAllowedError,
|
||||||
validateInvitationsAllowed,
|
validateInvitationsAllowed,
|
||||||
} from '@/executor/utils/permission-check'
|
} from '@/ee/access-control/utils/permission-check'
|
||||||
|
|
||||||
const logger = createLogger('OrganizationInvitations')
|
const logger = createLogger('OrganizationInvitations')
|
||||||
|
|
||||||
|
|||||||
@@ -4,6 +4,7 @@ import { type NextRequest, NextResponse } from 'next/server'
|
|||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { createA2AClient, extractTextContent, isTerminalState } from '@/lib/a2a/utils'
|
import { createA2AClient, extractTextContent, isTerminalState } from '@/lib/a2a/utils'
|
||||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||||
|
import { validateUrlWithDNS } from '@/lib/core/security/input-validation.server'
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
|
|
||||||
export const dynamic = 'force-dynamic'
|
export const dynamic = 'force-dynamic'
|
||||||
@@ -95,6 +96,14 @@ export async function POST(request: NextRequest) {
|
|||||||
if (validatedData.files && validatedData.files.length > 0) {
|
if (validatedData.files && validatedData.files.length > 0) {
|
||||||
for (const file of validatedData.files) {
|
for (const file of validatedData.files) {
|
||||||
if (file.type === 'url') {
|
if (file.type === 'url') {
|
||||||
|
const urlValidation = await validateUrlWithDNS(file.data, 'fileUrl')
|
||||||
|
if (!urlValidation.isValid) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{ success: false, error: urlValidation.error },
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
const filePart: FilePart = {
|
const filePart: FilePart = {
|
||||||
kind: 'file',
|
kind: 'file',
|
||||||
file: {
|
file: {
|
||||||
|
|||||||
@@ -3,7 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
|
|||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { createA2AClient } from '@/lib/a2a/utils'
|
import { createA2AClient } from '@/lib/a2a/utils'
|
||||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||||
import { validateExternalUrl } from '@/lib/core/security/input-validation'
|
import { validateUrlWithDNS } from '@/lib/core/security/input-validation.server'
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
|
|
||||||
export const dynamic = 'force-dynamic'
|
export const dynamic = 'force-dynamic'
|
||||||
@@ -40,7 +40,7 @@ export async function POST(request: NextRequest) {
|
|||||||
const body = await request.json()
|
const body = await request.json()
|
||||||
const validatedData = A2ASetPushNotificationSchema.parse(body)
|
const validatedData = A2ASetPushNotificationSchema.parse(body)
|
||||||
|
|
||||||
const urlValidation = validateExternalUrl(validatedData.webhookUrl, 'Webhook URL')
|
const urlValidation = await validateUrlWithDNS(validatedData.webhookUrl, 'Webhook URL')
|
||||||
if (!urlValidation.isValid) {
|
if (!urlValidation.isValid) {
|
||||||
logger.warn(`[${requestId}] Invalid webhook URL`, { error: urlValidation.error })
|
logger.warn(`[${requestId}] Invalid webhook URL`, { error: urlValidation.error })
|
||||||
return NextResponse.json(
|
return NextResponse.json(
|
||||||
|
|||||||
@@ -92,6 +92,9 @@ export async function POST(request: NextRequest) {
|
|||||||
formData.append('comment', comment)
|
formData.append('comment', comment)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Add minorEdit field as required by Confluence API
|
||||||
|
formData.append('minorEdit', 'false')
|
||||||
|
|
||||||
const response = await fetch(url, {
|
const response = await fetch(url, {
|
||||||
method: 'POST',
|
method: 'POST',
|
||||||
headers: {
|
headers: {
|
||||||
|
|||||||
@@ -4,6 +4,7 @@ import { z } from 'zod'
|
|||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
import { validateNumericId } from '@/lib/core/security/input-validation'
|
import { validateNumericId } from '@/lib/core/security/input-validation'
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
|
import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
|
||||||
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
|
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
|
||||||
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
||||||
|
|
||||||
@@ -15,7 +16,7 @@ const DiscordSendMessageSchema = z.object({
|
|||||||
botToken: z.string().min(1, 'Bot token is required'),
|
botToken: z.string().min(1, 'Bot token is required'),
|
||||||
channelId: z.string().min(1, 'Channel ID is required'),
|
channelId: z.string().min(1, 'Channel ID is required'),
|
||||||
content: z.string().optional().nullable(),
|
content: z.string().optional().nullable(),
|
||||||
files: z.array(z.any()).optional().nullable(),
|
files: RawFileInputArraySchema.optional().nullable(),
|
||||||
})
|
})
|
||||||
|
|
||||||
export async function POST(request: NextRequest) {
|
export async function POST(request: NextRequest) {
|
||||||
@@ -101,6 +102,12 @@ export async function POST(request: NextRequest) {
|
|||||||
logger.info(`[${requestId}] Processing ${validatedData.files.length} file(s)`)
|
logger.info(`[${requestId}] Processing ${validatedData.files.length} file(s)`)
|
||||||
|
|
||||||
const userFiles = processFilesToUserFiles(validatedData.files, requestId, logger)
|
const userFiles = processFilesToUserFiles(validatedData.files, requestId, logger)
|
||||||
|
const filesOutput: Array<{
|
||||||
|
name: string
|
||||||
|
mimeType: string
|
||||||
|
data: string
|
||||||
|
size: number
|
||||||
|
}> = []
|
||||||
|
|
||||||
if (userFiles.length === 0) {
|
if (userFiles.length === 0) {
|
||||||
logger.warn(`[${requestId}] No valid files to upload, falling back to text-only`)
|
logger.warn(`[${requestId}] No valid files to upload, falling back to text-only`)
|
||||||
@@ -137,6 +144,12 @@ export async function POST(request: NextRequest) {
|
|||||||
logger.info(`[${requestId}] Downloading file ${i}: ${userFile.name}`)
|
logger.info(`[${requestId}] Downloading file ${i}: ${userFile.name}`)
|
||||||
|
|
||||||
const buffer = await downloadFileFromStorage(userFile, requestId, logger)
|
const buffer = await downloadFileFromStorage(userFile, requestId, logger)
|
||||||
|
filesOutput.push({
|
||||||
|
name: userFile.name,
|
||||||
|
mimeType: userFile.type || 'application/octet-stream',
|
||||||
|
data: buffer.toString('base64'),
|
||||||
|
size: buffer.length,
|
||||||
|
})
|
||||||
|
|
||||||
const blob = new Blob([new Uint8Array(buffer)], { type: userFile.type })
|
const blob = new Blob([new Uint8Array(buffer)], { type: userFile.type })
|
||||||
formData.append(`files[${i}]`, blob, userFile.name)
|
formData.append(`files[${i}]`, blob, userFile.name)
|
||||||
@@ -173,6 +186,7 @@ export async function POST(request: NextRequest) {
|
|||||||
message: data.content,
|
message: data.content,
|
||||||
data: data,
|
data: data,
|
||||||
fileCount: userFiles.length,
|
fileCount: userFiles.length,
|
||||||
|
files: filesOutput,
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
|
|||||||
141
apps/sim/app/api/tools/dropbox/upload/route.ts
Normal file
141
apps/sim/app/api/tools/dropbox/upload/route.ts
Normal file
@@ -0,0 +1,141 @@
|
|||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
|
import { z } from 'zod'
|
||||||
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
|
import { FileInputSchema } from '@/lib/uploads/utils/file-schemas'
|
||||||
|
import { processFilesToUserFiles, type RawFileInput } from '@/lib/uploads/utils/file-utils'
|
||||||
|
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
||||||
|
|
||||||
|
export const dynamic = 'force-dynamic'
|
||||||
|
|
||||||
|
const logger = createLogger('DropboxUploadAPI')
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Escapes non-ASCII characters in JSON string for HTTP header safety.
|
||||||
|
* Dropbox API requires characters 0x7F and all non-ASCII to be escaped as \uXXXX.
|
||||||
|
*/
|
||||||
|
function httpHeaderSafeJson(value: object): string {
|
||||||
|
return JSON.stringify(value).replace(/[\u007f-\uffff]/g, (c) => {
|
||||||
|
return `\\u${(`0000${c.charCodeAt(0).toString(16)}`).slice(-4)}`
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
const DropboxUploadSchema = z.object({
|
||||||
|
accessToken: z.string().min(1, 'Access token is required'),
|
||||||
|
path: z.string().min(1, 'Destination path is required'),
|
||||||
|
file: FileInputSchema.optional().nullable(),
|
||||||
|
// Legacy field for backwards compatibility
|
||||||
|
fileContent: z.string().optional().nullable(),
|
||||||
|
fileName: z.string().optional().nullable(),
|
||||||
|
mode: z.enum(['add', 'overwrite']).optional().nullable(),
|
||||||
|
autorename: z.boolean().optional().nullable(),
|
||||||
|
mute: z.boolean().optional().nullable(),
|
||||||
|
})
|
||||||
|
|
||||||
|
export async function POST(request: NextRequest) {
|
||||||
|
const requestId = generateRequestId()
|
||||||
|
|
||||||
|
try {
|
||||||
|
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
|
||||||
|
|
||||||
|
if (!authResult.success) {
|
||||||
|
logger.warn(`[${requestId}] Unauthorized Dropbox upload attempt: ${authResult.error}`)
|
||||||
|
return NextResponse.json(
|
||||||
|
{ success: false, error: authResult.error || 'Authentication required' },
|
||||||
|
{ status: 401 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info(`[${requestId}] Authenticated Dropbox upload request via ${authResult.authType}`)
|
||||||
|
|
||||||
|
const body = await request.json()
|
||||||
|
const validatedData = DropboxUploadSchema.parse(body)
|
||||||
|
|
||||||
|
let fileBuffer: Buffer
|
||||||
|
let fileName: string
|
||||||
|
|
||||||
|
// Prefer UserFile input, fall back to legacy base64 string
|
||||||
|
if (validatedData.file) {
|
||||||
|
// Process UserFile input
|
||||||
|
const userFiles = processFilesToUserFiles(
|
||||||
|
[validatedData.file as RawFileInput],
|
||||||
|
requestId,
|
||||||
|
logger
|
||||||
|
)
|
||||||
|
|
||||||
|
if (userFiles.length === 0) {
|
||||||
|
return NextResponse.json({ success: false, error: 'Invalid file input' }, { status: 400 })
|
||||||
|
}
|
||||||
|
|
||||||
|
const userFile = userFiles[0]
|
||||||
|
logger.info(`[${requestId}] Downloading file: ${userFile.name} (${userFile.size} bytes)`)
|
||||||
|
|
||||||
|
fileBuffer = await downloadFileFromStorage(userFile, requestId, logger)
|
||||||
|
fileName = userFile.name
|
||||||
|
} else if (validatedData.fileContent) {
|
||||||
|
// Legacy: base64 string input (backwards compatibility)
|
||||||
|
logger.info(`[${requestId}] Using legacy base64 content input`)
|
||||||
|
fileBuffer = Buffer.from(validatedData.fileContent, 'base64')
|
||||||
|
fileName = validatedData.fileName || 'file'
|
||||||
|
} else {
|
||||||
|
return NextResponse.json({ success: false, error: 'File is required' }, { status: 400 })
|
||||||
|
}
|
||||||
|
|
||||||
|
// Determine final path
|
||||||
|
let finalPath = validatedData.path
|
||||||
|
if (finalPath.endsWith('/')) {
|
||||||
|
finalPath = `${finalPath}${fileName}`
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info(`[${requestId}] Uploading to Dropbox: ${finalPath} (${fileBuffer.length} bytes)`)
|
||||||
|
|
||||||
|
const dropboxApiArg = {
|
||||||
|
path: finalPath,
|
||||||
|
mode: validatedData.mode || 'add',
|
||||||
|
autorename: validatedData.autorename ?? true,
|
||||||
|
mute: validatedData.mute ?? false,
|
||||||
|
}
|
||||||
|
|
||||||
|
const response = await fetch('https://content.dropboxapi.com/2/files/upload', {
|
||||||
|
method: 'POST',
|
||||||
|
headers: {
|
||||||
|
Authorization: `Bearer ${validatedData.accessToken}`,
|
||||||
|
'Content-Type': 'application/octet-stream',
|
||||||
|
'Dropbox-API-Arg': httpHeaderSafeJson(dropboxApiArg),
|
||||||
|
},
|
||||||
|
body: new Uint8Array(fileBuffer),
|
||||||
|
})
|
||||||
|
|
||||||
|
const data = await response.json()
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
const errorMessage = data.error_summary || data.error?.message || 'Failed to upload file'
|
||||||
|
logger.error(`[${requestId}] Dropbox API error:`, { status: response.status, data })
|
||||||
|
return NextResponse.json({ success: false, error: errorMessage }, { status: response.status })
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info(`[${requestId}] File uploaded successfully to ${data.path_display}`)
|
||||||
|
|
||||||
|
return NextResponse.json({
|
||||||
|
success: true,
|
||||||
|
output: {
|
||||||
|
file: data,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
} catch (error) {
|
||||||
|
if (error instanceof z.ZodError) {
|
||||||
|
logger.warn(`[${requestId}] Validation error:`, error.errors)
|
||||||
|
return NextResponse.json(
|
||||||
|
{ success: false, error: error.errors[0]?.message || 'Validation failed' },
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.error(`[${requestId}] Unexpected error:`, error)
|
||||||
|
return NextResponse.json(
|
||||||
|
{ success: false, error: error instanceof Error ? error.message : 'Unknown error' },
|
||||||
|
{ status: 500 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
195
apps/sim/app/api/tools/github/latest-commit/route.ts
Normal file
195
apps/sim/app/api/tools/github/latest-commit/route.ts
Normal file
@@ -0,0 +1,195 @@
|
|||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
|
import { z } from 'zod'
|
||||||
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
|
import {
|
||||||
|
secureFetchWithPinnedIP,
|
||||||
|
validateUrlWithDNS,
|
||||||
|
} from '@/lib/core/security/input-validation.server'
|
||||||
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
|
|
||||||
|
export const dynamic = 'force-dynamic'
|
||||||
|
|
||||||
|
const logger = createLogger('GitHubLatestCommitAPI')
|
||||||
|
|
||||||
|
interface GitHubErrorResponse {
|
||||||
|
message?: string
|
||||||
|
}
|
||||||
|
|
||||||
|
interface GitHubCommitResponse {
|
||||||
|
sha: string
|
||||||
|
html_url: string
|
||||||
|
commit: {
|
||||||
|
message: string
|
||||||
|
author: { name: string; email: string; date: string }
|
||||||
|
committer: { name: string; email: string; date: string }
|
||||||
|
}
|
||||||
|
author?: { login: string; avatar_url: string; html_url: string }
|
||||||
|
committer?: { login: string; avatar_url: string; html_url: string }
|
||||||
|
stats?: { additions: number; deletions: number; total: number }
|
||||||
|
files?: Array<{
|
||||||
|
filename: string
|
||||||
|
status: string
|
||||||
|
additions: number
|
||||||
|
deletions: number
|
||||||
|
changes: number
|
||||||
|
patch?: string
|
||||||
|
raw_url?: string
|
||||||
|
blob_url?: string
|
||||||
|
}>
|
||||||
|
}
|
||||||
|
|
||||||
|
const GitHubLatestCommitSchema = z.object({
|
||||||
|
owner: z.string().min(1, 'Owner is required'),
|
||||||
|
repo: z.string().min(1, 'Repo is required'),
|
||||||
|
branch: z.string().optional().nullable(),
|
||||||
|
apiKey: z.string().min(1, 'API key is required'),
|
||||||
|
})
|
||||||
|
|
||||||
|
export async function POST(request: NextRequest) {
|
||||||
|
const requestId = generateRequestId()
|
||||||
|
|
||||||
|
try {
|
||||||
|
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
|
||||||
|
|
||||||
|
if (!authResult.success) {
|
||||||
|
logger.warn(`[${requestId}] Unauthorized GitHub latest commit attempt: ${authResult.error}`)
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: authResult.error || 'Authentication required',
|
||||||
|
},
|
||||||
|
{ status: 401 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const body = await request.json()
|
||||||
|
const validatedData = GitHubLatestCommitSchema.parse(body)
|
||||||
|
|
||||||
|
const { owner, repo, branch, apiKey } = validatedData
|
||||||
|
|
||||||
|
const baseUrl = `https://api.github.com/repos/${owner}/${repo}`
|
||||||
|
const commitUrl = branch ? `${baseUrl}/commits/${branch}` : `${baseUrl}/commits/HEAD`
|
||||||
|
|
||||||
|
logger.info(`[${requestId}] Fetching latest commit from GitHub`, { owner, repo, branch })
|
||||||
|
|
||||||
|
const urlValidation = await validateUrlWithDNS(commitUrl, 'commitUrl')
|
||||||
|
if (!urlValidation.isValid) {
|
||||||
|
return NextResponse.json({ success: false, error: urlValidation.error }, { status: 400 })
|
||||||
|
}
|
||||||
|
|
||||||
|
const response = await secureFetchWithPinnedIP(commitUrl, urlValidation.resolvedIP!, {
|
||||||
|
method: 'GET',
|
||||||
|
headers: {
|
||||||
|
Accept: 'application/vnd.github.v3+json',
|
||||||
|
Authorization: `Bearer ${apiKey}`,
|
||||||
|
'X-GitHub-Api-Version': '2022-11-28',
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
const errorData = (await response.json().catch(() => ({}))) as GitHubErrorResponse
|
||||||
|
logger.error(`[${requestId}] GitHub API error`, {
|
||||||
|
status: response.status,
|
||||||
|
error: errorData,
|
||||||
|
})
|
||||||
|
return NextResponse.json(
|
||||||
|
{ success: false, error: errorData.message || `GitHub API error: ${response.status}` },
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const data = (await response.json()) as GitHubCommitResponse
|
||||||
|
|
||||||
|
const content = `Latest commit: "${data.commit.message}" by ${data.commit.author.name} on ${data.commit.author.date}. SHA: ${data.sha}`
|
||||||
|
|
||||||
|
const files = data.files || []
|
||||||
|
const fileDetailsWithContent = []
|
||||||
|
|
||||||
|
for (const file of files) {
|
||||||
|
const fileDetail: Record<string, any> = {
|
||||||
|
filename: file.filename,
|
||||||
|
additions: file.additions,
|
||||||
|
deletions: file.deletions,
|
||||||
|
changes: file.changes,
|
||||||
|
status: file.status,
|
||||||
|
raw_url: file.raw_url,
|
||||||
|
blob_url: file.blob_url,
|
||||||
|
patch: file.patch,
|
||||||
|
content: undefined,
|
||||||
|
}
|
||||||
|
|
||||||
|
if (file.status !== 'removed' && file.raw_url) {
|
||||||
|
try {
|
||||||
|
const rawUrlValidation = await validateUrlWithDNS(file.raw_url, 'rawUrl')
|
||||||
|
if (rawUrlValidation.isValid) {
|
||||||
|
const contentResponse = await secureFetchWithPinnedIP(
|
||||||
|
file.raw_url,
|
||||||
|
rawUrlValidation.resolvedIP!,
|
||||||
|
{
|
||||||
|
headers: {
|
||||||
|
Authorization: `Bearer ${apiKey}`,
|
||||||
|
'X-GitHub-Api-Version': '2022-11-28',
|
||||||
|
},
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
if (contentResponse.ok) {
|
||||||
|
fileDetail.content = await contentResponse.text()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
logger.warn(`[${requestId}] Failed to fetch content for ${file.filename}:`, error)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fileDetailsWithContent.push(fileDetail)
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info(`[${requestId}] Latest commit fetched successfully`, {
|
||||||
|
sha: data.sha,
|
||||||
|
fileCount: files.length,
|
||||||
|
})
|
||||||
|
|
||||||
|
return NextResponse.json({
|
||||||
|
success: true,
|
||||||
|
output: {
|
||||||
|
content,
|
||||||
|
metadata: {
|
||||||
|
sha: data.sha,
|
||||||
|
html_url: data.html_url,
|
||||||
|
commit_message: data.commit.message,
|
||||||
|
author: {
|
||||||
|
name: data.commit.author.name,
|
||||||
|
login: data.author?.login || 'Unknown',
|
||||||
|
avatar_url: data.author?.avatar_url || '',
|
||||||
|
html_url: data.author?.html_url || '',
|
||||||
|
},
|
||||||
|
committer: {
|
||||||
|
name: data.commit.committer.name,
|
||||||
|
login: data.committer?.login || 'Unknown',
|
||||||
|
avatar_url: data.committer?.avatar_url || '',
|
||||||
|
html_url: data.committer?.html_url || '',
|
||||||
|
},
|
||||||
|
stats: data.stats
|
||||||
|
? {
|
||||||
|
additions: data.stats.additions,
|
||||||
|
deletions: data.stats.deletions,
|
||||||
|
total: data.stats.total,
|
||||||
|
}
|
||||||
|
: undefined,
|
||||||
|
files: fileDetailsWithContent.length > 0 ? fileDetailsWithContent : undefined,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
})
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(`[${requestId}] Error fetching GitHub latest commit:`, error)
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: error instanceof Error ? error.message : 'Unknown error occurred',
|
||||||
|
},
|
||||||
|
{ status: 500 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -3,6 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
|
|||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
|
import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
|
||||||
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
|
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
|
||||||
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
||||||
import {
|
import {
|
||||||
@@ -28,7 +29,7 @@ const GmailDraftSchema = z.object({
|
|||||||
replyToMessageId: z.string().optional().nullable(),
|
replyToMessageId: z.string().optional().nullable(),
|
||||||
cc: z.string().optional().nullable(),
|
cc: z.string().optional().nullable(),
|
||||||
bcc: z.string().optional().nullable(),
|
bcc: z.string().optional().nullable(),
|
||||||
attachments: z.array(z.any()).optional().nullable(),
|
attachments: RawFileInputArraySchema.optional().nullable(),
|
||||||
})
|
})
|
||||||
|
|
||||||
export async function POST(request: NextRequest) {
|
export async function POST(request: NextRequest) {
|
||||||
|
|||||||
@@ -3,6 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
|
|||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
|
import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
|
||||||
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
|
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
|
||||||
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
||||||
import {
|
import {
|
||||||
@@ -28,7 +29,7 @@ const GmailSendSchema = z.object({
|
|||||||
replyToMessageId: z.string().optional().nullable(),
|
replyToMessageId: z.string().optional().nullable(),
|
||||||
cc: z.string().optional().nullable(),
|
cc: z.string().optional().nullable(),
|
||||||
bcc: z.string().optional().nullable(),
|
bcc: z.string().optional().nullable(),
|
||||||
attachments: z.array(z.any()).optional().nullable(),
|
attachments: RawFileInputArraySchema.optional().nullable(),
|
||||||
})
|
})
|
||||||
|
|
||||||
export async function POST(request: NextRequest) {
|
export async function POST(request: NextRequest) {
|
||||||
|
|||||||
252
apps/sim/app/api/tools/google_drive/download/route.ts
Normal file
252
apps/sim/app/api/tools/google_drive/download/route.ts
Normal file
@@ -0,0 +1,252 @@
|
|||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
|
import { z } from 'zod'
|
||||||
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
|
import {
|
||||||
|
secureFetchWithPinnedIP,
|
||||||
|
validateUrlWithDNS,
|
||||||
|
} from '@/lib/core/security/input-validation.server'
|
||||||
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
|
import type { GoogleDriveFile, GoogleDriveRevision } from '@/tools/google_drive/types'
|
||||||
|
import {
|
||||||
|
ALL_FILE_FIELDS,
|
||||||
|
ALL_REVISION_FIELDS,
|
||||||
|
DEFAULT_EXPORT_FORMATS,
|
||||||
|
GOOGLE_WORKSPACE_MIME_TYPES,
|
||||||
|
} from '@/tools/google_drive/utils'
|
||||||
|
|
||||||
|
export const dynamic = 'force-dynamic'
|
||||||
|
|
||||||
|
const logger = createLogger('GoogleDriveDownloadAPI')
|
||||||
|
|
||||||
|
/** Google API error response structure */
|
||||||
|
interface GoogleApiErrorResponse {
|
||||||
|
error?: {
|
||||||
|
message?: string
|
||||||
|
code?: number
|
||||||
|
status?: string
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Google Drive revisions list response */
|
||||||
|
interface GoogleDriveRevisionsResponse {
|
||||||
|
revisions?: GoogleDriveRevision[]
|
||||||
|
nextPageToken?: string
|
||||||
|
}
|
||||||
|
|
||||||
|
const GoogleDriveDownloadSchema = z.object({
|
||||||
|
accessToken: z.string().min(1, 'Access token is required'),
|
||||||
|
fileId: z.string().min(1, 'File ID is required'),
|
||||||
|
mimeType: z.string().optional().nullable(),
|
||||||
|
fileName: z.string().optional().nullable(),
|
||||||
|
includeRevisions: z.boolean().optional().default(true),
|
||||||
|
})
|
||||||
|
|
||||||
|
export async function POST(request: NextRequest) {
|
||||||
|
const requestId = generateRequestId()
|
||||||
|
|
||||||
|
try {
|
||||||
|
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
|
||||||
|
|
||||||
|
if (!authResult.success) {
|
||||||
|
logger.warn(`[${requestId}] Unauthorized Google Drive download attempt: ${authResult.error}`)
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: authResult.error || 'Authentication required',
|
||||||
|
},
|
||||||
|
{ status: 401 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const body = await request.json()
|
||||||
|
const validatedData = GoogleDriveDownloadSchema.parse(body)
|
||||||
|
|
||||||
|
const {
|
||||||
|
accessToken,
|
||||||
|
fileId,
|
||||||
|
mimeType: exportMimeType,
|
||||||
|
fileName,
|
||||||
|
includeRevisions,
|
||||||
|
} = validatedData
|
||||||
|
const authHeader = `Bearer ${accessToken}`
|
||||||
|
|
||||||
|
logger.info(`[${requestId}] Getting file metadata from Google Drive`, { fileId })
|
||||||
|
|
||||||
|
const metadataUrl = `https://www.googleapis.com/drive/v3/files/${fileId}?fields=${ALL_FILE_FIELDS}&supportsAllDrives=true`
|
||||||
|
const metadataUrlValidation = await validateUrlWithDNS(metadataUrl, 'metadataUrl')
|
||||||
|
if (!metadataUrlValidation.isValid) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{ success: false, error: metadataUrlValidation.error },
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const metadataResponse = await secureFetchWithPinnedIP(
|
||||||
|
metadataUrl,
|
||||||
|
metadataUrlValidation.resolvedIP!,
|
||||||
|
{
|
||||||
|
headers: { Authorization: authHeader },
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
if (!metadataResponse.ok) {
|
||||||
|
const errorDetails = (await metadataResponse
|
||||||
|
.json()
|
||||||
|
.catch(() => ({}))) as GoogleApiErrorResponse
|
||||||
|
logger.error(`[${requestId}] Failed to get file metadata`, {
|
||||||
|
status: metadataResponse.status,
|
||||||
|
error: errorDetails,
|
||||||
|
})
|
||||||
|
return NextResponse.json(
|
||||||
|
{ success: false, error: errorDetails.error?.message || 'Failed to get file metadata' },
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const metadata = (await metadataResponse.json()) as GoogleDriveFile
|
||||||
|
const fileMimeType = metadata.mimeType
|
||||||
|
|
||||||
|
let fileBuffer: Buffer
|
||||||
|
let finalMimeType = fileMimeType
|
||||||
|
|
||||||
|
if (GOOGLE_WORKSPACE_MIME_TYPES.includes(fileMimeType)) {
|
||||||
|
const exportFormat = exportMimeType || DEFAULT_EXPORT_FORMATS[fileMimeType] || 'text/plain'
|
||||||
|
finalMimeType = exportFormat
|
||||||
|
|
||||||
|
logger.info(`[${requestId}] Exporting Google Workspace file`, {
|
||||||
|
fileId,
|
||||||
|
mimeType: fileMimeType,
|
||||||
|
exportFormat,
|
||||||
|
})
|
||||||
|
|
||||||
|
const exportUrl = `https://www.googleapis.com/drive/v3/files/${fileId}/export?mimeType=${encodeURIComponent(exportFormat)}&supportsAllDrives=true`
|
||||||
|
const exportUrlValidation = await validateUrlWithDNS(exportUrl, 'exportUrl')
|
||||||
|
if (!exportUrlValidation.isValid) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{ success: false, error: exportUrlValidation.error },
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const exportResponse = await secureFetchWithPinnedIP(
|
||||||
|
exportUrl,
|
||||||
|
exportUrlValidation.resolvedIP!,
|
||||||
|
{ headers: { Authorization: authHeader } }
|
||||||
|
)
|
||||||
|
|
||||||
|
if (!exportResponse.ok) {
|
||||||
|
const exportError = (await exportResponse
|
||||||
|
.json()
|
||||||
|
.catch(() => ({}))) as GoogleApiErrorResponse
|
||||||
|
logger.error(`[${requestId}] Failed to export file`, {
|
||||||
|
status: exportResponse.status,
|
||||||
|
error: exportError,
|
||||||
|
})
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: exportError.error?.message || 'Failed to export Google Workspace file',
|
||||||
|
},
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const arrayBuffer = await exportResponse.arrayBuffer()
|
||||||
|
fileBuffer = Buffer.from(arrayBuffer)
|
||||||
|
} else {
|
||||||
|
logger.info(`[${requestId}] Downloading regular file`, { fileId, mimeType: fileMimeType })
|
||||||
|
|
||||||
|
const downloadUrl = `https://www.googleapis.com/drive/v3/files/${fileId}?alt=media&supportsAllDrives=true`
|
||||||
|
const downloadUrlValidation = await validateUrlWithDNS(downloadUrl, 'downloadUrl')
|
||||||
|
if (!downloadUrlValidation.isValid) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{ success: false, error: downloadUrlValidation.error },
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const downloadResponse = await secureFetchWithPinnedIP(
|
||||||
|
downloadUrl,
|
||||||
|
downloadUrlValidation.resolvedIP!,
|
||||||
|
{ headers: { Authorization: authHeader } }
|
||||||
|
)
|
||||||
|
|
||||||
|
if (!downloadResponse.ok) {
|
||||||
|
const downloadError = (await downloadResponse
|
||||||
|
.json()
|
||||||
|
.catch(() => ({}))) as GoogleApiErrorResponse
|
||||||
|
logger.error(`[${requestId}] Failed to download file`, {
|
||||||
|
status: downloadResponse.status,
|
||||||
|
error: downloadError,
|
||||||
|
})
|
||||||
|
return NextResponse.json(
|
||||||
|
{ success: false, error: downloadError.error?.message || 'Failed to download file' },
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const arrayBuffer = await downloadResponse.arrayBuffer()
|
||||||
|
fileBuffer = Buffer.from(arrayBuffer)
|
||||||
|
}
|
||||||
|
|
||||||
|
const canReadRevisions = metadata.capabilities?.canReadRevisions === true
|
||||||
|
if (includeRevisions && canReadRevisions) {
|
||||||
|
try {
|
||||||
|
const revisionsUrl = `https://www.googleapis.com/drive/v3/files/${fileId}/revisions?fields=revisions(${ALL_REVISION_FIELDS})&pageSize=100`
|
||||||
|
const revisionsUrlValidation = await validateUrlWithDNS(revisionsUrl, 'revisionsUrl')
|
||||||
|
if (revisionsUrlValidation.isValid) {
|
||||||
|
const revisionsResponse = await secureFetchWithPinnedIP(
|
||||||
|
revisionsUrl,
|
||||||
|
revisionsUrlValidation.resolvedIP!,
|
||||||
|
{ headers: { Authorization: authHeader } }
|
||||||
|
)
|
||||||
|
|
||||||
|
if (revisionsResponse.ok) {
|
||||||
|
const revisionsData = (await revisionsResponse.json()) as GoogleDriveRevisionsResponse
|
||||||
|
metadata.revisions = revisionsData.revisions
|
||||||
|
logger.info(`[${requestId}] Fetched file revisions`, {
|
||||||
|
fileId,
|
||||||
|
revisionCount: metadata.revisions?.length || 0,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
logger.warn(`[${requestId}] Error fetching revisions, continuing without them`, { error })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const resolvedName = fileName || metadata.name || 'download'
|
||||||
|
|
||||||
|
logger.info(`[${requestId}] File downloaded successfully`, {
|
||||||
|
fileId,
|
||||||
|
name: resolvedName,
|
||||||
|
size: fileBuffer.length,
|
||||||
|
mimeType: finalMimeType,
|
||||||
|
})
|
||||||
|
|
||||||
|
const base64Data = fileBuffer.toString('base64')
|
||||||
|
|
||||||
|
return NextResponse.json({
|
||||||
|
success: true,
|
||||||
|
output: {
|
||||||
|
file: {
|
||||||
|
name: resolvedName,
|
||||||
|
mimeType: finalMimeType,
|
||||||
|
data: base64Data,
|
||||||
|
size: fileBuffer.length,
|
||||||
|
},
|
||||||
|
metadata,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(`[${requestId}] Error downloading Google Drive file:`, error)
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: error instanceof Error ? error.message : 'Unknown error occurred',
|
||||||
|
},
|
||||||
|
{ status: 500 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -3,6 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
|
|||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
|
import { RawFileInputSchema } from '@/lib/uploads/utils/file-schemas'
|
||||||
import { processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
|
import { processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
|
||||||
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
||||||
import {
|
import {
|
||||||
@@ -20,7 +21,7 @@ const GOOGLE_DRIVE_API_BASE = 'https://www.googleapis.com/upload/drive/v3/files'
|
|||||||
const GoogleDriveUploadSchema = z.object({
|
const GoogleDriveUploadSchema = z.object({
|
||||||
accessToken: z.string().min(1, 'Access token is required'),
|
accessToken: z.string().min(1, 'Access token is required'),
|
||||||
fileName: z.string().min(1, 'File name is required'),
|
fileName: z.string().min(1, 'File name is required'),
|
||||||
file: z.any().optional().nullable(),
|
file: RawFileInputSchema.optional().nullable(),
|
||||||
mimeType: z.string().optional().nullable(),
|
mimeType: z.string().optional().nullable(),
|
||||||
folderId: z.string().optional().nullable(),
|
folderId: z.string().optional().nullable(),
|
||||||
})
|
})
|
||||||
|
|||||||
@@ -0,0 +1,131 @@
|
|||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
|
import { z } from 'zod'
|
||||||
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
|
import {
|
||||||
|
secureFetchWithPinnedIP,
|
||||||
|
validateUrlWithDNS,
|
||||||
|
} from '@/lib/core/security/input-validation.server'
|
||||||
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
|
import { enhanceGoogleVaultError } from '@/tools/google_vault/utils'
|
||||||
|
|
||||||
|
export const dynamic = 'force-dynamic'
|
||||||
|
|
||||||
|
const logger = createLogger('GoogleVaultDownloadExportFileAPI')
|
||||||
|
|
||||||
|
const GoogleVaultDownloadExportFileSchema = z.object({
|
||||||
|
accessToken: z.string().min(1, 'Access token is required'),
|
||||||
|
bucketName: z.string().min(1, 'Bucket name is required'),
|
||||||
|
objectName: z.string().min(1, 'Object name is required'),
|
||||||
|
fileName: z.string().optional().nullable(),
|
||||||
|
})
|
||||||
|
|
||||||
|
export async function POST(request: NextRequest) {
|
||||||
|
const requestId = generateRequestId()
|
||||||
|
|
||||||
|
try {
|
||||||
|
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
|
||||||
|
|
||||||
|
if (!authResult.success) {
|
||||||
|
logger.warn(`[${requestId}] Unauthorized Google Vault download attempt: ${authResult.error}`)
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: authResult.error || 'Authentication required',
|
||||||
|
},
|
||||||
|
{ status: 401 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const body = await request.json()
|
||||||
|
const validatedData = GoogleVaultDownloadExportFileSchema.parse(body)
|
||||||
|
|
||||||
|
const { accessToken, bucketName, objectName, fileName } = validatedData
|
||||||
|
|
||||||
|
const bucket = encodeURIComponent(bucketName)
|
||||||
|
const object = encodeURIComponent(objectName)
|
||||||
|
const downloadUrl = `https://storage.googleapis.com/storage/v1/b/${bucket}/o/${object}?alt=media`
|
||||||
|
|
||||||
|
logger.info(`[${requestId}] Downloading file from Google Vault`, { bucketName, objectName })
|
||||||
|
|
||||||
|
const urlValidation = await validateUrlWithDNS(downloadUrl, 'downloadUrl')
|
||||||
|
if (!urlValidation.isValid) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{ success: false, error: enhanceGoogleVaultError(urlValidation.error || 'Invalid URL') },
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const downloadResponse = await secureFetchWithPinnedIP(downloadUrl, urlValidation.resolvedIP!, {
|
||||||
|
method: 'GET',
|
||||||
|
headers: {
|
||||||
|
Authorization: `Bearer ${accessToken}`,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
if (!downloadResponse.ok) {
|
||||||
|
const errorText = await downloadResponse.text().catch(() => '')
|
||||||
|
const errorMessage = `Failed to download file: ${errorText || downloadResponse.statusText}`
|
||||||
|
logger.error(`[${requestId}] Failed to download Vault export file`, {
|
||||||
|
status: downloadResponse.status,
|
||||||
|
error: errorText,
|
||||||
|
})
|
||||||
|
return NextResponse.json(
|
||||||
|
{ success: false, error: enhanceGoogleVaultError(errorMessage) },
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const contentType = downloadResponse.headers.get('content-type') || 'application/octet-stream'
|
||||||
|
const disposition = downloadResponse.headers.get('content-disposition') || ''
|
||||||
|
const match = disposition.match(/filename\*=UTF-8''([^;]+)|filename="([^"]+)"/)
|
||||||
|
|
||||||
|
let resolvedName = fileName
|
||||||
|
if (!resolvedName) {
|
||||||
|
if (match?.[1]) {
|
||||||
|
try {
|
||||||
|
resolvedName = decodeURIComponent(match[1])
|
||||||
|
} catch {
|
||||||
|
resolvedName = match[1]
|
||||||
|
}
|
||||||
|
} else if (match?.[2]) {
|
||||||
|
resolvedName = match[2]
|
||||||
|
} else if (objectName) {
|
||||||
|
const parts = objectName.split('/')
|
||||||
|
resolvedName = parts[parts.length - 1] || 'vault-export.bin'
|
||||||
|
} else {
|
||||||
|
resolvedName = 'vault-export.bin'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const arrayBuffer = await downloadResponse.arrayBuffer()
|
||||||
|
const buffer = Buffer.from(arrayBuffer)
|
||||||
|
|
||||||
|
logger.info(`[${requestId}] Vault export file downloaded successfully`, {
|
||||||
|
name: resolvedName,
|
||||||
|
size: buffer.length,
|
||||||
|
mimeType: contentType,
|
||||||
|
})
|
||||||
|
|
||||||
|
return NextResponse.json({
|
||||||
|
success: true,
|
||||||
|
output: {
|
||||||
|
file: {
|
||||||
|
name: resolvedName,
|
||||||
|
mimeType: contentType,
|
||||||
|
data: buffer.toString('base64'),
|
||||||
|
size: buffer.length,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
})
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(`[${requestId}] Error downloading Google Vault export file:`, error)
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: error instanceof Error ? error.message : 'Unknown error occurred',
|
||||||
|
},
|
||||||
|
{ status: 500 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,7 +1,10 @@
|
|||||||
import { createLogger } from '@sim/logger'
|
import { createLogger } from '@sim/logger'
|
||||||
import { type NextRequest, NextResponse } from 'next/server'
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
import { validateImageUrl } from '@/lib/core/security/input-validation'
|
import {
|
||||||
|
secureFetchWithPinnedIP,
|
||||||
|
validateUrlWithDNS,
|
||||||
|
} from '@/lib/core/security/input-validation.server'
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
|
|
||||||
const logger = createLogger('ImageProxyAPI')
|
const logger = createLogger('ImageProxyAPI')
|
||||||
@@ -26,7 +29,7 @@ export async function GET(request: NextRequest) {
|
|||||||
return new NextResponse('Missing URL parameter', { status: 400 })
|
return new NextResponse('Missing URL parameter', { status: 400 })
|
||||||
}
|
}
|
||||||
|
|
||||||
const urlValidation = validateImageUrl(imageUrl)
|
const urlValidation = await validateUrlWithDNS(imageUrl, 'imageUrl')
|
||||||
if (!urlValidation.isValid) {
|
if (!urlValidation.isValid) {
|
||||||
logger.warn(`[${requestId}] Blocked image proxy request`, {
|
logger.warn(`[${requestId}] Blocked image proxy request`, {
|
||||||
url: imageUrl.substring(0, 100),
|
url: imageUrl.substring(0, 100),
|
||||||
@@ -38,7 +41,8 @@ export async function GET(request: NextRequest) {
|
|||||||
logger.info(`[${requestId}] Proxying image request for: ${imageUrl}`)
|
logger.info(`[${requestId}] Proxying image request for: ${imageUrl}`)
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const imageResponse = await fetch(imageUrl, {
|
const imageResponse = await secureFetchWithPinnedIP(imageUrl, urlValidation.resolvedIP!, {
|
||||||
|
method: 'GET',
|
||||||
headers: {
|
headers: {
|
||||||
'User-Agent':
|
'User-Agent':
|
||||||
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.36',
|
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.36',
|
||||||
@@ -64,14 +68,14 @@ export async function GET(request: NextRequest) {
|
|||||||
|
|
||||||
const contentType = imageResponse.headers.get('content-type') || 'image/jpeg'
|
const contentType = imageResponse.headers.get('content-type') || 'image/jpeg'
|
||||||
|
|
||||||
const imageBlob = await imageResponse.blob()
|
const imageArrayBuffer = await imageResponse.arrayBuffer()
|
||||||
|
|
||||||
if (imageBlob.size === 0) {
|
if (imageArrayBuffer.byteLength === 0) {
|
||||||
logger.error(`[${requestId}] Empty image blob received`)
|
logger.error(`[${requestId}] Empty image received`)
|
||||||
return new NextResponse('Empty image received', { status: 404 })
|
return new NextResponse('Empty image received', { status: 404 })
|
||||||
}
|
}
|
||||||
|
|
||||||
return new NextResponse(imageBlob, {
|
return new NextResponse(imageArrayBuffer, {
|
||||||
headers: {
|
headers: {
|
||||||
'Content-Type': contentType,
|
'Content-Type': contentType,
|
||||||
'Access-Control-Allow-Origin': '*',
|
'Access-Control-Allow-Origin': '*',
|
||||||
|
|||||||
121
apps/sim/app/api/tools/jira/add-attachment/route.ts
Normal file
121
apps/sim/app/api/tools/jira/add-attachment/route.ts
Normal file
@@ -0,0 +1,121 @@
|
|||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
|
import { z } from 'zod'
|
||||||
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
|
import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
|
||||||
|
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
|
||||||
|
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
||||||
|
import { getJiraCloudId } from '@/tools/jira/utils'
|
||||||
|
|
||||||
|
const logger = createLogger('JiraAddAttachmentAPI')
|
||||||
|
|
||||||
|
export const dynamic = 'force-dynamic'
|
||||||
|
|
||||||
|
const JiraAddAttachmentSchema = z.object({
|
||||||
|
accessToken: z.string().min(1, 'Access token is required'),
|
||||||
|
domain: z.string().min(1, 'Domain is required'),
|
||||||
|
issueKey: z.string().min(1, 'Issue key is required'),
|
||||||
|
files: RawFileInputArraySchema,
|
||||||
|
cloudId: z.string().optional().nullable(),
|
||||||
|
})
|
||||||
|
|
||||||
|
export async function POST(request: NextRequest) {
|
||||||
|
const requestId = `jira-attach-${Date.now()}`
|
||||||
|
|
||||||
|
try {
|
||||||
|
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
|
||||||
|
if (!authResult.success) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{ success: false, error: authResult.error || 'Unauthorized' },
|
||||||
|
{ status: 401 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const body = await request.json()
|
||||||
|
const validatedData = JiraAddAttachmentSchema.parse(body)
|
||||||
|
|
||||||
|
const userFiles = processFilesToUserFiles(validatedData.files, requestId, logger)
|
||||||
|
if (userFiles.length === 0) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{ success: false, error: 'No valid files provided for upload' },
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const cloudId =
|
||||||
|
validatedData.cloudId ||
|
||||||
|
(await getJiraCloudId(validatedData.domain, validatedData.accessToken))
|
||||||
|
|
||||||
|
const formData = new FormData()
|
||||||
|
const filesOutput: Array<{ name: string; mimeType: string; data: string; size: number }> = []
|
||||||
|
|
||||||
|
for (const file of userFiles) {
|
||||||
|
const buffer = await downloadFileFromStorage(file, requestId, logger)
|
||||||
|
filesOutput.push({
|
||||||
|
name: file.name,
|
||||||
|
mimeType: file.type || 'application/octet-stream',
|
||||||
|
data: buffer.toString('base64'),
|
||||||
|
size: buffer.length,
|
||||||
|
})
|
||||||
|
const blob = new Blob([new Uint8Array(buffer)], {
|
||||||
|
type: file.type || 'application/octet-stream',
|
||||||
|
})
|
||||||
|
formData.append('file', blob, file.name)
|
||||||
|
}
|
||||||
|
|
||||||
|
const url = `https://api.atlassian.com/ex/jira/${cloudId}/rest/api/3/issue/${validatedData.issueKey}/attachments`
|
||||||
|
|
||||||
|
const response = await fetch(url, {
|
||||||
|
method: 'POST',
|
||||||
|
headers: {
|
||||||
|
Authorization: `Bearer ${validatedData.accessToken}`,
|
||||||
|
'X-Atlassian-Token': 'no-check',
|
||||||
|
},
|
||||||
|
body: formData,
|
||||||
|
})
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
const errorText = await response.text()
|
||||||
|
logger.error(`[${requestId}] Jira attachment upload failed`, {
|
||||||
|
status: response.status,
|
||||||
|
statusText: response.statusText,
|
||||||
|
error: errorText,
|
||||||
|
})
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: `Failed to upload attachments: ${response.statusText}`,
|
||||||
|
},
|
||||||
|
{ status: response.status }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const attachments = await response.json()
|
||||||
|
const attachmentIds = Array.isArray(attachments)
|
||||||
|
? attachments.map((attachment) => attachment.id).filter(Boolean)
|
||||||
|
: []
|
||||||
|
|
||||||
|
return NextResponse.json({
|
||||||
|
success: true,
|
||||||
|
output: {
|
||||||
|
ts: new Date().toISOString(),
|
||||||
|
issueKey: validatedData.issueKey,
|
||||||
|
attachmentIds,
|
||||||
|
files: filesOutput,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
} catch (error) {
|
||||||
|
if (error instanceof z.ZodError) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{ success: false, error: 'Invalid request data', details: error.errors },
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.error(`[${requestId}] Jira attachment upload error`, error)
|
||||||
|
return NextResponse.json(
|
||||||
|
{ success: false, error: error instanceof Error ? error.message : 'Internal server error' },
|
||||||
|
{ status: 500 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -2,9 +2,11 @@ import { createLogger } from '@sim/logger'
|
|||||||
import { type NextRequest, NextResponse } from 'next/server'
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
|
import { secureFetchWithValidation } from '@/lib/core/security/input-validation.server'
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
|
import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
|
||||||
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
import { uploadFilesForTeamsMessage } from '@/tools/microsoft_teams/server-utils'
|
||||||
|
import type { GraphApiErrorResponse, GraphChatMessage } from '@/tools/microsoft_teams/types'
|
||||||
import { resolveMentionsForChannel, type TeamsMention } from '@/tools/microsoft_teams/utils'
|
import { resolveMentionsForChannel, type TeamsMention } from '@/tools/microsoft_teams/utils'
|
||||||
|
|
||||||
export const dynamic = 'force-dynamic'
|
export const dynamic = 'force-dynamic'
|
||||||
@@ -16,7 +18,7 @@ const TeamsWriteChannelSchema = z.object({
|
|||||||
teamId: z.string().min(1, 'Team ID is required'),
|
teamId: z.string().min(1, 'Team ID is required'),
|
||||||
channelId: z.string().min(1, 'Channel ID is required'),
|
channelId: z.string().min(1, 'Channel ID is required'),
|
||||||
content: z.string().min(1, 'Message content is required'),
|
content: z.string().min(1, 'Message content is required'),
|
||||||
files: z.array(z.any()).optional().nullable(),
|
files: RawFileInputArraySchema.optional().nullable(),
|
||||||
})
|
})
|
||||||
|
|
||||||
export async function POST(request: NextRequest) {
|
export async function POST(request: NextRequest) {
|
||||||
@@ -53,93 +55,12 @@ export async function POST(request: NextRequest) {
|
|||||||
fileCount: validatedData.files?.length || 0,
|
fileCount: validatedData.files?.length || 0,
|
||||||
})
|
})
|
||||||
|
|
||||||
const attachments: any[] = []
|
const { attachments, filesOutput } = await uploadFilesForTeamsMessage({
|
||||||
if (validatedData.files && validatedData.files.length > 0) {
|
rawFiles: validatedData.files || [],
|
||||||
const rawFiles = validatedData.files
|
accessToken: validatedData.accessToken,
|
||||||
logger.info(`[${requestId}] Processing ${rawFiles.length} file(s) for upload to OneDrive`)
|
requestId,
|
||||||
|
logger,
|
||||||
const userFiles = processFilesToUserFiles(rawFiles, requestId, logger)
|
})
|
||||||
|
|
||||||
for (const file of userFiles) {
|
|
||||||
try {
|
|
||||||
logger.info(`[${requestId}] Uploading file to Teams: ${file.name} (${file.size} bytes)`)
|
|
||||||
|
|
||||||
const buffer = await downloadFileFromStorage(file, requestId, logger)
|
|
||||||
|
|
||||||
const uploadUrl =
|
|
||||||
'https://graph.microsoft.com/v1.0/me/drive/root:/TeamsAttachments/' +
|
|
||||||
encodeURIComponent(file.name) +
|
|
||||||
':/content'
|
|
||||||
|
|
||||||
logger.info(`[${requestId}] Uploading to Teams: ${uploadUrl}`)
|
|
||||||
|
|
||||||
const uploadResponse = await fetch(uploadUrl, {
|
|
||||||
method: 'PUT',
|
|
||||||
headers: {
|
|
||||||
Authorization: `Bearer ${validatedData.accessToken}`,
|
|
||||||
'Content-Type': file.type || 'application/octet-stream',
|
|
||||||
},
|
|
||||||
body: new Uint8Array(buffer),
|
|
||||||
})
|
|
||||||
|
|
||||||
if (!uploadResponse.ok) {
|
|
||||||
const errorData = await uploadResponse.json().catch(() => ({}))
|
|
||||||
logger.error(`[${requestId}] Teams upload failed:`, errorData)
|
|
||||||
throw new Error(
|
|
||||||
`Failed to upload file to Teams: ${errorData.error?.message || 'Unknown error'}`
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
const uploadedFile = await uploadResponse.json()
|
|
||||||
logger.info(`[${requestId}] File uploaded to Teams successfully`, {
|
|
||||||
id: uploadedFile.id,
|
|
||||||
webUrl: uploadedFile.webUrl,
|
|
||||||
})
|
|
||||||
|
|
||||||
const fileDetailsUrl = `https://graph.microsoft.com/v1.0/me/drive/items/${uploadedFile.id}?$select=id,name,webDavUrl,eTag,size`
|
|
||||||
|
|
||||||
const fileDetailsResponse = await fetch(fileDetailsUrl, {
|
|
||||||
headers: {
|
|
||||||
Authorization: `Bearer ${validatedData.accessToken}`,
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
if (!fileDetailsResponse.ok) {
|
|
||||||
const errorData = await fileDetailsResponse.json().catch(() => ({}))
|
|
||||||
logger.error(`[${requestId}] Failed to get file details:`, errorData)
|
|
||||||
throw new Error(
|
|
||||||
`Failed to get file details: ${errorData.error?.message || 'Unknown error'}`
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
const fileDetails = await fileDetailsResponse.json()
|
|
||||||
logger.info(`[${requestId}] Got file details`, {
|
|
||||||
webDavUrl: fileDetails.webDavUrl,
|
|
||||||
eTag: fileDetails.eTag,
|
|
||||||
})
|
|
||||||
|
|
||||||
const attachmentId = fileDetails.eTag?.match(/\{([a-f0-9-]+)\}/i)?.[1] || fileDetails.id
|
|
||||||
|
|
||||||
attachments.push({
|
|
||||||
id: attachmentId,
|
|
||||||
contentType: 'reference',
|
|
||||||
contentUrl: fileDetails.webDavUrl,
|
|
||||||
name: file.name,
|
|
||||||
})
|
|
||||||
|
|
||||||
logger.info(`[${requestId}] Created attachment reference for ${file.name}`)
|
|
||||||
} catch (error) {
|
|
||||||
logger.error(`[${requestId}] Failed to process file ${file.name}:`, error)
|
|
||||||
throw new Error(
|
|
||||||
`Failed to process file "${file.name}": ${error instanceof Error ? error.message : 'Unknown error'}`
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
logger.info(
|
|
||||||
`[${requestId}] All ${attachments.length} file(s) uploaded and attachment references created`
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
let messageContent = validatedData.content
|
let messageContent = validatedData.content
|
||||||
let contentType: 'text' | 'html' = 'text'
|
let contentType: 'text' | 'html' = 'text'
|
||||||
@@ -197,17 +118,21 @@ export async function POST(request: NextRequest) {
|
|||||||
|
|
||||||
const teamsUrl = `https://graph.microsoft.com/v1.0/teams/${encodeURIComponent(validatedData.teamId)}/channels/${encodeURIComponent(validatedData.channelId)}/messages`
|
const teamsUrl = `https://graph.microsoft.com/v1.0/teams/${encodeURIComponent(validatedData.teamId)}/channels/${encodeURIComponent(validatedData.channelId)}/messages`
|
||||||
|
|
||||||
const teamsResponse = await fetch(teamsUrl, {
|
const teamsResponse = await secureFetchWithValidation(
|
||||||
method: 'POST',
|
teamsUrl,
|
||||||
headers: {
|
{
|
||||||
'Content-Type': 'application/json',
|
method: 'POST',
|
||||||
Authorization: `Bearer ${validatedData.accessToken}`,
|
headers: {
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
Authorization: `Bearer ${validatedData.accessToken}`,
|
||||||
|
},
|
||||||
|
body: JSON.stringify(messageBody),
|
||||||
},
|
},
|
||||||
body: JSON.stringify(messageBody),
|
'teamsUrl'
|
||||||
})
|
)
|
||||||
|
|
||||||
if (!teamsResponse.ok) {
|
if (!teamsResponse.ok) {
|
||||||
const errorData = await teamsResponse.json().catch(() => ({}))
|
const errorData = (await teamsResponse.json().catch(() => ({}))) as GraphApiErrorResponse
|
||||||
logger.error(`[${requestId}] Microsoft Teams API error:`, errorData)
|
logger.error(`[${requestId}] Microsoft Teams API error:`, errorData)
|
||||||
return NextResponse.json(
|
return NextResponse.json(
|
||||||
{
|
{
|
||||||
@@ -218,7 +143,7 @@ export async function POST(request: NextRequest) {
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
const responseData = await teamsResponse.json()
|
const responseData = (await teamsResponse.json()) as GraphChatMessage
|
||||||
logger.info(`[${requestId}] Teams channel message sent successfully`, {
|
logger.info(`[${requestId}] Teams channel message sent successfully`, {
|
||||||
messageId: responseData.id,
|
messageId: responseData.id,
|
||||||
attachmentCount: attachments.length,
|
attachmentCount: attachments.length,
|
||||||
@@ -237,6 +162,7 @@ export async function POST(request: NextRequest) {
|
|||||||
url: responseData.webUrl || '',
|
url: responseData.webUrl || '',
|
||||||
attachmentCount: attachments.length,
|
attachmentCount: attachments.length,
|
||||||
},
|
},
|
||||||
|
files: filesOutput,
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
|
|||||||
@@ -2,9 +2,11 @@ import { createLogger } from '@sim/logger'
|
|||||||
import { type NextRequest, NextResponse } from 'next/server'
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
|
import { secureFetchWithValidation } from '@/lib/core/security/input-validation.server'
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
|
import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
|
||||||
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
import { uploadFilesForTeamsMessage } from '@/tools/microsoft_teams/server-utils'
|
||||||
|
import type { GraphApiErrorResponse, GraphChatMessage } from '@/tools/microsoft_teams/types'
|
||||||
import { resolveMentionsForChat, type TeamsMention } from '@/tools/microsoft_teams/utils'
|
import { resolveMentionsForChat, type TeamsMention } from '@/tools/microsoft_teams/utils'
|
||||||
|
|
||||||
export const dynamic = 'force-dynamic'
|
export const dynamic = 'force-dynamic'
|
||||||
@@ -15,7 +17,7 @@ const TeamsWriteChatSchema = z.object({
|
|||||||
accessToken: z.string().min(1, 'Access token is required'),
|
accessToken: z.string().min(1, 'Access token is required'),
|
||||||
chatId: z.string().min(1, 'Chat ID is required'),
|
chatId: z.string().min(1, 'Chat ID is required'),
|
||||||
content: z.string().min(1, 'Message content is required'),
|
content: z.string().min(1, 'Message content is required'),
|
||||||
files: z.array(z.any()).optional().nullable(),
|
files: RawFileInputArraySchema.optional().nullable(),
|
||||||
})
|
})
|
||||||
|
|
||||||
export async function POST(request: NextRequest) {
|
export async function POST(request: NextRequest) {
|
||||||
@@ -51,93 +53,12 @@ export async function POST(request: NextRequest) {
|
|||||||
fileCount: validatedData.files?.length || 0,
|
fileCount: validatedData.files?.length || 0,
|
||||||
})
|
})
|
||||||
|
|
||||||
const attachments: any[] = []
|
const { attachments, filesOutput } = await uploadFilesForTeamsMessage({
|
||||||
if (validatedData.files && validatedData.files.length > 0) {
|
rawFiles: validatedData.files || [],
|
||||||
const rawFiles = validatedData.files
|
accessToken: validatedData.accessToken,
|
||||||
logger.info(`[${requestId}] Processing ${rawFiles.length} file(s) for upload to Teams`)
|
requestId,
|
||||||
|
logger,
|
||||||
const userFiles = processFilesToUserFiles(rawFiles, requestId, logger)
|
})
|
||||||
|
|
||||||
for (const file of userFiles) {
|
|
||||||
try {
|
|
||||||
logger.info(`[${requestId}] Uploading file to Teams: ${file.name} (${file.size} bytes)`)
|
|
||||||
|
|
||||||
const buffer = await downloadFileFromStorage(file, requestId, logger)
|
|
||||||
|
|
||||||
const uploadUrl =
|
|
||||||
'https://graph.microsoft.com/v1.0/me/drive/root:/TeamsAttachments/' +
|
|
||||||
encodeURIComponent(file.name) +
|
|
||||||
':/content'
|
|
||||||
|
|
||||||
logger.info(`[${requestId}] Uploading to Teams: ${uploadUrl}`)
|
|
||||||
|
|
||||||
const uploadResponse = await fetch(uploadUrl, {
|
|
||||||
method: 'PUT',
|
|
||||||
headers: {
|
|
||||||
Authorization: `Bearer ${validatedData.accessToken}`,
|
|
||||||
'Content-Type': file.type || 'application/octet-stream',
|
|
||||||
},
|
|
||||||
body: new Uint8Array(buffer),
|
|
||||||
})
|
|
||||||
|
|
||||||
if (!uploadResponse.ok) {
|
|
||||||
const errorData = await uploadResponse.json().catch(() => ({}))
|
|
||||||
logger.error(`[${requestId}] Teams upload failed:`, errorData)
|
|
||||||
throw new Error(
|
|
||||||
`Failed to upload file to Teams: ${errorData.error?.message || 'Unknown error'}`
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
const uploadedFile = await uploadResponse.json()
|
|
||||||
logger.info(`[${requestId}] File uploaded to Teams successfully`, {
|
|
||||||
id: uploadedFile.id,
|
|
||||||
webUrl: uploadedFile.webUrl,
|
|
||||||
})
|
|
||||||
|
|
||||||
const fileDetailsUrl = `https://graph.microsoft.com/v1.0/me/drive/items/${uploadedFile.id}?$select=id,name,webDavUrl,eTag,size`
|
|
||||||
|
|
||||||
const fileDetailsResponse = await fetch(fileDetailsUrl, {
|
|
||||||
headers: {
|
|
||||||
Authorization: `Bearer ${validatedData.accessToken}`,
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
if (!fileDetailsResponse.ok) {
|
|
||||||
const errorData = await fileDetailsResponse.json().catch(() => ({}))
|
|
||||||
logger.error(`[${requestId}] Failed to get file details:`, errorData)
|
|
||||||
throw new Error(
|
|
||||||
`Failed to get file details: ${errorData.error?.message || 'Unknown error'}`
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
const fileDetails = await fileDetailsResponse.json()
|
|
||||||
logger.info(`[${requestId}] Got file details`, {
|
|
||||||
webDavUrl: fileDetails.webDavUrl,
|
|
||||||
eTag: fileDetails.eTag,
|
|
||||||
})
|
|
||||||
|
|
||||||
const attachmentId = fileDetails.eTag?.match(/\{([a-f0-9-]+)\}/i)?.[1] || fileDetails.id
|
|
||||||
|
|
||||||
attachments.push({
|
|
||||||
id: attachmentId,
|
|
||||||
contentType: 'reference',
|
|
||||||
contentUrl: fileDetails.webDavUrl,
|
|
||||||
name: file.name,
|
|
||||||
})
|
|
||||||
|
|
||||||
logger.info(`[${requestId}] Created attachment reference for ${file.name}`)
|
|
||||||
} catch (error) {
|
|
||||||
logger.error(`[${requestId}] Failed to process file ${file.name}:`, error)
|
|
||||||
throw new Error(
|
|
||||||
`Failed to process file "${file.name}": ${error instanceof Error ? error.message : 'Unknown error'}`
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
logger.info(
|
|
||||||
`[${requestId}] All ${attachments.length} file(s) uploaded and attachment references created`
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
let messageContent = validatedData.content
|
let messageContent = validatedData.content
|
||||||
let contentType: 'text' | 'html' = 'text'
|
let contentType: 'text' | 'html' = 'text'
|
||||||
@@ -194,17 +115,21 @@ export async function POST(request: NextRequest) {
|
|||||||
|
|
||||||
const teamsUrl = `https://graph.microsoft.com/v1.0/chats/${encodeURIComponent(validatedData.chatId)}/messages`
|
const teamsUrl = `https://graph.microsoft.com/v1.0/chats/${encodeURIComponent(validatedData.chatId)}/messages`
|
||||||
|
|
||||||
const teamsResponse = await fetch(teamsUrl, {
|
const teamsResponse = await secureFetchWithValidation(
|
||||||
method: 'POST',
|
teamsUrl,
|
||||||
headers: {
|
{
|
||||||
'Content-Type': 'application/json',
|
method: 'POST',
|
||||||
Authorization: `Bearer ${validatedData.accessToken}`,
|
headers: {
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
Authorization: `Bearer ${validatedData.accessToken}`,
|
||||||
|
},
|
||||||
|
body: JSON.stringify(messageBody),
|
||||||
},
|
},
|
||||||
body: JSON.stringify(messageBody),
|
'teamsUrl'
|
||||||
})
|
)
|
||||||
|
|
||||||
if (!teamsResponse.ok) {
|
if (!teamsResponse.ok) {
|
||||||
const errorData = await teamsResponse.json().catch(() => ({}))
|
const errorData = (await teamsResponse.json().catch(() => ({}))) as GraphApiErrorResponse
|
||||||
logger.error(`[${requestId}] Microsoft Teams API error:`, errorData)
|
logger.error(`[${requestId}] Microsoft Teams API error:`, errorData)
|
||||||
return NextResponse.json(
|
return NextResponse.json(
|
||||||
{
|
{
|
||||||
@@ -215,7 +140,7 @@ export async function POST(request: NextRequest) {
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
const responseData = await teamsResponse.json()
|
const responseData = (await teamsResponse.json()) as GraphChatMessage
|
||||||
logger.info(`[${requestId}] Teams message sent successfully`, {
|
logger.info(`[${requestId}] Teams message sent successfully`, {
|
||||||
messageId: responseData.id,
|
messageId: responseData.id,
|
||||||
attachmentCount: attachments.length,
|
attachmentCount: attachments.length,
|
||||||
@@ -233,6 +158,7 @@ export async function POST(request: NextRequest) {
|
|||||||
url: responseData.webUrl || '',
|
url: responseData.webUrl || '',
|
||||||
attachmentCount: attachments.length,
|
attachmentCount: attachments.length,
|
||||||
},
|
},
|
||||||
|
files: filesOutput,
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
|
|||||||
@@ -2,15 +2,17 @@ import { createLogger } from '@sim/logger'
|
|||||||
import { type NextRequest, NextResponse } from 'next/server'
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
|
||||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
|
||||||
import { StorageService } from '@/lib/uploads'
|
|
||||||
import {
|
import {
|
||||||
extractStorageKey,
|
secureFetchWithPinnedIP,
|
||||||
inferContextFromKey,
|
validateUrlWithDNS,
|
||||||
isInternalFileUrl,
|
} from '@/lib/core/security/input-validation.server'
|
||||||
} from '@/lib/uploads/utils/file-utils'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
import { verifyFileAccess } from '@/app/api/files/authorization'
|
import { FileInputSchema } from '@/lib/uploads/utils/file-schemas'
|
||||||
|
import { isInternalFileUrl, processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
|
||||||
|
import {
|
||||||
|
downloadFileFromStorage,
|
||||||
|
resolveInternalFileUrl,
|
||||||
|
} from '@/lib/uploads/utils/file-utils.server'
|
||||||
|
|
||||||
export const dynamic = 'force-dynamic'
|
export const dynamic = 'force-dynamic'
|
||||||
|
|
||||||
@@ -18,7 +20,9 @@ const logger = createLogger('MistralParseAPI')
|
|||||||
|
|
||||||
const MistralParseSchema = z.object({
|
const MistralParseSchema = z.object({
|
||||||
apiKey: z.string().min(1, 'API key is required'),
|
apiKey: z.string().min(1, 'API key is required'),
|
||||||
filePath: z.string().min(1, 'File path is required'),
|
filePath: z.string().min(1, 'File path is required').optional(),
|
||||||
|
fileData: FileInputSchema.optional(),
|
||||||
|
file: FileInputSchema.optional(),
|
||||||
resultType: z.string().optional(),
|
resultType: z.string().optional(),
|
||||||
pages: z.array(z.number()).optional(),
|
pages: z.array(z.number()).optional(),
|
||||||
includeImageBase64: z.boolean().optional(),
|
includeImageBase64: z.boolean().optional(),
|
||||||
@@ -49,66 +53,140 @@ export async function POST(request: NextRequest) {
|
|||||||
const body = await request.json()
|
const body = await request.json()
|
||||||
const validatedData = MistralParseSchema.parse(body)
|
const validatedData = MistralParseSchema.parse(body)
|
||||||
|
|
||||||
|
const fileData = validatedData.file || validatedData.fileData
|
||||||
|
const filePath = typeof fileData === 'string' ? fileData : validatedData.filePath
|
||||||
|
|
||||||
|
if (!fileData && (!filePath || filePath.trim() === '')) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: 'File input is required',
|
||||||
|
},
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
logger.info(`[${requestId}] Mistral parse request`, {
|
logger.info(`[${requestId}] Mistral parse request`, {
|
||||||
filePath: validatedData.filePath,
|
hasFileData: Boolean(fileData),
|
||||||
isWorkspaceFile: isInternalFileUrl(validatedData.filePath),
|
filePath,
|
||||||
|
isWorkspaceFile: filePath ? isInternalFileUrl(filePath) : false,
|
||||||
userId,
|
userId,
|
||||||
})
|
})
|
||||||
|
|
||||||
let fileUrl = validatedData.filePath
|
const mistralBody: any = {
|
||||||
|
model: 'mistral-ocr-latest',
|
||||||
|
}
|
||||||
|
|
||||||
if (isInternalFileUrl(validatedData.filePath)) {
|
if (fileData && typeof fileData === 'object') {
|
||||||
|
const rawFile = fileData
|
||||||
|
let userFile
|
||||||
try {
|
try {
|
||||||
const storageKey = extractStorageKey(validatedData.filePath)
|
userFile = processSingleFileToUserFile(rawFile, requestId, logger)
|
||||||
|
|
||||||
const context = inferContextFromKey(storageKey)
|
|
||||||
|
|
||||||
const hasAccess = await verifyFileAccess(
|
|
||||||
storageKey,
|
|
||||||
userId,
|
|
||||||
undefined, // customConfig
|
|
||||||
context, // context
|
|
||||||
false // isLocal
|
|
||||||
)
|
|
||||||
|
|
||||||
if (!hasAccess) {
|
|
||||||
logger.warn(`[${requestId}] Unauthorized presigned URL generation attempt`, {
|
|
||||||
userId,
|
|
||||||
key: storageKey,
|
|
||||||
context,
|
|
||||||
})
|
|
||||||
return NextResponse.json(
|
|
||||||
{
|
|
||||||
success: false,
|
|
||||||
error: 'File not found',
|
|
||||||
},
|
|
||||||
{ status: 404 }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
fileUrl = await StorageService.generatePresignedDownloadUrl(storageKey, context, 5 * 60)
|
|
||||||
logger.info(`[${requestId}] Generated presigned URL for ${context} file`)
|
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error(`[${requestId}] Failed to generate presigned URL:`, error)
|
|
||||||
return NextResponse.json(
|
return NextResponse.json(
|
||||||
{
|
{
|
||||||
success: false,
|
success: false,
|
||||||
error: 'Failed to generate file access URL',
|
error: error instanceof Error ? error.message : 'Failed to process file',
|
||||||
},
|
},
|
||||||
{ status: 500 }
|
{ status: 400 }
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
} else if (validatedData.filePath?.startsWith('/')) {
|
|
||||||
const baseUrl = getBaseUrl()
|
|
||||||
fileUrl = `${baseUrl}${validatedData.filePath}`
|
|
||||||
}
|
|
||||||
|
|
||||||
const mistralBody: any = {
|
let mimeType = userFile.type
|
||||||
model: 'mistral-ocr-latest',
|
if (!mimeType || mimeType === 'application/octet-stream') {
|
||||||
document: {
|
const filename = userFile.name?.toLowerCase() || ''
|
||||||
type: 'document_url',
|
if (filename.endsWith('.pdf')) {
|
||||||
document_url: fileUrl,
|
mimeType = 'application/pdf'
|
||||||
},
|
} else if (filename.endsWith('.png')) {
|
||||||
|
mimeType = 'image/png'
|
||||||
|
} else if (filename.endsWith('.jpg') || filename.endsWith('.jpeg')) {
|
||||||
|
mimeType = 'image/jpeg'
|
||||||
|
} else if (filename.endsWith('.gif')) {
|
||||||
|
mimeType = 'image/gif'
|
||||||
|
} else if (filename.endsWith('.webp')) {
|
||||||
|
mimeType = 'image/webp'
|
||||||
|
} else {
|
||||||
|
mimeType = 'application/pdf'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
let base64 = userFile.base64
|
||||||
|
if (!base64) {
|
||||||
|
const buffer = await downloadFileFromStorage(userFile, requestId, logger)
|
||||||
|
base64 = buffer.toString('base64')
|
||||||
|
}
|
||||||
|
const base64Payload = base64.startsWith('data:')
|
||||||
|
? base64
|
||||||
|
: `data:${mimeType};base64,${base64}`
|
||||||
|
|
||||||
|
// Mistral API uses different document types for images vs documents
|
||||||
|
const isImage = mimeType.startsWith('image/')
|
||||||
|
if (isImage) {
|
||||||
|
mistralBody.document = {
|
||||||
|
type: 'image_url',
|
||||||
|
image_url: base64Payload,
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
mistralBody.document = {
|
||||||
|
type: 'document_url',
|
||||||
|
document_url: base64Payload,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else if (filePath) {
|
||||||
|
let fileUrl = filePath
|
||||||
|
|
||||||
|
const isInternalFilePath = isInternalFileUrl(filePath)
|
||||||
|
if (isInternalFilePath) {
|
||||||
|
const resolution = await resolveInternalFileUrl(filePath, userId, requestId, logger)
|
||||||
|
if (resolution.error) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: resolution.error.message,
|
||||||
|
},
|
||||||
|
{ status: resolution.error.status }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
fileUrl = resolution.fileUrl || fileUrl
|
||||||
|
} else if (filePath.startsWith('/')) {
|
||||||
|
logger.warn(`[${requestId}] Invalid internal path`, {
|
||||||
|
userId,
|
||||||
|
path: filePath.substring(0, 50),
|
||||||
|
})
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: 'Invalid file path. Only uploaded files are supported for internal paths.',
|
||||||
|
},
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
const urlValidation = await validateUrlWithDNS(fileUrl, 'filePath')
|
||||||
|
if (!urlValidation.isValid) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: urlValidation.error,
|
||||||
|
},
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const imageExtensions = ['.png', '.jpg', '.jpeg', '.gif', '.webp', '.avif']
|
||||||
|
const pathname = new URL(fileUrl).pathname.toLowerCase()
|
||||||
|
const isImageUrl = imageExtensions.some((ext) => pathname.endsWith(ext))
|
||||||
|
|
||||||
|
if (isImageUrl) {
|
||||||
|
mistralBody.document = {
|
||||||
|
type: 'image_url',
|
||||||
|
image_url: fileUrl,
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
mistralBody.document = {
|
||||||
|
type: 'document_url',
|
||||||
|
document_url: fileUrl,
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if (validatedData.pages) {
|
if (validatedData.pages) {
|
||||||
@@ -124,15 +202,34 @@ export async function POST(request: NextRequest) {
|
|||||||
mistralBody.image_min_size = validatedData.imageMinSize
|
mistralBody.image_min_size = validatedData.imageMinSize
|
||||||
}
|
}
|
||||||
|
|
||||||
const mistralResponse = await fetch('https://api.mistral.ai/v1/ocr', {
|
const mistralEndpoint = 'https://api.mistral.ai/v1/ocr'
|
||||||
method: 'POST',
|
const mistralValidation = await validateUrlWithDNS(mistralEndpoint, 'Mistral API URL')
|
||||||
headers: {
|
if (!mistralValidation.isValid) {
|
||||||
'Content-Type': 'application/json',
|
logger.error(`[${requestId}] Mistral API URL validation failed`, {
|
||||||
Accept: 'application/json',
|
error: mistralValidation.error,
|
||||||
Authorization: `Bearer ${validatedData.apiKey}`,
|
})
|
||||||
},
|
return NextResponse.json(
|
||||||
body: JSON.stringify(mistralBody),
|
{
|
||||||
})
|
success: false,
|
||||||
|
error: 'Failed to reach Mistral API',
|
||||||
|
},
|
||||||
|
{ status: 502 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const mistralResponse = await secureFetchWithPinnedIP(
|
||||||
|
mistralEndpoint,
|
||||||
|
mistralValidation.resolvedIP!,
|
||||||
|
{
|
||||||
|
method: 'POST',
|
||||||
|
headers: {
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
Accept: 'application/json',
|
||||||
|
Authorization: `Bearer ${validatedData.apiKey}`,
|
||||||
|
},
|
||||||
|
body: JSON.stringify(mistralBody),
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
if (!mistralResponse.ok) {
|
if (!mistralResponse.ok) {
|
||||||
const errorText = await mistralResponse.text()
|
const errorText = await mistralResponse.text()
|
||||||
|
|||||||
177
apps/sim/app/api/tools/onedrive/download/route.ts
Normal file
177
apps/sim/app/api/tools/onedrive/download/route.ts
Normal file
@@ -0,0 +1,177 @@
|
|||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
|
import { z } from 'zod'
|
||||||
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
|
import {
|
||||||
|
secureFetchWithPinnedIP,
|
||||||
|
validateUrlWithDNS,
|
||||||
|
} from '@/lib/core/security/input-validation.server'
|
||||||
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
|
|
||||||
|
export const dynamic = 'force-dynamic'
|
||||||
|
|
||||||
|
/** Microsoft Graph API error response structure */
|
||||||
|
interface GraphApiError {
|
||||||
|
error?: {
|
||||||
|
code?: string
|
||||||
|
message?: string
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Microsoft Graph API drive item metadata response */
|
||||||
|
interface DriveItemMetadata {
|
||||||
|
id?: string
|
||||||
|
name?: string
|
||||||
|
folder?: Record<string, unknown>
|
||||||
|
file?: {
|
||||||
|
mimeType?: string
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const logger = createLogger('OneDriveDownloadAPI')
|
||||||
|
|
||||||
|
const OneDriveDownloadSchema = z.object({
|
||||||
|
accessToken: z.string().min(1, 'Access token is required'),
|
||||||
|
fileId: z.string().min(1, 'File ID is required'),
|
||||||
|
fileName: z.string().optional().nullable(),
|
||||||
|
})
|
||||||
|
|
||||||
|
export async function POST(request: NextRequest) {
|
||||||
|
const requestId = generateRequestId()
|
||||||
|
|
||||||
|
try {
|
||||||
|
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
|
||||||
|
|
||||||
|
if (!authResult.success) {
|
||||||
|
logger.warn(`[${requestId}] Unauthorized OneDrive download attempt: ${authResult.error}`)
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: authResult.error || 'Authentication required',
|
||||||
|
},
|
||||||
|
{ status: 401 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const body = await request.json()
|
||||||
|
const validatedData = OneDriveDownloadSchema.parse(body)
|
||||||
|
|
||||||
|
const { accessToken, fileId, fileName } = validatedData
|
||||||
|
const authHeader = `Bearer ${accessToken}`
|
||||||
|
|
||||||
|
logger.info(`[${requestId}] Getting file metadata from OneDrive`, { fileId })
|
||||||
|
|
||||||
|
const metadataUrl = `https://graph.microsoft.com/v1.0/me/drive/items/${fileId}`
|
||||||
|
const metadataUrlValidation = await validateUrlWithDNS(metadataUrl, 'metadataUrl')
|
||||||
|
if (!metadataUrlValidation.isValid) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{ success: false, error: metadataUrlValidation.error },
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const metadataResponse = await secureFetchWithPinnedIP(
|
||||||
|
metadataUrl,
|
||||||
|
metadataUrlValidation.resolvedIP!,
|
||||||
|
{
|
||||||
|
headers: { Authorization: authHeader },
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
if (!metadataResponse.ok) {
|
||||||
|
const errorDetails = (await metadataResponse.json().catch(() => ({}))) as GraphApiError
|
||||||
|
logger.error(`[${requestId}] Failed to get file metadata`, {
|
||||||
|
status: metadataResponse.status,
|
||||||
|
error: errorDetails,
|
||||||
|
})
|
||||||
|
return NextResponse.json(
|
||||||
|
{ success: false, error: errorDetails.error?.message || 'Failed to get file metadata' },
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const metadata = (await metadataResponse.json()) as DriveItemMetadata
|
||||||
|
|
||||||
|
if (metadata.folder && !metadata.file) {
|
||||||
|
logger.error(`[${requestId}] Attempted to download a folder`, {
|
||||||
|
itemId: metadata.id,
|
||||||
|
itemName: metadata.name,
|
||||||
|
})
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: `Cannot download folder "${metadata.name}". Please select a file instead.`,
|
||||||
|
},
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const mimeType = metadata.file?.mimeType || 'application/octet-stream'
|
||||||
|
|
||||||
|
logger.info(`[${requestId}] Downloading file from OneDrive`, { fileId, mimeType })
|
||||||
|
|
||||||
|
const downloadUrl = `https://graph.microsoft.com/v1.0/me/drive/items/${fileId}/content`
|
||||||
|
const downloadUrlValidation = await validateUrlWithDNS(downloadUrl, 'downloadUrl')
|
||||||
|
if (!downloadUrlValidation.isValid) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{ success: false, error: downloadUrlValidation.error },
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const downloadResponse = await secureFetchWithPinnedIP(
|
||||||
|
downloadUrl,
|
||||||
|
downloadUrlValidation.resolvedIP!,
|
||||||
|
{
|
||||||
|
headers: { Authorization: authHeader },
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
if (!downloadResponse.ok) {
|
||||||
|
const downloadError = (await downloadResponse.json().catch(() => ({}))) as GraphApiError
|
||||||
|
logger.error(`[${requestId}] Failed to download file`, {
|
||||||
|
status: downloadResponse.status,
|
||||||
|
error: downloadError,
|
||||||
|
})
|
||||||
|
return NextResponse.json(
|
||||||
|
{ success: false, error: downloadError.error?.message || 'Failed to download file' },
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const arrayBuffer = await downloadResponse.arrayBuffer()
|
||||||
|
const fileBuffer = Buffer.from(arrayBuffer)
|
||||||
|
|
||||||
|
const resolvedName = fileName || metadata.name || 'download'
|
||||||
|
|
||||||
|
logger.info(`[${requestId}] File downloaded successfully`, {
|
||||||
|
fileId,
|
||||||
|
name: resolvedName,
|
||||||
|
size: fileBuffer.length,
|
||||||
|
mimeType,
|
||||||
|
})
|
||||||
|
|
||||||
|
const base64Data = fileBuffer.toString('base64')
|
||||||
|
|
||||||
|
return NextResponse.json({
|
||||||
|
success: true,
|
||||||
|
output: {
|
||||||
|
file: {
|
||||||
|
name: resolvedName,
|
||||||
|
mimeType,
|
||||||
|
data: base64Data,
|
||||||
|
size: fileBuffer.length,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
})
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(`[${requestId}] Error downloading OneDrive file:`, error)
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: error instanceof Error ? error.message : 'Unknown error occurred',
|
||||||
|
},
|
||||||
|
{ status: 500 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -4,7 +4,9 @@ import * as XLSX from 'xlsx'
|
|||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
import { validateMicrosoftGraphId } from '@/lib/core/security/input-validation'
|
import { validateMicrosoftGraphId } from '@/lib/core/security/input-validation'
|
||||||
|
import { secureFetchWithValidation } from '@/lib/core/security/input-validation.server'
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
|
import { RawFileInputSchema } from '@/lib/uploads/utils/file-schemas'
|
||||||
import {
|
import {
|
||||||
getExtensionFromMimeType,
|
getExtensionFromMimeType,
|
||||||
processSingleFileToUserFile,
|
processSingleFileToUserFile,
|
||||||
@@ -29,12 +31,33 @@ const ExcelValuesSchema = z.union([
|
|||||||
const OneDriveUploadSchema = z.object({
|
const OneDriveUploadSchema = z.object({
|
||||||
accessToken: z.string().min(1, 'Access token is required'),
|
accessToken: z.string().min(1, 'Access token is required'),
|
||||||
fileName: z.string().min(1, 'File name is required'),
|
fileName: z.string().min(1, 'File name is required'),
|
||||||
file: z.any().optional(),
|
file: RawFileInputSchema.optional(),
|
||||||
folderId: z.string().optional().nullable(),
|
folderId: z.string().optional().nullable(),
|
||||||
mimeType: z.string().nullish(),
|
mimeType: z.string().nullish(),
|
||||||
values: ExcelValuesSchema.optional().nullable(),
|
values: ExcelValuesSchema.optional().nullable(),
|
||||||
|
conflictBehavior: z.enum(['fail', 'replace', 'rename']).optional().nullable(),
|
||||||
})
|
})
|
||||||
|
|
||||||
|
/** Microsoft Graph DriveItem response */
|
||||||
|
interface OneDriveFileData {
|
||||||
|
id: string
|
||||||
|
name: string
|
||||||
|
size: number
|
||||||
|
webUrl: string
|
||||||
|
createdDateTime: string
|
||||||
|
lastModifiedDateTime: string
|
||||||
|
file?: { mimeType: string }
|
||||||
|
parentReference?: { id: string; path: string }
|
||||||
|
'@microsoft.graph.downloadUrl'?: string
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Microsoft Graph Excel range response */
|
||||||
|
interface ExcelRangeData {
|
||||||
|
address?: string
|
||||||
|
addressLocal?: string
|
||||||
|
values?: unknown[][]
|
||||||
|
}
|
||||||
|
|
||||||
export async function POST(request: NextRequest) {
|
export async function POST(request: NextRequest) {
|
||||||
const requestId = generateRequestId()
|
const requestId = generateRequestId()
|
||||||
|
|
||||||
@@ -88,25 +111,9 @@ export async function POST(request: NextRequest) {
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
let fileToProcess
|
|
||||||
if (Array.isArray(rawFile)) {
|
|
||||||
if (rawFile.length === 0) {
|
|
||||||
return NextResponse.json(
|
|
||||||
{
|
|
||||||
success: false,
|
|
||||||
error: 'No file provided',
|
|
||||||
},
|
|
||||||
{ status: 400 }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
fileToProcess = rawFile[0]
|
|
||||||
} else {
|
|
||||||
fileToProcess = rawFile
|
|
||||||
}
|
|
||||||
|
|
||||||
let userFile
|
let userFile
|
||||||
try {
|
try {
|
||||||
userFile = processSingleFileToUserFile(fileToProcess, requestId, logger)
|
userFile = processSingleFileToUserFile(rawFile, requestId, logger)
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
return NextResponse.json(
|
return NextResponse.json(
|
||||||
{
|
{
|
||||||
@@ -179,14 +186,23 @@ export async function POST(request: NextRequest) {
|
|||||||
uploadUrl = `${MICROSOFT_GRAPH_BASE}/me/drive/root:/${encodeURIComponent(fileName)}:/content`
|
uploadUrl = `${MICROSOFT_GRAPH_BASE}/me/drive/root:/${encodeURIComponent(fileName)}:/content`
|
||||||
}
|
}
|
||||||
|
|
||||||
const uploadResponse = await fetch(uploadUrl, {
|
// Add conflict behavior if specified (defaults to replace by Microsoft Graph API)
|
||||||
method: 'PUT',
|
if (validatedData.conflictBehavior) {
|
||||||
headers: {
|
uploadUrl += `?@microsoft.graph.conflictBehavior=${validatedData.conflictBehavior}`
|
||||||
Authorization: `Bearer ${validatedData.accessToken}`,
|
}
|
||||||
'Content-Type': mimeType,
|
|
||||||
|
const uploadResponse = await secureFetchWithValidation(
|
||||||
|
uploadUrl,
|
||||||
|
{
|
||||||
|
method: 'PUT',
|
||||||
|
headers: {
|
||||||
|
Authorization: `Bearer ${validatedData.accessToken}`,
|
||||||
|
'Content-Type': mimeType,
|
||||||
|
},
|
||||||
|
body: fileBuffer,
|
||||||
},
|
},
|
||||||
body: new Uint8Array(fileBuffer),
|
'uploadUrl'
|
||||||
})
|
)
|
||||||
|
|
||||||
if (!uploadResponse.ok) {
|
if (!uploadResponse.ok) {
|
||||||
const errorText = await uploadResponse.text()
|
const errorText = await uploadResponse.text()
|
||||||
@@ -200,7 +216,7 @@ export async function POST(request: NextRequest) {
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
const fileData = await uploadResponse.json()
|
const fileData = (await uploadResponse.json()) as OneDriveFileData
|
||||||
|
|
||||||
let excelWriteResult: any | undefined
|
let excelWriteResult: any | undefined
|
||||||
const shouldWriteExcelContent =
|
const shouldWriteExcelContent =
|
||||||
@@ -209,8 +225,11 @@ export async function POST(request: NextRequest) {
|
|||||||
if (shouldWriteExcelContent) {
|
if (shouldWriteExcelContent) {
|
||||||
try {
|
try {
|
||||||
let workbookSessionId: string | undefined
|
let workbookSessionId: string | undefined
|
||||||
const sessionResp = await fetch(
|
const sessionUrl = `${MICROSOFT_GRAPH_BASE}/me/drive/items/${encodeURIComponent(
|
||||||
`${MICROSOFT_GRAPH_BASE}/me/drive/items/${encodeURIComponent(fileData.id)}/workbook/createSession`,
|
fileData.id
|
||||||
|
)}/workbook/createSession`
|
||||||
|
const sessionResp = await secureFetchWithValidation(
|
||||||
|
sessionUrl,
|
||||||
{
|
{
|
||||||
method: 'POST',
|
method: 'POST',
|
||||||
headers: {
|
headers: {
|
||||||
@@ -218,11 +237,12 @@ export async function POST(request: NextRequest) {
|
|||||||
'Content-Type': 'application/json',
|
'Content-Type': 'application/json',
|
||||||
},
|
},
|
||||||
body: JSON.stringify({ persistChanges: true }),
|
body: JSON.stringify({ persistChanges: true }),
|
||||||
}
|
},
|
||||||
|
'sessionUrl'
|
||||||
)
|
)
|
||||||
|
|
||||||
if (sessionResp.ok) {
|
if (sessionResp.ok) {
|
||||||
const sessionData = await sessionResp.json()
|
const sessionData = (await sessionResp.json()) as { id?: string }
|
||||||
workbookSessionId = sessionData?.id
|
workbookSessionId = sessionData?.id
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -231,14 +251,19 @@ export async function POST(request: NextRequest) {
|
|||||||
const listUrl = `${MICROSOFT_GRAPH_BASE}/me/drive/items/${encodeURIComponent(
|
const listUrl = `${MICROSOFT_GRAPH_BASE}/me/drive/items/${encodeURIComponent(
|
||||||
fileData.id
|
fileData.id
|
||||||
)}/workbook/worksheets?$select=name&$orderby=position&$top=1`
|
)}/workbook/worksheets?$select=name&$orderby=position&$top=1`
|
||||||
const listResp = await fetch(listUrl, {
|
const listResp = await secureFetchWithValidation(
|
||||||
headers: {
|
listUrl,
|
||||||
Authorization: `Bearer ${validatedData.accessToken}`,
|
{
|
||||||
...(workbookSessionId ? { 'workbook-session-id': workbookSessionId } : {}),
|
method: 'GET',
|
||||||
|
headers: {
|
||||||
|
Authorization: `Bearer ${validatedData.accessToken}`,
|
||||||
|
...(workbookSessionId ? { 'workbook-session-id': workbookSessionId } : {}),
|
||||||
|
},
|
||||||
},
|
},
|
||||||
})
|
'listUrl'
|
||||||
|
)
|
||||||
if (listResp.ok) {
|
if (listResp.ok) {
|
||||||
const listData = await listResp.json()
|
const listData = (await listResp.json()) as { value?: Array<{ name?: string }> }
|
||||||
const firstSheetName = listData?.value?.[0]?.name
|
const firstSheetName = listData?.value?.[0]?.name
|
||||||
if (firstSheetName) {
|
if (firstSheetName) {
|
||||||
sheetName = firstSheetName
|
sheetName = firstSheetName
|
||||||
@@ -297,15 +322,19 @@ export async function POST(request: NextRequest) {
|
|||||||
)}')/range(address='${encodeURIComponent(computedRangeAddress)}')`
|
)}')/range(address='${encodeURIComponent(computedRangeAddress)}')`
|
||||||
)
|
)
|
||||||
|
|
||||||
const excelWriteResponse = await fetch(url.toString(), {
|
const excelWriteResponse = await secureFetchWithValidation(
|
||||||
method: 'PATCH',
|
url.toString(),
|
||||||
headers: {
|
{
|
||||||
Authorization: `Bearer ${validatedData.accessToken}`,
|
method: 'PATCH',
|
||||||
'Content-Type': 'application/json',
|
headers: {
|
||||||
...(workbookSessionId ? { 'workbook-session-id': workbookSessionId } : {}),
|
Authorization: `Bearer ${validatedData.accessToken}`,
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
...(workbookSessionId ? { 'workbook-session-id': workbookSessionId } : {}),
|
||||||
|
},
|
||||||
|
body: JSON.stringify({ values: processedValues }),
|
||||||
},
|
},
|
||||||
body: JSON.stringify({ values: processedValues }),
|
'excelWriteUrl'
|
||||||
})
|
)
|
||||||
|
|
||||||
if (!excelWriteResponse || !excelWriteResponse.ok) {
|
if (!excelWriteResponse || !excelWriteResponse.ok) {
|
||||||
const errorText = excelWriteResponse ? await excelWriteResponse.text() : 'no response'
|
const errorText = excelWriteResponse ? await excelWriteResponse.text() : 'no response'
|
||||||
@@ -320,7 +349,7 @@ export async function POST(request: NextRequest) {
|
|||||||
details: errorText,
|
details: errorText,
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
const writeData = await excelWriteResponse.json()
|
const writeData = (await excelWriteResponse.json()) as ExcelRangeData
|
||||||
const addr = writeData.address || writeData.addressLocal
|
const addr = writeData.address || writeData.addressLocal
|
||||||
const v = writeData.values || []
|
const v = writeData.values || []
|
||||||
excelWriteResult = {
|
excelWriteResult = {
|
||||||
@@ -328,21 +357,25 @@ export async function POST(request: NextRequest) {
|
|||||||
updatedRange: addr,
|
updatedRange: addr,
|
||||||
updatedRows: Array.isArray(v) ? v.length : undefined,
|
updatedRows: Array.isArray(v) ? v.length : undefined,
|
||||||
updatedColumns: Array.isArray(v) && v[0] ? v[0].length : undefined,
|
updatedColumns: Array.isArray(v) && v[0] ? v[0].length : undefined,
|
||||||
updatedCells: Array.isArray(v) && v[0] ? v.length * (v[0] as any[]).length : undefined,
|
updatedCells: Array.isArray(v) && v[0] ? v.length * v[0].length : undefined,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if (workbookSessionId) {
|
if (workbookSessionId) {
|
||||||
try {
|
try {
|
||||||
const closeResp = await fetch(
|
const closeUrl = `${MICROSOFT_GRAPH_BASE}/me/drive/items/${encodeURIComponent(
|
||||||
`${MICROSOFT_GRAPH_BASE}/me/drive/items/${encodeURIComponent(fileData.id)}/workbook/closeSession`,
|
fileData.id
|
||||||
|
)}/workbook/closeSession`
|
||||||
|
const closeResp = await secureFetchWithValidation(
|
||||||
|
closeUrl,
|
||||||
{
|
{
|
||||||
method: 'POST',
|
method: 'POST',
|
||||||
headers: {
|
headers: {
|
||||||
Authorization: `Bearer ${validatedData.accessToken}`,
|
Authorization: `Bearer ${validatedData.accessToken}`,
|
||||||
'workbook-session-id': workbookSessionId,
|
'workbook-session-id': workbookSessionId,
|
||||||
},
|
},
|
||||||
}
|
},
|
||||||
|
'closeSessionUrl'
|
||||||
)
|
)
|
||||||
if (!closeResp.ok) {
|
if (!closeResp.ok) {
|
||||||
const closeText = await closeResp.text()
|
const closeText = await closeResp.text()
|
||||||
|
|||||||
@@ -3,6 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
|
|||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
|
import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
|
||||||
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
|
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
|
||||||
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
||||||
|
|
||||||
@@ -18,7 +19,7 @@ const OutlookDraftSchema = z.object({
|
|||||||
contentType: z.enum(['text', 'html']).optional().nullable(),
|
contentType: z.enum(['text', 'html']).optional().nullable(),
|
||||||
cc: z.string().optional().nullable(),
|
cc: z.string().optional().nullable(),
|
||||||
bcc: z.string().optional().nullable(),
|
bcc: z.string().optional().nullable(),
|
||||||
attachments: z.array(z.any()).optional().nullable(),
|
attachments: RawFileInputArraySchema.optional().nullable(),
|
||||||
})
|
})
|
||||||
|
|
||||||
export async function POST(request: NextRequest) {
|
export async function POST(request: NextRequest) {
|
||||||
|
|||||||
@@ -3,6 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
|
|||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
|
import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
|
||||||
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
|
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
|
||||||
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
||||||
|
|
||||||
@@ -20,7 +21,7 @@ const OutlookSendSchema = z.object({
|
|||||||
bcc: z.string().optional().nullable(),
|
bcc: z.string().optional().nullable(),
|
||||||
replyToMessageId: z.string().optional().nullable(),
|
replyToMessageId: z.string().optional().nullable(),
|
||||||
conversationId: z.string().optional().nullable(),
|
conversationId: z.string().optional().nullable(),
|
||||||
attachments: z.array(z.any()).optional().nullable(),
|
attachments: RawFileInputArraySchema.optional().nullable(),
|
||||||
})
|
})
|
||||||
|
|
||||||
export async function POST(request: NextRequest) {
|
export async function POST(request: NextRequest) {
|
||||||
@@ -95,14 +96,14 @@ export async function POST(request: NextRequest) {
|
|||||||
|
|
||||||
if (attachments.length > 0) {
|
if (attachments.length > 0) {
|
||||||
const totalSize = attachments.reduce((sum, file) => sum + file.size, 0)
|
const totalSize = attachments.reduce((sum, file) => sum + file.size, 0)
|
||||||
const maxSize = 4 * 1024 * 1024 // 4MB
|
const maxSize = 3 * 1024 * 1024 // 3MB - Microsoft Graph API limit for inline attachments
|
||||||
|
|
||||||
if (totalSize > maxSize) {
|
if (totalSize > maxSize) {
|
||||||
const sizeMB = (totalSize / (1024 * 1024)).toFixed(2)
|
const sizeMB = (totalSize / (1024 * 1024)).toFixed(2)
|
||||||
return NextResponse.json(
|
return NextResponse.json(
|
||||||
{
|
{
|
||||||
success: false,
|
success: false,
|
||||||
error: `Total attachment size (${sizeMB}MB) exceeds Outlook's limit of 4MB per request`,
|
error: `Total attachment size (${sizeMB}MB) exceeds Microsoft Graph API limit of 3MB per request`,
|
||||||
},
|
},
|
||||||
{ status: 400 }
|
{ status: 400 }
|
||||||
)
|
)
|
||||||
|
|||||||
165
apps/sim/app/api/tools/pipedrive/get-files/route.ts
Normal file
165
apps/sim/app/api/tools/pipedrive/get-files/route.ts
Normal file
@@ -0,0 +1,165 @@
|
|||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
|
import { z } from 'zod'
|
||||||
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
|
import {
|
||||||
|
secureFetchWithPinnedIP,
|
||||||
|
validateUrlWithDNS,
|
||||||
|
} from '@/lib/core/security/input-validation.server'
|
||||||
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
|
import { getFileExtension, getMimeTypeFromExtension } from '@/lib/uploads/utils/file-utils'
|
||||||
|
|
||||||
|
export const dynamic = 'force-dynamic'
|
||||||
|
|
||||||
|
const logger = createLogger('PipedriveGetFilesAPI')
|
||||||
|
|
||||||
|
interface PipedriveFile {
|
||||||
|
id?: number
|
||||||
|
name?: string
|
||||||
|
url?: string
|
||||||
|
}
|
||||||
|
|
||||||
|
interface PipedriveApiResponse {
|
||||||
|
success: boolean
|
||||||
|
data?: PipedriveFile[]
|
||||||
|
error?: string
|
||||||
|
}
|
||||||
|
|
||||||
|
const PipedriveGetFilesSchema = z.object({
|
||||||
|
accessToken: z.string().min(1, 'Access token is required'),
|
||||||
|
deal_id: z.string().optional().nullable(),
|
||||||
|
person_id: z.string().optional().nullable(),
|
||||||
|
org_id: z.string().optional().nullable(),
|
||||||
|
limit: z.string().optional().nullable(),
|
||||||
|
downloadFiles: z.boolean().optional().default(false),
|
||||||
|
})
|
||||||
|
|
||||||
|
export async function POST(request: NextRequest) {
|
||||||
|
const requestId = generateRequestId()
|
||||||
|
|
||||||
|
try {
|
||||||
|
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
|
||||||
|
|
||||||
|
if (!authResult.success) {
|
||||||
|
logger.warn(`[${requestId}] Unauthorized Pipedrive get files attempt: ${authResult.error}`)
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: authResult.error || 'Authentication required',
|
||||||
|
},
|
||||||
|
{ status: 401 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const body = await request.json()
|
||||||
|
const validatedData = PipedriveGetFilesSchema.parse(body)
|
||||||
|
|
||||||
|
const { accessToken, deal_id, person_id, org_id, limit, downloadFiles } = validatedData
|
||||||
|
|
||||||
|
const baseUrl = 'https://api.pipedrive.com/v1/files'
|
||||||
|
const queryParams = new URLSearchParams()
|
||||||
|
|
||||||
|
if (deal_id) queryParams.append('deal_id', deal_id)
|
||||||
|
if (person_id) queryParams.append('person_id', person_id)
|
||||||
|
if (org_id) queryParams.append('org_id', org_id)
|
||||||
|
if (limit) queryParams.append('limit', limit)
|
||||||
|
|
||||||
|
const queryString = queryParams.toString()
|
||||||
|
const apiUrl = queryString ? `${baseUrl}?${queryString}` : baseUrl
|
||||||
|
|
||||||
|
logger.info(`[${requestId}] Fetching files from Pipedrive`, { deal_id, person_id, org_id })
|
||||||
|
|
||||||
|
const urlValidation = await validateUrlWithDNS(apiUrl, 'apiUrl')
|
||||||
|
if (!urlValidation.isValid) {
|
||||||
|
return NextResponse.json({ success: false, error: urlValidation.error }, { status: 400 })
|
||||||
|
}
|
||||||
|
|
||||||
|
const response = await secureFetchWithPinnedIP(apiUrl, urlValidation.resolvedIP!, {
|
||||||
|
method: 'GET',
|
||||||
|
headers: {
|
||||||
|
Authorization: `Bearer ${accessToken}`,
|
||||||
|
Accept: 'application/json',
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
const data = (await response.json()) as PipedriveApiResponse
|
||||||
|
|
||||||
|
if (!data.success) {
|
||||||
|
logger.error(`[${requestId}] Pipedrive API request failed`, { data })
|
||||||
|
return NextResponse.json(
|
||||||
|
{ success: false, error: data.error || 'Failed to fetch files from Pipedrive' },
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const files = data.data || []
|
||||||
|
const downloadedFiles: Array<{
|
||||||
|
name: string
|
||||||
|
mimeType: string
|
||||||
|
data: string
|
||||||
|
size: number
|
||||||
|
}> = []
|
||||||
|
|
||||||
|
if (downloadFiles) {
|
||||||
|
for (const file of files) {
|
||||||
|
if (!file?.url) continue
|
||||||
|
|
||||||
|
try {
|
||||||
|
const fileUrlValidation = await validateUrlWithDNS(file.url, 'fileUrl')
|
||||||
|
if (!fileUrlValidation.isValid) continue
|
||||||
|
|
||||||
|
const downloadResponse = await secureFetchWithPinnedIP(
|
||||||
|
file.url,
|
||||||
|
fileUrlValidation.resolvedIP!,
|
||||||
|
{
|
||||||
|
method: 'GET',
|
||||||
|
headers: { Authorization: `Bearer ${accessToken}` },
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
if (!downloadResponse.ok) continue
|
||||||
|
|
||||||
|
const arrayBuffer = await downloadResponse.arrayBuffer()
|
||||||
|
const buffer = Buffer.from(arrayBuffer)
|
||||||
|
const extension = getFileExtension(file.name || '')
|
||||||
|
const mimeType =
|
||||||
|
downloadResponse.headers.get('content-type') || getMimeTypeFromExtension(extension)
|
||||||
|
const fileName = file.name || `pipedrive-file-${file.id || Date.now()}`
|
||||||
|
|
||||||
|
downloadedFiles.push({
|
||||||
|
name: fileName,
|
||||||
|
mimeType,
|
||||||
|
data: buffer.toString('base64'),
|
||||||
|
size: buffer.length,
|
||||||
|
})
|
||||||
|
} catch (error) {
|
||||||
|
logger.warn(`[${requestId}] Failed to download file ${file.id}:`, error)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info(`[${requestId}] Pipedrive files fetched successfully`, {
|
||||||
|
fileCount: files.length,
|
||||||
|
downloadedCount: downloadedFiles.length,
|
||||||
|
})
|
||||||
|
|
||||||
|
return NextResponse.json({
|
||||||
|
success: true,
|
||||||
|
output: {
|
||||||
|
files,
|
||||||
|
downloadedFiles: downloadedFiles.length > 0 ? downloadedFiles : undefined,
|
||||||
|
total_items: files.length,
|
||||||
|
success: true,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(`[${requestId}] Error fetching Pipedrive files:`, error)
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: error instanceof Error ? error.message : 'Unknown error occurred',
|
||||||
|
},
|
||||||
|
{ status: 500 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -2,15 +2,14 @@ import { createLogger } from '@sim/logger'
|
|||||||
import { type NextRequest, NextResponse } from 'next/server'
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
|
||||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
|
||||||
import { StorageService } from '@/lib/uploads'
|
|
||||||
import {
|
import {
|
||||||
extractStorageKey,
|
secureFetchWithPinnedIP,
|
||||||
inferContextFromKey,
|
validateUrlWithDNS,
|
||||||
isInternalFileUrl,
|
} from '@/lib/core/security/input-validation.server'
|
||||||
} from '@/lib/uploads/utils/file-utils'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
import { verifyFileAccess } from '@/app/api/files/authorization'
|
import { RawFileInputSchema } from '@/lib/uploads/utils/file-schemas'
|
||||||
|
import { isInternalFileUrl } from '@/lib/uploads/utils/file-utils'
|
||||||
|
import { resolveFileInputToUrl } from '@/lib/uploads/utils/file-utils.server'
|
||||||
|
|
||||||
export const dynamic = 'force-dynamic'
|
export const dynamic = 'force-dynamic'
|
||||||
|
|
||||||
@@ -18,7 +17,8 @@ const logger = createLogger('PulseParseAPI')
|
|||||||
|
|
||||||
const PulseParseSchema = z.object({
|
const PulseParseSchema = z.object({
|
||||||
apiKey: z.string().min(1, 'API key is required'),
|
apiKey: z.string().min(1, 'API key is required'),
|
||||||
filePath: z.string().min(1, 'File path is required'),
|
filePath: z.string().optional(),
|
||||||
|
file: RawFileInputSchema.optional(),
|
||||||
pages: z.string().optional(),
|
pages: z.string().optional(),
|
||||||
extractFigure: z.boolean().optional(),
|
extractFigure: z.boolean().optional(),
|
||||||
figureDescription: z.boolean().optional(),
|
figureDescription: z.boolean().optional(),
|
||||||
@@ -51,50 +51,30 @@ export async function POST(request: NextRequest) {
|
|||||||
const validatedData = PulseParseSchema.parse(body)
|
const validatedData = PulseParseSchema.parse(body)
|
||||||
|
|
||||||
logger.info(`[${requestId}] Pulse parse request`, {
|
logger.info(`[${requestId}] Pulse parse request`, {
|
||||||
|
fileName: validatedData.file?.name,
|
||||||
filePath: validatedData.filePath,
|
filePath: validatedData.filePath,
|
||||||
isWorkspaceFile: isInternalFileUrl(validatedData.filePath),
|
isWorkspaceFile: validatedData.filePath ? isInternalFileUrl(validatedData.filePath) : false,
|
||||||
userId,
|
userId,
|
||||||
})
|
})
|
||||||
|
|
||||||
let fileUrl = validatedData.filePath
|
const resolution = await resolveFileInputToUrl({
|
||||||
|
file: validatedData.file,
|
||||||
|
filePath: validatedData.filePath,
|
||||||
|
userId,
|
||||||
|
requestId,
|
||||||
|
logger,
|
||||||
|
})
|
||||||
|
|
||||||
if (isInternalFileUrl(validatedData.filePath)) {
|
if (resolution.error) {
|
||||||
try {
|
return NextResponse.json(
|
||||||
const storageKey = extractStorageKey(validatedData.filePath)
|
{ success: false, error: resolution.error.message },
|
||||||
const context = inferContextFromKey(storageKey)
|
{ status: resolution.error.status }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
const hasAccess = await verifyFileAccess(storageKey, userId, undefined, context, false)
|
const fileUrl = resolution.fileUrl
|
||||||
|
if (!fileUrl) {
|
||||||
if (!hasAccess) {
|
return NextResponse.json({ success: false, error: 'File input is required' }, { status: 400 })
|
||||||
logger.warn(`[${requestId}] Unauthorized presigned URL generation attempt`, {
|
|
||||||
userId,
|
|
||||||
key: storageKey,
|
|
||||||
context,
|
|
||||||
})
|
|
||||||
return NextResponse.json(
|
|
||||||
{
|
|
||||||
success: false,
|
|
||||||
error: 'File not found',
|
|
||||||
},
|
|
||||||
{ status: 404 }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
fileUrl = await StorageService.generatePresignedDownloadUrl(storageKey, context, 5 * 60)
|
|
||||||
logger.info(`[${requestId}] Generated presigned URL for ${context} file`)
|
|
||||||
} catch (error) {
|
|
||||||
logger.error(`[${requestId}] Failed to generate presigned URL:`, error)
|
|
||||||
return NextResponse.json(
|
|
||||||
{
|
|
||||||
success: false,
|
|
||||||
error: 'Failed to generate file access URL',
|
|
||||||
},
|
|
||||||
{ status: 500 }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
} else if (validatedData.filePath?.startsWith('/')) {
|
|
||||||
const baseUrl = getBaseUrl()
|
|
||||||
fileUrl = `${baseUrl}${validatedData.filePath}`
|
|
||||||
}
|
}
|
||||||
|
|
||||||
const formData = new FormData()
|
const formData = new FormData()
|
||||||
@@ -119,13 +99,36 @@ export async function POST(request: NextRequest) {
|
|||||||
formData.append('chunk_size', String(validatedData.chunkSize))
|
formData.append('chunk_size', String(validatedData.chunkSize))
|
||||||
}
|
}
|
||||||
|
|
||||||
const pulseResponse = await fetch('https://api.runpulse.com/extract', {
|
const pulseEndpoint = 'https://api.runpulse.com/extract'
|
||||||
method: 'POST',
|
const pulseValidation = await validateUrlWithDNS(pulseEndpoint, 'Pulse API URL')
|
||||||
headers: {
|
if (!pulseValidation.isValid) {
|
||||||
'x-api-key': validatedData.apiKey,
|
logger.error(`[${requestId}] Pulse API URL validation failed`, {
|
||||||
},
|
error: pulseValidation.error,
|
||||||
body: formData,
|
})
|
||||||
})
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: 'Failed to reach Pulse API',
|
||||||
|
},
|
||||||
|
{ status: 502 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const pulsePayload = new Response(formData)
|
||||||
|
const contentType = pulsePayload.headers.get('content-type') || 'multipart/form-data'
|
||||||
|
const bodyBuffer = Buffer.from(await pulsePayload.arrayBuffer())
|
||||||
|
const pulseResponse = await secureFetchWithPinnedIP(
|
||||||
|
pulseEndpoint,
|
||||||
|
pulseValidation.resolvedIP!,
|
||||||
|
{
|
||||||
|
method: 'POST',
|
||||||
|
headers: {
|
||||||
|
'x-api-key': validatedData.apiKey,
|
||||||
|
'Content-Type': contentType,
|
||||||
|
},
|
||||||
|
body: bodyBuffer,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
if (!pulseResponse.ok) {
|
if (!pulseResponse.ok) {
|
||||||
const errorText = await pulseResponse.text()
|
const errorText = await pulseResponse.text()
|
||||||
|
|||||||
@@ -2,15 +2,14 @@ import { createLogger } from '@sim/logger'
|
|||||||
import { type NextRequest, NextResponse } from 'next/server'
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
|
||||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
|
||||||
import { StorageService } from '@/lib/uploads'
|
|
||||||
import {
|
import {
|
||||||
extractStorageKey,
|
secureFetchWithPinnedIP,
|
||||||
inferContextFromKey,
|
validateUrlWithDNS,
|
||||||
isInternalFileUrl,
|
} from '@/lib/core/security/input-validation.server'
|
||||||
} from '@/lib/uploads/utils/file-utils'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
import { verifyFileAccess } from '@/app/api/files/authorization'
|
import { RawFileInputSchema } from '@/lib/uploads/utils/file-schemas'
|
||||||
|
import { isInternalFileUrl } from '@/lib/uploads/utils/file-utils'
|
||||||
|
import { resolveFileInputToUrl } from '@/lib/uploads/utils/file-utils.server'
|
||||||
|
|
||||||
export const dynamic = 'force-dynamic'
|
export const dynamic = 'force-dynamic'
|
||||||
|
|
||||||
@@ -18,7 +17,8 @@ const logger = createLogger('ReductoParseAPI')
|
|||||||
|
|
||||||
const ReductoParseSchema = z.object({
|
const ReductoParseSchema = z.object({
|
||||||
apiKey: z.string().min(1, 'API key is required'),
|
apiKey: z.string().min(1, 'API key is required'),
|
||||||
filePath: z.string().min(1, 'File path is required'),
|
filePath: z.string().optional(),
|
||||||
|
file: RawFileInputSchema.optional(),
|
||||||
pages: z.array(z.number()).optional(),
|
pages: z.array(z.number()).optional(),
|
||||||
tableOutputFormat: z.enum(['html', 'md']).optional(),
|
tableOutputFormat: z.enum(['html', 'md']).optional(),
|
||||||
})
|
})
|
||||||
@@ -47,56 +47,30 @@ export async function POST(request: NextRequest) {
|
|||||||
const validatedData = ReductoParseSchema.parse(body)
|
const validatedData = ReductoParseSchema.parse(body)
|
||||||
|
|
||||||
logger.info(`[${requestId}] Reducto parse request`, {
|
logger.info(`[${requestId}] Reducto parse request`, {
|
||||||
|
fileName: validatedData.file?.name,
|
||||||
filePath: validatedData.filePath,
|
filePath: validatedData.filePath,
|
||||||
isWorkspaceFile: isInternalFileUrl(validatedData.filePath),
|
isWorkspaceFile: validatedData.filePath ? isInternalFileUrl(validatedData.filePath) : false,
|
||||||
userId,
|
userId,
|
||||||
})
|
})
|
||||||
|
|
||||||
let fileUrl = validatedData.filePath
|
const resolution = await resolveFileInputToUrl({
|
||||||
|
file: validatedData.file,
|
||||||
|
filePath: validatedData.filePath,
|
||||||
|
userId,
|
||||||
|
requestId,
|
||||||
|
logger,
|
||||||
|
})
|
||||||
|
|
||||||
if (isInternalFileUrl(validatedData.filePath)) {
|
if (resolution.error) {
|
||||||
try {
|
return NextResponse.json(
|
||||||
const storageKey = extractStorageKey(validatedData.filePath)
|
{ success: false, error: resolution.error.message },
|
||||||
const context = inferContextFromKey(storageKey)
|
{ status: resolution.error.status }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
const hasAccess = await verifyFileAccess(
|
const fileUrl = resolution.fileUrl
|
||||||
storageKey,
|
if (!fileUrl) {
|
||||||
userId,
|
return NextResponse.json({ success: false, error: 'File input is required' }, { status: 400 })
|
||||||
undefined, // customConfig
|
|
||||||
context, // context
|
|
||||||
false // isLocal
|
|
||||||
)
|
|
||||||
|
|
||||||
if (!hasAccess) {
|
|
||||||
logger.warn(`[${requestId}] Unauthorized presigned URL generation attempt`, {
|
|
||||||
userId,
|
|
||||||
key: storageKey,
|
|
||||||
context,
|
|
||||||
})
|
|
||||||
return NextResponse.json(
|
|
||||||
{
|
|
||||||
success: false,
|
|
||||||
error: 'File not found',
|
|
||||||
},
|
|
||||||
{ status: 404 }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
fileUrl = await StorageService.generatePresignedDownloadUrl(storageKey, context, 5 * 60)
|
|
||||||
logger.info(`[${requestId}] Generated presigned URL for ${context} file`)
|
|
||||||
} catch (error) {
|
|
||||||
logger.error(`[${requestId}] Failed to generate presigned URL:`, error)
|
|
||||||
return NextResponse.json(
|
|
||||||
{
|
|
||||||
success: false,
|
|
||||||
error: 'Failed to generate file access URL',
|
|
||||||
},
|
|
||||||
{ status: 500 }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
} else if (validatedData.filePath?.startsWith('/')) {
|
|
||||||
const baseUrl = getBaseUrl()
|
|
||||||
fileUrl = `${baseUrl}${validatedData.filePath}`
|
|
||||||
}
|
}
|
||||||
|
|
||||||
const reductoBody: Record<string, unknown> = {
|
const reductoBody: Record<string, unknown> = {
|
||||||
@@ -104,8 +78,13 @@ export async function POST(request: NextRequest) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if (validatedData.pages && validatedData.pages.length > 0) {
|
if (validatedData.pages && validatedData.pages.length > 0) {
|
||||||
|
// Reducto API expects page_range as an object with start/end, not an array
|
||||||
|
const pages = validatedData.pages
|
||||||
reductoBody.settings = {
|
reductoBody.settings = {
|
||||||
page_range: validatedData.pages,
|
page_range: {
|
||||||
|
start: Math.min(...pages),
|
||||||
|
end: Math.max(...pages),
|
||||||
|
},
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -115,15 +94,34 @@ export async function POST(request: NextRequest) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
const reductoResponse = await fetch('https://platform.reducto.ai/parse', {
|
const reductoEndpoint = 'https://platform.reducto.ai/parse'
|
||||||
method: 'POST',
|
const reductoValidation = await validateUrlWithDNS(reductoEndpoint, 'Reducto API URL')
|
||||||
headers: {
|
if (!reductoValidation.isValid) {
|
||||||
'Content-Type': 'application/json',
|
logger.error(`[${requestId}] Reducto API URL validation failed`, {
|
||||||
Accept: 'application/json',
|
error: reductoValidation.error,
|
||||||
Authorization: `Bearer ${validatedData.apiKey}`,
|
})
|
||||||
},
|
return NextResponse.json(
|
||||||
body: JSON.stringify(reductoBody),
|
{
|
||||||
})
|
success: false,
|
||||||
|
error: 'Failed to reach Reducto API',
|
||||||
|
},
|
||||||
|
{ status: 502 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const reductoResponse = await secureFetchWithPinnedIP(
|
||||||
|
reductoEndpoint,
|
||||||
|
reductoValidation.resolvedIP!,
|
||||||
|
{
|
||||||
|
method: 'POST',
|
||||||
|
headers: {
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
Accept: 'application/json',
|
||||||
|
Authorization: `Bearer ${validatedData.apiKey}`,
|
||||||
|
},
|
||||||
|
body: JSON.stringify(reductoBody),
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
if (!reductoResponse.ok) {
|
if (!reductoResponse.ok) {
|
||||||
const errorText = await reductoResponse.text()
|
const errorText = await reductoResponse.text()
|
||||||
|
|||||||
@@ -4,6 +4,7 @@ import { type NextRequest, NextResponse } from 'next/server'
|
|||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
|
import { RawFileInputSchema } from '@/lib/uploads/utils/file-schemas'
|
||||||
import { processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
|
import { processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
|
||||||
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
||||||
|
|
||||||
@@ -17,7 +18,7 @@ const S3PutObjectSchema = z.object({
|
|||||||
region: z.string().min(1, 'Region is required'),
|
region: z.string().min(1, 'Region is required'),
|
||||||
bucketName: z.string().min(1, 'Bucket name is required'),
|
bucketName: z.string().min(1, 'Bucket name is required'),
|
||||||
objectKey: z.string().min(1, 'Object key is required'),
|
objectKey: z.string().min(1, 'Object key is required'),
|
||||||
file: z.any().optional().nullable(),
|
file: RawFileInputSchema.optional().nullable(),
|
||||||
content: z.string().optional().nullable(),
|
content: z.string().optional().nullable(),
|
||||||
contentType: z.string().optional().nullable(),
|
contentType: z.string().optional().nullable(),
|
||||||
acl: z.string().optional().nullable(),
|
acl: z.string().optional().nullable(),
|
||||||
|
|||||||
188
apps/sim/app/api/tools/sendgrid/send-mail/route.ts
Normal file
188
apps/sim/app/api/tools/sendgrid/send-mail/route.ts
Normal file
@@ -0,0 +1,188 @@
|
|||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
|
import { z } from 'zod'
|
||||||
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
|
import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
|
||||||
|
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
|
||||||
|
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
||||||
|
|
||||||
|
export const dynamic = 'force-dynamic'
|
||||||
|
|
||||||
|
const logger = createLogger('SendGridSendMailAPI')
|
||||||
|
|
||||||
|
const SendGridSendMailSchema = z.object({
|
||||||
|
apiKey: z.string().min(1, 'API key is required'),
|
||||||
|
from: z.string().min(1, 'From email is required'),
|
||||||
|
fromName: z.string().optional().nullable(),
|
||||||
|
to: z.string().min(1, 'To email is required'),
|
||||||
|
toName: z.string().optional().nullable(),
|
||||||
|
subject: z.string().optional().nullable(),
|
||||||
|
content: z.string().optional().nullable(),
|
||||||
|
contentType: z.string().optional().nullable(),
|
||||||
|
cc: z.string().optional().nullable(),
|
||||||
|
bcc: z.string().optional().nullable(),
|
||||||
|
replyTo: z.string().optional().nullable(),
|
||||||
|
replyToName: z.string().optional().nullable(),
|
||||||
|
templateId: z.string().optional().nullable(),
|
||||||
|
dynamicTemplateData: z.any().optional().nullable(),
|
||||||
|
attachments: RawFileInputArraySchema.optional().nullable(),
|
||||||
|
})
|
||||||
|
|
||||||
|
export async function POST(request: NextRequest) {
|
||||||
|
const requestId = generateRequestId()
|
||||||
|
|
||||||
|
try {
|
||||||
|
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
|
||||||
|
|
||||||
|
if (!authResult.success) {
|
||||||
|
logger.warn(`[${requestId}] Unauthorized SendGrid send attempt: ${authResult.error}`)
|
||||||
|
return NextResponse.json(
|
||||||
|
{ success: false, error: authResult.error || 'Authentication required' },
|
||||||
|
{ status: 401 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info(`[${requestId}] Authenticated SendGrid send request via ${authResult.authType}`)
|
||||||
|
|
||||||
|
const body = await request.json()
|
||||||
|
const validatedData = SendGridSendMailSchema.parse(body)
|
||||||
|
|
||||||
|
logger.info(`[${requestId}] Sending SendGrid email`, {
|
||||||
|
to: validatedData.to,
|
||||||
|
subject: validatedData.subject || '(template)',
|
||||||
|
hasAttachments: !!(validatedData.attachments && validatedData.attachments.length > 0),
|
||||||
|
attachmentCount: validatedData.attachments?.length || 0,
|
||||||
|
})
|
||||||
|
|
||||||
|
// Build personalizations
|
||||||
|
const personalizations: Record<string, unknown> = {
|
||||||
|
to: [
|
||||||
|
{ email: validatedData.to, ...(validatedData.toName && { name: validatedData.toName }) },
|
||||||
|
],
|
||||||
|
}
|
||||||
|
|
||||||
|
if (validatedData.cc) {
|
||||||
|
personalizations.cc = [{ email: validatedData.cc }]
|
||||||
|
}
|
||||||
|
|
||||||
|
if (validatedData.bcc) {
|
||||||
|
personalizations.bcc = [{ email: validatedData.bcc }]
|
||||||
|
}
|
||||||
|
|
||||||
|
if (validatedData.templateId && validatedData.dynamicTemplateData) {
|
||||||
|
personalizations.dynamic_template_data =
|
||||||
|
typeof validatedData.dynamicTemplateData === 'string'
|
||||||
|
? JSON.parse(validatedData.dynamicTemplateData)
|
||||||
|
: validatedData.dynamicTemplateData
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build mail body
|
||||||
|
const mailBody: Record<string, unknown> = {
|
||||||
|
personalizations: [personalizations],
|
||||||
|
from: {
|
||||||
|
email: validatedData.from,
|
||||||
|
...(validatedData.fromName && { name: validatedData.fromName }),
|
||||||
|
},
|
||||||
|
subject: validatedData.subject,
|
||||||
|
}
|
||||||
|
|
||||||
|
if (validatedData.templateId) {
|
||||||
|
mailBody.template_id = validatedData.templateId
|
||||||
|
} else {
|
||||||
|
mailBody.content = [
|
||||||
|
{
|
||||||
|
type: validatedData.contentType || 'text/plain',
|
||||||
|
value: validatedData.content,
|
||||||
|
},
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
if (validatedData.replyTo) {
|
||||||
|
mailBody.reply_to = {
|
||||||
|
email: validatedData.replyTo,
|
||||||
|
...(validatedData.replyToName && { name: validatedData.replyToName }),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Process attachments from UserFile objects
|
||||||
|
if (validatedData.attachments && validatedData.attachments.length > 0) {
|
||||||
|
const rawAttachments = validatedData.attachments
|
||||||
|
logger.info(`[${requestId}] Processing ${rawAttachments.length} attachment(s)`)
|
||||||
|
|
||||||
|
const userFiles = processFilesToUserFiles(rawAttachments, requestId, logger)
|
||||||
|
|
||||||
|
if (userFiles.length > 0) {
|
||||||
|
const sendGridAttachments = await Promise.all(
|
||||||
|
userFiles.map(async (file) => {
|
||||||
|
try {
|
||||||
|
logger.info(
|
||||||
|
`[${requestId}] Downloading attachment: ${file.name} (${file.size} bytes)`
|
||||||
|
)
|
||||||
|
const buffer = await downloadFileFromStorage(file, requestId, logger)
|
||||||
|
|
||||||
|
return {
|
||||||
|
content: buffer.toString('base64'),
|
||||||
|
filename: file.name,
|
||||||
|
type: file.type || 'application/octet-stream',
|
||||||
|
disposition: 'attachment',
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(`[${requestId}] Failed to download attachment ${file.name}:`, error)
|
||||||
|
throw new Error(
|
||||||
|
`Failed to download attachment "${file.name}": ${error instanceof Error ? error.message : 'Unknown error'}`
|
||||||
|
)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
)
|
||||||
|
|
||||||
|
mailBody.attachments = sendGridAttachments
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Send to SendGrid
|
||||||
|
const response = await fetch('https://api.sendgrid.com/v3/mail/send', {
|
||||||
|
method: 'POST',
|
||||||
|
headers: {
|
||||||
|
Authorization: `Bearer ${validatedData.apiKey}`,
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
},
|
||||||
|
body: JSON.stringify(mailBody),
|
||||||
|
})
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
const errorData = await response.json().catch(() => ({}))
|
||||||
|
const errorMessage =
|
||||||
|
errorData.errors?.[0]?.message || errorData.message || 'Failed to send email'
|
||||||
|
logger.error(`[${requestId}] SendGrid API error:`, { status: response.status, errorData })
|
||||||
|
return NextResponse.json({ success: false, error: errorMessage }, { status: response.status })
|
||||||
|
}
|
||||||
|
|
||||||
|
const messageId = response.headers.get('X-Message-Id')
|
||||||
|
logger.info(`[${requestId}] Email sent successfully`, { messageId })
|
||||||
|
|
||||||
|
return NextResponse.json({
|
||||||
|
success: true,
|
||||||
|
output: {
|
||||||
|
success: true,
|
||||||
|
messageId: messageId || undefined,
|
||||||
|
to: validatedData.to,
|
||||||
|
subject: validatedData.subject || '',
|
||||||
|
},
|
||||||
|
})
|
||||||
|
} catch (error) {
|
||||||
|
if (error instanceof z.ZodError) {
|
||||||
|
logger.warn(`[${requestId}] Validation error:`, error.errors)
|
||||||
|
return NextResponse.json(
|
||||||
|
{ success: false, error: error.errors[0]?.message || 'Validation failed' },
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.error(`[${requestId}] Unexpected error:`, error)
|
||||||
|
return NextResponse.json(
|
||||||
|
{ success: false, error: error instanceof Error ? error.message : 'Unknown error' },
|
||||||
|
{ status: 500 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -4,6 +4,7 @@ import { type NextRequest, NextResponse } from 'next/server'
|
|||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
|
import { getFileExtension, getMimeTypeFromExtension } from '@/lib/uploads/utils/file-utils'
|
||||||
import { createSftpConnection, getSftp, isPathSafe, sanitizePath } from '@/app/api/tools/sftp/utils'
|
import { createSftpConnection, getSftp, isPathSafe, sanitizePath } from '@/app/api/tools/sftp/utils'
|
||||||
|
|
||||||
export const dynamic = 'force-dynamic'
|
export const dynamic = 'force-dynamic'
|
||||||
@@ -111,6 +112,8 @@ export async function POST(request: NextRequest) {
|
|||||||
|
|
||||||
const buffer = Buffer.concat(chunks)
|
const buffer = Buffer.concat(chunks)
|
||||||
const fileName = path.basename(remotePath)
|
const fileName = path.basename(remotePath)
|
||||||
|
const extension = getFileExtension(fileName)
|
||||||
|
const mimeType = getMimeTypeFromExtension(extension)
|
||||||
|
|
||||||
let content: string
|
let content: string
|
||||||
if (params.encoding === 'base64') {
|
if (params.encoding === 'base64') {
|
||||||
@@ -124,6 +127,12 @@ export async function POST(request: NextRequest) {
|
|||||||
return NextResponse.json({
|
return NextResponse.json({
|
||||||
success: true,
|
success: true,
|
||||||
fileName,
|
fileName,
|
||||||
|
file: {
|
||||||
|
name: fileName,
|
||||||
|
mimeType,
|
||||||
|
data: buffer.toString('base64'),
|
||||||
|
size: buffer.length,
|
||||||
|
},
|
||||||
content,
|
content,
|
||||||
size: buffer.length,
|
size: buffer.length,
|
||||||
encoding: params.encoding,
|
encoding: params.encoding,
|
||||||
|
|||||||
@@ -3,6 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
|
|||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
|
import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
|
||||||
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
|
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
|
||||||
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
||||||
import {
|
import {
|
||||||
@@ -26,14 +27,7 @@ const UploadSchema = z.object({
|
|||||||
privateKey: z.string().nullish(),
|
privateKey: z.string().nullish(),
|
||||||
passphrase: z.string().nullish(),
|
passphrase: z.string().nullish(),
|
||||||
remotePath: z.string().min(1, 'Remote path is required'),
|
remotePath: z.string().min(1, 'Remote path is required'),
|
||||||
files: z
|
files: RawFileInputArraySchema.optional().nullable(),
|
||||||
.union([z.array(z.any()), z.string(), z.number(), z.null(), z.undefined()])
|
|
||||||
.transform((val) => {
|
|
||||||
if (Array.isArray(val)) return val
|
|
||||||
if (val === null || val === undefined || val === '') return undefined
|
|
||||||
return undefined
|
|
||||||
})
|
|
||||||
.nullish(),
|
|
||||||
fileContent: z.string().nullish(),
|
fileContent: z.string().nullish(),
|
||||||
fileName: z.string().nullish(),
|
fileName: z.string().nullish(),
|
||||||
overwrite: z.boolean().default(true),
|
overwrite: z.boolean().default(true),
|
||||||
|
|||||||
@@ -2,9 +2,12 @@ import { createLogger } from '@sim/logger'
|
|||||||
import { type NextRequest, NextResponse } from 'next/server'
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
|
import { secureFetchWithValidation } from '@/lib/core/security/input-validation.server'
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
|
import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
|
||||||
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
|
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
|
||||||
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
||||||
|
import type { MicrosoftGraphDriveItem } from '@/tools/onedrive/types'
|
||||||
|
|
||||||
export const dynamic = 'force-dynamic'
|
export const dynamic = 'force-dynamic'
|
||||||
|
|
||||||
@@ -16,7 +19,7 @@ const SharepointUploadSchema = z.object({
|
|||||||
driveId: z.string().optional().nullable(),
|
driveId: z.string().optional().nullable(),
|
||||||
folderPath: z.string().optional().nullable(),
|
folderPath: z.string().optional().nullable(),
|
||||||
fileName: z.string().optional().nullable(),
|
fileName: z.string().optional().nullable(),
|
||||||
files: z.array(z.any()).optional().nullable(),
|
files: RawFileInputArraySchema.optional().nullable(),
|
||||||
})
|
})
|
||||||
|
|
||||||
export async function POST(request: NextRequest) {
|
export async function POST(request: NextRequest) {
|
||||||
@@ -79,18 +82,23 @@ export async function POST(request: NextRequest) {
|
|||||||
let effectiveDriveId = validatedData.driveId
|
let effectiveDriveId = validatedData.driveId
|
||||||
if (!effectiveDriveId) {
|
if (!effectiveDriveId) {
|
||||||
logger.info(`[${requestId}] No driveId provided, fetching default drive for site`)
|
logger.info(`[${requestId}] No driveId provided, fetching default drive for site`)
|
||||||
const driveResponse = await fetch(
|
const driveUrl = `https://graph.microsoft.com/v1.0/sites/${validatedData.siteId}/drive`
|
||||||
`https://graph.microsoft.com/v1.0/sites/${validatedData.siteId}/drive`,
|
const driveResponse = await secureFetchWithValidation(
|
||||||
|
driveUrl,
|
||||||
{
|
{
|
||||||
|
method: 'GET',
|
||||||
headers: {
|
headers: {
|
||||||
Authorization: `Bearer ${validatedData.accessToken}`,
|
Authorization: `Bearer ${validatedData.accessToken}`,
|
||||||
Accept: 'application/json',
|
Accept: 'application/json',
|
||||||
},
|
},
|
||||||
}
|
},
|
||||||
|
'driveUrl'
|
||||||
)
|
)
|
||||||
|
|
||||||
if (!driveResponse.ok) {
|
if (!driveResponse.ok) {
|
||||||
const errorData = await driveResponse.json().catch(() => ({}))
|
const errorData = (await driveResponse.json().catch(() => ({}))) as {
|
||||||
|
error?: { message?: string }
|
||||||
|
}
|
||||||
logger.error(`[${requestId}] Failed to get default drive:`, errorData)
|
logger.error(`[${requestId}] Failed to get default drive:`, errorData)
|
||||||
return NextResponse.json(
|
return NextResponse.json(
|
||||||
{
|
{
|
||||||
@@ -101,7 +109,7 @@ export async function POST(request: NextRequest) {
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
const driveData = await driveResponse.json()
|
const driveData = (await driveResponse.json()) as { id: string }
|
||||||
effectiveDriveId = driveData.id
|
effectiveDriveId = driveData.id
|
||||||
logger.info(`[${requestId}] Using default drive: ${effectiveDriveId}`)
|
logger.info(`[${requestId}] Using default drive: ${effectiveDriveId}`)
|
||||||
}
|
}
|
||||||
@@ -145,34 +153,87 @@ export async function POST(request: NextRequest) {
|
|||||||
|
|
||||||
logger.info(`[${requestId}] Uploading to: ${uploadUrl}`)
|
logger.info(`[${requestId}] Uploading to: ${uploadUrl}`)
|
||||||
|
|
||||||
const uploadResponse = await fetch(uploadUrl, {
|
const uploadResponse = await secureFetchWithValidation(
|
||||||
method: 'PUT',
|
uploadUrl,
|
||||||
headers: {
|
{
|
||||||
Authorization: `Bearer ${validatedData.accessToken}`,
|
method: 'PUT',
|
||||||
'Content-Type': userFile.type || 'application/octet-stream',
|
headers: {
|
||||||
|
Authorization: `Bearer ${validatedData.accessToken}`,
|
||||||
|
'Content-Type': userFile.type || 'application/octet-stream',
|
||||||
|
},
|
||||||
|
body: buffer,
|
||||||
},
|
},
|
||||||
body: new Uint8Array(buffer),
|
'uploadUrl'
|
||||||
})
|
)
|
||||||
|
|
||||||
if (!uploadResponse.ok) {
|
if (!uploadResponse.ok) {
|
||||||
const errorData = await uploadResponse.json().catch(() => ({}))
|
const errorData = await uploadResponse.json().catch(() => ({}))
|
||||||
logger.error(`[${requestId}] Failed to upload file ${fileName}:`, errorData)
|
logger.error(`[${requestId}] Failed to upload file ${fileName}:`, errorData)
|
||||||
|
|
||||||
if (uploadResponse.status === 409) {
|
if (uploadResponse.status === 409) {
|
||||||
logger.warn(`[${requestId}] File ${fileName} already exists, attempting to replace`)
|
// File exists - retry with conflict behavior set to replace
|
||||||
|
logger.warn(`[${requestId}] File ${fileName} already exists, retrying with replace`)
|
||||||
|
const replaceUrl = `${uploadUrl}?@microsoft.graph.conflictBehavior=replace`
|
||||||
|
const replaceResponse = await secureFetchWithValidation(
|
||||||
|
replaceUrl,
|
||||||
|
{
|
||||||
|
method: 'PUT',
|
||||||
|
headers: {
|
||||||
|
Authorization: `Bearer ${validatedData.accessToken}`,
|
||||||
|
'Content-Type': userFile.type || 'application/octet-stream',
|
||||||
|
},
|
||||||
|
body: buffer,
|
||||||
|
},
|
||||||
|
'replaceUrl'
|
||||||
|
)
|
||||||
|
|
||||||
|
if (!replaceResponse.ok) {
|
||||||
|
const replaceErrorData = (await replaceResponse.json().catch(() => ({}))) as {
|
||||||
|
error?: { message?: string }
|
||||||
|
}
|
||||||
|
logger.error(`[${requestId}] Failed to replace file ${fileName}:`, replaceErrorData)
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: replaceErrorData.error?.message || `Failed to replace file: ${fileName}`,
|
||||||
|
},
|
||||||
|
{ status: replaceResponse.status }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const replaceData = (await replaceResponse.json()) as {
|
||||||
|
id: string
|
||||||
|
name: string
|
||||||
|
webUrl: string
|
||||||
|
size: number
|
||||||
|
createdDateTime: string
|
||||||
|
lastModifiedDateTime: string
|
||||||
|
}
|
||||||
|
logger.info(`[${requestId}] File replaced successfully: ${fileName}`)
|
||||||
|
|
||||||
|
uploadedFiles.push({
|
||||||
|
id: replaceData.id,
|
||||||
|
name: replaceData.name,
|
||||||
|
webUrl: replaceData.webUrl,
|
||||||
|
size: replaceData.size,
|
||||||
|
createdDateTime: replaceData.createdDateTime,
|
||||||
|
lastModifiedDateTime: replaceData.lastModifiedDateTime,
|
||||||
|
})
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
return NextResponse.json(
|
return NextResponse.json(
|
||||||
{
|
{
|
||||||
success: false,
|
success: false,
|
||||||
error: errorData.error?.message || `Failed to upload file: ${fileName}`,
|
error:
|
||||||
|
(errorData as { error?: { message?: string } }).error?.message ||
|
||||||
|
`Failed to upload file: ${fileName}`,
|
||||||
},
|
},
|
||||||
{ status: uploadResponse.status }
|
{ status: uploadResponse.status }
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
const uploadData = await uploadResponse.json()
|
const uploadData = (await uploadResponse.json()) as MicrosoftGraphDriveItem
|
||||||
logger.info(`[${requestId}] File uploaded successfully: ${fileName}`)
|
logger.info(`[${requestId}] File uploaded successfully: ${fileName}`)
|
||||||
|
|
||||||
uploadedFiles.push({
|
uploadedFiles.push({
|
||||||
|
|||||||
170
apps/sim/app/api/tools/slack/download/route.ts
Normal file
170
apps/sim/app/api/tools/slack/download/route.ts
Normal file
@@ -0,0 +1,170 @@
|
|||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
|
import { z } from 'zod'
|
||||||
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
|
import {
|
||||||
|
secureFetchWithPinnedIP,
|
||||||
|
validateUrlWithDNS,
|
||||||
|
} from '@/lib/core/security/input-validation.server'
|
||||||
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
|
|
||||||
|
export const dynamic = 'force-dynamic'
|
||||||
|
|
||||||
|
const logger = createLogger('SlackDownloadAPI')
|
||||||
|
|
||||||
|
const SlackDownloadSchema = z.object({
|
||||||
|
accessToken: z.string().min(1, 'Access token is required'),
|
||||||
|
fileId: z.string().min(1, 'File ID is required'),
|
||||||
|
fileName: z.string().optional().nullable(),
|
||||||
|
})
|
||||||
|
|
||||||
|
export async function POST(request: NextRequest) {
|
||||||
|
const requestId = generateRequestId()
|
||||||
|
|
||||||
|
try {
|
||||||
|
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
|
||||||
|
|
||||||
|
if (!authResult.success) {
|
||||||
|
logger.warn(`[${requestId}] Unauthorized Slack download attempt: ${authResult.error}`)
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: authResult.error || 'Authentication required',
|
||||||
|
},
|
||||||
|
{ status: 401 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info(`[${requestId}] Authenticated Slack download request via ${authResult.authType}`, {
|
||||||
|
userId: authResult.userId,
|
||||||
|
})
|
||||||
|
|
||||||
|
const body = await request.json()
|
||||||
|
const validatedData = SlackDownloadSchema.parse(body)
|
||||||
|
|
||||||
|
const { accessToken, fileId, fileName } = validatedData
|
||||||
|
|
||||||
|
logger.info(`[${requestId}] Getting file info from Slack`, { fileId })
|
||||||
|
|
||||||
|
const infoResponse = await fetch(`https://slack.com/api/files.info?file=${fileId}`, {
|
||||||
|
method: 'GET',
|
||||||
|
headers: {
|
||||||
|
Authorization: `Bearer ${accessToken}`,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
if (!infoResponse.ok) {
|
||||||
|
const errorDetails = await infoResponse.json().catch(() => ({}))
|
||||||
|
logger.error(`[${requestId}] Failed to get file info from Slack`, {
|
||||||
|
status: infoResponse.status,
|
||||||
|
statusText: infoResponse.statusText,
|
||||||
|
error: errorDetails,
|
||||||
|
})
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: errorDetails.error || 'Failed to get file info',
|
||||||
|
},
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const data = await infoResponse.json()
|
||||||
|
|
||||||
|
if (!data.ok) {
|
||||||
|
logger.error(`[${requestId}] Slack API returned error`, { error: data.error })
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: data.error || 'Slack API error',
|
||||||
|
},
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const file = data.file
|
||||||
|
const resolvedFileName = fileName || file.name || 'download'
|
||||||
|
const mimeType = file.mimetype || 'application/octet-stream'
|
||||||
|
const urlPrivate = file.url_private
|
||||||
|
|
||||||
|
if (!urlPrivate) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: 'File does not have a download URL',
|
||||||
|
},
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const urlValidation = await validateUrlWithDNS(urlPrivate, 'urlPrivate')
|
||||||
|
if (!urlValidation.isValid) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: urlValidation.error,
|
||||||
|
},
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info(`[${requestId}] Downloading file from Slack`, {
|
||||||
|
fileId,
|
||||||
|
fileName: resolvedFileName,
|
||||||
|
mimeType,
|
||||||
|
})
|
||||||
|
|
||||||
|
const downloadResponse = await secureFetchWithPinnedIP(urlPrivate, urlValidation.resolvedIP!, {
|
||||||
|
headers: {
|
||||||
|
Authorization: `Bearer ${accessToken}`,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
if (!downloadResponse.ok) {
|
||||||
|
logger.error(`[${requestId}] Failed to download file content`, {
|
||||||
|
status: downloadResponse.status,
|
||||||
|
statusText: downloadResponse.statusText,
|
||||||
|
})
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: 'Failed to download file content',
|
||||||
|
},
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const arrayBuffer = await downloadResponse.arrayBuffer()
|
||||||
|
const fileBuffer = Buffer.from(arrayBuffer)
|
||||||
|
|
||||||
|
logger.info(`[${requestId}] File downloaded successfully`, {
|
||||||
|
fileId,
|
||||||
|
name: resolvedFileName,
|
||||||
|
size: fileBuffer.length,
|
||||||
|
mimeType,
|
||||||
|
})
|
||||||
|
|
||||||
|
const base64Data = fileBuffer.toString('base64')
|
||||||
|
|
||||||
|
return NextResponse.json({
|
||||||
|
success: true,
|
||||||
|
output: {
|
||||||
|
file: {
|
||||||
|
name: resolvedFileName,
|
||||||
|
mimeType,
|
||||||
|
data: base64Data,
|
||||||
|
size: fileBuffer.length,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
})
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(`[${requestId}] Error downloading Slack file:`, error)
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: error instanceof Error ? error.message : 'Unknown error occurred',
|
||||||
|
},
|
||||||
|
{ status: 500 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -3,6 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
|
|||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
|
import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
|
||||||
import { sendSlackMessage } from '../utils'
|
import { sendSlackMessage } from '../utils'
|
||||||
|
|
||||||
export const dynamic = 'force-dynamic'
|
export const dynamic = 'force-dynamic'
|
||||||
@@ -16,7 +17,7 @@ const SlackSendMessageSchema = z
|
|||||||
userId: z.string().optional().nullable(),
|
userId: z.string().optional().nullable(),
|
||||||
text: z.string().min(1, 'Message text is required'),
|
text: z.string().min(1, 'Message text is required'),
|
||||||
thread_ts: z.string().optional().nullable(),
|
thread_ts: z.string().optional().nullable(),
|
||||||
files: z.array(z.any()).optional().nullable(),
|
files: RawFileInputArraySchema.optional().nullable(),
|
||||||
})
|
})
|
||||||
.refine((data) => data.channel || data.userId, {
|
.refine((data) => data.channel || data.userId, {
|
||||||
message: 'Either channel or userId is required',
|
message: 'Either channel or userId is required',
|
||||||
|
|||||||
@@ -1,6 +1,8 @@
|
|||||||
import type { Logger } from '@sim/logger'
|
import type { Logger } from '@sim/logger'
|
||||||
|
import { secureFetchWithValidation } from '@/lib/core/security/input-validation.server'
|
||||||
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
|
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
|
||||||
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
||||||
|
import type { ToolFileData } from '@/tools/types'
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Sends a message to a Slack channel using chat.postMessage
|
* Sends a message to a Slack channel using chat.postMessage
|
||||||
@@ -70,9 +72,10 @@ export async function uploadFilesToSlack(
|
|||||||
accessToken: string,
|
accessToken: string,
|
||||||
requestId: string,
|
requestId: string,
|
||||||
logger: Logger
|
logger: Logger
|
||||||
): Promise<string[]> {
|
): Promise<{ fileIds: string[]; files: ToolFileData[] }> {
|
||||||
const userFiles = processFilesToUserFiles(files, requestId, logger)
|
const userFiles = processFilesToUserFiles(files, requestId, logger)
|
||||||
const uploadedFileIds: string[] = []
|
const uploadedFileIds: string[] = []
|
||||||
|
const uploadedFiles: ToolFileData[] = []
|
||||||
|
|
||||||
for (const userFile of userFiles) {
|
for (const userFile of userFiles) {
|
||||||
logger.info(`[${requestId}] Uploading file: ${userFile.name}`)
|
logger.info(`[${requestId}] Uploading file: ${userFile.name}`)
|
||||||
@@ -100,10 +103,14 @@ export async function uploadFilesToSlack(
|
|||||||
|
|
||||||
logger.info(`[${requestId}] Got upload URL for ${userFile.name}, file_id: ${urlData.file_id}`)
|
logger.info(`[${requestId}] Got upload URL for ${userFile.name}, file_id: ${urlData.file_id}`)
|
||||||
|
|
||||||
const uploadResponse = await fetch(urlData.upload_url, {
|
const uploadResponse = await secureFetchWithValidation(
|
||||||
method: 'POST',
|
urlData.upload_url,
|
||||||
body: new Uint8Array(buffer),
|
{
|
||||||
})
|
method: 'POST',
|
||||||
|
body: buffer,
|
||||||
|
},
|
||||||
|
'uploadUrl'
|
||||||
|
)
|
||||||
|
|
||||||
if (!uploadResponse.ok) {
|
if (!uploadResponse.ok) {
|
||||||
logger.error(`[${requestId}] Failed to upload file data: ${uploadResponse.status}`)
|
logger.error(`[${requestId}] Failed to upload file data: ${uploadResponse.status}`)
|
||||||
@@ -112,9 +119,16 @@ export async function uploadFilesToSlack(
|
|||||||
|
|
||||||
logger.info(`[${requestId}] File data uploaded successfully`)
|
logger.info(`[${requestId}] File data uploaded successfully`)
|
||||||
uploadedFileIds.push(urlData.file_id)
|
uploadedFileIds.push(urlData.file_id)
|
||||||
|
// Only add to uploadedFiles after successful upload to keep arrays in sync
|
||||||
|
uploadedFiles.push({
|
||||||
|
name: userFile.name,
|
||||||
|
mimeType: userFile.type || 'application/octet-stream',
|
||||||
|
data: buffer.toString('base64'),
|
||||||
|
size: buffer.length,
|
||||||
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
return uploadedFileIds
|
return { fileIds: uploadedFileIds, files: uploadedFiles }
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -124,7 +138,8 @@ export async function completeSlackFileUpload(
|
|||||||
uploadedFileIds: string[],
|
uploadedFileIds: string[],
|
||||||
channel: string,
|
channel: string,
|
||||||
text: string,
|
text: string,
|
||||||
accessToken: string
|
accessToken: string,
|
||||||
|
threadTs?: string | null
|
||||||
): Promise<{ ok: boolean; files?: any[]; error?: string }> {
|
): Promise<{ ok: boolean; files?: any[]; error?: string }> {
|
||||||
const response = await fetch('https://slack.com/api/files.completeUploadExternal', {
|
const response = await fetch('https://slack.com/api/files.completeUploadExternal', {
|
||||||
method: 'POST',
|
method: 'POST',
|
||||||
@@ -136,6 +151,7 @@ export async function completeSlackFileUpload(
|
|||||||
files: uploadedFileIds.map((id) => ({ id })),
|
files: uploadedFileIds.map((id) => ({ id })),
|
||||||
channel_id: channel,
|
channel_id: channel,
|
||||||
initial_comment: text,
|
initial_comment: text,
|
||||||
|
...(threadTs && { thread_ts: threadTs }),
|
||||||
}),
|
}),
|
||||||
})
|
})
|
||||||
|
|
||||||
@@ -217,7 +233,13 @@ export async function sendSlackMessage(
|
|||||||
logger: Logger
|
logger: Logger
|
||||||
): Promise<{
|
): Promise<{
|
||||||
success: boolean
|
success: boolean
|
||||||
output?: { message: any; ts: string; channel: string; fileCount?: number }
|
output?: {
|
||||||
|
message: any
|
||||||
|
ts: string
|
||||||
|
channel: string
|
||||||
|
fileCount?: number
|
||||||
|
files?: ToolFileData[]
|
||||||
|
}
|
||||||
error?: string
|
error?: string
|
||||||
}> {
|
}> {
|
||||||
const { accessToken, text, threadTs, files } = params
|
const { accessToken, text, threadTs, files } = params
|
||||||
@@ -249,10 +271,15 @@ export async function sendSlackMessage(
|
|||||||
|
|
||||||
// Process files
|
// Process files
|
||||||
logger.info(`[${requestId}] Processing ${files.length} file(s)`)
|
logger.info(`[${requestId}] Processing ${files.length} file(s)`)
|
||||||
const uploadedFileIds = await uploadFilesToSlack(files, accessToken, requestId, logger)
|
const { fileIds, files: uploadedFiles } = await uploadFilesToSlack(
|
||||||
|
files,
|
||||||
|
accessToken,
|
||||||
|
requestId,
|
||||||
|
logger
|
||||||
|
)
|
||||||
|
|
||||||
// No valid files uploaded - send text-only
|
// No valid files uploaded - send text-only
|
||||||
if (uploadedFileIds.length === 0) {
|
if (fileIds.length === 0) {
|
||||||
logger.warn(`[${requestId}] No valid files to upload, sending text-only message`)
|
logger.warn(`[${requestId}] No valid files to upload, sending text-only message`)
|
||||||
|
|
||||||
const data = await postSlackMessage(accessToken, channel, text, threadTs)
|
const data = await postSlackMessage(accessToken, channel, text, threadTs)
|
||||||
@@ -264,8 +291,8 @@ export async function sendSlackMessage(
|
|||||||
return { success: true, output: formatMessageSuccessResponse(data, text) }
|
return { success: true, output: formatMessageSuccessResponse(data, text) }
|
||||||
}
|
}
|
||||||
|
|
||||||
// Complete file upload
|
// Complete file upload with thread support
|
||||||
const completeData = await completeSlackFileUpload(uploadedFileIds, channel, text, accessToken)
|
const completeData = await completeSlackFileUpload(fileIds, channel, text, accessToken, threadTs)
|
||||||
|
|
||||||
if (!completeData.ok) {
|
if (!completeData.ok) {
|
||||||
logger.error(`[${requestId}] Failed to complete upload:`, completeData.error)
|
logger.error(`[${requestId}] Failed to complete upload:`, completeData.error)
|
||||||
@@ -282,7 +309,8 @@ export async function sendSlackMessage(
|
|||||||
message: fileMessage,
|
message: fileMessage,
|
||||||
ts: fileMessage.ts,
|
ts: fileMessage.ts,
|
||||||
channel,
|
channel,
|
||||||
fileCount: uploadedFileIds.length,
|
fileCount: fileIds.length,
|
||||||
|
files: uploadedFiles,
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -4,6 +4,7 @@ import nodemailer from 'nodemailer'
|
|||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
|
import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
|
||||||
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
|
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
|
||||||
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
||||||
|
|
||||||
@@ -28,7 +29,7 @@ const SmtpSendSchema = z.object({
|
|||||||
cc: z.string().optional().nullable(),
|
cc: z.string().optional().nullable(),
|
||||||
bcc: z.string().optional().nullable(),
|
bcc: z.string().optional().nullable(),
|
||||||
replyTo: z.string().optional().nullable(),
|
replyTo: z.string().optional().nullable(),
|
||||||
attachments: z.array(z.any()).optional().nullable(),
|
attachments: RawFileInputArraySchema.optional().nullable(),
|
||||||
})
|
})
|
||||||
|
|
||||||
export async function POST(request: NextRequest) {
|
export async function POST(request: NextRequest) {
|
||||||
|
|||||||
@@ -5,6 +5,7 @@ import { type NextRequest, NextResponse } from 'next/server'
|
|||||||
import type { Client, SFTPWrapper } from 'ssh2'
|
import type { Client, SFTPWrapper } from 'ssh2'
|
||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
|
import { getFileExtension, getMimeTypeFromExtension } from '@/lib/uploads/utils/file-utils'
|
||||||
import { createSSHConnection, sanitizePath } from '@/app/api/tools/ssh/utils'
|
import { createSSHConnection, sanitizePath } from '@/app/api/tools/ssh/utils'
|
||||||
|
|
||||||
const logger = createLogger('SSHDownloadFileAPI')
|
const logger = createLogger('SSHDownloadFileAPI')
|
||||||
@@ -79,6 +80,16 @@ export async function POST(request: NextRequest) {
|
|||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
|
// Check file size limit (50MB to prevent memory exhaustion)
|
||||||
|
const maxSize = 50 * 1024 * 1024
|
||||||
|
if (stats.size > maxSize) {
|
||||||
|
const sizeMB = (stats.size / (1024 * 1024)).toFixed(2)
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: `File size (${sizeMB}MB) exceeds download limit of 50MB` },
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
// Read file content
|
// Read file content
|
||||||
const content = await new Promise<Buffer>((resolve, reject) => {
|
const content = await new Promise<Buffer>((resolve, reject) => {
|
||||||
const chunks: Buffer[] = []
|
const chunks: Buffer[] = []
|
||||||
@@ -96,6 +107,8 @@ export async function POST(request: NextRequest) {
|
|||||||
})
|
})
|
||||||
|
|
||||||
const fileName = path.basename(remotePath)
|
const fileName = path.basename(remotePath)
|
||||||
|
const extension = getFileExtension(fileName)
|
||||||
|
const mimeType = getMimeTypeFromExtension(extension)
|
||||||
|
|
||||||
// Encode content as base64 for binary safety
|
// Encode content as base64 for binary safety
|
||||||
const base64Content = content.toString('base64')
|
const base64Content = content.toString('base64')
|
||||||
@@ -104,6 +117,12 @@ export async function POST(request: NextRequest) {
|
|||||||
|
|
||||||
return NextResponse.json({
|
return NextResponse.json({
|
||||||
downloaded: true,
|
downloaded: true,
|
||||||
|
file: {
|
||||||
|
name: fileName,
|
||||||
|
mimeType,
|
||||||
|
data: base64Content,
|
||||||
|
size: stats.size,
|
||||||
|
},
|
||||||
content: base64Content,
|
content: base64Content,
|
||||||
fileName: fileName,
|
fileName: fileName,
|
||||||
remotePath: remotePath,
|
remotePath: remotePath,
|
||||||
|
|||||||
@@ -3,6 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
|
|||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
import { env } from '@/lib/core/config/env'
|
import { env } from '@/lib/core/config/env'
|
||||||
|
import { validateUrlWithDNS } from '@/lib/core/security/input-validation.server'
|
||||||
import { isSensitiveKey, REDACTED_MARKER } from '@/lib/core/security/redaction'
|
import { isSensitiveKey, REDACTED_MARKER } from '@/lib/core/security/redaction'
|
||||||
import { ensureZodObject, normalizeUrl } from '@/app/api/tools/stagehand/utils'
|
import { ensureZodObject, normalizeUrl } from '@/app/api/tools/stagehand/utils'
|
||||||
|
|
||||||
@@ -123,6 +124,10 @@ export async function POST(request: NextRequest) {
|
|||||||
const variablesObject = processVariables(params.variables)
|
const variablesObject = processVariables(params.variables)
|
||||||
|
|
||||||
const startUrl = normalizeUrl(rawStartUrl)
|
const startUrl = normalizeUrl(rawStartUrl)
|
||||||
|
const urlValidation = await validateUrlWithDNS(startUrl, 'startUrl')
|
||||||
|
if (!urlValidation.isValid) {
|
||||||
|
return NextResponse.json({ error: urlValidation.error }, { status: 400 })
|
||||||
|
}
|
||||||
|
|
||||||
logger.info('Starting Stagehand agent process', {
|
logger.info('Starting Stagehand agent process', {
|
||||||
rawStartUrl,
|
rawStartUrl,
|
||||||
|
|||||||
@@ -3,6 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
|
|||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
import { env } from '@/lib/core/config/env'
|
import { env } from '@/lib/core/config/env'
|
||||||
|
import { validateUrlWithDNS } from '@/lib/core/security/input-validation.server'
|
||||||
import { ensureZodObject, normalizeUrl } from '@/app/api/tools/stagehand/utils'
|
import { ensureZodObject, normalizeUrl } from '@/app/api/tools/stagehand/utils'
|
||||||
|
|
||||||
const logger = createLogger('StagehandExtractAPI')
|
const logger = createLogger('StagehandExtractAPI')
|
||||||
@@ -51,6 +52,10 @@ export async function POST(request: NextRequest) {
|
|||||||
const params = validationResult.data
|
const params = validationResult.data
|
||||||
const { url: rawUrl, instruction, selector, provider, apiKey, schema } = params
|
const { url: rawUrl, instruction, selector, provider, apiKey, schema } = params
|
||||||
const url = normalizeUrl(rawUrl)
|
const url = normalizeUrl(rawUrl)
|
||||||
|
const urlValidation = await validateUrlWithDNS(url, 'url')
|
||||||
|
if (!urlValidation.isValid) {
|
||||||
|
return NextResponse.json({ error: urlValidation.error }, { status: 400 })
|
||||||
|
}
|
||||||
|
|
||||||
logger.info('Starting Stagehand extraction process', {
|
logger.info('Starting Stagehand extraction process', {
|
||||||
rawUrl,
|
rawUrl,
|
||||||
|
|||||||
@@ -2,7 +2,16 @@ import { createLogger } from '@sim/logger'
|
|||||||
import { type NextRequest, NextResponse } from 'next/server'
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
import { extractAudioFromVideo, isVideoFile } from '@/lib/audio/extractor'
|
import { extractAudioFromVideo, isVideoFile } from '@/lib/audio/extractor'
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
import { DEFAULT_EXECUTION_TIMEOUT_MS } from '@/lib/core/execution-limits'
|
||||||
|
import {
|
||||||
|
secureFetchWithPinnedIP,
|
||||||
|
validateUrlWithDNS,
|
||||||
|
} from '@/lib/core/security/input-validation.server'
|
||||||
|
import { getMimeTypeFromExtension, isInternalFileUrl } from '@/lib/uploads/utils/file-utils'
|
||||||
|
import {
|
||||||
|
downloadFileFromStorage,
|
||||||
|
resolveInternalFileUrl,
|
||||||
|
} from '@/lib/uploads/utils/file-utils.server'
|
||||||
import type { UserFile } from '@/executor/types'
|
import type { UserFile } from '@/executor/types'
|
||||||
import type { TranscriptSegment } from '@/tools/stt/types'
|
import type { TranscriptSegment } from '@/tools/stt/types'
|
||||||
|
|
||||||
@@ -45,6 +54,7 @@ export async function POST(request: NextRequest) {
|
|||||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const userId = authResult.userId
|
||||||
const body: SttRequestBody = await request.json()
|
const body: SttRequestBody = await request.json()
|
||||||
const {
|
const {
|
||||||
provider,
|
provider,
|
||||||
@@ -72,13 +82,25 @@ export async function POST(request: NextRequest) {
|
|||||||
let audioMimeType: string
|
let audioMimeType: string
|
||||||
|
|
||||||
if (body.audioFile) {
|
if (body.audioFile) {
|
||||||
|
if (Array.isArray(body.audioFile) && body.audioFile.length !== 1) {
|
||||||
|
return NextResponse.json({ error: 'audioFile must be a single file' }, { status: 400 })
|
||||||
|
}
|
||||||
const file = Array.isArray(body.audioFile) ? body.audioFile[0] : body.audioFile
|
const file = Array.isArray(body.audioFile) ? body.audioFile[0] : body.audioFile
|
||||||
logger.info(`[${requestId}] Processing uploaded file: ${file.name}`)
|
logger.info(`[${requestId}] Processing uploaded file: ${file.name}`)
|
||||||
|
|
||||||
audioBuffer = await downloadFileFromStorage(file, requestId, logger)
|
audioBuffer = await downloadFileFromStorage(file, requestId, logger)
|
||||||
audioFileName = file.name
|
audioFileName = file.name
|
||||||
audioMimeType = file.type
|
// file.type may be missing if the file came from a block that doesn't preserve it
|
||||||
|
// Infer from filename extension as fallback
|
||||||
|
const ext = file.name.split('.').pop()?.toLowerCase() || ''
|
||||||
|
audioMimeType = file.type || getMimeTypeFromExtension(ext)
|
||||||
} else if (body.audioFileReference) {
|
} else if (body.audioFileReference) {
|
||||||
|
if (Array.isArray(body.audioFileReference) && body.audioFileReference.length !== 1) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: 'audioFileReference must be a single file' },
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
const file = Array.isArray(body.audioFileReference)
|
const file = Array.isArray(body.audioFileReference)
|
||||||
? body.audioFileReference[0]
|
? body.audioFileReference[0]
|
||||||
: body.audioFileReference
|
: body.audioFileReference
|
||||||
@@ -86,18 +108,54 @@ export async function POST(request: NextRequest) {
|
|||||||
|
|
||||||
audioBuffer = await downloadFileFromStorage(file, requestId, logger)
|
audioBuffer = await downloadFileFromStorage(file, requestId, logger)
|
||||||
audioFileName = file.name
|
audioFileName = file.name
|
||||||
audioMimeType = file.type
|
|
||||||
|
const ext = file.name.split('.').pop()?.toLowerCase() || ''
|
||||||
|
audioMimeType = file.type || getMimeTypeFromExtension(ext)
|
||||||
} else if (body.audioUrl) {
|
} else if (body.audioUrl) {
|
||||||
logger.info(`[${requestId}] Downloading from URL: ${body.audioUrl}`)
|
logger.info(`[${requestId}] Downloading from URL: ${body.audioUrl}`)
|
||||||
|
|
||||||
const response = await fetch(body.audioUrl)
|
let audioUrl = body.audioUrl.trim()
|
||||||
|
if (audioUrl.startsWith('/') && !isInternalFileUrl(audioUrl)) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
error: 'Invalid file path. Only uploaded files are supported for internal paths.',
|
||||||
|
},
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (isInternalFileUrl(audioUrl)) {
|
||||||
|
if (!userId) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: 'Authentication required for internal file access' },
|
||||||
|
{ status: 401 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
const resolution = await resolveInternalFileUrl(audioUrl, userId, requestId, logger)
|
||||||
|
if (resolution.error) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: resolution.error.message },
|
||||||
|
{ status: resolution.error.status }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
audioUrl = resolution.fileUrl || audioUrl
|
||||||
|
}
|
||||||
|
|
||||||
|
const urlValidation = await validateUrlWithDNS(audioUrl, 'audioUrl')
|
||||||
|
if (!urlValidation.isValid) {
|
||||||
|
return NextResponse.json({ error: urlValidation.error }, { status: 400 })
|
||||||
|
}
|
||||||
|
|
||||||
|
const response = await secureFetchWithPinnedIP(audioUrl, urlValidation.resolvedIP!, {
|
||||||
|
method: 'GET',
|
||||||
|
})
|
||||||
if (!response.ok) {
|
if (!response.ok) {
|
||||||
throw new Error(`Failed to download audio from URL: ${response.statusText}`)
|
throw new Error(`Failed to download audio from URL: ${response.statusText}`)
|
||||||
}
|
}
|
||||||
|
|
||||||
const arrayBuffer = await response.arrayBuffer()
|
const arrayBuffer = await response.arrayBuffer()
|
||||||
audioBuffer = Buffer.from(arrayBuffer)
|
audioBuffer = Buffer.from(arrayBuffer)
|
||||||
audioFileName = body.audioUrl.split('/').pop() || 'audio_file'
|
audioFileName = audioUrl.split('/').pop() || 'audio_file'
|
||||||
audioMimeType = response.headers.get('content-type') || 'audio/mpeg'
|
audioMimeType = response.headers.get('content-type') || 'audio/mpeg'
|
||||||
} else {
|
} else {
|
||||||
return NextResponse.json(
|
return NextResponse.json(
|
||||||
@@ -149,7 +207,9 @@ export async function POST(request: NextRequest) {
|
|||||||
translateToEnglish,
|
translateToEnglish,
|
||||||
model,
|
model,
|
||||||
body.prompt,
|
body.prompt,
|
||||||
body.temperature
|
body.temperature,
|
||||||
|
audioMimeType,
|
||||||
|
audioFileName
|
||||||
)
|
)
|
||||||
transcript = result.transcript
|
transcript = result.transcript
|
||||||
segments = result.segments
|
segments = result.segments
|
||||||
@@ -162,7 +222,8 @@ export async function POST(request: NextRequest) {
|
|||||||
language,
|
language,
|
||||||
timestamps,
|
timestamps,
|
||||||
diarization,
|
diarization,
|
||||||
model
|
model,
|
||||||
|
audioMimeType
|
||||||
)
|
)
|
||||||
transcript = result.transcript
|
transcript = result.transcript
|
||||||
segments = result.segments
|
segments = result.segments
|
||||||
@@ -252,7 +313,9 @@ async function transcribeWithWhisper(
|
|||||||
translate?: boolean,
|
translate?: boolean,
|
||||||
model?: string,
|
model?: string,
|
||||||
prompt?: string,
|
prompt?: string,
|
||||||
temperature?: number
|
temperature?: number,
|
||||||
|
mimeType?: string,
|
||||||
|
fileName?: string
|
||||||
): Promise<{
|
): Promise<{
|
||||||
transcript: string
|
transcript: string
|
||||||
segments?: TranscriptSegment[]
|
segments?: TranscriptSegment[]
|
||||||
@@ -261,8 +324,11 @@ async function transcribeWithWhisper(
|
|||||||
}> {
|
}> {
|
||||||
const formData = new FormData()
|
const formData = new FormData()
|
||||||
|
|
||||||
const blob = new Blob([new Uint8Array(audioBuffer)], { type: 'audio/mpeg' })
|
// Use actual MIME type and filename if provided
|
||||||
formData.append('file', blob, 'audio.mp3')
|
const actualMimeType = mimeType || 'audio/mpeg'
|
||||||
|
const actualFileName = fileName || 'audio.mp3'
|
||||||
|
const blob = new Blob([new Uint8Array(audioBuffer)], { type: actualMimeType })
|
||||||
|
formData.append('file', blob, actualFileName)
|
||||||
formData.append('model', model || 'whisper-1')
|
formData.append('model', model || 'whisper-1')
|
||||||
|
|
||||||
if (language && language !== 'auto') {
|
if (language && language !== 'auto') {
|
||||||
@@ -279,10 +345,11 @@ async function transcribeWithWhisper(
|
|||||||
|
|
||||||
formData.append('response_format', 'verbose_json')
|
formData.append('response_format', 'verbose_json')
|
||||||
|
|
||||||
|
// OpenAI API uses array notation for timestamp_granularities
|
||||||
if (timestamps === 'word') {
|
if (timestamps === 'word') {
|
||||||
formData.append('timestamp_granularities', 'word')
|
formData.append('timestamp_granularities[]', 'word')
|
||||||
} else if (timestamps === 'sentence') {
|
} else if (timestamps === 'sentence') {
|
||||||
formData.append('timestamp_granularities', 'segment')
|
formData.append('timestamp_granularities[]', 'segment')
|
||||||
}
|
}
|
||||||
|
|
||||||
const endpoint = translate ? 'translations' : 'transcriptions'
|
const endpoint = translate ? 'translations' : 'transcriptions'
|
||||||
@@ -325,7 +392,8 @@ async function transcribeWithDeepgram(
|
|||||||
language?: string,
|
language?: string,
|
||||||
timestamps?: 'none' | 'sentence' | 'word',
|
timestamps?: 'none' | 'sentence' | 'word',
|
||||||
diarization?: boolean,
|
diarization?: boolean,
|
||||||
model?: string
|
model?: string,
|
||||||
|
mimeType?: string
|
||||||
): Promise<{
|
): Promise<{
|
||||||
transcript: string
|
transcript: string
|
||||||
segments?: TranscriptSegment[]
|
segments?: TranscriptSegment[]
|
||||||
@@ -357,7 +425,7 @@ async function transcribeWithDeepgram(
|
|||||||
method: 'POST',
|
method: 'POST',
|
||||||
headers: {
|
headers: {
|
||||||
Authorization: `Token ${apiKey}`,
|
Authorization: `Token ${apiKey}`,
|
||||||
'Content-Type': 'audio/mpeg',
|
'Content-Type': mimeType || 'audio/mpeg',
|
||||||
},
|
},
|
||||||
body: new Uint8Array(audioBuffer),
|
body: new Uint8Array(audioBuffer),
|
||||||
})
|
})
|
||||||
@@ -513,7 +581,8 @@ async function transcribeWithAssemblyAI(
|
|||||||
audio_url: upload_url,
|
audio_url: upload_url,
|
||||||
}
|
}
|
||||||
|
|
||||||
if (model === 'best' || model === 'nano') {
|
// AssemblyAI supports 'best', 'slam-1', or 'universal' for speech_model
|
||||||
|
if (model === 'best' || model === 'slam-1' || model === 'universal') {
|
||||||
transcriptRequest.speech_model = model
|
transcriptRequest.speech_model = model
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -568,7 +637,8 @@ async function transcribeWithAssemblyAI(
|
|||||||
|
|
||||||
let transcript: any
|
let transcript: any
|
||||||
let attempts = 0
|
let attempts = 0
|
||||||
const maxAttempts = 60 // 5 minutes with 5-second intervals
|
const pollIntervalMs = 5000
|
||||||
|
const maxAttempts = Math.ceil(DEFAULT_EXECUTION_TIMEOUT_MS / pollIntervalMs)
|
||||||
|
|
||||||
while (attempts < maxAttempts) {
|
while (attempts < maxAttempts) {
|
||||||
const statusResponse = await fetch(`https://api.assemblyai.com/v2/transcript/${id}`, {
|
const statusResponse = await fetch(`https://api.assemblyai.com/v2/transcript/${id}`, {
|
||||||
|
|||||||
@@ -3,6 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
|
|||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
|
import { FileInputSchema } from '@/lib/uploads/utils/file-schemas'
|
||||||
import { processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
|
import { processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
|
||||||
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
||||||
|
|
||||||
@@ -16,7 +17,7 @@ const SupabaseStorageUploadSchema = z.object({
|
|||||||
bucket: z.string().min(1, 'Bucket name is required'),
|
bucket: z.string().min(1, 'Bucket name is required'),
|
||||||
fileName: z.string().min(1, 'File name is required'),
|
fileName: z.string().min(1, 'File name is required'),
|
||||||
path: z.string().optional().nullable(),
|
path: z.string().optional().nullable(),
|
||||||
fileData: z.any(),
|
fileData: FileInputSchema,
|
||||||
contentType: z.string().optional().nullable(),
|
contentType: z.string().optional().nullable(),
|
||||||
upsert: z.boolean().optional().default(false),
|
upsert: z.boolean().optional().default(false),
|
||||||
})
|
})
|
||||||
|
|||||||
@@ -3,6 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
|
|||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
|
import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
|
||||||
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
|
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
|
||||||
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
||||||
import { convertMarkdownToHTML } from '@/tools/telegram/utils'
|
import { convertMarkdownToHTML } from '@/tools/telegram/utils'
|
||||||
@@ -14,7 +15,7 @@ const logger = createLogger('TelegramSendDocumentAPI')
|
|||||||
const TelegramSendDocumentSchema = z.object({
|
const TelegramSendDocumentSchema = z.object({
|
||||||
botToken: z.string().min(1, 'Bot token is required'),
|
botToken: z.string().min(1, 'Bot token is required'),
|
||||||
chatId: z.string().min(1, 'Chat ID is required'),
|
chatId: z.string().min(1, 'Chat ID is required'),
|
||||||
files: z.array(z.any()).optional().nullable(),
|
files: RawFileInputArraySchema.optional().nullable(),
|
||||||
caption: z.string().optional().nullable(),
|
caption: z.string().optional().nullable(),
|
||||||
})
|
})
|
||||||
|
|
||||||
@@ -93,6 +94,14 @@ export async function POST(request: NextRequest) {
|
|||||||
logger.info(`[${requestId}] Uploading document: ${userFile.name}`)
|
logger.info(`[${requestId}] Uploading document: ${userFile.name}`)
|
||||||
|
|
||||||
const buffer = await downloadFileFromStorage(userFile, requestId, logger)
|
const buffer = await downloadFileFromStorage(userFile, requestId, logger)
|
||||||
|
const filesOutput = [
|
||||||
|
{
|
||||||
|
name: userFile.name,
|
||||||
|
mimeType: userFile.type || 'application/octet-stream',
|
||||||
|
data: buffer.toString('base64'),
|
||||||
|
size: buffer.length,
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
logger.info(`[${requestId}] Downloaded file: ${buffer.length} bytes`)
|
logger.info(`[${requestId}] Downloaded file: ${buffer.length} bytes`)
|
||||||
|
|
||||||
@@ -135,6 +144,7 @@ export async function POST(request: NextRequest) {
|
|||||||
output: {
|
output: {
|
||||||
message: 'Document sent successfully',
|
message: 'Document sent successfully',
|
||||||
data: data.result,
|
data: data.result,
|
||||||
|
files: filesOutput,
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
|
|||||||
@@ -3,19 +3,19 @@ import { createLogger } from '@sim/logger'
|
|||||||
import { type NextRequest, NextResponse } from 'next/server'
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
|
import { DEFAULT_EXECUTION_TIMEOUT_MS } from '@/lib/core/execution-limits'
|
||||||
|
import { validateAwsRegion, validateS3BucketName } from '@/lib/core/security/input-validation'
|
||||||
import {
|
import {
|
||||||
validateAwsRegion,
|
secureFetchWithPinnedIP,
|
||||||
validateExternalUrl,
|
validateUrlWithDNS,
|
||||||
validateS3BucketName,
|
} from '@/lib/core/security/input-validation.server'
|
||||||
} from '@/lib/core/security/input-validation'
|
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
import { StorageService } from '@/lib/uploads'
|
import { RawFileInputSchema } from '@/lib/uploads/utils/file-schemas'
|
||||||
|
import { isInternalFileUrl, processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
|
||||||
import {
|
import {
|
||||||
extractStorageKey,
|
downloadFileFromStorage,
|
||||||
inferContextFromKey,
|
resolveInternalFileUrl,
|
||||||
isInternalFileUrl,
|
} from '@/lib/uploads/utils/file-utils.server'
|
||||||
} from '@/lib/uploads/utils/file-utils'
|
|
||||||
import { verifyFileAccess } from '@/app/api/files/authorization'
|
|
||||||
|
|
||||||
export const dynamic = 'force-dynamic'
|
export const dynamic = 'force-dynamic'
|
||||||
export const maxDuration = 300 // 5 minutes for large multi-page PDF processing
|
export const maxDuration = 300 // 5 minutes for large multi-page PDF processing
|
||||||
@@ -35,6 +35,7 @@ const TextractParseSchema = z
|
|||||||
region: z.string().min(1, 'AWS region is required'),
|
region: z.string().min(1, 'AWS region is required'),
|
||||||
processingMode: z.enum(['sync', 'async']).optional().default('sync'),
|
processingMode: z.enum(['sync', 'async']).optional().default('sync'),
|
||||||
filePath: z.string().optional(),
|
filePath: z.string().optional(),
|
||||||
|
file: RawFileInputSchema.optional(),
|
||||||
s3Uri: z.string().optional(),
|
s3Uri: z.string().optional(),
|
||||||
featureTypes: z
|
featureTypes: z
|
||||||
.array(z.enum(['TABLES', 'FORMS', 'QUERIES', 'SIGNATURES', 'LAYOUT']))
|
.array(z.enum(['TABLES', 'FORMS', 'QUERIES', 'SIGNATURES', 'LAYOUT']))
|
||||||
@@ -50,6 +51,20 @@ const TextractParseSchema = z
|
|||||||
path: ['region'],
|
path: ['region'],
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
if (data.processingMode === 'async' && !data.s3Uri) {
|
||||||
|
ctx.addIssue({
|
||||||
|
code: z.ZodIssueCode.custom,
|
||||||
|
message: 'S3 URI is required for multi-page processing (s3://bucket/key)',
|
||||||
|
path: ['s3Uri'],
|
||||||
|
})
|
||||||
|
}
|
||||||
|
if (data.processingMode !== 'async' && !data.file && !data.filePath) {
|
||||||
|
ctx.addIssue({
|
||||||
|
code: z.ZodIssueCode.custom,
|
||||||
|
message: 'File input is required for single-page processing',
|
||||||
|
path: ['filePath'],
|
||||||
|
})
|
||||||
|
}
|
||||||
})
|
})
|
||||||
|
|
||||||
function getSignatureKey(
|
function getSignatureKey(
|
||||||
@@ -111,7 +126,14 @@ function signAwsRequest(
|
|||||||
}
|
}
|
||||||
|
|
||||||
async function fetchDocumentBytes(url: string): Promise<{ bytes: string; contentType: string }> {
|
async function fetchDocumentBytes(url: string): Promise<{ bytes: string; contentType: string }> {
|
||||||
const response = await fetch(url)
|
const urlValidation = await validateUrlWithDNS(url, 'Document URL')
|
||||||
|
if (!urlValidation.isValid) {
|
||||||
|
throw new Error(urlValidation.error || 'Invalid document URL')
|
||||||
|
}
|
||||||
|
|
||||||
|
const response = await secureFetchWithPinnedIP(url, urlValidation.resolvedIP!, {
|
||||||
|
method: 'GET',
|
||||||
|
})
|
||||||
if (!response.ok) {
|
if (!response.ok) {
|
||||||
throw new Error(`Failed to fetch document: ${response.statusText}`)
|
throw new Error(`Failed to fetch document: ${response.statusText}`)
|
||||||
}
|
}
|
||||||
@@ -205,8 +227,8 @@ async function pollForJobCompletion(
|
|||||||
useAnalyzeDocument: boolean,
|
useAnalyzeDocument: boolean,
|
||||||
requestId: string
|
requestId: string
|
||||||
): Promise<Record<string, unknown>> {
|
): Promise<Record<string, unknown>> {
|
||||||
const pollIntervalMs = 5000 // 5 seconds between polls
|
const pollIntervalMs = 5000
|
||||||
const maxPollTimeMs = 180000 // 3 minutes maximum polling time
|
const maxPollTimeMs = DEFAULT_EXECUTION_TIMEOUT_MS
|
||||||
const maxAttempts = Math.ceil(maxPollTimeMs / pollIntervalMs)
|
const maxAttempts = Math.ceil(maxPollTimeMs / pollIntervalMs)
|
||||||
|
|
||||||
const getTarget = useAnalyzeDocument
|
const getTarget = useAnalyzeDocument
|
||||||
@@ -318,8 +340,8 @@ export async function POST(request: NextRequest) {
|
|||||||
|
|
||||||
logger.info(`[${requestId}] Textract parse request`, {
|
logger.info(`[${requestId}] Textract parse request`, {
|
||||||
processingMode,
|
processingMode,
|
||||||
filePath: validatedData.filePath?.substring(0, 50),
|
hasFile: Boolean(validatedData.file),
|
||||||
s3Uri: validatedData.s3Uri?.substring(0, 50),
|
hasS3Uri: Boolean(validatedData.s3Uri),
|
||||||
featureTypes,
|
featureTypes,
|
||||||
userId,
|
userId,
|
||||||
})
|
})
|
||||||
@@ -414,90 +436,89 @@ export async function POST(request: NextRequest) {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!validatedData.filePath) {
|
let bytes = ''
|
||||||
return NextResponse.json(
|
let contentType = 'application/octet-stream'
|
||||||
{
|
let isPdf = false
|
||||||
success: false,
|
|
||||||
error: 'File path is required for single-page processing',
|
|
||||||
},
|
|
||||||
{ status: 400 }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
let fileUrl = validatedData.filePath
|
if (validatedData.file) {
|
||||||
|
let userFile
|
||||||
const isInternalFilePath = validatedData.filePath && isInternalFileUrl(validatedData.filePath)
|
|
||||||
|
|
||||||
if (isInternalFilePath) {
|
|
||||||
try {
|
try {
|
||||||
const storageKey = extractStorageKey(validatedData.filePath)
|
userFile = processSingleFileToUserFile(validatedData.file, requestId, logger)
|
||||||
const context = inferContextFromKey(storageKey)
|
|
||||||
|
|
||||||
const hasAccess = await verifyFileAccess(storageKey, userId, undefined, context, false)
|
|
||||||
|
|
||||||
if (!hasAccess) {
|
|
||||||
logger.warn(`[${requestId}] Unauthorized presigned URL generation attempt`, {
|
|
||||||
userId,
|
|
||||||
key: storageKey,
|
|
||||||
context,
|
|
||||||
})
|
|
||||||
return NextResponse.json(
|
|
||||||
{
|
|
||||||
success: false,
|
|
||||||
error: 'File not found',
|
|
||||||
},
|
|
||||||
{ status: 404 }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
fileUrl = await StorageService.generatePresignedDownloadUrl(storageKey, context, 5 * 60)
|
|
||||||
logger.info(`[${requestId}] Generated presigned URL for ${context} file`)
|
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error(`[${requestId}] Failed to generate presigned URL:`, error)
|
|
||||||
return NextResponse.json(
|
return NextResponse.json(
|
||||||
{
|
{
|
||||||
success: false,
|
success: false,
|
||||||
error: 'Failed to generate file access URL',
|
error: error instanceof Error ? error.message : 'Failed to process file',
|
||||||
},
|
|
||||||
{ status: 500 }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
} else if (validatedData.filePath?.startsWith('/')) {
|
|
||||||
// Reject arbitrary absolute paths that don't contain /api/files/serve/
|
|
||||||
logger.warn(`[${requestId}] Invalid internal path`, {
|
|
||||||
userId,
|
|
||||||
path: validatedData.filePath.substring(0, 50),
|
|
||||||
})
|
|
||||||
return NextResponse.json(
|
|
||||||
{
|
|
||||||
success: false,
|
|
||||||
error: 'Invalid file path. Only uploaded files are supported for internal paths.',
|
|
||||||
},
|
|
||||||
{ status: 400 }
|
|
||||||
)
|
|
||||||
} else {
|
|
||||||
const urlValidation = validateExternalUrl(fileUrl, 'Document URL')
|
|
||||||
if (!urlValidation.isValid) {
|
|
||||||
logger.warn(`[${requestId}] SSRF attempt blocked`, {
|
|
||||||
userId,
|
|
||||||
url: fileUrl.substring(0, 100),
|
|
||||||
error: urlValidation.error,
|
|
||||||
})
|
|
||||||
return NextResponse.json(
|
|
||||||
{
|
|
||||||
success: false,
|
|
||||||
error: urlValidation.error,
|
|
||||||
},
|
},
|
||||||
{ status: 400 }
|
{ status: 400 }
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const buffer = await downloadFileFromStorage(userFile, requestId, logger)
|
||||||
|
bytes = buffer.toString('base64')
|
||||||
|
contentType = userFile.type || 'application/octet-stream'
|
||||||
|
isPdf = contentType.includes('pdf') || userFile.name?.toLowerCase().endsWith('.pdf')
|
||||||
|
} else if (validatedData.filePath) {
|
||||||
|
let fileUrl = validatedData.filePath
|
||||||
|
|
||||||
|
const isInternalFilePath = isInternalFileUrl(fileUrl)
|
||||||
|
|
||||||
|
if (isInternalFilePath) {
|
||||||
|
const resolution = await resolveInternalFileUrl(fileUrl, userId, requestId, logger)
|
||||||
|
if (resolution.error) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: resolution.error.message,
|
||||||
|
},
|
||||||
|
{ status: resolution.error.status }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
fileUrl = resolution.fileUrl || fileUrl
|
||||||
|
} else if (fileUrl.startsWith('/')) {
|
||||||
|
logger.warn(`[${requestId}] Invalid internal path`, {
|
||||||
|
userId,
|
||||||
|
path: fileUrl.substring(0, 50),
|
||||||
|
})
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: 'Invalid file path. Only uploaded files are supported for internal paths.',
|
||||||
|
},
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
const urlValidation = await validateUrlWithDNS(fileUrl, 'Document URL')
|
||||||
|
if (!urlValidation.isValid) {
|
||||||
|
logger.warn(`[${requestId}] SSRF attempt blocked`, {
|
||||||
|
userId,
|
||||||
|
url: fileUrl.substring(0, 100),
|
||||||
|
error: urlValidation.error,
|
||||||
|
})
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: urlValidation.error,
|
||||||
|
},
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const fetched = await fetchDocumentBytes(fileUrl)
|
||||||
|
bytes = fetched.bytes
|
||||||
|
contentType = fetched.contentType
|
||||||
|
isPdf = contentType.includes('pdf') || fileUrl.toLowerCase().endsWith('.pdf')
|
||||||
|
} else {
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: 'File input is required for single-page processing',
|
||||||
|
},
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
const { bytes, contentType } = await fetchDocumentBytes(fileUrl)
|
|
||||||
|
|
||||||
// Track if this is a PDF for better error messaging
|
|
||||||
const isPdf = contentType.includes('pdf') || fileUrl.toLowerCase().endsWith('.pdf')
|
|
||||||
|
|
||||||
const uri = '/'
|
const uri = '/'
|
||||||
|
|
||||||
let textractBody: Record<string, unknown>
|
let textractBody: Record<string, unknown>
|
||||||
|
|||||||
@@ -2,6 +2,7 @@ import { createLogger } from '@sim/logger'
|
|||||||
import type { NextRequest } from 'next/server'
|
import type { NextRequest } from 'next/server'
|
||||||
import { NextResponse } from 'next/server'
|
import { NextResponse } from 'next/server'
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
|
import { DEFAULT_EXECUTION_TIMEOUT_MS } from '@/lib/core/execution-limits'
|
||||||
import { validateAlphanumericId } from '@/lib/core/security/input-validation'
|
import { validateAlphanumericId } from '@/lib/core/security/input-validation'
|
||||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||||
import { StorageService } from '@/lib/uploads'
|
import { StorageService } from '@/lib/uploads'
|
||||||
@@ -60,7 +61,7 @@ export async function POST(request: NextRequest) {
|
|||||||
text,
|
text,
|
||||||
model_id: modelId,
|
model_id: modelId,
|
||||||
}),
|
}),
|
||||||
signal: AbortSignal.timeout(60000),
|
signal: AbortSignal.timeout(DEFAULT_EXECUTION_TIMEOUT_MS),
|
||||||
})
|
})
|
||||||
|
|
||||||
if (!response.ok) {
|
if (!response.ok) {
|
||||||
|
|||||||
250
apps/sim/app/api/tools/twilio/get-recording/route.ts
Normal file
250
apps/sim/app/api/tools/twilio/get-recording/route.ts
Normal file
@@ -0,0 +1,250 @@
|
|||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
|
import { z } from 'zod'
|
||||||
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
|
import {
|
||||||
|
secureFetchWithPinnedIP,
|
||||||
|
validateUrlWithDNS,
|
||||||
|
} from '@/lib/core/security/input-validation.server'
|
||||||
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
|
import { getExtensionFromMimeType } from '@/lib/uploads/utils/file-utils'
|
||||||
|
|
||||||
|
export const dynamic = 'force-dynamic'
|
||||||
|
|
||||||
|
const logger = createLogger('TwilioGetRecordingAPI')
|
||||||
|
|
||||||
|
interface TwilioRecordingResponse {
|
||||||
|
sid?: string
|
||||||
|
call_sid?: string
|
||||||
|
duration?: string
|
||||||
|
status?: string
|
||||||
|
channels?: number
|
||||||
|
source?: string
|
||||||
|
price?: string
|
||||||
|
price_unit?: string
|
||||||
|
uri?: string
|
||||||
|
error_code?: number
|
||||||
|
message?: string
|
||||||
|
error_message?: string
|
||||||
|
}
|
||||||
|
|
||||||
|
interface TwilioErrorResponse {
|
||||||
|
message?: string
|
||||||
|
}
|
||||||
|
|
||||||
|
interface TwilioTranscription {
|
||||||
|
transcription_text?: string
|
||||||
|
status?: string
|
||||||
|
price?: string
|
||||||
|
price_unit?: string
|
||||||
|
}
|
||||||
|
|
||||||
|
interface TwilioTranscriptionsResponse {
|
||||||
|
transcriptions?: TwilioTranscription[]
|
||||||
|
}
|
||||||
|
|
||||||
|
const TwilioGetRecordingSchema = z.object({
|
||||||
|
accountSid: z.string().min(1, 'Account SID is required'),
|
||||||
|
authToken: z.string().min(1, 'Auth token is required'),
|
||||||
|
recordingSid: z.string().min(1, 'Recording SID is required'),
|
||||||
|
})
|
||||||
|
|
||||||
|
export async function POST(request: NextRequest) {
|
||||||
|
const requestId = generateRequestId()
|
||||||
|
|
||||||
|
try {
|
||||||
|
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
|
||||||
|
|
||||||
|
if (!authResult.success) {
|
||||||
|
logger.warn(`[${requestId}] Unauthorized Twilio get recording attempt: ${authResult.error}`)
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: authResult.error || 'Authentication required',
|
||||||
|
},
|
||||||
|
{ status: 401 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const body = await request.json()
|
||||||
|
const validatedData = TwilioGetRecordingSchema.parse(body)
|
||||||
|
|
||||||
|
const { accountSid, authToken, recordingSid } = validatedData
|
||||||
|
|
||||||
|
if (!accountSid.startsWith('AC')) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: `Invalid Account SID format. Account SID must start with "AC" (you provided: ${accountSid.substring(0, 2)}...)`,
|
||||||
|
},
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const twilioAuth = Buffer.from(`${accountSid}:${authToken}`).toString('base64')
|
||||||
|
|
||||||
|
logger.info(`[${requestId}] Getting recording info from Twilio`, { recordingSid })
|
||||||
|
|
||||||
|
const infoUrl = `https://api.twilio.com/2010-04-01/Accounts/${accountSid}/Recordings/${recordingSid}.json`
|
||||||
|
const infoUrlValidation = await validateUrlWithDNS(infoUrl, 'infoUrl')
|
||||||
|
if (!infoUrlValidation.isValid) {
|
||||||
|
return NextResponse.json({ success: false, error: infoUrlValidation.error }, { status: 400 })
|
||||||
|
}
|
||||||
|
|
||||||
|
const infoResponse = await secureFetchWithPinnedIP(infoUrl, infoUrlValidation.resolvedIP!, {
|
||||||
|
method: 'GET',
|
||||||
|
headers: { Authorization: `Basic ${twilioAuth}` },
|
||||||
|
})
|
||||||
|
|
||||||
|
if (!infoResponse.ok) {
|
||||||
|
const errorData = (await infoResponse.json().catch(() => ({}))) as TwilioErrorResponse
|
||||||
|
logger.error(`[${requestId}] Twilio API error`, {
|
||||||
|
status: infoResponse.status,
|
||||||
|
error: errorData,
|
||||||
|
})
|
||||||
|
return NextResponse.json(
|
||||||
|
{ success: false, error: errorData.message || `Twilio API error: ${infoResponse.status}` },
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const data = (await infoResponse.json()) as TwilioRecordingResponse
|
||||||
|
|
||||||
|
if (data.error_code) {
|
||||||
|
return NextResponse.json({
|
||||||
|
success: false,
|
||||||
|
output: {
|
||||||
|
success: false,
|
||||||
|
error: data.message || data.error_message || 'Failed to retrieve recording',
|
||||||
|
},
|
||||||
|
error: data.message || data.error_message || 'Failed to retrieve recording',
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
const baseUrl = 'https://api.twilio.com'
|
||||||
|
const mediaUrl = data.uri ? `${baseUrl}${data.uri.replace('.json', '')}` : undefined
|
||||||
|
|
||||||
|
let transcriptionText: string | undefined
|
||||||
|
let transcriptionStatus: string | undefined
|
||||||
|
let transcriptionPrice: string | undefined
|
||||||
|
let transcriptionPriceUnit: string | undefined
|
||||||
|
let file:
|
||||||
|
| {
|
||||||
|
name: string
|
||||||
|
mimeType: string
|
||||||
|
data: string
|
||||||
|
size: number
|
||||||
|
}
|
||||||
|
| undefined
|
||||||
|
|
||||||
|
try {
|
||||||
|
const transcriptionUrl = `https://api.twilio.com/2010-04-01/Accounts/${accountSid}/Transcriptions.json?RecordingSid=${data.sid}`
|
||||||
|
logger.info(`[${requestId}] Checking for transcriptions`)
|
||||||
|
|
||||||
|
const transcriptionUrlValidation = await validateUrlWithDNS(
|
||||||
|
transcriptionUrl,
|
||||||
|
'transcriptionUrl'
|
||||||
|
)
|
||||||
|
if (transcriptionUrlValidation.isValid) {
|
||||||
|
const transcriptionResponse = await secureFetchWithPinnedIP(
|
||||||
|
transcriptionUrl,
|
||||||
|
transcriptionUrlValidation.resolvedIP!,
|
||||||
|
{
|
||||||
|
method: 'GET',
|
||||||
|
headers: { Authorization: `Basic ${twilioAuth}` },
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
if (transcriptionResponse.ok) {
|
||||||
|
const transcriptionData =
|
||||||
|
(await transcriptionResponse.json()) as TwilioTranscriptionsResponse
|
||||||
|
|
||||||
|
if (transcriptionData.transcriptions && transcriptionData.transcriptions.length > 0) {
|
||||||
|
const transcription = transcriptionData.transcriptions[0]
|
||||||
|
transcriptionText = transcription.transcription_text
|
||||||
|
transcriptionStatus = transcription.status
|
||||||
|
transcriptionPrice = transcription.price
|
||||||
|
transcriptionPriceUnit = transcription.price_unit
|
||||||
|
logger.info(`[${requestId}] Transcription found`, {
|
||||||
|
status: transcriptionStatus,
|
||||||
|
textLength: transcriptionText?.length,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
logger.warn(`[${requestId}] Failed to fetch transcription:`, error)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (mediaUrl) {
|
||||||
|
try {
|
||||||
|
const mediaUrlValidation = await validateUrlWithDNS(mediaUrl, 'mediaUrl')
|
||||||
|
if (mediaUrlValidation.isValid) {
|
||||||
|
const mediaResponse = await secureFetchWithPinnedIP(
|
||||||
|
mediaUrl,
|
||||||
|
mediaUrlValidation.resolvedIP!,
|
||||||
|
{
|
||||||
|
method: 'GET',
|
||||||
|
headers: { Authorization: `Basic ${twilioAuth}` },
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
if (mediaResponse.ok) {
|
||||||
|
const contentType =
|
||||||
|
mediaResponse.headers.get('content-type') || 'application/octet-stream'
|
||||||
|
const extension = getExtensionFromMimeType(contentType) || 'dat'
|
||||||
|
const arrayBuffer = await mediaResponse.arrayBuffer()
|
||||||
|
const buffer = Buffer.from(arrayBuffer)
|
||||||
|
const fileName = `${data.sid || recordingSid}.${extension}`
|
||||||
|
|
||||||
|
file = {
|
||||||
|
name: fileName,
|
||||||
|
mimeType: contentType,
|
||||||
|
data: buffer.toString('base64'),
|
||||||
|
size: buffer.length,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
logger.warn(`[${requestId}] Failed to download recording media:`, error)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info(`[${requestId}] Twilio recording fetched successfully`, {
|
||||||
|
recordingSid: data.sid,
|
||||||
|
hasFile: !!file,
|
||||||
|
hasTranscription: !!transcriptionText,
|
||||||
|
})
|
||||||
|
|
||||||
|
return NextResponse.json({
|
||||||
|
success: true,
|
||||||
|
output: {
|
||||||
|
success: true,
|
||||||
|
recordingSid: data.sid,
|
||||||
|
callSid: data.call_sid,
|
||||||
|
duration: data.duration ? Number.parseInt(data.duration, 10) : undefined,
|
||||||
|
status: data.status,
|
||||||
|
channels: data.channels,
|
||||||
|
source: data.source,
|
||||||
|
mediaUrl,
|
||||||
|
file,
|
||||||
|
price: data.price,
|
||||||
|
priceUnit: data.price_unit,
|
||||||
|
uri: data.uri,
|
||||||
|
transcriptionText,
|
||||||
|
transcriptionStatus,
|
||||||
|
transcriptionPrice,
|
||||||
|
transcriptionPriceUnit,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(`[${requestId}] Error fetching Twilio recording:`, error)
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: error instanceof Error ? error.message : 'Unknown error occurred',
|
||||||
|
},
|
||||||
|
{ status: 500 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,6 +1,7 @@
|
|||||||
import { createLogger } from '@sim/logger'
|
import { createLogger } from '@sim/logger'
|
||||||
import { type NextRequest, NextResponse } from 'next/server'
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
|
import { getMaxExecutionTimeout } from '@/lib/core/execution-limits'
|
||||||
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
||||||
import type { UserFile } from '@/executor/types'
|
import type { UserFile } from '@/executor/types'
|
||||||
import type { VideoRequestBody } from '@/tools/video/types'
|
import type { VideoRequestBody } from '@/tools/video/types'
|
||||||
@@ -326,11 +327,12 @@ async function generateWithRunway(
|
|||||||
|
|
||||||
logger.info(`[${requestId}] Runway task created: ${taskId}`)
|
logger.info(`[${requestId}] Runway task created: ${taskId}`)
|
||||||
|
|
||||||
const maxAttempts = 120 // 10 minutes with 5-second intervals
|
const pollIntervalMs = 5000
|
||||||
|
const maxAttempts = Math.ceil(getMaxExecutionTimeout() / pollIntervalMs)
|
||||||
let attempts = 0
|
let attempts = 0
|
||||||
|
|
||||||
while (attempts < maxAttempts) {
|
while (attempts < maxAttempts) {
|
||||||
await sleep(5000) // Poll every 5 seconds
|
await sleep(pollIntervalMs)
|
||||||
|
|
||||||
const statusResponse = await fetch(`https://api.dev.runwayml.com/v1/tasks/${taskId}`, {
|
const statusResponse = await fetch(`https://api.dev.runwayml.com/v1/tasks/${taskId}`, {
|
||||||
headers: {
|
headers: {
|
||||||
@@ -370,7 +372,7 @@ async function generateWithRunway(
|
|||||||
attempts++
|
attempts++
|
||||||
}
|
}
|
||||||
|
|
||||||
throw new Error('Runway generation timed out after 10 minutes')
|
throw new Error('Runway generation timed out')
|
||||||
}
|
}
|
||||||
|
|
||||||
async function generateWithVeo(
|
async function generateWithVeo(
|
||||||
@@ -429,11 +431,12 @@ async function generateWithVeo(
|
|||||||
|
|
||||||
logger.info(`[${requestId}] Veo operation created: ${operationName}`)
|
logger.info(`[${requestId}] Veo operation created: ${operationName}`)
|
||||||
|
|
||||||
const maxAttempts = 60 // 5 minutes with 5-second intervals
|
const pollIntervalMs = 5000
|
||||||
|
const maxAttempts = Math.ceil(getMaxExecutionTimeout() / pollIntervalMs)
|
||||||
let attempts = 0
|
let attempts = 0
|
||||||
|
|
||||||
while (attempts < maxAttempts) {
|
while (attempts < maxAttempts) {
|
||||||
await sleep(5000)
|
await sleep(pollIntervalMs)
|
||||||
|
|
||||||
const statusResponse = await fetch(
|
const statusResponse = await fetch(
|
||||||
`https://generativelanguage.googleapis.com/v1beta/${operationName}`,
|
`https://generativelanguage.googleapis.com/v1beta/${operationName}`,
|
||||||
@@ -485,7 +488,7 @@ async function generateWithVeo(
|
|||||||
attempts++
|
attempts++
|
||||||
}
|
}
|
||||||
|
|
||||||
throw new Error('Veo generation timed out after 5 minutes')
|
throw new Error('Veo generation timed out')
|
||||||
}
|
}
|
||||||
|
|
||||||
async function generateWithLuma(
|
async function generateWithLuma(
|
||||||
@@ -541,11 +544,12 @@ async function generateWithLuma(
|
|||||||
|
|
||||||
logger.info(`[${requestId}] Luma generation created: ${generationId}`)
|
logger.info(`[${requestId}] Luma generation created: ${generationId}`)
|
||||||
|
|
||||||
const maxAttempts = 120 // 10 minutes
|
const pollIntervalMs = 5000
|
||||||
|
const maxAttempts = Math.ceil(getMaxExecutionTimeout() / pollIntervalMs)
|
||||||
let attempts = 0
|
let attempts = 0
|
||||||
|
|
||||||
while (attempts < maxAttempts) {
|
while (attempts < maxAttempts) {
|
||||||
await sleep(5000)
|
await sleep(pollIntervalMs)
|
||||||
|
|
||||||
const statusResponse = await fetch(
|
const statusResponse = await fetch(
|
||||||
`https://api.lumalabs.ai/dream-machine/v1/generations/${generationId}`,
|
`https://api.lumalabs.ai/dream-machine/v1/generations/${generationId}`,
|
||||||
@@ -592,7 +596,7 @@ async function generateWithLuma(
|
|||||||
attempts++
|
attempts++
|
||||||
}
|
}
|
||||||
|
|
||||||
throw new Error('Luma generation timed out after 10 minutes')
|
throw new Error('Luma generation timed out')
|
||||||
}
|
}
|
||||||
|
|
||||||
async function generateWithMiniMax(
|
async function generateWithMiniMax(
|
||||||
@@ -658,14 +662,13 @@ async function generateWithMiniMax(
|
|||||||
|
|
||||||
logger.info(`[${requestId}] MiniMax task created: ${taskId}`)
|
logger.info(`[${requestId}] MiniMax task created: ${taskId}`)
|
||||||
|
|
||||||
// Poll for completion (6-10 minutes typical)
|
const pollIntervalMs = 5000
|
||||||
const maxAttempts = 120 // 10 minutes with 5-second intervals
|
const maxAttempts = Math.ceil(getMaxExecutionTimeout() / pollIntervalMs)
|
||||||
let attempts = 0
|
let attempts = 0
|
||||||
|
|
||||||
while (attempts < maxAttempts) {
|
while (attempts < maxAttempts) {
|
||||||
await sleep(5000)
|
await sleep(pollIntervalMs)
|
||||||
|
|
||||||
// Query task status
|
|
||||||
const statusResponse = await fetch(
|
const statusResponse = await fetch(
|
||||||
`https://api.minimax.io/v1/query/video_generation?task_id=${taskId}`,
|
`https://api.minimax.io/v1/query/video_generation?task_id=${taskId}`,
|
||||||
{
|
{
|
||||||
@@ -743,7 +746,7 @@ async function generateWithMiniMax(
|
|||||||
attempts++
|
attempts++
|
||||||
}
|
}
|
||||||
|
|
||||||
throw new Error('MiniMax generation timed out after 10 minutes')
|
throw new Error('MiniMax generation timed out')
|
||||||
}
|
}
|
||||||
|
|
||||||
// Helper function to strip subpaths from Fal.ai model IDs for status/result endpoints
|
// Helper function to strip subpaths from Fal.ai model IDs for status/result endpoints
|
||||||
@@ -861,11 +864,12 @@ async function generateWithFalAI(
|
|||||||
// Get base model ID (without subpath) for status and result endpoints
|
// Get base model ID (without subpath) for status and result endpoints
|
||||||
const baseModelId = getBaseModelId(falModelId)
|
const baseModelId = getBaseModelId(falModelId)
|
||||||
|
|
||||||
const maxAttempts = 96 // 8 minutes with 5-second intervals
|
const pollIntervalMs = 5000
|
||||||
|
const maxAttempts = Math.ceil(getMaxExecutionTimeout() / pollIntervalMs)
|
||||||
let attempts = 0
|
let attempts = 0
|
||||||
|
|
||||||
while (attempts < maxAttempts) {
|
while (attempts < maxAttempts) {
|
||||||
await sleep(5000)
|
await sleep(pollIntervalMs)
|
||||||
|
|
||||||
const statusResponse = await fetch(
|
const statusResponse = await fetch(
|
||||||
`https://queue.fal.run/${baseModelId}/requests/${requestIdFal}/status`,
|
`https://queue.fal.run/${baseModelId}/requests/${requestIdFal}/status`,
|
||||||
@@ -938,7 +942,7 @@ async function generateWithFalAI(
|
|||||||
attempts++
|
attempts++
|
||||||
}
|
}
|
||||||
|
|
||||||
throw new Error('Fal.ai generation timed out after 8 minutes')
|
throw new Error('Fal.ai generation timed out')
|
||||||
}
|
}
|
||||||
|
|
||||||
function getVideoDimensions(
|
function getVideoDimensions(
|
||||||
|
|||||||
@@ -1,10 +1,20 @@
|
|||||||
|
import { GoogleGenAI } from '@google/genai'
|
||||||
import { createLogger } from '@sim/logger'
|
import { createLogger } from '@sim/logger'
|
||||||
import { type NextRequest, NextResponse } from 'next/server'
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
|
import {
|
||||||
|
secureFetchWithPinnedIP,
|
||||||
|
validateUrlWithDNS,
|
||||||
|
} from '@/lib/core/security/input-validation.server'
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
import { processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
|
import { RawFileInputSchema } from '@/lib/uploads/utils/file-schemas'
|
||||||
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
import { isInternalFileUrl, processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
|
||||||
|
import {
|
||||||
|
downloadFileFromStorage,
|
||||||
|
resolveInternalFileUrl,
|
||||||
|
} from '@/lib/uploads/utils/file-utils.server'
|
||||||
|
import { convertUsageMetadata, extractTextContent } from '@/providers/google/utils'
|
||||||
|
|
||||||
export const dynamic = 'force-dynamic'
|
export const dynamic = 'force-dynamic'
|
||||||
|
|
||||||
@@ -13,8 +23,8 @@ const logger = createLogger('VisionAnalyzeAPI')
|
|||||||
const VisionAnalyzeSchema = z.object({
|
const VisionAnalyzeSchema = z.object({
|
||||||
apiKey: z.string().min(1, 'API key is required'),
|
apiKey: z.string().min(1, 'API key is required'),
|
||||||
imageUrl: z.string().optional().nullable(),
|
imageUrl: z.string().optional().nullable(),
|
||||||
imageFile: z.any().optional().nullable(),
|
imageFile: RawFileInputSchema.optional().nullable(),
|
||||||
model: z.string().optional().default('gpt-4o'),
|
model: z.string().optional().default('gpt-5.2'),
|
||||||
prompt: z.string().optional().nullable(),
|
prompt: z.string().optional().nullable(),
|
||||||
})
|
})
|
||||||
|
|
||||||
@@ -39,6 +49,7 @@ export async function POST(request: NextRequest) {
|
|||||||
userId: authResult.userId,
|
userId: authResult.userId,
|
||||||
})
|
})
|
||||||
|
|
||||||
|
const userId = authResult.userId
|
||||||
const body = await request.json()
|
const body = await request.json()
|
||||||
const validatedData = VisionAnalyzeSchema.parse(body)
|
const validatedData = VisionAnalyzeSchema.parse(body)
|
||||||
|
|
||||||
@@ -77,18 +88,72 @@ export async function POST(request: NextRequest) {
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
const buffer = await downloadFileFromStorage(userFile, requestId, logger)
|
let base64 = userFile.base64
|
||||||
|
let bufferLength = 0
|
||||||
const base64 = buffer.toString('base64')
|
if (!base64) {
|
||||||
|
const buffer = await downloadFileFromStorage(userFile, requestId, logger)
|
||||||
|
base64 = buffer.toString('base64')
|
||||||
|
bufferLength = buffer.length
|
||||||
|
}
|
||||||
const mimeType = userFile.type || 'image/jpeg'
|
const mimeType = userFile.type || 'image/jpeg'
|
||||||
imageSource = `data:${mimeType};base64,${base64}`
|
imageSource = `data:${mimeType};base64,${base64}`
|
||||||
logger.info(`[${requestId}] Converted image to base64 (${buffer.length} bytes)`)
|
if (bufferLength > 0) {
|
||||||
|
logger.info(`[${requestId}] Converted image to base64 (${bufferLength} bytes)`)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let imageUrlValidation: Awaited<ReturnType<typeof validateUrlWithDNS>> | null = null
|
||||||
|
if (imageSource && !imageSource.startsWith('data:')) {
|
||||||
|
if (imageSource.startsWith('/') && !isInternalFileUrl(imageSource)) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: 'Invalid file path. Only uploaded files are supported for internal paths.',
|
||||||
|
},
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (isInternalFileUrl(imageSource)) {
|
||||||
|
if (!userId) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: 'Authentication required for internal file access',
|
||||||
|
},
|
||||||
|
{ status: 401 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
const resolution = await resolveInternalFileUrl(imageSource, userId, requestId, logger)
|
||||||
|
if (resolution.error) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: resolution.error.message,
|
||||||
|
},
|
||||||
|
{ status: resolution.error.status }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
imageSource = resolution.fileUrl || imageSource
|
||||||
|
}
|
||||||
|
|
||||||
|
imageUrlValidation = await validateUrlWithDNS(imageSource, 'imageUrl')
|
||||||
|
if (!imageUrlValidation.isValid) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: imageUrlValidation.error,
|
||||||
|
},
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
const defaultPrompt = 'Please analyze this image and describe what you see in detail.'
|
const defaultPrompt = 'Please analyze this image and describe what you see in detail.'
|
||||||
const prompt = validatedData.prompt || defaultPrompt
|
const prompt = validatedData.prompt || defaultPrompt
|
||||||
|
|
||||||
const isClaude = validatedData.model.startsWith('claude-3')
|
const isClaude = validatedData.model.startsWith('claude-')
|
||||||
|
const isGemini = validatedData.model.startsWith('gemini-')
|
||||||
const apiUrl = isClaude
|
const apiUrl = isClaude
|
||||||
? 'https://api.anthropic.com/v1/messages'
|
? 'https://api.anthropic.com/v1/messages'
|
||||||
: 'https://api.openai.com/v1/chat/completions'
|
: 'https://api.openai.com/v1/chat/completions'
|
||||||
@@ -106,6 +171,72 @@ export async function POST(request: NextRequest) {
|
|||||||
|
|
||||||
let requestBody: any
|
let requestBody: any
|
||||||
|
|
||||||
|
if (isGemini) {
|
||||||
|
let base64Payload = imageSource
|
||||||
|
if (!base64Payload.startsWith('data:')) {
|
||||||
|
const urlValidation =
|
||||||
|
imageUrlValidation || (await validateUrlWithDNS(base64Payload, 'imageUrl'))
|
||||||
|
if (!urlValidation.isValid) {
|
||||||
|
return NextResponse.json({ success: false, error: urlValidation.error }, { status: 400 })
|
||||||
|
}
|
||||||
|
|
||||||
|
const response = await secureFetchWithPinnedIP(base64Payload, urlValidation.resolvedIP!, {
|
||||||
|
method: 'GET',
|
||||||
|
})
|
||||||
|
if (!response.ok) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{ success: false, error: 'Failed to fetch image for Gemini' },
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
const contentType =
|
||||||
|
response.headers.get('content-type') || validatedData.imageFile?.type || 'image/jpeg'
|
||||||
|
const arrayBuffer = await response.arrayBuffer()
|
||||||
|
const base64 = Buffer.from(arrayBuffer).toString('base64')
|
||||||
|
base64Payload = `data:${contentType};base64,${base64}`
|
||||||
|
}
|
||||||
|
const base64Marker = ';base64,'
|
||||||
|
const markerIndex = base64Payload.indexOf(base64Marker)
|
||||||
|
if (!base64Payload.startsWith('data:') || markerIndex === -1) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{ success: false, error: 'Invalid base64 image format' },
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
const rawMimeType = base64Payload.slice('data:'.length, markerIndex)
|
||||||
|
const mediaType = rawMimeType.split(';')[0] || 'image/jpeg'
|
||||||
|
const base64Data = base64Payload.slice(markerIndex + base64Marker.length)
|
||||||
|
if (!base64Data) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{ success: false, error: 'Invalid base64 image format' },
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const ai = new GoogleGenAI({ apiKey: validatedData.apiKey })
|
||||||
|
const geminiResponse = await ai.models.generateContent({
|
||||||
|
model: validatedData.model,
|
||||||
|
contents: [
|
||||||
|
{
|
||||||
|
role: 'user',
|
||||||
|
parts: [{ text: prompt }, { inlineData: { mimeType: mediaType, data: base64Data } }],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
})
|
||||||
|
|
||||||
|
const content = extractTextContent(geminiResponse.candidates?.[0])
|
||||||
|
const usage = convertUsageMetadata(geminiResponse.usageMetadata)
|
||||||
|
|
||||||
|
return NextResponse.json({
|
||||||
|
success: true,
|
||||||
|
output: {
|
||||||
|
content,
|
||||||
|
model: validatedData.model,
|
||||||
|
tokens: usage.totalTokenCount || undefined,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
if (isClaude) {
|
if (isClaude) {
|
||||||
if (imageSource.startsWith('data:')) {
|
if (imageSource.startsWith('data:')) {
|
||||||
const base64Match = imageSource.match(/^data:([^;]+);base64,(.+)$/)
|
const base64Match = imageSource.match(/^data:([^;]+);base64,(.+)$/)
|
||||||
@@ -172,7 +303,7 @@ export async function POST(request: NextRequest) {
|
|||||||
],
|
],
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
max_tokens: 1000,
|
max_completion_tokens: 1000,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -3,6 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
|
|||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
|
import { RawFileInputSchema } from '@/lib/uploads/utils/file-schemas'
|
||||||
import {
|
import {
|
||||||
getFileExtension,
|
getFileExtension,
|
||||||
getMimeTypeFromExtension,
|
getMimeTypeFromExtension,
|
||||||
@@ -19,7 +20,7 @@ const WORDPRESS_COM_API_BASE = 'https://public-api.wordpress.com/wp/v2/sites'
|
|||||||
const WordPressUploadSchema = z.object({
|
const WordPressUploadSchema = z.object({
|
||||||
accessToken: z.string().min(1, 'Access token is required'),
|
accessToken: z.string().min(1, 'Access token is required'),
|
||||||
siteId: z.string().min(1, 'Site ID is required'),
|
siteId: z.string().min(1, 'Site ID is required'),
|
||||||
file: z.any().optional().nullable(),
|
file: RawFileInputSchema.optional().nullable(),
|
||||||
filename: z.string().optional().nullable(),
|
filename: z.string().optional().nullable(),
|
||||||
title: z.string().optional().nullable(),
|
title: z.string().optional().nullable(),
|
||||||
caption: z.string().optional().nullable(),
|
caption: z.string().optional().nullable(),
|
||||||
|
|||||||
216
apps/sim/app/api/tools/zoom/get-recordings/route.ts
Normal file
216
apps/sim/app/api/tools/zoom/get-recordings/route.ts
Normal file
@@ -0,0 +1,216 @@
|
|||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
|
import { z } from 'zod'
|
||||||
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
|
import {
|
||||||
|
secureFetchWithPinnedIP,
|
||||||
|
validateUrlWithDNS,
|
||||||
|
} from '@/lib/core/security/input-validation.server'
|
||||||
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
|
import { getExtensionFromMimeType } from '@/lib/uploads/utils/file-utils'
|
||||||
|
|
||||||
|
export const dynamic = 'force-dynamic'
|
||||||
|
|
||||||
|
const logger = createLogger('ZoomGetRecordingsAPI')
|
||||||
|
|
||||||
|
interface ZoomRecordingFile {
|
||||||
|
id?: string
|
||||||
|
meeting_id?: string
|
||||||
|
recording_start?: string
|
||||||
|
recording_end?: string
|
||||||
|
file_type?: string
|
||||||
|
file_extension?: string
|
||||||
|
file_size?: number
|
||||||
|
play_url?: string
|
||||||
|
download_url?: string
|
||||||
|
status?: string
|
||||||
|
recording_type?: string
|
||||||
|
}
|
||||||
|
|
||||||
|
interface ZoomRecordingsResponse {
|
||||||
|
uuid?: string
|
||||||
|
id?: string | number
|
||||||
|
account_id?: string
|
||||||
|
host_id?: string
|
||||||
|
topic?: string
|
||||||
|
type?: number
|
||||||
|
start_time?: string
|
||||||
|
duration?: number
|
||||||
|
total_size?: number
|
||||||
|
recording_count?: number
|
||||||
|
share_url?: string
|
||||||
|
recording_files?: ZoomRecordingFile[]
|
||||||
|
}
|
||||||
|
|
||||||
|
interface ZoomErrorResponse {
|
||||||
|
message?: string
|
||||||
|
code?: number
|
||||||
|
}
|
||||||
|
|
||||||
|
const ZoomGetRecordingsSchema = z.object({
|
||||||
|
accessToken: z.string().min(1, 'Access token is required'),
|
||||||
|
meetingId: z.string().min(1, 'Meeting ID is required'),
|
||||||
|
includeFolderItems: z.boolean().optional(),
|
||||||
|
ttl: z.number().optional(),
|
||||||
|
downloadFiles: z.boolean().optional().default(false),
|
||||||
|
})
|
||||||
|
|
||||||
|
export async function POST(request: NextRequest) {
|
||||||
|
const requestId = generateRequestId()
|
||||||
|
|
||||||
|
try {
|
||||||
|
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
|
||||||
|
|
||||||
|
if (!authResult.success) {
|
||||||
|
logger.warn(`[${requestId}] Unauthorized Zoom get recordings attempt: ${authResult.error}`)
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: authResult.error || 'Authentication required',
|
||||||
|
},
|
||||||
|
{ status: 401 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const body = await request.json()
|
||||||
|
const validatedData = ZoomGetRecordingsSchema.parse(body)
|
||||||
|
|
||||||
|
const { accessToken, meetingId, includeFolderItems, ttl, downloadFiles } = validatedData
|
||||||
|
|
||||||
|
const baseUrl = `https://api.zoom.us/v2/meetings/${encodeURIComponent(meetingId)}/recordings`
|
||||||
|
const queryParams = new URLSearchParams()
|
||||||
|
|
||||||
|
if (includeFolderItems != null) {
|
||||||
|
queryParams.append('include_folder_items', String(includeFolderItems))
|
||||||
|
}
|
||||||
|
if (ttl) {
|
||||||
|
queryParams.append('ttl', String(ttl))
|
||||||
|
}
|
||||||
|
|
||||||
|
const queryString = queryParams.toString()
|
||||||
|
const apiUrl = queryString ? `${baseUrl}?${queryString}` : baseUrl
|
||||||
|
|
||||||
|
logger.info(`[${requestId}] Fetching recordings from Zoom`, { meetingId })
|
||||||
|
|
||||||
|
const urlValidation = await validateUrlWithDNS(apiUrl, 'apiUrl')
|
||||||
|
if (!urlValidation.isValid) {
|
||||||
|
return NextResponse.json({ success: false, error: urlValidation.error }, { status: 400 })
|
||||||
|
}
|
||||||
|
|
||||||
|
const response = await secureFetchWithPinnedIP(apiUrl, urlValidation.resolvedIP!, {
|
||||||
|
method: 'GET',
|
||||||
|
headers: {
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
Authorization: `Bearer ${accessToken}`,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
const errorData = (await response.json().catch(() => ({}))) as ZoomErrorResponse
|
||||||
|
logger.error(`[${requestId}] Zoom API error`, {
|
||||||
|
status: response.status,
|
||||||
|
error: errorData,
|
||||||
|
})
|
||||||
|
return NextResponse.json(
|
||||||
|
{ success: false, error: errorData.message || `Zoom API error: ${response.status}` },
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const data = (await response.json()) as ZoomRecordingsResponse
|
||||||
|
const files: Array<{
|
||||||
|
name: string
|
||||||
|
mimeType: string
|
||||||
|
data: string
|
||||||
|
size: number
|
||||||
|
}> = []
|
||||||
|
|
||||||
|
if (downloadFiles && Array.isArray(data.recording_files)) {
|
||||||
|
for (const file of data.recording_files) {
|
||||||
|
if (!file?.download_url) continue
|
||||||
|
|
||||||
|
try {
|
||||||
|
const fileUrlValidation = await validateUrlWithDNS(file.download_url, 'downloadUrl')
|
||||||
|
if (!fileUrlValidation.isValid) continue
|
||||||
|
|
||||||
|
const downloadResponse = await secureFetchWithPinnedIP(
|
||||||
|
file.download_url,
|
||||||
|
fileUrlValidation.resolvedIP!,
|
||||||
|
{
|
||||||
|
method: 'GET',
|
||||||
|
headers: { Authorization: `Bearer ${accessToken}` },
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
if (!downloadResponse.ok) continue
|
||||||
|
|
||||||
|
const contentType =
|
||||||
|
downloadResponse.headers.get('content-type') || 'application/octet-stream'
|
||||||
|
const arrayBuffer = await downloadResponse.arrayBuffer()
|
||||||
|
const buffer = Buffer.from(arrayBuffer)
|
||||||
|
const extension =
|
||||||
|
file.file_extension?.toString().toLowerCase() ||
|
||||||
|
getExtensionFromMimeType(contentType) ||
|
||||||
|
'dat'
|
||||||
|
const fileName = `zoom-recording-${file.id || file.recording_start || Date.now()}.${extension}`
|
||||||
|
|
||||||
|
files.push({
|
||||||
|
name: fileName,
|
||||||
|
mimeType: contentType,
|
||||||
|
data: buffer.toString('base64'),
|
||||||
|
size: buffer.length,
|
||||||
|
})
|
||||||
|
} catch (error) {
|
||||||
|
logger.warn(`[${requestId}] Failed to download recording file:`, error)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info(`[${requestId}] Zoom recordings fetched successfully`, {
|
||||||
|
recordingCount: data.recording_files?.length || 0,
|
||||||
|
downloadedCount: files.length,
|
||||||
|
})
|
||||||
|
|
||||||
|
return NextResponse.json({
|
||||||
|
success: true,
|
||||||
|
output: {
|
||||||
|
recording: {
|
||||||
|
uuid: data.uuid,
|
||||||
|
id: data.id,
|
||||||
|
account_id: data.account_id,
|
||||||
|
host_id: data.host_id,
|
||||||
|
topic: data.topic,
|
||||||
|
type: data.type,
|
||||||
|
start_time: data.start_time,
|
||||||
|
duration: data.duration,
|
||||||
|
total_size: data.total_size,
|
||||||
|
recording_count: data.recording_count,
|
||||||
|
share_url: data.share_url,
|
||||||
|
recording_files: (data.recording_files || []).map((file: ZoomRecordingFile) => ({
|
||||||
|
id: file.id,
|
||||||
|
meeting_id: file.meeting_id,
|
||||||
|
recording_start: file.recording_start,
|
||||||
|
recording_end: file.recording_end,
|
||||||
|
file_type: file.file_type,
|
||||||
|
file_extension: file.file_extension,
|
||||||
|
file_size: file.file_size,
|
||||||
|
play_url: file.play_url,
|
||||||
|
download_url: file.download_url,
|
||||||
|
status: file.status,
|
||||||
|
recording_type: file.recording_type,
|
||||||
|
})),
|
||||||
|
},
|
||||||
|
files: files.length > 0 ? files : undefined,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(`[${requestId}] Error fetching Zoom recordings:`, error)
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: error instanceof Error ? error.message : 'Unknown error occurred',
|
||||||
|
},
|
||||||
|
{ status: 500 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -5,6 +5,7 @@ import { and, eq } from 'drizzle-orm'
|
|||||||
import { type NextRequest, NextResponse } from 'next/server'
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { getSession } from '@/lib/auth'
|
import { getSession } from '@/lib/auth'
|
||||||
|
import { hasActiveSubscription } from '@/lib/billing'
|
||||||
|
|
||||||
const logger = createLogger('SubscriptionTransferAPI')
|
const logger = createLogger('SubscriptionTransferAPI')
|
||||||
|
|
||||||
@@ -88,6 +89,14 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Check if org already has an active subscription (prevent duplicates)
|
||||||
|
if (await hasActiveSubscription(organizationId)) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: 'Organization already has an active subscription' },
|
||||||
|
{ status: 409 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
await db
|
await db
|
||||||
.update(subscription)
|
.update(subscription)
|
||||||
.set({ referenceId: organizationId })
|
.set({ referenceId: organizationId })
|
||||||
|
|||||||
@@ -203,6 +203,10 @@ export const PATCH = withAdminAuthParams<RouteParams>(async (request, context) =
|
|||||||
}
|
}
|
||||||
|
|
||||||
updateData.billingBlocked = body.billingBlocked
|
updateData.billingBlocked = body.billingBlocked
|
||||||
|
// Clear the reason when unblocking
|
||||||
|
if (body.billingBlocked === false) {
|
||||||
|
updateData.billingBlockedReason = null
|
||||||
|
}
|
||||||
updated.push('billingBlocked')
|
updated.push('billingBlocked')
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -1,13 +1,13 @@
|
|||||||
import { db, workflow as workflowTable } from '@sim/db'
|
|
||||||
import { createLogger } from '@sim/logger'
|
import { createLogger } from '@sim/logger'
|
||||||
import { eq } from 'drizzle-orm'
|
|
||||||
import { type NextRequest, NextResponse } from 'next/server'
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
import { v4 as uuidv4 } from 'uuid'
|
import { v4 as uuidv4 } from 'uuid'
|
||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||||
|
import { getTimeoutErrorMessage, isTimeoutError } from '@/lib/core/execution-limits'
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
import { SSE_HEADERS } from '@/lib/core/utils/sse'
|
import { SSE_HEADERS } from '@/lib/core/utils/sse'
|
||||||
import { markExecutionCancelled } from '@/lib/execution/cancellation'
|
import { markExecutionCancelled } from '@/lib/execution/cancellation'
|
||||||
|
import { preprocessExecution } from '@/lib/execution/preprocessing'
|
||||||
import { LoggingSession } from '@/lib/logs/execution/logging-session'
|
import { LoggingSession } from '@/lib/logs/execution/logging-session'
|
||||||
import { executeWorkflowCore } from '@/lib/workflows/executor/execution-core'
|
import { executeWorkflowCore } from '@/lib/workflows/executor/execution-core'
|
||||||
import { createSSECallbacks } from '@/lib/workflows/executor/execution-events'
|
import { createSSECallbacks } from '@/lib/workflows/executor/execution-events'
|
||||||
@@ -75,12 +75,31 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
|||||||
const { startBlockId, sourceSnapshot, input } = validation.data
|
const { startBlockId, sourceSnapshot, input } = validation.data
|
||||||
const executionId = uuidv4()
|
const executionId = uuidv4()
|
||||||
|
|
||||||
const [workflowRecord] = await db
|
// Run preprocessing checks (billing, rate limits, usage limits)
|
||||||
.select({ workspaceId: workflowTable.workspaceId, userId: workflowTable.userId })
|
const preprocessResult = await preprocessExecution({
|
||||||
.from(workflowTable)
|
workflowId,
|
||||||
.where(eq(workflowTable.id, workflowId))
|
userId,
|
||||||
.limit(1)
|
triggerType: 'manual',
|
||||||
|
executionId,
|
||||||
|
requestId,
|
||||||
|
checkRateLimit: false, // Manual executions don't rate limit
|
||||||
|
checkDeployment: false, // Run-from-block doesn't require deployment
|
||||||
|
})
|
||||||
|
|
||||||
|
if (!preprocessResult.success) {
|
||||||
|
const { error } = preprocessResult
|
||||||
|
logger.warn(`[${requestId}] Preprocessing failed for run-from-block`, {
|
||||||
|
workflowId,
|
||||||
|
error: error?.message,
|
||||||
|
statusCode: error?.statusCode,
|
||||||
|
})
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: error?.message || 'Execution blocked' },
|
||||||
|
{ status: error?.statusCode || 500 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const workflowRecord = preprocessResult.workflowRecord
|
||||||
if (!workflowRecord?.workspaceId) {
|
if (!workflowRecord?.workspaceId) {
|
||||||
return NextResponse.json({ error: 'Workflow not found or has no workspace' }, { status: 404 })
|
return NextResponse.json({ error: 'Workflow not found or has no workspace' }, { status: 404 })
|
||||||
}
|
}
|
||||||
@@ -92,11 +111,22 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
|||||||
workflowId,
|
workflowId,
|
||||||
startBlockId,
|
startBlockId,
|
||||||
executedBlocksCount: sourceSnapshot.executedBlocks.length,
|
executedBlocksCount: sourceSnapshot.executedBlocks.length,
|
||||||
|
billingActorUserId: preprocessResult.actorUserId,
|
||||||
})
|
})
|
||||||
|
|
||||||
const loggingSession = new LoggingSession(workflowId, executionId, 'manual', requestId)
|
const loggingSession = new LoggingSession(workflowId, executionId, 'manual', requestId)
|
||||||
const abortController = new AbortController()
|
const abortController = new AbortController()
|
||||||
let isStreamClosed = false
|
let isStreamClosed = false
|
||||||
|
let isTimedOut = false
|
||||||
|
|
||||||
|
const syncTimeout = preprocessResult.executionTimeout?.sync
|
||||||
|
let timeoutId: NodeJS.Timeout | undefined
|
||||||
|
if (syncTimeout) {
|
||||||
|
timeoutId = setTimeout(() => {
|
||||||
|
isTimedOut = true
|
||||||
|
abortController.abort()
|
||||||
|
}, syncTimeout)
|
||||||
|
}
|
||||||
|
|
||||||
const stream = new ReadableStream<Uint8Array>({
|
const stream = new ReadableStream<Uint8Array>({
|
||||||
async start(controller) {
|
async start(controller) {
|
||||||
@@ -148,13 +178,33 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
|||||||
})
|
})
|
||||||
|
|
||||||
if (result.status === 'cancelled') {
|
if (result.status === 'cancelled') {
|
||||||
sendEvent({
|
if (isTimedOut && syncTimeout) {
|
||||||
type: 'execution:cancelled',
|
const timeoutErrorMessage = getTimeoutErrorMessage(null, syncTimeout)
|
||||||
timestamp: new Date().toISOString(),
|
logger.info(`[${requestId}] Run-from-block execution timed out`, {
|
||||||
executionId,
|
timeoutMs: syncTimeout,
|
||||||
workflowId,
|
})
|
||||||
data: { duration: result.metadata?.duration || 0 },
|
|
||||||
})
|
await loggingSession.markAsFailed(timeoutErrorMessage)
|
||||||
|
|
||||||
|
sendEvent({
|
||||||
|
type: 'execution:error',
|
||||||
|
timestamp: new Date().toISOString(),
|
||||||
|
executionId,
|
||||||
|
workflowId,
|
||||||
|
data: {
|
||||||
|
error: timeoutErrorMessage,
|
||||||
|
duration: result.metadata?.duration || 0,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
sendEvent({
|
||||||
|
type: 'execution:cancelled',
|
||||||
|
timestamp: new Date().toISOString(),
|
||||||
|
executionId,
|
||||||
|
workflowId,
|
||||||
|
data: { duration: result.metadata?.duration || 0 },
|
||||||
|
})
|
||||||
|
}
|
||||||
} else {
|
} else {
|
||||||
sendEvent({
|
sendEvent({
|
||||||
type: 'execution:completed',
|
type: 'execution:completed',
|
||||||
@@ -171,11 +221,25 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
} catch (error: unknown) {
|
} catch (error: unknown) {
|
||||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error'
|
const isTimeout = isTimeoutError(error) || isTimedOut
|
||||||
logger.error(`[${requestId}] Run-from-block execution failed: ${errorMessage}`)
|
const errorMessage = isTimeout
|
||||||
|
? getTimeoutErrorMessage(error, syncTimeout)
|
||||||
|
: error instanceof Error
|
||||||
|
? error.message
|
||||||
|
: 'Unknown error'
|
||||||
|
|
||||||
|
logger.error(`[${requestId}] Run-from-block execution failed: ${errorMessage}`, {
|
||||||
|
isTimeout,
|
||||||
|
})
|
||||||
|
|
||||||
const executionResult = hasExecutionResult(error) ? error.executionResult : undefined
|
const executionResult = hasExecutionResult(error) ? error.executionResult : undefined
|
||||||
|
|
||||||
|
await loggingSession.safeCompleteWithError({
|
||||||
|
totalDurationMs: executionResult?.metadata?.duration,
|
||||||
|
error: { message: errorMessage },
|
||||||
|
traceSpans: executionResult?.logs as any,
|
||||||
|
})
|
||||||
|
|
||||||
sendEvent({
|
sendEvent({
|
||||||
type: 'execution:error',
|
type: 'execution:error',
|
||||||
timestamp: new Date().toISOString(),
|
timestamp: new Date().toISOString(),
|
||||||
@@ -187,6 +251,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
|||||||
},
|
},
|
||||||
})
|
})
|
||||||
} finally {
|
} finally {
|
||||||
|
if (timeoutId) clearTimeout(timeoutId)
|
||||||
if (!isStreamClosed) {
|
if (!isStreamClosed) {
|
||||||
try {
|
try {
|
||||||
controller.enqueue(new TextEncoder().encode('data: [DONE]\n\n'))
|
controller.enqueue(new TextEncoder().encode('data: [DONE]\n\n'))
|
||||||
@@ -197,6 +262,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
|||||||
},
|
},
|
||||||
cancel() {
|
cancel() {
|
||||||
isStreamClosed = true
|
isStreamClosed = true
|
||||||
|
if (timeoutId) clearTimeout(timeoutId)
|
||||||
abortController.abort()
|
abortController.abort()
|
||||||
markExecutionCancelled(executionId).catch(() => {})
|
markExecutionCancelled(executionId).catch(() => {})
|
||||||
},
|
},
|
||||||
|
|||||||
@@ -5,6 +5,11 @@ import { validate as uuidValidate, v4 as uuidv4 } from 'uuid'
|
|||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||||
import { isTriggerDevEnabled } from '@/lib/core/config/feature-flags'
|
import { isTriggerDevEnabled } from '@/lib/core/config/feature-flags'
|
||||||
|
import {
|
||||||
|
createTimeoutAbortController,
|
||||||
|
getTimeoutErrorMessage,
|
||||||
|
isTimeoutError,
|
||||||
|
} from '@/lib/core/execution-limits'
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
import { SSE_HEADERS } from '@/lib/core/utils/sse'
|
import { SSE_HEADERS } from '@/lib/core/utils/sse'
|
||||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||||
@@ -120,10 +125,6 @@ type AsyncExecutionParams = {
|
|||||||
triggerType: CoreTriggerType
|
triggerType: CoreTriggerType
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Handles async workflow execution by queueing a background job.
|
|
||||||
* Returns immediately with a 202 Accepted response containing the job ID.
|
|
||||||
*/
|
|
||||||
async function handleAsyncExecution(params: AsyncExecutionParams): Promise<NextResponse> {
|
async function handleAsyncExecution(params: AsyncExecutionParams): Promise<NextResponse> {
|
||||||
const { requestId, workflowId, userId, input, triggerType } = params
|
const { requestId, workflowId, userId, input, triggerType } = params
|
||||||
|
|
||||||
@@ -405,6 +406,10 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
|||||||
|
|
||||||
if (!enableSSE) {
|
if (!enableSSE) {
|
||||||
logger.info(`[${requestId}] Using non-SSE execution (direct JSON response)`)
|
logger.info(`[${requestId}] Using non-SSE execution (direct JSON response)`)
|
||||||
|
const timeoutController = createTimeoutAbortController(
|
||||||
|
preprocessResult.executionTimeout?.sync
|
||||||
|
)
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const metadata: ExecutionMetadata = {
|
const metadata: ExecutionMetadata = {
|
||||||
requestId,
|
requestId,
|
||||||
@@ -438,8 +443,39 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
|||||||
includeFileBase64,
|
includeFileBase64,
|
||||||
base64MaxBytes,
|
base64MaxBytes,
|
||||||
stopAfterBlockId,
|
stopAfterBlockId,
|
||||||
|
abortSignal: timeoutController.signal,
|
||||||
})
|
})
|
||||||
|
|
||||||
|
if (
|
||||||
|
result.status === 'cancelled' &&
|
||||||
|
timeoutController.isTimedOut() &&
|
||||||
|
timeoutController.timeoutMs
|
||||||
|
) {
|
||||||
|
const timeoutErrorMessage = getTimeoutErrorMessage(null, timeoutController.timeoutMs)
|
||||||
|
logger.info(`[${requestId}] Non-SSE execution timed out`, {
|
||||||
|
timeoutMs: timeoutController.timeoutMs,
|
||||||
|
})
|
||||||
|
await loggingSession.markAsFailed(timeoutErrorMessage)
|
||||||
|
|
||||||
|
await cleanupExecutionBase64Cache(executionId)
|
||||||
|
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
output: result.output,
|
||||||
|
error: timeoutErrorMessage,
|
||||||
|
metadata: result.metadata
|
||||||
|
? {
|
||||||
|
duration: result.metadata.duration,
|
||||||
|
startTime: result.metadata.startTime,
|
||||||
|
endTime: result.metadata.endTime,
|
||||||
|
}
|
||||||
|
: undefined,
|
||||||
|
},
|
||||||
|
{ status: 408 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
const outputWithBase64 = includeFileBase64
|
const outputWithBase64 = includeFileBase64
|
||||||
? ((await hydrateUserFilesWithBase64(result.output, {
|
? ((await hydrateUserFilesWithBase64(result.output, {
|
||||||
requestId,
|
requestId,
|
||||||
@@ -474,10 +510,17 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
|||||||
return NextResponse.json(filteredResult)
|
return NextResponse.json(filteredResult)
|
||||||
} catch (error: unknown) {
|
} catch (error: unknown) {
|
||||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error'
|
const errorMessage = error instanceof Error ? error.message : 'Unknown error'
|
||||||
|
|
||||||
logger.error(`[${requestId}] Non-SSE execution failed: ${errorMessage}`)
|
logger.error(`[${requestId}] Non-SSE execution failed: ${errorMessage}`)
|
||||||
|
|
||||||
const executionResult = hasExecutionResult(error) ? error.executionResult : undefined
|
const executionResult = hasExecutionResult(error) ? error.executionResult : undefined
|
||||||
|
|
||||||
|
await loggingSession.safeCompleteWithError({
|
||||||
|
totalDurationMs: executionResult?.metadata?.duration,
|
||||||
|
error: { message: errorMessage },
|
||||||
|
traceSpans: executionResult?.logs as any,
|
||||||
|
})
|
||||||
|
|
||||||
return NextResponse.json(
|
return NextResponse.json(
|
||||||
{
|
{
|
||||||
success: false,
|
success: false,
|
||||||
@@ -493,6 +536,8 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
|||||||
},
|
},
|
||||||
{ status: 500 }
|
{ status: 500 }
|
||||||
)
|
)
|
||||||
|
} finally {
|
||||||
|
timeoutController.cleanup()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -506,7 +551,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
|||||||
cachedWorkflowData?.blocks || {}
|
cachedWorkflowData?.blocks || {}
|
||||||
)
|
)
|
||||||
const streamVariables = cachedWorkflowData?.variables ?? (workflow as any).variables
|
const streamVariables = cachedWorkflowData?.variables ?? (workflow as any).variables
|
||||||
|
|
||||||
const stream = await createStreamingResponse({
|
const stream = await createStreamingResponse({
|
||||||
requestId,
|
requestId,
|
||||||
workflow: {
|
workflow: {
|
||||||
@@ -524,6 +568,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
|||||||
workflowTriggerType: triggerType === 'chat' ? 'chat' : 'api',
|
workflowTriggerType: triggerType === 'chat' ? 'chat' : 'api',
|
||||||
includeFileBase64,
|
includeFileBase64,
|
||||||
base64MaxBytes,
|
base64MaxBytes,
|
||||||
|
timeoutMs: preprocessResult.executionTimeout?.sync,
|
||||||
},
|
},
|
||||||
executionId,
|
executionId,
|
||||||
})
|
})
|
||||||
@@ -535,7 +580,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
|||||||
}
|
}
|
||||||
|
|
||||||
const encoder = new TextEncoder()
|
const encoder = new TextEncoder()
|
||||||
const abortController = new AbortController()
|
const timeoutController = createTimeoutAbortController(preprocessResult.executionTimeout?.sync)
|
||||||
let isStreamClosed = false
|
let isStreamClosed = false
|
||||||
|
|
||||||
const stream = new ReadableStream<Uint8Array>({
|
const stream = new ReadableStream<Uint8Array>({
|
||||||
@@ -567,6 +612,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
|||||||
blockId: string,
|
blockId: string,
|
||||||
blockName: string,
|
blockName: string,
|
||||||
blockType: string,
|
blockType: string,
|
||||||
|
executionOrder: number,
|
||||||
iterationContext?: IterationContext
|
iterationContext?: IterationContext
|
||||||
) => {
|
) => {
|
||||||
logger.info(`[${requestId}] 🔷 onBlockStart called:`, { blockId, blockName, blockType })
|
logger.info(`[${requestId}] 🔷 onBlockStart called:`, { blockId, blockName, blockType })
|
||||||
@@ -579,6 +625,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
|||||||
blockId,
|
blockId,
|
||||||
blockName,
|
blockName,
|
||||||
blockType,
|
blockType,
|
||||||
|
executionOrder,
|
||||||
...(iterationContext && {
|
...(iterationContext && {
|
||||||
iterationCurrent: iterationContext.iterationCurrent,
|
iterationCurrent: iterationContext.iterationCurrent,
|
||||||
iterationTotal: iterationContext.iterationTotal,
|
iterationTotal: iterationContext.iterationTotal,
|
||||||
@@ -617,6 +664,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
|||||||
error: callbackData.output.error,
|
error: callbackData.output.error,
|
||||||
durationMs: callbackData.executionTime || 0,
|
durationMs: callbackData.executionTime || 0,
|
||||||
startedAt: callbackData.startedAt,
|
startedAt: callbackData.startedAt,
|
||||||
|
executionOrder: callbackData.executionOrder,
|
||||||
endedAt: callbackData.endedAt,
|
endedAt: callbackData.endedAt,
|
||||||
...(iterationContext && {
|
...(iterationContext && {
|
||||||
iterationCurrent: iterationContext.iterationCurrent,
|
iterationCurrent: iterationContext.iterationCurrent,
|
||||||
@@ -644,6 +692,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
|||||||
output: callbackData.output,
|
output: callbackData.output,
|
||||||
durationMs: callbackData.executionTime || 0,
|
durationMs: callbackData.executionTime || 0,
|
||||||
startedAt: callbackData.startedAt,
|
startedAt: callbackData.startedAt,
|
||||||
|
executionOrder: callbackData.executionOrder,
|
||||||
endedAt: callbackData.endedAt,
|
endedAt: callbackData.endedAt,
|
||||||
...(iterationContext && {
|
...(iterationContext && {
|
||||||
iterationCurrent: iterationContext.iterationCurrent,
|
iterationCurrent: iterationContext.iterationCurrent,
|
||||||
@@ -727,7 +776,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
|||||||
onStream,
|
onStream,
|
||||||
},
|
},
|
||||||
loggingSession,
|
loggingSession,
|
||||||
abortSignal: abortController.signal,
|
abortSignal: timeoutController.signal,
|
||||||
includeFileBase64,
|
includeFileBase64,
|
||||||
base64MaxBytes,
|
base64MaxBytes,
|
||||||
stopAfterBlockId,
|
stopAfterBlockId,
|
||||||
@@ -763,16 +812,37 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
|||||||
}
|
}
|
||||||
|
|
||||||
if (result.status === 'cancelled') {
|
if (result.status === 'cancelled') {
|
||||||
logger.info(`[${requestId}] Workflow execution was cancelled`)
|
if (timeoutController.isTimedOut() && timeoutController.timeoutMs) {
|
||||||
sendEvent({
|
const timeoutErrorMessage = getTimeoutErrorMessage(null, timeoutController.timeoutMs)
|
||||||
type: 'execution:cancelled',
|
logger.info(`[${requestId}] Workflow execution timed out`, {
|
||||||
timestamp: new Date().toISOString(),
|
timeoutMs: timeoutController.timeoutMs,
|
||||||
executionId,
|
})
|
||||||
workflowId,
|
|
||||||
data: {
|
await loggingSession.markAsFailed(timeoutErrorMessage)
|
||||||
duration: result.metadata?.duration || 0,
|
|
||||||
},
|
sendEvent({
|
||||||
})
|
type: 'execution:error',
|
||||||
|
timestamp: new Date().toISOString(),
|
||||||
|
executionId,
|
||||||
|
workflowId,
|
||||||
|
data: {
|
||||||
|
error: timeoutErrorMessage,
|
||||||
|
duration: result.metadata?.duration || 0,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
logger.info(`[${requestId}] Workflow execution was cancelled`)
|
||||||
|
|
||||||
|
sendEvent({
|
||||||
|
type: 'execution:cancelled',
|
||||||
|
timestamp: new Date().toISOString(),
|
||||||
|
executionId,
|
||||||
|
workflowId,
|
||||||
|
data: {
|
||||||
|
duration: result.metadata?.duration || 0,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
}
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -799,11 +869,23 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
|||||||
// Cleanup base64 cache for this execution
|
// Cleanup base64 cache for this execution
|
||||||
await cleanupExecutionBase64Cache(executionId)
|
await cleanupExecutionBase64Cache(executionId)
|
||||||
} catch (error: unknown) {
|
} catch (error: unknown) {
|
||||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error'
|
const isTimeout = isTimeoutError(error) || timeoutController.isTimedOut()
|
||||||
logger.error(`[${requestId}] SSE execution failed: ${errorMessage}`)
|
const errorMessage = isTimeout
|
||||||
|
? getTimeoutErrorMessage(error, timeoutController.timeoutMs)
|
||||||
|
: error instanceof Error
|
||||||
|
? error.message
|
||||||
|
: 'Unknown error'
|
||||||
|
|
||||||
|
logger.error(`[${requestId}] SSE execution failed: ${errorMessage}`, { isTimeout })
|
||||||
|
|
||||||
const executionResult = hasExecutionResult(error) ? error.executionResult : undefined
|
const executionResult = hasExecutionResult(error) ? error.executionResult : undefined
|
||||||
|
|
||||||
|
await loggingSession.safeCompleteWithError({
|
||||||
|
totalDurationMs: executionResult?.metadata?.duration,
|
||||||
|
error: { message: errorMessage },
|
||||||
|
traceSpans: executionResult?.logs as any,
|
||||||
|
})
|
||||||
|
|
||||||
sendEvent({
|
sendEvent({
|
||||||
type: 'execution:error',
|
type: 'execution:error',
|
||||||
timestamp: new Date().toISOString(),
|
timestamp: new Date().toISOString(),
|
||||||
@@ -815,20 +897,20 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
|||||||
},
|
},
|
||||||
})
|
})
|
||||||
} finally {
|
} finally {
|
||||||
|
timeoutController.cleanup()
|
||||||
if (!isStreamClosed) {
|
if (!isStreamClosed) {
|
||||||
try {
|
try {
|
||||||
controller.enqueue(encoder.encode('data: [DONE]\n\n'))
|
controller.enqueue(encoder.encode('data: [DONE]\n\n'))
|
||||||
controller.close()
|
controller.close()
|
||||||
} catch {
|
} catch {}
|
||||||
// Stream already closed - nothing to do
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
cancel() {
|
cancel() {
|
||||||
isStreamClosed = true
|
isStreamClosed = true
|
||||||
|
timeoutController.cleanup()
|
||||||
logger.info(`[${requestId}] Client aborted SSE stream, signalling cancellation`)
|
logger.info(`[${requestId}] Client aborted SSE stream, signalling cancellation`)
|
||||||
abortController.abort()
|
timeoutController.abort()
|
||||||
markExecutionCancelled(executionId).catch(() => {})
|
markExecutionCancelled(executionId).catch(() => {})
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
|
|||||||
@@ -102,7 +102,7 @@ describe('Workspace Invitations API Route', () => {
|
|||||||
inArray: vi.fn().mockImplementation((field, values) => ({ type: 'inArray', field, values })),
|
inArray: vi.fn().mockImplementation((field, values) => ({ type: 'inArray', field, values })),
|
||||||
}))
|
}))
|
||||||
|
|
||||||
vi.doMock('@/executor/utils/permission-check', () => ({
|
vi.doMock('@/ee/access-control/utils/permission-check', () => ({
|
||||||
validateInvitationsAllowed: vi.fn().mockResolvedValue(undefined),
|
validateInvitationsAllowed: vi.fn().mockResolvedValue(undefined),
|
||||||
InvitationsNotAllowedError: class InvitationsNotAllowedError extends Error {
|
InvitationsNotAllowedError: class InvitationsNotAllowedError extends Error {
|
||||||
constructor() {
|
constructor() {
|
||||||
|
|||||||
@@ -21,7 +21,7 @@ import { getFromEmailAddress } from '@/lib/messaging/email/utils'
|
|||||||
import {
|
import {
|
||||||
InvitationsNotAllowedError,
|
InvitationsNotAllowedError,
|
||||||
validateInvitationsAllowed,
|
validateInvitationsAllowed,
|
||||||
} from '@/executor/utils/permission-check'
|
} from '@/ee/access-control/utils/permission-check'
|
||||||
|
|
||||||
export const dynamic = 'force-dynamic'
|
export const dynamic = 'force-dynamic'
|
||||||
|
|
||||||
@@ -38,7 +38,6 @@ export async function GET(req: NextRequest) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
// Get all workspaces where the user has permissions
|
|
||||||
const userWorkspaces = await db
|
const userWorkspaces = await db
|
||||||
.select({ id: workspace.id })
|
.select({ id: workspace.id })
|
||||||
.from(workspace)
|
.from(workspace)
|
||||||
@@ -55,10 +54,8 @@ export async function GET(req: NextRequest) {
|
|||||||
return NextResponse.json({ invitations: [] })
|
return NextResponse.json({ invitations: [] })
|
||||||
}
|
}
|
||||||
|
|
||||||
// Get all workspaceIds where the user is a member
|
|
||||||
const workspaceIds = userWorkspaces.map((w) => w.id)
|
const workspaceIds = userWorkspaces.map((w) => w.id)
|
||||||
|
|
||||||
// Find all invitations for those workspaces
|
|
||||||
const invitations = await db
|
const invitations = await db
|
||||||
.select()
|
.select()
|
||||||
.from(workspaceInvitation)
|
.from(workspaceInvitation)
|
||||||
|
|||||||
@@ -14,11 +14,11 @@ import {
|
|||||||
ChatMessageContainer,
|
ChatMessageContainer,
|
||||||
EmailAuth,
|
EmailAuth,
|
||||||
PasswordAuth,
|
PasswordAuth,
|
||||||
SSOAuth,
|
|
||||||
VoiceInterface,
|
VoiceInterface,
|
||||||
} from '@/app/chat/components'
|
} from '@/app/chat/components'
|
||||||
import { CHAT_ERROR_MESSAGES, CHAT_REQUEST_TIMEOUT_MS } from '@/app/chat/constants'
|
import { CHAT_ERROR_MESSAGES, CHAT_REQUEST_TIMEOUT_MS } from '@/app/chat/constants'
|
||||||
import { useAudioStreaming, useChatStreaming } from '@/app/chat/hooks'
|
import { useAudioStreaming, useChatStreaming } from '@/app/chat/hooks'
|
||||||
|
import SSOAuth from '@/ee/sso/components/sso-auth'
|
||||||
|
|
||||||
const logger = createLogger('ChatClient')
|
const logger = createLogger('ChatClient')
|
||||||
|
|
||||||
|
|||||||
@@ -1,6 +1,5 @@
|
|||||||
export { default as EmailAuth } from './auth/email/email-auth'
|
export { default as EmailAuth } from './auth/email/email-auth'
|
||||||
export { default as PasswordAuth } from './auth/password/password-auth'
|
export { default as PasswordAuth } from './auth/password/password-auth'
|
||||||
export { default as SSOAuth } from './auth/sso/sso-auth'
|
|
||||||
export { ChatErrorState } from './error-state/error-state'
|
export { ChatErrorState } from './error-state/error-state'
|
||||||
export { ChatHeader } from './header/header'
|
export { ChatHeader } from './header/header'
|
||||||
export { ChatInput } from './input/input'
|
export { ChatInput } from './input/input'
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
import { redirect } from 'next/navigation'
|
import { redirect } from 'next/navigation'
|
||||||
import { getSession } from '@/lib/auth'
|
import { getSession } from '@/lib/auth'
|
||||||
import { verifyWorkspaceMembership } from '@/app/api/workflows/utils'
|
import { verifyWorkspaceMembership } from '@/app/api/workflows/utils'
|
||||||
import { getUserPermissionConfig } from '@/executor/utils/permission-check'
|
import { getUserPermissionConfig } from '@/ee/access-control/utils/permission-check'
|
||||||
import { Knowledge } from './knowledge'
|
import { Knowledge } from './knowledge'
|
||||||
|
|
||||||
interface KnowledgePageProps {
|
interface KnowledgePageProps {
|
||||||
@@ -23,7 +23,6 @@ export default async function KnowledgePage({ params }: KnowledgePageProps) {
|
|||||||
redirect('/')
|
redirect('/')
|
||||||
}
|
}
|
||||||
|
|
||||||
// Check permission group restrictions
|
|
||||||
const permissionConfig = await getUserPermissionConfig(session.user.id)
|
const permissionConfig = await getUserPermissionConfig(session.user.id)
|
||||||
if (permissionConfig?.hideKnowledgeBaseTab) {
|
if (permissionConfig?.hideKnowledgeBaseTab) {
|
||||||
redirect(`/workspace/${workspaceId}`)
|
redirect(`/workspace/${workspaceId}`)
|
||||||
|
|||||||
@@ -104,14 +104,12 @@ function FileCard({ file, isExecutionFile = false, workspaceId }: FileCardProps)
|
|||||||
}
|
}
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<div className='flex flex-col gap-[8px] rounded-[6px] bg-[var(--surface-1)] px-[10px] py-[8px]'>
|
<div className='flex flex-col gap-[4px] rounded-[6px] bg-[var(--surface-1)] px-[8px] py-[6px]'>
|
||||||
<div className='flex items-center justify-between'>
|
<div className='flex min-w-0 items-center justify-between gap-[8px]'>
|
||||||
<div className='flex items-center gap-[8px]'>
|
<span className='min-w-0 flex-1 truncate font-medium text-[12px] text-[var(--text-secondary)]'>
|
||||||
<span className='truncate font-medium text-[12px] text-[var(--text-secondary)]'>
|
{file.name}
|
||||||
{file.name}
|
</span>
|
||||||
</span>
|
<span className='flex-shrink-0 font-medium text-[12px] text-[var(--text-tertiary)]'>
|
||||||
</div>
|
|
||||||
<span className='font-medium text-[12px] text-[var(--text-tertiary)]'>
|
|
||||||
{formatFileSize(file.size)}
|
{formatFileSize(file.size)}
|
||||||
</span>
|
</span>
|
||||||
</div>
|
</div>
|
||||||
@@ -142,20 +140,18 @@ export function FileCards({ files, isExecutionFile = false, workspaceId }: FileC
|
|||||||
}
|
}
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<div className='flex w-full flex-col gap-[6px] rounded-[6px] bg-[var(--surface-2)] px-[10px] py-[8px]'>
|
<div className='mt-[4px] flex flex-col gap-[6px] rounded-[6px] border border-[var(--border)] bg-[var(--surface-2)] px-[10px] py-[8px] dark:bg-transparent'>
|
||||||
<span className='font-medium text-[12px] text-[var(--text-tertiary)]'>
|
<span className='font-medium text-[12px] text-[var(--text-tertiary)]'>
|
||||||
Files ({files.length})
|
Files ({files.length})
|
||||||
</span>
|
</span>
|
||||||
<div className='flex flex-col gap-[8px]'>
|
{files.map((file, index) => (
|
||||||
{files.map((file, index) => (
|
<FileCard
|
||||||
<FileCard
|
key={file.id || `file-${index}`}
|
||||||
key={file.id || `file-${index}`}
|
file={file}
|
||||||
file={file}
|
isExecutionFile={isExecutionFile}
|
||||||
isExecutionFile={isExecutionFile}
|
workspaceId={workspaceId}
|
||||||
workspaceId={workspaceId}
|
/>
|
||||||
/>
|
))}
|
||||||
))}
|
|
||||||
</div>
|
|
||||||
</div>
|
</div>
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -18,6 +18,7 @@ import {
|
|||||||
import { ScrollArea } from '@/components/ui/scroll-area'
|
import { ScrollArea } from '@/components/ui/scroll-area'
|
||||||
import { BASE_EXECUTION_CHARGE } from '@/lib/billing/constants'
|
import { BASE_EXECUTION_CHARGE } from '@/lib/billing/constants'
|
||||||
import { cn } from '@/lib/core/utils/cn'
|
import { cn } from '@/lib/core/utils/cn'
|
||||||
|
import { formatDuration } from '@/lib/core/utils/formatting'
|
||||||
import { filterHiddenOutputKeys } from '@/lib/logs/execution/trace-spans/trace-spans'
|
import { filterHiddenOutputKeys } from '@/lib/logs/execution/trace-spans/trace-spans'
|
||||||
import {
|
import {
|
||||||
ExecutionSnapshot,
|
ExecutionSnapshot,
|
||||||
@@ -453,7 +454,7 @@ export const LogDetails = memo(function LogDetails({
|
|||||||
Duration
|
Duration
|
||||||
</span>
|
</span>
|
||||||
<span className='font-medium text-[13px] text-[var(--text-secondary)]'>
|
<span className='font-medium text-[13px] text-[var(--text-secondary)]'>
|
||||||
{log.duration || '—'}
|
{formatDuration(log.duration, { precision: 2 }) || '—'}
|
||||||
</span>
|
</span>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
|||||||
@@ -6,11 +6,11 @@ import Link from 'next/link'
|
|||||||
import { List, type RowComponentProps, useListRef } from 'react-window'
|
import { List, type RowComponentProps, useListRef } from 'react-window'
|
||||||
import { Badge, buttonVariants } from '@/components/emcn'
|
import { Badge, buttonVariants } from '@/components/emcn'
|
||||||
import { cn } from '@/lib/core/utils/cn'
|
import { cn } from '@/lib/core/utils/cn'
|
||||||
|
import { formatDuration } from '@/lib/core/utils/formatting'
|
||||||
import {
|
import {
|
||||||
DELETED_WORKFLOW_COLOR,
|
DELETED_WORKFLOW_COLOR,
|
||||||
DELETED_WORKFLOW_LABEL,
|
DELETED_WORKFLOW_LABEL,
|
||||||
formatDate,
|
formatDate,
|
||||||
formatDuration,
|
|
||||||
getDisplayStatus,
|
getDisplayStatus,
|
||||||
LOG_COLUMNS,
|
LOG_COLUMNS,
|
||||||
StatusBadge,
|
StatusBadge,
|
||||||
@@ -113,7 +113,7 @@ const LogRow = memo(
|
|||||||
|
|
||||||
<div className={`${LOG_COLUMNS.duration.width} ${LOG_COLUMNS.duration.minWidth}`}>
|
<div className={`${LOG_COLUMNS.duration.width} ${LOG_COLUMNS.duration.minWidth}`}>
|
||||||
<Badge variant='default' className='rounded-[6px] px-[9px] py-[2px] text-[12px]'>
|
<Badge variant='default' className='rounded-[6px] px-[9px] py-[2px] text-[12px]'>
|
||||||
{formatDuration(log.duration) || '—'}
|
{formatDuration(log.duration, { precision: 2 }) || '—'}
|
||||||
</Badge>
|
</Badge>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
import React from 'react'
|
import React from 'react'
|
||||||
import { format } from 'date-fns'
|
import { format } from 'date-fns'
|
||||||
import { Badge } from '@/components/emcn'
|
import { Badge } from '@/components/emcn'
|
||||||
|
import { formatDuration } from '@/lib/core/utils/formatting'
|
||||||
import { getIntegrationMetadata } from '@/lib/logs/get-trigger-options'
|
import { getIntegrationMetadata } from '@/lib/logs/get-trigger-options'
|
||||||
import { getBlock } from '@/blocks/registry'
|
import { getBlock } from '@/blocks/registry'
|
||||||
import { CORE_TRIGGER_TYPES } from '@/stores/logs/filters/types'
|
import { CORE_TRIGGER_TYPES } from '@/stores/logs/filters/types'
|
||||||
@@ -362,47 +363,14 @@ export function mapToExecutionLogAlt(log: RawLogResponse): ExecutionLog {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Format duration for display in logs UI
|
|
||||||
* If duration is under 1 second, displays as milliseconds (e.g., "500ms")
|
|
||||||
* If duration is 1 second or more, displays as seconds (e.g., "1.23s")
|
|
||||||
* @param duration - Duration string (e.g., "500ms") or null
|
|
||||||
* @returns Formatted duration string or null
|
|
||||||
*/
|
|
||||||
export function formatDuration(duration: string | null): string | null {
|
|
||||||
if (!duration) return null
|
|
||||||
|
|
||||||
// Extract numeric value from duration string (e.g., "500ms" -> 500)
|
|
||||||
const ms = Number.parseInt(duration.replace(/[^0-9]/g, ''), 10)
|
|
||||||
|
|
||||||
if (!Number.isFinite(ms)) return duration
|
|
||||||
|
|
||||||
if (ms < 1000) {
|
|
||||||
return `${ms}ms`
|
|
||||||
}
|
|
||||||
|
|
||||||
// Convert to seconds with up to 2 decimal places
|
|
||||||
const seconds = ms / 1000
|
|
||||||
return `${seconds.toFixed(2).replace(/\.?0+$/, '')}s`
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Format latency value for display in dashboard UI
|
* Format latency value for display in dashboard UI
|
||||||
* If latency is under 1 second, displays as milliseconds (e.g., "500ms")
|
|
||||||
* If latency is 1 second or more, displays as seconds (e.g., "1.23s")
|
|
||||||
* @param ms - Latency in milliseconds (number)
|
* @param ms - Latency in milliseconds (number)
|
||||||
* @returns Formatted latency string
|
* @returns Formatted latency string
|
||||||
*/
|
*/
|
||||||
export function formatLatency(ms: number): string {
|
export function formatLatency(ms: number): string {
|
||||||
if (!Number.isFinite(ms) || ms <= 0) return '—'
|
if (!Number.isFinite(ms) || ms <= 0) return '—'
|
||||||
|
return formatDuration(ms, { precision: 2 }) ?? '—'
|
||||||
if (ms < 1000) {
|
|
||||||
return `${Math.round(ms)}ms`
|
|
||||||
}
|
|
||||||
|
|
||||||
// Convert to seconds with up to 2 decimal places
|
|
||||||
const seconds = ms / 1000
|
|
||||||
return `${seconds.toFixed(2).replace(/\.?0+$/, '')}s`
|
|
||||||
}
|
}
|
||||||
|
|
||||||
export const formatDate = (dateString: string) => {
|
export const formatDate = (dateString: string) => {
|
||||||
|
|||||||
@@ -1,10 +1,11 @@
|
|||||||
'use client'
|
'use client'
|
||||||
|
|
||||||
import type React from 'react'
|
import type React from 'react'
|
||||||
import { createContext, useCallback, useContext, useEffect, useMemo, useState } from 'react'
|
import { createContext, useCallback, useContext, useEffect, useMemo, useRef, useState } from 'react'
|
||||||
import { createLogger } from '@sim/logger'
|
import { createLogger } from '@sim/logger'
|
||||||
import { useQueryClient } from '@tanstack/react-query'
|
import { useQueryClient } from '@tanstack/react-query'
|
||||||
import { useParams } from 'next/navigation'
|
import { useParams } from 'next/navigation'
|
||||||
|
import { useSocket } from '@/app/workspace/providers/socket-provider'
|
||||||
import {
|
import {
|
||||||
useWorkspacePermissionsQuery,
|
useWorkspacePermissionsQuery,
|
||||||
type WorkspacePermissions,
|
type WorkspacePermissions,
|
||||||
@@ -57,14 +58,42 @@ export function WorkspacePermissionsProvider({ children }: WorkspacePermissionsP
|
|||||||
const [hasShownOfflineNotification, setHasShownOfflineNotification] = useState(false)
|
const [hasShownOfflineNotification, setHasShownOfflineNotification] = useState(false)
|
||||||
const hasOperationError = useOperationQueueStore((state) => state.hasOperationError)
|
const hasOperationError = useOperationQueueStore((state) => state.hasOperationError)
|
||||||
const addNotification = useNotificationStore((state) => state.addNotification)
|
const addNotification = useNotificationStore((state) => state.addNotification)
|
||||||
|
const removeNotification = useNotificationStore((state) => state.removeNotification)
|
||||||
|
const { isReconnecting } = useSocket()
|
||||||
|
const reconnectingNotificationIdRef = useRef<string | null>(null)
|
||||||
|
|
||||||
const isOfflineMode = hasOperationError
|
const isOfflineMode = hasOperationError
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
if (isReconnecting && !reconnectingNotificationIdRef.current && !isOfflineMode) {
|
||||||
|
const id = addNotification({
|
||||||
|
level: 'error',
|
||||||
|
message: 'Reconnecting...',
|
||||||
|
})
|
||||||
|
reconnectingNotificationIdRef.current = id
|
||||||
|
} else if (!isReconnecting && reconnectingNotificationIdRef.current) {
|
||||||
|
removeNotification(reconnectingNotificationIdRef.current)
|
||||||
|
reconnectingNotificationIdRef.current = null
|
||||||
|
}
|
||||||
|
|
||||||
|
return () => {
|
||||||
|
if (reconnectingNotificationIdRef.current) {
|
||||||
|
removeNotification(reconnectingNotificationIdRef.current)
|
||||||
|
reconnectingNotificationIdRef.current = null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}, [isReconnecting, isOfflineMode, addNotification, removeNotification])
|
||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
if (!isOfflineMode || hasShownOfflineNotification) {
|
if (!isOfflineMode || hasShownOfflineNotification) {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (reconnectingNotificationIdRef.current) {
|
||||||
|
removeNotification(reconnectingNotificationIdRef.current)
|
||||||
|
reconnectingNotificationIdRef.current = null
|
||||||
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
addNotification({
|
addNotification({
|
||||||
level: 'error',
|
level: 'error',
|
||||||
@@ -78,7 +107,7 @@ export function WorkspacePermissionsProvider({ children }: WorkspacePermissionsP
|
|||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error('Failed to add offline notification', { error })
|
logger.error('Failed to add offline notification', { error })
|
||||||
}
|
}
|
||||||
}, [addNotification, hasShownOfflineNotification, isOfflineMode])
|
}, [addNotification, removeNotification, hasShownOfflineNotification, isOfflineMode])
|
||||||
|
|
||||||
const {
|
const {
|
||||||
data: workspacePermissions,
|
data: workspacePermissions,
|
||||||
|
|||||||
@@ -6,7 +6,7 @@ import { getSession } from '@/lib/auth'
|
|||||||
import { verifyWorkspaceMembership } from '@/app/api/workflows/utils'
|
import { verifyWorkspaceMembership } from '@/app/api/workflows/utils'
|
||||||
import type { Template as WorkspaceTemplate } from '@/app/workspace/[workspaceId]/templates/templates'
|
import type { Template as WorkspaceTemplate } from '@/app/workspace/[workspaceId]/templates/templates'
|
||||||
import Templates from '@/app/workspace/[workspaceId]/templates/templates'
|
import Templates from '@/app/workspace/[workspaceId]/templates/templates'
|
||||||
import { getUserPermissionConfig } from '@/executor/utils/permission-check'
|
import { getUserPermissionConfig } from '@/ee/access-control/utils/permission-check'
|
||||||
|
|
||||||
interface TemplatesPageProps {
|
interface TemplatesPageProps {
|
||||||
params: Promise<{
|
params: Promise<{
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
import { memo, useCallback } from 'react'
|
import { memo, useCallback } from 'react'
|
||||||
import { ArrowLeftRight, ArrowUpDown, Circle, CircleOff, LogOut } from 'lucide-react'
|
import { ArrowLeftRight, ArrowUpDown, Circle, CircleOff, Lock, LogOut, Unlock } from 'lucide-react'
|
||||||
import { Button, Copy, PlayOutline, Tooltip, Trash2 } from '@/components/emcn'
|
import { Button, Copy, PlayOutline, Tooltip, Trash2 } from '@/components/emcn'
|
||||||
import { cn } from '@/lib/core/utils/cn'
|
import { cn } from '@/lib/core/utils/cn'
|
||||||
import { isInputDefinitionTrigger } from '@/lib/workflows/triggers/input-definition-triggers'
|
import { isInputDefinitionTrigger } from '@/lib/workflows/triggers/input-definition-triggers'
|
||||||
@@ -49,6 +49,7 @@ export const ActionBar = memo(
|
|||||||
collaborativeBatchRemoveBlocks,
|
collaborativeBatchRemoveBlocks,
|
||||||
collaborativeBatchToggleBlockEnabled,
|
collaborativeBatchToggleBlockEnabled,
|
||||||
collaborativeBatchToggleBlockHandles,
|
collaborativeBatchToggleBlockHandles,
|
||||||
|
collaborativeBatchToggleLocked,
|
||||||
} = useCollaborativeWorkflow()
|
} = useCollaborativeWorkflow()
|
||||||
const { setPendingSelection } = useWorkflowRegistry()
|
const { setPendingSelection } = useWorkflowRegistry()
|
||||||
const { handleRunFromBlock } = useWorkflowExecution()
|
const { handleRunFromBlock } = useWorkflowExecution()
|
||||||
@@ -84,16 +85,28 @@ export const ActionBar = memo(
|
|||||||
)
|
)
|
||||||
}, [blockId, addNotification, collaborativeBatchAddBlocks, setPendingSelection])
|
}, [blockId, addNotification, collaborativeBatchAddBlocks, setPendingSelection])
|
||||||
|
|
||||||
const { isEnabled, horizontalHandles, parentId, parentType } = useWorkflowStore(
|
const {
|
||||||
|
isEnabled,
|
||||||
|
horizontalHandles,
|
||||||
|
parentId,
|
||||||
|
parentType,
|
||||||
|
isLocked,
|
||||||
|
isParentLocked,
|
||||||
|
isParentDisabled,
|
||||||
|
} = useWorkflowStore(
|
||||||
useCallback(
|
useCallback(
|
||||||
(state) => {
|
(state) => {
|
||||||
const block = state.blocks[blockId]
|
const block = state.blocks[blockId]
|
||||||
const parentId = block?.data?.parentId
|
const parentId = block?.data?.parentId
|
||||||
|
const parentBlock = parentId ? state.blocks[parentId] : undefined
|
||||||
return {
|
return {
|
||||||
isEnabled: block?.enabled ?? true,
|
isEnabled: block?.enabled ?? true,
|
||||||
horizontalHandles: block?.horizontalHandles ?? false,
|
horizontalHandles: block?.horizontalHandles ?? false,
|
||||||
parentId,
|
parentId,
|
||||||
parentType: parentId ? state.blocks[parentId]?.type : undefined,
|
parentType: parentBlock?.type,
|
||||||
|
isLocked: block?.locked ?? false,
|
||||||
|
isParentLocked: parentBlock?.locked ?? false,
|
||||||
|
isParentDisabled: parentBlock ? !parentBlock.enabled : false,
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
[blockId]
|
[blockId]
|
||||||
@@ -161,25 +174,27 @@ export const ActionBar = memo(
|
|||||||
{!isNoteBlock && !isInsideSubflow && (
|
{!isNoteBlock && !isInsideSubflow && (
|
||||||
<Tooltip.Root>
|
<Tooltip.Root>
|
||||||
<Tooltip.Trigger asChild>
|
<Tooltip.Trigger asChild>
|
||||||
<Button
|
<span className='inline-flex'>
|
||||||
variant='ghost'
|
<Button
|
||||||
onClick={(e) => {
|
variant='ghost'
|
||||||
e.stopPropagation()
|
onClick={(e) => {
|
||||||
if (canRunFromBlock && !disabled) {
|
e.stopPropagation()
|
||||||
handleRunFromBlockClick()
|
if (canRunFromBlock && !disabled) {
|
||||||
}
|
handleRunFromBlockClick()
|
||||||
}}
|
}
|
||||||
className={ACTION_BUTTON_STYLES}
|
}}
|
||||||
disabled={disabled || !canRunFromBlock}
|
className={ACTION_BUTTON_STYLES}
|
||||||
>
|
disabled={disabled || !canRunFromBlock}
|
||||||
<PlayOutline className={ICON_SIZE} />
|
>
|
||||||
</Button>
|
<PlayOutline className={ICON_SIZE} />
|
||||||
|
</Button>
|
||||||
|
</span>
|
||||||
</Tooltip.Trigger>
|
</Tooltip.Trigger>
|
||||||
<Tooltip.Content side='top'>
|
<Tooltip.Content side='top'>
|
||||||
{(() => {
|
{(() => {
|
||||||
if (disabled) return getTooltipMessage('Run from block')
|
if (disabled) return getTooltipMessage('Run from block')
|
||||||
if (isExecuting) return 'Execution in progress'
|
if (isExecuting) return 'Execution in progress'
|
||||||
if (!dependenciesSatisfied) return 'Run upstream blocks first'
|
if (!dependenciesSatisfied) return 'Run previous blocks first'
|
||||||
return 'Run from block'
|
return 'Run from block'
|
||||||
})()}
|
})()}
|
||||||
</Tooltip.Content>
|
</Tooltip.Content>
|
||||||
@@ -193,18 +208,54 @@ export const ActionBar = memo(
|
|||||||
variant='ghost'
|
variant='ghost'
|
||||||
onClick={(e) => {
|
onClick={(e) => {
|
||||||
e.stopPropagation()
|
e.stopPropagation()
|
||||||
if (!disabled) {
|
// Can't enable if parent is disabled (must enable parent first)
|
||||||
|
const cantEnable = !isEnabled && isParentDisabled
|
||||||
|
if (!disabled && !isLocked && !isParentLocked && !cantEnable) {
|
||||||
collaborativeBatchToggleBlockEnabled([blockId])
|
collaborativeBatchToggleBlockEnabled([blockId])
|
||||||
}
|
}
|
||||||
}}
|
}}
|
||||||
className={ACTION_BUTTON_STYLES}
|
className={ACTION_BUTTON_STYLES}
|
||||||
disabled={disabled}
|
disabled={
|
||||||
|
disabled || isLocked || isParentLocked || (!isEnabled && isParentDisabled)
|
||||||
|
}
|
||||||
>
|
>
|
||||||
{isEnabled ? <Circle className={ICON_SIZE} /> : <CircleOff className={ICON_SIZE} />}
|
{isEnabled ? <Circle className={ICON_SIZE} /> : <CircleOff className={ICON_SIZE} />}
|
||||||
</Button>
|
</Button>
|
||||||
</Tooltip.Trigger>
|
</Tooltip.Trigger>
|
||||||
<Tooltip.Content side='top'>
|
<Tooltip.Content side='top'>
|
||||||
{getTooltipMessage(isEnabled ? 'Disable Block' : 'Enable Block')}
|
{isLocked || isParentLocked
|
||||||
|
? 'Block is locked'
|
||||||
|
: !isEnabled && isParentDisabled
|
||||||
|
? 'Parent container is disabled'
|
||||||
|
: getTooltipMessage(isEnabled ? 'Disable Block' : 'Enable Block')}
|
||||||
|
</Tooltip.Content>
|
||||||
|
</Tooltip.Root>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{userPermissions.canAdmin && (
|
||||||
|
<Tooltip.Root>
|
||||||
|
<Tooltip.Trigger asChild>
|
||||||
|
<Button
|
||||||
|
variant='ghost'
|
||||||
|
onClick={(e) => {
|
||||||
|
e.stopPropagation()
|
||||||
|
// Can't unlock a block if its parent container is locked
|
||||||
|
if (!disabled && !(isLocked && isParentLocked)) {
|
||||||
|
collaborativeBatchToggleLocked([blockId])
|
||||||
|
}
|
||||||
|
}}
|
||||||
|
className={ACTION_BUTTON_STYLES}
|
||||||
|
disabled={disabled || (isLocked && isParentLocked)}
|
||||||
|
>
|
||||||
|
{isLocked ? <Unlock className={ICON_SIZE} /> : <Lock className={ICON_SIZE} />}
|
||||||
|
</Button>
|
||||||
|
</Tooltip.Trigger>
|
||||||
|
<Tooltip.Content side='top'>
|
||||||
|
{isLocked && isParentLocked
|
||||||
|
? 'Parent container is locked'
|
||||||
|
: isLocked
|
||||||
|
? 'Unlock Block'
|
||||||
|
: 'Lock Block'}
|
||||||
</Tooltip.Content>
|
</Tooltip.Content>
|
||||||
</Tooltip.Root>
|
</Tooltip.Root>
|
||||||
)}
|
)}
|
||||||
@@ -216,17 +267,21 @@ export const ActionBar = memo(
|
|||||||
variant='ghost'
|
variant='ghost'
|
||||||
onClick={(e) => {
|
onClick={(e) => {
|
||||||
e.stopPropagation()
|
e.stopPropagation()
|
||||||
if (!disabled) {
|
if (!disabled && !isLocked && !isParentLocked) {
|
||||||
handleDuplicateBlock()
|
handleDuplicateBlock()
|
||||||
}
|
}
|
||||||
}}
|
}}
|
||||||
className={ACTION_BUTTON_STYLES}
|
className={ACTION_BUTTON_STYLES}
|
||||||
disabled={disabled}
|
disabled={disabled || isLocked || isParentLocked}
|
||||||
>
|
>
|
||||||
<Copy className={ICON_SIZE} />
|
<Copy className={ICON_SIZE} />
|
||||||
</Button>
|
</Button>
|
||||||
</Tooltip.Trigger>
|
</Tooltip.Trigger>
|
||||||
<Tooltip.Content side='top'>{getTooltipMessage('Duplicate Block')}</Tooltip.Content>
|
<Tooltip.Content side='top'>
|
||||||
|
{isLocked || isParentLocked
|
||||||
|
? 'Block is locked'
|
||||||
|
: getTooltipMessage('Duplicate Block')}
|
||||||
|
</Tooltip.Content>
|
||||||
</Tooltip.Root>
|
</Tooltip.Root>
|
||||||
)}
|
)}
|
||||||
|
|
||||||
@@ -237,12 +292,12 @@ export const ActionBar = memo(
|
|||||||
variant='ghost'
|
variant='ghost'
|
||||||
onClick={(e) => {
|
onClick={(e) => {
|
||||||
e.stopPropagation()
|
e.stopPropagation()
|
||||||
if (!disabled) {
|
if (!disabled && !isLocked && !isParentLocked) {
|
||||||
collaborativeBatchToggleBlockHandles([blockId])
|
collaborativeBatchToggleBlockHandles([blockId])
|
||||||
}
|
}
|
||||||
}}
|
}}
|
||||||
className={ACTION_BUTTON_STYLES}
|
className={ACTION_BUTTON_STYLES}
|
||||||
disabled={disabled}
|
disabled={disabled || isLocked || isParentLocked}
|
||||||
>
|
>
|
||||||
{horizontalHandles ? (
|
{horizontalHandles ? (
|
||||||
<ArrowLeftRight className={ICON_SIZE} />
|
<ArrowLeftRight className={ICON_SIZE} />
|
||||||
@@ -252,7 +307,9 @@ export const ActionBar = memo(
|
|||||||
</Button>
|
</Button>
|
||||||
</Tooltip.Trigger>
|
</Tooltip.Trigger>
|
||||||
<Tooltip.Content side='top'>
|
<Tooltip.Content side='top'>
|
||||||
{getTooltipMessage(horizontalHandles ? 'Vertical Ports' : 'Horizontal Ports')}
|
{isLocked || isParentLocked
|
||||||
|
? 'Block is locked'
|
||||||
|
: getTooltipMessage(horizontalHandles ? 'Vertical Ports' : 'Horizontal Ports')}
|
||||||
</Tooltip.Content>
|
</Tooltip.Content>
|
||||||
</Tooltip.Root>
|
</Tooltip.Root>
|
||||||
)}
|
)}
|
||||||
@@ -264,19 +321,23 @@ export const ActionBar = memo(
|
|||||||
variant='ghost'
|
variant='ghost'
|
||||||
onClick={(e) => {
|
onClick={(e) => {
|
||||||
e.stopPropagation()
|
e.stopPropagation()
|
||||||
if (!disabled && userPermissions.canEdit) {
|
if (!disabled && userPermissions.canEdit && !isLocked && !isParentLocked) {
|
||||||
window.dispatchEvent(
|
window.dispatchEvent(
|
||||||
new CustomEvent('remove-from-subflow', { detail: { blockIds: [blockId] } })
|
new CustomEvent('remove-from-subflow', { detail: { blockIds: [blockId] } })
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
}}
|
}}
|
||||||
className={ACTION_BUTTON_STYLES}
|
className={ACTION_BUTTON_STYLES}
|
||||||
disabled={disabled || !userPermissions.canEdit}
|
disabled={disabled || !userPermissions.canEdit || isLocked || isParentLocked}
|
||||||
>
|
>
|
||||||
<LogOut className={ICON_SIZE} />
|
<LogOut className={ICON_SIZE} />
|
||||||
</Button>
|
</Button>
|
||||||
</Tooltip.Trigger>
|
</Tooltip.Trigger>
|
||||||
<Tooltip.Content side='top'>{getTooltipMessage('Remove from Subflow')}</Tooltip.Content>
|
<Tooltip.Content side='top'>
|
||||||
|
{isLocked || isParentLocked
|
||||||
|
? 'Block is locked'
|
||||||
|
: getTooltipMessage('Remove from Subflow')}
|
||||||
|
</Tooltip.Content>
|
||||||
</Tooltip.Root>
|
</Tooltip.Root>
|
||||||
)}
|
)}
|
||||||
|
|
||||||
@@ -286,17 +347,19 @@ export const ActionBar = memo(
|
|||||||
variant='ghost'
|
variant='ghost'
|
||||||
onClick={(e) => {
|
onClick={(e) => {
|
||||||
e.stopPropagation()
|
e.stopPropagation()
|
||||||
if (!disabled) {
|
if (!disabled && !isLocked && !isParentLocked) {
|
||||||
collaborativeBatchRemoveBlocks([blockId])
|
collaborativeBatchRemoveBlocks([blockId])
|
||||||
}
|
}
|
||||||
}}
|
}}
|
||||||
className={ACTION_BUTTON_STYLES}
|
className={ACTION_BUTTON_STYLES}
|
||||||
disabled={disabled}
|
disabled={disabled || isLocked || isParentLocked}
|
||||||
>
|
>
|
||||||
<Trash2 className={ICON_SIZE} />
|
<Trash2 className={ICON_SIZE} />
|
||||||
</Button>
|
</Button>
|
||||||
</Tooltip.Trigger>
|
</Tooltip.Trigger>
|
||||||
<Tooltip.Content side='top'>{getTooltipMessage('Delete Block')}</Tooltip.Content>
|
<Tooltip.Content side='top'>
|
||||||
|
{isLocked || isParentLocked ? 'Block is locked' : getTooltipMessage('Delete Block')}
|
||||||
|
</Tooltip.Content>
|
||||||
</Tooltip.Root>
|
</Tooltip.Root>
|
||||||
</div>
|
</div>
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -20,6 +20,9 @@ export interface BlockInfo {
|
|||||||
horizontalHandles: boolean
|
horizontalHandles: boolean
|
||||||
parentId?: string
|
parentId?: string
|
||||||
parentType?: string
|
parentType?: string
|
||||||
|
locked?: boolean
|
||||||
|
isParentLocked?: boolean
|
||||||
|
isParentDisabled?: boolean
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -46,10 +49,17 @@ export interface BlockMenuProps {
|
|||||||
showRemoveFromSubflow?: boolean
|
showRemoveFromSubflow?: boolean
|
||||||
/** Whether run from block is available (has snapshot, was executed, not inside subflow) */
|
/** Whether run from block is available (has snapshot, was executed, not inside subflow) */
|
||||||
canRunFromBlock?: boolean
|
canRunFromBlock?: boolean
|
||||||
|
/** Whether to disable edit actions (user can't edit OR blocks are locked) */
|
||||||
disableEdit?: boolean
|
disableEdit?: boolean
|
||||||
|
/** Whether the user has edit permission (ignoring locked state) */
|
||||||
|
userCanEdit?: boolean
|
||||||
isExecuting?: boolean
|
isExecuting?: boolean
|
||||||
/** Whether the selected block is a trigger (has no incoming edges) */
|
/** Whether the selected block is a trigger (has no incoming edges) */
|
||||||
isPositionalTrigger?: boolean
|
isPositionalTrigger?: boolean
|
||||||
|
/** Callback to toggle locked state of selected blocks */
|
||||||
|
onToggleLocked?: () => void
|
||||||
|
/** Whether the user has admin permissions */
|
||||||
|
canAdmin?: boolean
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -78,13 +88,22 @@ export function BlockMenu({
|
|||||||
showRemoveFromSubflow = false,
|
showRemoveFromSubflow = false,
|
||||||
canRunFromBlock = false,
|
canRunFromBlock = false,
|
||||||
disableEdit = false,
|
disableEdit = false,
|
||||||
|
userCanEdit = true,
|
||||||
isExecuting = false,
|
isExecuting = false,
|
||||||
isPositionalTrigger = false,
|
isPositionalTrigger = false,
|
||||||
|
onToggleLocked,
|
||||||
|
canAdmin = false,
|
||||||
}: BlockMenuProps) {
|
}: BlockMenuProps) {
|
||||||
const isSingleBlock = selectedBlocks.length === 1
|
const isSingleBlock = selectedBlocks.length === 1
|
||||||
|
|
||||||
const allEnabled = selectedBlocks.every((b) => b.enabled)
|
const allEnabled = selectedBlocks.every((b) => b.enabled)
|
||||||
const allDisabled = selectedBlocks.every((b) => !b.enabled)
|
const allDisabled = selectedBlocks.every((b) => !b.enabled)
|
||||||
|
const allLocked = selectedBlocks.every((b) => b.locked)
|
||||||
|
const allUnlocked = selectedBlocks.every((b) => !b.locked)
|
||||||
|
// Can't unlock blocks that have locked parents
|
||||||
|
const hasBlockWithLockedParent = selectedBlocks.some((b) => b.locked && b.isParentLocked)
|
||||||
|
// Can't enable blocks that have disabled parents
|
||||||
|
const hasBlockWithDisabledParent = selectedBlocks.some((b) => !b.enabled && b.isParentDisabled)
|
||||||
|
|
||||||
const hasSingletonBlock = selectedBlocks.some(
|
const hasSingletonBlock = selectedBlocks.some(
|
||||||
(b) =>
|
(b) =>
|
||||||
@@ -108,6 +127,12 @@ export function BlockMenu({
|
|||||||
return 'Toggle Enabled'
|
return 'Toggle Enabled'
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const getToggleLockedLabel = () => {
|
||||||
|
if (allLocked) return 'Unlock'
|
||||||
|
if (allUnlocked) return 'Lock'
|
||||||
|
return 'Toggle Lock'
|
||||||
|
}
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<Popover
|
<Popover
|
||||||
open={isOpen}
|
open={isOpen}
|
||||||
@@ -139,7 +164,7 @@ export function BlockMenu({
|
|||||||
</PopoverItem>
|
</PopoverItem>
|
||||||
<PopoverItem
|
<PopoverItem
|
||||||
className='group'
|
className='group'
|
||||||
disabled={disableEdit || !hasClipboard}
|
disabled={!userCanEdit || !hasClipboard}
|
||||||
onClick={() => {
|
onClick={() => {
|
||||||
onPaste()
|
onPaste()
|
||||||
onClose()
|
onClose()
|
||||||
@@ -164,13 +189,15 @@ export function BlockMenu({
|
|||||||
{!allNoteBlocks && <PopoverDivider />}
|
{!allNoteBlocks && <PopoverDivider />}
|
||||||
{!allNoteBlocks && (
|
{!allNoteBlocks && (
|
||||||
<PopoverItem
|
<PopoverItem
|
||||||
disabled={disableEdit}
|
disabled={disableEdit || hasBlockWithDisabledParent}
|
||||||
onClick={() => {
|
onClick={() => {
|
||||||
onToggleEnabled()
|
if (!disableEdit && !hasBlockWithDisabledParent) {
|
||||||
onClose()
|
onToggleEnabled()
|
||||||
|
onClose()
|
||||||
|
}
|
||||||
}}
|
}}
|
||||||
>
|
>
|
||||||
{getToggleEnabledLabel()}
|
{hasBlockWithDisabledParent ? 'Parent is disabled' : getToggleEnabledLabel()}
|
||||||
</PopoverItem>
|
</PopoverItem>
|
||||||
)}
|
)}
|
||||||
{!allNoteBlocks && !isSubflow && (
|
{!allNoteBlocks && !isSubflow && (
|
||||||
@@ -195,6 +222,19 @@ export function BlockMenu({
|
|||||||
Remove from Subflow
|
Remove from Subflow
|
||||||
</PopoverItem>
|
</PopoverItem>
|
||||||
)}
|
)}
|
||||||
|
{canAdmin && onToggleLocked && (
|
||||||
|
<PopoverItem
|
||||||
|
disabled={hasBlockWithLockedParent}
|
||||||
|
onClick={() => {
|
||||||
|
if (!hasBlockWithLockedParent) {
|
||||||
|
onToggleLocked()
|
||||||
|
onClose()
|
||||||
|
}
|
||||||
|
}}
|
||||||
|
>
|
||||||
|
{hasBlockWithLockedParent ? 'Parent is locked' : getToggleLockedLabel()}
|
||||||
|
</PopoverItem>
|
||||||
|
)}
|
||||||
|
|
||||||
{/* Single block actions */}
|
{/* Single block actions */}
|
||||||
{isSingleBlock && <PopoverDivider />}
|
{isSingleBlock && <PopoverDivider />}
|
||||||
|
|||||||
@@ -34,6 +34,8 @@ export interface CanvasMenuProps {
|
|||||||
canUndo?: boolean
|
canUndo?: boolean
|
||||||
canRedo?: boolean
|
canRedo?: boolean
|
||||||
isInvitationsDisabled?: boolean
|
isInvitationsDisabled?: boolean
|
||||||
|
/** Whether the workflow has locked blocks (disables auto-layout) */
|
||||||
|
hasLockedBlocks?: boolean
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -60,6 +62,7 @@ export function CanvasMenu({
|
|||||||
disableEdit = false,
|
disableEdit = false,
|
||||||
canUndo = false,
|
canUndo = false,
|
||||||
canRedo = false,
|
canRedo = false,
|
||||||
|
hasLockedBlocks = false,
|
||||||
}: CanvasMenuProps) {
|
}: CanvasMenuProps) {
|
||||||
return (
|
return (
|
||||||
<Popover
|
<Popover
|
||||||
@@ -129,11 +132,12 @@ export function CanvasMenu({
|
|||||||
</PopoverItem>
|
</PopoverItem>
|
||||||
<PopoverItem
|
<PopoverItem
|
||||||
className='group'
|
className='group'
|
||||||
disabled={disableEdit}
|
disabled={disableEdit || hasLockedBlocks}
|
||||||
onClick={() => {
|
onClick={() => {
|
||||||
onAutoLayout()
|
onAutoLayout()
|
||||||
onClose()
|
onClose()
|
||||||
}}
|
}}
|
||||||
|
title={hasLockedBlocks ? 'Unlock blocks to use auto-layout' : undefined}
|
||||||
>
|
>
|
||||||
<span>Auto-layout</span>
|
<span>Auto-layout</span>
|
||||||
<span className='ml-auto opacity-70 group-hover:opacity-100'>⇧L</span>
|
<span className='ml-auto opacity-70 group-hover:opacity-100'>⇧L</span>
|
||||||
|
|||||||
@@ -807,7 +807,7 @@ export function Chat() {
|
|||||||
|
|
||||||
const newReservedFields: StartInputFormatField[] = missingStartReservedFields.map(
|
const newReservedFields: StartInputFormatField[] = missingStartReservedFields.map(
|
||||||
(fieldName) => {
|
(fieldName) => {
|
||||||
const defaultType = fieldName === 'files' ? 'files' : 'string'
|
const defaultType = fieldName === 'files' ? 'file[]' : 'string'
|
||||||
|
|
||||||
return {
|
return {
|
||||||
id: crypto.randomUUID(),
|
id: crypto.randomUUID(),
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
import { memo, useCallback, useMemo } from 'react'
|
import { memo, useCallback, useMemo } from 'react'
|
||||||
import ReactMarkdown from 'react-markdown'
|
import ReactMarkdown from 'react-markdown'
|
||||||
import type { NodeProps } from 'reactflow'
|
import type { NodeProps } from 'reactflow'
|
||||||
|
import remarkBreaks from 'remark-breaks'
|
||||||
import remarkGfm from 'remark-gfm'
|
import remarkGfm from 'remark-gfm'
|
||||||
import { cn } from '@/lib/core/utils/cn'
|
import { cn } from '@/lib/core/utils/cn'
|
||||||
import { BLOCK_DIMENSIONS } from '@/lib/workflows/blocks/block-dimensions'
|
import { BLOCK_DIMENSIONS } from '@/lib/workflows/blocks/block-dimensions'
|
||||||
@@ -305,7 +306,7 @@ function getEmbedInfo(url: string): EmbedInfo | null {
|
|||||||
const NoteMarkdown = memo(function NoteMarkdown({ content }: { content: string }) {
|
const NoteMarkdown = memo(function NoteMarkdown({ content }: { content: string }) {
|
||||||
return (
|
return (
|
||||||
<ReactMarkdown
|
<ReactMarkdown
|
||||||
remarkPlugins={[remarkGfm]}
|
remarkPlugins={[remarkGfm, remarkBreaks]}
|
||||||
components={{
|
components={{
|
||||||
p: ({ children }: any) => (
|
p: ({ children }: any) => (
|
||||||
<p className='mb-1 break-words text-[var(--text-primary)] text-sm leading-[1.25rem] last:mb-0'>
|
<p className='mb-1 break-words text-[var(--text-primary)] text-sm leading-[1.25rem] last:mb-0'>
|
||||||
|
|||||||
@@ -3,6 +3,7 @@
|
|||||||
import { memo, useEffect, useMemo, useRef, useState } from 'react'
|
import { memo, useEffect, useMemo, useRef, useState } from 'react'
|
||||||
import clsx from 'clsx'
|
import clsx from 'clsx'
|
||||||
import { ChevronUp } from 'lucide-react'
|
import { ChevronUp } from 'lucide-react'
|
||||||
|
import { formatDuration } from '@/lib/core/utils/formatting'
|
||||||
import { CopilotMarkdownRenderer } from '../markdown-renderer'
|
import { CopilotMarkdownRenderer } from '../markdown-renderer'
|
||||||
|
|
||||||
/** Removes thinking tags (raw or escaped) and special tags from streamed content */
|
/** Removes thinking tags (raw or escaped) and special tags from streamed content */
|
||||||
@@ -241,15 +242,11 @@ export function ThinkingBlock({
|
|||||||
return () => window.clearInterval(intervalId)
|
return () => window.clearInterval(intervalId)
|
||||||
}, [isStreaming, isExpanded, userHasScrolledAway])
|
}, [isStreaming, isExpanded, userHasScrolledAway])
|
||||||
|
|
||||||
/** Formats duration in milliseconds to seconds (minimum 1s) */
|
|
||||||
const formatDuration = (ms: number) => {
|
|
||||||
const seconds = Math.max(1, Math.round(ms / 1000))
|
|
||||||
return `${seconds}s`
|
|
||||||
}
|
|
||||||
|
|
||||||
const hasContent = cleanContent.length > 0
|
const hasContent = cleanContent.length > 0
|
||||||
const isThinkingDone = !isStreaming || hasFollowingContent || hasSpecialTags
|
const isThinkingDone = !isStreaming || hasFollowingContent || hasSpecialTags
|
||||||
const durationText = `${label} for ${formatDuration(duration)}`
|
// Round to nearest second (minimum 1s) to match original behavior
|
||||||
|
const roundedMs = Math.max(1000, Math.round(duration / 1000) * 1000)
|
||||||
|
const durationText = `${label} for ${formatDuration(roundedMs)}`
|
||||||
|
|
||||||
const getStreamingLabel = (lbl: string) => {
|
const getStreamingLabel = (lbl: string) => {
|
||||||
if (lbl === 'Thought') return 'Thinking'
|
if (lbl === 'Thought') return 'Thinking'
|
||||||
|
|||||||
@@ -15,6 +15,7 @@ import {
|
|||||||
hasInterrupt as hasInterruptFromConfig,
|
hasInterrupt as hasInterruptFromConfig,
|
||||||
isSpecialTool as isSpecialToolFromConfig,
|
isSpecialTool as isSpecialToolFromConfig,
|
||||||
} from '@/lib/copilot/tools/client/ui-config'
|
} from '@/lib/copilot/tools/client/ui-config'
|
||||||
|
import { formatDuration } from '@/lib/core/utils/formatting'
|
||||||
import { CopilotMarkdownRenderer } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/markdown-renderer'
|
import { CopilotMarkdownRenderer } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/markdown-renderer'
|
||||||
import { SmoothStreamingText } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/smooth-streaming'
|
import { SmoothStreamingText } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/smooth-streaming'
|
||||||
import { ThinkingBlock } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/thinking-block'
|
import { ThinkingBlock } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/thinking-block'
|
||||||
@@ -848,13 +849,10 @@ const SubagentContentRenderer = memo(function SubagentContentRenderer({
|
|||||||
(allParsed.options && Object.keys(allParsed.options).length > 0)
|
(allParsed.options && Object.keys(allParsed.options).length > 0)
|
||||||
)
|
)
|
||||||
|
|
||||||
const formatDuration = (ms: number) => {
|
|
||||||
const seconds = Math.max(1, Math.round(ms / 1000))
|
|
||||||
return `${seconds}s`
|
|
||||||
}
|
|
||||||
|
|
||||||
const outerLabel = getSubagentCompletionLabel(toolCall.name)
|
const outerLabel = getSubagentCompletionLabel(toolCall.name)
|
||||||
const durationText = `${outerLabel} for ${formatDuration(duration)}`
|
// Round to nearest second (minimum 1s) to match original behavior
|
||||||
|
const roundedMs = Math.max(1000, Math.round(duration / 1000) * 1000)
|
||||||
|
const durationText = `${outerLabel} for ${formatDuration(roundedMs)}`
|
||||||
|
|
||||||
const renderCollapsibleContent = () => (
|
const renderCollapsibleContent = () => (
|
||||||
<>
|
<>
|
||||||
|
|||||||
@@ -179,7 +179,7 @@ export function A2aDeploy({
|
|||||||
newFields.push({
|
newFields.push({
|
||||||
id: crypto.randomUUID(),
|
id: crypto.randomUUID(),
|
||||||
name: 'files',
|
name: 'files',
|
||||||
type: 'files',
|
type: 'file[]',
|
||||||
value: '',
|
value: '',
|
||||||
collapsed: false,
|
collapsed: false,
|
||||||
})
|
})
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user