mirror of
https://github.com/simstudioai/sim.git
synced 2026-02-03 11:14:58 -05:00
Compare commits
22 Commits
feat/lock
...
improvement
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
0aeaf6faee | ||
|
|
a6ec6a0e6c | ||
|
|
cbe0f8aed2 | ||
|
|
6e642fc705 | ||
|
|
1c857cdcda | ||
|
|
7570e509ff | ||
|
|
1ff35405fa | ||
|
|
3ceabbb816 | ||
|
|
a65f3b8e6b | ||
|
|
5ecbf6cf4a | ||
|
|
42767fc4f4 | ||
|
|
5a0becf76f | ||
|
|
f4a3c94f87 | ||
|
|
9ec0c8f3f5 | ||
|
|
a9b7d75d87 | ||
|
|
0449804ffb | ||
|
|
c286f3ed24 | ||
|
|
b738550815 | ||
|
|
39ca1f61c7 | ||
|
|
1da3407f41 | ||
|
|
c6357f7438 | ||
|
|
bea0a685ae |
@@ -5421,3 +5421,18 @@ z'
|
|||||||
</svg>
|
</svg>
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export function EnrichSoIcon(props: SVGProps<SVGSVGElement>) {
|
||||||
|
return (
|
||||||
|
<svg {...props} xmlns='http://www.w3.org/2000/svg' viewBox='0 0 398 394' fill='none'>
|
||||||
|
<path
|
||||||
|
fill='#5A52F4'
|
||||||
|
d='M129.705566,319.705719 C127.553314,322.684906 125.651512,325.414673 123.657059,328.277466 C113.748466,318.440308 105.605003,310.395905 97.510834,302.302216 C93.625801,298.417419 89.990181,294.269318 85.949242,290.558868 C82.857994,287.720428 82.464081,285.757660 85.772888,282.551880 C104.068108,264.826202 122.146088,246.876312 140.285110,228.989670 C141.183945,228.103317 141.957443,227.089844 143.588837,225.218384 C140.691605,225.066116 138.820053,224.882874 136.948410,224.881958 C102.798264,224.865326 68.647453,224.765244 34.498699,224.983612 C29.315699,225.016739 27.990419,223.343155 28.090912,218.397430 C28.381887,204.076935 28.189890,189.746719 28.195684,175.420319 C28.198524,168.398178 28.319166,168.279541 35.590389,168.278687 C69.074188,168.274780 102.557991,168.281174 136.041794,168.266083 C137.968231,168.265213 139.894608,168.107101 141.821030,168.022171 C142.137955,167.513992 142.454895,167.005829 142.771820,166.497650 C122.842415,146.495621 102.913002,126.493591 83.261360,106.770348 C96.563828,93.471756 109.448814,80.590523 122.656265,67.386925 C123.522743,68.161835 124.785545,69.187096 125.930321,70.330513 C144.551819,88.930206 163.103683,107.600082 181.805267,126.118790 C186.713593,130.979126 189.085648,136.448059 189.055374,143.437057 C188.899490,179.418961 188.911179,215.402191 189.046661,251.384262 C189.072296,258.190796 186.742920,263.653717 181.982727,268.323273 C164.624405,285.351227 147.295807,302.409485 129.705566,319.705719z'
|
||||||
|
/>
|
||||||
|
<path
|
||||||
|
fill='#5A52F4'
|
||||||
|
d='M276.070923,246.906128 C288.284363,258.985870 300.156097,270.902100 312.235931,282.603485 C315.158752,285.434784 315.417542,287.246246 312.383484,290.248932 C301.143494,301.372498 290.168549,312.763733 279.075592,324.036255 C278.168030,324.958496 277.121307,325.743835 275.898315,326.801086 C274.628357,325.711792 273.460663,324.822968 272.422150,323.802673 C253.888397,305.594757 235.418701,287.321289 216.818268,269.181854 C211.508789,264.003937 208.872726,258.136688 208.914001,250.565842 C209.108337,214.917786 209.084808,179.267715 208.928864,143.619293 C208.898407,136.654907 211.130066,131.122162 216.052216,126.246094 C234.867538,107.606842 253.537521,88.820908 272.274780,70.102730 C273.313202,69.065353 274.468597,68.145027 275.264038,67.440727 C288.353516,80.579514 301.213470,93.487869 314.597534,106.922356 C295.163391,126.421753 275.214752,146.437363 255.266113,166.452972 C255.540176,166.940353 255.814240,167.427734 256.088318,167.915100 C257.983887,168.035736 259.879425,168.260345 261.775085,168.261551 C295.425201,168.282852 329.075287,168.273544 362.725403,168.279831 C369.598907,168.281113 369.776215,168.463593 369.778931,175.252213 C369.784882,189.911667 369.646088,204.573074 369.861206,219.229355 C369.925110,223.585022 368.554596,224.976288 364.148865,224.956406 C329.833130,224.801605 295.516388,224.869598 261.199951,224.868744 C259.297974,224.868698 257.396027,224.868744 254.866638,224.868744 C262.350708,232.658707 269.078217,239.661194 276.070923,246.906128z'
|
||||||
|
/>
|
||||||
|
</svg>
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|||||||
@@ -29,6 +29,7 @@ import {
|
|||||||
DynamoDBIcon,
|
DynamoDBIcon,
|
||||||
ElasticsearchIcon,
|
ElasticsearchIcon,
|
||||||
ElevenLabsIcon,
|
ElevenLabsIcon,
|
||||||
|
EnrichSoIcon,
|
||||||
ExaAIIcon,
|
ExaAIIcon,
|
||||||
EyeIcon,
|
EyeIcon,
|
||||||
FirecrawlIcon,
|
FirecrawlIcon,
|
||||||
@@ -160,6 +161,7 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
|
|||||||
dynamodb: DynamoDBIcon,
|
dynamodb: DynamoDBIcon,
|
||||||
elasticsearch: ElasticsearchIcon,
|
elasticsearch: ElasticsearchIcon,
|
||||||
elevenlabs: ElevenLabsIcon,
|
elevenlabs: ElevenLabsIcon,
|
||||||
|
enrich: EnrichSoIcon,
|
||||||
exa: ExaAIIcon,
|
exa: ExaAIIcon,
|
||||||
file_v2: DocumentIcon,
|
file_v2: DocumentIcon,
|
||||||
firecrawl: FirecrawlIcon,
|
firecrawl: FirecrawlIcon,
|
||||||
|
|||||||
134
apps/docs/content/docs/en/execution/files.mdx
Normal file
134
apps/docs/content/docs/en/execution/files.mdx
Normal file
@@ -0,0 +1,134 @@
|
|||||||
|
---
|
||||||
|
title: Passing Files
|
||||||
|
---
|
||||||
|
|
||||||
|
import { Callout } from 'fumadocs-ui/components/callout'
|
||||||
|
import { Tab, Tabs } from 'fumadocs-ui/components/tabs'
|
||||||
|
|
||||||
|
Sim makes it easy to work with files throughout your workflows. Blocks can receive files, process them, and pass them to other blocks seamlessly.
|
||||||
|
|
||||||
|
## File Objects
|
||||||
|
|
||||||
|
When blocks output files (like Gmail attachments, generated images, or parsed documents), they return a standardized file object:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"name": "report.pdf",
|
||||||
|
"url": "https://...",
|
||||||
|
"base64": "JVBERi0xLjQK...",
|
||||||
|
"type": "application/pdf",
|
||||||
|
"size": 245678
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
You can access any of these properties when referencing files from previous blocks.
|
||||||
|
|
||||||
|
## Passing Files Between Blocks
|
||||||
|
|
||||||
|
Reference files from previous blocks using the tag dropdown. Click in any file input field and type `<` to see available outputs.
|
||||||
|
|
||||||
|
**Common patterns:**
|
||||||
|
|
||||||
|
```
|
||||||
|
// Single file from a block
|
||||||
|
<gmail.attachments[0]>
|
||||||
|
|
||||||
|
// Pass the whole file object
|
||||||
|
<file_parser.files[0]>
|
||||||
|
|
||||||
|
// Access specific properties
|
||||||
|
<gmail.attachments[0].name>
|
||||||
|
<gmail.attachments[0].base64>
|
||||||
|
```
|
||||||
|
|
||||||
|
Most blocks accept the full file object and extract what they need automatically. You don't need to manually extract `base64` or `url` in most cases.
|
||||||
|
|
||||||
|
## Triggering Workflows with Files
|
||||||
|
|
||||||
|
When calling a workflow via API that expects file input, include files in your request:
|
||||||
|
|
||||||
|
<Tabs items={['Base64', 'URL']}>
|
||||||
|
<Tab value="Base64">
|
||||||
|
```bash
|
||||||
|
curl -X POST "https://sim.ai/api/workflows/YOUR_WORKFLOW_ID/execute" \
|
||||||
|
-H "Content-Type: application/json" \
|
||||||
|
-H "x-api-key: YOUR_API_KEY" \
|
||||||
|
-d '{
|
||||||
|
"document": {
|
||||||
|
"name": "report.pdf",
|
||||||
|
"base64": "JVBERi0xLjQK...",
|
||||||
|
"type": "application/pdf"
|
||||||
|
}
|
||||||
|
}'
|
||||||
|
```
|
||||||
|
</Tab>
|
||||||
|
<Tab value="URL">
|
||||||
|
```bash
|
||||||
|
curl -X POST "https://sim.ai/api/workflows/YOUR_WORKFLOW_ID/execute" \
|
||||||
|
-H "Content-Type: application/json" \
|
||||||
|
-H "x-api-key: YOUR_API_KEY" \
|
||||||
|
-d '{
|
||||||
|
"document": {
|
||||||
|
"name": "report.pdf",
|
||||||
|
"url": "https://example.com/report.pdf",
|
||||||
|
"type": "application/pdf"
|
||||||
|
}
|
||||||
|
}'
|
||||||
|
```
|
||||||
|
</Tab>
|
||||||
|
</Tabs>
|
||||||
|
|
||||||
|
The workflow's Start block should have an input field configured to receive the file parameter.
|
||||||
|
|
||||||
|
## Receiving Files in API Responses
|
||||||
|
|
||||||
|
When a workflow outputs files, they're included in the response:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"success": true,
|
||||||
|
"output": {
|
||||||
|
"generatedFile": {
|
||||||
|
"name": "output.png",
|
||||||
|
"url": "https://...",
|
||||||
|
"base64": "iVBORw0KGgo...",
|
||||||
|
"type": "image/png",
|
||||||
|
"size": 34567
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Use `url` for direct downloads or `base64` for inline processing.
|
||||||
|
|
||||||
|
## Blocks That Work with Files
|
||||||
|
|
||||||
|
**File inputs:**
|
||||||
|
- **File** - Parse documents, images, and text files
|
||||||
|
- **Vision** - Analyze images with AI models
|
||||||
|
- **Mistral Parser** - Extract text from PDFs
|
||||||
|
|
||||||
|
**File outputs:**
|
||||||
|
- **Gmail** - Email attachments
|
||||||
|
- **Slack** - Downloaded files
|
||||||
|
- **TTS** - Generated audio files
|
||||||
|
- **Video Generator** - Generated videos
|
||||||
|
- **Image Generator** - Generated images
|
||||||
|
|
||||||
|
**File storage:**
|
||||||
|
- **Supabase** - Upload/download from storage
|
||||||
|
- **S3** - AWS S3 operations
|
||||||
|
- **Google Drive** - Drive file operations
|
||||||
|
- **Dropbox** - Dropbox file operations
|
||||||
|
|
||||||
|
<Callout type="info">
|
||||||
|
Files are automatically available to downstream blocks. The execution engine handles all file transfer and format conversion.
|
||||||
|
</Callout>
|
||||||
|
|
||||||
|
## Best Practices
|
||||||
|
|
||||||
|
1. **Use file objects directly** - Pass the full file object rather than extracting individual properties. Blocks handle the conversion automatically.
|
||||||
|
|
||||||
|
2. **Check file types** - Ensure the file type matches what the receiving block expects. The Vision block needs images, the File block handles documents.
|
||||||
|
|
||||||
|
3. **Consider file size** - Large files increase execution time. For very large files, consider using storage blocks (S3, Supabase) for intermediate storage.
|
||||||
@@ -1,3 +1,3 @@
|
|||||||
{
|
{
|
||||||
"pages": ["index", "basics", "api", "logging", "costs"]
|
"pages": ["index", "basics", "files", "api", "logging", "costs"]
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -180,11 +180,6 @@ A quick lookup for everyday actions in the Sim workflow editor. For keyboard sho
|
|||||||
<td>Right-click → **Enable/Disable**</td>
|
<td>Right-click → **Enable/Disable**</td>
|
||||||
<td><ActionImage src="/static/quick-reference/disable-block.png" alt="Disable block" /></td>
|
<td><ActionImage src="/static/quick-reference/disable-block.png" alt="Disable block" /></td>
|
||||||
</tr>
|
</tr>
|
||||||
<tr>
|
|
||||||
<td>Lock/Unlock a block</td>
|
|
||||||
<td>Hover block → Click lock icon (Admin only)</td>
|
|
||||||
<td><ActionImage src="/static/quick-reference/lock-block.png" alt="Lock block" /></td>
|
|
||||||
</tr>
|
|
||||||
<tr>
|
<tr>
|
||||||
<td>Toggle handle orientation</td>
|
<td>Toggle handle orientation</td>
|
||||||
<td>Right-click → **Toggle Handles**</td>
|
<td>Right-click → **Toggle Handles**</td>
|
||||||
|
|||||||
930
apps/docs/content/docs/en/tools/enrich.mdx
Normal file
930
apps/docs/content/docs/en/tools/enrich.mdx
Normal file
@@ -0,0 +1,930 @@
|
|||||||
|
---
|
||||||
|
title: Enrich
|
||||||
|
description: B2B data enrichment and LinkedIn intelligence with Enrich.so
|
||||||
|
---
|
||||||
|
|
||||||
|
import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||||
|
|
||||||
|
<BlockInfoCard
|
||||||
|
type="enrich"
|
||||||
|
color="#E5E5E6"
|
||||||
|
/>
|
||||||
|
|
||||||
|
{/* MANUAL-CONTENT-START:intro */}
|
||||||
|
[Enrich.so](https://enrich.so/) delivers real-time, precision B2B data enrichment and LinkedIn intelligence. Its platform provides dynamic access to public and structured company, contact, and professional information, enabling teams to build richer profiles, improve lead quality, and drive more effective outreach.
|
||||||
|
|
||||||
|
With Enrich.so, you can:
|
||||||
|
|
||||||
|
- **Enrich contact and company profiles**: Instantly discover key data points for leads, prospects, and businesses using just an email or LinkedIn profile.
|
||||||
|
- **Verify email deliverability**: Check if emails are valid, deliverable, and safe to contact before sending.
|
||||||
|
- **Find work & personal emails**: Identify missing business emails from a LinkedIn profile or personal emails to expand your reach.
|
||||||
|
- **Reveal phone numbers and social profiles**: Surface additional communication channels for contacts through enrichment tools.
|
||||||
|
- **Analyze LinkedIn posts and engagement**: Extract insights on post reach, reactions, and audience from public LinkedIn content.
|
||||||
|
- **Conduct advanced people and company search**: Enable your agents to locate companies and professionals based on deep filters and real-time intelligence.
|
||||||
|
|
||||||
|
The Sim integration with Enrich.so empowers your agents and automations to instantly query, enrich, and validate B2B data, boosting productivity in workflows like sales prospecting, recruiting, marketing operations, and more. Combining Sim's orchestration capabilities with Enrich.so unlocks smarter, data-driven automation strategies powered by best-in-class B2B intelligence.
|
||||||
|
{/* MANUAL-CONTENT-END */}
|
||||||
|
|
||||||
|
|
||||||
|
## Usage Instructions
|
||||||
|
|
||||||
|
Access real-time B2B data intelligence with Enrich.so. Enrich profiles from email addresses, find work emails from LinkedIn, verify email deliverability, search for people and companies, and analyze LinkedIn post engagement.
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Tools
|
||||||
|
|
||||||
|
### `enrich_check_credits`
|
||||||
|
|
||||||
|
Check your Enrich API credit usage and remaining balance.
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `apiKey` | string | Yes | Enrich API key |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `totalCredits` | number | Total credits allocated to the account |
|
||||||
|
| `creditsUsed` | number | Credits consumed so far |
|
||||||
|
| `creditsRemaining` | number | Available credits remaining |
|
||||||
|
|
||||||
|
### `enrich_email_to_profile`
|
||||||
|
|
||||||
|
Retrieve detailed LinkedIn profile information using an email address including work history, education, and skills.
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `apiKey` | string | Yes | Enrich API key |
|
||||||
|
| `email` | string | Yes | Email address to look up \(e.g., john.doe@company.com\) |
|
||||||
|
| `inRealtime` | boolean | No | Set to true to retrieve fresh data, bypassing cached information |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `displayName` | string | Full display name |
|
||||||
|
| `firstName` | string | First name |
|
||||||
|
| `lastName` | string | Last name |
|
||||||
|
| `headline` | string | Professional headline |
|
||||||
|
| `occupation` | string | Current occupation |
|
||||||
|
| `summary` | string | Profile summary |
|
||||||
|
| `location` | string | Location |
|
||||||
|
| `country` | string | Country |
|
||||||
|
| `linkedInUrl` | string | LinkedIn profile URL |
|
||||||
|
| `photoUrl` | string | Profile photo URL |
|
||||||
|
| `connectionCount` | number | Number of connections |
|
||||||
|
| `isConnectionCountObfuscated` | boolean | Whether connection count is obfuscated \(500+\) |
|
||||||
|
| `positionHistory` | array | Work experience history |
|
||||||
|
| ↳ `title` | string | Job title |
|
||||||
|
| ↳ `company` | string | Company name |
|
||||||
|
| ↳ `startDate` | string | Start date |
|
||||||
|
| ↳ `endDate` | string | End date |
|
||||||
|
| ↳ `location` | string | Location |
|
||||||
|
| `education` | array | Education history |
|
||||||
|
| ↳ `school` | string | School name |
|
||||||
|
| ↳ `degree` | string | Degree |
|
||||||
|
| ↳ `fieldOfStudy` | string | Field of study |
|
||||||
|
| ↳ `startDate` | string | Start date |
|
||||||
|
| ↳ `endDate` | string | End date |
|
||||||
|
| `certifications` | array | Professional certifications |
|
||||||
|
| ↳ `name` | string | Certification name |
|
||||||
|
| ↳ `authority` | string | Issuing authority |
|
||||||
|
| ↳ `url` | string | Certification URL |
|
||||||
|
| `skills` | array | List of skills |
|
||||||
|
| `languages` | array | List of languages |
|
||||||
|
| `locale` | string | Profile locale \(e.g., en_US\) |
|
||||||
|
| `version` | number | Profile version number |
|
||||||
|
|
||||||
|
### `enrich_email_to_person_lite`
|
||||||
|
|
||||||
|
Retrieve basic LinkedIn profile information from an email address. A lighter version with essential data only.
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `apiKey` | string | Yes | Enrich API key |
|
||||||
|
| `email` | string | Yes | Email address to look up \(e.g., john.doe@company.com\) |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `name` | string | Full name |
|
||||||
|
| `firstName` | string | First name |
|
||||||
|
| `lastName` | string | Last name |
|
||||||
|
| `email` | string | Email address |
|
||||||
|
| `title` | string | Job title |
|
||||||
|
| `location` | string | Location |
|
||||||
|
| `company` | string | Current company |
|
||||||
|
| `companyLocation` | string | Company location |
|
||||||
|
| `companyLinkedIn` | string | Company LinkedIn URL |
|
||||||
|
| `profileId` | string | LinkedIn profile ID |
|
||||||
|
| `schoolName` | string | School name |
|
||||||
|
| `schoolUrl` | string | School URL |
|
||||||
|
| `linkedInUrl` | string | LinkedIn profile URL |
|
||||||
|
| `photoUrl` | string | Profile photo URL |
|
||||||
|
| `followerCount` | number | Number of followers |
|
||||||
|
| `connectionCount` | number | Number of connections |
|
||||||
|
| `languages` | array | Languages spoken |
|
||||||
|
| `projects` | array | Projects |
|
||||||
|
| `certifications` | array | Certifications |
|
||||||
|
| `volunteerExperience` | array | Volunteer experience |
|
||||||
|
|
||||||
|
### `enrich_linkedin_profile`
|
||||||
|
|
||||||
|
Enrich a LinkedIn profile URL with detailed information including positions, education, and social metrics.
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `apiKey` | string | Yes | Enrich API key |
|
||||||
|
| `url` | string | Yes | LinkedIn profile URL \(e.g., linkedin.com/in/williamhgates\) |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `profileId` | string | LinkedIn profile ID |
|
||||||
|
| `firstName` | string | First name |
|
||||||
|
| `lastName` | string | Last name |
|
||||||
|
| `subTitle` | string | Profile subtitle/headline |
|
||||||
|
| `profilePicture` | string | Profile picture URL |
|
||||||
|
| `backgroundImage` | string | Background image URL |
|
||||||
|
| `industry` | string | Industry |
|
||||||
|
| `location` | string | Location |
|
||||||
|
| `followersCount` | number | Number of followers |
|
||||||
|
| `connectionsCount` | number | Number of connections |
|
||||||
|
| `premium` | boolean | Whether the account is premium |
|
||||||
|
| `influencer` | boolean | Whether the account is an influencer |
|
||||||
|
| `positions` | array | Work positions |
|
||||||
|
| ↳ `title` | string | Job title |
|
||||||
|
| ↳ `company` | string | Company name |
|
||||||
|
| ↳ `companyLogo` | string | Company logo URL |
|
||||||
|
| ↳ `startDate` | string | Start date |
|
||||||
|
| ↳ `endDate` | string | End date |
|
||||||
|
| ↳ `location` | string | Location |
|
||||||
|
| `education` | array | Education history |
|
||||||
|
| ↳ `school` | string | School name |
|
||||||
|
| ↳ `degree` | string | Degree |
|
||||||
|
| ↳ `fieldOfStudy` | string | Field of study |
|
||||||
|
| ↳ `startDate` | string | Start date |
|
||||||
|
| ↳ `endDate` | string | End date |
|
||||||
|
| `websites` | array | Personal websites |
|
||||||
|
|
||||||
|
### `enrich_find_email`
|
||||||
|
|
||||||
|
Find a person's email address using their full name and company domain.
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `apiKey` | string | Yes | Enrich API key |
|
||||||
|
| `fullName` | string | Yes | Person's full name \(e.g., John Doe\) |
|
||||||
|
| `companyDomain` | string | Yes | Company domain \(e.g., example.com\) |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `email` | string | Found email address |
|
||||||
|
| `firstName` | string | First name |
|
||||||
|
| `lastName` | string | Last name |
|
||||||
|
| `domain` | string | Company domain |
|
||||||
|
| `found` | boolean | Whether an email was found |
|
||||||
|
| `acceptAll` | boolean | Whether the domain accepts all emails |
|
||||||
|
|
||||||
|
### `enrich_linkedin_to_work_email`
|
||||||
|
|
||||||
|
Find a work email address from a LinkedIn profile URL.
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `apiKey` | string | Yes | Enrich API key |
|
||||||
|
| `linkedinProfile` | string | Yes | LinkedIn profile URL \(e.g., https://www.linkedin.com/in/williamhgates\) |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `email` | string | Found work email address |
|
||||||
|
| `found` | boolean | Whether an email was found |
|
||||||
|
| `status` | string | Request status \(in_progress or completed\) |
|
||||||
|
|
||||||
|
### `enrich_linkedin_to_personal_email`
|
||||||
|
|
||||||
|
Find personal email address from a LinkedIn profile URL.
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `apiKey` | string | Yes | Enrich API key |
|
||||||
|
| `linkedinProfile` | string | Yes | LinkedIn profile URL \(e.g., linkedin.com/in/username\) |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `email` | string | Personal email address |
|
||||||
|
| `found` | boolean | Whether an email was found |
|
||||||
|
| `status` | string | Request status |
|
||||||
|
|
||||||
|
### `enrich_phone_finder`
|
||||||
|
|
||||||
|
Find a phone number from a LinkedIn profile URL.
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `apiKey` | string | Yes | Enrich API key |
|
||||||
|
| `linkedinProfile` | string | Yes | LinkedIn profile URL \(e.g., linkedin.com/in/williamhgates\) |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `profileUrl` | string | LinkedIn profile URL |
|
||||||
|
| `mobileNumber` | string | Found mobile phone number |
|
||||||
|
| `found` | boolean | Whether a phone number was found |
|
||||||
|
| `status` | string | Request status \(in_progress or completed\) |
|
||||||
|
|
||||||
|
### `enrich_email_to_phone`
|
||||||
|
|
||||||
|
Find a phone number associated with an email address.
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `apiKey` | string | Yes | Enrich API key |
|
||||||
|
| `email` | string | Yes | Email address to look up \(e.g., john.doe@example.com\) |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `email` | string | Email address looked up |
|
||||||
|
| `mobileNumber` | string | Found mobile phone number |
|
||||||
|
| `found` | boolean | Whether a phone number was found |
|
||||||
|
| `status` | string | Request status \(in_progress or completed\) |
|
||||||
|
|
||||||
|
### `enrich_verify_email`
|
||||||
|
|
||||||
|
Verify an email address for deliverability, including catch-all detection and provider identification.
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `apiKey` | string | Yes | Enrich API key |
|
||||||
|
| `email` | string | Yes | Email address to verify \(e.g., john.doe@example.com\) |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `email` | string | Email address verified |
|
||||||
|
| `status` | string | Verification status |
|
||||||
|
| `result` | string | Deliverability result \(deliverable, undeliverable, etc.\) |
|
||||||
|
| `confidenceScore` | number | Confidence score \(0-100\) |
|
||||||
|
| `smtpProvider` | string | Email service provider \(e.g., Google, Microsoft\) |
|
||||||
|
| `mailDisposable` | boolean | Whether the email is from a disposable provider |
|
||||||
|
| `mailAcceptAll` | boolean | Whether the domain is a catch-all domain |
|
||||||
|
| `free` | boolean | Whether the email uses a free email service |
|
||||||
|
|
||||||
|
### `enrich_disposable_email_check`
|
||||||
|
|
||||||
|
Check if an email address is from a disposable or temporary email provider. Returns a score and validation details.
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `apiKey` | string | Yes | Enrich API key |
|
||||||
|
| `email` | string | Yes | Email address to check \(e.g., john.doe@example.com\) |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `email` | string | Email address checked |
|
||||||
|
| `score` | number | Validation score \(0-100\) |
|
||||||
|
| `testsPassed` | string | Number of tests passed \(e.g., "3/3"\) |
|
||||||
|
| `passed` | boolean | Whether the email passed all validation tests |
|
||||||
|
| `reason` | string | Reason for failure if email did not pass |
|
||||||
|
| `mailServerIp` | string | Mail server IP address |
|
||||||
|
| `mxRecords` | array | MX records for the domain |
|
||||||
|
| ↳ `host` | string | MX record host |
|
||||||
|
| ↳ `pref` | number | MX record preference |
|
||||||
|
|
||||||
|
### `enrich_email_to_ip`
|
||||||
|
|
||||||
|
Discover an IP address associated with an email address.
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `apiKey` | string | Yes | Enrich API key |
|
||||||
|
| `email` | string | Yes | Email address to look up \(e.g., john.doe@example.com\) |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `email` | string | Email address looked up |
|
||||||
|
| `ip` | string | Associated IP address |
|
||||||
|
| `found` | boolean | Whether an IP address was found |
|
||||||
|
|
||||||
|
### `enrich_ip_to_company`
|
||||||
|
|
||||||
|
Identify a company from an IP address with detailed firmographic information.
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `apiKey` | string | Yes | Enrich API key |
|
||||||
|
| `ip` | string | Yes | IP address to look up \(e.g., 86.92.60.221\) |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `name` | string | Company name |
|
||||||
|
| `legalName` | string | Legal company name |
|
||||||
|
| `domain` | string | Primary domain |
|
||||||
|
| `domainAliases` | array | Domain aliases |
|
||||||
|
| `sector` | string | Business sector |
|
||||||
|
| `industry` | string | Industry |
|
||||||
|
| `phone` | string | Phone number |
|
||||||
|
| `employees` | number | Number of employees |
|
||||||
|
| `revenue` | string | Estimated revenue |
|
||||||
|
| `location` | json | Company location |
|
||||||
|
| ↳ `city` | string | City |
|
||||||
|
| ↳ `state` | string | State |
|
||||||
|
| ↳ `country` | string | Country |
|
||||||
|
| ↳ `timezone` | string | Timezone |
|
||||||
|
| `linkedInUrl` | string | LinkedIn company URL |
|
||||||
|
| `twitterUrl` | string | Twitter URL |
|
||||||
|
| `facebookUrl` | string | Facebook URL |
|
||||||
|
|
||||||
|
### `enrich_company_lookup`
|
||||||
|
|
||||||
|
Look up comprehensive company information by name or domain including funding, location, and social profiles.
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `apiKey` | string | Yes | Enrich API key |
|
||||||
|
| `name` | string | No | Company name \(e.g., Google\) |
|
||||||
|
| `domain` | string | No | Company domain \(e.g., google.com\) |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `name` | string | Company name |
|
||||||
|
| `universalName` | string | Universal company name |
|
||||||
|
| `companyId` | string | Company ID |
|
||||||
|
| `description` | string | Company description |
|
||||||
|
| `phone` | string | Phone number |
|
||||||
|
| `linkedInUrl` | string | LinkedIn company URL |
|
||||||
|
| `websiteUrl` | string | Company website |
|
||||||
|
| `followers` | number | Number of LinkedIn followers |
|
||||||
|
| `staffCount` | number | Number of employees |
|
||||||
|
| `foundedDate` | string | Date founded |
|
||||||
|
| `type` | string | Company type |
|
||||||
|
| `industries` | array | Industries |
|
||||||
|
| `specialties` | array | Company specialties |
|
||||||
|
| `headquarters` | json | Headquarters location |
|
||||||
|
| ↳ `city` | string | City |
|
||||||
|
| ↳ `country` | string | Country |
|
||||||
|
| ↳ `postalCode` | string | Postal code |
|
||||||
|
| ↳ `line1` | string | Address line 1 |
|
||||||
|
| `logo` | string | Company logo URL |
|
||||||
|
| `coverImage` | string | Cover image URL |
|
||||||
|
| `fundingRounds` | array | Funding history |
|
||||||
|
| ↳ `roundType` | string | Funding round type |
|
||||||
|
| ↳ `amount` | number | Amount raised |
|
||||||
|
| ↳ `currency` | string | Currency |
|
||||||
|
| ↳ `investors` | array | Investors |
|
||||||
|
|
||||||
|
### `enrich_company_funding`
|
||||||
|
|
||||||
|
Retrieve company funding history, traffic metrics, and executive information by domain.
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `apiKey` | string | Yes | Enrich API key |
|
||||||
|
| `domain` | string | Yes | Company domain \(e.g., example.com\) |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `legalName` | string | Legal company name |
|
||||||
|
| `employeeCount` | number | Number of employees |
|
||||||
|
| `headquarters` | string | Headquarters location |
|
||||||
|
| `industry` | string | Industry |
|
||||||
|
| `totalFundingRaised` | number | Total funding raised |
|
||||||
|
| `fundingRounds` | array | Funding rounds |
|
||||||
|
| ↳ `roundType` | string | Round type |
|
||||||
|
| ↳ `amount` | number | Amount raised |
|
||||||
|
| ↳ `date` | string | Date |
|
||||||
|
| ↳ `investors` | array | Investors |
|
||||||
|
| `monthlyVisits` | number | Monthly website visits |
|
||||||
|
| `trafficChange` | number | Traffic change percentage |
|
||||||
|
| `itSpending` | number | Estimated IT spending in USD |
|
||||||
|
| `executives` | array | Executive team |
|
||||||
|
| ↳ `name` | string | Name |
|
||||||
|
| ↳ `title` | string | Title |
|
||||||
|
|
||||||
|
### `enrich_company_revenue`
|
||||||
|
|
||||||
|
Retrieve company revenue data, CEO information, and competitive analysis by domain.
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `apiKey` | string | Yes | Enrich API key |
|
||||||
|
| `domain` | string | Yes | Company domain \(e.g., clay.io\) |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `companyName` | string | Company name |
|
||||||
|
| `shortDescription` | string | Short company description |
|
||||||
|
| `fullSummary` | string | Full company summary |
|
||||||
|
| `revenue` | string | Company revenue |
|
||||||
|
| `revenueMin` | number | Minimum revenue estimate |
|
||||||
|
| `revenueMax` | number | Maximum revenue estimate |
|
||||||
|
| `employeeCount` | number | Number of employees |
|
||||||
|
| `founded` | string | Year founded |
|
||||||
|
| `ownership` | string | Ownership type |
|
||||||
|
| `status` | string | Company status \(e.g., Active\) |
|
||||||
|
| `website` | string | Company website URL |
|
||||||
|
| `ceo` | json | CEO information |
|
||||||
|
| ↳ `name` | string | CEO name |
|
||||||
|
| ↳ `designation` | string | CEO designation/title |
|
||||||
|
| ↳ `rating` | number | CEO rating |
|
||||||
|
| `socialLinks` | json | Social media links |
|
||||||
|
| ↳ `linkedIn` | string | LinkedIn URL |
|
||||||
|
| ↳ `twitter` | string | Twitter URL |
|
||||||
|
| ↳ `facebook` | string | Facebook URL |
|
||||||
|
| `totalFunding` | string | Total funding raised |
|
||||||
|
| `fundingRounds` | number | Number of funding rounds |
|
||||||
|
| `competitors` | array | Competitors |
|
||||||
|
| ↳ `name` | string | Competitor name |
|
||||||
|
| ↳ `revenue` | string | Revenue |
|
||||||
|
| ↳ `employeeCount` | number | Employee count |
|
||||||
|
| ↳ `headquarters` | string | Headquarters |
|
||||||
|
|
||||||
|
### `enrich_search_people`
|
||||||
|
|
||||||
|
Search for professionals by various criteria including name, title, skills, education, and company.
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `apiKey` | string | Yes | Enrich API key |
|
||||||
|
| `firstName` | string | No | First name |
|
||||||
|
| `lastName` | string | No | Last name |
|
||||||
|
| `summary` | string | No | Professional summary keywords |
|
||||||
|
| `subTitle` | string | No | Job title/subtitle |
|
||||||
|
| `locationCountry` | string | No | Country |
|
||||||
|
| `locationCity` | string | No | City |
|
||||||
|
| `locationState` | string | No | State/province |
|
||||||
|
| `influencer` | boolean | No | Filter for influencers only |
|
||||||
|
| `premium` | boolean | No | Filter for premium accounts only |
|
||||||
|
| `language` | string | No | Primary language |
|
||||||
|
| `industry` | string | No | Industry |
|
||||||
|
| `currentJobTitles` | json | No | Current job titles \(array\) |
|
||||||
|
| `pastJobTitles` | json | No | Past job titles \(array\) |
|
||||||
|
| `skills` | json | No | Skills to search for \(array\) |
|
||||||
|
| `schoolNames` | json | No | School names \(array\) |
|
||||||
|
| `certifications` | json | No | Certifications to filter by \(array\) |
|
||||||
|
| `degreeNames` | json | No | Degree names to filter by \(array\) |
|
||||||
|
| `studyFields` | json | No | Fields of study to filter by \(array\) |
|
||||||
|
| `currentCompanies` | json | No | Current company IDs to filter by \(array of numbers\) |
|
||||||
|
| `pastCompanies` | json | No | Past company IDs to filter by \(array of numbers\) |
|
||||||
|
| `currentPage` | number | No | Page number \(default: 1\) |
|
||||||
|
| `pageSize` | number | No | Results per page \(default: 20\) |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `currentPage` | number | Current page number |
|
||||||
|
| `totalPage` | number | Total number of pages |
|
||||||
|
| `pageSize` | number | Results per page |
|
||||||
|
| `profiles` | array | Search results |
|
||||||
|
| ↳ `profileIdentifier` | string | Profile ID |
|
||||||
|
| ↳ `givenName` | string | First name |
|
||||||
|
| ↳ `familyName` | string | Last name |
|
||||||
|
| ↳ `currentPosition` | string | Current job title |
|
||||||
|
| ↳ `profileImage` | string | Profile image URL |
|
||||||
|
| ↳ `externalProfileUrl` | string | LinkedIn URL |
|
||||||
|
| ↳ `city` | string | City |
|
||||||
|
| ↳ `country` | string | Country |
|
||||||
|
| ↳ `expertSkills` | array | Skills |
|
||||||
|
|
||||||
|
### `enrich_search_company`
|
||||||
|
|
||||||
|
Search for companies by various criteria including name, industry, location, and size.
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `apiKey` | string | Yes | Enrich API key |
|
||||||
|
| `name` | string | No | Company name |
|
||||||
|
| `website` | string | No | Company website URL |
|
||||||
|
| `tagline` | string | No | Company tagline |
|
||||||
|
| `type` | string | No | Company type \(e.g., Private, Public\) |
|
||||||
|
| `description` | string | No | Company description keywords |
|
||||||
|
| `industries` | json | No | Industries to filter by \(array\) |
|
||||||
|
| `locationCountry` | string | No | Country |
|
||||||
|
| `locationCity` | string | No | City |
|
||||||
|
| `postalCode` | string | No | Postal code |
|
||||||
|
| `locationCountryList` | json | No | Multiple countries to filter by \(array\) |
|
||||||
|
| `locationCityList` | json | No | Multiple cities to filter by \(array\) |
|
||||||
|
| `specialities` | json | No | Company specialties \(array\) |
|
||||||
|
| `followers` | number | No | Minimum number of followers |
|
||||||
|
| `staffCount` | number | No | Staff count |
|
||||||
|
| `staffCountMin` | number | No | Minimum staff count |
|
||||||
|
| `staffCountMax` | number | No | Maximum staff count |
|
||||||
|
| `currentPage` | number | No | Page number \(default: 1\) |
|
||||||
|
| `pageSize` | number | No | Results per page \(default: 20\) |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `currentPage` | number | Current page number |
|
||||||
|
| `totalPage` | number | Total number of pages |
|
||||||
|
| `pageSize` | number | Results per page |
|
||||||
|
| `companies` | array | Search results |
|
||||||
|
| ↳ `companyName` | string | Company name |
|
||||||
|
| ↳ `tagline` | string | Company tagline |
|
||||||
|
| ↳ `webAddress` | string | Website URL |
|
||||||
|
| ↳ `industries` | array | Industries |
|
||||||
|
| ↳ `teamSize` | number | Team size |
|
||||||
|
| ↳ `linkedInProfile` | string | LinkedIn URL |
|
||||||
|
|
||||||
|
### `enrich_search_company_employees`
|
||||||
|
|
||||||
|
Search for employees within specific companies by location and job title.
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `apiKey` | string | Yes | Enrich API key |
|
||||||
|
| `companyIds` | json | No | Array of company IDs to search within |
|
||||||
|
| `country` | string | No | Country filter \(e.g., United States\) |
|
||||||
|
| `city` | string | No | City filter \(e.g., San Francisco\) |
|
||||||
|
| `state` | string | No | State filter \(e.g., California\) |
|
||||||
|
| `jobTitles` | json | No | Job titles to filter by \(array\) |
|
||||||
|
| `page` | number | No | Page number \(default: 1\) |
|
||||||
|
| `pageSize` | number | No | Results per page \(default: 10\) |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `currentPage` | number | Current page number |
|
||||||
|
| `totalPage` | number | Total number of pages |
|
||||||
|
| `pageSize` | number | Number of results per page |
|
||||||
|
| `profiles` | array | Employee profiles |
|
||||||
|
| ↳ `profileIdentifier` | string | Profile ID |
|
||||||
|
| ↳ `givenName` | string | First name |
|
||||||
|
| ↳ `familyName` | string | Last name |
|
||||||
|
| ↳ `currentPosition` | string | Current job title |
|
||||||
|
| ↳ `profileImage` | string | Profile image URL |
|
||||||
|
| ↳ `externalProfileUrl` | string | LinkedIn URL |
|
||||||
|
| ↳ `city` | string | City |
|
||||||
|
| ↳ `country` | string | Country |
|
||||||
|
| ↳ `expertSkills` | array | Skills |
|
||||||
|
|
||||||
|
### `enrich_search_similar_companies`
|
||||||
|
|
||||||
|
Find companies similar to a given company by LinkedIn URL with filters for location and size.
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `apiKey` | string | Yes | Enrich API key |
|
||||||
|
| `url` | string | Yes | LinkedIn company URL \(e.g., linkedin.com/company/google\) |
|
||||||
|
| `accountLocation` | json | No | Filter by locations \(array of country names\) |
|
||||||
|
| `employeeSizeType` | string | No | Employee size filter type \(e.g., RANGE\) |
|
||||||
|
| `employeeSizeRange` | json | No | Employee size ranges \(array of \{start, end\} objects\) |
|
||||||
|
| `page` | number | No | Page number \(default: 1\) |
|
||||||
|
| `num` | number | No | Number of results per page |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `companies` | array | Similar companies |
|
||||||
|
| ↳ `url` | string | LinkedIn URL |
|
||||||
|
| ↳ `name` | string | Company name |
|
||||||
|
| ↳ `universalName` | string | Universal name |
|
||||||
|
| ↳ `type` | string | Company type |
|
||||||
|
| ↳ `description` | string | Description |
|
||||||
|
| ↳ `phone` | string | Phone number |
|
||||||
|
| ↳ `website` | string | Website URL |
|
||||||
|
| ↳ `logo` | string | Logo URL |
|
||||||
|
| ↳ `foundedYear` | number | Year founded |
|
||||||
|
| ↳ `staffTotal` | number | Total staff |
|
||||||
|
| ↳ `industries` | array | Industries |
|
||||||
|
| ↳ `relevancyScore` | number | Relevancy score |
|
||||||
|
| ↳ `relevancyValue` | string | Relevancy value |
|
||||||
|
|
||||||
|
### `enrich_sales_pointer_people`
|
||||||
|
|
||||||
|
Advanced people search with complex filters for location, company size, seniority, experience, and more.
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `apiKey` | string | Yes | Enrich API key |
|
||||||
|
| `page` | number | Yes | Page number \(starts at 1\) |
|
||||||
|
| `filters` | json | Yes | Array of filter objects. Each filter has type \(e.g., POSTAL_CODE, COMPANY_HEADCOUNT\), values \(array with id, text, selectionType: INCLUDED/EXCLUDED\), and optional selectedSubFilter |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `data` | array | People results |
|
||||||
|
| ↳ `name` | string | Full name |
|
||||||
|
| ↳ `summary` | string | Professional summary |
|
||||||
|
| ↳ `location` | string | Location |
|
||||||
|
| ↳ `profilePicture` | string | Profile picture URL |
|
||||||
|
| ↳ `linkedInUrn` | string | LinkedIn URN |
|
||||||
|
| ↳ `positions` | array | Work positions |
|
||||||
|
| ↳ `education` | array | Education |
|
||||||
|
| `pagination` | json | Pagination info |
|
||||||
|
| ↳ `totalCount` | number | Total results |
|
||||||
|
| ↳ `returnedCount` | number | Returned count |
|
||||||
|
| ↳ `start` | number | Start position |
|
||||||
|
| ↳ `limit` | number | Limit |
|
||||||
|
|
||||||
|
### `enrich_search_posts`
|
||||||
|
|
||||||
|
Search LinkedIn posts by keywords with date filtering.
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `apiKey` | string | Yes | Enrich API key |
|
||||||
|
| `keywords` | string | Yes | Search keywords \(e.g., "AI automation"\) |
|
||||||
|
| `datePosted` | string | No | Time filter \(e.g., past_week, past_month\) |
|
||||||
|
| `page` | number | No | Page number \(default: 1\) |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `count` | number | Total number of results |
|
||||||
|
| `posts` | array | Search results |
|
||||||
|
| ↳ `url` | string | Post URL |
|
||||||
|
| ↳ `postId` | string | Post ID |
|
||||||
|
| ↳ `author` | object | Author information |
|
||||||
|
| ↳ `name` | string | Author name |
|
||||||
|
| ↳ `headline` | string | Author headline |
|
||||||
|
| ↳ `linkedInUrl` | string | Author LinkedIn URL |
|
||||||
|
| ↳ `profileImage` | string | Author profile image |
|
||||||
|
| ↳ `timestamp` | string | Post timestamp |
|
||||||
|
| ↳ `textContent` | string | Post text content |
|
||||||
|
| ↳ `hashtags` | array | Hashtags |
|
||||||
|
| ↳ `mediaUrls` | array | Media URLs |
|
||||||
|
| ↳ `reactions` | number | Number of reactions |
|
||||||
|
| ↳ `commentsCount` | number | Number of comments |
|
||||||
|
|
||||||
|
### `enrich_get_post_details`
|
||||||
|
|
||||||
|
Get detailed information about a LinkedIn post by URL.
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `apiKey` | string | Yes | Enrich API key |
|
||||||
|
| `url` | string | Yes | LinkedIn post URL |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `postId` | string | Post ID |
|
||||||
|
| `author` | json | Author information |
|
||||||
|
| ↳ `name` | string | Author name |
|
||||||
|
| ↳ `headline` | string | Author headline |
|
||||||
|
| ↳ `linkedInUrl` | string | Author LinkedIn URL |
|
||||||
|
| ↳ `profileImage` | string | Author profile image |
|
||||||
|
| `timestamp` | string | Post timestamp |
|
||||||
|
| `textContent` | string | Post text content |
|
||||||
|
| `hashtags` | array | Hashtags |
|
||||||
|
| `mediaUrls` | array | Media URLs |
|
||||||
|
| `reactions` | number | Number of reactions |
|
||||||
|
| `commentsCount` | number | Number of comments |
|
||||||
|
|
||||||
|
### `enrich_search_post_reactions`
|
||||||
|
|
||||||
|
Get reactions on a LinkedIn post with filtering by reaction type.
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `apiKey` | string | Yes | Enrich API key |
|
||||||
|
| `postUrn` | string | Yes | LinkedIn activity URN \(e.g., urn:li:activity:7231931952839196672\) |
|
||||||
|
| `reactionType` | string | Yes | Reaction type filter: all, like, love, celebrate, insightful, or funny \(default: all\) |
|
||||||
|
| `page` | number | Yes | Page number \(starts at 1\) |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `page` | number | Current page number |
|
||||||
|
| `totalPage` | number | Total number of pages |
|
||||||
|
| `count` | number | Number of reactions returned |
|
||||||
|
| `reactions` | array | Reactions |
|
||||||
|
| ↳ `reactionType` | string | Type of reaction |
|
||||||
|
| ↳ `reactor` | object | Person who reacted |
|
||||||
|
| ↳ `name` | string | Name |
|
||||||
|
| ↳ `subTitle` | string | Job title |
|
||||||
|
| ↳ `profileId` | string | Profile ID |
|
||||||
|
| ↳ `profilePicture` | string | Profile picture URL |
|
||||||
|
| ↳ `linkedInUrl` | string | LinkedIn URL |
|
||||||
|
|
||||||
|
### `enrich_search_post_comments`
|
||||||
|
|
||||||
|
Get comments on a LinkedIn post.
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `apiKey` | string | Yes | Enrich API key |
|
||||||
|
| `postUrn` | string | Yes | LinkedIn activity URN \(e.g., urn:li:activity:7191163324208705536\) |
|
||||||
|
| `page` | number | No | Page number \(starts at 1, default: 1\) |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `page` | number | Current page number |
|
||||||
|
| `totalPage` | number | Total number of pages |
|
||||||
|
| `count` | number | Number of comments returned |
|
||||||
|
| `comments` | array | Comments |
|
||||||
|
| ↳ `activityId` | string | Comment activity ID |
|
||||||
|
| ↳ `commentary` | string | Comment text |
|
||||||
|
| ↳ `linkedInUrl` | string | Link to comment |
|
||||||
|
| ↳ `commenter` | object | Commenter info |
|
||||||
|
| ↳ `profileId` | string | Profile ID |
|
||||||
|
| ↳ `firstName` | string | First name |
|
||||||
|
| ↳ `lastName` | string | Last name |
|
||||||
|
| ↳ `subTitle` | string | Subtitle/headline |
|
||||||
|
| ↳ `profilePicture` | string | Profile picture URL |
|
||||||
|
| ↳ `backgroundImage` | string | Background image URL |
|
||||||
|
| ↳ `entityUrn` | string | Entity URN |
|
||||||
|
| ↳ `objectUrn` | string | Object URN |
|
||||||
|
| ↳ `profileType` | string | Profile type |
|
||||||
|
| ↳ `reactionBreakdown` | object | Reactions on the comment |
|
||||||
|
| ↳ `likes` | number | Number of likes |
|
||||||
|
| ↳ `empathy` | number | Number of empathy reactions |
|
||||||
|
| ↳ `other` | number | Number of other reactions |
|
||||||
|
|
||||||
|
### `enrich_search_people_activities`
|
||||||
|
|
||||||
|
Get a person's LinkedIn activities \(posts, comments, or articles\) by profile ID.
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `apiKey` | string | Yes | Enrich API key |
|
||||||
|
| `profileId` | string | Yes | LinkedIn profile ID |
|
||||||
|
| `activityType` | string | Yes | Activity type: posts, comments, or articles |
|
||||||
|
| `paginationToken` | string | No | Pagination token for next page of results |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `paginationToken` | string | Token for fetching next page |
|
||||||
|
| `activityType` | string | Type of activities returned |
|
||||||
|
| `activities` | array | Activities |
|
||||||
|
| ↳ `activityId` | string | Activity ID |
|
||||||
|
| ↳ `commentary` | string | Activity text content |
|
||||||
|
| ↳ `linkedInUrl` | string | Link to activity |
|
||||||
|
| ↳ `timeElapsed` | string | Time elapsed since activity |
|
||||||
|
| ↳ `numReactions` | number | Total number of reactions |
|
||||||
|
| ↳ `author` | object | Activity author info |
|
||||||
|
| ↳ `name` | string | Author name |
|
||||||
|
| ↳ `profileId` | string | Profile ID |
|
||||||
|
| ↳ `profilePicture` | string | Profile picture URL |
|
||||||
|
| ↳ `reactionBreakdown` | object | Reactions |
|
||||||
|
| ↳ `likes` | number | Likes |
|
||||||
|
| ↳ `empathy` | number | Empathy reactions |
|
||||||
|
| ↳ `other` | number | Other reactions |
|
||||||
|
| ↳ `attachments` | array | Attachment URLs |
|
||||||
|
|
||||||
|
### `enrich_search_company_activities`
|
||||||
|
|
||||||
|
Get a company's LinkedIn activities \(posts, comments, or articles\) by company ID.
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `apiKey` | string | Yes | Enrich API key |
|
||||||
|
| `companyId` | string | Yes | LinkedIn company ID |
|
||||||
|
| `activityType` | string | Yes | Activity type: posts, comments, or articles |
|
||||||
|
| `paginationToken` | string | No | Pagination token for next page of results |
|
||||||
|
| `offset` | number | No | Number of records to skip \(default: 0\) |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `paginationToken` | string | Token for fetching next page |
|
||||||
|
| `activityType` | string | Type of activities returned |
|
||||||
|
| `activities` | array | Activities |
|
||||||
|
| ↳ `activityId` | string | Activity ID |
|
||||||
|
| ↳ `commentary` | string | Activity text content |
|
||||||
|
| ↳ `linkedInUrl` | string | Link to activity |
|
||||||
|
| ↳ `timeElapsed` | string | Time elapsed since activity |
|
||||||
|
| ↳ `numReactions` | number | Total number of reactions |
|
||||||
|
| ↳ `author` | object | Activity author info |
|
||||||
|
| ↳ `name` | string | Author name |
|
||||||
|
| ↳ `profileId` | string | Profile ID |
|
||||||
|
| ↳ `profilePicture` | string | Profile picture URL |
|
||||||
|
| ↳ `reactionBreakdown` | object | Reactions |
|
||||||
|
| ↳ `likes` | number | Likes |
|
||||||
|
| ↳ `empathy` | number | Empathy reactions |
|
||||||
|
| ↳ `other` | number | Other reactions |
|
||||||
|
| ↳ `attachments` | array | Attachments |
|
||||||
|
|
||||||
|
### `enrich_reverse_hash_lookup`
|
||||||
|
|
||||||
|
Convert an MD5 email hash back to the original email address and display name.
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `apiKey` | string | Yes | Enrich API key |
|
||||||
|
| `hash` | string | Yes | MD5 hash value to look up |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `hash` | string | MD5 hash that was looked up |
|
||||||
|
| `email` | string | Original email address |
|
||||||
|
| `displayName` | string | Display name associated with the email |
|
||||||
|
| `found` | boolean | Whether an email was found for the hash |
|
||||||
|
|
||||||
|
### `enrich_search_logo`
|
||||||
|
|
||||||
|
Get a company logo image URL by domain.
|
||||||
|
|
||||||
|
#### Input
|
||||||
|
|
||||||
|
| Parameter | Type | Required | Description |
|
||||||
|
| --------- | ---- | -------- | ----------- |
|
||||||
|
| `apiKey` | string | Yes | Enrich API key |
|
||||||
|
| `url` | string | Yes | Company domain \(e.g., google.com\) |
|
||||||
|
|
||||||
|
#### Output
|
||||||
|
|
||||||
|
| Parameter | Type | Description |
|
||||||
|
| --------- | ---- | ----------- |
|
||||||
|
| `logoUrl` | string | URL to fetch the company logo |
|
||||||
|
| `domain` | string | Domain that was looked up |
|
||||||
|
|
||||||
|
|
||||||
@@ -10,6 +10,23 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
|
|||||||
color="#181C1E"
|
color="#181C1E"
|
||||||
/>
|
/>
|
||||||
|
|
||||||
|
{/* MANUAL-CONTENT-START:intro */}
|
||||||
|
[GitHub](https://github.com/) is the world’s leading platform for hosting, collaborating on, and managing source code. GitHub offers powerful tools for version control, code review, branching strategies, and team collaboration within the rich Git ecosystem, underpinning both open source and enterprise development worldwide.
|
||||||
|
|
||||||
|
The GitHub integration in Sim allows your agents to seamlessly automate, interact with, and orchestrate workflows across your repositories. Using this integration, agents can perform an extended set of code and collaboration operations, enabling:
|
||||||
|
|
||||||
|
- **Fetch pull request details:** Retrieve a full overview of any pull request, including file diffs, branch information, metadata, approvals, and a summary of changes, for automation or review workflows.
|
||||||
|
- **Create pull request comments:** Automatically generate or post comments on PRs—such as reviews, suggestions, or status updates—enabling speedy feedback, documentation, or policy enforcement.
|
||||||
|
- **Get repository information:** Access comprehensive repository metadata, including descriptions, visibility, topics, default branches, and contributors. This supports intelligent project analysis, dynamic workflow routing, and organizational reporting.
|
||||||
|
- **Fetch the latest commit:** Quickly obtain details from the newest commit on any branch, including hashes, messages, authors, and timestamps. This is useful for monitoring development velocity, triggering downstream actions, or enforcing quality checks.
|
||||||
|
- **Trigger workflows from GitHub events:** Set up Sim workflows to start automatically from key GitHub events, including pull request creation, review comments, or when new commits are pushed, through easy webhook integration. Automate actions such as deployments, notifications, compliance checks, or documentation updates in real time.
|
||||||
|
- **Monitor and manage repository activity:** Programmatically track contributions, manage PR review states, analyze branch histories, and audit code changes. Empower agents to enforce requirements, coordinate releases, and respond dynamically to development patterns.
|
||||||
|
- **Support for advanced automations:** Combine these operations—for example, fetch PR data, leave context-aware comments, and kick off multi-step Sim workflows on code pushes or PR merges—to automate your team’s engineering processes from end to end.
|
||||||
|
|
||||||
|
By leveraging all of these capabilities, the Sim GitHub integration enables agents to engage deeply in the development lifecycle. Automate code reviews, streamline team feedback, synchronize project artifacts, accelerate CI/CD, and enforce best practices with ease. Bring security, speed, and reliability to your workflows—directly within your Sim-powered automation environment, with full integration into your organization’s GitHub strategy.
|
||||||
|
{/* MANUAL-CONTENT-END */}
|
||||||
|
|
||||||
|
|
||||||
## Usage Instructions
|
## Usage Instructions
|
||||||
|
|
||||||
Integrate Github into the workflow. Can get get PR details, create PR comment, get repository info, and get latest commit. Can be used in trigger mode to trigger a workflow when a PR is created, commented on, or a commit is pushed.
|
Integrate Github into the workflow. Can get get PR details, create PR comment, get repository info, and get latest commit. Can be used in trigger mode to trigger a workflow when a PR is created, commented on, or a commit is pushed.
|
||||||
|
|||||||
@@ -11,55 +11,17 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
|
|||||||
/>
|
/>
|
||||||
|
|
||||||
{/* MANUAL-CONTENT-START:intro */}
|
{/* MANUAL-CONTENT-START:intro */}
|
||||||
[Google Docs](https://docs.google.com) is a powerful cloud-based document creation and editing service that allows users to create, edit, and collaborate on documents in real-time. As part of Google's productivity suite, Google Docs offers a versatile platform for text documents with robust formatting, commenting, and sharing capabilities.
|
[Google Docs](https://docs.google.com) is Google’s collaborative, cloud-based document service, enabling users to create, edit, and share documents in real time. As an integral part of Google Workspace, Docs offers rich formatting tools, commenting, version history, and seamless integration with other Google productivity tools.
|
||||||
|
|
||||||
Learn how to integrate the Google Docs "Read" tool in Sim to effortlessly fetch data from your docs and to integrate into your workflows. This tutorial walks you through connecting Google Docs, setting up data reads, and using that information to automate processes in real-time. Perfect for syncing live data with your agents.
|
Google Docs empowers individuals and teams to:
|
||||||
|
|
||||||
<iframe
|
- **Create and format documents:** Develop rich text documents with advanced formatting, images, and tables.
|
||||||
width="100%"
|
- **Collaborate and comment:** Multiple users can edit and comment with suggestions instantly.
|
||||||
height="400"
|
- **Track changes and version history:** Review, revert, and manage revisions over time.
|
||||||
src="https://www.youtube.com/embed/f41gy9rBHhE"
|
- **Access from any device:** Work on documents from web, mobile, or desktop with full cloud synchronization.
|
||||||
title="Use the Google Docs Read tool in Sim"
|
- **Integrate across Google services:** Connect Docs with Drive, Sheets, Slides, and external platforms for powerful workflows.
|
||||||
frameBorder="0"
|
|
||||||
allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture"
|
|
||||||
allowFullScreen
|
|
||||||
></iframe>
|
|
||||||
|
|
||||||
Learn how to integrate the Google Docs "Update" tool in Sim to effortlessly add content in your docs through your workflows. This tutorial walks you through connecting Google Docs, configuring data writes, and using that information to automate document updates seamlessly. Perfect for maintaining dynamic, real-time documentation with minimal effort.
|
In Sim, the Google Docs integration allows your agents to read document content, write new content, and create documents programmatically as part of automated workflows. This integration unlocks automation such as document generation, report writing, content extraction, and collaborative editing—bridging the gap between AI-driven workflows and document management in your organization.
|
||||||
|
|
||||||
<iframe
|
|
||||||
width="100%"
|
|
||||||
height="400"
|
|
||||||
src="https://www.youtube.com/embed/L64ROHS2ivA"
|
|
||||||
title="Use the Google Docs Update tool in Sim"
|
|
||||||
frameBorder="0"
|
|
||||||
allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture"
|
|
||||||
allowFullScreen
|
|
||||||
></iframe>
|
|
||||||
|
|
||||||
Learn how to integrate the Google Docs "Create" tool in Sim to effortlessly generate new documents through your workflows. This tutorial walks you through connecting Google Docs, setting up document creation, and using workflow data to populate content automatically. Perfect for streamlining document generation and enhancing productivity.
|
|
||||||
|
|
||||||
<iframe
|
|
||||||
width="100%"
|
|
||||||
height="400"
|
|
||||||
src="https://www.youtube.com/embed/lWpHH4qddWk"
|
|
||||||
title="Use the Google Docs Create tool in Sim"
|
|
||||||
frameBorder="0"
|
|
||||||
allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture"
|
|
||||||
allowFullScreen
|
|
||||||
></iframe>
|
|
||||||
|
|
||||||
With Google Docs, you can:
|
|
||||||
|
|
||||||
- **Create and edit documents**: Develop text documents with comprehensive formatting options
|
|
||||||
- **Collaborate in real-time**: Work simultaneously with multiple users on the same document
|
|
||||||
- **Track changes**: View revision history and restore previous versions
|
|
||||||
- **Comment and suggest**: Provide feedback and propose edits without changing the original content
|
|
||||||
- **Access anywhere**: Use Google Docs across devices with automatic cloud synchronization
|
|
||||||
- **Work offline**: Continue working without internet connection with changes syncing when back online
|
|
||||||
- **Integrate with other services**: Connect with Google Drive, Sheets, Slides, and third-party applications
|
|
||||||
|
|
||||||
In Sim, the Google Docs integration enables your agents to interact directly with document content programmatically. This allows for powerful automation scenarios such as document creation, content extraction, collaborative editing, and document management. Your agents can read existing documents to extract information, write to documents to update content, and create new documents from scratch. This integration bridges the gap between your AI workflows and document management, enabling seamless interaction with one of the world's most widely used document platforms. By connecting Sim with Google Docs, you can automate document workflows, generate reports, extract insights from documents, and maintain documentation - all through your intelligent agents.
|
|
||||||
{/* MANUAL-CONTENT-END */}
|
{/* MANUAL-CONTENT-END */}
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -11,30 +11,18 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
|
|||||||
/>
|
/>
|
||||||
|
|
||||||
{/* MANUAL-CONTENT-START:intro */}
|
{/* MANUAL-CONTENT-START:intro */}
|
||||||
[Google Drive](https://drive.google.com) is Google's cloud storage and file synchronization service that allows users to store files, synchronize files across devices, and share files with others. As a core component of Google's productivity ecosystem, Google Drive offers robust storage, organization, and collaboration capabilities.
|
[Google Drive](https://drive.google.com) is Google’s cloud-based file storage and synchronization service, making it easy to store, manage, share, and access files securely across devices and platforms. As a core element of Google Workspace, Google Drive offers robust tools for file organization, collaboration, and seamless integration with the broader productivity suite.
|
||||||
|
|
||||||
Learn how to integrate the Google Drive tool in Sim to effortlessly pull information from your Drive through your workflows. This tutorial walks you through connecting Google Drive, setting up data retrieval, and using stored documents and files to enhance automation. Perfect for syncing important data with your agents in real-time.
|
Google Drive enables individuals and teams to:
|
||||||
|
|
||||||
<iframe
|
- **Store files in the cloud:** Access documents, images, videos, and more from anywhere with internet connectivity.
|
||||||
width="100%"
|
- **Organize and manage content:** Create and arrange folders, use naming conventions, and leverage search for fast retrieval.
|
||||||
height="400"
|
- **Share and collaborate:** Control file and folder permissions, share with individuals or groups, and collaborate in real time.
|
||||||
src="https://www.youtube.com/embed/cRoRr4b-EAs"
|
- **Leverage powerful search:** Quickly locate files using Google’s search technology.
|
||||||
title="Use the Google Drive tool in Sim"
|
- **Access across devices:** Work with your files on desktop, mobile, or web with full synchronization.
|
||||||
frameBorder="0"
|
- **Integrate deeply across Google services:** Connect with Google Docs, Sheets, Slides, and partner applications in your workflows.
|
||||||
allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture"
|
|
||||||
allowFullScreen
|
|
||||||
></iframe>
|
|
||||||
|
|
||||||
With Google Drive, you can:
|
In Sim, the Google Drive integration allows your agents to read, upload, download, list, and organize your Drive files programmatically. Agents can automate file management, streamline content workflows, and enable no-code automation around document storage and retrieval. By connecting Sim with Google Drive, you empower your agents to incorporate cloud file operations directly into intelligent business processes.
|
||||||
|
|
||||||
- **Store files in the cloud**: Upload and access your files from anywhere with internet access
|
|
||||||
- **Organize content**: Create folders, use color coding, and implement naming conventions
|
|
||||||
- **Share and collaborate**: Control access permissions and work simultaneously on files
|
|
||||||
- **Search efficiently**: Find files quickly with Google's powerful search technology
|
|
||||||
- **Access across devices**: Use Google Drive on desktop, mobile, and web platforms
|
|
||||||
- **Integrate with other services**: Connect with Google Docs, Sheets, Slides, and third-party applications
|
|
||||||
|
|
||||||
In Sim, the Google Drive integration enables your agents to interact directly with your cloud storage programmatically. This allows for powerful automation scenarios such as file management, content organization, and document workflows. Your agents can upload new files to specific folders, download existing files to process their contents, and list folder contents to navigate your storage structure. This integration bridges the gap between your AI workflows and your document management system, enabling seamless file operations without manual intervention. By connecting Sim with Google Drive, you can automate file-based workflows, manage documents intelligently, and incorporate cloud storage operations into your agent's capabilities.
|
|
||||||
{/* MANUAL-CONTENT-END */}
|
{/* MANUAL-CONTENT-END */}
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -11,29 +11,9 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
|
|||||||
/>
|
/>
|
||||||
|
|
||||||
{/* MANUAL-CONTENT-START:intro */}
|
{/* MANUAL-CONTENT-START:intro */}
|
||||||
[Google Search](https://www.google.com) is the world's most widely used search engine, providing access to billions of web pages and information sources. Google Search uses sophisticated algorithms to deliver relevant search results based on user queries, making it an essential tool for finding information on the internet.
|
[Google Search](https://www.google.com) is the world's most widely used web search engine, making it easy to find information, discover new content, and answer questions in real time. With advanced search algorithms, Google Search helps you quickly locate web pages, images, news, and more using simple or complex queries.
|
||||||
|
|
||||||
Learn how to integrate the Google Search tool in Sim to effortlessly fetch real-time search results through your workflows. This tutorial walks you through connecting Google Search, configuring search queries, and using live data to enhance automation. Perfect for powering your agents with up-to-date information and smarter decision-making.
|
In Sim, the Google Search integration allows your agents to search the web and retrieve live information as part of automated workflows. This enables powerful use cases such as automated research, fact-checking, knowledge synthesis, and dynamic content discovery. By connecting Sim with Google Search, your agents can perform queries, process and analyze web results, and incorporate the latest information into their decisions—without manual effort. Enhance your workflows with always up-to-date knowledge from across the internet.
|
||||||
|
|
||||||
<iframe
|
|
||||||
width="100%"
|
|
||||||
height="400"
|
|
||||||
src="https://www.youtube.com/embed/1B7hV9b5UMQ"
|
|
||||||
title="Use the Google Search tool in Sim"
|
|
||||||
frameBorder="0"
|
|
||||||
allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture"
|
|
||||||
allowFullScreen
|
|
||||||
></iframe>
|
|
||||||
|
|
||||||
With Google Search, you can:
|
|
||||||
|
|
||||||
- **Find relevant information**: Access billions of web pages with Google's powerful search algorithms
|
|
||||||
- **Get specific results**: Use search operators to refine and target your queries
|
|
||||||
- **Discover diverse content**: Find text, images, videos, news, and other content types
|
|
||||||
- **Access knowledge graphs**: Get structured information about people, places, and things
|
|
||||||
- **Utilize search features**: Take advantage of specialized search tools like calculators, unit converters, and more
|
|
||||||
|
|
||||||
In Sim, the Google Search integration enables your agents to search the web programmatically and incorporate search results into their workflows. This allows for powerful automation scenarios such as research, fact-checking, data gathering, and information synthesis. Your agents can formulate search queries, retrieve relevant results, and extract information from those results to make decisions or generate insights. This integration bridges the gap between your AI workflows and the vast information available on the web, enabling your agents to access up-to-date information from across the internet. By connecting Sim with Google Search, you can create agents that stay informed with the latest information, verify facts, conduct research, and provide users with relevant web content - all without leaving your workflow.
|
|
||||||
{/* MANUAL-CONTENT-END */}
|
{/* MANUAL-CONTENT-END */}
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -10,6 +10,20 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
|
|||||||
color="#F64F9E"
|
color="#F64F9E"
|
||||||
/>
|
/>
|
||||||
|
|
||||||
|
{/* MANUAL-CONTENT-START:intro */}
|
||||||
|
The Memory tool enables your agents to store, retrieve, and manage conversation memories across workflows. It acts as a persistent memory store that agents can access to maintain conversation context, recall facts, or track actions over time.
|
||||||
|
|
||||||
|
With the Memory tool, you can:
|
||||||
|
|
||||||
|
- **Add new memories**: Store relevant information, events, or conversation history by saving agent or user messages into a structured memory database
|
||||||
|
- **Retrieve memories**: Fetch specific memories or all memories tied to a conversation, helping agents recall previous interactions or facts
|
||||||
|
- **Delete memories**: Remove outdated or incorrect memories from the database to maintain accurate context
|
||||||
|
- **Append to existing conversations**: Update or expand on existing memory threads by appending new messages with the same conversation identifier
|
||||||
|
|
||||||
|
Sim’s Memory block is especially useful for building agents that require persistent state—helping them remember what was said earlier in a conversation, persist facts between tasks, or apply long-term history in decision-making. By integrating Memory, you enable richer, more contextual, and more dynamic workflows for your agents.
|
||||||
|
{/* MANUAL-CONTENT-END */}
|
||||||
|
|
||||||
|
|
||||||
## Usage Instructions
|
## Usage Instructions
|
||||||
|
|
||||||
Integrate Memory into the workflow. Can add, get a memory, get all memories, and delete memories.
|
Integrate Memory into the workflow. Can add, get a memory, get all memories, and delete memories.
|
||||||
|
|||||||
@@ -24,6 +24,7 @@
|
|||||||
"dynamodb",
|
"dynamodb",
|
||||||
"elasticsearch",
|
"elasticsearch",
|
||||||
"elevenlabs",
|
"elevenlabs",
|
||||||
|
"enrich",
|
||||||
"exa",
|
"exa",
|
||||||
"file",
|
"file",
|
||||||
"firecrawl",
|
"firecrawl",
|
||||||
|
|||||||
@@ -10,6 +10,21 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
|
|||||||
color="#181C1E"
|
color="#181C1E"
|
||||||
/>
|
/>
|
||||||
|
|
||||||
|
{/* MANUAL-CONTENT-START:intro */}
|
||||||
|
The Notion tool integration enables your agents to read, create, and manage Notion pages and databases directly within your workflows. This allows you to automate the retrieval and updating of structured content, notes, documents, and more from your Notion workspace.
|
||||||
|
|
||||||
|
With the Notion tool, you can:
|
||||||
|
|
||||||
|
- **Read pages or databases**: Extract rich content or metadata from specified Notion pages or entire databases
|
||||||
|
- **Create new content**: Programmatically create new pages or databases for dynamic content generation
|
||||||
|
- **Append content**: Add new blocks or properties to existing pages and databases
|
||||||
|
- **Query databases**: Run advanced filters and searches on structured Notion data for custom workflows
|
||||||
|
- **Search your workspace**: Locate pages and databases across your Notion workspace automatically
|
||||||
|
|
||||||
|
This tool is ideal for scenarios where agents need to synchronize information, generate reports, or maintain structured notes within Notion. By bringing Notion's capabilities into automated workflows, you empower your agents to interface with knowledge, documentation, and project management data programmatically and seamlessly.
|
||||||
|
{/* MANUAL-CONTENT-END */}
|
||||||
|
|
||||||
|
|
||||||
## Usage Instructions
|
## Usage Instructions
|
||||||
|
|
||||||
Integrate with Notion into the workflow. Can read page, read database, create page, create database, append content, query database, and search workspace.
|
Integrate with Notion into the workflow. Can read page, read database, create page, create database, append content, query database, and search workspace.
|
||||||
|
|||||||
@@ -11,7 +11,7 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
|
|||||||
/>
|
/>
|
||||||
|
|
||||||
{/* MANUAL-CONTENT-START:intro */}
|
{/* MANUAL-CONTENT-START:intro */}
|
||||||
The [Pulse](https://www.runpulse.com) tool enables seamless extraction of text and structured content from a wide variety of documents—including PDFs, images, and Office files—using state-of-the-art OCR (Optical Character Recognition) powered by Pulse. Designed for automated agentic workflows, Pulse Parser makes it easy to unlock valuable information trapped in unstructured documents and integrate the extracted content directly into your workflow.
|
The [Pulse](https://www.pulseapi.com/) tool enables seamless extraction of text and structured content from a wide variety of documents—including PDFs, images, and Office files—using state-of-the-art OCR (Optical Character Recognition) powered by Pulse. Designed for automated agentic workflows, Pulse Parser makes it easy to unlock valuable information trapped in unstructured documents and integrate the extracted content directly into your workflow.
|
||||||
|
|
||||||
With Pulse, you can:
|
With Pulse, you can:
|
||||||
|
|
||||||
|
|||||||
@@ -13,16 +13,6 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
|
|||||||
{/* MANUAL-CONTENT-START:intro */}
|
{/* MANUAL-CONTENT-START:intro */}
|
||||||
[Slack](https://www.slack.com/) is a business communication platform that offers teams a unified place for messaging, tools, and files.
|
[Slack](https://www.slack.com/) is a business communication platform that offers teams a unified place for messaging, tools, and files.
|
||||||
|
|
||||||
<iframe
|
|
||||||
width="100%"
|
|
||||||
height="400"
|
|
||||||
src="https://www.youtube.com/embed/J5jz3UaWmE8"
|
|
||||||
title="Slack Integration with Sim"
|
|
||||||
frameBorder="0"
|
|
||||||
allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture"
|
|
||||||
allowFullScreen
|
|
||||||
></iframe>
|
|
||||||
|
|
||||||
With Slack, you can:
|
With Slack, you can:
|
||||||
|
|
||||||
- **Automate agent notifications**: Send real-time updates from your Sim agents to any Slack channel
|
- **Automate agent notifications**: Send real-time updates from your Sim agents to any Slack channel
|
||||||
|
|||||||
Binary file not shown.
|
Before Width: | Height: | Size: 34 KiB |
@@ -16,7 +16,7 @@ import {
|
|||||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||||
import { getBrandConfig } from '@/lib/branding/branding'
|
import { getBrandConfig } from '@/lib/branding/branding'
|
||||||
import { acquireLock, getRedisClient, releaseLock } from '@/lib/core/config/redis'
|
import { acquireLock, getRedisClient, releaseLock } from '@/lib/core/config/redis'
|
||||||
import { validateExternalUrl } from '@/lib/core/security/input-validation'
|
import { validateUrlWithDNS } from '@/lib/core/security/input-validation.server'
|
||||||
import { SSE_HEADERS } from '@/lib/core/utils/sse'
|
import { SSE_HEADERS } from '@/lib/core/utils/sse'
|
||||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||||
import { markExecutionCancelled } from '@/lib/execution/cancellation'
|
import { markExecutionCancelled } from '@/lib/execution/cancellation'
|
||||||
@@ -1119,7 +1119,7 @@ async function handlePushNotificationSet(
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
const urlValidation = validateExternalUrl(
|
const urlValidation = await validateUrlWithDNS(
|
||||||
params.pushNotificationConfig.url,
|
params.pushNotificationConfig.url,
|
||||||
'Push notification URL'
|
'Push notification URL'
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -8,6 +8,7 @@ import { verifyCronAuth } from '@/lib/auth/internal'
|
|||||||
const logger = createLogger('CleanupStaleExecutions')
|
const logger = createLogger('CleanupStaleExecutions')
|
||||||
|
|
||||||
const STALE_THRESHOLD_MINUTES = 30
|
const STALE_THRESHOLD_MINUTES = 30
|
||||||
|
const MAX_INT32 = 2_147_483_647
|
||||||
|
|
||||||
export async function GET(request: NextRequest) {
|
export async function GET(request: NextRequest) {
|
||||||
try {
|
try {
|
||||||
@@ -45,13 +46,14 @@ export async function GET(request: NextRequest) {
|
|||||||
try {
|
try {
|
||||||
const staleDurationMs = Date.now() - new Date(execution.startedAt).getTime()
|
const staleDurationMs = Date.now() - new Date(execution.startedAt).getTime()
|
||||||
const staleDurationMinutes = Math.round(staleDurationMs / 60000)
|
const staleDurationMinutes = Math.round(staleDurationMs / 60000)
|
||||||
|
const totalDurationMs = Math.min(staleDurationMs, MAX_INT32)
|
||||||
|
|
||||||
await db
|
await db
|
||||||
.update(workflowExecutionLogs)
|
.update(workflowExecutionLogs)
|
||||||
.set({
|
.set({
|
||||||
status: 'failed',
|
status: 'failed',
|
||||||
endedAt: new Date(),
|
endedAt: new Date(),
|
||||||
totalDurationMs: staleDurationMs,
|
totalDurationMs,
|
||||||
executionData: sql`jsonb_set(
|
executionData: sql`jsonb_set(
|
||||||
COALESCE(execution_data, '{}'::jsonb),
|
COALESCE(execution_data, '{}'::jsonb),
|
||||||
ARRAY['error'],
|
ARRAY['error'],
|
||||||
|
|||||||
@@ -6,7 +6,11 @@ import { createLogger } from '@sim/logger'
|
|||||||
import binaryExtensionsList from 'binary-extensions'
|
import binaryExtensionsList from 'binary-extensions'
|
||||||
import { type NextRequest, NextResponse } from 'next/server'
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||||
import { secureFetchWithPinnedIP, validateUrlWithDNS } from '@/lib/core/security/input-validation'
|
import {
|
||||||
|
secureFetchWithPinnedIP,
|
||||||
|
validateUrlWithDNS,
|
||||||
|
} from '@/lib/core/security/input-validation.server'
|
||||||
|
import { sanitizeUrlForLog } from '@/lib/core/utils/logging'
|
||||||
import { isSupportedFileType, parseFile } from '@/lib/file-parsers'
|
import { isSupportedFileType, parseFile } from '@/lib/file-parsers'
|
||||||
import { isUsingCloudStorage, type StorageContext, StorageService } from '@/lib/uploads'
|
import { isUsingCloudStorage, type StorageContext, StorageService } from '@/lib/uploads'
|
||||||
import { uploadExecutionFile } from '@/lib/uploads/contexts/execution'
|
import { uploadExecutionFile } from '@/lib/uploads/contexts/execution'
|
||||||
@@ -19,6 +23,7 @@ import {
|
|||||||
getMimeTypeFromExtension,
|
getMimeTypeFromExtension,
|
||||||
getViewerUrl,
|
getViewerUrl,
|
||||||
inferContextFromKey,
|
inferContextFromKey,
|
||||||
|
isInternalFileUrl,
|
||||||
} from '@/lib/uploads/utils/file-utils'
|
} from '@/lib/uploads/utils/file-utils'
|
||||||
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
|
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
|
||||||
import { verifyFileAccess } from '@/app/api/files/authorization'
|
import { verifyFileAccess } from '@/app/api/files/authorization'
|
||||||
@@ -215,7 +220,7 @@ async function parseFileSingle(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if (filePath.includes('/api/files/serve/')) {
|
if (isInternalFileUrl(filePath)) {
|
||||||
return handleCloudFile(filePath, fileType, undefined, userId, executionContext)
|
return handleCloudFile(filePath, fileType, undefined, userId, executionContext)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -246,7 +251,7 @@ function validateFilePath(filePath: string): { isValid: boolean; error?: string
|
|||||||
return { isValid: false, error: 'Invalid path: tilde character not allowed' }
|
return { isValid: false, error: 'Invalid path: tilde character not allowed' }
|
||||||
}
|
}
|
||||||
|
|
||||||
if (filePath.startsWith('/') && !filePath.startsWith('/api/files/serve/')) {
|
if (filePath.startsWith('/') && !isInternalFileUrl(filePath)) {
|
||||||
return { isValid: false, error: 'Path outside allowed directory' }
|
return { isValid: false, error: 'Path outside allowed directory' }
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -420,7 +425,7 @@ async function handleExternalUrl(
|
|||||||
|
|
||||||
return parseResult
|
return parseResult
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error(`Error handling external URL ${url}:`, error)
|
logger.error(`Error handling external URL ${sanitizeUrlForLog(url)}:`, error)
|
||||||
return {
|
return {
|
||||||
success: false,
|
success: false,
|
||||||
error: `Error fetching URL: ${(error as Error).message}`,
|
error: `Error fetching URL: ${(error as Error).message}`,
|
||||||
|
|||||||
@@ -284,7 +284,7 @@ async function handleToolsCall(
|
|||||||
content: [
|
content: [
|
||||||
{ type: 'text', text: JSON.stringify(executeResult.output || executeResult, null, 2) },
|
{ type: 'text', text: JSON.stringify(executeResult.output || executeResult, null, 2) },
|
||||||
],
|
],
|
||||||
isError: !executeResult.success,
|
isError: executeResult.success === false,
|
||||||
}
|
}
|
||||||
|
|
||||||
return NextResponse.json(createResponse(id, result))
|
return NextResponse.json(createResponse(id, result))
|
||||||
|
|||||||
@@ -20,6 +20,7 @@ import { z } from 'zod'
|
|||||||
import { getEmailSubject, renderInvitationEmail } from '@/components/emails'
|
import { getEmailSubject, renderInvitationEmail } from '@/components/emails'
|
||||||
import { getSession } from '@/lib/auth'
|
import { getSession } from '@/lib/auth'
|
||||||
import { hasAccessControlAccess } from '@/lib/billing'
|
import { hasAccessControlAccess } from '@/lib/billing'
|
||||||
|
import { syncUsageLimitsFromSubscription } from '@/lib/billing/core/usage'
|
||||||
import { requireStripeClient } from '@/lib/billing/stripe-client'
|
import { requireStripeClient } from '@/lib/billing/stripe-client'
|
||||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||||
import { sendEmail } from '@/lib/messaging/email/mailer'
|
import { sendEmail } from '@/lib/messaging/email/mailer'
|
||||||
@@ -501,6 +502,18 @@ export async function PUT(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (status === 'accepted') {
|
||||||
|
try {
|
||||||
|
await syncUsageLimitsFromSubscription(session.user.id)
|
||||||
|
} catch (syncError) {
|
||||||
|
logger.error('Failed to sync usage limits after joining org', {
|
||||||
|
userId: session.user.id,
|
||||||
|
organizationId,
|
||||||
|
error: syncError,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
logger.info(`Organization invitation ${status}`, {
|
logger.info(`Organization invitation ${status}`, {
|
||||||
organizationId,
|
organizationId,
|
||||||
invitationId,
|
invitationId,
|
||||||
|
|||||||
@@ -4,6 +4,7 @@ import { type NextRequest, NextResponse } from 'next/server'
|
|||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { createA2AClient, extractTextContent, isTerminalState } from '@/lib/a2a/utils'
|
import { createA2AClient, extractTextContent, isTerminalState } from '@/lib/a2a/utils'
|
||||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||||
|
import { validateUrlWithDNS } from '@/lib/core/security/input-validation.server'
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
|
|
||||||
export const dynamic = 'force-dynamic'
|
export const dynamic = 'force-dynamic'
|
||||||
@@ -95,6 +96,14 @@ export async function POST(request: NextRequest) {
|
|||||||
if (validatedData.files && validatedData.files.length > 0) {
|
if (validatedData.files && validatedData.files.length > 0) {
|
||||||
for (const file of validatedData.files) {
|
for (const file of validatedData.files) {
|
||||||
if (file.type === 'url') {
|
if (file.type === 'url') {
|
||||||
|
const urlValidation = await validateUrlWithDNS(file.data, 'fileUrl')
|
||||||
|
if (!urlValidation.isValid) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{ success: false, error: urlValidation.error },
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
const filePart: FilePart = {
|
const filePart: FilePart = {
|
||||||
kind: 'file',
|
kind: 'file',
|
||||||
file: {
|
file: {
|
||||||
|
|||||||
@@ -3,7 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
|
|||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { createA2AClient } from '@/lib/a2a/utils'
|
import { createA2AClient } from '@/lib/a2a/utils'
|
||||||
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
import { checkHybridAuth } from '@/lib/auth/hybrid'
|
||||||
import { validateExternalUrl } from '@/lib/core/security/input-validation'
|
import { validateUrlWithDNS } from '@/lib/core/security/input-validation.server'
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
|
|
||||||
export const dynamic = 'force-dynamic'
|
export const dynamic = 'force-dynamic'
|
||||||
@@ -40,7 +40,7 @@ export async function POST(request: NextRequest) {
|
|||||||
const body = await request.json()
|
const body = await request.json()
|
||||||
const validatedData = A2ASetPushNotificationSchema.parse(body)
|
const validatedData = A2ASetPushNotificationSchema.parse(body)
|
||||||
|
|
||||||
const urlValidation = validateExternalUrl(validatedData.webhookUrl, 'Webhook URL')
|
const urlValidation = await validateUrlWithDNS(validatedData.webhookUrl, 'Webhook URL')
|
||||||
if (!urlValidation.isValid) {
|
if (!urlValidation.isValid) {
|
||||||
logger.warn(`[${requestId}] Invalid webhook URL`, { error: urlValidation.error })
|
logger.warn(`[${requestId}] Invalid webhook URL`, { error: urlValidation.error })
|
||||||
return NextResponse.json(
|
return NextResponse.json(
|
||||||
|
|||||||
@@ -92,6 +92,9 @@ export async function POST(request: NextRequest) {
|
|||||||
formData.append('comment', comment)
|
formData.append('comment', comment)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Add minorEdit field as required by Confluence API
|
||||||
|
formData.append('minorEdit', 'false')
|
||||||
|
|
||||||
const response = await fetch(url, {
|
const response = await fetch(url, {
|
||||||
method: 'POST',
|
method: 'POST',
|
||||||
headers: {
|
headers: {
|
||||||
|
|||||||
@@ -4,6 +4,7 @@ import { z } from 'zod'
|
|||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
import { validateNumericId } from '@/lib/core/security/input-validation'
|
import { validateNumericId } from '@/lib/core/security/input-validation'
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
|
import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
|
||||||
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
|
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
|
||||||
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
||||||
|
|
||||||
@@ -15,7 +16,7 @@ const DiscordSendMessageSchema = z.object({
|
|||||||
botToken: z.string().min(1, 'Bot token is required'),
|
botToken: z.string().min(1, 'Bot token is required'),
|
||||||
channelId: z.string().min(1, 'Channel ID is required'),
|
channelId: z.string().min(1, 'Channel ID is required'),
|
||||||
content: z.string().optional().nullable(),
|
content: z.string().optional().nullable(),
|
||||||
files: z.array(z.any()).optional().nullable(),
|
files: RawFileInputArraySchema.optional().nullable(),
|
||||||
})
|
})
|
||||||
|
|
||||||
export async function POST(request: NextRequest) {
|
export async function POST(request: NextRequest) {
|
||||||
@@ -101,6 +102,12 @@ export async function POST(request: NextRequest) {
|
|||||||
logger.info(`[${requestId}] Processing ${validatedData.files.length} file(s)`)
|
logger.info(`[${requestId}] Processing ${validatedData.files.length} file(s)`)
|
||||||
|
|
||||||
const userFiles = processFilesToUserFiles(validatedData.files, requestId, logger)
|
const userFiles = processFilesToUserFiles(validatedData.files, requestId, logger)
|
||||||
|
const filesOutput: Array<{
|
||||||
|
name: string
|
||||||
|
mimeType: string
|
||||||
|
data: string
|
||||||
|
size: number
|
||||||
|
}> = []
|
||||||
|
|
||||||
if (userFiles.length === 0) {
|
if (userFiles.length === 0) {
|
||||||
logger.warn(`[${requestId}] No valid files to upload, falling back to text-only`)
|
logger.warn(`[${requestId}] No valid files to upload, falling back to text-only`)
|
||||||
@@ -137,6 +144,12 @@ export async function POST(request: NextRequest) {
|
|||||||
logger.info(`[${requestId}] Downloading file ${i}: ${userFile.name}`)
|
logger.info(`[${requestId}] Downloading file ${i}: ${userFile.name}`)
|
||||||
|
|
||||||
const buffer = await downloadFileFromStorage(userFile, requestId, logger)
|
const buffer = await downloadFileFromStorage(userFile, requestId, logger)
|
||||||
|
filesOutput.push({
|
||||||
|
name: userFile.name,
|
||||||
|
mimeType: userFile.type || 'application/octet-stream',
|
||||||
|
data: buffer.toString('base64'),
|
||||||
|
size: buffer.length,
|
||||||
|
})
|
||||||
|
|
||||||
const blob = new Blob([new Uint8Array(buffer)], { type: userFile.type })
|
const blob = new Blob([new Uint8Array(buffer)], { type: userFile.type })
|
||||||
formData.append(`files[${i}]`, blob, userFile.name)
|
formData.append(`files[${i}]`, blob, userFile.name)
|
||||||
@@ -173,6 +186,7 @@ export async function POST(request: NextRequest) {
|
|||||||
message: data.content,
|
message: data.content,
|
||||||
data: data,
|
data: data,
|
||||||
fileCount: userFiles.length,
|
fileCount: userFiles.length,
|
||||||
|
files: filesOutput,
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
|
|||||||
195
apps/sim/app/api/tools/github/latest-commit/route.ts
Normal file
195
apps/sim/app/api/tools/github/latest-commit/route.ts
Normal file
@@ -0,0 +1,195 @@
|
|||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
|
import { z } from 'zod'
|
||||||
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
|
import {
|
||||||
|
secureFetchWithPinnedIP,
|
||||||
|
validateUrlWithDNS,
|
||||||
|
} from '@/lib/core/security/input-validation.server'
|
||||||
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
|
|
||||||
|
export const dynamic = 'force-dynamic'
|
||||||
|
|
||||||
|
const logger = createLogger('GitHubLatestCommitAPI')
|
||||||
|
|
||||||
|
interface GitHubErrorResponse {
|
||||||
|
message?: string
|
||||||
|
}
|
||||||
|
|
||||||
|
interface GitHubCommitResponse {
|
||||||
|
sha: string
|
||||||
|
html_url: string
|
||||||
|
commit: {
|
||||||
|
message: string
|
||||||
|
author: { name: string; email: string; date: string }
|
||||||
|
committer: { name: string; email: string; date: string }
|
||||||
|
}
|
||||||
|
author?: { login: string; avatar_url: string; html_url: string }
|
||||||
|
committer?: { login: string; avatar_url: string; html_url: string }
|
||||||
|
stats?: { additions: number; deletions: number; total: number }
|
||||||
|
files?: Array<{
|
||||||
|
filename: string
|
||||||
|
status: string
|
||||||
|
additions: number
|
||||||
|
deletions: number
|
||||||
|
changes: number
|
||||||
|
patch?: string
|
||||||
|
raw_url?: string
|
||||||
|
blob_url?: string
|
||||||
|
}>
|
||||||
|
}
|
||||||
|
|
||||||
|
const GitHubLatestCommitSchema = z.object({
|
||||||
|
owner: z.string().min(1, 'Owner is required'),
|
||||||
|
repo: z.string().min(1, 'Repo is required'),
|
||||||
|
branch: z.string().optional().nullable(),
|
||||||
|
apiKey: z.string().min(1, 'API key is required'),
|
||||||
|
})
|
||||||
|
|
||||||
|
export async function POST(request: NextRequest) {
|
||||||
|
const requestId = generateRequestId()
|
||||||
|
|
||||||
|
try {
|
||||||
|
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
|
||||||
|
|
||||||
|
if (!authResult.success) {
|
||||||
|
logger.warn(`[${requestId}] Unauthorized GitHub latest commit attempt: ${authResult.error}`)
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: authResult.error || 'Authentication required',
|
||||||
|
},
|
||||||
|
{ status: 401 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const body = await request.json()
|
||||||
|
const validatedData = GitHubLatestCommitSchema.parse(body)
|
||||||
|
|
||||||
|
const { owner, repo, branch, apiKey } = validatedData
|
||||||
|
|
||||||
|
const baseUrl = `https://api.github.com/repos/${owner}/${repo}`
|
||||||
|
const commitUrl = branch ? `${baseUrl}/commits/${branch}` : `${baseUrl}/commits/HEAD`
|
||||||
|
|
||||||
|
logger.info(`[${requestId}] Fetching latest commit from GitHub`, { owner, repo, branch })
|
||||||
|
|
||||||
|
const urlValidation = await validateUrlWithDNS(commitUrl, 'commitUrl')
|
||||||
|
if (!urlValidation.isValid) {
|
||||||
|
return NextResponse.json({ success: false, error: urlValidation.error }, { status: 400 })
|
||||||
|
}
|
||||||
|
|
||||||
|
const response = await secureFetchWithPinnedIP(commitUrl, urlValidation.resolvedIP!, {
|
||||||
|
method: 'GET',
|
||||||
|
headers: {
|
||||||
|
Accept: 'application/vnd.github.v3+json',
|
||||||
|
Authorization: `Bearer ${apiKey}`,
|
||||||
|
'X-GitHub-Api-Version': '2022-11-28',
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
const errorData = (await response.json().catch(() => ({}))) as GitHubErrorResponse
|
||||||
|
logger.error(`[${requestId}] GitHub API error`, {
|
||||||
|
status: response.status,
|
||||||
|
error: errorData,
|
||||||
|
})
|
||||||
|
return NextResponse.json(
|
||||||
|
{ success: false, error: errorData.message || `GitHub API error: ${response.status}` },
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const data = (await response.json()) as GitHubCommitResponse
|
||||||
|
|
||||||
|
const content = `Latest commit: "${data.commit.message}" by ${data.commit.author.name} on ${data.commit.author.date}. SHA: ${data.sha}`
|
||||||
|
|
||||||
|
const files = data.files || []
|
||||||
|
const fileDetailsWithContent = []
|
||||||
|
|
||||||
|
for (const file of files) {
|
||||||
|
const fileDetail: Record<string, any> = {
|
||||||
|
filename: file.filename,
|
||||||
|
additions: file.additions,
|
||||||
|
deletions: file.deletions,
|
||||||
|
changes: file.changes,
|
||||||
|
status: file.status,
|
||||||
|
raw_url: file.raw_url,
|
||||||
|
blob_url: file.blob_url,
|
||||||
|
patch: file.patch,
|
||||||
|
content: undefined,
|
||||||
|
}
|
||||||
|
|
||||||
|
if (file.status !== 'removed' && file.raw_url) {
|
||||||
|
try {
|
||||||
|
const rawUrlValidation = await validateUrlWithDNS(file.raw_url, 'rawUrl')
|
||||||
|
if (rawUrlValidation.isValid) {
|
||||||
|
const contentResponse = await secureFetchWithPinnedIP(
|
||||||
|
file.raw_url,
|
||||||
|
rawUrlValidation.resolvedIP!,
|
||||||
|
{
|
||||||
|
headers: {
|
||||||
|
Authorization: `Bearer ${apiKey}`,
|
||||||
|
'X-GitHub-Api-Version': '2022-11-28',
|
||||||
|
},
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
if (contentResponse.ok) {
|
||||||
|
fileDetail.content = await contentResponse.text()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
logger.warn(`[${requestId}] Failed to fetch content for ${file.filename}:`, error)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fileDetailsWithContent.push(fileDetail)
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info(`[${requestId}] Latest commit fetched successfully`, {
|
||||||
|
sha: data.sha,
|
||||||
|
fileCount: files.length,
|
||||||
|
})
|
||||||
|
|
||||||
|
return NextResponse.json({
|
||||||
|
success: true,
|
||||||
|
output: {
|
||||||
|
content,
|
||||||
|
metadata: {
|
||||||
|
sha: data.sha,
|
||||||
|
html_url: data.html_url,
|
||||||
|
commit_message: data.commit.message,
|
||||||
|
author: {
|
||||||
|
name: data.commit.author.name,
|
||||||
|
login: data.author?.login || 'Unknown',
|
||||||
|
avatar_url: data.author?.avatar_url || '',
|
||||||
|
html_url: data.author?.html_url || '',
|
||||||
|
},
|
||||||
|
committer: {
|
||||||
|
name: data.commit.committer.name,
|
||||||
|
login: data.committer?.login || 'Unknown',
|
||||||
|
avatar_url: data.committer?.avatar_url || '',
|
||||||
|
html_url: data.committer?.html_url || '',
|
||||||
|
},
|
||||||
|
stats: data.stats
|
||||||
|
? {
|
||||||
|
additions: data.stats.additions,
|
||||||
|
deletions: data.stats.deletions,
|
||||||
|
total: data.stats.total,
|
||||||
|
}
|
||||||
|
: undefined,
|
||||||
|
files: fileDetailsWithContent.length > 0 ? fileDetailsWithContent : undefined,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
})
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(`[${requestId}] Error fetching GitHub latest commit:`, error)
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: error instanceof Error ? error.message : 'Unknown error occurred',
|
||||||
|
},
|
||||||
|
{ status: 500 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -3,6 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
|
|||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
|
import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
|
||||||
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
|
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
|
||||||
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
||||||
import {
|
import {
|
||||||
@@ -28,7 +29,7 @@ const GmailDraftSchema = z.object({
|
|||||||
replyToMessageId: z.string().optional().nullable(),
|
replyToMessageId: z.string().optional().nullable(),
|
||||||
cc: z.string().optional().nullable(),
|
cc: z.string().optional().nullable(),
|
||||||
bcc: z.string().optional().nullable(),
|
bcc: z.string().optional().nullable(),
|
||||||
attachments: z.array(z.any()).optional().nullable(),
|
attachments: RawFileInputArraySchema.optional().nullable(),
|
||||||
})
|
})
|
||||||
|
|
||||||
export async function POST(request: NextRequest) {
|
export async function POST(request: NextRequest) {
|
||||||
|
|||||||
@@ -3,6 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
|
|||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
|
import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
|
||||||
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
|
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
|
||||||
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
||||||
import {
|
import {
|
||||||
@@ -28,7 +29,7 @@ const GmailSendSchema = z.object({
|
|||||||
replyToMessageId: z.string().optional().nullable(),
|
replyToMessageId: z.string().optional().nullable(),
|
||||||
cc: z.string().optional().nullable(),
|
cc: z.string().optional().nullable(),
|
||||||
bcc: z.string().optional().nullable(),
|
bcc: z.string().optional().nullable(),
|
||||||
attachments: z.array(z.any()).optional().nullable(),
|
attachments: RawFileInputArraySchema.optional().nullable(),
|
||||||
})
|
})
|
||||||
|
|
||||||
export async function POST(request: NextRequest) {
|
export async function POST(request: NextRequest) {
|
||||||
|
|||||||
252
apps/sim/app/api/tools/google_drive/download/route.ts
Normal file
252
apps/sim/app/api/tools/google_drive/download/route.ts
Normal file
@@ -0,0 +1,252 @@
|
|||||||
|
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import {
  secureFetchWithPinnedIP,
  validateUrlWithDNS,
} from '@/lib/core/security/input-validation.server'
import { generateRequestId } from '@/lib/core/utils/request'
import type { GoogleDriveFile, GoogleDriveRevision } from '@/tools/google_drive/types'
import {
  ALL_FILE_FIELDS,
  ALL_REVISION_FIELDS,
  DEFAULT_EXPORT_FORMATS,
  GOOGLE_WORKSPACE_MIME_TYPES,
} from '@/tools/google_drive/utils'

export const dynamic = 'force-dynamic'

const logger = createLogger('GoogleDriveDownloadAPI')

/** Google API error response structure */
interface GoogleApiErrorResponse {
  error?: {
    message?: string
    code?: number
    status?: string
  }
}

/** Google Drive revisions list response */
interface GoogleDriveRevisionsResponse {
  revisions?: GoogleDriveRevision[]
  nextPageToken?: string
}

// Request body contract. `mimeType` is the desired EXPORT format for Google
// Workspace files (docs/sheets/slides); it is ignored for regular binaries.
const GoogleDriveDownloadSchema = z.object({
  accessToken: z.string().min(1, 'Access token is required'),
  fileId: z.string().min(1, 'File ID is required'),
  mimeType: z.string().optional().nullable(),
  fileName: z.string().optional().nullable(),
  includeRevisions: z.boolean().optional().default(true),
})

/**
 * Downloads a file from Google Drive and returns it base64-encoded together
 * with its metadata.
 *
 * Flow: fetch metadata first (to learn the real MIME type), then either
 * export (Google Workspace types) or download (`alt=media`, everything else),
 * then optionally attach the revision history to the metadata. Every outbound
 * URL is DNS-validated and fetched with a pinned IP (SSRF defense).
 *
 * Responses: 200 with `{ file, metadata }`, 400 on validation/Google errors,
 * 401 on auth failure, 500 on unexpected errors.
 */
export async function POST(request: NextRequest) {
  const requestId = generateRequestId()

  try {
    const authResult = await checkInternalAuth(request, { requireWorkflowId: false })

    if (!authResult.success) {
      logger.warn(`[${requestId}] Unauthorized Google Drive download attempt: ${authResult.error}`)
      return NextResponse.json(
        {
          success: false,
          error: authResult.error || 'Authentication required',
        },
        { status: 401 }
      )
    }

    const body = await request.json()
    const validatedData = GoogleDriveDownloadSchema.parse(body)

    const {
      accessToken,
      fileId,
      mimeType: exportMimeType,
      fileName,
      includeRevisions,
    } = validatedData
    const authHeader = `Bearer ${accessToken}`

    logger.info(`[${requestId}] Getting file metadata from Google Drive`, { fileId })

    // Step 1: metadata — needed to decide between export and direct download.
    const metadataUrl = `https://www.googleapis.com/drive/v3/files/${fileId}?fields=${ALL_FILE_FIELDS}&supportsAllDrives=true`
    const metadataUrlValidation = await validateUrlWithDNS(metadataUrl, 'metadataUrl')
    if (!metadataUrlValidation.isValid) {
      return NextResponse.json(
        { success: false, error: metadataUrlValidation.error },
        { status: 400 }
      )
    }

    const metadataResponse = await secureFetchWithPinnedIP(
      metadataUrl,
      metadataUrlValidation.resolvedIP!,
      {
        headers: { Authorization: authHeader },
      }
    )

    if (!metadataResponse.ok) {
      // Error body may not be JSON; fall back to an empty object.
      const errorDetails = (await metadataResponse
        .json()
        .catch(() => ({}))) as GoogleApiErrorResponse
      logger.error(`[${requestId}] Failed to get file metadata`, {
        status: metadataResponse.status,
        error: errorDetails,
      })
      return NextResponse.json(
        { success: false, error: errorDetails.error?.message || 'Failed to get file metadata' },
        { status: 400 }
      )
    }

    const metadata = (await metadataResponse.json()) as GoogleDriveFile
    const fileMimeType = metadata.mimeType

    let fileBuffer: Buffer
    let finalMimeType = fileMimeType

    if (GOOGLE_WORKSPACE_MIME_TYPES.includes(fileMimeType)) {
      // Step 2a: Google Workspace files cannot be downloaded raw — they must
      // be exported to a concrete format (caller's choice, else a default).
      const exportFormat = exportMimeType || DEFAULT_EXPORT_FORMATS[fileMimeType] || 'text/plain'
      finalMimeType = exportFormat

      logger.info(`[${requestId}] Exporting Google Workspace file`, {
        fileId,
        mimeType: fileMimeType,
        exportFormat,
      })

      const exportUrl = `https://www.googleapis.com/drive/v3/files/${fileId}/export?mimeType=${encodeURIComponent(exportFormat)}&supportsAllDrives=true`
      const exportUrlValidation = await validateUrlWithDNS(exportUrl, 'exportUrl')
      if (!exportUrlValidation.isValid) {
        return NextResponse.json(
          { success: false, error: exportUrlValidation.error },
          { status: 400 }
        )
      }

      const exportResponse = await secureFetchWithPinnedIP(
        exportUrl,
        exportUrlValidation.resolvedIP!,
        { headers: { Authorization: authHeader } }
      )

      if (!exportResponse.ok) {
        const exportError = (await exportResponse
          .json()
          .catch(() => ({}))) as GoogleApiErrorResponse
        logger.error(`[${requestId}] Failed to export file`, {
          status: exportResponse.status,
          error: exportError,
        })
        return NextResponse.json(
          {
            success: false,
            error: exportError.error?.message || 'Failed to export Google Workspace file',
          },
          { status: 400 }
        )
      }

      const arrayBuffer = await exportResponse.arrayBuffer()
      fileBuffer = Buffer.from(arrayBuffer)
    } else {
      // Step 2b: regular binary/file content via alt=media.
      logger.info(`[${requestId}] Downloading regular file`, { fileId, mimeType: fileMimeType })

      const downloadUrl = `https://www.googleapis.com/drive/v3/files/${fileId}?alt=media&supportsAllDrives=true`
      const downloadUrlValidation = await validateUrlWithDNS(downloadUrl, 'downloadUrl')
      if (!downloadUrlValidation.isValid) {
        return NextResponse.json(
          { success: false, error: downloadUrlValidation.error },
          { status: 400 }
        )
      }

      const downloadResponse = await secureFetchWithPinnedIP(
        downloadUrl,
        downloadUrlValidation.resolvedIP!,
        { headers: { Authorization: authHeader } }
      )

      if (!downloadResponse.ok) {
        const downloadError = (await downloadResponse
          .json()
          .catch(() => ({}))) as GoogleApiErrorResponse
        logger.error(`[${requestId}] Failed to download file`, {
          status: downloadResponse.status,
          error: downloadError,
        })
        return NextResponse.json(
          { success: false, error: downloadError.error?.message || 'Failed to download file' },
          { status: 400 }
        )
      }

      const arrayBuffer = await downloadResponse.arrayBuffer()
      fileBuffer = Buffer.from(arrayBuffer)
    }

    // Step 3 (best-effort): attach revision history when the caller asked for
    // it AND the token's capabilities allow reading revisions. Failures here
    // are logged and ignored — the download itself already succeeded.
    const canReadRevisions = metadata.capabilities?.canReadRevisions === true
    if (includeRevisions && canReadRevisions) {
      try {
        const revisionsUrl = `https://www.googleapis.com/drive/v3/files/${fileId}/revisions?fields=revisions(${ALL_REVISION_FIELDS})&pageSize=100`
        const revisionsUrlValidation = await validateUrlWithDNS(revisionsUrl, 'revisionsUrl')
        if (revisionsUrlValidation.isValid) {
          const revisionsResponse = await secureFetchWithPinnedIP(
            revisionsUrl,
            revisionsUrlValidation.resolvedIP!,
            { headers: { Authorization: authHeader } }
          )

          if (revisionsResponse.ok) {
            const revisionsData = (await revisionsResponse.json()) as GoogleDriveRevisionsResponse
            metadata.revisions = revisionsData.revisions
            logger.info(`[${requestId}] Fetched file revisions`, {
              fileId,
              revisionCount: metadata.revisions?.length || 0,
            })
          }
        }
      } catch (error) {
        logger.warn(`[${requestId}] Error fetching revisions, continuing without them`, { error })
      }
    }

    const resolvedName = fileName || metadata.name || 'download'

    logger.info(`[${requestId}] File downloaded successfully`, {
      fileId,
      name: resolvedName,
      size: fileBuffer.length,
      mimeType: finalMimeType,
    })

    const base64Data = fileBuffer.toString('base64')

    return NextResponse.json({
      success: true,
      output: {
        file: {
          name: resolvedName,
          mimeType: finalMimeType,
          data: base64Data,
          size: fileBuffer.length,
        },
        metadata,
      },
    })
  } catch (error) {
    logger.error(`[${requestId}] Error downloading Google Drive file:`, error)
    return NextResponse.json(
      {
        success: false,
        error: error instanceof Error ? error.message : 'Unknown error occurred',
      },
      { status: 500 }
    )
  }
}
|
||||||
@@ -3,6 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
|
|||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
|
import { RawFileInputSchema } from '@/lib/uploads/utils/file-schemas'
|
||||||
import { processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
|
import { processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
|
||||||
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
||||||
import {
|
import {
|
||||||
@@ -20,7 +21,7 @@ const GOOGLE_DRIVE_API_BASE = 'https://www.googleapis.com/upload/drive/v3/files'
|
|||||||
const GoogleDriveUploadSchema = z.object({
|
const GoogleDriveUploadSchema = z.object({
|
||||||
accessToken: z.string().min(1, 'Access token is required'),
|
accessToken: z.string().min(1, 'Access token is required'),
|
||||||
fileName: z.string().min(1, 'File name is required'),
|
fileName: z.string().min(1, 'File name is required'),
|
||||||
file: z.any().optional().nullable(),
|
file: RawFileInputSchema.optional().nullable(),
|
||||||
mimeType: z.string().optional().nullable(),
|
mimeType: z.string().optional().nullable(),
|
||||||
folderId: z.string().optional().nullable(),
|
folderId: z.string().optional().nullable(),
|
||||||
})
|
})
|
||||||
|
|||||||
@@ -0,0 +1,131 @@
|
|||||||
|
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import {
  secureFetchWithPinnedIP,
  validateUrlWithDNS,
} from '@/lib/core/security/input-validation.server'
import { generateRequestId } from '@/lib/core/utils/request'
import { enhanceGoogleVaultError } from '@/tools/google_vault/utils'

export const dynamic = 'force-dynamic'

const logger = createLogger('GoogleVaultDownloadExportFileAPI')

// Request body contract. `fileName` overrides filename resolution below.
const GoogleVaultDownloadExportFileSchema = z.object({
  accessToken: z.string().min(1, 'Access token is required'),
  bucketName: z.string().min(1, 'Bucket name is required'),
  objectName: z.string().min(1, 'Object name is required'),
  fileName: z.string().optional().nullable(),
})

/**
 * Downloads a Google Vault export file from its Google Cloud Storage bucket
 * and returns it base64-encoded.
 *
 * The filename is resolved in priority order: caller-supplied `fileName`,
 * then the Content-Disposition header (RFC 5987 `filename*=` form first,
 * then plain `filename="…"`), then the last path segment of `objectName`,
 * then a fixed fallback. Errors are passed through `enhanceGoogleVaultError`
 * to produce Vault-specific messages. The download URL is DNS-validated and
 * fetched with a pinned IP (SSRF defense).
 *
 * Responses: 200 with `{ file }`, 400 on validation/download failure,
 * 401 on auth failure, 500 on unexpected errors.
 */
export async function POST(request: NextRequest) {
  const requestId = generateRequestId()

  try {
    const authResult = await checkInternalAuth(request, { requireWorkflowId: false })

    if (!authResult.success) {
      logger.warn(`[${requestId}] Unauthorized Google Vault download attempt: ${authResult.error}`)
      return NextResponse.json(
        {
          success: false,
          error: authResult.error || 'Authentication required',
        },
        { status: 401 }
      )
    }

    const body = await request.json()
    const validatedData = GoogleVaultDownloadExportFileSchema.parse(body)

    const { accessToken, bucketName, objectName, fileName } = validatedData

    // GCS JSON API object download; bucket and object names must be
    // percent-encoded (object names routinely contain '/').
    const bucket = encodeURIComponent(bucketName)
    const object = encodeURIComponent(objectName)
    const downloadUrl = `https://storage.googleapis.com/storage/v1/b/${bucket}/o/${object}?alt=media`

    logger.info(`[${requestId}] Downloading file from Google Vault`, { bucketName, objectName })

    const urlValidation = await validateUrlWithDNS(downloadUrl, 'downloadUrl')
    if (!urlValidation.isValid) {
      return NextResponse.json(
        { success: false, error: enhanceGoogleVaultError(urlValidation.error || 'Invalid URL') },
        { status: 400 }
      )
    }

    const downloadResponse = await secureFetchWithPinnedIP(downloadUrl, urlValidation.resolvedIP!, {
      method: 'GET',
      headers: {
        Authorization: `Bearer ${accessToken}`,
      },
    })

    if (!downloadResponse.ok) {
      // Body may be empty or non-text on failure; best-effort read.
      const errorText = await downloadResponse.text().catch(() => '')
      const errorMessage = `Failed to download file: ${errorText || downloadResponse.statusText}`
      logger.error(`[${requestId}] Failed to download Vault export file`, {
        status: downloadResponse.status,
        error: errorText,
      })
      return NextResponse.json(
        { success: false, error: enhanceGoogleVaultError(errorMessage) },
        { status: 400 }
      )
    }

    const contentType = downloadResponse.headers.get('content-type') || 'application/octet-stream'
    const disposition = downloadResponse.headers.get('content-disposition') || ''
    // Group 1: RFC 5987 extended form (percent-encoded); group 2: quoted form.
    const match = disposition.match(/filename\*=UTF-8''([^;]+)|filename="([^"]+)"/)

    // Filename resolution: explicit fileName > header > object path > default.
    let resolvedName = fileName
    if (!resolvedName) {
      if (match?.[1]) {
        try {
          resolvedName = decodeURIComponent(match[1])
        } catch {
          // Malformed percent-encoding — keep the raw header value.
          resolvedName = match[1]
        }
      } else if (match?.[2]) {
        resolvedName = match[2]
      } else if (objectName) {
        const parts = objectName.split('/')
        resolvedName = parts[parts.length - 1] || 'vault-export.bin'
      } else {
        resolvedName = 'vault-export.bin'
      }
    }

    const arrayBuffer = await downloadResponse.arrayBuffer()
    const buffer = Buffer.from(arrayBuffer)

    logger.info(`[${requestId}] Vault export file downloaded successfully`, {
      name: resolvedName,
      size: buffer.length,
      mimeType: contentType,
    })

    return NextResponse.json({
      success: true,
      output: {
        file: {
          name: resolvedName,
          mimeType: contentType,
          data: buffer.toString('base64'),
          size: buffer.length,
        },
      },
    })
  } catch (error) {
    logger.error(`[${requestId}] Error downloading Google Vault export file:`, error)
    return NextResponse.json(
      {
        success: false,
        error: error instanceof Error ? error.message : 'Unknown error occurred',
      },
      { status: 500 }
    )
  }
}
|
||||||
@@ -1,7 +1,10 @@
|
|||||||
import { createLogger } from '@sim/logger'
|
import { createLogger } from '@sim/logger'
|
||||||
import { type NextRequest, NextResponse } from 'next/server'
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
import { validateImageUrl } from '@/lib/core/security/input-validation'
|
import {
|
||||||
|
secureFetchWithPinnedIP,
|
||||||
|
validateUrlWithDNS,
|
||||||
|
} from '@/lib/core/security/input-validation.server'
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
|
|
||||||
const logger = createLogger('ImageProxyAPI')
|
const logger = createLogger('ImageProxyAPI')
|
||||||
@@ -26,7 +29,7 @@ export async function GET(request: NextRequest) {
|
|||||||
return new NextResponse('Missing URL parameter', { status: 400 })
|
return new NextResponse('Missing URL parameter', { status: 400 })
|
||||||
}
|
}
|
||||||
|
|
||||||
const urlValidation = validateImageUrl(imageUrl)
|
const urlValidation = await validateUrlWithDNS(imageUrl, 'imageUrl')
|
||||||
if (!urlValidation.isValid) {
|
if (!urlValidation.isValid) {
|
||||||
logger.warn(`[${requestId}] Blocked image proxy request`, {
|
logger.warn(`[${requestId}] Blocked image proxy request`, {
|
||||||
url: imageUrl.substring(0, 100),
|
url: imageUrl.substring(0, 100),
|
||||||
@@ -38,7 +41,8 @@ export async function GET(request: NextRequest) {
|
|||||||
logger.info(`[${requestId}] Proxying image request for: ${imageUrl}`)
|
logger.info(`[${requestId}] Proxying image request for: ${imageUrl}`)
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const imageResponse = await fetch(imageUrl, {
|
const imageResponse = await secureFetchWithPinnedIP(imageUrl, urlValidation.resolvedIP!, {
|
||||||
|
method: 'GET',
|
||||||
headers: {
|
headers: {
|
||||||
'User-Agent':
|
'User-Agent':
|
||||||
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.36',
|
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.36',
|
||||||
@@ -64,14 +68,14 @@ export async function GET(request: NextRequest) {
|
|||||||
|
|
||||||
const contentType = imageResponse.headers.get('content-type') || 'image/jpeg'
|
const contentType = imageResponse.headers.get('content-type') || 'image/jpeg'
|
||||||
|
|
||||||
const imageBlob = await imageResponse.blob()
|
const imageArrayBuffer = await imageResponse.arrayBuffer()
|
||||||
|
|
||||||
if (imageBlob.size === 0) {
|
if (imageArrayBuffer.byteLength === 0) {
|
||||||
logger.error(`[${requestId}] Empty image blob received`)
|
logger.error(`[${requestId}] Empty image received`)
|
||||||
return new NextResponse('Empty image received', { status: 404 })
|
return new NextResponse('Empty image received', { status: 404 })
|
||||||
}
|
}
|
||||||
|
|
||||||
return new NextResponse(imageBlob, {
|
return new NextResponse(imageArrayBuffer, {
|
||||||
headers: {
|
headers: {
|
||||||
'Content-Type': contentType,
|
'Content-Type': contentType,
|
||||||
'Access-Control-Allow-Origin': '*',
|
'Access-Control-Allow-Origin': '*',
|
||||||
|
|||||||
121
apps/sim/app/api/tools/jira/add-attachment/route.ts
Normal file
121
apps/sim/app/api/tools/jira/add-attachment/route.ts
Normal file
@@ -0,0 +1,121 @@
|
|||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
|
import { z } from 'zod'
|
||||||
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
|
import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
|
||||||
|
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
|
||||||
|
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
||||||
|
import { getJiraCloudId } from '@/tools/jira/utils'
|
||||||
|
|
||||||
|
const logger = createLogger('JiraAddAttachmentAPI')
|
||||||
|
|
||||||
|
export const dynamic = 'force-dynamic'
|
||||||
|
|
||||||
|
const JiraAddAttachmentSchema = z.object({
|
||||||
|
accessToken: z.string().min(1, 'Access token is required'),
|
||||||
|
domain: z.string().min(1, 'Domain is required'),
|
||||||
|
issueKey: z.string().min(1, 'Issue key is required'),
|
||||||
|
files: RawFileInputArraySchema,
|
||||||
|
cloudId: z.string().optional().nullable(),
|
||||||
|
})
|
||||||
|
|
||||||
|
export async function POST(request: NextRequest) {
|
||||||
|
const requestId = `jira-attach-${Date.now()}`
|
||||||
|
|
||||||
|
try {
|
||||||
|
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
|
||||||
|
if (!authResult.success) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{ success: false, error: authResult.error || 'Unauthorized' },
|
||||||
|
{ status: 401 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const body = await request.json()
|
||||||
|
const validatedData = JiraAddAttachmentSchema.parse(body)
|
||||||
|
|
||||||
|
const userFiles = processFilesToUserFiles(validatedData.files, requestId, logger)
|
||||||
|
if (userFiles.length === 0) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{ success: false, error: 'No valid files provided for upload' },
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const cloudId =
|
||||||
|
validatedData.cloudId ||
|
||||||
|
(await getJiraCloudId(validatedData.domain, validatedData.accessToken))
|
||||||
|
|
||||||
|
const formData = new FormData()
|
||||||
|
const filesOutput: Array<{ name: string; mimeType: string; data: string; size: number }> = []
|
||||||
|
|
||||||
|
for (const file of userFiles) {
|
||||||
|
const buffer = await downloadFileFromStorage(file, requestId, logger)
|
||||||
|
filesOutput.push({
|
||||||
|
name: file.name,
|
||||||
|
mimeType: file.type || 'application/octet-stream',
|
||||||
|
data: buffer.toString('base64'),
|
||||||
|
size: buffer.length,
|
||||||
|
})
|
||||||
|
const blob = new Blob([new Uint8Array(buffer)], {
|
||||||
|
type: file.type || 'application/octet-stream',
|
||||||
|
})
|
||||||
|
formData.append('file', blob, file.name)
|
||||||
|
}
|
||||||
|
|
||||||
|
const url = `https://api.atlassian.com/ex/jira/${cloudId}/rest/api/3/issue/${validatedData.issueKey}/attachments`
|
||||||
|
|
||||||
|
const response = await fetch(url, {
|
||||||
|
method: 'POST',
|
||||||
|
headers: {
|
||||||
|
Authorization: `Bearer ${validatedData.accessToken}`,
|
||||||
|
'X-Atlassian-Token': 'no-check',
|
||||||
|
},
|
||||||
|
body: formData,
|
||||||
|
})
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
const errorText = await response.text()
|
||||||
|
logger.error(`[${requestId}] Jira attachment upload failed`, {
|
||||||
|
status: response.status,
|
||||||
|
statusText: response.statusText,
|
||||||
|
error: errorText,
|
||||||
|
})
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: `Failed to upload attachments: ${response.statusText}`,
|
||||||
|
},
|
||||||
|
{ status: response.status }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const attachments = await response.json()
|
||||||
|
const attachmentIds = Array.isArray(attachments)
|
||||||
|
? attachments.map((attachment) => attachment.id).filter(Boolean)
|
||||||
|
: []
|
||||||
|
|
||||||
|
return NextResponse.json({
|
||||||
|
success: true,
|
||||||
|
output: {
|
||||||
|
ts: new Date().toISOString(),
|
||||||
|
issueKey: validatedData.issueKey,
|
||||||
|
attachmentIds,
|
||||||
|
files: filesOutput,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
} catch (error) {
|
||||||
|
if (error instanceof z.ZodError) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{ success: false, error: 'Invalid request data', details: error.errors },
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.error(`[${requestId}] Jira attachment upload error`, error)
|
||||||
|
return NextResponse.json(
|
||||||
|
{ success: false, error: error instanceof Error ? error.message : 'Internal server error' },
|
||||||
|
{ status: 500 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -2,9 +2,11 @@ import { createLogger } from '@sim/logger'
|
|||||||
import { type NextRequest, NextResponse } from 'next/server'
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
|
import { secureFetchWithValidation } from '@/lib/core/security/input-validation.server'
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
|
import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
|
||||||
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
import { uploadFilesForTeamsMessage } from '@/tools/microsoft_teams/server-utils'
|
||||||
|
import type { GraphApiErrorResponse, GraphChatMessage } from '@/tools/microsoft_teams/types'
|
||||||
import { resolveMentionsForChannel, type TeamsMention } from '@/tools/microsoft_teams/utils'
|
import { resolveMentionsForChannel, type TeamsMention } from '@/tools/microsoft_teams/utils'
|
||||||
|
|
||||||
export const dynamic = 'force-dynamic'
|
export const dynamic = 'force-dynamic'
|
||||||
@@ -16,7 +18,7 @@ const TeamsWriteChannelSchema = z.object({
|
|||||||
teamId: z.string().min(1, 'Team ID is required'),
|
teamId: z.string().min(1, 'Team ID is required'),
|
||||||
channelId: z.string().min(1, 'Channel ID is required'),
|
channelId: z.string().min(1, 'Channel ID is required'),
|
||||||
content: z.string().min(1, 'Message content is required'),
|
content: z.string().min(1, 'Message content is required'),
|
||||||
files: z.array(z.any()).optional().nullable(),
|
files: RawFileInputArraySchema.optional().nullable(),
|
||||||
})
|
})
|
||||||
|
|
||||||
export async function POST(request: NextRequest) {
|
export async function POST(request: NextRequest) {
|
||||||
@@ -53,93 +55,12 @@ export async function POST(request: NextRequest) {
|
|||||||
fileCount: validatedData.files?.length || 0,
|
fileCount: validatedData.files?.length || 0,
|
||||||
})
|
})
|
||||||
|
|
||||||
const attachments: any[] = []
|
const { attachments, filesOutput } = await uploadFilesForTeamsMessage({
|
||||||
if (validatedData.files && validatedData.files.length > 0) {
|
rawFiles: validatedData.files || [],
|
||||||
const rawFiles = validatedData.files
|
accessToken: validatedData.accessToken,
|
||||||
logger.info(`[${requestId}] Processing ${rawFiles.length} file(s) for upload to OneDrive`)
|
requestId,
|
||||||
|
logger,
|
||||||
const userFiles = processFilesToUserFiles(rawFiles, requestId, logger)
|
})
|
||||||
|
|
||||||
for (const file of userFiles) {
|
|
||||||
try {
|
|
||||||
logger.info(`[${requestId}] Uploading file to Teams: ${file.name} (${file.size} bytes)`)
|
|
||||||
|
|
||||||
const buffer = await downloadFileFromStorage(file, requestId, logger)
|
|
||||||
|
|
||||||
const uploadUrl =
|
|
||||||
'https://graph.microsoft.com/v1.0/me/drive/root:/TeamsAttachments/' +
|
|
||||||
encodeURIComponent(file.name) +
|
|
||||||
':/content'
|
|
||||||
|
|
||||||
logger.info(`[${requestId}] Uploading to Teams: ${uploadUrl}`)
|
|
||||||
|
|
||||||
const uploadResponse = await fetch(uploadUrl, {
|
|
||||||
method: 'PUT',
|
|
||||||
headers: {
|
|
||||||
Authorization: `Bearer ${validatedData.accessToken}`,
|
|
||||||
'Content-Type': file.type || 'application/octet-stream',
|
|
||||||
},
|
|
||||||
body: new Uint8Array(buffer),
|
|
||||||
})
|
|
||||||
|
|
||||||
if (!uploadResponse.ok) {
|
|
||||||
const errorData = await uploadResponse.json().catch(() => ({}))
|
|
||||||
logger.error(`[${requestId}] Teams upload failed:`, errorData)
|
|
||||||
throw new Error(
|
|
||||||
`Failed to upload file to Teams: ${errorData.error?.message || 'Unknown error'}`
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
const uploadedFile = await uploadResponse.json()
|
|
||||||
logger.info(`[${requestId}] File uploaded to Teams successfully`, {
|
|
||||||
id: uploadedFile.id,
|
|
||||||
webUrl: uploadedFile.webUrl,
|
|
||||||
})
|
|
||||||
|
|
||||||
const fileDetailsUrl = `https://graph.microsoft.com/v1.0/me/drive/items/${uploadedFile.id}?$select=id,name,webDavUrl,eTag,size`
|
|
||||||
|
|
||||||
const fileDetailsResponse = await fetch(fileDetailsUrl, {
|
|
||||||
headers: {
|
|
||||||
Authorization: `Bearer ${validatedData.accessToken}`,
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
if (!fileDetailsResponse.ok) {
|
|
||||||
const errorData = await fileDetailsResponse.json().catch(() => ({}))
|
|
||||||
logger.error(`[${requestId}] Failed to get file details:`, errorData)
|
|
||||||
throw new Error(
|
|
||||||
`Failed to get file details: ${errorData.error?.message || 'Unknown error'}`
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
const fileDetails = await fileDetailsResponse.json()
|
|
||||||
logger.info(`[${requestId}] Got file details`, {
|
|
||||||
webDavUrl: fileDetails.webDavUrl,
|
|
||||||
eTag: fileDetails.eTag,
|
|
||||||
})
|
|
||||||
|
|
||||||
const attachmentId = fileDetails.eTag?.match(/\{([a-f0-9-]+)\}/i)?.[1] || fileDetails.id
|
|
||||||
|
|
||||||
attachments.push({
|
|
||||||
id: attachmentId,
|
|
||||||
contentType: 'reference',
|
|
||||||
contentUrl: fileDetails.webDavUrl,
|
|
||||||
name: file.name,
|
|
||||||
})
|
|
||||||
|
|
||||||
logger.info(`[${requestId}] Created attachment reference for ${file.name}`)
|
|
||||||
} catch (error) {
|
|
||||||
logger.error(`[${requestId}] Failed to process file ${file.name}:`, error)
|
|
||||||
throw new Error(
|
|
||||||
`Failed to process file "${file.name}": ${error instanceof Error ? error.message : 'Unknown error'}`
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
logger.info(
|
|
||||||
`[${requestId}] All ${attachments.length} file(s) uploaded and attachment references created`
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
let messageContent = validatedData.content
|
let messageContent = validatedData.content
|
||||||
let contentType: 'text' | 'html' = 'text'
|
let contentType: 'text' | 'html' = 'text'
|
||||||
@@ -197,17 +118,21 @@ export async function POST(request: NextRequest) {
|
|||||||
|
|
||||||
const teamsUrl = `https://graph.microsoft.com/v1.0/teams/${encodeURIComponent(validatedData.teamId)}/channels/${encodeURIComponent(validatedData.channelId)}/messages`
|
const teamsUrl = `https://graph.microsoft.com/v1.0/teams/${encodeURIComponent(validatedData.teamId)}/channels/${encodeURIComponent(validatedData.channelId)}/messages`
|
||||||
|
|
||||||
const teamsResponse = await fetch(teamsUrl, {
|
const teamsResponse = await secureFetchWithValidation(
|
||||||
method: 'POST',
|
teamsUrl,
|
||||||
headers: {
|
{
|
||||||
'Content-Type': 'application/json',
|
method: 'POST',
|
||||||
Authorization: `Bearer ${validatedData.accessToken}`,
|
headers: {
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
Authorization: `Bearer ${validatedData.accessToken}`,
|
||||||
|
},
|
||||||
|
body: JSON.stringify(messageBody),
|
||||||
},
|
},
|
||||||
body: JSON.stringify(messageBody),
|
'teamsUrl'
|
||||||
})
|
)
|
||||||
|
|
||||||
if (!teamsResponse.ok) {
|
if (!teamsResponse.ok) {
|
||||||
const errorData = await teamsResponse.json().catch(() => ({}))
|
const errorData = (await teamsResponse.json().catch(() => ({}))) as GraphApiErrorResponse
|
||||||
logger.error(`[${requestId}] Microsoft Teams API error:`, errorData)
|
logger.error(`[${requestId}] Microsoft Teams API error:`, errorData)
|
||||||
return NextResponse.json(
|
return NextResponse.json(
|
||||||
{
|
{
|
||||||
@@ -218,7 +143,7 @@ export async function POST(request: NextRequest) {
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
const responseData = await teamsResponse.json()
|
const responseData = (await teamsResponse.json()) as GraphChatMessage
|
||||||
logger.info(`[${requestId}] Teams channel message sent successfully`, {
|
logger.info(`[${requestId}] Teams channel message sent successfully`, {
|
||||||
messageId: responseData.id,
|
messageId: responseData.id,
|
||||||
attachmentCount: attachments.length,
|
attachmentCount: attachments.length,
|
||||||
@@ -237,6 +162,7 @@ export async function POST(request: NextRequest) {
|
|||||||
url: responseData.webUrl || '',
|
url: responseData.webUrl || '',
|
||||||
attachmentCount: attachments.length,
|
attachmentCount: attachments.length,
|
||||||
},
|
},
|
||||||
|
files: filesOutput,
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
|
|||||||
@@ -2,9 +2,11 @@ import { createLogger } from '@sim/logger'
|
|||||||
import { type NextRequest, NextResponse } from 'next/server'
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
|
import { secureFetchWithValidation } from '@/lib/core/security/input-validation.server'
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
|
import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
|
||||||
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
import { uploadFilesForTeamsMessage } from '@/tools/microsoft_teams/server-utils'
|
||||||
|
import type { GraphApiErrorResponse, GraphChatMessage } from '@/tools/microsoft_teams/types'
|
||||||
import { resolveMentionsForChat, type TeamsMention } from '@/tools/microsoft_teams/utils'
|
import { resolveMentionsForChat, type TeamsMention } from '@/tools/microsoft_teams/utils'
|
||||||
|
|
||||||
export const dynamic = 'force-dynamic'
|
export const dynamic = 'force-dynamic'
|
||||||
@@ -15,7 +17,7 @@ const TeamsWriteChatSchema = z.object({
|
|||||||
accessToken: z.string().min(1, 'Access token is required'),
|
accessToken: z.string().min(1, 'Access token is required'),
|
||||||
chatId: z.string().min(1, 'Chat ID is required'),
|
chatId: z.string().min(1, 'Chat ID is required'),
|
||||||
content: z.string().min(1, 'Message content is required'),
|
content: z.string().min(1, 'Message content is required'),
|
||||||
files: z.array(z.any()).optional().nullable(),
|
files: RawFileInputArraySchema.optional().nullable(),
|
||||||
})
|
})
|
||||||
|
|
||||||
export async function POST(request: NextRequest) {
|
export async function POST(request: NextRequest) {
|
||||||
@@ -51,93 +53,12 @@ export async function POST(request: NextRequest) {
|
|||||||
fileCount: validatedData.files?.length || 0,
|
fileCount: validatedData.files?.length || 0,
|
||||||
})
|
})
|
||||||
|
|
||||||
const attachments: any[] = []
|
const { attachments, filesOutput } = await uploadFilesForTeamsMessage({
|
||||||
if (validatedData.files && validatedData.files.length > 0) {
|
rawFiles: validatedData.files || [],
|
||||||
const rawFiles = validatedData.files
|
accessToken: validatedData.accessToken,
|
||||||
logger.info(`[${requestId}] Processing ${rawFiles.length} file(s) for upload to Teams`)
|
requestId,
|
||||||
|
logger,
|
||||||
const userFiles = processFilesToUserFiles(rawFiles, requestId, logger)
|
})
|
||||||
|
|
||||||
for (const file of userFiles) {
|
|
||||||
try {
|
|
||||||
logger.info(`[${requestId}] Uploading file to Teams: ${file.name} (${file.size} bytes)`)
|
|
||||||
|
|
||||||
const buffer = await downloadFileFromStorage(file, requestId, logger)
|
|
||||||
|
|
||||||
const uploadUrl =
|
|
||||||
'https://graph.microsoft.com/v1.0/me/drive/root:/TeamsAttachments/' +
|
|
||||||
encodeURIComponent(file.name) +
|
|
||||||
':/content'
|
|
||||||
|
|
||||||
logger.info(`[${requestId}] Uploading to Teams: ${uploadUrl}`)
|
|
||||||
|
|
||||||
const uploadResponse = await fetch(uploadUrl, {
|
|
||||||
method: 'PUT',
|
|
||||||
headers: {
|
|
||||||
Authorization: `Bearer ${validatedData.accessToken}`,
|
|
||||||
'Content-Type': file.type || 'application/octet-stream',
|
|
||||||
},
|
|
||||||
body: new Uint8Array(buffer),
|
|
||||||
})
|
|
||||||
|
|
||||||
if (!uploadResponse.ok) {
|
|
||||||
const errorData = await uploadResponse.json().catch(() => ({}))
|
|
||||||
logger.error(`[${requestId}] Teams upload failed:`, errorData)
|
|
||||||
throw new Error(
|
|
||||||
`Failed to upload file to Teams: ${errorData.error?.message || 'Unknown error'}`
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
const uploadedFile = await uploadResponse.json()
|
|
||||||
logger.info(`[${requestId}] File uploaded to Teams successfully`, {
|
|
||||||
id: uploadedFile.id,
|
|
||||||
webUrl: uploadedFile.webUrl,
|
|
||||||
})
|
|
||||||
|
|
||||||
const fileDetailsUrl = `https://graph.microsoft.com/v1.0/me/drive/items/${uploadedFile.id}?$select=id,name,webDavUrl,eTag,size`
|
|
||||||
|
|
||||||
const fileDetailsResponse = await fetch(fileDetailsUrl, {
|
|
||||||
headers: {
|
|
||||||
Authorization: `Bearer ${validatedData.accessToken}`,
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
if (!fileDetailsResponse.ok) {
|
|
||||||
const errorData = await fileDetailsResponse.json().catch(() => ({}))
|
|
||||||
logger.error(`[${requestId}] Failed to get file details:`, errorData)
|
|
||||||
throw new Error(
|
|
||||||
`Failed to get file details: ${errorData.error?.message || 'Unknown error'}`
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
const fileDetails = await fileDetailsResponse.json()
|
|
||||||
logger.info(`[${requestId}] Got file details`, {
|
|
||||||
webDavUrl: fileDetails.webDavUrl,
|
|
||||||
eTag: fileDetails.eTag,
|
|
||||||
})
|
|
||||||
|
|
||||||
const attachmentId = fileDetails.eTag?.match(/\{([a-f0-9-]+)\}/i)?.[1] || fileDetails.id
|
|
||||||
|
|
||||||
attachments.push({
|
|
||||||
id: attachmentId,
|
|
||||||
contentType: 'reference',
|
|
||||||
contentUrl: fileDetails.webDavUrl,
|
|
||||||
name: file.name,
|
|
||||||
})
|
|
||||||
|
|
||||||
logger.info(`[${requestId}] Created attachment reference for ${file.name}`)
|
|
||||||
} catch (error) {
|
|
||||||
logger.error(`[${requestId}] Failed to process file ${file.name}:`, error)
|
|
||||||
throw new Error(
|
|
||||||
`Failed to process file "${file.name}": ${error instanceof Error ? error.message : 'Unknown error'}`
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
logger.info(
|
|
||||||
`[${requestId}] All ${attachments.length} file(s) uploaded and attachment references created`
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
let messageContent = validatedData.content
|
let messageContent = validatedData.content
|
||||||
let contentType: 'text' | 'html' = 'text'
|
let contentType: 'text' | 'html' = 'text'
|
||||||
@@ -194,17 +115,21 @@ export async function POST(request: NextRequest) {
|
|||||||
|
|
||||||
const teamsUrl = `https://graph.microsoft.com/v1.0/chats/${encodeURIComponent(validatedData.chatId)}/messages`
|
const teamsUrl = `https://graph.microsoft.com/v1.0/chats/${encodeURIComponent(validatedData.chatId)}/messages`
|
||||||
|
|
||||||
const teamsResponse = await fetch(teamsUrl, {
|
const teamsResponse = await secureFetchWithValidation(
|
||||||
method: 'POST',
|
teamsUrl,
|
||||||
headers: {
|
{
|
||||||
'Content-Type': 'application/json',
|
method: 'POST',
|
||||||
Authorization: `Bearer ${validatedData.accessToken}`,
|
headers: {
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
Authorization: `Bearer ${validatedData.accessToken}`,
|
||||||
|
},
|
||||||
|
body: JSON.stringify(messageBody),
|
||||||
},
|
},
|
||||||
body: JSON.stringify(messageBody),
|
'teamsUrl'
|
||||||
})
|
)
|
||||||
|
|
||||||
if (!teamsResponse.ok) {
|
if (!teamsResponse.ok) {
|
||||||
const errorData = await teamsResponse.json().catch(() => ({}))
|
const errorData = (await teamsResponse.json().catch(() => ({}))) as GraphApiErrorResponse
|
||||||
logger.error(`[${requestId}] Microsoft Teams API error:`, errorData)
|
logger.error(`[${requestId}] Microsoft Teams API error:`, errorData)
|
||||||
return NextResponse.json(
|
return NextResponse.json(
|
||||||
{
|
{
|
||||||
@@ -215,7 +140,7 @@ export async function POST(request: NextRequest) {
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
const responseData = await teamsResponse.json()
|
const responseData = (await teamsResponse.json()) as GraphChatMessage
|
||||||
logger.info(`[${requestId}] Teams message sent successfully`, {
|
logger.info(`[${requestId}] Teams message sent successfully`, {
|
||||||
messageId: responseData.id,
|
messageId: responseData.id,
|
||||||
attachmentCount: attachments.length,
|
attachmentCount: attachments.length,
|
||||||
@@ -233,6 +158,7 @@ export async function POST(request: NextRequest) {
|
|||||||
url: responseData.webUrl || '',
|
url: responseData.webUrl || '',
|
||||||
attachmentCount: attachments.length,
|
attachmentCount: attachments.length,
|
||||||
},
|
},
|
||||||
|
files: filesOutput,
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
|
|||||||
@@ -2,15 +2,17 @@ import { createLogger } from '@sim/logger'
|
|||||||
import { type NextRequest, NextResponse } from 'next/server'
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
|
||||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
|
||||||
import { StorageService } from '@/lib/uploads'
|
|
||||||
import {
|
import {
|
||||||
extractStorageKey,
|
secureFetchWithPinnedIP,
|
||||||
inferContextFromKey,
|
validateUrlWithDNS,
|
||||||
isInternalFileUrl,
|
} from '@/lib/core/security/input-validation.server'
|
||||||
} from '@/lib/uploads/utils/file-utils'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
import { verifyFileAccess } from '@/app/api/files/authorization'
|
import { FileInputSchema } from '@/lib/uploads/utils/file-schemas'
|
||||||
|
import { isInternalFileUrl, processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
|
||||||
|
import {
|
||||||
|
downloadFileFromStorage,
|
||||||
|
resolveInternalFileUrl,
|
||||||
|
} from '@/lib/uploads/utils/file-utils.server'
|
||||||
|
|
||||||
export const dynamic = 'force-dynamic'
|
export const dynamic = 'force-dynamic'
|
||||||
|
|
||||||
@@ -18,7 +20,9 @@ const logger = createLogger('MistralParseAPI')
|
|||||||
|
|
||||||
const MistralParseSchema = z.object({
|
const MistralParseSchema = z.object({
|
||||||
apiKey: z.string().min(1, 'API key is required'),
|
apiKey: z.string().min(1, 'API key is required'),
|
||||||
filePath: z.string().min(1, 'File path is required'),
|
filePath: z.string().min(1, 'File path is required').optional(),
|
||||||
|
fileData: FileInputSchema.optional(),
|
||||||
|
file: FileInputSchema.optional(),
|
||||||
resultType: z.string().optional(),
|
resultType: z.string().optional(),
|
||||||
pages: z.array(z.number()).optional(),
|
pages: z.array(z.number()).optional(),
|
||||||
includeImageBase64: z.boolean().optional(),
|
includeImageBase64: z.boolean().optional(),
|
||||||
@@ -49,66 +53,130 @@ export async function POST(request: NextRequest) {
|
|||||||
const body = await request.json()
|
const body = await request.json()
|
||||||
const validatedData = MistralParseSchema.parse(body)
|
const validatedData = MistralParseSchema.parse(body)
|
||||||
|
|
||||||
|
const fileData = validatedData.file || validatedData.fileData
|
||||||
|
const filePath = typeof fileData === 'string' ? fileData : validatedData.filePath
|
||||||
|
|
||||||
|
if (!fileData && (!filePath || filePath.trim() === '')) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: 'File input is required',
|
||||||
|
},
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
logger.info(`[${requestId}] Mistral parse request`, {
|
logger.info(`[${requestId}] Mistral parse request`, {
|
||||||
filePath: validatedData.filePath,
|
hasFileData: Boolean(fileData),
|
||||||
isWorkspaceFile: isInternalFileUrl(validatedData.filePath),
|
filePath,
|
||||||
|
isWorkspaceFile: filePath ? isInternalFileUrl(filePath) : false,
|
||||||
userId,
|
userId,
|
||||||
})
|
})
|
||||||
|
|
||||||
let fileUrl = validatedData.filePath
|
const mistralBody: any = {
|
||||||
|
model: 'mistral-ocr-latest',
|
||||||
|
}
|
||||||
|
|
||||||
if (isInternalFileUrl(validatedData.filePath)) {
|
if (fileData && typeof fileData === 'object') {
|
||||||
|
const rawFile = fileData
|
||||||
|
let userFile
|
||||||
try {
|
try {
|
||||||
const storageKey = extractStorageKey(validatedData.filePath)
|
userFile = processSingleFileToUserFile(rawFile, requestId, logger)
|
||||||
|
|
||||||
const context = inferContextFromKey(storageKey)
|
|
||||||
|
|
||||||
const hasAccess = await verifyFileAccess(
|
|
||||||
storageKey,
|
|
||||||
userId,
|
|
||||||
undefined, // customConfig
|
|
||||||
context, // context
|
|
||||||
false // isLocal
|
|
||||||
)
|
|
||||||
|
|
||||||
if (!hasAccess) {
|
|
||||||
logger.warn(`[${requestId}] Unauthorized presigned URL generation attempt`, {
|
|
||||||
userId,
|
|
||||||
key: storageKey,
|
|
||||||
context,
|
|
||||||
})
|
|
||||||
return NextResponse.json(
|
|
||||||
{
|
|
||||||
success: false,
|
|
||||||
error: 'File not found',
|
|
||||||
},
|
|
||||||
{ status: 404 }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
fileUrl = await StorageService.generatePresignedDownloadUrl(storageKey, context, 5 * 60)
|
|
||||||
logger.info(`[${requestId}] Generated presigned URL for ${context} file`)
|
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error(`[${requestId}] Failed to generate presigned URL:`, error)
|
|
||||||
return NextResponse.json(
|
return NextResponse.json(
|
||||||
{
|
{
|
||||||
success: false,
|
success: false,
|
||||||
error: 'Failed to generate file access URL',
|
error: error instanceof Error ? error.message : 'Failed to process file',
|
||||||
},
|
},
|
||||||
{ status: 500 }
|
{ status: 400 }
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
} else if (validatedData.filePath?.startsWith('/')) {
|
|
||||||
const baseUrl = getBaseUrl()
|
|
||||||
fileUrl = `${baseUrl}${validatedData.filePath}`
|
|
||||||
}
|
|
||||||
|
|
||||||
const mistralBody: any = {
|
const mimeType = userFile.type || 'application/pdf'
|
||||||
model: 'mistral-ocr-latest',
|
let base64 = userFile.base64
|
||||||
document: {
|
if (!base64) {
|
||||||
type: 'document_url',
|
const buffer = await downloadFileFromStorage(userFile, requestId, logger)
|
||||||
document_url: fileUrl,
|
base64 = buffer.toString('base64')
|
||||||
},
|
}
|
||||||
|
const base64Payload = base64.startsWith('data:')
|
||||||
|
? base64
|
||||||
|
: `data:${mimeType};base64,${base64}`
|
||||||
|
|
||||||
|
// Mistral API uses different document types for images vs documents
|
||||||
|
const isImage = mimeType.startsWith('image/')
|
||||||
|
if (isImage) {
|
||||||
|
mistralBody.document = {
|
||||||
|
type: 'image_url',
|
||||||
|
image_url: base64Payload,
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
mistralBody.document = {
|
||||||
|
type: 'document_url',
|
||||||
|
document_url: base64Payload,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else if (filePath) {
|
||||||
|
let fileUrl = filePath
|
||||||
|
|
||||||
|
const isInternalFilePath = isInternalFileUrl(filePath)
|
||||||
|
if (isInternalFilePath) {
|
||||||
|
const resolution = await resolveInternalFileUrl(filePath, userId, requestId, logger)
|
||||||
|
if (resolution.error) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: resolution.error.message,
|
||||||
|
},
|
||||||
|
{ status: resolution.error.status }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
fileUrl = resolution.fileUrl || fileUrl
|
||||||
|
} else if (filePath.startsWith('/')) {
|
||||||
|
logger.warn(`[${requestId}] Invalid internal path`, {
|
||||||
|
userId,
|
||||||
|
path: filePath.substring(0, 50),
|
||||||
|
})
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: 'Invalid file path. Only uploaded files are supported for internal paths.',
|
||||||
|
},
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
const urlValidation = await validateUrlWithDNS(fileUrl, 'filePath')
|
||||||
|
if (!urlValidation.isValid) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: urlValidation.error,
|
||||||
|
},
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Detect image URLs by extension for proper Mistral API type
|
||||||
|
const lowerUrl = fileUrl.toLowerCase()
|
||||||
|
const isImageUrl =
|
||||||
|
lowerUrl.endsWith('.png') ||
|
||||||
|
lowerUrl.endsWith('.jpg') ||
|
||||||
|
lowerUrl.endsWith('.jpeg') ||
|
||||||
|
lowerUrl.endsWith('.gif') ||
|
||||||
|
lowerUrl.endsWith('.webp') ||
|
||||||
|
lowerUrl.endsWith('.avif')
|
||||||
|
|
||||||
|
if (isImageUrl) {
|
||||||
|
mistralBody.document = {
|
||||||
|
type: 'image_url',
|
||||||
|
image_url: fileUrl,
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
mistralBody.document = {
|
||||||
|
type: 'document_url',
|
||||||
|
document_url: fileUrl,
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if (validatedData.pages) {
|
if (validatedData.pages) {
|
||||||
@@ -124,15 +192,34 @@ export async function POST(request: NextRequest) {
|
|||||||
mistralBody.image_min_size = validatedData.imageMinSize
|
mistralBody.image_min_size = validatedData.imageMinSize
|
||||||
}
|
}
|
||||||
|
|
||||||
const mistralResponse = await fetch('https://api.mistral.ai/v1/ocr', {
|
const mistralEndpoint = 'https://api.mistral.ai/v1/ocr'
|
||||||
method: 'POST',
|
const mistralValidation = await validateUrlWithDNS(mistralEndpoint, 'Mistral API URL')
|
||||||
headers: {
|
if (!mistralValidation.isValid) {
|
||||||
'Content-Type': 'application/json',
|
logger.error(`[${requestId}] Mistral API URL validation failed`, {
|
||||||
Accept: 'application/json',
|
error: mistralValidation.error,
|
||||||
Authorization: `Bearer ${validatedData.apiKey}`,
|
})
|
||||||
},
|
return NextResponse.json(
|
||||||
body: JSON.stringify(mistralBody),
|
{
|
||||||
})
|
success: false,
|
||||||
|
error: 'Failed to reach Mistral API',
|
||||||
|
},
|
||||||
|
{ status: 502 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const mistralResponse = await secureFetchWithPinnedIP(
|
||||||
|
mistralEndpoint,
|
||||||
|
mistralValidation.resolvedIP!,
|
||||||
|
{
|
||||||
|
method: 'POST',
|
||||||
|
headers: {
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
Accept: 'application/json',
|
||||||
|
Authorization: `Bearer ${validatedData.apiKey}`,
|
||||||
|
},
|
||||||
|
body: JSON.stringify(mistralBody),
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
if (!mistralResponse.ok) {
|
if (!mistralResponse.ok) {
|
||||||
const errorText = await mistralResponse.text()
|
const errorText = await mistralResponse.text()
|
||||||
|
|||||||
177
apps/sim/app/api/tools/onedrive/download/route.ts
Normal file
177
apps/sim/app/api/tools/onedrive/download/route.ts
Normal file
@@ -0,0 +1,177 @@
|
|||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
|
import { z } from 'zod'
|
||||||
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
|
import {
|
||||||
|
secureFetchWithPinnedIP,
|
||||||
|
validateUrlWithDNS,
|
||||||
|
} from '@/lib/core/security/input-validation.server'
|
||||||
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
|
|
||||||
|
export const dynamic = 'force-dynamic'
|
||||||
|
|
||||||
|
/** Microsoft Graph API error response structure */
|
||||||
|
interface GraphApiError {
|
||||||
|
error?: {
|
||||||
|
code?: string
|
||||||
|
message?: string
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Microsoft Graph API drive item metadata response */
|
||||||
|
interface DriveItemMetadata {
|
||||||
|
id?: string
|
||||||
|
name?: string
|
||||||
|
folder?: Record<string, unknown>
|
||||||
|
file?: {
|
||||||
|
mimeType?: string
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const logger = createLogger('OneDriveDownloadAPI')
|
||||||
|
|
||||||
|
const OneDriveDownloadSchema = z.object({
|
||||||
|
accessToken: z.string().min(1, 'Access token is required'),
|
||||||
|
fileId: z.string().min(1, 'File ID is required'),
|
||||||
|
fileName: z.string().optional().nullable(),
|
||||||
|
})
|
||||||
|
|
||||||
|
export async function POST(request: NextRequest) {
|
||||||
|
const requestId = generateRequestId()
|
||||||
|
|
||||||
|
try {
|
||||||
|
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
|
||||||
|
|
||||||
|
if (!authResult.success) {
|
||||||
|
logger.warn(`[${requestId}] Unauthorized OneDrive download attempt: ${authResult.error}`)
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: authResult.error || 'Authentication required',
|
||||||
|
},
|
||||||
|
{ status: 401 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const body = await request.json()
|
||||||
|
const validatedData = OneDriveDownloadSchema.parse(body)
|
||||||
|
|
||||||
|
const { accessToken, fileId, fileName } = validatedData
|
||||||
|
const authHeader = `Bearer ${accessToken}`
|
||||||
|
|
||||||
|
logger.info(`[${requestId}] Getting file metadata from OneDrive`, { fileId })
|
||||||
|
|
||||||
|
const metadataUrl = `https://graph.microsoft.com/v1.0/me/drive/items/${fileId}`
|
||||||
|
const metadataUrlValidation = await validateUrlWithDNS(metadataUrl, 'metadataUrl')
|
||||||
|
if (!metadataUrlValidation.isValid) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{ success: false, error: metadataUrlValidation.error },
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const metadataResponse = await secureFetchWithPinnedIP(
|
||||||
|
metadataUrl,
|
||||||
|
metadataUrlValidation.resolvedIP!,
|
||||||
|
{
|
||||||
|
headers: { Authorization: authHeader },
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
if (!metadataResponse.ok) {
|
||||||
|
const errorDetails = (await metadataResponse.json().catch(() => ({}))) as GraphApiError
|
||||||
|
logger.error(`[${requestId}] Failed to get file metadata`, {
|
||||||
|
status: metadataResponse.status,
|
||||||
|
error: errorDetails,
|
||||||
|
})
|
||||||
|
return NextResponse.json(
|
||||||
|
{ success: false, error: errorDetails.error?.message || 'Failed to get file metadata' },
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const metadata = (await metadataResponse.json()) as DriveItemMetadata
|
||||||
|
|
||||||
|
if (metadata.folder && !metadata.file) {
|
||||||
|
logger.error(`[${requestId}] Attempted to download a folder`, {
|
||||||
|
itemId: metadata.id,
|
||||||
|
itemName: metadata.name,
|
||||||
|
})
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: `Cannot download folder "${metadata.name}". Please select a file instead.`,
|
||||||
|
},
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const mimeType = metadata.file?.mimeType || 'application/octet-stream'
|
||||||
|
|
||||||
|
logger.info(`[${requestId}] Downloading file from OneDrive`, { fileId, mimeType })
|
||||||
|
|
||||||
|
const downloadUrl = `https://graph.microsoft.com/v1.0/me/drive/items/${fileId}/content`
|
||||||
|
const downloadUrlValidation = await validateUrlWithDNS(downloadUrl, 'downloadUrl')
|
||||||
|
if (!downloadUrlValidation.isValid) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{ success: false, error: downloadUrlValidation.error },
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const downloadResponse = await secureFetchWithPinnedIP(
|
||||||
|
downloadUrl,
|
||||||
|
downloadUrlValidation.resolvedIP!,
|
||||||
|
{
|
||||||
|
headers: { Authorization: authHeader },
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
if (!downloadResponse.ok) {
|
||||||
|
const downloadError = (await downloadResponse.json().catch(() => ({}))) as GraphApiError
|
||||||
|
logger.error(`[${requestId}] Failed to download file`, {
|
||||||
|
status: downloadResponse.status,
|
||||||
|
error: downloadError,
|
||||||
|
})
|
||||||
|
return NextResponse.json(
|
||||||
|
{ success: false, error: downloadError.error?.message || 'Failed to download file' },
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const arrayBuffer = await downloadResponse.arrayBuffer()
|
||||||
|
const fileBuffer = Buffer.from(arrayBuffer)
|
||||||
|
|
||||||
|
const resolvedName = fileName || metadata.name || 'download'
|
||||||
|
|
||||||
|
logger.info(`[${requestId}] File downloaded successfully`, {
|
||||||
|
fileId,
|
||||||
|
name: resolvedName,
|
||||||
|
size: fileBuffer.length,
|
||||||
|
mimeType,
|
||||||
|
})
|
||||||
|
|
||||||
|
const base64Data = fileBuffer.toString('base64')
|
||||||
|
|
||||||
|
return NextResponse.json({
|
||||||
|
success: true,
|
||||||
|
output: {
|
||||||
|
file: {
|
||||||
|
name: resolvedName,
|
||||||
|
mimeType,
|
||||||
|
data: base64Data,
|
||||||
|
size: fileBuffer.length,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
})
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(`[${requestId}] Error downloading OneDrive file:`, error)
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: error instanceof Error ? error.message : 'Unknown error occurred',
|
||||||
|
},
|
||||||
|
{ status: 500 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -4,7 +4,9 @@ import * as XLSX from 'xlsx'
|
|||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
import { validateMicrosoftGraphId } from '@/lib/core/security/input-validation'
|
import { validateMicrosoftGraphId } from '@/lib/core/security/input-validation'
|
||||||
|
import { secureFetchWithValidation } from '@/lib/core/security/input-validation.server'
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
|
import { RawFileInputSchema } from '@/lib/uploads/utils/file-schemas'
|
||||||
import {
|
import {
|
||||||
getExtensionFromMimeType,
|
getExtensionFromMimeType,
|
||||||
processSingleFileToUserFile,
|
processSingleFileToUserFile,
|
||||||
@@ -29,12 +31,33 @@ const ExcelValuesSchema = z.union([
|
|||||||
const OneDriveUploadSchema = z.object({
|
const OneDriveUploadSchema = z.object({
|
||||||
accessToken: z.string().min(1, 'Access token is required'),
|
accessToken: z.string().min(1, 'Access token is required'),
|
||||||
fileName: z.string().min(1, 'File name is required'),
|
fileName: z.string().min(1, 'File name is required'),
|
||||||
file: z.any().optional(),
|
file: RawFileInputSchema.optional(),
|
||||||
folderId: z.string().optional().nullable(),
|
folderId: z.string().optional().nullable(),
|
||||||
mimeType: z.string().nullish(),
|
mimeType: z.string().nullish(),
|
||||||
values: ExcelValuesSchema.optional().nullable(),
|
values: ExcelValuesSchema.optional().nullable(),
|
||||||
|
conflictBehavior: z.enum(['fail', 'replace', 'rename']).optional().nullable(),
|
||||||
})
|
})
|
||||||
|
|
||||||
|
/** Microsoft Graph DriveItem response */
|
||||||
|
interface OneDriveFileData {
|
||||||
|
id: string
|
||||||
|
name: string
|
||||||
|
size: number
|
||||||
|
webUrl: string
|
||||||
|
createdDateTime: string
|
||||||
|
lastModifiedDateTime: string
|
||||||
|
file?: { mimeType: string }
|
||||||
|
parentReference?: { id: string; path: string }
|
||||||
|
'@microsoft.graph.downloadUrl'?: string
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Microsoft Graph Excel range response */
|
||||||
|
interface ExcelRangeData {
|
||||||
|
address?: string
|
||||||
|
addressLocal?: string
|
||||||
|
values?: unknown[][]
|
||||||
|
}
|
||||||
|
|
||||||
export async function POST(request: NextRequest) {
|
export async function POST(request: NextRequest) {
|
||||||
const requestId = generateRequestId()
|
const requestId = generateRequestId()
|
||||||
|
|
||||||
@@ -88,25 +111,9 @@ export async function POST(request: NextRequest) {
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
let fileToProcess
|
|
||||||
if (Array.isArray(rawFile)) {
|
|
||||||
if (rawFile.length === 0) {
|
|
||||||
return NextResponse.json(
|
|
||||||
{
|
|
||||||
success: false,
|
|
||||||
error: 'No file provided',
|
|
||||||
},
|
|
||||||
{ status: 400 }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
fileToProcess = rawFile[0]
|
|
||||||
} else {
|
|
||||||
fileToProcess = rawFile
|
|
||||||
}
|
|
||||||
|
|
||||||
let userFile
|
let userFile
|
||||||
try {
|
try {
|
||||||
userFile = processSingleFileToUserFile(fileToProcess, requestId, logger)
|
userFile = processSingleFileToUserFile(rawFile, requestId, logger)
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
return NextResponse.json(
|
return NextResponse.json(
|
||||||
{
|
{
|
||||||
@@ -179,14 +186,23 @@ export async function POST(request: NextRequest) {
|
|||||||
uploadUrl = `${MICROSOFT_GRAPH_BASE}/me/drive/root:/${encodeURIComponent(fileName)}:/content`
|
uploadUrl = `${MICROSOFT_GRAPH_BASE}/me/drive/root:/${encodeURIComponent(fileName)}:/content`
|
||||||
}
|
}
|
||||||
|
|
||||||
const uploadResponse = await fetch(uploadUrl, {
|
// Add conflict behavior if specified (defaults to replace by Microsoft Graph API)
|
||||||
method: 'PUT',
|
if (validatedData.conflictBehavior) {
|
||||||
headers: {
|
uploadUrl += `?@microsoft.graph.conflictBehavior=${validatedData.conflictBehavior}`
|
||||||
Authorization: `Bearer ${validatedData.accessToken}`,
|
}
|
||||||
'Content-Type': mimeType,
|
|
||||||
|
const uploadResponse = await secureFetchWithValidation(
|
||||||
|
uploadUrl,
|
||||||
|
{
|
||||||
|
method: 'PUT',
|
||||||
|
headers: {
|
||||||
|
Authorization: `Bearer ${validatedData.accessToken}`,
|
||||||
|
'Content-Type': mimeType,
|
||||||
|
},
|
||||||
|
body: fileBuffer,
|
||||||
},
|
},
|
||||||
body: new Uint8Array(fileBuffer),
|
'uploadUrl'
|
||||||
})
|
)
|
||||||
|
|
||||||
if (!uploadResponse.ok) {
|
if (!uploadResponse.ok) {
|
||||||
const errorText = await uploadResponse.text()
|
const errorText = await uploadResponse.text()
|
||||||
@@ -200,7 +216,7 @@ export async function POST(request: NextRequest) {
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
const fileData = await uploadResponse.json()
|
const fileData = (await uploadResponse.json()) as OneDriveFileData
|
||||||
|
|
||||||
let excelWriteResult: any | undefined
|
let excelWriteResult: any | undefined
|
||||||
const shouldWriteExcelContent =
|
const shouldWriteExcelContent =
|
||||||
@@ -209,8 +225,11 @@ export async function POST(request: NextRequest) {
|
|||||||
if (shouldWriteExcelContent) {
|
if (shouldWriteExcelContent) {
|
||||||
try {
|
try {
|
||||||
let workbookSessionId: string | undefined
|
let workbookSessionId: string | undefined
|
||||||
const sessionResp = await fetch(
|
const sessionUrl = `${MICROSOFT_GRAPH_BASE}/me/drive/items/${encodeURIComponent(
|
||||||
`${MICROSOFT_GRAPH_BASE}/me/drive/items/${encodeURIComponent(fileData.id)}/workbook/createSession`,
|
fileData.id
|
||||||
|
)}/workbook/createSession`
|
||||||
|
const sessionResp = await secureFetchWithValidation(
|
||||||
|
sessionUrl,
|
||||||
{
|
{
|
||||||
method: 'POST',
|
method: 'POST',
|
||||||
headers: {
|
headers: {
|
||||||
@@ -218,11 +237,12 @@ export async function POST(request: NextRequest) {
|
|||||||
'Content-Type': 'application/json',
|
'Content-Type': 'application/json',
|
||||||
},
|
},
|
||||||
body: JSON.stringify({ persistChanges: true }),
|
body: JSON.stringify({ persistChanges: true }),
|
||||||
}
|
},
|
||||||
|
'sessionUrl'
|
||||||
)
|
)
|
||||||
|
|
||||||
if (sessionResp.ok) {
|
if (sessionResp.ok) {
|
||||||
const sessionData = await sessionResp.json()
|
const sessionData = (await sessionResp.json()) as { id?: string }
|
||||||
workbookSessionId = sessionData?.id
|
workbookSessionId = sessionData?.id
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -231,14 +251,19 @@ export async function POST(request: NextRequest) {
|
|||||||
const listUrl = `${MICROSOFT_GRAPH_BASE}/me/drive/items/${encodeURIComponent(
|
const listUrl = `${MICROSOFT_GRAPH_BASE}/me/drive/items/${encodeURIComponent(
|
||||||
fileData.id
|
fileData.id
|
||||||
)}/workbook/worksheets?$select=name&$orderby=position&$top=1`
|
)}/workbook/worksheets?$select=name&$orderby=position&$top=1`
|
||||||
const listResp = await fetch(listUrl, {
|
const listResp = await secureFetchWithValidation(
|
||||||
headers: {
|
listUrl,
|
||||||
Authorization: `Bearer ${validatedData.accessToken}`,
|
{
|
||||||
...(workbookSessionId ? { 'workbook-session-id': workbookSessionId } : {}),
|
method: 'GET',
|
||||||
|
headers: {
|
||||||
|
Authorization: `Bearer ${validatedData.accessToken}`,
|
||||||
|
...(workbookSessionId ? { 'workbook-session-id': workbookSessionId } : {}),
|
||||||
|
},
|
||||||
},
|
},
|
||||||
})
|
'listUrl'
|
||||||
|
)
|
||||||
if (listResp.ok) {
|
if (listResp.ok) {
|
||||||
const listData = await listResp.json()
|
const listData = (await listResp.json()) as { value?: Array<{ name?: string }> }
|
||||||
const firstSheetName = listData?.value?.[0]?.name
|
const firstSheetName = listData?.value?.[0]?.name
|
||||||
if (firstSheetName) {
|
if (firstSheetName) {
|
||||||
sheetName = firstSheetName
|
sheetName = firstSheetName
|
||||||
@@ -297,15 +322,19 @@ export async function POST(request: NextRequest) {
|
|||||||
)}')/range(address='${encodeURIComponent(computedRangeAddress)}')`
|
)}')/range(address='${encodeURIComponent(computedRangeAddress)}')`
|
||||||
)
|
)
|
||||||
|
|
||||||
const excelWriteResponse = await fetch(url.toString(), {
|
const excelWriteResponse = await secureFetchWithValidation(
|
||||||
method: 'PATCH',
|
url.toString(),
|
||||||
headers: {
|
{
|
||||||
Authorization: `Bearer ${validatedData.accessToken}`,
|
method: 'PATCH',
|
||||||
'Content-Type': 'application/json',
|
headers: {
|
||||||
...(workbookSessionId ? { 'workbook-session-id': workbookSessionId } : {}),
|
Authorization: `Bearer ${validatedData.accessToken}`,
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
...(workbookSessionId ? { 'workbook-session-id': workbookSessionId } : {}),
|
||||||
|
},
|
||||||
|
body: JSON.stringify({ values: processedValues }),
|
||||||
},
|
},
|
||||||
body: JSON.stringify({ values: processedValues }),
|
'excelWriteUrl'
|
||||||
})
|
)
|
||||||
|
|
||||||
if (!excelWriteResponse || !excelWriteResponse.ok) {
|
if (!excelWriteResponse || !excelWriteResponse.ok) {
|
||||||
const errorText = excelWriteResponse ? await excelWriteResponse.text() : 'no response'
|
const errorText = excelWriteResponse ? await excelWriteResponse.text() : 'no response'
|
||||||
@@ -320,7 +349,7 @@ export async function POST(request: NextRequest) {
|
|||||||
details: errorText,
|
details: errorText,
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
const writeData = await excelWriteResponse.json()
|
const writeData = (await excelWriteResponse.json()) as ExcelRangeData
|
||||||
const addr = writeData.address || writeData.addressLocal
|
const addr = writeData.address || writeData.addressLocal
|
||||||
const v = writeData.values || []
|
const v = writeData.values || []
|
||||||
excelWriteResult = {
|
excelWriteResult = {
|
||||||
@@ -328,21 +357,25 @@ export async function POST(request: NextRequest) {
|
|||||||
updatedRange: addr,
|
updatedRange: addr,
|
||||||
updatedRows: Array.isArray(v) ? v.length : undefined,
|
updatedRows: Array.isArray(v) ? v.length : undefined,
|
||||||
updatedColumns: Array.isArray(v) && v[0] ? v[0].length : undefined,
|
updatedColumns: Array.isArray(v) && v[0] ? v[0].length : undefined,
|
||||||
updatedCells: Array.isArray(v) && v[0] ? v.length * (v[0] as any[]).length : undefined,
|
updatedCells: Array.isArray(v) && v[0] ? v.length * v[0].length : undefined,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if (workbookSessionId) {
|
if (workbookSessionId) {
|
||||||
try {
|
try {
|
||||||
const closeResp = await fetch(
|
const closeUrl = `${MICROSOFT_GRAPH_BASE}/me/drive/items/${encodeURIComponent(
|
||||||
`${MICROSOFT_GRAPH_BASE}/me/drive/items/${encodeURIComponent(fileData.id)}/workbook/closeSession`,
|
fileData.id
|
||||||
|
)}/workbook/closeSession`
|
||||||
|
const closeResp = await secureFetchWithValidation(
|
||||||
|
closeUrl,
|
||||||
{
|
{
|
||||||
method: 'POST',
|
method: 'POST',
|
||||||
headers: {
|
headers: {
|
||||||
Authorization: `Bearer ${validatedData.accessToken}`,
|
Authorization: `Bearer ${validatedData.accessToken}`,
|
||||||
'workbook-session-id': workbookSessionId,
|
'workbook-session-id': workbookSessionId,
|
||||||
},
|
},
|
||||||
}
|
},
|
||||||
|
'closeSessionUrl'
|
||||||
)
|
)
|
||||||
if (!closeResp.ok) {
|
if (!closeResp.ok) {
|
||||||
const closeText = await closeResp.text()
|
const closeText = await closeResp.text()
|
||||||
|
|||||||
@@ -3,6 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
|
|||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
|
import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
|
||||||
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
|
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
|
||||||
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
||||||
|
|
||||||
@@ -18,7 +19,7 @@ const OutlookDraftSchema = z.object({
|
|||||||
contentType: z.enum(['text', 'html']).optional().nullable(),
|
contentType: z.enum(['text', 'html']).optional().nullable(),
|
||||||
cc: z.string().optional().nullable(),
|
cc: z.string().optional().nullable(),
|
||||||
bcc: z.string().optional().nullable(),
|
bcc: z.string().optional().nullable(),
|
||||||
attachments: z.array(z.any()).optional().nullable(),
|
attachments: RawFileInputArraySchema.optional().nullable(),
|
||||||
})
|
})
|
||||||
|
|
||||||
export async function POST(request: NextRequest) {
|
export async function POST(request: NextRequest) {
|
||||||
|
|||||||
@@ -3,6 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
|
|||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
|
import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
|
||||||
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
|
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
|
||||||
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
||||||
|
|
||||||
@@ -20,7 +21,7 @@ const OutlookSendSchema = z.object({
|
|||||||
bcc: z.string().optional().nullable(),
|
bcc: z.string().optional().nullable(),
|
||||||
replyToMessageId: z.string().optional().nullable(),
|
replyToMessageId: z.string().optional().nullable(),
|
||||||
conversationId: z.string().optional().nullable(),
|
conversationId: z.string().optional().nullable(),
|
||||||
attachments: z.array(z.any()).optional().nullable(),
|
attachments: RawFileInputArraySchema.optional().nullable(),
|
||||||
})
|
})
|
||||||
|
|
||||||
export async function POST(request: NextRequest) {
|
export async function POST(request: NextRequest) {
|
||||||
@@ -95,14 +96,14 @@ export async function POST(request: NextRequest) {
|
|||||||
|
|
||||||
if (attachments.length > 0) {
|
if (attachments.length > 0) {
|
||||||
const totalSize = attachments.reduce((sum, file) => sum + file.size, 0)
|
const totalSize = attachments.reduce((sum, file) => sum + file.size, 0)
|
||||||
const maxSize = 4 * 1024 * 1024 // 4MB
|
const maxSize = 3 * 1024 * 1024 // 3MB - Microsoft Graph API limit for inline attachments
|
||||||
|
|
||||||
if (totalSize > maxSize) {
|
if (totalSize > maxSize) {
|
||||||
const sizeMB = (totalSize / (1024 * 1024)).toFixed(2)
|
const sizeMB = (totalSize / (1024 * 1024)).toFixed(2)
|
||||||
return NextResponse.json(
|
return NextResponse.json(
|
||||||
{
|
{
|
||||||
success: false,
|
success: false,
|
||||||
error: `Total attachment size (${sizeMB}MB) exceeds Outlook's limit of 4MB per request`,
|
error: `Total attachment size (${sizeMB}MB) exceeds Microsoft Graph API limit of 3MB per request`,
|
||||||
},
|
},
|
||||||
{ status: 400 }
|
{ status: 400 }
|
||||||
)
|
)
|
||||||
|
|||||||
165
apps/sim/app/api/tools/pipedrive/get-files/route.ts
Normal file
165
apps/sim/app/api/tools/pipedrive/get-files/route.ts
Normal file
@@ -0,0 +1,165 @@
|
|||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
|
import { z } from 'zod'
|
||||||
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
|
import {
|
||||||
|
secureFetchWithPinnedIP,
|
||||||
|
validateUrlWithDNS,
|
||||||
|
} from '@/lib/core/security/input-validation.server'
|
||||||
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
|
import { getFileExtension, getMimeTypeFromExtension } from '@/lib/uploads/utils/file-utils'
|
||||||
|
|
||||||
|
export const dynamic = 'force-dynamic'
|
||||||
|
|
||||||
|
const logger = createLogger('PipedriveGetFilesAPI')
|
||||||
|
|
||||||
|
/** Minimal shape of a file record returned by the Pipedrive v1 Files API. */
interface PipedriveFile {
  id?: number
  name?: string
  // Direct download URL for the file contents, when Pipedrive provides one.
  url?: string
}

/** Standard Pipedrive v1 response envelope for the files listing. */
interface PipedriveApiResponse {
  success: boolean
  data?: PipedriveFile[]
  error?: string
}

/**
 * Request payload for listing (and optionally downloading) Pipedrive files.
 * deal_id / person_id / org_id / limit are passed through as query filters;
 * downloadFiles=true additionally fetches each file's contents as base64.
 */
const PipedriveGetFilesSchema = z.object({
  accessToken: z.string().min(1, 'Access token is required'),
  deal_id: z.string().optional().nullable(),
  person_id: z.string().optional().nullable(),
  org_id: z.string().optional().nullable(),
  limit: z.string().optional().nullable(),
  downloadFiles: z.boolean().optional().default(false),
})
|
||||||
|
|
||||||
|
export async function POST(request: NextRequest) {
|
||||||
|
const requestId = generateRequestId()
|
||||||
|
|
||||||
|
try {
|
||||||
|
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
|
||||||
|
|
||||||
|
if (!authResult.success) {
|
||||||
|
logger.warn(`[${requestId}] Unauthorized Pipedrive get files attempt: ${authResult.error}`)
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: authResult.error || 'Authentication required',
|
||||||
|
},
|
||||||
|
{ status: 401 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const body = await request.json()
|
||||||
|
const validatedData = PipedriveGetFilesSchema.parse(body)
|
||||||
|
|
||||||
|
const { accessToken, deal_id, person_id, org_id, limit, downloadFiles } = validatedData
|
||||||
|
|
||||||
|
const baseUrl = 'https://api.pipedrive.com/v1/files'
|
||||||
|
const queryParams = new URLSearchParams()
|
||||||
|
|
||||||
|
if (deal_id) queryParams.append('deal_id', deal_id)
|
||||||
|
if (person_id) queryParams.append('person_id', person_id)
|
||||||
|
if (org_id) queryParams.append('org_id', org_id)
|
||||||
|
if (limit) queryParams.append('limit', limit)
|
||||||
|
|
||||||
|
const queryString = queryParams.toString()
|
||||||
|
const apiUrl = queryString ? `${baseUrl}?${queryString}` : baseUrl
|
||||||
|
|
||||||
|
logger.info(`[${requestId}] Fetching files from Pipedrive`, { deal_id, person_id, org_id })
|
||||||
|
|
||||||
|
const urlValidation = await validateUrlWithDNS(apiUrl, 'apiUrl')
|
||||||
|
if (!urlValidation.isValid) {
|
||||||
|
return NextResponse.json({ success: false, error: urlValidation.error }, { status: 400 })
|
||||||
|
}
|
||||||
|
|
||||||
|
const response = await secureFetchWithPinnedIP(apiUrl, urlValidation.resolvedIP!, {
|
||||||
|
method: 'GET',
|
||||||
|
headers: {
|
||||||
|
Authorization: `Bearer ${accessToken}`,
|
||||||
|
Accept: 'application/json',
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
const data = (await response.json()) as PipedriveApiResponse
|
||||||
|
|
||||||
|
if (!data.success) {
|
||||||
|
logger.error(`[${requestId}] Pipedrive API request failed`, { data })
|
||||||
|
return NextResponse.json(
|
||||||
|
{ success: false, error: data.error || 'Failed to fetch files from Pipedrive' },
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const files = data.data || []
|
||||||
|
const downloadedFiles: Array<{
|
||||||
|
name: string
|
||||||
|
mimeType: string
|
||||||
|
data: string
|
||||||
|
size: number
|
||||||
|
}> = []
|
||||||
|
|
||||||
|
if (downloadFiles) {
|
||||||
|
for (const file of files) {
|
||||||
|
if (!file?.url) continue
|
||||||
|
|
||||||
|
try {
|
||||||
|
const fileUrlValidation = await validateUrlWithDNS(file.url, 'fileUrl')
|
||||||
|
if (!fileUrlValidation.isValid) continue
|
||||||
|
|
||||||
|
const downloadResponse = await secureFetchWithPinnedIP(
|
||||||
|
file.url,
|
||||||
|
fileUrlValidation.resolvedIP!,
|
||||||
|
{
|
||||||
|
method: 'GET',
|
||||||
|
headers: { Authorization: `Bearer ${accessToken}` },
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
if (!downloadResponse.ok) continue
|
||||||
|
|
||||||
|
const arrayBuffer = await downloadResponse.arrayBuffer()
|
||||||
|
const buffer = Buffer.from(arrayBuffer)
|
||||||
|
const extension = getFileExtension(file.name || '')
|
||||||
|
const mimeType =
|
||||||
|
downloadResponse.headers.get('content-type') || getMimeTypeFromExtension(extension)
|
||||||
|
const fileName = file.name || `pipedrive-file-${file.id || Date.now()}`
|
||||||
|
|
||||||
|
downloadedFiles.push({
|
||||||
|
name: fileName,
|
||||||
|
mimeType,
|
||||||
|
data: buffer.toString('base64'),
|
||||||
|
size: buffer.length,
|
||||||
|
})
|
||||||
|
} catch (error) {
|
||||||
|
logger.warn(`[${requestId}] Failed to download file ${file.id}:`, error)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info(`[${requestId}] Pipedrive files fetched successfully`, {
|
||||||
|
fileCount: files.length,
|
||||||
|
downloadedCount: downloadedFiles.length,
|
||||||
|
})
|
||||||
|
|
||||||
|
return NextResponse.json({
|
||||||
|
success: true,
|
||||||
|
output: {
|
||||||
|
files,
|
||||||
|
downloadedFiles: downloadedFiles.length > 0 ? downloadedFiles : undefined,
|
||||||
|
total_items: files.length,
|
||||||
|
success: true,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(`[${requestId}] Error fetching Pipedrive files:`, error)
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: error instanceof Error ? error.message : 'Unknown error occurred',
|
||||||
|
},
|
||||||
|
{ status: 500 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -2,15 +2,14 @@ import { createLogger } from '@sim/logger'
|
|||||||
import { type NextRequest, NextResponse } from 'next/server'
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
|
||||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
|
||||||
import { StorageService } from '@/lib/uploads'
|
|
||||||
import {
|
import {
|
||||||
extractStorageKey,
|
secureFetchWithPinnedIP,
|
||||||
inferContextFromKey,
|
validateUrlWithDNS,
|
||||||
isInternalFileUrl,
|
} from '@/lib/core/security/input-validation.server'
|
||||||
} from '@/lib/uploads/utils/file-utils'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
import { verifyFileAccess } from '@/app/api/files/authorization'
|
import { RawFileInputSchema } from '@/lib/uploads/utils/file-schemas'
|
||||||
|
import { isInternalFileUrl } from '@/lib/uploads/utils/file-utils'
|
||||||
|
import { resolveFileInputToUrl } from '@/lib/uploads/utils/file-utils.server'
|
||||||
|
|
||||||
export const dynamic = 'force-dynamic'
|
export const dynamic = 'force-dynamic'
|
||||||
|
|
||||||
@@ -18,7 +17,8 @@ const logger = createLogger('PulseParseAPI')
|
|||||||
|
|
||||||
const PulseParseSchema = z.object({
|
const PulseParseSchema = z.object({
|
||||||
apiKey: z.string().min(1, 'API key is required'),
|
apiKey: z.string().min(1, 'API key is required'),
|
||||||
filePath: z.string().min(1, 'File path is required'),
|
filePath: z.string().optional(),
|
||||||
|
file: RawFileInputSchema.optional(),
|
||||||
pages: z.string().optional(),
|
pages: z.string().optional(),
|
||||||
extractFigure: z.boolean().optional(),
|
extractFigure: z.boolean().optional(),
|
||||||
figureDescription: z.boolean().optional(),
|
figureDescription: z.boolean().optional(),
|
||||||
@@ -51,50 +51,30 @@ export async function POST(request: NextRequest) {
|
|||||||
const validatedData = PulseParseSchema.parse(body)
|
const validatedData = PulseParseSchema.parse(body)
|
||||||
|
|
||||||
logger.info(`[${requestId}] Pulse parse request`, {
|
logger.info(`[${requestId}] Pulse parse request`, {
|
||||||
|
fileName: validatedData.file?.name,
|
||||||
filePath: validatedData.filePath,
|
filePath: validatedData.filePath,
|
||||||
isWorkspaceFile: isInternalFileUrl(validatedData.filePath),
|
isWorkspaceFile: validatedData.filePath ? isInternalFileUrl(validatedData.filePath) : false,
|
||||||
userId,
|
userId,
|
||||||
})
|
})
|
||||||
|
|
||||||
let fileUrl = validatedData.filePath
|
const resolution = await resolveFileInputToUrl({
|
||||||
|
file: validatedData.file,
|
||||||
|
filePath: validatedData.filePath,
|
||||||
|
userId,
|
||||||
|
requestId,
|
||||||
|
logger,
|
||||||
|
})
|
||||||
|
|
||||||
if (isInternalFileUrl(validatedData.filePath)) {
|
if (resolution.error) {
|
||||||
try {
|
return NextResponse.json(
|
||||||
const storageKey = extractStorageKey(validatedData.filePath)
|
{ success: false, error: resolution.error.message },
|
||||||
const context = inferContextFromKey(storageKey)
|
{ status: resolution.error.status }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
const hasAccess = await verifyFileAccess(storageKey, userId, undefined, context, false)
|
const fileUrl = resolution.fileUrl
|
||||||
|
if (!fileUrl) {
|
||||||
if (!hasAccess) {
|
return NextResponse.json({ success: false, error: 'File input is required' }, { status: 400 })
|
||||||
logger.warn(`[${requestId}] Unauthorized presigned URL generation attempt`, {
|
|
||||||
userId,
|
|
||||||
key: storageKey,
|
|
||||||
context,
|
|
||||||
})
|
|
||||||
return NextResponse.json(
|
|
||||||
{
|
|
||||||
success: false,
|
|
||||||
error: 'File not found',
|
|
||||||
},
|
|
||||||
{ status: 404 }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
fileUrl = await StorageService.generatePresignedDownloadUrl(storageKey, context, 5 * 60)
|
|
||||||
logger.info(`[${requestId}] Generated presigned URL for ${context} file`)
|
|
||||||
} catch (error) {
|
|
||||||
logger.error(`[${requestId}] Failed to generate presigned URL:`, error)
|
|
||||||
return NextResponse.json(
|
|
||||||
{
|
|
||||||
success: false,
|
|
||||||
error: 'Failed to generate file access URL',
|
|
||||||
},
|
|
||||||
{ status: 500 }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
} else if (validatedData.filePath?.startsWith('/')) {
|
|
||||||
const baseUrl = getBaseUrl()
|
|
||||||
fileUrl = `${baseUrl}${validatedData.filePath}`
|
|
||||||
}
|
}
|
||||||
|
|
||||||
const formData = new FormData()
|
const formData = new FormData()
|
||||||
@@ -119,13 +99,36 @@ export async function POST(request: NextRequest) {
|
|||||||
formData.append('chunk_size', String(validatedData.chunkSize))
|
formData.append('chunk_size', String(validatedData.chunkSize))
|
||||||
}
|
}
|
||||||
|
|
||||||
const pulseResponse = await fetch('https://api.runpulse.com/extract', {
|
const pulseEndpoint = 'https://api.runpulse.com/extract'
|
||||||
method: 'POST',
|
const pulseValidation = await validateUrlWithDNS(pulseEndpoint, 'Pulse API URL')
|
||||||
headers: {
|
if (!pulseValidation.isValid) {
|
||||||
'x-api-key': validatedData.apiKey,
|
logger.error(`[${requestId}] Pulse API URL validation failed`, {
|
||||||
},
|
error: pulseValidation.error,
|
||||||
body: formData,
|
})
|
||||||
})
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: 'Failed to reach Pulse API',
|
||||||
|
},
|
||||||
|
{ status: 502 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const pulsePayload = new Response(formData)
|
||||||
|
const contentType = pulsePayload.headers.get('content-type') || 'multipart/form-data'
|
||||||
|
const bodyBuffer = Buffer.from(await pulsePayload.arrayBuffer())
|
||||||
|
const pulseResponse = await secureFetchWithPinnedIP(
|
||||||
|
pulseEndpoint,
|
||||||
|
pulseValidation.resolvedIP!,
|
||||||
|
{
|
||||||
|
method: 'POST',
|
||||||
|
headers: {
|
||||||
|
'x-api-key': validatedData.apiKey,
|
||||||
|
'Content-Type': contentType,
|
||||||
|
},
|
||||||
|
body: bodyBuffer,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
if (!pulseResponse.ok) {
|
if (!pulseResponse.ok) {
|
||||||
const errorText = await pulseResponse.text()
|
const errorText = await pulseResponse.text()
|
||||||
|
|||||||
@@ -2,15 +2,14 @@ import { createLogger } from '@sim/logger'
|
|||||||
import { type NextRequest, NextResponse } from 'next/server'
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
|
||||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
|
||||||
import { StorageService } from '@/lib/uploads'
|
|
||||||
import {
|
import {
|
||||||
extractStorageKey,
|
secureFetchWithPinnedIP,
|
||||||
inferContextFromKey,
|
validateUrlWithDNS,
|
||||||
isInternalFileUrl,
|
} from '@/lib/core/security/input-validation.server'
|
||||||
} from '@/lib/uploads/utils/file-utils'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
import { verifyFileAccess } from '@/app/api/files/authorization'
|
import { RawFileInputSchema } from '@/lib/uploads/utils/file-schemas'
|
||||||
|
import { isInternalFileUrl } from '@/lib/uploads/utils/file-utils'
|
||||||
|
import { resolveFileInputToUrl } from '@/lib/uploads/utils/file-utils.server'
|
||||||
|
|
||||||
export const dynamic = 'force-dynamic'
|
export const dynamic = 'force-dynamic'
|
||||||
|
|
||||||
@@ -18,7 +17,8 @@ const logger = createLogger('ReductoParseAPI')
|
|||||||
|
|
||||||
const ReductoParseSchema = z.object({
|
const ReductoParseSchema = z.object({
|
||||||
apiKey: z.string().min(1, 'API key is required'),
|
apiKey: z.string().min(1, 'API key is required'),
|
||||||
filePath: z.string().min(1, 'File path is required'),
|
filePath: z.string().optional(),
|
||||||
|
file: RawFileInputSchema.optional(),
|
||||||
pages: z.array(z.number()).optional(),
|
pages: z.array(z.number()).optional(),
|
||||||
tableOutputFormat: z.enum(['html', 'md']).optional(),
|
tableOutputFormat: z.enum(['html', 'md']).optional(),
|
||||||
})
|
})
|
||||||
@@ -47,56 +47,30 @@ export async function POST(request: NextRequest) {
|
|||||||
const validatedData = ReductoParseSchema.parse(body)
|
const validatedData = ReductoParseSchema.parse(body)
|
||||||
|
|
||||||
logger.info(`[${requestId}] Reducto parse request`, {
|
logger.info(`[${requestId}] Reducto parse request`, {
|
||||||
|
fileName: validatedData.file?.name,
|
||||||
filePath: validatedData.filePath,
|
filePath: validatedData.filePath,
|
||||||
isWorkspaceFile: isInternalFileUrl(validatedData.filePath),
|
isWorkspaceFile: validatedData.filePath ? isInternalFileUrl(validatedData.filePath) : false,
|
||||||
userId,
|
userId,
|
||||||
})
|
})
|
||||||
|
|
||||||
let fileUrl = validatedData.filePath
|
const resolution = await resolveFileInputToUrl({
|
||||||
|
file: validatedData.file,
|
||||||
|
filePath: validatedData.filePath,
|
||||||
|
userId,
|
||||||
|
requestId,
|
||||||
|
logger,
|
||||||
|
})
|
||||||
|
|
||||||
if (isInternalFileUrl(validatedData.filePath)) {
|
if (resolution.error) {
|
||||||
try {
|
return NextResponse.json(
|
||||||
const storageKey = extractStorageKey(validatedData.filePath)
|
{ success: false, error: resolution.error.message },
|
||||||
const context = inferContextFromKey(storageKey)
|
{ status: resolution.error.status }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
const hasAccess = await verifyFileAccess(
|
const fileUrl = resolution.fileUrl
|
||||||
storageKey,
|
if (!fileUrl) {
|
||||||
userId,
|
return NextResponse.json({ success: false, error: 'File input is required' }, { status: 400 })
|
||||||
undefined, // customConfig
|
|
||||||
context, // context
|
|
||||||
false // isLocal
|
|
||||||
)
|
|
||||||
|
|
||||||
if (!hasAccess) {
|
|
||||||
logger.warn(`[${requestId}] Unauthorized presigned URL generation attempt`, {
|
|
||||||
userId,
|
|
||||||
key: storageKey,
|
|
||||||
context,
|
|
||||||
})
|
|
||||||
return NextResponse.json(
|
|
||||||
{
|
|
||||||
success: false,
|
|
||||||
error: 'File not found',
|
|
||||||
},
|
|
||||||
{ status: 404 }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
fileUrl = await StorageService.generatePresignedDownloadUrl(storageKey, context, 5 * 60)
|
|
||||||
logger.info(`[${requestId}] Generated presigned URL for ${context} file`)
|
|
||||||
} catch (error) {
|
|
||||||
logger.error(`[${requestId}] Failed to generate presigned URL:`, error)
|
|
||||||
return NextResponse.json(
|
|
||||||
{
|
|
||||||
success: false,
|
|
||||||
error: 'Failed to generate file access URL',
|
|
||||||
},
|
|
||||||
{ status: 500 }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
} else if (validatedData.filePath?.startsWith('/')) {
|
|
||||||
const baseUrl = getBaseUrl()
|
|
||||||
fileUrl = `${baseUrl}${validatedData.filePath}`
|
|
||||||
}
|
}
|
||||||
|
|
||||||
const reductoBody: Record<string, unknown> = {
|
const reductoBody: Record<string, unknown> = {
|
||||||
@@ -104,8 +78,13 @@ export async function POST(request: NextRequest) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if (validatedData.pages && validatedData.pages.length > 0) {
|
if (validatedData.pages && validatedData.pages.length > 0) {
|
||||||
|
// Reducto API expects page_range as an object with start/end, not an array
|
||||||
|
const pages = validatedData.pages
|
||||||
reductoBody.settings = {
|
reductoBody.settings = {
|
||||||
page_range: validatedData.pages,
|
page_range: {
|
||||||
|
start: Math.min(...pages),
|
||||||
|
end: Math.max(...pages),
|
||||||
|
},
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -115,15 +94,34 @@ export async function POST(request: NextRequest) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
const reductoResponse = await fetch('https://platform.reducto.ai/parse', {
|
const reductoEndpoint = 'https://platform.reducto.ai/parse'
|
||||||
method: 'POST',
|
const reductoValidation = await validateUrlWithDNS(reductoEndpoint, 'Reducto API URL')
|
||||||
headers: {
|
if (!reductoValidation.isValid) {
|
||||||
'Content-Type': 'application/json',
|
logger.error(`[${requestId}] Reducto API URL validation failed`, {
|
||||||
Accept: 'application/json',
|
error: reductoValidation.error,
|
||||||
Authorization: `Bearer ${validatedData.apiKey}`,
|
})
|
||||||
},
|
return NextResponse.json(
|
||||||
body: JSON.stringify(reductoBody),
|
{
|
||||||
})
|
success: false,
|
||||||
|
error: 'Failed to reach Reducto API',
|
||||||
|
},
|
||||||
|
{ status: 502 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const reductoResponse = await secureFetchWithPinnedIP(
|
||||||
|
reductoEndpoint,
|
||||||
|
reductoValidation.resolvedIP!,
|
||||||
|
{
|
||||||
|
method: 'POST',
|
||||||
|
headers: {
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
Accept: 'application/json',
|
||||||
|
Authorization: `Bearer ${validatedData.apiKey}`,
|
||||||
|
},
|
||||||
|
body: JSON.stringify(reductoBody),
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
if (!reductoResponse.ok) {
|
if (!reductoResponse.ok) {
|
||||||
const errorText = await reductoResponse.text()
|
const errorText = await reductoResponse.text()
|
||||||
|
|||||||
@@ -4,6 +4,7 @@ import { type NextRequest, NextResponse } from 'next/server'
|
|||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
|
import { RawFileInputSchema } from '@/lib/uploads/utils/file-schemas'
|
||||||
import { processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
|
import { processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
|
||||||
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
||||||
|
|
||||||
@@ -17,7 +18,7 @@ const S3PutObjectSchema = z.object({
|
|||||||
region: z.string().min(1, 'Region is required'),
|
region: z.string().min(1, 'Region is required'),
|
||||||
bucketName: z.string().min(1, 'Bucket name is required'),
|
bucketName: z.string().min(1, 'Bucket name is required'),
|
||||||
objectKey: z.string().min(1, 'Object key is required'),
|
objectKey: z.string().min(1, 'Object key is required'),
|
||||||
file: z.any().optional().nullable(),
|
file: RawFileInputSchema.optional().nullable(),
|
||||||
content: z.string().optional().nullable(),
|
content: z.string().optional().nullable(),
|
||||||
contentType: z.string().optional().nullable(),
|
contentType: z.string().optional().nullable(),
|
||||||
acl: z.string().optional().nullable(),
|
acl: z.string().optional().nullable(),
|
||||||
|
|||||||
@@ -4,6 +4,7 @@ import { type NextRequest, NextResponse } from 'next/server'
|
|||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
|
import { getFileExtension, getMimeTypeFromExtension } from '@/lib/uploads/utils/file-utils'
|
||||||
import { createSftpConnection, getSftp, isPathSafe, sanitizePath } from '@/app/api/tools/sftp/utils'
|
import { createSftpConnection, getSftp, isPathSafe, sanitizePath } from '@/app/api/tools/sftp/utils'
|
||||||
|
|
||||||
export const dynamic = 'force-dynamic'
|
export const dynamic = 'force-dynamic'
|
||||||
@@ -111,6 +112,8 @@ export async function POST(request: NextRequest) {
|
|||||||
|
|
||||||
const buffer = Buffer.concat(chunks)
|
const buffer = Buffer.concat(chunks)
|
||||||
const fileName = path.basename(remotePath)
|
const fileName = path.basename(remotePath)
|
||||||
|
const extension = getFileExtension(fileName)
|
||||||
|
const mimeType = getMimeTypeFromExtension(extension)
|
||||||
|
|
||||||
let content: string
|
let content: string
|
||||||
if (params.encoding === 'base64') {
|
if (params.encoding === 'base64') {
|
||||||
@@ -124,6 +127,12 @@ export async function POST(request: NextRequest) {
|
|||||||
return NextResponse.json({
|
return NextResponse.json({
|
||||||
success: true,
|
success: true,
|
||||||
fileName,
|
fileName,
|
||||||
|
file: {
|
||||||
|
name: fileName,
|
||||||
|
mimeType,
|
||||||
|
data: buffer.toString('base64'),
|
||||||
|
size: buffer.length,
|
||||||
|
},
|
||||||
content,
|
content,
|
||||||
size: buffer.length,
|
size: buffer.length,
|
||||||
encoding: params.encoding,
|
encoding: params.encoding,
|
||||||
|
|||||||
@@ -3,6 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
|
|||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
|
import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
|
||||||
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
|
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
|
||||||
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
||||||
import {
|
import {
|
||||||
@@ -26,14 +27,7 @@ const UploadSchema = z.object({
|
|||||||
privateKey: z.string().nullish(),
|
privateKey: z.string().nullish(),
|
||||||
passphrase: z.string().nullish(),
|
passphrase: z.string().nullish(),
|
||||||
remotePath: z.string().min(1, 'Remote path is required'),
|
remotePath: z.string().min(1, 'Remote path is required'),
|
||||||
files: z
|
files: RawFileInputArraySchema.optional().nullable(),
|
||||||
.union([z.array(z.any()), z.string(), z.number(), z.null(), z.undefined()])
|
|
||||||
.transform((val) => {
|
|
||||||
if (Array.isArray(val)) return val
|
|
||||||
if (val === null || val === undefined || val === '') return undefined
|
|
||||||
return undefined
|
|
||||||
})
|
|
||||||
.nullish(),
|
|
||||||
fileContent: z.string().nullish(),
|
fileContent: z.string().nullish(),
|
||||||
fileName: z.string().nullish(),
|
fileName: z.string().nullish(),
|
||||||
overwrite: z.boolean().default(true),
|
overwrite: z.boolean().default(true),
|
||||||
|
|||||||
@@ -2,9 +2,12 @@ import { createLogger } from '@sim/logger'
|
|||||||
import { type NextRequest, NextResponse } from 'next/server'
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
|
import { secureFetchWithValidation } from '@/lib/core/security/input-validation.server'
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
|
import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
|
||||||
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
|
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
|
||||||
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
||||||
|
import type { MicrosoftGraphDriveItem } from '@/tools/onedrive/types'
|
||||||
|
|
||||||
export const dynamic = 'force-dynamic'
|
export const dynamic = 'force-dynamic'
|
||||||
|
|
||||||
@@ -16,7 +19,7 @@ const SharepointUploadSchema = z.object({
|
|||||||
driveId: z.string().optional().nullable(),
|
driveId: z.string().optional().nullable(),
|
||||||
folderPath: z.string().optional().nullable(),
|
folderPath: z.string().optional().nullable(),
|
||||||
fileName: z.string().optional().nullable(),
|
fileName: z.string().optional().nullable(),
|
||||||
files: z.array(z.any()).optional().nullable(),
|
files: RawFileInputArraySchema.optional().nullable(),
|
||||||
})
|
})
|
||||||
|
|
||||||
export async function POST(request: NextRequest) {
|
export async function POST(request: NextRequest) {
|
||||||
@@ -79,18 +82,23 @@ export async function POST(request: NextRequest) {
|
|||||||
let effectiveDriveId = validatedData.driveId
|
let effectiveDriveId = validatedData.driveId
|
||||||
if (!effectiveDriveId) {
|
if (!effectiveDriveId) {
|
||||||
logger.info(`[${requestId}] No driveId provided, fetching default drive for site`)
|
logger.info(`[${requestId}] No driveId provided, fetching default drive for site`)
|
||||||
const driveResponse = await fetch(
|
const driveUrl = `https://graph.microsoft.com/v1.0/sites/${validatedData.siteId}/drive`
|
||||||
`https://graph.microsoft.com/v1.0/sites/${validatedData.siteId}/drive`,
|
const driveResponse = await secureFetchWithValidation(
|
||||||
|
driveUrl,
|
||||||
{
|
{
|
||||||
|
method: 'GET',
|
||||||
headers: {
|
headers: {
|
||||||
Authorization: `Bearer ${validatedData.accessToken}`,
|
Authorization: `Bearer ${validatedData.accessToken}`,
|
||||||
Accept: 'application/json',
|
Accept: 'application/json',
|
||||||
},
|
},
|
||||||
}
|
},
|
||||||
|
'driveUrl'
|
||||||
)
|
)
|
||||||
|
|
||||||
if (!driveResponse.ok) {
|
if (!driveResponse.ok) {
|
||||||
const errorData = await driveResponse.json().catch(() => ({}))
|
const errorData = (await driveResponse.json().catch(() => ({}))) as {
|
||||||
|
error?: { message?: string }
|
||||||
|
}
|
||||||
logger.error(`[${requestId}] Failed to get default drive:`, errorData)
|
logger.error(`[${requestId}] Failed to get default drive:`, errorData)
|
||||||
return NextResponse.json(
|
return NextResponse.json(
|
||||||
{
|
{
|
||||||
@@ -101,7 +109,7 @@ export async function POST(request: NextRequest) {
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
const driveData = await driveResponse.json()
|
const driveData = (await driveResponse.json()) as { id: string }
|
||||||
effectiveDriveId = driveData.id
|
effectiveDriveId = driveData.id
|
||||||
logger.info(`[${requestId}] Using default drive: ${effectiveDriveId}`)
|
logger.info(`[${requestId}] Using default drive: ${effectiveDriveId}`)
|
||||||
}
|
}
|
||||||
@@ -145,34 +153,87 @@ export async function POST(request: NextRequest) {
|
|||||||
|
|
||||||
logger.info(`[${requestId}] Uploading to: ${uploadUrl}`)
|
logger.info(`[${requestId}] Uploading to: ${uploadUrl}`)
|
||||||
|
|
||||||
const uploadResponse = await fetch(uploadUrl, {
|
const uploadResponse = await secureFetchWithValidation(
|
||||||
method: 'PUT',
|
uploadUrl,
|
||||||
headers: {
|
{
|
||||||
Authorization: `Bearer ${validatedData.accessToken}`,
|
method: 'PUT',
|
||||||
'Content-Type': userFile.type || 'application/octet-stream',
|
headers: {
|
||||||
|
Authorization: `Bearer ${validatedData.accessToken}`,
|
||||||
|
'Content-Type': userFile.type || 'application/octet-stream',
|
||||||
|
},
|
||||||
|
body: buffer,
|
||||||
},
|
},
|
||||||
body: new Uint8Array(buffer),
|
'uploadUrl'
|
||||||
})
|
)
|
||||||
|
|
||||||
if (!uploadResponse.ok) {
|
if (!uploadResponse.ok) {
|
||||||
const errorData = await uploadResponse.json().catch(() => ({}))
|
const errorData = await uploadResponse.json().catch(() => ({}))
|
||||||
logger.error(`[${requestId}] Failed to upload file ${fileName}:`, errorData)
|
logger.error(`[${requestId}] Failed to upload file ${fileName}:`, errorData)
|
||||||
|
|
||||||
if (uploadResponse.status === 409) {
|
if (uploadResponse.status === 409) {
|
||||||
logger.warn(`[${requestId}] File ${fileName} already exists, attempting to replace`)
|
// File exists - retry with conflict behavior set to replace
|
||||||
|
logger.warn(`[${requestId}] File ${fileName} already exists, retrying with replace`)
|
||||||
|
const replaceUrl = `${uploadUrl}?@microsoft.graph.conflictBehavior=replace`
|
||||||
|
const replaceResponse = await secureFetchWithValidation(
|
||||||
|
replaceUrl,
|
||||||
|
{
|
||||||
|
method: 'PUT',
|
||||||
|
headers: {
|
||||||
|
Authorization: `Bearer ${validatedData.accessToken}`,
|
||||||
|
'Content-Type': userFile.type || 'application/octet-stream',
|
||||||
|
},
|
||||||
|
body: buffer,
|
||||||
|
},
|
||||||
|
'replaceUrl'
|
||||||
|
)
|
||||||
|
|
||||||
|
if (!replaceResponse.ok) {
|
||||||
|
const replaceErrorData = (await replaceResponse.json().catch(() => ({}))) as {
|
||||||
|
error?: { message?: string }
|
||||||
|
}
|
||||||
|
logger.error(`[${requestId}] Failed to replace file ${fileName}:`, replaceErrorData)
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: replaceErrorData.error?.message || `Failed to replace file: ${fileName}`,
|
||||||
|
},
|
||||||
|
{ status: replaceResponse.status }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const replaceData = (await replaceResponse.json()) as {
|
||||||
|
id: string
|
||||||
|
name: string
|
||||||
|
webUrl: string
|
||||||
|
size: number
|
||||||
|
createdDateTime: string
|
||||||
|
lastModifiedDateTime: string
|
||||||
|
}
|
||||||
|
logger.info(`[${requestId}] File replaced successfully: ${fileName}`)
|
||||||
|
|
||||||
|
uploadedFiles.push({
|
||||||
|
id: replaceData.id,
|
||||||
|
name: replaceData.name,
|
||||||
|
webUrl: replaceData.webUrl,
|
||||||
|
size: replaceData.size,
|
||||||
|
createdDateTime: replaceData.createdDateTime,
|
||||||
|
lastModifiedDateTime: replaceData.lastModifiedDateTime,
|
||||||
|
})
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
return NextResponse.json(
|
return NextResponse.json(
|
||||||
{
|
{
|
||||||
success: false,
|
success: false,
|
||||||
error: errorData.error?.message || `Failed to upload file: ${fileName}`,
|
error:
|
||||||
|
(errorData as { error?: { message?: string } }).error?.message ||
|
||||||
|
`Failed to upload file: ${fileName}`,
|
||||||
},
|
},
|
||||||
{ status: uploadResponse.status }
|
{ status: uploadResponse.status }
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
const uploadData = await uploadResponse.json()
|
const uploadData = (await uploadResponse.json()) as MicrosoftGraphDriveItem
|
||||||
logger.info(`[${requestId}] File uploaded successfully: ${fileName}`)
|
logger.info(`[${requestId}] File uploaded successfully: ${fileName}`)
|
||||||
|
|
||||||
uploadedFiles.push({
|
uploadedFiles.push({
|
||||||
|
|||||||
170
apps/sim/app/api/tools/slack/download/route.ts
Normal file
170
apps/sim/app/api/tools/slack/download/route.ts
Normal file
@@ -0,0 +1,170 @@
|
|||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
|
import { z } from 'zod'
|
||||||
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
|
import {
|
||||||
|
secureFetchWithPinnedIP,
|
||||||
|
validateUrlWithDNS,
|
||||||
|
} from '@/lib/core/security/input-validation.server'
|
||||||
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
|
|
||||||
|
export const dynamic = 'force-dynamic'
|
||||||
|
|
||||||
|
const logger = createLogger('SlackDownloadAPI')
|
||||||
|
|
||||||
|
const SlackDownloadSchema = z.object({
|
||||||
|
accessToken: z.string().min(1, 'Access token is required'),
|
||||||
|
fileId: z.string().min(1, 'File ID is required'),
|
||||||
|
fileName: z.string().optional().nullable(),
|
||||||
|
})
|
||||||
|
|
||||||
|
export async function POST(request: NextRequest) {
|
||||||
|
const requestId = generateRequestId()
|
||||||
|
|
||||||
|
try {
|
||||||
|
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
|
||||||
|
|
||||||
|
if (!authResult.success) {
|
||||||
|
logger.warn(`[${requestId}] Unauthorized Slack download attempt: ${authResult.error}`)
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: authResult.error || 'Authentication required',
|
||||||
|
},
|
||||||
|
{ status: 401 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info(`[${requestId}] Authenticated Slack download request via ${authResult.authType}`, {
|
||||||
|
userId: authResult.userId,
|
||||||
|
})
|
||||||
|
|
||||||
|
const body = await request.json()
|
||||||
|
const validatedData = SlackDownloadSchema.parse(body)
|
||||||
|
|
||||||
|
const { accessToken, fileId, fileName } = validatedData
|
||||||
|
|
||||||
|
logger.info(`[${requestId}] Getting file info from Slack`, { fileId })
|
||||||
|
|
||||||
|
const infoResponse = await fetch(`https://slack.com/api/files.info?file=${fileId}`, {
|
||||||
|
method: 'GET',
|
||||||
|
headers: {
|
||||||
|
Authorization: `Bearer ${accessToken}`,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
if (!infoResponse.ok) {
|
||||||
|
const errorDetails = await infoResponse.json().catch(() => ({}))
|
||||||
|
logger.error(`[${requestId}] Failed to get file info from Slack`, {
|
||||||
|
status: infoResponse.status,
|
||||||
|
statusText: infoResponse.statusText,
|
||||||
|
error: errorDetails,
|
||||||
|
})
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: errorDetails.error || 'Failed to get file info',
|
||||||
|
},
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const data = await infoResponse.json()
|
||||||
|
|
||||||
|
if (!data.ok) {
|
||||||
|
logger.error(`[${requestId}] Slack API returned error`, { error: data.error })
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: data.error || 'Slack API error',
|
||||||
|
},
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const file = data.file
|
||||||
|
const resolvedFileName = fileName || file.name || 'download'
|
||||||
|
const mimeType = file.mimetype || 'application/octet-stream'
|
||||||
|
const urlPrivate = file.url_private
|
||||||
|
|
||||||
|
if (!urlPrivate) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: 'File does not have a download URL',
|
||||||
|
},
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const urlValidation = await validateUrlWithDNS(urlPrivate, 'urlPrivate')
|
||||||
|
if (!urlValidation.isValid) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: urlValidation.error,
|
||||||
|
},
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info(`[${requestId}] Downloading file from Slack`, {
|
||||||
|
fileId,
|
||||||
|
fileName: resolvedFileName,
|
||||||
|
mimeType,
|
||||||
|
})
|
||||||
|
|
||||||
|
const downloadResponse = await secureFetchWithPinnedIP(urlPrivate, urlValidation.resolvedIP!, {
|
||||||
|
headers: {
|
||||||
|
Authorization: `Bearer ${accessToken}`,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
if (!downloadResponse.ok) {
|
||||||
|
logger.error(`[${requestId}] Failed to download file content`, {
|
||||||
|
status: downloadResponse.status,
|
||||||
|
statusText: downloadResponse.statusText,
|
||||||
|
})
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: 'Failed to download file content',
|
||||||
|
},
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const arrayBuffer = await downloadResponse.arrayBuffer()
|
||||||
|
const fileBuffer = Buffer.from(arrayBuffer)
|
||||||
|
|
||||||
|
logger.info(`[${requestId}] File downloaded successfully`, {
|
||||||
|
fileId,
|
||||||
|
name: resolvedFileName,
|
||||||
|
size: fileBuffer.length,
|
||||||
|
mimeType,
|
||||||
|
})
|
||||||
|
|
||||||
|
const base64Data = fileBuffer.toString('base64')
|
||||||
|
|
||||||
|
return NextResponse.json({
|
||||||
|
success: true,
|
||||||
|
output: {
|
||||||
|
file: {
|
||||||
|
name: resolvedFileName,
|
||||||
|
mimeType,
|
||||||
|
data: base64Data,
|
||||||
|
size: fileBuffer.length,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
})
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(`[${requestId}] Error downloading Slack file:`, error)
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: error instanceof Error ? error.message : 'Unknown error occurred',
|
||||||
|
},
|
||||||
|
{ status: 500 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -3,6 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
|
|||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
|
import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
|
||||||
import { sendSlackMessage } from '../utils'
|
import { sendSlackMessage } from '../utils'
|
||||||
|
|
||||||
export const dynamic = 'force-dynamic'
|
export const dynamic = 'force-dynamic'
|
||||||
@@ -16,7 +17,7 @@ const SlackSendMessageSchema = z
|
|||||||
userId: z.string().optional().nullable(),
|
userId: z.string().optional().nullable(),
|
||||||
text: z.string().min(1, 'Message text is required'),
|
text: z.string().min(1, 'Message text is required'),
|
||||||
thread_ts: z.string().optional().nullable(),
|
thread_ts: z.string().optional().nullable(),
|
||||||
files: z.array(z.any()).optional().nullable(),
|
files: RawFileInputArraySchema.optional().nullable(),
|
||||||
})
|
})
|
||||||
.refine((data) => data.channel || data.userId, {
|
.refine((data) => data.channel || data.userId, {
|
||||||
message: 'Either channel or userId is required',
|
message: 'Either channel or userId is required',
|
||||||
|
|||||||
@@ -1,6 +1,8 @@
|
|||||||
import type { Logger } from '@sim/logger'
|
import type { Logger } from '@sim/logger'
|
||||||
|
import { secureFetchWithValidation } from '@/lib/core/security/input-validation.server'
|
||||||
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
|
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
|
||||||
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
||||||
|
import type { ToolFileData } from '@/tools/types'
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Sends a message to a Slack channel using chat.postMessage
|
* Sends a message to a Slack channel using chat.postMessage
|
||||||
@@ -70,14 +72,21 @@ export async function uploadFilesToSlack(
|
|||||||
accessToken: string,
|
accessToken: string,
|
||||||
requestId: string,
|
requestId: string,
|
||||||
logger: Logger
|
logger: Logger
|
||||||
): Promise<string[]> {
|
): Promise<{ fileIds: string[]; files: ToolFileData[] }> {
|
||||||
const userFiles = processFilesToUserFiles(files, requestId, logger)
|
const userFiles = processFilesToUserFiles(files, requestId, logger)
|
||||||
const uploadedFileIds: string[] = []
|
const uploadedFileIds: string[] = []
|
||||||
|
const uploadedFiles: ToolFileData[] = []
|
||||||
|
|
||||||
for (const userFile of userFiles) {
|
for (const userFile of userFiles) {
|
||||||
logger.info(`[${requestId}] Uploading file: ${userFile.name}`)
|
logger.info(`[${requestId}] Uploading file: ${userFile.name}`)
|
||||||
|
|
||||||
const buffer = await downloadFileFromStorage(userFile, requestId, logger)
|
const buffer = await downloadFileFromStorage(userFile, requestId, logger)
|
||||||
|
uploadedFiles.push({
|
||||||
|
name: userFile.name,
|
||||||
|
mimeType: userFile.type || 'application/octet-stream',
|
||||||
|
data: buffer.toString('base64'),
|
||||||
|
size: buffer.length,
|
||||||
|
})
|
||||||
|
|
||||||
const getUrlResponse = await fetch('https://slack.com/api/files.getUploadURLExternal', {
|
const getUrlResponse = await fetch('https://slack.com/api/files.getUploadURLExternal', {
|
||||||
method: 'POST',
|
method: 'POST',
|
||||||
@@ -100,10 +109,14 @@ export async function uploadFilesToSlack(
|
|||||||
|
|
||||||
logger.info(`[${requestId}] Got upload URL for ${userFile.name}, file_id: ${urlData.file_id}`)
|
logger.info(`[${requestId}] Got upload URL for ${userFile.name}, file_id: ${urlData.file_id}`)
|
||||||
|
|
||||||
const uploadResponse = await fetch(urlData.upload_url, {
|
const uploadResponse = await secureFetchWithValidation(
|
||||||
method: 'POST',
|
urlData.upload_url,
|
||||||
body: new Uint8Array(buffer),
|
{
|
||||||
})
|
method: 'POST',
|
||||||
|
body: buffer,
|
||||||
|
},
|
||||||
|
'uploadUrl'
|
||||||
|
)
|
||||||
|
|
||||||
if (!uploadResponse.ok) {
|
if (!uploadResponse.ok) {
|
||||||
logger.error(`[${requestId}] Failed to upload file data: ${uploadResponse.status}`)
|
logger.error(`[${requestId}] Failed to upload file data: ${uploadResponse.status}`)
|
||||||
@@ -114,7 +127,7 @@ export async function uploadFilesToSlack(
|
|||||||
uploadedFileIds.push(urlData.file_id)
|
uploadedFileIds.push(urlData.file_id)
|
||||||
}
|
}
|
||||||
|
|
||||||
return uploadedFileIds
|
return { fileIds: uploadedFileIds, files: uploadedFiles }
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -124,7 +137,8 @@ export async function completeSlackFileUpload(
|
|||||||
uploadedFileIds: string[],
|
uploadedFileIds: string[],
|
||||||
channel: string,
|
channel: string,
|
||||||
text: string,
|
text: string,
|
||||||
accessToken: string
|
accessToken: string,
|
||||||
|
threadTs?: string | null
|
||||||
): Promise<{ ok: boolean; files?: any[]; error?: string }> {
|
): Promise<{ ok: boolean; files?: any[]; error?: string }> {
|
||||||
const response = await fetch('https://slack.com/api/files.completeUploadExternal', {
|
const response = await fetch('https://slack.com/api/files.completeUploadExternal', {
|
||||||
method: 'POST',
|
method: 'POST',
|
||||||
@@ -136,6 +150,7 @@ export async function completeSlackFileUpload(
|
|||||||
files: uploadedFileIds.map((id) => ({ id })),
|
files: uploadedFileIds.map((id) => ({ id })),
|
||||||
channel_id: channel,
|
channel_id: channel,
|
||||||
initial_comment: text,
|
initial_comment: text,
|
||||||
|
...(threadTs && { thread_ts: threadTs }),
|
||||||
}),
|
}),
|
||||||
})
|
})
|
||||||
|
|
||||||
@@ -217,7 +232,13 @@ export async function sendSlackMessage(
|
|||||||
logger: Logger
|
logger: Logger
|
||||||
): Promise<{
|
): Promise<{
|
||||||
success: boolean
|
success: boolean
|
||||||
output?: { message: any; ts: string; channel: string; fileCount?: number }
|
output?: {
|
||||||
|
message: any
|
||||||
|
ts: string
|
||||||
|
channel: string
|
||||||
|
fileCount?: number
|
||||||
|
files?: ToolFileData[]
|
||||||
|
}
|
||||||
error?: string
|
error?: string
|
||||||
}> {
|
}> {
|
||||||
const { accessToken, text, threadTs, files } = params
|
const { accessToken, text, threadTs, files } = params
|
||||||
@@ -249,10 +270,15 @@ export async function sendSlackMessage(
|
|||||||
|
|
||||||
// Process files
|
// Process files
|
||||||
logger.info(`[${requestId}] Processing ${files.length} file(s)`)
|
logger.info(`[${requestId}] Processing ${files.length} file(s)`)
|
||||||
const uploadedFileIds = await uploadFilesToSlack(files, accessToken, requestId, logger)
|
const { fileIds, files: uploadedFiles } = await uploadFilesToSlack(
|
||||||
|
files,
|
||||||
|
accessToken,
|
||||||
|
requestId,
|
||||||
|
logger
|
||||||
|
)
|
||||||
|
|
||||||
// No valid files uploaded - send text-only
|
// No valid files uploaded - send text-only
|
||||||
if (uploadedFileIds.length === 0) {
|
if (fileIds.length === 0) {
|
||||||
logger.warn(`[${requestId}] No valid files to upload, sending text-only message`)
|
logger.warn(`[${requestId}] No valid files to upload, sending text-only message`)
|
||||||
|
|
||||||
const data = await postSlackMessage(accessToken, channel, text, threadTs)
|
const data = await postSlackMessage(accessToken, channel, text, threadTs)
|
||||||
@@ -264,8 +290,8 @@ export async function sendSlackMessage(
|
|||||||
return { success: true, output: formatMessageSuccessResponse(data, text) }
|
return { success: true, output: formatMessageSuccessResponse(data, text) }
|
||||||
}
|
}
|
||||||
|
|
||||||
// Complete file upload
|
// Complete file upload with thread support
|
||||||
const completeData = await completeSlackFileUpload(uploadedFileIds, channel, text, accessToken)
|
const completeData = await completeSlackFileUpload(fileIds, channel, text, accessToken, threadTs)
|
||||||
|
|
||||||
if (!completeData.ok) {
|
if (!completeData.ok) {
|
||||||
logger.error(`[${requestId}] Failed to complete upload:`, completeData.error)
|
logger.error(`[${requestId}] Failed to complete upload:`, completeData.error)
|
||||||
@@ -282,7 +308,8 @@ export async function sendSlackMessage(
|
|||||||
message: fileMessage,
|
message: fileMessage,
|
||||||
ts: fileMessage.ts,
|
ts: fileMessage.ts,
|
||||||
channel,
|
channel,
|
||||||
fileCount: uploadedFileIds.length,
|
fileCount: fileIds.length,
|
||||||
|
files: uploadedFiles,
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -4,6 +4,7 @@ import nodemailer from 'nodemailer'
|
|||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
|
import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
|
||||||
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
|
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
|
||||||
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
||||||
|
|
||||||
@@ -28,7 +29,7 @@ const SmtpSendSchema = z.object({
|
|||||||
cc: z.string().optional().nullable(),
|
cc: z.string().optional().nullable(),
|
||||||
bcc: z.string().optional().nullable(),
|
bcc: z.string().optional().nullable(),
|
||||||
replyTo: z.string().optional().nullable(),
|
replyTo: z.string().optional().nullable(),
|
||||||
attachments: z.array(z.any()).optional().nullable(),
|
attachments: RawFileInputArraySchema.optional().nullable(),
|
||||||
})
|
})
|
||||||
|
|
||||||
export async function POST(request: NextRequest) {
|
export async function POST(request: NextRequest) {
|
||||||
|
|||||||
@@ -5,6 +5,7 @@ import { type NextRequest, NextResponse } from 'next/server'
|
|||||||
import type { Client, SFTPWrapper } from 'ssh2'
|
import type { Client, SFTPWrapper } from 'ssh2'
|
||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
|
import { getFileExtension, getMimeTypeFromExtension } from '@/lib/uploads/utils/file-utils'
|
||||||
import { createSSHConnection, sanitizePath } from '@/app/api/tools/ssh/utils'
|
import { createSSHConnection, sanitizePath } from '@/app/api/tools/ssh/utils'
|
||||||
|
|
||||||
const logger = createLogger('SSHDownloadFileAPI')
|
const logger = createLogger('SSHDownloadFileAPI')
|
||||||
@@ -79,6 +80,16 @@ export async function POST(request: NextRequest) {
|
|||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
|
// Check file size limit (50MB to prevent memory exhaustion)
|
||||||
|
const maxSize = 50 * 1024 * 1024
|
||||||
|
if (stats.size > maxSize) {
|
||||||
|
const sizeMB = (stats.size / (1024 * 1024)).toFixed(2)
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: `File size (${sizeMB}MB) exceeds download limit of 50MB` },
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
// Read file content
|
// Read file content
|
||||||
const content = await new Promise<Buffer>((resolve, reject) => {
|
const content = await new Promise<Buffer>((resolve, reject) => {
|
||||||
const chunks: Buffer[] = []
|
const chunks: Buffer[] = []
|
||||||
@@ -96,6 +107,8 @@ export async function POST(request: NextRequest) {
|
|||||||
})
|
})
|
||||||
|
|
||||||
const fileName = path.basename(remotePath)
|
const fileName = path.basename(remotePath)
|
||||||
|
const extension = getFileExtension(fileName)
|
||||||
|
const mimeType = getMimeTypeFromExtension(extension)
|
||||||
|
|
||||||
// Encode content as base64 for binary safety
|
// Encode content as base64 for binary safety
|
||||||
const base64Content = content.toString('base64')
|
const base64Content = content.toString('base64')
|
||||||
@@ -104,6 +117,12 @@ export async function POST(request: NextRequest) {
|
|||||||
|
|
||||||
return NextResponse.json({
|
return NextResponse.json({
|
||||||
downloaded: true,
|
downloaded: true,
|
||||||
|
file: {
|
||||||
|
name: fileName,
|
||||||
|
mimeType,
|
||||||
|
data: base64Content,
|
||||||
|
size: stats.size,
|
||||||
|
},
|
||||||
content: base64Content,
|
content: base64Content,
|
||||||
fileName: fileName,
|
fileName: fileName,
|
||||||
remotePath: remotePath,
|
remotePath: remotePath,
|
||||||
|
|||||||
@@ -3,6 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
|
|||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
import { env } from '@/lib/core/config/env'
|
import { env } from '@/lib/core/config/env'
|
||||||
|
import { validateUrlWithDNS } from '@/lib/core/security/input-validation.server'
|
||||||
import { isSensitiveKey, REDACTED_MARKER } from '@/lib/core/security/redaction'
|
import { isSensitiveKey, REDACTED_MARKER } from '@/lib/core/security/redaction'
|
||||||
import { ensureZodObject, normalizeUrl } from '@/app/api/tools/stagehand/utils'
|
import { ensureZodObject, normalizeUrl } from '@/app/api/tools/stagehand/utils'
|
||||||
|
|
||||||
@@ -123,6 +124,10 @@ export async function POST(request: NextRequest) {
|
|||||||
const variablesObject = processVariables(params.variables)
|
const variablesObject = processVariables(params.variables)
|
||||||
|
|
||||||
const startUrl = normalizeUrl(rawStartUrl)
|
const startUrl = normalizeUrl(rawStartUrl)
|
||||||
|
const urlValidation = await validateUrlWithDNS(startUrl, 'startUrl')
|
||||||
|
if (!urlValidation.isValid) {
|
||||||
|
return NextResponse.json({ error: urlValidation.error }, { status: 400 })
|
||||||
|
}
|
||||||
|
|
||||||
logger.info('Starting Stagehand agent process', {
|
logger.info('Starting Stagehand agent process', {
|
||||||
rawStartUrl,
|
rawStartUrl,
|
||||||
|
|||||||
@@ -3,6 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
|
|||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
import { env } from '@/lib/core/config/env'
|
import { env } from '@/lib/core/config/env'
|
||||||
|
import { validateUrlWithDNS } from '@/lib/core/security/input-validation.server'
|
||||||
import { ensureZodObject, normalizeUrl } from '@/app/api/tools/stagehand/utils'
|
import { ensureZodObject, normalizeUrl } from '@/app/api/tools/stagehand/utils'
|
||||||
|
|
||||||
const logger = createLogger('StagehandExtractAPI')
|
const logger = createLogger('StagehandExtractAPI')
|
||||||
@@ -51,6 +52,10 @@ export async function POST(request: NextRequest) {
|
|||||||
const params = validationResult.data
|
const params = validationResult.data
|
||||||
const { url: rawUrl, instruction, selector, provider, apiKey, schema } = params
|
const { url: rawUrl, instruction, selector, provider, apiKey, schema } = params
|
||||||
const url = normalizeUrl(rawUrl)
|
const url = normalizeUrl(rawUrl)
|
||||||
|
const urlValidation = await validateUrlWithDNS(url, 'url')
|
||||||
|
if (!urlValidation.isValid) {
|
||||||
|
return NextResponse.json({ error: urlValidation.error }, { status: 400 })
|
||||||
|
}
|
||||||
|
|
||||||
logger.info('Starting Stagehand extraction process', {
|
logger.info('Starting Stagehand extraction process', {
|
||||||
rawUrl,
|
rawUrl,
|
||||||
|
|||||||
@@ -2,7 +2,15 @@ import { createLogger } from '@sim/logger'
|
|||||||
import { type NextRequest, NextResponse } from 'next/server'
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
import { extractAudioFromVideo, isVideoFile } from '@/lib/audio/extractor'
|
import { extractAudioFromVideo, isVideoFile } from '@/lib/audio/extractor'
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
import {
|
||||||
|
secureFetchWithPinnedIP,
|
||||||
|
validateUrlWithDNS,
|
||||||
|
} from '@/lib/core/security/input-validation.server'
|
||||||
|
import { isInternalFileUrl } from '@/lib/uploads/utils/file-utils'
|
||||||
|
import {
|
||||||
|
downloadFileFromStorage,
|
||||||
|
resolveInternalFileUrl,
|
||||||
|
} from '@/lib/uploads/utils/file-utils.server'
|
||||||
import type { UserFile } from '@/executor/types'
|
import type { UserFile } from '@/executor/types'
|
||||||
import type { TranscriptSegment } from '@/tools/stt/types'
|
import type { TranscriptSegment } from '@/tools/stt/types'
|
||||||
|
|
||||||
@@ -45,6 +53,7 @@ export async function POST(request: NextRequest) {
|
|||||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const userId = authResult.userId
|
||||||
const body: SttRequestBody = await request.json()
|
const body: SttRequestBody = await request.json()
|
||||||
const {
|
const {
|
||||||
provider,
|
provider,
|
||||||
@@ -72,6 +81,9 @@ export async function POST(request: NextRequest) {
|
|||||||
let audioMimeType: string
|
let audioMimeType: string
|
||||||
|
|
||||||
if (body.audioFile) {
|
if (body.audioFile) {
|
||||||
|
if (Array.isArray(body.audioFile) && body.audioFile.length !== 1) {
|
||||||
|
return NextResponse.json({ error: 'audioFile must be a single file' }, { status: 400 })
|
||||||
|
}
|
||||||
const file = Array.isArray(body.audioFile) ? body.audioFile[0] : body.audioFile
|
const file = Array.isArray(body.audioFile) ? body.audioFile[0] : body.audioFile
|
||||||
logger.info(`[${requestId}] Processing uploaded file: ${file.name}`)
|
logger.info(`[${requestId}] Processing uploaded file: ${file.name}`)
|
||||||
|
|
||||||
@@ -79,6 +91,12 @@ export async function POST(request: NextRequest) {
|
|||||||
audioFileName = file.name
|
audioFileName = file.name
|
||||||
audioMimeType = file.type
|
audioMimeType = file.type
|
||||||
} else if (body.audioFileReference) {
|
} else if (body.audioFileReference) {
|
||||||
|
if (Array.isArray(body.audioFileReference) && body.audioFileReference.length !== 1) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: 'audioFileReference must be a single file' },
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
const file = Array.isArray(body.audioFileReference)
|
const file = Array.isArray(body.audioFileReference)
|
||||||
? body.audioFileReference[0]
|
? body.audioFileReference[0]
|
||||||
: body.audioFileReference
|
: body.audioFileReference
|
||||||
@@ -90,14 +108,48 @@ export async function POST(request: NextRequest) {
|
|||||||
} else if (body.audioUrl) {
|
} else if (body.audioUrl) {
|
||||||
logger.info(`[${requestId}] Downloading from URL: ${body.audioUrl}`)
|
logger.info(`[${requestId}] Downloading from URL: ${body.audioUrl}`)
|
||||||
|
|
||||||
const response = await fetch(body.audioUrl)
|
let audioUrl = body.audioUrl.trim()
|
||||||
|
if (audioUrl.startsWith('/') && !isInternalFileUrl(audioUrl)) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
error: 'Invalid file path. Only uploaded files are supported for internal paths.',
|
||||||
|
},
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (isInternalFileUrl(audioUrl)) {
|
||||||
|
if (!userId) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: 'Authentication required for internal file access' },
|
||||||
|
{ status: 401 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
const resolution = await resolveInternalFileUrl(audioUrl, userId, requestId, logger)
|
||||||
|
if (resolution.error) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: resolution.error.message },
|
||||||
|
{ status: resolution.error.status }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
audioUrl = resolution.fileUrl || audioUrl
|
||||||
|
}
|
||||||
|
|
||||||
|
const urlValidation = await validateUrlWithDNS(audioUrl, 'audioUrl')
|
||||||
|
if (!urlValidation.isValid) {
|
||||||
|
return NextResponse.json({ error: urlValidation.error }, { status: 400 })
|
||||||
|
}
|
||||||
|
|
||||||
|
const response = await secureFetchWithPinnedIP(audioUrl, urlValidation.resolvedIP!, {
|
||||||
|
method: 'GET',
|
||||||
|
})
|
||||||
if (!response.ok) {
|
if (!response.ok) {
|
||||||
throw new Error(`Failed to download audio from URL: ${response.statusText}`)
|
throw new Error(`Failed to download audio from URL: ${response.statusText}`)
|
||||||
}
|
}
|
||||||
|
|
||||||
const arrayBuffer = await response.arrayBuffer()
|
const arrayBuffer = await response.arrayBuffer()
|
||||||
audioBuffer = Buffer.from(arrayBuffer)
|
audioBuffer = Buffer.from(arrayBuffer)
|
||||||
audioFileName = body.audioUrl.split('/').pop() || 'audio_file'
|
audioFileName = audioUrl.split('/').pop() || 'audio_file'
|
||||||
audioMimeType = response.headers.get('content-type') || 'audio/mpeg'
|
audioMimeType = response.headers.get('content-type') || 'audio/mpeg'
|
||||||
} else {
|
} else {
|
||||||
return NextResponse.json(
|
return NextResponse.json(
|
||||||
@@ -149,7 +201,9 @@ export async function POST(request: NextRequest) {
|
|||||||
translateToEnglish,
|
translateToEnglish,
|
||||||
model,
|
model,
|
||||||
body.prompt,
|
body.prompt,
|
||||||
body.temperature
|
body.temperature,
|
||||||
|
audioMimeType,
|
||||||
|
audioFileName
|
||||||
)
|
)
|
||||||
transcript = result.transcript
|
transcript = result.transcript
|
||||||
segments = result.segments
|
segments = result.segments
|
||||||
@@ -162,7 +216,8 @@ export async function POST(request: NextRequest) {
|
|||||||
language,
|
language,
|
||||||
timestamps,
|
timestamps,
|
||||||
diarization,
|
diarization,
|
||||||
model
|
model,
|
||||||
|
audioMimeType
|
||||||
)
|
)
|
||||||
transcript = result.transcript
|
transcript = result.transcript
|
||||||
segments = result.segments
|
segments = result.segments
|
||||||
@@ -252,7 +307,9 @@ async function transcribeWithWhisper(
|
|||||||
translate?: boolean,
|
translate?: boolean,
|
||||||
model?: string,
|
model?: string,
|
||||||
prompt?: string,
|
prompt?: string,
|
||||||
temperature?: number
|
temperature?: number,
|
||||||
|
mimeType?: string,
|
||||||
|
fileName?: string
|
||||||
): Promise<{
|
): Promise<{
|
||||||
transcript: string
|
transcript: string
|
||||||
segments?: TranscriptSegment[]
|
segments?: TranscriptSegment[]
|
||||||
@@ -261,8 +318,11 @@ async function transcribeWithWhisper(
|
|||||||
}> {
|
}> {
|
||||||
const formData = new FormData()
|
const formData = new FormData()
|
||||||
|
|
||||||
const blob = new Blob([new Uint8Array(audioBuffer)], { type: 'audio/mpeg' })
|
// Use actual MIME type and filename if provided
|
||||||
formData.append('file', blob, 'audio.mp3')
|
const actualMimeType = mimeType || 'audio/mpeg'
|
||||||
|
const actualFileName = fileName || 'audio.mp3'
|
||||||
|
const blob = new Blob([new Uint8Array(audioBuffer)], { type: actualMimeType })
|
||||||
|
formData.append('file', blob, actualFileName)
|
||||||
formData.append('model', model || 'whisper-1')
|
formData.append('model', model || 'whisper-1')
|
||||||
|
|
||||||
if (language && language !== 'auto') {
|
if (language && language !== 'auto') {
|
||||||
@@ -279,10 +339,11 @@ async function transcribeWithWhisper(
|
|||||||
|
|
||||||
formData.append('response_format', 'verbose_json')
|
formData.append('response_format', 'verbose_json')
|
||||||
|
|
||||||
|
// OpenAI API uses array notation for timestamp_granularities
|
||||||
if (timestamps === 'word') {
|
if (timestamps === 'word') {
|
||||||
formData.append('timestamp_granularities', 'word')
|
formData.append('timestamp_granularities[]', 'word')
|
||||||
} else if (timestamps === 'sentence') {
|
} else if (timestamps === 'sentence') {
|
||||||
formData.append('timestamp_granularities', 'segment')
|
formData.append('timestamp_granularities[]', 'segment')
|
||||||
}
|
}
|
||||||
|
|
||||||
const endpoint = translate ? 'translations' : 'transcriptions'
|
const endpoint = translate ? 'translations' : 'transcriptions'
|
||||||
@@ -325,7 +386,8 @@ async function transcribeWithDeepgram(
|
|||||||
language?: string,
|
language?: string,
|
||||||
timestamps?: 'none' | 'sentence' | 'word',
|
timestamps?: 'none' | 'sentence' | 'word',
|
||||||
diarization?: boolean,
|
diarization?: boolean,
|
||||||
model?: string
|
model?: string,
|
||||||
|
mimeType?: string
|
||||||
): Promise<{
|
): Promise<{
|
||||||
transcript: string
|
transcript: string
|
||||||
segments?: TranscriptSegment[]
|
segments?: TranscriptSegment[]
|
||||||
@@ -357,7 +419,7 @@ async function transcribeWithDeepgram(
|
|||||||
method: 'POST',
|
method: 'POST',
|
||||||
headers: {
|
headers: {
|
||||||
Authorization: `Token ${apiKey}`,
|
Authorization: `Token ${apiKey}`,
|
||||||
'Content-Type': 'audio/mpeg',
|
'Content-Type': mimeType || 'audio/mpeg',
|
||||||
},
|
},
|
||||||
body: new Uint8Array(audioBuffer),
|
body: new Uint8Array(audioBuffer),
|
||||||
})
|
})
|
||||||
@@ -513,7 +575,8 @@ async function transcribeWithAssemblyAI(
|
|||||||
audio_url: upload_url,
|
audio_url: upload_url,
|
||||||
}
|
}
|
||||||
|
|
||||||
if (model === 'best' || model === 'nano') {
|
// AssemblyAI supports 'best', 'slam-1', or 'universal' for speech_model
|
||||||
|
if (model === 'best' || model === 'slam-1' || model === 'universal') {
|
||||||
transcriptRequest.speech_model = model
|
transcriptRequest.speech_model = model
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -3,6 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
|
|||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
|
import { FileInputSchema } from '@/lib/uploads/utils/file-schemas'
|
||||||
import { processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
|
import { processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
|
||||||
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
||||||
|
|
||||||
@@ -16,7 +17,7 @@ const SupabaseStorageUploadSchema = z.object({
|
|||||||
bucket: z.string().min(1, 'Bucket name is required'),
|
bucket: z.string().min(1, 'Bucket name is required'),
|
||||||
fileName: z.string().min(1, 'File name is required'),
|
fileName: z.string().min(1, 'File name is required'),
|
||||||
path: z.string().optional().nullable(),
|
path: z.string().optional().nullable(),
|
||||||
fileData: z.any(),
|
fileData: FileInputSchema,
|
||||||
contentType: z.string().optional().nullable(),
|
contentType: z.string().optional().nullable(),
|
||||||
upsert: z.boolean().optional().default(false),
|
upsert: z.boolean().optional().default(false),
|
||||||
})
|
})
|
||||||
|
|||||||
@@ -3,6 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
|
|||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
|
import { RawFileInputArraySchema } from '@/lib/uploads/utils/file-schemas'
|
||||||
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
|
import { processFilesToUserFiles } from '@/lib/uploads/utils/file-utils'
|
||||||
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
||||||
import { convertMarkdownToHTML } from '@/tools/telegram/utils'
|
import { convertMarkdownToHTML } from '@/tools/telegram/utils'
|
||||||
@@ -14,7 +15,7 @@ const logger = createLogger('TelegramSendDocumentAPI')
|
|||||||
const TelegramSendDocumentSchema = z.object({
|
const TelegramSendDocumentSchema = z.object({
|
||||||
botToken: z.string().min(1, 'Bot token is required'),
|
botToken: z.string().min(1, 'Bot token is required'),
|
||||||
chatId: z.string().min(1, 'Chat ID is required'),
|
chatId: z.string().min(1, 'Chat ID is required'),
|
||||||
files: z.array(z.any()).optional().nullable(),
|
files: RawFileInputArraySchema.optional().nullable(),
|
||||||
caption: z.string().optional().nullable(),
|
caption: z.string().optional().nullable(),
|
||||||
})
|
})
|
||||||
|
|
||||||
@@ -93,6 +94,14 @@ export async function POST(request: NextRequest) {
|
|||||||
logger.info(`[${requestId}] Uploading document: ${userFile.name}`)
|
logger.info(`[${requestId}] Uploading document: ${userFile.name}`)
|
||||||
|
|
||||||
const buffer = await downloadFileFromStorage(userFile, requestId, logger)
|
const buffer = await downloadFileFromStorage(userFile, requestId, logger)
|
||||||
|
const filesOutput = [
|
||||||
|
{
|
||||||
|
name: userFile.name,
|
||||||
|
mimeType: userFile.type || 'application/octet-stream',
|
||||||
|
data: buffer.toString('base64'),
|
||||||
|
size: buffer.length,
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
logger.info(`[${requestId}] Downloaded file: ${buffer.length} bytes`)
|
logger.info(`[${requestId}] Downloaded file: ${buffer.length} bytes`)
|
||||||
|
|
||||||
@@ -135,6 +144,7 @@ export async function POST(request: NextRequest) {
|
|||||||
output: {
|
output: {
|
||||||
message: 'Document sent successfully',
|
message: 'Document sent successfully',
|
||||||
data: data.result,
|
data: data.result,
|
||||||
|
files: filesOutput,
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
|
|||||||
@@ -3,19 +3,18 @@ import { createLogger } from '@sim/logger'
|
|||||||
import { type NextRequest, NextResponse } from 'next/server'
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
|
import { validateAwsRegion, validateS3BucketName } from '@/lib/core/security/input-validation'
|
||||||
import {
|
import {
|
||||||
validateAwsRegion,
|
secureFetchWithPinnedIP,
|
||||||
validateExternalUrl,
|
validateUrlWithDNS,
|
||||||
validateS3BucketName,
|
} from '@/lib/core/security/input-validation.server'
|
||||||
} from '@/lib/core/security/input-validation'
|
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
import { StorageService } from '@/lib/uploads'
|
import { RawFileInputSchema } from '@/lib/uploads/utils/file-schemas'
|
||||||
|
import { isInternalFileUrl, processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
|
||||||
import {
|
import {
|
||||||
extractStorageKey,
|
downloadFileFromStorage,
|
||||||
inferContextFromKey,
|
resolveInternalFileUrl,
|
||||||
isInternalFileUrl,
|
} from '@/lib/uploads/utils/file-utils.server'
|
||||||
} from '@/lib/uploads/utils/file-utils'
|
|
||||||
import { verifyFileAccess } from '@/app/api/files/authorization'
|
|
||||||
|
|
||||||
export const dynamic = 'force-dynamic'
|
export const dynamic = 'force-dynamic'
|
||||||
export const maxDuration = 300 // 5 minutes for large multi-page PDF processing
|
export const maxDuration = 300 // 5 minutes for large multi-page PDF processing
|
||||||
@@ -35,6 +34,7 @@ const TextractParseSchema = z
|
|||||||
region: z.string().min(1, 'AWS region is required'),
|
region: z.string().min(1, 'AWS region is required'),
|
||||||
processingMode: z.enum(['sync', 'async']).optional().default('sync'),
|
processingMode: z.enum(['sync', 'async']).optional().default('sync'),
|
||||||
filePath: z.string().optional(),
|
filePath: z.string().optional(),
|
||||||
|
file: RawFileInputSchema.optional(),
|
||||||
s3Uri: z.string().optional(),
|
s3Uri: z.string().optional(),
|
||||||
featureTypes: z
|
featureTypes: z
|
||||||
.array(z.enum(['TABLES', 'FORMS', 'QUERIES', 'SIGNATURES', 'LAYOUT']))
|
.array(z.enum(['TABLES', 'FORMS', 'QUERIES', 'SIGNATURES', 'LAYOUT']))
|
||||||
@@ -50,6 +50,20 @@ const TextractParseSchema = z
|
|||||||
path: ['region'],
|
path: ['region'],
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
if (data.processingMode === 'async' && !data.s3Uri) {
|
||||||
|
ctx.addIssue({
|
||||||
|
code: z.ZodIssueCode.custom,
|
||||||
|
message: 'S3 URI is required for multi-page processing (s3://bucket/key)',
|
||||||
|
path: ['s3Uri'],
|
||||||
|
})
|
||||||
|
}
|
||||||
|
if (data.processingMode !== 'async' && !data.file && !data.filePath) {
|
||||||
|
ctx.addIssue({
|
||||||
|
code: z.ZodIssueCode.custom,
|
||||||
|
message: 'File input is required for single-page processing',
|
||||||
|
path: ['filePath'],
|
||||||
|
})
|
||||||
|
}
|
||||||
})
|
})
|
||||||
|
|
||||||
function getSignatureKey(
|
function getSignatureKey(
|
||||||
@@ -111,7 +125,14 @@ function signAwsRequest(
|
|||||||
}
|
}
|
||||||
|
|
||||||
async function fetchDocumentBytes(url: string): Promise<{ bytes: string; contentType: string }> {
|
async function fetchDocumentBytes(url: string): Promise<{ bytes: string; contentType: string }> {
|
||||||
const response = await fetch(url)
|
const urlValidation = await validateUrlWithDNS(url, 'Document URL')
|
||||||
|
if (!urlValidation.isValid) {
|
||||||
|
throw new Error(urlValidation.error || 'Invalid document URL')
|
||||||
|
}
|
||||||
|
|
||||||
|
const response = await secureFetchWithPinnedIP(url, urlValidation.resolvedIP!, {
|
||||||
|
method: 'GET',
|
||||||
|
})
|
||||||
if (!response.ok) {
|
if (!response.ok) {
|
||||||
throw new Error(`Failed to fetch document: ${response.statusText}`)
|
throw new Error(`Failed to fetch document: ${response.statusText}`)
|
||||||
}
|
}
|
||||||
@@ -318,8 +339,8 @@ export async function POST(request: NextRequest) {
|
|||||||
|
|
||||||
logger.info(`[${requestId}] Textract parse request`, {
|
logger.info(`[${requestId}] Textract parse request`, {
|
||||||
processingMode,
|
processingMode,
|
||||||
filePath: validatedData.filePath?.substring(0, 50),
|
hasFile: Boolean(validatedData.file),
|
||||||
s3Uri: validatedData.s3Uri?.substring(0, 50),
|
hasS3Uri: Boolean(validatedData.s3Uri),
|
||||||
featureTypes,
|
featureTypes,
|
||||||
userId,
|
userId,
|
||||||
})
|
})
|
||||||
@@ -414,90 +435,89 @@ export async function POST(request: NextRequest) {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!validatedData.filePath) {
|
let bytes = ''
|
||||||
return NextResponse.json(
|
let contentType = 'application/octet-stream'
|
||||||
{
|
let isPdf = false
|
||||||
success: false,
|
|
||||||
error: 'File path is required for single-page processing',
|
|
||||||
},
|
|
||||||
{ status: 400 }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
let fileUrl = validatedData.filePath
|
if (validatedData.file) {
|
||||||
|
let userFile
|
||||||
const isInternalFilePath = validatedData.filePath && isInternalFileUrl(validatedData.filePath)
|
|
||||||
|
|
||||||
if (isInternalFilePath) {
|
|
||||||
try {
|
try {
|
||||||
const storageKey = extractStorageKey(validatedData.filePath)
|
userFile = processSingleFileToUserFile(validatedData.file, requestId, logger)
|
||||||
const context = inferContextFromKey(storageKey)
|
|
||||||
|
|
||||||
const hasAccess = await verifyFileAccess(storageKey, userId, undefined, context, false)
|
|
||||||
|
|
||||||
if (!hasAccess) {
|
|
||||||
logger.warn(`[${requestId}] Unauthorized presigned URL generation attempt`, {
|
|
||||||
userId,
|
|
||||||
key: storageKey,
|
|
||||||
context,
|
|
||||||
})
|
|
||||||
return NextResponse.json(
|
|
||||||
{
|
|
||||||
success: false,
|
|
||||||
error: 'File not found',
|
|
||||||
},
|
|
||||||
{ status: 404 }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
fileUrl = await StorageService.generatePresignedDownloadUrl(storageKey, context, 5 * 60)
|
|
||||||
logger.info(`[${requestId}] Generated presigned URL for ${context} file`)
|
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error(`[${requestId}] Failed to generate presigned URL:`, error)
|
|
||||||
return NextResponse.json(
|
return NextResponse.json(
|
||||||
{
|
{
|
||||||
success: false,
|
success: false,
|
||||||
error: 'Failed to generate file access URL',
|
error: error instanceof Error ? error.message : 'Failed to process file',
|
||||||
},
|
|
||||||
{ status: 500 }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
} else if (validatedData.filePath?.startsWith('/')) {
|
|
||||||
// Reject arbitrary absolute paths that don't contain /api/files/serve/
|
|
||||||
logger.warn(`[${requestId}] Invalid internal path`, {
|
|
||||||
userId,
|
|
||||||
path: validatedData.filePath.substring(0, 50),
|
|
||||||
})
|
|
||||||
return NextResponse.json(
|
|
||||||
{
|
|
||||||
success: false,
|
|
||||||
error: 'Invalid file path. Only uploaded files are supported for internal paths.',
|
|
||||||
},
|
|
||||||
{ status: 400 }
|
|
||||||
)
|
|
||||||
} else {
|
|
||||||
const urlValidation = validateExternalUrl(fileUrl, 'Document URL')
|
|
||||||
if (!urlValidation.isValid) {
|
|
||||||
logger.warn(`[${requestId}] SSRF attempt blocked`, {
|
|
||||||
userId,
|
|
||||||
url: fileUrl.substring(0, 100),
|
|
||||||
error: urlValidation.error,
|
|
||||||
})
|
|
||||||
return NextResponse.json(
|
|
||||||
{
|
|
||||||
success: false,
|
|
||||||
error: urlValidation.error,
|
|
||||||
},
|
},
|
||||||
{ status: 400 }
|
{ status: 400 }
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const buffer = await downloadFileFromStorage(userFile, requestId, logger)
|
||||||
|
bytes = buffer.toString('base64')
|
||||||
|
contentType = userFile.type || 'application/octet-stream'
|
||||||
|
isPdf = contentType.includes('pdf') || userFile.name?.toLowerCase().endsWith('.pdf')
|
||||||
|
} else if (validatedData.filePath) {
|
||||||
|
let fileUrl = validatedData.filePath
|
||||||
|
|
||||||
|
const isInternalFilePath = isInternalFileUrl(fileUrl)
|
||||||
|
|
||||||
|
if (isInternalFilePath) {
|
||||||
|
const resolution = await resolveInternalFileUrl(fileUrl, userId, requestId, logger)
|
||||||
|
if (resolution.error) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: resolution.error.message,
|
||||||
|
},
|
||||||
|
{ status: resolution.error.status }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
fileUrl = resolution.fileUrl || fileUrl
|
||||||
|
} else if (fileUrl.startsWith('/')) {
|
||||||
|
logger.warn(`[${requestId}] Invalid internal path`, {
|
||||||
|
userId,
|
||||||
|
path: fileUrl.substring(0, 50),
|
||||||
|
})
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: 'Invalid file path. Only uploaded files are supported for internal paths.',
|
||||||
|
},
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
} else {
|
||||||
|
const urlValidation = await validateUrlWithDNS(fileUrl, 'Document URL')
|
||||||
|
if (!urlValidation.isValid) {
|
||||||
|
logger.warn(`[${requestId}] SSRF attempt blocked`, {
|
||||||
|
userId,
|
||||||
|
url: fileUrl.substring(0, 100),
|
||||||
|
error: urlValidation.error,
|
||||||
|
})
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: urlValidation.error,
|
||||||
|
},
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const fetched = await fetchDocumentBytes(fileUrl)
|
||||||
|
bytes = fetched.bytes
|
||||||
|
contentType = fetched.contentType
|
||||||
|
isPdf = contentType.includes('pdf') || fileUrl.toLowerCase().endsWith('.pdf')
|
||||||
|
} else {
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: 'File input is required for single-page processing',
|
||||||
|
},
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
const { bytes, contentType } = await fetchDocumentBytes(fileUrl)
|
|
||||||
|
|
||||||
// Track if this is a PDF for better error messaging
|
|
||||||
const isPdf = contentType.includes('pdf') || fileUrl.toLowerCase().endsWith('.pdf')
|
|
||||||
|
|
||||||
const uri = '/'
|
const uri = '/'
|
||||||
|
|
||||||
let textractBody: Record<string, unknown>
|
let textractBody: Record<string, unknown>
|
||||||
|
|||||||
250
apps/sim/app/api/tools/twilio/get-recording/route.ts
Normal file
250
apps/sim/app/api/tools/twilio/get-recording/route.ts
Normal file
@@ -0,0 +1,250 @@
|
|||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
|
import { z } from 'zod'
|
||||||
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
|
import {
|
||||||
|
secureFetchWithPinnedIP,
|
||||||
|
validateUrlWithDNS,
|
||||||
|
} from '@/lib/core/security/input-validation.server'
|
||||||
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
|
import { getExtensionFromMimeType } from '@/lib/uploads/utils/file-utils'
|
||||||
|
|
||||||
|
export const dynamic = 'force-dynamic'
|
||||||
|
|
||||||
|
const logger = createLogger('TwilioGetRecordingAPI')
|
||||||
|
|
||||||
|
interface TwilioRecordingResponse {
|
||||||
|
sid?: string
|
||||||
|
call_sid?: string
|
||||||
|
duration?: string
|
||||||
|
status?: string
|
||||||
|
channels?: number
|
||||||
|
source?: string
|
||||||
|
price?: string
|
||||||
|
price_unit?: string
|
||||||
|
uri?: string
|
||||||
|
error_code?: number
|
||||||
|
message?: string
|
||||||
|
error_message?: string
|
||||||
|
}
|
||||||
|
|
||||||
|
interface TwilioErrorResponse {
|
||||||
|
message?: string
|
||||||
|
}
|
||||||
|
|
||||||
|
interface TwilioTranscription {
|
||||||
|
transcription_text?: string
|
||||||
|
status?: string
|
||||||
|
price?: string
|
||||||
|
price_unit?: string
|
||||||
|
}
|
||||||
|
|
||||||
|
interface TwilioTranscriptionsResponse {
|
||||||
|
transcriptions?: TwilioTranscription[]
|
||||||
|
}
|
||||||
|
|
||||||
|
const TwilioGetRecordingSchema = z.object({
|
||||||
|
accountSid: z.string().min(1, 'Account SID is required'),
|
||||||
|
authToken: z.string().min(1, 'Auth token is required'),
|
||||||
|
recordingSid: z.string().min(1, 'Recording SID is required'),
|
||||||
|
})
|
||||||
|
|
||||||
|
export async function POST(request: NextRequest) {
|
||||||
|
const requestId = generateRequestId()
|
||||||
|
|
||||||
|
try {
|
||||||
|
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
|
||||||
|
|
||||||
|
if (!authResult.success) {
|
||||||
|
logger.warn(`[${requestId}] Unauthorized Twilio get recording attempt: ${authResult.error}`)
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: authResult.error || 'Authentication required',
|
||||||
|
},
|
||||||
|
{ status: 401 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const body = await request.json()
|
||||||
|
const validatedData = TwilioGetRecordingSchema.parse(body)
|
||||||
|
|
||||||
|
const { accountSid, authToken, recordingSid } = validatedData
|
||||||
|
|
||||||
|
if (!accountSid.startsWith('AC')) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: `Invalid Account SID format. Account SID must start with "AC" (you provided: ${accountSid.substring(0, 2)}...)`,
|
||||||
|
},
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const twilioAuth = Buffer.from(`${accountSid}:${authToken}`).toString('base64')
|
||||||
|
|
||||||
|
logger.info(`[${requestId}] Getting recording info from Twilio`, { recordingSid })
|
||||||
|
|
||||||
|
const infoUrl = `https://api.twilio.com/2010-04-01/Accounts/${accountSid}/Recordings/${recordingSid}.json`
|
||||||
|
const infoUrlValidation = await validateUrlWithDNS(infoUrl, 'infoUrl')
|
||||||
|
if (!infoUrlValidation.isValid) {
|
||||||
|
return NextResponse.json({ success: false, error: infoUrlValidation.error }, { status: 400 })
|
||||||
|
}
|
||||||
|
|
||||||
|
const infoResponse = await secureFetchWithPinnedIP(infoUrl, infoUrlValidation.resolvedIP!, {
|
||||||
|
method: 'GET',
|
||||||
|
headers: { Authorization: `Basic ${twilioAuth}` },
|
||||||
|
})
|
||||||
|
|
||||||
|
if (!infoResponse.ok) {
|
||||||
|
const errorData = (await infoResponse.json().catch(() => ({}))) as TwilioErrorResponse
|
||||||
|
logger.error(`[${requestId}] Twilio API error`, {
|
||||||
|
status: infoResponse.status,
|
||||||
|
error: errorData,
|
||||||
|
})
|
||||||
|
return NextResponse.json(
|
||||||
|
{ success: false, error: errorData.message || `Twilio API error: ${infoResponse.status}` },
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const data = (await infoResponse.json()) as TwilioRecordingResponse
|
||||||
|
|
||||||
|
if (data.error_code) {
|
||||||
|
return NextResponse.json({
|
||||||
|
success: false,
|
||||||
|
output: {
|
||||||
|
success: false,
|
||||||
|
error: data.message || data.error_message || 'Failed to retrieve recording',
|
||||||
|
},
|
||||||
|
error: data.message || data.error_message || 'Failed to retrieve recording',
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
const baseUrl = 'https://api.twilio.com'
|
||||||
|
const mediaUrl = data.uri ? `${baseUrl}${data.uri.replace('.json', '')}` : undefined
|
||||||
|
|
||||||
|
let transcriptionText: string | undefined
|
||||||
|
let transcriptionStatus: string | undefined
|
||||||
|
let transcriptionPrice: string | undefined
|
||||||
|
let transcriptionPriceUnit: string | undefined
|
||||||
|
let file:
|
||||||
|
| {
|
||||||
|
name: string
|
||||||
|
mimeType: string
|
||||||
|
data: string
|
||||||
|
size: number
|
||||||
|
}
|
||||||
|
| undefined
|
||||||
|
|
||||||
|
try {
|
||||||
|
const transcriptionUrl = `https://api.twilio.com/2010-04-01/Accounts/${accountSid}/Transcriptions.json?RecordingSid=${data.sid}`
|
||||||
|
logger.info(`[${requestId}] Checking for transcriptions`)
|
||||||
|
|
||||||
|
const transcriptionUrlValidation = await validateUrlWithDNS(
|
||||||
|
transcriptionUrl,
|
||||||
|
'transcriptionUrl'
|
||||||
|
)
|
||||||
|
if (transcriptionUrlValidation.isValid) {
|
||||||
|
const transcriptionResponse = await secureFetchWithPinnedIP(
|
||||||
|
transcriptionUrl,
|
||||||
|
transcriptionUrlValidation.resolvedIP!,
|
||||||
|
{
|
||||||
|
method: 'GET',
|
||||||
|
headers: { Authorization: `Basic ${twilioAuth}` },
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
if (transcriptionResponse.ok) {
|
||||||
|
const transcriptionData =
|
||||||
|
(await transcriptionResponse.json()) as TwilioTranscriptionsResponse
|
||||||
|
|
||||||
|
if (transcriptionData.transcriptions && transcriptionData.transcriptions.length > 0) {
|
||||||
|
const transcription = transcriptionData.transcriptions[0]
|
||||||
|
transcriptionText = transcription.transcription_text
|
||||||
|
transcriptionStatus = transcription.status
|
||||||
|
transcriptionPrice = transcription.price
|
||||||
|
transcriptionPriceUnit = transcription.price_unit
|
||||||
|
logger.info(`[${requestId}] Transcription found`, {
|
||||||
|
status: transcriptionStatus,
|
||||||
|
textLength: transcriptionText?.length,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
logger.warn(`[${requestId}] Failed to fetch transcription:`, error)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (mediaUrl) {
|
||||||
|
try {
|
||||||
|
const mediaUrlValidation = await validateUrlWithDNS(mediaUrl, 'mediaUrl')
|
||||||
|
if (mediaUrlValidation.isValid) {
|
||||||
|
const mediaResponse = await secureFetchWithPinnedIP(
|
||||||
|
mediaUrl,
|
||||||
|
mediaUrlValidation.resolvedIP!,
|
||||||
|
{
|
||||||
|
method: 'GET',
|
||||||
|
headers: { Authorization: `Basic ${twilioAuth}` },
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
if (mediaResponse.ok) {
|
||||||
|
const contentType =
|
||||||
|
mediaResponse.headers.get('content-type') || 'application/octet-stream'
|
||||||
|
const extension = getExtensionFromMimeType(contentType) || 'dat'
|
||||||
|
const arrayBuffer = await mediaResponse.arrayBuffer()
|
||||||
|
const buffer = Buffer.from(arrayBuffer)
|
||||||
|
const fileName = `${data.sid || recordingSid}.${extension}`
|
||||||
|
|
||||||
|
file = {
|
||||||
|
name: fileName,
|
||||||
|
mimeType: contentType,
|
||||||
|
data: buffer.toString('base64'),
|
||||||
|
size: buffer.length,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
logger.warn(`[${requestId}] Failed to download recording media:`, error)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info(`[${requestId}] Twilio recording fetched successfully`, {
|
||||||
|
recordingSid: data.sid,
|
||||||
|
hasFile: !!file,
|
||||||
|
hasTranscription: !!transcriptionText,
|
||||||
|
})
|
||||||
|
|
||||||
|
return NextResponse.json({
|
||||||
|
success: true,
|
||||||
|
output: {
|
||||||
|
success: true,
|
||||||
|
recordingSid: data.sid,
|
||||||
|
callSid: data.call_sid,
|
||||||
|
duration: data.duration ? Number.parseInt(data.duration, 10) : undefined,
|
||||||
|
status: data.status,
|
||||||
|
channels: data.channels,
|
||||||
|
source: data.source,
|
||||||
|
mediaUrl,
|
||||||
|
file,
|
||||||
|
price: data.price,
|
||||||
|
priceUnit: data.price_unit,
|
||||||
|
uri: data.uri,
|
||||||
|
transcriptionText,
|
||||||
|
transcriptionStatus,
|
||||||
|
transcriptionPrice,
|
||||||
|
transcriptionPriceUnit,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(`[${requestId}] Error fetching Twilio recording:`, error)
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: error instanceof Error ? error.message : 'Unknown error occurred',
|
||||||
|
},
|
||||||
|
{ status: 500 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,10 +1,20 @@
|
|||||||
|
import { GoogleGenAI } from '@google/genai'
|
||||||
import { createLogger } from '@sim/logger'
|
import { createLogger } from '@sim/logger'
|
||||||
import { type NextRequest, NextResponse } from 'next/server'
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
|
import {
|
||||||
|
secureFetchWithPinnedIP,
|
||||||
|
validateUrlWithDNS,
|
||||||
|
} from '@/lib/core/security/input-validation.server'
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
import { processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
|
import { RawFileInputSchema } from '@/lib/uploads/utils/file-schemas'
|
||||||
import { downloadFileFromStorage } from '@/lib/uploads/utils/file-utils.server'
|
import { isInternalFileUrl, processSingleFileToUserFile } from '@/lib/uploads/utils/file-utils'
|
||||||
|
import {
|
||||||
|
downloadFileFromStorage,
|
||||||
|
resolveInternalFileUrl,
|
||||||
|
} from '@/lib/uploads/utils/file-utils.server'
|
||||||
|
import { convertUsageMetadata, extractTextContent } from '@/providers/google/utils'
|
||||||
|
|
||||||
export const dynamic = 'force-dynamic'
|
export const dynamic = 'force-dynamic'
|
||||||
|
|
||||||
@@ -13,8 +23,8 @@ const logger = createLogger('VisionAnalyzeAPI')
|
|||||||
const VisionAnalyzeSchema = z.object({
|
const VisionAnalyzeSchema = z.object({
|
||||||
apiKey: z.string().min(1, 'API key is required'),
|
apiKey: z.string().min(1, 'API key is required'),
|
||||||
imageUrl: z.string().optional().nullable(),
|
imageUrl: z.string().optional().nullable(),
|
||||||
imageFile: z.any().optional().nullable(),
|
imageFile: RawFileInputSchema.optional().nullable(),
|
||||||
model: z.string().optional().default('gpt-4o'),
|
model: z.string().optional().default('gpt-5.2'),
|
||||||
prompt: z.string().optional().nullable(),
|
prompt: z.string().optional().nullable(),
|
||||||
})
|
})
|
||||||
|
|
||||||
@@ -39,6 +49,7 @@ export async function POST(request: NextRequest) {
|
|||||||
userId: authResult.userId,
|
userId: authResult.userId,
|
||||||
})
|
})
|
||||||
|
|
||||||
|
const userId = authResult.userId
|
||||||
const body = await request.json()
|
const body = await request.json()
|
||||||
const validatedData = VisionAnalyzeSchema.parse(body)
|
const validatedData = VisionAnalyzeSchema.parse(body)
|
||||||
|
|
||||||
@@ -77,18 +88,72 @@ export async function POST(request: NextRequest) {
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
const buffer = await downloadFileFromStorage(userFile, requestId, logger)
|
let base64 = userFile.base64
|
||||||
|
let bufferLength = 0
|
||||||
const base64 = buffer.toString('base64')
|
if (!base64) {
|
||||||
|
const buffer = await downloadFileFromStorage(userFile, requestId, logger)
|
||||||
|
base64 = buffer.toString('base64')
|
||||||
|
bufferLength = buffer.length
|
||||||
|
}
|
||||||
const mimeType = userFile.type || 'image/jpeg'
|
const mimeType = userFile.type || 'image/jpeg'
|
||||||
imageSource = `data:${mimeType};base64,${base64}`
|
imageSource = `data:${mimeType};base64,${base64}`
|
||||||
logger.info(`[${requestId}] Converted image to base64 (${buffer.length} bytes)`)
|
if (bufferLength > 0) {
|
||||||
|
logger.info(`[${requestId}] Converted image to base64 (${bufferLength} bytes)`)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let imageUrlValidation: Awaited<ReturnType<typeof validateUrlWithDNS>> | null = null
|
||||||
|
if (imageSource && !imageSource.startsWith('data:')) {
|
||||||
|
if (imageSource.startsWith('/') && !isInternalFileUrl(imageSource)) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: 'Invalid file path. Only uploaded files are supported for internal paths.',
|
||||||
|
},
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (isInternalFileUrl(imageSource)) {
|
||||||
|
if (!userId) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: 'Authentication required for internal file access',
|
||||||
|
},
|
||||||
|
{ status: 401 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
const resolution = await resolveInternalFileUrl(imageSource, userId, requestId, logger)
|
||||||
|
if (resolution.error) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: resolution.error.message,
|
||||||
|
},
|
||||||
|
{ status: resolution.error.status }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
imageSource = resolution.fileUrl || imageSource
|
||||||
|
}
|
||||||
|
|
||||||
|
imageUrlValidation = await validateUrlWithDNS(imageSource, 'imageUrl')
|
||||||
|
if (!imageUrlValidation.isValid) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: imageUrlValidation.error,
|
||||||
|
},
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
const defaultPrompt = 'Please analyze this image and describe what you see in detail.'
|
const defaultPrompt = 'Please analyze this image and describe what you see in detail.'
|
||||||
const prompt = validatedData.prompt || defaultPrompt
|
const prompt = validatedData.prompt || defaultPrompt
|
||||||
|
|
||||||
const isClaude = validatedData.model.startsWith('claude-3')
|
const isClaude = validatedData.model.startsWith('claude-')
|
||||||
|
const isGemini = validatedData.model.startsWith('gemini-')
|
||||||
const apiUrl = isClaude
|
const apiUrl = isClaude
|
||||||
? 'https://api.anthropic.com/v1/messages'
|
? 'https://api.anthropic.com/v1/messages'
|
||||||
: 'https://api.openai.com/v1/chat/completions'
|
: 'https://api.openai.com/v1/chat/completions'
|
||||||
@@ -106,6 +171,72 @@ export async function POST(request: NextRequest) {
|
|||||||
|
|
||||||
let requestBody: any
|
let requestBody: any
|
||||||
|
|
||||||
|
if (isGemini) {
|
||||||
|
let base64Payload = imageSource
|
||||||
|
if (!base64Payload.startsWith('data:')) {
|
||||||
|
const urlValidation =
|
||||||
|
imageUrlValidation || (await validateUrlWithDNS(base64Payload, 'imageUrl'))
|
||||||
|
if (!urlValidation.isValid) {
|
||||||
|
return NextResponse.json({ success: false, error: urlValidation.error }, { status: 400 })
|
||||||
|
}
|
||||||
|
|
||||||
|
const response = await secureFetchWithPinnedIP(base64Payload, urlValidation.resolvedIP!, {
|
||||||
|
method: 'GET',
|
||||||
|
})
|
||||||
|
if (!response.ok) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{ success: false, error: 'Failed to fetch image for Gemini' },
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
const contentType =
|
||||||
|
response.headers.get('content-type') || validatedData.imageFile?.type || 'image/jpeg'
|
||||||
|
const arrayBuffer = await response.arrayBuffer()
|
||||||
|
const base64 = Buffer.from(arrayBuffer).toString('base64')
|
||||||
|
base64Payload = `data:${contentType};base64,${base64}`
|
||||||
|
}
|
||||||
|
const base64Marker = ';base64,'
|
||||||
|
const markerIndex = base64Payload.indexOf(base64Marker)
|
||||||
|
if (!base64Payload.startsWith('data:') || markerIndex === -1) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{ success: false, error: 'Invalid base64 image format' },
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
const rawMimeType = base64Payload.slice('data:'.length, markerIndex)
|
||||||
|
const mediaType = rawMimeType.split(';')[0] || 'image/jpeg'
|
||||||
|
const base64Data = base64Payload.slice(markerIndex + base64Marker.length)
|
||||||
|
if (!base64Data) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{ success: false, error: 'Invalid base64 image format' },
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const ai = new GoogleGenAI({ apiKey: validatedData.apiKey })
|
||||||
|
const geminiResponse = await ai.models.generateContent({
|
||||||
|
model: validatedData.model,
|
||||||
|
contents: [
|
||||||
|
{
|
||||||
|
role: 'user',
|
||||||
|
parts: [{ text: prompt }, { inlineData: { mimeType: mediaType, data: base64Data } }],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
})
|
||||||
|
|
||||||
|
const content = extractTextContent(geminiResponse.candidates?.[0])
|
||||||
|
const usage = convertUsageMetadata(geminiResponse.usageMetadata)
|
||||||
|
|
||||||
|
return NextResponse.json({
|
||||||
|
success: true,
|
||||||
|
output: {
|
||||||
|
content,
|
||||||
|
model: validatedData.model,
|
||||||
|
tokens: usage.totalTokenCount || undefined,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
if (isClaude) {
|
if (isClaude) {
|
||||||
if (imageSource.startsWith('data:')) {
|
if (imageSource.startsWith('data:')) {
|
||||||
const base64Match = imageSource.match(/^data:([^;]+);base64,(.+)$/)
|
const base64Match = imageSource.match(/^data:([^;]+);base64,(.+)$/)
|
||||||
@@ -172,7 +303,7 @@ export async function POST(request: NextRequest) {
|
|||||||
],
|
],
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
max_tokens: 1000,
|
max_completion_tokens: 1000,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -3,6 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
|
|||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
|
import { RawFileInputSchema } from '@/lib/uploads/utils/file-schemas'
|
||||||
import {
|
import {
|
||||||
getFileExtension,
|
getFileExtension,
|
||||||
getMimeTypeFromExtension,
|
getMimeTypeFromExtension,
|
||||||
@@ -19,7 +20,7 @@ const WORDPRESS_COM_API_BASE = 'https://public-api.wordpress.com/wp/v2/sites'
|
|||||||
const WordPressUploadSchema = z.object({
|
const WordPressUploadSchema = z.object({
|
||||||
accessToken: z.string().min(1, 'Access token is required'),
|
accessToken: z.string().min(1, 'Access token is required'),
|
||||||
siteId: z.string().min(1, 'Site ID is required'),
|
siteId: z.string().min(1, 'Site ID is required'),
|
||||||
file: z.any().optional().nullable(),
|
file: RawFileInputSchema.optional().nullable(),
|
||||||
filename: z.string().optional().nullable(),
|
filename: z.string().optional().nullable(),
|
||||||
title: z.string().optional().nullable(),
|
title: z.string().optional().nullable(),
|
||||||
caption: z.string().optional().nullable(),
|
caption: z.string().optional().nullable(),
|
||||||
|
|||||||
216
apps/sim/app/api/tools/zoom/get-recordings/route.ts
Normal file
216
apps/sim/app/api/tools/zoom/get-recordings/route.ts
Normal file
@@ -0,0 +1,216 @@
|
|||||||
|
import { createLogger } from '@sim/logger'
|
||||||
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
|
import { z } from 'zod'
|
||||||
|
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||||
|
import {
|
||||||
|
secureFetchWithPinnedIP,
|
||||||
|
validateUrlWithDNS,
|
||||||
|
} from '@/lib/core/security/input-validation.server'
|
||||||
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
|
import { getExtensionFromMimeType } from '@/lib/uploads/utils/file-utils'
|
||||||
|
|
||||||
|
export const dynamic = 'force-dynamic'
|
||||||
|
|
||||||
|
const logger = createLogger('ZoomGetRecordingsAPI')
|
||||||
|
|
||||||
|
interface ZoomRecordingFile {
|
||||||
|
id?: string
|
||||||
|
meeting_id?: string
|
||||||
|
recording_start?: string
|
||||||
|
recording_end?: string
|
||||||
|
file_type?: string
|
||||||
|
file_extension?: string
|
||||||
|
file_size?: number
|
||||||
|
play_url?: string
|
||||||
|
download_url?: string
|
||||||
|
status?: string
|
||||||
|
recording_type?: string
|
||||||
|
}
|
||||||
|
|
||||||
|
interface ZoomRecordingsResponse {
|
||||||
|
uuid?: string
|
||||||
|
id?: string | number
|
||||||
|
account_id?: string
|
||||||
|
host_id?: string
|
||||||
|
topic?: string
|
||||||
|
type?: number
|
||||||
|
start_time?: string
|
||||||
|
duration?: number
|
||||||
|
total_size?: number
|
||||||
|
recording_count?: number
|
||||||
|
share_url?: string
|
||||||
|
recording_files?: ZoomRecordingFile[]
|
||||||
|
}
|
||||||
|
|
||||||
|
interface ZoomErrorResponse {
|
||||||
|
message?: string
|
||||||
|
code?: number
|
||||||
|
}
|
||||||
|
|
||||||
|
const ZoomGetRecordingsSchema = z.object({
|
||||||
|
accessToken: z.string().min(1, 'Access token is required'),
|
||||||
|
meetingId: z.string().min(1, 'Meeting ID is required'),
|
||||||
|
includeFolderItems: z.boolean().optional(),
|
||||||
|
ttl: z.number().optional(),
|
||||||
|
downloadFiles: z.boolean().optional().default(false),
|
||||||
|
})
|
||||||
|
|
||||||
|
export async function POST(request: NextRequest) {
|
||||||
|
const requestId = generateRequestId()
|
||||||
|
|
||||||
|
try {
|
||||||
|
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
|
||||||
|
|
||||||
|
if (!authResult.success) {
|
||||||
|
logger.warn(`[${requestId}] Unauthorized Zoom get recordings attempt: ${authResult.error}`)
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: authResult.error || 'Authentication required',
|
||||||
|
},
|
||||||
|
{ status: 401 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const body = await request.json()
|
||||||
|
const validatedData = ZoomGetRecordingsSchema.parse(body)
|
||||||
|
|
||||||
|
const { accessToken, meetingId, includeFolderItems, ttl, downloadFiles } = validatedData
|
||||||
|
|
||||||
|
const baseUrl = `https://api.zoom.us/v2/meetings/${encodeURIComponent(meetingId)}/recordings`
|
||||||
|
const queryParams = new URLSearchParams()
|
||||||
|
|
||||||
|
if (includeFolderItems != null) {
|
||||||
|
queryParams.append('include_folder_items', String(includeFolderItems))
|
||||||
|
}
|
||||||
|
if (ttl) {
|
||||||
|
queryParams.append('ttl', String(ttl))
|
||||||
|
}
|
||||||
|
|
||||||
|
const queryString = queryParams.toString()
|
||||||
|
const apiUrl = queryString ? `${baseUrl}?${queryString}` : baseUrl
|
||||||
|
|
||||||
|
logger.info(`[${requestId}] Fetching recordings from Zoom`, { meetingId })
|
||||||
|
|
||||||
|
const urlValidation = await validateUrlWithDNS(apiUrl, 'apiUrl')
|
||||||
|
if (!urlValidation.isValid) {
|
||||||
|
return NextResponse.json({ success: false, error: urlValidation.error }, { status: 400 })
|
||||||
|
}
|
||||||
|
|
||||||
|
const response = await secureFetchWithPinnedIP(apiUrl, urlValidation.resolvedIP!, {
|
||||||
|
method: 'GET',
|
||||||
|
headers: {
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
Authorization: `Bearer ${accessToken}`,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
const errorData = (await response.json().catch(() => ({}))) as ZoomErrorResponse
|
||||||
|
logger.error(`[${requestId}] Zoom API error`, {
|
||||||
|
status: response.status,
|
||||||
|
error: errorData,
|
||||||
|
})
|
||||||
|
return NextResponse.json(
|
||||||
|
{ success: false, error: errorData.message || `Zoom API error: ${response.status}` },
|
||||||
|
{ status: 400 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const data = (await response.json()) as ZoomRecordingsResponse
|
||||||
|
const files: Array<{
|
||||||
|
name: string
|
||||||
|
mimeType: string
|
||||||
|
data: string
|
||||||
|
size: number
|
||||||
|
}> = []
|
||||||
|
|
||||||
|
if (downloadFiles && Array.isArray(data.recording_files)) {
|
||||||
|
for (const file of data.recording_files) {
|
||||||
|
if (!file?.download_url) continue
|
||||||
|
|
||||||
|
try {
|
||||||
|
const fileUrlValidation = await validateUrlWithDNS(file.download_url, 'downloadUrl')
|
||||||
|
if (!fileUrlValidation.isValid) continue
|
||||||
|
|
||||||
|
const downloadResponse = await secureFetchWithPinnedIP(
|
||||||
|
file.download_url,
|
||||||
|
fileUrlValidation.resolvedIP!,
|
||||||
|
{
|
||||||
|
method: 'GET',
|
||||||
|
headers: { Authorization: `Bearer ${accessToken}` },
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
if (!downloadResponse.ok) continue
|
||||||
|
|
||||||
|
const contentType =
|
||||||
|
downloadResponse.headers.get('content-type') || 'application/octet-stream'
|
||||||
|
const arrayBuffer = await downloadResponse.arrayBuffer()
|
||||||
|
const buffer = Buffer.from(arrayBuffer)
|
||||||
|
const extension =
|
||||||
|
file.file_extension?.toString().toLowerCase() ||
|
||||||
|
getExtensionFromMimeType(contentType) ||
|
||||||
|
'dat'
|
||||||
|
const fileName = `zoom-recording-${file.id || file.recording_start || Date.now()}.${extension}`
|
||||||
|
|
||||||
|
files.push({
|
||||||
|
name: fileName,
|
||||||
|
mimeType: contentType,
|
||||||
|
data: buffer.toString('base64'),
|
||||||
|
size: buffer.length,
|
||||||
|
})
|
||||||
|
} catch (error) {
|
||||||
|
logger.warn(`[${requestId}] Failed to download recording file:`, error)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info(`[${requestId}] Zoom recordings fetched successfully`, {
|
||||||
|
recordingCount: data.recording_files?.length || 0,
|
||||||
|
downloadedCount: files.length,
|
||||||
|
})
|
||||||
|
|
||||||
|
return NextResponse.json({
|
||||||
|
success: true,
|
||||||
|
output: {
|
||||||
|
recording: {
|
||||||
|
uuid: data.uuid,
|
||||||
|
id: data.id,
|
||||||
|
account_id: data.account_id,
|
||||||
|
host_id: data.host_id,
|
||||||
|
topic: data.topic,
|
||||||
|
type: data.type,
|
||||||
|
start_time: data.start_time,
|
||||||
|
duration: data.duration,
|
||||||
|
total_size: data.total_size,
|
||||||
|
recording_count: data.recording_count,
|
||||||
|
share_url: data.share_url,
|
||||||
|
recording_files: (data.recording_files || []).map((file: ZoomRecordingFile) => ({
|
||||||
|
id: file.id,
|
||||||
|
meeting_id: file.meeting_id,
|
||||||
|
recording_start: file.recording_start,
|
||||||
|
recording_end: file.recording_end,
|
||||||
|
file_type: file.file_type,
|
||||||
|
file_extension: file.file_extension,
|
||||||
|
file_size: file.file_size,
|
||||||
|
play_url: file.play_url,
|
||||||
|
download_url: file.download_url,
|
||||||
|
status: file.status,
|
||||||
|
recording_type: file.recording_type,
|
||||||
|
})),
|
||||||
|
},
|
||||||
|
files: files.length > 0 ? files : undefined,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
} catch (error) {
|
||||||
|
logger.error(`[${requestId}] Error fetching Zoom recordings:`, error)
|
||||||
|
return NextResponse.json(
|
||||||
|
{
|
||||||
|
success: false,
|
||||||
|
error: error instanceof Error ? error.message : 'Unknown error occurred',
|
||||||
|
},
|
||||||
|
{ status: 500 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -5,6 +5,7 @@ import { and, eq } from 'drizzle-orm'
|
|||||||
import { type NextRequest, NextResponse } from 'next/server'
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
import { getSession } from '@/lib/auth'
|
import { getSession } from '@/lib/auth'
|
||||||
|
import { hasActiveSubscription } from '@/lib/billing'
|
||||||
|
|
||||||
const logger = createLogger('SubscriptionTransferAPI')
|
const logger = createLogger('SubscriptionTransferAPI')
|
||||||
|
|
||||||
@@ -88,6 +89,14 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Check if org already has an active subscription (prevent duplicates)
|
||||||
|
if (await hasActiveSubscription(organizationId)) {
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: 'Organization already has an active subscription' },
|
||||||
|
{ status: 409 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
await db
|
await db
|
||||||
.update(subscription)
|
.update(subscription)
|
||||||
.set({ referenceId: organizationId })
|
.set({ referenceId: organizationId })
|
||||||
|
|||||||
@@ -203,6 +203,10 @@ export const PATCH = withAdminAuthParams<RouteParams>(async (request, context) =
|
|||||||
}
|
}
|
||||||
|
|
||||||
updateData.billingBlocked = body.billingBlocked
|
updateData.billingBlocked = body.billingBlocked
|
||||||
|
// Clear the reason when unblocking
|
||||||
|
if (body.billingBlocked === false) {
|
||||||
|
updateData.billingBlockedReason = null
|
||||||
|
}
|
||||||
updated.push('billingBlocked')
|
updated.push('billingBlocked')
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -1,6 +1,4 @@
|
|||||||
import { db, workflow as workflowTable } from '@sim/db'
|
|
||||||
import { createLogger } from '@sim/logger'
|
import { createLogger } from '@sim/logger'
|
||||||
import { eq } from 'drizzle-orm'
|
|
||||||
import { type NextRequest, NextResponse } from 'next/server'
|
import { type NextRequest, NextResponse } from 'next/server'
|
||||||
import { v4 as uuidv4 } from 'uuid'
|
import { v4 as uuidv4 } from 'uuid'
|
||||||
import { z } from 'zod'
|
import { z } from 'zod'
|
||||||
@@ -8,6 +6,7 @@ import { checkHybridAuth } from '@/lib/auth/hybrid'
|
|||||||
import { generateRequestId } from '@/lib/core/utils/request'
|
import { generateRequestId } from '@/lib/core/utils/request'
|
||||||
import { SSE_HEADERS } from '@/lib/core/utils/sse'
|
import { SSE_HEADERS } from '@/lib/core/utils/sse'
|
||||||
import { markExecutionCancelled } from '@/lib/execution/cancellation'
|
import { markExecutionCancelled } from '@/lib/execution/cancellation'
|
||||||
|
import { preprocessExecution } from '@/lib/execution/preprocessing'
|
||||||
import { LoggingSession } from '@/lib/logs/execution/logging-session'
|
import { LoggingSession } from '@/lib/logs/execution/logging-session'
|
||||||
import { executeWorkflowCore } from '@/lib/workflows/executor/execution-core'
|
import { executeWorkflowCore } from '@/lib/workflows/executor/execution-core'
|
||||||
import { createSSECallbacks } from '@/lib/workflows/executor/execution-events'
|
import { createSSECallbacks } from '@/lib/workflows/executor/execution-events'
|
||||||
@@ -75,12 +74,31 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
|||||||
const { startBlockId, sourceSnapshot, input } = validation.data
|
const { startBlockId, sourceSnapshot, input } = validation.data
|
||||||
const executionId = uuidv4()
|
const executionId = uuidv4()
|
||||||
|
|
||||||
const [workflowRecord] = await db
|
// Run preprocessing checks (billing, rate limits, usage limits)
|
||||||
.select({ workspaceId: workflowTable.workspaceId, userId: workflowTable.userId })
|
const preprocessResult = await preprocessExecution({
|
||||||
.from(workflowTable)
|
workflowId,
|
||||||
.where(eq(workflowTable.id, workflowId))
|
userId,
|
||||||
.limit(1)
|
triggerType: 'manual',
|
||||||
|
executionId,
|
||||||
|
requestId,
|
||||||
|
checkRateLimit: false, // Manual executions don't rate limit
|
||||||
|
checkDeployment: false, // Run-from-block doesn't require deployment
|
||||||
|
})
|
||||||
|
|
||||||
|
if (!preprocessResult.success) {
|
||||||
|
const { error } = preprocessResult
|
||||||
|
logger.warn(`[${requestId}] Preprocessing failed for run-from-block`, {
|
||||||
|
workflowId,
|
||||||
|
error: error?.message,
|
||||||
|
statusCode: error?.statusCode,
|
||||||
|
})
|
||||||
|
return NextResponse.json(
|
||||||
|
{ error: error?.message || 'Execution blocked' },
|
||||||
|
{ status: error?.statusCode || 500 }
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
const workflowRecord = preprocessResult.workflowRecord
|
||||||
if (!workflowRecord?.workspaceId) {
|
if (!workflowRecord?.workspaceId) {
|
||||||
return NextResponse.json({ error: 'Workflow not found or has no workspace' }, { status: 404 })
|
return NextResponse.json({ error: 'Workflow not found or has no workspace' }, { status: 404 })
|
||||||
}
|
}
|
||||||
@@ -92,6 +110,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
|
|||||||
workflowId,
|
workflowId,
|
||||||
startBlockId,
|
startBlockId,
|
||||||
executedBlocksCount: sourceSnapshot.executedBlocks.length,
|
executedBlocksCount: sourceSnapshot.executedBlocks.length,
|
||||||
|
billingActorUserId: preprocessResult.actorUserId,
|
||||||
})
|
})
|
||||||
|
|
||||||
const loggingSession = new LoggingSession(workflowId, executionId, 'manual', requestId)
|
const loggingSession = new LoggingSession(workflowId, executionId, 'manual', requestId)
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
import { memo, useCallback } from 'react'
|
import { memo, useCallback } from 'react'
|
||||||
import { ArrowLeftRight, ArrowUpDown, Circle, CircleOff, Lock, LogOut, Unlock } from 'lucide-react'
|
import { ArrowLeftRight, ArrowUpDown, Circle, CircleOff, LogOut } from 'lucide-react'
|
||||||
import { Button, Copy, PlayOutline, Tooltip, Trash2 } from '@/components/emcn'
|
import { Button, Copy, PlayOutline, Tooltip, Trash2 } from '@/components/emcn'
|
||||||
import { cn } from '@/lib/core/utils/cn'
|
import { cn } from '@/lib/core/utils/cn'
|
||||||
import { isInputDefinitionTrigger } from '@/lib/workflows/triggers/input-definition-triggers'
|
import { isInputDefinitionTrigger } from '@/lib/workflows/triggers/input-definition-triggers'
|
||||||
@@ -49,7 +49,6 @@ export const ActionBar = memo(
|
|||||||
collaborativeBatchRemoveBlocks,
|
collaborativeBatchRemoveBlocks,
|
||||||
collaborativeBatchToggleBlockEnabled,
|
collaborativeBatchToggleBlockEnabled,
|
||||||
collaborativeBatchToggleBlockHandles,
|
collaborativeBatchToggleBlockHandles,
|
||||||
collaborativeBatchToggleLocked,
|
|
||||||
} = useCollaborativeWorkflow()
|
} = useCollaborativeWorkflow()
|
||||||
const { setPendingSelection } = useWorkflowRegistry()
|
const { setPendingSelection } = useWorkflowRegistry()
|
||||||
const { handleRunFromBlock } = useWorkflowExecution()
|
const { handleRunFromBlock } = useWorkflowExecution()
|
||||||
@@ -85,28 +84,16 @@ export const ActionBar = memo(
|
|||||||
)
|
)
|
||||||
}, [blockId, addNotification, collaborativeBatchAddBlocks, setPendingSelection])
|
}, [blockId, addNotification, collaborativeBatchAddBlocks, setPendingSelection])
|
||||||
|
|
||||||
const {
|
const { isEnabled, horizontalHandles, parentId, parentType } = useWorkflowStore(
|
||||||
isEnabled,
|
|
||||||
horizontalHandles,
|
|
||||||
parentId,
|
|
||||||
parentType,
|
|
||||||
isLocked,
|
|
||||||
isParentLocked,
|
|
||||||
isParentDisabled,
|
|
||||||
} = useWorkflowStore(
|
|
||||||
useCallback(
|
useCallback(
|
||||||
(state) => {
|
(state) => {
|
||||||
const block = state.blocks[blockId]
|
const block = state.blocks[blockId]
|
||||||
const parentId = block?.data?.parentId
|
const parentId = block?.data?.parentId
|
||||||
const parentBlock = parentId ? state.blocks[parentId] : undefined
|
|
||||||
return {
|
return {
|
||||||
isEnabled: block?.enabled ?? true,
|
isEnabled: block?.enabled ?? true,
|
||||||
horizontalHandles: block?.horizontalHandles ?? false,
|
horizontalHandles: block?.horizontalHandles ?? false,
|
||||||
parentId,
|
parentId,
|
||||||
parentType: parentBlock?.type,
|
parentType: parentId ? state.blocks[parentId]?.type : undefined,
|
||||||
isLocked: block?.locked ?? false,
|
|
||||||
isParentLocked: parentBlock?.locked ?? false,
|
|
||||||
isParentDisabled: parentBlock ? !parentBlock.enabled : false,
|
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
[blockId]
|
[blockId]
|
||||||
@@ -174,27 +161,25 @@ export const ActionBar = memo(
|
|||||||
{!isNoteBlock && !isInsideSubflow && (
|
{!isNoteBlock && !isInsideSubflow && (
|
||||||
<Tooltip.Root>
|
<Tooltip.Root>
|
||||||
<Tooltip.Trigger asChild>
|
<Tooltip.Trigger asChild>
|
||||||
<span className='inline-flex'>
|
<Button
|
||||||
<Button
|
variant='ghost'
|
||||||
variant='ghost'
|
onClick={(e) => {
|
||||||
onClick={(e) => {
|
e.stopPropagation()
|
||||||
e.stopPropagation()
|
if (canRunFromBlock && !disabled) {
|
||||||
if (canRunFromBlock && !disabled) {
|
handleRunFromBlockClick()
|
||||||
handleRunFromBlockClick()
|
}
|
||||||
}
|
}}
|
||||||
}}
|
className={ACTION_BUTTON_STYLES}
|
||||||
className={ACTION_BUTTON_STYLES}
|
disabled={disabled || !canRunFromBlock}
|
||||||
disabled={disabled || !canRunFromBlock}
|
>
|
||||||
>
|
<PlayOutline className={ICON_SIZE} />
|
||||||
<PlayOutline className={ICON_SIZE} />
|
</Button>
|
||||||
</Button>
|
|
||||||
</span>
|
|
||||||
</Tooltip.Trigger>
|
</Tooltip.Trigger>
|
||||||
<Tooltip.Content side='top'>
|
<Tooltip.Content side='top'>
|
||||||
{(() => {
|
{(() => {
|
||||||
if (disabled) return getTooltipMessage('Run from block')
|
if (disabled) return getTooltipMessage('Run from block')
|
||||||
if (isExecuting) return 'Execution in progress'
|
if (isExecuting) return 'Execution in progress'
|
||||||
if (!dependenciesSatisfied) return 'Run previous blocks first'
|
if (!dependenciesSatisfied) return 'Run upstream blocks first'
|
||||||
return 'Run from block'
|
return 'Run from block'
|
||||||
})()}
|
})()}
|
||||||
</Tooltip.Content>
|
</Tooltip.Content>
|
||||||
@@ -208,54 +193,18 @@ export const ActionBar = memo(
|
|||||||
variant='ghost'
|
variant='ghost'
|
||||||
onClick={(e) => {
|
onClick={(e) => {
|
||||||
e.stopPropagation()
|
e.stopPropagation()
|
||||||
// Can't enable if parent is disabled (must enable parent first)
|
if (!disabled) {
|
||||||
const cantEnable = !isEnabled && isParentDisabled
|
|
||||||
if (!disabled && !isLocked && !isParentLocked && !cantEnable) {
|
|
||||||
collaborativeBatchToggleBlockEnabled([blockId])
|
collaborativeBatchToggleBlockEnabled([blockId])
|
||||||
}
|
}
|
||||||
}}
|
}}
|
||||||
className={ACTION_BUTTON_STYLES}
|
className={ACTION_BUTTON_STYLES}
|
||||||
disabled={
|
disabled={disabled}
|
||||||
disabled || isLocked || isParentLocked || (!isEnabled && isParentDisabled)
|
|
||||||
}
|
|
||||||
>
|
>
|
||||||
{isEnabled ? <Circle className={ICON_SIZE} /> : <CircleOff className={ICON_SIZE} />}
|
{isEnabled ? <Circle className={ICON_SIZE} /> : <CircleOff className={ICON_SIZE} />}
|
||||||
</Button>
|
</Button>
|
||||||
</Tooltip.Trigger>
|
</Tooltip.Trigger>
|
||||||
<Tooltip.Content side='top'>
|
<Tooltip.Content side='top'>
|
||||||
{isLocked || isParentLocked
|
{getTooltipMessage(isEnabled ? 'Disable Block' : 'Enable Block')}
|
||||||
? 'Block is locked'
|
|
||||||
: !isEnabled && isParentDisabled
|
|
||||||
? 'Parent container is disabled'
|
|
||||||
: getTooltipMessage(isEnabled ? 'Disable Block' : 'Enable Block')}
|
|
||||||
</Tooltip.Content>
|
|
||||||
</Tooltip.Root>
|
|
||||||
)}
|
|
||||||
|
|
||||||
{userPermissions.canAdmin && (
|
|
||||||
<Tooltip.Root>
|
|
||||||
<Tooltip.Trigger asChild>
|
|
||||||
<Button
|
|
||||||
variant='ghost'
|
|
||||||
onClick={(e) => {
|
|
||||||
e.stopPropagation()
|
|
||||||
// Can't unlock a block if its parent container is locked
|
|
||||||
if (!disabled && !(isLocked && isParentLocked)) {
|
|
||||||
collaborativeBatchToggleLocked([blockId])
|
|
||||||
}
|
|
||||||
}}
|
|
||||||
className={ACTION_BUTTON_STYLES}
|
|
||||||
disabled={disabled || (isLocked && isParentLocked)}
|
|
||||||
>
|
|
||||||
{isLocked ? <Unlock className={ICON_SIZE} /> : <Lock className={ICON_SIZE} />}
|
|
||||||
</Button>
|
|
||||||
</Tooltip.Trigger>
|
|
||||||
<Tooltip.Content side='top'>
|
|
||||||
{isLocked && isParentLocked
|
|
||||||
? 'Parent container is locked'
|
|
||||||
: isLocked
|
|
||||||
? 'Unlock Block'
|
|
||||||
: 'Lock Block'}
|
|
||||||
</Tooltip.Content>
|
</Tooltip.Content>
|
||||||
</Tooltip.Root>
|
</Tooltip.Root>
|
||||||
)}
|
)}
|
||||||
@@ -288,12 +237,12 @@ export const ActionBar = memo(
|
|||||||
variant='ghost'
|
variant='ghost'
|
||||||
onClick={(e) => {
|
onClick={(e) => {
|
||||||
e.stopPropagation()
|
e.stopPropagation()
|
||||||
if (!disabled && !isLocked && !isParentLocked) {
|
if (!disabled) {
|
||||||
collaborativeBatchToggleBlockHandles([blockId])
|
collaborativeBatchToggleBlockHandles([blockId])
|
||||||
}
|
}
|
||||||
}}
|
}}
|
||||||
className={ACTION_BUTTON_STYLES}
|
className={ACTION_BUTTON_STYLES}
|
||||||
disabled={disabled || isLocked || isParentLocked}
|
disabled={disabled}
|
||||||
>
|
>
|
||||||
{horizontalHandles ? (
|
{horizontalHandles ? (
|
||||||
<ArrowLeftRight className={ICON_SIZE} />
|
<ArrowLeftRight className={ICON_SIZE} />
|
||||||
@@ -303,9 +252,7 @@ export const ActionBar = memo(
|
|||||||
</Button>
|
</Button>
|
||||||
</Tooltip.Trigger>
|
</Tooltip.Trigger>
|
||||||
<Tooltip.Content side='top'>
|
<Tooltip.Content side='top'>
|
||||||
{isLocked || isParentLocked
|
{getTooltipMessage(horizontalHandles ? 'Vertical Ports' : 'Horizontal Ports')}
|
||||||
? 'Block is locked'
|
|
||||||
: getTooltipMessage(horizontalHandles ? 'Vertical Ports' : 'Horizontal Ports')}
|
|
||||||
</Tooltip.Content>
|
</Tooltip.Content>
|
||||||
</Tooltip.Root>
|
</Tooltip.Root>
|
||||||
)}
|
)}
|
||||||
@@ -317,23 +264,19 @@ export const ActionBar = memo(
|
|||||||
variant='ghost'
|
variant='ghost'
|
||||||
onClick={(e) => {
|
onClick={(e) => {
|
||||||
e.stopPropagation()
|
e.stopPropagation()
|
||||||
if (!disabled && userPermissions.canEdit && !isLocked && !isParentLocked) {
|
if (!disabled && userPermissions.canEdit) {
|
||||||
window.dispatchEvent(
|
window.dispatchEvent(
|
||||||
new CustomEvent('remove-from-subflow', { detail: { blockIds: [blockId] } })
|
new CustomEvent('remove-from-subflow', { detail: { blockIds: [blockId] } })
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
}}
|
}}
|
||||||
className={ACTION_BUTTON_STYLES}
|
className={ACTION_BUTTON_STYLES}
|
||||||
disabled={disabled || !userPermissions.canEdit || isLocked || isParentLocked}
|
disabled={disabled || !userPermissions.canEdit}
|
||||||
>
|
>
|
||||||
<LogOut className={ICON_SIZE} />
|
<LogOut className={ICON_SIZE} />
|
||||||
</Button>
|
</Button>
|
||||||
</Tooltip.Trigger>
|
</Tooltip.Trigger>
|
||||||
<Tooltip.Content side='top'>
|
<Tooltip.Content side='top'>{getTooltipMessage('Remove from Subflow')}</Tooltip.Content>
|
||||||
{isLocked || isParentLocked
|
|
||||||
? 'Block is locked'
|
|
||||||
: getTooltipMessage('Remove from Subflow')}
|
|
||||||
</Tooltip.Content>
|
|
||||||
</Tooltip.Root>
|
</Tooltip.Root>
|
||||||
)}
|
)}
|
||||||
|
|
||||||
@@ -343,19 +286,17 @@ export const ActionBar = memo(
|
|||||||
variant='ghost'
|
variant='ghost'
|
||||||
onClick={(e) => {
|
onClick={(e) => {
|
||||||
e.stopPropagation()
|
e.stopPropagation()
|
||||||
if (!disabled && !isLocked && !isParentLocked) {
|
if (!disabled) {
|
||||||
collaborativeBatchRemoveBlocks([blockId])
|
collaborativeBatchRemoveBlocks([blockId])
|
||||||
}
|
}
|
||||||
}}
|
}}
|
||||||
className={ACTION_BUTTON_STYLES}
|
className={ACTION_BUTTON_STYLES}
|
||||||
disabled={disabled || isLocked || isParentLocked}
|
disabled={disabled}
|
||||||
>
|
>
|
||||||
<Trash2 className={ICON_SIZE} />
|
<Trash2 className={ICON_SIZE} />
|
||||||
</Button>
|
</Button>
|
||||||
</Tooltip.Trigger>
|
</Tooltip.Trigger>
|
||||||
<Tooltip.Content side='top'>
|
<Tooltip.Content side='top'>{getTooltipMessage('Delete Block')}</Tooltip.Content>
|
||||||
{isLocked || isParentLocked ? 'Block is locked' : getTooltipMessage('Delete Block')}
|
|
||||||
</Tooltip.Content>
|
|
||||||
</Tooltip.Root>
|
</Tooltip.Root>
|
||||||
</div>
|
</div>
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -20,9 +20,6 @@ export interface BlockInfo {
|
|||||||
horizontalHandles: boolean
|
horizontalHandles: boolean
|
||||||
parentId?: string
|
parentId?: string
|
||||||
parentType?: string
|
parentType?: string
|
||||||
locked?: boolean
|
|
||||||
isParentLocked?: boolean
|
|
||||||
isParentDisabled?: boolean
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -49,17 +46,10 @@ export interface BlockMenuProps {
|
|||||||
showRemoveFromSubflow?: boolean
|
showRemoveFromSubflow?: boolean
|
||||||
/** Whether run from block is available (has snapshot, was executed, not inside subflow) */
|
/** Whether run from block is available (has snapshot, was executed, not inside subflow) */
|
||||||
canRunFromBlock?: boolean
|
canRunFromBlock?: boolean
|
||||||
/** Whether to disable edit actions (user can't edit OR blocks are locked) */
|
|
||||||
disableEdit?: boolean
|
disableEdit?: boolean
|
||||||
/** Whether the user has edit permission (ignoring locked state) */
|
|
||||||
userCanEdit?: boolean
|
|
||||||
isExecuting?: boolean
|
isExecuting?: boolean
|
||||||
/** Whether the selected block is a trigger (has no incoming edges) */
|
/** Whether the selected block is a trigger (has no incoming edges) */
|
||||||
isPositionalTrigger?: boolean
|
isPositionalTrigger?: boolean
|
||||||
/** Callback to toggle locked state of selected blocks */
|
|
||||||
onToggleLocked?: () => void
|
|
||||||
/** Whether the user has admin permissions */
|
|
||||||
canAdmin?: boolean
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -88,22 +78,13 @@ export function BlockMenu({
|
|||||||
showRemoveFromSubflow = false,
|
showRemoveFromSubflow = false,
|
||||||
canRunFromBlock = false,
|
canRunFromBlock = false,
|
||||||
disableEdit = false,
|
disableEdit = false,
|
||||||
userCanEdit = true,
|
|
||||||
isExecuting = false,
|
isExecuting = false,
|
||||||
isPositionalTrigger = false,
|
isPositionalTrigger = false,
|
||||||
onToggleLocked,
|
|
||||||
canAdmin = false,
|
|
||||||
}: BlockMenuProps) {
|
}: BlockMenuProps) {
|
||||||
const isSingleBlock = selectedBlocks.length === 1
|
const isSingleBlock = selectedBlocks.length === 1
|
||||||
|
|
||||||
const allEnabled = selectedBlocks.every((b) => b.enabled)
|
const allEnabled = selectedBlocks.every((b) => b.enabled)
|
||||||
const allDisabled = selectedBlocks.every((b) => !b.enabled)
|
const allDisabled = selectedBlocks.every((b) => !b.enabled)
|
||||||
const allLocked = selectedBlocks.every((b) => b.locked)
|
|
||||||
const allUnlocked = selectedBlocks.every((b) => !b.locked)
|
|
||||||
// Can't unlock blocks that have locked parents
|
|
||||||
const hasBlockWithLockedParent = selectedBlocks.some((b) => b.locked && b.isParentLocked)
|
|
||||||
// Can't enable blocks that have disabled parents
|
|
||||||
const hasBlockWithDisabledParent = selectedBlocks.some((b) => !b.enabled && b.isParentDisabled)
|
|
||||||
|
|
||||||
const hasSingletonBlock = selectedBlocks.some(
|
const hasSingletonBlock = selectedBlocks.some(
|
||||||
(b) =>
|
(b) =>
|
||||||
@@ -127,12 +108,6 @@ export function BlockMenu({
|
|||||||
return 'Toggle Enabled'
|
return 'Toggle Enabled'
|
||||||
}
|
}
|
||||||
|
|
||||||
const getToggleLockedLabel = () => {
|
|
||||||
if (allLocked) return 'Unlock'
|
|
||||||
if (allUnlocked) return 'Lock'
|
|
||||||
return 'Toggle Lock'
|
|
||||||
}
|
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<Popover
|
<Popover
|
||||||
open={isOpen}
|
open={isOpen}
|
||||||
@@ -164,7 +139,7 @@ export function BlockMenu({
|
|||||||
</PopoverItem>
|
</PopoverItem>
|
||||||
<PopoverItem
|
<PopoverItem
|
||||||
className='group'
|
className='group'
|
||||||
disabled={!userCanEdit || !hasClipboard}
|
disabled={disableEdit || !hasClipboard}
|
||||||
onClick={() => {
|
onClick={() => {
|
||||||
onPaste()
|
onPaste()
|
||||||
onClose()
|
onClose()
|
||||||
@@ -175,7 +150,7 @@ export function BlockMenu({
|
|||||||
</PopoverItem>
|
</PopoverItem>
|
||||||
{!hasSingletonBlock && (
|
{!hasSingletonBlock && (
|
||||||
<PopoverItem
|
<PopoverItem
|
||||||
disabled={!userCanEdit}
|
disabled={disableEdit}
|
||||||
onClick={() => {
|
onClick={() => {
|
||||||
onDuplicate()
|
onDuplicate()
|
||||||
onClose()
|
onClose()
|
||||||
@@ -189,15 +164,13 @@ export function BlockMenu({
|
|||||||
{!allNoteBlocks && <PopoverDivider />}
|
{!allNoteBlocks && <PopoverDivider />}
|
||||||
{!allNoteBlocks && (
|
{!allNoteBlocks && (
|
||||||
<PopoverItem
|
<PopoverItem
|
||||||
disabled={disableEdit || hasBlockWithDisabledParent}
|
disabled={disableEdit}
|
||||||
onClick={() => {
|
onClick={() => {
|
||||||
if (!disableEdit && !hasBlockWithDisabledParent) {
|
onToggleEnabled()
|
||||||
onToggleEnabled()
|
onClose()
|
||||||
onClose()
|
|
||||||
}
|
|
||||||
}}
|
}}
|
||||||
>
|
>
|
||||||
{hasBlockWithDisabledParent ? 'Parent is disabled' : getToggleEnabledLabel()}
|
{getToggleEnabledLabel()}
|
||||||
</PopoverItem>
|
</PopoverItem>
|
||||||
)}
|
)}
|
||||||
{!allNoteBlocks && !isSubflow && (
|
{!allNoteBlocks && !isSubflow && (
|
||||||
@@ -222,19 +195,6 @@ export function BlockMenu({
|
|||||||
Remove from Subflow
|
Remove from Subflow
|
||||||
</PopoverItem>
|
</PopoverItem>
|
||||||
)}
|
)}
|
||||||
{canAdmin && onToggleLocked && (
|
|
||||||
<PopoverItem
|
|
||||||
disabled={hasBlockWithLockedParent}
|
|
||||||
onClick={() => {
|
|
||||||
if (!hasBlockWithLockedParent) {
|
|
||||||
onToggleLocked()
|
|
||||||
onClose()
|
|
||||||
}
|
|
||||||
}}
|
|
||||||
>
|
|
||||||
{hasBlockWithLockedParent ? 'Parent is locked' : getToggleLockedLabel()}
|
|
||||||
</PopoverItem>
|
|
||||||
)}
|
|
||||||
|
|
||||||
{/* Single block actions */}
|
{/* Single block actions */}
|
||||||
{isSingleBlock && <PopoverDivider />}
|
{isSingleBlock && <PopoverDivider />}
|
||||||
|
|||||||
@@ -34,8 +34,6 @@ export interface CanvasMenuProps {
|
|||||||
canUndo?: boolean
|
canUndo?: boolean
|
||||||
canRedo?: boolean
|
canRedo?: boolean
|
||||||
isInvitationsDisabled?: boolean
|
isInvitationsDisabled?: boolean
|
||||||
/** Whether the workflow has locked blocks (disables auto-layout) */
|
|
||||||
hasLockedBlocks?: boolean
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -62,7 +60,6 @@ export function CanvasMenu({
|
|||||||
disableEdit = false,
|
disableEdit = false,
|
||||||
canUndo = false,
|
canUndo = false,
|
||||||
canRedo = false,
|
canRedo = false,
|
||||||
hasLockedBlocks = false,
|
|
||||||
}: CanvasMenuProps) {
|
}: CanvasMenuProps) {
|
||||||
return (
|
return (
|
||||||
<Popover
|
<Popover
|
||||||
@@ -132,12 +129,11 @@ export function CanvasMenu({
|
|||||||
</PopoverItem>
|
</PopoverItem>
|
||||||
<PopoverItem
|
<PopoverItem
|
||||||
className='group'
|
className='group'
|
||||||
disabled={disableEdit || hasLockedBlocks}
|
disabled={disableEdit}
|
||||||
onClick={() => {
|
onClick={() => {
|
||||||
onAutoLayout()
|
onAutoLayout()
|
||||||
onClose()
|
onClose()
|
||||||
}}
|
}}
|
||||||
title={hasLockedBlocks ? 'Unlock blocks to use auto-layout' : undefined}
|
|
||||||
>
|
>
|
||||||
<span>Auto-layout</span>
|
<span>Auto-layout</span>
|
||||||
<span className='ml-auto opacity-70 group-hover:opacity-100'>⇧L</span>
|
<span className='ml-auto opacity-70 group-hover:opacity-100'>⇧L</span>
|
||||||
|
|||||||
@@ -807,7 +807,7 @@ export function Chat() {
|
|||||||
|
|
||||||
const newReservedFields: StartInputFormatField[] = missingStartReservedFields.map(
|
const newReservedFields: StartInputFormatField[] = missingStartReservedFields.map(
|
||||||
(fieldName) => {
|
(fieldName) => {
|
||||||
const defaultType = fieldName === 'files' ? 'files' : 'string'
|
const defaultType = fieldName === 'files' ? 'file[]' : 'string'
|
||||||
|
|
||||||
return {
|
return {
|
||||||
id: crypto.randomUUID(),
|
id: crypto.randomUUID(),
|
||||||
|
|||||||
@@ -179,7 +179,7 @@ export function A2aDeploy({
|
|||||||
newFields.push({
|
newFields.push({
|
||||||
id: crypto.randomUUID(),
|
id: crypto.randomUUID(),
|
||||||
name: 'files',
|
name: 'files',
|
||||||
type: 'files',
|
type: 'file[]',
|
||||||
value: '',
|
value: '',
|
||||||
collapsed: false,
|
collapsed: false,
|
||||||
})
|
})
|
||||||
|
|||||||
@@ -26,7 +26,7 @@ import { useAccessibleReferencePrefixes } from '@/app/workspace/[workspaceId]/w/
|
|||||||
interface Field {
|
interface Field {
|
||||||
id: string
|
id: string
|
||||||
name: string
|
name: string
|
||||||
type?: 'string' | 'number' | 'boolean' | 'object' | 'array' | 'files'
|
type?: 'string' | 'number' | 'boolean' | 'object' | 'array' | 'file[]'
|
||||||
value?: string
|
value?: string
|
||||||
description?: string
|
description?: string
|
||||||
collapsed?: boolean
|
collapsed?: boolean
|
||||||
@@ -57,7 +57,7 @@ const TYPE_OPTIONS: ComboboxOption[] = [
|
|||||||
{ label: 'Boolean', value: 'boolean' },
|
{ label: 'Boolean', value: 'boolean' },
|
||||||
{ label: 'Object', value: 'object' },
|
{ label: 'Object', value: 'object' },
|
||||||
{ label: 'Array', value: 'array' },
|
{ label: 'Array', value: 'array' },
|
||||||
{ label: 'Files', value: 'files' },
|
{ label: 'Files', value: 'file[]' },
|
||||||
]
|
]
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -448,7 +448,7 @@ export function FieldFormat({
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
if (field.type === 'files') {
|
if (field.type === 'file[]') {
|
||||||
const lineCount = fieldValue.split('\n').length
|
const lineCount = fieldValue.split('\n').length
|
||||||
const gutterWidth = calculateGutterWidth(lineCount)
|
const gutterWidth = calculateGutterWidth(lineCount)
|
||||||
|
|
||||||
|
|||||||
@@ -225,7 +225,7 @@ const getOutputTypeForPath = (
|
|||||||
const chatModeTypes: Record<string, string> = {
|
const chatModeTypes: Record<string, string> = {
|
||||||
input: 'string',
|
input: 'string',
|
||||||
conversationId: 'string',
|
conversationId: 'string',
|
||||||
files: 'files',
|
files: 'file[]',
|
||||||
}
|
}
|
||||||
return chatModeTypes[outputPath] || 'any'
|
return chatModeTypes[outputPath] || 'any'
|
||||||
}
|
}
|
||||||
@@ -1563,16 +1563,11 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
|||||||
blockTagGroups.sort((a, b) => a.distance - b.distance)
|
blockTagGroups.sort((a, b) => a.distance - b.distance)
|
||||||
finalBlockTagGroups.push(...blockTagGroups)
|
finalBlockTagGroups.push(...blockTagGroups)
|
||||||
|
|
||||||
const contextualTags: string[] = []
|
const groupTags = finalBlockTagGroups.flatMap((group) => group.tags)
|
||||||
if (loopBlockGroup) {
|
const tags = [...groupTags, ...variableTags]
|
||||||
contextualTags.push(...loopBlockGroup.tags)
|
|
||||||
}
|
|
||||||
if (parallelBlockGroup) {
|
|
||||||
contextualTags.push(...parallelBlockGroup.tags)
|
|
||||||
}
|
|
||||||
|
|
||||||
return {
|
return {
|
||||||
tags: [...allBlockTags, ...variableTags, ...contextualTags],
|
tags,
|
||||||
variableInfoMap,
|
variableInfoMap,
|
||||||
blockTagGroups: finalBlockTagGroups,
|
blockTagGroups: finalBlockTagGroups,
|
||||||
}
|
}
|
||||||
@@ -1746,7 +1741,7 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
|
|||||||
mergedSubBlocks
|
mergedSubBlocks
|
||||||
)
|
)
|
||||||
|
|
||||||
if (fieldType === 'files' || fieldType === 'file[]' || fieldType === 'array') {
|
if (fieldType === 'file' || fieldType === 'file[]' || fieldType === 'array') {
|
||||||
const blockName = parts[0]
|
const blockName = parts[0]
|
||||||
const remainingPath = parts.slice(2).join('.')
|
const remainingPath = parts.slice(2).join('.')
|
||||||
processedTag = `${blockName}.${arrayFieldName}[0].${remainingPath}`
|
processedTag = `${blockName}.${arrayFieldName}[0].${remainingPath}`
|
||||||
|
|||||||
@@ -9,9 +9,7 @@ import {
|
|||||||
ChevronUp,
|
ChevronUp,
|
||||||
ExternalLink,
|
ExternalLink,
|
||||||
Loader2,
|
Loader2,
|
||||||
Lock,
|
|
||||||
Pencil,
|
Pencil,
|
||||||
Unlock,
|
|
||||||
} from 'lucide-react'
|
} from 'lucide-react'
|
||||||
import { useParams } from 'next/navigation'
|
import { useParams } from 'next/navigation'
|
||||||
import { useShallow } from 'zustand/react/shallow'
|
import { useShallow } from 'zustand/react/shallow'
|
||||||
@@ -48,11 +46,16 @@ import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'
|
|||||||
import { usePanelEditorStore } from '@/stores/panel'
|
import { usePanelEditorStore } from '@/stores/panel'
|
||||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||||
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
|
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
|
||||||
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
|
|
||||||
|
|
||||||
/** Stable empty object to avoid creating new references */
|
/** Stable empty object to avoid creating new references */
|
||||||
const EMPTY_SUBBLOCK_VALUES = {} as Record<string, any>
|
const EMPTY_SUBBLOCK_VALUES = {} as Record<string, any>
|
||||||
|
|
||||||
|
/** Shared style for dashed divider lines */
|
||||||
|
const DASHED_DIVIDER_STYLE = {
|
||||||
|
backgroundImage:
|
||||||
|
'repeating-linear-gradient(to right, var(--border) 0px, var(--border) 6px, transparent 6px, transparent 12px)',
|
||||||
|
} as const
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Icon component for rendering block icons.
|
* Icon component for rendering block icons.
|
||||||
*
|
*
|
||||||
@@ -92,39 +95,23 @@ export function Editor() {
|
|||||||
const blockConfig = currentBlock ? getBlock(currentBlock.type) : null
|
const blockConfig = currentBlock ? getBlock(currentBlock.type) : null
|
||||||
const title = currentBlock?.name || 'Editor'
|
const title = currentBlock?.name || 'Editor'
|
||||||
|
|
||||||
// Check if selected block is a subflow (loop or parallel)
|
|
||||||
const isSubflow =
|
const isSubflow =
|
||||||
currentBlock && (currentBlock.type === 'loop' || currentBlock.type === 'parallel')
|
currentBlock && (currentBlock.type === 'loop' || currentBlock.type === 'parallel')
|
||||||
|
|
||||||
// Get subflow display properties from configs
|
|
||||||
const subflowConfig = isSubflow ? (currentBlock.type === 'loop' ? LoopTool : ParallelTool) : null
|
const subflowConfig = isSubflow ? (currentBlock.type === 'loop' ? LoopTool : ParallelTool) : null
|
||||||
|
|
||||||
// Check if selected block is a workflow block
|
|
||||||
const isWorkflowBlock =
|
const isWorkflowBlock =
|
||||||
currentBlock && (currentBlock.type === 'workflow' || currentBlock.type === 'workflow_input')
|
currentBlock && (currentBlock.type === 'workflow' || currentBlock.type === 'workflow_input')
|
||||||
|
|
||||||
// Get workspace ID from params
|
|
||||||
const params = useParams()
|
const params = useParams()
|
||||||
const workspaceId = params.workspaceId as string
|
const workspaceId = params.workspaceId as string
|
||||||
|
|
||||||
// Refs for resize functionality
|
|
||||||
const subBlocksRef = useRef<HTMLDivElement>(null)
|
const subBlocksRef = useRef<HTMLDivElement>(null)
|
||||||
|
|
||||||
// Get user permissions
|
|
||||||
const userPermissions = useUserPermissionsContext()
|
const userPermissions = useUserPermissionsContext()
|
||||||
|
|
||||||
// Check if block is locked (or inside a locked container) and compute edit permission
|
|
||||||
// Locked blocks cannot be edited by anyone (admins can only lock/unlock)
|
|
||||||
const blocks = useWorkflowStore((state) => state.blocks)
|
|
||||||
const parentId = currentBlock?.data?.parentId as string | undefined
|
|
||||||
const isParentLocked = parentId ? (blocks[parentId]?.locked ?? false) : false
|
|
||||||
const isLocked = (currentBlock?.locked ?? false) || isParentLocked
|
|
||||||
const canEditBlock = userPermissions.canEdit && !isLocked
|
|
||||||
|
|
||||||
// Get active workflow ID
|
|
||||||
const activeWorkflowId = useWorkflowRegistry((state) => state.activeWorkflowId)
|
const activeWorkflowId = useWorkflowRegistry((state) => state.activeWorkflowId)
|
||||||
|
|
||||||
// Get block properties (advanced/trigger modes)
|
|
||||||
const { advancedMode, triggerMode } = useEditorBlockProperties(
|
const { advancedMode, triggerMode } = useEditorBlockProperties(
|
||||||
currentBlockId,
|
currentBlockId,
|
||||||
currentWorkflow.isSnapshotView
|
currentWorkflow.isSnapshotView
|
||||||
@@ -156,20 +143,19 @@ export function Editor() {
|
|||||||
[subBlocksForCanonical]
|
[subBlocksForCanonical]
|
||||||
)
|
)
|
||||||
const canonicalModeOverrides = currentBlock?.data?.canonicalModes
|
const canonicalModeOverrides = currentBlock?.data?.canonicalModes
|
||||||
const advancedValuesPresent = hasAdvancedValues(
|
const advancedValuesPresent = useMemo(
|
||||||
subBlocksForCanonical,
|
() => hasAdvancedValues(subBlocksForCanonical, blockSubBlockValues, canonicalIndex),
|
||||||
blockSubBlockValues,
|
[subBlocksForCanonical, blockSubBlockValues, canonicalIndex]
|
||||||
canonicalIndex
|
|
||||||
)
|
)
|
||||||
const displayAdvancedOptions = canEditBlock ? advancedMode : advancedMode || advancedValuesPresent
|
const displayAdvancedOptions = userPermissions.canEdit
|
||||||
|
? advancedMode
|
||||||
|
: advancedMode || advancedValuesPresent
|
||||||
|
|
||||||
const hasAdvancedOnlyFields = useMemo(() => {
|
const hasAdvancedOnlyFields = useMemo(() => {
|
||||||
for (const subBlock of subBlocksForCanonical) {
|
for (const subBlock of subBlocksForCanonical) {
|
||||||
// Must be standalone advanced (mode: 'advanced' without canonicalParamId)
|
|
||||||
if (subBlock.mode !== 'advanced') continue
|
if (subBlock.mode !== 'advanced') continue
|
||||||
if (canonicalIndex.canonicalIdBySubBlockId[subBlock.id]) continue
|
if (canonicalIndex.canonicalIdBySubBlockId[subBlock.id]) continue
|
||||||
|
|
||||||
// Check condition - skip if condition not met for current values
|
|
||||||
if (
|
if (
|
||||||
subBlock.condition &&
|
subBlock.condition &&
|
||||||
!evaluateSubBlockCondition(subBlock.condition, blockSubBlockValues)
|
!evaluateSubBlockCondition(subBlock.condition, blockSubBlockValues)
|
||||||
@@ -182,7 +168,6 @@ export function Editor() {
|
|||||||
return false
|
return false
|
||||||
}, [subBlocksForCanonical, canonicalIndex.canonicalIdBySubBlockId, blockSubBlockValues])
|
}, [subBlocksForCanonical, canonicalIndex.canonicalIdBySubBlockId, blockSubBlockValues])
|
||||||
|
|
||||||
// Get subblock layout using custom hook
|
|
||||||
const { subBlocks, stateToUse: subBlockState } = useEditorSubblockLayout(
|
const { subBlocks, stateToUse: subBlockState } = useEditorSubblockLayout(
|
||||||
blockConfig || ({} as any),
|
blockConfig || ({} as any),
|
||||||
currentBlockId || '',
|
currentBlockId || '',
|
||||||
@@ -215,41 +200,43 @@ export function Editor() {
|
|||||||
return { regularSubBlocks: regular, advancedOnlySubBlocks: advancedOnly }
|
return { regularSubBlocks: regular, advancedOnlySubBlocks: advancedOnly }
|
||||||
}, [subBlocks, canonicalIndex.canonicalIdBySubBlockId])
|
}, [subBlocks, canonicalIndex.canonicalIdBySubBlockId])
|
||||||
|
|
||||||
// Get block connections
|
|
||||||
const { incomingConnections, hasIncomingConnections } = useBlockConnections(currentBlockId || '')
|
const { incomingConnections, hasIncomingConnections } = useBlockConnections(currentBlockId || '')
|
||||||
|
|
||||||
// Connections resize hook
|
|
||||||
const { handleMouseDown: handleConnectionsResizeMouseDown, isResizing } = useConnectionsResize({
|
const { handleMouseDown: handleConnectionsResizeMouseDown, isResizing } = useConnectionsResize({
|
||||||
subBlocksRef,
|
subBlocksRef,
|
||||||
})
|
})
|
||||||
|
|
||||||
// Collaborative actions
|
|
||||||
const {
|
const {
|
||||||
collaborativeSetBlockCanonicalMode,
|
collaborativeSetBlockCanonicalMode,
|
||||||
collaborativeUpdateBlockName,
|
collaborativeUpdateBlockName,
|
||||||
collaborativeToggleBlockAdvancedMode,
|
collaborativeToggleBlockAdvancedMode,
|
||||||
collaborativeBatchToggleLocked,
|
|
||||||
} = useCollaborativeWorkflow()
|
} = useCollaborativeWorkflow()
|
||||||
|
|
||||||
// Advanced mode toggle handler
|
|
||||||
const handleToggleAdvancedMode = useCallback(() => {
|
const handleToggleAdvancedMode = useCallback(() => {
|
||||||
if (!currentBlockId || !canEditBlock) return
|
if (!currentBlockId || !userPermissions.canEdit) return
|
||||||
collaborativeToggleBlockAdvancedMode(currentBlockId)
|
collaborativeToggleBlockAdvancedMode(currentBlockId)
|
||||||
}, [currentBlockId, canEditBlock, collaborativeToggleBlockAdvancedMode])
|
}, [currentBlockId, userPermissions.canEdit, collaborativeToggleBlockAdvancedMode])
|
||||||
|
|
||||||
// Rename state
|
|
||||||
const [isRenaming, setIsRenaming] = useState(false)
|
const [isRenaming, setIsRenaming] = useState(false)
|
||||||
const [editedName, setEditedName] = useState('')
|
const [editedName, setEditedName] = useState('')
|
||||||
const nameInputRef = useRef<HTMLInputElement>(null)
|
|
||||||
|
/**
|
||||||
|
* Ref callback that auto-selects the input text when mounted.
|
||||||
|
*/
|
||||||
|
const nameInputRefCallback = useCallback((element: HTMLInputElement | null) => {
|
||||||
|
if (element) {
|
||||||
|
element.select()
|
||||||
|
}
|
||||||
|
}, [])
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Handles starting the rename process.
|
* Handles starting the rename process.
|
||||||
*/
|
*/
|
||||||
const handleStartRename = useCallback(() => {
|
const handleStartRename = useCallback(() => {
|
||||||
if (!canEditBlock || !currentBlock) return
|
if (!userPermissions.canEdit || !currentBlock) return
|
||||||
setEditedName(currentBlock.name || '')
|
setEditedName(currentBlock.name || '')
|
||||||
setIsRenaming(true)
|
setIsRenaming(true)
|
||||||
}, [canEditBlock, currentBlock])
|
}, [userPermissions.canEdit, currentBlock])
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Handles saving the renamed block.
|
* Handles saving the renamed block.
|
||||||
@@ -261,7 +248,6 @@ export function Editor() {
|
|||||||
if (trimmedName && trimmedName !== currentBlock?.name) {
|
if (trimmedName && trimmedName !== currentBlock?.name) {
|
||||||
const result = collaborativeUpdateBlockName(currentBlockId, trimmedName)
|
const result = collaborativeUpdateBlockName(currentBlockId, trimmedName)
|
||||||
if (!result.success) {
|
if (!result.success) {
|
||||||
// Keep rename mode open on error so user can correct the name
|
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -276,14 +262,6 @@ export function Editor() {
|
|||||||
setEditedName('')
|
setEditedName('')
|
||||||
}, [])
|
}, [])
|
||||||
|
|
||||||
// Focus input when entering rename mode
|
|
||||||
useEffect(() => {
|
|
||||||
if (isRenaming && nameInputRef.current) {
|
|
||||||
nameInputRef.current.select()
|
|
||||||
}
|
|
||||||
}, [isRenaming])
|
|
||||||
|
|
||||||
// Trigger rename mode when signaled from context menu
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
if (shouldFocusRename && currentBlock) {
|
if (shouldFocusRename && currentBlock) {
|
||||||
handleStartRename()
|
handleStartRename()
|
||||||
@@ -294,17 +272,13 @@ export function Editor() {
|
|||||||
/**
|
/**
|
||||||
* Handles opening documentation link in a new secure tab.
|
* Handles opening documentation link in a new secure tab.
|
||||||
*/
|
*/
|
||||||
const handleOpenDocs = () => {
|
const handleOpenDocs = useCallback(() => {
|
||||||
const docsLink = isSubflow ? subflowConfig?.docsLink : blockConfig?.docsLink
|
const docsLink = isSubflow ? subflowConfig?.docsLink : blockConfig?.docsLink
|
||||||
if (docsLink) {
|
window.open(docsLink || 'https://docs.sim.ai/quick-reference', '_blank', 'noopener,noreferrer')
|
||||||
window.open(docsLink, '_blank', 'noopener,noreferrer')
|
}, [isSubflow, subflowConfig?.docsLink, blockConfig?.docsLink])
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Get child workflow ID for workflow blocks
|
|
||||||
const childWorkflowId = isWorkflowBlock ? blockSubBlockValues?.workflowId : null
|
const childWorkflowId = isWorkflowBlock ? blockSubBlockValues?.workflowId : null
|
||||||
|
|
||||||
// Fetch child workflow state for preview (only for workflow blocks with a selected workflow)
|
|
||||||
const { data: childWorkflowState, isLoading: isLoadingChildWorkflow } =
|
const { data: childWorkflowState, isLoading: isLoadingChildWorkflow } =
|
||||||
useWorkflowState(childWorkflowId)
|
useWorkflowState(childWorkflowId)
|
||||||
|
|
||||||
@@ -317,7 +291,6 @@ export function Editor() {
|
|||||||
}
|
}
|
||||||
}, [childWorkflowId, workspaceId])
|
}, [childWorkflowId, workspaceId])
|
||||||
|
|
||||||
// Determine if connections are at minimum height (collapsed state)
|
|
||||||
const isConnectionsAtMinHeight = connectionsHeight <= 35
|
const isConnectionsAtMinHeight = connectionsHeight <= 35
|
||||||
|
|
||||||
return (
|
return (
|
||||||
@@ -338,7 +311,7 @@ export function Editor() {
|
|||||||
)}
|
)}
|
||||||
{isRenaming ? (
|
{isRenaming ? (
|
||||||
<input
|
<input
|
||||||
ref={nameInputRef}
|
ref={nameInputRefCallback}
|
||||||
type='text'
|
type='text'
|
||||||
value={editedName}
|
value={editedName}
|
||||||
onChange={(e) => setEditedName(e.target.value)}
|
onChange={(e) => setEditedName(e.target.value)}
|
||||||
@@ -368,36 +341,6 @@ export function Editor() {
|
|||||||
)}
|
)}
|
||||||
</div>
|
</div>
|
||||||
<div className='flex shrink-0 items-center gap-[8px]'>
|
<div className='flex shrink-0 items-center gap-[8px]'>
|
||||||
{/* Locked indicator - clickable to unlock if user has admin permissions, block is locked, and parent is not locked */}
|
|
||||||
{isLocked && currentBlock && (
|
|
||||||
<Tooltip.Root>
|
|
||||||
<Tooltip.Trigger asChild>
|
|
||||||
{userPermissions.canAdmin && currentBlock.locked && !isParentLocked ? (
|
|
||||||
<Button
|
|
||||||
variant='ghost'
|
|
||||||
className='p-0'
|
|
||||||
onClick={() => collaborativeBatchToggleLocked([currentBlockId!])}
|
|
||||||
aria-label='Unlock block'
|
|
||||||
>
|
|
||||||
<Unlock className='h-[14px] w-[14px] text-[var(--text-secondary)]' />
|
|
||||||
</Button>
|
|
||||||
) : (
|
|
||||||
<div className='flex items-center justify-center'>
|
|
||||||
<Lock className='h-[14px] w-[14px] text-[var(--text-secondary)]' />
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
</Tooltip.Trigger>
|
|
||||||
<Tooltip.Content side='top'>
|
|
||||||
<p>
|
|
||||||
{isParentLocked
|
|
||||||
? 'Parent container is locked'
|
|
||||||
: userPermissions.canAdmin && currentBlock.locked
|
|
||||||
? 'Unlock block'
|
|
||||||
: 'Block is locked'}
|
|
||||||
</p>
|
|
||||||
</Tooltip.Content>
|
|
||||||
</Tooltip.Root>
|
|
||||||
)}
|
|
||||||
{/* Rename button */}
|
{/* Rename button */}
|
||||||
{currentBlock && (
|
{currentBlock && (
|
||||||
<Tooltip.Root>
|
<Tooltip.Root>
|
||||||
@@ -406,7 +349,7 @@ export function Editor() {
|
|||||||
variant='ghost'
|
variant='ghost'
|
||||||
className='p-0'
|
className='p-0'
|
||||||
onClick={isRenaming ? handleSaveRename : handleStartRename}
|
onClick={isRenaming ? handleSaveRename : handleStartRename}
|
||||||
disabled={!canEditBlock}
|
disabled={!userPermissions.canEdit}
|
||||||
aria-label={isRenaming ? 'Save name' : 'Rename block'}
|
aria-label={isRenaming ? 'Save name' : 'Rename block'}
|
||||||
>
|
>
|
||||||
{isRenaming ? (
|
{isRenaming ? (
|
||||||
@@ -439,23 +382,21 @@ export function Editor() {
|
|||||||
</Tooltip.Content>
|
</Tooltip.Content>
|
||||||
</Tooltip.Root>
|
</Tooltip.Root>
|
||||||
)} */}
|
)} */}
|
||||||
{currentBlock && (isSubflow ? subflowConfig?.docsLink : blockConfig?.docsLink) && (
|
<Tooltip.Root>
|
||||||
<Tooltip.Root>
|
<Tooltip.Trigger asChild>
|
||||||
<Tooltip.Trigger asChild>
|
<Button
|
||||||
<Button
|
variant='ghost'
|
||||||
variant='ghost'
|
className='p-0'
|
||||||
className='p-0'
|
onClick={handleOpenDocs}
|
||||||
onClick={handleOpenDocs}
|
aria-label='Open documentation'
|
||||||
aria-label='Open documentation'
|
>
|
||||||
>
|
<BookOpen className='h-[14px] w-[14px]' />
|
||||||
<BookOpen className='h-[14px] w-[14px]' />
|
</Button>
|
||||||
</Button>
|
</Tooltip.Trigger>
|
||||||
</Tooltip.Trigger>
|
<Tooltip.Content side='top'>
|
||||||
<Tooltip.Content side='top'>
|
<p>Open docs</p>
|
||||||
<p>Open docs</p>
|
</Tooltip.Content>
|
||||||
</Tooltip.Content>
|
</Tooltip.Root>
|
||||||
</Tooltip.Root>
|
|
||||||
)}
|
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
@@ -474,7 +415,7 @@ export function Editor() {
|
|||||||
incomingConnections={incomingConnections}
|
incomingConnections={incomingConnections}
|
||||||
handleConnectionsResizeMouseDown={handleConnectionsResizeMouseDown}
|
handleConnectionsResizeMouseDown={handleConnectionsResizeMouseDown}
|
||||||
toggleConnectionsCollapsed={toggleConnectionsCollapsed}
|
toggleConnectionsCollapsed={toggleConnectionsCollapsed}
|
||||||
userCanEdit={canEditBlock}
|
userCanEdit={userPermissions.canEdit}
|
||||||
isConnectionsAtMinHeight={isConnectionsAtMinHeight}
|
isConnectionsAtMinHeight={isConnectionsAtMinHeight}
|
||||||
/>
|
/>
|
||||||
) : (
|
) : (
|
||||||
@@ -535,13 +476,7 @@ export function Editor() {
|
|||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
<div className='subblock-divider px-[2px] pt-[16px] pb-[13px]'>
|
<div className='subblock-divider px-[2px] pt-[16px] pb-[13px]'>
|
||||||
<div
|
<div className='h-[1.25px]' style={DASHED_DIVIDER_STYLE} />
|
||||||
className='h-[1.25px]'
|
|
||||||
style={{
|
|
||||||
backgroundImage:
|
|
||||||
'repeating-linear-gradient(to right, var(--border) 0px, var(--border) 6px, transparent 6px, transparent 12px)',
|
|
||||||
}}
|
|
||||||
/>
|
|
||||||
</div>
|
</div>
|
||||||
</>
|
</>
|
||||||
)}
|
)}
|
||||||
@@ -582,14 +517,14 @@ export function Editor() {
|
|||||||
config={subBlock}
|
config={subBlock}
|
||||||
isPreview={false}
|
isPreview={false}
|
||||||
subBlockValues={subBlockState}
|
subBlockValues={subBlockState}
|
||||||
disabled={!canEditBlock}
|
disabled={!userPermissions.canEdit}
|
||||||
fieldDiffStatus={undefined}
|
fieldDiffStatus={undefined}
|
||||||
allowExpandInPreview={false}
|
allowExpandInPreview={false}
|
||||||
canonicalToggle={
|
canonicalToggle={
|
||||||
isCanonicalSwap && canonicalMode && canonicalId
|
isCanonicalSwap && canonicalMode && canonicalId
|
||||||
? {
|
? {
|
||||||
mode: canonicalMode,
|
mode: canonicalMode,
|
||||||
disabled: !canEditBlock,
|
disabled: !userPermissions.canEdit,
|
||||||
onToggle: () => {
|
onToggle: () => {
|
||||||
if (!currentBlockId) return
|
if (!currentBlockId) return
|
||||||
const nextMode =
|
const nextMode =
|
||||||
@@ -606,28 +541,16 @@ export function Editor() {
|
|||||||
/>
|
/>
|
||||||
{showDivider && (
|
{showDivider && (
|
||||||
<div className='subblock-divider px-[2px] pt-[16px] pb-[13px]'>
|
<div className='subblock-divider px-[2px] pt-[16px] pb-[13px]'>
|
||||||
<div
|
<div className='h-[1.25px]' style={DASHED_DIVIDER_STYLE} />
|
||||||
className='h-[1.25px]'
|
|
||||||
style={{
|
|
||||||
backgroundImage:
|
|
||||||
'repeating-linear-gradient(to right, var(--border) 0px, var(--border) 6px, transparent 6px, transparent 12px)',
|
|
||||||
}}
|
|
||||||
/>
|
|
||||||
</div>
|
</div>
|
||||||
)}
|
)}
|
||||||
</div>
|
</div>
|
||||||
)
|
)
|
||||||
})}
|
})}
|
||||||
|
|
||||||
{hasAdvancedOnlyFields && canEditBlock && (
|
{hasAdvancedOnlyFields && userPermissions.canEdit && (
|
||||||
<div className='flex items-center gap-[10px] px-[2px] pt-[14px] pb-[12px]'>
|
<div className='flex items-center gap-[10px] px-[2px] pt-[14px] pb-[12px]'>
|
||||||
<div
|
<div className='h-[1.25px] flex-1' style={DASHED_DIVIDER_STYLE} />
|
||||||
className='h-[1.25px] flex-1'
|
|
||||||
style={{
|
|
||||||
backgroundImage:
|
|
||||||
'repeating-linear-gradient(to right, var(--border) 0px, var(--border) 6px, transparent 6px, transparent 12px)',
|
|
||||||
}}
|
|
||||||
/>
|
|
||||||
<button
|
<button
|
||||||
type='button'
|
type='button'
|
||||||
onClick={handleToggleAdvancedMode}
|
onClick={handleToggleAdvancedMode}
|
||||||
@@ -640,13 +563,7 @@ export function Editor() {
|
|||||||
className={`h-[14px] w-[14px] transition-transform duration-200 ${displayAdvancedOptions ? 'rotate-180' : ''}`}
|
className={`h-[14px] w-[14px] transition-transform duration-200 ${displayAdvancedOptions ? 'rotate-180' : ''}`}
|
||||||
/>
|
/>
|
||||||
</button>
|
</button>
|
||||||
<div
|
<div className='h-[1.25px] flex-1' style={DASHED_DIVIDER_STYLE} />
|
||||||
className='h-[1.25px] flex-1'
|
|
||||||
style={{
|
|
||||||
backgroundImage:
|
|
||||||
'repeating-linear-gradient(to right, var(--border) 0px, var(--border) 6px, transparent 6px, transparent 12px)',
|
|
||||||
}}
|
|
||||||
/>
|
|
||||||
</div>
|
</div>
|
||||||
)}
|
)}
|
||||||
|
|
||||||
@@ -664,19 +581,13 @@ export function Editor() {
|
|||||||
config={subBlock}
|
config={subBlock}
|
||||||
isPreview={false}
|
isPreview={false}
|
||||||
subBlockValues={subBlockState}
|
subBlockValues={subBlockState}
|
||||||
disabled={!canEditBlock}
|
disabled={!userPermissions.canEdit}
|
||||||
fieldDiffStatus={undefined}
|
fieldDiffStatus={undefined}
|
||||||
allowExpandInPreview={false}
|
allowExpandInPreview={false}
|
||||||
/>
|
/>
|
||||||
{index < advancedOnlySubBlocks.length - 1 && (
|
{index < advancedOnlySubBlocks.length - 1 && (
|
||||||
<div className='subblock-divider px-[2px] pt-[16px] pb-[13px]'>
|
<div className='subblock-divider px-[2px] pt-[16px] pb-[13px]'>
|
||||||
<div
|
<div className='h-[1.25px]' style={DASHED_DIVIDER_STYLE} />
|
||||||
className='h-[1.25px]'
|
|
||||||
style={{
|
|
||||||
backgroundImage:
|
|
||||||
'repeating-linear-gradient(to right, var(--border) 0px, var(--border) 6px, transparent 6px, transparent 12px)',
|
|
||||||
}}
|
|
||||||
/>
|
|
||||||
</div>
|
</div>
|
||||||
)}
|
)}
|
||||||
</div>
|
</div>
|
||||||
|
|||||||
@@ -45,13 +45,11 @@ import { useWorkflowExecution } from '@/app/workspace/[workspaceId]/w/[workflowI
|
|||||||
import { useDeleteWorkflow, useImportWorkflow } from '@/app/workspace/[workspaceId]/w/hooks'
|
import { useDeleteWorkflow, useImportWorkflow } from '@/app/workspace/[workspaceId]/w/hooks'
|
||||||
import { usePermissionConfig } from '@/hooks/use-permission-config'
|
import { usePermissionConfig } from '@/hooks/use-permission-config'
|
||||||
import { useChatStore } from '@/stores/chat/store'
|
import { useChatStore } from '@/stores/chat/store'
|
||||||
import { useNotificationStore } from '@/stores/notifications/store'
|
|
||||||
import type { PanelTab } from '@/stores/panel'
|
import type { PanelTab } from '@/stores/panel'
|
||||||
import { usePanelStore, useVariablesStore as usePanelVariablesStore } from '@/stores/panel'
|
import { usePanelStore, useVariablesStore as usePanelVariablesStore } from '@/stores/panel'
|
||||||
import { useVariablesStore } from '@/stores/variables/store'
|
import { useVariablesStore } from '@/stores/variables/store'
|
||||||
import { getWorkflowWithValues } from '@/stores/workflows'
|
import { getWorkflowWithValues } from '@/stores/workflows'
|
||||||
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
|
||||||
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
|
|
||||||
|
|
||||||
const logger = createLogger('Panel')
|
const logger = createLogger('Panel')
|
||||||
/**
|
/**
|
||||||
@@ -121,11 +119,6 @@ export const Panel = memo(function Panel() {
|
|||||||
hydration.phase === 'state-loading'
|
hydration.phase === 'state-loading'
|
||||||
const { handleAutoLayout: autoLayoutWithFitView } = useAutoLayout(activeWorkflowId || null)
|
const { handleAutoLayout: autoLayoutWithFitView } = useAutoLayout(activeWorkflowId || null)
|
||||||
|
|
||||||
// Check for locked blocks (disables auto-layout)
|
|
||||||
const hasLockedBlocks = useWorkflowStore((state) =>
|
|
||||||
Object.values(state.blocks).some((block) => block.locked)
|
|
||||||
)
|
|
||||||
|
|
||||||
// Delete workflow hook
|
// Delete workflow hook
|
||||||
const { isDeleting, handleDeleteWorkflow } = useDeleteWorkflow({
|
const { isDeleting, handleDeleteWorkflow } = useDeleteWorkflow({
|
||||||
workspaceId,
|
workspaceId,
|
||||||
@@ -237,24 +230,11 @@ export const Panel = memo(function Panel() {
|
|||||||
|
|
||||||
setIsAutoLayouting(true)
|
setIsAutoLayouting(true)
|
||||||
try {
|
try {
|
||||||
const result = await autoLayoutWithFitView()
|
await autoLayoutWithFitView()
|
||||||
if (!result.success && result.error) {
|
|
||||||
useNotificationStore.getState().addNotification({
|
|
||||||
level: 'info',
|
|
||||||
message: result.error,
|
|
||||||
workflowId: activeWorkflowId || undefined,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
} finally {
|
} finally {
|
||||||
setIsAutoLayouting(false)
|
setIsAutoLayouting(false)
|
||||||
}
|
}
|
||||||
}, [
|
}, [isExecuting, userPermissions.canEdit, isAutoLayouting, autoLayoutWithFitView])
|
||||||
isExecuting,
|
|
||||||
userPermissions.canEdit,
|
|
||||||
isAutoLayouting,
|
|
||||||
autoLayoutWithFitView,
|
|
||||||
activeWorkflowId,
|
|
||||||
])
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Handles exporting workflow as JSON
|
* Handles exporting workflow as JSON
|
||||||
@@ -424,10 +404,7 @@ export const Panel = memo(function Panel() {
|
|||||||
<PopoverContent align='start' side='bottom' sideOffset={8}>
|
<PopoverContent align='start' side='bottom' sideOffset={8}>
|
||||||
<PopoverItem
|
<PopoverItem
|
||||||
onClick={handleAutoLayout}
|
onClick={handleAutoLayout}
|
||||||
disabled={
|
disabled={isExecuting || !userPermissions.canEdit || isAutoLayouting}
|
||||||
isExecuting || !userPermissions.canEdit || isAutoLayouting || hasLockedBlocks
|
|
||||||
}
|
|
||||||
title={hasLockedBlocks ? 'Unlock blocks to use auto-layout' : undefined}
|
|
||||||
>
|
>
|
||||||
<Layout className='h-3 w-3' animate={isAutoLayouting} variant='clockwise' />
|
<Layout className='h-3 w-3' animate={isAutoLayouting} variant='clockwise' />
|
||||||
<span>Auto layout</span>
|
<span>Auto layout</span>
|
||||||
|
|||||||
@@ -80,7 +80,6 @@ export const SubflowNodeComponent = memo(({ data, id, selected }: NodeProps<Subf
|
|||||||
: undefined
|
: undefined
|
||||||
|
|
||||||
const isEnabled = currentBlock?.enabled ?? true
|
const isEnabled = currentBlock?.enabled ?? true
|
||||||
const isLocked = currentBlock?.locked ?? false
|
|
||||||
const isPreview = data?.isPreview || false
|
const isPreview = data?.isPreview || false
|
||||||
|
|
||||||
// Focus state
|
// Focus state
|
||||||
@@ -201,10 +200,7 @@ export const SubflowNodeComponent = memo(({ data, id, selected }: NodeProps<Subf
|
|||||||
{blockName}
|
{blockName}
|
||||||
</span>
|
</span>
|
||||||
</div>
|
</div>
|
||||||
<div className='flex items-center gap-1'>
|
{!isEnabled && <Badge variant='gray-secondary'>disabled</Badge>}
|
||||||
{!isEnabled && <Badge variant='gray-secondary'>disabled</Badge>}
|
|
||||||
{isLocked && <Badge variant='gray-secondary'>locked</Badge>}
|
|
||||||
</div>
|
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
{!isPreview && (
|
{!isPreview && (
|
||||||
|
|||||||
@@ -18,8 +18,6 @@ export interface UseBlockStateReturn {
|
|||||||
diffStatus: DiffStatus
|
diffStatus: DiffStatus
|
||||||
/** Whether this is a deleted block in diff mode */
|
/** Whether this is a deleted block in diff mode */
|
||||||
isDeletedBlock: boolean
|
isDeletedBlock: boolean
|
||||||
/** Whether the block is locked */
|
|
||||||
isLocked: boolean
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -42,11 +40,6 @@ export function useBlockState(
|
|||||||
? (data.blockState?.enabled ?? true)
|
? (data.blockState?.enabled ?? true)
|
||||||
: (currentBlock?.enabled ?? true)
|
: (currentBlock?.enabled ?? true)
|
||||||
|
|
||||||
// Determine if block is locked
|
|
||||||
const isLocked = data.isPreview
|
|
||||||
? (data.blockState?.locked ?? false)
|
|
||||||
: (currentBlock?.locked ?? false)
|
|
||||||
|
|
||||||
// Get diff status
|
// Get diff status
|
||||||
const diffStatus: DiffStatus =
|
const diffStatus: DiffStatus =
|
||||||
currentWorkflow.isDiffMode && currentBlock && hasDiffStatus(currentBlock)
|
currentWorkflow.isDiffMode && currentBlock && hasDiffStatus(currentBlock)
|
||||||
@@ -75,6 +68,5 @@ export function useBlockState(
|
|||||||
isActive,
|
isActive,
|
||||||
diffStatus,
|
diffStatus,
|
||||||
isDeletedBlock: isDeletedBlock ?? false,
|
isDeletedBlock: isDeletedBlock ?? false,
|
||||||
isLocked,
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -672,7 +672,6 @@ export const WorkflowBlock = memo(function WorkflowBlock({
|
|||||||
currentWorkflow,
|
currentWorkflow,
|
||||||
activeWorkflowId,
|
activeWorkflowId,
|
||||||
isEnabled,
|
isEnabled,
|
||||||
isLocked,
|
|
||||||
handleClick,
|
handleClick,
|
||||||
hasRing,
|
hasRing,
|
||||||
ringStyles,
|
ringStyles,
|
||||||
@@ -1101,7 +1100,7 @@ export const WorkflowBlock = memo(function WorkflowBlock({
|
|||||||
{name}
|
{name}
|
||||||
</span>
|
</span>
|
||||||
</div>
|
</div>
|
||||||
<div className='relative z-10 flex flex-shrink-0 items-center gap-1'>
|
<div className='relative z-10 flex flex-shrink-0 items-center gap-2'>
|
||||||
{isWorkflowSelector &&
|
{isWorkflowSelector &&
|
||||||
childWorkflowId &&
|
childWorkflowId &&
|
||||||
typeof childIsDeployed === 'boolean' &&
|
typeof childIsDeployed === 'boolean' &&
|
||||||
@@ -1134,7 +1133,6 @@ export const WorkflowBlock = memo(function WorkflowBlock({
|
|||||||
</Tooltip.Root>
|
</Tooltip.Root>
|
||||||
)}
|
)}
|
||||||
{!isEnabled && <Badge variant='gray-secondary'>disabled</Badge>}
|
{!isEnabled && <Badge variant='gray-secondary'>disabled</Badge>}
|
||||||
{isLocked && <Badge variant='gray-secondary'>locked</Badge>}
|
|
||||||
|
|
||||||
{type === 'schedule' && shouldShowScheduleBadge && scheduleInfo?.isDisabled && (
|
{type === 'schedule' && shouldShowScheduleBadge && scheduleInfo?.isDisabled && (
|
||||||
<Tooltip.Root>
|
<Tooltip.Root>
|
||||||
|
|||||||
@@ -188,7 +188,7 @@ export function useBlockOutputFields({
|
|||||||
baseOutputs = {
|
baseOutputs = {
|
||||||
input: { type: 'string', description: 'User message' },
|
input: { type: 'string', description: 'User message' },
|
||||||
conversationId: { type: 'string', description: 'Conversation ID' },
|
conversationId: { type: 'string', description: 'Conversation ID' },
|
||||||
files: { type: 'files', description: 'Uploaded files' },
|
files: { type: 'file[]', description: 'Uploaded files' },
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
const inputFormatValue = mergedSubBlocks?.inputFormat?.value
|
const inputFormatValue = mergedSubBlocks?.inputFormat?.value
|
||||||
|
|||||||
@@ -47,7 +47,6 @@ export function useBlockVisual({
|
|||||||
isActive: isExecuting,
|
isActive: isExecuting,
|
||||||
diffStatus,
|
diffStatus,
|
||||||
isDeletedBlock,
|
isDeletedBlock,
|
||||||
isLocked,
|
|
||||||
} = useBlockState(blockId, currentWorkflow, data)
|
} = useBlockState(blockId, currentWorkflow, data)
|
||||||
|
|
||||||
const currentBlockId = usePanelEditorStore((state) => state.currentBlockId)
|
const currentBlockId = usePanelEditorStore((state) => state.currentBlockId)
|
||||||
@@ -104,7 +103,6 @@ export function useBlockVisual({
|
|||||||
currentWorkflow,
|
currentWorkflow,
|
||||||
activeWorkflowId,
|
activeWorkflowId,
|
||||||
isEnabled,
|
isEnabled,
|
||||||
isLocked,
|
|
||||||
handleClick,
|
handleClick,
|
||||||
hasRing,
|
hasRing,
|
||||||
ringStyles,
|
ringStyles,
|
||||||
|
|||||||
@@ -31,8 +31,7 @@ export function useCanvasContextMenu({ blocks, getNodes, setNodes }: UseCanvasCo
|
|||||||
nodes.map((n) => {
|
nodes.map((n) => {
|
||||||
const block = blocks[n.id]
|
const block = blocks[n.id]
|
||||||
const parentId = block?.data?.parentId
|
const parentId = block?.data?.parentId
|
||||||
const parentBlock = parentId ? blocks[parentId] : undefined
|
const parentType = parentId ? blocks[parentId]?.type : undefined
|
||||||
const parentType = parentBlock?.type
|
|
||||||
return {
|
return {
|
||||||
id: n.id,
|
id: n.id,
|
||||||
type: block?.type || '',
|
type: block?.type || '',
|
||||||
@@ -40,9 +39,6 @@ export function useCanvasContextMenu({ blocks, getNodes, setNodes }: UseCanvasCo
|
|||||||
horizontalHandles: block?.horizontalHandles ?? false,
|
horizontalHandles: block?.horizontalHandles ?? false,
|
||||||
parentId,
|
parentId,
|
||||||
parentType,
|
parentType,
|
||||||
locked: block?.locked ?? false,
|
|
||||||
isParentLocked: parentBlock?.locked ?? false,
|
|
||||||
isParentDisabled: parentBlock ? !parentBlock.enabled : false,
|
|
||||||
}
|
}
|
||||||
}),
|
}),
|
||||||
[blocks]
|
[blocks]
|
||||||
|
|||||||
@@ -52,16 +52,6 @@ export async function applyAutoLayoutAndUpdateStore(
|
|||||||
return { success: false, error: 'No blocks to layout' }
|
return { success: false, error: 'No blocks to layout' }
|
||||||
}
|
}
|
||||||
|
|
||||||
// Check for locked blocks - auto-layout is disabled when blocks are locked
|
|
||||||
const hasLockedBlocks = Object.values(blocks).some((block) => block.locked)
|
|
||||||
if (hasLockedBlocks) {
|
|
||||||
logger.info('Auto layout skipped: workflow contains locked blocks', { workflowId })
|
|
||||||
return {
|
|
||||||
success: false,
|
|
||||||
error: 'Auto-layout is disabled when blocks are locked. Unlock blocks to use auto-layout.',
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Merge with default options
|
// Merge with default options
|
||||||
const layoutOptions = {
|
const layoutOptions = {
|
||||||
spacing: {
|
spacing: {
|
||||||
|
|||||||
@@ -1,87 +0,0 @@
|
|||||||
import type { BlockState } from '@/stores/workflows/workflow/types'
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Result of filtering protected blocks from a deletion operation
|
|
||||||
*/
|
|
||||||
export interface FilterProtectedBlocksResult {
|
|
||||||
/** Block IDs that can be deleted (not protected) */
|
|
||||||
deletableIds: string[]
|
|
||||||
/** Block IDs that are protected and cannot be deleted */
|
|
||||||
protectedIds: string[]
|
|
||||||
/** Whether all blocks are protected (deletion should be cancelled entirely) */
|
|
||||||
allProtected: boolean
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Checks if a block is protected from editing/deletion.
|
|
||||||
* A block is protected if it is locked or if its parent container is locked.
|
|
||||||
*
|
|
||||||
* @param blockId - The ID of the block to check
|
|
||||||
* @param blocks - Record of all blocks in the workflow
|
|
||||||
* @returns True if the block is protected
|
|
||||||
*/
|
|
||||||
export function isBlockProtected(blockId: string, blocks: Record<string, BlockState>): boolean {
|
|
||||||
const block = blocks[blockId]
|
|
||||||
if (!block) return false
|
|
||||||
|
|
||||||
// Block is locked directly
|
|
||||||
if (block.locked) return true
|
|
||||||
|
|
||||||
// Block is inside a locked container
|
|
||||||
const parentId = block.data?.parentId
|
|
||||||
if (parentId && blocks[parentId]?.locked) return true
|
|
||||||
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Checks if an edge is protected from modification.
|
|
||||||
* An edge is protected if either its source or target block is protected.
|
|
||||||
*
|
|
||||||
* @param edge - The edge to check (must have source and target)
|
|
||||||
* @param blocks - Record of all blocks in the workflow
|
|
||||||
* @returns True if the edge is protected
|
|
||||||
*/
|
|
||||||
export function isEdgeProtected(
|
|
||||||
edge: { source: string; target: string },
|
|
||||||
blocks: Record<string, BlockState>
|
|
||||||
): boolean {
|
|
||||||
return isBlockProtected(edge.source, blocks) || isBlockProtected(edge.target, blocks)
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Filters out protected blocks from a list of block IDs for deletion.
|
|
||||||
* Protected blocks are those that are locked or inside a locked container.
|
|
||||||
*
|
|
||||||
* @param blockIds - Array of block IDs to filter
|
|
||||||
* @param blocks - Record of all blocks in the workflow
|
|
||||||
* @returns Result containing deletable IDs, protected IDs, and whether all are protected
|
|
||||||
*/
|
|
||||||
export function filterProtectedBlocks(
|
|
||||||
blockIds: string[],
|
|
||||||
blocks: Record<string, BlockState>
|
|
||||||
): FilterProtectedBlocksResult {
|
|
||||||
const protectedIds = blockIds.filter((id) => isBlockProtected(id, blocks))
|
|
||||||
const deletableIds = blockIds.filter((id) => !protectedIds.includes(id))
|
|
||||||
|
|
||||||
return {
|
|
||||||
deletableIds,
|
|
||||||
protectedIds,
|
|
||||||
allProtected: protectedIds.length === blockIds.length && blockIds.length > 0,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Checks if any blocks in the selection are protected.
|
|
||||||
* Useful for determining if edit actions should be disabled.
|
|
||||||
*
|
|
||||||
* @param blockIds - Array of block IDs to check
|
|
||||||
* @param blocks - Record of all blocks in the workflow
|
|
||||||
* @returns True if any block is protected
|
|
||||||
*/
|
|
||||||
export function hasProtectedBlocks(
|
|
||||||
blockIds: string[],
|
|
||||||
blocks: Record<string, BlockState>
|
|
||||||
): boolean {
|
|
||||||
return blockIds.some((id) => isBlockProtected(id, blocks))
|
|
||||||
}
|
|
||||||
@@ -1,5 +1,4 @@
|
|||||||
export * from './auto-layout-utils'
|
export * from './auto-layout-utils'
|
||||||
export * from './block-protection-utils'
|
|
||||||
export * from './block-ring-utils'
|
export * from './block-ring-utils'
|
||||||
export * from './node-position-utils'
|
export * from './node-position-utils'
|
||||||
export * from './workflow-canvas-helpers'
|
export * from './workflow-canvas-helpers'
|
||||||
|
|||||||
@@ -55,10 +55,7 @@ import {
|
|||||||
clearDragHighlights,
|
clearDragHighlights,
|
||||||
computeClampedPositionUpdates,
|
computeClampedPositionUpdates,
|
||||||
estimateBlockDimensions,
|
estimateBlockDimensions,
|
||||||
filterProtectedBlocks,
|
|
||||||
getClampedPositionForNode,
|
getClampedPositionForNode,
|
||||||
isBlockProtected,
|
|
||||||
isEdgeProtected,
|
|
||||||
isInEditableElement,
|
isInEditableElement,
|
||||||
resolveParentChildSelectionConflicts,
|
resolveParentChildSelectionConflicts,
|
||||||
validateTriggerPaste,
|
validateTriggerPaste,
|
||||||
@@ -546,7 +543,6 @@ const WorkflowContent = React.memo(() => {
|
|||||||
collaborativeBatchRemoveBlocks,
|
collaborativeBatchRemoveBlocks,
|
||||||
collaborativeBatchToggleBlockEnabled,
|
collaborativeBatchToggleBlockEnabled,
|
||||||
collaborativeBatchToggleBlockHandles,
|
collaborativeBatchToggleBlockHandles,
|
||||||
collaborativeBatchToggleLocked,
|
|
||||||
undo,
|
undo,
|
||||||
redo,
|
redo,
|
||||||
} = useCollaborativeWorkflow()
|
} = useCollaborativeWorkflow()
|
||||||
@@ -1073,27 +1069,8 @@ const WorkflowContent = React.memo(() => {
|
|||||||
|
|
||||||
const handleContextDelete = useCallback(() => {
|
const handleContextDelete = useCallback(() => {
|
||||||
const blockIds = contextMenuBlocks.map((b) => b.id)
|
const blockIds = contextMenuBlocks.map((b) => b.id)
|
||||||
const { deletableIds, protectedIds, allProtected } = filterProtectedBlocks(blockIds, blocks)
|
collaborativeBatchRemoveBlocks(blockIds)
|
||||||
|
}, [contextMenuBlocks, collaborativeBatchRemoveBlocks])
|
||||||
if (protectedIds.length > 0) {
|
|
||||||
if (allProtected) {
|
|
||||||
addNotification({
|
|
||||||
level: 'info',
|
|
||||||
message: 'Cannot delete locked blocks or blocks inside locked containers',
|
|
||||||
workflowId: activeWorkflowId || undefined,
|
|
||||||
})
|
|
||||||
return
|
|
||||||
}
|
|
||||||
addNotification({
|
|
||||||
level: 'info',
|
|
||||||
message: `Skipped ${protectedIds.length} protected block(s)`,
|
|
||||||
workflowId: activeWorkflowId || undefined,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
if (deletableIds.length > 0) {
|
|
||||||
collaborativeBatchRemoveBlocks(deletableIds)
|
|
||||||
}
|
|
||||||
}, [contextMenuBlocks, collaborativeBatchRemoveBlocks, addNotification, activeWorkflowId, blocks])
|
|
||||||
|
|
||||||
const handleContextToggleEnabled = useCallback(() => {
|
const handleContextToggleEnabled = useCallback(() => {
|
||||||
const blockIds = contextMenuBlocks.map((block) => block.id)
|
const blockIds = contextMenuBlocks.map((block) => block.id)
|
||||||
@@ -1105,11 +1082,6 @@ const WorkflowContent = React.memo(() => {
|
|||||||
collaborativeBatchToggleBlockHandles(blockIds)
|
collaborativeBatchToggleBlockHandles(blockIds)
|
||||||
}, [contextMenuBlocks, collaborativeBatchToggleBlockHandles])
|
}, [contextMenuBlocks, collaborativeBatchToggleBlockHandles])
|
||||||
|
|
||||||
const handleContextToggleLocked = useCallback(() => {
|
|
||||||
const blockIds = contextMenuBlocks.map((block) => block.id)
|
|
||||||
collaborativeBatchToggleLocked(blockIds)
|
|
||||||
}, [contextMenuBlocks, collaborativeBatchToggleLocked])
|
|
||||||
|
|
||||||
const handleContextRemoveFromSubflow = useCallback(() => {
|
const handleContextRemoveFromSubflow = useCallback(() => {
|
||||||
const blocksToRemove = contextMenuBlocks.filter(
|
const blocksToRemove = contextMenuBlocks.filter(
|
||||||
(block) => block.parentId && (block.parentType === 'loop' || block.parentType === 'parallel')
|
(block) => block.parentId && (block.parentType === 'loop' || block.parentType === 'parallel')
|
||||||
@@ -1979,6 +1951,7 @@ const WorkflowContent = React.memo(() => {
|
|||||||
const loadingWorkflowRef = useRef<string | null>(null)
|
const loadingWorkflowRef = useRef<string | null>(null)
|
||||||
const currentWorkflowExists = Boolean(workflows[workflowIdParam])
|
const currentWorkflowExists = Boolean(workflows[workflowIdParam])
|
||||||
|
|
||||||
|
/** Initializes workflow when it exists in registry and needs hydration. */
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
const currentId = workflowIdParam
|
const currentId = workflowIdParam
|
||||||
const currentWorkspaceHydration = hydration.workspaceId
|
const currentWorkspaceHydration = hydration.workspaceId
|
||||||
@@ -2155,7 +2128,6 @@ const WorkflowContent = React.memo(() => {
|
|||||||
parentId: block.data?.parentId,
|
parentId: block.data?.parentId,
|
||||||
extent: block.data?.extent || undefined,
|
extent: block.data?.extent || undefined,
|
||||||
dragHandle: '.workflow-drag-handle',
|
dragHandle: '.workflow-drag-handle',
|
||||||
draggable: !isBlockProtected(block.id, blocks),
|
|
||||||
data: {
|
data: {
|
||||||
...block.data,
|
...block.data,
|
||||||
name: block.name,
|
name: block.name,
|
||||||
@@ -2191,7 +2163,6 @@ const WorkflowContent = React.memo(() => {
|
|||||||
position,
|
position,
|
||||||
parentId: block.data?.parentId,
|
parentId: block.data?.parentId,
|
||||||
dragHandle,
|
dragHandle,
|
||||||
draggable: !isBlockProtected(block.id, blocks),
|
|
||||||
extent: (() => {
|
extent: (() => {
|
||||||
// Clamp children to subflow body (exclude header)
|
// Clamp children to subflow body (exclude header)
|
||||||
const parentId = block.data?.parentId as string | undefined
|
const parentId = block.data?.parentId as string | undefined
|
||||||
@@ -2520,18 +2491,12 @@ const WorkflowContent = React.memo(() => {
|
|||||||
const edgeIdsToRemove = changes
|
const edgeIdsToRemove = changes
|
||||||
.filter((change: any) => change.type === 'remove')
|
.filter((change: any) => change.type === 'remove')
|
||||||
.map((change: any) => change.id)
|
.map((change: any) => change.id)
|
||||||
.filter((edgeId: string) => {
|
|
||||||
// Prevent removing edges connected to protected blocks
|
|
||||||
const edge = edges.find((e) => e.id === edgeId)
|
|
||||||
if (!edge) return true
|
|
||||||
return !isEdgeProtected(edge, blocks)
|
|
||||||
})
|
|
||||||
|
|
||||||
if (edgeIdsToRemove.length > 0) {
|
if (edgeIdsToRemove.length > 0) {
|
||||||
collaborativeBatchRemoveEdges(edgeIdsToRemove)
|
collaborativeBatchRemoveEdges(edgeIdsToRemove)
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
[collaborativeBatchRemoveEdges, edges, blocks]
|
[collaborativeBatchRemoveEdges]
|
||||||
)
|
)
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -2593,16 +2558,6 @@ const WorkflowContent = React.memo(() => {
|
|||||||
|
|
||||||
if (!sourceNode || !targetNode) return
|
if (!sourceNode || !targetNode) return
|
||||||
|
|
||||||
// Prevent connections to/from protected blocks
|
|
||||||
if (isEdgeProtected(connection, blocks)) {
|
|
||||||
addNotification({
|
|
||||||
level: 'info',
|
|
||||||
message: 'Cannot connect to locked blocks or blocks inside locked containers',
|
|
||||||
workflowId: activeWorkflowId || undefined,
|
|
||||||
})
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// Get parent information (handle container start node case)
|
// Get parent information (handle container start node case)
|
||||||
const sourceParentId =
|
const sourceParentId =
|
||||||
blocks[sourceNode.id]?.data?.parentId ||
|
blocks[sourceNode.id]?.data?.parentId ||
|
||||||
@@ -2665,7 +2620,7 @@ const WorkflowContent = React.memo(() => {
|
|||||||
connectionCompletedRef.current = true
|
connectionCompletedRef.current = true
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
[addEdge, getNodes, blocks, addNotification, activeWorkflowId]
|
[addEdge, getNodes, blocks]
|
||||||
)
|
)
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -2760,9 +2715,6 @@ const WorkflowContent = React.memo(() => {
|
|||||||
// Only consider container nodes that aren't the dragged node
|
// Only consider container nodes that aren't the dragged node
|
||||||
if (n.type !== 'subflowNode' || n.id === node.id) return false
|
if (n.type !== 'subflowNode' || n.id === node.id) return false
|
||||||
|
|
||||||
// Don't allow dropping into locked containers
|
|
||||||
if (blocks[n.id]?.locked) return false
|
|
||||||
|
|
||||||
// Get the container's absolute position
|
// Get the container's absolute position
|
||||||
const containerAbsolutePos = getNodeAbsolutePosition(n.id)
|
const containerAbsolutePos = getNodeAbsolutePosition(n.id)
|
||||||
|
|
||||||
@@ -2855,11 +2807,6 @@ const WorkflowContent = React.memo(() => {
|
|||||||
/** Captures initial parent ID and position when drag starts. */
|
/** Captures initial parent ID and position when drag starts. */
|
||||||
const onNodeDragStart = useCallback(
|
const onNodeDragStart = useCallback(
|
||||||
(_event: React.MouseEvent, node: any) => {
|
(_event: React.MouseEvent, node: any) => {
|
||||||
// Prevent dragging protected blocks
|
|
||||||
if (isBlockProtected(node.id, blocks)) {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// Store the original parent ID when starting to drag
|
// Store the original parent ID when starting to drag
|
||||||
const currentParentId = blocks[node.id]?.data?.parentId || null
|
const currentParentId = blocks[node.id]?.data?.parentId || null
|
||||||
setDragStartParentId(currentParentId)
|
setDragStartParentId(currentParentId)
|
||||||
@@ -2888,7 +2835,7 @@ const WorkflowContent = React.memo(() => {
|
|||||||
}
|
}
|
||||||
})
|
})
|
||||||
},
|
},
|
||||||
[blocks, setDragStartPosition, getNodes, setPotentialParentId]
|
[blocks, setDragStartPosition, getNodes, potentialParentId, setPotentialParentId]
|
||||||
)
|
)
|
||||||
|
|
||||||
/** Handles node drag stop to establish parent-child relationships. */
|
/** Handles node drag stop to establish parent-child relationships. */
|
||||||
@@ -2950,18 +2897,6 @@ const WorkflowContent = React.memo(() => {
|
|||||||
// Don't process parent changes if the node hasn't actually changed parent or is being moved within same parent
|
// Don't process parent changes if the node hasn't actually changed parent or is being moved within same parent
|
||||||
if (potentialParentId === dragStartParentId) return
|
if (potentialParentId === dragStartParentId) return
|
||||||
|
|
||||||
// Prevent moving locked blocks out of locked containers
|
|
||||||
// Unlocked blocks (e.g., duplicates) can be moved out freely
|
|
||||||
if (dragStartParentId && blocks[dragStartParentId]?.locked && blocks[node.id]?.locked) {
|
|
||||||
addNotification({
|
|
||||||
level: 'info',
|
|
||||||
message: 'Cannot move locked blocks out of locked containers',
|
|
||||||
workflowId: activeWorkflowId || undefined,
|
|
||||||
})
|
|
||||||
setPotentialParentId(dragStartParentId) // Reset to original parent
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check if this is a starter block - starter blocks should never be in containers
|
// Check if this is a starter block - starter blocks should never be in containers
|
||||||
const isStarterBlock = node.data?.type === 'starter'
|
const isStarterBlock = node.data?.type === 'starter'
|
||||||
if (isStarterBlock) {
|
if (isStarterBlock) {
|
||||||
@@ -3358,16 +3293,6 @@ const WorkflowContent = React.memo(() => {
|
|||||||
/** Stable delete handler to avoid creating new function references per edge. */
|
/** Stable delete handler to avoid creating new function references per edge. */
|
||||||
const handleEdgeDelete = useCallback(
|
const handleEdgeDelete = useCallback(
|
||||||
(edgeId: string) => {
|
(edgeId: string) => {
|
||||||
// Prevent removing edges connected to protected blocks
|
|
||||||
const edge = edges.find((e) => e.id === edgeId)
|
|
||||||
if (edge && isEdgeProtected(edge, blocks)) {
|
|
||||||
addNotification({
|
|
||||||
level: 'info',
|
|
||||||
message: 'Cannot remove connections from locked blocks',
|
|
||||||
workflowId: activeWorkflowId || undefined,
|
|
||||||
})
|
|
||||||
return
|
|
||||||
}
|
|
||||||
removeEdge(edgeId)
|
removeEdge(edgeId)
|
||||||
// Remove this edge from selection (find by edge ID value)
|
// Remove this edge from selection (find by edge ID value)
|
||||||
setSelectedEdges((prev) => {
|
setSelectedEdges((prev) => {
|
||||||
@@ -3380,7 +3305,7 @@ const WorkflowContent = React.memo(() => {
|
|||||||
return next
|
return next
|
||||||
})
|
})
|
||||||
},
|
},
|
||||||
[removeEdge, edges, blocks, addNotification, activeWorkflowId]
|
[removeEdge]
|
||||||
)
|
)
|
||||||
|
|
||||||
/** Transforms edges to include selection state and delete handlers. Memoized to prevent re-renders. */
|
/** Transforms edges to include selection state and delete handlers. Memoized to prevent re-renders. */
|
||||||
@@ -3421,15 +3346,9 @@ const WorkflowContent = React.memo(() => {
|
|||||||
|
|
||||||
// Handle edge deletion first (edges take priority if selected)
|
// Handle edge deletion first (edges take priority if selected)
|
||||||
if (selectedEdges.size > 0) {
|
if (selectedEdges.size > 0) {
|
||||||
// Get all selected edge IDs and filter out edges connected to protected blocks
|
// Get all selected edge IDs and batch delete them
|
||||||
const edgeIds = Array.from(selectedEdges.values()).filter((edgeId) => {
|
const edgeIds = Array.from(selectedEdges.values())
|
||||||
const edge = edges.find((e) => e.id === edgeId)
|
collaborativeBatchRemoveEdges(edgeIds)
|
||||||
if (!edge) return true
|
|
||||||
return !isEdgeProtected(edge, blocks)
|
|
||||||
})
|
|
||||||
if (edgeIds.length > 0) {
|
|
||||||
collaborativeBatchRemoveEdges(edgeIds)
|
|
||||||
}
|
|
||||||
setSelectedEdges(new Map())
|
setSelectedEdges(new Map())
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
@@ -3446,29 +3365,7 @@ const WorkflowContent = React.memo(() => {
|
|||||||
|
|
||||||
event.preventDefault()
|
event.preventDefault()
|
||||||
const selectedIds = selectedNodes.map((node) => node.id)
|
const selectedIds = selectedNodes.map((node) => node.id)
|
||||||
const { deletableIds, protectedIds, allProtected } = filterProtectedBlocks(
|
collaborativeBatchRemoveBlocks(selectedIds)
|
||||||
selectedIds,
|
|
||||||
blocks
|
|
||||||
)
|
|
||||||
|
|
||||||
if (protectedIds.length > 0) {
|
|
||||||
if (allProtected) {
|
|
||||||
addNotification({
|
|
||||||
level: 'info',
|
|
||||||
message: 'Cannot delete locked blocks or blocks inside locked containers',
|
|
||||||
workflowId: activeWorkflowId || undefined,
|
|
||||||
})
|
|
||||||
return
|
|
||||||
}
|
|
||||||
addNotification({
|
|
||||||
level: 'info',
|
|
||||||
message: `Skipped ${protectedIds.length} protected block(s)`,
|
|
||||||
workflowId: activeWorkflowId || undefined,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
if (deletableIds.length > 0) {
|
|
||||||
collaborativeBatchRemoveBlocks(deletableIds)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
window.addEventListener('keydown', handleKeyDown)
|
window.addEventListener('keydown', handleKeyDown)
|
||||||
@@ -3479,10 +3376,6 @@ const WorkflowContent = React.memo(() => {
|
|||||||
getNodes,
|
getNodes,
|
||||||
collaborativeBatchRemoveBlocks,
|
collaborativeBatchRemoveBlocks,
|
||||||
effectivePermissions.canEdit,
|
effectivePermissions.canEdit,
|
||||||
blocks,
|
|
||||||
edges,
|
|
||||||
addNotification,
|
|
||||||
activeWorkflowId,
|
|
||||||
])
|
])
|
||||||
|
|
||||||
return (
|
return (
|
||||||
@@ -3603,18 +3496,12 @@ const WorkflowContent = React.memo(() => {
|
|||||||
(b) => b.parentId && (b.parentType === 'loop' || b.parentType === 'parallel')
|
(b) => b.parentId && (b.parentType === 'loop' || b.parentType === 'parallel')
|
||||||
)}
|
)}
|
||||||
canRunFromBlock={runFromBlockState.canRun}
|
canRunFromBlock={runFromBlockState.canRun}
|
||||||
disableEdit={
|
disableEdit={!effectivePermissions.canEdit}
|
||||||
!effectivePermissions.canEdit ||
|
|
||||||
contextMenuBlocks.some((b) => b.locked || b.isParentLocked)
|
|
||||||
}
|
|
||||||
userCanEdit={effectivePermissions.canEdit}
|
|
||||||
isExecuting={isExecuting}
|
isExecuting={isExecuting}
|
||||||
isPositionalTrigger={
|
isPositionalTrigger={
|
||||||
contextMenuBlocks.length === 1 &&
|
contextMenuBlocks.length === 1 &&
|
||||||
edges.filter((e) => e.target === contextMenuBlocks[0]?.id).length === 0
|
edges.filter((e) => e.target === contextMenuBlocks[0]?.id).length === 0
|
||||||
}
|
}
|
||||||
onToggleLocked={handleContextToggleLocked}
|
|
||||||
canAdmin={effectivePermissions.canAdmin}
|
|
||||||
/>
|
/>
|
||||||
|
|
||||||
<CanvasMenu
|
<CanvasMenu
|
||||||
@@ -3637,7 +3524,6 @@ const WorkflowContent = React.memo(() => {
|
|||||||
disableEdit={!effectivePermissions.canEdit}
|
disableEdit={!effectivePermissions.canEdit}
|
||||||
canUndo={canUndo}
|
canUndo={canUndo}
|
||||||
canRedo={canRedo}
|
canRedo={canRedo}
|
||||||
hasLockedBlocks={Object.values(blocks).some((b) => b.locked)}
|
|
||||||
/>
|
/>
|
||||||
</>
|
</>
|
||||||
)}
|
)}
|
||||||
|
|||||||
@@ -417,11 +417,11 @@ async function executeWebhookJobInternal(
|
|||||||
if (triggerBlock?.subBlocks?.inputFormat?.value) {
|
if (triggerBlock?.subBlocks?.inputFormat?.value) {
|
||||||
const inputFormat = triggerBlock.subBlocks.inputFormat.value as unknown as Array<{
|
const inputFormat = triggerBlock.subBlocks.inputFormat.value as unknown as Array<{
|
||||||
name: string
|
name: string
|
||||||
type: 'string' | 'number' | 'boolean' | 'object' | 'array' | 'files'
|
type: 'string' | 'number' | 'boolean' | 'object' | 'array' | 'file[]'
|
||||||
}>
|
}>
|
||||||
logger.debug(`[${requestId}] Processing generic webhook files from inputFormat`)
|
logger.debug(`[${requestId}] Processing generic webhook files from inputFormat`)
|
||||||
|
|
||||||
const fileFields = inputFormat.filter((field) => field.type === 'files')
|
const fileFields = inputFormat.filter((field) => field.type === 'file[]')
|
||||||
|
|
||||||
if (fileFields.length > 0 && typeof input === 'object' && input !== null) {
|
if (fileFields.length > 0 && typeof input === 'object' && input !== null) {
|
||||||
const executionContext = {
|
const executionContext = {
|
||||||
|
|||||||
@@ -442,7 +442,16 @@ describe('Blocks Module', () => {
|
|||||||
})
|
})
|
||||||
|
|
||||||
it('should have valid output types', () => {
|
it('should have valid output types', () => {
|
||||||
const validPrimitiveTypes = ['string', 'number', 'boolean', 'json', 'array', 'files', 'any']
|
const validPrimitiveTypes = [
|
||||||
|
'string',
|
||||||
|
'number',
|
||||||
|
'boolean',
|
||||||
|
'json',
|
||||||
|
'array',
|
||||||
|
'file',
|
||||||
|
'file[]',
|
||||||
|
'any',
|
||||||
|
]
|
||||||
const blocks = getAllBlocks()
|
const blocks = getAllBlocks()
|
||||||
for (const block of blocks) {
|
for (const block of blocks) {
|
||||||
for (const [key, outputConfig] of Object.entries(block.outputs)) {
|
for (const [key, outputConfig] of Object.entries(block.outputs)) {
|
||||||
|
|||||||
@@ -26,7 +26,7 @@ export const ChatTriggerBlock: BlockConfig = {
|
|||||||
outputs: {
|
outputs: {
|
||||||
input: { type: 'string', description: 'User message' },
|
input: { type: 'string', description: 'User message' },
|
||||||
conversationId: { type: 'string', description: 'Conversation ID' },
|
conversationId: { type: 'string', description: 'Conversation ID' },
|
||||||
files: { type: 'files', description: 'Uploaded files' },
|
files: { type: 'file[]', description: 'Uploaded files' },
|
||||||
},
|
},
|
||||||
triggers: {
|
triggers: {
|
||||||
enabled: true,
|
enabled: true,
|
||||||
|
|||||||
@@ -578,13 +578,20 @@ export const DiscordBlock: BlockConfig<DiscordResponse> = {
|
|||||||
if (!params.serverId) throw new Error('Server ID is required')
|
if (!params.serverId) throw new Error('Server ID is required')
|
||||||
|
|
||||||
switch (params.operation) {
|
switch (params.operation) {
|
||||||
case 'discord_send_message':
|
case 'discord_send_message': {
|
||||||
|
const fileParam = params.attachmentFiles || params.files
|
||||||
|
const normalizedFiles = fileParam
|
||||||
|
? Array.isArray(fileParam)
|
||||||
|
? fileParam
|
||||||
|
: [fileParam]
|
||||||
|
: undefined
|
||||||
return {
|
return {
|
||||||
...commonParams,
|
...commonParams,
|
||||||
channelId: params.channelId,
|
channelId: params.channelId,
|
||||||
content: params.content,
|
content: params.content,
|
||||||
files: params.attachmentFiles || params.files,
|
files: normalizedFiles,
|
||||||
}
|
}
|
||||||
|
}
|
||||||
case 'discord_get_messages':
|
case 'discord_get_messages':
|
||||||
return {
|
return {
|
||||||
...commonParams,
|
...commonParams,
|
||||||
@@ -789,6 +796,7 @@ export const DiscordBlock: BlockConfig<DiscordResponse> = {
|
|||||||
},
|
},
|
||||||
outputs: {
|
outputs: {
|
||||||
message: { type: 'string', description: 'Status message' },
|
message: { type: 'string', description: 'Status message' },
|
||||||
|
files: { type: 'file[]', description: 'Files attached to the message' },
|
||||||
data: { type: 'json', description: 'Response data' },
|
data: { type: 'json', description: 'Response data' },
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -60,12 +60,25 @@ export const DropboxBlock: BlockConfig<DropboxResponse> = {
|
|||||||
required: true,
|
required: true,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
id: 'fileContent',
|
id: 'uploadFile',
|
||||||
title: 'File Content',
|
title: 'File',
|
||||||
type: 'long-input',
|
type: 'file-upload',
|
||||||
placeholder: 'Base64 encoded file content or file reference',
|
canonicalParamId: 'fileContent',
|
||||||
condition: { field: 'operation', value: 'dropbox_upload' },
|
placeholder: 'Upload file to send to Dropbox',
|
||||||
|
mode: 'basic',
|
||||||
|
multiple: false,
|
||||||
required: true,
|
required: true,
|
||||||
|
condition: { field: 'operation', value: 'dropbox_upload' },
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: 'fileContent',
|
||||||
|
title: 'File',
|
||||||
|
type: 'short-input',
|
||||||
|
canonicalParamId: 'fileContent',
|
||||||
|
placeholder: 'Reference file from previous blocks',
|
||||||
|
mode: 'advanced',
|
||||||
|
required: true,
|
||||||
|
condition: { field: 'operation', value: 'dropbox_upload' },
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
id: 'mode',
|
id: 'mode',
|
||||||
@@ -337,7 +350,8 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
|
|||||||
path: { type: 'string', description: 'Path in Dropbox' },
|
path: { type: 'string', description: 'Path in Dropbox' },
|
||||||
autorename: { type: 'boolean', description: 'Auto-rename on conflict' },
|
autorename: { type: 'boolean', description: 'Auto-rename on conflict' },
|
||||||
// Upload inputs
|
// Upload inputs
|
||||||
fileContent: { type: 'string', description: 'Base64 encoded file content' },
|
uploadFile: { type: 'json', description: 'Uploaded file (UserFile)' },
|
||||||
|
fileContent: { type: 'json', description: 'File reference or UserFile object' },
|
||||||
fileName: { type: 'string', description: 'Optional filename' },
|
fileName: { type: 'string', description: 'Optional filename' },
|
||||||
mode: { type: 'string', description: 'Write mode: add or overwrite' },
|
mode: { type: 'string', description: 'Write mode: add or overwrite' },
|
||||||
mute: { type: 'boolean', description: 'Mute notifications' },
|
mute: { type: 'boolean', description: 'Mute notifications' },
|
||||||
@@ -360,7 +374,7 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
|
|||||||
},
|
},
|
||||||
outputs: {
|
outputs: {
|
||||||
// Upload/Download outputs
|
// Upload/Download outputs
|
||||||
file: { type: 'json', description: 'File metadata' },
|
file: { type: 'file', description: 'Downloaded file stored in execution files' },
|
||||||
content: { type: 'string', description: 'File content (base64)' },
|
content: { type: 'string', description: 'File content (base64)' },
|
||||||
temporaryLink: { type: 'string', description: 'Temporary download link' },
|
temporaryLink: { type: 'string', description: 'Temporary download link' },
|
||||||
// List folder outputs
|
// List folder outputs
|
||||||
|
|||||||
@@ -73,5 +73,6 @@ export const ElevenLabsBlock: BlockConfig<ElevenLabsBlockResponse> = {
|
|||||||
|
|
||||||
outputs: {
|
outputs: {
|
||||||
audioUrl: { type: 'string', description: 'Generated audio URL' },
|
audioUrl: { type: 'string', description: 'Generated audio URL' },
|
||||||
|
audioFile: { type: 'file', description: 'Generated audio file' },
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|||||||
625
apps/sim/blocks/blocks/enrich.ts
Normal file
625
apps/sim/blocks/blocks/enrich.ts
Normal file
@@ -0,0 +1,625 @@
|
|||||||
|
import { EnrichSoIcon } from '@/components/icons'
|
||||||
|
import type { BlockConfig } from '@/blocks/types'
|
||||||
|
import { AuthMode } from '@/blocks/types'
|
||||||
|
|
||||||
|
export const EnrichBlock: BlockConfig = {
|
||||||
|
type: 'enrich',
|
||||||
|
name: 'Enrich',
|
||||||
|
description: 'B2B data enrichment and LinkedIn intelligence with Enrich.so',
|
||||||
|
authMode: AuthMode.ApiKey,
|
||||||
|
longDescription:
|
||||||
|
'Access real-time B2B data intelligence with Enrich.so. Enrich profiles from email addresses, find work emails from LinkedIn, verify email deliverability, search for people and companies, and analyze LinkedIn post engagement.',
|
||||||
|
docsLink: 'https://docs.enrich.so/',
|
||||||
|
category: 'tools',
|
||||||
|
bgColor: '#E5E5E6',
|
||||||
|
icon: EnrichSoIcon,
|
||||||
|
subBlocks: [
|
||||||
|
{
|
||||||
|
id: 'operation',
|
||||||
|
title: 'Operation',
|
||||||
|
type: 'dropdown',
|
||||||
|
options: [
|
||||||
|
// Person/Profile Enrichment
|
||||||
|
{ label: 'Email to Profile', id: 'email_to_profile' },
|
||||||
|
{ label: 'Email to Person (Lite)', id: 'email_to_person_lite' },
|
||||||
|
{ label: 'LinkedIn Profile Enrichment', id: 'linkedin_profile' },
|
||||||
|
// Email Finding
|
||||||
|
{ label: 'Find Email', id: 'find_email' },
|
||||||
|
{ label: 'LinkedIn to Work Email', id: 'linkedin_to_work_email' },
|
||||||
|
{ label: 'LinkedIn to Personal Email', id: 'linkedin_to_personal_email' },
|
||||||
|
// Phone Finding
|
||||||
|
{ label: 'Phone Finder (LinkedIn)', id: 'phone_finder' },
|
||||||
|
{ label: 'Email to Phone', id: 'email_to_phone' },
|
||||||
|
// Email Verification
|
||||||
|
{ label: 'Verify Email', id: 'verify_email' },
|
||||||
|
{ label: 'Disposable Email Check', id: 'disposable_email_check' },
|
||||||
|
// IP/Company Lookup
|
||||||
|
{ label: 'Email to IP', id: 'email_to_ip' },
|
||||||
|
{ label: 'IP to Company', id: 'ip_to_company' },
|
||||||
|
// Company Enrichment
|
||||||
|
{ label: 'Company Lookup', id: 'company_lookup' },
|
||||||
|
{ label: 'Company Funding & Traffic', id: 'company_funding' },
|
||||||
|
{ label: 'Company Revenue', id: 'company_revenue' },
|
||||||
|
// Search
|
||||||
|
{ label: 'Search People', id: 'search_people' },
|
||||||
|
{ label: 'Search Company', id: 'search_company' },
|
||||||
|
{ label: 'Search Company Employees', id: 'search_company_employees' },
|
||||||
|
{ label: 'Search Similar Companies', id: 'search_similar_companies' },
|
||||||
|
{ label: 'Sales Pointer (People)', id: 'sales_pointer_people' },
|
||||||
|
// LinkedIn Posts/Activities
|
||||||
|
{ label: 'Search Posts', id: 'search_posts' },
|
||||||
|
{ label: 'Get Post Details', id: 'get_post_details' },
|
||||||
|
{ label: 'Search Post Reactions', id: 'search_post_reactions' },
|
||||||
|
{ label: 'Search Post Comments', id: 'search_post_comments' },
|
||||||
|
{ label: 'Search People Activities', id: 'search_people_activities' },
|
||||||
|
{ label: 'Search Company Activities', id: 'search_company_activities' },
|
||||||
|
// Other
|
||||||
|
{ label: 'Reverse Hash Lookup', id: 'reverse_hash_lookup' },
|
||||||
|
{ label: 'Search Logo', id: 'search_logo' },
|
||||||
|
{ label: 'Check Credits', id: 'check_credits' },
|
||||||
|
],
|
||||||
|
value: () => 'email_to_profile',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: 'apiKey',
|
||||||
|
title: 'Enrich API Key',
|
||||||
|
type: 'short-input',
|
||||||
|
placeholder: 'Enter your Enrich.so API key',
|
||||||
|
password: true,
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
|
||||||
|
{
|
||||||
|
id: 'email',
|
||||||
|
title: 'Email Address',
|
||||||
|
type: 'short-input',
|
||||||
|
placeholder: 'john.doe@company.com',
|
||||||
|
condition: {
|
||||||
|
field: 'operation',
|
||||||
|
value: [
|
||||||
|
'email_to_profile',
|
||||||
|
'email_to_person_lite',
|
||||||
|
'email_to_phone',
|
||||||
|
'verify_email',
|
||||||
|
'disposable_email_check',
|
||||||
|
'email_to_ip',
|
||||||
|
],
|
||||||
|
},
|
||||||
|
required: {
|
||||||
|
field: 'operation',
|
||||||
|
value: [
|
||||||
|
'email_to_profile',
|
||||||
|
'email_to_person_lite',
|
||||||
|
'email_to_phone',
|
||||||
|
'verify_email',
|
||||||
|
'disposable_email_check',
|
||||||
|
'email_to_ip',
|
||||||
|
],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
|
||||||
|
{
|
||||||
|
id: 'inRealtime',
|
||||||
|
title: 'Fetch Fresh Data',
|
||||||
|
type: 'switch',
|
||||||
|
condition: { field: 'operation', value: 'email_to_profile' },
|
||||||
|
mode: 'advanced',
|
||||||
|
},
|
||||||
|
|
||||||
|
{
|
||||||
|
id: 'linkedinUrl',
|
||||||
|
title: 'LinkedIn Profile URL',
|
||||||
|
type: 'short-input',
|
||||||
|
placeholder: 'linkedin.com/in/williamhgates',
|
||||||
|
condition: {
|
||||||
|
field: 'operation',
|
||||||
|
value: [
|
||||||
|
'linkedin_profile',
|
||||||
|
'linkedin_to_work_email',
|
||||||
|
'linkedin_to_personal_email',
|
||||||
|
'phone_finder',
|
||||||
|
],
|
||||||
|
},
|
||||||
|
required: {
|
||||||
|
field: 'operation',
|
||||||
|
value: [
|
||||||
|
'linkedin_profile',
|
||||||
|
'linkedin_to_work_email',
|
||||||
|
'linkedin_to_personal_email',
|
||||||
|
'phone_finder',
|
||||||
|
],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
|
||||||
|
{
|
||||||
|
id: 'fullName',
|
||||||
|
title: 'Full Name',
|
||||||
|
type: 'short-input',
|
||||||
|
placeholder: 'John Doe',
|
||||||
|
condition: { field: 'operation', value: 'find_email' },
|
||||||
|
required: { field: 'operation', value: 'find_email' },
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: 'companyDomain',
|
||||||
|
title: 'Company Domain',
|
||||||
|
type: 'short-input',
|
||||||
|
placeholder: 'example.com',
|
||||||
|
condition: { field: 'operation', value: 'find_email' },
|
||||||
|
required: { field: 'operation', value: 'find_email' },
|
||||||
|
},
|
||||||
|
|
||||||
|
{
|
||||||
|
id: 'ip',
|
||||||
|
title: 'IP Address',
|
||||||
|
type: 'short-input',
|
||||||
|
placeholder: '86.92.60.221',
|
||||||
|
condition: { field: 'operation', value: 'ip_to_company' },
|
||||||
|
required: { field: 'operation', value: 'ip_to_company' },
|
||||||
|
},
|
||||||
|
|
||||||
|
{
|
||||||
|
id: 'companyName',
|
||||||
|
title: 'Company Name',
|
||||||
|
type: 'short-input',
|
||||||
|
placeholder: 'Google',
|
||||||
|
condition: { field: 'operation', value: 'company_lookup' },
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: 'domain',
|
||||||
|
title: 'Domain',
|
||||||
|
type: 'short-input',
|
||||||
|
placeholder: 'google.com',
|
||||||
|
condition: {
|
||||||
|
field: 'operation',
|
||||||
|
value: ['company_lookup', 'company_funding', 'company_revenue', 'search_logo'],
|
||||||
|
},
|
||||||
|
required: {
|
||||||
|
field: 'operation',
|
||||||
|
value: ['company_funding', 'company_revenue', 'search_logo'],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
|
||||||
|
{
|
||||||
|
id: 'firstName',
|
||||||
|
title: 'First Name',
|
||||||
|
type: 'short-input',
|
||||||
|
placeholder: 'John',
|
||||||
|
condition: { field: 'operation', value: 'search_people' },
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: 'lastName',
|
||||||
|
title: 'Last Name',
|
||||||
|
type: 'short-input',
|
||||||
|
placeholder: 'Doe',
|
||||||
|
condition: { field: 'operation', value: 'search_people' },
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: 'subTitle',
|
||||||
|
title: 'Job Title',
|
||||||
|
type: 'short-input',
|
||||||
|
placeholder: 'Software Engineer',
|
||||||
|
condition: { field: 'operation', value: 'search_people' },
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: 'locationCountry',
|
||||||
|
title: 'Country',
|
||||||
|
type: 'short-input',
|
||||||
|
placeholder: 'United States',
|
||||||
|
condition: { field: 'operation', value: ['search_people', 'search_company'] },
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: 'locationCity',
|
||||||
|
title: 'City',
|
||||||
|
type: 'short-input',
|
||||||
|
placeholder: 'San Francisco',
|
||||||
|
condition: { field: 'operation', value: ['search_people', 'search_company'] },
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: 'industry',
|
||||||
|
title: 'Industry',
|
||||||
|
type: 'short-input',
|
||||||
|
placeholder: 'Technology',
|
||||||
|
condition: { field: 'operation', value: 'search_people' },
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: 'currentJobTitles',
|
||||||
|
title: 'Current Job Titles (JSON)',
|
||||||
|
type: 'code',
|
||||||
|
placeholder: '["CEO", "CTO", "VP Engineering"]',
|
||||||
|
condition: { field: 'operation', value: 'search_people' },
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: 'skills',
|
||||||
|
title: 'Skills (JSON)',
|
||||||
|
type: 'code',
|
||||||
|
placeholder: '["Python", "Machine Learning"]',
|
||||||
|
condition: { field: 'operation', value: 'search_people' },
|
||||||
|
},
|
||||||
|
|
||||||
|
{
|
||||||
|
id: 'searchCompanyName',
|
||||||
|
title: 'Company Name',
|
||||||
|
type: 'short-input',
|
||||||
|
placeholder: 'Google',
|
||||||
|
condition: { field: 'operation', value: 'search_company' },
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: 'industries',
|
||||||
|
title: 'Industries (JSON)',
|
||||||
|
type: 'code',
|
||||||
|
placeholder: '["Technology", "Software"]',
|
||||||
|
condition: { field: 'operation', value: 'search_company' },
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: 'staffCountMin',
|
||||||
|
title: 'Min Employees',
|
||||||
|
type: 'short-input',
|
||||||
|
placeholder: '50',
|
||||||
|
condition: { field: 'operation', value: 'search_company' },
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: 'staffCountMax',
|
||||||
|
title: 'Max Employees',
|
||||||
|
type: 'short-input',
|
||||||
|
placeholder: '500',
|
||||||
|
condition: { field: 'operation', value: 'search_company' },
|
||||||
|
},
|
||||||
|
|
||||||
|
{
|
||||||
|
id: 'companyIds',
|
||||||
|
title: 'Company IDs (JSON)',
|
||||||
|
type: 'code',
|
||||||
|
placeholder: '[12345, 67890]',
|
||||||
|
condition: { field: 'operation', value: 'search_company_employees' },
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: 'country',
|
||||||
|
title: 'Country',
|
||||||
|
type: 'short-input',
|
||||||
|
placeholder: 'United States',
|
||||||
|
condition: { field: 'operation', value: 'search_company_employees' },
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: 'city',
|
||||||
|
title: 'City',
|
||||||
|
type: 'short-input',
|
||||||
|
placeholder: 'San Francisco',
|
||||||
|
condition: { field: 'operation', value: 'search_company_employees' },
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: 'jobTitles',
|
||||||
|
title: 'Job Titles (JSON)',
|
||||||
|
type: 'code',
|
||||||
|
placeholder: '["Software Engineer", "Product Manager"]',
|
||||||
|
condition: { field: 'operation', value: 'search_company_employees' },
|
||||||
|
},
|
||||||
|
|
||||||
|
{
|
||||||
|
id: 'linkedinCompanyUrl',
|
||||||
|
title: 'LinkedIn Company URL',
|
||||||
|
type: 'short-input',
|
||||||
|
placeholder: 'linkedin.com/company/google',
|
||||||
|
condition: { field: 'operation', value: 'search_similar_companies' },
|
||||||
|
required: { field: 'operation', value: 'search_similar_companies' },
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: 'accountLocation',
|
||||||
|
title: 'Locations (JSON)',
|
||||||
|
type: 'code',
|
||||||
|
placeholder: '["germany", "france"]',
|
||||||
|
condition: { field: 'operation', value: 'search_similar_companies' },
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: 'employeeSizeType',
|
||||||
|
title: 'Employee Size Filter Type',
|
||||||
|
type: 'dropdown',
|
||||||
|
options: [
|
||||||
|
{ label: 'Range', id: 'RANGE' },
|
||||||
|
{ label: 'Exact', id: 'EXACT' },
|
||||||
|
],
|
||||||
|
condition: { field: 'operation', value: 'search_similar_companies' },
|
||||||
|
mode: 'advanced',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: 'employeeSizeRange',
|
||||||
|
title: 'Employee Size Range (JSON)',
|
||||||
|
type: 'code',
|
||||||
|
placeholder: '[{"start": 50, "end": 200}]',
|
||||||
|
condition: { field: 'operation', value: 'search_similar_companies' },
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: 'num',
|
||||||
|
title: 'Results Per Page',
|
||||||
|
type: 'short-input',
|
||||||
|
placeholder: '10',
|
||||||
|
condition: { field: 'operation', value: 'search_similar_companies' },
|
||||||
|
},
|
||||||
|
|
||||||
|
{
|
||||||
|
id: 'filters',
|
||||||
|
title: 'Filters (JSON)',
|
||||||
|
type: 'code',
|
||||||
|
placeholder:
|
||||||
|
'[{"type": "POSTAL_CODE", "values": [{"id": "101041448", "text": "San Francisco", "selectionType": "INCLUDED"}]}]',
|
||||||
|
condition: { field: 'operation', value: 'sales_pointer_people' },
|
||||||
|
required: { field: 'operation', value: 'sales_pointer_people' },
|
||||||
|
},
|
||||||
|
|
||||||
|
{
|
||||||
|
id: 'keywords',
|
||||||
|
title: 'Keywords',
|
||||||
|
type: 'short-input',
|
||||||
|
placeholder: 'AI automation',
|
||||||
|
condition: { field: 'operation', value: 'search_posts' },
|
||||||
|
required: { field: 'operation', value: 'search_posts' },
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: 'datePosted',
|
||||||
|
title: 'Date Posted',
|
||||||
|
type: 'dropdown',
|
||||||
|
options: [
|
||||||
|
{ label: 'Any time', id: '' },
|
||||||
|
{ label: 'Past 24 hours', id: 'past_24_hours' },
|
||||||
|
{ label: 'Past week', id: 'past_week' },
|
||||||
|
{ label: 'Past month', id: 'past_month' },
|
||||||
|
],
|
||||||
|
condition: { field: 'operation', value: 'search_posts' },
|
||||||
|
},
|
||||||
|
|
||||||
|
{
|
||||||
|
id: 'postUrl',
|
||||||
|
title: 'LinkedIn Post URL',
|
||||||
|
type: 'short-input',
|
||||||
|
placeholder: 'https://www.linkedin.com/posts/...',
|
||||||
|
condition: { field: 'operation', value: 'get_post_details' },
|
||||||
|
required: { field: 'operation', value: 'get_post_details' },
|
||||||
|
},
|
||||||
|
|
||||||
|
{
|
||||||
|
id: 'postUrn',
|
||||||
|
title: 'Post URN',
|
||||||
|
type: 'short-input',
|
||||||
|
placeholder: 'urn:li:activity:7231931952839196672',
|
||||||
|
condition: {
|
||||||
|
field: 'operation',
|
||||||
|
value: ['search_post_reactions', 'search_post_comments'],
|
||||||
|
},
|
||||||
|
required: {
|
||||||
|
field: 'operation',
|
||||||
|
value: ['search_post_reactions', 'search_post_comments'],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: 'reactionType',
|
||||||
|
title: 'Reaction Type',
|
||||||
|
type: 'dropdown',
|
||||||
|
options: [
|
||||||
|
{ label: 'All', id: 'all' },
|
||||||
|
{ label: 'Like', id: 'like' },
|
||||||
|
{ label: 'Love', id: 'love' },
|
||||||
|
{ label: 'Celebrate', id: 'celebrate' },
|
||||||
|
{ label: 'Insightful', id: 'insightful' },
|
||||||
|
{ label: 'Funny', id: 'funny' },
|
||||||
|
],
|
||||||
|
condition: { field: 'operation', value: 'search_post_reactions' },
|
||||||
|
},
|
||||||
|
|
||||||
|
{
|
||||||
|
id: 'profileId',
|
||||||
|
title: 'Profile ID',
|
||||||
|
type: 'short-input',
|
||||||
|
placeholder: 'ACoAAC1wha0BhoDIRAHrP5rgzVDyzmSdnl-KuEk',
|
||||||
|
condition: { field: 'operation', value: 'search_people_activities' },
|
||||||
|
required: { field: 'operation', value: 'search_people_activities' },
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: 'activityType',
|
||||||
|
title: 'Activity Type',
|
||||||
|
type: 'dropdown',
|
||||||
|
options: [
|
||||||
|
{ label: 'Posts', id: 'posts' },
|
||||||
|
{ label: 'Comments', id: 'comments' },
|
||||||
|
{ label: 'Articles', id: 'articles' },
|
||||||
|
],
|
||||||
|
condition: {
|
||||||
|
field: 'operation',
|
||||||
|
value: ['search_people_activities', 'search_company_activities'],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
|
||||||
|
{
|
||||||
|
id: 'companyId',
|
||||||
|
title: 'Company ID',
|
||||||
|
type: 'short-input',
|
||||||
|
placeholder: '100746430',
|
||||||
|
condition: { field: 'operation', value: 'search_company_activities' },
|
||||||
|
required: { field: 'operation', value: 'search_company_activities' },
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: 'offset',
|
||||||
|
title: 'Offset',
|
||||||
|
type: 'short-input',
|
||||||
|
placeholder: '0',
|
||||||
|
condition: { field: 'operation', value: 'search_company_activities' },
|
||||||
|
mode: 'advanced',
|
||||||
|
},
|
||||||
|
|
||||||
|
{
|
||||||
|
id: 'hash',
|
||||||
|
title: 'MD5 Hash',
|
||||||
|
type: 'short-input',
|
||||||
|
placeholder: '5f0efb20de5ecfedbe0bf5e7c12353fe',
|
||||||
|
condition: { field: 'operation', value: 'reverse_hash_lookup' },
|
||||||
|
required: { field: 'operation', value: 'reverse_hash_lookup' },
|
||||||
|
},
|
||||||
|
|
||||||
|
{
|
||||||
|
id: 'page',
|
||||||
|
title: 'Page Number',
|
||||||
|
type: 'short-input',
|
||||||
|
placeholder: '1',
|
||||||
|
condition: {
|
||||||
|
field: 'operation',
|
||||||
|
value: [
|
||||||
|
'search_people',
|
||||||
|
'search_company',
|
||||||
|
'search_company_employees',
|
||||||
|
'search_similar_companies',
|
||||||
|
'sales_pointer_people',
|
||||||
|
'search_posts',
|
||||||
|
'search_post_reactions',
|
||||||
|
'search_post_comments',
|
||||||
|
],
|
||||||
|
},
|
||||||
|
required: { field: 'operation', value: 'sales_pointer_people' },
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: 'pageSize',
|
||||||
|
title: 'Results Per Page',
|
||||||
|
type: 'short-input',
|
||||||
|
placeholder: '20',
|
||||||
|
condition: {
|
||||||
|
field: 'operation',
|
||||||
|
value: ['search_people', 'search_company', 'search_company_employees'],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: 'paginationToken',
|
||||||
|
title: 'Pagination Token',
|
||||||
|
type: 'short-input',
|
||||||
|
placeholder: 'Token from previous response',
|
||||||
|
condition: {
|
||||||
|
field: 'operation',
|
||||||
|
value: ['search_people_activities', 'search_company_activities'],
|
||||||
|
},
|
||||||
|
mode: 'advanced',
|
||||||
|
},
|
||||||
|
],
|
||||||
|
tools: {
|
||||||
|
access: [
|
||||||
|
'enrich_check_credits',
|
||||||
|
'enrich_email_to_profile',
|
||||||
|
'enrich_email_to_person_lite',
|
||||||
|
'enrich_linkedin_profile',
|
||||||
|
'enrich_find_email',
|
||||||
|
'enrich_linkedin_to_work_email',
|
||||||
|
'enrich_linkedin_to_personal_email',
|
||||||
|
'enrich_phone_finder',
|
||||||
|
'enrich_email_to_phone',
|
||||||
|
'enrich_verify_email',
|
||||||
|
'enrich_disposable_email_check',
|
||||||
|
'enrich_email_to_ip',
|
||||||
|
'enrich_ip_to_company',
|
||||||
|
'enrich_company_lookup',
|
||||||
|
'enrich_company_funding',
|
||||||
|
'enrich_company_revenue',
|
||||||
|
'enrich_search_people',
|
||||||
|
'enrich_search_company',
|
||||||
|
'enrich_search_company_employees',
|
||||||
|
'enrich_search_similar_companies',
|
||||||
|
'enrich_sales_pointer_people',
|
||||||
|
'enrich_search_posts',
|
||||||
|
'enrich_get_post_details',
|
||||||
|
'enrich_search_post_reactions',
|
||||||
|
'enrich_search_post_comments',
|
||||||
|
'enrich_search_people_activities',
|
||||||
|
'enrich_search_company_activities',
|
||||||
|
'enrich_reverse_hash_lookup',
|
||||||
|
'enrich_search_logo',
|
||||||
|
],
|
||||||
|
config: {
|
||||||
|
tool: (params) => `enrich_${params.operation}`,
|
||||||
|
params: (params) => {
|
||||||
|
const { operation, ...rest } = params
|
||||||
|
const parsedParams: Record<string, any> = { ...rest }
|
||||||
|
|
||||||
|
try {
|
||||||
|
if (rest.currentJobTitles && typeof rest.currentJobTitles === 'string') {
|
||||||
|
parsedParams.currentJobTitles = JSON.parse(rest.currentJobTitles)
|
||||||
|
}
|
||||||
|
if (rest.skills && typeof rest.skills === 'string') {
|
||||||
|
parsedParams.skills = JSON.parse(rest.skills)
|
||||||
|
}
|
||||||
|
if (rest.industries && typeof rest.industries === 'string') {
|
||||||
|
parsedParams.industries = JSON.parse(rest.industries)
|
||||||
|
}
|
||||||
|
if (rest.companyIds && typeof rest.companyIds === 'string') {
|
||||||
|
parsedParams.companyIds = JSON.parse(rest.companyIds)
|
||||||
|
}
|
||||||
|
if (rest.jobTitles && typeof rest.jobTitles === 'string') {
|
||||||
|
parsedParams.jobTitles = JSON.parse(rest.jobTitles)
|
||||||
|
}
|
||||||
|
if (rest.accountLocation && typeof rest.accountLocation === 'string') {
|
||||||
|
parsedParams.accountLocation = JSON.parse(rest.accountLocation)
|
||||||
|
}
|
||||||
|
if (rest.employeeSizeRange && typeof rest.employeeSizeRange === 'string') {
|
||||||
|
parsedParams.employeeSizeRange = JSON.parse(rest.employeeSizeRange)
|
||||||
|
}
|
||||||
|
if (rest.filters && typeof rest.filters === 'string') {
|
||||||
|
parsedParams.filters = JSON.parse(rest.filters)
|
||||||
|
}
|
||||||
|
} catch (error: any) {
|
||||||
|
throw new Error(`Invalid JSON input: ${error.message}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
if (operation === 'linkedin_profile') {
|
||||||
|
parsedParams.url = rest.linkedinUrl
|
||||||
|
parsedParams.linkedinUrl = undefined
|
||||||
|
}
|
||||||
|
if (
|
||||||
|
operation === 'linkedin_to_work_email' ||
|
||||||
|
operation === 'linkedin_to_personal_email' ||
|
||||||
|
operation === 'phone_finder'
|
||||||
|
) {
|
||||||
|
parsedParams.linkedinProfile = rest.linkedinUrl
|
||||||
|
parsedParams.linkedinUrl = undefined
|
||||||
|
}
|
||||||
|
if (operation === 'company_lookup') {
|
||||||
|
parsedParams.name = rest.companyName
|
||||||
|
parsedParams.companyName = undefined
|
||||||
|
}
|
||||||
|
if (operation === 'search_company') {
|
||||||
|
parsedParams.name = rest.searchCompanyName
|
||||||
|
parsedParams.searchCompanyName = undefined
|
||||||
|
}
|
||||||
|
if (operation === 'search_similar_companies') {
|
||||||
|
parsedParams.url = rest.linkedinCompanyUrl
|
||||||
|
parsedParams.linkedinCompanyUrl = undefined
|
||||||
|
}
|
||||||
|
if (operation === 'get_post_details') {
|
||||||
|
parsedParams.url = rest.postUrl
|
||||||
|
parsedParams.postUrl = undefined
|
||||||
|
}
|
||||||
|
if (operation === 'search_logo') {
|
||||||
|
parsedParams.url = rest.domain
|
||||||
|
}
|
||||||
|
|
||||||
|
if (parsedParams.page) {
|
||||||
|
const pageNum = Number(parsedParams.page)
|
||||||
|
if (operation === 'search_people' || operation === 'search_company') {
|
||||||
|
parsedParams.currentPage = pageNum
|
||||||
|
parsedParams.page = undefined
|
||||||
|
} else {
|
||||||
|
parsedParams.page = pageNum
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (parsedParams.pageSize) parsedParams.pageSize = Number(parsedParams.pageSize)
|
||||||
|
if (parsedParams.num) parsedParams.num = Number(parsedParams.num)
|
||||||
|
if (parsedParams.offset) parsedParams.offset = Number(parsedParams.offset)
|
||||||
|
if (parsedParams.staffCountMin)
|
||||||
|
parsedParams.staffCountMin = Number(parsedParams.staffCountMin)
|
||||||
|
if (parsedParams.staffCountMax)
|
||||||
|
parsedParams.staffCountMax = Number(parsedParams.staffCountMax)
|
||||||
|
|
||||||
|
return parsedParams
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
inputs: {
|
||||||
|
operation: { type: 'string', description: 'Enrich operation to perform' },
|
||||||
|
},
|
||||||
|
outputs: {
|
||||||
|
success: { type: 'boolean', description: 'Whether the operation was successful' },
|
||||||
|
output: { type: 'json', description: 'Output data from the Enrich operation' },
|
||||||
|
},
|
||||||
|
}
|
||||||
@@ -1,11 +1,48 @@
|
|||||||
import { createLogger } from '@sim/logger'
|
import { createLogger } from '@sim/logger'
|
||||||
import { DocumentIcon } from '@/components/icons'
|
import { DocumentIcon } from '@/components/icons'
|
||||||
|
import { inferContextFromKey } from '@/lib/uploads/utils/file-utils'
|
||||||
import type { BlockConfig, SubBlockType } from '@/blocks/types'
|
import type { BlockConfig, SubBlockType } from '@/blocks/types'
|
||||||
import { createVersionedToolSelector } from '@/blocks/utils'
|
import { createVersionedToolSelector } from '@/blocks/utils'
|
||||||
import type { FileParserOutput } from '@/tools/file/types'
|
import type { FileParserOutput, FileParserV3Output } from '@/tools/file/types'
|
||||||
|
|
||||||
const logger = createLogger('FileBlock')
|
const logger = createLogger('FileBlock')
|
||||||
|
|
||||||
|
const resolveFilePathFromInput = (fileInput: unknown): string | null => {
|
||||||
|
if (!fileInput || typeof fileInput !== 'object') {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
const record = fileInput as Record<string, unknown>
|
||||||
|
if (typeof record.path === 'string' && record.path.trim() !== '') {
|
||||||
|
return record.path
|
||||||
|
}
|
||||||
|
if (typeof record.url === 'string' && record.url.trim() !== '') {
|
||||||
|
return record.url
|
||||||
|
}
|
||||||
|
if (typeof record.key === 'string' && record.key.trim() !== '') {
|
||||||
|
const key = record.key.trim()
|
||||||
|
const context = typeof record.context === 'string' ? record.context : inferContextFromKey(key)
|
||||||
|
return `/api/files/serve/${encodeURIComponent(key)}?context=${context}`
|
||||||
|
}
|
||||||
|
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
const resolveFilePathsFromInput = (fileInput: unknown): string[] => {
|
||||||
|
if (!fileInput) {
|
||||||
|
return []
|
||||||
|
}
|
||||||
|
|
||||||
|
if (Array.isArray(fileInput)) {
|
||||||
|
return fileInput
|
||||||
|
.map((file) => resolveFilePathFromInput(file))
|
||||||
|
.filter((path): path is string => Boolean(path))
|
||||||
|
}
|
||||||
|
|
||||||
|
const resolved = resolveFilePathFromInput(fileInput)
|
||||||
|
return resolved ? [resolved] : []
|
||||||
|
}
|
||||||
|
|
||||||
export const FileBlock: BlockConfig<FileParserOutput> = {
|
export const FileBlock: BlockConfig<FileParserOutput> = {
|
||||||
type: 'file',
|
type: 'file',
|
||||||
name: 'File (Legacy)',
|
name: 'File (Legacy)',
|
||||||
@@ -79,24 +116,14 @@ export const FileBlock: BlockConfig<FileParserOutput> = {
|
|||||||
|
|
||||||
// Handle file upload input
|
// Handle file upload input
|
||||||
if (inputMethod === 'upload') {
|
if (inputMethod === 'upload') {
|
||||||
// Handle case where 'file' is an array (multiple files)
|
const filePaths = resolveFilePathsFromInput(params.file)
|
||||||
if (params.file && Array.isArray(params.file) && params.file.length > 0) {
|
if (filePaths.length > 0) {
|
||||||
const filePaths = params.file.map((file) => file.path)
|
|
||||||
|
|
||||||
return {
|
return {
|
||||||
filePath: filePaths.length === 1 ? filePaths[0] : filePaths,
|
filePath: filePaths.length === 1 ? filePaths[0] : filePaths,
|
||||||
fileType: params.fileType || 'auto',
|
fileType: params.fileType || 'auto',
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Handle case where 'file' is a single file object
|
|
||||||
if (params.file?.path) {
|
|
||||||
return {
|
|
||||||
filePath: params.file.path,
|
|
||||||
fileType: params.fileType || 'auto',
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// If no files, return error
|
// If no files, return error
|
||||||
logger.error('No files provided for upload method')
|
logger.error('No files provided for upload method')
|
||||||
throw new Error('Please upload a file')
|
throw new Error('Please upload a file')
|
||||||
@@ -116,7 +143,7 @@ export const FileBlock: BlockConfig<FileParserOutput> = {
|
|||||||
},
|
},
|
||||||
outputs: {
|
outputs: {
|
||||||
files: {
|
files: {
|
||||||
type: 'json',
|
type: 'file[]',
|
||||||
description: 'Array of parsed file objects with content, metadata, and file properties',
|
description: 'Array of parsed file objects with content, metadata, and file properties',
|
||||||
},
|
},
|
||||||
combinedContent: {
|
combinedContent: {
|
||||||
@@ -124,7 +151,7 @@ export const FileBlock: BlockConfig<FileParserOutput> = {
|
|||||||
description: 'All file contents merged into a single text string',
|
description: 'All file contents merged into a single text string',
|
||||||
},
|
},
|
||||||
processedFiles: {
|
processedFiles: {
|
||||||
type: 'files',
|
type: 'file[]',
|
||||||
description: 'Array of UserFile objects for downstream use (attachments, uploads, etc.)',
|
description: 'Array of UserFile objects for downstream use (attachments, uploads, etc.)',
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
@@ -133,9 +160,9 @@ export const FileBlock: BlockConfig<FileParserOutput> = {
|
|||||||
export const FileV2Block: BlockConfig<FileParserOutput> = {
|
export const FileV2Block: BlockConfig<FileParserOutput> = {
|
||||||
...FileBlock,
|
...FileBlock,
|
||||||
type: 'file_v2',
|
type: 'file_v2',
|
||||||
name: 'File',
|
name: 'File (Legacy)',
|
||||||
description: 'Read and parse multiple files',
|
description: 'Read and parse multiple files',
|
||||||
hideFromToolbar: false,
|
hideFromToolbar: true,
|
||||||
subBlocks: [
|
subBlocks: [
|
||||||
{
|
{
|
||||||
id: 'file',
|
id: 'file',
|
||||||
@@ -182,16 +209,17 @@ export const FileV2Block: BlockConfig<FileParserOutput> = {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if (Array.isArray(fileInput) && fileInput.length > 0) {
|
if (Array.isArray(fileInput) && fileInput.length > 0) {
|
||||||
const filePaths = fileInput.map((file) => file.path)
|
const filePaths = resolveFilePathsFromInput(fileInput)
|
||||||
return {
|
return {
|
||||||
filePath: filePaths.length === 1 ? filePaths[0] : filePaths,
|
filePath: filePaths.length === 1 ? filePaths[0] : filePaths,
|
||||||
fileType: params.fileType || 'auto',
|
fileType: params.fileType || 'auto',
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if (fileInput?.path) {
|
const resolvedSingle = resolveFilePathsFromInput(fileInput)
|
||||||
|
if (resolvedSingle.length > 0) {
|
||||||
return {
|
return {
|
||||||
filePath: fileInput.path,
|
filePath: resolvedSingle[0],
|
||||||
fileType: params.fileType || 'auto',
|
fileType: params.fileType || 'auto',
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -209,7 +237,7 @@ export const FileV2Block: BlockConfig<FileParserOutput> = {
|
|||||||
},
|
},
|
||||||
outputs: {
|
outputs: {
|
||||||
files: {
|
files: {
|
||||||
type: 'json',
|
type: 'file[]',
|
||||||
description: 'Array of parsed file objects with content, metadata, and file properties',
|
description: 'Array of parsed file objects with content, metadata, and file properties',
|
||||||
},
|
},
|
||||||
combinedContent: {
|
combinedContent: {
|
||||||
@@ -218,3 +246,108 @@ export const FileV2Block: BlockConfig<FileParserOutput> = {
|
|||||||
},
|
},
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export const FileV3Block: BlockConfig<FileParserV3Output> = {
|
||||||
|
type: 'file_v3',
|
||||||
|
name: 'File',
|
||||||
|
description: 'Read and parse multiple files',
|
||||||
|
longDescription: 'Upload files or reference files from previous blocks to extract text content.',
|
||||||
|
docsLink: 'https://docs.sim.ai/tools/file',
|
||||||
|
category: 'tools',
|
||||||
|
bgColor: '#40916C',
|
||||||
|
icon: DocumentIcon,
|
||||||
|
subBlocks: [
|
||||||
|
{
|
||||||
|
id: 'file',
|
||||||
|
title: 'Files',
|
||||||
|
type: 'file-upload' as SubBlockType,
|
||||||
|
canonicalParamId: 'fileInput',
|
||||||
|
acceptedTypes:
|
||||||
|
'.pdf,.csv,.doc,.docx,.txt,.md,.xlsx,.xls,.html,.htm,.pptx,.ppt,.json,.xml,.rtf',
|
||||||
|
placeholder: 'Upload files to process',
|
||||||
|
multiple: true,
|
||||||
|
mode: 'basic',
|
||||||
|
maxSize: 100,
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: 'fileRef',
|
||||||
|
title: 'Files',
|
||||||
|
type: 'short-input' as SubBlockType,
|
||||||
|
canonicalParamId: 'fileInput',
|
||||||
|
placeholder: 'File reference from previous block',
|
||||||
|
mode: 'advanced',
|
||||||
|
required: true,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
tools: {
|
||||||
|
access: ['file_parser_v3'],
|
||||||
|
config: {
|
||||||
|
tool: () => 'file_parser_v3',
|
||||||
|
params: (params) => {
|
||||||
|
const fileInput = params.fileInput ?? params.file ?? params.filePath
|
||||||
|
if (!fileInput) {
|
||||||
|
logger.error('No file input provided')
|
||||||
|
throw new Error('File input is required')
|
||||||
|
}
|
||||||
|
|
||||||
|
if (typeof fileInput === 'string') {
|
||||||
|
return {
|
||||||
|
filePath: fileInput.trim(),
|
||||||
|
fileType: params.fileType || 'auto',
|
||||||
|
workspaceId: params._context?.workspaceId,
|
||||||
|
workflowId: params._context?.workflowId,
|
||||||
|
executionId: params._context?.executionId,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (Array.isArray(fileInput)) {
|
||||||
|
const filePaths = resolveFilePathsFromInput(fileInput)
|
||||||
|
if (filePaths.length === 0) {
|
||||||
|
logger.error('No valid file paths found in file input array')
|
||||||
|
throw new Error('File input is required')
|
||||||
|
}
|
||||||
|
return {
|
||||||
|
filePath: filePaths.length === 1 ? filePaths[0] : filePaths,
|
||||||
|
fileType: params.fileType || 'auto',
|
||||||
|
workspaceId: params._context?.workspaceId,
|
||||||
|
workflowId: params._context?.workflowId,
|
||||||
|
executionId: params._context?.executionId,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (typeof fileInput === 'object') {
|
||||||
|
const resolvedPaths = resolveFilePathsFromInput(fileInput)
|
||||||
|
if (resolvedPaths.length === 0) {
|
||||||
|
logger.error('File input object missing path, url, or key')
|
||||||
|
throw new Error('File input is required')
|
||||||
|
}
|
||||||
|
return {
|
||||||
|
filePath: resolvedPaths[0],
|
||||||
|
fileType: params.fileType || 'auto',
|
||||||
|
workspaceId: params._context?.workspaceId,
|
||||||
|
workflowId: params._context?.workflowId,
|
||||||
|
executionId: params._context?.executionId,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.error('Invalid file input format')
|
||||||
|
throw new Error('File input is required')
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
inputs: {
|
||||||
|
fileInput: { type: 'json', description: 'File input (upload or UserFile reference)' },
|
||||||
|
fileType: { type: 'string', description: 'File type' },
|
||||||
|
},
|
||||||
|
outputs: {
|
||||||
|
files: {
|
||||||
|
type: 'file[]',
|
||||||
|
description: 'Parsed files as UserFile objects',
|
||||||
|
},
|
||||||
|
combinedContent: {
|
||||||
|
type: 'string',
|
||||||
|
description: 'All file contents merged into a single text string',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|||||||
@@ -1,4 +1,5 @@
|
|||||||
import { FirefliesIcon } from '@/components/icons'
|
import { FirefliesIcon } from '@/components/icons'
|
||||||
|
import { resolveHttpsUrlFromFileInput } from '@/lib/uploads/utils/file-utils'
|
||||||
import type { BlockConfig } from '@/blocks/types'
|
import type { BlockConfig } from '@/blocks/types'
|
||||||
import { AuthMode } from '@/blocks/types'
|
import { AuthMode } from '@/blocks/types'
|
||||||
import type { FirefliesResponse } from '@/tools/fireflies/types'
|
import type { FirefliesResponse } from '@/tools/fireflies/types'
|
||||||
@@ -6,8 +7,9 @@ import { getTrigger } from '@/triggers'
|
|||||||
|
|
||||||
export const FirefliesBlock: BlockConfig<FirefliesResponse> = {
|
export const FirefliesBlock: BlockConfig<FirefliesResponse> = {
|
||||||
type: 'fireflies',
|
type: 'fireflies',
|
||||||
name: 'Fireflies',
|
name: 'Fireflies (Legacy)',
|
||||||
description: 'Interact with Fireflies.ai meeting transcripts and recordings',
|
description: 'Interact with Fireflies.ai meeting transcripts and recordings',
|
||||||
|
hideFromToolbar: true,
|
||||||
authMode: AuthMode.ApiKey,
|
authMode: AuthMode.ApiKey,
|
||||||
triggerAllowed: true,
|
triggerAllowed: true,
|
||||||
longDescription:
|
longDescription:
|
||||||
@@ -587,3 +589,74 @@ Return ONLY the summary text - no quotes, no labels.`,
|
|||||||
available: ['fireflies_transcription_complete'],
|
available: ['fireflies_transcription_complete'],
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const firefliesV2SubBlocks = (FirefliesBlock.subBlocks || []).filter(
|
||||||
|
(subBlock) => subBlock.id !== 'audioUrl'
|
||||||
|
)
|
||||||
|
const firefliesV2Inputs = FirefliesBlock.inputs
|
||||||
|
? Object.fromEntries(Object.entries(FirefliesBlock.inputs).filter(([key]) => key !== 'audioUrl'))
|
||||||
|
: {}
|
||||||
|
|
||||||
|
export const FirefliesV2Block: BlockConfig<FirefliesResponse> = {
|
||||||
|
...FirefliesBlock,
|
||||||
|
type: 'fireflies_v2',
|
||||||
|
name: 'Fireflies',
|
||||||
|
description: 'Interact with Fireflies.ai meeting transcripts and recordings',
|
||||||
|
hideFromToolbar: false,
|
||||||
|
subBlocks: firefliesV2SubBlocks,
|
||||||
|
tools: {
|
||||||
|
...FirefliesBlock.tools,
|
||||||
|
config: {
|
||||||
|
...FirefliesBlock.tools?.config,
|
||||||
|
tool: (params) =>
|
||||||
|
FirefliesBlock.tools?.config?.tool
|
||||||
|
? FirefliesBlock.tools.config.tool(params)
|
||||||
|
: params.operation || 'fireflies_list_transcripts',
|
||||||
|
params: (params) => {
|
||||||
|
const baseParams = FirefliesBlock.tools?.config?.params
|
||||||
|
if (!baseParams) {
|
||||||
|
return params
|
||||||
|
}
|
||||||
|
|
||||||
|
if (params.operation === 'fireflies_upload_audio') {
|
||||||
|
let audioInput = params.audioFile || params.audioFileReference
|
||||||
|
if (!audioInput) {
|
||||||
|
throw new Error('Audio file is required.')
|
||||||
|
}
|
||||||
|
if (typeof audioInput === 'string') {
|
||||||
|
try {
|
||||||
|
audioInput = JSON.parse(audioInput)
|
||||||
|
} catch {
|
||||||
|
throw new Error('Audio file must be a valid file reference.')
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (Array.isArray(audioInput)) {
|
||||||
|
throw new Error(
|
||||||
|
'File reference must be a single file, not an array. Use <block.files[0]> to select one file.'
|
||||||
|
)
|
||||||
|
}
|
||||||
|
if (typeof audioInput !== 'object' || audioInput === null) {
|
||||||
|
throw new Error('Audio file must be a file reference.')
|
||||||
|
}
|
||||||
|
const audioUrl = resolveHttpsUrlFromFileInput(audioInput)
|
||||||
|
if (!audioUrl) {
|
||||||
|
throw new Error('Audio file must include a https URL.')
|
||||||
|
}
|
||||||
|
|
||||||
|
return baseParams({
|
||||||
|
...params,
|
||||||
|
audioUrl,
|
||||||
|
audioFile: undefined,
|
||||||
|
audioFileReference: undefined,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
return baseParams(params)
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
inputs: {
|
||||||
|
...firefliesV2Inputs,
|
||||||
|
audioFileReference: { type: 'json', description: 'Audio/video file reference' },
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|||||||
@@ -516,7 +516,7 @@ Return ONLY the search query - no explanations, no extra text.`,
|
|||||||
// Tool outputs
|
// Tool outputs
|
||||||
content: { type: 'string', description: 'Response content' },
|
content: { type: 'string', description: 'Response content' },
|
||||||
metadata: { type: 'json', description: 'Email metadata' },
|
metadata: { type: 'json', description: 'Email metadata' },
|
||||||
attachments: { type: 'json', description: 'Email attachments array' },
|
attachments: { type: 'file[]', description: 'Email attachments array' },
|
||||||
// Trigger outputs
|
// Trigger outputs
|
||||||
email_id: { type: 'string', description: 'Gmail message ID' },
|
email_id: { type: 'string', description: 'Gmail message ID' },
|
||||||
thread_id: { type: 'string', description: 'Gmail thread ID' },
|
thread_id: { type: 'string', description: 'Gmail thread ID' },
|
||||||
@@ -579,7 +579,7 @@ export const GmailV2Block: BlockConfig<GmailToolResponse> = {
|
|||||||
date: { type: 'string', description: 'Date' },
|
date: { type: 'string', description: 'Date' },
|
||||||
body: { type: 'string', description: 'Email body text (best-effort)' },
|
body: { type: 'string', description: 'Email body text (best-effort)' },
|
||||||
results: { type: 'json', description: 'Search/read summary results' },
|
results: { type: 'json', description: 'Search/read summary results' },
|
||||||
attachments: { type: 'json', description: 'Downloaded attachments (if enabled)' },
|
attachments: { type: 'file[]', description: 'Downloaded attachments (if enabled)' },
|
||||||
|
|
||||||
// Draft-specific outputs
|
// Draft-specific outputs
|
||||||
draftId: {
|
draftId: {
|
||||||
|
|||||||
@@ -861,7 +861,7 @@ Return ONLY the message text - no subject line, no greetings/signatures, no extr
|
|||||||
permissionId: { type: 'string', description: 'Permission ID to remove' },
|
permissionId: { type: 'string', description: 'Permission ID to remove' },
|
||||||
},
|
},
|
||||||
outputs: {
|
outputs: {
|
||||||
file: { type: 'json', description: 'File metadata or downloaded file data' },
|
file: { type: 'file', description: 'Downloaded file stored in execution files' },
|
||||||
files: { type: 'json', description: 'List of files' },
|
files: { type: 'json', description: 'List of files' },
|
||||||
metadata: { type: 'json', description: 'Complete file metadata (from download)' },
|
metadata: { type: 'json', description: 'Complete file metadata (from download)' },
|
||||||
content: { type: 'string', description: 'File content as text' },
|
content: { type: 'string', description: 'File content as text' },
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
import { GoogleSheetsIcon } from '@/components/icons'
|
import { GoogleSheetsIcon } from '@/components/icons'
|
||||||
import type { BlockConfig } from '@/blocks/types'
|
import type { BlockConfig } from '@/blocks/types'
|
||||||
import { AuthMode } from '@/blocks/types'
|
import { AuthMode } from '@/blocks/types'
|
||||||
|
import { createVersionedToolSelector } from '@/blocks/utils'
|
||||||
import type { GoogleSheetsResponse, GoogleSheetsV2Response } from '@/tools/google_sheets/types'
|
import type { GoogleSheetsResponse, GoogleSheetsV2Response } from '@/tools/google_sheets/types'
|
||||||
|
|
||||||
// Legacy block - hidden from toolbar
|
// Legacy block - hidden from toolbar
|
||||||
@@ -681,34 +682,38 @@ Return ONLY the JSON array - no explanations, no markdown, no extra text.`,
|
|||||||
'google_sheets_copy_sheet_v2',
|
'google_sheets_copy_sheet_v2',
|
||||||
],
|
],
|
||||||
config: {
|
config: {
|
||||||
tool: (params) => {
|
tool: createVersionedToolSelector({
|
||||||
switch (params.operation) {
|
baseToolSelector: (params) => {
|
||||||
case 'read':
|
switch (params.operation) {
|
||||||
return 'google_sheets_read_v2'
|
case 'read':
|
||||||
case 'write':
|
return 'google_sheets_read'
|
||||||
return 'google_sheets_write_v2'
|
case 'write':
|
||||||
case 'update':
|
return 'google_sheets_write'
|
||||||
return 'google_sheets_update_v2'
|
case 'update':
|
||||||
case 'append':
|
return 'google_sheets_update'
|
||||||
return 'google_sheets_append_v2'
|
case 'append':
|
||||||
case 'clear':
|
return 'google_sheets_append'
|
||||||
return 'google_sheets_clear_v2'
|
case 'clear':
|
||||||
case 'get_info':
|
return 'google_sheets_clear'
|
||||||
return 'google_sheets_get_spreadsheet_v2'
|
case 'get_info':
|
||||||
case 'create':
|
return 'google_sheets_get_spreadsheet'
|
||||||
return 'google_sheets_create_spreadsheet_v2'
|
case 'create':
|
||||||
case 'batch_get':
|
return 'google_sheets_create_spreadsheet'
|
||||||
return 'google_sheets_batch_get_v2'
|
case 'batch_get':
|
||||||
case 'batch_update':
|
return 'google_sheets_batch_get'
|
||||||
return 'google_sheets_batch_update_v2'
|
case 'batch_update':
|
||||||
case 'batch_clear':
|
return 'google_sheets_batch_update'
|
||||||
return 'google_sheets_batch_clear_v2'
|
case 'batch_clear':
|
||||||
case 'copy_sheet':
|
return 'google_sheets_batch_clear'
|
||||||
return 'google_sheets_copy_sheet_v2'
|
case 'copy_sheet':
|
||||||
default:
|
return 'google_sheets_copy_sheet'
|
||||||
throw new Error(`Invalid Google Sheets V2 operation: ${params.operation}`)
|
default:
|
||||||
}
|
throw new Error(`Invalid Google Sheets operation: ${params.operation}`)
|
||||||
},
|
}
|
||||||
|
},
|
||||||
|
suffix: '_v2',
|
||||||
|
fallbackToolId: 'google_sheets_read_v2',
|
||||||
|
}),
|
||||||
params: (params) => {
|
params: (params) => {
|
||||||
const {
|
const {
|
||||||
credential,
|
credential,
|
||||||
|
|||||||
@@ -1,12 +1,14 @@
|
|||||||
import { GoogleSlidesIcon } from '@/components/icons'
|
import { GoogleSlidesIcon } from '@/components/icons'
|
||||||
|
import { resolveHttpsUrlFromFileInput } from '@/lib/uploads/utils/file-utils'
|
||||||
import type { BlockConfig } from '@/blocks/types'
|
import type { BlockConfig } from '@/blocks/types'
|
||||||
import { AuthMode } from '@/blocks/types'
|
import { AuthMode } from '@/blocks/types'
|
||||||
import type { GoogleSlidesResponse } from '@/tools/google_slides/types'
|
import type { GoogleSlidesResponse } from '@/tools/google_slides/types'
|
||||||
|
|
||||||
export const GoogleSlidesBlock: BlockConfig<GoogleSlidesResponse> = {
|
export const GoogleSlidesBlock: BlockConfig<GoogleSlidesResponse> = {
|
||||||
type: 'google_slides',
|
type: 'google_slides',
|
||||||
name: 'Google Slides',
|
name: 'Google Slides (Legacy)',
|
||||||
description: 'Read, write, and create presentations',
|
description: 'Read, write, and create presentations',
|
||||||
|
hideFromToolbar: true,
|
||||||
authMode: AuthMode.OAuth,
|
authMode: AuthMode.OAuth,
|
||||||
longDescription:
|
longDescription:
|
||||||
'Integrate Google Slides into the workflow. Can read, write, create presentations, replace text, add slides, add images, get thumbnails, get page details, delete objects, duplicate objects, reorder slides, create tables, create shapes, and insert text.',
|
'Integrate Google Slides into the workflow. Can read, write, create presentations, replace text, add slides, add images, get thumbnails, get page details, delete objects, duplicate objects, reorder slides, create tables, create shapes, and insert text.',
|
||||||
@@ -315,12 +317,26 @@ Return ONLY the JSON array - no explanations, no markdown, no extra text.`,
|
|||||||
required: true,
|
required: true,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
id: 'imageUrl',
|
id: 'imageFile',
|
||||||
title: 'Image URL',
|
title: 'Image',
|
||||||
type: 'short-input',
|
type: 'file-upload',
|
||||||
placeholder: 'Public URL of the image (PNG, JPEG, or GIF)',
|
canonicalParamId: 'imageSource',
|
||||||
condition: { field: 'operation', value: 'add_image' },
|
placeholder: 'Upload image (PNG, JPEG, or GIF)',
|
||||||
|
mode: 'basic',
|
||||||
|
multiple: false,
|
||||||
required: true,
|
required: true,
|
||||||
|
acceptedTypes: '.png,.jpg,.jpeg,.gif',
|
||||||
|
condition: { field: 'operation', value: 'add_image' },
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: 'imageUrl',
|
||||||
|
title: 'Image',
|
||||||
|
type: 'short-input',
|
||||||
|
canonicalParamId: 'imageSource',
|
||||||
|
placeholder: 'Reference image from previous blocks or enter URL',
|
||||||
|
mode: 'advanced',
|
||||||
|
required: true,
|
||||||
|
condition: { field: 'operation', value: 'add_image' },
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
id: 'imageWidth',
|
id: 'imageWidth',
|
||||||
@@ -809,7 +825,9 @@ Return ONLY the text content - no explanations, no markdown formatting markers,
|
|||||||
placeholderIdMappings: { type: 'string', description: 'JSON array of placeholder ID mappings' },
|
placeholderIdMappings: { type: 'string', description: 'JSON array of placeholder ID mappings' },
|
||||||
// Add image operation
|
// Add image operation
|
||||||
pageObjectId: { type: 'string', description: 'Slide object ID for image' },
|
pageObjectId: { type: 'string', description: 'Slide object ID for image' },
|
||||||
imageUrl: { type: 'string', description: 'Image URL' },
|
imageFile: { type: 'json', description: 'Uploaded image (UserFile)' },
|
||||||
|
imageUrl: { type: 'string', description: 'Image URL or reference' },
|
||||||
|
imageSource: { type: 'json', description: 'Image source (file or URL)' },
|
||||||
imageWidth: { type: 'number', description: 'Image width in points' },
|
imageWidth: { type: 'number', description: 'Image width in points' },
|
||||||
imageHeight: { type: 'number', description: 'Image height in points' },
|
imageHeight: { type: 'number', description: 'Image height in points' },
|
||||||
positionX: { type: 'number', description: 'X position in points' },
|
positionX: { type: 'number', description: 'X position in points' },
|
||||||
@@ -887,3 +905,99 @@ Return ONLY the text content - no explanations, no markdown formatting markers,
|
|||||||
text: { type: 'string', description: 'Text that was inserted' },
|
text: { type: 'string', description: 'Text that was inserted' },
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const googleSlidesV2SubBlocks = (GoogleSlidesBlock.subBlocks || []).flatMap((subBlock) => {
|
||||||
|
if (subBlock.id === 'imageFile') {
|
||||||
|
return [
|
||||||
|
{
|
||||||
|
...subBlock,
|
||||||
|
canonicalParamId: 'imageFile',
|
||||||
|
},
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
if (subBlock.id !== 'imageUrl') {
|
||||||
|
return [subBlock]
|
||||||
|
}
|
||||||
|
|
||||||
|
return [
|
||||||
|
{
|
||||||
|
id: 'imageFileReference',
|
||||||
|
title: 'Image',
|
||||||
|
type: 'short-input' as const,
|
||||||
|
canonicalParamId: 'imageFile',
|
||||||
|
placeholder: 'Reference image from previous blocks',
|
||||||
|
mode: 'advanced' as const,
|
||||||
|
required: true,
|
||||||
|
condition: { field: 'operation', value: 'add_image' },
|
||||||
|
},
|
||||||
|
]
|
||||||
|
})
|
||||||
|
|
||||||
|
const googleSlidesV2Inputs = GoogleSlidesBlock.inputs
|
||||||
|
? Object.fromEntries(
|
||||||
|
Object.entries(GoogleSlidesBlock.inputs).filter(
|
||||||
|
([key]) => key !== 'imageUrl' && key !== 'imageSource'
|
||||||
|
)
|
||||||
|
)
|
||||||
|
: {}
|
||||||
|
|
||||||
|
export const GoogleSlidesV2Block: BlockConfig<GoogleSlidesResponse> = {
|
||||||
|
...GoogleSlidesBlock,
|
||||||
|
type: 'google_slides_v2',
|
||||||
|
name: 'Google Slides',
|
||||||
|
description: 'Read, write, and create presentations',
|
||||||
|
hideFromToolbar: false,
|
||||||
|
subBlocks: googleSlidesV2SubBlocks,
|
||||||
|
tools: {
|
||||||
|
access: GoogleSlidesBlock.tools!.access,
|
||||||
|
config: {
|
||||||
|
tool: GoogleSlidesBlock.tools!.config!.tool,
|
||||||
|
params: (params) => {
|
||||||
|
const baseParams = GoogleSlidesBlock.tools?.config?.params
|
||||||
|
if (!baseParams) {
|
||||||
|
return params
|
||||||
|
}
|
||||||
|
|
||||||
|
if (params.operation === 'add_image') {
|
||||||
|
let imageInput = params.imageFile || params.imageFileReference || params.imageSource
|
||||||
|
if (!imageInput) {
|
||||||
|
throw new Error('Image file is required.')
|
||||||
|
}
|
||||||
|
if (typeof imageInput === 'string') {
|
||||||
|
try {
|
||||||
|
imageInput = JSON.parse(imageInput)
|
||||||
|
} catch {
|
||||||
|
throw new Error('Image file must be a valid file reference.')
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (Array.isArray(imageInput)) {
|
||||||
|
throw new Error(
|
||||||
|
'File reference must be a single file, not an array. Use <block.files[0]> to select one file.'
|
||||||
|
)
|
||||||
|
}
|
||||||
|
if (typeof imageInput !== 'object' || imageInput === null) {
|
||||||
|
throw new Error('Image file must be a file reference.')
|
||||||
|
}
|
||||||
|
const imageUrl = resolveHttpsUrlFromFileInput(imageInput)
|
||||||
|
if (!imageUrl) {
|
||||||
|
throw new Error('Image file must include a https URL.')
|
||||||
|
}
|
||||||
|
|
||||||
|
return baseParams({
|
||||||
|
...params,
|
||||||
|
imageUrl,
|
||||||
|
imageFileReference: undefined,
|
||||||
|
imageSource: undefined,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
return baseParams(params)
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
inputs: {
|
||||||
|
...googleSlidesV2Inputs,
|
||||||
|
imageFileReference: { type: 'json', description: 'Image file reference' },
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|||||||
@@ -526,7 +526,7 @@ Return ONLY the description text - no explanations, no quotes, no extra text.`,
|
|||||||
description:
|
description:
|
||||||
'Single hold object (for create_matters_holds or list_matters_holds with holdId)',
|
'Single hold object (for create_matters_holds or list_matters_holds with holdId)',
|
||||||
},
|
},
|
||||||
file: { type: 'json', description: 'Downloaded export file (UserFile) from execution files' },
|
file: { type: 'file', description: 'Downloaded export file (UserFile) from execution files' },
|
||||||
nextPageToken: {
|
nextPageToken: {
|
||||||
type: 'string',
|
type: 'string',
|
||||||
description: 'Token for fetching next page of results (for list operations)',
|
description: 'Token for fetching next page of results (for list operations)',
|
||||||
|
|||||||
@@ -149,7 +149,7 @@ export const ImageGeneratorBlock: BlockConfig<DalleResponse> = {
|
|||||||
},
|
},
|
||||||
outputs: {
|
outputs: {
|
||||||
content: { type: 'string', description: 'Generation response' },
|
content: { type: 'string', description: 'Generation response' },
|
||||||
image: { type: 'string', description: 'Generated image URL' },
|
image: { type: 'file', description: 'Generated image file (UserFile)' },
|
||||||
metadata: { type: 'json', description: 'Generation metadata' },
|
metadata: { type: 'json', description: 'Generation metadata' },
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -44,7 +44,7 @@ export const ImapBlock: BlockConfig = {
|
|||||||
bodyHtml: { type: 'string', description: 'HTML email body' },
|
bodyHtml: { type: 'string', description: 'HTML email body' },
|
||||||
mailbox: { type: 'string', description: 'Mailbox/folder where email was received' },
|
mailbox: { type: 'string', description: 'Mailbox/folder where email was received' },
|
||||||
hasAttachments: { type: 'boolean', description: 'Whether email has attachments' },
|
hasAttachments: { type: 'boolean', description: 'Whether email has attachments' },
|
||||||
attachments: { type: 'json', description: 'Array of email attachments' },
|
attachments: { type: 'file[]', description: 'Array of email attachments' },
|
||||||
timestamp: { type: 'string', description: 'Event timestamp' },
|
timestamp: { type: 'string', description: 'Event timestamp' },
|
||||||
},
|
},
|
||||||
triggers: {
|
triggers: {
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user