Compare commits


7 Commits

Author SHA1 Message Date
Vikhyath Mondreti
501b44e05a fix type issues 2026-02-02 19:53:12 -08:00
Vikhyath Mondreti
7c1e7273de feat(timeouts): execution timeout limits 2026-02-02 19:24:09 -08:00
Waleed
a9b7d75d87 feat(editor): added docs link to editor (#3116) 2026-02-02 12:22:08 -08:00
Vikhyath Mondreti
0449804ffb improvement(billing): duplicate checks for bypasses, logger billing actor consistency, run from block (#3107)
* improvement(billing): improve against direct subscription creation bypasses

* more usage of block/unblock helpers

* address bugbot comments

* fail closed

* only run dup check for orgs
2026-02-02 10:52:08 -08:00
Vikhyath Mondreti
c286f3ed24 fix(mcp): child workflow with response block returns error (#3114) 2026-02-02 09:30:35 -08:00
Vikhyath Mondreti
b738550815 fix(cleanup-cron): stale execution cleanup integer overflow (#3113) 2026-02-02 09:03:56 -08:00
Waleed
c6357f7438 feat(tools): added enrich so (#3103)
* feat(tools): added enrich so

* updated docs and types
2026-01-31 21:18:41 -08:00
137 changed files with 7337 additions and 12571 deletions

View File

@@ -5421,3 +5421,18 @@ z'
</svg>
)
}
export function EnrichSoIcon(props: SVGProps<SVGSVGElement>) {
return (
<svg {...props} xmlns='http://www.w3.org/2000/svg' viewBox='0 0 398 394' fill='none'>
<path
fill='#5A52F4'
d='M129.705566,319.705719 C127.553314,322.684906 125.651512,325.414673 123.657059,328.277466 C113.748466,318.440308 105.605003,310.395905 97.510834,302.302216 C93.625801,298.417419 89.990181,294.269318 85.949242,290.558868 C82.857994,287.720428 82.464081,285.757660 85.772888,282.551880 C104.068108,264.826202 122.146088,246.876312 140.285110,228.989670 C141.183945,228.103317 141.957443,227.089844 143.588837,225.218384 C140.691605,225.066116 138.820053,224.882874 136.948410,224.881958 C102.798264,224.865326 68.647453,224.765244 34.498699,224.983612 C29.315699,225.016739 27.990419,223.343155 28.090912,218.397430 C28.381887,204.076935 28.189890,189.746719 28.195684,175.420319 C28.198524,168.398178 28.319166,168.279541 35.590389,168.278687 C69.074188,168.274780 102.557991,168.281174 136.041794,168.266083 C137.968231,168.265213 139.894608,168.107101 141.821030,168.022171 C142.137955,167.513992 142.454895,167.005829 142.771820,166.497650 C122.842415,146.495621 102.913002,126.493591 83.261360,106.770348 C96.563828,93.471756 109.448814,80.590523 122.656265,67.386925 C123.522743,68.161835 124.785545,69.187096 125.930321,70.330513 C144.551819,88.930206 163.103683,107.600082 181.805267,126.118790 C186.713593,130.979126 189.085648,136.448059 189.055374,143.437057 C188.899490,179.418961 188.911179,215.402191 189.046661,251.384262 C189.072296,258.190796 186.742920,263.653717 181.982727,268.323273 C164.624405,285.351227 147.295807,302.409485 129.705566,319.705719z'
/>
<path
fill='#5A52F4'
d='M276.070923,246.906128 C288.284363,258.985870 300.156097,270.902100 312.235931,282.603485 C315.158752,285.434784 315.417542,287.246246 312.383484,290.248932 C301.143494,301.372498 290.168549,312.763733 279.075592,324.036255 C278.168030,324.958496 277.121307,325.743835 275.898315,326.801086 C274.628357,325.711792 273.460663,324.822968 272.422150,323.802673 C253.888397,305.594757 235.418701,287.321289 216.818268,269.181854 C211.508789,264.003937 208.872726,258.136688 208.914001,250.565842 C209.108337,214.917786 209.084808,179.267715 208.928864,143.619293 C208.898407,136.654907 211.130066,131.122162 216.052216,126.246094 C234.867538,107.606842 253.537521,88.820908 272.274780,70.102730 C273.313202,69.065353 274.468597,68.145027 275.264038,67.440727 C288.353516,80.579514 301.213470,93.487869 314.597534,106.922356 C295.163391,126.421753 275.214752,146.437363 255.266113,166.452972 C255.540176,166.940353 255.814240,167.427734 256.088318,167.915100 C257.983887,168.035736 259.879425,168.260345 261.775085,168.261551 C295.425201,168.282852 329.075287,168.273544 362.725403,168.279831 C369.598907,168.281113 369.776215,168.463593 369.778931,175.252213 C369.784882,189.911667 369.646088,204.573074 369.861206,219.229355 C369.925110,223.585022 368.554596,224.976288 364.148865,224.956406 C329.833130,224.801605 295.516388,224.869598 261.199951,224.868744 C259.297974,224.868698 257.396027,224.868744 254.866638,224.868744 C262.350708,232.658707 269.078217,239.661194 276.070923,246.906128z'
/>
</svg>
)
}

View File

@@ -29,6 +29,7 @@ import {
DynamoDBIcon,
ElasticsearchIcon,
ElevenLabsIcon,
EnrichSoIcon,
ExaAIIcon,
EyeIcon,
FirecrawlIcon,
@@ -160,6 +161,7 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
dynamodb: DynamoDBIcon,
elasticsearch: ElasticsearchIcon,
elevenlabs: ElevenLabsIcon,
enrich: EnrichSoIcon,
exa: ExaAIIcon,
file_v2: DocumentIcon,
firecrawl: FirecrawlIcon,

View File

@@ -180,11 +180,6 @@ A quick lookup for everyday actions in the Sim workflow editor. For keyboard sho
<td>Right-click → **Enable/Disable**</td>
<td><ActionImage src="/static/quick-reference/disable-block.png" alt="Disable block" /></td>
</tr>
-<tr>
-<td>Lock/Unlock a block</td>
-<td>Hover block → Click lock icon (Admin only)</td>
-<td><ActionImage src="/static/quick-reference/lock-block.png" alt="Lock block" /></td>
-</tr>
<tr>
<td>Toggle handle orientation</td>
<td>Right-click → **Toggle Handles**</td>

View File

@@ -0,0 +1,930 @@
---
title: Enrich
description: B2B data enrichment and LinkedIn intelligence with Enrich.so
---
import { BlockInfoCard } from "@/components/ui/block-info-card"
<BlockInfoCard
type="enrich"
color="#E5E5E6"
/>
{/* MANUAL-CONTENT-START:intro */}
[Enrich.so](https://enrich.so/) delivers real-time, precision B2B data enrichment and LinkedIn intelligence. Its platform provides dynamic access to public and structured company, contact, and professional information, enabling teams to build richer profiles, improve lead quality, and drive more effective outreach.
With Enrich.so, you can:
- **Enrich contact and company profiles**: Instantly discover key data points for leads, prospects, and businesses using just an email or LinkedIn profile.
- **Verify email deliverability**: Check if emails are valid, deliverable, and safe to contact before sending.
- **Find work & personal emails**: Identify missing business emails from a LinkedIn profile or personal emails to expand your reach.
- **Reveal phone numbers and social profiles**: Surface additional communication channels for contacts through enrichment tools.
- **Analyze LinkedIn posts and engagement**: Extract insights on post reach, reactions, and audience from public LinkedIn content.
- **Conduct advanced people and company search**: Enable your agents to locate companies and professionals based on deep filters and real-time intelligence.
The Sim integration with Enrich.so empowers your agents and automations to instantly query, enrich, and validate B2B data, boosting productivity in workflows like sales prospecting, recruiting, marketing operations, and more. Combining Sim's orchestration capabilities with Enrich.so unlocks smarter, data-driven automation strategies powered by best-in-class B2B intelligence.
{/* MANUAL-CONTENT-END */}
## Usage Instructions
Access real-time B2B data intelligence with Enrich.so. Enrich profiles from email addresses, find work emails from LinkedIn, verify email deliverability, search for people and companies, and analyze LinkedIn post engagement.
## Tools
### `enrich_check_credits`
Check your Enrich API credit usage and remaining balance.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Enrich API key |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `totalCredits` | number | Total credits allocated to the account |
| `creditsUsed` | number | Credits consumed so far |
| `creditsRemaining` | number | Available credits remaining |
### `enrich_email_to_profile`
Retrieve detailed LinkedIn profile information using an email address including work history, education, and skills.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Enrich API key |
| `email` | string | Yes | Email address to look up \(e.g., john.doe@company.com\) |
| `inRealtime` | boolean | No | Set to true to retrieve fresh data, bypassing cached information |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `displayName` | string | Full display name |
| `firstName` | string | First name |
| `lastName` | string | Last name |
| `headline` | string | Professional headline |
| `occupation` | string | Current occupation |
| `summary` | string | Profile summary |
| `location` | string | Location |
| `country` | string | Country |
| `linkedInUrl` | string | LinkedIn profile URL |
| `photoUrl` | string | Profile photo URL |
| `connectionCount` | number | Number of connections |
| `isConnectionCountObfuscated` | boolean | Whether connection count is obfuscated \(500+\) |
| `positionHistory` | array | Work experience history |
| ↳ `title` | string | Job title |
| ↳ `company` | string | Company name |
| ↳ `startDate` | string | Start date |
| ↳ `endDate` | string | End date |
| ↳ `location` | string | Location |
| `education` | array | Education history |
| ↳ `school` | string | School name |
| ↳ `degree` | string | Degree |
| ↳ `fieldOfStudy` | string | Field of study |
| ↳ `startDate` | string | Start date |
| ↳ `endDate` | string | End date |
| `certifications` | array | Professional certifications |
| ↳ `name` | string | Certification name |
| ↳ `authority` | string | Issuing authority |
| ↳ `url` | string | Certification URL |
| `skills` | array | List of skills |
| `languages` | array | List of languages |
| `locale` | string | Profile locale \(e.g., en_US\) |
| `version` | number | Profile version number |
### `enrich_email_to_person_lite`
Retrieve basic LinkedIn profile information from an email address. A lighter version with essential data only.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Enrich API key |
| `email` | string | Yes | Email address to look up \(e.g., john.doe@company.com\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `name` | string | Full name |
| `firstName` | string | First name |
| `lastName` | string | Last name |
| `email` | string | Email address |
| `title` | string | Job title |
| `location` | string | Location |
| `company` | string | Current company |
| `companyLocation` | string | Company location |
| `companyLinkedIn` | string | Company LinkedIn URL |
| `profileId` | string | LinkedIn profile ID |
| `schoolName` | string | School name |
| `schoolUrl` | string | School URL |
| `linkedInUrl` | string | LinkedIn profile URL |
| `photoUrl` | string | Profile photo URL |
| `followerCount` | number | Number of followers |
| `connectionCount` | number | Number of connections |
| `languages` | array | Languages spoken |
| `projects` | array | Projects |
| `certifications` | array | Certifications |
| `volunteerExperience` | array | Volunteer experience |
### `enrich_linkedin_profile`
Enrich a LinkedIn profile URL with detailed information including positions, education, and social metrics.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Enrich API key |
| `url` | string | Yes | LinkedIn profile URL \(e.g., linkedin.com/in/williamhgates\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `profileId` | string | LinkedIn profile ID |
| `firstName` | string | First name |
| `lastName` | string | Last name |
| `subTitle` | string | Profile subtitle/headline |
| `profilePicture` | string | Profile picture URL |
| `backgroundImage` | string | Background image URL |
| `industry` | string | Industry |
| `location` | string | Location |
| `followersCount` | number | Number of followers |
| `connectionsCount` | number | Number of connections |
| `premium` | boolean | Whether the account is premium |
| `influencer` | boolean | Whether the account is an influencer |
| `positions` | array | Work positions |
| ↳ `title` | string | Job title |
| ↳ `company` | string | Company name |
| ↳ `companyLogo` | string | Company logo URL |
| ↳ `startDate` | string | Start date |
| ↳ `endDate` | string | End date |
| ↳ `location` | string | Location |
| `education` | array | Education history |
| ↳ `school` | string | School name |
| ↳ `degree` | string | Degree |
| ↳ `fieldOfStudy` | string | Field of study |
| ↳ `startDate` | string | Start date |
| ↳ `endDate` | string | End date |
| `websites` | array | Personal websites |
### `enrich_find_email`
Find a person's email address using their full name and company domain.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Enrich API key |
| `fullName` | string | Yes | Person's full name \(e.g., John Doe\) |
| `companyDomain` | string | Yes | Company domain \(e.g., example.com\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `email` | string | Found email address |
| `firstName` | string | First name |
| `lastName` | string | Last name |
| `domain` | string | Company domain |
| `found` | boolean | Whether an email was found |
| `acceptAll` | boolean | Whether the domain accepts all emails |
### `enrich_linkedin_to_work_email`
Find a work email address from a LinkedIn profile URL.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Enrich API key |
| `linkedinProfile` | string | Yes | LinkedIn profile URL \(e.g., https://www.linkedin.com/in/williamhgates\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `email` | string | Found work email address |
| `found` | boolean | Whether an email was found |
| `status` | string | Request status \(in_progress or completed\) |
### `enrich_linkedin_to_personal_email`
Find personal email address from a LinkedIn profile URL.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Enrich API key |
| `linkedinProfile` | string | Yes | LinkedIn profile URL \(e.g., linkedin.com/in/username\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `email` | string | Personal email address |
| `found` | boolean | Whether an email was found |
| `status` | string | Request status |
### `enrich_phone_finder`
Find a phone number from a LinkedIn profile URL.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Enrich API key |
| `linkedinProfile` | string | Yes | LinkedIn profile URL \(e.g., linkedin.com/in/williamhgates\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `profileUrl` | string | LinkedIn profile URL |
| `mobileNumber` | string | Found mobile phone number |
| `found` | boolean | Whether a phone number was found |
| `status` | string | Request status \(in_progress or completed\) |
### `enrich_email_to_phone`
Find a phone number associated with an email address.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Enrich API key |
| `email` | string | Yes | Email address to look up \(e.g., john.doe@example.com\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `email` | string | Email address looked up |
| `mobileNumber` | string | Found mobile phone number |
| `found` | boolean | Whether a phone number was found |
| `status` | string | Request status \(in_progress or completed\) |
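The email and phone lookup tools above (`enrich_linkedin_to_work_email`, `enrich_linkedin_to_personal_email`, `enrich_phone_finder`, and `enrich_email_to_phone`) can return a `status` of `in_progress`, so callers may need to retry until the lookup is `completed`. A minimal polling sketch, assuming a hypothetical `invokeTool` helper and illustrative retry limits:

```typescript
// Hypothetical helper that runs an Enrich tool and returns its output object.
type InvokeTool = (
  tool: string,
  params: Record<string, unknown>
) => Promise<{ status: string; found?: boolean; mobileNumber?: string }>

// Sketch: re-run an asynchronous lookup until it reports `completed`.
// Five attempts and a two-second delay are assumptions, not documented limits.
async function pollPhoneLookup(invokeTool: InvokeTool, apiKey: string, email: string) {
  for (let attempt = 0; attempt < 5; attempt++) {
    const result = await invokeTool('enrich_email_to_phone', { apiKey, email })
    if (result.status === 'completed') return result
    await new Promise((resolve) => setTimeout(resolve, 2000)) // wait before retrying
  }
  throw new Error('Lookup still in progress after 5 attempts')
}
```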
### `enrich_verify_email`
Verify an email address for deliverability, including catch-all detection and provider identification.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Enrich API key |
| `email` | string | Yes | Email address to verify \(e.g., john.doe@example.com\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `email` | string | Email address verified |
| `status` | string | Verification status |
| `result` | string | Deliverability result \(deliverable, undeliverable, etc.\) |
| `confidenceScore` | number | Confidence score \(0-100\) |
| `smtpProvider` | string | Email service provider \(e.g., Google, Microsoft\) |
| `mailDisposable` | boolean | Whether the email is from a disposable provider |
| `mailAcceptAll` | boolean | Whether the domain is a catch-all domain |
| `free` | boolean | Whether the email uses a free email service |
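For illustration, the verification fields can be combined into a simple go/no-go check before sending outreach; the confidence threshold below is an assumption, not documented guidance:

```typescript
// Shape of the enrich_verify_email output documented above.
interface VerifyEmailOutput {
  email: string
  status: string
  result: string // 'deliverable', 'undeliverable', etc.
  confidenceScore: number // 0-100
  smtpProvider: string
  mailDisposable: boolean
  mailAcceptAll: boolean
  free: boolean
}

// Sketch: treat an address as safe to contact only if it is deliverable,
// not disposable, and clears an (assumed) confidence threshold.
function isSafeToContact(v: VerifyEmailOutput): boolean {
  return v.result === 'deliverable' && !v.mailDisposable && v.confidenceScore >= 80
}
```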
### `enrich_disposable_email_check`
Check if an email address is from a disposable or temporary email provider. Returns a score and validation details.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Enrich API key |
| `email` | string | Yes | Email address to check \(e.g., john.doe@example.com\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `email` | string | Email address checked |
| `score` | number | Validation score \(0-100\) |
| `testsPassed` | string | Number of tests passed \(e.g., "3/3"\) |
| `passed` | boolean | Whether the email passed all validation tests |
| `reason` | string | Reason for failure if email did not pass |
| `mailServerIp` | string | Mail server IP address |
| `mxRecords` | array | MX records for the domain |
| ↳ `host` | string | MX record host |
| ↳ `pref` | number | MX record preference |
### `enrich_email_to_ip`
Discover an IP address associated with an email address.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Enrich API key |
| `email` | string | Yes | Email address to look up \(e.g., john.doe@example.com\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `email` | string | Email address looked up |
| `ip` | string | Associated IP address |
| `found` | boolean | Whether an IP address was found |
### `enrich_ip_to_company`
Identify a company from an IP address with detailed firmographic information.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Enrich API key |
| `ip` | string | Yes | IP address to look up \(e.g., 86.92.60.221\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `name` | string | Company name |
| `legalName` | string | Legal company name |
| `domain` | string | Primary domain |
| `domainAliases` | array | Domain aliases |
| `sector` | string | Business sector |
| `industry` | string | Industry |
| `phone` | string | Phone number |
| `employees` | number | Number of employees |
| `revenue` | string | Estimated revenue |
| `location` | json | Company location |
| ↳ `city` | string | City |
| ↳ `state` | string | State |
| ↳ `country` | string | Country |
| ↳ `timezone` | string | Timezone |
| `linkedInUrl` | string | LinkedIn company URL |
| `twitterUrl` | string | Twitter URL |
| `facebookUrl` | string | Facebook URL |
### `enrich_company_lookup`
Look up comprehensive company information by name or domain including funding, location, and social profiles.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Enrich API key |
| `name` | string | No | Company name \(e.g., Google\) |
| `domain` | string | No | Company domain \(e.g., google.com\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `name` | string | Company name |
| `universalName` | string | Universal company name |
| `companyId` | string | Company ID |
| `description` | string | Company description |
| `phone` | string | Phone number |
| `linkedInUrl` | string | LinkedIn company URL |
| `websiteUrl` | string | Company website |
| `followers` | number | Number of LinkedIn followers |
| `staffCount` | number | Number of employees |
| `foundedDate` | string | Date founded |
| `type` | string | Company type |
| `industries` | array | Industries |
| `specialties` | array | Company specialties |
| `headquarters` | json | Headquarters location |
| ↳ `city` | string | City |
| ↳ `country` | string | Country |
| ↳ `postalCode` | string | Postal code |
| ↳ `line1` | string | Address line 1 |
| `logo` | string | Company logo URL |
| `coverImage` | string | Cover image URL |
| `fundingRounds` | array | Funding history |
| ↳ `roundType` | string | Funding round type |
| ↳ `amount` | number | Amount raised |
| ↳ `currency` | string | Currency |
| ↳ `investors` | array | Investors |
### `enrich_company_funding`
Retrieve company funding history, traffic metrics, and executive information by domain.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Enrich API key |
| `domain` | string | Yes | Company domain \(e.g., example.com\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `legalName` | string | Legal company name |
| `employeeCount` | number | Number of employees |
| `headquarters` | string | Headquarters location |
| `industry` | string | Industry |
| `totalFundingRaised` | number | Total funding raised |
| `fundingRounds` | array | Funding rounds |
| ↳ `roundType` | string | Round type |
| ↳ `amount` | number | Amount raised |
| ↳ `date` | string | Date |
| ↳ `investors` | array | Investors |
| `monthlyVisits` | number | Monthly website visits |
| `trafficChange` | number | Traffic change percentage |
| `itSpending` | number | Estimated IT spending in USD |
| `executives` | array | Executive team |
| ↳ `name` | string | Name |
| ↳ `title` | string | Title |
### `enrich_company_revenue`
Retrieve company revenue data, CEO information, and competitive analysis by domain.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Enrich API key |
| `domain` | string | Yes | Company domain \(e.g., clay.io\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `companyName` | string | Company name |
| `shortDescription` | string | Short company description |
| `fullSummary` | string | Full company summary |
| `revenue` | string | Company revenue |
| `revenueMin` | number | Minimum revenue estimate |
| `revenueMax` | number | Maximum revenue estimate |
| `employeeCount` | number | Number of employees |
| `founded` | string | Year founded |
| `ownership` | string | Ownership type |
| `status` | string | Company status \(e.g., Active\) |
| `website` | string | Company website URL |
| `ceo` | json | CEO information |
| ↳ `name` | string | CEO name |
| ↳ `designation` | string | CEO designation/title |
| ↳ `rating` | number | CEO rating |
| `socialLinks` | json | Social media links |
| ↳ `linkedIn` | string | LinkedIn URL |
| ↳ `twitter` | string | Twitter URL |
| ↳ `facebook` | string | Facebook URL |
| `totalFunding` | string | Total funding raised |
| `fundingRounds` | number | Number of funding rounds |
| `competitors` | array | Competitors |
| ↳ `name` | string | Competitor name |
| ↳ `revenue` | string | Revenue |
| ↳ `employeeCount` | number | Employee count |
| ↳ `headquarters` | string | Headquarters |
### `enrich_search_people`
Search for professionals by various criteria including name, title, skills, education, and company.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Enrich API key |
| `firstName` | string | No | First name |
| `lastName` | string | No | Last name |
| `summary` | string | No | Professional summary keywords |
| `subTitle` | string | No | Job title/subtitle |
| `locationCountry` | string | No | Country |
| `locationCity` | string | No | City |
| `locationState` | string | No | State/province |
| `influencer` | boolean | No | Filter for influencers only |
| `premium` | boolean | No | Filter for premium accounts only |
| `language` | string | No | Primary language |
| `industry` | string | No | Industry |
| `currentJobTitles` | json | No | Current job titles \(array\) |
| `pastJobTitles` | json | No | Past job titles \(array\) |
| `skills` | json | No | Skills to search for \(array\) |
| `schoolNames` | json | No | School names \(array\) |
| `certifications` | json | No | Certifications to filter by \(array\) |
| `degreeNames` | json | No | Degree names to filter by \(array\) |
| `studyFields` | json | No | Fields of study to filter by \(array\) |
| `currentCompanies` | json | No | Current company IDs to filter by \(array of numbers\) |
| `pastCompanies` | json | No | Past company IDs to filter by \(array of numbers\) |
| `currentPage` | number | No | Page number \(default: 1\) |
| `pageSize` | number | No | Results per page \(default: 20\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `currentPage` | number | Current page number |
| `totalPage` | number | Total number of pages |
| `pageSize` | number | Results per page |
| `profiles` | array | Search results |
| ↳ `profileIdentifier` | string | Profile ID |
| ↳ `givenName` | string | First name |
| ↳ `familyName` | string | Last name |
| ↳ `currentPosition` | string | Current job title |
| ↳ `profileImage` | string | Profile image URL |
| ↳ `externalProfileUrl` | string | LinkedIn URL |
| ↳ `city` | string | City |
| ↳ `country` | string | Country |
| ↳ `expertSkills` | array | Skills |
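The search tools share the `currentPage`/`totalPage` pagination shape, so results can be accumulated page by page. A sketch, again assuming a hypothetical `invokeTool` helper:

```typescript
// Sketch: walk every page of enrich_search_people and collect the profiles.
// `invokeTool` is a hypothetical helper, not part of the documented tool set.
async function collectAllProfiles(
  invokeTool: (
    tool: string,
    params: Record<string, unknown>
  ) => Promise<{ currentPage: number; totalPage: number; profiles: unknown[] }>,
  apiKey: string,
  filters: Record<string, unknown>
): Promise<unknown[]> {
  const profiles: unknown[] = []
  let page = 1
  let totalPages = 1
  do {
    const result = await invokeTool('enrich_search_people', {
      apiKey,
      ...filters,
      currentPage: page,
      pageSize: 20,
    })
    profiles.push(...result.profiles)
    totalPages = result.totalPage
    page++
  } while (page <= totalPages)
  return profiles
}
```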
### `enrich_search_company`
Search for companies by various criteria including name, industry, location, and size.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Enrich API key |
| `name` | string | No | Company name |
| `website` | string | No | Company website URL |
| `tagline` | string | No | Company tagline |
| `type` | string | No | Company type \(e.g., Private, Public\) |
| `description` | string | No | Company description keywords |
| `industries` | json | No | Industries to filter by \(array\) |
| `locationCountry` | string | No | Country |
| `locationCity` | string | No | City |
| `postalCode` | string | No | Postal code |
| `locationCountryList` | json | No | Multiple countries to filter by \(array\) |
| `locationCityList` | json | No | Multiple cities to filter by \(array\) |
| `specialities` | json | No | Company specialties \(array\) |
| `followers` | number | No | Minimum number of followers |
| `staffCount` | number | No | Maximum staff count |
| `staffCountMin` | number | No | Minimum staff count |
| `staffCountMax` | number | No | Maximum staff count |
| `currentPage` | number | No | Page number \(default: 1\) |
| `pageSize` | number | No | Results per page \(default: 20\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `currentPage` | number | Current page number |
| `totalPage` | number | Total number of pages |
| `pageSize` | number | Results per page |
| `companies` | array | Search results |
| ↳ `companyName` | string | Company name |
| ↳ `tagline` | string | Company tagline |
| ↳ `webAddress` | string | Website URL |
| ↳ `industries` | array | Industries |
| ↳ `teamSize` | number | Team size |
| ↳ `linkedInProfile` | string | LinkedIn URL |
### `enrich_search_company_employees`
Search for employees within specific companies by location and job title.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Enrich API key |
| `companyIds` | json | No | Array of company IDs to search within |
| `country` | string | No | Country filter \(e.g., United States\) |
| `city` | string | No | City filter \(e.g., San Francisco\) |
| `state` | string | No | State filter \(e.g., California\) |
| `jobTitles` | json | No | Job titles to filter by \(array\) |
| `page` | number | No | Page number \(default: 1\) |
| `pageSize` | number | No | Results per page \(default: 10\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `currentPage` | number | Current page number |
| `totalPage` | number | Total number of pages |
| `pageSize` | number | Number of results per page |
| `profiles` | array | Employee profiles |
| ↳ `profileIdentifier` | string | Profile ID |
| ↳ `givenName` | string | First name |
| ↳ `familyName` | string | Last name |
| ↳ `currentPosition` | string | Current job title |
| ↳ `profileImage` | string | Profile image URL |
| ↳ `externalProfileUrl` | string | LinkedIn URL |
| ↳ `city` | string | City |
| ↳ `country` | string | Country |
| ↳ `expertSkills` | array | Skills |
### `enrich_search_similar_companies`
Find companies similar to a given company by LinkedIn URL with filters for location and size.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Enrich API key |
| `url` | string | Yes | LinkedIn company URL \(e.g., linkedin.com/company/google\) |
| `accountLocation` | json | No | Filter by locations \(array of country names\) |
| `employeeSizeType` | string | No | Employee size filter type \(e.g., RANGE\) |
| `employeeSizeRange` | json | No | Employee size ranges \(array of \{start, end\} objects\) |
| `page` | number | No | Page number \(default: 1\) |
| `num` | number | No | Number of results per page |
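As a sketch of how these parameters fit together (the API key, locations, and size ranges are placeholder values):

```typescript
// Illustrative parameters for enrich_search_similar_companies:
// companies similar to Google, in two countries, sized 51-500 employees.
const params = {
  apiKey: 'YOUR_ENRICH_API_KEY', // placeholder
  url: 'linkedin.com/company/google',
  accountLocation: ['United States', 'Canada'],
  employeeSizeType: 'RANGE',
  employeeSizeRange: [
    { start: 51, end: 200 },
    { start: 201, end: 500 },
  ],
  page: 1,
  num: 10,
}
```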
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `companies` | array | Similar companies |
| ↳ `url` | string | LinkedIn URL |
| ↳ `name` | string | Company name |
| ↳ `universalName` | string | Universal name |
| ↳ `type` | string | Company type |
| ↳ `description` | string | Description |
| ↳ `phone` | string | Phone number |
| ↳ `website` | string | Website URL |
| ↳ `logo` | string | Logo URL |
| ↳ `foundedYear` | number | Year founded |
| ↳ `staffTotal` | number | Total staff |
| ↳ `industries` | array | Industries |
| ↳ `relevancyScore` | number | Relevancy score |
| ↳ `relevancyValue` | string | Relevancy value |
### `enrich_sales_pointer_people`
Advanced people search with complex filters for location, company size, seniority, experience, and more.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Enrich API key |
| `page` | number | Yes | Page number \(starts at 1\) |
| `filters` | json | Yes | Array of filter objects. Each filter has type \(e.g., POSTAL_CODE, COMPANY_HEADCOUNT\), values \(array with id, text, selectionType: INCLUDED/EXCLUDED\), and optional selectedSubFilter |
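For illustration, a `filters` value matching the shape described above might look like the sketch below; the filter IDs, texts, and the `selectedSubFilter` value are placeholders, not values confirmed against the API:

```typescript
// Illustrative filters: include companies in a given headcount band,
// exclude one postal code. All ids/texts below are placeholder values.
const filters = [
  {
    type: 'COMPANY_HEADCOUNT',
    values: [{ id: 'C', text: '51-200', selectionType: 'INCLUDED' }],
  },
  {
    type: 'POSTAL_CODE',
    values: [{ id: '90210', text: '90210', selectionType: 'EXCLUDED' }],
    selectedSubFilter: 50, // optional; exact meaning (e.g., a radius) is assumed
  },
]
```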
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `data` | array | People results |
| ↳ `name` | string | Full name |
| ↳ `summary` | string | Professional summary |
| ↳ `location` | string | Location |
| ↳ `profilePicture` | string | Profile picture URL |
| ↳ `linkedInUrn` | string | LinkedIn URN |
| ↳ `positions` | array | Work positions |
| ↳ `education` | array | Education |
| `pagination` | json | Pagination info |
| ↳ `totalCount` | number | Total results |
| ↳ `returnedCount` | number | Returned count |
| ↳ `start` | number | Start position |
| ↳ `limit` | number | Limit |
### `enrich_search_posts`
Search LinkedIn posts by keywords with date filtering.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Enrich API key |
| `keywords` | string | Yes | Search keywords \(e.g., "AI automation"\) |
| `datePosted` | string | No | Time filter \(e.g., past_week, past_month\) |
| `page` | number | No | Page number \(default: 1\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `count` | number | Total number of results |
| `posts` | array | Search results |
| ↳ `url` | string | Post URL |
| ↳ `postId` | string | Post ID |
| ↳ `author` | object | Author information |
| ↳ `name` | string | Author name |
| ↳ `headline` | string | Author headline |
| ↳ `linkedInUrl` | string | Author LinkedIn URL |
| ↳ `profileImage` | string | Author profile image |
| ↳ `timestamp` | string | Post timestamp |
| ↳ `textContent` | string | Post text content |
| ↳ `hashtags` | array | Hashtags |
| ↳ `mediaUrls` | array | Media URLs |
| ↳ `reactions` | number | Number of reactions |
| ↳ `commentsCount` | number | Number of comments |
### `enrich_get_post_details`
Get detailed information about a LinkedIn post by URL.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Enrich API key |
| `url` | string | Yes | LinkedIn post URL |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `postId` | string | Post ID |
| `author` | json | Author information |
| ↳ `name` | string | Author name |
| ↳ `headline` | string | Author headline |
| ↳ `linkedInUrl` | string | Author LinkedIn URL |
| ↳ `profileImage` | string | Author profile image |
| `timestamp` | string | Post timestamp |
| `textContent` | string | Post text content |
| `hashtags` | array | Hashtags |
| `mediaUrls` | array | Media URLs |
| `reactions` | number | Number of reactions |
| `commentsCount` | number | Number of comments |
### `enrich_search_post_reactions`
Get reactions on a LinkedIn post with filtering by reaction type.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Enrich API key |
| `postUrn` | string | Yes | LinkedIn activity URN \(e.g., urn:li:activity:7231931952839196672\) |
| `reactionType` | string | Yes | Reaction type filter: all, like, love, celebrate, insightful, or funny \(default: all\) |
| `page` | number | Yes | Page number \(starts at 1\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `page` | number | Current page number |
| `totalPage` | number | Total number of pages |
| `count` | number | Number of reactions returned |
| `reactions` | array | Reactions |
| ↳ `reactionType` | string | Type of reaction |
| ↳ `reactor` | object | Person who reacted |
| ↳ `name` | string | Name |
| ↳ `subTitle` | string | Job title |
| ↳ `profileId` | string | Profile ID |
| ↳ `profilePicture` | string | Profile picture URL |
| ↳ `linkedInUrl` | string | LinkedIn URL |
### `enrich_search_post_comments`
Get comments on a LinkedIn post.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Enrich API key |
| `postUrn` | string | Yes | LinkedIn activity URN \(e.g., urn:li:activity:7191163324208705536\) |
| `page` | number | No | Page number \(starts at 1, default: 1\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `page` | number | Current page number |
| `totalPage` | number | Total number of pages |
| `count` | number | Number of comments returned |
| `comments` | array | Comments |
| ↳ `activityId` | string | Comment activity ID |
| ↳ `commentary` | string | Comment text |
| ↳ `linkedInUrl` | string | Link to comment |
| ↳ `commenter` | object | Commenter info |
| ↳ `profileId` | string | Profile ID |
| ↳ `firstName` | string | First name |
| ↳ `lastName` | string | Last name |
| ↳ `subTitle` | string | Subtitle/headline |
| ↳ `profilePicture` | string | Profile picture URL |
| ↳ `backgroundImage` | string | Background image URL |
| ↳ `entityUrn` | string | Entity URN |
| ↳ `objectUrn` | string | Object URN |
| ↳ `profileType` | string | Profile type |
| ↳ `reactionBreakdown` | object | Reactions on the comment |
| ↳ `likes` | number | Number of likes |
| ↳ `empathy` | number | Number of empathy reactions |
| ↳ `other` | number | Number of other reactions |
### `enrich_search_people_activities`
Get a person's LinkedIn activities (posts, comments, or articles) by profile ID.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Enrich API key |
| `profileId` | string | Yes | LinkedIn profile ID |
| `activityType` | string | Yes | Activity type: posts, comments, or articles |
| `paginationToken` | string | No | Pagination token for next page of results |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `paginationToken` | string | Token for fetching next page |
| `activityType` | string | Type of activities returned |
| `activities` | array | Activities |
| ↳ `activityId` | string | Activity ID |
| ↳ `commentary` | string | Activity text content |
| ↳ `linkedInUrl` | string | Link to activity |
| ↳ `timeElapsed` | string | Time elapsed since activity |
| ↳ `numReactions` | number | Total number of reactions |
| ↳ `author` | object | Activity author info |
| ↳ `name` | string | Author name |
| ↳ `profileId` | string | Profile ID |
| ↳ `profilePicture` | string | Profile picture URL |
| ↳ `reactionBreakdown` | object | Reactions |
| ↳ `likes` | number | Likes |
| ↳ `empathy` | number | Empathy reactions |
| ↳ `other` | number | Other reactions |
| ↳ `attachments` | array | Attachment URLs |
### `enrich_search_company_activities`
Get a company's LinkedIn activities (posts, comments, or articles) by company ID.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Enrich API key |
| `companyId` | string | Yes | LinkedIn company ID |
| `activityType` | string | Yes | Activity type: posts, comments, or articles |
| `paginationToken` | string | No | Pagination token for next page of results |
| `offset` | number | No | Number of records to skip \(default: 0\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `paginationToken` | string | Token for fetching next page |
| `activityType` | string | Type of activities returned |
| `activities` | array | Activities |
| ↳ `activityId` | string | Activity ID |
| ↳ `commentary` | string | Activity text content |
| ↳ `linkedInUrl` | string | Link to activity |
| ↳ `timeElapsed` | string | Time elapsed since activity |
| ↳ `numReactions` | number | Total number of reactions |
| ↳ `author` | object | Activity author info |
| ↳ `name` | string | Author name |
| ↳ `profileId` | string | Profile ID |
| ↳ `profilePicture` | string | Profile picture URL |
| ↳ `reactionBreakdown` | object | Reactions |
| ↳ `likes` | number | Likes |
| ↳ `empathy` | number | Empathy reactions |
| ↳ `other` | number | Other reactions |
| ↳ `attachments` | array | Attachments |
### `enrich_reverse_hash_lookup`
Convert an MD5 email hash back to the original email address and display name.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Enrich API key |
| `hash` | string | Yes | MD5 hash value to look up |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `hash` | string | MD5 hash that was looked up |
| `email` | string | Original email address |
| `displayName` | string | Display name associated with the email |
| `found` | boolean | Whether an email was found for the hash |
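MD5 email hashes are conventionally computed over the lowercased, trimmed address (as Gravatar does); Enrich's exact normalization is not documented here, so treat that as an assumption. In Node.js:

```typescript
import { createHash } from 'node:crypto'

// Compute the MD5 hash of an email address. Lowercasing and trimming follow
// the common convention; confirm against the addresses you expect to match.
function emailToMd5(email: string): string {
  return createHash('md5').update(email.trim().toLowerCase()).digest('hex')
}

// Example: emailToMd5('John.Doe@Example.com') === emailToMd5('john.doe@example.com')
```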
### `enrich_search_logo`
Get a company logo image URL by domain.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Enrich API key |
| `url` | string | Yes | Company domain \(e.g., google.com\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `logoUrl` | string | URL to fetch the company logo |
| `domain` | string | Domain that was looked up |

View File

@@ -10,6 +10,23 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
color="#181C1E" color="#181C1E"
/> />
{/* MANUAL-CONTENT-START:intro */}
[GitHub](https://github.com/) is the world's leading platform for hosting, collaborating on, and managing source code. GitHub offers powerful tools for version control, code review, branching strategies, and team collaboration within the rich Git ecosystem, underpinning both open source and enterprise development worldwide.
The GitHub integration in Sim allows your agents to seamlessly automate, interact with, and orchestrate workflows across your repositories. Using this integration, agents can perform an extended set of code and collaboration operations, enabling:
- **Fetch pull request details:** Retrieve a full overview of any pull request, including file diffs, branch information, metadata, approvals, and a summary of changes, for automation or review workflows.
- **Create pull request comments:** Automatically generate or post comments on PRs—such as reviews, suggestions, or status updates—enabling speedy feedback, documentation, or policy enforcement.
- **Get repository information:** Access comprehensive repository metadata, including descriptions, visibility, topics, default branches, and contributors. This supports intelligent project analysis, dynamic workflow routing, and organizational reporting.
- **Fetch the latest commit:** Quickly obtain details from the newest commit on any branch, including hashes, messages, authors, and timestamps. This is useful for monitoring development velocity, triggering downstream actions, or enforcing quality checks.
- **Trigger workflows from GitHub events:** Set up Sim workflows to start automatically from key GitHub events, including pull request creation, review comments, or when new commits are pushed, through easy webhook integration. Automate actions such as deployments, notifications, compliance checks, or documentation updates in real time.
- **Monitor and manage repository activity:** Programmatically track contributions, manage PR review states, analyze branch histories, and audit code changes. Empower agents to enforce requirements, coordinate releases, and respond dynamically to development patterns.
- **Support for advanced automations:** Combine these operations—for example, fetch PR data, leave context-aware comments, and kick off multi-step Sim workflows on code pushes or PR merges—to automate your team's engineering processes from end to end.
By leveraging all of these capabilities, the Sim GitHub integration enables agents to engage deeply in the development lifecycle. Automate code reviews, streamline team feedback, synchronize project artifacts, accelerate CI/CD, and enforce best practices with ease. Bring security, speed, and reliability to your workflows—directly within your Sim-powered automation environment, with full integration into your organization's GitHub strategy.
{/* MANUAL-CONTENT-END */}
## Usage Instructions
Integrate GitHub into the workflow. Can get PR details, create PR comments, get repository info, and get the latest commit. Can be used in trigger mode to trigger a workflow when a PR is created, commented on, or a commit is pushed.

View File

@@ -11,55 +11,17 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
/>
{/* MANUAL-CONTENT-START:intro */}
-[Google Docs](https://docs.google.com) is a powerful cloud-based document creation and editing service that allows users to create, edit, and collaborate on documents in real-time. As part of Google's productivity suite, Google Docs offers a versatile platform for text documents with robust formatting, commenting, and sharing capabilities.
-Learn how to integrate the Google Docs "Read" tool in Sim to effortlessly fetch data from your docs and to integrate into your workflows. This tutorial walks you through connecting Google Docs, setting up data reads, and using that information to automate processes in real-time. Perfect for syncing live data with your agents.
-<iframe width="100%" height="400" src="https://www.youtube.com/embed/f41gy9rBHhE" title="Use the Google Docs Read tool in Sim" frameBorder="0" allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture" allowFullScreen></iframe>
-Learn how to integrate the Google Docs "Update" tool in Sim to effortlessly add content in your docs through your workflows. This tutorial walks you through connecting Google Docs, configuring data writes, and using that information to automate document updates seamlessly. Perfect for maintaining dynamic, real-time documentation with minimal effort.
-<iframe width="100%" height="400" src="https://www.youtube.com/embed/L64ROHS2ivA" title="Use the Google Docs Update tool in Sim" frameBorder="0" allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture" allowFullScreen></iframe>
-Learn how to integrate the Google Docs "Create" tool in Sim to effortlessly generate new documents through your workflows. This tutorial walks you through connecting Google Docs, setting up document creation, and using workflow data to populate content automatically. Perfect for streamlining document generation and enhancing productivity.
-<iframe width="100%" height="400" src="https://www.youtube.com/embed/lWpHH4qddWk" title="Use the Google Docs Create tool in Sim" frameBorder="0" allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture" allowFullScreen></iframe>
-With Google Docs, you can:
-- **Create and edit documents**: Develop text documents with comprehensive formatting options
-- **Collaborate in real-time**: Work simultaneously with multiple users on the same document
-- **Track changes**: View revision history and restore previous versions
-- **Comment and suggest**: Provide feedback and propose edits without changing the original content
-- **Access anywhere**: Use Google Docs across devices with automatic cloud synchronization
-- **Work offline**: Continue working without internet connection with changes syncing when back online
-- **Integrate with other services**: Connect with Google Drive, Sheets, Slides, and third-party applications
-In Sim, the Google Docs integration enables your agents to interact directly with document content programmatically. This allows for powerful automation scenarios such as document creation, content extraction, collaborative editing, and document management. Your agents can read existing documents to extract information, write to documents to update content, and create new documents from scratch. This integration bridges the gap between your AI workflows and document management, enabling seamless interaction with one of the world's most widely used document platforms. By connecting Sim with Google Docs, you can automate document workflows, generate reports, extract insights from documents, and maintain documentation - all through your intelligent agents.
+[Google Docs](https://docs.google.com) is Google's collaborative, cloud-based document service, enabling users to create, edit, and share documents in real time. As an integral part of Google Workspace, Docs offers rich formatting tools, commenting, version history, and seamless integration with other Google productivity tools.
+Google Docs empowers individuals and teams to:
+- **Create and format documents:** Develop rich text documents with advanced formatting, images, and tables.
+- **Collaborate and comment:** Multiple users can edit and comment with suggestions instantly.
+- **Track changes and version history:** Review, revert, and manage revisions over time.
+- **Access from any device:** Work on documents from web, mobile, or desktop with full cloud synchronization.
+- **Integrate across Google services:** Connect Docs with Drive, Sheets, Slides, and external platforms for powerful workflows.
+In Sim, the Google Docs integration allows your agents to read document content, write new content, and create documents programmatically as part of automated workflows. This integration unlocks automation such as document generation, report writing, content extraction, and collaborative editing—bridging the gap between AI-driven workflows and document management in your organization.
{/* MANUAL-CONTENT-END */}

View File

@@ -11,30 +11,18 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
/>
{/* MANUAL-CONTENT-START:intro */}
-[Google Drive](https://drive.google.com) is Google's cloud storage and file synchronization service that allows users to store files, synchronize files across devices, and share files with others. As a core component of Google's productivity ecosystem, Google Drive offers robust storage, organization, and collaboration capabilities.
-Learn how to integrate the Google Drive tool in Sim to effortlessly pull information from your Drive through your workflows. This tutorial walks you through connecting Google Drive, setting up data retrieval, and using stored documents and files to enhance automation. Perfect for syncing important data with your agents in real-time.
-<iframe width="100%" height="400" src="https://www.youtube.com/embed/cRoRr4b-EAs" title="Use the Google Drive tool in Sim" frameBorder="0" allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture" allowFullScreen></iframe>
-With Google Drive, you can:
-- **Store files in the cloud**: Upload and access your files from anywhere with internet access
-- **Organize content**: Create folders, use color coding, and implement naming conventions
-- **Share and collaborate**: Control access permissions and work simultaneously on files
-- **Search efficiently**: Find files quickly with Google's powerful search technology
-- **Access across devices**: Use Google Drive on desktop, mobile, and web platforms
-- **Integrate with other services**: Connect with Google Docs, Sheets, Slides, and third-party applications
-In Sim, the Google Drive integration enables your agents to interact directly with your cloud storage programmatically. This allows for powerful automation scenarios such as file management, content organization, and document workflows. Your agents can upload new files to specific folders, download existing files to process their contents, and list folder contents to navigate your storage structure. This integration bridges the gap between your AI workflows and your document management system, enabling seamless file operations without manual intervention. By connecting Sim with Google Drive, you can automate file-based workflows, manage documents intelligently, and incorporate cloud storage operations into your agent's capabilities.
+[Google Drive](https://drive.google.com) is Google's cloud-based file storage and synchronization service, making it easy to store, manage, share, and access files securely across devices and platforms. As a core element of Google Workspace, Google Drive offers robust tools for file organization, collaboration, and seamless integration with the broader productivity suite.
+Google Drive enables individuals and teams to:
+- **Store files in the cloud:** Access documents, images, videos, and more from anywhere with internet connectivity.
+- **Organize and manage content:** Create and arrange folders, use naming conventions, and leverage search for fast retrieval.
+- **Share and collaborate:** Control file and folder permissions, share with individuals or groups, and collaborate in real time.
+- **Leverage powerful search:** Quickly locate files using Google's search technology.
+- **Access across devices:** Work with your files on desktop, mobile, or web with full synchronization.
+- **Integrate deeply across Google services:** Connect with Google Docs, Sheets, Slides, and partner applications in your workflows.
+In Sim, the Google Drive integration allows your agents to read, upload, download, list, and organize your Drive files programmatically. Agents can automate file management, streamline content workflows, and enable no-code automation around document storage and retrieval. By connecting Sim with Google Drive, you empower your agents to incorporate cloud file operations directly into intelligent business processes.
{/* MANUAL-CONTENT-END */}

View File

@@ -11,29 +11,9 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
/>
{/* MANUAL-CONTENT-START:intro */}
-[Google Search](https://www.google.com) is the world's most widely used search engine, providing access to billions of web pages and information sources. Google Search uses sophisticated algorithms to deliver relevant search results based on user queries, making it an essential tool for finding information on the internet.
-Learn how to integrate the Google Search tool in Sim to effortlessly fetch real-time search results through your workflows. This tutorial walks you through connecting Google Search, configuring search queries, and using live data to enhance automation. Perfect for powering your agents with up-to-date information and smarter decision-making.
-<iframe width="100%" height="400" src="https://www.youtube.com/embed/1B7hV9b5UMQ" title="Use the Google Search tool in Sim" frameBorder="0" allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture" allowFullScreen></iframe>
-With Google Search, you can:
-- **Find relevant information**: Access billions of web pages with Google's powerful search algorithms
-- **Get specific results**: Use search operators to refine and target your queries
-- **Discover diverse content**: Find text, images, videos, news, and other content types
-- **Access knowledge graphs**: Get structured information about people, places, and things
-- **Utilize search features**: Take advantage of specialized search tools like calculators, unit converters, and more
-In Sim, the Google Search integration enables your agents to search the web programmatically and incorporate search results into their workflows. This allows for powerful automation scenarios such as research, fact-checking, data gathering, and information synthesis. Your agents can formulate search queries, retrieve relevant results, and extract information from those results to make decisions or generate insights. This integration bridges the gap between your AI workflows and the vast information available on the web, enabling your agents to access up-to-date information from across the internet. By connecting Sim with Google Search, you can create agents that stay informed with the latest information, verify facts, conduct research, and provide users with relevant web content - all without leaving your workflow.
+[Google Search](https://www.google.com) is the world's most widely used web search engine, making it easy to find information, discover new content, and answer questions in real time. With advanced search algorithms, Google Search helps you quickly locate web pages, images, news, and more using simple or complex queries.
+In Sim, the Google Search integration allows your agents to search the web and retrieve live information as part of automated workflows. This enables powerful use cases such as automated research, fact-checking, knowledge synthesis, and dynamic content discovery. By connecting Sim with Google Search, your agents can perform queries, process and analyze web results, and incorporate the latest information into their decisions—without manual effort. Enhance your workflows with always up-to-date knowledge from across the internet.
{/* MANUAL-CONTENT-END */}

View File

@@ -10,6 +10,20 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
color="#F64F9E" color="#F64F9E"
/> />
{/* MANUAL-CONTENT-START:intro */}
The Memory tool enables your agents to store, retrieve, and manage conversation memories across workflows. It acts as a persistent memory store that agents can access to maintain conversation context, recall facts, or track actions over time.
With the Memory tool, you can:
- **Add new memories**: Store relevant information, events, or conversation history by saving agent or user messages into a structured memory database
- **Retrieve memories**: Fetch specific memories or all memories tied to a conversation, helping agents recall previous interactions or facts
- **Delete memories**: Remove outdated or incorrect memories from the database to maintain accurate context
- **Append to existing conversations**: Update or expand on existing memory threads by appending new messages with the same conversation identifier
Sim's Memory block is especially useful for building agents that require persistent state: remembering what was said earlier in a conversation, persisting facts between tasks, or applying long-term history to decision-making. By integrating Memory, you enable richer, more contextual, and more dynamic workflows for your agents, as the sketch below illustrates.
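As a rough sketch of how these operations compose, the example below models the four operations against a hypothetical HTTP endpoint. The endpoint path, field names, and response shapes are illustrative assumptions, not Sim's actual Memory API.

```ts
// Hypothetical request shape -- Sim's real Memory block defines its own schema.
type MemoryMessage = { role: 'user' | 'agent'; content: string }

interface MemoryOp {
  operation: 'add' | 'get' | 'getAll' | 'delete'
  conversationId: string
  message?: MemoryMessage // only used by 'add'
}

async function runMemoryOp(op: MemoryOp): Promise<unknown> {
  const res = await fetch('/api/memory', {
    // assumed endpoint
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(op),
  })
  if (!res.ok) throw new Error(`Memory operation failed: ${res.status}`)
  return res.json()
}

async function demo() {
  // Append to an existing conversation by reusing its identifier.
  await runMemoryOp({
    operation: 'add',
    conversationId: 'support-ticket-42',
    message: { role: 'user', content: 'My invoice total looks wrong.' },
  })

  // Later, recall everything tied to that conversation.
  const history = await runMemoryOp({
    operation: 'getAll',
    conversationId: 'support-ticket-42',
  })
  console.log(history)
}

demo().catch(console.error)
```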
{/* MANUAL-CONTENT-END */}
## Usage Instructions ## Usage Instructions
Integrate Memory into the workflow. Can add, get a memory, get all memories, and delete memories. Integrate Memory into the workflow. Can add, get a memory, get all memories, and delete memories.

@@ -24,6 +24,7 @@
"dynamodb", "dynamodb",
"elasticsearch", "elasticsearch",
"elevenlabs", "elevenlabs",
"enrich",
"exa", "exa",
"file", "file",
"firecrawl", "firecrawl",

@@ -10,6 +10,21 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
color="#181C1E" color="#181C1E"
/> />
{/* MANUAL-CONTENT-START:intro */}
The Notion tool integration enables your agents to read, create, and manage Notion pages and databases directly within your workflows. This allows you to automate the retrieval and updating of structured content, notes, documents, and more from your Notion workspace.
With the Notion tool, you can:
- **Read pages or databases**: Extract rich content or metadata from specified Notion pages or entire databases
- **Create new content**: Programmatically create new pages or databases for dynamic content generation
- **Append content**: Add new blocks or properties to existing pages and databases
- **Query databases**: Run advanced filters and searches on structured Notion data for custom workflows
- **Search your workspace**: Locate pages and databases across your Notion workspace automatically
This tool is ideal for scenarios where agents need to synchronize information, generate reports, or maintain structured notes within Notion. By bringing Notion's capabilities into automated workflows, you empower your agents to interface with knowledge, documentation, and project management data programmatically and seamlessly.
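To make the query operation concrete, here is a sketch of filtering a database for completed items. The wrapper endpoint and response shape are assumptions for illustration; the filter object mirrors Notion's public API filter format.

```ts
// Illustrative only: endpoint and response shape are assumptions, not Sim's schema.
interface NotionDatabaseQuery {
  databaseId: string
  filter?: { property: string; select?: { equals: string } }
}

// Sketch: ask the Notion tool for all "Done" items, then collect their titles.
async function queryDone(q: NotionDatabaseQuery): Promise<string[]> {
  const res = await fetch('/api/tools/notion/query', {
    // assumed endpoint
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(q),
  })
  if (!res.ok) throw new Error(`Notion query failed: ${res.status}`)
  const data: { results: Array<{ title: string }> } = await res.json()
  return data.results.map((r) => r.title)
}

queryDone({
  databaseId: 'your-database-id',
  filter: { property: 'Status', select: { equals: 'Done' } },
}).then(console.log, console.error)
```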
{/* MANUAL-CONTENT-END */}
## Usage Instructions ## Usage Instructions
Integrate with Notion into the workflow. Can read page, read database, create page, create database, append content, query database, and search workspace. Integrate with Notion into the workflow. Can read page, read database, create page, create database, append content, query database, and search workspace.

@@ -11,7 +11,7 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
/> />
{/* MANUAL-CONTENT-START:intro */} {/* MANUAL-CONTENT-START:intro */}
The [Pulse](https://www.runpulse.com) tool enables seamless extraction of text and structured content from a wide variety of documents—including PDFs, images, and Office files—using state-of-the-art OCR (Optical Character Recognition) powered by Pulse. Designed for automated agentic workflows, Pulse Parser makes it easy to unlock valuable information trapped in unstructured documents and integrate the extracted content directly into your workflow. The [Pulse](https://www.pulseapi.com/) tool enables seamless extraction of text and structured content from a wide variety of documents—including PDFs, images, and Office files—using state-of-the-art OCR (Optical Character Recognition) powered by Pulse. Designed for automated agentic workflows, Pulse Parser makes it easy to unlock valuable information trapped in unstructured documents and integrate the extracted content directly into your workflow.
With Pulse, you can: With Pulse, you can:

@@ -13,16 +13,6 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
{/* MANUAL-CONTENT-START:intro */} {/* MANUAL-CONTENT-START:intro */}
[Slack](https://www.slack.com/) is a business communication platform that offers teams a unified place for messaging, tools, and files. [Slack](https://www.slack.com/) is a business communication platform that offers teams a unified place for messaging, tools, and files.
<iframe
width="100%"
height="400"
src="https://www.youtube.com/embed/J5jz3UaWmE8"
title="Slack Integration with Sim"
frameBorder="0"
allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture"
allowFullScreen
></iframe>
With Slack, you can: With Slack, you can:
- **Automate agent notifications**: Send real-time updates from your Sim agents to any Slack channel - **Automate agent notifications**: Send real-time updates from your Sim agents to any Slack channel

Binary image file removed (previously 34 KiB); contents not shown.
@@ -8,6 +8,7 @@ import { verifyCronAuth } from '@/lib/auth/internal'
const logger = createLogger('CleanupStaleExecutions') const logger = createLogger('CleanupStaleExecutions')
const STALE_THRESHOLD_MINUTES = 30 const STALE_THRESHOLD_MINUTES = 30
const MAX_INT32 = 2_147_483_647
export async function GET(request: NextRequest) { export async function GET(request: NextRequest) {
try { try {
@@ -45,13 +46,14 @@ export async function GET(request: NextRequest) {
try { try {
const staleDurationMs = Date.now() - new Date(execution.startedAt).getTime() const staleDurationMs = Date.now() - new Date(execution.startedAt).getTime()
const staleDurationMinutes = Math.round(staleDurationMs / 60000) const staleDurationMinutes = Math.round(staleDurationMs / 60000)
const totalDurationMs = Math.min(staleDurationMs, MAX_INT32)
await db await db
.update(workflowExecutionLogs) .update(workflowExecutionLogs)
.set({ .set({
status: 'failed', status: 'failed',
endedAt: new Date(), endedAt: new Date(),
totalDurationMs: staleDurationMs, totalDurationMs,
executionData: sql`jsonb_set( executionData: sql`jsonb_set(
COALESCE(execution_data, '{}'::jsonb), COALESCE(execution_data, '{}'::jsonb),
ARRAY['error'], ARRAY['error'],
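Context for this fix: `totalDurationMs` appears to be persisted to a signed 32-bit integer column (the commit message cites an integer overflow), and such a column tops out at 2,147,483,647 ms, roughly 24.8 days. An execution row left stale longer than that would overflow the column on write. A minimal illustration of the clamp:

```ts
const MAX_INT32 = 2_147_483_647 // upper bound of a signed 32-bit integer

// Postgres `integer` columns overflow past 2^31 - 1, which a stale execution
// reaches after roughly 24.8 days of accumulated milliseconds.
function clampDurationMs(durationMs: number): number {
  return Math.min(durationMs, MAX_INT32)
}

const thirtyDaysMs = 30 * 24 * 60 * 60 * 1000 // 2_592_000_000 -- would overflow
console.log(clampDurationMs(thirtyDaysMs))    // 2147483647 -- safe to persist
```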

@@ -21,6 +21,7 @@ import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server' import { type NextRequest, NextResponse } from 'next/server'
import { checkHybridAuth } from '@/lib/auth/hybrid' import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateInternalToken } from '@/lib/auth/internal' import { generateInternalToken } from '@/lib/auth/internal'
import { getMaxExecutionTimeout } from '@/lib/core/execution-limits'
import { getBaseUrl } from '@/lib/core/utils/urls' import { getBaseUrl } from '@/lib/core/utils/urls'
const logger = createLogger('WorkflowMcpServeAPI') const logger = createLogger('WorkflowMcpServeAPI')
@@ -264,7 +265,7 @@ async function handleToolsCall(
method: 'POST', method: 'POST',
headers, headers,
body: JSON.stringify({ input: params.arguments || {}, triggerType: 'mcp' }), body: JSON.stringify({ input: params.arguments || {}, triggerType: 'mcp' }),
signal: AbortSignal.timeout(600000), // 10 minute timeout signal: AbortSignal.timeout(getMaxExecutionTimeout()),
}) })
const executeResult = await response.json() const executeResult = await response.json()
@@ -284,7 +285,7 @@ async function handleToolsCall(
content: [ content: [
{ type: 'text', text: JSON.stringify(executeResult.output || executeResult, null, 2) }, { type: 'text', text: JSON.stringify(executeResult.output || executeResult, null, 2) },
], ],
isError: !executeResult.success, isError: executeResult.success === false,
} }
return NextResponse.json(createResponse(id, result)) return NextResponse.json(createResponse(id, result))
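The `isError` change is behavioral, not cosmetic. With `!executeResult.success`, a child workflow that ends in a response block and returns output without any `success` field gets flagged as an error; `=== false` only treats an explicit failure as one. A small demonstration:

```ts
type ExecuteResult = { success?: boolean; output?: unknown }

// A child workflow ending in a response block may return output with no
// `success` flag at all.
const responseBlockResult: ExecuteResult = { output: { message: 'hello' } }

console.log(!responseBlockResult.success)          // true  -- old check: flagged as error
console.log(responseBlockResult.success === false) // false -- new check: treated as success
```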

@@ -1,5 +1,8 @@
import { createLogger } from '@sim/logger' import { createLogger } from '@sim/logger'
import type { NextRequest } from 'next/server' import type { NextRequest } from 'next/server'
import { getHighestPrioritySubscription } from '@/lib/billing/core/plan'
import { getExecutionTimeout } from '@/lib/core/execution-limits'
import type { SubscriptionPlan } from '@/lib/core/rate-limiter/types'
import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware' import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware'
import { mcpService } from '@/lib/mcp/service' import { mcpService } from '@/lib/mcp/service'
import type { McpTool, McpToolCall, McpToolResult } from '@/lib/mcp/types' import type { McpTool, McpToolCall, McpToolResult } from '@/lib/mcp/types'
@@ -7,7 +10,6 @@ import {
categorizeError, categorizeError,
createMcpErrorResponse, createMcpErrorResponse,
createMcpSuccessResponse, createMcpSuccessResponse,
MCP_CONSTANTS,
validateStringParam, validateStringParam,
} from '@/lib/mcp/utils' } from '@/lib/mcp/utils'
@@ -171,13 +173,16 @@ export const POST = withMcpAuth('read')(
arguments: args, arguments: args,
} }
const userSubscription = await getHighestPrioritySubscription(userId)
const executionTimeout = getExecutionTimeout(
userSubscription?.plan as SubscriptionPlan | undefined,
'sync'
)
const result = await Promise.race([ const result = await Promise.race([
mcpService.executeTool(userId, serverId, toolCall, workspaceId), mcpService.executeTool(userId, serverId, toolCall, workspaceId),
new Promise<never>((_, reject) => new Promise<never>((_, reject) =>
setTimeout( setTimeout(() => reject(new Error('Tool execution timeout')), executionTimeout)
() => reject(new Error('Tool execution timeout')),
MCP_CONSTANTS.EXECUTION_TIMEOUT
)
), ),
]) ])
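Extracted into a self-contained form, the race pattern above looks like the sketch below. Two caveats worth keeping in mind: `Promise.race` does not cancel the losing branch, so the tool call keeps running after a timeout unless it is aborted separately, and the timer should be cleared so it does not hold the process open after the race settles.

```ts
async function withTimeout<T>(work: Promise<T>, ms: number): Promise<T> {
  let timer: ReturnType<typeof setTimeout> | undefined
  const timeout = new Promise<never>((_, reject) => {
    timer = setTimeout(() => reject(new Error('Tool execution timeout')), ms)
  })
  try {
    return await Promise.race([work, timeout])
  } finally {
    if (timer) clearTimeout(timer) // don't keep the event loop alive after settling
  }
}

// Usage sketch, with the timeout derived from the caller's subscription plan:
// await withTimeout(mcpService.executeTool(userId, serverId, toolCall, workspaceId), executionTimeout)
```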

@@ -20,6 +20,7 @@ import { z } from 'zod'
import { getEmailSubject, renderInvitationEmail } from '@/components/emails' import { getEmailSubject, renderInvitationEmail } from '@/components/emails'
import { getSession } from '@/lib/auth' import { getSession } from '@/lib/auth'
import { hasAccessControlAccess } from '@/lib/billing' import { hasAccessControlAccess } from '@/lib/billing'
import { syncUsageLimitsFromSubscription } from '@/lib/billing/core/usage'
import { requireStripeClient } from '@/lib/billing/stripe-client' import { requireStripeClient } from '@/lib/billing/stripe-client'
import { getBaseUrl } from '@/lib/core/utils/urls' import { getBaseUrl } from '@/lib/core/utils/urls'
import { sendEmail } from '@/lib/messaging/email/mailer' import { sendEmail } from '@/lib/messaging/email/mailer'
@@ -501,6 +502,18 @@ export async function PUT(
} }
} }
if (status === 'accepted') {
try {
await syncUsageLimitsFromSubscription(session.user.id)
} catch (syncError) {
logger.error('Failed to sync usage limits after joining org', {
userId: session.user.id,
organizationId,
error: syncError,
})
}
}
logger.info(`Organization invitation ${status}`, { logger.info(`Organization invitation ${status}`, {
organizationId, organizationId,
invitationId, invitationId,

@@ -5,6 +5,7 @@ import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server' import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod' import { z } from 'zod'
import { getSession } from '@/lib/auth' import { getSession } from '@/lib/auth'
import { hasActiveSubscription } from '@/lib/billing'
const logger = createLogger('SubscriptionTransferAPI') const logger = createLogger('SubscriptionTransferAPI')
@@ -88,6 +89,14 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
) )
} }
// Check if org already has an active subscription (prevent duplicates)
if (await hasActiveSubscription(organizationId)) {
return NextResponse.json(
{ error: 'Organization already has an active subscription' },
{ status: 409 }
)
}
await db await db
.update(subscription) .update(subscription)
.set({ referenceId: organizationId }) .set({ referenceId: organizationId })
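One caveat about this guard, sketched below: it is a check-then-act sequence, so two concurrent transfers could in principle both pass the check before either writes. The 409 closes the common path; a database-level constraint would close the race entirely. The stub and SQL here are illustrative, not the project's actual helpers or schema.

```ts
// Stub standing in for the billing helper; the real one queries the database.
async function hasActiveSubscription(referenceId: string): Promise<boolean> {
  return referenceId === 'org-with-sub'
}

async function transferGuard(organizationId: string) {
  // Check-then-act: two concurrent transfers can both observe `false` here
  // before either one writes, so the guard narrows the race window rather
  // than closing it.
  if (await hasActiveSubscription(organizationId)) {
    return { status: 409, error: 'Organization already has an active subscription' }
  }
  // A unique partial index would make the invariant hold under concurrency
  // (illustrative SQL, not necessarily the project's schema):
  //   CREATE UNIQUE INDEX one_active_sub_per_ref
  //     ON subscription (reference_id) WHERE status = 'active';
  return { status: 200 }
}

transferGuard('org-with-sub').then(console.log) // { status: 409, ... }
```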

@@ -203,6 +203,10 @@ export const PATCH = withAdminAuthParams<RouteParams>(async (request, context) =
} }
updateData.billingBlocked = body.billingBlocked updateData.billingBlocked = body.billingBlocked
// Clear the reason when unblocking
if (body.billingBlocked === false) {
updateData.billingBlockedReason = null
}
updated.push('billingBlocked') updated.push('billingBlocked')
} }

@@ -1,13 +1,13 @@
import { db, workflow as workflowTable } from '@sim/db'
import { createLogger } from '@sim/logger' import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server' import { type NextRequest, NextResponse } from 'next/server'
import { v4 as uuidv4 } from 'uuid' import { v4 as uuidv4 } from 'uuid'
import { z } from 'zod' import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid' import { checkHybridAuth } from '@/lib/auth/hybrid'
import { getTimeoutErrorMessage, isTimeoutError } from '@/lib/core/execution-limits'
import { generateRequestId } from '@/lib/core/utils/request' import { generateRequestId } from '@/lib/core/utils/request'
import { SSE_HEADERS } from '@/lib/core/utils/sse' import { SSE_HEADERS } from '@/lib/core/utils/sse'
import { markExecutionCancelled } from '@/lib/execution/cancellation' import { markExecutionCancelled } from '@/lib/execution/cancellation'
import { preprocessExecution } from '@/lib/execution/preprocessing'
import { LoggingSession } from '@/lib/logs/execution/logging-session' import { LoggingSession } from '@/lib/logs/execution/logging-session'
import { executeWorkflowCore } from '@/lib/workflows/executor/execution-core' import { executeWorkflowCore } from '@/lib/workflows/executor/execution-core'
import { createSSECallbacks } from '@/lib/workflows/executor/execution-events' import { createSSECallbacks } from '@/lib/workflows/executor/execution-events'
@@ -75,12 +75,31 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
const { startBlockId, sourceSnapshot, input } = validation.data const { startBlockId, sourceSnapshot, input } = validation.data
const executionId = uuidv4() const executionId = uuidv4()
const [workflowRecord] = await db // Run preprocessing checks (billing, rate limits, usage limits)
.select({ workspaceId: workflowTable.workspaceId, userId: workflowTable.userId }) const preprocessResult = await preprocessExecution({
.from(workflowTable) workflowId,
.where(eq(workflowTable.id, workflowId)) userId,
.limit(1) triggerType: 'manual',
executionId,
requestId,
checkRateLimit: false, // Manual executions don't rate limit
checkDeployment: false, // Run-from-block doesn't require deployment
})
if (!preprocessResult.success) {
const { error } = preprocessResult
logger.warn(`[${requestId}] Preprocessing failed for run-from-block`, {
workflowId,
error: error?.message,
statusCode: error?.statusCode,
})
return NextResponse.json(
{ error: error?.message || 'Execution blocked' },
{ status: error?.statusCode || 500 }
)
}
const workflowRecord = preprocessResult.workflowRecord
if (!workflowRecord?.workspaceId) { if (!workflowRecord?.workspaceId) {
return NextResponse.json({ error: 'Workflow not found or has no workspace' }, { status: 404 }) return NextResponse.json({ error: 'Workflow not found or has no workspace' }, { status: 404 })
} }
@@ -92,11 +111,22 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
workflowId, workflowId,
startBlockId, startBlockId,
executedBlocksCount: sourceSnapshot.executedBlocks.length, executedBlocksCount: sourceSnapshot.executedBlocks.length,
billingActorUserId: preprocessResult.actorUserId,
}) })
const loggingSession = new LoggingSession(workflowId, executionId, 'manual', requestId) const loggingSession = new LoggingSession(workflowId, executionId, 'manual', requestId)
const abortController = new AbortController() const abortController = new AbortController()
let isStreamClosed = false let isStreamClosed = false
let isTimedOut = false
const syncTimeout = preprocessResult.executionTimeout?.sync
let timeoutId: NodeJS.Timeout | undefined
if (syncTimeout) {
timeoutId = setTimeout(() => {
isTimedOut = true
abortController.abort()
}, syncTimeout)
}
const stream = new ReadableStream<Uint8Array>({ const stream = new ReadableStream<Uint8Array>({
async start(controller) { async start(controller) {
@@ -148,6 +178,25 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
}) })
if (result.status === 'cancelled') { if (result.status === 'cancelled') {
if (isTimedOut && syncTimeout) {
const timeoutErrorMessage = getTimeoutErrorMessage(null, syncTimeout)
logger.info(`[${requestId}] Run-from-block execution timed out`, {
timeoutMs: syncTimeout,
})
await loggingSession.markAsFailed(timeoutErrorMessage)
sendEvent({
type: 'execution:error',
timestamp: new Date().toISOString(),
executionId,
workflowId,
data: {
error: timeoutErrorMessage,
duration: result.metadata?.duration || 0,
},
})
} else {
sendEvent({ sendEvent({
type: 'execution:cancelled', type: 'execution:cancelled',
timestamp: new Date().toISOString(), timestamp: new Date().toISOString(),
@@ -155,6 +204,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
workflowId, workflowId,
data: { duration: result.metadata?.duration || 0 }, data: { duration: result.metadata?.duration || 0 },
}) })
}
} else { } else {
sendEvent({ sendEvent({
type: 'execution:completed', type: 'execution:completed',
@@ -171,11 +221,25 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
}) })
} }
} catch (error: unknown) { } catch (error: unknown) {
const errorMessage = error instanceof Error ? error.message : 'Unknown error' const isTimeout = isTimeoutError(error) || isTimedOut
logger.error(`[${requestId}] Run-from-block execution failed: ${errorMessage}`) const errorMessage = isTimeout
? getTimeoutErrorMessage(error, syncTimeout)
: error instanceof Error
? error.message
: 'Unknown error'
logger.error(`[${requestId}] Run-from-block execution failed: ${errorMessage}`, {
isTimeout,
})
const executionResult = hasExecutionResult(error) ? error.executionResult : undefined const executionResult = hasExecutionResult(error) ? error.executionResult : undefined
await loggingSession.safeCompleteWithError({
totalDurationMs: executionResult?.metadata?.duration,
error: { message: errorMessage },
traceSpans: executionResult?.logs as any,
})
sendEvent({ sendEvent({
type: 'execution:error', type: 'execution:error',
timestamp: new Date().toISOString(), timestamp: new Date().toISOString(),
@@ -187,6 +251,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
}, },
}) })
} finally { } finally {
if (timeoutId) clearTimeout(timeoutId)
if (!isStreamClosed) { if (!isStreamClosed) {
try { try {
controller.enqueue(new TextEncoder().encode('data: [DONE]\n\n')) controller.enqueue(new TextEncoder().encode('data: [DONE]\n\n'))
@@ -197,6 +262,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
}, },
cancel() { cancel() {
isStreamClosed = true isStreamClosed = true
if (timeoutId) clearTimeout(timeoutId)
abortController.abort() abortController.abort()
markExecutionCancelled(executionId).catch(() => {}) markExecutionCancelled(executionId).catch(() => {})
}, },
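The timeout wiring added to this route follows a reusable pattern: a flag records whether the abort came from the timer, so a `cancelled` result can be reported either as a timeout error or as a genuine user cancellation. A condensed sketch with illustrative names:

```ts
function createExecutionAbort(syncTimeoutMs?: number) {
  const controller = new AbortController()
  let timedOut = false
  let timer: ReturnType<typeof setTimeout> | undefined

  if (syncTimeoutMs) {
    timer = setTimeout(() => {
      timedOut = true // record *why* the abort happened
      controller.abort()
    }, syncTimeoutMs)
  }

  return {
    signal: controller.signal,
    cancel: () => controller.abort(), // user-initiated cancellation
    wasTimeout: () => timedOut,
    cleanup: () => {
      if (timer) clearTimeout(timer)
    },
  }
}

// After execution settles:
//   status === 'cancelled' && wasTimeout()  -> emit execution:error with a timeout message
//   status === 'cancelled' && !wasTimeout() -> emit execution:cancelled
```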

@@ -5,6 +5,7 @@ import { validate as uuidValidate, v4 as uuidv4 } from 'uuid'
import { z } from 'zod' import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid' import { checkHybridAuth } from '@/lib/auth/hybrid'
import { isTriggerDevEnabled } from '@/lib/core/config/feature-flags' import { isTriggerDevEnabled } from '@/lib/core/config/feature-flags'
import { getTimeoutErrorMessage, isTimeoutError } from '@/lib/core/execution-limits'
import { generateRequestId } from '@/lib/core/utils/request' import { generateRequestId } from '@/lib/core/utils/request'
import { SSE_HEADERS } from '@/lib/core/utils/sse' import { SSE_HEADERS } from '@/lib/core/utils/sse'
import { getBaseUrl } from '@/lib/core/utils/urls' import { getBaseUrl } from '@/lib/core/utils/urls'
@@ -120,10 +121,6 @@ type AsyncExecutionParams = {
triggerType: CoreTriggerType triggerType: CoreTriggerType
} }
/**
* Handles async workflow execution by queueing a background job.
* Returns immediately with a 202 Accepted response containing the job ID.
*/
async function handleAsyncExecution(params: AsyncExecutionParams): Promise<NextResponse> { async function handleAsyncExecution(params: AsyncExecutionParams): Promise<NextResponse> {
const { requestId, workflowId, userId, input, triggerType } = params const { requestId, workflowId, userId, input, triggerType } = params
@@ -405,6 +402,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
if (!enableSSE) { if (!enableSSE) {
logger.info(`[${requestId}] Using non-SSE execution (direct JSON response)`) logger.info(`[${requestId}] Using non-SSE execution (direct JSON response)`)
const syncTimeout = preprocessResult.executionTimeout?.sync
try { try {
const metadata: ExecutionMetadata = { const metadata: ExecutionMetadata = {
requestId, requestId,
@@ -438,6 +436,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
includeFileBase64, includeFileBase64,
base64MaxBytes, base64MaxBytes,
stopAfterBlockId, stopAfterBlockId,
abortSignal: syncTimeout ? AbortSignal.timeout(syncTimeout) : undefined,
}) })
const outputWithBase64 = includeFileBase64 const outputWithBase64 = includeFileBase64
@@ -473,11 +472,23 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
return NextResponse.json(filteredResult) return NextResponse.json(filteredResult)
} catch (error: unknown) { } catch (error: unknown) {
const errorMessage = error instanceof Error ? error.message : 'Unknown error' const isTimeout = isTimeoutError(error)
logger.error(`[${requestId}] Non-SSE execution failed: ${errorMessage}`) const errorMessage = isTimeout
? getTimeoutErrorMessage(error, syncTimeout)
: error instanceof Error
? error.message
: 'Unknown error'
logger.error(`[${requestId}] Non-SSE execution failed: ${errorMessage}`, { isTimeout })
const executionResult = hasExecutionResult(error) ? error.executionResult : undefined const executionResult = hasExecutionResult(error) ? error.executionResult : undefined
await loggingSession.safeCompleteWithError({
totalDurationMs: executionResult?.metadata?.duration,
error: { message: errorMessage },
traceSpans: executionResult?.logs as any,
})
return NextResponse.json( return NextResponse.json(
{ {
success: false, success: false,
@@ -491,7 +502,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
} }
: undefined, : undefined,
}, },
{ status: 500 } { status: isTimeout ? 408 : 500 }
) )
} }
} }
@@ -537,6 +548,16 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
const encoder = new TextEncoder() const encoder = new TextEncoder()
const abortController = new AbortController() const abortController = new AbortController()
let isStreamClosed = false let isStreamClosed = false
let isTimedOut = false
const syncTimeout = preprocessResult.executionTimeout?.sync
let timeoutId: NodeJS.Timeout | undefined
if (syncTimeout) {
timeoutId = setTimeout(() => {
isTimedOut = true
abortController.abort()
}, syncTimeout)
}
const stream = new ReadableStream<Uint8Array>({ const stream = new ReadableStream<Uint8Array>({
async start(controller) { async start(controller) {
@@ -763,7 +784,25 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
} }
if (result.status === 'cancelled') { if (result.status === 'cancelled') {
if (isTimedOut && syncTimeout) {
const timeoutErrorMessage = getTimeoutErrorMessage(null, syncTimeout)
logger.info(`[${requestId}] Workflow execution timed out`, { timeoutMs: syncTimeout })
await loggingSession.markAsFailed(timeoutErrorMessage)
sendEvent({
type: 'execution:error',
timestamp: new Date().toISOString(),
executionId,
workflowId,
data: {
error: timeoutErrorMessage,
duration: result.metadata?.duration || 0,
},
})
} else {
logger.info(`[${requestId}] Workflow execution was cancelled`) logger.info(`[${requestId}] Workflow execution was cancelled`)
sendEvent({ sendEvent({
type: 'execution:cancelled', type: 'execution:cancelled',
timestamp: new Date().toISOString(), timestamp: new Date().toISOString(),
@@ -773,6 +812,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
duration: result.metadata?.duration || 0, duration: result.metadata?.duration || 0,
}, },
}) })
}
return return
} }
@@ -799,11 +839,23 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
// Cleanup base64 cache for this execution // Cleanup base64 cache for this execution
await cleanupExecutionBase64Cache(executionId) await cleanupExecutionBase64Cache(executionId)
} catch (error: unknown) { } catch (error: unknown) {
const errorMessage = error instanceof Error ? error.message : 'Unknown error' const isTimeout = isTimeoutError(error) || isTimedOut
logger.error(`[${requestId}] SSE execution failed: ${errorMessage}`) const errorMessage = isTimeout
? getTimeoutErrorMessage(error, syncTimeout)
: error instanceof Error
? error.message
: 'Unknown error'
logger.error(`[${requestId}] SSE execution failed: ${errorMessage}`, { isTimeout })
const executionResult = hasExecutionResult(error) ? error.executionResult : undefined const executionResult = hasExecutionResult(error) ? error.executionResult : undefined
await loggingSession.safeCompleteWithError({
totalDurationMs: executionResult?.metadata?.duration,
error: { message: errorMessage },
traceSpans: executionResult?.logs as any,
})
sendEvent({ sendEvent({
type: 'execution:error', type: 'execution:error',
timestamp: new Date().toISOString(), timestamp: new Date().toISOString(),
@@ -815,18 +867,18 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
}, },
}) })
} finally { } finally {
if (timeoutId) clearTimeout(timeoutId)
if (!isStreamClosed) { if (!isStreamClosed) {
try { try {
controller.enqueue(encoder.encode('data: [DONE]\n\n')) controller.enqueue(encoder.encode('data: [DONE]\n\n'))
controller.close() controller.close()
} catch { } catch {}
// Stream already closed - nothing to do
}
} }
} }
}, },
cancel() { cancel() {
isStreamClosed = true isStreamClosed = true
if (timeoutId) clearTimeout(timeoutId)
logger.info(`[${requestId}] Client aborted SSE stream, signalling cancellation`) logger.info(`[${requestId}] Client aborted SSE stream, signalling cancellation`)
abortController.abort() abortController.abort()
markExecutionCancelled(executionId).catch(() => {}) markExecutionCancelled(executionId).catch(() => {})
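On the non-streaming path, the route hands `AbortSignal.timeout(syncTimeout)` to the executor and maps timeout failures to HTTP 408. `AbortSignal.timeout()` aborts with a `DOMException` named `TimeoutError` rather than a plain abort, which is what a detector like the project's `isTimeoutError` can key on. A sketch of that detection; the helper shown is an assumption about its shape, not the actual implementation:

```ts
// Assumed shape of a timeout detector; the project's isTimeoutError may differ.
function looksLikeTimeout(error: unknown): boolean {
  if (error instanceof DOMException && error.name === 'TimeoutError') return true
  return error instanceof Error && /timed? ?out/i.test(error.message)
}

async function demo() {
  try {
    const res = await fetch('https://example.com/slow', {
      signal: AbortSignal.timeout(5_000), // aborts with a TimeoutError DOMException
    })
    console.log(res.status)
  } catch (err) {
    const status = looksLikeTimeout(err) ? 408 : 500
    console.log('responding with HTTP', status)
  }
}

demo()
```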

@@ -1,5 +1,5 @@
import { memo, useCallback } from 'react' import { memo, useCallback } from 'react'
import { ArrowLeftRight, ArrowUpDown, Circle, CircleOff, Lock, LogOut, Unlock } from 'lucide-react' import { ArrowLeftRight, ArrowUpDown, Circle, CircleOff, LogOut } from 'lucide-react'
import { Button, Copy, PlayOutline, Tooltip, Trash2 } from '@/components/emcn' import { Button, Copy, PlayOutline, Tooltip, Trash2 } from '@/components/emcn'
import { cn } from '@/lib/core/utils/cn' import { cn } from '@/lib/core/utils/cn'
import { isInputDefinitionTrigger } from '@/lib/workflows/triggers/input-definition-triggers' import { isInputDefinitionTrigger } from '@/lib/workflows/triggers/input-definition-triggers'
@@ -49,7 +49,6 @@ export const ActionBar = memo(
collaborativeBatchRemoveBlocks, collaborativeBatchRemoveBlocks,
collaborativeBatchToggleBlockEnabled, collaborativeBatchToggleBlockEnabled,
collaborativeBatchToggleBlockHandles, collaborativeBatchToggleBlockHandles,
collaborativeBatchToggleLocked,
} = useCollaborativeWorkflow() } = useCollaborativeWorkflow()
const { setPendingSelection } = useWorkflowRegistry() const { setPendingSelection } = useWorkflowRegistry()
const { handleRunFromBlock } = useWorkflowExecution() const { handleRunFromBlock } = useWorkflowExecution()
@@ -85,28 +84,16 @@ export const ActionBar = memo(
) )
}, [blockId, addNotification, collaborativeBatchAddBlocks, setPendingSelection]) }, [blockId, addNotification, collaborativeBatchAddBlocks, setPendingSelection])
const { const { isEnabled, horizontalHandles, parentId, parentType } = useWorkflowStore(
isEnabled,
horizontalHandles,
parentId,
parentType,
isLocked,
isParentLocked,
isParentDisabled,
} = useWorkflowStore(
useCallback( useCallback(
(state) => { (state) => {
const block = state.blocks[blockId] const block = state.blocks[blockId]
const parentId = block?.data?.parentId const parentId = block?.data?.parentId
const parentBlock = parentId ? state.blocks[parentId] : undefined
return { return {
isEnabled: block?.enabled ?? true, isEnabled: block?.enabled ?? true,
horizontalHandles: block?.horizontalHandles ?? false, horizontalHandles: block?.horizontalHandles ?? false,
parentId, parentId,
parentType: parentBlock?.type, parentType: parentId ? state.blocks[parentId]?.type : undefined,
isLocked: block?.locked ?? false,
isParentLocked: parentBlock?.locked ?? false,
isParentDisabled: parentBlock ? !parentBlock.enabled : false,
} }
}, },
[blockId] [blockId]
@@ -174,7 +161,6 @@ export const ActionBar = memo(
{!isNoteBlock && !isInsideSubflow && ( {!isNoteBlock && !isInsideSubflow && (
<Tooltip.Root> <Tooltip.Root>
<Tooltip.Trigger asChild> <Tooltip.Trigger asChild>
<span className='inline-flex'>
<Button <Button
variant='ghost' variant='ghost'
onClick={(e) => { onClick={(e) => {
@@ -188,13 +174,12 @@ export const ActionBar = memo(
> >
<PlayOutline className={ICON_SIZE} /> <PlayOutline className={ICON_SIZE} />
</Button> </Button>
</span>
</Tooltip.Trigger> </Tooltip.Trigger>
<Tooltip.Content side='top'> <Tooltip.Content side='top'>
{(() => { {(() => {
if (disabled) return getTooltipMessage('Run from block') if (disabled) return getTooltipMessage('Run from block')
if (isExecuting) return 'Execution in progress' if (isExecuting) return 'Execution in progress'
if (!dependenciesSatisfied) return 'Run previous blocks first' if (!dependenciesSatisfied) return 'Run upstream blocks first'
return 'Run from block' return 'Run from block'
})()} })()}
</Tooltip.Content> </Tooltip.Content>
@@ -208,54 +193,18 @@ export const ActionBar = memo(
variant='ghost' variant='ghost'
onClick={(e) => { onClick={(e) => {
e.stopPropagation() e.stopPropagation()
// Can't enable if parent is disabled (must enable parent first) if (!disabled) {
const cantEnable = !isEnabled && isParentDisabled
if (!disabled && !isLocked && !isParentLocked && !cantEnable) {
collaborativeBatchToggleBlockEnabled([blockId]) collaborativeBatchToggleBlockEnabled([blockId])
} }
}} }}
className={ACTION_BUTTON_STYLES} className={ACTION_BUTTON_STYLES}
disabled={ disabled={disabled}
disabled || isLocked || isParentLocked || (!isEnabled && isParentDisabled)
}
> >
{isEnabled ? <Circle className={ICON_SIZE} /> : <CircleOff className={ICON_SIZE} />} {isEnabled ? <Circle className={ICON_SIZE} /> : <CircleOff className={ICON_SIZE} />}
</Button> </Button>
</Tooltip.Trigger> </Tooltip.Trigger>
<Tooltip.Content side='top'> <Tooltip.Content side='top'>
{isLocked || isParentLocked {getTooltipMessage(isEnabled ? 'Disable Block' : 'Enable Block')}
? 'Block is locked'
: !isEnabled && isParentDisabled
? 'Parent container is disabled'
: getTooltipMessage(isEnabled ? 'Disable Block' : 'Enable Block')}
</Tooltip.Content>
</Tooltip.Root>
)}
{userPermissions.canAdmin && (
<Tooltip.Root>
<Tooltip.Trigger asChild>
<Button
variant='ghost'
onClick={(e) => {
e.stopPropagation()
// Can't unlock a block if its parent container is locked
if (!disabled && !(isLocked && isParentLocked)) {
collaborativeBatchToggleLocked([blockId])
}
}}
className={ACTION_BUTTON_STYLES}
disabled={disabled || (isLocked && isParentLocked)}
>
{isLocked ? <Unlock className={ICON_SIZE} /> : <Lock className={ICON_SIZE} />}
</Button>
</Tooltip.Trigger>
<Tooltip.Content side='top'>
{isLocked && isParentLocked
? 'Parent container is locked'
: isLocked
? 'Unlock Block'
: 'Lock Block'}
</Tooltip.Content> </Tooltip.Content>
</Tooltip.Root> </Tooltip.Root>
)} )}
@@ -288,12 +237,12 @@ export const ActionBar = memo(
variant='ghost' variant='ghost'
onClick={(e) => { onClick={(e) => {
e.stopPropagation() e.stopPropagation()
if (!disabled && !isLocked && !isParentLocked) { if (!disabled) {
collaborativeBatchToggleBlockHandles([blockId]) collaborativeBatchToggleBlockHandles([blockId])
} }
}} }}
className={ACTION_BUTTON_STYLES} className={ACTION_BUTTON_STYLES}
disabled={disabled || isLocked || isParentLocked} disabled={disabled}
> >
{horizontalHandles ? ( {horizontalHandles ? (
<ArrowLeftRight className={ICON_SIZE} /> <ArrowLeftRight className={ICON_SIZE} />
@@ -303,9 +252,7 @@ export const ActionBar = memo(
</Button> </Button>
</Tooltip.Trigger> </Tooltip.Trigger>
<Tooltip.Content side='top'> <Tooltip.Content side='top'>
{isLocked || isParentLocked {getTooltipMessage(horizontalHandles ? 'Vertical Ports' : 'Horizontal Ports')}
? 'Block is locked'
: getTooltipMessage(horizontalHandles ? 'Vertical Ports' : 'Horizontal Ports')}
</Tooltip.Content> </Tooltip.Content>
</Tooltip.Root> </Tooltip.Root>
)} )}
@@ -317,23 +264,19 @@ export const ActionBar = memo(
variant='ghost' variant='ghost'
onClick={(e) => { onClick={(e) => {
e.stopPropagation() e.stopPropagation()
if (!disabled && userPermissions.canEdit && !isLocked && !isParentLocked) { if (!disabled && userPermissions.canEdit) {
window.dispatchEvent( window.dispatchEvent(
new CustomEvent('remove-from-subflow', { detail: { blockIds: [blockId] } }) new CustomEvent('remove-from-subflow', { detail: { blockIds: [blockId] } })
) )
} }
}} }}
className={ACTION_BUTTON_STYLES} className={ACTION_BUTTON_STYLES}
disabled={disabled || !userPermissions.canEdit || isLocked || isParentLocked} disabled={disabled || !userPermissions.canEdit}
> >
<LogOut className={ICON_SIZE} /> <LogOut className={ICON_SIZE} />
</Button> </Button>
</Tooltip.Trigger> </Tooltip.Trigger>
<Tooltip.Content side='top'> <Tooltip.Content side='top'>{getTooltipMessage('Remove from Subflow')}</Tooltip.Content>
{isLocked || isParentLocked
? 'Block is locked'
: getTooltipMessage('Remove from Subflow')}
</Tooltip.Content>
</Tooltip.Root> </Tooltip.Root>
)} )}
@@ -343,19 +286,17 @@ export const ActionBar = memo(
variant='ghost' variant='ghost'
onClick={(e) => { onClick={(e) => {
e.stopPropagation() e.stopPropagation()
if (!disabled && !isLocked && !isParentLocked) { if (!disabled) {
collaborativeBatchRemoveBlocks([blockId]) collaborativeBatchRemoveBlocks([blockId])
} }
}} }}
className={ACTION_BUTTON_STYLES} className={ACTION_BUTTON_STYLES}
disabled={disabled || isLocked || isParentLocked} disabled={disabled}
> >
<Trash2 className={ICON_SIZE} /> <Trash2 className={ICON_SIZE} />
</Button> </Button>
</Tooltip.Trigger> </Tooltip.Trigger>
<Tooltip.Content side='top'> <Tooltip.Content side='top'>{getTooltipMessage('Delete Block')}</Tooltip.Content>
{isLocked || isParentLocked ? 'Block is locked' : getTooltipMessage('Delete Block')}
</Tooltip.Content>
</Tooltip.Root> </Tooltip.Root>
</div> </div>
) )
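A side note on the trimmed-down store subscription in this component: an object-returning selector produces a fresh object on every store change, so in plain Zustand it needs shallow comparison to avoid spurious re-renders. Assuming the store is standard Zustand (as the `useShallow` import elsewhere in this diff suggests), a minimal version of the pattern:

```ts
import { create } from 'zustand'
import { useShallow } from 'zustand/react/shallow'

type Block = { enabled: boolean; horizontalHandles: boolean }
type WorkflowState = { blocks: Record<string, Block> }

const useWorkflowStore = create<WorkflowState>(() => ({
  blocks: { a: { enabled: true, horizontalHandles: false } },
}))

// Shallow-compared object selector: the component re-renders only when one of
// the selected fields actually changes, not on every store update.
export function useBlockInfo(blockId: string) {
  return useWorkflowStore(
    useShallow((s) => ({
      isEnabled: s.blocks[blockId]?.enabled ?? true,
      horizontalHandles: s.blocks[blockId]?.horizontalHandles ?? false,
    }))
  )
}
```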

@@ -20,9 +20,6 @@ export interface BlockInfo {
horizontalHandles: boolean horizontalHandles: boolean
parentId?: string parentId?: string
parentType?: string parentType?: string
locked?: boolean
isParentLocked?: boolean
isParentDisabled?: boolean
} }
/** /**
@@ -49,17 +46,10 @@ export interface BlockMenuProps {
showRemoveFromSubflow?: boolean showRemoveFromSubflow?: boolean
/** Whether run from block is available (has snapshot, was executed, not inside subflow) */ /** Whether run from block is available (has snapshot, was executed, not inside subflow) */
canRunFromBlock?: boolean canRunFromBlock?: boolean
/** Whether to disable edit actions (user can't edit OR blocks are locked) */
disableEdit?: boolean disableEdit?: boolean
/** Whether the user has edit permission (ignoring locked state) */
userCanEdit?: boolean
isExecuting?: boolean isExecuting?: boolean
/** Whether the selected block is a trigger (has no incoming edges) */ /** Whether the selected block is a trigger (has no incoming edges) */
isPositionalTrigger?: boolean isPositionalTrigger?: boolean
/** Callback to toggle locked state of selected blocks */
onToggleLocked?: () => void
/** Whether the user has admin permissions */
canAdmin?: boolean
} }
/** /**
@@ -88,22 +78,13 @@ export function BlockMenu({
showRemoveFromSubflow = false, showRemoveFromSubflow = false,
canRunFromBlock = false, canRunFromBlock = false,
disableEdit = false, disableEdit = false,
userCanEdit = true,
isExecuting = false, isExecuting = false,
isPositionalTrigger = false, isPositionalTrigger = false,
onToggleLocked,
canAdmin = false,
}: BlockMenuProps) { }: BlockMenuProps) {
const isSingleBlock = selectedBlocks.length === 1 const isSingleBlock = selectedBlocks.length === 1
const allEnabled = selectedBlocks.every((b) => b.enabled) const allEnabled = selectedBlocks.every((b) => b.enabled)
const allDisabled = selectedBlocks.every((b) => !b.enabled) const allDisabled = selectedBlocks.every((b) => !b.enabled)
const allLocked = selectedBlocks.every((b) => b.locked)
const allUnlocked = selectedBlocks.every((b) => !b.locked)
// Can't unlock blocks that have locked parents
const hasBlockWithLockedParent = selectedBlocks.some((b) => b.locked && b.isParentLocked)
// Can't enable blocks that have disabled parents
const hasBlockWithDisabledParent = selectedBlocks.some((b) => !b.enabled && b.isParentDisabled)
const hasSingletonBlock = selectedBlocks.some( const hasSingletonBlock = selectedBlocks.some(
(b) => (b) =>
@@ -127,12 +108,6 @@ export function BlockMenu({
return 'Toggle Enabled' return 'Toggle Enabled'
} }
const getToggleLockedLabel = () => {
if (allLocked) return 'Unlock'
if (allUnlocked) return 'Lock'
return 'Toggle Lock'
}
return ( return (
<Popover <Popover
open={isOpen} open={isOpen}
@@ -164,7 +139,7 @@ export function BlockMenu({
</PopoverItem> </PopoverItem>
<PopoverItem <PopoverItem
className='group' className='group'
disabled={!userCanEdit || !hasClipboard} disabled={disableEdit || !hasClipboard}
onClick={() => { onClick={() => {
onPaste() onPaste()
onClose() onClose()
@@ -175,7 +150,7 @@ export function BlockMenu({
</PopoverItem> </PopoverItem>
{!hasSingletonBlock && ( {!hasSingletonBlock && (
<PopoverItem <PopoverItem
disabled={!userCanEdit} disabled={disableEdit}
onClick={() => { onClick={() => {
onDuplicate() onDuplicate()
onClose() onClose()
@@ -189,15 +164,13 @@ export function BlockMenu({
{!allNoteBlocks && <PopoverDivider />} {!allNoteBlocks && <PopoverDivider />}
{!allNoteBlocks && ( {!allNoteBlocks && (
<PopoverItem <PopoverItem
disabled={disableEdit || hasBlockWithDisabledParent} disabled={disableEdit}
onClick={() => { onClick={() => {
if (!disableEdit && !hasBlockWithDisabledParent) {
onToggleEnabled() onToggleEnabled()
onClose() onClose()
}
}} }}
> >
{hasBlockWithDisabledParent ? 'Parent is disabled' : getToggleEnabledLabel()} {getToggleEnabledLabel()}
</PopoverItem> </PopoverItem>
)} )}
{!allNoteBlocks && !isSubflow && ( {!allNoteBlocks && !isSubflow && (
@@ -222,19 +195,6 @@ export function BlockMenu({
Remove from Subflow Remove from Subflow
</PopoverItem> </PopoverItem>
)} )}
{canAdmin && onToggleLocked && (
<PopoverItem
disabled={hasBlockWithLockedParent}
onClick={() => {
if (!hasBlockWithLockedParent) {
onToggleLocked()
onClose()
}
}}
>
{hasBlockWithLockedParent ? 'Parent is locked' : getToggleLockedLabel()}
</PopoverItem>
)}
{/* Single block actions */} {/* Single block actions */}
{isSingleBlock && <PopoverDivider />} {isSingleBlock && <PopoverDivider />}

@@ -34,8 +34,6 @@ export interface CanvasMenuProps {
canUndo?: boolean canUndo?: boolean
canRedo?: boolean canRedo?: boolean
isInvitationsDisabled?: boolean isInvitationsDisabled?: boolean
/** Whether the workflow has locked blocks (disables auto-layout) */
hasLockedBlocks?: boolean
} }
/** /**
@@ -62,7 +60,6 @@ export function CanvasMenu({
disableEdit = false, disableEdit = false,
canUndo = false, canUndo = false,
canRedo = false, canRedo = false,
hasLockedBlocks = false,
}: CanvasMenuProps) { }: CanvasMenuProps) {
return ( return (
<Popover <Popover
@@ -132,12 +129,11 @@ export function CanvasMenu({
</PopoverItem> </PopoverItem>
<PopoverItem <PopoverItem
className='group' className='group'
disabled={disableEdit || hasLockedBlocks} disabled={disableEdit}
onClick={() => { onClick={() => {
onAutoLayout() onAutoLayout()
onClose() onClose()
}} }}
title={hasLockedBlocks ? 'Unlock blocks to use auto-layout' : undefined}
> >
<span>Auto-layout</span> <span>Auto-layout</span>
<span className='ml-auto opacity-70 group-hover:opacity-100'>L</span> <span className='ml-auto opacity-70 group-hover:opacity-100'>L</span>

@@ -9,9 +9,7 @@ import {
ChevronUp, ChevronUp,
ExternalLink, ExternalLink,
Loader2, Loader2,
Lock,
Pencil, Pencil,
Unlock,
} from 'lucide-react' } from 'lucide-react'
import { useParams } from 'next/navigation' import { useParams } from 'next/navigation'
import { useShallow } from 'zustand/react/shallow' import { useShallow } from 'zustand/react/shallow'
@@ -48,11 +46,16 @@ import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'
import { usePanelEditorStore } from '@/stores/panel' import { usePanelEditorStore } from '@/stores/panel'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store' import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useSubBlockStore } from '@/stores/workflows/subblock/store' import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
/** Stable empty object to avoid creating new references */ /** Stable empty object to avoid creating new references */
const EMPTY_SUBBLOCK_VALUES = {} as Record<string, any> const EMPTY_SUBBLOCK_VALUES = {} as Record<string, any>
/** Shared style for dashed divider lines */
const DASHED_DIVIDER_STYLE = {
backgroundImage:
'repeating-linear-gradient(to right, var(--border) 0px, var(--border) 6px, transparent 6px, transparent 12px)',
} as const
/** /**
* Icon component for rendering block icons. * Icon component for rendering block icons.
* *
@@ -92,39 +95,23 @@ export function Editor() {
const blockConfig = currentBlock ? getBlock(currentBlock.type) : null const blockConfig = currentBlock ? getBlock(currentBlock.type) : null
const title = currentBlock?.name || 'Editor' const title = currentBlock?.name || 'Editor'
// Check if selected block is a subflow (loop or parallel)
const isSubflow = const isSubflow =
currentBlock && (currentBlock.type === 'loop' || currentBlock.type === 'parallel') currentBlock && (currentBlock.type === 'loop' || currentBlock.type === 'parallel')
// Get subflow display properties from configs
const subflowConfig = isSubflow ? (currentBlock.type === 'loop' ? LoopTool : ParallelTool) : null const subflowConfig = isSubflow ? (currentBlock.type === 'loop' ? LoopTool : ParallelTool) : null
// Check if selected block is a workflow block
const isWorkflowBlock = const isWorkflowBlock =
currentBlock && (currentBlock.type === 'workflow' || currentBlock.type === 'workflow_input') currentBlock && (currentBlock.type === 'workflow' || currentBlock.type === 'workflow_input')
// Get workspace ID from params
const params = useParams() const params = useParams()
const workspaceId = params.workspaceId as string const workspaceId = params.workspaceId as string
// Refs for resize functionality
const subBlocksRef = useRef<HTMLDivElement>(null) const subBlocksRef = useRef<HTMLDivElement>(null)
// Get user permissions
const userPermissions = useUserPermissionsContext() const userPermissions = useUserPermissionsContext()
// Check if block is locked (or inside a locked container) and compute edit permission
// Locked blocks cannot be edited by anyone (admins can only lock/unlock)
const blocks = useWorkflowStore((state) => state.blocks)
const parentId = currentBlock?.data?.parentId as string | undefined
const isParentLocked = parentId ? (blocks[parentId]?.locked ?? false) : false
const isLocked = (currentBlock?.locked ?? false) || isParentLocked
const canEditBlock = userPermissions.canEdit && !isLocked
// Get active workflow ID
const activeWorkflowId = useWorkflowRegistry((state) => state.activeWorkflowId) const activeWorkflowId = useWorkflowRegistry((state) => state.activeWorkflowId)
// Get block properties (advanced/trigger modes)
const { advancedMode, triggerMode } = useEditorBlockProperties( const { advancedMode, triggerMode } = useEditorBlockProperties(
currentBlockId, currentBlockId,
currentWorkflow.isSnapshotView currentWorkflow.isSnapshotView
@@ -156,20 +143,19 @@ export function Editor() {
[subBlocksForCanonical] [subBlocksForCanonical]
) )
const canonicalModeOverrides = currentBlock?.data?.canonicalModes const canonicalModeOverrides = currentBlock?.data?.canonicalModes
const advancedValuesPresent = hasAdvancedValues( const advancedValuesPresent = useMemo(
subBlocksForCanonical, () => hasAdvancedValues(subBlocksForCanonical, blockSubBlockValues, canonicalIndex),
blockSubBlockValues, [subBlocksForCanonical, blockSubBlockValues, canonicalIndex]
canonicalIndex
) )
const displayAdvancedOptions = canEditBlock ? advancedMode : advancedMode || advancedValuesPresent const displayAdvancedOptions = userPermissions.canEdit
? advancedMode
: advancedMode || advancedValuesPresent
const hasAdvancedOnlyFields = useMemo(() => { const hasAdvancedOnlyFields = useMemo(() => {
for (const subBlock of subBlocksForCanonical) { for (const subBlock of subBlocksForCanonical) {
// Must be standalone advanced (mode: 'advanced' without canonicalParamId)
if (subBlock.mode !== 'advanced') continue if (subBlock.mode !== 'advanced') continue
if (canonicalIndex.canonicalIdBySubBlockId[subBlock.id]) continue if (canonicalIndex.canonicalIdBySubBlockId[subBlock.id]) continue
// Check condition - skip if condition not met for current values
if ( if (
subBlock.condition && subBlock.condition &&
!evaluateSubBlockCondition(subBlock.condition, blockSubBlockValues) !evaluateSubBlockCondition(subBlock.condition, blockSubBlockValues)
@@ -182,7 +168,6 @@ export function Editor() {
return false return false
}, [subBlocksForCanonical, canonicalIndex.canonicalIdBySubBlockId, blockSubBlockValues]) }, [subBlocksForCanonical, canonicalIndex.canonicalIdBySubBlockId, blockSubBlockValues])
// Get subblock layout using custom hook
const { subBlocks, stateToUse: subBlockState } = useEditorSubblockLayout( const { subBlocks, stateToUse: subBlockState } = useEditorSubblockLayout(
blockConfig || ({} as any), blockConfig || ({} as any),
currentBlockId || '', currentBlockId || '',
@@ -215,41 +200,43 @@ export function Editor() {
return { regularSubBlocks: regular, advancedOnlySubBlocks: advancedOnly } return { regularSubBlocks: regular, advancedOnlySubBlocks: advancedOnly }
}, [subBlocks, canonicalIndex.canonicalIdBySubBlockId]) }, [subBlocks, canonicalIndex.canonicalIdBySubBlockId])
// Get block connections
const { incomingConnections, hasIncomingConnections } = useBlockConnections(currentBlockId || '') const { incomingConnections, hasIncomingConnections } = useBlockConnections(currentBlockId || '')
// Connections resize hook
const { handleMouseDown: handleConnectionsResizeMouseDown, isResizing } = useConnectionsResize({ const { handleMouseDown: handleConnectionsResizeMouseDown, isResizing } = useConnectionsResize({
subBlocksRef, subBlocksRef,
}) })
// Collaborative actions
const { const {
collaborativeSetBlockCanonicalMode, collaborativeSetBlockCanonicalMode,
collaborativeUpdateBlockName, collaborativeUpdateBlockName,
collaborativeToggleBlockAdvancedMode, collaborativeToggleBlockAdvancedMode,
collaborativeBatchToggleLocked,
} = useCollaborativeWorkflow() } = useCollaborativeWorkflow()
// Advanced mode toggle handler
const handleToggleAdvancedMode = useCallback(() => { const handleToggleAdvancedMode = useCallback(() => {
if (!currentBlockId || !canEditBlock) return if (!currentBlockId || !userPermissions.canEdit) return
collaborativeToggleBlockAdvancedMode(currentBlockId) collaborativeToggleBlockAdvancedMode(currentBlockId)
}, [currentBlockId, canEditBlock, collaborativeToggleBlockAdvancedMode]) }, [currentBlockId, userPermissions.canEdit, collaborativeToggleBlockAdvancedMode])
// Rename state
const [isRenaming, setIsRenaming] = useState(false) const [isRenaming, setIsRenaming] = useState(false)
const [editedName, setEditedName] = useState('') const [editedName, setEditedName] = useState('')
const nameInputRef = useRef<HTMLInputElement>(null)
/**
* Ref callback that auto-selects the input text when mounted.
*/
const nameInputRefCallback = useCallback((element: HTMLInputElement | null) => {
if (element) {
element.select()
}
}, [])
/** /**
* Handles starting the rename process. * Handles starting the rename process.
*/ */
const handleStartRename = useCallback(() => { const handleStartRename = useCallback(() => {
if (!canEditBlock || !currentBlock) return if (!userPermissions.canEdit || !currentBlock) return
setEditedName(currentBlock.name || '') setEditedName(currentBlock.name || '')
setIsRenaming(true) setIsRenaming(true)
}, [canEditBlock, currentBlock]) }, [userPermissions.canEdit, currentBlock])
/** /**
* Handles saving the renamed block. * Handles saving the renamed block.
@@ -261,7 +248,6 @@ export function Editor() {
if (trimmedName && trimmedName !== currentBlock?.name) { if (trimmedName && trimmedName !== currentBlock?.name) {
const result = collaborativeUpdateBlockName(currentBlockId, trimmedName) const result = collaborativeUpdateBlockName(currentBlockId, trimmedName)
if (!result.success) { if (!result.success) {
// Keep rename mode open on error so user can correct the name
return return
} }
} }
@@ -276,14 +262,6 @@ export function Editor() {
setEditedName('') setEditedName('')
}, []) }, [])
// Focus input when entering rename mode
useEffect(() => {
if (isRenaming && nameInputRef.current) {
nameInputRef.current.select()
}
}, [isRenaming])
// Trigger rename mode when signaled from context menu
useEffect(() => { useEffect(() => {
if (shouldFocusRename && currentBlock) { if (shouldFocusRename && currentBlock) {
handleStartRename() handleStartRename()
@@ -294,17 +272,13 @@ export function Editor() {
/** /**
* Handles opening documentation link in a new secure tab. * Handles opening documentation link in a new secure tab.
*/ */
const handleOpenDocs = () => { const handleOpenDocs = useCallback(() => {
const docsLink = isSubflow ? subflowConfig?.docsLink : blockConfig?.docsLink const docsLink = isSubflow ? subflowConfig?.docsLink : blockConfig?.docsLink
if (docsLink) { window.open(docsLink || 'https://docs.sim.ai/quick-reference', '_blank', 'noopener,noreferrer')
window.open(docsLink, '_blank', 'noopener,noreferrer') }, [isSubflow, subflowConfig?.docsLink, blockConfig?.docsLink])
}
}
// Get child workflow ID for workflow blocks
const childWorkflowId = isWorkflowBlock ? blockSubBlockValues?.workflowId : null const childWorkflowId = isWorkflowBlock ? blockSubBlockValues?.workflowId : null
// Fetch child workflow state for preview (only for workflow blocks with a selected workflow)
const { data: childWorkflowState, isLoading: isLoadingChildWorkflow } = const { data: childWorkflowState, isLoading: isLoadingChildWorkflow } =
useWorkflowState(childWorkflowId) useWorkflowState(childWorkflowId)
@@ -317,7 +291,6 @@ export function Editor() {
} }
}, [childWorkflowId, workspaceId]) }, [childWorkflowId, workspaceId])
// Determine if connections are at minimum height (collapsed state)
const isConnectionsAtMinHeight = connectionsHeight <= 35 const isConnectionsAtMinHeight = connectionsHeight <= 35
return ( return (
@@ -338,7 +311,7 @@ export function Editor() {
)} )}
{isRenaming ? ( {isRenaming ? (
<input <input
ref={nameInputRef} ref={nameInputRefCallback}
type='text' type='text'
value={editedName} value={editedName}
onChange={(e) => setEditedName(e.target.value)} onChange={(e) => setEditedName(e.target.value)}
@@ -368,36 +341,6 @@ export function Editor() {
)} )}
</div> </div>
<div className='flex shrink-0 items-center gap-[8px]'> <div className='flex shrink-0 items-center gap-[8px]'>
{/* Locked indicator - clickable to unlock if user has admin permissions, block is locked, and parent is not locked */}
{isLocked && currentBlock && (
<Tooltip.Root>
<Tooltip.Trigger asChild>
{userPermissions.canAdmin && currentBlock.locked && !isParentLocked ? (
<Button
variant='ghost'
className='p-0'
onClick={() => collaborativeBatchToggleLocked([currentBlockId!])}
aria-label='Unlock block'
>
<Unlock className='h-[14px] w-[14px] text-[var(--text-secondary)]' />
</Button>
) : (
<div className='flex items-center justify-center'>
<Lock className='h-[14px] w-[14px] text-[var(--text-secondary)]' />
</div>
)}
</Tooltip.Trigger>
<Tooltip.Content side='top'>
<p>
{isParentLocked
? 'Parent container is locked'
: userPermissions.canAdmin && currentBlock.locked
? 'Unlock block'
: 'Block is locked'}
</p>
</Tooltip.Content>
</Tooltip.Root>
)}
{/* Rename button */} {/* Rename button */}
{currentBlock && ( {currentBlock && (
<Tooltip.Root> <Tooltip.Root>
@@ -406,7 +349,7 @@ export function Editor() {
variant='ghost' variant='ghost'
className='p-0' className='p-0'
onClick={isRenaming ? handleSaveRename : handleStartRename} onClick={isRenaming ? handleSaveRename : handleStartRename}
disabled={!canEditBlock} disabled={!userPermissions.canEdit}
aria-label={isRenaming ? 'Save name' : 'Rename block'} aria-label={isRenaming ? 'Save name' : 'Rename block'}
> >
{isRenaming ? ( {isRenaming ? (
@@ -439,7 +382,6 @@ export function Editor() {
</Tooltip.Content> </Tooltip.Content>
</Tooltip.Root> </Tooltip.Root>
)} */} )} */}
{currentBlock && (isSubflow ? subflowConfig?.docsLink : blockConfig?.docsLink) && (
<Tooltip.Root> <Tooltip.Root>
<Tooltip.Trigger asChild> <Tooltip.Trigger asChild>
<Button <Button
@@ -455,7 +397,6 @@ export function Editor() {
<p>Open docs</p> <p>Open docs</p>
</Tooltip.Content> </Tooltip.Content>
</Tooltip.Root> </Tooltip.Root>
)}
</div> </div>
</div> </div>
@@ -474,7 +415,7 @@ export function Editor() {
incomingConnections={incomingConnections} incomingConnections={incomingConnections}
handleConnectionsResizeMouseDown={handleConnectionsResizeMouseDown} handleConnectionsResizeMouseDown={handleConnectionsResizeMouseDown}
toggleConnectionsCollapsed={toggleConnectionsCollapsed} toggleConnectionsCollapsed={toggleConnectionsCollapsed}
userCanEdit={canEditBlock} userCanEdit={userPermissions.canEdit}
isConnectionsAtMinHeight={isConnectionsAtMinHeight} isConnectionsAtMinHeight={isConnectionsAtMinHeight}
/> />
) : ( ) : (
@@ -535,13 +476,7 @@ export function Editor() {
</div> </div>
</div> </div>
<div className='subblock-divider px-[2px] pt-[16px] pb-[13px]'> <div className='subblock-divider px-[2px] pt-[16px] pb-[13px]'>
<div <div className='h-[1.25px]' style={DASHED_DIVIDER_STYLE} />
className='h-[1.25px]'
style={{
backgroundImage:
'repeating-linear-gradient(to right, var(--border) 0px, var(--border) 6px, transparent 6px, transparent 12px)',
}}
/>
</div> </div>
</> </>
)} )}
@@ -582,14 +517,14 @@ export function Editor() {
config={subBlock} config={subBlock}
isPreview={false} isPreview={false}
subBlockValues={subBlockState} subBlockValues={subBlockState}
disabled={!canEditBlock} disabled={!userPermissions.canEdit}
fieldDiffStatus={undefined} fieldDiffStatus={undefined}
allowExpandInPreview={false} allowExpandInPreview={false}
canonicalToggle={ canonicalToggle={
isCanonicalSwap && canonicalMode && canonicalId isCanonicalSwap && canonicalMode && canonicalId
? { ? {
mode: canonicalMode, mode: canonicalMode,
disabled: !canEditBlock, disabled: !userPermissions.canEdit,
onToggle: () => { onToggle: () => {
if (!currentBlockId) return if (!currentBlockId) return
const nextMode = const nextMode =
@@ -606,28 +541,16 @@ export function Editor() {
/> />
{showDivider && ( {showDivider && (
<div className='subblock-divider px-[2px] pt-[16px] pb-[13px]'> <div className='subblock-divider px-[2px] pt-[16px] pb-[13px]'>
<div <div className='h-[1.25px]' style={DASHED_DIVIDER_STYLE} />
className='h-[1.25px]'
style={{
backgroundImage:
'repeating-linear-gradient(to right, var(--border) 0px, var(--border) 6px, transparent 6px, transparent 12px)',
}}
/>
</div> </div>
)} )}
</div> </div>
) )
})} })}
{hasAdvancedOnlyFields && canEditBlock && ( {hasAdvancedOnlyFields && userPermissions.canEdit && (
<div className='flex items-center gap-[10px] px-[2px] pt-[14px] pb-[12px]'> <div className='flex items-center gap-[10px] px-[2px] pt-[14px] pb-[12px]'>
<div <div className='h-[1.25px] flex-1' style={DASHED_DIVIDER_STYLE} />
className='h-[1.25px] flex-1'
style={{
backgroundImage:
'repeating-linear-gradient(to right, var(--border) 0px, var(--border) 6px, transparent 6px, transparent 12px)',
}}
/>
<button <button
type='button' type='button'
onClick={handleToggleAdvancedMode} onClick={handleToggleAdvancedMode}
@@ -640,13 +563,7 @@ export function Editor() {
className={`h-[14px] w-[14px] transition-transform duration-200 ${displayAdvancedOptions ? 'rotate-180' : ''}`} className={`h-[14px] w-[14px] transition-transform duration-200 ${displayAdvancedOptions ? 'rotate-180' : ''}`}
/> />
</button> </button>
<div <div className='h-[1.25px] flex-1' style={DASHED_DIVIDER_STYLE} />
className='h-[1.25px] flex-1'
style={{
backgroundImage:
'repeating-linear-gradient(to right, var(--border) 0px, var(--border) 6px, transparent 6px, transparent 12px)',
}}
/>
</div> </div>
)} )}
@@ -664,19 +581,13 @@ export function Editor() {
config={subBlock} config={subBlock}
isPreview={false} isPreview={false}
subBlockValues={subBlockState} subBlockValues={subBlockState}
disabled={!canEditBlock} disabled={!userPermissions.canEdit}
fieldDiffStatus={undefined} fieldDiffStatus={undefined}
allowExpandInPreview={false} allowExpandInPreview={false}
/> />
{index < advancedOnlySubBlocks.length - 1 && ( {index < advancedOnlySubBlocks.length - 1 && (
<div className='subblock-divider px-[2px] pt-[16px] pb-[13px]'> <div className='subblock-divider px-[2px] pt-[16px] pb-[13px]'>
<div <div className='h-[1.25px]' style={DASHED_DIVIDER_STYLE} />
className='h-[1.25px]'
style={{
backgroundImage:
'repeating-linear-gradient(to right, var(--border) 0px, var(--border) 6px, transparent 6px, transparent 12px)',
}}
/>
</div> </div>
)} )}
</div> </div>
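These hunks replace each copy of the inline dashed-divider style with a shared DASHED_DIVIDER_STYLE constant. The constant's definition sits outside the shown context; a plausible sketch, assuming it simply hoists the inline style the dividers previously duplicated:

import type { CSSProperties } from 'react'

// Assumed shape of the hoisted constant: identical to the inline
// repeating-linear-gradient style removed from each divider above.
const DASHED_DIVIDER_STYLE: CSSProperties = {
  backgroundImage:
    'repeating-linear-gradient(to right, var(--border) 0px, var(--border) 6px, transparent 6px, transparent 12px)',
}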

View File

@@ -45,13 +45,11 @@ import { useWorkflowExecution } from '@/app/workspace/[workspaceId]/w/[workflowI
import { useDeleteWorkflow, useImportWorkflow } from '@/app/workspace/[workspaceId]/w/hooks' import { useDeleteWorkflow, useImportWorkflow } from '@/app/workspace/[workspaceId]/w/hooks'
import { usePermissionConfig } from '@/hooks/use-permission-config' import { usePermissionConfig } from '@/hooks/use-permission-config'
import { useChatStore } from '@/stores/chat/store' import { useChatStore } from '@/stores/chat/store'
import { useNotificationStore } from '@/stores/notifications/store'
import type { PanelTab } from '@/stores/panel' import type { PanelTab } from '@/stores/panel'
import { usePanelStore, useVariablesStore as usePanelVariablesStore } from '@/stores/panel' import { usePanelStore, useVariablesStore as usePanelVariablesStore } from '@/stores/panel'
import { useVariablesStore } from '@/stores/variables/store' import { useVariablesStore } from '@/stores/variables/store'
import { getWorkflowWithValues } from '@/stores/workflows' import { getWorkflowWithValues } from '@/stores/workflows'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store' import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
const logger = createLogger('Panel') const logger = createLogger('Panel')
/** /**
@@ -121,11 +119,6 @@ export const Panel = memo(function Panel() {
hydration.phase === 'state-loading' hydration.phase === 'state-loading'
const { handleAutoLayout: autoLayoutWithFitView } = useAutoLayout(activeWorkflowId || null) const { handleAutoLayout: autoLayoutWithFitView } = useAutoLayout(activeWorkflowId || null)
// Check for locked blocks (disables auto-layout)
const hasLockedBlocks = useWorkflowStore((state) =>
Object.values(state.blocks).some((block) => block.locked)
)
// Delete workflow hook // Delete workflow hook
const { isDeleting, handleDeleteWorkflow } = useDeleteWorkflow({ const { isDeleting, handleDeleteWorkflow } = useDeleteWorkflow({
workspaceId, workspaceId,
@@ -237,24 +230,11 @@ export const Panel = memo(function Panel() {
setIsAutoLayouting(true) setIsAutoLayouting(true)
try { try {
const result = await autoLayoutWithFitView() await autoLayoutWithFitView()
if (!result.success && result.error) {
useNotificationStore.getState().addNotification({
level: 'info',
message: result.error,
workflowId: activeWorkflowId || undefined,
})
}
} finally { } finally {
setIsAutoLayouting(false) setIsAutoLayouting(false)
} }
}, [ }, [isExecuting, userPermissions.canEdit, isAutoLayouting, autoLayoutWithFitView])
isExecuting,
userPermissions.canEdit,
isAutoLayouting,
autoLayoutWithFitView,
activeWorkflowId,
])
/** /**
* Handles exporting workflow as JSON * Handles exporting workflow as JSON
@@ -424,10 +404,7 @@ export const Panel = memo(function Panel() {
<PopoverContent align='start' side='bottom' sideOffset={8}> <PopoverContent align='start' side='bottom' sideOffset={8}>
<PopoverItem <PopoverItem
onClick={handleAutoLayout} onClick={handleAutoLayout}
disabled={ disabled={isExecuting || !userPermissions.canEdit || isAutoLayouting}
isExecuting || !userPermissions.canEdit || isAutoLayouting || hasLockedBlocks
}
title={hasLockedBlocks ? 'Unlock blocks to use auto-layout' : undefined}
> >
<Layout className='h-3 w-3' animate={isAutoLayouting} variant='clockwise' /> <Layout className='h-3 w-3' animate={isAutoLayouting} variant='clockwise' />
<span>Auto layout</span> <span>Auto layout</span>

View File

@@ -80,7 +80,6 @@ export const SubflowNodeComponent = memo(({ data, id, selected }: NodeProps<Subf
: undefined : undefined
const isEnabled = currentBlock?.enabled ?? true const isEnabled = currentBlock?.enabled ?? true
const isLocked = currentBlock?.locked ?? false
const isPreview = data?.isPreview || false const isPreview = data?.isPreview || false
// Focus state // Focus state
@@ -201,10 +200,7 @@ export const SubflowNodeComponent = memo(({ data, id, selected }: NodeProps<Subf
{blockName} {blockName}
</span> </span>
</div> </div>
<div className='flex items-center gap-1'>
{!isEnabled && <Badge variant='gray-secondary'>disabled</Badge>} {!isEnabled && <Badge variant='gray-secondary'>disabled</Badge>}
{isLocked && <Badge variant='gray-secondary'>locked</Badge>}
</div>
</div> </div>
{!isPreview && ( {!isPreview && (

View File

@@ -18,8 +18,6 @@ export interface UseBlockStateReturn {
diffStatus: DiffStatus diffStatus: DiffStatus
/** Whether this is a deleted block in diff mode */ /** Whether this is a deleted block in diff mode */
isDeletedBlock: boolean isDeletedBlock: boolean
/** Whether the block is locked */
isLocked: boolean
} }
/** /**
@@ -42,11 +40,6 @@ export function useBlockState(
? (data.blockState?.enabled ?? true) ? (data.blockState?.enabled ?? true)
: (currentBlock?.enabled ?? true) : (currentBlock?.enabled ?? true)
// Determine if block is locked
const isLocked = data.isPreview
? (data.blockState?.locked ?? false)
: (currentBlock?.locked ?? false)
// Get diff status // Get diff status
const diffStatus: DiffStatus = const diffStatus: DiffStatus =
currentWorkflow.isDiffMode && currentBlock && hasDiffStatus(currentBlock) currentWorkflow.isDiffMode && currentBlock && hasDiffStatus(currentBlock)
@@ -75,6 +68,5 @@ export function useBlockState(
isActive, isActive,
diffStatus, diffStatus,
isDeletedBlock: isDeletedBlock ?? false, isDeletedBlock: isDeletedBlock ?? false,
isLocked,
} }
} }

View File

@@ -672,7 +672,6 @@ export const WorkflowBlock = memo(function WorkflowBlock({
currentWorkflow, currentWorkflow,
activeWorkflowId, activeWorkflowId,
isEnabled, isEnabled,
isLocked,
handleClick, handleClick,
hasRing, hasRing,
ringStyles, ringStyles,
@@ -1101,7 +1100,7 @@ export const WorkflowBlock = memo(function WorkflowBlock({
{name} {name}
</span> </span>
</div> </div>
<div className='relative z-10 flex flex-shrink-0 items-center gap-1'> <div className='relative z-10 flex flex-shrink-0 items-center gap-2'>
{isWorkflowSelector && {isWorkflowSelector &&
childWorkflowId && childWorkflowId &&
typeof childIsDeployed === 'boolean' && typeof childIsDeployed === 'boolean' &&
@@ -1134,7 +1133,6 @@ export const WorkflowBlock = memo(function WorkflowBlock({
</Tooltip.Root> </Tooltip.Root>
)} )}
{!isEnabled && <Badge variant='gray-secondary'>disabled</Badge>} {!isEnabled && <Badge variant='gray-secondary'>disabled</Badge>}
{isLocked && <Badge variant='gray-secondary'>locked</Badge>}
{type === 'schedule' && shouldShowScheduleBadge && scheduleInfo?.isDisabled && ( {type === 'schedule' && shouldShowScheduleBadge && scheduleInfo?.isDisabled && (
<Tooltip.Root> <Tooltip.Root>

View File

@@ -47,7 +47,6 @@ export function useBlockVisual({
isActive: isExecuting, isActive: isExecuting,
diffStatus, diffStatus,
isDeletedBlock, isDeletedBlock,
isLocked,
} = useBlockState(blockId, currentWorkflow, data) } = useBlockState(blockId, currentWorkflow, data)
const currentBlockId = usePanelEditorStore((state) => state.currentBlockId) const currentBlockId = usePanelEditorStore((state) => state.currentBlockId)
@@ -104,7 +103,6 @@ export function useBlockVisual({
currentWorkflow, currentWorkflow,
activeWorkflowId, activeWorkflowId,
isEnabled, isEnabled,
isLocked,
handleClick, handleClick,
hasRing, hasRing,
ringStyles, ringStyles,

View File

@@ -31,8 +31,7 @@ export function useCanvasContextMenu({ blocks, getNodes, setNodes }: UseCanvasCo
nodes.map((n) => { nodes.map((n) => {
const block = blocks[n.id] const block = blocks[n.id]
const parentId = block?.data?.parentId const parentId = block?.data?.parentId
const parentBlock = parentId ? blocks[parentId] : undefined const parentType = parentId ? blocks[parentId]?.type : undefined
const parentType = parentBlock?.type
return { return {
id: n.id, id: n.id,
type: block?.type || '', type: block?.type || '',
@@ -40,9 +39,6 @@ export function useCanvasContextMenu({ blocks, getNodes, setNodes }: UseCanvasCo
horizontalHandles: block?.horizontalHandles ?? false, horizontalHandles: block?.horizontalHandles ?? false,
parentId, parentId,
parentType, parentType,
locked: block?.locked ?? false,
isParentLocked: parentBlock?.locked ?? false,
isParentDisabled: parentBlock ? !parentBlock.enabled : false,
} }
}), }),
[blocks] [blocks]

View File

@@ -27,7 +27,7 @@ import { useExecutionStore } from '@/stores/execution'
import { useNotificationStore } from '@/stores/notifications' import { useNotificationStore } from '@/stores/notifications'
import { useVariablesStore } from '@/stores/panel' import { useVariablesStore } from '@/stores/panel'
import { useEnvironmentStore } from '@/stores/settings/environment' import { useEnvironmentStore } from '@/stores/settings/environment'
import { type ConsoleEntry, useTerminalConsoleStore } from '@/stores/terminal' import { useTerminalConsoleStore } from '@/stores/terminal'
import { useWorkflowDiffStore } from '@/stores/workflow-diff' import { useWorkflowDiffStore } from '@/stores/workflow-diff'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store' import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { mergeSubblockState } from '@/stores/workflows/utils' import { mergeSubblockState } from '@/stores/workflows/utils'
@@ -1153,16 +1153,10 @@ export function useWorkflowExecution() {
logs: accumulatedBlockLogs, logs: accumulatedBlockLogs,
} }
// Only add workflow-level error if no blocks have executed yet if (activeWorkflowId) {
// This catches pre-execution errors (validation, serialization, etc.) cancelRunningEntries(activeWorkflowId)
// Block execution errors are already logged via onBlockError callback }
const { entries } = useTerminalConsoleStore.getState()
const existingLogs = entries.filter(
(log: ConsoleEntry) => log.executionId === executionId
)
if (existingLogs.length === 0) {
// No blocks executed yet - this is a pre-execution error
addConsole({ addConsole({
input: {}, input: {},
output: {}, output: {},
@@ -1172,11 +1166,16 @@ export function useWorkflowExecution() {
startedAt: new Date(Date.now() - (data.duration || 0)).toISOString(), startedAt: new Date(Date.now() - (data.duration || 0)).toISOString(),
endedAt: new Date().toISOString(), endedAt: new Date().toISOString(),
workflowId: activeWorkflowId, workflowId: activeWorkflowId,
blockId: 'validation', blockId: 'workflow-error',
executionId, executionId,
blockName: 'Workflow Validation', blockName: 'Workflow Error',
blockType: 'validation', blockType: 'error',
}) })
},
onExecutionCancelled: () => {
if (activeWorkflowId) {
cancelRunningEntries(activeWorkflowId)
} }
}, },
}, },
@@ -1718,13 +1717,28 @@ export function useWorkflowExecution() {
'Workflow was modified. Run the workflow again to enable running from block.', 'Workflow was modified. Run the workflow again to enable running from block.',
workflowId, workflowId,
}) })
} else {
addNotification({
level: 'error',
message: data.error || 'Run from block failed',
workflowId,
})
} }
cancelRunningEntries(workflowId)
addConsole({
input: {},
output: {},
success: false,
error: data.error,
durationMs: data.duration || 0,
startedAt: new Date(Date.now() - (data.duration || 0)).toISOString(),
endedAt: new Date().toISOString(),
workflowId,
blockId: 'workflow-error',
executionId,
blockName: 'Workflow Error',
blockType: 'error',
})
},
onExecutionCancelled: () => {
cancelRunningEntries(workflowId)
}, },
}, },
}) })
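The error and cancellation paths above now funnel through cancelRunningEntries plus a single 'workflow-error' console entry. The helper itself is not in this diff; a minimal sketch, assuming a zustand-style terminal console store whose entries carry the same workflowId and endedAt fields used by addConsole above:

// Hypothetical sketch of cancelRunningEntries (the real helper lives in
// the terminal console store): close out entries still running for the
// given workflow so the console no longer shows them as in-flight.
cancelRunningEntries: (workflowId: string) =>
  set((state) => ({
    entries: state.entries.map((entry) =>
      entry.workflowId === workflowId && !entry.endedAt
        ? { ...entry, success: false, endedAt: new Date().toISOString() }
        : entry
    ),
  })),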

View File

@@ -52,16 +52,6 @@ export async function applyAutoLayoutAndUpdateStore(
return { success: false, error: 'No blocks to layout' } return { success: false, error: 'No blocks to layout' }
} }
// Check for locked blocks - auto-layout is disabled when blocks are locked
const hasLockedBlocks = Object.values(blocks).some((block) => block.locked)
if (hasLockedBlocks) {
logger.info('Auto layout skipped: workflow contains locked blocks', { workflowId })
return {
success: false,
error: 'Auto-layout is disabled when blocks are locked. Unlock blocks to use auto-layout.',
}
}
// Merge with default options // Merge with default options
const layoutOptions = { const layoutOptions = {
spacing: { spacing: {

View File

@@ -1,87 +0,0 @@
import type { BlockState } from '@/stores/workflows/workflow/types'
/**
* Result of filtering protected blocks from a deletion operation
*/
export interface FilterProtectedBlocksResult {
/** Block IDs that can be deleted (not protected) */
deletableIds: string[]
/** Block IDs that are protected and cannot be deleted */
protectedIds: string[]
/** Whether all blocks are protected (deletion should be cancelled entirely) */
allProtected: boolean
}
/**
* Checks if a block is protected from editing/deletion.
* A block is protected if it is locked or if its parent container is locked.
*
* @param blockId - The ID of the block to check
* @param blocks - Record of all blocks in the workflow
* @returns True if the block is protected
*/
export function isBlockProtected(blockId: string, blocks: Record<string, BlockState>): boolean {
const block = blocks[blockId]
if (!block) return false
// Block is locked directly
if (block.locked) return true
// Block is inside a locked container
const parentId = block.data?.parentId
if (parentId && blocks[parentId]?.locked) return true
return false
}
/**
* Checks if an edge is protected from modification.
* An edge is protected if either its source or target block is protected.
*
* @param edge - The edge to check (must have source and target)
* @param blocks - Record of all blocks in the workflow
* @returns True if the edge is protected
*/
export function isEdgeProtected(
edge: { source: string; target: string },
blocks: Record<string, BlockState>
): boolean {
return isBlockProtected(edge.source, blocks) || isBlockProtected(edge.target, blocks)
}
/**
* Filters out protected blocks from a list of block IDs for deletion.
* Protected blocks are those that are locked or inside a locked container.
*
* @param blockIds - Array of block IDs to filter
* @param blocks - Record of all blocks in the workflow
* @returns Result containing deletable IDs, protected IDs, and whether all are protected
*/
export function filterProtectedBlocks(
blockIds: string[],
blocks: Record<string, BlockState>
): FilterProtectedBlocksResult {
const protectedIds = blockIds.filter((id) => isBlockProtected(id, blocks))
const deletableIds = blockIds.filter((id) => !protectedIds.includes(id))
return {
deletableIds,
protectedIds,
allProtected: protectedIds.length === blockIds.length && blockIds.length > 0,
}
}
/**
* Checks if any blocks in the selection are protected.
* Useful for determining if edit actions should be disabled.
*
* @param blockIds - Array of block IDs to check
* @param blocks - Record of all blocks in the workflow
* @returns True if any block is protected
*/
export function hasProtectedBlocks(
blockIds: string[],
blocks: Record<string, BlockState>
): boolean {
return blockIds.some((id) => isBlockProtected(id, blocks))
}

View File

@@ -1,5 +1,4 @@
export * from './auto-layout-utils' export * from './auto-layout-utils'
export * from './block-protection-utils'
export * from './block-ring-utils' export * from './block-ring-utils'
export * from './node-position-utils' export * from './node-position-utils'
export * from './workflow-canvas-helpers' export * from './workflow-canvas-helpers'

View File

@@ -55,10 +55,7 @@ import {
clearDragHighlights, clearDragHighlights,
computeClampedPositionUpdates, computeClampedPositionUpdates,
estimateBlockDimensions, estimateBlockDimensions,
filterProtectedBlocks,
getClampedPositionForNode, getClampedPositionForNode,
isBlockProtected,
isEdgeProtected,
isInEditableElement, isInEditableElement,
resolveParentChildSelectionConflicts, resolveParentChildSelectionConflicts,
validateTriggerPaste, validateTriggerPaste,
@@ -546,7 +543,6 @@ const WorkflowContent = React.memo(() => {
collaborativeBatchRemoveBlocks, collaborativeBatchRemoveBlocks,
collaborativeBatchToggleBlockEnabled, collaborativeBatchToggleBlockEnabled,
collaborativeBatchToggleBlockHandles, collaborativeBatchToggleBlockHandles,
collaborativeBatchToggleLocked,
undo, undo,
redo, redo,
} = useCollaborativeWorkflow() } = useCollaborativeWorkflow()
@@ -1073,27 +1069,8 @@ const WorkflowContent = React.memo(() => {
const handleContextDelete = useCallback(() => { const handleContextDelete = useCallback(() => {
const blockIds = contextMenuBlocks.map((b) => b.id) const blockIds = contextMenuBlocks.map((b) => b.id)
const { deletableIds, protectedIds, allProtected } = filterProtectedBlocks(blockIds, blocks) collaborativeBatchRemoveBlocks(blockIds)
}, [contextMenuBlocks, collaborativeBatchRemoveBlocks])
if (protectedIds.length > 0) {
if (allProtected) {
addNotification({
level: 'info',
message: 'Cannot delete locked blocks or blocks inside locked containers',
workflowId: activeWorkflowId || undefined,
})
return
}
addNotification({
level: 'info',
message: `Skipped ${protectedIds.length} protected block(s)`,
workflowId: activeWorkflowId || undefined,
})
}
if (deletableIds.length > 0) {
collaborativeBatchRemoveBlocks(deletableIds)
}
}, [contextMenuBlocks, collaborativeBatchRemoveBlocks, addNotification, activeWorkflowId, blocks])
const handleContextToggleEnabled = useCallback(() => { const handleContextToggleEnabled = useCallback(() => {
const blockIds = contextMenuBlocks.map((block) => block.id) const blockIds = contextMenuBlocks.map((block) => block.id)
@@ -1105,11 +1082,6 @@ const WorkflowContent = React.memo(() => {
collaborativeBatchToggleBlockHandles(blockIds) collaborativeBatchToggleBlockHandles(blockIds)
}, [contextMenuBlocks, collaborativeBatchToggleBlockHandles]) }, [contextMenuBlocks, collaborativeBatchToggleBlockHandles])
const handleContextToggleLocked = useCallback(() => {
const blockIds = contextMenuBlocks.map((block) => block.id)
collaborativeBatchToggleLocked(blockIds)
}, [contextMenuBlocks, collaborativeBatchToggleLocked])
const handleContextRemoveFromSubflow = useCallback(() => { const handleContextRemoveFromSubflow = useCallback(() => {
const blocksToRemove = contextMenuBlocks.filter( const blocksToRemove = contextMenuBlocks.filter(
(block) => block.parentId && (block.parentType === 'loop' || block.parentType === 'parallel') (block) => block.parentId && (block.parentType === 'loop' || block.parentType === 'parallel')
@@ -1979,6 +1951,7 @@ const WorkflowContent = React.memo(() => {
const loadingWorkflowRef = useRef<string | null>(null) const loadingWorkflowRef = useRef<string | null>(null)
const currentWorkflowExists = Boolean(workflows[workflowIdParam]) const currentWorkflowExists = Boolean(workflows[workflowIdParam])
/** Initializes workflow when it exists in registry and needs hydration. */
useEffect(() => { useEffect(() => {
const currentId = workflowIdParam const currentId = workflowIdParam
const currentWorkspaceHydration = hydration.workspaceId const currentWorkspaceHydration = hydration.workspaceId
@@ -2155,7 +2128,6 @@ const WorkflowContent = React.memo(() => {
parentId: block.data?.parentId, parentId: block.data?.parentId,
extent: block.data?.extent || undefined, extent: block.data?.extent || undefined,
dragHandle: '.workflow-drag-handle', dragHandle: '.workflow-drag-handle',
draggable: !isBlockProtected(block.id, blocks),
data: { data: {
...block.data, ...block.data,
name: block.name, name: block.name,
@@ -2191,7 +2163,6 @@ const WorkflowContent = React.memo(() => {
position, position,
parentId: block.data?.parentId, parentId: block.data?.parentId,
dragHandle, dragHandle,
draggable: !isBlockProtected(block.id, blocks),
extent: (() => { extent: (() => {
// Clamp children to subflow body (exclude header) // Clamp children to subflow body (exclude header)
const parentId = block.data?.parentId as string | undefined const parentId = block.data?.parentId as string | undefined
@@ -2520,18 +2491,12 @@ const WorkflowContent = React.memo(() => {
const edgeIdsToRemove = changes const edgeIdsToRemove = changes
.filter((change: any) => change.type === 'remove') .filter((change: any) => change.type === 'remove')
.map((change: any) => change.id) .map((change: any) => change.id)
.filter((edgeId: string) => {
// Prevent removing edges connected to protected blocks
const edge = edges.find((e) => e.id === edgeId)
if (!edge) return true
return !isEdgeProtected(edge, blocks)
})
if (edgeIdsToRemove.length > 0) { if (edgeIdsToRemove.length > 0) {
collaborativeBatchRemoveEdges(edgeIdsToRemove) collaborativeBatchRemoveEdges(edgeIdsToRemove)
} }
}, },
[collaborativeBatchRemoveEdges, edges, blocks] [collaborativeBatchRemoveEdges]
) )
/** /**
@@ -2593,16 +2558,6 @@ const WorkflowContent = React.memo(() => {
if (!sourceNode || !targetNode) return if (!sourceNode || !targetNode) return
// Prevent connections to/from protected blocks
if (isEdgeProtected(connection, blocks)) {
addNotification({
level: 'info',
message: 'Cannot connect to locked blocks or blocks inside locked containers',
workflowId: activeWorkflowId || undefined,
})
return
}
// Get parent information (handle container start node case) // Get parent information (handle container start node case)
const sourceParentId = const sourceParentId =
blocks[sourceNode.id]?.data?.parentId || blocks[sourceNode.id]?.data?.parentId ||
@@ -2665,7 +2620,7 @@ const WorkflowContent = React.memo(() => {
connectionCompletedRef.current = true connectionCompletedRef.current = true
} }
}, },
[addEdge, getNodes, blocks, addNotification, activeWorkflowId] [addEdge, getNodes, blocks]
) )
/** /**
@@ -2760,9 +2715,6 @@ const WorkflowContent = React.memo(() => {
// Only consider container nodes that aren't the dragged node // Only consider container nodes that aren't the dragged node
if (n.type !== 'subflowNode' || n.id === node.id) return false if (n.type !== 'subflowNode' || n.id === node.id) return false
// Don't allow dropping into locked containers
if (blocks[n.id]?.locked) return false
// Get the container's absolute position // Get the container's absolute position
const containerAbsolutePos = getNodeAbsolutePosition(n.id) const containerAbsolutePos = getNodeAbsolutePosition(n.id)
@@ -2855,11 +2807,6 @@ const WorkflowContent = React.memo(() => {
/** Captures initial parent ID and position when drag starts. */ /** Captures initial parent ID and position when drag starts. */
const onNodeDragStart = useCallback( const onNodeDragStart = useCallback(
(_event: React.MouseEvent, node: any) => { (_event: React.MouseEvent, node: any) => {
// Prevent dragging protected blocks
if (isBlockProtected(node.id, blocks)) {
return
}
// Store the original parent ID when starting to drag // Store the original parent ID when starting to drag
const currentParentId = blocks[node.id]?.data?.parentId || null const currentParentId = blocks[node.id]?.data?.parentId || null
setDragStartParentId(currentParentId) setDragStartParentId(currentParentId)
@@ -2888,7 +2835,7 @@ const WorkflowContent = React.memo(() => {
} }
}) })
}, },
[blocks, setDragStartPosition, getNodes, setPotentialParentId] [blocks, setDragStartPosition, getNodes, potentialParentId, setPotentialParentId]
) )
/** Handles node drag stop to establish parent-child relationships. */ /** Handles node drag stop to establish parent-child relationships. */
@@ -2950,18 +2897,6 @@ const WorkflowContent = React.memo(() => {
// Don't process parent changes if the node hasn't actually changed parent or is being moved within same parent // Don't process parent changes if the node hasn't actually changed parent or is being moved within same parent
if (potentialParentId === dragStartParentId) return if (potentialParentId === dragStartParentId) return
// Prevent moving locked blocks out of locked containers
// Unlocked blocks (e.g., duplicates) can be moved out freely
if (dragStartParentId && blocks[dragStartParentId]?.locked && blocks[node.id]?.locked) {
addNotification({
level: 'info',
message: 'Cannot move locked blocks out of locked containers',
workflowId: activeWorkflowId || undefined,
})
setPotentialParentId(dragStartParentId) // Reset to original parent
return
}
// Check if this is a starter block - starter blocks should never be in containers // Check if this is a starter block - starter blocks should never be in containers
const isStarterBlock = node.data?.type === 'starter' const isStarterBlock = node.data?.type === 'starter'
if (isStarterBlock) { if (isStarterBlock) {
@@ -3358,16 +3293,6 @@ const WorkflowContent = React.memo(() => {
/** Stable delete handler to avoid creating new function references per edge. */ /** Stable delete handler to avoid creating new function references per edge. */
const handleEdgeDelete = useCallback( const handleEdgeDelete = useCallback(
(edgeId: string) => { (edgeId: string) => {
// Prevent removing edges connected to protected blocks
const edge = edges.find((e) => e.id === edgeId)
if (edge && isEdgeProtected(edge, blocks)) {
addNotification({
level: 'info',
message: 'Cannot remove connections from locked blocks',
workflowId: activeWorkflowId || undefined,
})
return
}
removeEdge(edgeId) removeEdge(edgeId)
// Remove this edge from selection (find by edge ID value) // Remove this edge from selection (find by edge ID value)
setSelectedEdges((prev) => { setSelectedEdges((prev) => {
@@ -3380,7 +3305,7 @@ const WorkflowContent = React.memo(() => {
return next return next
}) })
}, },
[removeEdge, edges, blocks, addNotification, activeWorkflowId] [removeEdge]
) )
/** Transforms edges to include selection state and delete handlers. Memoized to prevent re-renders. */ /** Transforms edges to include selection state and delete handlers. Memoized to prevent re-renders. */
@@ -3421,15 +3346,9 @@ const WorkflowContent = React.memo(() => {
// Handle edge deletion first (edges take priority if selected) // Handle edge deletion first (edges take priority if selected)
if (selectedEdges.size > 0) { if (selectedEdges.size > 0) {
// Get all selected edge IDs and filter out edges connected to protected blocks // Get all selected edge IDs and batch delete them
const edgeIds = Array.from(selectedEdges.values()).filter((edgeId) => { const edgeIds = Array.from(selectedEdges.values())
const edge = edges.find((e) => e.id === edgeId)
if (!edge) return true
return !isEdgeProtected(edge, blocks)
})
if (edgeIds.length > 0) {
collaborativeBatchRemoveEdges(edgeIds) collaborativeBatchRemoveEdges(edgeIds)
}
setSelectedEdges(new Map()) setSelectedEdges(new Map())
return return
} }
@@ -3446,29 +3365,7 @@ const WorkflowContent = React.memo(() => {
event.preventDefault() event.preventDefault()
const selectedIds = selectedNodes.map((node) => node.id) const selectedIds = selectedNodes.map((node) => node.id)
const { deletableIds, protectedIds, allProtected } = filterProtectedBlocks( collaborativeBatchRemoveBlocks(selectedIds)
selectedIds,
blocks
)
if (protectedIds.length > 0) {
if (allProtected) {
addNotification({
level: 'info',
message: 'Cannot delete locked blocks or blocks inside locked containers',
workflowId: activeWorkflowId || undefined,
})
return
}
addNotification({
level: 'info',
message: `Skipped ${protectedIds.length} protected block(s)`,
workflowId: activeWorkflowId || undefined,
})
}
if (deletableIds.length > 0) {
collaborativeBatchRemoveBlocks(deletableIds)
}
} }
window.addEventListener('keydown', handleKeyDown) window.addEventListener('keydown', handleKeyDown)
@@ -3479,10 +3376,6 @@ const WorkflowContent = React.memo(() => {
getNodes, getNodes,
collaborativeBatchRemoveBlocks, collaborativeBatchRemoveBlocks,
effectivePermissions.canEdit, effectivePermissions.canEdit,
blocks,
edges,
addNotification,
activeWorkflowId,
]) ])
return ( return (
@@ -3603,18 +3496,12 @@ const WorkflowContent = React.memo(() => {
(b) => b.parentId && (b.parentType === 'loop' || b.parentType === 'parallel') (b) => b.parentId && (b.parentType === 'loop' || b.parentType === 'parallel')
)} )}
canRunFromBlock={runFromBlockState.canRun} canRunFromBlock={runFromBlockState.canRun}
disableEdit={ disableEdit={!effectivePermissions.canEdit}
!effectivePermissions.canEdit ||
contextMenuBlocks.some((b) => b.locked || b.isParentLocked)
}
userCanEdit={effectivePermissions.canEdit}
isExecuting={isExecuting} isExecuting={isExecuting}
isPositionalTrigger={ isPositionalTrigger={
contextMenuBlocks.length === 1 && contextMenuBlocks.length === 1 &&
edges.filter((e) => e.target === contextMenuBlocks[0]?.id).length === 0 edges.filter((e) => e.target === contextMenuBlocks[0]?.id).length === 0
} }
onToggleLocked={handleContextToggleLocked}
canAdmin={effectivePermissions.canAdmin}
/> />
<CanvasMenu <CanvasMenu
@@ -3637,7 +3524,6 @@ const WorkflowContent = React.memo(() => {
disableEdit={!effectivePermissions.canEdit} disableEdit={!effectivePermissions.canEdit}
canUndo={canUndo} canUndo={canUndo}
canRedo={canRedo} canRedo={canRedo}
hasLockedBlocks={Object.values(blocks).some((b) => b.locked)}
/> />
</> </>
)} )}

View File

@@ -0,0 +1,625 @@
import { EnrichSoIcon } from '@/components/icons'
import type { BlockConfig } from '@/blocks/types'
import { AuthMode } from '@/blocks/types'
export const EnrichBlock: BlockConfig = {
type: 'enrich',
name: 'Enrich',
description: 'B2B data enrichment and LinkedIn intelligence with Enrich.so',
authMode: AuthMode.ApiKey,
longDescription:
'Access real-time B2B data intelligence with Enrich.so. Enrich profiles from email addresses, find work emails from LinkedIn, verify email deliverability, search for people and companies, and analyze LinkedIn post engagement.',
docsLink: 'https://docs.enrich.so/',
category: 'tools',
bgColor: '#E5E5E6',
icon: EnrichSoIcon,
subBlocks: [
{
id: 'operation',
title: 'Operation',
type: 'dropdown',
options: [
// Person/Profile Enrichment
{ label: 'Email to Profile', id: 'email_to_profile' },
{ label: 'Email to Person (Lite)', id: 'email_to_person_lite' },
{ label: 'LinkedIn Profile Enrichment', id: 'linkedin_profile' },
// Email Finding
{ label: 'Find Email', id: 'find_email' },
{ label: 'LinkedIn to Work Email', id: 'linkedin_to_work_email' },
{ label: 'LinkedIn to Personal Email', id: 'linkedin_to_personal_email' },
// Phone Finding
{ label: 'Phone Finder (LinkedIn)', id: 'phone_finder' },
{ label: 'Email to Phone', id: 'email_to_phone' },
// Email Verification
{ label: 'Verify Email', id: 'verify_email' },
{ label: 'Disposable Email Check', id: 'disposable_email_check' },
// IP/Company Lookup
{ label: 'Email to IP', id: 'email_to_ip' },
{ label: 'IP to Company', id: 'ip_to_company' },
// Company Enrichment
{ label: 'Company Lookup', id: 'company_lookup' },
{ label: 'Company Funding & Traffic', id: 'company_funding' },
{ label: 'Company Revenue', id: 'company_revenue' },
// Search
{ label: 'Search People', id: 'search_people' },
{ label: 'Search Company', id: 'search_company' },
{ label: 'Search Company Employees', id: 'search_company_employees' },
{ label: 'Search Similar Companies', id: 'search_similar_companies' },
{ label: 'Sales Pointer (People)', id: 'sales_pointer_people' },
// LinkedIn Posts/Activities
{ label: 'Search Posts', id: 'search_posts' },
{ label: 'Get Post Details', id: 'get_post_details' },
{ label: 'Search Post Reactions', id: 'search_post_reactions' },
{ label: 'Search Post Comments', id: 'search_post_comments' },
{ label: 'Search People Activities', id: 'search_people_activities' },
{ label: 'Search Company Activities', id: 'search_company_activities' },
// Other
{ label: 'Reverse Hash Lookup', id: 'reverse_hash_lookup' },
{ label: 'Search Logo', id: 'search_logo' },
{ label: 'Check Credits', id: 'check_credits' },
],
value: () => 'email_to_profile',
},
{
id: 'apiKey',
title: 'Enrich API Key',
type: 'short-input',
placeholder: 'Enter your Enrich.so API key',
password: true,
required: true,
},
{
id: 'email',
title: 'Email Address',
type: 'short-input',
placeholder: 'john.doe@company.com',
condition: {
field: 'operation',
value: [
'email_to_profile',
'email_to_person_lite',
'email_to_phone',
'verify_email',
'disposable_email_check',
'email_to_ip',
],
},
required: {
field: 'operation',
value: [
'email_to_profile',
'email_to_person_lite',
'email_to_phone',
'verify_email',
'disposable_email_check',
'email_to_ip',
],
},
},
{
id: 'inRealtime',
title: 'Fetch Fresh Data',
type: 'switch',
condition: { field: 'operation', value: 'email_to_profile' },
mode: 'advanced',
},
{
id: 'linkedinUrl',
title: 'LinkedIn Profile URL',
type: 'short-input',
placeholder: 'linkedin.com/in/williamhgates',
condition: {
field: 'operation',
value: [
'linkedin_profile',
'linkedin_to_work_email',
'linkedin_to_personal_email',
'phone_finder',
],
},
required: {
field: 'operation',
value: [
'linkedin_profile',
'linkedin_to_work_email',
'linkedin_to_personal_email',
'phone_finder',
],
},
},
{
id: 'fullName',
title: 'Full Name',
type: 'short-input',
placeholder: 'John Doe',
condition: { field: 'operation', value: 'find_email' },
required: { field: 'operation', value: 'find_email' },
},
{
id: 'companyDomain',
title: 'Company Domain',
type: 'short-input',
placeholder: 'example.com',
condition: { field: 'operation', value: 'find_email' },
required: { field: 'operation', value: 'find_email' },
},
{
id: 'ip',
title: 'IP Address',
type: 'short-input',
placeholder: '86.92.60.221',
condition: { field: 'operation', value: 'ip_to_company' },
required: { field: 'operation', value: 'ip_to_company' },
},
{
id: 'companyName',
title: 'Company Name',
type: 'short-input',
placeholder: 'Google',
condition: { field: 'operation', value: 'company_lookup' },
},
{
id: 'domain',
title: 'Domain',
type: 'short-input',
placeholder: 'google.com',
condition: {
field: 'operation',
value: ['company_lookup', 'company_funding', 'company_revenue', 'search_logo'],
},
required: {
field: 'operation',
value: ['company_funding', 'company_revenue', 'search_logo'],
},
},
{
id: 'firstName',
title: 'First Name',
type: 'short-input',
placeholder: 'John',
condition: { field: 'operation', value: 'search_people' },
},
{
id: 'lastName',
title: 'Last Name',
type: 'short-input',
placeholder: 'Doe',
condition: { field: 'operation', value: 'search_people' },
},
{
id: 'subTitle',
title: 'Job Title',
type: 'short-input',
placeholder: 'Software Engineer',
condition: { field: 'operation', value: 'search_people' },
},
{
id: 'locationCountry',
title: 'Country',
type: 'short-input',
placeholder: 'United States',
condition: { field: 'operation', value: ['search_people', 'search_company'] },
},
{
id: 'locationCity',
title: 'City',
type: 'short-input',
placeholder: 'San Francisco',
condition: { field: 'operation', value: ['search_people', 'search_company'] },
},
{
id: 'industry',
title: 'Industry',
type: 'short-input',
placeholder: 'Technology',
condition: { field: 'operation', value: 'search_people' },
},
{
id: 'currentJobTitles',
title: 'Current Job Titles (JSON)',
type: 'code',
placeholder: '["CEO", "CTO", "VP Engineering"]',
condition: { field: 'operation', value: 'search_people' },
},
{
id: 'skills',
title: 'Skills (JSON)',
type: 'code',
placeholder: '["Python", "Machine Learning"]',
condition: { field: 'operation', value: 'search_people' },
},
{
id: 'searchCompanyName',
title: 'Company Name',
type: 'short-input',
placeholder: 'Google',
condition: { field: 'operation', value: 'search_company' },
},
{
id: 'industries',
title: 'Industries (JSON)',
type: 'code',
placeholder: '["Technology", "Software"]',
condition: { field: 'operation', value: 'search_company' },
},
{
id: 'staffCountMin',
title: 'Min Employees',
type: 'short-input',
placeholder: '50',
condition: { field: 'operation', value: 'search_company' },
},
{
id: 'staffCountMax',
title: 'Max Employees',
type: 'short-input',
placeholder: '500',
condition: { field: 'operation', value: 'search_company' },
},
{
id: 'companyIds',
title: 'Company IDs (JSON)',
type: 'code',
placeholder: '[12345, 67890]',
condition: { field: 'operation', value: 'search_company_employees' },
},
{
id: 'country',
title: 'Country',
type: 'short-input',
placeholder: 'United States',
condition: { field: 'operation', value: 'search_company_employees' },
},
{
id: 'city',
title: 'City',
type: 'short-input',
placeholder: 'San Francisco',
condition: { field: 'operation', value: 'search_company_employees' },
},
{
id: 'jobTitles',
title: 'Job Titles (JSON)',
type: 'code',
placeholder: '["Software Engineer", "Product Manager"]',
condition: { field: 'operation', value: 'search_company_employees' },
},
{
id: 'linkedinCompanyUrl',
title: 'LinkedIn Company URL',
type: 'short-input',
placeholder: 'linkedin.com/company/google',
condition: { field: 'operation', value: 'search_similar_companies' },
required: { field: 'operation', value: 'search_similar_companies' },
},
{
id: 'accountLocation',
title: 'Locations (JSON)',
type: 'code',
placeholder: '["germany", "france"]',
condition: { field: 'operation', value: 'search_similar_companies' },
},
{
id: 'employeeSizeType',
title: 'Employee Size Filter Type',
type: 'dropdown',
options: [
{ label: 'Range', id: 'RANGE' },
{ label: 'Exact', id: 'EXACT' },
],
condition: { field: 'operation', value: 'search_similar_companies' },
mode: 'advanced',
},
{
id: 'employeeSizeRange',
title: 'Employee Size Range (JSON)',
type: 'code',
placeholder: '[{"start": 50, "end": 200}]',
condition: { field: 'operation', value: 'search_similar_companies' },
},
{
id: 'num',
title: 'Results Per Page',
type: 'short-input',
placeholder: '10',
condition: { field: 'operation', value: 'search_similar_companies' },
},
{
id: 'filters',
title: 'Filters (JSON)',
type: 'code',
placeholder:
'[{"type": "POSTAL_CODE", "values": [{"id": "101041448", "text": "San Francisco", "selectionType": "INCLUDED"}]}]',
condition: { field: 'operation', value: 'sales_pointer_people' },
required: { field: 'operation', value: 'sales_pointer_people' },
},
{
id: 'keywords',
title: 'Keywords',
type: 'short-input',
placeholder: 'AI automation',
condition: { field: 'operation', value: 'search_posts' },
required: { field: 'operation', value: 'search_posts' },
},
{
id: 'datePosted',
title: 'Date Posted',
type: 'dropdown',
options: [
{ label: 'Any time', id: '' },
{ label: 'Past 24 hours', id: 'past_24_hours' },
{ label: 'Past week', id: 'past_week' },
{ label: 'Past month', id: 'past_month' },
],
condition: { field: 'operation', value: 'search_posts' },
},
{
id: 'postUrl',
title: 'LinkedIn Post URL',
type: 'short-input',
placeholder: 'https://www.linkedin.com/posts/...',
condition: { field: 'operation', value: 'get_post_details' },
required: { field: 'operation', value: 'get_post_details' },
},
{
id: 'postUrn',
title: 'Post URN',
type: 'short-input',
placeholder: 'urn:li:activity:7231931952839196672',
condition: {
field: 'operation',
value: ['search_post_reactions', 'search_post_comments'],
},
required: {
field: 'operation',
value: ['search_post_reactions', 'search_post_comments'],
},
},
{
id: 'reactionType',
title: 'Reaction Type',
type: 'dropdown',
options: [
{ label: 'All', id: 'all' },
{ label: 'Like', id: 'like' },
{ label: 'Love', id: 'love' },
{ label: 'Celebrate', id: 'celebrate' },
{ label: 'Insightful', id: 'insightful' },
{ label: 'Funny', id: 'funny' },
],
condition: { field: 'operation', value: 'search_post_reactions' },
},
{
id: 'profileId',
title: 'Profile ID',
type: 'short-input',
placeholder: 'ACoAAC1wha0BhoDIRAHrP5rgzVDyzmSdnl-KuEk',
condition: { field: 'operation', value: 'search_people_activities' },
required: { field: 'operation', value: 'search_people_activities' },
},
{
id: 'activityType',
title: 'Activity Type',
type: 'dropdown',
options: [
{ label: 'Posts', id: 'posts' },
{ label: 'Comments', id: 'comments' },
{ label: 'Articles', id: 'articles' },
],
condition: {
field: 'operation',
value: ['search_people_activities', 'search_company_activities'],
},
},
{
id: 'companyId',
title: 'Company ID',
type: 'short-input',
placeholder: '100746430',
condition: { field: 'operation', value: 'search_company_activities' },
required: { field: 'operation', value: 'search_company_activities' },
},
{
id: 'offset',
title: 'Offset',
type: 'short-input',
placeholder: '0',
condition: { field: 'operation', value: 'search_company_activities' },
mode: 'advanced',
},
{
id: 'hash',
title: 'MD5 Hash',
type: 'short-input',
placeholder: '5f0efb20de5ecfedbe0bf5e7c12353fe',
condition: { field: 'operation', value: 'reverse_hash_lookup' },
required: { field: 'operation', value: 'reverse_hash_lookup' },
},
{
id: 'page',
title: 'Page Number',
type: 'short-input',
placeholder: '1',
condition: {
field: 'operation',
value: [
'search_people',
'search_company',
'search_company_employees',
'search_similar_companies',
'sales_pointer_people',
'search_posts',
'search_post_reactions',
'search_post_comments',
],
},
required: { field: 'operation', value: 'sales_pointer_people' },
},
{
id: 'pageSize',
title: 'Results Per Page',
type: 'short-input',
placeholder: '20',
condition: {
field: 'operation',
value: ['search_people', 'search_company', 'search_company_employees'],
},
},
{
id: 'paginationToken',
title: 'Pagination Token',
type: 'short-input',
placeholder: 'Token from previous response',
condition: {
field: 'operation',
value: ['search_people_activities', 'search_company_activities'],
},
mode: 'advanced',
},
],
tools: {
access: [
'enrich_check_credits',
'enrich_email_to_profile',
'enrich_email_to_person_lite',
'enrich_linkedin_profile',
'enrich_find_email',
'enrich_linkedin_to_work_email',
'enrich_linkedin_to_personal_email',
'enrich_phone_finder',
'enrich_email_to_phone',
'enrich_verify_email',
'enrich_disposable_email_check',
'enrich_email_to_ip',
'enrich_ip_to_company',
'enrich_company_lookup',
'enrich_company_funding',
'enrich_company_revenue',
'enrich_search_people',
'enrich_search_company',
'enrich_search_company_employees',
'enrich_search_similar_companies',
'enrich_sales_pointer_people',
'enrich_search_posts',
'enrich_get_post_details',
'enrich_search_post_reactions',
'enrich_search_post_comments',
'enrich_search_people_activities',
'enrich_search_company_activities',
'enrich_reverse_hash_lookup',
'enrich_search_logo',
],
config: {
tool: (params) => `enrich_${params.operation}`,
params: (params) => {
const { operation, ...rest } = params
const parsedParams: Record<string, any> = { ...rest }
try {
if (rest.currentJobTitles && typeof rest.currentJobTitles === 'string') {
parsedParams.currentJobTitles = JSON.parse(rest.currentJobTitles)
}
if (rest.skills && typeof rest.skills === 'string') {
parsedParams.skills = JSON.parse(rest.skills)
}
if (rest.industries && typeof rest.industries === 'string') {
parsedParams.industries = JSON.parse(rest.industries)
}
if (rest.companyIds && typeof rest.companyIds === 'string') {
parsedParams.companyIds = JSON.parse(rest.companyIds)
}
if (rest.jobTitles && typeof rest.jobTitles === 'string') {
parsedParams.jobTitles = JSON.parse(rest.jobTitles)
}
if (rest.accountLocation && typeof rest.accountLocation === 'string') {
parsedParams.accountLocation = JSON.parse(rest.accountLocation)
}
if (rest.employeeSizeRange && typeof rest.employeeSizeRange === 'string') {
parsedParams.employeeSizeRange = JSON.parse(rest.employeeSizeRange)
}
if (rest.filters && typeof rest.filters === 'string') {
parsedParams.filters = JSON.parse(rest.filters)
}
} catch (error: any) {
throw new Error(`Invalid JSON input: ${error.message}`)
}
if (operation === 'linkedin_profile') {
parsedParams.url = rest.linkedinUrl
parsedParams.linkedinUrl = undefined
}
if (
operation === 'linkedin_to_work_email' ||
operation === 'linkedin_to_personal_email' ||
operation === 'phone_finder'
) {
parsedParams.linkedinProfile = rest.linkedinUrl
parsedParams.linkedinUrl = undefined
}
if (operation === 'company_lookup') {
parsedParams.name = rest.companyName
parsedParams.companyName = undefined
}
if (operation === 'search_company') {
parsedParams.name = rest.searchCompanyName
parsedParams.searchCompanyName = undefined
}
if (operation === 'search_similar_companies') {
parsedParams.url = rest.linkedinCompanyUrl
parsedParams.linkedinCompanyUrl = undefined
}
if (operation === 'get_post_details') {
parsedParams.url = rest.postUrl
parsedParams.postUrl = undefined
}
if (operation === 'search_logo') {
parsedParams.url = rest.domain
}
if (parsedParams.page) {
const pageNum = Number(parsedParams.page)
if (operation === 'search_people' || operation === 'search_company') {
parsedParams.currentPage = pageNum
parsedParams.page = undefined
} else {
parsedParams.page = pageNum
}
}
if (parsedParams.pageSize) parsedParams.pageSize = Number(parsedParams.pageSize)
if (parsedParams.num) parsedParams.num = Number(parsedParams.num)
if (parsedParams.offset) parsedParams.offset = Number(parsedParams.offset)
if (parsedParams.staffCountMin)
parsedParams.staffCountMin = Number(parsedParams.staffCountMin)
if (parsedParams.staffCountMax)
parsedParams.staffCountMax = Number(parsedParams.staffCountMax)
return parsedParams
},
},
},
inputs: {
operation: { type: 'string', description: 'Enrich operation to perform' },
},
outputs: {
success: { type: 'boolean', description: 'Whether the operation was successful' },
output: { type: 'json', description: 'Output data from the Enrich operation' },
},
}
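The params mapper above renames generic sub-block fields to whatever each Enrich tool expects, parses the JSON inputs, and coerces numeric strings. A worked example with hypothetical values:

// Hypothetical sub-block values for a people search:
const input = {
  operation: 'search_people',
  apiKey: 'sk-demo', // placeholder
  firstName: 'John',
  skills: '["Python", "Machine Learning"]',
  page: '2',
  pageSize: '20',
}
// tool(input)   => 'enrich_search_people'
// params(input) => { apiKey: 'sk-demo', firstName: 'John',
//                    skills: ['Python', 'Machine Learning'],
//                    currentPage: 2, page: undefined, pageSize: 20 }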

View File

@@ -26,6 +26,7 @@ import { DuckDuckGoBlock } from '@/blocks/blocks/duckduckgo'
import { DynamoDBBlock } from '@/blocks/blocks/dynamodb' import { DynamoDBBlock } from '@/blocks/blocks/dynamodb'
import { ElasticsearchBlock } from '@/blocks/blocks/elasticsearch' import { ElasticsearchBlock } from '@/blocks/blocks/elasticsearch'
import { ElevenLabsBlock } from '@/blocks/blocks/elevenlabs' import { ElevenLabsBlock } from '@/blocks/blocks/elevenlabs'
import { EnrichBlock } from '@/blocks/blocks/enrich'
import { EvaluatorBlock } from '@/blocks/blocks/evaluator' import { EvaluatorBlock } from '@/blocks/blocks/evaluator'
import { ExaBlock } from '@/blocks/blocks/exa' import { ExaBlock } from '@/blocks/blocks/exa'
import { FileBlock, FileV2Block } from '@/blocks/blocks/file' import { FileBlock, FileV2Block } from '@/blocks/blocks/file'
@@ -188,6 +189,7 @@ export const registry: Record<string, BlockConfig> = {
dynamodb: DynamoDBBlock, dynamodb: DynamoDBBlock,
elasticsearch: ElasticsearchBlock, elasticsearch: ElasticsearchBlock,
elevenlabs: ElevenLabsBlock, elevenlabs: ElevenLabsBlock,
enrich: EnrichBlock,
evaluator: EvaluatorBlock, evaluator: EvaluatorBlock,
exa: ExaBlock, exa: ExaBlock,
file: FileBlock, file: FileBlock,

View File

@@ -458,8 +458,8 @@ export function getCodeEditorProps(options?: {
'caret-[var(--text-primary)] dark:caret-white', 'caret-[var(--text-primary)] dark:caret-white',
// Font smoothing // Font smoothing
'[-webkit-font-smoothing:antialiased] [-moz-osx-font-smoothing:grayscale]', '[-webkit-font-smoothing:antialiased] [-moz-osx-font-smoothing:grayscale]',
// Disable interaction for streaming/preview/disabled // Disable interaction for streaming/preview
(isStreaming || isPreview || disabled) && 'pointer-events-none' (isStreaming || isPreview) && 'pointer-events-none'
), ),
} }
} }

View File

@@ -185,10 +185,16 @@ export const HTTP = {
}, },
} as const } as const
import { getMaxExecutionTimeout } from '@/lib/core/execution-limits'
export const AGENT = { export const AGENT = {
DEFAULT_MODEL: 'claude-sonnet-4-5', DEFAULT_MODEL: 'claude-sonnet-4-5',
DEFAULT_FUNCTION_TIMEOUT: 600000, get DEFAULT_FUNCTION_TIMEOUT() {
REQUEST_TIMEOUT: 600000, return getMaxExecutionTimeout()
},
get REQUEST_TIMEOUT() {
return getMaxExecutionTimeout()
},
CUSTOM_TOOL_PREFIX: 'custom_', CUSTOM_TOOL_PREFIX: 'custom_',
} as const } as const
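Switching these constants to getters means the limit is re-read on every access rather than frozen at module load, so an environment-configured cap takes effect without touching the constant. getMaxExecutionTimeout is not shown in this hunk; a plausible sketch, assuming an env-driven cap with the previous 600000 ms as the fallback (the variable name is hypothetical):

// Assumed shape of '@/lib/core/execution-limits' (not part of this diff).
export function getMaxExecutionTimeout(): number {
  const raw = process.env.MAX_EXECUTION_TIMEOUT_MS // hypothetical env var
  const parsed = raw ? Number(raw) : Number.NaN
  // Fall back to the old hardcoded default when unset or invalid.
  return Number.isFinite(parsed) && parsed > 0 ? parsed : 600_000
}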

View File

@@ -14,7 +14,7 @@ const sleep = async (ms: number, options: SleepOptions = {}): Promise<boolean> =
const { signal, executionId } = options const { signal, executionId } = options
const useRedis = isRedisCancellationEnabled() && !!executionId const useRedis = isRedisCancellationEnabled() && !!executionId
if (!useRedis && signal?.aborted) { if (signal?.aborted) {
return false return false
} }
@@ -27,7 +27,7 @@ const sleep = async (ms: number, options: SleepOptions = {}): Promise<boolean> =
const cleanup = () => { const cleanup = () => {
if (mainTimeoutId) clearTimeout(mainTimeoutId) if (mainTimeoutId) clearTimeout(mainTimeoutId)
if (checkIntervalId) clearInterval(checkIntervalId) if (checkIntervalId) clearInterval(checkIntervalId)
if (!useRedis && signal) signal.removeEventListener('abort', onAbort) if (signal) signal.removeEventListener('abort', onAbort)
} }
const onAbort = () => { const onAbort = () => {
@@ -37,6 +37,10 @@ const sleep = async (ms: number, options: SleepOptions = {}): Promise<boolean> =
resolve(false) resolve(false)
} }
if (signal) {
signal.addEventListener('abort', onAbort, { once: true })
}
if (useRedis) { if (useRedis) {
checkIntervalId = setInterval(async () => { checkIntervalId = setInterval(async () => {
if (resolved) return if (resolved) return
@@ -49,8 +53,6 @@ const sleep = async (ms: number, options: SleepOptions = {}): Promise<boolean> =
} }
} catch {} } catch {}
}, CANCELLATION_CHECK_INTERVAL_MS) }, CANCELLATION_CHECK_INTERVAL_MS)
} else if (signal) {
signal.addEventListener('abort', onAbort, { once: true })
} }
mainTimeoutId = setTimeout(() => { mainTimeoutId = setTimeout(() => {
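With the listener registered before the Redis branch, an AbortSignal now short-circuits the sleep even while Redis-based cancellation polling is active, instead of being ignored in that mode. A usage sketch (the execution ID is a placeholder):

// sleep resolves true after the delay, or false once aborted/cancelled.
const controller = new AbortController()
const finished = sleep(5_000, { signal: controller.signal, executionId: 'exec-123' })
controller.abort() // resolves `finished` with false right away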

View File

@@ -212,11 +212,11 @@ export class WorkflowBlockHandler implements BlockHandler {
/** /**
* Parses a potentially nested workflow error message to extract: * Parses a potentially nested workflow error message to extract:
* - The chain of workflow names * - The chain of workflow names
* - The actual root error message (preserving the block name prefix for the failing block) * - The actual root error message (preserving the block prefix for the failing block)
* *
* Handles formats like: * Handles formats like:
* - "workflow-name" failed: error * - "workflow-name" failed: error
* - Block Name: "workflow-name" failed: error * - [block_type] Block Name: "workflow-name" failed: error
* - Workflow chain: A → B | error * - Workflow chain: A → B | error
*/ */
private parseNestedWorkflowError(message: string): { chain: string[]; rootError: string } { private parseNestedWorkflowError(message: string): { chain: string[]; rootError: string } {
@@ -234,8 +234,8 @@ export class WorkflowBlockHandler implements BlockHandler {
// Extract workflow names from patterns like: // Extract workflow names from patterns like:
// - "workflow-name" failed: // - "workflow-name" failed:
// - Block Name: "workflow-name" failed: // - [block_type] Block Name: "workflow-name" failed:
const workflowPattern = /(?:\[[^\]]+\]\s*)?(?:[^:]+:\s*)?"([^"]+)"\s*failed:\s*/g const workflowPattern = /(?:\[[^\]]+\]\s*[^:]+:\s*)?"([^"]+)"\s*failed:\s*/g
let match: RegExpExecArray | null let match: RegExpExecArray | null
let lastIndex = 0 let lastIndex = 0
@@ -247,7 +247,7 @@ export class WorkflowBlockHandler implements BlockHandler {
} }
// The root error is everything after the last match // The root error is everything after the last match
// Keep the block name prefix (e.g., Function 1:) so we know which block failed // Keep the block prefix (e.g., [function] Function 1:) so we know which block failed
const rootError = lastIndex > 0 ? remaining.slice(lastIndex) : remaining const rootError = lastIndex > 0 ? remaining.slice(lastIndex) : remaining
return { chain, rootError: rootError.trim() || 'Unknown error' } return { chain, rootError: rootError.trim() || 'Unknown error' }
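A worked trace of the updated pattern (the message below is hypothetical):

const msg =
  '"parent-flow" failed: [workflow] Child Flow: "child-flow" failed: [function] Function 1: boom'
// chain:     ['parent-flow', 'child-flow']
// rootError: '[function] Function 1: boom' (block prefix preserved)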

View File

@@ -47,7 +47,7 @@ export function buildBlockExecutionError(details: BlockExecutionErrorDetails): E
const blockName = details.block.metadata?.name || details.block.id const blockName = details.block.metadata?.name || details.block.id
const blockType = details.block.metadata?.id || 'unknown' const blockType = details.block.metadata?.id || 'unknown'
const error = new Error(`${blockName}: ${errorMessage}`) const error = new Error(`[${blockType}] ${blockName}: ${errorMessage}`)
Object.assign(error, { Object.assign(error, {
blockId: details.block.id, blockId: details.block.id,
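This [blockType] prefix is the format the nested-error parser above now keys on. A before/after example with hypothetical values:

// Given details.block.metadata = { id: 'function', name: 'Function 1' }:
// old message: 'Function 1: x is not defined'
// new message: '[function] Function 1: x is not defined'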

View File

@@ -409,20 +409,6 @@ export function useCollaborativeWorkflow() {
logger.info('Successfully applied batch-toggle-handles from remote user') logger.info('Successfully applied batch-toggle-handles from remote user')
break break
} }
case BLOCKS_OPERATIONS.BATCH_TOGGLE_LOCKED: {
const { blockIds } = payload
logger.info('Received batch-toggle-locked from remote user', {
userId,
count: (blockIds || []).length,
})
if (blockIds && blockIds.length > 0) {
useWorkflowStore.getState().batchToggleLocked(blockIds)
}
logger.info('Successfully applied batch-toggle-locked from remote user')
break
}
case BLOCKS_OPERATIONS.BATCH_UPDATE_PARENT: { case BLOCKS_OPERATIONS.BATCH_UPDATE_PARENT: {
const { updates } = payload const { updates } = payload
logger.info('Received batch-update-parent from remote user', { logger.info('Received batch-update-parent from remote user', {
@@ -837,27 +823,14 @@ export function useCollaborativeWorkflow() {
if (ids.length === 0) return if (ids.length === 0) return
const currentBlocks = useWorkflowStore.getState().blocks
const previousStates: Record<string, boolean> = {} const previousStates: Record<string, boolean> = {}
const validIds: string[] = [] const validIds: string[] = []
// For each ID, collect non-locked blocks and their children for undo/redo
for (const id of ids) { for (const id of ids) {
const block = currentBlocks[id] const block = useWorkflowStore.getState().blocks[id]
if (!block) continue if (block) {
// Skip locked blocks
if (block.locked) continue
validIds.push(id)
previousStates[id] = block.enabled previousStates[id] = block.enabled
validIds.push(id)
// If it's a loop or parallel, also capture children's previous states for undo/redo
if (block.type === 'loop' || block.type === 'parallel') {
Object.entries(currentBlocks).forEach(([blockId, b]) => {
if (b.data?.parentId === id && !b.locked) {
previousStates[blockId] = b.enabled
}
})
} }
} }
@@ -1019,25 +992,12 @@ export function useCollaborativeWorkflow() {
if (ids.length === 0) return if (ids.length === 0) return
const blocks = useWorkflowStore.getState().blocks
// Helper to check if a block is protected (locked or inside locked parent)
const isProtected = (blockId: string): boolean => {
const block = blocks[blockId]
if (!block) return false
if (block.locked) return true
const parentId = block.data?.parentId
if (parentId && blocks[parentId]?.locked) return true
return false
}
const previousStates: Record<string, boolean> = {} const previousStates: Record<string, boolean> = {}
const validIds: string[] = [] const validIds: string[] = []
for (const id of ids) { for (const id of ids) {
const block = blocks[id] const block = useWorkflowStore.getState().blocks[id]
// Skip locked blocks and blocks inside locked containers if (block) {
if (block && !isProtected(id)) {
previousStates[id] = block.horizontalHandles ?? false previousStates[id] = block.horizontalHandles ?? false
validIds.push(id) validIds.push(id)
} }
@@ -1065,58 +1025,6 @@ export function useCollaborativeWorkflow() {
[isBaselineDiffView, addToQueue, activeWorkflowId, session?.user?.id, undoRedo] [isBaselineDiffView, addToQueue, activeWorkflowId, session?.user?.id, undoRedo]
) )
const collaborativeBatchToggleLocked = useCallback(
(ids: string[]) => {
if (isBaselineDiffView) {
return
}
if (ids.length === 0) return
const currentBlocks = useWorkflowStore.getState().blocks
const previousStates: Record<string, boolean> = {}
const validIds: string[] = []
// For each ID, collect blocks and their children for undo/redo
for (const id of ids) {
const block = currentBlocks[id]
if (!block) continue
validIds.push(id)
previousStates[id] = block.locked ?? false
// If it's a loop or parallel, also capture children's previous states for undo/redo
if (block.type === 'loop' || block.type === 'parallel') {
Object.entries(currentBlocks).forEach(([blockId, b]) => {
if (b.data?.parentId === id) {
previousStates[blockId] = b.locked ?? false
}
})
}
}
if (validIds.length === 0) return
const operationId = crypto.randomUUID()
addToQueue({
id: operationId,
operation: {
operation: BLOCKS_OPERATIONS.BATCH_TOGGLE_LOCKED,
target: OPERATION_TARGETS.BLOCKS,
payload: { blockIds: validIds, previousStates },
},
workflowId: activeWorkflowId || '',
userId: session?.user?.id || 'unknown',
})
useWorkflowStore.getState().batchToggleLocked(validIds)
undoRedo.recordBatchToggleLocked(validIds, previousStates)
},
[isBaselineDiffView, addToQueue, activeWorkflowId, session?.user?.id, undoRedo]
)
const collaborativeBatchAddEdges = useCallback( const collaborativeBatchAddEdges = useCallback(
(edges: Edge[], options?: { skipUndoRedo?: boolean }) => { (edges: Edge[], options?: { skipUndoRedo?: boolean }) => {
if (isBaselineDiffView) { if (isBaselineDiffView) {
@@ -1761,7 +1669,6 @@ export function useCollaborativeWorkflow() {
collaborativeToggleBlockAdvancedMode, collaborativeToggleBlockAdvancedMode,
collaborativeSetBlockCanonicalMode, collaborativeSetBlockCanonicalMode,
collaborativeBatchToggleBlockHandles, collaborativeBatchToggleBlockHandles,
collaborativeBatchToggleLocked,
collaborativeBatchAddBlocks, collaborativeBatchAddBlocks,
collaborativeBatchRemoveBlocks, collaborativeBatchRemoveBlocks,
collaborativeBatchAddEdges, collaborativeBatchAddEdges,

View File

@@ -20,7 +20,6 @@ import {
type BatchRemoveEdgesOperation, type BatchRemoveEdgesOperation,
type BatchToggleEnabledOperation, type BatchToggleEnabledOperation,
type BatchToggleHandlesOperation, type BatchToggleHandlesOperation,
type BatchToggleLockedOperation,
type BatchUpdateParentOperation, type BatchUpdateParentOperation,
captureLatestEdges, captureLatestEdges,
captureLatestSubBlockValues, captureLatestSubBlockValues,
@@ -416,36 +415,6 @@ export function useUndoRedo() {
[activeWorkflowId, userId] [activeWorkflowId, userId]
) )
const recordBatchToggleLocked = useCallback(
(blockIds: string[], previousStates: Record<string, boolean>) => {
if (!activeWorkflowId || blockIds.length === 0) return
const operation: BatchToggleLockedOperation = {
id: crypto.randomUUID(),
type: UNDO_REDO_OPERATIONS.BATCH_TOGGLE_LOCKED,
timestamp: Date.now(),
workflowId: activeWorkflowId,
userId,
data: { blockIds, previousStates },
}
const inverse: BatchToggleLockedOperation = {
id: crypto.randomUUID(),
type: UNDO_REDO_OPERATIONS.BATCH_TOGGLE_LOCKED,
timestamp: Date.now(),
workflowId: activeWorkflowId,
userId,
data: { blockIds, previousStates },
}
const entry = createOperationEntry(operation, inverse)
useUndoRedoStore.getState().push(activeWorkflowId, userId, entry)
logger.debug('Recorded batch toggle locked', { blockIds, previousStates })
},
[activeWorkflowId, userId]
)
const undo = useCallback(async () => { const undo = useCallback(async () => {
if (!activeWorkflowId) return if (!activeWorkflowId) return
@@ -808,9 +777,7 @@ export function useUndoRedo() {
const toggleOp = entry.inverse as BatchToggleEnabledOperation const toggleOp = entry.inverse as BatchToggleEnabledOperation
const { blockIds, previousStates } = toggleOp.data const { blockIds, previousStates } = toggleOp.data
// Restore all blocks in previousStates (includes children of containers) const validBlockIds = blockIds.filter((id) => useWorkflowStore.getState().blocks[id])
const allBlockIds = Object.keys(previousStates)
const validBlockIds = allBlockIds.filter((id) => useWorkflowStore.getState().blocks[id])
if (validBlockIds.length === 0) { if (validBlockIds.length === 0) {
logger.debug('Undo batch-toggle-enabled skipped; no blocks exist') logger.debug('Undo batch-toggle-enabled skipped; no blocks exist')
break break
@@ -821,14 +788,14 @@ export function useUndoRedo() {
operation: { operation: {
operation: BLOCKS_OPERATIONS.BATCH_TOGGLE_ENABLED, operation: BLOCKS_OPERATIONS.BATCH_TOGGLE_ENABLED,
target: OPERATION_TARGETS.BLOCKS, target: OPERATION_TARGETS.BLOCKS,
payload: { blockIds, previousStates }, payload: { blockIds: validBlockIds, previousStates },
}, },
workflowId: activeWorkflowId, workflowId: activeWorkflowId,
userId, userId,
}) })
// Use setBlockEnabled to directly restore to previous state // Use setBlockEnabled to directly restore to previous state
// This restores all affected blocks including children of containers // This is more robust than conditional toggle in collaborative scenarios
validBlockIds.forEach((blockId) => { validBlockIds.forEach((blockId) => {
useWorkflowStore.getState().setBlockEnabled(blockId, previousStates[blockId]) useWorkflowStore.getState().setBlockEnabled(blockId, previousStates[blockId])
}) })
@@ -862,36 +829,6 @@ export function useUndoRedo() {
}) })
break break
} }
case UNDO_REDO_OPERATIONS.BATCH_TOGGLE_LOCKED: {
const toggleOp = entry.inverse as BatchToggleLockedOperation
const { blockIds, previousStates } = toggleOp.data
// Restore all blocks in previousStates (includes children of containers)
const allBlockIds = Object.keys(previousStates)
const validBlockIds = allBlockIds.filter((id) => useWorkflowStore.getState().blocks[id])
if (validBlockIds.length === 0) {
logger.debug('Undo batch-toggle-locked skipped; no blocks exist')
break
}
addToQueue({
id: opId,
operation: {
operation: BLOCKS_OPERATIONS.BATCH_TOGGLE_LOCKED,
target: OPERATION_TARGETS.BLOCKS,
payload: { blockIds, previousStates },
},
workflowId: activeWorkflowId,
userId,
})
// Use setBlockLocked to directly restore to previous state
// This restores all affected blocks including children of containers
validBlockIds.forEach((blockId) => {
useWorkflowStore.getState().setBlockLocked(blockId, previousStates[blockId])
})
break
}
case UNDO_REDO_OPERATIONS.APPLY_DIFF: { case UNDO_REDO_OPERATIONS.APPLY_DIFF: {
const applyDiffInverse = entry.inverse as any const applyDiffInverse = entry.inverse as any
const { baselineSnapshot } = applyDiffInverse.data const { baselineSnapshot } = applyDiffInverse.data
@@ -1428,9 +1365,7 @@ export function useUndoRedo() {
const toggleOp = entry.operation as BatchToggleEnabledOperation const toggleOp = entry.operation as BatchToggleEnabledOperation
const { blockIds, previousStates } = toggleOp.data const { blockIds, previousStates } = toggleOp.data
// Process all blocks in previousStates (includes children of containers) const validBlockIds = blockIds.filter((id) => useWorkflowStore.getState().blocks[id])
const allBlockIds = Object.keys(previousStates)
const validBlockIds = allBlockIds.filter((id) => useWorkflowStore.getState().blocks[id])
if (validBlockIds.length === 0) { if (validBlockIds.length === 0) {
logger.debug('Redo batch-toggle-enabled skipped; no blocks exist') logger.debug('Redo batch-toggle-enabled skipped; no blocks exist')
break break
@@ -1441,18 +1376,16 @@ export function useUndoRedo() {
operation: { operation: {
operation: BLOCKS_OPERATIONS.BATCH_TOGGLE_ENABLED, operation: BLOCKS_OPERATIONS.BATCH_TOGGLE_ENABLED,
target: OPERATION_TARGETS.BLOCKS, target: OPERATION_TARGETS.BLOCKS,
payload: { blockIds, previousStates }, payload: { blockIds: validBlockIds, previousStates },
}, },
workflowId: activeWorkflowId, workflowId: activeWorkflowId,
userId, userId,
}) })
// Compute target state the same way batchToggleEnabled does: // Use setBlockEnabled to directly set to toggled state
// use !firstBlock.enabled, where firstBlock is blockIds[0] // Redo sets to !previousStates (the state after the original toggle)
const firstBlockId = blockIds[0]
const targetEnabled = !previousStates[firstBlockId]
validBlockIds.forEach((blockId) => { validBlockIds.forEach((blockId) => {
useWorkflowStore.getState().setBlockEnabled(blockId, targetEnabled) useWorkflowStore.getState().setBlockEnabled(blockId, !previousStates[blockId])
}) })
break break
} }
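A minimal sketch of the undo/redo symmetry these hunks settle on (store access simplified, existence filtering elided):

function applyBatchToggle(
  previousStates: Record<string, boolean>,
  direction: 'undo' | 'redo',
  setBlockEnabled: (id: string, enabled: boolean) => void
) {
  for (const [blockId, wasEnabled] of Object.entries(previousStates)) {
    // Undo restores the recorded state; redo applies its per-block inverse,
    // rather than a single target derived from the first block.
    setBlockEnabled(blockId, direction === 'undo' ? wasEnabled : !wasEnabled)
  }
}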
@@ -1484,38 +1417,6 @@ export function useUndoRedo() {
}) })
break break
} }
case UNDO_REDO_OPERATIONS.BATCH_TOGGLE_LOCKED: {
const toggleOp = entry.operation as BatchToggleLockedOperation
const { blockIds, previousStates } = toggleOp.data
// Process all blocks in previousStates (includes children of containers)
const allBlockIds = Object.keys(previousStates)
const validBlockIds = allBlockIds.filter((id) => useWorkflowStore.getState().blocks[id])
if (validBlockIds.length === 0) {
logger.debug('Redo batch-toggle-locked skipped; no blocks exist')
break
}
addToQueue({
id: opId,
operation: {
operation: BLOCKS_OPERATIONS.BATCH_TOGGLE_LOCKED,
target: OPERATION_TARGETS.BLOCKS,
payload: { blockIds, previousStates },
},
workflowId: activeWorkflowId,
userId,
})
// Compute target state the same way batchToggleLocked does:
// use !firstBlock.locked, where firstBlock is blockIds[0]
const firstBlockId = blockIds[0]
const targetLocked = !previousStates[firstBlockId]
validBlockIds.forEach((blockId) => {
useWorkflowStore.getState().setBlockLocked(blockId, targetLocked)
})
break
}
case UNDO_REDO_OPERATIONS.APPLY_DIFF: { case UNDO_REDO_OPERATIONS.APPLY_DIFF: {
// Redo apply-diff means re-applying the proposed state with diff markers // Redo apply-diff means re-applying the proposed state with diff markers
const applyDiffOp = entry.operation as any const applyDiffOp = entry.operation as any
@@ -1837,7 +1738,6 @@ export function useUndoRedo() {
recordBatchUpdateParent, recordBatchUpdateParent,
recordBatchToggleEnabled, recordBatchToggleEnabled,
recordBatchToggleHandles, recordBatchToggleHandles,
recordBatchToggleLocked,
recordApplyDiff, recordApplyDiff,
recordAcceptDiff, recordAcceptDiff,
recordRejectDiff, recordRejectDiff,

View File

@@ -1,7 +1,10 @@
export { AGENT_CARD_PATH } from '@a2a-js/sdk' export { AGENT_CARD_PATH } from '@a2a-js/sdk'
import { DEFAULT_EXECUTION_TIMEOUT_MS } from '@/lib/core/execution-limits'
export const A2A_PROTOCOL_VERSION = '0.3.0' export const A2A_PROTOCOL_VERSION = '0.3.0'
export const A2A_DEFAULT_TIMEOUT = 300000 export const A2A_DEFAULT_TIMEOUT = DEFAULT_EXECUTION_TIMEOUT_MS
/** /**
* Maximum number of messages stored per task in the database. * Maximum number of messages stored per task in the database.

View File

@@ -1,20 +1,37 @@
import { db } from '@sim/db' import { db } from '@sim/db'
import * as schema from '@sim/db/schema' import * as schema from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm' import { and, eq } from 'drizzle-orm'
import { hasActiveSubscription } from '@/lib/billing'
const logger = createLogger('BillingAuthorization')
/** /**
* Check if a user is authorized to manage billing for a given reference ID * Check if a user is authorized to manage billing for a given reference ID
* Reference ID can be either a user ID (individual subscription) or organization ID (team subscription) * Reference ID can be either a user ID (individual subscription) or organization ID (team subscription)
*
* This function also performs duplicate subscription validation for organizations:
* - Rejects if an organization already has an active subscription (prevents duplicates)
* - Personal subscriptions (referenceId === userId) skip this check to allow upgrades
*/ */
export async function authorizeSubscriptionReference( export async function authorizeSubscriptionReference(
userId: string, userId: string,
referenceId: string referenceId: string
): Promise<boolean> { ): Promise<boolean> {
// User can always manage their own subscriptions // User can always manage their own subscriptions (Pro upgrades, etc.)
if (referenceId === userId) { if (referenceId === userId) {
return true return true
} }
// For organizations: check for existing active subscriptions to prevent duplicates
if (await hasActiveSubscription(referenceId)) {
logger.warn('Blocking checkout - active subscription already exists for organization', {
userId,
referenceId,
})
return false
}
// Check if referenceId is an organizationId the user has admin rights to // Check if referenceId is an organizationId the user has admin rights to
const members = await db const members = await db
.select() .select()

View File

@@ -25,9 +25,11 @@ export function useSubscriptionUpgrade() {
} }
let currentSubscriptionId: string | undefined let currentSubscriptionId: string | undefined
let allSubscriptions: any[] = []
try { try {
const listResult = await client.subscription.list() const listResult = await client.subscription.list()
const activePersonalSub = listResult.data?.find( allSubscriptions = listResult.data || []
const activePersonalSub = allSubscriptions.find(
(sub: any) => sub.status === 'active' && sub.referenceId === userId (sub: any) => sub.status === 'active' && sub.referenceId === userId
) )
currentSubscriptionId = activePersonalSub?.id currentSubscriptionId = activePersonalSub?.id
@@ -50,6 +52,25 @@ export function useSubscriptionUpgrade() {
) )
if (existingOrg) { if (existingOrg) {
// Check if this org already has an active team subscription
const existingTeamSub = allSubscriptions.find(
(sub: any) =>
sub.status === 'active' &&
sub.referenceId === existingOrg.id &&
(sub.plan === 'team' || sub.plan === 'enterprise')
)
if (existingTeamSub) {
logger.warn('Organization already has an active team subscription', {
userId,
organizationId: existingOrg.id,
existingSubscriptionId: existingTeamSub.id,
})
throw new Error(
'This organization already has an active team subscription. Please manage it from the billing settings.'
)
}
logger.info('Using existing organization for team plan upgrade', { logger.info('Using existing organization for team plan upgrade', {
userId, userId,
organizationId: existingOrg.id, organizationId: existingOrg.id,

View File

@@ -1,5 +1,5 @@
import { db } from '@sim/db' import { db } from '@sim/db'
import { member, subscription } from '@sim/db/schema' import { member, organization, subscription } from '@sim/db/schema'
import { createLogger } from '@sim/logger' import { createLogger } from '@sim/logger'
import { and, eq, inArray } from 'drizzle-orm' import { and, eq, inArray } from 'drizzle-orm'
import { checkEnterprisePlan, checkProPlan, checkTeamPlan } from '@/lib/billing/subscriptions/utils' import { checkEnterprisePlan, checkProPlan, checkTeamPlan } from '@/lib/billing/subscriptions/utils'
@@ -26,10 +26,22 @@ export async function getHighestPrioritySubscription(userId: string) {
let orgSubs: typeof personalSubs = [] let orgSubs: typeof personalSubs = []
if (orgIds.length > 0) { if (orgIds.length > 0) {
// Verify orgs exist to filter out orphaned subscriptions
const existingOrgs = await db
.select({ id: organization.id })
.from(organization)
.where(inArray(organization.id, orgIds))
const validOrgIds = existingOrgs.map((o) => o.id)
if (validOrgIds.length > 0) {
orgSubs = await db orgSubs = await db
.select() .select()
.from(subscription) .from(subscription)
.where(and(inArray(subscription.referenceId, orgIds), eq(subscription.status, 'active'))) .where(
and(inArray(subscription.referenceId, validOrgIds), eq(subscription.status, 'active'))
)
}
} }
const allSubs = [...personalSubs, ...orgSubs] const allSubs = [...personalSubs, ...orgSubs]
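The failure mode being filtered out, sketched with hypothetical rows:

// memberships → orgIds: ['org_deleted']
// organization table: no row for 'org_deleted' (org was removed)
// subscription table: { referenceId: 'org_deleted', status: 'active', plan: 'team' }
// → validOrgIds = [] → orgSubs = [] → the personal subscription wins priority.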

View File

@@ -25,6 +25,28 @@ const logger = createLogger('SubscriptionCore')
export { getHighestPrioritySubscription } export { getHighestPrioritySubscription }
/**
* Check if a referenceId (user ID or org ID) has an active subscription
* Used for duplicate subscription prevention
*
* Fails closed: returns true on error to prevent duplicate creation
*/
export async function hasActiveSubscription(referenceId: string): Promise<boolean> {
try {
const [activeSub] = await db
.select({ id: subscription.id })
.from(subscription)
.where(and(eq(subscription.referenceId, referenceId), eq(subscription.status, 'active')))
.limit(1)
return !!activeSub
} catch (error) {
logger.error('Error checking active subscription', { error, referenceId })
// Fail closed: assume subscription exists to prevent duplicate creation
return true
}
}
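A sketch of the fail-closed contract from a caller's perspective (the caller below is hypothetical):

async function canCreateTeamSubscription(orgId: string): Promise<boolean> {
  // hasActiveSubscription returns true both when a subscription exists and on
  // database error, so an outage can never open a duplicate-creation window.
  return !(await hasActiveSubscription(orgId))
}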
/** /**
* Check if user is on Pro plan (direct or via organization) * Check if user is on Pro plan (direct or via organization)
*/ */

View File

@@ -11,6 +11,7 @@ export {
getHighestPrioritySubscription as getActiveSubscription, getHighestPrioritySubscription as getActiveSubscription,
getUserSubscriptionState as getSubscriptionState, getUserSubscriptionState as getSubscriptionState,
hasAccessControlAccess, hasAccessControlAccess,
hasActiveSubscription,
hasCredentialSetsAccess, hasCredentialSetsAccess,
hasSSOAccess, hasSSOAccess,
isEnterpriseOrgAdminOrOwner, isEnterpriseOrgAdminOrOwner,
@@ -32,6 +33,11 @@ export {
} from '@/lib/billing/core/usage' } from '@/lib/billing/core/usage'
export * from '@/lib/billing/credits/balance' export * from '@/lib/billing/credits/balance'
export * from '@/lib/billing/credits/purchase' export * from '@/lib/billing/credits/purchase'
export {
blockOrgMembers,
getOrgMemberIds,
unblockOrgMembers,
} from '@/lib/billing/organizations/membership'
export * from '@/lib/billing/subscriptions/utils' export * from '@/lib/billing/subscriptions/utils'
export { canEditUsageLimit as canEditLimit } from '@/lib/billing/subscriptions/utils' export { canEditUsageLimit as canEditLimit } from '@/lib/billing/subscriptions/utils'
export * from '@/lib/billing/types' export * from '@/lib/billing/types'

View File

@@ -8,6 +8,7 @@ import {
} from '@sim/db/schema' } from '@sim/db/schema'
import { createLogger } from '@sim/logger' import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm' import { and, eq } from 'drizzle-orm'
import { hasActiveSubscription } from '@/lib/billing'
import { getPlanPricing } from '@/lib/billing/core/billing' import { getPlanPricing } from '@/lib/billing/core/billing'
import { syncUsageLimitsFromSubscription } from '@/lib/billing/core/usage' import { syncUsageLimitsFromSubscription } from '@/lib/billing/core/usage'
@@ -159,6 +160,16 @@ export async function ensureOrganizationForTeamSubscription(
if (existingMembership.length > 0) { if (existingMembership.length > 0) {
const membership = existingMembership[0] const membership = existingMembership[0]
if (membership.role === 'owner' || membership.role === 'admin') { if (membership.role === 'owner' || membership.role === 'admin') {
// Check if org already has an active subscription (prevent duplicates)
if (await hasActiveSubscription(membership.organizationId)) {
logger.error('Organization already has an active subscription', {
userId,
organizationId: membership.organizationId,
newSubscriptionId: subscription.id,
})
throw new Error('Organization already has an active subscription')
}
logger.info('User already owns/admins an org, using it', { logger.info('User already owns/admins an org, using it', {
userId, userId,
organizationId: membership.organizationId, organizationId: membership.organizationId,

View File

@@ -15,13 +15,86 @@ import {
userStats, userStats,
} from '@sim/db/schema' } from '@sim/db/schema'
import { createLogger } from '@sim/logger' import { createLogger } from '@sim/logger'
import { and, eq, sql } from 'drizzle-orm' import { and, eq, inArray, isNull, ne, or, sql } from 'drizzle-orm'
import { syncUsageLimitsFromSubscription } from '@/lib/billing/core/usage' import { syncUsageLimitsFromSubscription } from '@/lib/billing/core/usage'
import { requireStripeClient } from '@/lib/billing/stripe-client' import { requireStripeClient } from '@/lib/billing/stripe-client'
import { validateSeatAvailability } from '@/lib/billing/validation/seat-management' import { validateSeatAvailability } from '@/lib/billing/validation/seat-management'
const logger = createLogger('OrganizationMembership') const logger = createLogger('OrganizationMembership')
export type BillingBlockReason = 'payment_failed' | 'dispute'
/**
* Get all member user IDs for an organization
*/
export async function getOrgMemberIds(organizationId: string): Promise<string[]> {
const members = await db
.select({ userId: member.userId })
.from(member)
.where(eq(member.organizationId, organizationId))
return members.map((m) => m.userId)
}
/**
* Block all members of an organization for billing reasons
* Returns the number of members actually blocked
*
* Reason priority: dispute > payment_failed
* A payment_failed block won't overwrite an existing dispute block
*/
export async function blockOrgMembers(
organizationId: string,
reason: BillingBlockReason
): Promise<number> {
const memberIds = await getOrgMemberIds(organizationId)
if (memberIds.length === 0) {
return 0
}
// Don't overwrite dispute blocks with payment_failed (dispute is higher priority)
const whereClause =
reason === 'payment_failed'
? and(
inArray(userStats.userId, memberIds),
or(ne(userStats.billingBlockedReason, 'dispute'), isNull(userStats.billingBlockedReason))
)
: inArray(userStats.userId, memberIds)
const result = await db
.update(userStats)
.set({ billingBlocked: true, billingBlockedReason: reason })
.where(whereClause)
.returning({ userId: userStats.userId })
return result.length
}
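A worked example of the reason priority (state transitions only; the org id and member states are hypothetical):

// member A: { billingBlocked: true,  billingBlockedReason: 'dispute' }
// member B: { billingBlocked: false, billingBlockedReason: null }
await blockOrgMembers('org_1', 'payment_failed')
// member A: unchanged — dispute outranks payment_failed
// member B: { billingBlocked: true, billingBlockedReason: 'payment_failed' }
await blockOrgMembers('org_1', 'dispute')
// members A and B: { billingBlocked: true, billingBlockedReason: 'dispute' }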
/**
* Unblock all members of an organization blocked for a specific reason
* Only unblocks members blocked for the specified reason (not other reasons)
* Returns the number of members actually unblocked
*/
export async function unblockOrgMembers(
organizationId: string,
reason: BillingBlockReason
): Promise<number> {
const memberIds = await getOrgMemberIds(organizationId)
if (memberIds.length === 0) {
return 0
}
const result = await db
.update(userStats)
.set({ billingBlocked: false, billingBlockedReason: null })
.where(and(inArray(userStats.userId, memberIds), eq(userStats.billingBlockedReason, reason)))
.returning({ userId: userStats.userId })
return result.length
}
export interface RestoreProResult { export interface RestoreProResult {
restored: boolean restored: boolean
usageRestored: boolean usageRestored: boolean

View File

@@ -1,8 +1,9 @@
import { db } from '@sim/db' import { db } from '@sim/db'
import { member, subscription, user, userStats } from '@sim/db/schema' import { subscription, user, userStats } from '@sim/db/schema'
import { createLogger } from '@sim/logger' import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm' import { and, eq } from 'drizzle-orm'
import type Stripe from 'stripe' import type Stripe from 'stripe'
import { blockOrgMembers, unblockOrgMembers } from '@/lib/billing'
import { requireStripeClient } from '@/lib/billing/stripe-client' import { requireStripeClient } from '@/lib/billing/stripe-client'
const logger = createLogger('DisputeWebhooks') const logger = createLogger('DisputeWebhooks')
@@ -57,36 +58,34 @@ export async function handleChargeDispute(event: Stripe.Event): Promise<void> {
if (subs.length > 0) { if (subs.length > 0) {
const orgId = subs[0].referenceId const orgId = subs[0].referenceId
const memberCount = await blockOrgMembers(orgId, 'dispute')
const owners = await db if (memberCount > 0) {
.select({ userId: member.userId }) logger.warn('Blocked all org members due to dispute', {
.from(member)
.where(and(eq(member.organizationId, orgId), eq(member.role, 'owner')))
.limit(1)
if (owners.length > 0) {
await db
.update(userStats)
.set({ billingBlocked: true, billingBlockedReason: 'dispute' })
.where(eq(userStats.userId, owners[0].userId))
logger.warn('Blocked org owner due to dispute', {
disputeId: dispute.id, disputeId: dispute.id,
ownerId: owners[0].userId,
organizationId: orgId, organizationId: orgId,
memberCount,
}) })
} }
} }
} }
/** /**
* Handles charge.dispute.closed - unblocks user if dispute was won * Handles charge.dispute.closed - unblocks user if dispute was won or warning closed
*
* Status meanings:
* - 'won': Merchant won, customer's chargeback denied → unblock
* - 'lost': Customer won, money refunded → stay blocked (they owe us)
* - 'warning_closed': Pre-dispute inquiry closed without chargeback → unblock (false alarm)
*/ */
export async function handleDisputeClosed(event: Stripe.Event): Promise<void> { export async function handleDisputeClosed(event: Stripe.Event): Promise<void> {
const dispute = event.data.object as Stripe.Dispute const dispute = event.data.object as Stripe.Dispute
if (dispute.status !== 'won') { // Only unblock if we won or the warning was closed without a full dispute
logger.info('Dispute not won, user remains blocked', { const shouldUnblock = dispute.status === 'won' || dispute.status === 'warning_closed'
if (!shouldUnblock) {
logger.info('Dispute resolved against us, user remains blocked', {
disputeId: dispute.id, disputeId: dispute.id,
status: dispute.status, status: dispute.status,
}) })
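A sketch of the status-to-decision mapping this handler now implements:

const shouldUnblockFor: Record<string, boolean> = {
  won: true,            // chargeback denied → restore access
  warning_closed: true, // inquiry closed without a full dispute → restore access
  lost: false,          // refund issued against us → stay blocked
}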
@@ -98,7 +97,7 @@ export async function handleDisputeClosed(event: Stripe.Event): Promise<void> {
return return
} }
// Find and unblock user (Pro plans) // Find and unblock user (Pro plans) - only if blocked for dispute, not other reasons
const users = await db const users = await db
.select({ id: user.id }) .select({ id: user.id })
.from(user) .from(user)
@@ -109,16 +108,17 @@ export async function handleDisputeClosed(event: Stripe.Event): Promise<void> {
await db await db
.update(userStats) .update(userStats)
.set({ billingBlocked: false, billingBlockedReason: null }) .set({ billingBlocked: false, billingBlockedReason: null })
.where(eq(userStats.userId, users[0].id)) .where(and(eq(userStats.userId, users[0].id), eq(userStats.billingBlockedReason, 'dispute')))
logger.info('Unblocked user after winning dispute', { logger.info('Unblocked user after dispute resolved in our favor', {
disputeId: dispute.id, disputeId: dispute.id,
userId: users[0].id, userId: users[0].id,
status: dispute.status,
}) })
return return
} }
// Find and unblock org owner (Team/Enterprise) // Find and unblock all org members (Team/Enterprise) - consistent with payment success
const subs = await db const subs = await db
.select({ referenceId: subscription.referenceId }) .select({ referenceId: subscription.referenceId })
.from(subscription) .from(subscription)
@@ -127,24 +127,13 @@ export async function handleDisputeClosed(event: Stripe.Event): Promise<void> {
if (subs.length > 0) { if (subs.length > 0) {
const orgId = subs[0].referenceId const orgId = subs[0].referenceId
const memberCount = await unblockOrgMembers(orgId, 'dispute')
const owners = await db logger.info('Unblocked all org members after dispute resolved in our favor', {
.select({ userId: member.userId })
.from(member)
.where(and(eq(member.organizationId, orgId), eq(member.role, 'owner')))
.limit(1)
if (owners.length > 0) {
await db
.update(userStats)
.set({ billingBlocked: false, billingBlockedReason: null })
.where(eq(userStats.userId, owners[0].userId))
logger.info('Unblocked org owner after winning dispute', {
disputeId: dispute.id, disputeId: dispute.id,
ownerId: owners[0].userId,
organizationId: orgId, organizationId: orgId,
memberCount,
status: dispute.status,
}) })
} }
} }
}

View File

@@ -8,12 +8,13 @@ import {
userStats, userStats,
} from '@sim/db/schema' } from '@sim/db/schema'
import { createLogger } from '@sim/logger' import { createLogger } from '@sim/logger'
import { and, eq, inArray } from 'drizzle-orm' import { and, eq, inArray, isNull, ne, or } from 'drizzle-orm'
import type Stripe from 'stripe' import type Stripe from 'stripe'
import { getEmailSubject, PaymentFailedEmail, renderCreditPurchaseEmail } from '@/components/emails' import { getEmailSubject, PaymentFailedEmail, renderCreditPurchaseEmail } from '@/components/emails'
import { calculateSubscriptionOverage } from '@/lib/billing/core/billing' import { calculateSubscriptionOverage } from '@/lib/billing/core/billing'
import { addCredits, getCreditBalance, removeCredits } from '@/lib/billing/credits/balance' import { addCredits, getCreditBalance, removeCredits } from '@/lib/billing/credits/balance'
import { setUsageLimitForCredits } from '@/lib/billing/credits/purchase' import { setUsageLimitForCredits } from '@/lib/billing/credits/purchase'
import { blockOrgMembers, unblockOrgMembers } from '@/lib/billing/organizations/membership'
import { requireStripeClient } from '@/lib/billing/stripe-client' import { requireStripeClient } from '@/lib/billing/stripe-client'
import { getBaseUrl } from '@/lib/core/utils/urls' import { getBaseUrl } from '@/lib/core/utils/urls'
import { sendEmail } from '@/lib/messaging/email/mailer' import { sendEmail } from '@/lib/messaging/email/mailer'
@@ -502,24 +503,7 @@ export async function handleInvoicePaymentSucceeded(event: Stripe.Event) {
} }
if (sub.plan === 'team' || sub.plan === 'enterprise') { if (sub.plan === 'team' || sub.plan === 'enterprise') {
const members = await db await unblockOrgMembers(sub.referenceId, 'payment_failed')
.select({ userId: member.userId })
.from(member)
.where(eq(member.organizationId, sub.referenceId))
const memberIds = members.map((m) => m.userId)
if (memberIds.length > 0) {
// Only unblock users blocked for payment_failed, not disputes
await db
.update(userStats)
.set({ billingBlocked: false, billingBlockedReason: null })
.where(
and(
inArray(userStats.userId, memberIds),
eq(userStats.billingBlockedReason, 'payment_failed')
)
)
}
} else { } else {
// Only unblock users blocked for payment_failed, not disputes // Only unblock users blocked for payment_failed, not disputes
await db await db
@@ -616,28 +600,26 @@ export async function handleInvoicePaymentFailed(event: Stripe.Event) {
if (records.length > 0) { if (records.length > 0) {
const sub = records[0] const sub = records[0]
if (sub.plan === 'team' || sub.plan === 'enterprise') { if (sub.plan === 'team' || sub.plan === 'enterprise') {
const members = await db const memberCount = await blockOrgMembers(sub.referenceId, 'payment_failed')
.select({ userId: member.userId })
.from(member)
.where(eq(member.organizationId, sub.referenceId))
const memberIds = members.map((m) => m.userId)
if (memberIds.length > 0) {
await db
.update(userStats)
.set({ billingBlocked: true, billingBlockedReason: 'payment_failed' })
.where(inArray(userStats.userId, memberIds))
}
logger.info('Blocked team/enterprise members due to payment failure', { logger.info('Blocked team/enterprise members due to payment failure', {
organizationId: sub.referenceId, organizationId: sub.referenceId,
memberCount: members.length, memberCount,
isOverageInvoice, isOverageInvoice,
}) })
} else { } else {
// Don't overwrite dispute blocks (dispute > payment_failed priority)
await db await db
.update(userStats) .update(userStats)
.set({ billingBlocked: true, billingBlockedReason: 'payment_failed' }) .set({ billingBlocked: true, billingBlockedReason: 'payment_failed' })
.where(eq(userStats.userId, sub.referenceId)) .where(
and(
eq(userStats.userId, sub.referenceId),
or(
ne(userStats.billingBlockedReason, 'dispute'),
isNull(userStats.billingBlockedReason)
)
)
)
logger.info('Blocked user due to payment failure', { logger.info('Blocked user due to payment failure', {
userId: sub.referenceId, userId: sub.referenceId,
isOverageInvoice, isOverageInvoice,

View File

@@ -3,6 +3,7 @@ import { member, organization, subscription } from '@sim/db/schema'
import { createLogger } from '@sim/logger' import { createLogger } from '@sim/logger'
import { and, eq, ne } from 'drizzle-orm' import { and, eq, ne } from 'drizzle-orm'
import { calculateSubscriptionOverage } from '@/lib/billing/core/billing' import { calculateSubscriptionOverage } from '@/lib/billing/core/billing'
import { hasActiveSubscription } from '@/lib/billing/core/subscription'
import { syncUsageLimitsFromSubscription } from '@/lib/billing/core/usage' import { syncUsageLimitsFromSubscription } from '@/lib/billing/core/usage'
import { restoreUserProSubscription } from '@/lib/billing/organizations/membership' import { restoreUserProSubscription } from '@/lib/billing/organizations/membership'
import { requireStripeClient } from '@/lib/billing/stripe-client' import { requireStripeClient } from '@/lib/billing/stripe-client'
@@ -52,14 +53,37 @@ async function restoreMemberProSubscriptions(organizationId: string): Promise<nu
/** /**
* Cleanup organization when team/enterprise subscription is deleted. * Cleanup organization when team/enterprise subscription is deleted.
* - Checks if other active subscriptions point to this org (skip deletion if so)
* - Restores member Pro subscriptions * - Restores member Pro subscriptions
* - Deletes the organization * - Deletes the organization (only if no other active subs)
* - Syncs usage limits for former members (resets to free or Pro tier) * - Syncs usage limits for former members (resets to free or Pro tier)
*/ */
async function cleanupOrganizationSubscription(organizationId: string): Promise<{ async function cleanupOrganizationSubscription(organizationId: string): Promise<{
restoredProCount: number restoredProCount: number
membersSynced: number membersSynced: number
organizationDeleted: boolean
}> { }> {
// Check if other active subscriptions still point to this org
// Note: The subscription being deleted is already marked as 'canceled' by better-auth
// before this handler runs, so we only find truly active ones
if (await hasActiveSubscription(organizationId)) {
logger.info('Skipping organization deletion - other active subscriptions exist', {
organizationId,
})
// Still sync limits for members since this subscription was deleted
const memberUserIds = await db
.select({ userId: member.userId })
.from(member)
.where(eq(member.organizationId, organizationId))
for (const m of memberUserIds) {
await syncUsageLimitsFromSubscription(m.userId)
}
return { restoredProCount: 0, membersSynced: memberUserIds.length, organizationDeleted: false }
}
// Get member userIds before deletion (needed for limit syncing after org deletion) // Get member userIds before deletion (needed for limit syncing after org deletion)
const memberUserIds = await db const memberUserIds = await db
.select({ userId: member.userId }) .select({ userId: member.userId })
@@ -75,7 +99,7 @@ async function cleanupOrganizationSubscription(organizationId: string): Promise<
await syncUsageLimitsFromSubscription(m.userId) await syncUsageLimitsFromSubscription(m.userId)
} }
return { restoredProCount, membersSynced: memberUserIds.length } return { restoredProCount, membersSynced: memberUserIds.length, organizationDeleted: true }
} }
/** /**
@@ -172,15 +196,14 @@ export async function handleSubscriptionDeleted(subscription: {
referenceId: subscription.referenceId, referenceId: subscription.referenceId,
}) })
const { restoredProCount, membersSynced } = await cleanupOrganizationSubscription( const { restoredProCount, membersSynced, organizationDeleted } =
subscription.referenceId await cleanupOrganizationSubscription(subscription.referenceId)
)
logger.info('Successfully processed enterprise subscription cancellation', { logger.info('Successfully processed enterprise subscription cancellation', {
subscriptionId: subscription.id, subscriptionId: subscription.id,
stripeSubscriptionId, stripeSubscriptionId,
restoredProCount, restoredProCount,
organizationDeleted: true, organizationDeleted,
membersSynced, membersSynced,
}) })
return return
@@ -297,7 +320,7 @@ export async function handleSubscriptionDeleted(subscription: {
const cleanup = await cleanupOrganizationSubscription(subscription.referenceId) const cleanup = await cleanupOrganizationSubscription(subscription.referenceId)
restoredProCount = cleanup.restoredProCount restoredProCount = cleanup.restoredProCount
membersSynced = cleanup.membersSynced membersSynced = cleanup.membersSynced
organizationDeleted = true organizationDeleted = cleanup.organizationDeleted
} else if (subscription.plan === 'pro') { } else if (subscription.plan === 'pro') {
await syncUsageLimitsFromSubscription(subscription.referenceId) await syncUsageLimitsFromSubscription(subscription.referenceId)
membersSynced = 1 membersSynced = 1

View File

@@ -5,11 +5,9 @@ import type { ToolUIConfig } from './ui-config'
const baseToolLogger = createLogger('BaseClientTool') const baseToolLogger = createLogger('BaseClientTool')
/** Default timeout for tool execution (5 minutes) */ const DEFAULT_TOOL_TIMEOUT_MS = 5 * 60 * 1000
const DEFAULT_TOOL_TIMEOUT_MS = 2 * 60 * 1000
/** Timeout for tools that run workflows (10 minutes) */ export const WORKFLOW_EXECUTION_TIMEOUT_MS = 5 * 60 * 1000
export const WORKFLOW_EXECUTION_TIMEOUT_MS = 10 * 60 * 1000
// Client tool call states used by the new runtime // Client tool call states used by the new runtime
export enum ClientToolCallState { export enum ClientToolCallState {

View File

@@ -54,7 +54,6 @@ type SkippedItemType =
| 'block_not_found' | 'block_not_found'
| 'invalid_block_type' | 'invalid_block_type'
| 'block_not_allowed' | 'block_not_allowed'
| 'block_locked'
| 'tool_not_allowed' | 'tool_not_allowed'
| 'invalid_edge_target' | 'invalid_edge_target'
| 'invalid_edge_source' | 'invalid_edge_source'
@@ -619,7 +618,6 @@ function createBlockFromParams(
subBlocks: {}, subBlocks: {},
outputs: outputs, outputs: outputs,
data: parentId ? { parentId, extent: 'parent' as const } : {}, data: parentId ? { parentId, extent: 'parent' as const } : {},
locked: false,
} }
// Add validated inputs as subBlocks // Add validated inputs as subBlocks
@@ -1522,24 +1520,6 @@ function applyOperationsToWorkflowState(
break break
} }
// Check if block is locked or inside a locked container
const deleteBlock = modifiedState.blocks[block_id]
const deleteParentId = deleteBlock.data?.parentId as string | undefined
const deleteParentLocked = deleteParentId
? modifiedState.blocks[deleteParentId]?.locked
: false
if (deleteBlock.locked || deleteParentLocked) {
logSkippedItem(skippedItems, {
type: 'block_locked',
operationType: 'delete',
blockId: block_id,
reason: deleteParentLocked
? `Block "${block_id}" is inside locked container "${deleteParentId}" and cannot be deleted`
: `Block "${block_id}" is locked and cannot be deleted`,
})
break
}
// Find all child blocks to remove // Find all child blocks to remove
const blocksToRemove = new Set<string>([block_id]) const blocksToRemove = new Set<string>([block_id])
const findChildren = (parentId: string) => { const findChildren = (parentId: string) => {
@@ -1575,21 +1555,6 @@ function applyOperationsToWorkflowState(
const block = modifiedState.blocks[block_id] const block = modifiedState.blocks[block_id]
// Check if block is locked or inside a locked container
const editParentId = block.data?.parentId as string | undefined
const editParentLocked = editParentId ? modifiedState.blocks[editParentId]?.locked : false
if (block.locked || editParentLocked) {
logSkippedItem(skippedItems, {
type: 'block_locked',
operationType: 'edit',
blockId: block_id,
reason: editParentLocked
? `Block "${block_id}" is inside locked container "${editParentId}" and cannot be edited`
: `Block "${block_id}" is locked and cannot be edited`,
})
break
}
// Ensure block has essential properties // Ensure block has essential properties
if (!block.type) { if (!block.type) {
logger.warn(`Block ${block_id} missing type property, skipping edit`, { logger.warn(`Block ${block_id} missing type property, skipping edit`, {
@@ -2157,19 +2122,6 @@ function applyOperationsToWorkflowState(
// Handle nested nodes (for loops/parallels created from scratch) // Handle nested nodes (for loops/parallels created from scratch)
if (params.nestedNodes) { if (params.nestedNodes) {
// Defensive check: verify parent is not locked before adding children
// (Parent was just created with locked: false, but check for consistency)
const parentBlock = modifiedState.blocks[block_id]
if (parentBlock?.locked) {
logSkippedItem(skippedItems, {
type: 'block_locked',
operationType: 'add_nested_nodes',
blockId: block_id,
reason: `Container "${block_id}" is locked - cannot add nested nodes`,
})
break
}
Object.entries(params.nestedNodes).forEach(([childId, childBlock]: [string, any]) => { Object.entries(params.nestedNodes).forEach(([childId, childBlock]: [string, any]) => {
// Validate childId is a valid string // Validate childId is a valid string
if (!isValidKey(childId)) { if (!isValidKey(childId)) {
@@ -2257,18 +2209,6 @@ function applyOperationsToWorkflowState(
break break
} }
// Check if subflow is locked
if (subflowBlock.locked) {
logSkippedItem(skippedItems, {
type: 'block_locked',
operationType: 'insert_into_subflow',
blockId: block_id,
reason: `Subflow "${subflowId}" is locked - cannot insert block "${block_id}"`,
details: { subflowId },
})
break
}
if (subflowBlock.type !== 'loop' && subflowBlock.type !== 'parallel') { if (subflowBlock.type !== 'loop' && subflowBlock.type !== 'parallel') {
logger.error('Subflow block has invalid type', { logger.error('Subflow block has invalid type', {
subflowId, subflowId,
@@ -2307,17 +2247,6 @@ function applyOperationsToWorkflowState(
break break
} }
// Check if existing block is locked
if (existingBlock.locked) {
logSkippedItem(skippedItems, {
type: 'block_locked',
operationType: 'insert_into_subflow',
blockId: block_id,
reason: `Block "${block_id}" is locked and cannot be moved into a subflow`,
})
break
}
// Moving existing block into subflow - just update parent // Moving existing block into subflow - just update parent
existingBlock.data = { existingBlock.data = {
...existingBlock.data, ...existingBlock.data,
@@ -2463,30 +2392,6 @@ function applyOperationsToWorkflowState(
break break
} }
// Check if block is locked
if (block.locked) {
logSkippedItem(skippedItems, {
type: 'block_locked',
operationType: 'extract_from_subflow',
blockId: block_id,
reason: `Block "${block_id}" is locked and cannot be extracted from subflow`,
})
break
}
// Check if parent subflow is locked
const parentSubflow = modifiedState.blocks[subflowId]
if (parentSubflow?.locked) {
logSkippedItem(skippedItems, {
type: 'block_locked',
operationType: 'extract_from_subflow',
blockId: block_id,
reason: `Subflow "${subflowId}" is locked - cannot extract block "${block_id}"`,
details: { subflowId },
})
break
}
// Verify it's actually a child of this subflow // Verify it's actually a child of this subflow
if (block.data?.parentId !== subflowId) { if (block.data?.parentId !== subflowId) {
logger.warn('Block is not a child of specified subflow', { logger.warn('Block is not a child of specified subflow', {

View File

@@ -170,6 +170,11 @@ export const env = createEnv({
RATE_LIMIT_ENTERPRISE_SYNC: z.string().optional().default('600'), // Enterprise tier sync API executions per minute RATE_LIMIT_ENTERPRISE_SYNC: z.string().optional().default('600'), // Enterprise tier sync API executions per minute
RATE_LIMIT_ENTERPRISE_ASYNC: z.string().optional().default('5000'), // Enterprise tier async API executions per minute RATE_LIMIT_ENTERPRISE_ASYNC: z.string().optional().default('5000'), // Enterprise tier async API executions per minute
EXECUTION_TIMEOUT_FREE: z.string().optional().default('300'),
EXECUTION_TIMEOUT_PRO: z.string().optional().default('3600'),
EXECUTION_TIMEOUT_TEAM: z.string().optional().default('3600'),
EXECUTION_TIMEOUT_ENTERPRISE: z.string().optional().default('3600'),
// Knowledge Base Processing Configuration - Shared across all processing methods // Knowledge Base Processing Configuration - Shared across all processing methods
KB_CONFIG_MAX_DURATION: z.number().optional().default(600), // Max processing duration in seconds (10 minutes) KB_CONFIG_MAX_DURATION: z.number().optional().default(600), // Max processing duration in seconds (10 minutes)
KB_CONFIG_MAX_ATTEMPTS: z.number().optional().default(3), // Max retry attempts KB_CONFIG_MAX_ATTEMPTS: z.number().optional().default(3), // Max retry attempts

View File

@@ -0,0 +1 @@
export * from './types'

View File

@@ -0,0 +1,122 @@
import { env } from '@/lib/core/config/env'
import type { SubscriptionPlan } from '@/lib/core/rate-limiter/types'
export interface ExecutionTimeoutConfig {
sync: number
async: number
}
const DEFAULT_SYNC_TIMEOUTS = {
free: 300,
pro: 3600,
team: 3600,
enterprise: 3600,
} as const
const ASYNC_TIMEOUT_SECONDS = 5400
function getSyncTimeoutForPlan(plan: SubscriptionPlan): number {
const envVarMap: Record<SubscriptionPlan, string | undefined> = {
free: env.EXECUTION_TIMEOUT_FREE,
pro: env.EXECUTION_TIMEOUT_PRO,
team: env.EXECUTION_TIMEOUT_TEAM,
enterprise: env.EXECUTION_TIMEOUT_ENTERPRISE,
}
return (Number.parseInt(envVarMap[plan] || '') || DEFAULT_SYNC_TIMEOUTS[plan]) * 1000
}
export const EXECUTION_TIMEOUTS: Record<SubscriptionPlan, ExecutionTimeoutConfig> = {
free: {
sync: getSyncTimeoutForPlan('free'),
async: ASYNC_TIMEOUT_SECONDS * 1000,
},
pro: {
sync: getSyncTimeoutForPlan('pro'),
async: ASYNC_TIMEOUT_SECONDS * 1000,
},
team: {
sync: getSyncTimeoutForPlan('team'),
async: ASYNC_TIMEOUT_SECONDS * 1000,
},
enterprise: {
sync: getSyncTimeoutForPlan('enterprise'),
async: ASYNC_TIMEOUT_SECONDS * 1000,
},
}
export function getExecutionTimeout(
plan: SubscriptionPlan | undefined,
type: 'sync' | 'async' = 'sync'
): number {
return EXECUTION_TIMEOUTS[plan || 'free'][type]
}
export function getExecutionTimeoutSeconds(
plan: SubscriptionPlan | undefined,
type: 'sync' | 'async' = 'sync'
): number {
return Math.floor(getExecutionTimeout(plan, type) / 1000)
}
export function getMaxExecutionTimeout(): number {
return EXECUTION_TIMEOUTS.enterprise.async
}
export const DEFAULT_EXECUTION_TIMEOUT_MS = EXECUTION_TIMEOUTS.free.sync
export class ExecutionTimeoutError extends Error {
constructor(
public readonly timeoutMs: number,
public readonly plan?: SubscriptionPlan
) {
const timeoutSeconds = Math.floor(timeoutMs / 1000)
const timeoutMinutes = Math.floor(timeoutSeconds / 60)
const displayTime =
timeoutMinutes > 0
? `${timeoutMinutes} minute${timeoutMinutes > 1 ? 's' : ''}`
: `${timeoutSeconds} seconds`
super(`Execution timed out after ${displayTime}`)
this.name = 'ExecutionTimeoutError'
}
}
export function isTimeoutError(error: unknown): boolean {
if (error instanceof ExecutionTimeoutError) return true
if (!(error instanceof Error)) return false
const name = error.name.toLowerCase()
const message = error.message.toLowerCase()
return (
name === 'timeouterror' ||
name === 'aborterror' ||
message.includes('timeout') ||
message.includes('timed out') ||
message.includes('aborted')
)
}
export function createTimeoutError(
timeoutMs: number,
plan?: SubscriptionPlan
): ExecutionTimeoutError {
return new ExecutionTimeoutError(timeoutMs, plan)
}
export function getTimeoutErrorMessage(error: unknown, timeoutMs?: number): string {
if (error instanceof ExecutionTimeoutError) {
return error.message
}
if (timeoutMs) {
const timeoutSeconds = Math.floor(timeoutMs / 1000)
const timeoutMinutes = Math.floor(timeoutSeconds / 60)
const displayTime =
timeoutMinutes > 0
? `${timeoutMinutes} minute${timeoutMinutes > 1 ? 's' : ''}`
: `${timeoutSeconds} seconds`
return `Execution timed out after ${displayTime}`
}
return 'Execution timed out'
}
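With the defaults above (and no env overrides set), a sketch of the resolved values and error formatting:

getExecutionTimeout('free')             // 300_000 ms (5 minutes, sync)
getExecutionTimeout('pro')              // 3_600_000 ms (1 hour, sync)
getExecutionTimeout(undefined, 'async') // 5_400_000 ms (90 minutes; plan defaults to 'free')

new ExecutionTimeoutError(300_000).message // 'Execution timed out after 5 minutes'

const aborted = new Error('The operation was aborted')
aborted.name = 'AbortError'
isTimeoutError(aborted) // true — matched by both the name and message heuristics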

View File

@@ -1,7 +1,3 @@
/** import { DEFAULT_EXECUTION_TIMEOUT_MS } from '@/lib/core/execution-limits'
* Execution timeout constants
*
* DEFAULT_EXECUTION_TIMEOUT_MS: The default timeout for executing user code (10 minutes)
*/
export const DEFAULT_EXECUTION_TIMEOUT_MS = 600000 // 10 minutes (600 seconds) export { DEFAULT_EXECUTION_TIMEOUT_MS }

View File

@@ -4,7 +4,9 @@ import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm' import { eq } from 'drizzle-orm'
import { checkServerSideUsageLimits } from '@/lib/billing/calculations/usage-monitor' import { checkServerSideUsageLimits } from '@/lib/billing/calculations/usage-monitor'
import { getHighestPrioritySubscription } from '@/lib/billing/core/subscription' import { getHighestPrioritySubscription } from '@/lib/billing/core/subscription'
import { getExecutionTimeout } from '@/lib/core/execution-limits'
import { RateLimiter } from '@/lib/core/rate-limiter/rate-limiter' import { RateLimiter } from '@/lib/core/rate-limiter/rate-limiter'
import type { SubscriptionPlan } from '@/lib/core/rate-limiter/types'
import { LoggingSession } from '@/lib/logs/execution/logging-session' import { LoggingSession } from '@/lib/logs/execution/logging-session'
import { getWorkspaceBilledAccountUserId } from '@/lib/workspaces/utils' import { getWorkspaceBilledAccountUserId } from '@/lib/workspaces/utils'
import type { CoreTriggerType } from '@/stores/logs/filters/types' import type { CoreTriggerType } from '@/stores/logs/filters/types'
@@ -133,10 +135,10 @@ export interface PreprocessExecutionResult {
success: boolean success: boolean
error?: { error?: {
message: string message: string
statusCode: number // HTTP status code (401, 402, 403, 404, 429, 500) statusCode: number
logCreated: boolean // Whether error was logged to execution_logs logCreated: boolean
} }
actorUserId?: string // The user ID that will be billed actorUserId?: string
workflowRecord?: WorkflowRecord workflowRecord?: WorkflowRecord
userSubscription?: SubscriptionInfo | null userSubscription?: SubscriptionInfo | null
rateLimitInfo?: { rateLimitInfo?: {
@@ -144,6 +146,10 @@ export interface PreprocessExecutionResult {
remaining: number remaining: number
resetAt: Date resetAt: Date
} }
executionTimeout?: {
sync: number
async: number
}
} }
type WorkflowRecord = typeof workflow.$inferSelect type WorkflowRecord = typeof workflow.$inferSelect
@@ -484,12 +490,17 @@ export async function preprocessExecution(
triggerType, triggerType,
}) })
const plan = userSubscription?.plan as SubscriptionPlan | undefined
return { return {
success: true, success: true,
actorUserId, actorUserId,
workflowRecord, workflowRecord,
userSubscription, userSubscription,
rateLimitInfo, rateLimitInfo,
executionTimeout: {
sync: getExecutionTimeout(plan, 'sync'),
async: getExecutionTimeout(plan, 'async'),
},
} }
} }
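A sketch of how a caller might apply the returned budget to a run (the runner and its signature are hypothetical; preprocessExecution arguments elided):

const pre = await preprocessExecution(/* ... */)
if (!pre.success || !pre.executionTimeout) throw new Error(pre.error?.message)

const controller = new AbortController()
const timer = setTimeout(() => controller.abort(), pre.executionTimeout.sync)
try {
  await runWorkflow({ signal: controller.signal }) // hypothetical runner
} finally {
  clearTimeout(timer)
}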

View File

@@ -33,6 +33,7 @@ import type {
WorkflowExecutionSnapshot, WorkflowExecutionSnapshot,
WorkflowState, WorkflowState,
} from '@/lib/logs/types' } from '@/lib/logs/types'
import { getWorkspaceBilledAccountUserId } from '@/lib/workspaces/utils'
export interface ToolCall { export interface ToolCall {
name: string name: string
@@ -503,7 +504,7 @@ export class ExecutionLogger implements IExecutionLoggerService {
} }
try { try {
// Get the workflow record to get the userId // Get the workflow record to get workspace and fallback userId
const [workflowRecord] = await db const [workflowRecord] = await db
.select() .select()
.from(workflow) .from(workflow)
@@ -515,7 +516,12 @@ export class ExecutionLogger implements IExecutionLoggerService {
return return
} }
const userId = workflowRecord.userId let billingUserId: string | null = null
if (workflowRecord.workspaceId) {
billingUserId = await getWorkspaceBilledAccountUserId(workflowRecord.workspaceId)
}
const userId = billingUserId || workflowRecord.userId
const costToStore = costSummary.totalCost const costToStore = costSummary.totalCost
const existing = await db.select().from(userStats).where(eq(userStats.userId, userId)) const existing = await db.select().from(userStats).where(eq(userStats.userId, userId))

View File

@@ -776,11 +776,16 @@ export class LoggingSession {
await db await db
.update(workflowExecutionLogs) .update(workflowExecutionLogs)
.set({ .set({
level: 'error',
status: 'failed', status: 'failed',
executionData: sql`jsonb_set( executionData: sql`jsonb_set(
jsonb_set(
COALESCE(execution_data, '{}'::jsonb), COALESCE(execution_data, '{}'::jsonb),
ARRAY['error'], ARRAY['error'],
to_jsonb(${message}::text) to_jsonb(${message}::text)
),
ARRAY['finalOutput'],
jsonb_build_object('error', ${message}::text)
)`, )`,
}) })
.where(eq(workflowExecutionLogs.executionId, executionId)) .where(eq(workflowExecutionLogs.executionId, executionId))
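For a failure message like 'Execution timed out after 5 minutes', the nested jsonb_set leaves execution_data in this shape (sketch):

const executionData = {
  // ...keys already present survive COALESCE + jsonb_set
  error: 'Execution timed out after 5 minutes',
  finalOutput: { error: 'Execution timed out after 5 minutes' },
}
// On the row itself, level becomes 'error' and status becomes 'failed'.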

View File

@@ -12,6 +12,7 @@ import { Client } from '@modelcontextprotocol/sdk/client/index.js'
import { StreamableHTTPClientTransport } from '@modelcontextprotocol/sdk/client/streamableHttp.js' import { StreamableHTTPClientTransport } from '@modelcontextprotocol/sdk/client/streamableHttp.js'
import type { ListToolsResult, Tool } from '@modelcontextprotocol/sdk/types.js' import type { ListToolsResult, Tool } from '@modelcontextprotocol/sdk/types.js'
import { createLogger } from '@sim/logger' import { createLogger } from '@sim/logger'
import { getMaxExecutionTimeout } from '@/lib/core/execution-limits'
import { import {
McpConnectionError, McpConnectionError,
type McpConnectionStatus, type McpConnectionStatus,
@@ -202,7 +203,7 @@ export class McpClient {
const sdkResult = await this.client.callTool( const sdkResult = await this.client.callTool(
{ name: toolCall.name, arguments: toolCall.arguments }, { name: toolCall.name, arguments: toolCall.arguments },
undefined, undefined,
{ timeout: 600000 } // 10 minutes - override SDK's 60s default { timeout: getMaxExecutionTimeout() }
) )
return sdkResult as McpToolResult return sdkResult as McpToolResult

View File

@@ -34,7 +34,7 @@ export function sanitizeHeaders(
* Client-safe MCP constants * Client-safe MCP constants
*/ */
export const MCP_CLIENT_CONSTANTS = { export const MCP_CLIENT_CONSTANTS = {
CLIENT_TIMEOUT: 600000, CLIENT_TIMEOUT: 5 * 60 * 1000,
MAX_RETRIES: 3, MAX_RETRIES: 3,
RECONNECT_DELAY: 1000, RECONNECT_DELAY: 1000,
} as const } as const

View File

@@ -81,8 +81,8 @@ describe('generateMcpServerId', () => {
}) })
describe('MCP_CONSTANTS', () => { describe('MCP_CONSTANTS', () => {
it.concurrent('has correct execution timeout (10 minutes)', () => { it.concurrent('has correct execution timeout (5 minutes)', () => {
expect(MCP_CONSTANTS.EXECUTION_TIMEOUT).toBe(600000) expect(MCP_CONSTANTS.EXECUTION_TIMEOUT).toBe(300000)
}) })
it.concurrent('has correct cache timeout (5 minutes)', () => { it.concurrent('has correct cache timeout (5 minutes)', () => {
@@ -107,8 +107,8 @@ describe('MCP_CONSTANTS', () => {
}) })
describe('MCP_CLIENT_CONSTANTS', () => { describe('MCP_CLIENT_CONSTANTS', () => {
it.concurrent('has correct client timeout (10 minutes)', () => { it.concurrent('has correct client timeout (5 minutes)', () => {
expect(MCP_CLIENT_CONSTANTS.CLIENT_TIMEOUT).toBe(600000) expect(MCP_CLIENT_CONSTANTS.CLIENT_TIMEOUT).toBe(300000)
}) })
it.concurrent('has correct auto refresh interval (5 minutes)', () => { it.concurrent('has correct auto refresh interval (5 minutes)', () => {

View File

@@ -1,12 +1,11 @@
 import { NextResponse } from 'next/server'
+import { DEFAULT_EXECUTION_TIMEOUT_MS, getExecutionTimeout } from '@/lib/core/execution-limits'
+import type { SubscriptionPlan } from '@/lib/core/rate-limiter/types'
 import type { McpApiResponse } from '@/lib/mcp/types'
 import { isMcpTool, MCP } from '@/executor/constants'
-/**
- * MCP-specific constants
- */
 export const MCP_CONSTANTS = {
-  EXECUTION_TIMEOUT: 600000,
+  EXECUTION_TIMEOUT: DEFAULT_EXECUTION_TIMEOUT_MS,
   CACHE_TIMEOUT: 5 * 60 * 1000,
   DEFAULT_RETRIES: 3,
   DEFAULT_CONNECTION_TIMEOUT: 30000,
@@ -14,6 +13,10 @@ export const MCP_CONSTANTS = {
   MAX_CONSECUTIVE_FAILURES: 3,
 } as const
+export function getMcpExecutionTimeout(plan?: SubscriptionPlan): number {
+  return getExecutionTimeout(plan, 'sync')
+}
 /**
  * Core MCP tool parameter keys that are metadata, not user-entered test values.
  * These should be preserved when cleaning up params during schema updates.
@@ -45,11 +48,8 @@ export function sanitizeHeaders(
   )
 }
-/**
- * Client-safe MCP constants
- */
 export const MCP_CLIENT_CONSTANTS = {
-  CLIENT_TIMEOUT: 600000,
+  CLIENT_TIMEOUT: DEFAULT_EXECUTION_TIMEOUT_MS,
   AUTO_REFRESH_INTERVAL: 5 * 60 * 1000,
 } as const
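
The new '@/lib/core/execution-limits' module is never shown in this compare, only its call sites. A sketch of the shape those call sites imply; only the three exported names and the 5-minute default (the tests above pin EXECUTION_TIMEOUT to 300000) are grounded, while the per-plan table is an illustrative assumption:

import type { SubscriptionPlan } from '@/lib/core/rate-limiter/types'

export const DEFAULT_EXECUTION_TIMEOUT_MS = 5 * 60 * 1000

// Hypothetical per-plan ceilings; the real values live in the module itself.
const PLAN_TIMEOUT_MS: Partial<Record<SubscriptionPlan, number>> = {}

export function getExecutionTimeout(
  plan?: SubscriptionPlan,
  _mode: 'sync' | 'async' = 'sync'
): number {
  return (plan && PLAN_TIMEOUT_MS[plan]) || DEFAULT_EXECUTION_TIMEOUT_MS
}

export function getMaxExecutionTimeout(): number {
  const planValues = Object.values(PLAN_TIMEOUT_MS).filter((v): v is number => v != null)
  return Math.max(DEFAULT_EXECUTION_TIMEOUT_MS, ...planValues)
}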

View File

@@ -296,26 +296,6 @@ describe('hasWorkflowChanged', () => {
 })
 expect(hasWorkflowChanged(state1, state2)).toBe(true)
 })
-it.concurrent('should detect locked/unlocked changes', () => {
-  const state1 = createWorkflowState({
-    blocks: { block1: createBlock('block1', { locked: false }) },
-  })
-  const state2 = createWorkflowState({
-    blocks: { block1: createBlock('block1', { locked: true }) },
-  })
-  expect(hasWorkflowChanged(state1, state2)).toBe(true)
-})
-it.concurrent('should not detect changes when locked state is the same', () => {
-  const state1 = createWorkflowState({
-    blocks: { block1: createBlock('block1', { locked: true }) },
-  })
-  const state2 = createWorkflowState({
-    blocks: { block1: createBlock('block1', { locked: true }) },
-  })
-  expect(hasWorkflowChanged(state1, state2)).toBe(false)
-})
 })
 describe('SubBlock Changes', () => {

View File

@@ -157,7 +157,7 @@ export function generateWorkflowDiffSummary(
 }
 // Check other block properties (boolean fields)
 // Use !! to normalize: null/undefined/false are all equivalent (falsy)
-const blockFields = ['horizontalHandles', 'advancedMode', 'triggerMode', 'locked'] as const
+const blockFields = ['horizontalHandles', 'advancedMode', 'triggerMode'] as const
 for (const field of blockFields) {
   if (!!currentBlock[field] !== !!previousBlock[field]) {
     changes.push({
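
A quick illustration (not from the PR) of what the !! normalization buys in the loop above:

// null, undefined, and false all normalize to false, so a flag that merely
// materializes from undefined to false is not reported as a change:
console.log(!!undefined !== !!false) // false -> no diff entry
console.log(!!undefined !== !!true)  // true  -> diff entry pushed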

View File

@@ -100,7 +100,6 @@ function buildStartBlockState(
   triggerMode: false,
   height: 0,
   data: {},
-  locked: false,
 }
 return { blockState, subBlockValues }

View File

@@ -1,173 +0,0 @@
/**
* @vitest-environment node
*/
import { beforeEach, describe, expect, it, vi } from 'vitest'
import type { BlockState, WorkflowState } from '@/stores/workflows/workflow/types'
// Mock all external dependencies before imports
vi.mock('@sim/logger', () => ({
createLogger: () => ({
info: vi.fn(),
warn: vi.fn(),
error: vi.fn(),
debug: vi.fn(),
}),
}))
vi.mock('@/stores/workflows/workflow/store', () => ({
useWorkflowStore: {
getState: () => ({
getWorkflowState: () => ({ blocks: {}, edges: [], loops: {}, parallels: {} }),
}),
},
}))
vi.mock('@/stores/workflows/utils', () => ({
mergeSubblockState: (blocks: Record<string, BlockState>) => blocks,
}))
vi.mock('@/lib/workflows/sanitization/key-validation', () => ({
isValidKey: (key: string) => key !== 'undefined' && key !== 'null' && key !== '',
}))
vi.mock('@/lib/workflows/autolayout', () => ({
transferBlockHeights: vi.fn(),
applyTargetedLayout: (blocks: Record<string, BlockState>) => blocks,
applyAutoLayout: () => ({ success: true, blocks: {} }),
}))
vi.mock('@/lib/workflows/autolayout/constants', () => ({
DEFAULT_HORIZONTAL_SPACING: 500,
DEFAULT_VERTICAL_SPACING: 400,
DEFAULT_LAYOUT_OPTIONS: {},
}))
vi.mock('@/stores/workflows/workflow/utils', () => ({
generateLoopBlocks: () => ({}),
generateParallelBlocks: () => ({}),
}))
import { WorkflowDiffEngine } from './diff-engine'
function createMockBlock(overrides: Partial<BlockState> = {}): BlockState {
return {
id: 'block-1',
type: 'agent',
name: 'Test Block',
enabled: true,
position: { x: 0, y: 0 },
subBlocks: {},
outputs: {},
...overrides,
} as BlockState
}
function createMockWorkflowState(blocks: Record<string, BlockState>): WorkflowState {
return {
blocks,
edges: [],
loops: {},
parallels: {},
}
}
describe('WorkflowDiffEngine', () => {
let engine: WorkflowDiffEngine
beforeEach(() => {
engine = new WorkflowDiffEngine()
vi.clearAllMocks()
})
describe('hasBlockChanged detection', () => {
describe('locked state changes', () => {
it.concurrent(
'should detect when block locked state changes from false to true',
async () => {
const freshEngine = new WorkflowDiffEngine()
const baseline = createMockWorkflowState({
'block-1': createMockBlock({ id: 'block-1', locked: false }),
})
const proposed = createMockWorkflowState({
'block-1': createMockBlock({ id: 'block-1', locked: true }),
})
const result = await freshEngine.createDiffFromWorkflowState(
proposed,
undefined,
baseline
)
expect(result.success).toBe(true)
expect(result.diff?.diffAnalysis?.edited_blocks).toContain('block-1')
}
)
it.concurrent('should not detect change when locked state is the same', async () => {
const freshEngine = new WorkflowDiffEngine()
const baseline = createMockWorkflowState({
'block-1': createMockBlock({ id: 'block-1', locked: true }),
})
const proposed = createMockWorkflowState({
'block-1': createMockBlock({ id: 'block-1', locked: true }),
})
const result = await freshEngine.createDiffFromWorkflowState(proposed, undefined, baseline)
expect(result.success).toBe(true)
expect(result.diff?.diffAnalysis?.edited_blocks).not.toContain('block-1')
})
it.concurrent('should detect change when locked goes from undefined to true', async () => {
const freshEngine = new WorkflowDiffEngine()
const baseline = createMockWorkflowState({
'block-1': createMockBlock({ id: 'block-1' }), // locked undefined
})
const proposed = createMockWorkflowState({
'block-1': createMockBlock({ id: 'block-1', locked: true }),
})
const result = await freshEngine.createDiffFromWorkflowState(proposed, undefined, baseline)
expect(result.success).toBe(true)
// The hasBlockChanged function uses !!locked for comparison
// so undefined -> true should be detected as a change
expect(result.diff?.diffAnalysis?.edited_blocks).toContain('block-1')
})
it.concurrent('should not detect change when both locked states are falsy', async () => {
const freshEngine = new WorkflowDiffEngine()
const baseline = createMockWorkflowState({
'block-1': createMockBlock({ id: 'block-1' }), // locked undefined
})
const proposed = createMockWorkflowState({
'block-1': createMockBlock({ id: 'block-1', locked: false }), // locked false
})
const result = await freshEngine.createDiffFromWorkflowState(proposed, undefined, baseline)
expect(result.success).toBe(true)
// undefined and false should both be falsy, so !! comparison makes them equal
expect(result.diff?.diffAnalysis?.edited_blocks).not.toContain('block-1')
})
})
})
describe('diff lifecycle', () => {
it.concurrent('should start with no diff', () => {
const freshEngine = new WorkflowDiffEngine()
expect(freshEngine.hasDiff()).toBe(false)
expect(freshEngine.getCurrentDiff()).toBeUndefined()
})
it.concurrent('should clear diff', () => {
const freshEngine = new WorkflowDiffEngine()
freshEngine.clearDiff()
expect(freshEngine.hasDiff()).toBe(false)
})
})
})

View File

@@ -215,7 +215,6 @@ function hasBlockChanged(currentBlock: BlockState, proposedBlock: BlockState): b
 if (currentBlock.name !== proposedBlock.name) return true
 if (currentBlock.enabled !== proposedBlock.enabled) return true
 if (currentBlock.triggerMode !== proposedBlock.triggerMode) return true
-if (!!currentBlock.locked !== !!proposedBlock.locked) return true
 // Compare subBlocks
 const currentSubKeys = Object.keys(currentBlock.subBlocks || {})

View File

@@ -62,9 +62,6 @@ export interface ExecutionErrorEvent extends BaseExecutionEvent {
   }
 }
-/**
- * Execution cancelled event
- */
 export interface ExecutionCancelledEvent extends BaseExecutionEvent {
   type: 'execution:cancelled'
   workflowId: string
@@ -171,9 +168,6 @@ export type ExecutionEvent =
   | StreamChunkEvent
   | StreamDoneEvent
-/**
- * Extracted data types for use in callbacks
- */
 export type ExecutionStartedData = ExecutionStartedEvent['data']
 export type ExecutionCompletedData = ExecutionCompletedEvent['data']
 export type ExecutionErrorData = ExecutionErrorEvent['data']

View File

@@ -189,7 +189,6 @@ export async function duplicateWorkflow(
   parentId: newParentId,
   extent: newExtent,
   data: updatedData,
-  locked: false, // Duplicated blocks should always be unlocked
   createdAt: now,
   updatedAt: now,
 }

View File

@@ -226,7 +226,6 @@ export async function loadWorkflowFromNormalizedTables(
   subBlocks: (block.subBlocks as BlockState['subBlocks']) || {},
   outputs: (block.outputs as BlockState['outputs']) || {},
   data: blockData,
-  locked: block.locked,
 }
 blocksMap[block.id] = assembled
@@ -364,7 +363,6 @@ export async function saveWorkflowToNormalizedTables(
   data: block.data || {},
   parentId: block.data?.parentId || null,
   extent: block.data?.extent || null,
-  locked: block.locked ?? false,
 }))
 await tx.insert(workflowBlocks).values(blockInserts)
@@ -629,8 +627,7 @@ export function regenerateWorkflowStateIds(state: RegenerateStateInput): Regener
 // Regenerate blocks with updated references
 Object.entries(state.blocks || {}).forEach(([oldId, block]) => {
   const newId = blockIdMapping.get(oldId)!
-  // Duplicated blocks are always unlocked so users can edit them
-  const newBlock: BlockState = { ...block, id: newId, locked: false }
+  const newBlock: BlockState = { ...block, id: newId }
   // Update parentId reference if it exists
   if (newBlock.data?.parentId) {

View File

@@ -17,7 +17,6 @@ export const BLOCKS_OPERATIONS = {
   BATCH_TOGGLE_ENABLED: 'batch-toggle-enabled',
   BATCH_TOGGLE_HANDLES: 'batch-toggle-handles',
   BATCH_UPDATE_PARENT: 'batch-update-parent',
-  BATCH_TOGGLE_LOCKED: 'batch-toggle-locked',
 } as const
 export type BlocksOperation = (typeof BLOCKS_OPERATIONS)[keyof typeof BLOCKS_OPERATIONS]
@@ -86,7 +85,6 @@ export const UNDO_REDO_OPERATIONS = {
   BATCH_UPDATE_PARENT: 'batch-update-parent',
   BATCH_TOGGLE_ENABLED: 'batch-toggle-enabled',
   BATCH_TOGGLE_HANDLES: 'batch-toggle-handles',
-  BATCH_TOGGLE_LOCKED: 'batch-toggle-locked',
   APPLY_DIFF: 'apply-diff',
   ACCEPT_DIFF: 'accept-diff',
   REJECT_DIFF: 'reject-diff',

View File

@@ -507,37 +507,7 @@ async function handleBlocksOperationTx(
 })
 if (blocks && blocks.length > 0) {
-  // Fetch existing blocks to check for locked parents
-  const existingBlocks = await tx
-    .select({ id: workflowBlocks.id, locked: workflowBlocks.locked })
-    .from(workflowBlocks)
-    .where(eq(workflowBlocks.workflowId, workflowId))
-  type ExistingBlockRecord = (typeof existingBlocks)[number]
-  const lockedParentIds = new Set(
-    existingBlocks
-      .filter((b: ExistingBlockRecord) => b.locked)
-      .map((b: ExistingBlockRecord) => b.id)
-  )
-  // Filter out blocks being added to locked parents
-  const allowedBlocks = (blocks as Array<Record<string, unknown>>).filter((block) => {
-    const parentId = (block.data as Record<string, unknown> | null)?.parentId as
-      | string
-      | undefined
-    if (parentId && lockedParentIds.has(parentId)) {
-      logger.info(`Skipping block ${block.id} - parent ${parentId} is locked`)
-      return false
-    }
-    return true
-  })
-  if (allowedBlocks.length === 0) {
-    logger.info('All blocks filtered out due to locked parents, skipping add')
-    break
-  }
-  const blockValues = allowedBlocks.map((block: Record<string, unknown>) => {
+  const blockValues = blocks.map((block: Record<string, unknown>) => {
     const blockId = block.id as string
     const mergedSubBlocks = mergeSubBlockValues(
       block.subBlocks as Record<string, unknown>,
@@ -559,7 +529,6 @@ async function handleBlocksOperationTx(
   advancedMode: (block.advancedMode as boolean) ?? false,
   triggerMode: (block.triggerMode as boolean) ?? false,
   height: (block.height as number) || 0,
-  locked: (block.locked as boolean) ?? false,
 }
 })
@@ -568,7 +537,7 @@ async function handleBlocksOperationTx(
 // Create subflow entries for loop/parallel blocks (skip if already in payload)
 const loopIds = new Set(loops ? Object.keys(loops) : [])
 const parallelIds = new Set(parallels ? Object.keys(parallels) : [])
-for (const block of allowedBlocks) {
+for (const block of blocks) {
   const blockId = block.id as string
   if (block.type === 'loop' && !loopIds.has(blockId)) {
     await tx.insert(workflowSubflows).values({
@@ -597,7 +566,7 @@ async function handleBlocksOperationTx(
 // Update parent subflow node lists
 const parentIds = new Set<string>()
-for (const block of allowedBlocks) {
+for (const block of blocks) {
   const parentId = (block.data as Record<string, unknown>)?.parentId as string | undefined
   if (parentId) {
     parentIds.add(parentId)
@@ -655,74 +624,44 @@ async function handleBlocksOperationTx(
 logger.info(`Batch removing ${ids.length} blocks from workflow ${workflowId}`)
-// Fetch all blocks to check lock status and filter out protected blocks
-const allBlocks = await tx
-  .select({
-    id: workflowBlocks.id,
-    type: workflowBlocks.type,
-    locked: workflowBlocks.locked,
-    data: workflowBlocks.data,
-  })
-  .from(workflowBlocks)
-  .where(eq(workflowBlocks.workflowId, workflowId))
-type BlockRecord = (typeof allBlocks)[number]
-const blocksById: Record<string, BlockRecord> = Object.fromEntries(
-  allBlocks.map((b: BlockRecord) => [b.id, b])
-)
-// Helper to check if a block is protected (locked or inside locked parent)
-const isProtected = (blockId: string): boolean => {
-  const block = blocksById[blockId]
-  if (!block) return false
-  if (block.locked) return true
-  const parentId = (block.data as Record<string, unknown> | null)?.parentId as
-    | string
-    | undefined
-  if (parentId && blocksById[parentId]?.locked) return true
-  return false
-}
-// Filter out protected blocks from deletion request
-const deletableIds = ids.filter((id) => !isProtected(id))
-if (deletableIds.length === 0) {
-  logger.info('All requested blocks are protected, skipping deletion')
-  return
-}
-if (deletableIds.length < ids.length) {
-  logger.info(
-    `Filtered out ${ids.length - deletableIds.length} protected blocks from deletion`
-  )
-}
 // Collect all block IDs including children of subflows
-const allBlocksToDelete = new Set<string>(deletableIds)
-for (const id of deletableIds) {
-  const block = blocksById[id]
-  if (block && isSubflowBlockType(block.type)) {
-    // Include all children of the subflow (they should be deleted with parent)
-    for (const b of allBlocks) {
-      const parentId = (b.data as Record<string, unknown> | null)?.parentId
-      if (parentId === id) {
-        allBlocksToDelete.add(b.id)
-      }
-    }
+const allBlocksToDelete = new Set<string>(ids)
+for (const id of ids) {
+  const blockToRemove = await tx
+    .select({ type: workflowBlocks.type })
+    .from(workflowBlocks)
+    .where(and(eq(workflowBlocks.id, id), eq(workflowBlocks.workflowId, workflowId)))
+    .limit(1)
+  if (blockToRemove.length > 0 && isSubflowBlockType(blockToRemove[0].type)) {
+    const childBlocks = await tx
+      .select({ id: workflowBlocks.id })
+      .from(workflowBlocks)
+      .where(
+        and(
+          eq(workflowBlocks.workflowId, workflowId),
+          sql`${workflowBlocks.data}->>'parentId' = ${id}`
+        )
+      )
+    childBlocks.forEach((child: { id: string }) => allBlocksToDelete.add(child.id))
   }
 }
 const blockIdsArray = Array.from(allBlocksToDelete)
-// Collect parent IDs BEFORE deleting blocks (use blocksById, already fetched)
+// Collect parent IDs BEFORE deleting blocks
 const parentIds = new Set<string>()
-for (const id of deletableIds) {
-  const block = blocksById[id]
-  const parentId = (block?.data as Record<string, unknown> | null)?.parentId as
-    | string
-    | undefined
-  if (parentId) {
-    parentIds.add(parentId)
+for (const id of ids) {
+  const parentInfo = await tx
+    .select({ parentId: sql<string | null>`${workflowBlocks.data}->>'parentId'` })
+    .from(workflowBlocks)
+    .where(and(eq(workflowBlocks.id, id), eq(workflowBlocks.workflowId, workflowId)))
+    .limit(1)
+  if (parentInfo.length > 0 && parentInfo[0].parentId) {
+    parentIds.add(parentInfo[0].parentId)
   }
 }
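
The new code looks up subflow children through a JSONB path comparison. A hypothetical extraction (not in the PR) of that lookup as a standalone helper, assuming the drizzle workflowBlocks table from the app schema:

import { and, eq, sql } from 'drizzle-orm'

async function findSubflowChildIds(
  tx: any,
  workflowId: string,
  parentId: string
): Promise<string[]> {
  const rows = await tx
    .select({ id: workflowBlocks.id })
    .from(workflowBlocks)
    .where(
      and(
        eq(workflowBlocks.workflowId, workflowId),
        // ->> reads the JSONB field as text, matching children by stored parentId
        sql`${workflowBlocks.data}->>'parentId' = ${parentId}`
      )
    )
  return rows.map((r: { id: string }) => r.id)
}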
@@ -802,61 +741,22 @@ async function handleBlocksOperationTx(
   `Batch toggling enabled state for ${blockIds.length} blocks in workflow ${workflowId}`
 )
-// Get all blocks in workflow to find children and check locked state
-const allBlocks = await tx
-  .select({
-    id: workflowBlocks.id,
-    enabled: workflowBlocks.enabled,
-    locked: workflowBlocks.locked,
-    type: workflowBlocks.type,
-    data: workflowBlocks.data,
-  })
-  .from(workflowBlocks)
-  .where(eq(workflowBlocks.workflowId, workflowId))
-type BlockRecord = (typeof allBlocks)[number]
-const blocksById: Record<string, BlockRecord> = Object.fromEntries(
-  allBlocks.map((b: BlockRecord) => [b.id, b])
-)
-const blocksToToggle = new Set<string>()
-// Collect all blocks to toggle including children of containers
-for (const id of blockIds) {
-  const block = blocksById[id]
-  if (!block || block.locked) continue
-  blocksToToggle.add(id)
-  // If it's a loop or parallel, also include all children
-  if (block.type === 'loop' || block.type === 'parallel') {
-    for (const b of allBlocks) {
-      const parentId = (b.data as Record<string, unknown> | null)?.parentId
-      if (parentId === id && !b.locked) {
-        blocksToToggle.add(b.id)
-      }
-    }
-  }
-}
-// Determine target enabled state based on first toggleable block
-if (blocksToToggle.size === 0) break
-const firstToggleableId = Array.from(blocksToToggle)[0]
-const firstBlock = blocksById[firstToggleableId]
-if (!firstBlock) break
-const targetEnabled = !firstBlock.enabled
-// Update all affected blocks
-for (const blockId of blocksToToggle) {
+const blocks = await tx
+  .select({ id: workflowBlocks.id, enabled: workflowBlocks.enabled })
+  .from(workflowBlocks)
+  .where(and(eq(workflowBlocks.workflowId, workflowId), inArray(workflowBlocks.id, blockIds)))
+for (const block of blocks) {
   await tx
     .update(workflowBlocks)
     .set({
-      enabled: targetEnabled,
+      enabled: !block.enabled,
       updatedAt: new Date(),
     })
-    .where(and(eq(workflowBlocks.id, blockId), eq(workflowBlocks.workflowId, workflowId)))
+    .where(and(eq(workflowBlocks.id, block.id), eq(workflowBlocks.workflowId, workflowId)))
 }
-logger.debug(`Batch toggled enabled state for ${blocksToToggle.size} blocks`)
+logger.debug(`Batch toggled enabled state for ${blocks.length} blocks`)
 break
 }
@@ -868,118 +768,22 @@ async function handleBlocksOperationTx(
 logger.info(`Batch toggling handles for ${blockIds.length} blocks in workflow ${workflowId}`)
-// Fetch all blocks to check lock status and filter out protected blocks
-const allBlocks = await tx
-  .select({
-    id: workflowBlocks.id,
-    horizontalHandles: workflowBlocks.horizontalHandles,
-    locked: workflowBlocks.locked,
-    data: workflowBlocks.data,
-  })
-  .from(workflowBlocks)
-  .where(eq(workflowBlocks.workflowId, workflowId))
-type HandleBlockRecord = (typeof allBlocks)[number]
-const blocksById: Record<string, HandleBlockRecord> = Object.fromEntries(
-  allBlocks.map((b: HandleBlockRecord) => [b.id, b])
-)
-// Helper to check if a block is protected (locked or inside locked parent)
-const isProtected = (blockId: string): boolean => {
-  const block = blocksById[blockId]
-  if (!block) return false
-  if (block.locked) return true
-  const parentId = (block.data as Record<string, unknown> | null)?.parentId as
-    | string
-    | undefined
-  if (parentId && blocksById[parentId]?.locked) return true
-  return false
-}
-// Filter to only toggle handles on unprotected blocks
-const blocksToToggle = blockIds.filter((id) => blocksById[id] && !isProtected(id))
-if (blocksToToggle.length === 0) {
-  logger.info('All requested blocks are protected, skipping handles toggle')
-  break
-}
-for (const blockId of blocksToToggle) {
-  const block = blocksById[blockId]
+const blocks = await tx
+  .select({ id: workflowBlocks.id, horizontalHandles: workflowBlocks.horizontalHandles })
+  .from(workflowBlocks)
+  .where(and(eq(workflowBlocks.workflowId, workflowId), inArray(workflowBlocks.id, blockIds)))
+for (const block of blocks) {
   await tx
     .update(workflowBlocks)
     .set({
       horizontalHandles: !block.horizontalHandles,
       updatedAt: new Date(),
     })
-    .where(and(eq(workflowBlocks.id, blockId), eq(workflowBlocks.workflowId, workflowId)))
+    .where(and(eq(workflowBlocks.id, block.id), eq(workflowBlocks.workflowId, workflowId)))
 }
-logger.debug(`Batch toggled handles for ${blocksToToggle.length} blocks`)
-break
-}
-case BLOCKS_OPERATIONS.BATCH_TOGGLE_LOCKED: {
-  const { blockIds } = payload
-  if (!Array.isArray(blockIds) || blockIds.length === 0) {
-    return
-  }
-  logger.info(`Batch toggling locked for ${blockIds.length} blocks in workflow ${workflowId}`)
-  // Get all blocks in workflow to find children
-  const allBlocks = await tx
-    .select({
-      id: workflowBlocks.id,
-      locked: workflowBlocks.locked,
-      type: workflowBlocks.type,
-      data: workflowBlocks.data,
-    })
-    .from(workflowBlocks)
-    .where(eq(workflowBlocks.workflowId, workflowId))
-  type LockedBlockRecord = (typeof allBlocks)[number]
-  const blocksById: Record<string, LockedBlockRecord> = Object.fromEntries(
-    allBlocks.map((b: LockedBlockRecord) => [b.id, b])
-  )
-  const blocksToToggle = new Set<string>()
-  // Collect all blocks to toggle including children of containers
-  for (const id of blockIds) {
-    const block = blocksById[id]
-    if (!block) continue
-    blocksToToggle.add(id)
-    // If it's a loop or parallel, also include all children
-    if (block.type === 'loop' || block.type === 'parallel') {
-      for (const b of allBlocks) {
-        const parentId = (b.data as Record<string, unknown> | null)?.parentId
-        if (parentId === id) {
-          blocksToToggle.add(b.id)
-        }
-      }
-    }
-  }
-  // Determine target locked state based on first toggleable block
-  if (blocksToToggle.size === 0) break
-  const firstToggleableId = Array.from(blocksToToggle)[0]
-  const firstBlock = blocksById[firstToggleableId]
-  if (!firstBlock) break
-  const targetLocked = !firstBlock.locked
-  // Update all affected blocks
-  for (const blockId of blocksToToggle) {
-    await tx
-      .update(workflowBlocks)
-      .set({
-        locked: targetLocked,
-        updatedAt: new Date(),
-      })
-      .where(and(eq(workflowBlocks.id, blockId), eq(workflowBlocks.workflowId, workflowId)))
-  }
-  logger.debug(`Batch toggled locked for ${blocksToToggle.size} blocks`)
+logger.debug(`Batch toggled handles for ${blocks.length} blocks`)
 break
 }
@@ -991,54 +795,19 @@ async function handleBlocksOperationTx(
 logger.info(`Batch updating parent for ${updates.length} blocks in workflow ${workflowId}`)
-// Fetch all blocks to check lock status
-const allBlocks = await tx
-  .select({
-    id: workflowBlocks.id,
-    locked: workflowBlocks.locked,
-    data: workflowBlocks.data,
-  })
-  .from(workflowBlocks)
-  .where(eq(workflowBlocks.workflowId, workflowId))
-type ParentBlockRecord = (typeof allBlocks)[number]
-const blocksById: Record<string, ParentBlockRecord> = Object.fromEntries(
-  allBlocks.map((b: ParentBlockRecord) => [b.id, b])
-)
-// Helper to check if a block is protected (locked or inside locked parent)
-const isProtected = (blockId: string): boolean => {
-  const block = blocksById[blockId]
-  if (!block) return false
-  if (block.locked) return true
-  const currentParentId = (block.data as Record<string, unknown> | null)?.parentId as
-    | string
-    | undefined
-  if (currentParentId && blocksById[currentParentId]?.locked) return true
-  return false
-}
 for (const update of updates) {
   const { id, parentId, position } = update
   if (!id) continue
-  // Skip protected blocks (locked or inside locked container)
-  if (isProtected(id)) {
-    logger.info(`Skipping block ${id} parent update - block is protected`)
-    continue
-  }
-  // Skip if trying to move into a locked container
-  if (parentId && blocksById[parentId]?.locked) {
-    logger.info(`Skipping block ${id} parent update - target parent ${parentId} is locked`)
-    continue
-  }
   // Fetch current parent to update subflow node lists
-  const existing = blocksById[id]
-  const existingParentId = (existing?.data as Record<string, unknown> | null)?.parentId as
-    | string
-    | undefined
+  const [existing] = await tx
+    .select({
+      id: workflowBlocks.id,
+      parentId: sql<string | null>`${workflowBlocks.data}->>'parentId'`,
+    })
+    .from(workflowBlocks)
+    .where(and(eq(workflowBlocks.id, id), eq(workflowBlocks.workflowId, workflowId)))
+    .limit(1)
   if (!existing) {
     logger.warn(`Block ${id} not found for batch-update-parent`)
@@ -1083,8 +852,8 @@ async function handleBlocksOperationTx(
   await updateSubflowNodeList(tx, workflowId, parentId)
 }
 // If the block had a previous parent, update that parent's node list as well
-if (existingParentId && existingParentId !== parentId) {
-  await updateSubflowNodeList(tx, workflowId, existingParentId)
+if (existing?.parentId && existing.parentId !== parentId) {
+  await updateSubflowNodeList(tx, workflowId, existing.parentId)
 }
 }
@@ -1429,7 +1198,6 @@ async function handleWorkflowOperationTx(
   advancedMode: block.advancedMode ?? false,
   triggerMode: block.triggerMode ?? false,
   height: block.height || 0,
-  locked: block.locked ?? false,
 }))
 await tx.insert(workflowBlocks).values(blockValues)

View File

@@ -214,12 +214,6 @@ describe('checkRolePermission', () => {
   readAllowed: false,
 },
 { operation: 'toggle-handles', adminAllowed: true, writeAllowed: true, readAllowed: false },
-{
-  operation: 'batch-toggle-locked',
-  adminAllowed: true,
-  writeAllowed: false, // Admin-only operation
-  readAllowed: false,
-},
 {
   operation: 'batch-update-positions',
   adminAllowed: true,

View File

@@ -14,10 +14,7 @@ import {
 const logger = createLogger('SocketPermissions')
-// Admin-only operations (require admin role)
-const ADMIN_ONLY_OPERATIONS: string[] = [BLOCKS_OPERATIONS.BATCH_TOGGLE_LOCKED]
-// Write operations (admin and write roles both have these permissions)
+// All write operations (admin and write roles have same permissions)
 const WRITE_OPERATIONS: string[] = [
   // Block operations
   BLOCK_OPERATIONS.UPDATE_POSITION,
@@ -54,7 +51,7 @@ const READ_OPERATIONS: string[] = [
 // Define operation permissions based on role
 const ROLE_PERMISSIONS: Record<string, string[]> = {
-  admin: [...ADMIN_ONLY_OPERATIONS, ...WRITE_OPERATIONS],
+  admin: WRITE_OPERATIONS,
   write: WRITE_OPERATIONS,
   read: READ_OPERATIONS,
 }
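
For context on how this table is consumed, a sketch of the lookup; checkRolePermission is the name exercised by the test file above, but this body is an assumption, not the actual implementation:

function checkRolePermission(role: string, operation: string): boolean {
  // Unknown roles resolve to an empty permission set
  return (ROLE_PERMISSIONS[role] ?? []).includes(operation)
}

After this change the admin and write arrays are identical, so checkRolePermission('admin', op) and checkRolePermission('write', op) agree for every block operation.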

View File

@@ -208,17 +208,6 @@ export const BatchToggleHandlesSchema = z.object({
   operationId: z.string().optional(),
 })
-export const BatchToggleLockedSchema = z.object({
-  operation: z.literal(BLOCKS_OPERATIONS.BATCH_TOGGLE_LOCKED),
-  target: z.literal(OPERATION_TARGETS.BLOCKS),
-  payload: z.object({
-    blockIds: z.array(z.string()),
-    previousStates: z.record(z.boolean()),
-  }),
-  timestamp: z.number(),
-  operationId: z.string().optional(),
-})
 export const BatchUpdateParentSchema = z.object({
   operation: z.literal(BLOCKS_OPERATIONS.BATCH_UPDATE_PARENT),
   target: z.literal(OPERATION_TARGETS.BLOCKS),
@@ -242,7 +231,6 @@ export const WorkflowOperationSchema = z.union([
   BatchRemoveBlocksSchema,
   BatchToggleEnabledSchema,
   BatchToggleHandlesSchema,
-  BatchToggleLockedSchema,
   BatchUpdateParentSchema,
   EdgeOperationSchema,
   BatchAddEdgesSchema,

View File

@@ -97,14 +97,6 @@ export interface BatchToggleHandlesOperation extends BaseOperation {
   }
 }
-export interface BatchToggleLockedOperation extends BaseOperation {
-  type: typeof UNDO_REDO_OPERATIONS.BATCH_TOGGLE_LOCKED
-  data: {
-    blockIds: string[]
-    previousStates: Record<string, boolean>
-  }
-}
 export interface ApplyDiffOperation extends BaseOperation {
   type: typeof UNDO_REDO_OPERATIONS.APPLY_DIFF
   data: {
@@ -144,7 +136,6 @@ export type Operation =
   | BatchUpdateParentOperation
   | BatchToggleEnabledOperation
   | BatchToggleHandlesOperation
-  | BatchToggleLockedOperation
   | ApplyDiffOperation
   | AcceptDiffOperation
   | RejectDiffOperation

View File

@@ -167,15 +167,6 @@ export function createInverseOperation(operation: Operation): Operation {
   },
 }
-case UNDO_REDO_OPERATIONS.BATCH_TOGGLE_LOCKED:
-  return {
-    ...operation,
-    data: {
-      blockIds: operation.data.blockIds,
-      previousStates: operation.data.previousStates,
-    },
-  }
 default: {
   const exhaustiveCheck: never = operation
   throw new Error(`Unhandled operation type: ${(exhaustiveCheck as Operation).type}`)

View File

@@ -432,104 +432,4 @@ describe('regenerateBlockIds', () => {
  expect(duplicatedBlock.position).toEqual({ x: 280, y: 70 })
  expect(duplicatedBlock.data?.parentId).toBe(loopId)
})
it('should unlock pasted block when source is locked', () => {
const blockId = 'block-1'
const blocksToCopy = {
[blockId]: createAgentBlock({
id: blockId,
name: 'Locked Agent',
position: { x: 100, y: 50 },
locked: true,
}),
}
const result = regenerateBlockIds(
blocksToCopy,
[],
{},
{},
{},
positionOffset,
{},
getUniqueBlockName
)
const newBlocks = Object.values(result.blocks)
expect(newBlocks).toHaveLength(1)
// Pasted blocks are always unlocked so users can edit them
const pastedBlock = newBlocks[0]
expect(pastedBlock.locked).toBe(false)
})
it('should keep pasted block unlocked when source is unlocked', () => {
const blockId = 'block-1'
const blocksToCopy = {
[blockId]: createAgentBlock({
id: blockId,
name: 'Unlocked Agent',
position: { x: 100, y: 50 },
locked: false,
}),
}
const result = regenerateBlockIds(
blocksToCopy,
[],
{},
{},
{},
positionOffset,
{},
getUniqueBlockName
)
const newBlocks = Object.values(result.blocks)
expect(newBlocks).toHaveLength(1)
const pastedBlock = newBlocks[0]
expect(pastedBlock.locked).toBe(false)
})
it('should unlock all pasted blocks regardless of source locked state', () => {
const lockedId = 'locked-1'
const unlockedId = 'unlocked-1'
const blocksToCopy = {
[lockedId]: createAgentBlock({
id: lockedId,
name: 'Originally Locked Agent',
position: { x: 100, y: 50 },
locked: true,
}),
[unlockedId]: createFunctionBlock({
id: unlockedId,
name: 'Originally Unlocked Function',
position: { x: 200, y: 50 },
locked: false,
}),
}
const result = regenerateBlockIds(
blocksToCopy,
[],
{},
{},
{},
positionOffset,
{},
getUniqueBlockName
)
const newBlocks = Object.values(result.blocks)
expect(newBlocks).toHaveLength(2)
// All pasted blocks should be unlocked so users can edit them
for (const block of newBlocks) {
expect(block.locked).toBe(false)
}
})
})

View File

@@ -203,7 +203,6 @@ export function prepareBlockState(options: PrepareBlockStateOptions): BlockState
   advancedMode: false,
   triggerMode,
   height: 0,
-  locked: false,
 }
 }
@@ -482,8 +481,6 @@ export function regenerateBlockIds(
   position: newPosition,
   // Temporarily keep data as-is, we'll fix parentId in second pass
   data: block.data ? { ...block.data } : block.data,
-  // Duplicated blocks are always unlocked so users can edit them
-  locked: false,
 }
 newBlocks[newId] = newBlock
@@ -511,15 +508,15 @@ export function regenerateBlockIds(
   parentId: newParentId,
   extent: 'parent',
 }
-} else if (existingBlockNames[oldParentId] && !existingBlockNames[oldParentId].locked) {
-  // Parent exists in existing workflow and is not locked - keep original parentId
+} else if (existingBlockNames[oldParentId]) {
+  // Parent exists in existing workflow - keep original parentId (block stays in same subflow)
   block.data = {
     ...block.data,
     parentId: oldParentId,
     extent: 'parent',
   }
 } else {
-  // Parent doesn't exist anywhere OR parent is locked - clear the relationship
+  // Parent doesn't exist anywhere - clear the relationship
   block.data = { ...block.data, parentId: undefined, extent: undefined }
 }
 }

View File

@@ -1144,223 +1144,6 @@ describe('workflow store', () => {
})
})
describe('batchToggleLocked', () => {
it('should toggle block locked state', () => {
const { batchToggleLocked } = useWorkflowStore.getState()
addBlock('block-1', 'function', 'Test', { x: 0, y: 0 })
// Initial state is undefined (falsy)
expect(useWorkflowStore.getState().blocks['block-1'].locked).toBeFalsy()
batchToggleLocked(['block-1'])
expect(useWorkflowStore.getState().blocks['block-1'].locked).toBe(true)
batchToggleLocked(['block-1'])
expect(useWorkflowStore.getState().blocks['block-1'].locked).toBe(false)
})
it('should cascade lock to children when locking a loop', () => {
const { batchToggleLocked } = useWorkflowStore.getState()
addBlock('loop-1', 'loop', 'My Loop', { x: 0, y: 0 }, { loopType: 'for', count: 3 })
addBlock(
'child-1',
'function',
'Child',
{ x: 50, y: 50 },
{ parentId: 'loop-1' },
'loop-1',
'parent'
)
batchToggleLocked(['loop-1'])
const { blocks } = useWorkflowStore.getState()
expect(blocks['loop-1'].locked).toBe(true)
expect(blocks['child-1'].locked).toBe(true)
})
it('should cascade unlock to children when unlocking a parallel', () => {
const { batchToggleLocked } = useWorkflowStore.getState()
addBlock('parallel-1', 'parallel', 'My Parallel', { x: 0, y: 0 }, { count: 3 })
addBlock(
'child-1',
'function',
'Child',
{ x: 50, y: 50 },
{ parentId: 'parallel-1' },
'parallel-1',
'parent'
)
// Lock first
batchToggleLocked(['parallel-1'])
expect(useWorkflowStore.getState().blocks['child-1'].locked).toBe(true)
// Unlock
batchToggleLocked(['parallel-1'])
const { blocks } = useWorkflowStore.getState()
expect(blocks['parallel-1'].locked).toBe(false)
expect(blocks['child-1'].locked).toBe(false)
})
it('should toggle multiple blocks at once', () => {
const { batchToggleLocked } = useWorkflowStore.getState()
addBlock('block-1', 'function', 'Test 1', { x: 0, y: 0 })
addBlock('block-2', 'function', 'Test 2', { x: 100, y: 0 })
batchToggleLocked(['block-1', 'block-2'])
const { blocks } = useWorkflowStore.getState()
expect(blocks['block-1'].locked).toBe(true)
expect(blocks['block-2'].locked).toBe(true)
})
})
describe('setBlockLocked', () => {
it('should set block locked state', () => {
const { setBlockLocked } = useWorkflowStore.getState()
addBlock('block-1', 'function', 'Test', { x: 0, y: 0 })
setBlockLocked('block-1', true)
expect(useWorkflowStore.getState().blocks['block-1'].locked).toBe(true)
setBlockLocked('block-1', false)
expect(useWorkflowStore.getState().blocks['block-1'].locked).toBe(false)
})
it('should not update if locked state is already the target value', () => {
const { setBlockLocked } = useWorkflowStore.getState()
addBlock('block-1', 'function', 'Test', { x: 0, y: 0 })
// First set to true
setBlockLocked('block-1', true)
expect(useWorkflowStore.getState().blocks['block-1'].locked).toBe(true)
// Setting to true again should still be true
setBlockLocked('block-1', true)
expect(useWorkflowStore.getState().blocks['block-1'].locked).toBe(true)
})
})
describe('duplicateBlock with locked', () => {
it('should unlock duplicate when duplicating a locked block', () => {
const { setBlockLocked, duplicateBlock } = useWorkflowStore.getState()
addBlock('original', 'agent', 'Original Agent', { x: 0, y: 0 })
setBlockLocked('original', true)
expect(useWorkflowStore.getState().blocks.original.locked).toBe(true)
duplicateBlock('original')
const { blocks } = useWorkflowStore.getState()
const blockIds = Object.keys(blocks)
expect(blockIds.length).toBe(2)
const duplicatedId = blockIds.find((id) => id !== 'original')
expect(duplicatedId).toBeDefined()
if (duplicatedId) {
// Original should still be locked
expect(blocks.original.locked).toBe(true)
// Duplicate should be unlocked so users can edit it
expect(blocks[duplicatedId].locked).toBe(false)
}
})
it('should create unlocked duplicate when duplicating an unlocked block', () => {
const { duplicateBlock } = useWorkflowStore.getState()
addBlock('original', 'agent', 'Original Agent', { x: 0, y: 0 })
duplicateBlock('original')
const { blocks } = useWorkflowStore.getState()
const blockIds = Object.keys(blocks)
const duplicatedId = blockIds.find((id) => id !== 'original')
if (duplicatedId) {
expect(blocks[duplicatedId].locked).toBeFalsy()
}
})
it('should place duplicate outside locked container when duplicating block inside locked loop', () => {
const { batchToggleLocked, duplicateBlock } = useWorkflowStore.getState()
// Create a loop with a child block
addBlock('loop-1', 'loop', 'My Loop', { x: 0, y: 0 }, { loopType: 'for', count: 3 })
addBlock(
'child-1',
'function',
'Child',
{ x: 50, y: 50 },
{ parentId: 'loop-1' },
'loop-1',
'parent'
)
// Lock the loop (which cascades to the child)
batchToggleLocked(['loop-1'])
expect(useWorkflowStore.getState().blocks['child-1'].locked).toBe(true)
// Duplicate the child block
duplicateBlock('child-1')
const { blocks } = useWorkflowStore.getState()
const blockIds = Object.keys(blocks)
expect(blockIds.length).toBe(3) // loop, original child, duplicate
const duplicatedId = blockIds.find((id) => id !== 'loop-1' && id !== 'child-1')
expect(duplicatedId).toBeDefined()
if (duplicatedId) {
// Duplicate should be unlocked
expect(blocks[duplicatedId].locked).toBe(false)
// Duplicate should NOT have a parentId (placed outside the locked container)
expect(blocks[duplicatedId].data?.parentId).toBeUndefined()
// Original should still be inside the loop
expect(blocks['child-1'].data?.parentId).toBe('loop-1')
}
})
it('should keep duplicate inside unlocked container when duplicating block inside unlocked loop', () => {
const { duplicateBlock } = useWorkflowStore.getState()
// Create a loop with a child block (not locked)
addBlock('loop-1', 'loop', 'My Loop', { x: 0, y: 0 }, { loopType: 'for', count: 3 })
addBlock(
'child-1',
'function',
'Child',
{ x: 50, y: 50 },
{ parentId: 'loop-1' },
'loop-1',
'parent'
)
// Duplicate the child block (loop is NOT locked)
duplicateBlock('child-1')
const { blocks } = useWorkflowStore.getState()
const blockIds = Object.keys(blocks)
const duplicatedId = blockIds.find((id) => id !== 'loop-1' && id !== 'child-1')
if (duplicatedId) {
// Duplicate should still be inside the loop since it's not locked
expect(blocks[duplicatedId].data?.parentId).toBe('loop-1')
}
})
})
describe('updateBlockName', () => {
  beforeEach(() => {
    useWorkflowStore.setState({

View File

@@ -207,7 +207,6 @@ export const useWorkflowStore = create<WorkflowStore>()(
   triggerMode?: boolean
   height?: number
   data?: Record<string, any>
-  locked?: boolean
 }>,
 edges?: Edge[],
 subBlockValues?: Record<string, Record<string, unknown>>,
@@ -232,7 +231,6 @@ export const useWorkflowStore = create<WorkflowStore>()(
   triggerMode: block.triggerMode ?? false,
   height: block.height ?? 0,
   data: block.data,
-  locked: block.locked ?? false,
 }
 }
@@ -367,71 +365,26 @@ export const useWorkflowStore = create<WorkflowStore>()(
 },
 batchToggleEnabled: (ids: string[]) => {
-  if (ids.length === 0) return
-  const currentBlocks = get().blocks
-  const newBlocks = { ...currentBlocks }
-  const blocksToToggle = new Set<string>()
-  // For each ID, collect blocks to toggle (skip locked blocks entirely)
-  // If it's a container, also include non-locked children
+  const newBlocks = { ...get().blocks }
   for (const id of ids) {
-    const block = currentBlocks[id]
-    if (!block) continue
-    // Skip locked blocks entirely (including their children)
-    if (block.locked) continue
-    blocksToToggle.add(id)
-    // If it's a loop or parallel, also include non-locked children
-    if (block.type === 'loop' || block.type === 'parallel') {
-      Object.entries(currentBlocks).forEach(([blockId, b]) => {
-        if (b.data?.parentId === id && !b.locked) {
-          blocksToToggle.add(blockId)
-        }
-      })
+    if (newBlocks[id]) {
+      newBlocks[id] = { ...newBlocks[id], enabled: !newBlocks[id].enabled }
     }
   }
-  // If no blocks can be toggled, exit early
-  if (blocksToToggle.size === 0) return
-  // Determine target enabled state based on first toggleable block
-  const firstToggleableId = Array.from(blocksToToggle)[0]
-  const firstBlock = currentBlocks[firstToggleableId]
-  const targetEnabled = !firstBlock.enabled
-  // Apply the enabled state to all toggleable blocks
-  for (const blockId of blocksToToggle) {
-    newBlocks[blockId] = { ...newBlocks[blockId], enabled: targetEnabled }
-  }
   set({ blocks: newBlocks, edges: [...get().edges] })
   get().updateLastSaved()
 },
 batchToggleHandles: (ids: string[]) => {
-  const currentBlocks = get().blocks
-  const newBlocks = { ...currentBlocks }
-  // Helper to check if a block is protected (locked or inside locked parent)
-  const isProtected = (blockId: string): boolean => {
-    const block = currentBlocks[blockId]
-    if (!block) return false
-    if (block.locked) return true
-    const parentId = block.data?.parentId
-    if (parentId && currentBlocks[parentId]?.locked) return true
-    return false
-  }
+  const newBlocks = { ...get().blocks }
   for (const id of ids) {
-    if (!newBlocks[id] || isProtected(id)) continue
-    newBlocks[id] = {
-      ...newBlocks[id],
-      horizontalHandles: !newBlocks[id].horizontalHandles,
+    if (newBlocks[id]) {
+      newBlocks[id] = {
+        ...newBlocks[id],
+        horizontalHandles: !newBlocks[id].horizontalHandles,
+      }
     }
   }
   set({ blocks: newBlocks, edges: [...get().edges] })
   get().updateLastSaved()
 },
@@ -574,34 +527,10 @@ export const useWorkflowStore = create<WorkflowStore>()(
 if (!block) return
 const newId = crypto.randomUUID()
-// Check if block is inside a locked container - if so, place duplicate outside
-const parentId = block.data?.parentId
-const parentBlock = parentId ? get().blocks[parentId] : undefined
-const isParentLocked = parentBlock?.locked ?? false
-// If parent is locked, calculate position outside the container
-let offsetPosition: Position
-const newData = block.data ? { ...block.data } : undefined
-if (isParentLocked && parentBlock) {
-  // Place duplicate outside the locked container (to the right of it)
-  const containerWidth = parentBlock.data?.width ?? 400
-  offsetPosition = {
-    x: parentBlock.position.x + containerWidth + 50,
-    y: parentBlock.position.y,
-  }
-  // Remove parent relationship since we're placing outside
-  if (newData) {
-    newData.parentId = undefined
-    newData.extent = undefined
-  }
-} else {
-  offsetPosition = {
+const offsetPosition = {
   x: block.position.x + DEFAULT_DUPLICATE_OFFSET.x,
   y: block.position.y + DEFAULT_DUPLICATE_OFFSET.y,
 }
-}
 const newName = getUniqueBlockName(block.name, get().blocks)
@@ -628,8 +557,6 @@ export const useWorkflowStore = create<WorkflowStore>()(
   name: newName,
   position: offsetPosition,
   subBlocks: newSubBlocks,
-  locked: false,
-  data: newData,
 },
 },
 edges: [...get().edges],
@@ -1237,70 +1164,6 @@ export const useWorkflowStore = create<WorkflowStore>()(
getDragStartPosition: () => {
  return get().dragStartPosition || null
},
setBlockLocked: (id: string, locked: boolean) => {
const block = get().blocks[id]
if (!block || block.locked === locked) return
const newState = {
blocks: {
...get().blocks,
[id]: {
...block,
locked,
},
},
edges: [...get().edges],
loops: { ...get().loops },
parallels: { ...get().parallels },
}
set(newState)
get().updateLastSaved()
},
batchToggleLocked: (ids: string[]) => {
if (ids.length === 0) return
const currentBlocks = get().blocks
const newBlocks = { ...currentBlocks }
const blocksToToggle = new Set<string>()
// For each ID, collect blocks to toggle
// If it's a container, also include all children
for (const id of ids) {
const block = currentBlocks[id]
if (!block) continue
blocksToToggle.add(id)
// If it's a loop or parallel, also include all children
if (block.type === 'loop' || block.type === 'parallel') {
Object.entries(currentBlocks).forEach(([blockId, b]) => {
if (b.data?.parentId === id) {
blocksToToggle.add(blockId)
}
})
}
}
// If no blocks found, exit early
if (blocksToToggle.size === 0) return
// Determine target locked state based on first block in original ids
const firstBlock = currentBlocks[ids[0]]
if (!firstBlock) return
const targetLocked = !firstBlock.locked
// Apply the locked state to all blocks
for (const blockId of blocksToToggle) {
newBlocks[blockId] = { ...newBlocks[blockId], locked: targetLocked }
}
set({ blocks: newBlocks, edges: [...get().edges] })
get().updateLastSaved()
},
}),
{ name: 'workflow-store' }
)

View File

@@ -87,7 +87,6 @@ export interface BlockState {
   triggerMode?: boolean
   data?: BlockData
   layout?: BlockLayoutState
-  locked?: boolean
 }
 export interface SubBlockState {
@@ -132,7 +131,6 @@ export interface Loop {
   whileCondition?: string // JS expression that evaluates to boolean (for while loops)
   doWhileCondition?: string // JS expression that evaluates to boolean (for do-while loops)
   enabled: boolean
-  locked?: boolean
 }
 export interface Parallel {
@@ -142,7 +140,6 @@ export interface Parallel {
   count?: number // Number of parallel executions for count-based parallel
   parallelType?: 'count' | 'collection' // Explicit parallel type to avoid inference bugs
   enabled: boolean
-  locked?: boolean
 }
 export interface Variable {
@@ -236,8 +233,6 @@ export interface WorkflowActions {
   workflowState: WorkflowState,
   options?: { updateLastSaved?: boolean }
 ) => void
-setBlockLocked: (id: string, locked: boolean) => void
-batchToggleLocked: (ids: string[]) => void
 }
 export type WorkflowStore = WorkflowState & WorkflowActions

View File

@@ -1,8 +1,9 @@
+import { DEFAULT_EXECUTION_TIMEOUT_MS } from '@/lib/core/execution-limits'
 import type { RunActorParams, RunActorResult } from '@/tools/apify/types'
 import type { ToolConfig } from '@/tools/types'
-const POLL_INTERVAL_MS = 5000 // 5 seconds between polls
-const MAX_POLL_TIME_MS = 300000 // 5 minutes maximum polling time
+const POLL_INTERVAL_MS = 5000
+const MAX_POLL_TIME_MS = DEFAULT_EXECUTION_TIMEOUT_MS
 export const apifyRunActorAsyncTool: ToolConfig<RunActorParams, RunActorResult> = {
   id: 'apify_run_actor_async',
id: 'apify_run_actor_async', id: 'apify_run_actor_async',

View File

@@ -1,11 +1,12 @@
 import { createLogger } from '@sim/logger'
+import { DEFAULT_EXECUTION_TIMEOUT_MS } from '@/lib/core/execution-limits'
 import type { BrowserUseRunTaskParams, BrowserUseRunTaskResponse } from '@/tools/browser_use/types'
 import type { ToolConfig, ToolResponse } from '@/tools/types'
 const logger = createLogger('BrowserUseTool')
 const POLL_INTERVAL_MS = 5000
-const MAX_POLL_TIME_MS = 600000 // 10 minutes
+const MAX_POLL_TIME_MS = DEFAULT_EXECUTION_TIMEOUT_MS
 const MAX_CONSECUTIVE_ERRORS = 3
 async function createSessionWithProfile(
async function createSessionWithProfile( async function createSessionWithProfile(

View File

@@ -0,0 +1,55 @@
import type { EnrichCheckCreditsParams, EnrichCheckCreditsResponse } from '@/tools/enrich/types'
import type { ToolConfig } from '@/tools/types'
export const checkCreditsTool: ToolConfig<EnrichCheckCreditsParams, EnrichCheckCreditsResponse> = {
id: 'enrich_check_credits',
name: 'Enrich Check Credits',
description: 'Check your Enrich API credit usage and remaining balance.',
version: '1.0.0',
params: {
apiKey: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'Enrich API key',
},
},
request: {
url: 'https://api.enrich.so/v1/api/auth',
method: 'GET',
headers: (params) => ({
Authorization: `Bearer ${params.apiKey}`,
'Content-Type': 'application/json',
}),
},
transformResponse: async (response: Response) => {
const data = await response.json()
return {
success: true,
output: {
totalCredits: data.total_credits ?? 0,
creditsUsed: data.credits_used ?? 0,
creditsRemaining: data.credits_remaining ?? 0,
},
}
},
outputs: {
totalCredits: {
type: 'number',
description: 'Total credits allocated to the account',
},
creditsUsed: {
type: 'number',
description: 'Credits consumed so far',
},
creditsRemaining: {
type: 'number',
description: 'Available credits remaining',
},
},
}
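
A sketch of exercising this tool config by hand; in the app the executor drives request/transformResponse, so the manual wiring below is illustrative rather than the actual call path:

async function runCheckCredits(apiKey: string) {
  const response = await fetch('https://api.enrich.so/v1/api/auth', {
    method: 'GET',
    headers: { Authorization: `Bearer ${apiKey}`, 'Content-Type': 'application/json' },
  })
  // transformResponse maps the API's snake_case fields onto the tool outputs:
  // { success: true, output: { totalCredits, creditsUsed, creditsRemaining } }
  return checkCreditsTool.transformResponse?.(response)
}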

View File

@@ -0,0 +1,143 @@
import type { EnrichCompanyFundingParams, EnrichCompanyFundingResponse } from '@/tools/enrich/types'
import type { ToolConfig } from '@/tools/types'
export const companyFundingTool: ToolConfig<
EnrichCompanyFundingParams,
EnrichCompanyFundingResponse
> = {
id: 'enrich_company_funding',
name: 'Enrich Company Funding',
description:
'Retrieve company funding history, traffic metrics, and executive information by domain.',
version: '1.0.0',
params: {
apiKey: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'Enrich API key',
},
domain: {
type: 'string',
required: true,
visibility: 'user-or-llm',
description: 'Company domain (e.g., example.com)',
},
},
request: {
url: (params) => {
const url = new URL('https://api.enrich.so/v1/api/company-funding-plus')
url.searchParams.append('domain', params.domain.trim())
return url.toString()
},
method: 'GET',
headers: (params) => ({
Authorization: `Bearer ${params.apiKey}`,
'Content-Type': 'application/json',
}),
},
transformResponse: async (response: Response) => {
const data = await response.json()
const resultData = data.data ?? data
const fundingRounds =
(resultData.fundingRounds ?? resultData.funding_rounds)?.map((round: any) => ({
roundType: round.roundType ?? round.round_type ?? '',
amount: round.amount ?? null,
date: round.date ?? null,
investors: round.investors ?? [],
})) ?? []
const executives = (resultData.executives ?? []).map((exec: any) => ({
name: exec.name ?? exec.fullName ?? '',
title: exec.title ?? '',
}))
return {
success: true,
output: {
legalName: resultData.legalName ?? resultData.legal_name ?? null,
employeeCount: resultData.employeeCount ?? resultData.employee_count ?? null,
headquarters: resultData.headquarters ?? null,
industry: resultData.industry ?? null,
totalFundingRaised:
resultData.totalFundingRaised ?? resultData.total_funding_raised ?? null,
fundingRounds,
monthlyVisits: resultData.monthlyVisits ?? resultData.monthly_visits ?? null,
trafficChange: resultData.trafficChange ?? resultData.traffic_change ?? null,
itSpending: resultData.itSpending ?? resultData.it_spending ?? null,
executives,
},
}
},
outputs: {
legalName: {
type: 'string',
description: 'Legal company name',
optional: true,
},
employeeCount: {
type: 'number',
description: 'Number of employees',
optional: true,
},
headquarters: {
type: 'string',
description: 'Headquarters location',
optional: true,
},
industry: {
type: 'string',
description: 'Industry',
optional: true,
},
totalFundingRaised: {
type: 'number',
description: 'Total funding raised',
optional: true,
},
fundingRounds: {
type: 'array',
description: 'Funding rounds',
items: {
type: 'object',
properties: {
roundType: { type: 'string', description: 'Round type' },
amount: { type: 'number', description: 'Amount raised' },
date: { type: 'string', description: 'Date' },
investors: { type: 'array', description: 'Investors' },
},
},
},
monthlyVisits: {
type: 'number',
description: 'Monthly website visits',
optional: true,
},
trafficChange: {
type: 'number',
description: 'Traffic change percentage',
optional: true,
},
itSpending: {
type: 'number',
description: 'Estimated IT spending in USD',
optional: true,
},
executives: {
type: 'array',
description: 'Executive team',
items: {
type: 'object',
properties: {
name: { type: 'string', description: 'Name' },
title: { type: 'string', description: 'Title' },
},
},
},
},
}
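
The transform above reads each field through chained ?? fallbacks so that both camelCase and snake_case payloads are accepted. A generic picker, sketched below as a hypothetical refactor rather than anything in this diff, expresses that pattern once:

// Hypothetical helper generalizing the `camelCase ?? snake_case ?? null` fallbacks above.
function pick<T>(obj: Record<string, any>, ...keys: string[]): T | null {
  for (const key of keys) {
    const value = obj?.[key]
    if (value !== undefined && value !== null) return value as T
  }
  return null
}
// Usage: pick<number>(resultData, 'employeeCount', 'employee_count')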


@@ -0,0 +1,197 @@
import type { EnrichCompanyLookupParams, EnrichCompanyLookupResponse } from '@/tools/enrich/types'
import type { ToolConfig } from '@/tools/types'
export const companyLookupTool: ToolConfig<EnrichCompanyLookupParams, EnrichCompanyLookupResponse> =
{
id: 'enrich_company_lookup',
name: 'Enrich Company Lookup',
description:
'Look up comprehensive company information by name or domain including funding, location, and social profiles.',
version: '1.0.0',
params: {
apiKey: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'Enrich API key',
},
name: {
type: 'string',
required: false,
visibility: 'user-or-llm',
description: 'Company name (e.g., Google)',
},
domain: {
type: 'string',
required: false,
visibility: 'user-or-llm',
description: 'Company domain (e.g., google.com)',
},
},
request: {
url: (params) => {
const url = new URL('https://api.enrich.so/v1/api/company')
if (params.name) {
url.searchParams.append('name', params.name.trim())
}
if (params.domain) {
url.searchParams.append('domain', params.domain.trim())
}
return url.toString()
},
method: 'GET',
headers: (params) => ({
Authorization: `Bearer ${params.apiKey}`,
'Content-Type': 'application/json',
}),
},
transformResponse: async (response: Response) => {
const data = await response.json()
const fundingRounds =
data.fundingData?.map((round: any) => ({
roundType: round.fundingRound ?? '',
amount: round.moneyRaised?.amount ?? null,
currency: round.moneyRaised?.currency ?? null,
investors: round.investors ?? [],
})) ?? []
return {
success: true,
output: {
name: data.name ?? null,
universalName: data.universal_name ?? null,
companyId: data.company_id ?? null,
description: data.description ?? null,
phone: data.phone ?? null,
linkedInUrl: data.url ?? null,
websiteUrl: data.website ?? null,
followers: data.followers ?? null,
staffCount: data.staffCount ?? null,
foundedDate: data.founded ?? null,
type: data.type ?? null,
industries: data.industries ?? [],
specialties: data.specialities ?? [],
headquarters: {
city: data.headquarter?.city ?? null,
country: data.headquarter?.country ?? null,
postalCode: data.headquarter?.postalCode ?? null,
line1: data.headquarter?.line1 ?? null,
},
logo: data.logo ?? null,
coverImage: data.cover ?? null,
fundingRounds,
},
}
},
outputs: {
name: {
type: 'string',
description: 'Company name',
optional: true,
},
universalName: {
type: 'string',
description: 'Universal company name',
optional: true,
},
companyId: {
type: 'string',
description: 'Company ID',
optional: true,
},
description: {
type: 'string',
description: 'Company description',
optional: true,
},
phone: {
type: 'string',
description: 'Phone number',
optional: true,
},
linkedInUrl: {
type: 'string',
description: 'LinkedIn company URL',
optional: true,
},
websiteUrl: {
type: 'string',
description: 'Company website',
optional: true,
},
followers: {
type: 'number',
description: 'Number of LinkedIn followers',
optional: true,
},
staffCount: {
type: 'number',
description: 'Number of employees',
optional: true,
},
foundedDate: {
type: 'string',
description: 'Date founded',
optional: true,
},
type: {
type: 'string',
description: 'Company type',
optional: true,
},
industries: {
type: 'array',
description: 'Industries',
items: {
type: 'string',
description: 'Industry',
},
},
specialties: {
type: 'array',
description: 'Company specialties',
items: {
type: 'string',
description: 'Specialty',
},
},
headquarters: {
type: 'json',
description: 'Headquarters location',
properties: {
city: { type: 'string', description: 'City' },
country: { type: 'string', description: 'Country' },
postalCode: { type: 'string', description: 'Postal code' },
line1: { type: 'string', description: 'Address line 1' },
},
},
logo: {
type: 'string',
description: 'Company logo URL',
optional: true,
},
coverImage: {
type: 'string',
description: 'Cover image URL',
optional: true,
},
fundingRounds: {
type: 'array',
description: 'Funding history',
items: {
type: 'object',
properties: {
roundType: { type: 'string', description: 'Funding round type' },
amount: { type: 'number', description: 'Amount raised' },
currency: { type: 'string', description: 'Currency' },
investors: { type: 'array', description: 'Investors' },
},
},
},
},
}
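
Note that name and domain are both optional, so the URL builder above will happily emit a query string containing neither. If callers should fail fast instead, a guard along these lines (hypothetical; the tool config itself does not enforce it) would do:

// Hypothetical pre-flight check, not part of this diff.
function assertLookupParams(params: { name?: string; domain?: string }): void {
  if (!params.name?.trim() && !params.domain?.trim()) {
    throw new Error('enrich_company_lookup needs a company name or a domain')
  }
}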


@@ -0,0 +1,215 @@
import type { EnrichCompanyRevenueParams, EnrichCompanyRevenueResponse } from '@/tools/enrich/types'
import type { ToolConfig } from '@/tools/types'
export const companyRevenueTool: ToolConfig<
EnrichCompanyRevenueParams,
EnrichCompanyRevenueResponse
> = {
id: 'enrich_company_revenue',
name: 'Enrich Company Revenue',
description:
'Retrieve company revenue data, CEO information, and competitive analysis by domain.',
version: '1.0.0',
params: {
apiKey: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'Enrich API key',
},
domain: {
type: 'string',
required: true,
visibility: 'user-or-llm',
description: 'Company domain (e.g., clay.io)',
},
},
request: {
url: (params) => {
const url = new URL('https://api.enrich.so/v1/api/company-revenue-plus')
url.searchParams.append('domain', params.domain.trim())
return url.toString()
},
method: 'GET',
headers: (params) => ({
Authorization: `Bearer ${params.apiKey}`,
'Content-Type': 'application/json',
}),
},
transformResponse: async (response: Response) => {
const data = await response.json()
const competitors =
data.competitors?.map((comp: any) => ({
name: comp.name ?? '',
revenue: comp.revenue ?? null,
employeeCount: comp.employee_count ?? comp.employeeCount ?? null,
headquarters: comp.headquarters ?? null,
})) ?? []
// Handle socialLinks as array [{type, url}] or object {linkedIn, twitter, facebook}
const socialLinksArray = data.socialLinks ?? data.social_links
let socialLinks = {
linkedIn: null as string | null,
twitter: null as string | null,
facebook: null as string | null,
}
if (Array.isArray(socialLinksArray)) {
for (const link of socialLinksArray) {
const linkType = (link.type ?? '').toLowerCase()
if (linkType === 'linkedin') socialLinks.linkedIn = link.url ?? null
else if (linkType === 'twitter') socialLinks.twitter = link.url ?? null
else if (linkType === 'facebook') socialLinks.facebook = link.url ?? null
}
} else if (socialLinksArray && typeof socialLinksArray === 'object') {
socialLinks = {
linkedIn: socialLinksArray.linkedIn ?? socialLinksArray.linkedin ?? null,
twitter: socialLinksArray.twitter ?? null,
facebook: socialLinksArray.facebook ?? null,
}
}
// Handle fundingRounds as array or number
const fundingRoundsData = data.fundingRounds ?? data.funding_rounds
const fundingRoundsCount = Array.isArray(fundingRoundsData)
? fundingRoundsData.length
: fundingRoundsData
// Handle revenueDetails array for min/max
const revenueDetails = data.revenueDetails ?? data.revenue_details
let revenueMin = data.revenueMin ?? data.revenue_min ?? null
let revenueMax = data.revenueMax ?? data.revenue_max ?? null
if (Array.isArray(revenueDetails) && revenueDetails.length > 0) {
revenueMin = revenueDetails[0]?.rangeBegin ?? revenueDetails[0]?.range_begin ?? revenueMin
revenueMax = revenueDetails[0]?.rangeEnd ?? revenueDetails[0]?.range_end ?? revenueMax
}
return {
success: true,
output: {
companyName: data.companyName ?? data.company_name ?? null,
shortDescription: data.shortDescription ?? data.short_description ?? null,
fullSummary: data.fullSummary ?? data.full_summary ?? null,
revenue: data.revenue ?? null,
revenueMin,
revenueMax,
employeeCount: data.employeeCount ?? data.employee_count ?? null,
founded: data.founded ?? null,
ownership: data.ownership ?? null,
status: data.status ?? null,
website: data.website ?? null,
ceo: {
name: data.ceo?.fullName ?? data.ceo?.name ?? null,
designation: data.ceo?.designation ?? data.ceo?.title ?? null,
rating: data.ceo?.rating ?? null,
},
socialLinks,
totalFunding: data.totalFunding ?? data.total_funding ?? null,
fundingRounds: fundingRoundsCount ?? null,
competitors,
},
}
},
outputs: {
companyName: {
type: 'string',
description: 'Company name',
optional: true,
},
shortDescription: {
type: 'string',
description: 'Short company description',
optional: true,
},
fullSummary: {
type: 'string',
description: 'Full company summary',
optional: true,
},
revenue: {
type: 'string',
description: 'Company revenue',
optional: true,
},
revenueMin: {
type: 'number',
description: 'Minimum revenue estimate',
optional: true,
},
revenueMax: {
type: 'number',
description: 'Maximum revenue estimate',
optional: true,
},
employeeCount: {
type: 'number',
description: 'Number of employees',
optional: true,
},
founded: {
type: 'string',
description: 'Year founded',
optional: true,
},
ownership: {
type: 'string',
description: 'Ownership type',
optional: true,
},
status: {
type: 'string',
description: 'Company status (e.g., Active)',
optional: true,
},
website: {
type: 'string',
description: 'Company website URL',
optional: true,
},
ceo: {
type: 'json',
description: 'CEO information',
properties: {
name: { type: 'string', description: 'CEO name' },
designation: { type: 'string', description: 'CEO designation/title' },
rating: { type: 'number', description: 'CEO rating' },
},
},
socialLinks: {
type: 'json',
description: 'Social media links',
properties: {
linkedIn: { type: 'string', description: 'LinkedIn URL' },
twitter: { type: 'string', description: 'Twitter URL' },
facebook: { type: 'string', description: 'Facebook URL' },
},
},
totalFunding: {
type: 'string',
description: 'Total funding raised',
optional: true,
},
fundingRounds: {
type: 'number',
description: 'Number of funding rounds',
optional: true,
},
competitors: {
type: 'array',
description: 'Competitors',
items: {
type: 'object',
properties: {
name: { type: 'string', description: 'Competitor name' },
revenue: { type: 'string', description: 'Revenue' },
employeeCount: { type: 'number', description: 'Employee count' },
headquarters: { type: 'string', description: 'Headquarters' },
},
},
},
},
}
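
Since transformResponse only needs an object with a json() method, the shape handling above (array-form socialLinks, revenueDetails ranges) is easy to exercise with a stubbed Response. A hedged example with invented values:

// Sketch: the payload values are invented purely to illustrate the normalization paths.
async function sketchRevenueTransform() {
  const stub = new Response(
    JSON.stringify({
      companyName: 'Acme',
      socialLinks: [{ type: 'LinkedIn', url: 'https://linkedin.com/company/acme' }],
      revenueDetails: [{ rangeBegin: 1000000, rangeEnd: 5000000 }],
    })
  )
  const result = await companyRevenueTool.transformResponse(stub)
  // result.output.socialLinks.linkedIn -> 'https://linkedin.com/company/acme'
  // result.output.revenueMin -> 1000000, result.output.revenueMax -> 5000000
}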


@@ -0,0 +1,102 @@
import type {
EnrichDisposableEmailCheckParams,
EnrichDisposableEmailCheckResponse,
} from '@/tools/enrich/types'
import type { ToolConfig } from '@/tools/types'
export const disposableEmailCheckTool: ToolConfig<
EnrichDisposableEmailCheckParams,
EnrichDisposableEmailCheckResponse
> = {
id: 'enrich_disposable_email_check',
name: 'Enrich Disposable Email Check',
description:
'Check if an email address is from a disposable or temporary email provider. Returns a score and validation details.',
version: '1.0.0',
params: {
apiKey: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'Enrich API key',
},
email: {
type: 'string',
required: true,
visibility: 'user-or-llm',
description: 'Email address to check (e.g., john.doe@example.com)',
},
},
request: {
url: (params) => {
const url = new URL('https://api.enrich.so/v1/api/disposable-email-check')
url.searchParams.append('email', params.email.trim())
return url.toString()
},
method: 'GET',
headers: (params) => ({
Authorization: `Bearer ${params.apiKey}`,
'Content-Type': 'application/json',
}),
},
transformResponse: async (response: Response) => {
const data = await response.json()
const emailData = data.data ?? {}
return {
success: true,
output: {
email: emailData.email ?? '',
score: emailData.score ?? 0,
testsPassed: emailData.tests_passed ?? '0/0',
passed: emailData.passed ?? false,
reason: emailData.reason ?? null,
mailServerIp: emailData.mail_server_ip ?? null,
mxRecords: emailData.mx_records ?? [],
},
}
},
outputs: {
email: {
type: 'string',
description: 'Email address checked',
},
score: {
type: 'number',
description: 'Validation score (0-100)',
},
testsPassed: {
type: 'string',
description: 'Number of tests passed (e.g., "3/3")',
},
passed: {
type: 'boolean',
description: 'Whether the email passed all validation tests',
},
reason: {
type: 'string',
description: 'Reason for failure if email did not pass',
optional: true,
},
mailServerIp: {
type: 'string',
description: 'Mail server IP address',
optional: true,
},
mxRecords: {
type: 'array',
description: 'MX records for the domain',
items: {
type: 'object',
properties: {
host: { type: 'string', description: 'MX record host' },
pref: { type: 'number', description: 'MX record preference' },
},
},
},
},
}


@@ -0,0 +1,67 @@
import type { EnrichEmailToIpParams, EnrichEmailToIpResponse } from '@/tools/enrich/types'
import type { ToolConfig } from '@/tools/types'
export const emailToIpTool: ToolConfig<EnrichEmailToIpParams, EnrichEmailToIpResponse> = {
id: 'enrich_email_to_ip',
name: 'Enrich Email to IP',
description: 'Discover an IP address associated with an email address.',
version: '1.0.0',
params: {
apiKey: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'Enrich API key',
},
email: {
type: 'string',
required: true,
visibility: 'user-or-llm',
description: 'Email address to look up (e.g., john.doe@example.com)',
},
},
request: {
url: (params) => {
const url = new URL('https://api.enrich.so/v1/api/email-to-ip')
url.searchParams.append('email', params.email.trim())
return url.toString()
},
method: 'GET',
headers: (params) => ({
Authorization: `Bearer ${params.apiKey}`,
'Content-Type': 'application/json',
}),
},
transformResponse: async (response: Response) => {
const data = await response.json()
const ipData = data.data ?? {}
return {
success: true,
output: {
email: ipData.email ?? '',
ip: ipData.ip ?? null,
found: !!ipData.ip,
},
}
},
outputs: {
email: {
type: 'string',
description: 'Email address looked up',
},
ip: {
type: 'string',
description: 'Associated IP address',
optional: true,
},
found: {
type: 'boolean',
description: 'Whether an IP address was found',
},
},
}


@@ -0,0 +1,177 @@
import type {
EnrichEmailToPersonLiteParams,
EnrichEmailToPersonLiteResponse,
} from '@/tools/enrich/types'
import type { ToolConfig } from '@/tools/types'
export const emailToPersonLiteTool: ToolConfig<
EnrichEmailToPersonLiteParams,
EnrichEmailToPersonLiteResponse
> = {
id: 'enrich_email_to_person_lite',
name: 'Enrich Email to Person Lite',
description:
'Retrieve basic LinkedIn profile information from an email address. A lighter version with essential data only.',
version: '1.0.0',
params: {
apiKey: {
type: 'string',
required: true,
visibility: 'user-only',
description: 'Enrich API key',
},
email: {
type: 'string',
required: true,
visibility: 'user-or-llm',
description: 'Email address to look up (e.g., john.doe@company.com)',
},
},
request: {
url: (params) => {
const url = new URL('https://api.enrich.so/v1/api/email-to-linkedin-lite')
url.searchParams.append('email', params.email.trim())
return url.toString()
},
method: 'GET',
headers: (params) => ({
Authorization: `Bearer ${params.apiKey}`,
'Content-Type': 'application/json',
}),
},
transformResponse: async (response: Response) => {
const data = await response.json()
return {
success: true,
output: {
name: data.name ?? null,
firstName: data.first_name ?? data.firstName ?? null,
lastName: data.last_name ?? data.lastName ?? null,
email: data.email ?? null,
title: data.title ?? null,
location: data.location ?? null,
company: data.company ?? null,
companyLocation: data.company_location ?? data.companyLocation ?? null,
companyLinkedIn: data.company_linkedin ?? data.companyLinkedIn ?? null,
profileId: data.profile_id ?? data.profileId ?? null,
schoolName: data.school_name ?? data.schoolName ?? null,
schoolUrl: data.school_url ?? data.schoolUrl ?? null,
linkedInUrl: data.linkedin_url ?? data.linkedInUrl ?? null,
photoUrl: data.photo_url ?? data.photoUrl ?? null,
followerCount: data.follower_count ?? data.followerCount ?? null,
connectionCount: data.connection_count ?? data.connectionCount ?? null,
languages: data.languages ?? [],
projects: data.projects ?? [],
certifications: data.certifications ?? [],
volunteerExperience: data.volunteer_experience ?? data.volunteerExperience ?? [],
},
}
},
outputs: {
name: {
type: 'string',
description: 'Full name',
optional: true,
},
firstName: {
type: 'string',
description: 'First name',
optional: true,
},
lastName: {
type: 'string',
description: 'Last name',
optional: true,
},
email: {
type: 'string',
description: 'Email address',
optional: true,
},
title: {
type: 'string',
description: 'Job title',
optional: true,
},
location: {
type: 'string',
description: 'Location',
optional: true,
},
company: {
type: 'string',
description: 'Current company',
optional: true,
},
companyLocation: {
type: 'string',
description: 'Company location',
optional: true,
},
companyLinkedIn: {
type: 'string',
description: 'Company LinkedIn URL',
optional: true,
},
profileId: {
type: 'string',
description: 'LinkedIn profile ID',
optional: true,
},
schoolName: {
type: 'string',
description: 'School name',
optional: true,
},
schoolUrl: {
type: 'string',
description: 'School URL',
optional: true,
},
linkedInUrl: {
type: 'string',
description: 'LinkedIn profile URL',
optional: true,
},
photoUrl: {
type: 'string',
description: 'Profile photo URL',
optional: true,
},
followerCount: {
type: 'number',
description: 'Number of followers',
optional: true,
},
connectionCount: {
type: 'number',
description: 'Number of connections',
optional: true,
},
languages: {
type: 'array',
description: 'Languages spoken',
items: { type: 'string', description: 'Language' },
},
projects: {
type: 'array',
description: 'Projects',
items: { type: 'string', description: 'Project' },
},
certifications: {
type: 'array',
description: 'Certifications',
items: { type: 'string', description: 'Certification' },
},
volunteerExperience: {
type: 'array',
description: 'Volunteer experience',
items: { type: 'string', description: 'Volunteer role' },
},
},
}
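
The enrich tools in this section all share the same GET-plus-Bearer shape and differ only in whether request.url is a string or a params builder. Assuming the ToolConfig fields behave as they are used above, a single generic runner covers both cases; runEnrichTool is a hypothetical name, not part of the source:

// Hypothetical driver for any of the enrich ToolConfigs added in this diff.
async function runEnrichTool<P, R>(tool: ToolConfig<P, R>, params: P) {
  const url = typeof tool.request.url === 'function' ? tool.request.url(params) : tool.request.url
  const response = await fetch(url, {
    method: tool.request.method,
    headers: tool.request.headers(params),
  })
  return tool.transformResponse(response)
}
// e.g. await runEnrichTool(emailToPersonLiteTool, { apiKey, email: 'jane@company.com' })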

Some files were not shown because too many files have changed in this diff.