Compare commits

...

15 Commits

Author SHA1 Message Date
waleed
3ac79b0443 fix(logs): format duration in log details panel 2026-02-03 13:08:54 -08:00
waleed
47cb0a2cc9 fix(formatting): return null for missing values, strip trailing zeros 2026-02-02 23:44:28 -08:00
Waleed
a529db112e feat(ee): add enterprise modules (#3121) 2026-02-02 23:44:28 -08:00
waleed
c69d6bf976 fix(formatting): use parseFloat to preserve fractional milliseconds 2026-02-02 23:31:40 -08:00
waleed
567f39267c fix(logs): add precision to logs list duration formatting 2026-02-02 23:10:19 -08:00
waleed
12409f5eb5 fix(formatting): preserve original precision and rounding behavior 2026-02-02 22:58:44 -08:00
waleed
7386d227eb fix(formatting): consolidate duration formatting into shared utility 2026-02-02 14:53:14 -08:00
Waleed
a9b7d75d87 feat(editor): added docs link to editor (#3116) 2026-02-02 12:22:08 -08:00
Vikhyath Mondreti
0449804ffb improvement(billing): duplicate checks for bypasses, logger billing actor consistency, run from block (#3107)
* improvement(billing): improve against direct subscription creation bypasses

* more usage of block/unblock helpers

* address bugbot comments

* fail closed

* only run dup check for orgs
2026-02-02 10:52:08 -08:00
Vikhyath Mondreti
c286f3ed24 fix(mcp): child workflow with response block returns error (#3114) 2026-02-02 09:30:35 -08:00
Vikhyath Mondreti
b738550815 fix(cleanup-cron): stale execution cleanup integer overflow (#3113) 2026-02-02 09:03:56 -08:00
Waleed
c6357f7438 feat(tools): added enrich so (#3103)
* feat(tools): added enrich so

* updated docs and types
2026-01-31 21:18:41 -08:00
Waleed
b1118935f7 fix(workflow): optimize loop/parallel regeneration and prevent duplicate agent tools (#3100)
* fix(workflow): optimize loop/parallel regeneration and prevent duplicate agent tools

* refactor(workflow): remove addBlock in favor of batchAddBlocks

- Migrated undo-redo to use batchAddBlocks instead of addBlock loop
- Removed addBlock method from workflow store (now unused)
- Updated tests to use helper function wrapping batchAddBlocks
- This fixes the cursor bot comments about inconsistent parent checking
2026-01-31 17:55:32 -08:00
Waleed
3e18b4186c fix(mcp): pass timeout to SDK callTool to override 60s default (#3101) 2026-01-31 17:44:49 -08:00
Vikhyath Mondreti
e1ac201936 improvement(ratelimits, sockets): increase across all plans, reconnecting notif for sockets (#3096)
* improvement(rate-limits): increase across all plans

* improve sockets with reconnecting

* address bugbot comment

* fix typing
2026-01-31 16:48:57 -08:00
131 changed files with 8319 additions and 1115 deletions

View File

@@ -5421,3 +5421,18 @@ z'
</svg>
)
}
export function EnrichSoIcon(props: SVGProps<SVGSVGElement>) {
return (
<svg {...props} xmlns='http://www.w3.org/2000/svg' viewBox='0 0 398 394' fill='none'>
<path
fill='#5A52F4'
d='M129.705566,319.705719 C127.553314,322.684906 125.651512,325.414673 123.657059,328.277466 C113.748466,318.440308 105.605003,310.395905 97.510834,302.302216 C93.625801,298.417419 89.990181,294.269318 85.949242,290.558868 C82.857994,287.720428 82.464081,285.757660 85.772888,282.551880 C104.068108,264.826202 122.146088,246.876312 140.285110,228.989670 C141.183945,228.103317 141.957443,227.089844 143.588837,225.218384 C140.691605,225.066116 138.820053,224.882874 136.948410,224.881958 C102.798264,224.865326 68.647453,224.765244 34.498699,224.983612 C29.315699,225.016739 27.990419,223.343155 28.090912,218.397430 C28.381887,204.076935 28.189890,189.746719 28.195684,175.420319 C28.198524,168.398178 28.319166,168.279541 35.590389,168.278687 C69.074188,168.274780 102.557991,168.281174 136.041794,168.266083 C137.968231,168.265213 139.894608,168.107101 141.821030,168.022171 C142.137955,167.513992 142.454895,167.005829 142.771820,166.497650 C122.842415,146.495621 102.913002,126.493591 83.261360,106.770348 C96.563828,93.471756 109.448814,80.590523 122.656265,67.386925 C123.522743,68.161835 124.785545,69.187096 125.930321,70.330513 C144.551819,88.930206 163.103683,107.600082 181.805267,126.118790 C186.713593,130.979126 189.085648,136.448059 189.055374,143.437057 C188.899490,179.418961 188.911179,215.402191 189.046661,251.384262 C189.072296,258.190796 186.742920,263.653717 181.982727,268.323273 C164.624405,285.351227 147.295807,302.409485 129.705566,319.705719z'
/>
<path
fill='#5A52F4'
d='M276.070923,246.906128 C288.284363,258.985870 300.156097,270.902100 312.235931,282.603485 C315.158752,285.434784 315.417542,287.246246 312.383484,290.248932 C301.143494,301.372498 290.168549,312.763733 279.075592,324.036255 C278.168030,324.958496 277.121307,325.743835 275.898315,326.801086 C274.628357,325.711792 273.460663,324.822968 272.422150,323.802673 C253.888397,305.594757 235.418701,287.321289 216.818268,269.181854 C211.508789,264.003937 208.872726,258.136688 208.914001,250.565842 C209.108337,214.917786 209.084808,179.267715 208.928864,143.619293 C208.898407,136.654907 211.130066,131.122162 216.052216,126.246094 C234.867538,107.606842 253.537521,88.820908 272.274780,70.102730 C273.313202,69.065353 274.468597,68.145027 275.264038,67.440727 C288.353516,80.579514 301.213470,93.487869 314.597534,106.922356 C295.163391,126.421753 275.214752,146.437363 255.266113,166.452972 C255.540176,166.940353 255.814240,167.427734 256.088318,167.915100 C257.983887,168.035736 259.879425,168.260345 261.775085,168.261551 C295.425201,168.282852 329.075287,168.273544 362.725403,168.279831 C369.598907,168.281113 369.776215,168.463593 369.778931,175.252213 C369.784882,189.911667 369.646088,204.573074 369.861206,219.229355 C369.925110,223.585022 368.554596,224.976288 364.148865,224.956406 C329.833130,224.801605 295.516388,224.869598 261.199951,224.868744 C259.297974,224.868698 257.396027,224.868744 254.866638,224.868744 C262.350708,232.658707 269.078217,239.661194 276.070923,246.906128z'
/>
</svg>
)
}

View File

@@ -29,6 +29,7 @@ import {
DynamoDBIcon,
ElasticsearchIcon,
ElevenLabsIcon,
EnrichSoIcon,
ExaAIIcon,
EyeIcon,
FirecrawlIcon,
@@ -160,6 +161,7 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
dynamodb: DynamoDBIcon,
elasticsearch: ElasticsearchIcon,
elevenlabs: ElevenLabsIcon,
enrich: EnrichSoIcon,
exa: ExaAIIcon,
file_v2: DocumentIcon,
firecrawl: FirecrawlIcon,

View File

@@ -27,16 +27,16 @@ All API responses include information about your workflow execution limits and u
"limits": {
"workflowExecutionRateLimit": {
"sync": {
"requestsPerMinute": 60, // Sustained rate limit per minute
"maxBurst": 120, // Maximum burst capacity
"remaining": 118, // Current tokens available (up to maxBurst)
"resetAt": "..." // When tokens next refill
"requestsPerMinute": 150, // Sustained rate limit per minute
"maxBurst": 300, // Maximum burst capacity
"remaining": 298, // Current tokens available (up to maxBurst)
"resetAt": "..." // When tokens next refill
},
"async": {
"requestsPerMinute": 200, // Sustained rate limit per minute
"maxBurst": 400, // Maximum burst capacity
"remaining": 398, // Current tokens available
"resetAt": "..." // When tokens next refill
"requestsPerMinute": 1000, // Sustained rate limit per minute
"maxBurst": 2000, // Maximum burst capacity
"remaining": 1998, // Current tokens available
"resetAt": "..." // When tokens next refill
}
},
"usage": {
@@ -107,28 +107,28 @@ Query workflow execution logs with extensive filtering options.
}
],
"nextCursor": "eyJzIjoiMjAyNS0wMS0wMVQxMjozNDo1Ni43ODlaIiwiaWQiOiJsb2dfYWJjMTIzIn0",
"limits": {
"workflowExecutionRateLimit": {
"sync": {
"requestsPerMinute": 60,
"maxBurst": 120,
"remaining": 118,
"resetAt": "2025-01-01T12:35:56.789Z"
"limits": {
"workflowExecutionRateLimit": {
"sync": {
"requestsPerMinute": 150,
"maxBurst": 300,
"remaining": 298,
"resetAt": "2025-01-01T12:35:56.789Z"
},
"async": {
"requestsPerMinute": 1000,
"maxBurst": 2000,
"remaining": 1998,
"resetAt": "2025-01-01T12:35:56.789Z"
}
},
"async": {
"requestsPerMinute": 200,
"maxBurst": 400,
"remaining": 398,
"resetAt": "2025-01-01T12:35:56.789Z"
"usage": {
"currentPeriodCost": 1.234,
"limit": 10,
"plan": "pro",
"isExceeded": false
}
},
"usage": {
"currentPeriodCost": 1.234,
"limit": 10,
"plan": "pro",
"isExceeded": false
}
}
}
```
</Tab>
@@ -188,15 +188,15 @@ Retrieve detailed information about a specific log entry.
"limits": {
"workflowExecutionRateLimit": {
"sync": {
"requestsPerMinute": 60,
"maxBurst": 120,
"remaining": 118,
"requestsPerMinute": 150,
"maxBurst": 300,
"remaining": 298,
"resetAt": "2025-01-01T12:35:56.789Z"
},
"async": {
"requestsPerMinute": 200,
"maxBurst": 400,
"remaining": 398,
"requestsPerMinute": 1000,
"maxBurst": 2000,
"remaining": 1998,
"resetAt": "2025-01-01T12:35:56.789Z"
}
},
@@ -477,10 +477,10 @@ The API uses a **token bucket algorithm** for rate limiting, providing fair usag
| Plan | Requests/Minute | Burst Capacity |
|------|-----------------|----------------|
| Free | 10 | 20 |
| Pro | 30 | 60 |
| Team | 60 | 120 |
| Enterprise | 120 | 240 |
| Free | 30 | 60 |
| Pro | 100 | 200 |
| Team | 200 | 400 |
| Enterprise | 500 | 1000 |
**How it works:**
- Tokens refill at `requestsPerMinute` rate

View File

@@ -170,16 +170,16 @@ curl -X GET -H "X-API-Key: YOUR_API_KEY" -H "Content-Type: application/json" htt
"rateLimit": {
"sync": {
"isLimited": false,
"requestsPerMinute": 25,
"maxBurst": 50,
"remaining": 50,
"requestsPerMinute": 150,
"maxBurst": 300,
"remaining": 300,
"resetAt": "2025-09-08T22:51:55.999Z"
},
"async": {
"isLimited": false,
"requestsPerMinute": 200,
"maxBurst": 400,
"remaining": 400,
"requestsPerMinute": 1000,
"maxBurst": 2000,
"remaining": 2000,
"resetAt": "2025-09-08T22:51:56.155Z"
},
"authType": "api"
@@ -206,11 +206,11 @@ curl -X GET -H "X-API-Key: YOUR_API_KEY" -H "Content-Type: application/json" htt
Different subscription plans have different usage limits:
| Plan | Monthly Usage Limit | Rate Limits (per minute) |
|------|-------------------|-------------------------|
| **Free** | $20 | 5 sync, 10 async |
| **Pro** | $100 | 10 sync, 50 async |
| **Team** | $500 (pooled) | 50 sync, 100 async |
| Plan | Monthly Usage Included | Rate Limits (per minute) |
|------|------------------------|-------------------------|
| **Free** | $20 | 50 sync, 200 async |
| **Pro** | $20 (adjustable) | 150 sync, 1,000 async |
| **Team** | $40/seat (pooled, adjustable) | 300 sync, 2,500 async |
| **Enterprise** | Custom | Custom |
## Billing Model

View File

@@ -0,0 +1,930 @@
---
title: Enrich
description: B2B data enrichment and LinkedIn intelligence with Enrich.so
---
import { BlockInfoCard } from "@/components/ui/block-info-card"
<BlockInfoCard
type="enrich"
color="#E5E5E6"
/>
{/* MANUAL-CONTENT-START:intro */}
[Enrich.so](https://enrich.so/) delivers real-time, precision B2B data enrichment and LinkedIn intelligence. Its platform provides dynamic access to public and structured company, contact, and professional information, enabling teams to build richer profiles, improve lead quality, and drive more effective outreach.
With Enrich.so, you can:
- **Enrich contact and company profiles**: Instantly discover key data points for leads, prospects, and businesses using just an email or LinkedIn profile.
- **Verify email deliverability**: Check if emails are valid, deliverable, and safe to contact before sending.
- **Find work & personal emails**: Identify missing business emails from a LinkedIn profile or personal emails to expand your reach.
- **Reveal phone numbers and social profiles**: Surface additional communication channels for contacts through enrichment tools.
- **Analyze LinkedIn posts and engagement**: Extract insights on post reach, reactions, and audience from public LinkedIn content.
- **Conduct advanced people and company search**: Enable your agents to locate companies and professionals based on deep filters and real-time intelligence.
The Sim integration with Enrich.so empowers your agents and automations to instantly query, enrich, and validate B2B data, boosting productivity in workflows like sales prospecting, recruiting, marketing operations, and more. Combining Sim's orchestration capabilities with Enrich.so unlocks smarter, data-driven automation strategies powered by best-in-class B2B intelligence.
{/* MANUAL-CONTENT-END */}
## Usage Instructions
Access real-time B2B data intelligence with Enrich.so. Enrich profiles from email addresses, find work emails from LinkedIn, verify email deliverability, search for people and companies, and analyze LinkedIn post engagement.
## Tools
### `enrich_check_credits`
Check your Enrich API credit usage and remaining balance.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Enrich API key |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `totalCredits` | number | Total credits allocated to the account |
| `creditsUsed` | number | Credits consumed so far |
| `creditsRemaining` | number | Available credits remaining |
### `enrich_email_to_profile`
Retrieve detailed LinkedIn profile information using an email address including work history, education, and skills.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Enrich API key |
| `email` | string | Yes | Email address to look up \(e.g., john.doe@company.com\) |
| `inRealtime` | boolean | No | Set to true to retrieve fresh data, bypassing cached information |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `displayName` | string | Full display name |
| `firstName` | string | First name |
| `lastName` | string | Last name |
| `headline` | string | Professional headline |
| `occupation` | string | Current occupation |
| `summary` | string | Profile summary |
| `location` | string | Location |
| `country` | string | Country |
| `linkedInUrl` | string | LinkedIn profile URL |
| `photoUrl` | string | Profile photo URL |
| `connectionCount` | number | Number of connections |
| `isConnectionCountObfuscated` | boolean | Whether connection count is obfuscated \(500+\) |
| `positionHistory` | array | Work experience history |
| ↳ `title` | string | Job title |
| ↳ `company` | string | Company name |
| ↳ `startDate` | string | Start date |
| ↳ `endDate` | string | End date |
| ↳ `location` | string | Location |
| `education` | array | Education history |
| ↳ `school` | string | School name |
| ↳ `degree` | string | Degree |
| ↳ `fieldOfStudy` | string | Field of study |
| ↳ `startDate` | string | Start date |
| ↳ `endDate` | string | End date |
| `certifications` | array | Professional certifications |
| ↳ `name` | string | Certification name |
| ↳ `authority` | string | Issuing authority |
| ↳ `url` | string | Certification URL |
| `skills` | array | List of skills |
| `languages` | array | List of languages |
| `locale` | string | Profile locale \(e.g., en_US\) |
| `version` | number | Profile version number |
### `enrich_email_to_person_lite`
Retrieve basic LinkedIn profile information from an email address. A lighter version with essential data only.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Enrich API key |
| `email` | string | Yes | Email address to look up \(e.g., john.doe@company.com\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `name` | string | Full name |
| `firstName` | string | First name |
| `lastName` | string | Last name |
| `email` | string | Email address |
| `title` | string | Job title |
| `location` | string | Location |
| `company` | string | Current company |
| `companyLocation` | string | Company location |
| `companyLinkedIn` | string | Company LinkedIn URL |
| `profileId` | string | LinkedIn profile ID |
| `schoolName` | string | School name |
| `schoolUrl` | string | School URL |
| `linkedInUrl` | string | LinkedIn profile URL |
| `photoUrl` | string | Profile photo URL |
| `followerCount` | number | Number of followers |
| `connectionCount` | number | Number of connections |
| `languages` | array | Languages spoken |
| `projects` | array | Projects |
| `certifications` | array | Certifications |
| `volunteerExperience` | array | Volunteer experience |
### `enrich_linkedin_profile`
Enrich a LinkedIn profile URL with detailed information including positions, education, and social metrics.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Enrich API key |
| `url` | string | Yes | LinkedIn profile URL \(e.g., linkedin.com/in/williamhgates\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `profileId` | string | LinkedIn profile ID |
| `firstName` | string | First name |
| `lastName` | string | Last name |
| `subTitle` | string | Profile subtitle/headline |
| `profilePicture` | string | Profile picture URL |
| `backgroundImage` | string | Background image URL |
| `industry` | string | Industry |
| `location` | string | Location |
| `followersCount` | number | Number of followers |
| `connectionsCount` | number | Number of connections |
| `premium` | boolean | Whether the account is premium |
| `influencer` | boolean | Whether the account is an influencer |
| `positions` | array | Work positions |
| ↳ `title` | string | Job title |
| ↳ `company` | string | Company name |
| ↳ `companyLogo` | string | Company logo URL |
| ↳ `startDate` | string | Start date |
| ↳ `endDate` | string | End date |
| ↳ `location` | string | Location |
| `education` | array | Education history |
| ↳ `school` | string | School name |
| ↳ `degree` | string | Degree |
| ↳ `fieldOfStudy` | string | Field of study |
| ↳ `startDate` | string | Start date |
| ↳ `endDate` | string | End date |
| `websites` | array | Personal websites |
### `enrich_find_email`
Find a person's work email address using their full name and company domain.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Enrich API key |
| `fullName` | string | Yes | Person's full name \(e.g., John Doe\) |
| `companyDomain` | string | Yes | Company domain \(e.g., example.com\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `email` | string | Found email address |
| `firstName` | string | First name |
| `lastName` | string | Last name |
| `domain` | string | Company domain |
| `found` | boolean | Whether an email was found |
| `acceptAll` | boolean | Whether the domain accepts all emails |
### `enrich_linkedin_to_work_email`
Find a work email address from a LinkedIn profile URL.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Enrich API key |
| `linkedinProfile` | string | Yes | LinkedIn profile URL \(e.g., https://www.linkedin.com/in/williamhgates\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `email` | string | Found work email address |
| `found` | boolean | Whether an email was found |
| `status` | string | Request status \(in_progress or completed\) |
### `enrich_linkedin_to_personal_email`
Find personal email address from a LinkedIn profile URL.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Enrich API key |
| `linkedinProfile` | string | Yes | LinkedIn profile URL \(e.g., linkedin.com/in/username\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `email` | string | Personal email address |
| `found` | boolean | Whether an email was found |
| `status` | string | Request status |
### `enrich_phone_finder`
Find a phone number from a LinkedIn profile URL.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Enrich API key |
| `linkedinProfile` | string | Yes | LinkedIn profile URL \(e.g., linkedin.com/in/williamhgates\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `profileUrl` | string | LinkedIn profile URL |
| `mobileNumber` | string | Found mobile phone number |
| `found` | boolean | Whether a phone number was found |
| `status` | string | Request status \(in_progress or completed\) |
### `enrich_email_to_phone`
Find a phone number associated with an email address.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Enrich API key |
| `email` | string | Yes | Email address to look up \(e.g., john.doe@example.com\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `email` | string | Email address looked up |
| `mobileNumber` | string | Found mobile phone number |
| `found` | boolean | Whether a phone number was found |
| `status` | string | Request status \(in_progress or completed\) |
### `enrich_verify_email`
Verify an email address for deliverability, including catch-all detection and provider identification.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Enrich API key |
| `email` | string | Yes | Email address to verify \(e.g., john.doe@example.com\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `email` | string | Email address verified |
| `status` | string | Verification status |
| `result` | string | Deliverability result \(deliverable, undeliverable, etc.\) |
| `confidenceScore` | number | Confidence score \(0-100\) |
| `smtpProvider` | string | Email service provider \(e.g., Google, Microsoft\) |
| `mailDisposable` | boolean | Whether the email is from a disposable provider |
| `mailAcceptAll` | boolean | Whether the domain is a catch-all domain |
| `free` | boolean | Whether the email uses a free email service |
### `enrich_disposable_email_check`
Check if an email address is from a disposable or temporary email provider. Returns a score and validation details.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Enrich API key |
| `email` | string | Yes | Email address to check \(e.g., john.doe@example.com\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `email` | string | Email address checked |
| `score` | number | Validation score \(0-100\) |
| `testsPassed` | string | Number of tests passed \(e.g., "3/3"\) |
| `passed` | boolean | Whether the email passed all validation tests |
| `reason` | string | Reason for failure if email did not pass |
| `mailServerIp` | string | Mail server IP address |
| `mxRecords` | array | MX records for the domain |
| ↳ `host` | string | MX record host |
| ↳ `pref` | number | MX record preference |
### `enrich_email_to_ip`
Discover an IP address associated with an email address.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Enrich API key |
| `email` | string | Yes | Email address to look up \(e.g., john.doe@example.com\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `email` | string | Email address looked up |
| `ip` | string | Associated IP address |
| `found` | boolean | Whether an IP address was found |
### `enrich_ip_to_company`
Identify a company from an IP address with detailed firmographic information.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Enrich API key |
| `ip` | string | Yes | IP address to look up \(e.g., 86.92.60.221\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `name` | string | Company name |
| `legalName` | string | Legal company name |
| `domain` | string | Primary domain |
| `domainAliases` | array | Domain aliases |
| `sector` | string | Business sector |
| `industry` | string | Industry |
| `phone` | string | Phone number |
| `employees` | number | Number of employees |
| `revenue` | string | Estimated revenue |
| `location` | json | Company location |
| ↳ `city` | string | City |
| ↳ `state` | string | State |
| ↳ `country` | string | Country |
| ↳ `timezone` | string | Timezone |
| `linkedInUrl` | string | LinkedIn company URL |
| `twitterUrl` | string | Twitter URL |
| `facebookUrl` | string | Facebook URL |
### `enrich_company_lookup`
Look up comprehensive company information by name or domain including funding, location, and social profiles.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Enrich API key |
| `name` | string | No | Company name \(e.g., Google\) |
| `domain` | string | No | Company domain \(e.g., google.com\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `name` | string | Company name |
| `universalName` | string | Universal company name |
| `companyId` | string | Company ID |
| `description` | string | Company description |
| `phone` | string | Phone number |
| `linkedInUrl` | string | LinkedIn company URL |
| `websiteUrl` | string | Company website |
| `followers` | number | Number of LinkedIn followers |
| `staffCount` | number | Number of employees |
| `foundedDate` | string | Date founded |
| `type` | string | Company type |
| `industries` | array | Industries |
| `specialties` | array | Company specialties |
| `headquarters` | json | Headquarters location |
| ↳ `city` | string | City |
| ↳ `country` | string | Country |
| ↳ `postalCode` | string | Postal code |
| ↳ `line1` | string | Address line 1 |
| `logo` | string | Company logo URL |
| `coverImage` | string | Cover image URL |
| `fundingRounds` | array | Funding history |
| ↳ `roundType` | string | Funding round type |
| ↳ `amount` | number | Amount raised |
| ↳ `currency` | string | Currency |
| ↳ `investors` | array | Investors |
### `enrich_company_funding`
Retrieve company funding history, traffic metrics, and executive information by domain.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Enrich API key |
| `domain` | string | Yes | Company domain \(e.g., example.com\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `legalName` | string | Legal company name |
| `employeeCount` | number | Number of employees |
| `headquarters` | string | Headquarters location |
| `industry` | string | Industry |
| `totalFundingRaised` | number | Total funding raised |
| `fundingRounds` | array | Funding rounds |
| ↳ `roundType` | string | Round type |
| ↳ `amount` | number | Amount raised |
| ↳ `date` | string | Date |
| ↳ `investors` | array | Investors |
| `monthlyVisits` | number | Monthly website visits |
| `trafficChange` | number | Traffic change percentage |
| `itSpending` | number | Estimated IT spending in USD |
| `executives` | array | Executive team |
| ↳ `name` | string | Name |
| ↳ `title` | string | Title |
### `enrich_company_revenue`
Retrieve company revenue data, CEO information, and competitive analysis by domain.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Enrich API key |
| `domain` | string | Yes | Company domain \(e.g., clay.io\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `companyName` | string | Company name |
| `shortDescription` | string | Short company description |
| `fullSummary` | string | Full company summary |
| `revenue` | string | Company revenue |
| `revenueMin` | number | Minimum revenue estimate |
| `revenueMax` | number | Maximum revenue estimate |
| `employeeCount` | number | Number of employees |
| `founded` | string | Year founded |
| `ownership` | string | Ownership type |
| `status` | string | Company status \(e.g., Active\) |
| `website` | string | Company website URL |
| `ceo` | json | CEO information |
| ↳ `name` | string | CEO name |
| ↳ `designation` | string | CEO designation/title |
| ↳ `rating` | number | CEO rating |
| `socialLinks` | json | Social media links |
| ↳ `linkedIn` | string | LinkedIn URL |
| ↳ `twitter` | string | Twitter URL |
| ↳ `facebook` | string | Facebook URL |
| `totalFunding` | string | Total funding raised |
| `fundingRounds` | number | Number of funding rounds |
| `competitors` | array | Competitors |
| ↳ `name` | string | Competitor name |
| ↳ `revenue` | string | Revenue |
| ↳ `employeeCount` | number | Employee count |
| ↳ `headquarters` | string | Headquarters |
### `enrich_search_people`
Search for professionals by various criteria including name, title, skills, education, and company.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Enrich API key |
| `firstName` | string | No | First name |
| `lastName` | string | No | Last name |
| `summary` | string | No | Professional summary keywords |
| `subTitle` | string | No | Job title/subtitle |
| `locationCountry` | string | No | Country |
| `locationCity` | string | No | City |
| `locationState` | string | No | State/province |
| `influencer` | boolean | No | Filter for influencers only |
| `premium` | boolean | No | Filter for premium accounts only |
| `language` | string | No | Primary language |
| `industry` | string | No | Industry |
| `currentJobTitles` | json | No | Current job titles \(array\) |
| `pastJobTitles` | json | No | Past job titles \(array\) |
| `skills` | json | No | Skills to search for \(array\) |
| `schoolNames` | json | No | School names \(array\) |
| `certifications` | json | No | Certifications to filter by \(array\) |
| `degreeNames` | json | No | Degree names to filter by \(array\) |
| `studyFields` | json | No | Fields of study to filter by \(array\) |
| `currentCompanies` | json | No | Current company IDs to filter by \(array of numbers\) |
| `pastCompanies` | json | No | Past company IDs to filter by \(array of numbers\) |
| `currentPage` | number | No | Page number \(default: 1\) |
| `pageSize` | number | No | Results per page \(default: 20\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `currentPage` | number | Current page number |
| `totalPage` | number | Total number of pages |
| `pageSize` | number | Results per page |
| `profiles` | array | Search results |
| ↳ `profileIdentifier` | string | Profile ID |
| ↳ `givenName` | string | First name |
| ↳ `familyName` | string | Last name |
| ↳ `currentPosition` | string | Current job title |
| ↳ `profileImage` | string | Profile image URL |
| ↳ `externalProfileUrl` | string | LinkedIn URL |
| ↳ `city` | string | City |
| ↳ `country` | string | Country |
| ↳ `expertSkills` | array | Skills |
### `enrich_search_company`
Search for companies by various criteria including name, industry, location, and size.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Enrich API key |
| `name` | string | No | Company name |
| `website` | string | No | Company website URL |
| `tagline` | string | No | Company tagline |
| `type` | string | No | Company type \(e.g., Private, Public\) |
| `description` | string | No | Company description keywords |
| `industries` | json | No | Industries to filter by \(array\) |
| `locationCountry` | string | No | Country |
| `locationCity` | string | No | City |
| `postalCode` | string | No | Postal code |
| `locationCountryList` | json | No | Multiple countries to filter by \(array\) |
| `locationCityList` | json | No | Multiple cities to filter by \(array\) |
| `specialities` | json | No | Company specialties \(array\) |
| `followers` | number | No | Minimum number of followers |
| `staffCount` | number | No | Maximum staff count |
| `staffCountMin` | number | No | Minimum staff count |
| `staffCountMax` | number | No | Maximum staff count |
| `currentPage` | number | No | Page number \(default: 1\) |
| `pageSize` | number | No | Results per page \(default: 20\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `currentPage` | number | Current page number |
| `totalPage` | number | Total number of pages |
| `pageSize` | number | Results per page |
| `companies` | array | Search results |
| ↳ `companyName` | string | Company name |
| ↳ `tagline` | string | Company tagline |
| ↳ `webAddress` | string | Website URL |
| ↳ `industries` | array | Industries |
| ↳ `teamSize` | number | Team size |
| ↳ `linkedInProfile` | string | LinkedIn URL |
### `enrich_search_company_employees`
Search for employees within specific companies by location and job title.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Enrich API key |
| `companyIds` | json | No | Array of company IDs to search within |
| `country` | string | No | Country filter \(e.g., United States\) |
| `city` | string | No | City filter \(e.g., San Francisco\) |
| `state` | string | No | State filter \(e.g., California\) |
| `jobTitles` | json | No | Job titles to filter by \(array\) |
| `page` | number | No | Page number \(default: 1\) |
| `pageSize` | number | No | Results per page \(default: 10\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `currentPage` | number | Current page number |
| `totalPage` | number | Total number of pages |
| `pageSize` | number | Number of results per page |
| `profiles` | array | Employee profiles |
| ↳ `profileIdentifier` | string | Profile ID |
| ↳ `givenName` | string | First name |
| ↳ `familyName` | string | Last name |
| ↳ `currentPosition` | string | Current job title |
| ↳ `profileImage` | string | Profile image URL |
| ↳ `externalProfileUrl` | string | LinkedIn URL |
| ↳ `city` | string | City |
| ↳ `country` | string | Country |
| ↳ `expertSkills` | array | Skills |
### `enrich_search_similar_companies`
Find companies similar to a given company by LinkedIn URL with filters for location and size.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Enrich API key |
| `url` | string | Yes | LinkedIn company URL \(e.g., linkedin.com/company/google\) |
| `accountLocation` | json | No | Filter by locations \(array of country names\) |
| `employeeSizeType` | string | No | Employee size filter type \(e.g., RANGE\) |
| `employeeSizeRange` | json | No | Employee size ranges \(array of \{start, end\} objects\) |
| `page` | number | No | Page number \(default: 1\) |
| `num` | number | No | Number of results per page |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `companies` | array | Similar companies |
| ↳ `url` | string | LinkedIn URL |
| ↳ `name` | string | Company name |
| ↳ `universalName` | string | Universal name |
| ↳ `type` | string | Company type |
| ↳ `description` | string | Description |
| ↳ `phone` | string | Phone number |
| ↳ `website` | string | Website URL |
| ↳ `logo` | string | Logo URL |
| ↳ `foundedYear` | number | Year founded |
| ↳ `staffTotal` | number | Total staff |
| ↳ `industries` | array | Industries |
| ↳ `relevancyScore` | number | Relevancy score |
| ↳ `relevancyValue` | string | Relevancy value |
### `enrich_sales_pointer_people`
Advanced people search with complex filters for location, company size, seniority, experience, and more.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Enrich API key |
| `page` | number | Yes | Page number \(starts at 1\) |
| `filters` | json | Yes | Array of filter objects. Each filter has type \(e.g., POSTAL_CODE, COMPANY_HEADCOUNT\), values \(array with id, text, selectionType: INCLUDED/EXCLUDED\), and optional selectedSubFilter |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `data` | array | People results |
| ↳ `name` | string | Full name |
| ↳ `summary` | string | Professional summary |
| ↳ `location` | string | Location |
| ↳ `profilePicture` | string | Profile picture URL |
| ↳ `linkedInUrn` | string | LinkedIn URN |
| ↳ `positions` | array | Work positions |
| ↳ `education` | array | Education |
| `pagination` | json | Pagination info |
| ↳ `totalCount` | number | Total results |
| ↳ `returnedCount` | number | Returned count |
| ↳ `start` | number | Start position |
| ↳ `limit` | number | Limit |
### `enrich_search_posts`
Search LinkedIn posts by keywords with date filtering.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Enrich API key |
| `keywords` | string | Yes | Search keywords \(e.g., "AI automation"\) |
| `datePosted` | string | No | Time filter \(e.g., past_week, past_month\) |
| `page` | number | No | Page number \(default: 1\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `count` | number | Total number of results |
| `posts` | array | Search results |
| ↳ `url` | string | Post URL |
| ↳ `postId` | string | Post ID |
| ↳ `author` | object | Author information |
| ↳ `name` | string | Author name |
| ↳ `headline` | string | Author headline |
| ↳ `linkedInUrl` | string | Author LinkedIn URL |
| ↳ `profileImage` | string | Author profile image |
| ↳ `timestamp` | string | Post timestamp |
| ↳ `textContent` | string | Post text content |
| ↳ `hashtags` | array | Hashtags |
| ↳ `mediaUrls` | array | Media URLs |
| ↳ `reactions` | number | Number of reactions |
| ↳ `commentsCount` | number | Number of comments |
### `enrich_get_post_details`
Get detailed information about a LinkedIn post by URL.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Enrich API key |
| `url` | string | Yes | LinkedIn post URL |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `postId` | string | Post ID |
| `author` | json | Author information |
| ↳ `name` | string | Author name |
| ↳ `headline` | string | Author headline |
| ↳ `linkedInUrl` | string | Author LinkedIn URL |
| ↳ `profileImage` | string | Author profile image |
| `timestamp` | string | Post timestamp |
| `textContent` | string | Post text content |
| `hashtags` | array | Hashtags |
| `mediaUrls` | array | Media URLs |
| `reactions` | number | Number of reactions |
| `commentsCount` | number | Number of comments |
### `enrich_search_post_reactions`
Get reactions on a LinkedIn post with filtering by reaction type.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Enrich API key |
| `postUrn` | string | Yes | LinkedIn activity URN \(e.g., urn:li:activity:7231931952839196672\) |
| `reactionType` | string | Yes | Reaction type filter: all, like, love, celebrate, insightful, or funny \(default: all\) |
| `page` | number | Yes | Page number \(starts at 1\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `page` | number | Current page number |
| `totalPage` | number | Total number of pages |
| `count` | number | Number of reactions returned |
| `reactions` | array | Reactions |
| ↳ `reactionType` | string | Type of reaction |
| ↳ `reactor` | object | Person who reacted |
| ↳ `name` | string | Name |
| ↳ `subTitle` | string | Job title |
| ↳ `profileId` | string | Profile ID |
| ↳ `profilePicture` | string | Profile picture URL |
| ↳ `linkedInUrl` | string | LinkedIn URL |
### `enrich_search_post_comments`
Get comments on a LinkedIn post.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Enrich API key |
| `postUrn` | string | Yes | LinkedIn activity URN \(e.g., urn:li:activity:7191163324208705536\) |
| `page` | number | No | Page number \(starts at 1, default: 1\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `page` | number | Current page number |
| `totalPage` | number | Total number of pages |
| `count` | number | Number of comments returned |
| `comments` | array | Comments |
| ↳ `activityId` | string | Comment activity ID |
| ↳ `commentary` | string | Comment text |
| ↳ `linkedInUrl` | string | Link to comment |
| ↳ `commenter` | object | Commenter info |
| ↳ `profileId` | string | Profile ID |
| ↳ `firstName` | string | First name |
| ↳ `lastName` | string | Last name |
| ↳ `subTitle` | string | Subtitle/headline |
| ↳ `profilePicture` | string | Profile picture URL |
| ↳ `backgroundImage` | string | Background image URL |
| ↳ `entityUrn` | string | Entity URN |
| ↳ `objectUrn` | string | Object URN |
| ↳ `profileType` | string | Profile type |
| ↳ `reactionBreakdown` | object | Reactions on the comment |
| ↳ `likes` | number | Number of likes |
| ↳ `empathy` | number | Number of empathy reactions |
| ↳ `other` | number | Number of other reactions |
### `enrich_search_people_activities`
Get a person's recent LinkedIn activities (posts, comments, or articles) by profile ID.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Enrich API key |
| `profileId` | string | Yes | LinkedIn profile ID |
| `activityType` | string | Yes | Activity type: posts, comments, or articles |
| `paginationToken` | string | No | Pagination token for next page of results |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `paginationToken` | string | Token for fetching next page |
| `activityType` | string | Type of activities returned |
| `activities` | array | Activities |
| ↳ `activityId` | string | Activity ID |
| ↳ `commentary` | string | Activity text content |
| ↳ `linkedInUrl` | string | Link to activity |
| ↳ `timeElapsed` | string | Time elapsed since activity |
| ↳ `numReactions` | number | Total number of reactions |
| ↳ `author` | object | Activity author info |
| ↳ `name` | string | Author name |
| ↳ `profileId` | string | Profile ID |
| ↳ `profilePicture` | string | Profile picture URL |
| ↳ `reactionBreakdown` | object | Reactions |
| ↳ `likes` | number | Likes |
| ↳ `empathy` | number | Empathy reactions |
| ↳ `other` | number | Other reactions |
| ↳ `attachments` | array | Attachment URLs |
### `enrich_search_company_activities`
Get a company's recent LinkedIn activities (posts, comments, or articles) by company ID.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Enrich API key |
| `companyId` | string | Yes | LinkedIn company ID |
| `activityType` | string | Yes | Activity type: posts, comments, or articles |
| `paginationToken` | string | No | Pagination token for next page of results |
| `offset` | number | No | Number of records to skip \(default: 0\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `paginationToken` | string | Token for fetching next page |
| `activityType` | string | Type of activities returned |
| `activities` | array | Activities |
| ↳ `activityId` | string | Activity ID |
| ↳ `commentary` | string | Activity text content |
| ↳ `linkedInUrl` | string | Link to activity |
| ↳ `timeElapsed` | string | Time elapsed since activity |
| ↳ `numReactions` | number | Total number of reactions |
| ↳ `author` | object | Activity author info |
| ↳ `name` | string | Author name |
| ↳ `profileId` | string | Profile ID |
| ↳ `profilePicture` | string | Profile picture URL |
| ↳ `reactionBreakdown` | object | Reactions |
| ↳ `likes` | number | Likes |
| ↳ `empathy` | number | Empathy reactions |
| ↳ `other` | number | Other reactions |
| ↳ `attachments` | array | Attachments |
### `enrich_reverse_hash_lookup`
Convert an MD5 email hash back to the original email address and display name.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Enrich API key |
| `hash` | string | Yes | MD5 hash value to look up |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `hash` | string | MD5 hash that was looked up |
| `email` | string | Original email address |
| `displayName` | string | Display name associated with the email |
| `found` | boolean | Whether an email was found for the hash |
### `enrich_search_logo`
Get a company logo image URL by domain.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Enrich API key |
| `url` | string | Yes | Company domain \(e.g., google.com\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `logoUrl` | string | URL to fetch the company logo |
| `domain` | string | Domain that was looked up |

View File

@@ -10,6 +10,23 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
color="#181C1E"
/>
{/* MANUAL-CONTENT-START:intro */}
[GitHub](https://github.com/) is the world's leading platform for hosting, collaborating on, and managing source code. GitHub offers powerful tools for version control, code review, branching strategies, and team collaboration within the rich Git ecosystem, underpinning both open source and enterprise development worldwide.
The GitHub integration in Sim allows your agents to seamlessly automate, interact with, and orchestrate workflows across your repositories. Using this integration, agents can perform an extended set of code and collaboration operations, enabling:
- **Fetch pull request details:** Retrieve a full overview of any pull request, including file diffs, branch information, metadata, approvals, and a summary of changes, for automation or review workflows.
- **Create pull request comments:** Automatically generate or post comments on PRs—such as reviews, suggestions, or status updates—enabling speedy feedback, documentation, or policy enforcement.
- **Get repository information:** Access comprehensive repository metadata, including descriptions, visibility, topics, default branches, and contributors. This supports intelligent project analysis, dynamic workflow routing, and organizational reporting.
- **Fetch the latest commit:** Quickly obtain details from the newest commit on any branch, including hashes, messages, authors, and timestamps. This is useful for monitoring development velocity, triggering downstream actions, or enforcing quality checks.
- **Trigger workflows from GitHub events:** Set up Sim workflows to start automatically from key GitHub events, including pull request creation, review comments, or when new commits are pushed, through easy webhook integration. Automate actions such as deployments, notifications, compliance checks, or documentation updates in real time.
- **Monitor and manage repository activity:** Programmatically track contributions, manage PR review states, analyze branch histories, and audit code changes. Empower agents to enforce requirements, coordinate releases, and respond dynamically to development patterns.
- **Support for advanced automations:** Combine these operations—for example, fetch PR data, leave context-aware comments, and kick off multi-step Sim workflows on code pushes or PR merges—to automate your team's engineering processes from end to end.
By leveraging all of these capabilities, the Sim GitHub integration enables agents to engage deeply in the development lifecycle. Automate code reviews, streamline team feedback, synchronize project artifacts, accelerate CI/CD, and enforce best practices with ease. Bring security, speed, and reliability to your workflows—directly within your Sim-powered automation environment, with full integration into your organization's GitHub strategy.
{/* MANUAL-CONTENT-END */}
## Usage Instructions
Integrate GitHub into the workflow. Can get PR details, create PR comment, get repository info, and get latest commit. Can be used in trigger mode to trigger a workflow when a PR is created, commented on, or a commit is pushed.

View File

@@ -11,55 +11,17 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
/>
{/* MANUAL-CONTENT-START:intro */}
[Google Docs](https://docs.google.com) is a powerful cloud-based document creation and editing service that allows users to create, edit, and collaborate on documents in real-time. As part of Google's productivity suite, Google Docs offers a versatile platform for text documents with robust formatting, commenting, and sharing capabilities.
[Google Docs](https://docs.google.com) is Google's collaborative, cloud-based document service, enabling users to create, edit, and share documents in real time. As an integral part of Google Workspace, Docs offers rich formatting tools, commenting, version history, and seamless integration with other Google productivity tools.
Learn how to integrate the Google Docs "Read" tool in Sim to effortlessly fetch data from your docs and to integrate into your workflows. This tutorial walks you through connecting Google Docs, setting up data reads, and using that information to automate processes in real-time. Perfect for syncing live data with your agents.
Google Docs empowers individuals and teams to:
<iframe
width="100%"
height="400"
src="https://www.youtube.com/embed/f41gy9rBHhE"
title="Use the Google Docs Read tool in Sim"
frameBorder="0"
allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture"
allowFullScreen
></iframe>
- **Create and format documents:** Develop rich text documents with advanced formatting, images, and tables.
- **Collaborate and comment:** Multiple users can edit and comment with suggestions instantly.
- **Track changes and version history:** Review, revert, and manage revisions over time.
- **Access from any device:** Work on documents from web, mobile, or desktop with full cloud synchronization.
- **Integrate across Google services:** Connect Docs with Drive, Sheets, Slides, and external platforms for powerful workflows.
Learn how to integrate the Google Docs "Update" tool in Sim to effortlessly add content in your docs through your workflows. This tutorial walks you through connecting Google Docs, configuring data writes, and using that information to automate document updates seamlessly. Perfect for maintaining dynamic, real-time documentation with minimal effort.
<iframe
width="100%"
height="400"
src="https://www.youtube.com/embed/L64ROHS2ivA"
title="Use the Google Docs Update tool in Sim"
frameBorder="0"
allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture"
allowFullScreen
></iframe>
Learn how to integrate the Google Docs "Create" tool in Sim to effortlessly generate new documents through your workflows. This tutorial walks you through connecting Google Docs, setting up document creation, and using workflow data to populate content automatically. Perfect for streamlining document generation and enhancing productivity.
<iframe
width="100%"
height="400"
src="https://www.youtube.com/embed/lWpHH4qddWk"
title="Use the Google Docs Create tool in Sim"
frameBorder="0"
allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture"
allowFullScreen
></iframe>
With Google Docs, you can:
- **Create and edit documents**: Develop text documents with comprehensive formatting options
- **Collaborate in real-time**: Work simultaneously with multiple users on the same document
- **Track changes**: View revision history and restore previous versions
- **Comment and suggest**: Provide feedback and propose edits without changing the original content
- **Access anywhere**: Use Google Docs across devices with automatic cloud synchronization
- **Work offline**: Continue working without internet connection with changes syncing when back online
- **Integrate with other services**: Connect with Google Drive, Sheets, Slides, and third-party applications
In Sim, the Google Docs integration enables your agents to interact directly with document content programmatically. This allows for powerful automation scenarios such as document creation, content extraction, collaborative editing, and document management. Your agents can read existing documents to extract information, write to documents to update content, and create new documents from scratch. This integration bridges the gap between your AI workflows and document management, enabling seamless interaction with one of the world's most widely used document platforms. By connecting Sim with Google Docs, you can automate document workflows, generate reports, extract insights from documents, and maintain documentation - all through your intelligent agents.
In Sim, the Google Docs integration allows your agents to read document content, write new content, and create documents programmatically as part of automated workflows. This integration unlocks automation such as document generation, report writing, content extraction, and collaborative editing—bridging the gap between AI-driven workflows and document management in your organization.
{/* MANUAL-CONTENT-END */}

View File

@@ -11,30 +11,18 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
/>
{/* MANUAL-CONTENT-START:intro */}
[Google Drive](https://drive.google.com) is Google's cloud storage and file synchronization service that allows users to store files, synchronize files across devices, and share files with others. As a core component of Google's productivity ecosystem, Google Drive offers robust storage, organization, and collaboration capabilities.
[Google Drive](https://drive.google.com) is Google's cloud-based file storage and synchronization service, making it easy to store, manage, share, and access files securely across devices and platforms. As a core element of Google Workspace, Google Drive offers robust tools for file organization, collaboration, and seamless integration with the broader productivity suite.
Learn how to integrate the Google Drive tool in Sim to effortlessly pull information from your Drive through your workflows. This tutorial walks you through connecting Google Drive, setting up data retrieval, and using stored documents and files to enhance automation. Perfect for syncing important data with your agents in real-time.
Google Drive enables individuals and teams to:
<iframe
width="100%"
height="400"
src="https://www.youtube.com/embed/cRoRr4b-EAs"
title="Use the Google Drive tool in Sim"
frameBorder="0"
allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture"
allowFullScreen
></iframe>
- **Store files in the cloud:** Access documents, images, videos, and more from anywhere with internet connectivity.
- **Organize and manage content:** Create and arrange folders, use naming conventions, and leverage search for fast retrieval.
- **Share and collaborate:** Control file and folder permissions, share with individuals or groups, and collaborate in real time.
- **Leverage powerful search:** Quickly locate files using Google's search technology.
- **Access across devices:** Work with your files on desktop, mobile, or web with full synchronization.
- **Integrate deeply across Google services:** Connect with Google Docs, Sheets, Slides, and partner applications in your workflows.
With Google Drive, you can:
- **Store files in the cloud**: Upload and access your files from anywhere with internet access
- **Organize content**: Create folders, use color coding, and implement naming conventions
- **Share and collaborate**: Control access permissions and work simultaneously on files
- **Search efficiently**: Find files quickly with Google's powerful search technology
- **Access across devices**: Use Google Drive on desktop, mobile, and web platforms
- **Integrate with other services**: Connect with Google Docs, Sheets, Slides, and third-party applications
In Sim, the Google Drive integration enables your agents to interact directly with your cloud storage programmatically. This allows for powerful automation scenarios such as file management, content organization, and document workflows. Your agents can upload new files to specific folders, download existing files to process their contents, and list folder contents to navigate your storage structure. This integration bridges the gap between your AI workflows and your document management system, enabling seamless file operations without manual intervention. By connecting Sim with Google Drive, you can automate file-based workflows, manage documents intelligently, and incorporate cloud storage operations into your agent's capabilities.
In Sim, the Google Drive integration allows your agents to read, upload, download, list, and organize your Drive files programmatically. Agents can automate file management, streamline content workflows, and enable no-code automation around document storage and retrieval. By connecting Sim with Google Drive, you empower your agents to incorporate cloud file operations directly into intelligent business processes.
{/* MANUAL-CONTENT-END */}

View File

@@ -11,29 +11,9 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
/>
{/* MANUAL-CONTENT-START:intro */}
[Google Search](https://www.google.com) is the world's most widely used search engine, providing access to billions of web pages and information sources. Google Search uses sophisticated algorithms to deliver relevant search results based on user queries, making it an essential tool for finding information on the internet.
[Google Search](https://www.google.com) is the world's most widely used web search engine, making it easy to find information, discover new content, and answer questions in real time. With advanced search algorithms, Google Search helps you quickly locate web pages, images, news, and more using simple or complex queries.
Learn how to integrate the Google Search tool in Sim to effortlessly fetch real-time search results through your workflows. This tutorial walks you through connecting Google Search, configuring search queries, and using live data to enhance automation. Perfect for powering your agents with up-to-date information and smarter decision-making.
<iframe
width="100%"
height="400"
src="https://www.youtube.com/embed/1B7hV9b5UMQ"
title="Use the Google Search tool in Sim"
frameBorder="0"
allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture"
allowFullScreen
></iframe>
With Google Search, you can:
- **Find relevant information**: Access billions of web pages with Google's powerful search algorithms
- **Get specific results**: Use search operators to refine and target your queries
- **Discover diverse content**: Find text, images, videos, news, and other content types
- **Access knowledge graphs**: Get structured information about people, places, and things
- **Utilize search features**: Take advantage of specialized search tools like calculators, unit converters, and more
In Sim, the Google Search integration enables your agents to search the web programmatically and incorporate search results into their workflows. This allows for powerful automation scenarios such as research, fact-checking, data gathering, and information synthesis. Your agents can formulate search queries, retrieve relevant results, and extract information from those results to make decisions or generate insights. This integration bridges the gap between your AI workflows and the vast information available on the web, enabling your agents to access up-to-date information from across the internet. By connecting Sim with Google Search, you can create agents that stay informed with the latest information, verify facts, conduct research, and provide users with relevant web content - all without leaving your workflow.
In Sim, the Google Search integration allows your agents to search the web and retrieve live information as part of automated workflows. This enables powerful use cases such as automated research, fact-checking, knowledge synthesis, and dynamic content discovery. By connecting Sim with Google Search, your agents can perform queries, process and analyze web results, and incorporate the latest information into their decisions—without manual effort. Enhance your workflows with always up-to-date knowledge from across the internet.
{/* MANUAL-CONTENT-END */}

View File

@@ -10,6 +10,20 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
color="#F64F9E"
/>
{/* MANUAL-CONTENT-START:intro */}
The Memory tool enables your agents to store, retrieve, and manage conversation memories across workflows. It acts as a persistent memory store that agents can access to maintain conversation context, recall facts, or track actions over time.
With the Memory tool, you can:
- **Add new memories**: Store relevant information, events, or conversation history by saving agent or user messages into a structured memory database
- **Retrieve memories**: Fetch specific memories or all memories tied to a conversation, helping agents recall previous interactions or facts
- **Delete memories**: Remove outdated or incorrect memories from the database to maintain accurate context
- **Append to existing conversations**: Update or expand on existing memory threads by appending new messages with the same conversation identifier
Sim's Memory block is especially useful for building agents that require persistent state—helping them remember what was said earlier in a conversation, persist facts between tasks, or apply long-term history in decision-making. By integrating Memory, you enable richer, more contextual, and more dynamic workflows for your agents.
{/* MANUAL-CONTENT-END */}
## Usage Instructions
Integrate Memory into the workflow. Can add, get a memory, get all memories, and delete memories.

View File

@@ -24,6 +24,7 @@
"dynamodb",
"elasticsearch",
"elevenlabs",
"enrich",
"exa",
"file",
"firecrawl",

View File

@@ -10,6 +10,21 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
color="#181C1E"
/>
{/* MANUAL-CONTENT-START:intro */}
The Notion tool integration enables your agents to read, create, and manage Notion pages and databases directly within your workflows. This allows you to automate the retrieval and updating of structured content, notes, documents, and more from your Notion workspace.
With the Notion tool, you can:
- **Read pages or databases**: Extract rich content or metadata from specified Notion pages or entire databases
- **Create new content**: Programmatically create new pages or databases for dynamic content generation
- **Append content**: Add new blocks or properties to existing pages and databases
- **Query databases**: Run advanced filters and searches on structured Notion data for custom workflows
- **Search your workspace**: Locate pages and databases across your Notion workspace automatically
This tool is ideal for scenarios where agents need to synchronize information, generate reports, or maintain structured notes within Notion. By bringing Notion's capabilities into automated workflows, you empower your agents to interface with knowledge, documentation, and project management data programmatically and seamlessly.
{/* MANUAL-CONTENT-END */}
## Usage Instructions
Integrate with Notion into the workflow. Can read page, read database, create page, create database, append content, query database, and search workspace.

View File

@@ -13,16 +13,6 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
{/* MANUAL-CONTENT-START:intro */}
[Slack](https://www.slack.com/) is a business communication platform that offers teams a unified place for messaging, tools, and files.
<iframe
width="100%"
height="400"
src="https://www.youtube.com/embed/J5jz3UaWmE8"
title="Slack Integration with Sim"
frameBorder="0"
allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture"
allowFullScreen
></iframe>
With Slack, you can:
- **Automate agent notifications**: Send real-time updates from your Sim agents to any Slack channel

View File

@@ -1,6 +1,6 @@
import { redirect } from 'next/navigation'
import { getEnv, isTruthy } from '@/lib/core/config/env'
import SSOForm from '@/app/(auth)/sso/sso-form'
import SSOForm from '@/ee/sso/components/sso-form'
export const dynamic = 'force-dynamic'

View File

@@ -8,6 +8,7 @@ import { verifyCronAuth } from '@/lib/auth/internal'
const logger = createLogger('CleanupStaleExecutions')
const STALE_THRESHOLD_MINUTES = 30
const MAX_INT32 = 2_147_483_647
export async function GET(request: NextRequest) {
try {
@@ -45,13 +46,14 @@ export async function GET(request: NextRequest) {
try {
const staleDurationMs = Date.now() - new Date(execution.startedAt).getTime()
const staleDurationMinutes = Math.round(staleDurationMs / 60000)
const totalDurationMs = Math.min(staleDurationMs, MAX_INT32)
await db
.update(workflowExecutionLogs)
.set({
status: 'failed',
endedAt: new Date(),
totalDurationMs: staleDurationMs,
totalDurationMs,
executionData: sql`jsonb_set(
COALESCE(execution_data, '{}'::jsonb),
ARRAY['error'],

View File

@@ -284,7 +284,7 @@ async function handleToolsCall(
content: [
{ type: 'text', text: JSON.stringify(executeResult.output || executeResult, null, 2) },
],
isError: !executeResult.success,
isError: executeResult.success === false,
}
return NextResponse.json(createResponse(id, result))

View File

@@ -20,6 +20,7 @@ import { z } from 'zod'
import { getEmailSubject, renderInvitationEmail } from '@/components/emails'
import { getSession } from '@/lib/auth'
import { hasAccessControlAccess } from '@/lib/billing'
import { syncUsageLimitsFromSubscription } from '@/lib/billing/core/usage'
import { requireStripeClient } from '@/lib/billing/stripe-client'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { sendEmail } from '@/lib/messaging/email/mailer'
@@ -501,6 +502,18 @@ export async function PUT(
}
}
if (status === 'accepted') {
try {
await syncUsageLimitsFromSubscription(session.user.id)
} catch (syncError) {
logger.error('Failed to sync usage limits after joining org', {
userId: session.user.id,
organizationId,
error: syncError,
})
}
}
logger.info(`Organization invitation ${status}`, {
organizationId,
invitationId,

View File

@@ -29,7 +29,7 @@ import { hasWorkspaceAdminAccess } from '@/lib/workspaces/permissions/utils'
import {
InvitationsNotAllowedError,
validateInvitationsAllowed,
} from '@/executor/utils/permission-check'
} from '@/ee/access-control/utils/permission-check'
const logger = createLogger('OrganizationInvitations')

View File

@@ -5,6 +5,7 @@ import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { hasActiveSubscription } from '@/lib/billing'
const logger = createLogger('SubscriptionTransferAPI')
@@ -88,6 +89,14 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
)
}
// Check if org already has an active subscription (prevent duplicates)
if (await hasActiveSubscription(organizationId)) {
return NextResponse.json(
{ error: 'Organization already has an active subscription' },
{ status: 409 }
)
}
await db
.update(subscription)
.set({ referenceId: organizationId })

View File

@@ -203,6 +203,10 @@ export const PATCH = withAdminAuthParams<RouteParams>(async (request, context) =
}
updateData.billingBlocked = body.billingBlocked
// Clear the reason when unblocking
if (body.billingBlocked === false) {
updateData.billingBlockedReason = null
}
updated.push('billingBlocked')
}

View File

@@ -1,6 +1,4 @@
import { db, workflow as workflowTable } from '@sim/db'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { v4 as uuidv4 } from 'uuid'
import { z } from 'zod'
@@ -8,6 +6,7 @@ import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { SSE_HEADERS } from '@/lib/core/utils/sse'
import { markExecutionCancelled } from '@/lib/execution/cancellation'
import { preprocessExecution } from '@/lib/execution/preprocessing'
import { LoggingSession } from '@/lib/logs/execution/logging-session'
import { executeWorkflowCore } from '@/lib/workflows/executor/execution-core'
import { createSSECallbacks } from '@/lib/workflows/executor/execution-events'
@@ -75,12 +74,31 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
const { startBlockId, sourceSnapshot, input } = validation.data
const executionId = uuidv4()
const [workflowRecord] = await db
.select({ workspaceId: workflowTable.workspaceId, userId: workflowTable.userId })
.from(workflowTable)
.where(eq(workflowTable.id, workflowId))
.limit(1)
// Run preprocessing checks (billing, rate limits, usage limits)
const preprocessResult = await preprocessExecution({
workflowId,
userId,
triggerType: 'manual',
executionId,
requestId,
checkRateLimit: false, // Manual executions don't rate limit
checkDeployment: false, // Run-from-block doesn't require deployment
})
if (!preprocessResult.success) {
const { error } = preprocessResult
logger.warn(`[${requestId}] Preprocessing failed for run-from-block`, {
workflowId,
error: error?.message,
statusCode: error?.statusCode,
})
return NextResponse.json(
{ error: error?.message || 'Execution blocked' },
{ status: error?.statusCode || 500 }
)
}
const workflowRecord = preprocessResult.workflowRecord
if (!workflowRecord?.workspaceId) {
return NextResponse.json({ error: 'Workflow not found or has no workspace' }, { status: 404 })
}
@@ -92,6 +110,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
workflowId,
startBlockId,
executedBlocksCount: sourceSnapshot.executedBlocks.length,
billingActorUserId: preprocessResult.actorUserId,
})
const loggingSession = new LoggingSession(workflowId, executionId, 'manual', requestId)

View File

@@ -102,7 +102,7 @@ describe('Workspace Invitations API Route', () => {
inArray: vi.fn().mockImplementation((field, values) => ({ type: 'inArray', field, values })),
}))
vi.doMock('@/executor/utils/permission-check', () => ({
vi.doMock('@/ee/access-control/utils/permission-check', () => ({
validateInvitationsAllowed: vi.fn().mockResolvedValue(undefined),
InvitationsNotAllowedError: class InvitationsNotAllowedError extends Error {
constructor() {

View File

@@ -21,7 +21,7 @@ import { getFromEmailAddress } from '@/lib/messaging/email/utils'
import {
InvitationsNotAllowedError,
validateInvitationsAllowed,
} from '@/executor/utils/permission-check'
} from '@/ee/access-control/utils/permission-check'
export const dynamic = 'force-dynamic'
@@ -38,7 +38,6 @@ export async function GET(req: NextRequest) {
}
try {
// Get all workspaces where the user has permissions
const userWorkspaces = await db
.select({ id: workspace.id })
.from(workspace)
@@ -55,10 +54,8 @@ export async function GET(req: NextRequest) {
return NextResponse.json({ invitations: [] })
}
// Get all workspaceIds where the user is a member
const workspaceIds = userWorkspaces.map((w) => w.id)
// Find all invitations for those workspaces
const invitations = await db
.select()
.from(workspaceInvitation)

View File

@@ -14,11 +14,11 @@ import {
ChatMessageContainer,
EmailAuth,
PasswordAuth,
SSOAuth,
VoiceInterface,
} from '@/app/chat/components'
import { CHAT_ERROR_MESSAGES, CHAT_REQUEST_TIMEOUT_MS } from '@/app/chat/constants'
import { useAudioStreaming, useChatStreaming } from '@/app/chat/hooks'
import SSOAuth from '@/ee/sso/components/sso-auth'
const logger = createLogger('ChatClient')

View File

@@ -1,6 +1,5 @@
export { default as EmailAuth } from './auth/email/email-auth'
export { default as PasswordAuth } from './auth/password/password-auth'
export { default as SSOAuth } from './auth/sso/sso-auth'
export { ChatErrorState } from './error-state/error-state'
export { ChatHeader } from './header/header'
export { ChatInput } from './input/input'

View File

@@ -1,7 +1,7 @@
import { redirect } from 'next/navigation'
import { getSession } from '@/lib/auth'
import { verifyWorkspaceMembership } from '@/app/api/workflows/utils'
import { getUserPermissionConfig } from '@/executor/utils/permission-check'
import { getUserPermissionConfig } from '@/ee/access-control/utils/permission-check'
import { Knowledge } from './knowledge'
interface KnowledgePageProps {
@@ -23,7 +23,6 @@ export default async function KnowledgePage({ params }: KnowledgePageProps) {
redirect('/')
}
// Check permission group restrictions
const permissionConfig = await getUserPermissionConfig(session.user.id)
if (permissionConfig?.hideKnowledgeBaseTab) {
redirect(`/workspace/${workspaceId}`)

View File

@@ -18,6 +18,7 @@ import {
import { ScrollArea } from '@/components/ui/scroll-area'
import { BASE_EXECUTION_CHARGE } from '@/lib/billing/constants'
import { cn } from '@/lib/core/utils/cn'
import { formatDuration } from '@/lib/core/utils/formatting'
import { filterHiddenOutputKeys } from '@/lib/logs/execution/trace-spans/trace-spans'
import {
ExecutionSnapshot,
@@ -453,7 +454,7 @@ export const LogDetails = memo(function LogDetails({
Duration
</span>
<span className='font-medium text-[13px] text-[var(--text-secondary)]'>
{log.duration || '—'}
{formatDuration(log.duration, { precision: 2 }) || '—'}
</span>
</div>

View File

@@ -6,11 +6,11 @@ import Link from 'next/link'
import { List, type RowComponentProps, useListRef } from 'react-window'
import { Badge, buttonVariants } from '@/components/emcn'
import { cn } from '@/lib/core/utils/cn'
import { formatDuration } from '@/lib/core/utils/formatting'
import {
DELETED_WORKFLOW_COLOR,
DELETED_WORKFLOW_LABEL,
formatDate,
formatDuration,
getDisplayStatus,
LOG_COLUMNS,
StatusBadge,
@@ -113,7 +113,7 @@ const LogRow = memo(
<div className={`${LOG_COLUMNS.duration.width} ${LOG_COLUMNS.duration.minWidth}`}>
<Badge variant='default' className='rounded-[6px] px-[9px] py-[2px] text-[12px]'>
{formatDuration(log.duration) || '—'}
{formatDuration(log.duration, { precision: 2 }) || '—'}
</Badge>
</div>
</div>

View File

@@ -1,6 +1,7 @@
import React from 'react'
import { format } from 'date-fns'
import { Badge } from '@/components/emcn'
import { formatDuration } from '@/lib/core/utils/formatting'
import { getIntegrationMetadata } from '@/lib/logs/get-trigger-options'
import { getBlock } from '@/blocks/registry'
import { CORE_TRIGGER_TYPES } from '@/stores/logs/filters/types'
@@ -362,47 +363,14 @@ export function mapToExecutionLogAlt(log: RawLogResponse): ExecutionLog {
}
}
/**
 * Format duration for display in logs UI
 * If duration is under 1 second, displays as milliseconds (e.g., "500ms")
 * If duration is 1 second or more, displays as seconds (e.g., "1.23s")
 * @param duration - Duration string (e.g., "500ms" or "1234.5ms") or null
 * @returns Formatted duration string, or null when no duration is provided
 */
export function formatDuration(duration: string | null): string | null {
  if (!duration) return null
  // Use parseFloat to read the leading numeric value directly. The previous
  // approach of stripping non-digit characters before parseInt also removed
  // the decimal point, so "1234.5ms" parsed as 12345 — a 10x corruption.
  const ms = Number.parseFloat(duration)
  // Fall back to the raw string when it does not start with a number.
  if (!Number.isFinite(ms)) return duration
  if (ms < 1000) {
    return `${ms}ms`
  }
  // Convert to seconds with up to 2 decimal places, trimming trailing zeros
  const seconds = ms / 1000
  return `${seconds.toFixed(2).replace(/\.?0+$/, '')}s`
}
/**
 * Format latency value for display in dashboard UI
 * If latency is under 1 second, displays as milliseconds (e.g., "500ms")
 * If latency is 1 second or more, displays as seconds (e.g., "1.23s"),
 * with trailing zeros trimmed.
 * @param ms - Latency in milliseconds (number)
 * @returns Formatted latency string, or an em dash for non-positive or
 *          non-finite input
 */
export function formatLatency(ms: number): string {
  if (!Number.isFinite(ms) || ms <= 0) return '—'
  if (ms < 1000) {
    return `${Math.round(ms)}ms`
  }
  // Convert to seconds with up to 2 decimal places
  const seconds = ms / 1000
  return `${seconds.toFixed(2).replace(/\.?0+$/, '')}s`
  // NOTE(review): removed an unreachable duplicate return statement
  // (`return formatDuration(ms, { precision: 2 }) ?? '—'`) that appeared
  // after the final return — a merge artifact. The reachable behavior of
  // the function is unchanged.
}
export const formatDate = (dateString: string) => {

View File

@@ -1,10 +1,11 @@
'use client'
import type React from 'react'
import { createContext, useCallback, useContext, useEffect, useMemo, useState } from 'react'
import { createContext, useCallback, useContext, useEffect, useMemo, useRef, useState } from 'react'
import { createLogger } from '@sim/logger'
import { useQueryClient } from '@tanstack/react-query'
import { useParams } from 'next/navigation'
import { useSocket } from '@/app/workspace/providers/socket-provider'
import {
useWorkspacePermissionsQuery,
type WorkspacePermissions,
@@ -57,14 +58,42 @@ export function WorkspacePermissionsProvider({ children }: WorkspacePermissionsP
const [hasShownOfflineNotification, setHasShownOfflineNotification] = useState(false)
const hasOperationError = useOperationQueueStore((state) => state.hasOperationError)
const addNotification = useNotificationStore((state) => state.addNotification)
const removeNotification = useNotificationStore((state) => state.removeNotification)
const { isReconnecting } = useSocket()
const reconnectingNotificationIdRef = useRef<string | null>(null)
const isOfflineMode = hasOperationError
useEffect(() => {
if (isReconnecting && !reconnectingNotificationIdRef.current && !isOfflineMode) {
const id = addNotification({
level: 'error',
message: 'Reconnecting...',
})
reconnectingNotificationIdRef.current = id
} else if (!isReconnecting && reconnectingNotificationIdRef.current) {
removeNotification(reconnectingNotificationIdRef.current)
reconnectingNotificationIdRef.current = null
}
return () => {
if (reconnectingNotificationIdRef.current) {
removeNotification(reconnectingNotificationIdRef.current)
reconnectingNotificationIdRef.current = null
}
}
}, [isReconnecting, isOfflineMode, addNotification, removeNotification])
useEffect(() => {
if (!isOfflineMode || hasShownOfflineNotification) {
return
}
if (reconnectingNotificationIdRef.current) {
removeNotification(reconnectingNotificationIdRef.current)
reconnectingNotificationIdRef.current = null
}
try {
addNotification({
level: 'error',
@@ -78,7 +107,7 @@ export function WorkspacePermissionsProvider({ children }: WorkspacePermissionsP
} catch (error) {
logger.error('Failed to add offline notification', { error })
}
}, [addNotification, hasShownOfflineNotification, isOfflineMode])
}, [addNotification, removeNotification, hasShownOfflineNotification, isOfflineMode])
const {
data: workspacePermissions,

View File

@@ -6,7 +6,7 @@ import { getSession } from '@/lib/auth'
import { verifyWorkspaceMembership } from '@/app/api/workflows/utils'
import type { Template as WorkspaceTemplate } from '@/app/workspace/[workspaceId]/templates/templates'
import Templates from '@/app/workspace/[workspaceId]/templates/templates'
import { getUserPermissionConfig } from '@/executor/utils/permission-check'
import { getUserPermissionConfig } from '@/ee/access-control/utils/permission-check'
interface TemplatesPageProps {
params: Promise<{

View File

@@ -3,6 +3,7 @@
import { memo, useEffect, useMemo, useRef, useState } from 'react'
import clsx from 'clsx'
import { ChevronUp } from 'lucide-react'
import { formatDuration } from '@/lib/core/utils/formatting'
import { CopilotMarkdownRenderer } from '../markdown-renderer'
/** Removes thinking tags (raw or escaped) and special tags from streamed content */
@@ -241,15 +242,11 @@ export function ThinkingBlock({
return () => window.clearInterval(intervalId)
}, [isStreaming, isExpanded, userHasScrolledAway])
/** Formats duration in milliseconds to seconds (minimum 1s) */
const formatDuration = (ms: number) => {
const seconds = Math.max(1, Math.round(ms / 1000))
return `${seconds}s`
}
const hasContent = cleanContent.length > 0
const isThinkingDone = !isStreaming || hasFollowingContent || hasSpecialTags
const durationText = `${label} for ${formatDuration(duration)}`
// Round to nearest second (minimum 1s) to match original behavior
const roundedMs = Math.max(1000, Math.round(duration / 1000) * 1000)
const durationText = `${label} for ${formatDuration(roundedMs)}`
const getStreamingLabel = (lbl: string) => {
if (lbl === 'Thought') return 'Thinking'

View File

@@ -15,6 +15,7 @@ import {
hasInterrupt as hasInterruptFromConfig,
isSpecialTool as isSpecialToolFromConfig,
} from '@/lib/copilot/tools/client/ui-config'
import { formatDuration } from '@/lib/core/utils/formatting'
import { CopilotMarkdownRenderer } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/markdown-renderer'
import { SmoothStreamingText } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/smooth-streaming'
import { ThinkingBlock } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/copilot/components/copilot-message/components/thinking-block'
@@ -848,13 +849,10 @@ const SubagentContentRenderer = memo(function SubagentContentRenderer({
(allParsed.options && Object.keys(allParsed.options).length > 0)
)
const formatDuration = (ms: number) => {
const seconds = Math.max(1, Math.round(ms / 1000))
return `${seconds}s`
}
const outerLabel = getSubagentCompletionLabel(toolCall.name)
const durationText = `${outerLabel} for ${formatDuration(duration)}`
// Round to nearest second (minimum 1s) to match original behavior
const roundedMs = Math.max(1000, Math.round(duration / 1000) * 1000)
const durationText = `${outerLabel} for ${formatDuration(roundedMs)}`
const renderCollapsibleContent = () => (
<>

View File

@@ -0,0 +1,443 @@
/**
* @vitest-environment node
*/
import { describe, expect, it } from 'vitest'
/** Shape of a tool entry as persisted in an agent block's selected-tools list. */
interface StoredTool {
  type: string
  title?: string
  toolId?: string
  params?: Record<string, string>
  customToolId?: string
  schema?: any
  code?: string
  operation?: string
  usageControl?: 'auto' | 'force' | 'none'
}

/** Returns true when an MCP tool with the given id is already in the selection. */
const isMcpToolAlreadySelected = (selectedTools: StoredTool[], mcpToolId: string): boolean => {
  for (const candidate of selectedTools) {
    if (candidate.type === 'mcp' && candidate.toolId === mcpToolId) {
      return true
    }
  }
  return false
}

/** Returns true when a custom tool with the given database id is already selected. */
const isCustomToolAlreadySelected = (
  selectedTools: StoredTool[],
  customToolId: string
): boolean => {
  const match = selectedTools.find(
    (candidate) => candidate.type === 'custom-tool' && candidate.customToolId === customToolId
  )
  return match !== undefined
}

/** Returns true when a workflow-executor tool targeting the given workflow exists. */
const isWorkflowAlreadySelected = (selectedTools: StoredTool[], workflowId: string): boolean => {
  return selectedTools.some((candidate) => {
    if (candidate.type !== 'workflow_input') return false
    return candidate.params?.workflowId === workflowId
  })
}
describe('isMcpToolAlreadySelected', () => {
describe('basic functionality', () => {
it.concurrent('returns false when selectedTools is empty', () => {
expect(isMcpToolAlreadySelected([], 'mcp-tool-123')).toBe(false)
})
it.concurrent('returns false when MCP tool is not in selectedTools', () => {
const selectedTools: StoredTool[] = [
{ type: 'mcp', toolId: 'different-mcp-tool', title: 'Different Tool' },
]
expect(isMcpToolAlreadySelected(selectedTools, 'mcp-tool-123')).toBe(false)
})
it.concurrent('returns true when MCP tool is already selected', () => {
const selectedTools: StoredTool[] = [
{ type: 'mcp', toolId: 'mcp-tool-123', title: 'My MCP Tool' },
]
expect(isMcpToolAlreadySelected(selectedTools, 'mcp-tool-123')).toBe(true)
})
it.concurrent('returns true when MCP tool is one of many selected tools', () => {
const selectedTools: StoredTool[] = [
{ type: 'custom-tool', customToolId: 'custom-1' },
{ type: 'mcp', toolId: 'mcp-tool-123', title: 'My MCP Tool' },
{ type: 'workflow_input', toolId: 'workflow_executor' },
]
expect(isMcpToolAlreadySelected(selectedTools, 'mcp-tool-123')).toBe(true)
})
})
describe('type discrimination', () => {
it.concurrent('does not match non-MCP tools with same toolId', () => {
const selectedTools: StoredTool[] = [{ type: 'http_request', toolId: 'mcp-tool-123' }]
expect(isMcpToolAlreadySelected(selectedTools, 'mcp-tool-123')).toBe(false)
})
it.concurrent('does not match custom tools even with toolId set', () => {
const selectedTools: StoredTool[] = [
{ type: 'custom-tool', toolId: 'custom-mcp-tool-123', customToolId: 'db-id' },
]
expect(isMcpToolAlreadySelected(selectedTools, 'mcp-tool-123')).toBe(false)
})
})
describe('multiple MCP tools', () => {
it.concurrent('correctly identifies first of multiple MCP tools', () => {
const selectedTools: StoredTool[] = [
{ type: 'mcp', toolId: 'mcp-tool-1', title: 'Tool 1' },
{ type: 'mcp', toolId: 'mcp-tool-2', title: 'Tool 2' },
{ type: 'mcp', toolId: 'mcp-tool-3', title: 'Tool 3' },
]
expect(isMcpToolAlreadySelected(selectedTools, 'mcp-tool-1')).toBe(true)
})
it.concurrent('correctly identifies middle MCP tool', () => {
const selectedTools: StoredTool[] = [
{ type: 'mcp', toolId: 'mcp-tool-1', title: 'Tool 1' },
{ type: 'mcp', toolId: 'mcp-tool-2', title: 'Tool 2' },
{ type: 'mcp', toolId: 'mcp-tool-3', title: 'Tool 3' },
]
expect(isMcpToolAlreadySelected(selectedTools, 'mcp-tool-2')).toBe(true)
})
it.concurrent('correctly identifies last MCP tool', () => {
const selectedTools: StoredTool[] = [
{ type: 'mcp', toolId: 'mcp-tool-1', title: 'Tool 1' },
{ type: 'mcp', toolId: 'mcp-tool-2', title: 'Tool 2' },
{ type: 'mcp', toolId: 'mcp-tool-3', title: 'Tool 3' },
]
expect(isMcpToolAlreadySelected(selectedTools, 'mcp-tool-3')).toBe(true)
})
it.concurrent('returns false for non-existent MCP tool among many', () => {
const selectedTools: StoredTool[] = [
{ type: 'mcp', toolId: 'mcp-tool-1', title: 'Tool 1' },
{ type: 'mcp', toolId: 'mcp-tool-2', title: 'Tool 2' },
]
expect(isMcpToolAlreadySelected(selectedTools, 'mcp-tool-999')).toBe(false)
})
})
})
describe('isCustomToolAlreadySelected', () => {
describe('basic functionality', () => {
it.concurrent('returns false when selectedTools is empty', () => {
expect(isCustomToolAlreadySelected([], 'custom-tool-123')).toBe(false)
})
it.concurrent('returns false when custom tool is not in selectedTools', () => {
const selectedTools: StoredTool[] = [
{ type: 'custom-tool', customToolId: 'different-custom-tool' },
]
expect(isCustomToolAlreadySelected(selectedTools, 'custom-tool-123')).toBe(false)
})
it.concurrent('returns true when custom tool is already selected', () => {
const selectedTools: StoredTool[] = [{ type: 'custom-tool', customToolId: 'custom-tool-123' }]
expect(isCustomToolAlreadySelected(selectedTools, 'custom-tool-123')).toBe(true)
})
it.concurrent('returns true when custom tool is one of many selected tools', () => {
const selectedTools: StoredTool[] = [
{ type: 'mcp', toolId: 'mcp-1', title: 'MCP Tool' },
{ type: 'custom-tool', customToolId: 'custom-tool-123' },
{ type: 'http_request', toolId: 'http_request_tool' },
]
expect(isCustomToolAlreadySelected(selectedTools, 'custom-tool-123')).toBe(true)
})
})
describe('type discrimination', () => {
it.concurrent('does not match non-custom tools with similar IDs', () => {
const selectedTools: StoredTool[] = [
{ type: 'mcp', toolId: 'custom-tool-123', title: 'MCP with similar ID' },
]
expect(isCustomToolAlreadySelected(selectedTools, 'custom-tool-123')).toBe(false)
})
it.concurrent('does not match MCP tools even if customToolId happens to match', () => {
const selectedTools: StoredTool[] = [
{ type: 'mcp', toolId: 'mcp-id', customToolId: 'custom-tool-123' } as StoredTool,
]
expect(isCustomToolAlreadySelected(selectedTools, 'custom-tool-123')).toBe(false)
})
})
describe('legacy inline custom tools', () => {
it.concurrent('does not match legacy inline tools without customToolId', () => {
const selectedTools: StoredTool[] = [
{
type: 'custom-tool',
title: 'Legacy Tool',
toolId: 'custom-myFunction',
schema: { function: { name: 'myFunction' } },
code: 'return true',
},
]
expect(isCustomToolAlreadySelected(selectedTools, 'custom-tool-123')).toBe(false)
})
it.concurrent('does not false-positive on legacy tools when checking for database tool', () => {
const selectedTools: StoredTool[] = [
{
type: 'custom-tool',
title: 'Legacy Tool',
schema: { function: { name: 'sameName' } },
code: 'return true',
},
]
expect(isCustomToolAlreadySelected(selectedTools, 'db-tool-1')).toBe(false)
})
})
describe('multiple custom tools', () => {
it.concurrent('correctly identifies first of multiple custom tools', () => {
const selectedTools: StoredTool[] = [
{ type: 'custom-tool', customToolId: 'custom-1' },
{ type: 'custom-tool', customToolId: 'custom-2' },
{ type: 'custom-tool', customToolId: 'custom-3' },
]
expect(isCustomToolAlreadySelected(selectedTools, 'custom-1')).toBe(true)
})
it.concurrent('correctly identifies middle custom tool', () => {
const selectedTools: StoredTool[] = [
{ type: 'custom-tool', customToolId: 'custom-1' },
{ type: 'custom-tool', customToolId: 'custom-2' },
{ type: 'custom-tool', customToolId: 'custom-3' },
]
expect(isCustomToolAlreadySelected(selectedTools, 'custom-2')).toBe(true)
})
it.concurrent('correctly identifies last custom tool', () => {
const selectedTools: StoredTool[] = [
{ type: 'custom-tool', customToolId: 'custom-1' },
{ type: 'custom-tool', customToolId: 'custom-2' },
{ type: 'custom-tool', customToolId: 'custom-3' },
]
expect(isCustomToolAlreadySelected(selectedTools, 'custom-3')).toBe(true)
})
it.concurrent('returns false for non-existent custom tool among many', () => {
const selectedTools: StoredTool[] = [
{ type: 'custom-tool', customToolId: 'custom-1' },
{ type: 'custom-tool', customToolId: 'custom-2' },
]
expect(isCustomToolAlreadySelected(selectedTools, 'custom-999')).toBe(false)
})
})
describe('mixed tool types', () => {
it.concurrent('correctly identifies custom tool in mixed list', () => {
const selectedTools: StoredTool[] = [
{ type: 'mcp', toolId: 'mcp-tool-1', title: 'MCP Tool' },
{ type: 'custom-tool', customToolId: 'custom-tool-123' },
{ type: 'http_request', toolId: 'http_request' },
{ type: 'workflow_input', toolId: 'workflow_executor' },
{ type: 'custom-tool', title: 'Legacy', schema: {}, code: '' },
]
expect(isCustomToolAlreadySelected(selectedTools, 'custom-tool-123')).toBe(true)
})
it.concurrent('does not confuse MCP toolId with custom customToolId', () => {
const selectedTools: StoredTool[] = [
{ type: 'mcp', toolId: 'shared-id-123', title: 'MCP Tool' },
{ type: 'custom-tool', customToolId: 'different-id' },
]
expect(isCustomToolAlreadySelected(selectedTools, 'shared-id-123')).toBe(false)
})
})
})
describe('isWorkflowAlreadySelected', () => {
describe('basic functionality', () => {
it.concurrent('returns false when selectedTools is empty', () => {
expect(isWorkflowAlreadySelected([], 'workflow-123')).toBe(false)
})
it.concurrent('returns false when workflow is not in selectedTools', () => {
const selectedTools: StoredTool[] = [
{
type: 'workflow_input',
toolId: 'workflow_executor',
params: { workflowId: 'different-workflow' },
},
]
expect(isWorkflowAlreadySelected(selectedTools, 'workflow-123')).toBe(false)
})
it.concurrent('returns true when workflow is already selected', () => {
const selectedTools: StoredTool[] = [
{
type: 'workflow_input',
toolId: 'workflow_executor',
params: { workflowId: 'workflow-123' },
},
]
expect(isWorkflowAlreadySelected(selectedTools, 'workflow-123')).toBe(true)
})
it.concurrent('returns true when workflow is one of many selected tools', () => {
const selectedTools: StoredTool[] = [
{ type: 'mcp', toolId: 'mcp-1', title: 'MCP Tool' },
{
type: 'workflow_input',
toolId: 'workflow_executor',
params: { workflowId: 'workflow-123' },
},
{ type: 'custom-tool', customToolId: 'custom-1' },
]
expect(isWorkflowAlreadySelected(selectedTools, 'workflow-123')).toBe(true)
})
})
describe('type discrimination', () => {
it.concurrent('does not match non-workflow_input tools', () => {
const selectedTools: StoredTool[] = [
{ type: 'mcp', toolId: 'workflow-123', params: { workflowId: 'workflow-123' } },
]
expect(isWorkflowAlreadySelected(selectedTools, 'workflow-123')).toBe(false)
})
it.concurrent('does not match workflow_input without params', () => {
const selectedTools: StoredTool[] = [{ type: 'workflow_input', toolId: 'workflow_executor' }]
expect(isWorkflowAlreadySelected(selectedTools, 'workflow-123')).toBe(false)
})
it.concurrent('does not match workflow_input with different workflowId in params', () => {
const selectedTools: StoredTool[] = [
{
type: 'workflow_input',
toolId: 'workflow_executor',
params: { workflowId: 'other-workflow' },
},
]
expect(isWorkflowAlreadySelected(selectedTools, 'workflow-123')).toBe(false)
})
})
describe('multiple workflows', () => {
it.concurrent('allows different workflows to be selected', () => {
const selectedTools: StoredTool[] = [
{
type: 'workflow_input',
toolId: 'workflow_executor',
params: { workflowId: 'workflow-a' },
},
{
type: 'workflow_input',
toolId: 'workflow_executor',
params: { workflowId: 'workflow-b' },
},
]
expect(isWorkflowAlreadySelected(selectedTools, 'workflow-a')).toBe(true)
expect(isWorkflowAlreadySelected(selectedTools, 'workflow-b')).toBe(true)
expect(isWorkflowAlreadySelected(selectedTools, 'workflow-c')).toBe(false)
})
it.concurrent('correctly identifies specific workflow among many', () => {
const selectedTools: StoredTool[] = [
{
type: 'workflow_input',
toolId: 'workflow_executor',
params: { workflowId: 'workflow-1' },
},
{
type: 'workflow_input',
toolId: 'workflow_executor',
params: { workflowId: 'workflow-2' },
},
{
type: 'workflow_input',
toolId: 'workflow_executor',
params: { workflowId: 'workflow-3' },
},
]
expect(isWorkflowAlreadySelected(selectedTools, 'workflow-2')).toBe(true)
expect(isWorkflowAlreadySelected(selectedTools, 'workflow-999')).toBe(false)
})
})
})
describe('duplicate prevention integration scenarios', () => {
describe('add then try to re-add', () => {
it.concurrent('prevents re-adding the same MCP tool', () => {
const selectedTools: StoredTool[] = [
{
type: 'mcp',
toolId: 'planetscale-query',
title: 'PlanetScale Query',
params: { serverId: 'server-1' },
},
]
expect(isMcpToolAlreadySelected(selectedTools, 'planetscale-query')).toBe(true)
})
it.concurrent('prevents re-adding the same custom tool', () => {
const selectedTools: StoredTool[] = [
{
type: 'custom-tool',
customToolId: 'my-custom-tool-uuid',
usageControl: 'auto',
},
]
expect(isCustomToolAlreadySelected(selectedTools, 'my-custom-tool-uuid')).toBe(true)
})
it.concurrent('prevents re-adding the same workflow', () => {
const selectedTools: StoredTool[] = [
{
type: 'workflow_input',
toolId: 'workflow_executor',
params: { workflowId: 'my-workflow-uuid' },
},
]
expect(isWorkflowAlreadySelected(selectedTools, 'my-workflow-uuid')).toBe(true)
})
})
describe('remove then re-add', () => {
it.concurrent('allows re-adding MCP tool after removal', () => {
const selectedToolsAfterRemoval: StoredTool[] = []
expect(isMcpToolAlreadySelected(selectedToolsAfterRemoval, 'planetscale-query')).toBe(false)
})
it.concurrent('allows re-adding custom tool after removal', () => {
const selectedToolsAfterRemoval: StoredTool[] = [
{ type: 'mcp', toolId: 'some-other-tool', title: 'Other' },
]
expect(isCustomToolAlreadySelected(selectedToolsAfterRemoval, 'my-custom-tool-uuid')).toBe(
false
)
})
it.concurrent('allows re-adding workflow after removal', () => {
const selectedToolsAfterRemoval: StoredTool[] = [
{ type: 'mcp', toolId: 'some-tool', title: 'Other' },
]
expect(isWorkflowAlreadySelected(selectedToolsAfterRemoval, 'my-workflow-uuid')).toBe(false)
})
})
describe('different tools with similar names', () => {
it.concurrent('allows adding different MCP tools from same server', () => {
const selectedTools: StoredTool[] = [
{ type: 'mcp', toolId: 'server1-tool-a', title: 'Tool A', params: { serverId: 'server1' } },
]
expect(isMcpToolAlreadySelected(selectedTools, 'server1-tool-b')).toBe(false)
})
it.concurrent('allows adding different custom tools', () => {
const selectedTools: StoredTool[] = [{ type: 'custom-tool', customToolId: 'custom-a' }]
expect(isCustomToolAlreadySelected(selectedTools, 'custom-b')).toBe(false)
})
it.concurrent('allows adding different workflows', () => {
const selectedTools: StoredTool[] = [
{
type: 'workflow_input',
toolId: 'workflow_executor',
params: { workflowId: 'workflow-a' },
},
]
expect(isWorkflowAlreadySelected(selectedTools, 'workflow-b')).toBe(false)
})
})
})

View File

@@ -1226,6 +1226,40 @@ export const ToolInput = memo(function ToolInput({
return selectedTools.some((tool) => tool.toolId === toolId)
}
/**
 * Determines whether the given MCP tool id already appears among the
 * currently selected tools.
 *
 * @param mcpToolId - Identifier of the MCP tool to look up
 * @returns `true` when a selected tool of type `'mcp'` carries this id
 */
const isMcpToolAlreadySelected = (mcpToolId: string): boolean => {
  for (const tool of selectedTools) {
    if (tool.type === 'mcp' && tool.toolId === mcpToolId) return true
  }
  return false
}
/**
 * Determines whether a custom tool with the given id is already part of the
 * current selection.
 *
 * @param customToolId - Identifier of the custom tool to look up
 * @returns `true` when a selected tool of type `'custom-tool'` carries this id
 */
const isCustomToolAlreadySelected = (customToolId: string): boolean =>
  selectedTools.some(
    (tool) => tool.type === 'custom-tool' && tool.customToolId === customToolId
  )
/**
 * Determines whether the workflow with the given id is already referenced by
 * a selected workflow tool.
 *
 * @param workflowId - Identifier of the workflow to look up
 * @returns `true` when a selected `'workflow_input'` tool points at this workflow
 */
const isWorkflowAlreadySelected = (workflowId: string): boolean => {
  const match = selectedTools.find(
    (tool) => tool.type === 'workflow_input' && tool.params?.workflowId === workflowId
  )
  return match !== undefined
}
/**
* Checks if a block supports multiple operations.
*
@@ -1745,24 +1779,29 @@ export const ToolInput = memo(function ToolInput({
if (!permissionConfig.disableCustomTools && customTools.length > 0) {
groups.push({
section: 'Custom Tools',
items: customTools.map((customTool) => ({
label: customTool.title,
value: `custom-${customTool.id}`,
iconElement: createToolIcon('#3B82F6', WrenchIcon),
onSelect: () => {
const newTool: StoredTool = {
type: 'custom-tool',
customToolId: customTool.id,
usageControl: 'auto',
isExpanded: true,
}
setStoreValue([
...selectedTools.map((tool) => ({ ...tool, isExpanded: false })),
newTool,
])
setOpen(false)
},
})),
items: customTools.map((customTool) => {
const alreadySelected = isCustomToolAlreadySelected(customTool.id)
return {
label: customTool.title,
value: `custom-${customTool.id}`,
iconElement: createToolIcon('#3B82F6', WrenchIcon),
disabled: isPreview || alreadySelected,
onSelect: () => {
if (alreadySelected) return
const newTool: StoredTool = {
type: 'custom-tool',
customToolId: customTool.id,
usageControl: 'auto',
isExpanded: true,
}
setStoreValue([
...selectedTools.map((tool) => ({ ...tool, isExpanded: false })),
newTool,
])
setOpen(false)
},
}
}),
})
}
@@ -1772,11 +1811,13 @@ export const ToolInput = memo(function ToolInput({
section: 'MCP Tools',
items: availableMcpTools.map((mcpTool) => {
const server = mcpServers.find((s) => s.id === mcpTool.serverId)
const alreadySelected = isMcpToolAlreadySelected(mcpTool.id)
return {
label: mcpTool.name,
value: `mcp-${mcpTool.id}`,
iconElement: createToolIcon(mcpTool.bgColor || '#6366F1', mcpTool.icon || McpIcon),
onSelect: () => {
if (alreadySelected) return
const newTool: StoredTool = {
type: 'mcp',
title: mcpTool.name,
@@ -1796,7 +1837,7 @@ export const ToolInput = memo(function ToolInput({
}
handleMcpToolSelect(newTool, true)
},
disabled: isPreview || disabled,
disabled: isPreview || disabled || alreadySelected,
}
}),
})
@@ -1810,12 +1851,17 @@ export const ToolInput = memo(function ToolInput({
if (builtInTools.length > 0) {
groups.push({
section: 'Built-in Tools',
items: builtInTools.map((block) => ({
label: block.name,
value: `builtin-${block.type}`,
iconElement: createToolIcon(block.bgColor, block.icon),
onSelect: () => handleSelectTool(block),
})),
items: builtInTools.map((block) => {
const toolId = getToolIdForOperation(block.type, undefined)
const alreadySelected = toolId ? isToolAlreadySelected(toolId, block.type) : false
return {
label: block.name,
value: `builtin-${block.type}`,
iconElement: createToolIcon(block.bgColor, block.icon),
disabled: isPreview || alreadySelected,
onSelect: () => handleSelectTool(block),
}
}),
})
}
@@ -1823,12 +1869,17 @@ export const ToolInput = memo(function ToolInput({
if (integrations.length > 0) {
groups.push({
section: 'Integrations',
items: integrations.map((block) => ({
label: block.name,
value: `builtin-${block.type}`,
iconElement: createToolIcon(block.bgColor, block.icon),
onSelect: () => handleSelectTool(block),
})),
items: integrations.map((block) => {
const toolId = getToolIdForOperation(block.type, undefined)
const alreadySelected = toolId ? isToolAlreadySelected(toolId, block.type) : false
return {
label: block.name,
value: `builtin-${block.type}`,
iconElement: createToolIcon(block.bgColor, block.icon),
disabled: isPreview || alreadySelected,
onSelect: () => handleSelectTool(block),
}
}),
})
}
@@ -1836,29 +1887,33 @@ export const ToolInput = memo(function ToolInput({
if (availableWorkflows.length > 0) {
groups.push({
section: 'Workflows',
items: availableWorkflows.map((workflow) => ({
label: workflow.name,
value: `workflow-${workflow.id}`,
iconElement: createToolIcon('#6366F1', WorkflowIcon),
onSelect: () => {
const newTool: StoredTool = {
type: 'workflow_input',
title: 'Workflow',
toolId: 'workflow_executor',
params: {
workflowId: workflow.id,
},
isExpanded: true,
usageControl: 'auto',
}
setStoreValue([
...selectedTools.map((tool) => ({ ...tool, isExpanded: false })),
newTool,
])
setOpen(false)
},
disabled: isPreview || disabled,
})),
items: availableWorkflows.map((workflow) => {
const alreadySelected = isWorkflowAlreadySelected(workflow.id)
return {
label: workflow.name,
value: `workflow-${workflow.id}`,
iconElement: createToolIcon('#6366F1', WorkflowIcon),
onSelect: () => {
if (alreadySelected) return
const newTool: StoredTool = {
type: 'workflow_input',
title: 'Workflow',
toolId: 'workflow_executor',
params: {
workflowId: workflow.id,
},
isExpanded: true,
usageControl: 'auto',
}
setStoreValue([
...selectedTools.map((tool) => ({ ...tool, isExpanded: false })),
newTool,
])
setOpen(false)
},
disabled: isPreview || disabled || alreadySelected,
}
}),
})
}
@@ -1877,6 +1932,11 @@ export const ToolInput = memo(function ToolInput({
permissionConfig.disableCustomTools,
permissionConfig.disableMcpTools,
availableWorkflows,
getToolIdForOperation,
isToolAlreadySelected,
isMcpToolAlreadySelected,
isCustomToolAlreadySelected,
isWorkflowAlreadySelected,
])
const toolRequiresOAuth = (toolId: string): boolean => {

View File

@@ -50,6 +50,12 @@ import { useSubBlockStore } from '@/stores/workflows/subblock/store'
/** Stable empty object to avoid creating new references */
const EMPTY_SUBBLOCK_VALUES = {} as Record<string, any>
/** Shared style for dashed divider lines */
const DASHED_DIVIDER_STYLE = {
backgroundImage:
'repeating-linear-gradient(to right, var(--border) 0px, var(--border) 6px, transparent 6px, transparent 12px)',
} as const
/**
* Icon component for rendering block icons.
*
@@ -89,31 +95,23 @@ export function Editor() {
const blockConfig = currentBlock ? getBlock(currentBlock.type) : null
const title = currentBlock?.name || 'Editor'
// Check if selected block is a subflow (loop or parallel)
const isSubflow =
currentBlock && (currentBlock.type === 'loop' || currentBlock.type === 'parallel')
// Get subflow display properties from configs
const subflowConfig = isSubflow ? (currentBlock.type === 'loop' ? LoopTool : ParallelTool) : null
// Check if selected block is a workflow block
const isWorkflowBlock =
currentBlock && (currentBlock.type === 'workflow' || currentBlock.type === 'workflow_input')
// Get workspace ID from params
const params = useParams()
const workspaceId = params.workspaceId as string
// Refs for resize functionality
const subBlocksRef = useRef<HTMLDivElement>(null)
// Get user permissions
const userPermissions = useUserPermissionsContext()
// Get active workflow ID
const activeWorkflowId = useWorkflowRegistry((state) => state.activeWorkflowId)
// Get block properties (advanced/trigger modes)
const { advancedMode, triggerMode } = useEditorBlockProperties(
currentBlockId,
currentWorkflow.isSnapshotView
@@ -145,10 +143,9 @@ export function Editor() {
[subBlocksForCanonical]
)
const canonicalModeOverrides = currentBlock?.data?.canonicalModes
const advancedValuesPresent = hasAdvancedValues(
subBlocksForCanonical,
blockSubBlockValues,
canonicalIndex
const advancedValuesPresent = useMemo(
() => hasAdvancedValues(subBlocksForCanonical, blockSubBlockValues, canonicalIndex),
[subBlocksForCanonical, blockSubBlockValues, canonicalIndex]
)
const displayAdvancedOptions = userPermissions.canEdit
? advancedMode
@@ -156,11 +153,9 @@ export function Editor() {
const hasAdvancedOnlyFields = useMemo(() => {
for (const subBlock of subBlocksForCanonical) {
// Must be standalone advanced (mode: 'advanced' without canonicalParamId)
if (subBlock.mode !== 'advanced') continue
if (canonicalIndex.canonicalIdBySubBlockId[subBlock.id]) continue
// Check condition - skip if condition not met for current values
if (
subBlock.condition &&
!evaluateSubBlockCondition(subBlock.condition, blockSubBlockValues)
@@ -173,7 +168,6 @@ export function Editor() {
return false
}, [subBlocksForCanonical, canonicalIndex.canonicalIdBySubBlockId, blockSubBlockValues])
// Get subblock layout using custom hook
const { subBlocks, stateToUse: subBlockState } = useEditorSubblockLayout(
blockConfig || ({} as any),
currentBlockId || '',
@@ -206,31 +200,34 @@ export function Editor() {
return { regularSubBlocks: regular, advancedOnlySubBlocks: advancedOnly }
}, [subBlocks, canonicalIndex.canonicalIdBySubBlockId])
// Get block connections
const { incomingConnections, hasIncomingConnections } = useBlockConnections(currentBlockId || '')
// Connections resize hook
const { handleMouseDown: handleConnectionsResizeMouseDown, isResizing } = useConnectionsResize({
subBlocksRef,
})
// Collaborative actions
const {
collaborativeSetBlockCanonicalMode,
collaborativeUpdateBlockName,
collaborativeToggleBlockAdvancedMode,
} = useCollaborativeWorkflow()
// Advanced mode toggle handler
const handleToggleAdvancedMode = useCallback(() => {
if (!currentBlockId || !userPermissions.canEdit) return
collaborativeToggleBlockAdvancedMode(currentBlockId)
}, [currentBlockId, userPermissions.canEdit, collaborativeToggleBlockAdvancedMode])
// Rename state
const [isRenaming, setIsRenaming] = useState(false)
const [editedName, setEditedName] = useState('')
const nameInputRef = useRef<HTMLInputElement>(null)
/**
* Ref callback that auto-selects the input text when mounted.
*/
const nameInputRefCallback = useCallback((element: HTMLInputElement | null) => {
if (element) {
element.select()
}
}, [])
/**
* Handles starting the rename process.
@@ -251,7 +248,6 @@ export function Editor() {
if (trimmedName && trimmedName !== currentBlock?.name) {
const result = collaborativeUpdateBlockName(currentBlockId, trimmedName)
if (!result.success) {
// Keep rename mode open on error so user can correct the name
return
}
}
@@ -266,14 +262,6 @@ export function Editor() {
setEditedName('')
}, [])
// Focus input when entering rename mode
useEffect(() => {
if (isRenaming && nameInputRef.current) {
nameInputRef.current.select()
}
}, [isRenaming])
// Trigger rename mode when signaled from context menu
useEffect(() => {
if (shouldFocusRename && currentBlock) {
handleStartRename()
@@ -284,17 +272,13 @@ export function Editor() {
/**
* Handles opening documentation link in a new secure tab.
*/
const handleOpenDocs = () => {
const handleOpenDocs = useCallback(() => {
const docsLink = isSubflow ? subflowConfig?.docsLink : blockConfig?.docsLink
if (docsLink) {
window.open(docsLink, '_blank', 'noopener,noreferrer')
}
}
window.open(docsLink || 'https://docs.sim.ai/quick-reference', '_blank', 'noopener,noreferrer')
}, [isSubflow, subflowConfig?.docsLink, blockConfig?.docsLink])
// Get child workflow ID for workflow blocks
const childWorkflowId = isWorkflowBlock ? blockSubBlockValues?.workflowId : null
// Fetch child workflow state for preview (only for workflow blocks with a selected workflow)
const { data: childWorkflowState, isLoading: isLoadingChildWorkflow } =
useWorkflowState(childWorkflowId)
@@ -307,7 +291,6 @@ export function Editor() {
}
}, [childWorkflowId, workspaceId])
// Determine if connections are at minimum height (collapsed state)
const isConnectionsAtMinHeight = connectionsHeight <= 35
return (
@@ -328,7 +311,7 @@ export function Editor() {
)}
{isRenaming ? (
<input
ref={nameInputRef}
ref={nameInputRefCallback}
type='text'
value={editedName}
onChange={(e) => setEditedName(e.target.value)}
@@ -399,23 +382,21 @@ export function Editor() {
</Tooltip.Content>
</Tooltip.Root>
)} */}
{currentBlock && (isSubflow ? subflowConfig?.docsLink : blockConfig?.docsLink) && (
<Tooltip.Root>
<Tooltip.Trigger asChild>
<Button
variant='ghost'
className='p-0'
onClick={handleOpenDocs}
aria-label='Open documentation'
>
<BookOpen className='h-[14px] w-[14px]' />
</Button>
</Tooltip.Trigger>
<Tooltip.Content side='top'>
<p>Open docs</p>
</Tooltip.Content>
</Tooltip.Root>
)}
<Tooltip.Root>
<Tooltip.Trigger asChild>
<Button
variant='ghost'
className='p-0'
onClick={handleOpenDocs}
aria-label='Open documentation'
>
<BookOpen className='h-[14px] w-[14px]' />
</Button>
</Tooltip.Trigger>
<Tooltip.Content side='top'>
<p>Open docs</p>
</Tooltip.Content>
</Tooltip.Root>
</div>
</div>
@@ -495,13 +476,7 @@ export function Editor() {
</div>
</div>
<div className='subblock-divider px-[2px] pt-[16px] pb-[13px]'>
<div
className='h-[1.25px]'
style={{
backgroundImage:
'repeating-linear-gradient(to right, var(--border) 0px, var(--border) 6px, transparent 6px, transparent 12px)',
}}
/>
<div className='h-[1.25px]' style={DASHED_DIVIDER_STYLE} />
</div>
</>
)}
@@ -566,13 +541,7 @@ export function Editor() {
/>
{showDivider && (
<div className='subblock-divider px-[2px] pt-[16px] pb-[13px]'>
<div
className='h-[1.25px]'
style={{
backgroundImage:
'repeating-linear-gradient(to right, var(--border) 0px, var(--border) 6px, transparent 6px, transparent 12px)',
}}
/>
<div className='h-[1.25px]' style={DASHED_DIVIDER_STYLE} />
</div>
)}
</div>
@@ -581,13 +550,7 @@ export function Editor() {
{hasAdvancedOnlyFields && userPermissions.canEdit && (
<div className='flex items-center gap-[10px] px-[2px] pt-[14px] pb-[12px]'>
<div
className='h-[1.25px] flex-1'
style={{
backgroundImage:
'repeating-linear-gradient(to right, var(--border) 0px, var(--border) 6px, transparent 6px, transparent 12px)',
}}
/>
<div className='h-[1.25px] flex-1' style={DASHED_DIVIDER_STYLE} />
<button
type='button'
onClick={handleToggleAdvancedMode}
@@ -600,13 +563,7 @@ export function Editor() {
className={`h-[14px] w-[14px] transition-transform duration-200 ${displayAdvancedOptions ? 'rotate-180' : ''}`}
/>
</button>
<div
className='h-[1.25px] flex-1'
style={{
backgroundImage:
'repeating-linear-gradient(to right, var(--border) 0px, var(--border) 6px, transparent 6px, transparent 12px)',
}}
/>
<div className='h-[1.25px] flex-1' style={DASHED_DIVIDER_STYLE} />
</div>
)}
@@ -630,13 +587,7 @@ export function Editor() {
/>
{index < advancedOnlySubBlocks.length - 1 && (
<div className='subblock-divider px-[2px] pt-[16px] pb-[13px]'>
<div
className='h-[1.25px]'
style={{
backgroundImage:
'repeating-linear-gradient(to right, var(--border) 0px, var(--border) 6px, transparent 6px, transparent 12px)',
}}
/>
<div className='h-[1.25px]' style={DASHED_DIVIDER_STYLE} />
</div>
)}
</div>

View File

@@ -24,6 +24,7 @@ import {
Tooltip,
} from '@/components/emcn'
import { getEnv, isTruthy } from '@/lib/core/config/env'
import { formatDuration } from '@/lib/core/utils/formatting'
import { useRegisterGlobalCommands } from '@/app/workspace/[workspaceId]/providers/global-commands-provider'
import { createCommands } from '@/app/workspace/[workspaceId]/utils/commands-utils'
import {
@@ -43,7 +44,6 @@ import {
type EntryNode,
type ExecutionGroup,
flattenBlockEntriesOnly,
formatDuration,
getBlockColor,
getBlockIcon,
groupEntriesByExecution,
@@ -128,7 +128,7 @@ const BlockRow = memo(function BlockRow({
<StatusDisplay
isRunning={isRunning}
isCanceled={isCanceled}
formattedDuration={formatDuration(entry.durationMs)}
formattedDuration={formatDuration(entry.durationMs, { precision: 2 }) ?? '-'}
/>
</span>
</div>
@@ -201,7 +201,7 @@ const IterationNodeRow = memo(function IterationNodeRow({
<StatusDisplay
isRunning={hasRunningChild}
isCanceled={hasCanceledChild}
formattedDuration={formatDuration(entry.durationMs)}
formattedDuration={formatDuration(entry.durationMs, { precision: 2 }) ?? '-'}
/>
</span>
</div>
@@ -314,7 +314,7 @@ const SubflowNodeRow = memo(function SubflowNodeRow({
<StatusDisplay
isRunning={hasRunningDescendant}
isCanceled={hasCanceledDescendant}
formattedDuration={formatDuration(entry.durationMs)}
formattedDuration={formatDuration(entry.durationMs, { precision: 2 }) ?? '-'}
/>
</span>
</div>

View File

@@ -53,17 +53,6 @@ export function getBlockColor(blockType: string): string {
return '#6b7280'
}
/**
 * Renders a millisecond duration as a short human-readable string.
 *
 * @param ms - Duration in milliseconds; `undefined`/`null` yields `'-'`
 * @returns Rounded whole milliseconds below one second (e.g. `'42ms'`),
 *   otherwise seconds with two decimals (e.g. `'1.50s'`)
 */
export function formatDuration(ms?: number): string {
  // `== null` deliberately matches both undefined and null.
  if (ms == null) return '-'
  return ms < 1000 ? `${Math.round(ms)}ms` : `${(ms / 1000).toFixed(2)}s`
}
/**
* Determines if a keyboard event originated from a text-editable element
*/

View File

@@ -30,6 +30,7 @@ import {
Textarea,
} from '@/components/emcn'
import { cn } from '@/lib/core/utils/cn'
import { formatDuration } from '@/lib/core/utils/formatting'
import { sanitizeForCopilot } from '@/lib/workflows/sanitization/json-sanitizer'
import { formatEditSequence } from '@/lib/workflows/training/compute-edit-sequence'
import { useCurrentWorkflow } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-current-workflow'
@@ -575,7 +576,9 @@ export function TrainingModal() {
<span className='text-[var(--text-muted)]'>Duration:</span>{' '}
<span className='text-[var(--text-secondary)]'>
{dataset.metadata?.duration
? `${(dataset.metadata.duration / 1000).toFixed(1)}s`
? formatDuration(dataset.metadata.duration, {
precision: 1,
})
: 'N/A'}
</span>
</div>

View File

@@ -246,7 +246,6 @@ export function CredentialSets() {
setNewSetDescription('')
setNewSetProvider('google-email')
// Open detail view for the newly created group
if (result?.credentialSet) {
setViewingSet(result.credentialSet)
}
@@ -336,7 +335,6 @@ export function CredentialSets() {
email,
})
// Start 60s cooldown
setResendCooldowns((prev) => ({ ...prev, [invitationId]: 60 }))
const interval = setInterval(() => {
setResendCooldowns((prev) => {
@@ -393,7 +391,6 @@ export function CredentialSets() {
return <GmailIcon className='h-4 w-4' />
}
// All hooks must be called before any early returns
const activeMemberships = useMemo(
() => memberships.filter((m) => m.status === 'active'),
[memberships]
@@ -447,7 +444,6 @@ export function CredentialSets() {
<div className='flex h-full flex-col gap-[16px]'>
<div className='min-h-0 flex-1 overflow-y-auto'>
<div className='flex flex-col gap-[16px]'>
{/* Group Info */}
<div className='flex items-center gap-[16px]'>
<div className='flex items-center gap-[8px]'>
<span className='font-medium text-[13px] text-[var(--text-primary)]'>
@@ -471,7 +467,6 @@ export function CredentialSets() {
</div>
</div>
{/* Invite Section - Email Tags Input */}
<div className='flex flex-col gap-[4px]'>
<div className='flex items-center gap-[8px]'>
<TagInput
@@ -495,7 +490,6 @@ export function CredentialSets() {
{emailError && <p className='text-[12px] text-[var(--text-error)]'>{emailError}</p>}
</div>
{/* Members List - styled like team members */}
<div className='flex flex-col gap-[16px]'>
<h4 className='font-medium text-[14px] text-[var(--text-primary)]'>Members</h4>
@@ -519,7 +513,6 @@ export function CredentialSets() {
</p>
) : (
<div className='flex flex-col gap-[16px]'>
{/* Active Members */}
{activeMembers.map((member) => {
const name = member.userName || 'Unknown'
const avatarInitial = name.charAt(0).toUpperCase()
@@ -572,7 +565,6 @@ export function CredentialSets() {
)
})}
{/* Pending Invitations */}
{pendingInvitations.map((invitation) => {
const email = invitation.email || 'Unknown'
const emailPrefix = email.split('@')[0]
@@ -641,7 +633,6 @@ export function CredentialSets() {
</div>
</div>
{/* Footer Actions */}
<div className='mt-auto flex items-center justify-end'>
<Button onClick={handleBackToList} variant='tertiary'>
Back
@@ -822,7 +813,6 @@ export function CredentialSets() {
</div>
</div>
{/* Create Polling Group Modal */}
<Modal open={showCreateModal} onOpenChange={handleCloseCreateModal}>
<ModalContent size='sm'>
<ModalHeader>Create Polling Group</ModalHeader>
@@ -895,7 +885,6 @@ export function CredentialSets() {
</ModalContent>
</Modal>
{/* Leave Confirmation Modal */}
<Modal open={!!leavingMembership} onOpenChange={() => setLeavingMembership(null)}>
<ModalContent size='sm'>
<ModalHeader>Leave Polling Group</ModalHeader>
@@ -923,7 +912,6 @@ export function CredentialSets() {
</ModalContent>
</Modal>
{/* Delete Confirmation Modal */}
<Modal open={!!deletingSet} onOpenChange={() => setDeletingSet(null)}>
<ModalContent size='sm'>
<ModalHeader>Delete Polling Group</ModalHeader>

View File

@@ -1,4 +1,3 @@
export { AccessControl } from './access-control/access-control'
export { ApiKeys } from './api-keys/api-keys'
export { BYOK } from './byok/byok'
export { Copilot } from './copilot/copilot'
@@ -10,7 +9,6 @@ export { Files as FileUploads } from './files/files'
export { General } from './general/general'
export { Integrations } from './integrations/integrations'
export { MCP } from './mcp/mcp'
export { SSO } from './sso/sso'
export { Subscription } from './subscription/subscription'
export { TeamManagement } from './team-management/team-management'
export { WorkflowMcpServers } from './workflow-mcp-servers/workflow-mcp-servers'

View File

@@ -407,14 +407,12 @@ export function MCP({ initialServerId }: MCPProps) {
const [urlScrollLeft, setUrlScrollLeft] = useState(0)
const [headerScrollLeft, setHeaderScrollLeft] = useState<Record<string, number>>({})
// Auto-select server when initialServerId is provided
useEffect(() => {
if (initialServerId && servers.some((s) => s.id === initialServerId)) {
setSelectedServerId(initialServerId)
}
}, [initialServerId, servers])
// Force refresh tools when entering server detail view to detect stale schemas
useEffect(() => {
if (selectedServerId) {
forceRefreshTools(workspaceId)
@@ -717,7 +715,6 @@ export function MCP({ initialServerId }: MCPProps) {
`Refreshed MCP server: ${serverId}, workflows updated: ${result.workflowsUpdated}`
)
// If the active workflow was updated, reload its subblock values from DB
const activeWorkflowId = useWorkflowRegistry.getState().activeWorkflowId
if (activeWorkflowId && result.updatedWorkflowIds?.includes(activeWorkflowId)) {
logger.info(`Active workflow ${activeWorkflowId} was updated, reloading subblock values`)

View File

@@ -13,8 +13,8 @@ import { SlackMonoIcon } from '@/components/icons'
import type { PlanFeature } from '@/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/components/subscription/components/plan-card'
export const PRO_PLAN_FEATURES: PlanFeature[] = [
{ icon: Zap, text: '25 runs per minute (sync)' },
{ icon: Clock, text: '200 runs per minute (async)' },
{ icon: Zap, text: '150 runs per minute (sync)' },
{ icon: Clock, text: '1,000 runs per minute (async)' },
{ icon: HardDrive, text: '50GB file storage' },
{ icon: Building2, text: 'Unlimited workspaces' },
{ icon: Users, text: 'Unlimited invites' },
@@ -22,8 +22,8 @@ export const PRO_PLAN_FEATURES: PlanFeature[] = [
]
export const TEAM_PLAN_FEATURES: PlanFeature[] = [
{ icon: Zap, text: '75 runs per minute (sync)' },
{ icon: Clock, text: '500 runs per minute (async)' },
{ icon: Zap, text: '300 runs per minute (sync)' },
{ icon: Clock, text: '2,500 runs per minute (async)' },
{ icon: HardDrive, text: '500GB file storage (pooled)' },
{ icon: Building2, text: 'Unlimited workspaces' },
{ icon: Users, text: 'Unlimited invites' },

View File

@@ -41,7 +41,6 @@ import { getEnv, isTruthy } from '@/lib/core/config/env'
import { isHosted } from '@/lib/core/config/feature-flags'
import { getUserRole } from '@/lib/workspaces/organization'
import {
AccessControl,
ApiKeys,
BYOK,
Copilot,
@@ -53,15 +52,16 @@ import {
General,
Integrations,
MCP,
SSO,
Subscription,
TeamManagement,
WorkflowMcpServers,
} from '@/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/components'
import { TemplateProfile } from '@/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/components/template-profile/template-profile'
import { AccessControl } from '@/ee/access-control/components/access-control'
import { SSO } from '@/ee/sso/components/sso-settings'
import { ssoKeys, useSSOProviders } from '@/ee/sso/hooks/sso'
import { generalSettingsKeys, useGeneralSettings } from '@/hooks/queries/general-settings'
import { organizationKeys, useOrganizations } from '@/hooks/queries/organization'
import { ssoKeys, useSSOProviders } from '@/hooks/queries/sso'
import { subscriptionKeys, useSubscriptionData } from '@/hooks/queries/subscription'
import { usePermissionConfig } from '@/hooks/use-permission-config'
import { useSettingsModalStore } from '@/stores/modals/settings/store'

View File

@@ -49,6 +49,7 @@ interface SocketContextType {
socket: Socket | null
isConnected: boolean
isConnecting: boolean
isReconnecting: boolean
authFailed: boolean
currentWorkflowId: string | null
currentSocketId: string | null
@@ -66,9 +67,16 @@ interface SocketContextType {
blockId: string,
subblockId: string,
value: any,
operationId?: string
operationId: string | undefined,
workflowId: string
) => void
emitVariableUpdate: (
variableId: string,
field: string,
value: any,
operationId: string | undefined,
workflowId: string
) => void
emitVariableUpdate: (variableId: string, field: string, value: any, operationId?: string) => void
emitCursorUpdate: (cursor: { x: number; y: number } | null) => void
emitSelectionUpdate: (selection: { type: 'block' | 'edge' | 'none'; id?: string }) => void
@@ -88,6 +96,7 @@ const SocketContext = createContext<SocketContextType>({
socket: null,
isConnected: false,
isConnecting: false,
isReconnecting: false,
authFailed: false,
currentWorkflowId: null,
currentSocketId: null,
@@ -122,6 +131,7 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
const [socket, setSocket] = useState<Socket | null>(null)
const [isConnected, setIsConnected] = useState(false)
const [isConnecting, setIsConnecting] = useState(false)
const [isReconnecting, setIsReconnecting] = useState(false)
const [currentWorkflowId, setCurrentWorkflowId] = useState<string | null>(null)
const [currentSocketId, setCurrentSocketId] = useState<string | null>(null)
const [presenceUsers, setPresenceUsers] = useState<PresenceUser[]>([])
@@ -236,20 +246,19 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
setCurrentWorkflowId(null)
setPresenceUsers([])
logger.info('Socket disconnected', {
reason,
})
// socket.active indicates if auto-reconnect will happen
if (socketInstance.active) {
setIsReconnecting(true)
logger.info('Socket disconnected, will auto-reconnect', { reason })
} else {
setIsReconnecting(false)
logger.info('Socket disconnected, no auto-reconnect', { reason })
}
})
socketInstance.on('connect_error', (error: any) => {
socketInstance.on('connect_error', (error: Error) => {
setIsConnecting(false)
logger.error('Socket connection error:', {
message: error.message,
stack: error.stack,
description: error.description,
type: error.type,
transport: error.transport,
})
logger.error('Socket connection error:', { message: error.message })
// Check if this is an authentication failure
const isAuthError =
@@ -261,43 +270,41 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
logger.warn(
'Authentication failed - stopping reconnection attempts. User may need to refresh/re-login.'
)
// Stop reconnection attempts to prevent infinite loop
socketInstance.disconnect()
// Reset state to allow re-initialization when session is restored
setSocket(null)
setAuthFailed(true)
setIsReconnecting(false)
initializedRef.current = false
} else if (socketInstance.active) {
// Temporary failure, will auto-reconnect
setIsReconnecting(true)
}
})
socketInstance.on('reconnect', (attemptNumber) => {
// Reconnection events are on the Manager (socket.io), not the socket itself
socketInstance.io.on('reconnect', (attemptNumber) => {
setIsConnected(true)
setIsReconnecting(false)
setCurrentSocketId(socketInstance.id ?? null)
logger.info('Socket reconnected successfully', {
attemptNumber,
socketId: socketInstance.id,
transport: socketInstance.io.engine?.transport?.name,
})
// Note: join-workflow is handled by the useEffect watching isConnected
})
socketInstance.on('reconnect_attempt', (attemptNumber) => {
logger.info('Socket reconnection attempt (fresh token will be generated)', {
attemptNumber,
timestamp: new Date().toISOString(),
})
socketInstance.io.on('reconnect_attempt', (attemptNumber) => {
setIsReconnecting(true)
logger.info('Socket reconnection attempt', { attemptNumber })
})
socketInstance.on('reconnect_error', (error: any) => {
logger.error('Socket reconnection error:', {
message: error.message,
attemptNumber: error.attemptNumber,
type: error.type,
})
socketInstance.io.on('reconnect_error', (error: Error) => {
logger.error('Socket reconnection error:', { message: error.message })
})
socketInstance.on('reconnect_failed', () => {
socketInstance.io.on('reconnect_failed', () => {
logger.error('Socket reconnection failed - all attempts exhausted')
setIsReconnecting(false)
setIsConnecting(false)
})
@@ -629,6 +636,7 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
if (commit) {
socket.emit('workflow-operation', {
workflowId: currentWorkflowId,
operation,
target,
payload,
@@ -645,6 +653,7 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
}
pendingPositionUpdates.current.set(blockId, {
workflowId: currentWorkflowId,
operation,
target,
payload,
@@ -666,6 +675,7 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
}
} else {
socket.emit('workflow-operation', {
workflowId: currentWorkflowId,
operation,
target,
payload,
@@ -678,47 +688,51 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
)
const emitSubblockUpdate = useCallback(
(blockId: string, subblockId: string, value: any, operationId?: string) => {
if (socket && currentWorkflowId) {
socket.emit('subblock-update', {
blockId,
subblockId,
value,
timestamp: Date.now(),
operationId,
})
} else {
logger.warn('Cannot emit subblock update: no socket connection or workflow room', {
hasSocket: !!socket,
currentWorkflowId,
blockId,
subblockId,
})
(
blockId: string,
subblockId: string,
value: any,
operationId: string | undefined,
workflowId: string
) => {
if (!socket) {
logger.warn('Cannot emit subblock update: no socket connection', { workflowId, blockId })
return
}
socket.emit('subblock-update', {
workflowId,
blockId,
subblockId,
value,
timestamp: Date.now(),
operationId,
})
},
[socket, currentWorkflowId]
[socket]
)
const emitVariableUpdate = useCallback(
(variableId: string, field: string, value: any, operationId?: string) => {
if (socket && currentWorkflowId) {
socket.emit('variable-update', {
variableId,
field,
value,
timestamp: Date.now(),
operationId,
})
} else {
logger.warn('Cannot emit variable update: no socket connection or workflow room', {
hasSocket: !!socket,
currentWorkflowId,
variableId,
field,
})
(
variableId: string,
field: string,
value: any,
operationId: string | undefined,
workflowId: string
) => {
if (!socket) {
logger.warn('Cannot emit variable update: no socket connection', { workflowId, variableId })
return
}
socket.emit('variable-update', {
workflowId,
variableId,
field,
value,
timestamp: Date.now(),
operationId,
})
},
[socket, currentWorkflowId]
[socket]
)
const lastCursorEmit = useRef(0)
@@ -794,6 +808,7 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
socket,
isConnected,
isConnecting,
isReconnecting,
authFailed,
currentWorkflowId,
currentSocketId,
@@ -820,6 +835,7 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
socket,
isConnected,
isConnecting,
isReconnecting,
authFailed,
currentWorkflowId,
currentSocketId,

View File

@@ -19,6 +19,7 @@ import { checkUsageStatus } from '@/lib/billing/calculations/usage-monitor'
import { getHighestPrioritySubscription } from '@/lib/billing/core/subscription'
import { RateLimiter } from '@/lib/core/rate-limiter'
import { decryptSecret } from '@/lib/core/security/encryption'
import { formatDuration } from '@/lib/core/utils/formatting'
import { getBaseUrl } from '@/lib/core/utils/urls'
import type { TraceSpan, WorkflowExecutionLog } from '@/lib/logs/types'
import { sendEmail } from '@/lib/messaging/email/mailer'
@@ -227,12 +228,6 @@ async function deliverWebhook(
}
}
function formatDuration(ms: number): string {
if (ms < 1000) return `${ms}ms`
if (ms < 60000) return `${(ms / 1000).toFixed(1)}s`
return `${(ms / 60000).toFixed(1)}m`
}
function formatCost(cost?: Record<string, unknown>): string {
if (!cost?.total) return 'N/A'
const total = cost.total as number
@@ -302,7 +297,7 @@ async function deliverEmail(
workflowName: payload.data.workflowName || 'Unknown Workflow',
status: payload.data.status,
trigger: payload.data.trigger,
duration: formatDuration(payload.data.totalDurationMs),
duration: formatDuration(payload.data.totalDurationMs, { precision: 1 }) ?? '-',
cost: formatCost(payload.data.cost),
logUrl,
alertReason,
@@ -315,7 +310,7 @@ async function deliverEmail(
to: subscription.emailRecipients,
subject,
html,
text: `${subject}\n${alertReason ? `\nReason: ${alertReason}\n` : ''}\nWorkflow: ${payload.data.workflowName}\nStatus: ${statusText}\nTrigger: ${payload.data.trigger}\nDuration: ${formatDuration(payload.data.totalDurationMs)}\nCost: ${formatCost(payload.data.cost)}\n\nView Log: ${logUrl}${includedDataText}`,
text: `${subject}\n${alertReason ? `\nReason: ${alertReason}\n` : ''}\nWorkflow: ${payload.data.workflowName}\nStatus: ${statusText}\nTrigger: ${payload.data.trigger}\nDuration: ${formatDuration(payload.data.totalDurationMs, { precision: 1 }) ?? '-'}\nCost: ${formatCost(payload.data.cost)}\n\nView Log: ${logUrl}${includedDataText}`,
emailType: 'notifications',
})
@@ -373,7 +368,10 @@ async function deliverSlack(
fields: [
{ type: 'mrkdwn', text: `*Status:*\n${payload.data.status}` },
{ type: 'mrkdwn', text: `*Trigger:*\n${payload.data.trigger}` },
{ type: 'mrkdwn', text: `*Duration:*\n${formatDuration(payload.data.totalDurationMs)}` },
{
type: 'mrkdwn',
text: `*Duration:*\n${formatDuration(payload.data.totalDurationMs, { precision: 1 }) ?? '-'}`,
},
{ type: 'mrkdwn', text: `*Cost:*\n${formatCost(payload.data.cost)}` },
],
},

View File

@@ -0,0 +1,625 @@
import { EnrichSoIcon } from '@/components/icons'
import type { BlockConfig } from '@/blocks/types'
import { AuthMode } from '@/blocks/types'
/**
 * Block configuration for the Enrich.so integration.
 *
 * Exposes Enrich.so's B2B data endpoints (profile enrichment, email/phone
 * finding, email verification, company lookup, and LinkedIn search/post
 * intelligence) as a single block. The active endpoint is chosen via the
 * `operation` dropdown; every other sub-block is conditionally shown and/or
 * required based on the selected operation.
 */
export const EnrichBlock: BlockConfig = {
  type: 'enrich',
  name: 'Enrich',
  description: 'B2B data enrichment and LinkedIn intelligence with Enrich.so',
  // Every operation authenticates with a user-supplied API key.
  authMode: AuthMode.ApiKey,
  longDescription:
    'Access real-time B2B data intelligence with Enrich.so. Enrich profiles from email addresses, find work emails from LinkedIn, verify email deliverability, search for people and companies, and analyze LinkedIn post engagement.',
  docsLink: 'https://docs.enrich.so/',
  category: 'tools',
  bgColor: '#E5E5E6',
  icon: EnrichSoIcon,
  subBlocks: [
    {
      // Drives visibility/required state of all other sub-blocks below.
      id: 'operation',
      title: 'Operation',
      type: 'dropdown',
      options: [
        // Person/Profile Enrichment
        { label: 'Email to Profile', id: 'email_to_profile' },
        { label: 'Email to Person (Lite)', id: 'email_to_person_lite' },
        { label: 'LinkedIn Profile Enrichment', id: 'linkedin_profile' },
        // Email Finding
        { label: 'Find Email', id: 'find_email' },
        { label: 'LinkedIn to Work Email', id: 'linkedin_to_work_email' },
        { label: 'LinkedIn to Personal Email', id: 'linkedin_to_personal_email' },
        // Phone Finding
        { label: 'Phone Finder (LinkedIn)', id: 'phone_finder' },
        { label: 'Email to Phone', id: 'email_to_phone' },
        // Email Verification
        { label: 'Verify Email', id: 'verify_email' },
        { label: 'Disposable Email Check', id: 'disposable_email_check' },
        // IP/Company Lookup
        { label: 'Email to IP', id: 'email_to_ip' },
        { label: 'IP to Company', id: 'ip_to_company' },
        // Company Enrichment
        { label: 'Company Lookup', id: 'company_lookup' },
        { label: 'Company Funding & Traffic', id: 'company_funding' },
        { label: 'Company Revenue', id: 'company_revenue' },
        // Search
        { label: 'Search People', id: 'search_people' },
        { label: 'Search Company', id: 'search_company' },
        { label: 'Search Company Employees', id: 'search_company_employees' },
        { label: 'Search Similar Companies', id: 'search_similar_companies' },
        { label: 'Sales Pointer (People)', id: 'sales_pointer_people' },
        // LinkedIn Posts/Activities
        { label: 'Search Posts', id: 'search_posts' },
        { label: 'Get Post Details', id: 'get_post_details' },
        { label: 'Search Post Reactions', id: 'search_post_reactions' },
        { label: 'Search Post Comments', id: 'search_post_comments' },
        { label: 'Search People Activities', id: 'search_people_activities' },
        { label: 'Search Company Activities', id: 'search_company_activities' },
        // Other
        { label: 'Reverse Hash Lookup', id: 'reverse_hash_lookup' },
        { label: 'Search Logo', id: 'search_logo' },
        { label: 'Check Credits', id: 'check_credits' },
      ],
      value: () => 'email_to_profile',
    },
    {
      id: 'apiKey',
      title: 'Enrich API Key',
      type: 'short-input',
      placeholder: 'Enter your Enrich.so API key',
      password: true,
      required: true,
    },
    {
      // Shared email input for all email-keyed operations.
      id: 'email',
      title: 'Email Address',
      type: 'short-input',
      placeholder: 'john.doe@company.com',
      condition: {
        field: 'operation',
        value: [
          'email_to_profile',
          'email_to_person_lite',
          'email_to_phone',
          'verify_email',
          'disposable_email_check',
          'email_to_ip',
        ],
      },
      required: {
        field: 'operation',
        value: [
          'email_to_profile',
          'email_to_person_lite',
          'email_to_phone',
          'verify_email',
          'disposable_email_check',
          'email_to_ip',
        ],
      },
    },
    {
      id: 'inRealtime',
      title: 'Fetch Fresh Data',
      type: 'switch',
      condition: { field: 'operation', value: 'email_to_profile' },
      mode: 'advanced',
    },
    {
      // Shared LinkedIn profile URL input; remapped per-operation in params().
      id: 'linkedinUrl',
      title: 'LinkedIn Profile URL',
      type: 'short-input',
      placeholder: 'linkedin.com/in/williamhgates',
      condition: {
        field: 'operation',
        value: [
          'linkedin_profile',
          'linkedin_to_work_email',
          'linkedin_to_personal_email',
          'phone_finder',
        ],
      },
      required: {
        field: 'operation',
        value: [
          'linkedin_profile',
          'linkedin_to_work_email',
          'linkedin_to_personal_email',
          'phone_finder',
        ],
      },
    },
    {
      id: 'fullName',
      title: 'Full Name',
      type: 'short-input',
      placeholder: 'John Doe',
      condition: { field: 'operation', value: 'find_email' },
      required: { field: 'operation', value: 'find_email' },
    },
    {
      id: 'companyDomain',
      title: 'Company Domain',
      type: 'short-input',
      placeholder: 'example.com',
      condition: { field: 'operation', value: 'find_email' },
      required: { field: 'operation', value: 'find_email' },
    },
    {
      id: 'ip',
      title: 'IP Address',
      type: 'short-input',
      placeholder: '86.92.60.221',
      condition: { field: 'operation', value: 'ip_to_company' },
      required: { field: 'operation', value: 'ip_to_company' },
    },
    {
      id: 'companyName',
      title: 'Company Name',
      type: 'short-input',
      placeholder: 'Google',
      condition: { field: 'operation', value: 'company_lookup' },
    },
    {
      // Optional for company_lookup (name may be used instead); required for the rest.
      id: 'domain',
      title: 'Domain',
      type: 'short-input',
      placeholder: 'google.com',
      condition: {
        field: 'operation',
        value: ['company_lookup', 'company_funding', 'company_revenue', 'search_logo'],
      },
      required: {
        field: 'operation',
        value: ['company_funding', 'company_revenue', 'search_logo'],
      },
    },
    {
      id: 'firstName',
      title: 'First Name',
      type: 'short-input',
      placeholder: 'John',
      condition: { field: 'operation', value: 'search_people' },
    },
    {
      id: 'lastName',
      title: 'Last Name',
      type: 'short-input',
      placeholder: 'Doe',
      condition: { field: 'operation', value: 'search_people' },
    },
    {
      id: 'subTitle',
      title: 'Job Title',
      type: 'short-input',
      placeholder: 'Software Engineer',
      condition: { field: 'operation', value: 'search_people' },
    },
    {
      id: 'locationCountry',
      title: 'Country',
      type: 'short-input',
      placeholder: 'United States',
      condition: { field: 'operation', value: ['search_people', 'search_company'] },
    },
    {
      id: 'locationCity',
      title: 'City',
      type: 'short-input',
      placeholder: 'San Francisco',
      condition: { field: 'operation', value: ['search_people', 'search_company'] },
    },
    {
      id: 'industry',
      title: 'Industry',
      type: 'short-input',
      placeholder: 'Technology',
      condition: { field: 'operation', value: 'search_people' },
    },
    {
      // JSON-typed fields below are entered as strings and parsed in params().
      id: 'currentJobTitles',
      title: 'Current Job Titles (JSON)',
      type: 'code',
      placeholder: '["CEO", "CTO", "VP Engineering"]',
      condition: { field: 'operation', value: 'search_people' },
    },
    {
      id: 'skills',
      title: 'Skills (JSON)',
      type: 'code',
      placeholder: '["Python", "Machine Learning"]',
      condition: { field: 'operation', value: 'search_people' },
    },
    {
      id: 'searchCompanyName',
      title: 'Company Name',
      type: 'short-input',
      placeholder: 'Google',
      condition: { field: 'operation', value: 'search_company' },
    },
    {
      id: 'industries',
      title: 'Industries (JSON)',
      type: 'code',
      placeholder: '["Technology", "Software"]',
      condition: { field: 'operation', value: 'search_company' },
    },
    {
      id: 'staffCountMin',
      title: 'Min Employees',
      type: 'short-input',
      placeholder: '50',
      condition: { field: 'operation', value: 'search_company' },
    },
    {
      id: 'staffCountMax',
      title: 'Max Employees',
      type: 'short-input',
      placeholder: '500',
      condition: { field: 'operation', value: 'search_company' },
    },
    {
      id: 'companyIds',
      title: 'Company IDs (JSON)',
      type: 'code',
      placeholder: '[12345, 67890]',
      condition: { field: 'operation', value: 'search_company_employees' },
    },
    {
      id: 'country',
      title: 'Country',
      type: 'short-input',
      placeholder: 'United States',
      condition: { field: 'operation', value: 'search_company_employees' },
    },
    {
      id: 'city',
      title: 'City',
      type: 'short-input',
      placeholder: 'San Francisco',
      condition: { field: 'operation', value: 'search_company_employees' },
    },
    {
      id: 'jobTitles',
      title: 'Job Titles (JSON)',
      type: 'code',
      placeholder: '["Software Engineer", "Product Manager"]',
      condition: { field: 'operation', value: 'search_company_employees' },
    },
    {
      id: 'linkedinCompanyUrl',
      title: 'LinkedIn Company URL',
      type: 'short-input',
      placeholder: 'linkedin.com/company/google',
      condition: { field: 'operation', value: 'search_similar_companies' },
      required: { field: 'operation', value: 'search_similar_companies' },
    },
    {
      id: 'accountLocation',
      title: 'Locations (JSON)',
      type: 'code',
      placeholder: '["germany", "france"]',
      condition: { field: 'operation', value: 'search_similar_companies' },
    },
    {
      id: 'employeeSizeType',
      title: 'Employee Size Filter Type',
      type: 'dropdown',
      options: [
        { label: 'Range', id: 'RANGE' },
        { label: 'Exact', id: 'EXACT' },
      ],
      condition: { field: 'operation', value: 'search_similar_companies' },
      mode: 'advanced',
    },
    {
      id: 'employeeSizeRange',
      title: 'Employee Size Range (JSON)',
      type: 'code',
      placeholder: '[{"start": 50, "end": 200}]',
      condition: { field: 'operation', value: 'search_similar_companies' },
    },
    {
      id: 'num',
      title: 'Results Per Page',
      type: 'short-input',
      placeholder: '10',
      condition: { field: 'operation', value: 'search_similar_companies' },
    },
    {
      id: 'filters',
      title: 'Filters (JSON)',
      type: 'code',
      placeholder:
        '[{"type": "POSTAL_CODE", "values": [{"id": "101041448", "text": "San Francisco", "selectionType": "INCLUDED"}]}]',
      condition: { field: 'operation', value: 'sales_pointer_people' },
      required: { field: 'operation', value: 'sales_pointer_people' },
    },
    {
      id: 'keywords',
      title: 'Keywords',
      type: 'short-input',
      placeholder: 'AI automation',
      condition: { field: 'operation', value: 'search_posts' },
      required: { field: 'operation', value: 'search_posts' },
    },
    {
      id: 'datePosted',
      title: 'Date Posted',
      type: 'dropdown',
      options: [
        // Empty id means no date filter is sent.
        { label: 'Any time', id: '' },
        { label: 'Past 24 hours', id: 'past_24_hours' },
        { label: 'Past week', id: 'past_week' },
        { label: 'Past month', id: 'past_month' },
      ],
      condition: { field: 'operation', value: 'search_posts' },
    },
    {
      id: 'postUrl',
      title: 'LinkedIn Post URL',
      type: 'short-input',
      placeholder: 'https://www.linkedin.com/posts/...',
      condition: { field: 'operation', value: 'get_post_details' },
      required: { field: 'operation', value: 'get_post_details' },
    },
    {
      id: 'postUrn',
      title: 'Post URN',
      type: 'short-input',
      placeholder: 'urn:li:activity:7231931952839196672',
      condition: {
        field: 'operation',
        value: ['search_post_reactions', 'search_post_comments'],
      },
      required: {
        field: 'operation',
        value: ['search_post_reactions', 'search_post_comments'],
      },
    },
    {
      id: 'reactionType',
      title: 'Reaction Type',
      type: 'dropdown',
      options: [
        { label: 'All', id: 'all' },
        { label: 'Like', id: 'like' },
        { label: 'Love', id: 'love' },
        { label: 'Celebrate', id: 'celebrate' },
        { label: 'Insightful', id: 'insightful' },
        { label: 'Funny', id: 'funny' },
      ],
      condition: { field: 'operation', value: 'search_post_reactions' },
    },
    {
      id: 'profileId',
      title: 'Profile ID',
      type: 'short-input',
      placeholder: 'ACoAAC1wha0BhoDIRAHrP5rgzVDyzmSdnl-KuEk',
      condition: { field: 'operation', value: 'search_people_activities' },
      required: { field: 'operation', value: 'search_people_activities' },
    },
    {
      id: 'activityType',
      title: 'Activity Type',
      type: 'dropdown',
      options: [
        { label: 'Posts', id: 'posts' },
        { label: 'Comments', id: 'comments' },
        { label: 'Articles', id: 'articles' },
      ],
      condition: {
        field: 'operation',
        value: ['search_people_activities', 'search_company_activities'],
      },
    },
    {
      id: 'companyId',
      title: 'Company ID',
      type: 'short-input',
      placeholder: '100746430',
      condition: { field: 'operation', value: 'search_company_activities' },
      required: { field: 'operation', value: 'search_company_activities' },
    },
    {
      id: 'offset',
      title: 'Offset',
      type: 'short-input',
      placeholder: '0',
      condition: { field: 'operation', value: 'search_company_activities' },
      mode: 'advanced',
    },
    {
      id: 'hash',
      title: 'MD5 Hash',
      type: 'short-input',
      placeholder: '5f0efb20de5ecfedbe0bf5e7c12353fe',
      condition: { field: 'operation', value: 'reverse_hash_lookup' },
      required: { field: 'operation', value: 'reverse_hash_lookup' },
    },
    {
      // Shared pagination input for all paged search operations.
      id: 'page',
      title: 'Page Number',
      type: 'short-input',
      placeholder: '1',
      condition: {
        field: 'operation',
        value: [
          'search_people',
          'search_company',
          'search_company_employees',
          'search_similar_companies',
          'sales_pointer_people',
          'search_posts',
          'search_post_reactions',
          'search_post_comments',
        ],
      },
      required: { field: 'operation', value: 'sales_pointer_people' },
    },
    {
      id: 'pageSize',
      title: 'Results Per Page',
      type: 'short-input',
      placeholder: '20',
      condition: {
        field: 'operation',
        value: ['search_people', 'search_company', 'search_company_employees'],
      },
    },
    {
      id: 'paginationToken',
      title: 'Pagination Token',
      type: 'short-input',
      placeholder: 'Token from previous response',
      condition: {
        field: 'operation',
        value: ['search_people_activities', 'search_company_activities'],
      },
      mode: 'advanced',
    },
  ],
  tools: {
    // Every tool id here is `enrich_` + an operation id from the dropdown above.
    access: [
      'enrich_check_credits',
      'enrich_email_to_profile',
      'enrich_email_to_person_lite',
      'enrich_linkedin_profile',
      'enrich_find_email',
      'enrich_linkedin_to_work_email',
      'enrich_linkedin_to_personal_email',
      'enrich_phone_finder',
      'enrich_email_to_phone',
      'enrich_verify_email',
      'enrich_disposable_email_check',
      'enrich_email_to_ip',
      'enrich_ip_to_company',
      'enrich_company_lookup',
      'enrich_company_funding',
      'enrich_company_revenue',
      'enrich_search_people',
      'enrich_search_company',
      'enrich_search_company_employees',
      'enrich_search_similar_companies',
      'enrich_sales_pointer_people',
      'enrich_search_posts',
      'enrich_get_post_details',
      'enrich_search_post_reactions',
      'enrich_search_post_comments',
      'enrich_search_people_activities',
      'enrich_search_company_activities',
      'enrich_reverse_hash_lookup',
      'enrich_search_logo',
    ],
    config: {
      // Tool id is derived directly from the selected operation.
      tool: (params) => `enrich_${params.operation}`,
      params: (params) => {
        const { operation, ...rest } = params
        const parsedParams: Record<string, any> = { ...rest }
        // JSON-typed sub-blocks arrive as raw strings; parse them so the tool
        // receives real arrays/objects. The first malformed field aborts with
        // a user-facing error.
        try {
          if (rest.currentJobTitles && typeof rest.currentJobTitles === 'string') {
            parsedParams.currentJobTitles = JSON.parse(rest.currentJobTitles)
          }
          if (rest.skills && typeof rest.skills === 'string') {
            parsedParams.skills = JSON.parse(rest.skills)
          }
          if (rest.industries && typeof rest.industries === 'string') {
            parsedParams.industries = JSON.parse(rest.industries)
          }
          if (rest.companyIds && typeof rest.companyIds === 'string') {
            parsedParams.companyIds = JSON.parse(rest.companyIds)
          }
          if (rest.jobTitles && typeof rest.jobTitles === 'string') {
            parsedParams.jobTitles = JSON.parse(rest.jobTitles)
          }
          if (rest.accountLocation && typeof rest.accountLocation === 'string') {
            parsedParams.accountLocation = JSON.parse(rest.accountLocation)
          }
          if (rest.employeeSizeRange && typeof rest.employeeSizeRange === 'string') {
            parsedParams.employeeSizeRange = JSON.parse(rest.employeeSizeRange)
          }
          if (rest.filters && typeof rest.filters === 'string') {
            parsedParams.filters = JSON.parse(rest.filters)
          }
        } catch (error: any) {
          throw new Error(`Invalid JSON input: ${error.message}`)
        }
        // Remap shared UI field ids to the per-tool parameter names.
        // NOTE(review): old keys are set to `undefined` rather than deleted —
        // assumes undefined values are dropped before the request is built;
        // confirm against the tool serialization layer.
        if (operation === 'linkedin_profile') {
          parsedParams.url = rest.linkedinUrl
          parsedParams.linkedinUrl = undefined
        }
        if (
          operation === 'linkedin_to_work_email' ||
          operation === 'linkedin_to_personal_email' ||
          operation === 'phone_finder'
        ) {
          parsedParams.linkedinProfile = rest.linkedinUrl
          parsedParams.linkedinUrl = undefined
        }
        if (operation === 'company_lookup') {
          parsedParams.name = rest.companyName
          parsedParams.companyName = undefined
        }
        if (operation === 'search_company') {
          parsedParams.name = rest.searchCompanyName
          parsedParams.searchCompanyName = undefined
        }
        if (operation === 'search_similar_companies') {
          parsedParams.url = rest.linkedinCompanyUrl
          parsedParams.linkedinCompanyUrl = undefined
        }
        if (operation === 'get_post_details') {
          parsedParams.url = rest.postUrl
          parsedParams.postUrl = undefined
        }
        if (operation === 'search_logo') {
          parsedParams.url = rest.domain
        }
        // Numeric short-inputs arrive as strings; coerce them here.
        if (parsedParams.page) {
          const pageNum = Number(parsedParams.page)
          // search_people/search_company expect `currentPage`; others keep `page`.
          if (operation === 'search_people' || operation === 'search_company') {
            parsedParams.currentPage = pageNum
            parsedParams.page = undefined
          } else {
            parsedParams.page = pageNum
          }
        }
        if (parsedParams.pageSize) parsedParams.pageSize = Number(parsedParams.pageSize)
        if (parsedParams.num) parsedParams.num = Number(parsedParams.num)
        if (parsedParams.offset) parsedParams.offset = Number(parsedParams.offset)
        if (parsedParams.staffCountMin)
          parsedParams.staffCountMin = Number(parsedParams.staffCountMin)
        if (parsedParams.staffCountMax)
          parsedParams.staffCountMax = Number(parsedParams.staffCountMax)
        return parsedParams
      },
    },
  },
  inputs: {
    operation: { type: 'string', description: 'Enrich operation to perform' },
  },
  outputs: {
    success: { type: 'boolean', description: 'Whether the operation was successful' },
    output: { type: 'json', description: 'Output data from the Enrich operation' },
  },
}

View File

@@ -26,6 +26,7 @@ import { DuckDuckGoBlock } from '@/blocks/blocks/duckduckgo'
import { DynamoDBBlock } from '@/blocks/blocks/dynamodb'
import { ElasticsearchBlock } from '@/blocks/blocks/elasticsearch'
import { ElevenLabsBlock } from '@/blocks/blocks/elevenlabs'
import { EnrichBlock } from '@/blocks/blocks/enrich'
import { EvaluatorBlock } from '@/blocks/blocks/evaluator'
import { ExaBlock } from '@/blocks/blocks/exa'
import { FileBlock, FileV2Block } from '@/blocks/blocks/file'
@@ -188,6 +189,7 @@ export const registry: Record<string, BlockConfig> = {
dynamodb: DynamoDBBlock,
elasticsearch: ElasticsearchBlock,
elevenlabs: ElevenLabsBlock,
enrich: EnrichBlock,
evaluator: EvaluatorBlock,
exa: ExaBlock,
file: FileBlock,

View File

@@ -13,8 +13,8 @@ interface FreeTierUpgradeEmailProps {
const proFeatures = [
{ label: '$20/month', desc: 'in credits included' },
{ label: '25 runs/min', desc: 'sync executions' },
{ label: '200 runs/min', desc: 'async executions' },
{ label: '150 runs/min', desc: 'sync executions' },
{ label: '1,000 runs/min', desc: 'async executions' },
{ label: '50GB storage', desc: 'for files & assets' },
{ label: 'Unlimited', desc: 'workspaces & invites' },
]

View File

@@ -5421,3 +5421,18 @@ z'
</svg>
)
}
/**
 * Enrich.so brand icon rendered as an inline SVG.
 * Spreads incoming SVG props onto the root element so callers can size/style it.
 */
export function EnrichSoIcon(props: SVGProps<SVGSVGElement>) {
  // Brand purple shared by both halves of the glyph.
  const brandFill = '#5A52F4'
  const leftGlyph =
    'M129.705566,319.705719 C127.553314,322.684906 125.651512,325.414673 123.657059,328.277466 C113.748466,318.440308 105.605003,310.395905 97.510834,302.302216 C93.625801,298.417419 89.990181,294.269318 85.949242,290.558868 C82.857994,287.720428 82.464081,285.757660 85.772888,282.551880 C104.068108,264.826202 122.146088,246.876312 140.285110,228.989670 C141.183945,228.103317 141.957443,227.089844 143.588837,225.218384 C140.691605,225.066116 138.820053,224.882874 136.948410,224.881958 C102.798264,224.865326 68.647453,224.765244 34.498699,224.983612 C29.315699,225.016739 27.990419,223.343155 28.090912,218.397430 C28.381887,204.076935 28.189890,189.746719 28.195684,175.420319 C28.198524,168.398178 28.319166,168.279541 35.590389,168.278687 C69.074188,168.274780 102.557991,168.281174 136.041794,168.266083 C137.968231,168.265213 139.894608,168.107101 141.821030,168.022171 C142.137955,167.513992 142.454895,167.005829 142.771820,166.497650 C122.842415,146.495621 102.913002,126.493591 83.261360,106.770348 C96.563828,93.471756 109.448814,80.590523 122.656265,67.386925 C123.522743,68.161835 124.785545,69.187096 125.930321,70.330513 C144.551819,88.930206 163.103683,107.600082 181.805267,126.118790 C186.713593,130.979126 189.085648,136.448059 189.055374,143.437057 C188.899490,179.418961 188.911179,215.402191 189.046661,251.384262 C189.072296,258.190796 186.742920,263.653717 181.982727,268.323273 C164.624405,285.351227 147.295807,302.409485 129.705566,319.705719z'
  const rightGlyph =
    'M276.070923,246.906128 C288.284363,258.985870 300.156097,270.902100 312.235931,282.603485 C315.158752,285.434784 315.417542,287.246246 312.383484,290.248932 C301.143494,301.372498 290.168549,312.763733 279.075592,324.036255 C278.168030,324.958496 277.121307,325.743835 275.898315,326.801086 C274.628357,325.711792 273.460663,324.822968 272.422150,323.802673 C253.888397,305.594757 235.418701,287.321289 216.818268,269.181854 C211.508789,264.003937 208.872726,258.136688 208.914001,250.565842 C209.108337,214.917786 209.084808,179.267715 208.928864,143.619293 C208.898407,136.654907 211.130066,131.122162 216.052216,126.246094 C234.867538,107.606842 253.537521,88.820908 272.274780,70.102730 C273.313202,69.065353 274.468597,68.145027 275.264038,67.440727 C288.353516,80.579514 301.213470,93.487869 314.597534,106.922356 C295.163391,126.421753 275.214752,146.437363 255.266113,166.452972 C255.540176,166.940353 255.814240,167.427734 256.088318,167.915100 C257.983887,168.035736 259.879425,168.260345 261.775085,168.261551 C295.425201,168.282852 329.075287,168.273544 362.725403,168.279831 C369.598907,168.281113 369.776215,168.463593 369.778931,175.252213 C369.784882,189.911667 369.646088,204.573074 369.861206,219.229355 C369.925110,223.585022 368.554596,224.976288 364.148865,224.956406 C329.833130,224.801605 295.516388,224.869598 261.199951,224.868744 C259.297974,224.868698 257.396027,224.868744 254.866638,224.868744 C262.350708,232.658707 269.078217,239.661194 276.070923,246.906128z'
  return (
    <svg {...props} xmlns='http://www.w3.org/2000/svg' viewBox='0 0 398 394' fill='none'>
      <path fill={brandFill} d={leftGlyph} />
      <path fill={brandFill} d={rightGlyph} />
    </svg>
  )
}

View File

@@ -7,6 +7,7 @@ import { Button } from '@/components/ui/button'
import { Collapsible, CollapsibleContent, CollapsibleTrigger } from '@/components/ui/collapsible'
import type { ToolCallGroup, ToolCallState } from '@/lib/copilot/types'
import { cn } from '@/lib/core/utils/cn'
import { formatDuration } from '@/lib/core/utils/formatting'
interface ToolCallProps {
toolCall: ToolCallState
@@ -225,11 +226,6 @@ export function ToolCallCompletion({ toolCall, isCompact = false }: ToolCallProp
const isError = toolCall.state === 'error'
const isAborted = toolCall.state === 'aborted'
const formatDuration = (duration?: number) => {
if (!duration) return ''
return duration < 1000 ? `${duration}ms` : `${(duration / 1000).toFixed(1)}s`
}
return (
<div
className={cn(
@@ -279,7 +275,7 @@ export function ToolCallCompletion({ toolCall, isCompact = false }: ToolCallProp
)}
style={{ fontSize: '0.625rem' }}
>
{formatDuration(toolCall.duration)}
{toolCall.duration ? formatDuration(toolCall.duration, { precision: 1 }) : ''}
</Badge>
)}
</div>

43
apps/sim/ee/LICENSE Normal file
View File

@@ -0,0 +1,43 @@
Sim Enterprise License
Copyright (c) 2025-present Sim Studio, Inc.
This software and associated documentation files (the "Software") are licensed
under the following terms:
1. LICENSE GRANT
Subject to the terms of this license, Sim Studio, Inc. grants you a limited,
non-exclusive, non-transferable license to use the Software for:
- Development, testing, and evaluation purposes
- Internal non-production use
Production use of the Software requires a valid Sim Enterprise subscription.
2. RESTRICTIONS
You may not:
- Use the Software in production without a valid Enterprise subscription
- Modify, adapt, or create derivative works of the Software
- Redistribute, sublicense, or transfer the Software
- Remove or alter any proprietary notices in the Software
3. ENTERPRISE SUBSCRIPTION
Production deployment of enterprise features requires an active Sim Enterprise
subscription. Contact sales@simstudio.ai for licensing information.
4. DISCLAIMER
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
5. LIMITATION OF LIABILITY
IN NO EVENT SHALL SIM STUDIO, INC. BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY ARISING FROM THE USE OF THE SOFTWARE.
For questions about enterprise licensing, contact: sales@simstudio.ai

21
apps/sim/ee/README.md Normal file
View File

@@ -0,0 +1,21 @@
# Sim Enterprise Edition
This directory contains enterprise features that require a Sim Enterprise subscription
for production use.
## Features
- **SSO (Single Sign-On)**: OIDC and SAML authentication integration
- **Access Control**: Permission groups for fine-grained user access management
- **Credential Sets**: Shared credential pools for email polling workflows
## Licensing
See [LICENSE](./LICENSE) for terms. Development and testing use is permitted.
Production deployment requires an active Enterprise subscription.
## Architecture
Enterprise features are imported directly throughout the codebase. The `ee/` directory
is required at build time. Feature visibility is controlled at runtime via environment
variables (e.g., `NEXT_PUBLIC_ACCESS_CONTROL_ENABLED`, `NEXT_PUBLIC_SSO_ENABLED`).

View File

@@ -29,7 +29,6 @@ import type { PermissionGroupConfig } from '@/lib/permission-groups/types'
import { getUserColor } from '@/lib/workspaces/colors'
import { getUserRole } from '@/lib/workspaces/organization'
import { getAllBlocks } from '@/blocks'
import { useOrganization, useOrganizations } from '@/hooks/queries/organization'
import {
type PermissionGroup,
useBulkAddPermissionGroupMembers,
@@ -39,7 +38,8 @@ import {
usePermissionGroups,
useRemovePermissionGroupMember,
useUpdatePermissionGroup,
} from '@/hooks/queries/permission-groups'
} from '@/ee/access-control/hooks/permission-groups'
import { useOrganization, useOrganizations } from '@/hooks/queries/organization'
import { useSubscriptionData } from '@/hooks/queries/subscription'
import { PROVIDER_DEFINITIONS } from '@/providers/models'
import { getAllProviderIds } from '@/providers/utils'
@@ -255,7 +255,6 @@ export function AccessControl() {
queryEnabled
)
// Show loading while dependencies load, or while permission groups query is pending
const isLoading = orgsLoading || subLoading || (queryEnabled && groupsLoading)
const { data: organization } = useOrganization(activeOrganization?.id || '')
@@ -410,10 +409,8 @@ export function AccessControl() {
}, [viewingGroup, editingConfig])
const allBlocks = useMemo(() => {
// Filter out hidden blocks and start_trigger (which should never be disabled)
const blocks = getAllBlocks().filter((b) => !b.hideFromToolbar && b.type !== 'start_trigger')
return blocks.sort((a, b) => {
// Group by category: triggers first, then blocks, then tools
const categoryOrder = { triggers: 0, blocks: 1, tools: 2 }
const catA = categoryOrder[a.category] ?? 3
const catB = categoryOrder[b.category] ?? 3
@@ -555,10 +552,9 @@ export function AccessControl() {
}, [viewingGroup, editingConfig, activeOrganization?.id, updatePermissionGroup])
const handleOpenAddMembersModal = useCallback(() => {
const existingMemberUserIds = new Set(members.map((m) => m.userId))
setSelectedMemberIds(new Set())
setShowAddMembersModal(true)
}, [members])
}, [])
const handleAddSelectedMembers = useCallback(async () => {
if (!viewingGroup || selectedMemberIds.size === 0) return
@@ -891,7 +887,6 @@ export function AccessControl() {
prev
? {
...prev,
// When deselecting all, keep start_trigger allowed (it should never be disabled)
allowedIntegrations: allAllowed ? ['start_trigger'] : null,
}
: prev

View File

@@ -1,3 +1,5 @@
'use client'
import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query'
import type { PermissionGroupConfig } from '@/lib/permission-groups/types'
import { fetchJson } from '@/hooks/selectors/helpers'

View File

@@ -11,55 +11,13 @@ import { isBillingEnabled } from '@/lib/core/config/feature-flags'
import { cn } from '@/lib/core/utils/cn'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { getUserRole } from '@/lib/workspaces/organization/utils'
import { SSO_TRUSTED_PROVIDERS } from '@/ee/sso/constants'
import { useConfigureSSO, useSSOProviders } from '@/ee/sso/hooks/sso'
import { useOrganizations } from '@/hooks/queries/organization'
import { useConfigureSSO, useSSOProviders } from '@/hooks/queries/sso'
import { useSubscriptionData } from '@/hooks/queries/subscription'
const logger = createLogger('SSO')
const TRUSTED_SSO_PROVIDERS = [
'okta',
'okta-saml',
'okta-prod',
'okta-dev',
'okta-staging',
'okta-test',
'azure-ad',
'azure-active-directory',
'azure-corp',
'azure-enterprise',
'adfs',
'adfs-company',
'adfs-corp',
'adfs-enterprise',
'auth0',
'auth0-prod',
'auth0-dev',
'auth0-staging',
'onelogin',
'onelogin-prod',
'onelogin-corp',
'jumpcloud',
'jumpcloud-prod',
'jumpcloud-corp',
'ping-identity',
'ping-federate',
'pingone',
'shibboleth',
'shibboleth-idp',
'google-workspace',
'google-sso',
'saml',
'saml2',
'saml-sso',
'oidc',
'oidc-sso',
'openid-connect',
'custom-sso',
'enterprise-sso',
'company-sso',
]
interface SSOProvider {
id: string
providerId: string
@@ -565,7 +523,7 @@ export function SSO() {
<Combobox
value={formData.providerId}
onChange={(value: string) => handleInputChange('providerId', value)}
options={TRUSTED_SSO_PROVIDERS.map((id) => ({
options={SSO_TRUSTED_PROVIDERS.map((id) => ({
label: id,
value: id,
}))}

View File

@@ -1,3 +1,7 @@
/**
* List of trusted SSO provider identifiers.
* Used for validation and autocomplete in SSO configuration.
*/
export const SSO_TRUSTED_PROVIDERS = [
'okta',
'okta-saml',

View File

@@ -1,3 +1,5 @@
'use client'
import { keepPreviousData, useMutation, useQuery, useQueryClient } from '@tanstack/react-query'
import { organizationKeys } from '@/hooks/queries/organization'
@@ -75,39 +77,3 @@ export function useConfigureSSO() {
},
})
}
/**
* Delete SSO provider mutation
*/
interface DeleteSSOParams {
providerId: string
orgId?: string
}
export function useDeleteSSO() {
const queryClient = useQueryClient()
return useMutation({
mutationFn: async ({ providerId }: DeleteSSOParams) => {
const response = await fetch(`/api/auth/sso/providers/${providerId}`, {
method: 'DELETE',
})
if (!response.ok) {
const error = await response.json()
throw new Error(error.message || 'Failed to delete SSO provider')
}
return response.json()
},
onSuccess: (_data, variables) => {
queryClient.invalidateQueries({ queryKey: ssoKeys.providers() })
if (variables.orgId) {
queryClient.invalidateQueries({
queryKey: organizationKeys.detail(variables.orgId),
})
}
},
})
}

View File

@@ -5,6 +5,7 @@ import {
hydrateUserFilesWithBase64,
} from '@/lib/uploads/utils/user-file-base64.server'
import { sanitizeInputFormat, sanitizeTools } from '@/lib/workflows/comparison/normalize'
import { validateBlockType } from '@/ee/access-control/utils/permission-check'
import {
BlockType,
buildResumeApiUrl,
@@ -31,7 +32,6 @@ import { streamingResponseFormatProcessor } from '@/executor/utils'
import { buildBlockExecutionError, normalizeError } from '@/executor/utils/errors'
import { isJSONString } from '@/executor/utils/json'
import { filterOutputForLog } from '@/executor/utils/output-filter'
import { validateBlockType } from '@/executor/utils/permission-check'
import type { VariableResolver } from '@/executor/variables/resolver'
import type { SerializedBlock } from '@/serializer/types'
import type { SubflowType } from '@/stores/workflows/workflow/types'

View File

@@ -6,6 +6,12 @@ import { createMcpToolId } from '@/lib/mcp/utils'
import { refreshTokenIfNeeded } from '@/app/api/auth/oauth/utils'
import { getAllBlocks } from '@/blocks'
import type { BlockOutput } from '@/blocks/types'
import {
validateBlockType,
validateCustomToolsAllowed,
validateMcpToolsAllowed,
validateModelProvider,
} from '@/ee/access-control/utils/permission-check'
import { AGENT, BlockType, DEFAULTS, REFERENCE, stripCustomToolPrefix } from '@/executor/constants'
import { memoryService } from '@/executor/handlers/agent/memory'
import type {
@@ -18,12 +24,6 @@ import type { BlockHandler, ExecutionContext, StreamingExecution } from '@/execu
import { collectBlockData } from '@/executor/utils/block-data'
import { buildAPIUrl, buildAuthHeaders } from '@/executor/utils/http'
import { stringifyJSON } from '@/executor/utils/json'
import {
validateBlockType,
validateCustomToolsAllowed,
validateMcpToolsAllowed,
validateModelProvider,
} from '@/executor/utils/permission-check'
import { executeProviderRequest } from '@/providers'
import { getProviderFromModel, transformBlockTool } from '@/providers/utils'
import type { SerializedBlock } from '@/serializer/types'

View File

@@ -4,11 +4,11 @@ import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { refreshTokenIfNeeded } from '@/app/api/auth/oauth/utils'
import type { BlockOutput } from '@/blocks/types'
import { validateModelProvider } from '@/ee/access-control/utils/permission-check'
import { BlockType, DEFAULTS, EVALUATOR } from '@/executor/constants'
import type { BlockHandler, ExecutionContext } from '@/executor/types'
import { buildAPIUrl, buildAuthHeaders, extractAPIErrorMessage } from '@/executor/utils/http'
import { isJSONString, parseJSON, stringifyJSON } from '@/executor/utils/json'
import { validateModelProvider } from '@/executor/utils/permission-check'
import { calculateCost, getProviderFromModel } from '@/providers/utils'
import type { SerializedBlock } from '@/serializer/types'

View File

@@ -6,6 +6,7 @@ import { getBaseUrl } from '@/lib/core/utils/urls'
import { refreshTokenIfNeeded } from '@/app/api/auth/oauth/utils'
import { generateRouterPrompt, generateRouterV2Prompt } from '@/blocks/blocks/router'
import type { BlockOutput } from '@/blocks/types'
import { validateModelProvider } from '@/ee/access-control/utils/permission-check'
import {
BlockType,
DEFAULTS,
@@ -15,7 +16,6 @@ import {
} from '@/executor/constants'
import type { BlockHandler, ExecutionContext } from '@/executor/types'
import { buildAuthHeaders } from '@/executor/utils/http'
import { validateModelProvider } from '@/executor/utils/permission-check'
import { calculateCost, getProviderFromModel } from '@/providers/utils'
import type { SerializedBlock } from '@/serializer/types'

View File

@@ -1,3 +1,5 @@
'use client'
import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query'
import { fetchJson } from '@/hooks/selectors/helpers'

View File

@@ -146,10 +146,6 @@ export function useCollaborativeWorkflow() {
cancelOperationsForVariable,
} = useOperationQueue()
const isInActiveRoom = useCallback(() => {
return !!currentWorkflowId && activeWorkflowId === currentWorkflowId
}, [currentWorkflowId, activeWorkflowId])
// Register emit functions with operation queue store
useEffect(() => {
registerEmitFunctions(
@@ -162,10 +158,19 @@ export function useCollaborativeWorkflow() {
useEffect(() => {
const handleWorkflowOperation = (data: any) => {
const { operation, target, payload, userId } = data
const { operation, target, payload, userId, metadata } = data
if (isApplyingRemoteChange.current) return
// Filter broadcasts by workflowId to prevent cross-workflow updates
if (metadata?.workflowId && metadata.workflowId !== activeWorkflowId) {
logger.debug('Ignoring workflow operation for different workflow', {
broadcastWorkflowId: metadata.workflowId,
activeWorkflowId,
})
return
}
logger.info(`Received ${operation} on ${target} from user ${userId}`)
// Apply the operation to local state
@@ -436,16 +441,24 @@ export function useCollaborativeWorkflow() {
}
const handleSubblockUpdate = (data: any) => {
const { blockId, subblockId, value, userId } = data
const { workflowId, blockId, subblockId, value, userId } = data
if (isApplyingRemoteChange.current) return
// Filter broadcasts by workflowId to prevent cross-workflow updates
if (workflowId && workflowId !== activeWorkflowId) {
logger.debug('Ignoring subblock update for different workflow', {
broadcastWorkflowId: workflowId,
activeWorkflowId,
})
return
}
logger.info(`Received subblock update from user ${userId}: ${blockId}.${subblockId}`)
isApplyingRemoteChange.current = true
try {
// The setValue function automatically uses the active workflow ID
useSubBlockStore.getState().setValue(blockId, subblockId, value)
const blockType = useWorkflowStore.getState().blocks?.[blockId]?.type
if (activeWorkflowId && blockType === 'function' && subblockId === 'code') {
@@ -459,10 +472,19 @@ export function useCollaborativeWorkflow() {
}
const handleVariableUpdate = (data: any) => {
const { variableId, field, value, userId } = data
const { workflowId, variableId, field, value, userId } = data
if (isApplyingRemoteChange.current) return
// Filter broadcasts by workflowId to prevent cross-workflow updates
if (workflowId && workflowId !== activeWorkflowId) {
logger.debug('Ignoring variable update for different workflow', {
broadcastWorkflowId: workflowId,
activeWorkflowId,
})
return
}
logger.info(`Received variable update from user ${userId}: ${variableId}.${field}`)
isApplyingRemoteChange.current = true
@@ -623,13 +645,9 @@ export function useCollaborativeWorkflow() {
return
}
if (!isInActiveRoom()) {
logger.debug('Skipping operation - not in active workflow', {
currentWorkflowId,
activeWorkflowId,
operation,
target,
})
// Queue operations if we have an active workflow - queue handles socket readiness
if (!activeWorkflowId) {
logger.debug('Skipping operation - no active workflow', { operation, target })
return
}
@@ -642,20 +660,13 @@ export function useCollaborativeWorkflow() {
target,
payload,
},
workflowId: activeWorkflowId || '',
workflowId: activeWorkflowId,
userId: session?.user?.id || 'unknown',
})
localAction()
},
[
addToQueue,
session?.user?.id,
isBaselineDiffView,
activeWorkflowId,
isInActiveRoom,
currentWorkflowId,
]
[addToQueue, session?.user?.id, isBaselineDiffView, activeWorkflowId]
)
const collaborativeBatchUpdatePositions = useCallback(
@@ -669,8 +680,8 @@ export function useCollaborativeWorkflow() {
return
}
if (!isInActiveRoom()) {
logger.debug('Skipping batch position update - not in active workflow')
if (!activeWorkflowId) {
logger.debug('Skipping batch position update - no active workflow')
return
}
@@ -714,7 +725,7 @@ export function useCollaborativeWorkflow() {
}
}
},
[isBaselineDiffView, addToQueue, activeWorkflowId, session?.user?.id, isInActiveRoom, undoRedo]
[isBaselineDiffView, addToQueue, activeWorkflowId, session?.user?.id, undoRedo]
)
const collaborativeUpdateBlockName = useCallback(
@@ -858,8 +869,8 @@ export function useCollaborativeWorkflow() {
return
}
if (!isInActiveRoom()) {
logger.debug('Skipping batch update parent - not in active workflow')
if (!activeWorkflowId) {
logger.debug('Skipping batch update parent - no active workflow')
return
}
@@ -928,7 +939,7 @@ export function useCollaborativeWorkflow() {
logger.debug('Batch updated parent for blocks', { updateCount: updates.length })
},
[isBaselineDiffView, isInActiveRoom, undoRedo, addToQueue, activeWorkflowId, session?.user?.id]
[isBaselineDiffView, undoRedo, addToQueue, activeWorkflowId, session?.user?.id]
)
const collaborativeToggleBlockAdvancedMode = useCallback(
@@ -1020,8 +1031,8 @@ export function useCollaborativeWorkflow() {
return false
}
if (!isInActiveRoom()) {
logger.debug('Skipping batch add edges - not in active workflow')
if (!activeWorkflowId) {
logger.debug('Skipping batch add edges - no active workflow')
return false
}
@@ -1055,7 +1066,7 @@ export function useCollaborativeWorkflow() {
return true
},
[isBaselineDiffView, addToQueue, activeWorkflowId, session?.user?.id, isInActiveRoom, undoRedo]
[isBaselineDiffView, addToQueue, activeWorkflowId, session?.user?.id, undoRedo]
)
const collaborativeBatchRemoveEdges = useCallback(
@@ -1064,8 +1075,8 @@ export function useCollaborativeWorkflow() {
return false
}
if (!isInActiveRoom()) {
logger.debug('Skipping batch remove edges - not in active workflow')
if (!activeWorkflowId) {
logger.debug('Skipping batch remove edges - no active workflow')
return false
}
@@ -1113,7 +1124,7 @@ export function useCollaborativeWorkflow() {
logger.info('Batch removed edges', { count: validEdgeIds.length })
return true
},
[isBaselineDiffView, isInActiveRoom, addToQueue, activeWorkflowId, session, undoRedo]
[isBaselineDiffView, addToQueue, activeWorkflowId, session, undoRedo]
)
const collaborativeSetSubblockValue = useCallback(
@@ -1148,11 +1159,9 @@ export function useCollaborativeWorkflow() {
// Best-effort; do not block on clearing
}
// Only emit to socket if in active room
if (!isInActiveRoom()) {
logger.debug('Local update applied, skipping socket emit - not in active workflow', {
currentWorkflowId,
activeWorkflowId,
// Queue socket operation if we have an active workflow
if (!activeWorkflowId) {
logger.debug('Local update applied, skipping socket queue - no active workflow', {
blockId,
subblockId,
})
@@ -1174,14 +1183,7 @@ export function useCollaborativeWorkflow() {
userId: session?.user?.id || 'unknown',
})
},
[
currentWorkflowId,
activeWorkflowId,
addToQueue,
session?.user?.id,
isBaselineDiffView,
isInActiveRoom,
]
[activeWorkflowId, addToQueue, session?.user?.id, isBaselineDiffView]
)
// Immediate tag selection (uses queue but processes immediately, no debouncing)
@@ -1193,13 +1195,8 @@ export function useCollaborativeWorkflow() {
return
}
if (!isInActiveRoom()) {
logger.debug('Skipping tag selection - not in active workflow', {
currentWorkflowId,
activeWorkflowId,
blockId,
subblockId,
})
if (!activeWorkflowId) {
logger.debug('Skipping tag selection - no active workflow', { blockId, subblockId })
return
}
@@ -1220,14 +1217,7 @@ export function useCollaborativeWorkflow() {
userId: session?.user?.id || 'unknown',
})
},
[
isBaselineDiffView,
addToQueue,
currentWorkflowId,
activeWorkflowId,
session?.user?.id,
isInActiveRoom,
]
[isBaselineDiffView, addToQueue, activeWorkflowId, session?.user?.id]
)
const collaborativeUpdateLoopType = useCallback(
@@ -1514,8 +1504,8 @@ export function useCollaborativeWorkflow() {
subBlockValues: Record<string, Record<string, unknown>> = {},
options?: { skipUndoRedo?: boolean }
) => {
if (!isInActiveRoom()) {
logger.debug('Skipping batch add blocks - not in active workflow')
if (!activeWorkflowId) {
logger.debug('Skipping batch add blocks - no active workflow')
return false
}
@@ -1568,7 +1558,7 @@ export function useCollaborativeWorkflow() {
return true
},
[addToQueue, activeWorkflowId, session?.user?.id, isBaselineDiffView, isInActiveRoom, undoRedo]
[addToQueue, activeWorkflowId, session?.user?.id, isBaselineDiffView, undoRedo]
)
const collaborativeBatchRemoveBlocks = useCallback(
@@ -1577,8 +1567,8 @@ export function useCollaborativeWorkflow() {
return false
}
if (!isInActiveRoom()) {
logger.debug('Skipping batch remove blocks - not in active workflow')
if (!activeWorkflowId) {
logger.debug('Skipping batch remove blocks - no active workflow')
return false
}
@@ -1662,7 +1652,6 @@ export function useCollaborativeWorkflow() {
addToQueue,
activeWorkflowId,
session?.user?.id,
isInActiveRoom,
cancelOperationsForBlock,
undoRedo,
]

View File

@@ -1,3 +1,5 @@
'use client'
import { useMemo } from 'react'
import { getEnv, isTruthy } from '@/lib/core/config/env'
import { isAccessControlEnabled, isHosted } from '@/lib/core/config/feature-flags'
@@ -5,8 +7,8 @@ import {
DEFAULT_PERMISSION_GROUP_CONFIG,
type PermissionGroupConfig,
} from '@/lib/permission-groups/types'
import { useUserPermissionConfig } from '@/ee/access-control/hooks/permission-groups'
import { useOrganizations } from '@/hooks/queries/organization'
import { useUserPermissionConfig } from '@/hooks/queries/permission-groups'
export interface PermissionConfigResult {
config: PermissionGroupConfig

View File

@@ -29,7 +29,6 @@ import {
useUndoRedoStore,
} from '@/stores/undo-redo'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
import type { BlockState } from '@/stores/workflows/workflow/types'
@@ -504,47 +503,9 @@ export function useUndoRedo() {
userId,
})
blocksToAdd.forEach((block) => {
useWorkflowStore
.getState()
.addBlock(
block.id,
block.type,
block.name,
block.position,
block.data,
block.data?.parentId,
block.data?.extent,
{
enabled: block.enabled,
horizontalHandles: block.horizontalHandles,
advancedMode: block.advancedMode,
triggerMode: block.triggerMode,
height: block.height,
}
)
})
if (subBlockValues && Object.keys(subBlockValues).length > 0) {
useSubBlockStore.setState((state) => ({
workflowValues: {
...state.workflowValues,
[activeWorkflowId]: {
...state.workflowValues[activeWorkflowId],
...subBlockValues,
},
},
}))
}
if (edgeSnapshots && edgeSnapshots.length > 0) {
const edgesToAdd = edgeSnapshots.filter(
(edge) => !useWorkflowStore.getState().edges.find((e) => e.id === edge.id)
)
if (edgesToAdd.length > 0) {
useWorkflowStore.getState().batchAddEdges(edgesToAdd)
}
}
useWorkflowStore
.getState()
.batchAddBlocks(blocksToAdd, edgeSnapshots || [], subBlockValues || {})
break
}
case UNDO_REDO_OPERATIONS.BATCH_REMOVE_EDGES: {
@@ -1085,47 +1046,9 @@ export function useUndoRedo() {
userId,
})
blocksToAdd.forEach((block) => {
useWorkflowStore
.getState()
.addBlock(
block.id,
block.type,
block.name,
block.position,
block.data,
block.data?.parentId,
block.data?.extent,
{
enabled: block.enabled,
horizontalHandles: block.horizontalHandles,
advancedMode: block.advancedMode,
triggerMode: block.triggerMode,
height: block.height,
}
)
})
if (subBlockValues && Object.keys(subBlockValues).length > 0) {
useSubBlockStore.setState((state) => ({
workflowValues: {
...state.workflowValues,
[activeWorkflowId]: {
...state.workflowValues[activeWorkflowId],
...subBlockValues,
},
},
}))
}
if (edgeSnapshots && edgeSnapshots.length > 0) {
const edgesToAdd = edgeSnapshots.filter(
(edge) => !useWorkflowStore.getState().edges.find((e) => e.id === edge.id)
)
if (edgesToAdd.length > 0) {
useWorkflowStore.getState().batchAddEdges(edgesToAdd)
}
}
useWorkflowStore
.getState()
.batchAddBlocks(blocksToAdd, edgeSnapshots || [], subBlockValues || {})
break
}
case UNDO_REDO_OPERATIONS.BATCH_REMOVE_BLOCKS: {

View File

@@ -59,8 +59,8 @@ import { sendEmail } from '@/lib/messaging/email/mailer'
import { getFromEmailAddress, getPersonalEmailFrom } from '@/lib/messaging/email/utils'
import { quickValidateEmail } from '@/lib/messaging/email/validation'
import { syncAllWebhooksForCredentialSet } from '@/lib/webhooks/utils.server'
import { SSO_TRUSTED_PROVIDERS } from '@/ee/sso/constants'
import { createAnonymousSession, ensureAnonymousUserExists } from './anonymous'
import { SSO_TRUSTED_PROVIDERS } from './sso/constants'
const logger = createLogger('Auth')

View File

@@ -1,20 +1,37 @@
import { db } from '@sim/db'
import * as schema from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { hasActiveSubscription } from '@/lib/billing'
const logger = createLogger('BillingAuthorization')
/**
* Check if a user is authorized to manage billing for a given reference ID
* Reference ID can be either a user ID (individual subscription) or organization ID (team subscription)
*
* This function also performs duplicate subscription validation for organizations:
* - Rejects if an organization already has an active subscription (prevents duplicates)
* - Personal subscriptions (referenceId === userId) skip this check to allow upgrades
*/
export async function authorizeSubscriptionReference(
userId: string,
referenceId: string
): Promise<boolean> {
// User can always manage their own subscriptions
// User can always manage their own subscriptions (Pro upgrades, etc.)
if (referenceId === userId) {
return true
}
// For organizations: check for existing active subscriptions to prevent duplicates
if (await hasActiveSubscription(referenceId)) {
logger.warn('Blocking checkout - active subscription already exists for organization', {
userId,
referenceId,
})
return false
}
// Check if referenceId is an organizationId the user has admin rights to
const members = await db
.select()

View File

@@ -25,9 +25,11 @@ export function useSubscriptionUpgrade() {
}
let currentSubscriptionId: string | undefined
let allSubscriptions: any[] = []
try {
const listResult = await client.subscription.list()
const activePersonalSub = listResult.data?.find(
allSubscriptions = listResult.data || []
const activePersonalSub = allSubscriptions.find(
(sub: any) => sub.status === 'active' && sub.referenceId === userId
)
currentSubscriptionId = activePersonalSub?.id
@@ -50,6 +52,25 @@ export function useSubscriptionUpgrade() {
)
if (existingOrg) {
// Check if this org already has an active team subscription
const existingTeamSub = allSubscriptions.find(
(sub: any) =>
sub.status === 'active' &&
sub.referenceId === existingOrg.id &&
(sub.plan === 'team' || sub.plan === 'enterprise')
)
if (existingTeamSub) {
logger.warn('Organization already has an active team subscription', {
userId,
organizationId: existingOrg.id,
existingSubscriptionId: existingTeamSub.id,
})
throw new Error(
'This organization already has an active team subscription. Please manage it from the billing settings.'
)
}
logger.info('Using existing organization for team plan upgrade', {
userId,
organizationId: existingOrg.id,

View File

@@ -1,5 +1,5 @@
import { db } from '@sim/db'
import { member, subscription } from '@sim/db/schema'
import { member, organization, subscription } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, inArray } from 'drizzle-orm'
import { checkEnterprisePlan, checkProPlan, checkTeamPlan } from '@/lib/billing/subscriptions/utils'
@@ -26,10 +26,22 @@ export async function getHighestPrioritySubscription(userId: string) {
let orgSubs: typeof personalSubs = []
if (orgIds.length > 0) {
orgSubs = await db
.select()
.from(subscription)
.where(and(inArray(subscription.referenceId, orgIds), eq(subscription.status, 'active')))
// Verify orgs exist to filter out orphaned subscriptions
const existingOrgs = await db
.select({ id: organization.id })
.from(organization)
.where(inArray(organization.id, orgIds))
const validOrgIds = existingOrgs.map((o) => o.id)
if (validOrgIds.length > 0) {
orgSubs = await db
.select()
.from(subscription)
.where(
and(inArray(subscription.referenceId, validOrgIds), eq(subscription.status, 'active'))
)
}
}
const allSubs = [...personalSubs, ...orgSubs]

View File

@@ -25,6 +25,28 @@ const logger = createLogger('SubscriptionCore')
export { getHighestPrioritySubscription }
/**
 * Determine whether a referenceId (user ID or org ID) currently holds an
 * active subscription. Used for duplicate subscription prevention.
 *
 * Fails closed: if the lookup itself errors out, `true` is returned so a
 * duplicate subscription cannot be created while the database is unhealthy.
 */
export async function hasActiveSubscription(referenceId: string): Promise<boolean> {
  try {
    const rows = await db
      .select({ id: subscription.id })
      .from(subscription)
      .where(and(eq(subscription.referenceId, referenceId), eq(subscription.status, 'active')))
      .limit(1)
    return rows.length > 0
  } catch (error) {
    logger.error('Error checking active subscription', { error, referenceId })
    // Fail closed: report an existing subscription rather than risk a duplicate.
    return true
  }
}
/**
* Check if user is on Pro plan (direct or via organization)
*/

View File

@@ -11,6 +11,7 @@ export {
getHighestPrioritySubscription as getActiveSubscription,
getUserSubscriptionState as getSubscriptionState,
hasAccessControlAccess,
hasActiveSubscription,
hasCredentialSetsAccess,
hasSSOAccess,
isEnterpriseOrgAdminOrOwner,
@@ -32,6 +33,11 @@ export {
} from '@/lib/billing/core/usage'
export * from '@/lib/billing/credits/balance'
export * from '@/lib/billing/credits/purchase'
export {
blockOrgMembers,
getOrgMemberIds,
unblockOrgMembers,
} from '@/lib/billing/organizations/membership'
export * from '@/lib/billing/subscriptions/utils'
export { canEditUsageLimit as canEditLimit } from '@/lib/billing/subscriptions/utils'
export * from '@/lib/billing/types'

View File

@@ -8,6 +8,7 @@ import {
} from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import { hasActiveSubscription } from '@/lib/billing'
import { getPlanPricing } from '@/lib/billing/core/billing'
import { syncUsageLimitsFromSubscription } from '@/lib/billing/core/usage'
@@ -159,6 +160,16 @@ export async function ensureOrganizationForTeamSubscription(
if (existingMembership.length > 0) {
const membership = existingMembership[0]
if (membership.role === 'owner' || membership.role === 'admin') {
// Check if org already has an active subscription (prevent duplicates)
if (await hasActiveSubscription(membership.organizationId)) {
logger.error('Organization already has an active subscription', {
userId,
organizationId: membership.organizationId,
newSubscriptionId: subscription.id,
})
throw new Error('Organization already has an active subscription')
}
logger.info('User already owns/admins an org, using it', {
userId,
organizationId: membership.organizationId,

View File

@@ -15,13 +15,86 @@ import {
userStats,
} from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, sql } from 'drizzle-orm'
import { and, eq, inArray, isNull, ne, or, sql } from 'drizzle-orm'
import { syncUsageLimitsFromSubscription } from '@/lib/billing/core/usage'
import { requireStripeClient } from '@/lib/billing/stripe-client'
import { validateSeatAvailability } from '@/lib/billing/validation/seat-management'
const logger = createLogger('OrganizationMembership')
// Why an org member was billing-blocked; 'dispute' takes priority over 'payment_failed'.
export type BillingBlockReason = 'payment_failed' | 'dispute'
/**
* Get all member user IDs for an organization
*/
export async function getOrgMemberIds(organizationId: string): Promise<string[]> {
const members = await db
.select({ userId: member.userId })
.from(member)
.where(eq(member.organizationId, organizationId))
return members.map((m) => m.userId)
}
/**
 * Mark every member of an organization as billing-blocked.
 *
 * Reason priority: dispute > payment_failed. When blocking for
 * 'payment_failed', rows already blocked for 'dispute' are left untouched
 * so the higher-priority reason is never overwritten.
 *
 * @returns the number of member rows actually updated
 */
export async function blockOrgMembers(
  organizationId: string,
  reason: BillingBlockReason
): Promise<number> {
  const memberIds = await getOrgMemberIds(organizationId)
  if (memberIds.length === 0) return 0

  const membershipFilter = inArray(userStats.userId, memberIds)
  // A payment_failed block must not clobber an existing dispute block.
  const whereClause =
    reason === 'payment_failed'
      ? and(
          membershipFilter,
          or(ne(userStats.billingBlockedReason, 'dispute'), isNull(userStats.billingBlockedReason))
        )
      : membershipFilter

  const updated = await db
    .update(userStats)
    .set({ billingBlocked: true, billingBlockedReason: reason })
    .where(whereClause)
    .returning({ userId: userStats.userId })

  return updated.length
}
/**
 * Lift billing blocks from an organization's members, but only those blocked
 * for the given reason — members blocked for any other reason stay blocked.
 *
 * @returns the number of member rows actually updated
 */
export async function unblockOrgMembers(
  organizationId: string,
  reason: BillingBlockReason
): Promise<number> {
  const memberIds = await getOrgMemberIds(organizationId)
  if (memberIds.length === 0) return 0

  const updated = await db
    .update(userStats)
    .set({ billingBlocked: false, billingBlockedReason: null })
    .where(and(inArray(userStats.userId, memberIds), eq(userStats.billingBlockedReason, reason)))
    .returning({ userId: userStats.userId })

  return updated.length
}
export interface RestoreProResult {
restored: boolean
usageRestored: boolean

View File

@@ -1,8 +1,9 @@
import { db } from '@sim/db'
import { member, subscription, user, userStats } from '@sim/db/schema'
import { subscription, user, userStats } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import type Stripe from 'stripe'
import { blockOrgMembers, unblockOrgMembers } from '@/lib/billing'
import { requireStripeClient } from '@/lib/billing/stripe-client'
const logger = createLogger('DisputeWebhooks')
@@ -57,36 +58,34 @@ export async function handleChargeDispute(event: Stripe.Event): Promise<void> {
if (subs.length > 0) {
const orgId = subs[0].referenceId
const memberCount = await blockOrgMembers(orgId, 'dispute')
const owners = await db
.select({ userId: member.userId })
.from(member)
.where(and(eq(member.organizationId, orgId), eq(member.role, 'owner')))
.limit(1)
if (owners.length > 0) {
await db
.update(userStats)
.set({ billingBlocked: true, billingBlockedReason: 'dispute' })
.where(eq(userStats.userId, owners[0].userId))
logger.warn('Blocked org owner due to dispute', {
if (memberCount > 0) {
logger.warn('Blocked all org members due to dispute', {
disputeId: dispute.id,
ownerId: owners[0].userId,
organizationId: orgId,
memberCount,
})
}
}
}
/**
* Handles charge.dispute.closed - unblocks user if dispute was won
* Handles charge.dispute.closed - unblocks user if dispute was won or warning closed
*
* Status meanings:
* - 'won': Merchant won, customer's chargeback denied → unblock
* - 'lost': Customer won, money refunded → stay blocked (they owe us)
* - 'warning_closed': Pre-dispute inquiry closed without chargeback → unblock (false alarm)
*/
export async function handleDisputeClosed(event: Stripe.Event): Promise<void> {
const dispute = event.data.object as Stripe.Dispute
if (dispute.status !== 'won') {
logger.info('Dispute not won, user remains blocked', {
// Only unblock if we won or the warning was closed without a full dispute
const shouldUnblock = dispute.status === 'won' || dispute.status === 'warning_closed'
if (!shouldUnblock) {
logger.info('Dispute resolved against us, user remains blocked', {
disputeId: dispute.id,
status: dispute.status,
})
@@ -98,7 +97,7 @@ export async function handleDisputeClosed(event: Stripe.Event): Promise<void> {
return
}
// Find and unblock user (Pro plans)
// Find and unblock user (Pro plans) - only if blocked for dispute, not other reasons
const users = await db
.select({ id: user.id })
.from(user)
@@ -109,16 +108,17 @@ export async function handleDisputeClosed(event: Stripe.Event): Promise<void> {
await db
.update(userStats)
.set({ billingBlocked: false, billingBlockedReason: null })
.where(eq(userStats.userId, users[0].id))
.where(and(eq(userStats.userId, users[0].id), eq(userStats.billingBlockedReason, 'dispute')))
logger.info('Unblocked user after winning dispute', {
logger.info('Unblocked user after dispute resolved in our favor', {
disputeId: dispute.id,
userId: users[0].id,
status: dispute.status,
})
return
}
// Find and unblock org owner (Team/Enterprise)
// Find and unblock all org members (Team/Enterprise) - consistent with payment success
const subs = await db
.select({ referenceId: subscription.referenceId })
.from(subscription)
@@ -127,24 +127,13 @@ export async function handleDisputeClosed(event: Stripe.Event): Promise<void> {
if (subs.length > 0) {
const orgId = subs[0].referenceId
const memberCount = await unblockOrgMembers(orgId, 'dispute')
const owners = await db
.select({ userId: member.userId })
.from(member)
.where(and(eq(member.organizationId, orgId), eq(member.role, 'owner')))
.limit(1)
if (owners.length > 0) {
await db
.update(userStats)
.set({ billingBlocked: false, billingBlockedReason: null })
.where(eq(userStats.userId, owners[0].userId))
logger.info('Unblocked org owner after winning dispute', {
disputeId: dispute.id,
ownerId: owners[0].userId,
organizationId: orgId,
})
}
logger.info('Unblocked all org members after dispute resolved in our favor', {
disputeId: dispute.id,
organizationId: orgId,
memberCount,
status: dispute.status,
})
}
}

View File

@@ -8,12 +8,13 @@ import {
userStats,
} from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, inArray } from 'drizzle-orm'
import { and, eq, inArray, isNull, ne, or } from 'drizzle-orm'
import type Stripe from 'stripe'
import { getEmailSubject, PaymentFailedEmail, renderCreditPurchaseEmail } from '@/components/emails'
import { calculateSubscriptionOverage } from '@/lib/billing/core/billing'
import { addCredits, getCreditBalance, removeCredits } from '@/lib/billing/credits/balance'
import { setUsageLimitForCredits } from '@/lib/billing/credits/purchase'
import { blockOrgMembers, unblockOrgMembers } from '@/lib/billing/organizations/membership'
import { requireStripeClient } from '@/lib/billing/stripe-client'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { sendEmail } from '@/lib/messaging/email/mailer'
@@ -502,24 +503,7 @@ export async function handleInvoicePaymentSucceeded(event: Stripe.Event) {
}
if (sub.plan === 'team' || sub.plan === 'enterprise') {
const members = await db
.select({ userId: member.userId })
.from(member)
.where(eq(member.organizationId, sub.referenceId))
const memberIds = members.map((m) => m.userId)
if (memberIds.length > 0) {
// Only unblock users blocked for payment_failed, not disputes
await db
.update(userStats)
.set({ billingBlocked: false, billingBlockedReason: null })
.where(
and(
inArray(userStats.userId, memberIds),
eq(userStats.billingBlockedReason, 'payment_failed')
)
)
}
await unblockOrgMembers(sub.referenceId, 'payment_failed')
} else {
// Only unblock users blocked for payment_failed, not disputes
await db
@@ -616,28 +600,26 @@ export async function handleInvoicePaymentFailed(event: Stripe.Event) {
if (records.length > 0) {
const sub = records[0]
if (sub.plan === 'team' || sub.plan === 'enterprise') {
const members = await db
.select({ userId: member.userId })
.from(member)
.where(eq(member.organizationId, sub.referenceId))
const memberIds = members.map((m) => m.userId)
if (memberIds.length > 0) {
await db
.update(userStats)
.set({ billingBlocked: true, billingBlockedReason: 'payment_failed' })
.where(inArray(userStats.userId, memberIds))
}
const memberCount = await blockOrgMembers(sub.referenceId, 'payment_failed')
logger.info('Blocked team/enterprise members due to payment failure', {
organizationId: sub.referenceId,
memberCount: members.length,
memberCount,
isOverageInvoice,
})
} else {
// Don't overwrite dispute blocks (dispute > payment_failed priority)
await db
.update(userStats)
.set({ billingBlocked: true, billingBlockedReason: 'payment_failed' })
.where(eq(userStats.userId, sub.referenceId))
.where(
and(
eq(userStats.userId, sub.referenceId),
or(
ne(userStats.billingBlockedReason, 'dispute'),
isNull(userStats.billingBlockedReason)
)
)
)
logger.info('Blocked user due to payment failure', {
userId: sub.referenceId,
isOverageInvoice,

View File

@@ -3,6 +3,7 @@ import { member, organization, subscription } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, ne } from 'drizzle-orm'
import { calculateSubscriptionOverage } from '@/lib/billing/core/billing'
import { hasActiveSubscription } from '@/lib/billing/core/subscription'
import { syncUsageLimitsFromSubscription } from '@/lib/billing/core/usage'
import { restoreUserProSubscription } from '@/lib/billing/organizations/membership'
import { requireStripeClient } from '@/lib/billing/stripe-client'
@@ -52,14 +53,37 @@ async function restoreMemberProSubscriptions(organizationId: string): Promise<nu
/**
* Cleanup organization when team/enterprise subscription is deleted.
* - Checks if other active subscriptions point to this org (skip deletion if so)
* - Restores member Pro subscriptions
* - Deletes the organization
* - Deletes the organization (only if no other active subs)
* - Syncs usage limits for former members (resets to free or Pro tier)
*/
async function cleanupOrganizationSubscription(organizationId: string): Promise<{
restoredProCount: number
membersSynced: number
organizationDeleted: boolean
}> {
// Check if other active subscriptions still point to this org
// Note: The subscription being deleted is already marked as 'canceled' by better-auth
// before this handler runs, so we only find truly active ones
if (await hasActiveSubscription(organizationId)) {
logger.info('Skipping organization deletion - other active subscriptions exist', {
organizationId,
})
// Still sync limits for members since this subscription was deleted
const memberUserIds = await db
.select({ userId: member.userId })
.from(member)
.where(eq(member.organizationId, organizationId))
for (const m of memberUserIds) {
await syncUsageLimitsFromSubscription(m.userId)
}
return { restoredProCount: 0, membersSynced: memberUserIds.length, organizationDeleted: false }
}
// Get member userIds before deletion (needed for limit syncing after org deletion)
const memberUserIds = await db
.select({ userId: member.userId })
@@ -75,7 +99,7 @@ async function cleanupOrganizationSubscription(organizationId: string): Promise<
await syncUsageLimitsFromSubscription(m.userId)
}
return { restoredProCount, membersSynced: memberUserIds.length }
return { restoredProCount, membersSynced: memberUserIds.length, organizationDeleted: true }
}
/**
@@ -172,15 +196,14 @@ export async function handleSubscriptionDeleted(subscription: {
referenceId: subscription.referenceId,
})
const { restoredProCount, membersSynced } = await cleanupOrganizationSubscription(
subscription.referenceId
)
const { restoredProCount, membersSynced, organizationDeleted } =
await cleanupOrganizationSubscription(subscription.referenceId)
logger.info('Successfully processed enterprise subscription cancellation', {
subscriptionId: subscription.id,
stripeSubscriptionId,
restoredProCount,
organizationDeleted: true,
organizationDeleted,
membersSynced,
})
return
@@ -297,7 +320,7 @@ export async function handleSubscriptionDeleted(subscription: {
const cleanup = await cleanupOrganizationSubscription(subscription.referenceId)
restoredProCount = cleanup.restoredProCount
membersSynced = cleanup.membersSynced
organizationDeleted = true
organizationDeleted = cleanup.organizationDeleted
} else if (subscription.plan === 'pro') {
await syncUsageLimitsFromSubscription(subscription.referenceId)
membersSynced = 1

View File

@@ -5,8 +5,8 @@ import { and, eq, isNull } from 'drizzle-orm'
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'
import { sanitizeForCopilot } from '@/lib/workflows/sanitization/json-sanitizer'
import { isHiddenFromDisplay } from '@/blocks/types'
import { getUserPermissionConfig } from '@/ee/access-control/utils/permission-check'
import { escapeRegExp } from '@/executor/constants'
import { getUserPermissionConfig } from '@/executor/utils/permission-check'
import type { ChatContext } from '@/stores/panel/copilot/types'
export type AgentContextType =

View File

@@ -7,7 +7,7 @@ import {
} from '@/lib/copilot/tools/shared/schemas'
import { registry as blockRegistry, getLatestBlock } from '@/blocks/registry'
import { isHiddenFromDisplay, type SubBlockConfig } from '@/blocks/types'
import { getUserPermissionConfig } from '@/executor/utils/permission-check'
import { getUserPermissionConfig } from '@/ee/access-control/utils/permission-check'
import { PROVIDER_DEFINITIONS } from '@/providers/models'
import { tools as toolsRegistry } from '@/tools/registry'
import { getTrigger, isTriggerValid } from '@/triggers'

View File

@@ -6,7 +6,7 @@ import {
type GetBlockOptionsResultType,
} from '@/lib/copilot/tools/shared/schemas'
import { registry as blockRegistry, getLatestBlock } from '@/blocks/registry'
import { getUserPermissionConfig } from '@/executor/utils/permission-check'
import { getUserPermissionConfig } from '@/ee/access-control/utils/permission-check'
import { tools as toolsRegistry } from '@/tools/registry'
export const getBlockOptionsServerTool: BaseServerTool<

View File

@@ -6,7 +6,7 @@ import {
} from '@/lib/copilot/tools/shared/schemas'
import { registry as blockRegistry } from '@/blocks/registry'
import type { BlockConfig } from '@/blocks/types'
import { getUserPermissionConfig } from '@/executor/utils/permission-check'
import { getUserPermissionConfig } from '@/ee/access-control/utils/permission-check'
export const getBlocksAndToolsServerTool: BaseServerTool<
ReturnType<typeof GetBlocksAndToolsInput.parse>,

View File

@@ -8,7 +8,7 @@ import {
} from '@/lib/copilot/tools/shared/schemas'
import { registry as blockRegistry } from '@/blocks/registry'
import { AuthMode, type BlockConfig, isHiddenFromDisplay } from '@/blocks/types'
import { getUserPermissionConfig } from '@/executor/utils/permission-check'
import { getUserPermissionConfig } from '@/ee/access-control/utils/permission-check'
import { PROVIDER_DEFINITIONS } from '@/providers/models'
import { tools as toolsRegistry } from '@/tools/registry'
import { getTrigger, isTriggerValid } from '@/triggers'

View File

@@ -3,7 +3,7 @@ import { z } from 'zod'
import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool'
import { registry as blockRegistry } from '@/blocks/registry'
import type { BlockConfig } from '@/blocks/types'
import { getUserPermissionConfig } from '@/executor/utils/permission-check'
import { getUserPermissionConfig } from '@/ee/access-control/utils/permission-check'
export const GetTriggerBlocksInput = z.object({})
export const GetTriggerBlocksResult = z.object({

View File

@@ -15,8 +15,8 @@ import { buildCanonicalIndex, isCanonicalPair } from '@/lib/workflows/subblocks/
import { TriggerUtils } from '@/lib/workflows/triggers/triggers'
import { getAllBlocks, getBlock } from '@/blocks/registry'
import type { BlockConfig, SubBlockConfig } from '@/blocks/types'
import { getUserPermissionConfig } from '@/ee/access-control/utils/permission-check'
import { EDGE, normalizeName, RESERVED_BLOCK_NAMES } from '@/executor/constants'
import { getUserPermissionConfig } from '@/executor/utils/permission-check'
import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/workflow/utils'
import { TRIGGER_RUNTIME_SUBBLOCK_IDS } from '@/triggers/constants'

View File

@@ -161,14 +161,14 @@ export const env = createEnv({
// Rate Limiting Configuration
RATE_LIMIT_WINDOW_MS: z.string().optional().default('60000'), // Rate limit window duration in milliseconds (default: 1 minute)
MANUAL_EXECUTION_LIMIT: z.string().optional().default('999999'),// Manual execution bypass value (effectively unlimited)
RATE_LIMIT_FREE_SYNC: z.string().optional().default('10'), // Free tier sync API executions per minute
RATE_LIMIT_FREE_ASYNC: z.string().optional().default('50'), // Free tier async API executions per minute
RATE_LIMIT_PRO_SYNC: z.string().optional().default('25'), // Pro tier sync API executions per minute
RATE_LIMIT_PRO_ASYNC: z.string().optional().default('200'), // Pro tier async API executions per minute
RATE_LIMIT_TEAM_SYNC: z.string().optional().default('75'), // Team tier sync API executions per minute
RATE_LIMIT_TEAM_ASYNC: z.string().optional().default('500'), // Team tier async API executions per minute
RATE_LIMIT_ENTERPRISE_SYNC: z.string().optional().default('150'), // Enterprise tier sync API executions per minute
RATE_LIMIT_ENTERPRISE_ASYNC: z.string().optional().default('1000'), // Enterprise tier async API executions per minute
RATE_LIMIT_FREE_SYNC: z.string().optional().default('50'), // Free tier sync API executions per minute
RATE_LIMIT_FREE_ASYNC: z.string().optional().default('200'), // Free tier async API executions per minute
RATE_LIMIT_PRO_SYNC: z.string().optional().default('150'), // Pro tier sync API executions per minute
RATE_LIMIT_PRO_ASYNC: z.string().optional().default('1000'), // Pro tier async API executions per minute
RATE_LIMIT_TEAM_SYNC: z.string().optional().default('300'), // Team tier sync API executions per minute
RATE_LIMIT_TEAM_ASYNC: z.string().optional().default('2500'), // Team tier async API executions per minute
RATE_LIMIT_ENTERPRISE_SYNC: z.string().optional().default('600'), // Enterprise tier sync API executions per minute
RATE_LIMIT_ENTERPRISE_ASYNC: z.string().optional().default('5000'), // Enterprise tier async API executions per minute
// Knowledge Base Processing Configuration - Shared across all processing methods
KB_CONFIG_MAX_DURATION: z.number().optional().default(600), // Max processing duration in seconds (10 minutes)

View File

@@ -28,24 +28,24 @@ function createBucketConfig(ratePerMinute: number, burstMultiplier = 2): TokenBu
export const RATE_LIMITS: Record<SubscriptionPlan, RateLimitConfig> = {
free: {
sync: createBucketConfig(Number.parseInt(env.RATE_LIMIT_FREE_SYNC) || 10),
async: createBucketConfig(Number.parseInt(env.RATE_LIMIT_FREE_ASYNC) || 50),
apiEndpoint: createBucketConfig(10),
},
pro: {
sync: createBucketConfig(Number.parseInt(env.RATE_LIMIT_PRO_SYNC) || 25),
async: createBucketConfig(Number.parseInt(env.RATE_LIMIT_PRO_ASYNC) || 200),
sync: createBucketConfig(Number.parseInt(env.RATE_LIMIT_FREE_SYNC) || 50),
async: createBucketConfig(Number.parseInt(env.RATE_LIMIT_FREE_ASYNC) || 200),
apiEndpoint: createBucketConfig(30),
},
pro: {
sync: createBucketConfig(Number.parseInt(env.RATE_LIMIT_PRO_SYNC) || 150),
async: createBucketConfig(Number.parseInt(env.RATE_LIMIT_PRO_ASYNC) || 1000),
apiEndpoint: createBucketConfig(100),
},
team: {
sync: createBucketConfig(Number.parseInt(env.RATE_LIMIT_TEAM_SYNC) || 75),
async: createBucketConfig(Number.parseInt(env.RATE_LIMIT_TEAM_ASYNC) || 500),
apiEndpoint: createBucketConfig(60),
sync: createBucketConfig(Number.parseInt(env.RATE_LIMIT_TEAM_SYNC) || 300),
async: createBucketConfig(Number.parseInt(env.RATE_LIMIT_TEAM_ASYNC) || 2500),
apiEndpoint: createBucketConfig(200),
},
enterprise: {
sync: createBucketConfig(Number.parseInt(env.RATE_LIMIT_ENTERPRISE_SYNC) || 150),
async: createBucketConfig(Number.parseInt(env.RATE_LIMIT_ENTERPRISE_ASYNC) || 1000),
apiEndpoint: createBucketConfig(120),
sync: createBucketConfig(Number.parseInt(env.RATE_LIMIT_ENTERPRISE_SYNC) || 600),
async: createBucketConfig(Number.parseInt(env.RATE_LIMIT_ENTERPRISE_ASYNC) || 5000),
apiEndpoint: createBucketConfig(500),
},
}

View File

@@ -153,22 +153,50 @@ export function formatCompactTimestamp(iso: string): string {
}
/**
* Format a duration in milliseconds to a human-readable format
* @param durationMs - The duration in milliseconds
* Format a duration to a human-readable format
* @param duration - Duration in milliseconds (number) or as string (e.g., "500ms")
* @param options - Optional formatting options
* @param options.precision - Number of decimal places for seconds (default: 0)
* @returns A formatted duration string
* @param options.precision - Number of decimal places for seconds (default: 0), trailing zeros are stripped
* @returns A formatted duration string, or null if input is null/undefined
*/
export function formatDuration(durationMs: number, options?: { precision?: number }): string {
const precision = options?.precision ?? 0
if (durationMs < 1000) {
return `${durationMs}ms`
export function formatDuration(
duration: number | string | undefined | null,
options?: { precision?: number }
): string | null {
if (duration === undefined || duration === null) {
return null
}
const seconds = durationMs / 1000
// Parse string durations (e.g., "500ms", "0.44ms", "1234")
let ms: number
if (typeof duration === 'string') {
ms = Number.parseFloat(duration.replace(/[^0-9.-]/g, ''))
if (!Number.isFinite(ms)) {
return duration
}
} else {
ms = duration
}
const precision = options?.precision ?? 0
if (ms < 1) {
// Sub-millisecond: show with 2 decimal places
return `${ms.toFixed(2)}ms`
}
if (ms < 1000) {
// Milliseconds: round to integer
return `${Math.round(ms)}ms`
}
const seconds = ms / 1000
if (seconds < 60) {
return precision > 0 ? `${seconds.toFixed(precision)}s` : `${Math.floor(seconds)}s`
if (precision > 0) {
// Strip trailing zeros (e.g., "5.00s" -> "5s", "5.10s" -> "5.1s")
return `${seconds.toFixed(precision).replace(/\.?0+$/, '')}s`
}
return `${Math.floor(seconds)}s`
}
const minutes = Math.floor(seconds / 60)

View File

@@ -33,6 +33,7 @@ import type {
WorkflowExecutionSnapshot,
WorkflowState,
} from '@/lib/logs/types'
import { getWorkspaceBilledAccountUserId } from '@/lib/workspaces/utils'
export interface ToolCall {
name: string
@@ -503,7 +504,7 @@ export class ExecutionLogger implements IExecutionLoggerService {
}
try {
// Get the workflow record to get the userId
// Get the workflow record to get workspace and fallback userId
const [workflowRecord] = await db
.select()
.from(workflow)
@@ -515,7 +516,12 @@ export class ExecutionLogger implements IExecutionLoggerService {
return
}
const userId = workflowRecord.userId
let billingUserId: string | null = null
if (workflowRecord.workspaceId) {
billingUserId = await getWorkspaceBilledAccountUserId(workflowRecord.workspaceId)
}
const userId = billingUserId || workflowRecord.userId
const costToStore = costSummary.totalCost
const existing = await db.select().from(userStats).where(eq(userStats.userId, userId))

View File

@@ -199,10 +199,11 @@ export class McpClient {
protocolVersion: this.getNegotiatedVersion(),
})
const sdkResult = await this.client.callTool({
name: toolCall.name,
arguments: toolCall.arguments,
})
const sdkResult = await this.client.callTool(
{ name: toolCall.name, arguments: toolCall.arguments },
undefined,
{ timeout: 600000 } // 10 minutes - override SDK's 60s default
)
return sdkResult as McpToolResult
} catch (error) {

View File

@@ -39,16 +39,23 @@ export function cleanupPendingSubblocksForSocket(socketId: string): void {
export function setupSubblocksHandlers(socket: AuthenticatedSocket, roomManager: IRoomManager) {
socket.on('subblock-update', async (data) => {
const { blockId, subblockId, value, timestamp, operationId } = data
const {
workflowId: payloadWorkflowId,
blockId,
subblockId,
value,
timestamp,
operationId,
} = data
try {
const workflowId = await roomManager.getWorkflowIdForSocket(socket.id)
const sessionWorkflowId = await roomManager.getWorkflowIdForSocket(socket.id)
const session = await roomManager.getUserSession(socket.id)
if (!workflowId || !session) {
if (!sessionWorkflowId || !session) {
logger.debug(`Ignoring subblock update: socket not connected to any workflow room`, {
socketId: socket.id,
hasWorkflowId: !!workflowId,
hasWorkflowId: !!sessionWorkflowId,
hasSession: !!session,
})
socket.emit('operation-forbidden', {
@@ -61,6 +68,24 @@ export function setupSubblocksHandlers(socket: AuthenticatedSocket, roomManager:
return
}
const workflowId = payloadWorkflowId || sessionWorkflowId
if (payloadWorkflowId && payloadWorkflowId !== sessionWorkflowId) {
logger.warn('Workflow ID mismatch in subblock update', {
payloadWorkflowId,
sessionWorkflowId,
socketId: socket.id,
})
if (operationId) {
socket.emit('operation-failed', {
operationId,
error: 'Workflow ID mismatch',
retryable: true,
})
}
return
}
const hasRoom = await roomManager.hasWorkflowRoom(workflowId)
if (!hasRoom) {
logger.debug(`Ignoring subblock update: workflow room not found`, {
@@ -182,20 +207,17 @@ async function flushSubblockUpdate(
if (updateSuccessful) {
// Broadcast to room excluding all senders (works cross-pod via Redis adapter)
const senderSocketIds = [...pending.opToSocket.values()]
const broadcastPayload = {
workflowId,
blockId,
subblockId,
value,
timestamp,
}
if (senderSocketIds.length > 0) {
io.to(workflowId).except(senderSocketIds).emit('subblock-update', {
blockId,
subblockId,
value,
timestamp,
})
io.to(workflowId).except(senderSocketIds).emit('subblock-update', broadcastPayload)
} else {
io.to(workflowId).emit('subblock-update', {
blockId,
subblockId,
value,
timestamp,
})
io.to(workflowId).emit('subblock-update', broadcastPayload)
}
// Confirm all coalesced operationIds (io.to(socketId) works cross-pod)

View File

@@ -35,16 +35,16 @@ export function cleanupPendingVariablesForSocket(socketId: string): void {
export function setupVariablesHandlers(socket: AuthenticatedSocket, roomManager: IRoomManager) {
socket.on('variable-update', async (data) => {
const { variableId, field, value, timestamp, operationId } = data
const { workflowId: payloadWorkflowId, variableId, field, value, timestamp, operationId } = data
try {
const workflowId = await roomManager.getWorkflowIdForSocket(socket.id)
const sessionWorkflowId = await roomManager.getWorkflowIdForSocket(socket.id)
const session = await roomManager.getUserSession(socket.id)
if (!workflowId || !session) {
if (!sessionWorkflowId || !session) {
logger.debug(`Ignoring variable update: socket not connected to any workflow room`, {
socketId: socket.id,
hasWorkflowId: !!workflowId,
hasWorkflowId: !!sessionWorkflowId,
hasSession: !!session,
})
socket.emit('operation-forbidden', {
@@ -57,6 +57,24 @@ export function setupVariablesHandlers(socket: AuthenticatedSocket, roomManager:
return
}
const workflowId = payloadWorkflowId || sessionWorkflowId
if (payloadWorkflowId && payloadWorkflowId !== sessionWorkflowId) {
logger.warn('Workflow ID mismatch in variable update', {
payloadWorkflowId,
sessionWorkflowId,
socketId: socket.id,
})
if (operationId) {
socket.emit('operation-failed', {
operationId,
error: 'Workflow ID mismatch',
retryable: true,
})
}
return
}
const hasRoom = await roomManager.hasWorkflowRoom(workflowId)
if (!hasRoom) {
logger.debug(`Ignoring variable update: workflow room not found`, {
@@ -179,20 +197,17 @@ async function flushVariableUpdate(
if (updateSuccessful) {
// Broadcast to room excluding all senders (works cross-pod via Redis adapter)
const senderSocketIds = [...pending.opToSocket.values()]
const broadcastPayload = {
workflowId,
variableId,
field,
value,
timestamp,
}
if (senderSocketIds.length > 0) {
io.to(workflowId).except(senderSocketIds).emit('variable-update', {
variableId,
field,
value,
timestamp,
})
io.to(workflowId).except(senderSocketIds).emit('variable-update', broadcastPayload)
} else {
io.to(workflowId).emit('variable-update', {
variableId,
field,
value,
timestamp,
})
io.to(workflowId).emit('variable-update', broadcastPayload)
}
// Confirm all coalesced operationIds (io.to(socketId) works cross-pod)

View File

@@ -24,16 +24,40 @@ let emitWorkflowOperation:
| ((operation: string, target: string, payload: any, operationId?: string) => void)
| null = null
let emitSubblockUpdate:
| ((blockId: string, subblockId: string, value: any, operationId?: string) => void)
| ((
blockId: string,
subblockId: string,
value: any,
operationId: string | undefined,
workflowId: string
) => void)
| null = null
let emitVariableUpdate:
| ((variableId: string, field: string, value: any, operationId?: string) => void)
| ((
variableId: string,
field: string,
value: any,
operationId: string | undefined,
workflowId: string
) => void)
| null = null
export function registerEmitFunctions(
workflowEmit: (operation: string, target: string, payload: any, operationId?: string) => void,
subblockEmit: (blockId: string, subblockId: string, value: any, operationId?: string) => void,
variableEmit: (variableId: string, field: string, value: any, operationId?: string) => void,
subblockEmit: (
blockId: string,
subblockId: string,
value: any,
operationId: string | undefined,
workflowId: string
) => void,
variableEmit: (
variableId: string,
field: string,
value: any,
operationId: string | undefined,
workflowId: string
) => void,
workflowId: string | null
) {
emitWorkflowOperation = workflowEmit
@@ -196,14 +220,16 @@ export const useOperationQueueStore = create<OperationQueueState>((set, get) =>
}
if (!retryable) {
logger.debug('Operation marked as non-retryable, removing from queue', { operationId })
logger.error(
'Operation failed with non-retryable error - state out of sync, triggering offline mode',
{
operationId,
operation: operation.operation.operation,
target: operation.operation.target,
}
)
set((state) => ({
operations: state.operations.filter((op) => op.id !== operationId),
isProcessing: false,
}))
get().processNextOperation()
get().triggerOfflineMode()
return
}
@@ -305,11 +331,23 @@ export const useOperationQueueStore = create<OperationQueueState>((set, get) =>
const { operation: op, target, payload } = nextOperation.operation
if (op === 'subblock-update' && target === 'subblock') {
if (emitSubblockUpdate) {
emitSubblockUpdate(payload.blockId, payload.subblockId, payload.value, nextOperation.id)
emitSubblockUpdate(
payload.blockId,
payload.subblockId,
payload.value,
nextOperation.id,
nextOperation.workflowId
)
}
} else if (op === 'variable-update' && target === 'variable') {
if (emitVariableUpdate) {
emitVariableUpdate(payload.variableId, payload.field, payload.value, nextOperation.id)
emitVariableUpdate(
payload.variableId,
payload.field,
payload.value,
nextOperation.id,
nextOperation.workflowId
)
}
} else {
if (emitWorkflowOperation) {

View File

@@ -26,6 +26,49 @@ import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
/**
* Helper function to add a single block using batchAddBlocks.
* Provides a simpler interface for tests.
*/
function addBlock(
id: string,
type: string,
name: string,
position: { x: number; y: number },
data?: Record<string, unknown>,
parentId?: string,
extent?: 'parent',
blockProperties?: {
enabled?: boolean
horizontalHandles?: boolean
advancedMode?: boolean
triggerMode?: boolean
height?: number
}
) {
const blockData = {
...data,
...(parentId && { parentId, extent: extent || 'parent' }),
}
useWorkflowStore.getState().batchAddBlocks([
{
id,
type,
name,
position,
subBlocks: {},
outputs: {},
enabled: blockProperties?.enabled ?? true,
horizontalHandles: blockProperties?.horizontalHandles ?? true,
advancedMode: blockProperties?.advancedMode ?? false,
triggerMode: blockProperties?.triggerMode ?? false,
height: blockProperties?.height ?? 0,
data: blockData,
},
])
}
describe('workflow store', () => {
beforeEach(() => {
const localStorageMock = createMockStorage()
@@ -39,10 +82,8 @@ describe('workflow store', () => {
})
})
describe('addBlock', () => {
describe('batchAddBlocks (via addBlock helper)', () => {
it('should add a block with correct default properties', () => {
const { addBlock } = useWorkflowStore.getState()
addBlock('agent-1', 'agent', 'My Agent', { x: 100, y: 200 })
const { blocks } = useWorkflowStore.getState()
@@ -53,8 +94,6 @@ describe('workflow store', () => {
})
it('should add a block with parent relationship for containers', () => {
const { addBlock } = useWorkflowStore.getState()
addBlock('loop-1', 'loop', 'My Loop', { x: 0, y: 0 }, { loopType: 'for', count: 3 })
addBlock(
'child-1',
@@ -73,8 +112,6 @@ describe('workflow store', () => {
})
it('should add multiple blocks correctly', () => {
const { addBlock } = useWorkflowStore.getState()
addBlock('block-1', 'starter', 'Start', { x: 0, y: 0 })
addBlock('block-2', 'agent', 'Agent', { x: 200, y: 0 })
addBlock('block-3', 'function', 'Function', { x: 400, y: 0 })
@@ -87,8 +124,6 @@ describe('workflow store', () => {
})
it('should create a block with default properties when no blockProperties provided', () => {
const { addBlock } = useWorkflowStore.getState()
addBlock('agent1', 'agent', 'Test Agent', { x: 100, y: 200 })
const state = useWorkflowStore.getState()
@@ -105,8 +140,6 @@ describe('workflow store', () => {
})
it('should create a block with custom blockProperties for regular blocks', () => {
const { addBlock } = useWorkflowStore.getState()
addBlock(
'agent1',
'agent',
@@ -134,8 +167,6 @@ describe('workflow store', () => {
})
it('should create a loop block with custom blockProperties', () => {
const { addBlock } = useWorkflowStore.getState()
addBlock(
'loop1',
'loop',
@@ -163,8 +194,6 @@ describe('workflow store', () => {
})
it('should create a parallel block with custom blockProperties', () => {
const { addBlock } = useWorkflowStore.getState()
addBlock(
'parallel1',
'parallel',
@@ -192,8 +221,6 @@ describe('workflow store', () => {
})
it('should handle partial blockProperties (only some properties provided)', () => {
const { addBlock } = useWorkflowStore.getState()
addBlock(
'agent1',
'agent',
@@ -216,8 +243,6 @@ describe('workflow store', () => {
})
it('should handle blockProperties with parent relationships', () => {
const { addBlock } = useWorkflowStore.getState()
addBlock('loop1', 'loop', 'Parent Loop', { x: 0, y: 0 })
addBlock(
@@ -249,7 +274,7 @@ describe('workflow store', () => {
describe('batchRemoveBlocks', () => {
it('should remove a block', () => {
const { addBlock, batchRemoveBlocks } = useWorkflowStore.getState()
const { batchRemoveBlocks } = useWorkflowStore.getState()
addBlock('block-1', 'function', 'Test', { x: 0, y: 0 })
batchRemoveBlocks(['block-1'])
@@ -259,7 +284,7 @@ describe('workflow store', () => {
})
it('should remove connected edges when block is removed', () => {
const { addBlock, batchAddEdges, batchRemoveBlocks } = useWorkflowStore.getState()
const { batchAddEdges, batchRemoveBlocks } = useWorkflowStore.getState()
addBlock('block-1', 'starter', 'Start', { x: 0, y: 0 })
addBlock('block-2', 'function', 'Middle', { x: 200, y: 0 })
@@ -286,7 +311,7 @@ describe('workflow store', () => {
describe('batchAddEdges', () => {
it('should add an edge between two blocks', () => {
const { addBlock, batchAddEdges } = useWorkflowStore.getState()
const { batchAddEdges } = useWorkflowStore.getState()
addBlock('block-1', 'starter', 'Start', { x: 0, y: 0 })
addBlock('block-2', 'function', 'End', { x: 200, y: 0 })
@@ -298,7 +323,7 @@ describe('workflow store', () => {
})
it('should not add duplicate connections', () => {
const { addBlock, batchAddEdges } = useWorkflowStore.getState()
const { batchAddEdges } = useWorkflowStore.getState()
addBlock('block-1', 'starter', 'Start', { x: 0, y: 0 })
addBlock('block-2', 'function', 'End', { x: 200, y: 0 })
@@ -313,7 +338,7 @@ describe('workflow store', () => {
describe('batchRemoveEdges', () => {
it('should remove an edge by id', () => {
const { addBlock, batchAddEdges, batchRemoveEdges } = useWorkflowStore.getState()
const { batchAddEdges, batchRemoveEdges } = useWorkflowStore.getState()
addBlock('block-1', 'starter', 'Start', { x: 0, y: 0 })
addBlock('block-2', 'function', 'End', { x: 200, y: 0 })
@@ -335,7 +360,7 @@ describe('workflow store', () => {
describe('clear', () => {
it('should clear all blocks and edges', () => {
const { addBlock, batchAddEdges, clear } = useWorkflowStore.getState()
const { batchAddEdges, clear } = useWorkflowStore.getState()
addBlock('block-1', 'starter', 'Start', { x: 0, y: 0 })
addBlock('block-2', 'function', 'End', { x: 200, y: 0 })
@@ -351,7 +376,7 @@ describe('workflow store', () => {
describe('batchToggleEnabled', () => {
it('should toggle block enabled state', () => {
const { addBlock, batchToggleEnabled } = useWorkflowStore.getState()
const { batchToggleEnabled } = useWorkflowStore.getState()
addBlock('block-1', 'function', 'Test', { x: 0, y: 0 })
@@ -367,7 +392,7 @@ describe('workflow store', () => {
describe('duplicateBlock', () => {
it('should duplicate a block', () => {
const { addBlock, duplicateBlock } = useWorkflowStore.getState()
const { duplicateBlock } = useWorkflowStore.getState()
addBlock('original', 'agent', 'Original Agent', { x: 0, y: 0 })
@@ -391,7 +416,7 @@ describe('workflow store', () => {
describe('batchUpdatePositions', () => {
it('should update block position', () => {
const { addBlock, batchUpdatePositions } = useWorkflowStore.getState()
const { batchUpdatePositions } = useWorkflowStore.getState()
addBlock('block-1', 'function', 'Test', { x: 0, y: 0 })
@@ -404,7 +429,7 @@ describe('workflow store', () => {
describe('loop management', () => {
it('should regenerate loops when updateLoopCount is called', () => {
const { addBlock, updateLoopCount } = useWorkflowStore.getState()
const { updateLoopCount } = useWorkflowStore.getState()
addBlock(
'loop1',
@@ -428,7 +453,7 @@ describe('workflow store', () => {
})
it('should regenerate loops when updateLoopType is called', () => {
const { addBlock, updateLoopType } = useWorkflowStore.getState()
const { updateLoopType } = useWorkflowStore.getState()
addBlock(
'loop1',
@@ -453,7 +478,7 @@ describe('workflow store', () => {
})
it('should regenerate loops when updateLoopCollection is called', () => {
const { addBlock, updateLoopCollection } = useWorkflowStore.getState()
const { updateLoopCollection } = useWorkflowStore.getState()
addBlock(
'loop1',
@@ -476,7 +501,7 @@ describe('workflow store', () => {
})
it('should clamp loop count between 1 and 1000', () => {
const { addBlock, updateLoopCount } = useWorkflowStore.getState()
const { updateLoopCount } = useWorkflowStore.getState()
addBlock(
'loop1',
@@ -502,7 +527,7 @@ describe('workflow store', () => {
describe('parallel management', () => {
it('should regenerate parallels when updateParallelCount is called', () => {
const { addBlock, updateParallelCount } = useWorkflowStore.getState()
const { updateParallelCount } = useWorkflowStore.getState()
addBlock(
'parallel1',
@@ -525,7 +550,7 @@ describe('workflow store', () => {
})
it('should regenerate parallels when updateParallelCollection is called', () => {
const { addBlock, updateParallelCollection } = useWorkflowStore.getState()
const { updateParallelCollection } = useWorkflowStore.getState()
addBlock(
'parallel1',
@@ -552,7 +577,7 @@ describe('workflow store', () => {
})
it('should clamp parallel count between 1 and 20', () => {
const { addBlock, updateParallelCount } = useWorkflowStore.getState()
const { updateParallelCount } = useWorkflowStore.getState()
addBlock(
'parallel1',
@@ -575,7 +600,7 @@ describe('workflow store', () => {
})
it('should regenerate parallels when updateParallelType is called', () => {
const { addBlock, updateParallelType } = useWorkflowStore.getState()
const { updateParallelType } = useWorkflowStore.getState()
addBlock(
'parallel1',
@@ -601,7 +626,7 @@ describe('workflow store', () => {
describe('mode switching', () => {
it('should toggle advanced mode on a block', () => {
const { addBlock, toggleBlockAdvancedMode } = useWorkflowStore.getState()
const { toggleBlockAdvancedMode } = useWorkflowStore.getState()
addBlock('agent1', 'agent', 'Test Agent', { x: 0, y: 0 })
@@ -618,7 +643,7 @@ describe('workflow store', () => {
})
it('should preserve systemPrompt and userPrompt when switching modes', () => {
const { addBlock, toggleBlockAdvancedMode } = useWorkflowStore.getState()
const { toggleBlockAdvancedMode } = useWorkflowStore.getState()
const { setState: setSubBlockState } = useSubBlockStore
useWorkflowRegistry.setState({ activeWorkflowId: 'test-workflow' })
addBlock('agent1', 'agent', 'Test Agent', { x: 0, y: 0 })
@@ -651,7 +676,7 @@ describe('workflow store', () => {
})
it('should preserve memories when switching from advanced to basic mode', () => {
const { addBlock, toggleBlockAdvancedMode } = useWorkflowStore.getState()
const { toggleBlockAdvancedMode } = useWorkflowStore.getState()
const { setState: setSubBlockState } = useSubBlockStore
useWorkflowRegistry.setState({ activeWorkflowId: 'test-workflow' })
@@ -691,7 +716,7 @@ describe('workflow store', () => {
})
it('should handle mode switching when no subblock values exist', () => {
const { addBlock, toggleBlockAdvancedMode } = useWorkflowStore.getState()
const { toggleBlockAdvancedMode } = useWorkflowStore.getState()
useWorkflowRegistry.setState({ activeWorkflowId: 'test-workflow' })
@@ -753,7 +778,7 @@ describe('workflow store', () => {
describe('replaceWorkflowState', () => {
it('should replace entire workflow state', () => {
const { addBlock, replaceWorkflowState } = useWorkflowStore.getState()
const { replaceWorkflowState } = useWorkflowStore.getState()
addBlock('old-1', 'function', 'Old', { x: 0, y: 0 })
@@ -769,7 +794,7 @@ describe('workflow store', () => {
describe('getWorkflowState', () => {
it('should return current workflow state', () => {
const { addBlock, getWorkflowState } = useWorkflowStore.getState()
const { getWorkflowState } = useWorkflowStore.getState()
addBlock('block-1', 'starter', 'Start', { x: 0, y: 0 })
addBlock('block-2', 'function', 'End', { x: 200, y: 0 })
@@ -782,6 +807,343 @@ describe('workflow store', () => {
})
})
// Verifies that addBlock only recomputes the derived `loops`/`parallels` maps
// when the added block can actually affect them (a loop/parallel container, or
// a child whose parentId points at one) instead of on every insertion.
// NOTE(review): `addBlock` is used unqualified — assumes a helper bound in the
// enclosing test scope; confirm against the file's setup/beforeEach.
describe('loop/parallel regeneration optimization', () => {
  it('should NOT regenerate loops when adding a regular block without parentId', () => {
    // Add a loop first
    addBlock('loop-1', 'loop', 'Loop 1', { x: 0, y: 0 }, { loopType: 'for', count: 5 })
    const stateAfterLoop = useWorkflowStore.getState()
    const loopsAfterLoop = stateAfterLoop.loops
    // Add a regular block (no parentId)
    addBlock('agent-1', 'agent', 'Agent 1', { x: 200, y: 0 })
    const stateAfterAgent = useWorkflowStore.getState()
    // Loops should be unchanged (same content)
    expect(Object.keys(stateAfterAgent.loops)).toEqual(Object.keys(loopsAfterLoop))
    expect(stateAfterAgent.loops['loop-1'].nodes).toEqual(loopsAfterLoop['loop-1'].nodes)
  })
  it('should regenerate loops when adding a child to a loop', () => {
    // Add a loop
    addBlock('loop-1', 'loop', 'Loop 1', { x: 0, y: 0 }, { loopType: 'for', count: 5 })
    const stateAfterLoop = useWorkflowStore.getState()
    expect(stateAfterLoop.loops['loop-1'].nodes).toEqual([])
    // Add a child block to the loop (parentId in data plus explicit
    // parentId/extent arguments)
    addBlock(
      'child-1',
      'function',
      'Child 1',
      { x: 50, y: 50 },
      { parentId: 'loop-1' },
      'loop-1',
      'parent'
    )
    const stateAfterChild = useWorkflowStore.getState()
    // Loop should now include the child
    expect(stateAfterChild.loops['loop-1'].nodes).toContain('child-1')
  })
  it('should NOT regenerate parallels when adding a child to a loop', () => {
    // Add both a loop and a parallel
    addBlock('loop-1', 'loop', 'Loop 1', { x: 0, y: 0 }, { loopType: 'for', count: 5 })
    addBlock('parallel-1', 'parallel', 'Parallel 1', { x: 300, y: 0 }, { count: 3 })
    const stateAfterContainers = useWorkflowStore.getState()
    const parallelsAfterContainers = stateAfterContainers.parallels
    // Add a child to the loop (not the parallel)
    addBlock(
      'child-1',
      'function',
      'Child 1',
      { x: 50, y: 50 },
      { parentId: 'loop-1' },
      'loop-1',
      'parent'
    )
    const stateAfterChild = useWorkflowStore.getState()
    // Parallels should be unchanged — only the loop's membership changed
    expect(stateAfterChild.parallels['parallel-1'].nodes).toEqual(
      parallelsAfterContainers['parallel-1'].nodes
    )
  })
  it('should regenerate parallels when adding a child to a parallel', () => {
    // Add a parallel
    addBlock('parallel-1', 'parallel', 'Parallel 1', { x: 0, y: 0 }, { count: 3 })
    const stateAfterParallel = useWorkflowStore.getState()
    expect(stateAfterParallel.parallels['parallel-1'].nodes).toEqual([])
    // Add a child block to the parallel
    addBlock(
      'child-1',
      'function',
      'Child 1',
      { x: 50, y: 50 },
      { parentId: 'parallel-1' },
      'parallel-1',
      'parent'
    )
    const stateAfterChild = useWorkflowStore.getState()
    // Parallel should now include the child
    expect(stateAfterChild.parallels['parallel-1'].nodes).toContain('child-1')
  })
  it('should handle adding blocks in any order and produce correct final state', () => {
    // Add child BEFORE the loop (simulating undo-redo edge case)
    // Note: The child's parentId points to a loop that doesn't exist yet
    addBlock(
      'child-1',
      'function',
      'Child 1',
      { x: 50, y: 50 },
      { parentId: 'loop-1' },
      'loop-1',
      'parent'
    )
    // At this point, the child exists but loop doesn't
    const stateAfterChild = useWorkflowStore.getState()
    expect(stateAfterChild.blocks['child-1']).toBeDefined()
    expect(stateAfterChild.loops['loop-1']).toBeUndefined()
    // Now add the loop; regeneration must pick up the pre-existing child
    addBlock('loop-1', 'loop', 'Loop 1', { x: 0, y: 0 }, { loopType: 'for', count: 5 })
    // Final state should be correct - loop should include the child
    const finalState = useWorkflowStore.getState()
    expect(finalState.loops['loop-1']).toBeDefined()
    expect(finalState.loops['loop-1'].nodes).toContain('child-1')
  })
})
// Same regeneration-skipping contract as addBlock, but for the batch API:
// batchAddBlocks should only rebuild loops/parallels when the batch contains a
// container block or a child of one.
describe('batchAddBlocks optimization', () => {
  it('should NOT regenerate loops/parallels when adding regular blocks', () => {
    const { batchAddBlocks } = useWorkflowStore.getState()
    // Set up initial state with a loop (seeded directly via setState so the
    // pre-existing loop entry is exactly known)
    useWorkflowStore.setState({
      blocks: {
        'loop-1': {
          id: 'loop-1',
          type: 'loop',
          name: 'Loop 1',
          position: { x: 0, y: 0 },
          subBlocks: {},
          outputs: {},
          enabled: true,
          horizontalHandles: true,
          advancedMode: false,
          triggerMode: false,
          height: 0,
          data: { loopType: 'for', count: 5 },
        },
      },
      edges: [],
      loops: {
        'loop-1': {
          id: 'loop-1',
          nodes: [],
          iterations: 5,
          loopType: 'for',
          enabled: true,
        },
      },
      parallels: {},
    })
    const stateBefore = useWorkflowStore.getState()
    // Add regular blocks (no parentId, not loop/parallel type)
    batchAddBlocks([
      {
        id: 'agent-1',
        type: 'agent',
        name: 'Agent 1',
        position: { x: 200, y: 0 },
        subBlocks: {},
        outputs: {},
        enabled: true,
      },
      {
        id: 'function-1',
        type: 'function',
        name: 'Function 1',
        position: { x: 400, y: 0 },
        subBlocks: {},
        outputs: {},
        enabled: true,
      },
    ])
    const stateAfter = useWorkflowStore.getState()
    // Loops should be unchanged
    expect(stateAfter.loops['loop-1'].nodes).toEqual(stateBefore.loops['loop-1'].nodes)
  })
  it('should regenerate loops when batch adding a loop block', () => {
    const { batchAddBlocks } = useWorkflowStore.getState()
    batchAddBlocks([
      {
        id: 'loop-1',
        type: 'loop',
        name: 'Loop 1',
        position: { x: 0, y: 0 },
        subBlocks: {},
        outputs: {},
        enabled: true,
        data: { loopType: 'for', count: 5 },
      },
    ])
    const state = useWorkflowStore.getState()
    // Loop entry is derived from the block's data (count -> iterations)
    expect(state.loops['loop-1']).toBeDefined()
    expect(state.loops['loop-1'].iterations).toBe(5)
  })
  it('should regenerate loops when batch adding a child of a loop', () => {
    const { batchAddBlocks } = useWorkflowStore.getState()
    // First add a loop
    batchAddBlocks([
      {
        id: 'loop-1',
        type: 'loop',
        name: 'Loop 1',
        position: { x: 0, y: 0 },
        subBlocks: {},
        outputs: {},
        enabled: true,
        data: { loopType: 'for', count: 5 },
      },
    ])
    // Then add a child in a separate batch; its parentId should trigger
    // loop regeneration
    batchAddBlocks([
      {
        id: 'child-1',
        type: 'function',
        name: 'Child 1',
        position: { x: 50, y: 50 },
        subBlocks: {},
        outputs: {},
        enabled: true,
        data: { parentId: 'loop-1' },
      },
    ])
    const state = useWorkflowStore.getState()
    expect(state.loops['loop-1'].nodes).toContain('child-1')
  })
  it('should correctly handle batch adding loop and its children together', () => {
    const { batchAddBlocks } = useWorkflowStore.getState()
    // Add loop and child in same batch — parent lookup must see blocks added
    // earlier in the same call
    batchAddBlocks([
      {
        id: 'loop-1',
        type: 'loop',
        name: 'Loop 1',
        position: { x: 0, y: 0 },
        subBlocks: {},
        outputs: {},
        enabled: true,
        data: { loopType: 'for', count: 5 },
      },
      {
        id: 'child-1',
        type: 'function',
        name: 'Child 1',
        position: { x: 50, y: 50 },
        subBlocks: {},
        outputs: {},
        enabled: true,
        data: { parentId: 'loop-1' },
      },
    ])
    const state = useWorkflowStore.getState()
    expect(state.loops['loop-1']).toBeDefined()
    expect(state.loops['loop-1'].nodes).toContain('child-1')
  })
})
// Loop/parallel membership is derived from parentId relationships, not from
// edges — so edge add/remove operations must leave the derived maps untouched.
describe('edge operations should not affect loops/parallels', () => {
  it('should preserve loops when adding edges', () => {
    const { batchAddEdges } = useWorkflowStore.getState()
    // Create a loop with a child
    addBlock('loop-1', 'loop', 'Loop 1', { x: 0, y: 0 }, { loopType: 'for', count: 5 })
    addBlock(
      'child-1',
      'function',
      'Child 1',
      { x: 50, y: 50 },
      { parentId: 'loop-1' },
      'loop-1',
      'parent'
    )
    addBlock('external-1', 'function', 'External', { x: 300, y: 0 })
    const stateBeforeEdge = useWorkflowStore.getState()
    const loopsBeforeEdge = stateBeforeEdge.loops
    // Add an edge (should not affect loops)
    batchAddEdges([{ id: 'e1', source: 'loop-1', target: 'external-1' }])
    const stateAfterEdge = useWorkflowStore.getState()
    // Loops should be unchanged — both membership and iteration count
    expect(stateAfterEdge.loops['loop-1'].nodes).toEqual(loopsBeforeEdge['loop-1'].nodes)
    expect(stateAfterEdge.loops['loop-1'].iterations).toEqual(
      loopsBeforeEdge['loop-1'].iterations
    )
  })
  it('should preserve loops when removing edges', () => {
    const { batchAddEdges, batchRemoveEdges } = useWorkflowStore.getState()
    // Create a loop with a child and an edge
    addBlock('loop-1', 'loop', 'Loop 1', { x: 0, y: 0 }, { loopType: 'for', count: 5 })
    addBlock(
      'child-1',
      'function',
      'Child 1',
      { x: 50, y: 50 },
      { parentId: 'loop-1' },
      'loop-1',
      'parent'
    )
    addBlock('external-1', 'function', 'External', { x: 300, y: 0 })
    batchAddEdges([{ id: 'e1', source: 'loop-1', target: 'external-1' }])
    const stateBeforeRemove = useWorkflowStore.getState()
    const loopsBeforeRemove = stateBeforeRemove.loops
    // Remove the edge
    batchRemoveEdges(['e1'])
    const stateAfterRemove = useWorkflowStore.getState()
    // Loops should be unchanged
    expect(stateAfterRemove.loops['loop-1'].nodes).toEqual(loopsBeforeRemove['loop-1'].nodes)
  })
})
describe('updateBlockName', () => {
beforeEach(() => {
useWorkflowStore.setState({
@@ -791,8 +1153,6 @@ describe('workflow store', () => {
parallels: {},
})
const { addBlock } = useWorkflowStore.getState()
addBlock('block1', 'agent', 'Column AD', { x: 0, y: 0 })
addBlock('block2', 'function', 'Employee Length', { x: 100, y: 0 })
addBlock('block3', 'starter', 'Start', { x: 200, y: 0 })

View File

@@ -3,8 +3,6 @@ import type { Edge } from 'reactflow'
import { create } from 'zustand'
import { devtools } from 'zustand/middleware'
import { DEFAULT_DUPLICATE_OFFSET } from '@/lib/workflows/autolayout/constants'
import { getBlockOutputs } from '@/lib/workflows/blocks/block-outputs'
import { getBlock } from '@/blocks'
import type { SubBlockConfig } from '@/blocks/types'
import { normalizeName, RESERVED_BLOCK_NAMES } from '@/executor/constants'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
@@ -114,135 +112,6 @@ export const useWorkflowStore = create<WorkflowStore>()(
set({ needsRedeployment })
},
/**
 * Adds a single block to the workflow store.
 *
 * Two paths:
 *  - `loop`/`parallel` containers have no BlockConfig and are created with
 *    empty subBlocks/outputs.
 *  - Regular blocks get their subBlocks seeded from the block config's
 *    initial values; when a workflow is active, those values are mirrored
 *    into the sub-block store (best-effort — failures are logged, not thrown).
 *
 * `parentId`/`extent` (when given) are merged into the block's `data` so
 * container nesting is tracked. Both paths regenerate the derived
 * loops/parallels maps and bump the last-saved timestamp.
 *
 * @param blockProperties optional flags (enabled, handles, modes, height);
 *   each defaults via `??` so explicit `false`/`0` are preserved.
 */
addBlock: (
  id: string,
  type: string,
  name: string,
  position: Position,
  data?: Record<string, any>,
  parentId?: string,
  extent?: 'parent',
  blockProperties?: {
    enabled?: boolean
    horizontalHandles?: boolean
    advancedMode?: boolean
    triggerMode?: boolean
    height?: number
  }
) => {
  const blockConfig = getBlock(type)
  // For custom nodes like loop and parallel that don't use BlockConfig
  if (!blockConfig && (type === 'loop' || type === 'parallel')) {
    // Merge parentId and extent into data if provided
    const nodeData = {
      ...data,
      ...(parentId && { parentId, extent: extent || 'parent' }),
    }
    const newState = {
      blocks: {
        ...get().blocks,
        [id]: {
          id,
          type,
          name,
          position,
          subBlocks: {},
          outputs: {},
          enabled: blockProperties?.enabled ?? true,
          horizontalHandles: blockProperties?.horizontalHandles ?? true,
          advancedMode: blockProperties?.advancedMode ?? false,
          triggerMode: blockProperties?.triggerMode ?? false,
          height: blockProperties?.height ?? 0,
          data: nodeData,
        },
      },
      edges: [...get().edges],
      // Containers change loop/parallel structure, so regenerate both maps
      loops: get().generateLoopBlocks(),
      parallels: get().generateParallelBlocks(),
    }
    set(newState)
    get().updateLastSaved()
    return
  }
  // Unknown block type (and not a container): silently ignore
  if (!blockConfig) return
  // Merge parentId and extent into data for regular blocks
  const nodeData = {
    ...data,
    ...(parentId && { parentId, extent: extent || 'parent' }),
  }
  const subBlocks: Record<string, SubBlockState> = {}
  const subBlockStore = useSubBlockStore.getState()
  const activeWorkflowId = useWorkflowRegistry.getState().activeWorkflowId
  blockConfig.subBlocks.forEach((subBlock) => {
    const subBlockId = subBlock.id
    const initialValue = resolveInitialSubblockValue(subBlock)
    // Normalize undefined to null so the stored value shape is consistent
    const normalizedValue =
      initialValue !== undefined && initialValue !== null ? initialValue : null
    subBlocks[subBlockId] = {
      id: subBlockId,
      type: subBlock.type,
      value: normalizedValue as SubBlockState['value'],
    }
    if (activeWorkflowId) {
      try {
        // Clone before storing so the store never shares mutable references
        // with the config's initial value
        const valueToStore =
          initialValue !== undefined ? cloneInitialSubblockValue(initialValue) : null
        subBlockStore.setValue(id, subBlockId, valueToStore)
      } catch (error) {
        // Seeding is best-effort: log and continue creating the block
        logger.warn('Failed to seed sub-block store value during block creation', {
          blockId: id,
          subBlockId,
          error: error instanceof Error ? error.message : String(error),
        })
      }
    } else {
      logger.warn('Cannot seed sub-block store value: activeWorkflowId not available', {
        blockId: id,
        subBlockId,
      })
    }
  })
  // Get outputs based on trigger mode
  const triggerMode = blockProperties?.triggerMode ?? false
  const outputs = getBlockOutputs(type, subBlocks, triggerMode)
  const newState = {
    blocks: {
      ...get().blocks,
      [id]: {
        id,
        type,
        name,
        position,
        subBlocks,
        outputs,
        enabled: blockProperties?.enabled ?? true,
        horizontalHandles: blockProperties?.horizontalHandles ?? true,
        advancedMode: blockProperties?.advancedMode ?? false,
        triggerMode: triggerMode,
        height: blockProperties?.height ?? 0,
        layout: {},
        data: nodeData,
      },
    },
    edges: [...get().edges],
    // Regenerate derived maps in case this block is a container child
    loops: get().generateLoopBlocks(),
    parallels: get().generateParallelBlocks(),
  }
  set(newState)
  get().updateLastSaved()
},
updateNodeDimensions: (id: string, dimensions: { width: number; height: number }) => {
set((state) => {
const block = state.blocks[id]
@@ -386,11 +255,27 @@ export const useWorkflowStore = create<WorkflowStore>()(
}
}
// Only regenerate loops/parallels if we're adding blocks that affect them:
// - Adding a loop/parallel container block
// - Adding a block as a child of a loop/parallel (has parentId pointing to one)
const needsLoopRegeneration = blocks.some(
(block) =>
block.type === 'loop' ||
(block.data?.parentId && newBlocks[block.data.parentId]?.type === 'loop')
)
const needsParallelRegeneration = blocks.some(
(block) =>
block.type === 'parallel' ||
(block.data?.parentId && newBlocks[block.data.parentId]?.type === 'parallel')
)
set({
blocks: newBlocks,
edges: newEdges,
loops: generateLoopBlocks(newBlocks),
parallels: generateParallelBlocks(newBlocks),
loops: needsLoopRegeneration ? generateLoopBlocks(newBlocks) : { ...get().loops },
parallels: needsParallelRegeneration
? generateParallelBlocks(newBlocks)
: { ...get().parallels },
})
if (subBlockValues && Object.keys(subBlockValues).length > 0) {
@@ -529,8 +414,9 @@ export const useWorkflowStore = create<WorkflowStore>()(
set({
blocks: { ...blocks },
edges: newEdges,
loops: generateLoopBlocks(blocks),
parallels: generateParallelBlocks(blocks),
// Edges don't affect loop/parallel structure (determined by parentId), skip regeneration
loops: { ...get().loops },
parallels: { ...get().parallels },
})
get().updateLastSaved()
@@ -544,8 +430,9 @@ export const useWorkflowStore = create<WorkflowStore>()(
set({
blocks: { ...blocks },
edges: newEdges,
loops: generateLoopBlocks(blocks),
parallels: generateParallelBlocks(blocks),
// Edges don't affect loop/parallel structure (determined by parentId), skip regeneration
loops: { ...get().loops },
parallels: { ...get().parallels },
})
get().updateLastSaved()

View File

@@ -175,22 +175,6 @@ export interface WorkflowState {
}
export interface WorkflowActions {
addBlock: (
id: string,
type: string,
name: string,
position: Position,
data?: Record<string, any>,
parentId?: string,
extent?: 'parent',
blockProperties?: {
enabled?: boolean
horizontalHandles?: boolean
advancedMode?: boolean
triggerMode?: boolean
height?: number
}
) => void
updateNodeDimensions: (id: string, dimensions: { width: number; height: number }) => void
batchUpdateBlocksWithParent: (
updates: Array<{

View File

@@ -0,0 +1,55 @@
import type { EnrichCheckCreditsParams, EnrichCheckCreditsResponse } from '@/tools/enrich/types'
import type { ToolConfig } from '@/tools/types'
/**
 * Tool that queries the Enrich auth endpoint to report the account's credit
 * allocation, consumption, and remaining balance.
 */
export const checkCreditsTool: ToolConfig<EnrichCheckCreditsParams, EnrichCheckCreditsResponse> = {
  id: 'enrich_check_credits',
  name: 'Enrich Check Credits',
  description: 'Check your Enrich API credit usage and remaining balance.',
  version: '1.0.0',

  params: {
    apiKey: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'Enrich API key',
    },
  },

  request: {
    url: 'https://api.enrich.so/v1/api/auth',
    method: 'GET',
    headers: (params) => {
      // Enrich authenticates with a bearer token built from the raw API key.
      const requestHeaders = {
        Authorization: `Bearer ${params.apiKey}`,
        'Content-Type': 'application/json',
      }
      return requestHeaders
    },
  },

  transformResponse: async (response: Response) => {
    const payload = await response.json()
    // Default any missing counter to 0 so consumers always receive numbers.
    const output = {
      totalCredits: payload.total_credits ?? 0,
      creditsUsed: payload.credits_used ?? 0,
      creditsRemaining: payload.credits_remaining ?? 0,
    }
    return { success: true, output }
  },

  outputs: {
    totalCredits: {
      type: 'number',
      description: 'Total credits allocated to the account',
    },
    creditsUsed: {
      type: 'number',
      description: 'Credits consumed so far',
    },
    creditsRemaining: {
      type: 'number',
      description: 'Available credits remaining',
    },
  },
}

View File

@@ -0,0 +1,143 @@
import type { EnrichCompanyFundingParams, EnrichCompanyFundingResponse } from '@/tools/enrich/types'
import type { ToolConfig } from '@/tools/types'
/**
 * Tool that looks up a company's funding history, traffic metrics, and
 * executive roster from the Enrich "company-funding-plus" endpoint by domain.
 *
 * The response mapper accepts both camelCase and snake_case field names from
 * the API and normalizes them into a single camelCase output shape; missing
 * scalar fields become null and missing lists become [].
 */
export const companyFundingTool: ToolConfig<
  EnrichCompanyFundingParams,
  EnrichCompanyFundingResponse
> = {
  id: 'enrich_company_funding',
  name: 'Enrich Company Funding',
  description:
    'Retrieve company funding history, traffic metrics, and executive information by domain.',
  version: '1.0.0',
  params: {
    apiKey: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'Enrich API key',
    },
    domain: {
      type: 'string',
      required: true,
      visibility: 'user-or-llm',
      description: 'Company domain (e.g., example.com)',
    },
  },
  request: {
    // Domain is trimmed and passed as a query parameter; URL API handles escaping
    url: (params) => {
      const url = new URL('https://api.enrich.so/v1/api/company-funding-plus')
      url.searchParams.append('domain', params.domain.trim())
      return url.toString()
    },
    method: 'GET',
    headers: (params) => ({
      Authorization: `Bearer ${params.apiKey}`,
      'Content-Type': 'application/json',
    }),
  },
  transformResponse: async (response: Response) => {
    const data = await response.json()
    // Payload may be wrapped in a `data` envelope or be the top-level object
    const resultData = data.data ?? data
    // Each round: prefer camelCase keys, fall back to snake_case, then defaults
    const fundingRounds =
      (resultData.fundingRounds ?? resultData.funding_rounds)?.map((round: any) => ({
        roundType: round.roundType ?? round.round_type ?? '',
        amount: round.amount ?? null,
        date: round.date ?? null,
        investors: round.investors ?? [],
      })) ?? []
    // Executives may expose either `name` or `fullName`
    const executives = (resultData.executives ?? []).map((exec: any) => ({
      name: exec.name ?? exec.fullName ?? '',
      title: exec.title ?? '',
    }))
    return {
      success: true,
      output: {
        legalName: resultData.legalName ?? resultData.legal_name ?? null,
        employeeCount: resultData.employeeCount ?? resultData.employee_count ?? null,
        headquarters: resultData.headquarters ?? null,
        industry: resultData.industry ?? null,
        totalFundingRaised:
          resultData.totalFundingRaised ?? resultData.total_funding_raised ?? null,
        fundingRounds,
        monthlyVisits: resultData.monthlyVisits ?? resultData.monthly_visits ?? null,
        trafficChange: resultData.trafficChange ?? resultData.traffic_change ?? null,
        itSpending: resultData.itSpending ?? resultData.it_spending ?? null,
        executives,
      },
    }
  },
  outputs: {
    legalName: {
      type: 'string',
      description: 'Legal company name',
      optional: true,
    },
    employeeCount: {
      type: 'number',
      description: 'Number of employees',
      optional: true,
    },
    headquarters: {
      type: 'string',
      description: 'Headquarters location',
      optional: true,
    },
    industry: {
      type: 'string',
      description: 'Industry',
      optional: true,
    },
    totalFundingRaised: {
      type: 'number',
      description: 'Total funding raised',
      optional: true,
    },
    fundingRounds: {
      type: 'array',
      description: 'Funding rounds',
      items: {
        type: 'object',
        properties: {
          roundType: { type: 'string', description: 'Round type' },
          amount: { type: 'number', description: 'Amount raised' },
          date: { type: 'string', description: 'Date' },
          investors: { type: 'array', description: 'Investors' },
        },
      },
    },
    monthlyVisits: {
      type: 'number',
      description: 'Monthly website visits',
      optional: true,
    },
    trafficChange: {
      type: 'number',
      description: 'Traffic change percentage',
      optional: true,
    },
    itSpending: {
      type: 'number',
      description: 'Estimated IT spending in USD',
      optional: true,
    },
    executives: {
      type: 'array',
      description: 'Executive team',
      items: {
        type: 'object',
        properties: {
          name: { type: 'string', description: 'Name' },
          title: { type: 'string', description: 'Title' },
        },
      },
    },
  },
}

Some files were not shown because too many files have changed in this diff Show More