Compare commits

..

45 Commits

Author SHA1 Message Date
Waleed
8528fbe2d2 v0.5.78: billing fixes, mcp timeout increase, reactquery migrations, updated tool param visibilities, DSPy and Google Maps integrations 2026-01-31 13:48:22 -08:00
Waleed
31fdd2be13 v0.5.77: room manager redis migration, tool outputs, ui fixes 2026-01-30 14:57:17 -08:00
Waleed
028bc652c2 v0.5.76: posthog improvements, readme updates 2026-01-29 00:13:19 -08:00
Waleed
c6bf5cd58c v0.5.75: search modal overhaul, helm chart updates, run from block, terminal and visual debugging improvements 2026-01-28 22:54:13 -08:00
Vikhyath Mondreti
11dc18a80d v0.5.74: autolayout improvements, clerk integration, auth enforcements 2026-01-27 20:37:39 -08:00
Waleed
ab4e9dc72f v0.5.73: ci, helm updates, kb, ui fixes, note block enhancements 2026-01-26 22:04:35 -08:00
Vikhyath Mondreti
1c58c35bd8 v0.5.72: azure connection string, supabase improvement, multitrigger resolution, docs quick reference 2026-01-25 23:42:27 -08:00
Waleed
d63a5cb504 v0.5.71: ux, ci improvements, docs updates 2026-01-25 03:08:08 -08:00
Waleed
8bd5d41723 v0.5.70: router fix, anthropic agent response format adherence 2026-01-24 20:57:02 -08:00
Waleed
c12931bc50 v0.5.69: kb upgrades, blog, copilot improvements, auth consolidation (#2973)
* fix(subflows): tag dropdown + resolution logic (#2949)

* fix(subflows): tag dropdown + resolution logic

* fixes;

* revert parallel change

* chore(deps): bump posthog-js to 1.334.1 (#2948)

* fix(idempotency): add conflict target to atomicallyClaimDb query + remove redundant db namespace tracking (#2950)

* fix(idempotency): add conflict target to atomicallyClaimDb query

* delete needs to account for namespace

* simplify namespace filtering logic

* fix cleanup

* consistent target

* improvement(kb): add document filtering, select all, and React Query migration (#2951)

* improvement(kb): add document filtering, select all, and React Query migration

* test(kb): update tests for enabledFilter and removed userId params

* fix(kb): remove non-null assertion, add explicit guard

* improvement(logs): trace span, details (#2952)

* improvement(action-bar): ordering

* improvement(logs): details, trace span

* feat(blog): v0.5 release post (#2953)

* feat(blog): v0.5 post

* improvement(blog): simplify title and remove code block header

- Simplified blog title from Introducing Sim Studio v0.5 to Introducing Sim v0.5
- Removed language label header and copy button from code blocks for cleaner appearance

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>

* ack PR comments

* small styling improvements

* created system to create post-specific components

* updated component

* cache invalidation

---------

Co-authored-by: Claude Opus 4.5 <noreply@anthropic.com>

* feat(admin): add credits endpoint to issue credits to users (#2954)

* feat(admin): add credits endpoint to issue credits to users

* fix(admin): use existing credit functions and handle enterprise seats

* fix(admin): reject NaN and Infinity in amount validation

* styling

* fix(admin): validate userId and email are strings

* improvement(copilot): fast mode, subagent tool responses and allow preferences (#2955)

* Improvements

* Fix actions mapping

* Remove console logs

* fix(billing): handle missing userStats and prevent crashes (#2956)

* fix(billing): handle missing userStats and prevent crashes

* fix(billing): correct import path for getFilledPillColor

* fix(billing): add Number.isFinite check to lastPeriodCost

* fix(logs): refresh logic to refresh logs details (#2958)

* fix(security): add authentication and input validation to API routes (#2959)

* fix(security): add authentication and input validation to API routes

* moved utils

* remove extraneous comments

* removed unused dep

* improvement(helm): add internal ingress support and same-host path consolidation (#2960)

* improvement(helm): add internal ingress support and same-host path consolidation

* improvement(helm): clean up ingress template comments

Simplify verbose inline Helm comments and section dividers to match the
minimal style used in services.yaml.

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>

* fix(helm): add missing copilot path consolidation for realtime host

When copilot.host equals realtime.host but differs from app.host,
copilot paths were not being routed. Added logic to consolidate
copilot paths into the realtime rule for this scenario.
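
For illustration, here is a rough TypeScript sketch of the consolidation decision described above; the real logic lives in the chart's Helm ingress template, and all names here are hypothetical.

```typescript
// Illustration of the host-consolidation rule described above (hypothetical names;
// the actual logic is Helm template code in the chart's ingress template).
interface HostConfig {
  app: string
  realtime: string
  copilot: string
}

function copilotPathsGoToRealtimeRule(hosts: HostConfig): boolean {
  // Consolidate copilot paths into the realtime rule only when copilot shares
  // the realtime host but not the app host.
  return hosts.copilot === hosts.realtime && hosts.copilot !== hosts.app
}

// Example: copilot and realtime share a host, app is separate.
console.log(
  copilotPathsGoToRealtimeRule({
    app: 'app.example.com',
    realtime: 'ws.example.com',
    copilot: 'ws.example.com',
  })
) // true
```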

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>

* improvement(helm): follow ingress best practices

- Remove orphan comments that appeared when services were disabled
- Add documentation about path ordering requirements
- Paths rendered in order: realtime, copilot, app (specific before catch-all)
- Clean template output matching industry Helm chart standards

---------

Co-authored-by: Claude Opus 4.5 <noreply@anthropic.com>

* feat(blog): enterprise post (#2961)

* feat(blog): enterprise post

* added more images, styling

* more content

* updated v0-5 post

* remove unused transition

---------

Co-authored-by: Vikhyath Mondreti <vikhyath@simstudio.ai>

* fix(envvars): resolution standardized (#2957)

* fix(envvars): resolution standardized

* remove comments

* address bugbot

* fix highlighting for env vars

* remove comments

* address greptile

* address bugbot

* fix(copilot): mask credentials fix (#2963)

* Fix copilot masking

* Clean up

* Lint

* improvement(webhooks): remove dead code (#2965)

* fix(webhooks): subscription recreation path

* improvement(webhooks): remove dead code

* fix tests

* address bugbot comments

* fix restoration edge case

* fix more edge cases

* address bugbot comments

* fix gmail polling

* add warnings for UI indication for credential sets

* fix(preview): subblock values (#2969)

* fix(child-workflow): nested spans handoff (#2966)

* fix(child-workflow): nested spans handoff

* remove overly defensive programming

* update type check

* type more code

* remove more dead code

* address bugbot comments

* fix(security): restrict API key access on internal-only routes (#2964)

* fix(security): restrict API key access on internal-only routes

* test(security): update function execute tests for checkInternalAuth

* updated agent handler

* move session check higher in checkSessionOrInternalAuth

* extracted duplicate code into helper for resolving user from jwt

* fix(copilot): update copilot chat title (#2968)

* fix(hitl): fix condition blocks after hitl (#2967)

* fix(notes): ghost edges (#2970)

* fix(notes): ghost edges

* fix deployed state fallback

* fallback

* remove UI level checks

* annotation missing from autoconnect source check

* improvement(docs): loop and parallel var reference syntax (#2975)

* fix(blog): slash actions description (#2976)

* improvement(docs): loop and parallel var reference syntax

* fix(blog): slash actions description

* fix(auth): copilot routes (#2977)

* Fix copilot auth

* Fix

* Fix

* Fix

* fix(copilot): fix edit summary for loops/parallels (#2978)

* fix(integrations): hide from tool bar (#2544)

* fix(landing): ui (#2979)

* fix(edge-validation): race condition on collaborative add (#2980)

* fix(variables): boolean type support and input improvements (#2981)

* fix(variables): boolean type support and input improvements

* fix formatting

---------

Co-authored-by: Vikhyath Mondreti <vikhyathvikku@gmail.com>
Co-authored-by: Emir Karabeg <78010029+emir-karabeg@users.noreply.github.com>
Co-authored-by: Claude Opus 4.5 <noreply@anthropic.com>
Co-authored-by: Siddharth Ganesan <33737564+Sg312@users.noreply.github.com>
Co-authored-by: Vikhyath Mondreti <vikhyath@simstudio.ai>
2026-01-24 14:29:53 -08:00
Waleed
e9c4251c1c v0.5.68: router block reasoning, executor improvements, variable resolution consolidation, helm updates (#2946)
* improvement(workflow-item): stabilize avatar layout and fix name truncation (#2939)

* improvement(workflow-item): stabilize avatar layout and fix name truncation

* fix(avatars): revert overflow bg to hardcoded color for contrast

* fix(executor): stop parallel execution when block errors (#2940)

* improvement(helm): add per-deployment extraVolumes support (#2942)

* fix(gmail): expose messageId field in read email block (#2943)

* fix(resolver): consolidate reference resolution (#2941)

* fix(resolver): consolidate code to resolve references

* fix edge cases

* use already formatted error

* fix multi index

* fix backwards compat reachability

* handle backwards compatibility accurately

* use shared constant correctly

* feat(router): expose reasoning output in router v2 block (#2945)

* fix(copilot): always allow, credential masking (#2947)

* Fix always allow, credential validation

* Credential masking

* Autoload

* fix(executor): handle condition dead-end branches in loops (#2944)

---------

Co-authored-by: Vikhyath Mondreti <vikhyathvikku@gmail.com>
Co-authored-by: Siddharth Ganesan <33737564+Sg312@users.noreply.github.com>
2026-01-22 13:48:15 -08:00
Waleed
cc2be33d6b v0.5.67: loading, password reset, ui improvements, helm updates (#2928)
* fix(zustand): updated to useShallow from deprecated createWithEqualityFn (#2919)

* fix(logger): use direct env access for webpack inlining (#2920)

* fix(notifications): text overflow with line-clamp (#2921)

* chore(helm): add env vars for Vertex AI, orgs, and telemetry (#2922)

* fix(auth): improve reset password flow and consolidate brand detection (#2924)

* fix(auth): improve reset password flow and consolidate brand detection

* fix(auth): set errorHandled for EMAIL_NOT_VERIFIED to prevent duplicate error

* fix(auth): clear success message on login errors

* chore(auth): fix import order per lint

* fix(action-bar): duplicate subflows with children (#2923)

* fix(action-bar): duplicate subflows with children

* fix(action-bar): add validateTriggerPaste for subflow duplicate

* fix(resolver): agent response format, input formats, root level (#2925)

* fix(resolvers): agent response format, input formats, root level

* fix response block initial seeding

* fix tests

* fix(messages-input): fix cursor alignment and auto-resize with overlay (#2926)

* fix(messages-input): fix cursor alignment and auto-resize with overlay

* fixed remaining zustand warnings

* fix(stores): remove dead code causing log spam on startup (#2927)

* fix(stores): remove dead code causing log spam on startup

* fix(stores): replace custom tools zustand store with react query cache

* improvement(ui): use BrandedButton and BrandedLink components (#2930)

- Refactor auth forms to use BrandedButton component
- Add BrandedLink component for changelog page
- Reduce code duplication in login, signup, reset-password forms
- Update star count default value

* fix(custom-tools): remove unsafe title fallback in getCustomTool (#2929)

* fix(custom-tools): remove unsafe title fallback in getCustomTool

* fix(custom-tools): restore title fallback in getCustomTool lookup

Custom tools are referenced by title (custom_${title}), not database ID.
The title fallback is required for client-side tool resolution to work.
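
For illustration, a minimal TypeScript sketch of the fallback behavior described above — the store shape and function body are hypothetical, not the actual getCustomTool implementation.

```typescript
// Illustrative only: resolve a custom tool by database ID, falling back to the
// `custom_${title}` reference format described above. Names are hypothetical.
interface CustomTool {
  id: string
  title: string
}

function getCustomTool(tools: CustomTool[], ref: string): CustomTool | undefined {
  // Primary lookup: database ID.
  const byId = tools.find((t) => t.id === ref)
  if (byId) return byId
  // Fallback: tools are referenced client-side as `custom_${title}`.
  return tools.find((t) => `custom_${t.title}` === ref)
}
```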

* fix(null-bodies): empty bodies handling (#2931)

* fix(null-statuses): empty bodies handling

* address bugbot comment

* fix(token-refresh): microsoft, notion, x, linear (#2933)

* fix(microsoft): proactive refresh needed

* fix(x): missing token refresh flag

* notion and linear missing flag too

* address bugbot comment

* fix(auth): handle EMAIL_NOT_VERIFIED in onError callback (#2932)

* fix(auth): handle EMAIL_NOT_VERIFIED in onError callback

* refactor(auth): extract redirectToVerify helper to reduce duplication

* fix(workflow-selector): use dedicated selector for workflow dropdown (#2934)

* feat(workflow-block): preview (#2935)

* improvement(copilot): tool configs to show nested props (#2936)

* fix(auth): add genericOAuth providers to trustedProviders (#2937)

---------

Co-authored-by: Vikhyath Mondreti <vikhyathvikku@gmail.com>
Co-authored-by: Emir Karabeg <78010029+emir-karabeg@users.noreply.github.com>
2026-01-21 22:53:25 -08:00
Vikhyath Mondreti
45371e521e v0.5.66: external http requests fix, ring highlighting 2026-01-21 02:55:39 -08:00
Waleed
0ce0f98aa5 v0.5.65: gemini updates, textract integration, ui updates (#2909)
* fix(google): wrap primitive tool responses for Gemini API compatibility (#2900)

* fix(canonical): copilot path + update parent (#2901)

* fix(rss): add top-level title, link, pubDate fields to RSS trigger output (#2902)

* fix(rss): add top-level title, link, pubDate fields to RSS trigger output

* fix(imap): add top-level fields to IMAP trigger output

* improvement(browseruse): add profile id param (#2903)

* improvement(browseruse): add profile id param

* make request a stub since we have directExec

* improvement(executor): upgraded abort controller to handle aborts for loops and parallels (#2880)

* improvement(executor): upgraded abort controller to handle aborts for loops and parallels

* comments

* improvement(files): update execution for passing base64 strings (#2906)

* progress

* improvement(execution): update execution for passing base64 strings

* fix types

* cleanup comments

* path security vuln

* reject promise correctly

* fix redirect case

* remove proxy routes

* fix tests

* use ipaddr

* feat(tools): added textract, added v2 for mistral, updated tag dropdown (#2904)

* feat(tools): added textract

* cleanup

* ack pr comments

* reorder

* removed upload for textract async version

* fix additional fields dropdown in editor, update parser to leave validation to be done on the server

* added mistral v2, files v2, and finalized textract

* updated the rest of the old file patterns, updated mistral outputs for v2

* updated tag dropdown to parse non-operation fields as well

* updated extension finder

* cleanup

* added description for inputs to workflow

* use helper for internal route check

* fix tag dropdown merge conflict change

* remove duplicate code

---------

Co-authored-by: Vikhyath Mondreti <vikhyath@simstudio.ai>

* fix(ui): change add inputs button to match output selector (#2907)

* fix(canvas): removed invite to workspace from canvas popover (#2908)

* fix(canvas): removed invite to workspace

* removed unused props

* fix(copilot): legacy tool display names (#2911)

* fix(a2a): canonical merge (#2912)

* fix canonical merge

* fix empty array case

* fix(change-detection): copilot diffs have extra field (#2913)

* improvement(logs): improved logs ui bugs, added subflow disable UI (#2910)

* improvement(logs): improved logs ui bugs, added subflow disable UI

* added duplicate to action bar for subflows

* feat(broadcast): email v0.5 (#2905)

---------

Co-authored-by: Vikhyath Mondreti <vikhyathvikku@gmail.com>
Co-authored-by: Vikhyath Mondreti <vikhyath@simstudio.ai>
Co-authored-by: Emir Karabeg <78010029+emir-karabeg@users.noreply.github.com>
2026-01-20 23:54:55 -08:00
Waleed
dff1c9d083 v0.5.64: unsubscribe, search improvements, metrics, additional SSO configuration 2026-01-20 00:34:11 -08:00
Vikhyath Mondreti
b09f683072 v0.5.63: ui and performance improvements, more google tools 2026-01-18 15:22:42 -08:00
Vikhyath Mondreti
a8bb0db660 v0.5.62: webhook bug fixes, seeding default subblock values, block selection fixes 2026-01-16 20:27:06 -08:00
Waleed
af82820a28 v0.5.61: webhook improvements, workflow controls, react query for deployment status, chat fixes, reducto and pulse OCR, linear fixes 2026-01-16 18:06:23 -08:00
Waleed
4372841797 v0.5.60: invitation flow improvements, chat fixes, a2a improvements, additional copilot actions 2026-01-15 00:02:18 -08:00
Waleed
5e8c843241 v0.5.59: a2a support, documentation 2026-01-13 13:21:21 -08:00
Waleed
7bf3d73ee6 v0.5.58: export folders, new tools, permissions groups enhancements 2026-01-13 00:56:59 -08:00
Vikhyath Mondreti
7ffc11a738 v0.5.57: subagents, context menu improvements, bug fixes 2026-01-11 11:38:40 -08:00
Waleed
be578e2ed7 v0.5.56: batch operations, access control and permission groups, billing fixes 2026-01-10 00:31:34 -08:00
Waleed
f415e5edc4 v0.5.55: polling groups, bedrock provider, devcontainer fixes, workflow preview enhancements 2026-01-08 23:36:56 -08:00
Waleed
13a6e6c3fa v0.5.54: seo, model blacklist, helm chart updates, fireflies integration, autoconnect improvements, billing fixes 2026-01-07 16:09:45 -08:00
Waleed
f5ab7f21ae v0.5.53: hotkey improvements, added redis fallback, fixes for workflow tool 2026-01-06 23:34:52 -08:00
Waleed
bfb6fffe38 v0.5.52: new port-based router block, combobox expression and variable support 2026-01-06 16:14:10 -08:00
Waleed
4fbec0a43f v0.5.51: triggers, kb, condition block improvements, supabase and grain integration updates 2026-01-06 14:26:46 -08:00
Waleed
585f5e365b v0.5.50: import improvements, ui upgrades, kb styling and performance improvements 2026-01-05 00:35:55 -08:00
Waleed
3792bdd252 v0.5.49: hitl improvements, new email styles, imap trigger, logs context menu (#2672)
* feat(logs-context-menu): consolidated logs utils and types, added logs record context menu (#2659)

* feat(email): welcome email; improvement(emails): ui/ux (#2658)

* feat(email): welcome email; improvement(emails): ui/ux

* improvement(emails): links, accounts, preview

* refactor(emails): file structure and wrapper components

* added envvar for personal emails sent, added isHosted gate

* fixed failing tests, added env mock

* fix: removed comment

---------

Co-authored-by: waleed <walif6@gmail.com>

* fix(logging): hitl + trigger dev crash protection (#2664)

* hitl gaps

* deal with trigger worker crashes

* cleanup import structure

* feat(imap): added support for imap trigger (#2663)

* feat(tools): added support for imap trigger

* feat(imap): added parity, tested

* ack PR comments

* final cleanup

* feat(i18n): update translations (#2665)

Co-authored-by: waleedlatif1 <waleedlatif1@users.noreply.github.com>

* fix(grain): updated grain trigger to auto-establish trigger (#2666)

Co-authored-by: aadamgough <adam@sim.ai>

* feat(admin): routes to manage deployments (#2667)

* feat(admin): routes to manage deployments

* fix naming of deployed by

* feat(time-picker): added timepicker emcn component, added to playground, added searchable prop for dropdown, added more timezones for schedule, updated license and notice date (#2668)

* feat(time-picker): added timepicker emcn component, added to playground, added searchable prop for dropdown, added more timezones for schedule, updated license and notice date

* removed unused params, cleaned up redundant utils

* improvement(invite): aligned styling (#2669)

* improvement(invite): aligned with rest of app

* fix(invite): error handling

* fix: addressed comments

---------

Co-authored-by: Emir Karabeg <78010029+emir-karabeg@users.noreply.github.com>
Co-authored-by: Vikhyath Mondreti <vikhyathvikku@gmail.com>
Co-authored-by: waleedlatif1 <waleedlatif1@users.noreply.github.com>
Co-authored-by: Adam Gough <77861281+aadamgough@users.noreply.github.com>
Co-authored-by: aadamgough <adam@sim.ai>
2026-01-03 13:19:18 -08:00
Waleed
eb5d1f3e5b v0.5.48: copy-paste workflow blocks, docs updates, mcp tool fixes 2025-12-31 18:00:04 -08:00
Waleed
54ab82c8dd v0.5.47: deploy workflow as mcp, kb chunks tokenizer, UI improvements, jira service management tools 2025-12-30 23:18:58 -08:00
Waleed
f895bf469b v0.5.46: build improvements, greptile, light mode improvements 2025-12-29 02:17:52 -08:00
Waleed
dd3209af06 v0.5.45: light mode fixes, realtime usage indicator, docker build improvements 2025-12-27 19:57:42 -08:00
Waleed
b6ba3b50a7 v0.5.44: keyboard shortcuts, autolayout, light mode, byok, testing improvements 2025-12-26 21:25:19 -08:00
Waleed
b304233062 v0.5.43: export logs, circleback, grain, vertex, code hygiene, schedule improvements 2025-12-23 19:19:18 -08:00
Vikhyath Mondreti
57e4b49bd6 v0.5.42: fix memory migration 2025-12-23 01:24:54 -08:00
Vikhyath Mondreti
e12dd204ed v0.5.41: memory fixes, copilot improvements, knowledgebase improvements, LLM providers standardization 2025-12-23 00:15:18 -08:00
Vikhyath Mondreti
3d9d9cbc54 v0.5.40: supabase ops to allow non-public schemas, jira uuid 2025-12-21 22:28:05 -08:00
Waleed
0f4ec962ad v0.5.39: notion, workflow variables fixes 2025-12-20 20:44:00 -08:00
Waleed
4827866f9a v0.5.38: snap to grid, copilot ux improvements, billing line items 2025-12-20 17:24:38 -08:00
Waleed
3e697d9ed9 v0.5.37: redaction utils consolidation, logs updates, autoconnect improvements, additional kb tag types 2025-12-19 22:31:55 -08:00
Martin Yankov
4431a1a484 fix(helm): add custom egress rules to realtime network policy (#2481)
The realtime service network policy was missing the custom egress rules section
that allows configuration of additional egress rules via values.yaml. This caused
the realtime pods to be unable to connect to external databases (e.g., PostgreSQL
on port 5432) when using external database configurations.

The app network policy already had this section, but the realtime network policy
was missing it, creating an inconsistency and preventing the realtime service
from accessing external databases configured via networkPolicy.egress values.

This fix adds the same custom egress rules template section to the realtime
network policy, matching the app network policy behavior and allowing users to
configure database connectivity via values.yaml.
2025-12-19 18:59:08 -08:00
Waleed
4d1a9a3f22 v0.5.36: hitl improvements, opengraph, slack fixes, one-click unsubscribe, auth checks, new db indexes 2025-12-19 01:27:49 -08:00
Vikhyath Mondreti
eb07a080fb v0.5.35: helm updates, copilot improvements, 404 for docs, salesforce fixes, subflow resize clamping 2025-12-18 16:23:19 -08:00
62 changed files with 759 additions and 13516 deletions

View File

@@ -27,16 +27,16 @@ All API responses include information about your workflow execution limits and u
"limits": {
"workflowExecutionRateLimit": {
"sync": {
"requestsPerMinute": 150, // Sustained rate limit per minute
"maxBurst": 300, // Maximum burst capacity
"remaining": 298, // Current tokens available (up to maxBurst)
"resetAt": "..." // When tokens next refill
"requestsPerMinute": 60, // Sustained rate limit per minute
"maxBurst": 120, // Maximum burst capacity
"remaining": 118, // Current tokens available (up to maxBurst)
"resetAt": "..." // When tokens next refill
},
"async": {
"requestsPerMinute": 1000, // Sustained rate limit per minute
"maxBurst": 2000, // Maximum burst capacity
"remaining": 1998, // Current tokens available
"resetAt": "..." // When tokens next refill
"requestsPerMinute": 200, // Sustained rate limit per minute
"maxBurst": 400, // Maximum burst capacity
"remaining": 398, // Current tokens available
"resetAt": "..." // When tokens next refill
}
},
"usage": {
@@ -107,28 +107,28 @@ Query workflow execution logs with extensive filtering options.
}
],
"nextCursor": "eyJzIjoiMjAyNS0wMS0wMVQxMjozNDo1Ni43ODlaIiwiaWQiOiJsb2dfYWJjMTIzIn0",
"limits": {
"workflowExecutionRateLimit": {
"sync": {
"requestsPerMinute": 150,
"maxBurst": 300,
"remaining": 298,
"resetAt": "2025-01-01T12:35:56.789Z"
},
"async": {
"requestsPerMinute": 1000,
"maxBurst": 2000,
"remaining": 1998,
"resetAt": "2025-01-01T12:35:56.789Z"
}
"limits": {
"workflowExecutionRateLimit": {
"sync": {
"requestsPerMinute": 60,
"maxBurst": 120,
"remaining": 118,
"resetAt": "2025-01-01T12:35:56.789Z"
},
"usage": {
"currentPeriodCost": 1.234,
"limit": 10,
"plan": "pro",
"isExceeded": false
"async": {
"requestsPerMinute": 200,
"maxBurst": 400,
"remaining": 398,
"resetAt": "2025-01-01T12:35:56.789Z"
}
},
"usage": {
"currentPeriodCost": 1.234,
"limit": 10,
"plan": "pro",
"isExceeded": false
}
}
}
```
</Tab>
@@ -188,15 +188,15 @@ Retrieve detailed information about a specific log entry.
"limits": {
"workflowExecutionRateLimit": {
"sync": {
"requestsPerMinute": 150,
"maxBurst": 300,
"remaining": 298,
"requestsPerMinute": 60,
"maxBurst": 120,
"remaining": 118,
"resetAt": "2025-01-01T12:35:56.789Z"
},
"async": {
"requestsPerMinute": 1000,
"maxBurst": 2000,
"remaining": 1998,
"requestsPerMinute": 200,
"maxBurst": 400,
"remaining": 398,
"resetAt": "2025-01-01T12:35:56.789Z"
}
},
@@ -477,10 +477,10 @@ The API uses a **token bucket algorithm** for rate limiting, providing fair usag
| Plan | Requests/Minute | Burst Capacity |
|------|-----------------|----------------|
| Free | 30 | 60 |
| Pro | 100 | 200 |
| Team | 200 | 400 |
| Enterprise | 500 | 1000 |
| Free | 10 | 20 |
| Pro | 30 | 60 |
| Team | 60 | 120 |
| Enterprise | 120 | 240 |
**How it works:**
- Tokens refill at `requestsPerMinute` rate
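
To make the bucket math above concrete, here is a minimal TypeScript sketch of token-bucket accounting built from the fields shown in the response (`requestsPerMinute`, `maxBurst`, `remaining`); the class and method names are illustrative, not Sim's actual implementation.

```typescript
// Minimal token-bucket sketch (illustrative only, not Sim's implementation).
// Tokens refill continuously at `requestsPerMinute` and are capped at `maxBurst`.
class TokenBucket {
  private tokens: number
  private lastRefill: number // epoch ms

  constructor(
    private readonly requestsPerMinute: number,
    private readonly maxBurst: number
  ) {
    this.tokens = maxBurst
    this.lastRefill = Date.now()
  }

  /** Add tokens earned since the last refill, capped at maxBurst. */
  private refill(now: number): void {
    const elapsedMinutes = (now - this.lastRefill) / 60_000
    this.tokens = Math.min(this.maxBurst, this.tokens + elapsedMinutes * this.requestsPerMinute)
    this.lastRefill = now
  }

  /** Try to consume one token; returns remaining tokens, or null if rate limited. */
  tryConsume(now: number = Date.now()): number | null {
    this.refill(now)
    if (this.tokens < 1) return null
    this.tokens -= 1
    return Math.floor(this.tokens)
  }
}

// Example: a Free-plan sync bucket (10 req/min, burst 20) under the limits above.
const bucket = new TokenBucket(10, 20)
console.log(bucket.tryConsume()) // 19 — burst capacity minus the request just made
```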

View File

@@ -170,16 +170,16 @@ curl -X GET -H "X-API-Key: YOUR_API_KEY" -H "Content-Type: application/json" htt
"rateLimit": {
"sync": {
"isLimited": false,
"requestsPerMinute": 150,
"maxBurst": 300,
"remaining": 300,
"requestsPerMinute": 25,
"maxBurst": 50,
"remaining": 50,
"resetAt": "2025-09-08T22:51:55.999Z"
},
"async": {
"isLimited": false,
"requestsPerMinute": 1000,
"maxBurst": 2000,
"remaining": 2000,
"requestsPerMinute": 200,
"maxBurst": 400,
"remaining": 400,
"resetAt": "2025-09-08T22:51:56.155Z"
},
"authType": "api"
@@ -206,11 +206,11 @@ curl -X GET -H "X-API-Key: YOUR_API_KEY" -H "Content-Type: application/json" htt
Different subscription plans have different usage limits:
| Plan | Monthly Usage Included | Rate Limits (per minute) |
|------|------------------------|-------------------------|
| **Free** | $20 | 50 sync, 200 async |
| **Pro** | $20 (adjustable) | 150 sync, 1,000 async |
| **Team** | $40/seat (pooled, adjustable) | 300 sync, 2,500 async |
| Plan | Monthly Usage Limit | Rate Limits (per minute) |
|------|-------------------|-------------------------|
| **Free** | $20 | 5 sync, 10 async |
| **Pro** | $100 | 10 sync, 50 async |
| **Team** | $500 (pooled) | 50 sync, 100 async |
| **Enterprise** | Custom | Custom |
## Billing Model

View File

@@ -180,11 +180,6 @@ A quick lookup for everyday actions in the Sim workflow editor. For keyboard sho
<td>Right-click → **Enable/Disable**</td>
<td><ActionImage src="/static/quick-reference/disable-block.png" alt="Disable block" /></td>
</tr>
<tr>
<td>Lock/Unlock a block</td>
<td>Hover block → Click lock icon (Admin only)</td>
<td><ActionImage src="/static/quick-reference/lock-block.png" alt="Lock block" /></td>
</tr>
<tr>
<td>Toggle handle orientation</td>
<td>Right-click → **Toggle Handles**</td>

View File

@@ -11,7 +11,7 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
/>
{/* MANUAL-CONTENT-START:intro */}
The [Pulse](https://www.runpulse.com) tool enables seamless extraction of text and structured content from a wide variety of documents—including PDFs, images, and Office files—using state-of-the-art OCR (Optical Character Recognition) powered by Pulse. Designed for automated agentic workflows, Pulse Parser makes it easy to unlock valuable information trapped in unstructured documents and integrate the extracted content directly into your workflow.
The [Pulse](https://www.pulseapi.com/) tool enables seamless extraction of text and structured content from a wide variety of documents—including PDFs, images, and Office files—using state-of-the-art OCR (Optical Character Recognition) powered by Pulse. Designed for automated agentic workflows, Pulse Parser makes it easy to unlock valuable information trapped in unstructured documents and integrate the extracted content directly into your workflow.
With Pulse, you can:

Binary file not shown. (Image diff; before: 34 KiB.)

View File

@@ -1,11 +1,10 @@
'use client'
import type React from 'react'
import { createContext, useCallback, useContext, useEffect, useMemo, useRef, useState } from 'react'
import { createContext, useCallback, useContext, useEffect, useMemo, useState } from 'react'
import { createLogger } from '@sim/logger'
import { useQueryClient } from '@tanstack/react-query'
import { useParams } from 'next/navigation'
import { useSocket } from '@/app/workspace/providers/socket-provider'
import {
useWorkspacePermissionsQuery,
type WorkspacePermissions,
@@ -58,42 +57,14 @@ export function WorkspacePermissionsProvider({ children }: WorkspacePermissionsP
const [hasShownOfflineNotification, setHasShownOfflineNotification] = useState(false)
const hasOperationError = useOperationQueueStore((state) => state.hasOperationError)
const addNotification = useNotificationStore((state) => state.addNotification)
const removeNotification = useNotificationStore((state) => state.removeNotification)
const { isReconnecting } = useSocket()
const reconnectingNotificationIdRef = useRef<string | null>(null)
const isOfflineMode = hasOperationError
useEffect(() => {
if (isReconnecting && !reconnectingNotificationIdRef.current && !isOfflineMode) {
const id = addNotification({
level: 'error',
message: 'Reconnecting...',
})
reconnectingNotificationIdRef.current = id
} else if (!isReconnecting && reconnectingNotificationIdRef.current) {
removeNotification(reconnectingNotificationIdRef.current)
reconnectingNotificationIdRef.current = null
}
return () => {
if (reconnectingNotificationIdRef.current) {
removeNotification(reconnectingNotificationIdRef.current)
reconnectingNotificationIdRef.current = null
}
}
}, [isReconnecting, isOfflineMode, addNotification, removeNotification])
useEffect(() => {
if (!isOfflineMode || hasShownOfflineNotification) {
return
}
if (reconnectingNotificationIdRef.current) {
removeNotification(reconnectingNotificationIdRef.current)
reconnectingNotificationIdRef.current = null
}
try {
addNotification({
level: 'error',
@@ -107,7 +78,7 @@ export function WorkspacePermissionsProvider({ children }: WorkspacePermissionsP
} catch (error) {
logger.error('Failed to add offline notification', { error })
}
}, [addNotification, removeNotification, hasShownOfflineNotification, isOfflineMode])
}, [addNotification, hasShownOfflineNotification, isOfflineMode])
const {
data: workspacePermissions,

View File

@@ -1,5 +1,5 @@
import { memo, useCallback } from 'react'
import { ArrowLeftRight, ArrowUpDown, Circle, CircleOff, Lock, LogOut, Unlock } from 'lucide-react'
import { ArrowLeftRight, ArrowUpDown, Circle, CircleOff, LogOut } from 'lucide-react'
import { Button, Copy, PlayOutline, Tooltip, Trash2 } from '@/components/emcn'
import { cn } from '@/lib/core/utils/cn'
import { isInputDefinitionTrigger } from '@/lib/workflows/triggers/input-definition-triggers'
@@ -49,7 +49,6 @@ export const ActionBar = memo(
collaborativeBatchRemoveBlocks,
collaborativeBatchToggleBlockEnabled,
collaborativeBatchToggleBlockHandles,
collaborativeBatchToggleLocked,
} = useCollaborativeWorkflow()
const { setPendingSelection } = useWorkflowRegistry()
const { handleRunFromBlock } = useWorkflowExecution()
@@ -85,28 +84,16 @@ export const ActionBar = memo(
)
}, [blockId, addNotification, collaborativeBatchAddBlocks, setPendingSelection])
const {
isEnabled,
horizontalHandles,
parentId,
parentType,
isLocked,
isParentLocked,
isParentDisabled,
} = useWorkflowStore(
const { isEnabled, horizontalHandles, parentId, parentType } = useWorkflowStore(
useCallback(
(state) => {
const block = state.blocks[blockId]
const parentId = block?.data?.parentId
const parentBlock = parentId ? state.blocks[parentId] : undefined
return {
isEnabled: block?.enabled ?? true,
horizontalHandles: block?.horizontalHandles ?? false,
parentId,
parentType: parentBlock?.type,
isLocked: block?.locked ?? false,
isParentLocked: parentBlock?.locked ?? false,
isParentDisabled: parentBlock ? !parentBlock.enabled : false,
parentType: parentId ? state.blocks[parentId]?.type : undefined,
}
},
[blockId]
@@ -174,27 +161,25 @@ export const ActionBar = memo(
{!isNoteBlock && !isInsideSubflow && (
<Tooltip.Root>
<Tooltip.Trigger asChild>
<span className='inline-flex'>
<Button
variant='ghost'
onClick={(e) => {
e.stopPropagation()
if (canRunFromBlock && !disabled) {
handleRunFromBlockClick()
}
}}
className={ACTION_BUTTON_STYLES}
disabled={disabled || !canRunFromBlock}
>
<PlayOutline className={ICON_SIZE} />
</Button>
</span>
<Button
variant='ghost'
onClick={(e) => {
e.stopPropagation()
if (canRunFromBlock && !disabled) {
handleRunFromBlockClick()
}
}}
className={ACTION_BUTTON_STYLES}
disabled={disabled || !canRunFromBlock}
>
<PlayOutline className={ICON_SIZE} />
</Button>
</Tooltip.Trigger>
<Tooltip.Content side='top'>
{(() => {
if (disabled) return getTooltipMessage('Run from block')
if (isExecuting) return 'Execution in progress'
if (!dependenciesSatisfied) return 'Run previous blocks first'
if (!dependenciesSatisfied) return 'Run upstream blocks first'
return 'Run from block'
})()}
</Tooltip.Content>
@@ -208,54 +193,18 @@ export const ActionBar = memo(
variant='ghost'
onClick={(e) => {
e.stopPropagation()
// Can't enable if parent is disabled (must enable parent first)
const cantEnable = !isEnabled && isParentDisabled
if (!disabled && !isLocked && !isParentLocked && !cantEnable) {
if (!disabled) {
collaborativeBatchToggleBlockEnabled([blockId])
}
}}
className={ACTION_BUTTON_STYLES}
disabled={
disabled || isLocked || isParentLocked || (!isEnabled && isParentDisabled)
}
disabled={disabled}
>
{isEnabled ? <Circle className={ICON_SIZE} /> : <CircleOff className={ICON_SIZE} />}
</Button>
</Tooltip.Trigger>
<Tooltip.Content side='top'>
{isLocked || isParentLocked
? 'Block is locked'
: !isEnabled && isParentDisabled
? 'Parent container is disabled'
: getTooltipMessage(isEnabled ? 'Disable Block' : 'Enable Block')}
</Tooltip.Content>
</Tooltip.Root>
)}
{userPermissions.canAdmin && (
<Tooltip.Root>
<Tooltip.Trigger asChild>
<Button
variant='ghost'
onClick={(e) => {
e.stopPropagation()
// Can't unlock a block if its parent container is locked
if (!disabled && !(isLocked && isParentLocked)) {
collaborativeBatchToggleLocked([blockId])
}
}}
className={ACTION_BUTTON_STYLES}
disabled={disabled || (isLocked && isParentLocked)}
>
{isLocked ? <Unlock className={ICON_SIZE} /> : <Lock className={ICON_SIZE} />}
</Button>
</Tooltip.Trigger>
<Tooltip.Content side='top'>
{isLocked && isParentLocked
? 'Parent container is locked'
: isLocked
? 'Unlock Block'
: 'Lock Block'}
{getTooltipMessage(isEnabled ? 'Disable Block' : 'Enable Block')}
</Tooltip.Content>
</Tooltip.Root>
)}
@@ -288,12 +237,12 @@ export const ActionBar = memo(
variant='ghost'
onClick={(e) => {
e.stopPropagation()
if (!disabled && !isLocked && !isParentLocked) {
if (!disabled) {
collaborativeBatchToggleBlockHandles([blockId])
}
}}
className={ACTION_BUTTON_STYLES}
disabled={disabled || isLocked || isParentLocked}
disabled={disabled}
>
{horizontalHandles ? (
<ArrowLeftRight className={ICON_SIZE} />
@@ -303,9 +252,7 @@ export const ActionBar = memo(
</Button>
</Tooltip.Trigger>
<Tooltip.Content side='top'>
{isLocked || isParentLocked
? 'Block is locked'
: getTooltipMessage(horizontalHandles ? 'Vertical Ports' : 'Horizontal Ports')}
{getTooltipMessage(horizontalHandles ? 'Vertical Ports' : 'Horizontal Ports')}
</Tooltip.Content>
</Tooltip.Root>
)}
@@ -317,23 +264,19 @@ export const ActionBar = memo(
variant='ghost'
onClick={(e) => {
e.stopPropagation()
if (!disabled && userPermissions.canEdit && !isLocked && !isParentLocked) {
if (!disabled && userPermissions.canEdit) {
window.dispatchEvent(
new CustomEvent('remove-from-subflow', { detail: { blockIds: [blockId] } })
)
}
}}
className={ACTION_BUTTON_STYLES}
disabled={disabled || !userPermissions.canEdit || isLocked || isParentLocked}
disabled={disabled || !userPermissions.canEdit}
>
<LogOut className={ICON_SIZE} />
</Button>
</Tooltip.Trigger>
<Tooltip.Content side='top'>
{isLocked || isParentLocked
? 'Block is locked'
: getTooltipMessage('Remove from Subflow')}
</Tooltip.Content>
<Tooltip.Content side='top'>{getTooltipMessage('Remove from Subflow')}</Tooltip.Content>
</Tooltip.Root>
)}
@@ -343,19 +286,17 @@ export const ActionBar = memo(
variant='ghost'
onClick={(e) => {
e.stopPropagation()
if (!disabled && !isLocked && !isParentLocked) {
if (!disabled) {
collaborativeBatchRemoveBlocks([blockId])
}
}}
className={ACTION_BUTTON_STYLES}
disabled={disabled || isLocked || isParentLocked}
disabled={disabled}
>
<Trash2 className={ICON_SIZE} />
</Button>
</Tooltip.Trigger>
<Tooltip.Content side='top'>
{isLocked || isParentLocked ? 'Block is locked' : getTooltipMessage('Delete Block')}
</Tooltip.Content>
<Tooltip.Content side='top'>{getTooltipMessage('Delete Block')}</Tooltip.Content>
</Tooltip.Root>
</div>
)

View File

@@ -20,9 +20,6 @@ export interface BlockInfo {
horizontalHandles: boolean
parentId?: string
parentType?: string
locked?: boolean
isParentLocked?: boolean
isParentDisabled?: boolean
}
/**
@@ -49,17 +46,10 @@ export interface BlockMenuProps {
showRemoveFromSubflow?: boolean
/** Whether run from block is available (has snapshot, was executed, not inside subflow) */
canRunFromBlock?: boolean
/** Whether to disable edit actions (user can't edit OR blocks are locked) */
disableEdit?: boolean
/** Whether the user has edit permission (ignoring locked state) */
userCanEdit?: boolean
isExecuting?: boolean
/** Whether the selected block is a trigger (has no incoming edges) */
isPositionalTrigger?: boolean
/** Callback to toggle locked state of selected blocks */
onToggleLocked?: () => void
/** Whether the user has admin permissions */
canAdmin?: boolean
}
/**
@@ -88,22 +78,13 @@ export function BlockMenu({
showRemoveFromSubflow = false,
canRunFromBlock = false,
disableEdit = false,
userCanEdit = true,
isExecuting = false,
isPositionalTrigger = false,
onToggleLocked,
canAdmin = false,
}: BlockMenuProps) {
const isSingleBlock = selectedBlocks.length === 1
const allEnabled = selectedBlocks.every((b) => b.enabled)
const allDisabled = selectedBlocks.every((b) => !b.enabled)
const allLocked = selectedBlocks.every((b) => b.locked)
const allUnlocked = selectedBlocks.every((b) => !b.locked)
// Can't unlock blocks that have locked parents
const hasBlockWithLockedParent = selectedBlocks.some((b) => b.locked && b.isParentLocked)
// Can't enable blocks that have disabled parents
const hasBlockWithDisabledParent = selectedBlocks.some((b) => !b.enabled && b.isParentDisabled)
const hasSingletonBlock = selectedBlocks.some(
(b) =>
@@ -127,12 +108,6 @@ export function BlockMenu({
return 'Toggle Enabled'
}
const getToggleLockedLabel = () => {
if (allLocked) return 'Unlock'
if (allUnlocked) return 'Lock'
return 'Toggle Lock'
}
return (
<Popover
open={isOpen}
@@ -164,7 +139,7 @@ export function BlockMenu({
</PopoverItem>
<PopoverItem
className='group'
disabled={!userCanEdit || !hasClipboard}
disabled={disableEdit || !hasClipboard}
onClick={() => {
onPaste()
onClose()
@@ -175,7 +150,7 @@ export function BlockMenu({
</PopoverItem>
{!hasSingletonBlock && (
<PopoverItem
disabled={!userCanEdit}
disabled={disableEdit}
onClick={() => {
onDuplicate()
onClose()
@@ -189,15 +164,13 @@ export function BlockMenu({
{!allNoteBlocks && <PopoverDivider />}
{!allNoteBlocks && (
<PopoverItem
disabled={disableEdit || hasBlockWithDisabledParent}
disabled={disableEdit}
onClick={() => {
if (!disableEdit && !hasBlockWithDisabledParent) {
onToggleEnabled()
onClose()
}
onToggleEnabled()
onClose()
}}
>
{hasBlockWithDisabledParent ? 'Parent is disabled' : getToggleEnabledLabel()}
{getToggleEnabledLabel()}
</PopoverItem>
)}
{!allNoteBlocks && !isSubflow && (
@@ -222,19 +195,6 @@ export function BlockMenu({
Remove from Subflow
</PopoverItem>
)}
{canAdmin && onToggleLocked && (
<PopoverItem
disabled={hasBlockWithLockedParent}
onClick={() => {
if (!hasBlockWithLockedParent) {
onToggleLocked()
onClose()
}
}}
>
{hasBlockWithLockedParent ? 'Parent is locked' : getToggleLockedLabel()}
</PopoverItem>
)}
{/* Single block actions */}
{isSingleBlock && <PopoverDivider />}

View File

@@ -34,8 +34,6 @@ export interface CanvasMenuProps {
canUndo?: boolean
canRedo?: boolean
isInvitationsDisabled?: boolean
/** Whether the workflow has locked blocks (disables auto-layout) */
hasLockedBlocks?: boolean
}
/**
@@ -62,7 +60,6 @@ export function CanvasMenu({
disableEdit = false,
canUndo = false,
canRedo = false,
hasLockedBlocks = false,
}: CanvasMenuProps) {
return (
<Popover
@@ -132,12 +129,11 @@ export function CanvasMenu({
</PopoverItem>
<PopoverItem
className='group'
disabled={disableEdit || hasLockedBlocks}
disabled={disableEdit}
onClick={() => {
onAutoLayout()
onClose()
}}
title={hasLockedBlocks ? 'Unlock blocks to use auto-layout' : undefined}
>
<span>Auto-layout</span>
<span className='ml-auto opacity-70 group-hover:opacity-100'>L</span>

View File

@@ -1,443 +0,0 @@
/**
* @vitest-environment node
*/
import { describe, expect, it } from 'vitest'
interface StoredTool {
type: string
title?: string
toolId?: string
params?: Record<string, string>
customToolId?: string
schema?: any
code?: string
operation?: string
usageControl?: 'auto' | 'force' | 'none'
}
const isMcpToolAlreadySelected = (selectedTools: StoredTool[], mcpToolId: string): boolean => {
return selectedTools.some((tool) => tool.type === 'mcp' && tool.toolId === mcpToolId)
}
const isCustomToolAlreadySelected = (
selectedTools: StoredTool[],
customToolId: string
): boolean => {
return selectedTools.some(
(tool) => tool.type === 'custom-tool' && tool.customToolId === customToolId
)
}
const isWorkflowAlreadySelected = (selectedTools: StoredTool[], workflowId: string): boolean => {
return selectedTools.some(
(tool) => tool.type === 'workflow_input' && tool.params?.workflowId === workflowId
)
}
describe('isMcpToolAlreadySelected', () => {
describe('basic functionality', () => {
it.concurrent('returns false when selectedTools is empty', () => {
expect(isMcpToolAlreadySelected([], 'mcp-tool-123')).toBe(false)
})
it.concurrent('returns false when MCP tool is not in selectedTools', () => {
const selectedTools: StoredTool[] = [
{ type: 'mcp', toolId: 'different-mcp-tool', title: 'Different Tool' },
]
expect(isMcpToolAlreadySelected(selectedTools, 'mcp-tool-123')).toBe(false)
})
it.concurrent('returns true when MCP tool is already selected', () => {
const selectedTools: StoredTool[] = [
{ type: 'mcp', toolId: 'mcp-tool-123', title: 'My MCP Tool' },
]
expect(isMcpToolAlreadySelected(selectedTools, 'mcp-tool-123')).toBe(true)
})
it.concurrent('returns true when MCP tool is one of many selected tools', () => {
const selectedTools: StoredTool[] = [
{ type: 'custom-tool', customToolId: 'custom-1' },
{ type: 'mcp', toolId: 'mcp-tool-123', title: 'My MCP Tool' },
{ type: 'workflow_input', toolId: 'workflow_executor' },
]
expect(isMcpToolAlreadySelected(selectedTools, 'mcp-tool-123')).toBe(true)
})
})
describe('type discrimination', () => {
it.concurrent('does not match non-MCP tools with same toolId', () => {
const selectedTools: StoredTool[] = [{ type: 'http_request', toolId: 'mcp-tool-123' }]
expect(isMcpToolAlreadySelected(selectedTools, 'mcp-tool-123')).toBe(false)
})
it.concurrent('does not match custom tools even with toolId set', () => {
const selectedTools: StoredTool[] = [
{ type: 'custom-tool', toolId: 'custom-mcp-tool-123', customToolId: 'db-id' },
]
expect(isMcpToolAlreadySelected(selectedTools, 'mcp-tool-123')).toBe(false)
})
})
describe('multiple MCP tools', () => {
it.concurrent('correctly identifies first of multiple MCP tools', () => {
const selectedTools: StoredTool[] = [
{ type: 'mcp', toolId: 'mcp-tool-1', title: 'Tool 1' },
{ type: 'mcp', toolId: 'mcp-tool-2', title: 'Tool 2' },
{ type: 'mcp', toolId: 'mcp-tool-3', title: 'Tool 3' },
]
expect(isMcpToolAlreadySelected(selectedTools, 'mcp-tool-1')).toBe(true)
})
it.concurrent('correctly identifies middle MCP tool', () => {
const selectedTools: StoredTool[] = [
{ type: 'mcp', toolId: 'mcp-tool-1', title: 'Tool 1' },
{ type: 'mcp', toolId: 'mcp-tool-2', title: 'Tool 2' },
{ type: 'mcp', toolId: 'mcp-tool-3', title: 'Tool 3' },
]
expect(isMcpToolAlreadySelected(selectedTools, 'mcp-tool-2')).toBe(true)
})
it.concurrent('correctly identifies last MCP tool', () => {
const selectedTools: StoredTool[] = [
{ type: 'mcp', toolId: 'mcp-tool-1', title: 'Tool 1' },
{ type: 'mcp', toolId: 'mcp-tool-2', title: 'Tool 2' },
{ type: 'mcp', toolId: 'mcp-tool-3', title: 'Tool 3' },
]
expect(isMcpToolAlreadySelected(selectedTools, 'mcp-tool-3')).toBe(true)
})
it.concurrent('returns false for non-existent MCP tool among many', () => {
const selectedTools: StoredTool[] = [
{ type: 'mcp', toolId: 'mcp-tool-1', title: 'Tool 1' },
{ type: 'mcp', toolId: 'mcp-tool-2', title: 'Tool 2' },
]
expect(isMcpToolAlreadySelected(selectedTools, 'mcp-tool-999')).toBe(false)
})
})
})
describe('isCustomToolAlreadySelected', () => {
describe('basic functionality', () => {
it.concurrent('returns false when selectedTools is empty', () => {
expect(isCustomToolAlreadySelected([], 'custom-tool-123')).toBe(false)
})
it.concurrent('returns false when custom tool is not in selectedTools', () => {
const selectedTools: StoredTool[] = [
{ type: 'custom-tool', customToolId: 'different-custom-tool' },
]
expect(isCustomToolAlreadySelected(selectedTools, 'custom-tool-123')).toBe(false)
})
it.concurrent('returns true when custom tool is already selected', () => {
const selectedTools: StoredTool[] = [{ type: 'custom-tool', customToolId: 'custom-tool-123' }]
expect(isCustomToolAlreadySelected(selectedTools, 'custom-tool-123')).toBe(true)
})
it.concurrent('returns true when custom tool is one of many selected tools', () => {
const selectedTools: StoredTool[] = [
{ type: 'mcp', toolId: 'mcp-1', title: 'MCP Tool' },
{ type: 'custom-tool', customToolId: 'custom-tool-123' },
{ type: 'http_request', toolId: 'http_request_tool' },
]
expect(isCustomToolAlreadySelected(selectedTools, 'custom-tool-123')).toBe(true)
})
})
describe('type discrimination', () => {
it.concurrent('does not match non-custom tools with similar IDs', () => {
const selectedTools: StoredTool[] = [
{ type: 'mcp', toolId: 'custom-tool-123', title: 'MCP with similar ID' },
]
expect(isCustomToolAlreadySelected(selectedTools, 'custom-tool-123')).toBe(false)
})
it.concurrent('does not match MCP tools even if customToolId happens to match', () => {
const selectedTools: StoredTool[] = [
{ type: 'mcp', toolId: 'mcp-id', customToolId: 'custom-tool-123' } as StoredTool,
]
expect(isCustomToolAlreadySelected(selectedTools, 'custom-tool-123')).toBe(false)
})
})
describe('legacy inline custom tools', () => {
it.concurrent('does not match legacy inline tools without customToolId', () => {
const selectedTools: StoredTool[] = [
{
type: 'custom-tool',
title: 'Legacy Tool',
toolId: 'custom-myFunction',
schema: { function: { name: 'myFunction' } },
code: 'return true',
},
]
expect(isCustomToolAlreadySelected(selectedTools, 'custom-tool-123')).toBe(false)
})
it.concurrent('does not false-positive on legacy tools when checking for database tool', () => {
const selectedTools: StoredTool[] = [
{
type: 'custom-tool',
title: 'Legacy Tool',
schema: { function: { name: 'sameName' } },
code: 'return true',
},
]
expect(isCustomToolAlreadySelected(selectedTools, 'db-tool-1')).toBe(false)
})
})
describe('multiple custom tools', () => {
it.concurrent('correctly identifies first of multiple custom tools', () => {
const selectedTools: StoredTool[] = [
{ type: 'custom-tool', customToolId: 'custom-1' },
{ type: 'custom-tool', customToolId: 'custom-2' },
{ type: 'custom-tool', customToolId: 'custom-3' },
]
expect(isCustomToolAlreadySelected(selectedTools, 'custom-1')).toBe(true)
})
it.concurrent('correctly identifies middle custom tool', () => {
const selectedTools: StoredTool[] = [
{ type: 'custom-tool', customToolId: 'custom-1' },
{ type: 'custom-tool', customToolId: 'custom-2' },
{ type: 'custom-tool', customToolId: 'custom-3' },
]
expect(isCustomToolAlreadySelected(selectedTools, 'custom-2')).toBe(true)
})
it.concurrent('correctly identifies last custom tool', () => {
const selectedTools: StoredTool[] = [
{ type: 'custom-tool', customToolId: 'custom-1' },
{ type: 'custom-tool', customToolId: 'custom-2' },
{ type: 'custom-tool', customToolId: 'custom-3' },
]
expect(isCustomToolAlreadySelected(selectedTools, 'custom-3')).toBe(true)
})
it.concurrent('returns false for non-existent custom tool among many', () => {
const selectedTools: StoredTool[] = [
{ type: 'custom-tool', customToolId: 'custom-1' },
{ type: 'custom-tool', customToolId: 'custom-2' },
]
expect(isCustomToolAlreadySelected(selectedTools, 'custom-999')).toBe(false)
})
})
describe('mixed tool types', () => {
it.concurrent('correctly identifies custom tool in mixed list', () => {
const selectedTools: StoredTool[] = [
{ type: 'mcp', toolId: 'mcp-tool-1', title: 'MCP Tool' },
{ type: 'custom-tool', customToolId: 'custom-tool-123' },
{ type: 'http_request', toolId: 'http_request' },
{ type: 'workflow_input', toolId: 'workflow_executor' },
{ type: 'custom-tool', title: 'Legacy', schema: {}, code: '' },
]
expect(isCustomToolAlreadySelected(selectedTools, 'custom-tool-123')).toBe(true)
})
it.concurrent('does not confuse MCP toolId with custom customToolId', () => {
const selectedTools: StoredTool[] = [
{ type: 'mcp', toolId: 'shared-id-123', title: 'MCP Tool' },
{ type: 'custom-tool', customToolId: 'different-id' },
]
expect(isCustomToolAlreadySelected(selectedTools, 'shared-id-123')).toBe(false)
})
})
})
describe('isWorkflowAlreadySelected', () => {
describe('basic functionality', () => {
it.concurrent('returns false when selectedTools is empty', () => {
expect(isWorkflowAlreadySelected([], 'workflow-123')).toBe(false)
})
it.concurrent('returns false when workflow is not in selectedTools', () => {
const selectedTools: StoredTool[] = [
{
type: 'workflow_input',
toolId: 'workflow_executor',
params: { workflowId: 'different-workflow' },
},
]
expect(isWorkflowAlreadySelected(selectedTools, 'workflow-123')).toBe(false)
})
it.concurrent('returns true when workflow is already selected', () => {
const selectedTools: StoredTool[] = [
{
type: 'workflow_input',
toolId: 'workflow_executor',
params: { workflowId: 'workflow-123' },
},
]
expect(isWorkflowAlreadySelected(selectedTools, 'workflow-123')).toBe(true)
})
it.concurrent('returns true when workflow is one of many selected tools', () => {
const selectedTools: StoredTool[] = [
{ type: 'mcp', toolId: 'mcp-1', title: 'MCP Tool' },
{
type: 'workflow_input',
toolId: 'workflow_executor',
params: { workflowId: 'workflow-123' },
},
{ type: 'custom-tool', customToolId: 'custom-1' },
]
expect(isWorkflowAlreadySelected(selectedTools, 'workflow-123')).toBe(true)
})
})
describe('type discrimination', () => {
it.concurrent('does not match non-workflow_input tools', () => {
const selectedTools: StoredTool[] = [
{ type: 'mcp', toolId: 'workflow-123', params: { workflowId: 'workflow-123' } },
]
expect(isWorkflowAlreadySelected(selectedTools, 'workflow-123')).toBe(false)
})
it.concurrent('does not match workflow_input without params', () => {
const selectedTools: StoredTool[] = [{ type: 'workflow_input', toolId: 'workflow_executor' }]
expect(isWorkflowAlreadySelected(selectedTools, 'workflow-123')).toBe(false)
})
it.concurrent('does not match workflow_input with different workflowId in params', () => {
const selectedTools: StoredTool[] = [
{
type: 'workflow_input',
toolId: 'workflow_executor',
params: { workflowId: 'other-workflow' },
},
]
expect(isWorkflowAlreadySelected(selectedTools, 'workflow-123')).toBe(false)
})
})
describe('multiple workflows', () => {
it.concurrent('allows different workflows to be selected', () => {
const selectedTools: StoredTool[] = [
{
type: 'workflow_input',
toolId: 'workflow_executor',
params: { workflowId: 'workflow-a' },
},
{
type: 'workflow_input',
toolId: 'workflow_executor',
params: { workflowId: 'workflow-b' },
},
]
expect(isWorkflowAlreadySelected(selectedTools, 'workflow-a')).toBe(true)
expect(isWorkflowAlreadySelected(selectedTools, 'workflow-b')).toBe(true)
expect(isWorkflowAlreadySelected(selectedTools, 'workflow-c')).toBe(false)
})
it.concurrent('correctly identifies specific workflow among many', () => {
const selectedTools: StoredTool[] = [
{
type: 'workflow_input',
toolId: 'workflow_executor',
params: { workflowId: 'workflow-1' },
},
{
type: 'workflow_input',
toolId: 'workflow_executor',
params: { workflowId: 'workflow-2' },
},
{
type: 'workflow_input',
toolId: 'workflow_executor',
params: { workflowId: 'workflow-3' },
},
]
expect(isWorkflowAlreadySelected(selectedTools, 'workflow-2')).toBe(true)
expect(isWorkflowAlreadySelected(selectedTools, 'workflow-999')).toBe(false)
})
})
})
describe('duplicate prevention integration scenarios', () => {
describe('add then try to re-add', () => {
it.concurrent('prevents re-adding the same MCP tool', () => {
const selectedTools: StoredTool[] = [
{
type: 'mcp',
toolId: 'planetscale-query',
title: 'PlanetScale Query',
params: { serverId: 'server-1' },
},
]
expect(isMcpToolAlreadySelected(selectedTools, 'planetscale-query')).toBe(true)
})
it.concurrent('prevents re-adding the same custom tool', () => {
const selectedTools: StoredTool[] = [
{
type: 'custom-tool',
customToolId: 'my-custom-tool-uuid',
usageControl: 'auto',
},
]
expect(isCustomToolAlreadySelected(selectedTools, 'my-custom-tool-uuid')).toBe(true)
})
it.concurrent('prevents re-adding the same workflow', () => {
const selectedTools: StoredTool[] = [
{
type: 'workflow_input',
toolId: 'workflow_executor',
params: { workflowId: 'my-workflow-uuid' },
},
]
expect(isWorkflowAlreadySelected(selectedTools, 'my-workflow-uuid')).toBe(true)
})
})
describe('remove then re-add', () => {
it.concurrent('allows re-adding MCP tool after removal', () => {
const selectedToolsAfterRemoval: StoredTool[] = []
expect(isMcpToolAlreadySelected(selectedToolsAfterRemoval, 'planetscale-query')).toBe(false)
})
it.concurrent('allows re-adding custom tool after removal', () => {
const selectedToolsAfterRemoval: StoredTool[] = [
{ type: 'mcp', toolId: 'some-other-tool', title: 'Other' },
]
expect(isCustomToolAlreadySelected(selectedToolsAfterRemoval, 'my-custom-tool-uuid')).toBe(
false
)
})
it.concurrent('allows re-adding workflow after removal', () => {
const selectedToolsAfterRemoval: StoredTool[] = [
{ type: 'mcp', toolId: 'some-tool', title: 'Other' },
]
expect(isWorkflowAlreadySelected(selectedToolsAfterRemoval, 'my-workflow-uuid')).toBe(false)
})
})
describe('different tools with similar names', () => {
it.concurrent('allows adding different MCP tools from same server', () => {
const selectedTools: StoredTool[] = [
{ type: 'mcp', toolId: 'server1-tool-a', title: 'Tool A', params: { serverId: 'server1' } },
]
expect(isMcpToolAlreadySelected(selectedTools, 'server1-tool-b')).toBe(false)
})
it.concurrent('allows adding different custom tools', () => {
const selectedTools: StoredTool[] = [{ type: 'custom-tool', customToolId: 'custom-a' }]
expect(isCustomToolAlreadySelected(selectedTools, 'custom-b')).toBe(false)
})
it.concurrent('allows adding different workflows', () => {
const selectedTools: StoredTool[] = [
{
type: 'workflow_input',
toolId: 'workflow_executor',
params: { workflowId: 'workflow-a' },
},
]
expect(isWorkflowAlreadySelected(selectedTools, 'workflow-b')).toBe(false)
})
})
})

View File

@@ -1226,40 +1226,6 @@ export const ToolInput = memo(function ToolInput({
return selectedTools.some((tool) => tool.toolId === toolId)
}
/**
* Checks if an MCP tool is already selected.
*
* @param mcpToolId - The MCP tool identifier to check
* @returns `true` if the MCP tool is already selected
*/
const isMcpToolAlreadySelected = (mcpToolId: string): boolean => {
return selectedTools.some((tool) => tool.type === 'mcp' && tool.toolId === mcpToolId)
}
/**
* Checks if a custom tool is already selected.
*
* @param customToolId - The custom tool identifier to check
* @returns `true` if the custom tool is already selected
*/
const isCustomToolAlreadySelected = (customToolId: string): boolean => {
return selectedTools.some(
(tool) => tool.type === 'custom-tool' && tool.customToolId === customToolId
)
}
/**
* Checks if a workflow is already selected.
*
* @param workflowId - The workflow identifier to check
* @returns `true` if the workflow is already selected
*/
const isWorkflowAlreadySelected = (workflowId: string): boolean => {
return selectedTools.some(
(tool) => tool.type === 'workflow_input' && tool.params?.workflowId === workflowId
)
}
/**
* Checks if a block supports multiple operations.
*
@@ -1779,29 +1745,24 @@ export const ToolInput = memo(function ToolInput({
if (!permissionConfig.disableCustomTools && customTools.length > 0) {
groups.push({
section: 'Custom Tools',
items: customTools.map((customTool) => {
const alreadySelected = isCustomToolAlreadySelected(customTool.id)
return {
label: customTool.title,
value: `custom-${customTool.id}`,
iconElement: createToolIcon('#3B82F6', WrenchIcon),
disabled: isPreview || alreadySelected,
onSelect: () => {
if (alreadySelected) return
const newTool: StoredTool = {
type: 'custom-tool',
customToolId: customTool.id,
usageControl: 'auto',
isExpanded: true,
}
setStoreValue([
...selectedTools.map((tool) => ({ ...tool, isExpanded: false })),
newTool,
])
setOpen(false)
},
}
}),
items: customTools.map((customTool) => ({
label: customTool.title,
value: `custom-${customTool.id}`,
iconElement: createToolIcon('#3B82F6', WrenchIcon),
onSelect: () => {
const newTool: StoredTool = {
type: 'custom-tool',
customToolId: customTool.id,
usageControl: 'auto',
isExpanded: true,
}
setStoreValue([
...selectedTools.map((tool) => ({ ...tool, isExpanded: false })),
newTool,
])
setOpen(false)
},
})),
})
}
@@ -1811,13 +1772,11 @@ export const ToolInput = memo(function ToolInput({
section: 'MCP Tools',
items: availableMcpTools.map((mcpTool) => {
const server = mcpServers.find((s) => s.id === mcpTool.serverId)
const alreadySelected = isMcpToolAlreadySelected(mcpTool.id)
return {
label: mcpTool.name,
value: `mcp-${mcpTool.id}`,
iconElement: createToolIcon(mcpTool.bgColor || '#6366F1', mcpTool.icon || McpIcon),
onSelect: () => {
if (alreadySelected) return
const newTool: StoredTool = {
type: 'mcp',
title: mcpTool.name,
@@ -1837,7 +1796,7 @@ export const ToolInput = memo(function ToolInput({
}
handleMcpToolSelect(newTool, true)
},
disabled: isPreview || disabled || alreadySelected,
disabled: isPreview || disabled,
}
}),
})
@@ -1851,17 +1810,12 @@ export const ToolInput = memo(function ToolInput({
if (builtInTools.length > 0) {
groups.push({
section: 'Built-in Tools',
items: builtInTools.map((block) => {
const toolId = getToolIdForOperation(block.type, undefined)
const alreadySelected = toolId ? isToolAlreadySelected(toolId, block.type) : false
return {
label: block.name,
value: `builtin-${block.type}`,
iconElement: createToolIcon(block.bgColor, block.icon),
disabled: isPreview || alreadySelected,
onSelect: () => handleSelectTool(block),
}
}),
items: builtInTools.map((block) => ({
label: block.name,
value: `builtin-${block.type}`,
iconElement: createToolIcon(block.bgColor, block.icon),
onSelect: () => handleSelectTool(block),
})),
})
}
@@ -1869,17 +1823,12 @@ export const ToolInput = memo(function ToolInput({
if (integrations.length > 0) {
groups.push({
section: 'Integrations',
items: integrations.map((block) => {
const toolId = getToolIdForOperation(block.type, undefined)
const alreadySelected = toolId ? isToolAlreadySelected(toolId, block.type) : false
return {
label: block.name,
value: `builtin-${block.type}`,
iconElement: createToolIcon(block.bgColor, block.icon),
disabled: isPreview || alreadySelected,
onSelect: () => handleSelectTool(block),
}
}),
items: integrations.map((block) => ({
label: block.name,
value: `builtin-${block.type}`,
iconElement: createToolIcon(block.bgColor, block.icon),
onSelect: () => handleSelectTool(block),
})),
})
}
@@ -1887,33 +1836,29 @@ export const ToolInput = memo(function ToolInput({
if (availableWorkflows.length > 0) {
groups.push({
section: 'Workflows',
items: availableWorkflows.map((workflow) => {
const alreadySelected = isWorkflowAlreadySelected(workflow.id)
return {
label: workflow.name,
value: `workflow-${workflow.id}`,
iconElement: createToolIcon('#6366F1', WorkflowIcon),
onSelect: () => {
if (alreadySelected) return
const newTool: StoredTool = {
type: 'workflow_input',
title: 'Workflow',
toolId: 'workflow_executor',
params: {
workflowId: workflow.id,
},
isExpanded: true,
usageControl: 'auto',
}
setStoreValue([
...selectedTools.map((tool) => ({ ...tool, isExpanded: false })),
newTool,
])
setOpen(false)
},
disabled: isPreview || disabled || alreadySelected,
}
}),
items: availableWorkflows.map((workflow) => ({
label: workflow.name,
value: `workflow-${workflow.id}`,
iconElement: createToolIcon('#6366F1', WorkflowIcon),
onSelect: () => {
const newTool: StoredTool = {
type: 'workflow_input',
title: 'Workflow',
toolId: 'workflow_executor',
params: {
workflowId: workflow.id,
},
isExpanded: true,
usageControl: 'auto',
}
setStoreValue([
...selectedTools.map((tool) => ({ ...tool, isExpanded: false })),
newTool,
])
setOpen(false)
},
disabled: isPreview || disabled,
})),
})
}
@@ -1932,11 +1877,6 @@ export const ToolInput = memo(function ToolInput({
permissionConfig.disableCustomTools,
permissionConfig.disableMcpTools,
availableWorkflows,
getToolIdForOperation,
isToolAlreadySelected,
isMcpToolAlreadySelected,
isCustomToolAlreadySelected,
isWorkflowAlreadySelected,
])
const toolRequiresOAuth = (toolId: string): boolean => {

View File

@@ -9,9 +9,7 @@ import {
ChevronUp,
ExternalLink,
Loader2,
Lock,
Pencil,
Unlock,
} from 'lucide-react'
import { useParams } from 'next/navigation'
import { useShallow } from 'zustand/react/shallow'
@@ -48,7 +46,6 @@ import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'
import { usePanelEditorStore } from '@/stores/panel'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
/** Stable empty object to avoid creating new references */
const EMPTY_SUBBLOCK_VALUES = {} as Record<string, any>
@@ -113,14 +110,6 @@ export function Editor() {
// Get user permissions
const userPermissions = useUserPermissionsContext()
// Check if block is locked (or inside a locked container) and compute edit permission
// Locked blocks cannot be edited by anyone (admins can only lock/unlock)
const blocks = useWorkflowStore((state) => state.blocks)
const parentId = currentBlock?.data?.parentId as string | undefined
const isParentLocked = parentId ? (blocks[parentId]?.locked ?? false) : false
const isLocked = (currentBlock?.locked ?? false) || isParentLocked
const canEditBlock = userPermissions.canEdit && !isLocked
// Get active workflow ID
const activeWorkflowId = useWorkflowRegistry((state) => state.activeWorkflowId)
@@ -161,7 +150,9 @@ export function Editor() {
blockSubBlockValues,
canonicalIndex
)
const displayAdvancedOptions = canEditBlock ? advancedMode : advancedMode || advancedValuesPresent
const displayAdvancedOptions = userPermissions.canEdit
? advancedMode
: advancedMode || advancedValuesPresent
const hasAdvancedOnlyFields = useMemo(() => {
for (const subBlock of subBlocksForCanonical) {
@@ -228,14 +219,13 @@ export function Editor() {
collaborativeSetBlockCanonicalMode,
collaborativeUpdateBlockName,
collaborativeToggleBlockAdvancedMode,
collaborativeBatchToggleLocked,
} = useCollaborativeWorkflow()
// Advanced mode toggle handler
const handleToggleAdvancedMode = useCallback(() => {
if (!currentBlockId || !canEditBlock) return
if (!currentBlockId || !userPermissions.canEdit) return
collaborativeToggleBlockAdvancedMode(currentBlockId)
}, [currentBlockId, canEditBlock, collaborativeToggleBlockAdvancedMode])
}, [currentBlockId, userPermissions.canEdit, collaborativeToggleBlockAdvancedMode])
// Rename state
const [isRenaming, setIsRenaming] = useState(false)
@@ -246,10 +236,10 @@ export function Editor() {
* Handles starting the rename process.
*/
const handleStartRename = useCallback(() => {
if (!canEditBlock || !currentBlock) return
if (!userPermissions.canEdit || !currentBlock) return
setEditedName(currentBlock.name || '')
setIsRenaming(true)
}, [canEditBlock, currentBlock])
}, [userPermissions.canEdit, currentBlock])
/**
* Handles saving the renamed block.
@@ -368,36 +358,6 @@ export function Editor() {
)}
</div>
<div className='flex shrink-0 items-center gap-[8px]'>
{/* Locked indicator - clickable to unlock if user has admin permissions, block is locked, and parent is not locked */}
{isLocked && currentBlock && (
<Tooltip.Root>
<Tooltip.Trigger asChild>
{userPermissions.canAdmin && currentBlock.locked && !isParentLocked ? (
<Button
variant='ghost'
className='p-0'
onClick={() => collaborativeBatchToggleLocked([currentBlockId!])}
aria-label='Unlock block'
>
<Unlock className='h-[14px] w-[14px] text-[var(--text-secondary)]' />
</Button>
) : (
<div className='flex items-center justify-center'>
<Lock className='h-[14px] w-[14px] text-[var(--text-secondary)]' />
</div>
)}
</Tooltip.Trigger>
<Tooltip.Content side='top'>
<p>
{isParentLocked
? 'Parent container is locked'
: userPermissions.canAdmin && currentBlock.locked
? 'Unlock block'
: 'Block is locked'}
</p>
</Tooltip.Content>
</Tooltip.Root>
)}
{/* Rename button */}
{currentBlock && (
<Tooltip.Root>
@@ -406,7 +366,7 @@ export function Editor() {
variant='ghost'
className='p-0'
onClick={isRenaming ? handleSaveRename : handleStartRename}
disabled={!canEditBlock}
disabled={!userPermissions.canEdit}
aria-label={isRenaming ? 'Save name' : 'Rename block'}
>
{isRenaming ? (
@@ -474,7 +434,7 @@ export function Editor() {
incomingConnections={incomingConnections}
handleConnectionsResizeMouseDown={handleConnectionsResizeMouseDown}
toggleConnectionsCollapsed={toggleConnectionsCollapsed}
userCanEdit={canEditBlock}
userCanEdit={userPermissions.canEdit}
isConnectionsAtMinHeight={isConnectionsAtMinHeight}
/>
) : (
@@ -582,14 +542,14 @@ export function Editor() {
config={subBlock}
isPreview={false}
subBlockValues={subBlockState}
disabled={!canEditBlock}
disabled={!userPermissions.canEdit}
fieldDiffStatus={undefined}
allowExpandInPreview={false}
canonicalToggle={
isCanonicalSwap && canonicalMode && canonicalId
? {
mode: canonicalMode,
disabled: !canEditBlock,
disabled: !userPermissions.canEdit,
onToggle: () => {
if (!currentBlockId) return
const nextMode =
@@ -619,7 +579,7 @@ export function Editor() {
)
})}
{hasAdvancedOnlyFields && canEditBlock && (
{hasAdvancedOnlyFields && userPermissions.canEdit && (
<div className='flex items-center gap-[10px] px-[2px] pt-[14px] pb-[12px]'>
<div
className='h-[1.25px] flex-1'
@@ -664,7 +624,7 @@ export function Editor() {
config={subBlock}
isPreview={false}
subBlockValues={subBlockState}
disabled={!canEditBlock}
disabled={!userPermissions.canEdit}
fieldDiffStatus={undefined}
allowExpandInPreview={false}
/>

View File

@@ -45,13 +45,11 @@ import { useWorkflowExecution } from '@/app/workspace/[workspaceId]/w/[workflowI
import { useDeleteWorkflow, useImportWorkflow } from '@/app/workspace/[workspaceId]/w/hooks'
import { usePermissionConfig } from '@/hooks/use-permission-config'
import { useChatStore } from '@/stores/chat/store'
import { useNotificationStore } from '@/stores/notifications/store'
import type { PanelTab } from '@/stores/panel'
import { usePanelStore, useVariablesStore as usePanelVariablesStore } from '@/stores/panel'
import { useVariablesStore } from '@/stores/variables/store'
import { getWorkflowWithValues } from '@/stores/workflows'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
const logger = createLogger('Panel')
/**
@@ -121,11 +119,6 @@ export const Panel = memo(function Panel() {
hydration.phase === 'state-loading'
const { handleAutoLayout: autoLayoutWithFitView } = useAutoLayout(activeWorkflowId || null)
// Check for locked blocks (disables auto-layout)
const hasLockedBlocks = useWorkflowStore((state) =>
Object.values(state.blocks).some((block) => block.locked)
)
// Delete workflow hook
const { isDeleting, handleDeleteWorkflow } = useDeleteWorkflow({
workspaceId,
@@ -237,24 +230,11 @@ export const Panel = memo(function Panel() {
setIsAutoLayouting(true)
try {
const result = await autoLayoutWithFitView()
if (!result.success && result.error) {
useNotificationStore.getState().addNotification({
level: 'info',
message: result.error,
workflowId: activeWorkflowId || undefined,
})
}
await autoLayoutWithFitView()
} finally {
setIsAutoLayouting(false)
}
}, [
isExecuting,
userPermissions.canEdit,
isAutoLayouting,
autoLayoutWithFitView,
activeWorkflowId,
])
}, [isExecuting, userPermissions.canEdit, isAutoLayouting, autoLayoutWithFitView])
/**
* Handles exporting workflow as JSON
@@ -424,10 +404,7 @@ export const Panel = memo(function Panel() {
<PopoverContent align='start' side='bottom' sideOffset={8}>
<PopoverItem
onClick={handleAutoLayout}
disabled={
isExecuting || !userPermissions.canEdit || isAutoLayouting || hasLockedBlocks
}
title={hasLockedBlocks ? 'Unlock blocks to use auto-layout' : undefined}
disabled={isExecuting || !userPermissions.canEdit || isAutoLayouting}
>
<Layout className='h-3 w-3' animate={isAutoLayouting} variant='clockwise' />
<span>Auto layout</span>

View File

@@ -80,7 +80,6 @@ export const SubflowNodeComponent = memo(({ data, id, selected }: NodeProps<Subf
: undefined
const isEnabled = currentBlock?.enabled ?? true
const isLocked = currentBlock?.locked ?? false
const isPreview = data?.isPreview || false
// Focus state
@@ -201,10 +200,7 @@ export const SubflowNodeComponent = memo(({ data, id, selected }: NodeProps<Subf
{blockName}
</span>
</div>
<div className='flex items-center gap-1'>
{!isEnabled && <Badge variant='gray-secondary'>disabled</Badge>}
{isLocked && <Badge variant='gray-secondary'>locked</Badge>}
</div>
{!isEnabled && <Badge variant='gray-secondary'>disabled</Badge>}
</div>
{!isPreview && (

View File

@@ -18,8 +18,6 @@ export interface UseBlockStateReturn {
diffStatus: DiffStatus
/** Whether this is a deleted block in diff mode */
isDeletedBlock: boolean
/** Whether the block is locked */
isLocked: boolean
}
/**
@@ -42,11 +40,6 @@ export function useBlockState(
? (data.blockState?.enabled ?? true)
: (currentBlock?.enabled ?? true)
// Determine if block is locked
const isLocked = data.isPreview
? (data.blockState?.locked ?? false)
: (currentBlock?.locked ?? false)
// Get diff status
const diffStatus: DiffStatus =
currentWorkflow.isDiffMode && currentBlock && hasDiffStatus(currentBlock)
@@ -75,6 +68,5 @@ export function useBlockState(
isActive,
diffStatus,
isDeletedBlock: isDeletedBlock ?? false,
isLocked,
}
}

View File

@@ -672,7 +672,6 @@ export const WorkflowBlock = memo(function WorkflowBlock({
currentWorkflow,
activeWorkflowId,
isEnabled,
isLocked,
handleClick,
hasRing,
ringStyles,
@@ -1101,7 +1100,7 @@ export const WorkflowBlock = memo(function WorkflowBlock({
{name}
</span>
</div>
<div className='relative z-10 flex flex-shrink-0 items-center gap-1'>
<div className='relative z-10 flex flex-shrink-0 items-center gap-2'>
{isWorkflowSelector &&
childWorkflowId &&
typeof childIsDeployed === 'boolean' &&
@@ -1134,7 +1133,6 @@ export const WorkflowBlock = memo(function WorkflowBlock({
</Tooltip.Root>
)}
{!isEnabled && <Badge variant='gray-secondary'>disabled</Badge>}
{isLocked && <Badge variant='gray-secondary'>locked</Badge>}
{type === 'schedule' && shouldShowScheduleBadge && scheduleInfo?.isDisabled && (
<Tooltip.Root>

View File

@@ -47,7 +47,6 @@ export function useBlockVisual({
isActive: isExecuting,
diffStatus,
isDeletedBlock,
isLocked,
} = useBlockState(blockId, currentWorkflow, data)
const currentBlockId = usePanelEditorStore((state) => state.currentBlockId)
@@ -104,7 +103,6 @@ export function useBlockVisual({
currentWorkflow,
activeWorkflowId,
isEnabled,
isLocked,
handleClick,
hasRing,
ringStyles,

View File

@@ -31,8 +31,7 @@ export function useCanvasContextMenu({ blocks, getNodes, setNodes }: UseCanvasCo
nodes.map((n) => {
const block = blocks[n.id]
const parentId = block?.data?.parentId
const parentBlock = parentId ? blocks[parentId] : undefined
const parentType = parentBlock?.type
const parentType = parentId ? blocks[parentId]?.type : undefined
return {
id: n.id,
type: block?.type || '',
@@ -40,9 +39,6 @@ export function useCanvasContextMenu({ blocks, getNodes, setNodes }: UseCanvasCo
horizontalHandles: block?.horizontalHandles ?? false,
parentId,
parentType,
locked: block?.locked ?? false,
isParentLocked: parentBlock?.locked ?? false,
isParentDisabled: parentBlock ? !parentBlock.enabled : false,
}
}),
[blocks]

View File

@@ -52,16 +52,6 @@ export async function applyAutoLayoutAndUpdateStore(
return { success: false, error: 'No blocks to layout' }
}
// Check for locked blocks - auto-layout is disabled when blocks are locked
const hasLockedBlocks = Object.values(blocks).some((block) => block.locked)
if (hasLockedBlocks) {
logger.info('Auto layout skipped: workflow contains locked blocks', { workflowId })
return {
success: false,
error: 'Auto-layout is disabled when blocks are locked. Unlock blocks to use auto-layout.',
}
}
// Merge with default options
const layoutOptions = {
spacing: {

View File

@@ -1,87 +0,0 @@
import type { BlockState } from '@/stores/workflows/workflow/types'
/**
* Result of filtering protected blocks from a deletion operation
*/
export interface FilterProtectedBlocksResult {
/** Block IDs that can be deleted (not protected) */
deletableIds: string[]
/** Block IDs that are protected and cannot be deleted */
protectedIds: string[]
/** Whether all blocks are protected (deletion should be cancelled entirely) */
allProtected: boolean
}
/**
* Checks if a block is protected from editing/deletion.
* A block is protected if it is locked or if its parent container is locked.
*
* @param blockId - The ID of the block to check
* @param blocks - Record of all blocks in the workflow
* @returns True if the block is protected
*/
export function isBlockProtected(blockId: string, blocks: Record<string, BlockState>): boolean {
const block = blocks[blockId]
if (!block) return false
// Block is locked directly
if (block.locked) return true
// Block is inside a locked container
const parentId = block.data?.parentId
if (parentId && blocks[parentId]?.locked) return true
return false
}
/**
* Checks if an edge is protected from modification.
* An edge is protected if either its source or target block is protected.
*
* @param edge - The edge to check (must have source and target)
* @param blocks - Record of all blocks in the workflow
* @returns True if the edge is protected
*/
export function isEdgeProtected(
edge: { source: string; target: string },
blocks: Record<string, BlockState>
): boolean {
return isBlockProtected(edge.source, blocks) || isBlockProtected(edge.target, blocks)
}
/**
* Filters out protected blocks from a list of block IDs for deletion.
* Protected blocks are those that are locked or inside a locked container.
*
* @param blockIds - Array of block IDs to filter
* @param blocks - Record of all blocks in the workflow
* @returns Result containing deletable IDs, protected IDs, and whether all are protected
*/
export function filterProtectedBlocks(
blockIds: string[],
blocks: Record<string, BlockState>
): FilterProtectedBlocksResult {
const protectedIds = blockIds.filter((id) => isBlockProtected(id, blocks))
const deletableIds = blockIds.filter((id) => !protectedIds.includes(id))
return {
deletableIds,
protectedIds,
allProtected: protectedIds.length === blockIds.length && blockIds.length > 0,
}
}
/**
* Checks if any blocks in the selection are protected.
* Useful for determining if edit actions should be disabled.
*
* @param blockIds - Array of block IDs to check
* @param blocks - Record of all blocks in the workflow
* @returns True if any block is protected
*/
export function hasProtectedBlocks(
blockIds: string[],
blocks: Record<string, BlockState>
): boolean {
return blockIds.some((id) => isBlockProtected(id, blocks))
}
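Illustrative sketch (not part of the diff): how the removed protection helpers compose, using hypothetical block IDs. The objects are trimmed to the fields the checks actually read (locked and data.parentId), hence the loose cast.

// A locked container, one child inside it, and one free-standing block
const blocks = {
  loop1: { locked: true },
  child1: { data: { parentId: 'loop1' } },
  free1: {},
} as unknown as Record<string, BlockState>

isBlockProtected('child1', blocks) // true: parent container is locked
isEdgeProtected({ source: 'free1', target: 'child1' }, blocks) // true: one endpoint is protected
filterProtectedBlocks(['child1', 'free1'], blocks)
// => { deletableIds: ['free1'], protectedIds: ['child1'], allProtected: false }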

View File

@@ -1,5 +1,4 @@
export * from './auto-layout-utils'
export * from './block-protection-utils'
export * from './block-ring-utils'
export * from './node-position-utils'
export * from './workflow-canvas-helpers'

View File

@@ -55,10 +55,7 @@ import {
clearDragHighlights,
computeClampedPositionUpdates,
estimateBlockDimensions,
filterProtectedBlocks,
getClampedPositionForNode,
isBlockProtected,
isEdgeProtected,
isInEditableElement,
resolveParentChildSelectionConflicts,
validateTriggerPaste,
@@ -546,7 +543,6 @@ const WorkflowContent = React.memo(() => {
collaborativeBatchRemoveBlocks,
collaborativeBatchToggleBlockEnabled,
collaborativeBatchToggleBlockHandles,
collaborativeBatchToggleLocked,
undo,
redo,
} = useCollaborativeWorkflow()
@@ -1073,27 +1069,8 @@ const WorkflowContent = React.memo(() => {
const handleContextDelete = useCallback(() => {
const blockIds = contextMenuBlocks.map((b) => b.id)
const { deletableIds, protectedIds, allProtected } = filterProtectedBlocks(blockIds, blocks)
if (protectedIds.length > 0) {
if (allProtected) {
addNotification({
level: 'info',
message: 'Cannot delete locked blocks or blocks inside locked containers',
workflowId: activeWorkflowId || undefined,
})
return
}
addNotification({
level: 'info',
message: `Skipped ${protectedIds.length} protected block(s)`,
workflowId: activeWorkflowId || undefined,
})
}
if (deletableIds.length > 0) {
collaborativeBatchRemoveBlocks(deletableIds)
}
}, [contextMenuBlocks, collaborativeBatchRemoveBlocks, addNotification, activeWorkflowId, blocks])
collaborativeBatchRemoveBlocks(blockIds)
}, [contextMenuBlocks, collaborativeBatchRemoveBlocks])
const handleContextToggleEnabled = useCallback(() => {
const blockIds = contextMenuBlocks.map((block) => block.id)
@@ -1105,11 +1082,6 @@ const WorkflowContent = React.memo(() => {
collaborativeBatchToggleBlockHandles(blockIds)
}, [contextMenuBlocks, collaborativeBatchToggleBlockHandles])
const handleContextToggleLocked = useCallback(() => {
const blockIds = contextMenuBlocks.map((block) => block.id)
collaborativeBatchToggleLocked(blockIds)
}, [contextMenuBlocks, collaborativeBatchToggleLocked])
const handleContextRemoveFromSubflow = useCallback(() => {
const blocksToRemove = contextMenuBlocks.filter(
(block) => block.parentId && (block.parentType === 'loop' || block.parentType === 'parallel')
@@ -1979,6 +1951,7 @@ const WorkflowContent = React.memo(() => {
const loadingWorkflowRef = useRef<string | null>(null)
const currentWorkflowExists = Boolean(workflows[workflowIdParam])
/** Initializes workflow when it exists in registry and needs hydration. */
useEffect(() => {
const currentId = workflowIdParam
const currentWorkspaceHydration = hydration.workspaceId
@@ -2155,7 +2128,6 @@ const WorkflowContent = React.memo(() => {
parentId: block.data?.parentId,
extent: block.data?.extent || undefined,
dragHandle: '.workflow-drag-handle',
draggable: !isBlockProtected(block.id, blocks),
data: {
...block.data,
name: block.name,
@@ -2191,7 +2163,6 @@ const WorkflowContent = React.memo(() => {
position,
parentId: block.data?.parentId,
dragHandle,
draggable: !isBlockProtected(block.id, blocks),
extent: (() => {
// Clamp children to subflow body (exclude header)
const parentId = block.data?.parentId as string | undefined
@@ -2520,18 +2491,12 @@ const WorkflowContent = React.memo(() => {
const edgeIdsToRemove = changes
.filter((change: any) => change.type === 'remove')
.map((change: any) => change.id)
.filter((edgeId: string) => {
// Prevent removing edges connected to protected blocks
const edge = edges.find((e) => e.id === edgeId)
if (!edge) return true
return !isEdgeProtected(edge, blocks)
})
if (edgeIdsToRemove.length > 0) {
collaborativeBatchRemoveEdges(edgeIdsToRemove)
}
},
[collaborativeBatchRemoveEdges, edges, blocks]
[collaborativeBatchRemoveEdges]
)
/**
@@ -2593,16 +2558,6 @@ const WorkflowContent = React.memo(() => {
if (!sourceNode || !targetNode) return
// Prevent connections to/from protected blocks
if (isEdgeProtected(connection, blocks)) {
addNotification({
level: 'info',
message: 'Cannot connect to locked blocks or blocks inside locked containers',
workflowId: activeWorkflowId || undefined,
})
return
}
// Get parent information (handle container start node case)
const sourceParentId =
blocks[sourceNode.id]?.data?.parentId ||
@@ -2665,7 +2620,7 @@ const WorkflowContent = React.memo(() => {
connectionCompletedRef.current = true
}
},
[addEdge, getNodes, blocks, addNotification, activeWorkflowId]
[addEdge, getNodes, blocks]
)
/**
@@ -2760,9 +2715,6 @@ const WorkflowContent = React.memo(() => {
// Only consider container nodes that aren't the dragged node
if (n.type !== 'subflowNode' || n.id === node.id) return false
// Don't allow dropping into locked containers
if (blocks[n.id]?.locked) return false
// Get the container's absolute position
const containerAbsolutePos = getNodeAbsolutePosition(n.id)
@@ -2855,11 +2807,6 @@ const WorkflowContent = React.memo(() => {
/** Captures initial parent ID and position when drag starts. */
const onNodeDragStart = useCallback(
(_event: React.MouseEvent, node: any) => {
// Prevent dragging protected blocks
if (isBlockProtected(node.id, blocks)) {
return
}
// Store the original parent ID when starting to drag
const currentParentId = blocks[node.id]?.data?.parentId || null
setDragStartParentId(currentParentId)
@@ -2888,7 +2835,7 @@ const WorkflowContent = React.memo(() => {
}
})
},
[blocks, setDragStartPosition, getNodes, setPotentialParentId]
[blocks, setDragStartPosition, getNodes, potentialParentId, setPotentialParentId]
)
/** Handles node drag stop to establish parent-child relationships. */
@@ -2950,18 +2897,6 @@ const WorkflowContent = React.memo(() => {
// Don't process parent changes if the node hasn't actually changed parent or is being moved within same parent
if (potentialParentId === dragStartParentId) return
// Prevent moving locked blocks out of locked containers
// Unlocked blocks (e.g., duplicates) can be moved out freely
if (dragStartParentId && blocks[dragStartParentId]?.locked && blocks[node.id]?.locked) {
addNotification({
level: 'info',
message: 'Cannot move locked blocks out of locked containers',
workflowId: activeWorkflowId || undefined,
})
setPotentialParentId(dragStartParentId) // Reset to original parent
return
}
// Check if this is a starter block - starter blocks should never be in containers
const isStarterBlock = node.data?.type === 'starter'
if (isStarterBlock) {
@@ -3358,16 +3293,6 @@ const WorkflowContent = React.memo(() => {
/** Stable delete handler to avoid creating new function references per edge. */
const handleEdgeDelete = useCallback(
(edgeId: string) => {
// Prevent removing edges connected to protected blocks
const edge = edges.find((e) => e.id === edgeId)
if (edge && isEdgeProtected(edge, blocks)) {
addNotification({
level: 'info',
message: 'Cannot remove connections from locked blocks',
workflowId: activeWorkflowId || undefined,
})
return
}
removeEdge(edgeId)
// Remove this edge from selection (find by edge ID value)
setSelectedEdges((prev) => {
@@ -3380,7 +3305,7 @@ const WorkflowContent = React.memo(() => {
return next
})
},
[removeEdge, edges, blocks, addNotification, activeWorkflowId]
[removeEdge]
)
/** Transforms edges to include selection state and delete handlers. Memoized to prevent re-renders. */
@@ -3421,15 +3346,9 @@ const WorkflowContent = React.memo(() => {
// Handle edge deletion first (edges take priority if selected)
if (selectedEdges.size > 0) {
// Get all selected edge IDs and filter out edges connected to protected blocks
const edgeIds = Array.from(selectedEdges.values()).filter((edgeId) => {
const edge = edges.find((e) => e.id === edgeId)
if (!edge) return true
return !isEdgeProtected(edge, blocks)
})
if (edgeIds.length > 0) {
collaborativeBatchRemoveEdges(edgeIds)
}
// Get all selected edge IDs and batch delete them
const edgeIds = Array.from(selectedEdges.values())
collaborativeBatchRemoveEdges(edgeIds)
setSelectedEdges(new Map())
return
}
@@ -3446,29 +3365,7 @@ const WorkflowContent = React.memo(() => {
event.preventDefault()
const selectedIds = selectedNodes.map((node) => node.id)
const { deletableIds, protectedIds, allProtected } = filterProtectedBlocks(
selectedIds,
blocks
)
if (protectedIds.length > 0) {
if (allProtected) {
addNotification({
level: 'info',
message: 'Cannot delete locked blocks or blocks inside locked containers',
workflowId: activeWorkflowId || undefined,
})
return
}
addNotification({
level: 'info',
message: `Skipped ${protectedIds.length} protected block(s)`,
workflowId: activeWorkflowId || undefined,
})
}
if (deletableIds.length > 0) {
collaborativeBatchRemoveBlocks(deletableIds)
}
collaborativeBatchRemoveBlocks(selectedIds)
}
window.addEventListener('keydown', handleKeyDown)
@@ -3479,10 +3376,6 @@ const WorkflowContent = React.memo(() => {
getNodes,
collaborativeBatchRemoveBlocks,
effectivePermissions.canEdit,
blocks,
edges,
addNotification,
activeWorkflowId,
])
return (
@@ -3603,18 +3496,12 @@ const WorkflowContent = React.memo(() => {
(b) => b.parentId && (b.parentType === 'loop' || b.parentType === 'parallel')
)}
canRunFromBlock={runFromBlockState.canRun}
disableEdit={
!effectivePermissions.canEdit ||
contextMenuBlocks.some((b) => b.locked || b.isParentLocked)
}
userCanEdit={effectivePermissions.canEdit}
disableEdit={!effectivePermissions.canEdit}
isExecuting={isExecuting}
isPositionalTrigger={
contextMenuBlocks.length === 1 &&
edges.filter((e) => e.target === contextMenuBlocks[0]?.id).length === 0
}
onToggleLocked={handleContextToggleLocked}
canAdmin={effectivePermissions.canAdmin}
/>
<CanvasMenu
@@ -3637,7 +3524,6 @@ const WorkflowContent = React.memo(() => {
disableEdit={!effectivePermissions.canEdit}
canUndo={canUndo}
canRedo={canRedo}
hasLockedBlocks={Object.values(blocks).some((b) => b.locked)}
/>
</>
)}

View File

@@ -13,8 +13,8 @@ import { SlackMonoIcon } from '@/components/icons'
import type { PlanFeature } from '@/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/components/subscription/components/plan-card'
export const PRO_PLAN_FEATURES: PlanFeature[] = [
{ icon: Zap, text: '150 runs per minute (sync)' },
{ icon: Clock, text: '1,000 runs per minute (async)' },
{ icon: Zap, text: '25 runs per minute (sync)' },
{ icon: Clock, text: '200 runs per minute (async)' },
{ icon: HardDrive, text: '50GB file storage' },
{ icon: Building2, text: 'Unlimited workspaces' },
{ icon: Users, text: 'Unlimited invites' },
@@ -22,8 +22,8 @@ export const PRO_PLAN_FEATURES: PlanFeature[] = [
]
export const TEAM_PLAN_FEATURES: PlanFeature[] = [
{ icon: Zap, text: '300 runs per minute (sync)' },
{ icon: Clock, text: '2,500 runs per minute (async)' },
{ icon: Zap, text: '75 runs per minute (sync)' },
{ icon: Clock, text: '500 runs per minute (async)' },
{ icon: HardDrive, text: '500GB file storage (pooled)' },
{ icon: Building2, text: 'Unlimited workspaces' },
{ icon: Users, text: 'Unlimited invites' },

View File

@@ -49,7 +49,6 @@ interface SocketContextType {
socket: Socket | null
isConnected: boolean
isConnecting: boolean
isReconnecting: boolean
authFailed: boolean
currentWorkflowId: string | null
currentSocketId: string | null
@@ -67,16 +66,9 @@ interface SocketContextType {
blockId: string,
subblockId: string,
value: any,
operationId: string | undefined,
workflowId: string
) => void
emitVariableUpdate: (
variableId: string,
field: string,
value: any,
operationId: string | undefined,
workflowId: string
operationId?: string
) => void
emitVariableUpdate: (variableId: string, field: string, value: any, operationId?: string) => void
emitCursorUpdate: (cursor: { x: number; y: number } | null) => void
emitSelectionUpdate: (selection: { type: 'block' | 'edge' | 'none'; id?: string }) => void
@@ -96,7 +88,6 @@ const SocketContext = createContext<SocketContextType>({
socket: null,
isConnected: false,
isConnecting: false,
isReconnecting: false,
authFailed: false,
currentWorkflowId: null,
currentSocketId: null,
@@ -131,7 +122,6 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
const [socket, setSocket] = useState<Socket | null>(null)
const [isConnected, setIsConnected] = useState(false)
const [isConnecting, setIsConnecting] = useState(false)
const [isReconnecting, setIsReconnecting] = useState(false)
const [currentWorkflowId, setCurrentWorkflowId] = useState<string | null>(null)
const [currentSocketId, setCurrentSocketId] = useState<string | null>(null)
const [presenceUsers, setPresenceUsers] = useState<PresenceUser[]>([])
@@ -246,19 +236,20 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
setCurrentWorkflowId(null)
setPresenceUsers([])
// socket.active indicates if auto-reconnect will happen
if (socketInstance.active) {
setIsReconnecting(true)
logger.info('Socket disconnected, will auto-reconnect', { reason })
} else {
setIsReconnecting(false)
logger.info('Socket disconnected, no auto-reconnect', { reason })
}
logger.info('Socket disconnected', {
reason,
})
})
socketInstance.on('connect_error', (error: Error) => {
socketInstance.on('connect_error', (error: any) => {
setIsConnecting(false)
logger.error('Socket connection error:', { message: error.message })
logger.error('Socket connection error:', {
message: error.message,
stack: error.stack,
description: error.description,
type: error.type,
transport: error.transport,
})
// Check if this is an authentication failure
const isAuthError =
@@ -270,41 +261,43 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
logger.warn(
'Authentication failed - stopping reconnection attempts. User may need to refresh/re-login.'
)
// Stop reconnection attempts to prevent infinite loop
socketInstance.disconnect()
// Reset state to allow re-initialization when session is restored
setSocket(null)
setAuthFailed(true)
setIsReconnecting(false)
initializedRef.current = false
} else if (socketInstance.active) {
// Temporary failure, will auto-reconnect
setIsReconnecting(true)
}
})
// Reconnection events are on the Manager (socket.io), not the socket itself
socketInstance.io.on('reconnect', (attemptNumber) => {
socketInstance.on('reconnect', (attemptNumber) => {
setIsConnected(true)
setIsReconnecting(false)
setCurrentSocketId(socketInstance.id ?? null)
logger.info('Socket reconnected successfully', {
attemptNumber,
socketId: socketInstance.id,
transport: socketInstance.io.engine?.transport?.name,
})
// Note: join-workflow is handled by the useEffect watching isConnected
})
socketInstance.io.on('reconnect_attempt', (attemptNumber) => {
setIsReconnecting(true)
logger.info('Socket reconnection attempt', { attemptNumber })
socketInstance.on('reconnect_attempt', (attemptNumber) => {
logger.info('Socket reconnection attempt (fresh token will be generated)', {
attemptNumber,
timestamp: new Date().toISOString(),
})
})
socketInstance.io.on('reconnect_error', (error: Error) => {
logger.error('Socket reconnection error:', { message: error.message })
socketInstance.on('reconnect_error', (error: any) => {
logger.error('Socket reconnection error:', {
message: error.message,
attemptNumber: error.attemptNumber,
type: error.type,
})
})
socketInstance.io.on('reconnect_failed', () => {
socketInstance.on('reconnect_failed', () => {
logger.error('Socket reconnection failed - all attempts exhausted')
setIsReconnecting(false)
setIsConnecting(false)
})
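A minimal sketch (not part of the diff) of the Manager vs Socket event split the comment above refers to, assuming the standard socket.io-client v3+/v4 API and a placeholder URL.

import { io, type Socket } from 'socket.io-client'

const socket: Socket = io('https://collab.example.invalid', { withCredentials: true })

// Per-connection lifecycle events live on the Socket itself
socket.on('connect', () => console.log('connected', socket.id))
socket.on('connect_error', (error) => console.log('connect_error', error.message))
socket.on('disconnect', (reason) => console.log('disconnected', reason))

// Reconnection events are emitted by the Manager (socket.io), not re-emitted on the Socket
socket.io.on('reconnect_attempt', (attempt) => console.log('reconnect attempt', attempt))
socket.io.on('reconnect', (attempt) => console.log('reconnected after', attempt, 'attempts'))
socket.io.on('reconnect_error', (error) => console.log('reconnect error', error.message))
socket.io.on('reconnect_failed', () => console.log('all reconnection attempts exhausted'))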
@@ -636,7 +629,6 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
if (commit) {
socket.emit('workflow-operation', {
workflowId: currentWorkflowId,
operation,
target,
payload,
@@ -653,7 +645,6 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
}
pendingPositionUpdates.current.set(blockId, {
workflowId: currentWorkflowId,
operation,
target,
payload,
@@ -675,7 +666,6 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
}
} else {
socket.emit('workflow-operation', {
workflowId: currentWorkflowId,
operation,
target,
payload,
@@ -688,51 +678,47 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
)
const emitSubblockUpdate = useCallback(
(
blockId: string,
subblockId: string,
value: any,
operationId: string | undefined,
workflowId: string
) => {
if (!socket) {
logger.warn('Cannot emit subblock update: no socket connection', { workflowId, blockId })
return
(blockId: string, subblockId: string, value: any, operationId?: string) => {
if (socket && currentWorkflowId) {
socket.emit('subblock-update', {
blockId,
subblockId,
value,
timestamp: Date.now(),
operationId,
})
} else {
logger.warn('Cannot emit subblock update: no socket connection or workflow room', {
hasSocket: !!socket,
currentWorkflowId,
blockId,
subblockId,
})
}
socket.emit('subblock-update', {
workflowId,
blockId,
subblockId,
value,
timestamp: Date.now(),
operationId,
})
},
[socket]
[socket, currentWorkflowId]
)
const emitVariableUpdate = useCallback(
(
variableId: string,
field: string,
value: any,
operationId: string | undefined,
workflowId: string
) => {
if (!socket) {
logger.warn('Cannot emit variable update: no socket connection', { workflowId, variableId })
return
(variableId: string, field: string, value: any, operationId?: string) => {
if (socket && currentWorkflowId) {
socket.emit('variable-update', {
variableId,
field,
value,
timestamp: Date.now(),
operationId,
})
} else {
logger.warn('Cannot emit variable update: no socket connection or workflow room', {
hasSocket: !!socket,
currentWorkflowId,
variableId,
field,
})
}
socket.emit('variable-update', {
workflowId,
variableId,
field,
value,
timestamp: Date.now(),
operationId,
})
},
[socket]
[socket, currentWorkflowId]
)
const lastCursorEmit = useRef(0)
@@ -808,7 +794,6 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
socket,
isConnected,
isConnecting,
isReconnecting,
authFailed,
currentWorkflowId,
currentSocketId,
@@ -835,7 +820,6 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
socket,
isConnected,
isConnecting,
isReconnecting,
authFailed,
currentWorkflowId,
currentSocketId,

View File

@@ -13,8 +13,8 @@ interface FreeTierUpgradeEmailProps {
const proFeatures = [
{ label: '$20/month', desc: 'in credits included' },
{ label: '150 runs/min', desc: 'sync executions' },
{ label: '1,000 runs/min', desc: 'async executions' },
{ label: '25 runs/min', desc: 'sync executions' },
{ label: '200 runs/min', desc: 'async executions' },
{ label: '50GB storage', desc: 'for files & assets' },
{ label: 'Unlimited', desc: 'workspaces & invites' },
]

View File

@@ -458,8 +458,8 @@ export function getCodeEditorProps(options?: {
'caret-[var(--text-primary)] dark:caret-white',
// Font smoothing
'[-webkit-font-smoothing:antialiased] [-moz-osx-font-smoothing:grayscale]',
// Disable interaction for streaming/preview/disabled
(isStreaming || isPreview || disabled) && 'pointer-events-none'
// Disable interaction for streaming/preview
(isStreaming || isPreview) && 'pointer-events-none'
),
}
}

View File

@@ -212,11 +212,11 @@ export class WorkflowBlockHandler implements BlockHandler {
/**
* Parses a potentially nested workflow error message to extract:
* - The chain of workflow names
* - The actual root error message (preserving the block name prefix for the failing block)
* - The actual root error message (preserving the block prefix for the failing block)
*
* Handles formats like:
* - "workflow-name" failed: error
* - Block Name: "workflow-name" failed: error
* - [block_type] Block Name: "workflow-name" failed: error
* - Workflow chain: A → B | error
*/
private parseNestedWorkflowError(message: string): { chain: string[]; rootError: string } {
@@ -234,8 +234,8 @@ export class WorkflowBlockHandler implements BlockHandler {
// Extract workflow names from patterns like:
// - "workflow-name" failed:
// - Block Name: "workflow-name" failed:
const workflowPattern = /(?:\[[^\]]+\]\s*)?(?:[^:]+:\s*)?"([^"]+)"\s*failed:\s*/g
// - [block_type] Block Name: "workflow-name" failed:
const workflowPattern = /(?:\[[^\]]+\]\s*[^:]+:\s*)?"([^"]+)"\s*failed:\s*/g
let match: RegExpExecArray | null
let lastIndex = 0
@@ -247,7 +247,7 @@ export class WorkflowBlockHandler implements BlockHandler {
}
// The root error is everything after the last match
// Keep the block name prefix (e.g., Function 1:) so we know which block failed
// Keep the block prefix (e.g., [function] Function 1:) so we know which block failed
const rootError = lastIndex > 0 ? remaining.slice(lastIndex) : remaining
return { chain, rootError: rootError.trim() || 'Unknown error' }
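Illustrative sketch (not part of the diff): how the pattern variant that expects the "[block_type] Block Name:" prefix walks a nested message. The loop body is a reconstruction consistent with the match/lastIndex variables shown above, and the sample message is hypothetical.

const workflowPattern = /(?:\[[^\]]+\]\s*[^:]+:\s*)?"([^"]+)"\s*failed:\s*/g
const remaining =
  '[workflow] Sub Flow 1: "child-workflow" failed: [function] Function 1: x is not defined'

const chain: string[] = []
let match: RegExpExecArray | null
let lastIndex = 0
// Each match contributes one workflow name to the chain and advances lastIndex past "failed:"
while ((match = workflowPattern.exec(remaining)) !== null) {
  chain.push(match[1])
  lastIndex = workflowPattern.lastIndex
}
const rootError = (lastIndex > 0 ? remaining.slice(lastIndex) : remaining).trim() || 'Unknown error'
// chain     -> ['child-workflow']
// rootError -> '[function] Function 1: x is not defined'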

View File

@@ -47,7 +47,7 @@ export function buildBlockExecutionError(details: BlockExecutionErrorDetails): E
const blockName = details.block.metadata?.name || details.block.id
const blockType = details.block.metadata?.id || 'unknown'
const error = new Error(`${blockName}: ${errorMessage}`)
const error = new Error(`[${blockType}] ${blockName}: ${errorMessage}`)
Object.assign(error, {
blockId: details.block.id,

View File

@@ -146,6 +146,10 @@ export function useCollaborativeWorkflow() {
cancelOperationsForVariable,
} = useOperationQueue()
const isInActiveRoom = useCallback(() => {
return !!currentWorkflowId && activeWorkflowId === currentWorkflowId
}, [currentWorkflowId, activeWorkflowId])
// Register emit functions with operation queue store
useEffect(() => {
registerEmitFunctions(
@@ -158,19 +162,10 @@ export function useCollaborativeWorkflow() {
useEffect(() => {
const handleWorkflowOperation = (data: any) => {
const { operation, target, payload, userId, metadata } = data
const { operation, target, payload, userId } = data
if (isApplyingRemoteChange.current) return
// Filter broadcasts by workflowId to prevent cross-workflow updates
if (metadata?.workflowId && metadata.workflowId !== activeWorkflowId) {
logger.debug('Ignoring workflow operation for different workflow', {
broadcastWorkflowId: metadata.workflowId,
activeWorkflowId,
})
return
}
logger.info(`Received ${operation} on ${target} from user ${userId}`)
// Apply the operation to local state
@@ -409,20 +404,6 @@ export function useCollaborativeWorkflow() {
logger.info('Successfully applied batch-toggle-handles from remote user')
break
}
case BLOCKS_OPERATIONS.BATCH_TOGGLE_LOCKED: {
const { blockIds } = payload
logger.info('Received batch-toggle-locked from remote user', {
userId,
count: (blockIds || []).length,
})
if (blockIds && blockIds.length > 0) {
useWorkflowStore.getState().batchToggleLocked(blockIds)
}
logger.info('Successfully applied batch-toggle-locked from remote user')
break
}
case BLOCKS_OPERATIONS.BATCH_UPDATE_PARENT: {
const { updates } = payload
logger.info('Received batch-update-parent from remote user', {
@@ -455,24 +436,16 @@ export function useCollaborativeWorkflow() {
}
const handleSubblockUpdate = (data: any) => {
const { workflowId, blockId, subblockId, value, userId } = data
const { blockId, subblockId, value, userId } = data
if (isApplyingRemoteChange.current) return
// Filter broadcasts by workflowId to prevent cross-workflow updates
if (workflowId && workflowId !== activeWorkflowId) {
logger.debug('Ignoring subblock update for different workflow', {
broadcastWorkflowId: workflowId,
activeWorkflowId,
})
return
}
logger.info(`Received subblock update from user ${userId}: ${blockId}.${subblockId}`)
isApplyingRemoteChange.current = true
try {
// The setValue function automatically uses the active workflow ID
useSubBlockStore.getState().setValue(blockId, subblockId, value)
const blockType = useWorkflowStore.getState().blocks?.[blockId]?.type
if (activeWorkflowId && blockType === 'function' && subblockId === 'code') {
@@ -486,19 +459,10 @@ export function useCollaborativeWorkflow() {
}
const handleVariableUpdate = (data: any) => {
const { workflowId, variableId, field, value, userId } = data
const { variableId, field, value, userId } = data
if (isApplyingRemoteChange.current) return
// Filter broadcasts by workflowId to prevent cross-workflow updates
if (workflowId && workflowId !== activeWorkflowId) {
logger.debug('Ignoring variable update for different workflow', {
broadcastWorkflowId: workflowId,
activeWorkflowId,
})
return
}
logger.info(`Received variable update from user ${userId}: ${variableId}.${field}`)
isApplyingRemoteChange.current = true
@@ -659,9 +623,13 @@ export function useCollaborativeWorkflow() {
return
}
// Queue operations if we have an active workflow - queue handles socket readiness
if (!activeWorkflowId) {
logger.debug('Skipping operation - no active workflow', { operation, target })
if (!isInActiveRoom()) {
logger.debug('Skipping operation - not in active workflow', {
currentWorkflowId,
activeWorkflowId,
operation,
target,
})
return
}
@@ -674,13 +642,20 @@ export function useCollaborativeWorkflow() {
target,
payload,
},
workflowId: activeWorkflowId,
workflowId: activeWorkflowId || '',
userId: session?.user?.id || 'unknown',
})
localAction()
},
[addToQueue, session?.user?.id, isBaselineDiffView, activeWorkflowId]
[
addToQueue,
session?.user?.id,
isBaselineDiffView,
activeWorkflowId,
isInActiveRoom,
currentWorkflowId,
]
)
const collaborativeBatchUpdatePositions = useCallback(
@@ -694,8 +669,8 @@ export function useCollaborativeWorkflow() {
return
}
if (!activeWorkflowId) {
logger.debug('Skipping batch position update - no active workflow')
if (!isInActiveRoom()) {
logger.debug('Skipping batch position update - not in active workflow')
return
}
@@ -739,7 +714,7 @@ export function useCollaborativeWorkflow() {
}
}
},
[isBaselineDiffView, addToQueue, activeWorkflowId, session?.user?.id, undoRedo]
[isBaselineDiffView, addToQueue, activeWorkflowId, session?.user?.id, isInActiveRoom, undoRedo]
)
const collaborativeUpdateBlockName = useCallback(
@@ -837,27 +812,14 @@ export function useCollaborativeWorkflow() {
if (ids.length === 0) return
const currentBlocks = useWorkflowStore.getState().blocks
const previousStates: Record<string, boolean> = {}
const validIds: string[] = []
// For each ID, collect non-locked blocks and their children for undo/redo
for (const id of ids) {
const block = currentBlocks[id]
if (!block) continue
// Skip locked blocks
if (block.locked) continue
validIds.push(id)
previousStates[id] = block.enabled
// If it's a loop or parallel, also capture children's previous states for undo/redo
if (block.type === 'loop' || block.type === 'parallel') {
Object.entries(currentBlocks).forEach(([blockId, b]) => {
if (b.data?.parentId === id && !b.locked) {
previousStates[blockId] = b.enabled
}
})
const block = useWorkflowStore.getState().blocks[id]
if (block) {
previousStates[id] = block.enabled
validIds.push(id)
}
}
@@ -896,8 +858,8 @@ export function useCollaborativeWorkflow() {
return
}
if (!activeWorkflowId) {
logger.debug('Skipping batch update parent - no active workflow')
if (!isInActiveRoom()) {
logger.debug('Skipping batch update parent - not in active workflow')
return
}
@@ -966,7 +928,7 @@ export function useCollaborativeWorkflow() {
logger.debug('Batch updated parent for blocks', { updateCount: updates.length })
},
[isBaselineDiffView, undoRedo, addToQueue, activeWorkflowId, session?.user?.id]
[isBaselineDiffView, isInActiveRoom, undoRedo, addToQueue, activeWorkflowId, session?.user?.id]
)
const collaborativeToggleBlockAdvancedMode = useCallback(
@@ -1019,25 +981,12 @@ export function useCollaborativeWorkflow() {
if (ids.length === 0) return
const blocks = useWorkflowStore.getState().blocks
// Helper to check if a block is protected (locked or inside locked parent)
const isProtected = (blockId: string): boolean => {
const block = blocks[blockId]
if (!block) return false
if (block.locked) return true
const parentId = block.data?.parentId
if (parentId && blocks[parentId]?.locked) return true
return false
}
const previousStates: Record<string, boolean> = {}
const validIds: string[] = []
for (const id of ids) {
const block = blocks[id]
// Skip locked blocks and blocks inside locked containers
if (block && !isProtected(id)) {
const block = useWorkflowStore.getState().blocks[id]
if (block) {
previousStates[id] = block.horizontalHandles ?? false
validIds.push(id)
}
@@ -1065,66 +1014,14 @@ export function useCollaborativeWorkflow() {
[isBaselineDiffView, addToQueue, activeWorkflowId, session?.user?.id, undoRedo]
)
const collaborativeBatchToggleLocked = useCallback(
(ids: string[]) => {
if (isBaselineDiffView) {
return
}
if (ids.length === 0) return
const currentBlocks = useWorkflowStore.getState().blocks
const previousStates: Record<string, boolean> = {}
const validIds: string[] = []
// For each ID, collect blocks and their children for undo/redo
for (const id of ids) {
const block = currentBlocks[id]
if (!block) continue
validIds.push(id)
previousStates[id] = block.locked ?? false
// If it's a loop or parallel, also capture children's previous states for undo/redo
if (block.type === 'loop' || block.type === 'parallel') {
Object.entries(currentBlocks).forEach(([blockId, b]) => {
if (b.data?.parentId === id) {
previousStates[blockId] = b.locked ?? false
}
})
}
}
if (validIds.length === 0) return
const operationId = crypto.randomUUID()
addToQueue({
id: operationId,
operation: {
operation: BLOCKS_OPERATIONS.BATCH_TOGGLE_LOCKED,
target: OPERATION_TARGETS.BLOCKS,
payload: { blockIds: validIds, previousStates },
},
workflowId: activeWorkflowId || '',
userId: session?.user?.id || 'unknown',
})
useWorkflowStore.getState().batchToggleLocked(validIds)
undoRedo.recordBatchToggleLocked(validIds, previousStates)
},
[isBaselineDiffView, addToQueue, activeWorkflowId, session?.user?.id, undoRedo]
)
const collaborativeBatchAddEdges = useCallback(
(edges: Edge[], options?: { skipUndoRedo?: boolean }) => {
if (isBaselineDiffView) {
return false
}
if (!activeWorkflowId) {
logger.debug('Skipping batch add edges - no active workflow')
if (!isInActiveRoom()) {
logger.debug('Skipping batch add edges - not in active workflow')
return false
}
@@ -1158,7 +1055,7 @@ export function useCollaborativeWorkflow() {
return true
},
[isBaselineDiffView, addToQueue, activeWorkflowId, session?.user?.id, undoRedo]
[isBaselineDiffView, addToQueue, activeWorkflowId, session?.user?.id, isInActiveRoom, undoRedo]
)
const collaborativeBatchRemoveEdges = useCallback(
@@ -1167,8 +1064,8 @@ export function useCollaborativeWorkflow() {
return false
}
if (!activeWorkflowId) {
logger.debug('Skipping batch remove edges - no active workflow')
if (!isInActiveRoom()) {
logger.debug('Skipping batch remove edges - not in active workflow')
return false
}
@@ -1216,7 +1113,7 @@ export function useCollaborativeWorkflow() {
logger.info('Batch removed edges', { count: validEdgeIds.length })
return true
},
[isBaselineDiffView, addToQueue, activeWorkflowId, session, undoRedo]
[isBaselineDiffView, isInActiveRoom, addToQueue, activeWorkflowId, session, undoRedo]
)
const collaborativeSetSubblockValue = useCallback(
@@ -1251,9 +1148,11 @@ export function useCollaborativeWorkflow() {
// Best-effort; do not block on clearing
}
// Queue socket operation if we have an active workflow
if (!activeWorkflowId) {
logger.debug('Local update applied, skipping socket queue - no active workflow', {
// Only emit to socket if in active room
if (!isInActiveRoom()) {
logger.debug('Local update applied, skipping socket emit - not in active workflow', {
currentWorkflowId,
activeWorkflowId,
blockId,
subblockId,
})
@@ -1275,7 +1174,14 @@ export function useCollaborativeWorkflow() {
userId: session?.user?.id || 'unknown',
})
},
[activeWorkflowId, addToQueue, session?.user?.id, isBaselineDiffView]
[
currentWorkflowId,
activeWorkflowId,
addToQueue,
session?.user?.id,
isBaselineDiffView,
isInActiveRoom,
]
)
// Immediate tag selection (uses queue but processes immediately, no debouncing)
@@ -1287,8 +1193,13 @@ export function useCollaborativeWorkflow() {
return
}
if (!activeWorkflowId) {
logger.debug('Skipping tag selection - no active workflow', { blockId, subblockId })
if (!isInActiveRoom()) {
logger.debug('Skipping tag selection - not in active workflow', {
currentWorkflowId,
activeWorkflowId,
blockId,
subblockId,
})
return
}
@@ -1309,7 +1220,14 @@ export function useCollaborativeWorkflow() {
userId: session?.user?.id || 'unknown',
})
},
[isBaselineDiffView, addToQueue, activeWorkflowId, session?.user?.id]
[
isBaselineDiffView,
addToQueue,
currentWorkflowId,
activeWorkflowId,
session?.user?.id,
isInActiveRoom,
]
)
const collaborativeUpdateLoopType = useCallback(
@@ -1596,8 +1514,8 @@ export function useCollaborativeWorkflow() {
subBlockValues: Record<string, Record<string, unknown>> = {},
options?: { skipUndoRedo?: boolean }
) => {
if (!activeWorkflowId) {
logger.debug('Skipping batch add blocks - no active workflow')
if (!isInActiveRoom()) {
logger.debug('Skipping batch add blocks - not in active workflow')
return false
}
@@ -1650,7 +1568,7 @@ export function useCollaborativeWorkflow() {
return true
},
[addToQueue, activeWorkflowId, session?.user?.id, isBaselineDiffView, undoRedo]
[addToQueue, activeWorkflowId, session?.user?.id, isBaselineDiffView, isInActiveRoom, undoRedo]
)
const collaborativeBatchRemoveBlocks = useCallback(
@@ -1659,8 +1577,8 @@ export function useCollaborativeWorkflow() {
return false
}
if (!activeWorkflowId) {
logger.debug('Skipping batch remove blocks - no active workflow')
if (!isInActiveRoom()) {
logger.debug('Skipping batch remove blocks - not in active workflow')
return false
}
@@ -1744,6 +1662,7 @@ export function useCollaborativeWorkflow() {
addToQueue,
activeWorkflowId,
session?.user?.id,
isInActiveRoom,
cancelOperationsForBlock,
undoRedo,
]
@@ -1761,7 +1680,6 @@ export function useCollaborativeWorkflow() {
collaborativeToggleBlockAdvancedMode,
collaborativeSetBlockCanonicalMode,
collaborativeBatchToggleBlockHandles,
collaborativeBatchToggleLocked,
collaborativeBatchAddBlocks,
collaborativeBatchRemoveBlocks,
collaborativeBatchAddEdges,

View File

@@ -20,7 +20,6 @@ import {
type BatchRemoveEdgesOperation,
type BatchToggleEnabledOperation,
type BatchToggleHandlesOperation,
type BatchToggleLockedOperation,
type BatchUpdateParentOperation,
captureLatestEdges,
captureLatestSubBlockValues,
@@ -30,6 +29,7 @@ import {
useUndoRedoStore,
} from '@/stores/undo-redo'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
import type { BlockState } from '@/stores/workflows/workflow/types'
@@ -416,36 +416,6 @@ export function useUndoRedo() {
[activeWorkflowId, userId]
)
const recordBatchToggleLocked = useCallback(
(blockIds: string[], previousStates: Record<string, boolean>) => {
if (!activeWorkflowId || blockIds.length === 0) return
const operation: BatchToggleLockedOperation = {
id: crypto.randomUUID(),
type: UNDO_REDO_OPERATIONS.BATCH_TOGGLE_LOCKED,
timestamp: Date.now(),
workflowId: activeWorkflowId,
userId,
data: { blockIds, previousStates },
}
const inverse: BatchToggleLockedOperation = {
id: crypto.randomUUID(),
type: UNDO_REDO_OPERATIONS.BATCH_TOGGLE_LOCKED,
timestamp: Date.now(),
workflowId: activeWorkflowId,
userId,
data: { blockIds, previousStates },
}
const entry = createOperationEntry(operation, inverse)
useUndoRedoStore.getState().push(activeWorkflowId, userId, entry)
logger.debug('Recorded batch toggle locked', { blockIds, previousStates })
},
[activeWorkflowId, userId]
)
const undo = useCallback(async () => {
if (!activeWorkflowId) return
@@ -534,9 +504,47 @@ export function useUndoRedo() {
userId,
})
useWorkflowStore
.getState()
.batchAddBlocks(blocksToAdd, edgeSnapshots || [], subBlockValues || {})
blocksToAdd.forEach((block) => {
useWorkflowStore
.getState()
.addBlock(
block.id,
block.type,
block.name,
block.position,
block.data,
block.data?.parentId,
block.data?.extent,
{
enabled: block.enabled,
horizontalHandles: block.horizontalHandles,
advancedMode: block.advancedMode,
triggerMode: block.triggerMode,
height: block.height,
}
)
})
if (subBlockValues && Object.keys(subBlockValues).length > 0) {
useSubBlockStore.setState((state) => ({
workflowValues: {
...state.workflowValues,
[activeWorkflowId]: {
...state.workflowValues[activeWorkflowId],
...subBlockValues,
},
},
}))
}
if (edgeSnapshots && edgeSnapshots.length > 0) {
const edgesToAdd = edgeSnapshots.filter(
(edge) => !useWorkflowStore.getState().edges.find((e) => e.id === edge.id)
)
if (edgesToAdd.length > 0) {
useWorkflowStore.getState().batchAddEdges(edgesToAdd)
}
}
break
}
case UNDO_REDO_OPERATIONS.BATCH_REMOVE_EDGES: {
@@ -808,9 +816,7 @@ export function useUndoRedo() {
const toggleOp = entry.inverse as BatchToggleEnabledOperation
const { blockIds, previousStates } = toggleOp.data
// Restore all blocks in previousStates (includes children of containers)
const allBlockIds = Object.keys(previousStates)
const validBlockIds = allBlockIds.filter((id) => useWorkflowStore.getState().blocks[id])
const validBlockIds = blockIds.filter((id) => useWorkflowStore.getState().blocks[id])
if (validBlockIds.length === 0) {
logger.debug('Undo batch-toggle-enabled skipped; no blocks exist')
break
@@ -821,14 +827,14 @@ export function useUndoRedo() {
operation: {
operation: BLOCKS_OPERATIONS.BATCH_TOGGLE_ENABLED,
target: OPERATION_TARGETS.BLOCKS,
payload: { blockIds, previousStates },
payload: { blockIds: validBlockIds, previousStates },
},
workflowId: activeWorkflowId,
userId,
})
// Use setBlockEnabled to directly restore to previous state
// This restores all affected blocks including children of containers
// This is more robust than conditional toggle in collaborative scenarios
validBlockIds.forEach((blockId) => {
useWorkflowStore.getState().setBlockEnabled(blockId, previousStates[blockId])
})
@@ -862,36 +868,6 @@ export function useUndoRedo() {
})
break
}
case UNDO_REDO_OPERATIONS.BATCH_TOGGLE_LOCKED: {
const toggleOp = entry.inverse as BatchToggleLockedOperation
const { blockIds, previousStates } = toggleOp.data
// Restore all blocks in previousStates (includes children of containers)
const allBlockIds = Object.keys(previousStates)
const validBlockIds = allBlockIds.filter((id) => useWorkflowStore.getState().blocks[id])
if (validBlockIds.length === 0) {
logger.debug('Undo batch-toggle-locked skipped; no blocks exist')
break
}
addToQueue({
id: opId,
operation: {
operation: BLOCKS_OPERATIONS.BATCH_TOGGLE_LOCKED,
target: OPERATION_TARGETS.BLOCKS,
payload: { blockIds, previousStates },
},
workflowId: activeWorkflowId,
userId,
})
// Use setBlockLocked to directly restore to previous state
// This restores all affected blocks including children of containers
validBlockIds.forEach((blockId) => {
useWorkflowStore.getState().setBlockLocked(blockId, previousStates[blockId])
})
break
}
case UNDO_REDO_OPERATIONS.APPLY_DIFF: {
const applyDiffInverse = entry.inverse as any
const { baselineSnapshot } = applyDiffInverse.data
@@ -1109,9 +1085,47 @@ export function useUndoRedo() {
userId,
})
useWorkflowStore
.getState()
.batchAddBlocks(blocksToAdd, edgeSnapshots || [], subBlockValues || {})
blocksToAdd.forEach((block) => {
useWorkflowStore
.getState()
.addBlock(
block.id,
block.type,
block.name,
block.position,
block.data,
block.data?.parentId,
block.data?.extent,
{
enabled: block.enabled,
horizontalHandles: block.horizontalHandles,
advancedMode: block.advancedMode,
triggerMode: block.triggerMode,
height: block.height,
}
)
})
if (subBlockValues && Object.keys(subBlockValues).length > 0) {
useSubBlockStore.setState((state) => ({
workflowValues: {
...state.workflowValues,
[activeWorkflowId]: {
...state.workflowValues[activeWorkflowId],
...subBlockValues,
},
},
}))
}
if (edgeSnapshots && edgeSnapshots.length > 0) {
const edgesToAdd = edgeSnapshots.filter(
(edge) => !useWorkflowStore.getState().edges.find((e) => e.id === edge.id)
)
if (edgesToAdd.length > 0) {
useWorkflowStore.getState().batchAddEdges(edgesToAdd)
}
}
break
}
case UNDO_REDO_OPERATIONS.BATCH_REMOVE_BLOCKS: {
@@ -1428,9 +1442,7 @@ export function useUndoRedo() {
const toggleOp = entry.operation as BatchToggleEnabledOperation
const { blockIds, previousStates } = toggleOp.data
// Process all blocks in previousStates (includes children of containers)
const allBlockIds = Object.keys(previousStates)
const validBlockIds = allBlockIds.filter((id) => useWorkflowStore.getState().blocks[id])
const validBlockIds = blockIds.filter((id) => useWorkflowStore.getState().blocks[id])
if (validBlockIds.length === 0) {
logger.debug('Redo batch-toggle-enabled skipped; no blocks exist')
break
@@ -1441,18 +1453,16 @@ export function useUndoRedo() {
operation: {
operation: BLOCKS_OPERATIONS.BATCH_TOGGLE_ENABLED,
target: OPERATION_TARGETS.BLOCKS,
payload: { blockIds, previousStates },
payload: { blockIds: validBlockIds, previousStates },
},
workflowId: activeWorkflowId,
userId,
})
// Compute target state the same way batchToggleEnabled does:
// use !firstBlock.enabled, where firstBlock is blockIds[0]
const firstBlockId = blockIds[0]
const targetEnabled = !previousStates[firstBlockId]
// Use setBlockEnabled to directly set to toggled state
// Redo sets to !previousStates (the state after the original toggle)
validBlockIds.forEach((blockId) => {
useWorkflowStore.getState().setBlockEnabled(blockId, targetEnabled)
useWorkflowStore.getState().setBlockEnabled(blockId, !previousStates[blockId])
})
break
}
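The hunk above swaps how redo derives the target enabled state: one convention applies a single target computed from the first block (matching how batchToggleEnabled itself toggles), the other flips each block relative to its own recorded prior state. A quick worked example with hypothetical states:
// previousStates = { a: true, b: false }  (a was enabled, b was disabled before the original toggle)
// single target from the first block: targetEnabled = !previousStates['a'] = false -> both a and b end up disabled
// per-block negation: a -> !true = false, b -> !false = true -> each block flips relative to its own prior state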
@@ -1484,38 +1494,6 @@ export function useUndoRedo() {
})
break
}
case UNDO_REDO_OPERATIONS.BATCH_TOGGLE_LOCKED: {
const toggleOp = entry.operation as BatchToggleLockedOperation
const { blockIds, previousStates } = toggleOp.data
// Process all blocks in previousStates (includes children of containers)
const allBlockIds = Object.keys(previousStates)
const validBlockIds = allBlockIds.filter((id) => useWorkflowStore.getState().blocks[id])
if (validBlockIds.length === 0) {
logger.debug('Redo batch-toggle-locked skipped; no blocks exist')
break
}
addToQueue({
id: opId,
operation: {
operation: BLOCKS_OPERATIONS.BATCH_TOGGLE_LOCKED,
target: OPERATION_TARGETS.BLOCKS,
payload: { blockIds, previousStates },
},
workflowId: activeWorkflowId,
userId,
})
// Compute target state the same way batchToggleLocked does:
// use !firstBlock.locked, where firstBlock is blockIds[0]
const firstBlockId = blockIds[0]
const targetLocked = !previousStates[firstBlockId]
validBlockIds.forEach((blockId) => {
useWorkflowStore.getState().setBlockLocked(blockId, targetLocked)
})
break
}
case UNDO_REDO_OPERATIONS.APPLY_DIFF: {
// Redo apply-diff means re-applying the proposed state with diff markers
const applyDiffOp = entry.operation as any
@@ -1837,7 +1815,6 @@ export function useUndoRedo() {
recordBatchUpdateParent,
recordBatchToggleEnabled,
recordBatchToggleHandles,
recordBatchToggleLocked,
recordApplyDiff,
recordAcceptDiff,
recordRejectDiff,

View File

@@ -54,7 +54,6 @@ type SkippedItemType =
| 'block_not_found'
| 'invalid_block_type'
| 'block_not_allowed'
| 'block_locked'
| 'tool_not_allowed'
| 'invalid_edge_target'
| 'invalid_edge_source'
@@ -619,7 +618,6 @@ function createBlockFromParams(
subBlocks: {},
outputs: outputs,
data: parentId ? { parentId, extent: 'parent' as const } : {},
locked: false,
}
// Add validated inputs as subBlocks
@@ -1522,24 +1520,6 @@ function applyOperationsToWorkflowState(
break
}
// Check if block is locked or inside a locked container
const deleteBlock = modifiedState.blocks[block_id]
const deleteParentId = deleteBlock.data?.parentId as string | undefined
const deleteParentLocked = deleteParentId
? modifiedState.blocks[deleteParentId]?.locked
: false
if (deleteBlock.locked || deleteParentLocked) {
logSkippedItem(skippedItems, {
type: 'block_locked',
operationType: 'delete',
blockId: block_id,
reason: deleteParentLocked
? `Block "${block_id}" is inside locked container "${deleteParentId}" and cannot be deleted`
: `Block "${block_id}" is locked and cannot be deleted`,
})
break
}
// Find all child blocks to remove
const blocksToRemove = new Set<string>([block_id])
const findChildren = (parentId: string) => {
@@ -1575,21 +1555,6 @@ function applyOperationsToWorkflowState(
const block = modifiedState.blocks[block_id]
// Check if block is locked or inside a locked container
const editParentId = block.data?.parentId as string | undefined
const editParentLocked = editParentId ? modifiedState.blocks[editParentId]?.locked : false
if (block.locked || editParentLocked) {
logSkippedItem(skippedItems, {
type: 'block_locked',
operationType: 'edit',
blockId: block_id,
reason: editParentLocked
? `Block "${block_id}" is inside locked container "${editParentId}" and cannot be edited`
: `Block "${block_id}" is locked and cannot be edited`,
})
break
}
// Ensure block has essential properties
if (!block.type) {
logger.warn(`Block ${block_id} missing type property, skipping edit`, {
@@ -2157,19 +2122,6 @@ function applyOperationsToWorkflowState(
// Handle nested nodes (for loops/parallels created from scratch)
if (params.nestedNodes) {
// Defensive check: verify parent is not locked before adding children
// (Parent was just created with locked: false, but check for consistency)
const parentBlock = modifiedState.blocks[block_id]
if (parentBlock?.locked) {
logSkippedItem(skippedItems, {
type: 'block_locked',
operationType: 'add_nested_nodes',
blockId: block_id,
reason: `Container "${block_id}" is locked - cannot add nested nodes`,
})
break
}
Object.entries(params.nestedNodes).forEach(([childId, childBlock]: [string, any]) => {
// Validate childId is a valid string
if (!isValidKey(childId)) {
@@ -2257,18 +2209,6 @@ function applyOperationsToWorkflowState(
break
}
// Check if subflow is locked
if (subflowBlock.locked) {
logSkippedItem(skippedItems, {
type: 'block_locked',
operationType: 'insert_into_subflow',
blockId: block_id,
reason: `Subflow "${subflowId}" is locked - cannot insert block "${block_id}"`,
details: { subflowId },
})
break
}
if (subflowBlock.type !== 'loop' && subflowBlock.type !== 'parallel') {
logger.error('Subflow block has invalid type', {
subflowId,
@@ -2307,17 +2247,6 @@ function applyOperationsToWorkflowState(
break
}
// Check if existing block is locked
if (existingBlock.locked) {
logSkippedItem(skippedItems, {
type: 'block_locked',
operationType: 'insert_into_subflow',
blockId: block_id,
reason: `Block "${block_id}" is locked and cannot be moved into a subflow`,
})
break
}
// Moving existing block into subflow - just update parent
existingBlock.data = {
...existingBlock.data,
@@ -2463,30 +2392,6 @@ function applyOperationsToWorkflowState(
break
}
// Check if block is locked
if (block.locked) {
logSkippedItem(skippedItems, {
type: 'block_locked',
operationType: 'extract_from_subflow',
blockId: block_id,
reason: `Block "${block_id}" is locked and cannot be extracted from subflow`,
})
break
}
// Check if parent subflow is locked
const parentSubflow = modifiedState.blocks[subflowId]
if (parentSubflow?.locked) {
logSkippedItem(skippedItems, {
type: 'block_locked',
operationType: 'extract_from_subflow',
blockId: block_id,
reason: `Subflow "${subflowId}" is locked - cannot extract block "${block_id}"`,
details: { subflowId },
})
break
}
// Verify it's actually a child of this subflow
if (block.data?.parentId !== subflowId) {
logger.warn('Block is not a child of specified subflow', {

View File

@@ -161,14 +161,14 @@ export const env = createEnv({
// Rate Limiting Configuration
RATE_LIMIT_WINDOW_MS: z.string().optional().default('60000'), // Rate limit window duration in milliseconds (default: 1 minute)
MANUAL_EXECUTION_LIMIT: z.string().optional().default('999999'), // Manual execution bypass value (effectively unlimited)
RATE_LIMIT_FREE_SYNC: z.string().optional().default('50'), // Free tier sync API executions per minute
RATE_LIMIT_FREE_ASYNC: z.string().optional().default('200'), // Free tier async API executions per minute
RATE_LIMIT_PRO_SYNC: z.string().optional().default('150'), // Pro tier sync API executions per minute
RATE_LIMIT_PRO_ASYNC: z.string().optional().default('1000'), // Pro tier async API executions per minute
RATE_LIMIT_TEAM_SYNC: z.string().optional().default('300'), // Team tier sync API executions per minute
RATE_LIMIT_TEAM_ASYNC: z.string().optional().default('2500'), // Team tier async API executions per minute
RATE_LIMIT_ENTERPRISE_SYNC: z.string().optional().default('600'), // Enterprise tier sync API executions per minute
RATE_LIMIT_ENTERPRISE_ASYNC: z.string().optional().default('5000'), // Enterprise tier async API executions per minute
RATE_LIMIT_FREE_SYNC: z.string().optional().default('10'), // Free tier sync API executions per minute
RATE_LIMIT_FREE_ASYNC: z.string().optional().default('50'), // Free tier async API executions per minute
RATE_LIMIT_PRO_SYNC: z.string().optional().default('25'), // Pro tier sync API executions per minute
RATE_LIMIT_PRO_ASYNC: z.string().optional().default('200'), // Pro tier async API executions per minute
RATE_LIMIT_TEAM_SYNC: z.string().optional().default('75'), // Team tier sync API executions per minute
RATE_LIMIT_TEAM_ASYNC: z.string().optional().default('500'), // Team tier async API executions per minute
RATE_LIMIT_ENTERPRISE_SYNC: z.string().optional().default('150'), // Enterprise tier sync API executions per minute
RATE_LIMIT_ENTERPRISE_ASYNC: z.string().optional().default('1000'), // Enterprise tier async API executions per minute
// Knowledge Base Processing Configuration - Shared across all processing methods
KB_CONFIG_MAX_DURATION: z.number().optional().default(600), // Max processing duration in seconds (10 minutes)
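The rate-limit defaults above are declared as string environment variables, so consumers parse them to numbers and fall back to a hard-coded default, as the createBucketConfig calls in the next file do with Number.parseInt(...) || fallback. A minimal sketch of that parse (the wrapper name is illustrative; note the || form also treats an explicit 0 as unset):
// Illustrative parse of a rate-limit env var, mirroring Number.parseInt(env.RATE_LIMIT_FREE_SYNC) || 10.
function parseRateLimit(raw: string | undefined, fallback: number): number {
  const parsed = Number.parseInt(raw ?? '', 10)
  return Number.isNaN(parsed) || parsed === 0 ? fallback : parsed
}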

View File

@@ -28,24 +28,24 @@ function createBucketConfig(ratePerMinute: number, burstMultiplier = 2): TokenBu
export const RATE_LIMITS: Record<SubscriptionPlan, RateLimitConfig> = {
free: {
sync: createBucketConfig(Number.parseInt(env.RATE_LIMIT_FREE_SYNC) || 50),
async: createBucketConfig(Number.parseInt(env.RATE_LIMIT_FREE_ASYNC) || 200),
apiEndpoint: createBucketConfig(30),
sync: createBucketConfig(Number.parseInt(env.RATE_LIMIT_FREE_SYNC) || 10),
async: createBucketConfig(Number.parseInt(env.RATE_LIMIT_FREE_ASYNC) || 50),
apiEndpoint: createBucketConfig(10),
},
pro: {
sync: createBucketConfig(Number.parseInt(env.RATE_LIMIT_PRO_SYNC) || 150),
async: createBucketConfig(Number.parseInt(env.RATE_LIMIT_PRO_ASYNC) || 1000),
apiEndpoint: createBucketConfig(100),
sync: createBucketConfig(Number.parseInt(env.RATE_LIMIT_PRO_SYNC) || 25),
async: createBucketConfig(Number.parseInt(env.RATE_LIMIT_PRO_ASYNC) || 200),
apiEndpoint: createBucketConfig(30),
},
team: {
sync: createBucketConfig(Number.parseInt(env.RATE_LIMIT_TEAM_SYNC) || 300),
async: createBucketConfig(Number.parseInt(env.RATE_LIMIT_TEAM_ASYNC) || 2500),
apiEndpoint: createBucketConfig(200),
sync: createBucketConfig(Number.parseInt(env.RATE_LIMIT_TEAM_SYNC) || 75),
async: createBucketConfig(Number.parseInt(env.RATE_LIMIT_TEAM_ASYNC) || 500),
apiEndpoint: createBucketConfig(60),
},
enterprise: {
sync: createBucketConfig(Number.parseInt(env.RATE_LIMIT_ENTERPRISE_SYNC) || 600),
async: createBucketConfig(Number.parseInt(env.RATE_LIMIT_ENTERPRISE_ASYNC) || 5000),
apiEndpoint: createBucketConfig(500),
sync: createBucketConfig(Number.parseInt(env.RATE_LIMIT_ENTERPRISE_SYNC) || 150),
async: createBucketConfig(Number.parseInt(env.RATE_LIMIT_ENTERPRISE_ASYNC) || 1000),
apiEndpoint: createBucketConfig(120),
},
}
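createBucketConfig(ratePerMinute, burstMultiplier = 2) is not included in this compare, so the shape below is only an assumption: a token bucket whose burst capacity is the per-minute rate times the multiplier, refilled at the steady per-minute rate.
// Assumed shape only - the real TokenBucketConfig is not visible in this diff.
interface TokenBucketConfigSketch {
  capacity: number        // burst ceiling, assumed ratePerMinute * burstMultiplier
  refillPerMinute: number // steady-state refill rate
}
function createBucketConfigSketch(ratePerMinute: number, burstMultiplier = 2): TokenBucketConfigSketch {
  return { capacity: ratePerMinute * burstMultiplier, refillPerMinute: ratePerMinute }
}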

View File

@@ -199,11 +199,10 @@ export class McpClient {
protocolVersion: this.getNegotiatedVersion(),
})
const sdkResult = await this.client.callTool(
{ name: toolCall.name, arguments: toolCall.arguments },
undefined,
{ timeout: 600000 } // 10 minutes - override SDK's 60s default
)
const sdkResult = await this.client.callTool({
name: toolCall.name,
arguments: toolCall.arguments,
})
return sdkResult as McpToolResult
} catch (error) {
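For reference, the three-argument form in this hunk passes per-request options to the MCP SDK client, overriding its default request timeout (the inline comment cites a 60s default); the two-argument form leaves that default in place. The same call, annotated:
// const sdkResult = await this.client.callTool(
//   { name: toolCall.name, arguments: toolCall.arguments }, // tool invocation
//   undefined,                                              // result schema: keep the SDK default
//   { timeout: 600000 }                                     // request options: 10 minutes instead of 60s
// )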

View File

@@ -296,26 +296,6 @@ describe('hasWorkflowChanged', () => {
})
expect(hasWorkflowChanged(state1, state2)).toBe(true)
})
it.concurrent('should detect locked/unlocked changes', () => {
const state1 = createWorkflowState({
blocks: { block1: createBlock('block1', { locked: false }) },
})
const state2 = createWorkflowState({
blocks: { block1: createBlock('block1', { locked: true }) },
})
expect(hasWorkflowChanged(state1, state2)).toBe(true)
})
it.concurrent('should not detect changes when locked state is the same', () => {
const state1 = createWorkflowState({
blocks: { block1: createBlock('block1', { locked: true }) },
})
const state2 = createWorkflowState({
blocks: { block1: createBlock('block1', { locked: true }) },
})
expect(hasWorkflowChanged(state1, state2)).toBe(false)
})
})
describe('SubBlock Changes', () => {

View File

@@ -157,7 +157,7 @@ export function generateWorkflowDiffSummary(
}
// Check other block properties (boolean fields)
// Use !! to normalize: null/undefined/false are all equivalent (falsy)
const blockFields = ['horizontalHandles', 'advancedMode', 'triggerMode', 'locked'] as const
const blockFields = ['horizontalHandles', 'advancedMode', 'triggerMode'] as const
for (const field of blockFields) {
if (!!currentBlock[field] !== !!previousBlock[field]) {
changes.push({

View File

@@ -100,7 +100,6 @@ function buildStartBlockState(
triggerMode: false,
height: 0,
data: {},
locked: false,
}
return { blockState, subBlockValues }

View File

@@ -1,173 +0,0 @@
/**
* @vitest-environment node
*/
import { beforeEach, describe, expect, it, vi } from 'vitest'
import type { BlockState, WorkflowState } from '@/stores/workflows/workflow/types'
// Mock all external dependencies before imports
vi.mock('@sim/logger', () => ({
createLogger: () => ({
info: vi.fn(),
warn: vi.fn(),
error: vi.fn(),
debug: vi.fn(),
}),
}))
vi.mock('@/stores/workflows/workflow/store', () => ({
useWorkflowStore: {
getState: () => ({
getWorkflowState: () => ({ blocks: {}, edges: [], loops: {}, parallels: {} }),
}),
},
}))
vi.mock('@/stores/workflows/utils', () => ({
mergeSubblockState: (blocks: Record<string, BlockState>) => blocks,
}))
vi.mock('@/lib/workflows/sanitization/key-validation', () => ({
isValidKey: (key: string) => key !== 'undefined' && key !== 'null' && key !== '',
}))
vi.mock('@/lib/workflows/autolayout', () => ({
transferBlockHeights: vi.fn(),
applyTargetedLayout: (blocks: Record<string, BlockState>) => blocks,
applyAutoLayout: () => ({ success: true, blocks: {} }),
}))
vi.mock('@/lib/workflows/autolayout/constants', () => ({
DEFAULT_HORIZONTAL_SPACING: 500,
DEFAULT_VERTICAL_SPACING: 400,
DEFAULT_LAYOUT_OPTIONS: {},
}))
vi.mock('@/stores/workflows/workflow/utils', () => ({
generateLoopBlocks: () => ({}),
generateParallelBlocks: () => ({}),
}))
import { WorkflowDiffEngine } from './diff-engine'
function createMockBlock(overrides: Partial<BlockState> = {}): BlockState {
return {
id: 'block-1',
type: 'agent',
name: 'Test Block',
enabled: true,
position: { x: 0, y: 0 },
subBlocks: {},
outputs: {},
...overrides,
} as BlockState
}
function createMockWorkflowState(blocks: Record<string, BlockState>): WorkflowState {
return {
blocks,
edges: [],
loops: {},
parallels: {},
}
}
describe('WorkflowDiffEngine', () => {
let engine: WorkflowDiffEngine
beforeEach(() => {
engine = new WorkflowDiffEngine()
vi.clearAllMocks()
})
describe('hasBlockChanged detection', () => {
describe('locked state changes', () => {
it.concurrent(
'should detect when block locked state changes from false to true',
async () => {
const freshEngine = new WorkflowDiffEngine()
const baseline = createMockWorkflowState({
'block-1': createMockBlock({ id: 'block-1', locked: false }),
})
const proposed = createMockWorkflowState({
'block-1': createMockBlock({ id: 'block-1', locked: true }),
})
const result = await freshEngine.createDiffFromWorkflowState(
proposed,
undefined,
baseline
)
expect(result.success).toBe(true)
expect(result.diff?.diffAnalysis?.edited_blocks).toContain('block-1')
}
)
it.concurrent('should not detect change when locked state is the same', async () => {
const freshEngine = new WorkflowDiffEngine()
const baseline = createMockWorkflowState({
'block-1': createMockBlock({ id: 'block-1', locked: true }),
})
const proposed = createMockWorkflowState({
'block-1': createMockBlock({ id: 'block-1', locked: true }),
})
const result = await freshEngine.createDiffFromWorkflowState(proposed, undefined, baseline)
expect(result.success).toBe(true)
expect(result.diff?.diffAnalysis?.edited_blocks).not.toContain('block-1')
})
it.concurrent('should detect change when locked goes from undefined to true', async () => {
const freshEngine = new WorkflowDiffEngine()
const baseline = createMockWorkflowState({
'block-1': createMockBlock({ id: 'block-1' }), // locked undefined
})
const proposed = createMockWorkflowState({
'block-1': createMockBlock({ id: 'block-1', locked: true }),
})
const result = await freshEngine.createDiffFromWorkflowState(proposed, undefined, baseline)
expect(result.success).toBe(true)
// The hasBlockChanged function uses !!locked for comparison
// so undefined -> true should be detected as a change
expect(result.diff?.diffAnalysis?.edited_blocks).toContain('block-1')
})
it.concurrent('should not detect change when both locked states are falsy', async () => {
const freshEngine = new WorkflowDiffEngine()
const baseline = createMockWorkflowState({
'block-1': createMockBlock({ id: 'block-1' }), // locked undefined
})
const proposed = createMockWorkflowState({
'block-1': createMockBlock({ id: 'block-1', locked: false }), // locked false
})
const result = await freshEngine.createDiffFromWorkflowState(proposed, undefined, baseline)
expect(result.success).toBe(true)
// undefined and false should both be falsy, so !! comparison makes them equal
expect(result.diff?.diffAnalysis?.edited_blocks).not.toContain('block-1')
})
})
})
describe('diff lifecycle', () => {
it.concurrent('should start with no diff', () => {
const freshEngine = new WorkflowDiffEngine()
expect(freshEngine.hasDiff()).toBe(false)
expect(freshEngine.getCurrentDiff()).toBeUndefined()
})
it.concurrent('should clear diff', () => {
const freshEngine = new WorkflowDiffEngine()
freshEngine.clearDiff()
expect(freshEngine.hasDiff()).toBe(false)
})
})
})

View File

@@ -215,7 +215,6 @@ function hasBlockChanged(currentBlock: BlockState, proposedBlock: BlockState): b
if (currentBlock.name !== proposedBlock.name) return true
if (currentBlock.enabled !== proposedBlock.enabled) return true
if (currentBlock.triggerMode !== proposedBlock.triggerMode) return true
if (!!currentBlock.locked !== !!proposedBlock.locked) return true
// Compare subBlocks
const currentSubKeys = Object.keys(currentBlock.subBlocks || {})
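The !!a !== !!b pattern above (and the removed locked check) normalizes undefined, null, and false to the same value before comparing, so only a genuine boolean flip counts as a change:
// !!undefined !== !!false  -> false: undefined vs. false is not reported as a change
// !!false !== !!true       -> true:  a real flip is reported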

View File

@@ -189,7 +189,6 @@ export async function duplicateWorkflow(
parentId: newParentId,
extent: newExtent,
data: updatedData,
locked: false, // Duplicated blocks should always be unlocked
createdAt: now,
updatedAt: now,
}

View File

@@ -226,7 +226,6 @@ export async function loadWorkflowFromNormalizedTables(
subBlocks: (block.subBlocks as BlockState['subBlocks']) || {},
outputs: (block.outputs as BlockState['outputs']) || {},
data: blockData,
locked: block.locked,
}
blocksMap[block.id] = assembled
@@ -364,7 +363,6 @@ export async function saveWorkflowToNormalizedTables(
data: block.data || {},
parentId: block.data?.parentId || null,
extent: block.data?.extent || null,
locked: block.locked ?? false,
}))
await tx.insert(workflowBlocks).values(blockInserts)
@@ -629,8 +627,7 @@ export function regenerateWorkflowStateIds(state: RegenerateStateInput): Regener
// Regenerate blocks with updated references
Object.entries(state.blocks || {}).forEach(([oldId, block]) => {
const newId = blockIdMapping.get(oldId)!
// Duplicated blocks are always unlocked so users can edit them
const newBlock: BlockState = { ...block, id: newId, locked: false }
const newBlock: BlockState = { ...block, id: newId }
// Update parentId reference if it exists
if (newBlock.data?.parentId) {

View File

@@ -17,7 +17,6 @@ export const BLOCKS_OPERATIONS = {
BATCH_TOGGLE_ENABLED: 'batch-toggle-enabled',
BATCH_TOGGLE_HANDLES: 'batch-toggle-handles',
BATCH_UPDATE_PARENT: 'batch-update-parent',
BATCH_TOGGLE_LOCKED: 'batch-toggle-locked',
} as const
export type BlocksOperation = (typeof BLOCKS_OPERATIONS)[keyof typeof BLOCKS_OPERATIONS]
@@ -86,7 +85,6 @@ export const UNDO_REDO_OPERATIONS = {
BATCH_UPDATE_PARENT: 'batch-update-parent',
BATCH_TOGGLE_ENABLED: 'batch-toggle-enabled',
BATCH_TOGGLE_HANDLES: 'batch-toggle-handles',
BATCH_TOGGLE_LOCKED: 'batch-toggle-locked',
APPLY_DIFF: 'apply-diff',
ACCEPT_DIFF: 'accept-diff',
REJECT_DIFF: 'reject-diff',

View File

@@ -507,37 +507,7 @@ async function handleBlocksOperationTx(
})
if (blocks && blocks.length > 0) {
// Fetch existing blocks to check for locked parents
const existingBlocks = await tx
.select({ id: workflowBlocks.id, locked: workflowBlocks.locked })
.from(workflowBlocks)
.where(eq(workflowBlocks.workflowId, workflowId))
type ExistingBlockRecord = (typeof existingBlocks)[number]
const lockedParentIds = new Set(
existingBlocks
.filter((b: ExistingBlockRecord) => b.locked)
.map((b: ExistingBlockRecord) => b.id)
)
// Filter out blocks being added to locked parents
const allowedBlocks = (blocks as Array<Record<string, unknown>>).filter((block) => {
const parentId = (block.data as Record<string, unknown> | null)?.parentId as
| string
| undefined
if (parentId && lockedParentIds.has(parentId)) {
logger.info(`Skipping block ${block.id} - parent ${parentId} is locked`)
return false
}
return true
})
if (allowedBlocks.length === 0) {
logger.info('All blocks filtered out due to locked parents, skipping add')
break
}
const blockValues = allowedBlocks.map((block: Record<string, unknown>) => {
const blockValues = blocks.map((block: Record<string, unknown>) => {
const blockId = block.id as string
const mergedSubBlocks = mergeSubBlockValues(
block.subBlocks as Record<string, unknown>,
@@ -559,7 +529,6 @@ async function handleBlocksOperationTx(
advancedMode: (block.advancedMode as boolean) ?? false,
triggerMode: (block.triggerMode as boolean) ?? false,
height: (block.height as number) || 0,
locked: (block.locked as boolean) ?? false,
}
})
@@ -568,7 +537,7 @@ async function handleBlocksOperationTx(
// Create subflow entries for loop/parallel blocks (skip if already in payload)
const loopIds = new Set(loops ? Object.keys(loops) : [])
const parallelIds = new Set(parallels ? Object.keys(parallels) : [])
for (const block of allowedBlocks) {
for (const block of blocks) {
const blockId = block.id as string
if (block.type === 'loop' && !loopIds.has(blockId)) {
await tx.insert(workflowSubflows).values({
@@ -597,7 +566,7 @@ async function handleBlocksOperationTx(
// Update parent subflow node lists
const parentIds = new Set<string>()
for (const block of allowedBlocks) {
for (const block of blocks) {
const parentId = (block.data as Record<string, unknown>)?.parentId as string | undefined
if (parentId) {
parentIds.add(parentId)
@@ -655,74 +624,44 @@ async function handleBlocksOperationTx(
logger.info(`Batch removing ${ids.length} blocks from workflow ${workflowId}`)
// Fetch all blocks to check lock status and filter out protected blocks
const allBlocks = await tx
.select({
id: workflowBlocks.id,
type: workflowBlocks.type,
locked: workflowBlocks.locked,
data: workflowBlocks.data,
})
.from(workflowBlocks)
.where(eq(workflowBlocks.workflowId, workflowId))
type BlockRecord = (typeof allBlocks)[number]
const blocksById: Record<string, BlockRecord> = Object.fromEntries(
allBlocks.map((b: BlockRecord) => [b.id, b])
)
// Helper to check if a block is protected (locked or inside locked parent)
const isProtected = (blockId: string): boolean => {
const block = blocksById[blockId]
if (!block) return false
if (block.locked) return true
const parentId = (block.data as Record<string, unknown> | null)?.parentId as
| string
| undefined
if (parentId && blocksById[parentId]?.locked) return true
return false
}
// Filter out protected blocks from deletion request
const deletableIds = ids.filter((id) => !isProtected(id))
if (deletableIds.length === 0) {
logger.info('All requested blocks are protected, skipping deletion')
return
}
if (deletableIds.length < ids.length) {
logger.info(
`Filtered out ${ids.length - deletableIds.length} protected blocks from deletion`
)
}
// Collect all block IDs including children of subflows
const allBlocksToDelete = new Set<string>(deletableIds)
const allBlocksToDelete = new Set<string>(ids)
for (const id of deletableIds) {
const block = blocksById[id]
if (block && isSubflowBlockType(block.type)) {
// Include all children of the subflow (they should be deleted with parent)
for (const b of allBlocks) {
const parentId = (b.data as Record<string, unknown> | null)?.parentId
if (parentId === id) {
allBlocksToDelete.add(b.id)
}
}
for (const id of ids) {
const blockToRemove = await tx
.select({ type: workflowBlocks.type })
.from(workflowBlocks)
.where(and(eq(workflowBlocks.id, id), eq(workflowBlocks.workflowId, workflowId)))
.limit(1)
if (blockToRemove.length > 0 && isSubflowBlockType(blockToRemove[0].type)) {
const childBlocks = await tx
.select({ id: workflowBlocks.id })
.from(workflowBlocks)
.where(
and(
eq(workflowBlocks.workflowId, workflowId),
sql`${workflowBlocks.data}->>'parentId' = ${id}`
)
)
childBlocks.forEach((child: { id: string }) => allBlocksToDelete.add(child.id))
}
}
const blockIdsArray = Array.from(allBlocksToDelete)
// Collect parent IDs BEFORE deleting blocks (use blocksById, already fetched)
// Collect parent IDs BEFORE deleting blocks
const parentIds = new Set<string>()
for (const id of deletableIds) {
const block = blocksById[id]
const parentId = (block?.data as Record<string, unknown> | null)?.parentId as
| string
| undefined
if (parentId) {
parentIds.add(parentId)
for (const id of ids) {
const parentInfo = await tx
.select({ parentId: sql<string | null>`${workflowBlocks.data}->>'parentId'` })
.from(workflowBlocks)
.where(and(eq(workflowBlocks.id, id), eq(workflowBlocks.workflowId, workflowId)))
.limit(1)
if (parentInfo.length > 0 && parentInfo[0].parentId) {
parentIds.add(parentInfo[0].parentId)
}
}
@@ -802,61 +741,22 @@ async function handleBlocksOperationTx(
`Batch toggling enabled state for ${blockIds.length} blocks in workflow ${workflowId}`
)
// Get all blocks in workflow to find children and check locked state
const allBlocks = await tx
.select({
id: workflowBlocks.id,
enabled: workflowBlocks.enabled,
locked: workflowBlocks.locked,
type: workflowBlocks.type,
data: workflowBlocks.data,
})
const blocks = await tx
.select({ id: workflowBlocks.id, enabled: workflowBlocks.enabled })
.from(workflowBlocks)
.where(eq(workflowBlocks.workflowId, workflowId))
.where(and(eq(workflowBlocks.workflowId, workflowId), inArray(workflowBlocks.id, blockIds)))
type BlockRecord = (typeof allBlocks)[number]
const blocksById: Record<string, BlockRecord> = Object.fromEntries(
allBlocks.map((b: BlockRecord) => [b.id, b])
)
const blocksToToggle = new Set<string>()
// Collect all blocks to toggle including children of containers
for (const id of blockIds) {
const block = blocksById[id]
if (!block || block.locked) continue
blocksToToggle.add(id)
// If it's a loop or parallel, also include all children
if (block.type === 'loop' || block.type === 'parallel') {
for (const b of allBlocks) {
const parentId = (b.data as Record<string, unknown> | null)?.parentId
if (parentId === id && !b.locked) {
blocksToToggle.add(b.id)
}
}
}
}
// Determine target enabled state based on first toggleable block
if (blocksToToggle.size === 0) break
const firstToggleableId = Array.from(blocksToToggle)[0]
const firstBlock = blocksById[firstToggleableId]
if (!firstBlock) break
const targetEnabled = !firstBlock.enabled
// Update all affected blocks
for (const blockId of blocksToToggle) {
for (const block of blocks) {
await tx
.update(workflowBlocks)
.set({
enabled: targetEnabled,
enabled: !block.enabled,
updatedAt: new Date(),
})
.where(and(eq(workflowBlocks.id, blockId), eq(workflowBlocks.workflowId, workflowId)))
.where(and(eq(workflowBlocks.id, block.id), eq(workflowBlocks.workflowId, workflowId)))
}
logger.debug(`Batch toggled enabled state for ${blocksToToggle.size} blocks`)
logger.debug(`Batch toggled enabled state for ${blocks.length} blocks`)
break
}
@@ -868,118 +768,22 @@ async function handleBlocksOperationTx(
logger.info(`Batch toggling handles for ${blockIds.length} blocks in workflow ${workflowId}`)
// Fetch all blocks to check lock status and filter out protected blocks
const allBlocks = await tx
.select({
id: workflowBlocks.id,
horizontalHandles: workflowBlocks.horizontalHandles,
locked: workflowBlocks.locked,
data: workflowBlocks.data,
})
const blocks = await tx
.select({ id: workflowBlocks.id, horizontalHandles: workflowBlocks.horizontalHandles })
.from(workflowBlocks)
.where(eq(workflowBlocks.workflowId, workflowId))
.where(and(eq(workflowBlocks.workflowId, workflowId), inArray(workflowBlocks.id, blockIds)))
type HandleBlockRecord = (typeof allBlocks)[number]
const blocksById: Record<string, HandleBlockRecord> = Object.fromEntries(
allBlocks.map((b: HandleBlockRecord) => [b.id, b])
)
// Helper to check if a block is protected (locked or inside locked parent)
const isProtected = (blockId: string): boolean => {
const block = blocksById[blockId]
if (!block) return false
if (block.locked) return true
const parentId = (block.data as Record<string, unknown> | null)?.parentId as
| string
| undefined
if (parentId && blocksById[parentId]?.locked) return true
return false
}
// Filter to only toggle handles on unprotected blocks
const blocksToToggle = blockIds.filter((id) => blocksById[id] && !isProtected(id))
if (blocksToToggle.length === 0) {
logger.info('All requested blocks are protected, skipping handles toggle')
break
}
for (const blockId of blocksToToggle) {
const block = blocksById[blockId]
for (const block of blocks) {
await tx
.update(workflowBlocks)
.set({
horizontalHandles: !block.horizontalHandles,
updatedAt: new Date(),
})
.where(and(eq(workflowBlocks.id, blockId), eq(workflowBlocks.workflowId, workflowId)))
.where(and(eq(workflowBlocks.id, block.id), eq(workflowBlocks.workflowId, workflowId)))
}
logger.debug(`Batch toggled handles for ${blocksToToggle.length} blocks`)
break
}
case BLOCKS_OPERATIONS.BATCH_TOGGLE_LOCKED: {
const { blockIds } = payload
if (!Array.isArray(blockIds) || blockIds.length === 0) {
return
}
logger.info(`Batch toggling locked for ${blockIds.length} blocks in workflow ${workflowId}`)
// Get all blocks in workflow to find children
const allBlocks = await tx
.select({
id: workflowBlocks.id,
locked: workflowBlocks.locked,
type: workflowBlocks.type,
data: workflowBlocks.data,
})
.from(workflowBlocks)
.where(eq(workflowBlocks.workflowId, workflowId))
type LockedBlockRecord = (typeof allBlocks)[number]
const blocksById: Record<string, LockedBlockRecord> = Object.fromEntries(
allBlocks.map((b: LockedBlockRecord) => [b.id, b])
)
const blocksToToggle = new Set<string>()
// Collect all blocks to toggle including children of containers
for (const id of blockIds) {
const block = blocksById[id]
if (!block) continue
blocksToToggle.add(id)
// If it's a loop or parallel, also include all children
if (block.type === 'loop' || block.type === 'parallel') {
for (const b of allBlocks) {
const parentId = (b.data as Record<string, unknown> | null)?.parentId
if (parentId === id) {
blocksToToggle.add(b.id)
}
}
}
}
// Determine target locked state based on first toggleable block
if (blocksToToggle.size === 0) break
const firstToggleableId = Array.from(blocksToToggle)[0]
const firstBlock = blocksById[firstToggleableId]
if (!firstBlock) break
const targetLocked = !firstBlock.locked
// Update all affected blocks
for (const blockId of blocksToToggle) {
await tx
.update(workflowBlocks)
.set({
locked: targetLocked,
updatedAt: new Date(),
})
.where(and(eq(workflowBlocks.id, blockId), eq(workflowBlocks.workflowId, workflowId)))
}
logger.debug(`Batch toggled locked for ${blocksToToggle.size} blocks`)
logger.debug(`Batch toggled handles for ${blocks.length} blocks`)
break
}
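The batch-toggle-locked handler above (on the removed side of this hunk) expands any loop/parallel selection to include its children and derives a single target from the first collected block. A worked example with hypothetical ids:
// blocks: loop1 (locked=false, type 'loop') with child agent1, plus fn1 (locked=true); payload blockIds = ['loop1', 'fn1']
// blocksToToggle = { loop1, agent1, fn1 }; targetLocked = !loop1.locked = true
// -> all three rows are updated to locked = true, regardless of each block's individual prior state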
@@ -991,54 +795,19 @@ async function handleBlocksOperationTx(
logger.info(`Batch updating parent for ${updates.length} blocks in workflow ${workflowId}`)
// Fetch all blocks to check lock status
const allBlocks = await tx
.select({
id: workflowBlocks.id,
locked: workflowBlocks.locked,
data: workflowBlocks.data,
})
.from(workflowBlocks)
.where(eq(workflowBlocks.workflowId, workflowId))
type ParentBlockRecord = (typeof allBlocks)[number]
const blocksById: Record<string, ParentBlockRecord> = Object.fromEntries(
allBlocks.map((b: ParentBlockRecord) => [b.id, b])
)
// Helper to check if a block is protected (locked or inside locked parent)
const isProtected = (blockId: string): boolean => {
const block = blocksById[blockId]
if (!block) return false
if (block.locked) return true
const currentParentId = (block.data as Record<string, unknown> | null)?.parentId as
| string
| undefined
if (currentParentId && blocksById[currentParentId]?.locked) return true
return false
}
for (const update of updates) {
const { id, parentId, position } = update
if (!id) continue
// Skip protected blocks (locked or inside locked container)
if (isProtected(id)) {
logger.info(`Skipping block ${id} parent update - block is protected`)
continue
}
// Skip if trying to move into a locked container
if (parentId && blocksById[parentId]?.locked) {
logger.info(`Skipping block ${id} parent update - target parent ${parentId} is locked`)
continue
}
// Fetch current parent to update subflow node lists
const existing = blocksById[id]
const existingParentId = (existing?.data as Record<string, unknown> | null)?.parentId as
| string
| undefined
const [existing] = await tx
.select({
id: workflowBlocks.id,
parentId: sql<string | null>`${workflowBlocks.data}->>'parentId'`,
})
.from(workflowBlocks)
.where(and(eq(workflowBlocks.id, id), eq(workflowBlocks.workflowId, workflowId)))
.limit(1)
if (!existing) {
logger.warn(`Block ${id} not found for batch-update-parent`)
@@ -1083,8 +852,8 @@ async function handleBlocksOperationTx(
await updateSubflowNodeList(tx, workflowId, parentId)
}
// If the block had a previous parent, update that parent's node list as well
if (existingParentId && existingParentId !== parentId) {
await updateSubflowNodeList(tx, workflowId, existingParentId)
if (existing?.parentId && existing.parentId !== parentId) {
await updateSubflowNodeList(tx, workflowId, existing.parentId)
}
}
@@ -1429,7 +1198,6 @@ async function handleWorkflowOperationTx(
advancedMode: block.advancedMode ?? false,
triggerMode: block.triggerMode ?? false,
height: block.height || 0,
locked: block.locked ?? false,
}))
await tx.insert(workflowBlocks).values(blockValues)

View File

@@ -39,23 +39,16 @@ export function cleanupPendingSubblocksForSocket(socketId: string): void {
export function setupSubblocksHandlers(socket: AuthenticatedSocket, roomManager: IRoomManager) {
socket.on('subblock-update', async (data) => {
const {
workflowId: payloadWorkflowId,
blockId,
subblockId,
value,
timestamp,
operationId,
} = data
const { blockId, subblockId, value, timestamp, operationId } = data
try {
const sessionWorkflowId = await roomManager.getWorkflowIdForSocket(socket.id)
const workflowId = await roomManager.getWorkflowIdForSocket(socket.id)
const session = await roomManager.getUserSession(socket.id)
if (!sessionWorkflowId || !session) {
if (!workflowId || !session) {
logger.debug(`Ignoring subblock update: socket not connected to any workflow room`, {
socketId: socket.id,
hasWorkflowId: !!sessionWorkflowId,
hasWorkflowId: !!workflowId,
hasSession: !!session,
})
socket.emit('operation-forbidden', {
@@ -68,24 +61,6 @@ export function setupSubblocksHandlers(socket: AuthenticatedSocket, roomManager:
return
}
const workflowId = payloadWorkflowId || sessionWorkflowId
if (payloadWorkflowId && payloadWorkflowId !== sessionWorkflowId) {
logger.warn('Workflow ID mismatch in subblock update', {
payloadWorkflowId,
sessionWorkflowId,
socketId: socket.id,
})
if (operationId) {
socket.emit('operation-failed', {
operationId,
error: 'Workflow ID mismatch',
retryable: true,
})
}
return
}
const hasRoom = await roomManager.hasWorkflowRoom(workflowId)
if (!hasRoom) {
logger.debug(`Ignoring subblock update: workflow room not found`, {
@@ -207,17 +182,20 @@ async function flushSubblockUpdate(
if (updateSuccessful) {
// Broadcast to room excluding all senders (works cross-pod via Redis adapter)
const senderSocketIds = [...pending.opToSocket.values()]
const broadcastPayload = {
workflowId,
blockId,
subblockId,
value,
timestamp,
}
if (senderSocketIds.length > 0) {
io.to(workflowId).except(senderSocketIds).emit('subblock-update', broadcastPayload)
io.to(workflowId).except(senderSocketIds).emit('subblock-update', {
blockId,
subblockId,
value,
timestamp,
})
} else {
io.to(workflowId).emit('subblock-update', broadcastPayload)
io.to(workflowId).emit('subblock-update', {
blockId,
subblockId,
value,
timestamp,
})
}
// Confirm all coalesced operationIds (io.to(socketId) works cross-pod)
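The payload-vs-session workflowId check shown in the hunks above behaves roughly as follows for subblock updates (the variables handler in the next file follows the same pattern); ids here are hypothetical:
// session room = 'wf-A'; client payload includes workflowId: 'wf-B'
//   -> warn 'Workflow ID mismatch in subblock update' and, if an operationId was sent, emit operation-failed { retryable: true }
// session room = 'wf-A'; client payload omits workflowId
//   -> fall back to the session's workflow id and continue with the normal room and permission checks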

View File

@@ -35,16 +35,16 @@ export function cleanupPendingVariablesForSocket(socketId: string): void {
export function setupVariablesHandlers(socket: AuthenticatedSocket, roomManager: IRoomManager) {
socket.on('variable-update', async (data) => {
const { workflowId: payloadWorkflowId, variableId, field, value, timestamp, operationId } = data
const { variableId, field, value, timestamp, operationId } = data
try {
const sessionWorkflowId = await roomManager.getWorkflowIdForSocket(socket.id)
const workflowId = await roomManager.getWorkflowIdForSocket(socket.id)
const session = await roomManager.getUserSession(socket.id)
if (!sessionWorkflowId || !session) {
if (!workflowId || !session) {
logger.debug(`Ignoring variable update: socket not connected to any workflow room`, {
socketId: socket.id,
hasWorkflowId: !!sessionWorkflowId,
hasWorkflowId: !!workflowId,
hasSession: !!session,
})
socket.emit('operation-forbidden', {
@@ -57,24 +57,6 @@ export function setupVariablesHandlers(socket: AuthenticatedSocket, roomManager:
return
}
const workflowId = payloadWorkflowId || sessionWorkflowId
if (payloadWorkflowId && payloadWorkflowId !== sessionWorkflowId) {
logger.warn('Workflow ID mismatch in variable update', {
payloadWorkflowId,
sessionWorkflowId,
socketId: socket.id,
})
if (operationId) {
socket.emit('operation-failed', {
operationId,
error: 'Workflow ID mismatch',
retryable: true,
})
}
return
}
const hasRoom = await roomManager.hasWorkflowRoom(workflowId)
if (!hasRoom) {
logger.debug(`Ignoring variable update: workflow room not found`, {
@@ -197,17 +179,20 @@ async function flushVariableUpdate(
if (updateSuccessful) {
// Broadcast to room excluding all senders (works cross-pod via Redis adapter)
const senderSocketIds = [...pending.opToSocket.values()]
const broadcastPayload = {
workflowId,
variableId,
field,
value,
timestamp,
}
if (senderSocketIds.length > 0) {
io.to(workflowId).except(senderSocketIds).emit('variable-update', broadcastPayload)
io.to(workflowId).except(senderSocketIds).emit('variable-update', {
variableId,
field,
value,
timestamp,
})
} else {
io.to(workflowId).emit('variable-update', broadcastPayload)
io.to(workflowId).emit('variable-update', {
variableId,
field,
value,
timestamp,
})
}
// Confirm all coalesced operationIds (io.to(socketId) works cross-pod)

View File

@@ -214,12 +214,6 @@ describe('checkRolePermission', () => {
readAllowed: false,
},
{ operation: 'toggle-handles', adminAllowed: true, writeAllowed: true, readAllowed: false },
{
operation: 'batch-toggle-locked',
adminAllowed: true,
writeAllowed: false, // Admin-only operation
readAllowed: false,
},
{
operation: 'batch-update-positions',
adminAllowed: true,

View File

@@ -14,10 +14,7 @@ import {
const logger = createLogger('SocketPermissions')
// Admin-only operations (require admin role)
const ADMIN_ONLY_OPERATIONS: string[] = [BLOCKS_OPERATIONS.BATCH_TOGGLE_LOCKED]
// Write operations (admin and write roles both have these permissions)
// All write operations (admin and write roles have same permissions)
const WRITE_OPERATIONS: string[] = [
// Block operations
BLOCK_OPERATIONS.UPDATE_POSITION,
@@ -54,7 +51,7 @@ const READ_OPERATIONS: string[] = [
// Define operation permissions based on role
const ROLE_PERMISSIONS: Record<string, string[]> = {
admin: [...ADMIN_ONLY_OPERATIONS, ...WRITE_OPERATIONS],
admin: WRITE_OPERATIONS,
write: WRITE_OPERATIONS,
read: READ_OPERATIONS,
}
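checkRolePermission itself is not part of this compare; given the ROLE_PERMISSIONS map above and the permission table exercised in the earlier test hunk, a lookup of roughly this shape would satisfy both (a sketch, not the actual implementation):
// Sketch only - the real checkRolePermission is not shown in this diff.
function checkRolePermissionSketch(role: string, operation: string): boolean {
  return (ROLE_PERMISSIONS[role] ?? []).includes(operation)
}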

View File

@@ -208,17 +208,6 @@ export const BatchToggleHandlesSchema = z.object({
operationId: z.string().optional(),
})
export const BatchToggleLockedSchema = z.object({
operation: z.literal(BLOCKS_OPERATIONS.BATCH_TOGGLE_LOCKED),
target: z.literal(OPERATION_TARGETS.BLOCKS),
payload: z.object({
blockIds: z.array(z.string()),
previousStates: z.record(z.boolean()),
}),
timestamp: z.number(),
operationId: z.string().optional(),
})
export const BatchUpdateParentSchema = z.object({
operation: z.literal(BLOCKS_OPERATIONS.BATCH_UPDATE_PARENT),
target: z.literal(OPERATION_TARGETS.BLOCKS),
@@ -242,7 +231,6 @@ export const WorkflowOperationSchema = z.union([
BatchRemoveBlocksSchema,
BatchToggleEnabledSchema,
BatchToggleHandlesSchema,
BatchToggleLockedSchema,
BatchUpdateParentSchema,
EdgeOperationSchema,
BatchAddEdgesSchema,

View File

@@ -24,40 +24,16 @@ let emitWorkflowOperation:
| ((operation: string, target: string, payload: any, operationId?: string) => void)
| null = null
let emitSubblockUpdate:
| ((
blockId: string,
subblockId: string,
value: any,
operationId: string | undefined,
workflowId: string
) => void)
| ((blockId: string, subblockId: string, value: any, operationId?: string) => void)
| null = null
let emitVariableUpdate:
| ((
variableId: string,
field: string,
value: any,
operationId: string | undefined,
workflowId: string
) => void)
| ((variableId: string, field: string, value: any, operationId?: string) => void)
| null = null
export function registerEmitFunctions(
workflowEmit: (operation: string, target: string, payload: any, operationId?: string) => void,
subblockEmit: (
blockId: string,
subblockId: string,
value: any,
operationId: string | undefined,
workflowId: string
) => void,
variableEmit: (
variableId: string,
field: string,
value: any,
operationId: string | undefined,
workflowId: string
) => void,
subblockEmit: (blockId: string, subblockId: string, value: any, operationId?: string) => void,
variableEmit: (variableId: string, field: string, value: any, operationId?: string) => void,
workflowId: string | null
) {
emitWorkflowOperation = workflowEmit
@@ -220,16 +196,14 @@ export const useOperationQueueStore = create<OperationQueueState>((set, get) =>
}
if (!retryable) {
logger.error(
'Operation failed with non-retryable error - state out of sync, triggering offline mode',
{
operationId,
operation: operation.operation.operation,
target: operation.operation.target,
}
)
logger.debug('Operation marked as non-retryable, removing from queue', { operationId })
get().triggerOfflineMode()
set((state) => ({
operations: state.operations.filter((op) => op.id !== operationId),
isProcessing: false,
}))
get().processNextOperation()
return
}
@@ -331,23 +305,11 @@ export const useOperationQueueStore = create<OperationQueueState>((set, get) =>
const { operation: op, target, payload } = nextOperation.operation
if (op === 'subblock-update' && target === 'subblock') {
if (emitSubblockUpdate) {
emitSubblockUpdate(
payload.blockId,
payload.subblockId,
payload.value,
nextOperation.id,
nextOperation.workflowId
)
emitSubblockUpdate(payload.blockId, payload.subblockId, payload.value, nextOperation.id)
}
} else if (op === 'variable-update' && target === 'variable') {
if (emitVariableUpdate) {
emitVariableUpdate(
payload.variableId,
payload.field,
payload.value,
nextOperation.id,
nextOperation.workflowId
)
emitVariableUpdate(payload.variableId, payload.field, payload.value, nextOperation.id)
}
} else {
if (emitWorkflowOperation) {

View File

@@ -97,14 +97,6 @@ export interface BatchToggleHandlesOperation extends BaseOperation {
}
}
export interface BatchToggleLockedOperation extends BaseOperation {
type: typeof UNDO_REDO_OPERATIONS.BATCH_TOGGLE_LOCKED
data: {
blockIds: string[]
previousStates: Record<string, boolean>
}
}
export interface ApplyDiffOperation extends BaseOperation {
type: typeof UNDO_REDO_OPERATIONS.APPLY_DIFF
data: {
@@ -144,7 +136,6 @@ export type Operation =
| BatchUpdateParentOperation
| BatchToggleEnabledOperation
| BatchToggleHandlesOperation
| BatchToggleLockedOperation
| ApplyDiffOperation
| AcceptDiffOperation
| RejectDiffOperation

View File

@@ -167,15 +167,6 @@ export function createInverseOperation(operation: Operation): Operation {
},
}
case UNDO_REDO_OPERATIONS.BATCH_TOGGLE_LOCKED:
return {
...operation,
data: {
blockIds: operation.data.blockIds,
previousStates: operation.data.previousStates,
},
}
default: {
const exhaustiveCheck: never = operation
throw new Error(`Unhandled operation type: ${(exhaustiveCheck as Operation).type}`)

View File

@@ -432,104 +432,4 @@ describe('regenerateBlockIds', () => {
expect(duplicatedBlock.position).toEqual({ x: 280, y: 70 })
expect(duplicatedBlock.data?.parentId).toBe(loopId)
})
it('should unlock pasted block when source is locked', () => {
const blockId = 'block-1'
const blocksToCopy = {
[blockId]: createAgentBlock({
id: blockId,
name: 'Locked Agent',
position: { x: 100, y: 50 },
locked: true,
}),
}
const result = regenerateBlockIds(
blocksToCopy,
[],
{},
{},
{},
positionOffset,
{},
getUniqueBlockName
)
const newBlocks = Object.values(result.blocks)
expect(newBlocks).toHaveLength(1)
// Pasted blocks are always unlocked so users can edit them
const pastedBlock = newBlocks[0]
expect(pastedBlock.locked).toBe(false)
})
it('should keep pasted block unlocked when source is unlocked', () => {
const blockId = 'block-1'
const blocksToCopy = {
[blockId]: createAgentBlock({
id: blockId,
name: 'Unlocked Agent',
position: { x: 100, y: 50 },
locked: false,
}),
}
const result = regenerateBlockIds(
blocksToCopy,
[],
{},
{},
{},
positionOffset,
{},
getUniqueBlockName
)
const newBlocks = Object.values(result.blocks)
expect(newBlocks).toHaveLength(1)
const pastedBlock = newBlocks[0]
expect(pastedBlock.locked).toBe(false)
})
it('should unlock all pasted blocks regardless of source locked state', () => {
const lockedId = 'locked-1'
const unlockedId = 'unlocked-1'
const blocksToCopy = {
[lockedId]: createAgentBlock({
id: lockedId,
name: 'Originally Locked Agent',
position: { x: 100, y: 50 },
locked: true,
}),
[unlockedId]: createFunctionBlock({
id: unlockedId,
name: 'Originally Unlocked Function',
position: { x: 200, y: 50 },
locked: false,
}),
}
const result = regenerateBlockIds(
blocksToCopy,
[],
{},
{},
{},
positionOffset,
{},
getUniqueBlockName
)
const newBlocks = Object.values(result.blocks)
expect(newBlocks).toHaveLength(2)
// All pasted blocks should be unlocked so users can edit them
for (const block of newBlocks) {
expect(block.locked).toBe(false)
}
})
})

View File

@@ -203,7 +203,6 @@ export function prepareBlockState(options: PrepareBlockStateOptions): BlockState
advancedMode: false,
triggerMode,
height: 0,
locked: false,
}
}
@@ -482,8 +481,6 @@ export function regenerateBlockIds(
position: newPosition,
// Temporarily keep data as-is, we'll fix parentId in second pass
data: block.data ? { ...block.data } : block.data,
// Duplicated blocks are always unlocked so users can edit them
locked: false,
}
newBlocks[newId] = newBlock
@@ -511,15 +508,15 @@ export function regenerateBlockIds(
parentId: newParentId,
extent: 'parent',
}
} else if (existingBlockNames[oldParentId] && !existingBlockNames[oldParentId].locked) {
// Parent exists in existing workflow and is not locked - keep original parentId
} else if (existingBlockNames[oldParentId]) {
// Parent exists in existing workflow - keep original parentId (block stays in same subflow)
block.data = {
...block.data,
parentId: oldParentId,
extent: 'parent',
}
} else {
// Parent doesn't exist anywhere OR parent is locked - clear the relationship
// Parent doesn't exist anywhere - clear the relationship
block.data = { ...block.data, parentId: undefined, extent: undefined }
}
}
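The branch above resolves a pasted block's parent in three ways; a quick worked example with hypothetical ids:
// 1. oldParentId was part of the copied selection      -> remap to the parent's newly generated id, extent 'parent'
// 2. oldParentId already exists in the target workflow -> keep oldParentId so the block stays in that subflow (the removed variant additionally required that parent not to be locked)
// 3. oldParentId is not found anywhere                  -> clear parentId/extent so the block is pasted without a parent relationship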

View File

@@ -26,49 +26,6 @@ import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
/**
* Helper function to add a single block using batchAddBlocks.
* Provides a simpler interface for tests.
*/
function addBlock(
id: string,
type: string,
name: string,
position: { x: number; y: number },
data?: Record<string, unknown>,
parentId?: string,
extent?: 'parent',
blockProperties?: {
enabled?: boolean
horizontalHandles?: boolean
advancedMode?: boolean
triggerMode?: boolean
height?: number
}
) {
const blockData = {
...data,
...(parentId && { parentId, extent: extent || 'parent' }),
}
useWorkflowStore.getState().batchAddBlocks([
{
id,
type,
name,
position,
subBlocks: {},
outputs: {},
enabled: blockProperties?.enabled ?? true,
horizontalHandles: blockProperties?.horizontalHandles ?? true,
advancedMode: blockProperties?.advancedMode ?? false,
triggerMode: blockProperties?.triggerMode ?? false,
height: blockProperties?.height ?? 0,
data: blockData,
},
])
}
describe('workflow store', () => {
beforeEach(() => {
const localStorageMock = createMockStorage()
@@ -82,8 +39,10 @@ describe('workflow store', () => {
})
})
describe('batchAddBlocks (via addBlock helper)', () => {
describe('addBlock', () => {
it('should add a block with correct default properties', () => {
const { addBlock } = useWorkflowStore.getState()
addBlock('agent-1', 'agent', 'My Agent', { x: 100, y: 200 })
const { blocks } = useWorkflowStore.getState()
@@ -94,6 +53,8 @@ describe('workflow store', () => {
})
it('should add a block with parent relationship for containers', () => {
const { addBlock } = useWorkflowStore.getState()
addBlock('loop-1', 'loop', 'My Loop', { x: 0, y: 0 }, { loopType: 'for', count: 3 })
addBlock(
'child-1',
@@ -112,6 +73,8 @@ describe('workflow store', () => {
})
it('should add multiple blocks correctly', () => {
const { addBlock } = useWorkflowStore.getState()
addBlock('block-1', 'starter', 'Start', { x: 0, y: 0 })
addBlock('block-2', 'agent', 'Agent', { x: 200, y: 0 })
addBlock('block-3', 'function', 'Function', { x: 400, y: 0 })
@@ -124,6 +87,8 @@ describe('workflow store', () => {
})
it('should create a block with default properties when no blockProperties provided', () => {
const { addBlock } = useWorkflowStore.getState()
addBlock('agent1', 'agent', 'Test Agent', { x: 100, y: 200 })
const state = useWorkflowStore.getState()
@@ -140,6 +105,8 @@ describe('workflow store', () => {
})
it('should create a block with custom blockProperties for regular blocks', () => {
const { addBlock } = useWorkflowStore.getState()
addBlock(
'agent1',
'agent',
@@ -167,6 +134,8 @@ describe('workflow store', () => {
})
it('should create a loop block with custom blockProperties', () => {
const { addBlock } = useWorkflowStore.getState()
addBlock(
'loop1',
'loop',
@@ -194,6 +163,8 @@ describe('workflow store', () => {
})
it('should create a parallel block with custom blockProperties', () => {
const { addBlock } = useWorkflowStore.getState()
addBlock(
'parallel1',
'parallel',
@@ -221,6 +192,8 @@ describe('workflow store', () => {
})
it('should handle partial blockProperties (only some properties provided)', () => {
const { addBlock } = useWorkflowStore.getState()
addBlock(
'agent1',
'agent',
@@ -243,6 +216,8 @@ describe('workflow store', () => {
})
it('should handle blockProperties with parent relationships', () => {
const { addBlock } = useWorkflowStore.getState()
addBlock('loop1', 'loop', 'Parent Loop', { x: 0, y: 0 })
addBlock(
@@ -274,7 +249,7 @@ describe('workflow store', () => {
describe('batchRemoveBlocks', () => {
it('should remove a block', () => {
const { batchRemoveBlocks } = useWorkflowStore.getState()
const { addBlock, batchRemoveBlocks } = useWorkflowStore.getState()
addBlock('block-1', 'function', 'Test', { x: 0, y: 0 })
batchRemoveBlocks(['block-1'])
@@ -284,7 +259,7 @@ describe('workflow store', () => {
})
it('should remove connected edges when block is removed', () => {
const { batchAddEdges, batchRemoveBlocks } = useWorkflowStore.getState()
const { addBlock, batchAddEdges, batchRemoveBlocks } = useWorkflowStore.getState()
addBlock('block-1', 'starter', 'Start', { x: 0, y: 0 })
addBlock('block-2', 'function', 'Middle', { x: 200, y: 0 })
@@ -311,7 +286,7 @@ describe('workflow store', () => {
describe('batchAddEdges', () => {
it('should add an edge between two blocks', () => {
const { batchAddEdges } = useWorkflowStore.getState()
const { addBlock, batchAddEdges } = useWorkflowStore.getState()
addBlock('block-1', 'starter', 'Start', { x: 0, y: 0 })
addBlock('block-2', 'function', 'End', { x: 200, y: 0 })
@@ -323,7 +298,7 @@ describe('workflow store', () => {
})
it('should not add duplicate connections', () => {
const { batchAddEdges } = useWorkflowStore.getState()
const { addBlock, batchAddEdges } = useWorkflowStore.getState()
addBlock('block-1', 'starter', 'Start', { x: 0, y: 0 })
addBlock('block-2', 'function', 'End', { x: 200, y: 0 })
@@ -338,7 +313,7 @@ describe('workflow store', () => {
describe('batchRemoveEdges', () => {
it('should remove an edge by id', () => {
const { batchAddEdges, batchRemoveEdges } = useWorkflowStore.getState()
const { addBlock, batchAddEdges, batchRemoveEdges } = useWorkflowStore.getState()
addBlock('block-1', 'starter', 'Start', { x: 0, y: 0 })
addBlock('block-2', 'function', 'End', { x: 200, y: 0 })
@@ -360,7 +335,7 @@ describe('workflow store', () => {
describe('clear', () => {
it('should clear all blocks and edges', () => {
const { batchAddEdges, clear } = useWorkflowStore.getState()
const { addBlock, batchAddEdges, clear } = useWorkflowStore.getState()
addBlock('block-1', 'starter', 'Start', { x: 0, y: 0 })
addBlock('block-2', 'function', 'End', { x: 200, y: 0 })
@@ -376,7 +351,7 @@ describe('workflow store', () => {
describe('batchToggleEnabled', () => {
it('should toggle block enabled state', () => {
const { batchToggleEnabled } = useWorkflowStore.getState()
const { addBlock, batchToggleEnabled } = useWorkflowStore.getState()
addBlock('block-1', 'function', 'Test', { x: 0, y: 0 })
@@ -392,7 +367,7 @@ describe('workflow store', () => {
describe('duplicateBlock', () => {
it('should duplicate a block', () => {
const { duplicateBlock } = useWorkflowStore.getState()
const { addBlock, duplicateBlock } = useWorkflowStore.getState()
addBlock('original', 'agent', 'Original Agent', { x: 0, y: 0 })
@@ -416,7 +391,7 @@ describe('workflow store', () => {
describe('batchUpdatePositions', () => {
it('should update block position', () => {
const { batchUpdatePositions } = useWorkflowStore.getState()
const { addBlock, batchUpdatePositions } = useWorkflowStore.getState()
addBlock('block-1', 'function', 'Test', { x: 0, y: 0 })
@@ -429,7 +404,7 @@ describe('workflow store', () => {
describe('loop management', () => {
it('should regenerate loops when updateLoopCount is called', () => {
const { updateLoopCount } = useWorkflowStore.getState()
const { addBlock, updateLoopCount } = useWorkflowStore.getState()
addBlock(
'loop1',
@@ -453,7 +428,7 @@ describe('workflow store', () => {
})
it('should regenerate loops when updateLoopType is called', () => {
const { updateLoopType } = useWorkflowStore.getState()
const { addBlock, updateLoopType } = useWorkflowStore.getState()
addBlock(
'loop1',
@@ -478,7 +453,7 @@ describe('workflow store', () => {
})
it('should regenerate loops when updateLoopCollection is called', () => {
const { updateLoopCollection } = useWorkflowStore.getState()
const { addBlock, updateLoopCollection } = useWorkflowStore.getState()
addBlock(
'loop1',
@@ -501,7 +476,7 @@ describe('workflow store', () => {
})
it('should clamp loop count between 1 and 1000', () => {
const { updateLoopCount } = useWorkflowStore.getState()
const { addBlock, updateLoopCount } = useWorkflowStore.getState()
addBlock(
'loop1',
@@ -527,7 +502,7 @@ describe('workflow store', () => {
describe('parallel management', () => {
it('should regenerate parallels when updateParallelCount is called', () => {
const { updateParallelCount } = useWorkflowStore.getState()
const { addBlock, updateParallelCount } = useWorkflowStore.getState()
addBlock(
'parallel1',
@@ -550,7 +525,7 @@ describe('workflow store', () => {
})
it('should regenerate parallels when updateParallelCollection is called', () => {
const { updateParallelCollection } = useWorkflowStore.getState()
const { addBlock, updateParallelCollection } = useWorkflowStore.getState()
addBlock(
'parallel1',
@@ -577,7 +552,7 @@ describe('workflow store', () => {
})
it('should clamp parallel count between 1 and 20', () => {
const { updateParallelCount } = useWorkflowStore.getState()
const { addBlock, updateParallelCount } = useWorkflowStore.getState()
addBlock(
'parallel1',
@@ -600,7 +575,7 @@ describe('workflow store', () => {
})
it('should regenerate parallels when updateParallelType is called', () => {
const { updateParallelType } = useWorkflowStore.getState()
const { addBlock, updateParallelType } = useWorkflowStore.getState()
addBlock(
'parallel1',
@@ -626,7 +601,7 @@ describe('workflow store', () => {
describe('mode switching', () => {
it('should toggle advanced mode on a block', () => {
const { toggleBlockAdvancedMode } = useWorkflowStore.getState()
const { addBlock, toggleBlockAdvancedMode } = useWorkflowStore.getState()
addBlock('agent1', 'agent', 'Test Agent', { x: 0, y: 0 })
@@ -643,7 +618,7 @@ describe('workflow store', () => {
})
it('should preserve systemPrompt and userPrompt when switching modes', () => {
const { toggleBlockAdvancedMode } = useWorkflowStore.getState()
const { addBlock, toggleBlockAdvancedMode } = useWorkflowStore.getState()
const { setState: setSubBlockState } = useSubBlockStore
useWorkflowRegistry.setState({ activeWorkflowId: 'test-workflow' })
addBlock('agent1', 'agent', 'Test Agent', { x: 0, y: 0 })
@@ -676,7 +651,7 @@ describe('workflow store', () => {
})
it('should preserve memories when switching from advanced to basic mode', () => {
const { toggleBlockAdvancedMode } = useWorkflowStore.getState()
const { addBlock, toggleBlockAdvancedMode } = useWorkflowStore.getState()
const { setState: setSubBlockState } = useSubBlockStore
useWorkflowRegistry.setState({ activeWorkflowId: 'test-workflow' })
@@ -716,7 +691,7 @@ describe('workflow store', () => {
})
it('should handle mode switching when no subblock values exist', () => {
const { toggleBlockAdvancedMode } = useWorkflowStore.getState()
const { addBlock, toggleBlockAdvancedMode } = useWorkflowStore.getState()
useWorkflowRegistry.setState({ activeWorkflowId: 'test-workflow' })
@@ -778,7 +753,7 @@ describe('workflow store', () => {
describe('replaceWorkflowState', () => {
it('should replace entire workflow state', () => {
const { replaceWorkflowState } = useWorkflowStore.getState()
const { addBlock, replaceWorkflowState } = useWorkflowStore.getState()
addBlock('old-1', 'function', 'Old', { x: 0, y: 0 })
@@ -794,7 +769,7 @@ describe('workflow store', () => {
describe('getWorkflowState', () => {
it('should return current workflow state', () => {
const { getWorkflowState } = useWorkflowStore.getState()
const { addBlock, getWorkflowState } = useWorkflowStore.getState()
addBlock('block-1', 'starter', 'Start', { x: 0, y: 0 })
addBlock('block-2', 'function', 'End', { x: 200, y: 0 })
@@ -807,560 +782,6 @@ describe('workflow store', () => {
})
})
describe('loop/parallel regeneration optimization', () => {
it('should NOT regenerate loops when adding a regular block without parentId', () => {
// Add a loop first
addBlock('loop-1', 'loop', 'Loop 1', { x: 0, y: 0 }, { loopType: 'for', count: 5 })
const stateAfterLoop = useWorkflowStore.getState()
const loopsAfterLoop = stateAfterLoop.loops
// Add a regular block (no parentId)
addBlock('agent-1', 'agent', 'Agent 1', { x: 200, y: 0 })
const stateAfterAgent = useWorkflowStore.getState()
// Loops should be unchanged (same content)
expect(Object.keys(stateAfterAgent.loops)).toEqual(Object.keys(loopsAfterLoop))
expect(stateAfterAgent.loops['loop-1'].nodes).toEqual(loopsAfterLoop['loop-1'].nodes)
})
it('should regenerate loops when adding a child to a loop', () => {
// Add a loop
addBlock('loop-1', 'loop', 'Loop 1', { x: 0, y: 0 }, { loopType: 'for', count: 5 })
const stateAfterLoop = useWorkflowStore.getState()
expect(stateAfterLoop.loops['loop-1'].nodes).toEqual([])
// Add a child block to the loop
addBlock(
'child-1',
'function',
'Child 1',
{ x: 50, y: 50 },
{ parentId: 'loop-1' },
'loop-1',
'parent'
)
const stateAfterChild = useWorkflowStore.getState()
// Loop should now include the child
expect(stateAfterChild.loops['loop-1'].nodes).toContain('child-1')
})
it('should NOT regenerate parallels when adding a child to a loop', () => {
// Add both a loop and a parallel
addBlock('loop-1', 'loop', 'Loop 1', { x: 0, y: 0 }, { loopType: 'for', count: 5 })
addBlock('parallel-1', 'parallel', 'Parallel 1', { x: 300, y: 0 }, { count: 3 })
const stateAfterContainers = useWorkflowStore.getState()
const parallelsAfterContainers = stateAfterContainers.parallels
// Add a child to the loop (not the parallel)
addBlock(
'child-1',
'function',
'Child 1',
{ x: 50, y: 50 },
{ parentId: 'loop-1' },
'loop-1',
'parent'
)
const stateAfterChild = useWorkflowStore.getState()
// Parallels should be unchanged
expect(stateAfterChild.parallels['parallel-1'].nodes).toEqual(
parallelsAfterContainers['parallel-1'].nodes
)
})
it('should regenerate parallels when adding a child to a parallel', () => {
// Add a parallel
addBlock('parallel-1', 'parallel', 'Parallel 1', { x: 0, y: 0 }, { count: 3 })
const stateAfterParallel = useWorkflowStore.getState()
expect(stateAfterParallel.parallels['parallel-1'].nodes).toEqual([])
// Add a child block to the parallel
addBlock(
'child-1',
'function',
'Child 1',
{ x: 50, y: 50 },
{ parentId: 'parallel-1' },
'parallel-1',
'parent'
)
const stateAfterChild = useWorkflowStore.getState()
// Parallel should now include the child
expect(stateAfterChild.parallels['parallel-1'].nodes).toContain('child-1')
})
it('should handle adding blocks in any order and produce correct final state', () => {
// Add child BEFORE the loop (simulating undo-redo edge case)
// Note: The child's parentId points to a loop that doesn't exist yet
addBlock(
'child-1',
'function',
'Child 1',
{ x: 50, y: 50 },
{ parentId: 'loop-1' },
'loop-1',
'parent'
)
// At this point, the child exists but loop doesn't
const stateAfterChild = useWorkflowStore.getState()
expect(stateAfterChild.blocks['child-1']).toBeDefined()
expect(stateAfterChild.loops['loop-1']).toBeUndefined()
// Now add the loop
addBlock('loop-1', 'loop', 'Loop 1', { x: 0, y: 0 }, { loopType: 'for', count: 5 })
// Final state should be correct - loop should include the child
const finalState = useWorkflowStore.getState()
expect(finalState.loops['loop-1']).toBeDefined()
expect(finalState.loops['loop-1'].nodes).toContain('child-1')
})
})
describe('batchAddBlocks optimization', () => {
it('should NOT regenerate loops/parallels when adding regular blocks', () => {
const { batchAddBlocks } = useWorkflowStore.getState()
// Set up initial state with a loop
useWorkflowStore.setState({
blocks: {
'loop-1': {
id: 'loop-1',
type: 'loop',
name: 'Loop 1',
position: { x: 0, y: 0 },
subBlocks: {},
outputs: {},
enabled: true,
horizontalHandles: true,
advancedMode: false,
triggerMode: false,
height: 0,
data: { loopType: 'for', count: 5 },
},
},
edges: [],
loops: {
'loop-1': {
id: 'loop-1',
nodes: [],
iterations: 5,
loopType: 'for',
enabled: true,
},
},
parallels: {},
})
const stateBefore = useWorkflowStore.getState()
// Add regular blocks (no parentId, not loop/parallel type)
batchAddBlocks([
{
id: 'agent-1',
type: 'agent',
name: 'Agent 1',
position: { x: 200, y: 0 },
subBlocks: {},
outputs: {},
enabled: true,
},
{
id: 'function-1',
type: 'function',
name: 'Function 1',
position: { x: 400, y: 0 },
subBlocks: {},
outputs: {},
enabled: true,
},
])
const stateAfter = useWorkflowStore.getState()
// Loops should be unchanged
expect(stateAfter.loops['loop-1'].nodes).toEqual(stateBefore.loops['loop-1'].nodes)
})
it('should regenerate loops when batch adding a loop block', () => {
const { batchAddBlocks } = useWorkflowStore.getState()
batchAddBlocks([
{
id: 'loop-1',
type: 'loop',
name: 'Loop 1',
position: { x: 0, y: 0 },
subBlocks: {},
outputs: {},
enabled: true,
data: { loopType: 'for', count: 5 },
},
])
const state = useWorkflowStore.getState()
expect(state.loops['loop-1']).toBeDefined()
expect(state.loops['loop-1'].iterations).toBe(5)
})
it('should regenerate loops when batch adding a child of a loop', () => {
const { batchAddBlocks } = useWorkflowStore.getState()
// First add a loop
batchAddBlocks([
{
id: 'loop-1',
type: 'loop',
name: 'Loop 1',
position: { x: 0, y: 0 },
subBlocks: {},
outputs: {},
enabled: true,
data: { loopType: 'for', count: 5 },
},
])
// Then add a child
batchAddBlocks([
{
id: 'child-1',
type: 'function',
name: 'Child 1',
position: { x: 50, y: 50 },
subBlocks: {},
outputs: {},
enabled: true,
data: { parentId: 'loop-1' },
},
])
const state = useWorkflowStore.getState()
expect(state.loops['loop-1'].nodes).toContain('child-1')
})
it('should correctly handle batch adding loop and its children together', () => {
const { batchAddBlocks } = useWorkflowStore.getState()
// Add loop and child in same batch
batchAddBlocks([
{
id: 'loop-1',
type: 'loop',
name: 'Loop 1',
position: { x: 0, y: 0 },
subBlocks: {},
outputs: {},
enabled: true,
data: { loopType: 'for', count: 5 },
},
{
id: 'child-1',
type: 'function',
name: 'Child 1',
position: { x: 50, y: 50 },
subBlocks: {},
outputs: {},
enabled: true,
data: { parentId: 'loop-1' },
},
])
const state = useWorkflowStore.getState()
expect(state.loops['loop-1']).toBeDefined()
expect(state.loops['loop-1'].nodes).toContain('child-1')
})
})
describe('edge operations should not affect loops/parallels', () => {
it('should preserve loops when adding edges', () => {
const { batchAddEdges } = useWorkflowStore.getState()
// Create a loop with a child
addBlock('loop-1', 'loop', 'Loop 1', { x: 0, y: 0 }, { loopType: 'for', count: 5 })
addBlock(
'child-1',
'function',
'Child 1',
{ x: 50, y: 50 },
{ parentId: 'loop-1' },
'loop-1',
'parent'
)
addBlock('external-1', 'function', 'External', { x: 300, y: 0 })
const stateBeforeEdge = useWorkflowStore.getState()
const loopsBeforeEdge = stateBeforeEdge.loops
// Add an edge (should not affect loops)
batchAddEdges([{ id: 'e1', source: 'loop-1', target: 'external-1' }])
const stateAfterEdge = useWorkflowStore.getState()
// Loops should be unchanged
expect(stateAfterEdge.loops['loop-1'].nodes).toEqual(loopsBeforeEdge['loop-1'].nodes)
expect(stateAfterEdge.loops['loop-1'].iterations).toEqual(
loopsBeforeEdge['loop-1'].iterations
)
})
it('should preserve loops when removing edges', () => {
const { batchAddEdges, batchRemoveEdges } = useWorkflowStore.getState()
// Create a loop with a child and an edge
addBlock('loop-1', 'loop', 'Loop 1', { x: 0, y: 0 }, { loopType: 'for', count: 5 })
addBlock(
'child-1',
'function',
'Child 1',
{ x: 50, y: 50 },
{ parentId: 'loop-1' },
'loop-1',
'parent'
)
addBlock('external-1', 'function', 'External', { x: 300, y: 0 })
batchAddEdges([{ id: 'e1', source: 'loop-1', target: 'external-1' }])
const stateBeforeRemove = useWorkflowStore.getState()
const loopsBeforeRemove = stateBeforeRemove.loops
// Remove the edge
batchRemoveEdges(['e1'])
const stateAfterRemove = useWorkflowStore.getState()
// Loops should be unchanged
expect(stateAfterRemove.loops['loop-1'].nodes).toEqual(loopsBeforeRemove['loop-1'].nodes)
})
})
describe('batchToggleLocked', () => {
it('should toggle block locked state', () => {
const { batchToggleLocked } = useWorkflowStore.getState()
addBlock('block-1', 'function', 'Test', { x: 0, y: 0 })
// Initial state is undefined (falsy)
expect(useWorkflowStore.getState().blocks['block-1'].locked).toBeFalsy()
batchToggleLocked(['block-1'])
expect(useWorkflowStore.getState().blocks['block-1'].locked).toBe(true)
batchToggleLocked(['block-1'])
expect(useWorkflowStore.getState().blocks['block-1'].locked).toBe(false)
})
it('should cascade lock to children when locking a loop', () => {
const { batchToggleLocked } = useWorkflowStore.getState()
addBlock('loop-1', 'loop', 'My Loop', { x: 0, y: 0 }, { loopType: 'for', count: 3 })
addBlock(
'child-1',
'function',
'Child',
{ x: 50, y: 50 },
{ parentId: 'loop-1' },
'loop-1',
'parent'
)
batchToggleLocked(['loop-1'])
const { blocks } = useWorkflowStore.getState()
expect(blocks['loop-1'].locked).toBe(true)
expect(blocks['child-1'].locked).toBe(true)
})
it('should cascade unlock to children when unlocking a parallel', () => {
const { batchToggleLocked } = useWorkflowStore.getState()
addBlock('parallel-1', 'parallel', 'My Parallel', { x: 0, y: 0 }, { count: 3 })
addBlock(
'child-1',
'function',
'Child',
{ x: 50, y: 50 },
{ parentId: 'parallel-1' },
'parallel-1',
'parent'
)
// Lock first
batchToggleLocked(['parallel-1'])
expect(useWorkflowStore.getState().blocks['child-1'].locked).toBe(true)
// Unlock
batchToggleLocked(['parallel-1'])
const { blocks } = useWorkflowStore.getState()
expect(blocks['parallel-1'].locked).toBe(false)
expect(blocks['child-1'].locked).toBe(false)
})
it('should toggle multiple blocks at once', () => {
const { batchToggleLocked } = useWorkflowStore.getState()
addBlock('block-1', 'function', 'Test 1', { x: 0, y: 0 })
addBlock('block-2', 'function', 'Test 2', { x: 100, y: 0 })
batchToggleLocked(['block-1', 'block-2'])
const { blocks } = useWorkflowStore.getState()
expect(blocks['block-1'].locked).toBe(true)
expect(blocks['block-2'].locked).toBe(true)
})
})
describe('setBlockLocked', () => {
it('should set block locked state', () => {
const { setBlockLocked } = useWorkflowStore.getState()
addBlock('block-1', 'function', 'Test', { x: 0, y: 0 })
setBlockLocked('block-1', true)
expect(useWorkflowStore.getState().blocks['block-1'].locked).toBe(true)
setBlockLocked('block-1', false)
expect(useWorkflowStore.getState().blocks['block-1'].locked).toBe(false)
})
it('should not update if locked state is already the target value', () => {
const { setBlockLocked } = useWorkflowStore.getState()
addBlock('block-1', 'function', 'Test', { x: 0, y: 0 })
// First set to true
setBlockLocked('block-1', true)
expect(useWorkflowStore.getState().blocks['block-1'].locked).toBe(true)
// Setting to true again should still be true
setBlockLocked('block-1', true)
expect(useWorkflowStore.getState().blocks['block-1'].locked).toBe(true)
})
})
describe('duplicateBlock with locked', () => {
it('should unlock duplicate when duplicating a locked block', () => {
const { setBlockLocked, duplicateBlock } = useWorkflowStore.getState()
addBlock('original', 'agent', 'Original Agent', { x: 0, y: 0 })
setBlockLocked('original', true)
expect(useWorkflowStore.getState().blocks.original.locked).toBe(true)
duplicateBlock('original')
const { blocks } = useWorkflowStore.getState()
const blockIds = Object.keys(blocks)
expect(blockIds.length).toBe(2)
const duplicatedId = blockIds.find((id) => id !== 'original')
expect(duplicatedId).toBeDefined()
if (duplicatedId) {
// Original should still be locked
expect(blocks.original.locked).toBe(true)
// Duplicate should be unlocked so users can edit it
expect(blocks[duplicatedId].locked).toBe(false)
}
})
it('should create unlocked duplicate when duplicating an unlocked block', () => {
const { duplicateBlock } = useWorkflowStore.getState()
addBlock('original', 'agent', 'Original Agent', { x: 0, y: 0 })
duplicateBlock('original')
const { blocks } = useWorkflowStore.getState()
const blockIds = Object.keys(blocks)
const duplicatedId = blockIds.find((id) => id !== 'original')
if (duplicatedId) {
expect(blocks[duplicatedId].locked).toBeFalsy()
}
})
it('should place duplicate outside locked container when duplicating block inside locked loop', () => {
const { batchToggleLocked, duplicateBlock } = useWorkflowStore.getState()
// Create a loop with a child block
addBlock('loop-1', 'loop', 'My Loop', { x: 0, y: 0 }, { loopType: 'for', count: 3 })
addBlock(
'child-1',
'function',
'Child',
{ x: 50, y: 50 },
{ parentId: 'loop-1' },
'loop-1',
'parent'
)
// Lock the loop (which cascades to the child)
batchToggleLocked(['loop-1'])
expect(useWorkflowStore.getState().blocks['child-1'].locked).toBe(true)
// Duplicate the child block
duplicateBlock('child-1')
const { blocks } = useWorkflowStore.getState()
const blockIds = Object.keys(blocks)
expect(blockIds.length).toBe(3) // loop, original child, duplicate
const duplicatedId = blockIds.find((id) => id !== 'loop-1' && id !== 'child-1')
expect(duplicatedId).toBeDefined()
if (duplicatedId) {
// Duplicate should be unlocked
expect(blocks[duplicatedId].locked).toBe(false)
// Duplicate should NOT have a parentId (placed outside the locked container)
expect(blocks[duplicatedId].data?.parentId).toBeUndefined()
// Original should still be inside the loop
expect(blocks['child-1'].data?.parentId).toBe('loop-1')
}
})
it('should keep duplicate inside unlocked container when duplicating block inside unlocked loop', () => {
const { duplicateBlock } = useWorkflowStore.getState()
// Create a loop with a child block (not locked)
addBlock('loop-1', 'loop', 'My Loop', { x: 0, y: 0 }, { loopType: 'for', count: 3 })
addBlock(
'child-1',
'function',
'Child',
{ x: 50, y: 50 },
{ parentId: 'loop-1' },
'loop-1',
'parent'
)
// Duplicate the child block (loop is NOT locked)
duplicateBlock('child-1')
const { blocks } = useWorkflowStore.getState()
const blockIds = Object.keys(blocks)
const duplicatedId = blockIds.find((id) => id !== 'loop-1' && id !== 'child-1')
if (duplicatedId) {
// Duplicate should still be inside the loop since it's not locked
expect(blocks[duplicatedId].data?.parentId).toBe('loop-1')
}
})
})
describe('updateBlockName', () => {
beforeEach(() => {
useWorkflowStore.setState({
@@ -1370,6 +791,8 @@ describe('workflow store', () => {
parallels: {},
})
const { addBlock } = useWorkflowStore.getState()
addBlock('block1', 'agent', 'Column AD', { x: 0, y: 0 })
addBlock('block2', 'function', 'Employee Length', { x: 100, y: 0 })
addBlock('block3', 'starter', 'Start', { x: 200, y: 0 })
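The paired lines throughout the hunks above differ only in whether addBlock is destructured together with the batch action under test from useWorkflowStore.getState(); the call pattern itself is unchanged. A minimal sketch of that pattern follows. It assumes a Vitest setup, a beforeEach that resets the store (as in the updateBlockName block above), and a store import path that is not shown in this diff.

// Sketch only: the store import path is an assumption.
import { describe, expect, it } from 'vitest'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'

describe('workflow store (sketch)', () => {
  it('adds an edge between two freshly added blocks', () => {
    // addBlock(id, type, name, position, data?, parentId?, extent?, blockProperties?)
    const { addBlock, batchAddEdges } = useWorkflowStore.getState()
    addBlock('block-1', 'starter', 'Start', { x: 0, y: 0 })
    addBlock('block-2', 'function', 'End', { x: 200, y: 0 })

    batchAddEdges([{ id: 'e1', source: 'block-1', target: 'block-2' }])

    const { edges } = useWorkflowStore.getState()
    expect(edges).toHaveLength(1)
    expect(edges[0]).toMatchObject({ source: 'block-1', target: 'block-2' })
  })
})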

View File

@@ -3,6 +3,8 @@ import type { Edge } from 'reactflow'
import { create } from 'zustand'
import { devtools } from 'zustand/middleware'
import { DEFAULT_DUPLICATE_OFFSET } from '@/lib/workflows/autolayout/constants'
import { getBlockOutputs } from '@/lib/workflows/blocks/block-outputs'
import { getBlock } from '@/blocks'
import type { SubBlockConfig } from '@/blocks/types'
import { normalizeName, RESERVED_BLOCK_NAMES } from '@/executor/constants'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
@@ -112,6 +114,135 @@ export const useWorkflowStore = create<WorkflowStore>()(
set({ needsRedeployment })
},
addBlock: (
id: string,
type: string,
name: string,
position: Position,
data?: Record<string, any>,
parentId?: string,
extent?: 'parent',
blockProperties?: {
enabled?: boolean
horizontalHandles?: boolean
advancedMode?: boolean
triggerMode?: boolean
height?: number
}
) => {
const blockConfig = getBlock(type)
// For custom nodes like loop and parallel that don't use BlockConfig
if (!blockConfig && (type === 'loop' || type === 'parallel')) {
// Merge parentId and extent into data if provided
const nodeData = {
...data,
...(parentId && { parentId, extent: extent || 'parent' }),
}
const newState = {
blocks: {
...get().blocks,
[id]: {
id,
type,
name,
position,
subBlocks: {},
outputs: {},
enabled: blockProperties?.enabled ?? true,
horizontalHandles: blockProperties?.horizontalHandles ?? true,
advancedMode: blockProperties?.advancedMode ?? false,
triggerMode: blockProperties?.triggerMode ?? false,
height: blockProperties?.height ?? 0,
data: nodeData,
},
},
edges: [...get().edges],
loops: get().generateLoopBlocks(),
parallels: get().generateParallelBlocks(),
}
set(newState)
get().updateLastSaved()
return
}
if (!blockConfig) return
// Merge parentId and extent into data for regular blocks
const nodeData = {
...data,
...(parentId && { parentId, extent: extent || 'parent' }),
}
const subBlocks: Record<string, SubBlockState> = {}
const subBlockStore = useSubBlockStore.getState()
const activeWorkflowId = useWorkflowRegistry.getState().activeWorkflowId
blockConfig.subBlocks.forEach((subBlock) => {
const subBlockId = subBlock.id
const initialValue = resolveInitialSubblockValue(subBlock)
const normalizedValue =
initialValue !== undefined && initialValue !== null ? initialValue : null
subBlocks[subBlockId] = {
id: subBlockId,
type: subBlock.type,
value: normalizedValue as SubBlockState['value'],
}
if (activeWorkflowId) {
try {
const valueToStore =
initialValue !== undefined ? cloneInitialSubblockValue(initialValue) : null
subBlockStore.setValue(id, subBlockId, valueToStore)
} catch (error) {
logger.warn('Failed to seed sub-block store value during block creation', {
blockId: id,
subBlockId,
error: error instanceof Error ? error.message : String(error),
})
}
} else {
logger.warn('Cannot seed sub-block store value: activeWorkflowId not available', {
blockId: id,
subBlockId,
})
}
})
// Get outputs based on trigger mode
const triggerMode = blockProperties?.triggerMode ?? false
const outputs = getBlockOutputs(type, subBlocks, triggerMode)
const newState = {
blocks: {
...get().blocks,
[id]: {
id,
type,
name,
position,
subBlocks,
outputs,
enabled: blockProperties?.enabled ?? true,
horizontalHandles: blockProperties?.horizontalHandles ?? true,
advancedMode: blockProperties?.advancedMode ?? false,
triggerMode: triggerMode,
height: blockProperties?.height ?? 0,
layout: {},
data: nodeData,
},
},
edges: [...get().edges],
loops: get().generateLoopBlocks(),
parallels: get().generateParallelBlocks(),
}
set(newState)
get().updateLastSaved()
},
updateNodeDimensions: (id: string, dimensions: { width: number; height: number }) => {
set((state) => {
const block = state.blocks[id]
@@ -207,7 +338,6 @@ export const useWorkflowStore = create<WorkflowStore>()(
triggerMode?: boolean
height?: number
data?: Record<string, any>
locked?: boolean
}>,
edges?: Edge[],
subBlockValues?: Record<string, Record<string, unknown>>,
@@ -232,7 +362,6 @@ export const useWorkflowStore = create<WorkflowStore>()(
triggerMode: block.triggerMode ?? false,
height: block.height ?? 0,
data: block.data,
locked: block.locked ?? false,
}
}
@@ -257,27 +386,11 @@ export const useWorkflowStore = create<WorkflowStore>()(
}
}
// Only regenerate loops/parallels if we're adding blocks that affect them:
// - Adding a loop/parallel container block
// - Adding a block as a child of a loop/parallel (has parentId pointing to one)
const needsLoopRegeneration = blocks.some(
(block) =>
block.type === 'loop' ||
(block.data?.parentId && newBlocks[block.data.parentId]?.type === 'loop')
)
const needsParallelRegeneration = blocks.some(
(block) =>
block.type === 'parallel' ||
(block.data?.parentId && newBlocks[block.data.parentId]?.type === 'parallel')
)
set({
blocks: newBlocks,
edges: newEdges,
loops: needsLoopRegeneration ? generateLoopBlocks(newBlocks) : { ...get().loops },
parallels: needsParallelRegeneration
? generateParallelBlocks(newBlocks)
: { ...get().parallels },
loops: generateLoopBlocks(newBlocks),
parallels: generateParallelBlocks(newBlocks),
})
if (subBlockValues && Object.keys(subBlockValues).length > 0) {
@@ -367,69 +480,24 @@ export const useWorkflowStore = create<WorkflowStore>()(
},
batchToggleEnabled: (ids: string[]) => {
if (ids.length === 0) return
const currentBlocks = get().blocks
const newBlocks = { ...currentBlocks }
const blocksToToggle = new Set<string>()
// For each ID, collect blocks to toggle (skip locked blocks entirely)
// If it's a container, also include non-locked children
const newBlocks = { ...get().blocks }
for (const id of ids) {
const block = currentBlocks[id]
if (!block) continue
// Skip locked blocks entirely (including their children)
if (block.locked) continue
blocksToToggle.add(id)
// If it's a loop or parallel, also include non-locked children
if (block.type === 'loop' || block.type === 'parallel') {
Object.entries(currentBlocks).forEach(([blockId, b]) => {
if (b.data?.parentId === id && !b.locked) {
blocksToToggle.add(blockId)
}
})
if (newBlocks[id]) {
newBlocks[id] = { ...newBlocks[id], enabled: !newBlocks[id].enabled }
}
}
// If no blocks can be toggled, exit early
if (blocksToToggle.size === 0) return
// Determine target enabled state based on first toggleable block
const firstToggleableId = Array.from(blocksToToggle)[0]
const firstBlock = currentBlocks[firstToggleableId]
const targetEnabled = !firstBlock.enabled
// Apply the enabled state to all toggleable blocks
for (const blockId of blocksToToggle) {
newBlocks[blockId] = { ...newBlocks[blockId], enabled: targetEnabled }
}
set({ blocks: newBlocks, edges: [...get().edges] })
get().updateLastSaved()
},
batchToggleHandles: (ids: string[]) => {
const currentBlocks = get().blocks
const newBlocks = { ...currentBlocks }
// Helper to check if a block is protected (locked or inside locked parent)
const isProtected = (blockId: string): boolean => {
const block = currentBlocks[blockId]
if (!block) return false
if (block.locked) return true
const parentId = block.data?.parentId
if (parentId && currentBlocks[parentId]?.locked) return true
return false
}
const newBlocks = { ...get().blocks }
for (const id of ids) {
if (!newBlocks[id] || isProtected(id)) continue
newBlocks[id] = {
...newBlocks[id],
horizontalHandles: !newBlocks[id].horizontalHandles,
if (newBlocks[id]) {
newBlocks[id] = {
...newBlocks[id],
horizontalHandles: !newBlocks[id].horizontalHandles,
}
}
}
set({ blocks: newBlocks, edges: [...get().edges] })
@@ -461,9 +529,8 @@ export const useWorkflowStore = create<WorkflowStore>()(
set({
blocks: { ...blocks },
edges: newEdges,
// Edges don't affect loop/parallel structure (determined by parentId), skip regeneration
loops: { ...get().loops },
parallels: { ...get().parallels },
loops: generateLoopBlocks(blocks),
parallels: generateParallelBlocks(blocks),
})
get().updateLastSaved()
@@ -477,9 +544,8 @@ export const useWorkflowStore = create<WorkflowStore>()(
set({
blocks: { ...blocks },
edges: newEdges,
// Edges don't affect loop/parallel structure (determined by parentId), skip regeneration
loops: { ...get().loops },
parallels: { ...get().parallels },
loops: generateLoopBlocks(blocks),
parallels: generateParallelBlocks(blocks),
})
get().updateLastSaved()
@@ -574,33 +640,9 @@ export const useWorkflowStore = create<WorkflowStore>()(
if (!block) return
const newId = crypto.randomUUID()
// Check if block is inside a locked container - if so, place duplicate outside
const parentId = block.data?.parentId
const parentBlock = parentId ? get().blocks[parentId] : undefined
const isParentLocked = parentBlock?.locked ?? false
// If parent is locked, calculate position outside the container
let offsetPosition: Position
const newData = block.data ? { ...block.data } : undefined
if (isParentLocked && parentBlock) {
// Place duplicate outside the locked container (to the right of it)
const containerWidth = parentBlock.data?.width ?? 400
offsetPosition = {
x: parentBlock.position.x + containerWidth + 50,
y: parentBlock.position.y,
}
// Remove parent relationship since we're placing outside
if (newData) {
newData.parentId = undefined
newData.extent = undefined
}
} else {
offsetPosition = {
x: block.position.x + DEFAULT_DUPLICATE_OFFSET.x,
y: block.position.y + DEFAULT_DUPLICATE_OFFSET.y,
}
const offsetPosition = {
x: block.position.x + DEFAULT_DUPLICATE_OFFSET.x,
y: block.position.y + DEFAULT_DUPLICATE_OFFSET.y,
}
const newName = getUniqueBlockName(block.name, get().blocks)
@@ -628,8 +670,6 @@ export const useWorkflowStore = create<WorkflowStore>()(
name: newName,
position: offsetPosition,
subBlocks: newSubBlocks,
locked: false,
data: newData,
},
},
edges: [...get().edges],
@@ -1237,70 +1277,6 @@ export const useWorkflowStore = create<WorkflowStore>()(
getDragStartPosition: () => {
return get().dragStartPosition || null
},
setBlockLocked: (id: string, locked: boolean) => {
const block = get().blocks[id]
if (!block || block.locked === locked) return
const newState = {
blocks: {
...get().blocks,
[id]: {
...block,
locked,
},
},
edges: [...get().edges],
loops: { ...get().loops },
parallels: { ...get().parallels },
}
set(newState)
get().updateLastSaved()
},
batchToggleLocked: (ids: string[]) => {
if (ids.length === 0) return
const currentBlocks = get().blocks
const newBlocks = { ...currentBlocks }
const blocksToToggle = new Set<string>()
// For each ID, collect blocks to toggle
// If it's a container, also include all children
for (const id of ids) {
const block = currentBlocks[id]
if (!block) continue
blocksToToggle.add(id)
// If it's a loop or parallel, also include all children
if (block.type === 'loop' || block.type === 'parallel') {
Object.entries(currentBlocks).forEach(([blockId, b]) => {
if (b.data?.parentId === id) {
blocksToToggle.add(blockId)
}
})
}
}
// If no blocks found, exit early
if (blocksToToggle.size === 0) return
// Determine target locked state based on first block in original ids
const firstBlock = currentBlocks[ids[0]]
if (!firstBlock) return
const targetLocked = !firstBlock.locked
// Apply the locked state to all blocks
for (const blockId of blocksToToggle) {
newBlocks[blockId] = { ...newBlocks[blockId], locked: targetLocked }
}
set({ blocks: newBlocks, edges: [...get().edges] })
get().updateLastSaved()
},
}),
{ name: 'workflow-store' }
)
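One side of the batchAddBlocks hunk gates loop/parallel regeneration on whether the incoming blocks can actually change container membership: a loop or parallel block itself, or a child whose parentId points at one. A standalone sketch of that gate is below, using only shapes visible in this diff; BlockLike and needsRegeneration are illustrative names, not the store's real types.

// Sketch of the regeneration gate; BlockLike is a simplified stand-in
// for the store's block records.
type BlockLike = { type: string; data?: { parentId?: string } }

function needsRegeneration(
  incoming: BlockLike[],
  allBlocks: Record<string, BlockLike>,
  containerType: 'loop' | 'parallel'
): boolean {
  return incoming.some((block) => {
    if (block.type === containerType) return true
    const parentId = block.data?.parentId
    return parentId !== undefined && allBlocks[parentId]?.type === containerType
  })
}

// Usage mirrors the hunk: only rebuild the map that can actually change.
// loops: needsRegeneration(blocks, newBlocks, 'loop')
//   ? generateLoopBlocks(newBlocks)
//   : { ...get().loops }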

View File

@@ -87,7 +87,6 @@ export interface BlockState {
triggerMode?: boolean
data?: BlockData
layout?: BlockLayoutState
locked?: boolean
}
export interface SubBlockState {
@@ -132,7 +131,6 @@ export interface Loop {
whileCondition?: string // JS expression that evaluates to boolean (for while loops)
doWhileCondition?: string // JS expression that evaluates to boolean (for do-while loops)
enabled: boolean
locked?: boolean
}
export interface Parallel {
@@ -142,7 +140,6 @@ export interface Parallel {
count?: number // Number of parallel executions for count-based parallel
parallelType?: 'count' | 'collection' // Explicit parallel type to avoid inference bugs
enabled: boolean
locked?: boolean
}
export interface Variable {
@@ -178,6 +175,22 @@ export interface WorkflowState {
}
export interface WorkflowActions {
addBlock: (
id: string,
type: string,
name: string,
position: Position,
data?: Record<string, any>,
parentId?: string,
extent?: 'parent',
blockProperties?: {
enabled?: boolean
horizontalHandles?: boolean
advancedMode?: boolean
triggerMode?: boolean
height?: number
}
) => void
updateNodeDimensions: (id: string, dimensions: { width: number; height: number }) => void
batchUpdateBlocksWithParent: (
updates: Array<{
@@ -236,8 +249,6 @@ export interface WorkflowActions {
workflowState: WorkflowState,
options?: { updateLastSaved?: boolean }
) => void
setBlockLocked: (id: string, locked: boolean) => void
batchToggleLocked: (ids: string[]) => void
}
export type WorkflowStore = WorkflowState & WorkflowActions
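The expanded addBlock signature in the WorkflowActions hunk makes the optional parentId/extent pair (plus the blockProperties bag) the way callers nest a block inside a container and seed its flags. A rough illustration follows; the IDs are made up and the existence of a 'loop-1' container is assumed.

// Illustration only: 'child-1' and 'loop-1' are hypothetical IDs.
const { addBlock } = useWorkflowStore.getState()

addBlock(
  'child-1',                              // id
  'function',                             // type
  'Child 1',                              // name
  { x: 50, y: 50 },                       // position
  { parentId: 'loop-1' },                 // data
  'loop-1',                               // parentId
  'parent',                               // extent
  { enabled: true, triggerMode: false }   // blockProperties
)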

View File

@@ -125,8 +125,8 @@ app:
# Rate Limiting Configuration (per minute)
RATE_LIMIT_WINDOW_MS: "60000" # Rate limit window duration (1 minute)
RATE_LIMIT_FREE_SYNC: "50" # Sync API executions per minute
RATE_LIMIT_FREE_ASYNC: "200" # Async API executions per minute
RATE_LIMIT_FREE_SYNC: "10" # Sync API executions per minute
RATE_LIMIT_FREE_ASYNC: "50" # Async API executions per minute
# UI Branding & Whitelabeling Configuration
NEXT_PUBLIC_BRAND_NAME: "Sim" # Custom brand name
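The rate-limit hunk leaves the 60000 ms window untouched and only swaps the free-tier sync/async values. Assuming these ConfigMap keys reach the app as plain environment variables (the wiring is not part of this diff), a defensive reader on the Node side could look like the sketch below; readRateLimit is a hypothetical helper, and the fallbacks use the 10/50 side of the hunk purely for illustration.

// Sketch: assumes the keys above surface as process env vars.
function readRateLimit(name: string, fallback: number): number {
  const parsed = Number(process.env[name])
  // Reject NaN, Infinity, zero, and negatives; fall back to the chart default.
  return Number.isFinite(parsed) && parsed > 0 ? parsed : fallback
}

const rateLimitWindowMs = readRateLimit('RATE_LIMIT_WINDOW_MS', 60_000)
const freeSyncPerWindow = readRateLimit('RATE_LIMIT_FREE_SYNC', 10)
const freeAsyncPerWindow = readRateLimit('RATE_LIMIT_FREE_ASYNC', 50)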

View File

@@ -1 +0,0 @@
ALTER TABLE "workflow_blocks" ADD COLUMN "locked" boolean DEFAULT false NOT NULL;

File diff suppressed because it is too large

View File

@@ -1044,13 +1044,6 @@
"when": 1769656977701,
"tag": "0149_next_cerise",
"breakpoints": true
},
{
"idx": 150,
"version": "7",
"when": 1769897862156,
"tag": "0150_flimsy_hemingway",
"breakpoints": true
}
]
}

View File

@@ -189,7 +189,6 @@ export const workflowBlocks = pgTable(
isWide: boolean('is_wide').notNull().default(false),
advancedMode: boolean('advanced_mode').notNull().default(false),
triggerMode: boolean('trigger_mode').notNull().default(false),
locked: boolean('locked').notNull().default(false),
height: decimal('height').notNull().default('0'),
subBlocks: jsonb('sub_blocks').notNull().default('{}'),
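On the side of this diff where workflowBlocks carries the locked column (boolean, not null, default false), a lookup of locked rows with Drizzle would be along these lines; the db instance and both import paths are assumptions, and only the table and column names come from the hunk above.

// Sketch only: import paths and the db instance are assumptions.
import { eq } from 'drizzle-orm'
import { db } from '@/db'
import { workflowBlocks } from '@/db/schema'

// Fetch every block row currently marked locked.
async function getLockedBlocks() {
  return db.select().from(workflowBlocks).where(eq(workflowBlocks.locked, true))
}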

View File

@@ -21,7 +21,6 @@ export interface BlockFactoryOptions {
triggerMode?: boolean
data?: BlockData
parentId?: string
locked?: boolean
}
/**
@@ -68,7 +67,6 @@ export function createBlock(options: BlockFactoryOptions = {}): any {
height: options.height ?? 0,
advancedMode: options.advancedMode ?? false,
triggerMode: options.triggerMode ?? false,
locked: options.locked ?? false,
data: Object.keys(data).length > 0 ? data : undefined,
layout: {},
}
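On the side of this diff where BlockFactoryOptions includes the locked flag, createBlock passes it through with a false default, so a test that needs a pre-locked block only has to set the fields it cares about. A sketch, assuming createBlock is exported from the test factory shown here:

// Sketch only: the import path is an assumption; locked and parentId
// are the options visible in the hunk above.
import { createBlock } from '@/test-utils/block-factory'

// A block that starts out locked inside a (hypothetical) 'loop-1' container.
const lockedChild = createBlock({ locked: true, parentId: 'loop-1' })

// With no options, createBlock falls back to its defaults (locked: false, etc.).
const defaultBlock = createBlock()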