Compare commits

41 Commits

Author SHA1 Message Date
Vikhyath Mondreti
11dc18a80d v0.5.74: autolayout improvements, clerk integration, auth enforcements 2026-01-27 20:37:39 -08:00
Waleed
ab4e9dc72f v0.5.73: ci, helm updates, kb, ui fixes, note block enhancements 2026-01-26 22:04:35 -08:00
Vikhyath Mondreti
1c58c35bd8 v0.5.72: azure connection string, supabase improvement, multitrigger resolution, docs quick reference 2026-01-25 23:42:27 -08:00
Waleed
d63a5cb504 v0.5.71: ux, ci improvements, docs updates 2026-01-25 03:08:08 -08:00
Waleed
8bd5d41723 v0.5.70: router fix, anthropic agent response format adherence 2026-01-24 20:57:02 -08:00
Waleed
c12931bc50 v0.5.69: kb upgrades, blog, copilot improvements, auth consolidation (#2973)
* fix(subflows): tag dropdown + resolution logic (#2949)

* fix(subflows): tag dropdown + resolution logic

* fixes;

* revert parallel change

* chore(deps): bump posthog-js to 1.334.1 (#2948)

* fix(idempotency): add conflict target to atomicallyClaimDb query + remove redundant db namespace tracking (#2950)

* fix(idempotency): add conflict target to atomicallyClaimDb query

* delete needs to account for namespace

* simplify namespace filtering logic

* fix cleanup

* consistent target
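For readers unfamiliar with the conflict-target pattern referenced in the atomicallyClaimDb fix above, here is a minimal, hypothetical sketch of an atomic claim written against Drizzle ORM; the table, columns, and function names are illustrative and are not Sim's actual schema.

```typescript
// Hypothetical sketch only; not Sim's real idempotency schema or query.
import { pgTable, primaryKey, text, timestamp } from 'drizzle-orm/pg-core'
import type { NodePgDatabase } from 'drizzle-orm/node-postgres'

const idempotencyKeys = pgTable(
  'idempotency_keys',
  {
    namespace: text('namespace').notNull(),
    key: text('key').notNull(),
    claimedAt: timestamp('claimed_at').defaultNow().notNull(),
  },
  (t) => ({ pk: primaryKey({ columns: [t.namespace, t.key] }) })
)

// Only the first caller for a given (namespace, key) gets a row back; concurrent
// duplicates hit the conflict target and receive an empty result.
async function atomicallyClaim(db: NodePgDatabase, namespace: string, key: string) {
  const rows = await db
    .insert(idempotencyKeys)
    .values({ namespace, key })
    .onConflictDoNothing({ target: [idempotencyKeys.namespace, idempotencyKeys.key] })
    .returning()
  return rows.length > 0
}
```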

* improvement(kb): add document filtering, select all, and React Query migration (#2951)

* improvement(kb): add document filtering, select all, and React Query migration

* test(kb): update tests for enabledFilter and removed userId params

* fix(kb): remove non-null assertion, add explicit guard
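As a rough illustration of the React Query migration mentioned above, the document list can be modeled as a cached query keyed by its filters, so enabled-filtering and select-all read from the same cache. The hook name, endpoint, and types below are hypothetical.

```typescript
// Illustrative sketch of a React Query document hook; endpoint and types are assumed.
import { useQuery } from '@tanstack/react-query'

interface KnowledgeBaseDocument {
  id: string
  name: string
  enabled: boolean
}

function useKnowledgeBaseDocuments(knowledgeBaseId: string, enabledFilter?: boolean) {
  return useQuery({
    // Including the filter in the key gives each filter state its own cache entry.
    queryKey: ['kb-documents', knowledgeBaseId, enabledFilter],
    queryFn: async (): Promise<KnowledgeBaseDocument[]> => {
      const params = enabledFilter === undefined ? '' : `?enabled=${enabledFilter}`
      const res = await fetch(`/api/knowledge/${knowledgeBaseId}/documents${params}`)
      if (!res.ok) throw new Error('Failed to load documents')
      return res.json()
    },
  })
}
```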

* improvement(logs): trace span, details (#2952)

* improvement(action-bar): ordering

* improvement(logs): details, trace span

* feat(blog): v0.5 release post (#2953)

* feat(blog): v0.5 post

* improvement(blog): simplify title and remove code block header

- Simplified blog title from Introducing Sim Studio v0.5 to Introducing Sim v0.5
- Removed language label header and copy button from code blocks for cleaner appearance

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>

* ack PR comments

* small styling improvements

* created system to create post-specific components

* updated component

* cache invalidation

---------

Co-authored-by: Claude Opus 4.5 <noreply@anthropic.com>

* feat(admin): add credits endpoint to issue credits to users (#2954)

* feat(admin): add credits endpoint to issue credits to users

* fix(admin): use existing credit functions and handle enterprise seats

* fix(admin): reject NaN and Infinity in amount validation

* styling

* fix(admin): validate userId and email are strings
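The NaN/Infinity rejection above amounts to a finiteness guard on the amount field; a minimal sketch with illustrative names, not the endpoint's actual validation code:

```typescript
// Illustrative guard only; the real endpoint's validation may differ.
function isValidCreditAmount(amount: unknown): amount is number {
  // Number.isFinite returns false for NaN, Infinity, -Infinity, and non-numbers.
  return typeof amount === 'number' && Number.isFinite(amount)
}

console.log(isValidCreditAmount(25)) // true
console.log(isValidCreditAmount(Number.NaN)) // false
console.log(isValidCreditAmount(Number.POSITIVE_INFINITY)) // false
```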

* improvement(copilot): fast mode, subagent tool responses and allow preferences (#2955)

* Improvements

* Fix actions mapping

* Remove console logs

* fix(billing): handle missing userStats and prevent crashes (#2956)

* fix(billing): handle missing userStats and prevent crashes

* fix(billing): correct import path for getFilledPillColor

* fix(billing): add Number.isFinite check to lastPeriodCost

* fix(logs): refresh logic to refresh logs details (#2958)

* fix(security): add authentication and input validation to API routes (#2959)

* fix(security): add authentication and input validation to API routes

* moved utils

* remove extraneous comments

* removed unused dep

* improvement(helm): add internal ingress support and same-host path consolidation (#2960)

* improvement(helm): add internal ingress support and same-host path consolidation

* improvement(helm): clean up ingress template comments

Simplify verbose inline Helm comments and section dividers to match the
minimal style used in services.yaml.

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>

* fix(helm): add missing copilot path consolidation for realtime host

When copilot.host equals realtime.host but differs from app.host,
copilot paths were not being routed. Added logic to consolidate
copilot paths into the realtime rule for this scenario.

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>

* improvement(helm): follow ingress best practices

- Remove orphan comments that appeared when services were disabled
- Add documentation about path ordering requirements
- Paths rendered in order: realtime, copilot, app (specific before catch-all)
- Clean template output matching industry Helm chart standards

---------

Co-authored-by: Claude Opus 4.5 <noreply@anthropic.com>

* feat(blog): enterprise post (#2961)

* feat(blog): enterprise post

* added more images, styling

* more content

* updated v0-5 post

* remove unused transition

---------

Co-authored-by: Vikhyath Mondreti <vikhyath@simstudio.ai>

* fix(envvars): resolution standardized (#2957)

* fix(envvars): resolution standardized

* remove comments

* address bugbot

* fix highlighting for env vars

* remove comments

* address greptile

* address bugbot

* fix(copilot): mask credentials fix (#2963)

* Fix copilot masking

* Clean up

* Lint

* improvement(webhooks): remove dead code (#2965)

* fix(webhooks): subscription recreation path

* improvement(webhooks): remove dead code

* fix tests

* address bugbot comments

* fix restoration edge case

* fix more edge cases

* address bugbot comments

* fix gmail polling

* add warnings for UI indication for credential sets

* fix(preview): subblock values (#2969)

* fix(child-workflow): nested spans handoff (#2966)

* fix(child-workflow): nested spans handoff

* remove overly defensive programming

* update type check

* type more code

* remove more dead code

* address bugbot comments

* fix(security): restrict API key access on internal-only routes (#2964)

* fix(security): restrict API key access on internal-only routes

* test(security): update function execute tests for checkInternalAuth

* updated agent handler

* move session check higher in checkSessionOrInternalAuth

* extracted duplicate code into helper for resolving user from jwt

* fix(copilot): update copilot chat title (#2968)

* fix(hitl): fix condition blocks after hitl (#2967)

* fix(notes): ghost edges (#2970)

* fix(notes): ghost edges

* fix deployed state fallback

* fallback

* remove UI level checks

* annotation missing from autoconnect source check

* improvement(docs): loop and parallel var reference syntax (#2975)

* fix(blog): slash actions description (#2976)

* improvement(docs): loop and parallel var reference syntax

* fix(blog): slash actions description

* fix(auth): copilot routes (#2977)

* Fix copilot auth

* Fix

* Fix

* Fix

* fix(copilot): fix edit summary for loops/parallels (#2978)

* fix(integrations): hide from tool bar (#2544)

* fix(landing): ui (#2979)

* fix(edge-validation): race condition on collaborative add (#2980)

* fix(variables): boolean type support and input improvements (#2981)

* fix(variables): boolean type support and input improvements

* fix formatting

---------

Co-authored-by: Vikhyath Mondreti <vikhyathvikku@gmail.com>
Co-authored-by: Emir Karabeg <78010029+emir-karabeg@users.noreply.github.com>
Co-authored-by: Claude Opus 4.5 <noreply@anthropic.com>
Co-authored-by: Siddharth Ganesan <33737564+Sg312@users.noreply.github.com>
Co-authored-by: Vikhyath Mondreti <vikhyath@simstudio.ai>
2026-01-24 14:29:53 -08:00
Waleed
e9c4251c1c v0.5.68: router block reasoning, executor improvements, variable resolution consolidation, helm updates (#2946)
* improvement(workflow-item): stabilize avatar layout and fix name truncation (#2939)

* improvement(workflow-item): stabilize avatar layout and fix name truncation

* fix(avatars): revert overflow bg to hardcoded color for contrast

* fix(executor): stop parallel execution when block errors (#2940)

* improvement(helm): add per-deployment extraVolumes support (#2942)

* fix(gmail): expose messageId field in read email block (#2943)

* fix(resolver): consolidate reference resolution  (#2941)

* fix(resolver): consolidate code to resolve references

* fix edge cases

* use already formatted error

* fix multi index

* fix backwards compat reachability

* handle backwards compatibility accurately

* use shared constant correctly

* feat(router): expose reasoning output in router v2 block (#2945)

* fix(copilot): always allow, credential masking (#2947)

* Fix always allow, credential validation

* Credential masking

* Autoload

* fix(executor): handle condition dead-end branches in loops (#2944)

---------

Co-authored-by: Vikhyath Mondreti <vikhyathvikku@gmail.com>
Co-authored-by: Siddharth Ganesan <33737564+Sg312@users.noreply.github.com>
2026-01-22 13:48:15 -08:00
Waleed
cc2be33d6b v0.5.67: loading, password reset, ui improvements, helm updates (#2928)
* fix(zustand): updated to useShallow from deprecated createWithEqualityFn (#2919)

* fix(logger): use direct env access for webpack inlining (#2920)

* fix(notifications): text overflow with line-clamp (#2921)

* chore(helm): add env vars for Vertex AI, orgs, and telemetry (#2922)

* fix(auth): improve reset password flow and consolidate brand detection (#2924)

* fix(auth): improve reset password flow and consolidate brand detection

* fix(auth): set errorHandled for EMAIL_NOT_VERIFIED to prevent duplicate error

* fix(auth): clear success message on login errors

* chore(auth): fix import order per lint

* fix(action-bar): duplicate subflows with children (#2923)

* fix(action-bar): duplicate subflows with children

* fix(action-bar): add validateTriggerPaste for subflow duplicate

* fix(resolver): agent response format, input formats, root level (#2925)

* fix(resolvers): agent response format, input formats, root level

* fix response block initial seeding

* fix tests

* fix(messages-input): fix cursor alignment and auto-resize with overlay (#2926)

* fix(messages-input): fix cursor alignment and auto-resize with overlay

* fixed remaining zustand warnings

* fix(stores): remove dead code causing log spam on startup (#2927)

* fix(stores): remove dead code causing log spam on startup

* fix(stores): replace custom tools zustand store with react query cache

* improvement(ui): use BrandedButton and BrandedLink components (#2930)

- Refactor auth forms to use BrandedButton component
- Add BrandedLink component for changelog page
- Reduce code duplication in login, signup, reset-password forms
- Update star count default value

* fix(custom-tools): remove unsafe title fallback in getCustomTool (#2929)

* fix(custom-tools): remove unsafe title fallback in getCustomTool

* fix(custom-tools): restore title fallback in getCustomTool lookup

Custom tools are referenced by title (custom_${title}), not database ID.
The title fallback is required for client-side tool resolution to work.
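A hedged sketch of the lookup behavior this restores, with hypothetical types and names: client-side blocks reference custom tools by the `custom_${title}` form rather than by database ID, so the lookup needs a title-based fallback.

```typescript
// Illustrative only; not the actual getCustomTool implementation.
interface CustomTool {
  id: string
  title: string
}

function getCustomTool(tools: CustomTool[], reference: string): CustomTool | undefined {
  // Prefer an exact ID match when the reference is a database ID.
  const byId = tools.find((tool) => tool.id === reference)
  if (byId) return byId

  // Fall back to the `custom_${title}` form used by client-side tool references.
  if (reference.startsWith('custom_')) {
    const title = reference.slice('custom_'.length)
    return tools.find((tool) => tool.title === title)
  }
  return undefined
}
```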

* fix(null-bodies): empty bodies handling (#2931)

* fix(null-statuses): empty bodies handling

* address bugbot comment

* fix(token-refresh): microsoft, notion, x, linear (#2933)

* fix(microsoft): proactive refresh needed

* fix(x): missing token refresh flag

* notion and linear missing flag too

* address bugbot comment

* fix(auth): handle EMAIL_NOT_VERIFIED in onError callback (#2932)

* fix(auth): handle EMAIL_NOT_VERIFIED in onError callback

* refactor(auth): extract redirectToVerify helper to reduce duplication

* fix(workflow-selector): use dedicated selector for workflow dropdown (#2934)

* feat(workflow-block): preview (#2935)

* improvement(copilot): tool configs to show nested props (#2936)

* fix(auth): add genericOAuth providers to trustedProviders (#2937)

---------

Co-authored-by: Vikhyath Mondreti <vikhyathvikku@gmail.com>
Co-authored-by: Emir Karabeg <78010029+emir-karabeg@users.noreply.github.com>
2026-01-21 22:53:25 -08:00
Vikhyath Mondreti
45371e521e v0.5.66: external http requests fix, ring highlighting 2026-01-21 02:55:39 -08:00
Waleed
0ce0f98aa5 v0.5.65: gemini updates, textract integration, ui updates (#2909)
* fix(google): wrap primitive tool responses for Gemini API compatibility (#2900)

* fix(canonical): copilot path + update parent (#2901)

* fix(rss): add top-level title, link, pubDate fields to RSS trigger output (#2902)

* fix(rss): add top-level title, link, pubDate fields to RSS trigger output

* fix(imap): add top-level fields to IMAP trigger output

* improvement(browseruse): add profile id param (#2903)

* improvement(browseruse): add profile id param

* make request a stub since we have directExec

* improvement(executor): upgraded abort controller to handle aborts for loops and parallels (#2880)

* improvement(executor): upgraded abort controller to handle aborts for loops and parallels

* comments
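A generic sketch of the abort pattern this PR describes: one controller whose signal is checked between loop or parallel iterations and passed down to nested work. Function names and the route are illustrative, not the executor's real API.

```typescript
// Generic abort-propagation sketch; names and endpoint are assumptions.
async function runIterations(items: string[], signal: AbortSignal) {
  for (const item of items) {
    // Stop before starting the next iteration once an abort has been requested.
    if (signal.aborted) throw new Error('Execution aborted')
    await executeBlock(item, signal)
  }
}

async function executeBlock(item: string, signal: AbortSignal) {
  // Downstream fetches honour the same signal so in-flight network work is cancelled too.
  await fetch(`/api/blocks/${item}/run`, { method: 'POST', signal })
}
```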

* improvement(files): update execution for passing base64 strings (#2906)

* progress

* improvement(execution): update execution for passing base64 strings

* fix types

* cleanup comments

* path security vuln

* reject promise correctly

* fix redirect case

* remove proxy routes

* fix tests

* use ipaddr

* feat(tools): added textract, added v2 for mistral, updated tag dropdown (#2904)

* feat(tools): added textract

* cleanup

* ack pr comments

* reorder

* removed upload for textract async version

* fix additional fields dropdown in editor, update parser to leave validation to be done on the server

* added mistral v2, files v2, and finalized textract

* updated the rest of the old file patterns, updated mistral outputs for v2

* updated tag dropdown to parse non-operation fields as well

* updated extension finder

* cleanup

* added description for inputs to workflow

* use helper for internal route check

* fix tag dropdown merge conflict change

* remove duplicate code

---------

Co-authored-by: Vikhyath Mondreti <vikhyath@simstudio.ai>

* fix(ui): change add inputs button to match output selector (#2907)

* fix(canvas): removed invite to workspace from canvas popover (#2908)

* fix(canvas): removed invite to workspace

* removed unused props

* fix(copilot): legacy tool display names (#2911)

* fix(a2a): canonical merge  (#2912)

* fix canonical merge

* fix empty array case

* fix(change-detection): copilot diffs have extra field (#2913)

* improvement(logs): improved logs ui bugs, added subflow disable UI (#2910)

* improvement(logs): improved logs ui bugs, added subflow disable UI

* added duplicate to action bar for subflows

* feat(broadcast): email v0.5 (#2905)

---------

Co-authored-by: Vikhyath Mondreti <vikhyathvikku@gmail.com>
Co-authored-by: Vikhyath Mondreti <vikhyath@simstudio.ai>
Co-authored-by: Emir Karabeg <78010029+emir-karabeg@users.noreply.github.com>
2026-01-20 23:54:55 -08:00
Waleed
dff1c9d083 v0.5.64: unsubscribe, search improvements, metrics, additional SSO configuration 2026-01-20 00:34:11 -08:00
Vikhyath Mondreti
b09f683072 v0.5.63: ui and performance improvements, more google tools 2026-01-18 15:22:42 -08:00
Vikhyath Mondreti
a8bb0db660 v0.5.62: webhook bug fixes, seeding default subblock values, block selection fixes 2026-01-16 20:27:06 -08:00
Waleed
af82820a28 v0.5.61: webhook improvements, workflow controls, react query for deployment status, chat fixes, reducto and pulse OCR, linear fixes 2026-01-16 18:06:23 -08:00
Waleed
4372841797 v0.5.60: invitation flow improvements, chat fixes, a2a improvements, additional copilot actions 2026-01-15 00:02:18 -08:00
Waleed
5e8c843241 v0.5.59: a2a support, documentation 2026-01-13 13:21:21 -08:00
Waleed
7bf3d73ee6 v0.5.58: export folders, new tools, permissions groups enhancements 2026-01-13 00:56:59 -08:00
Vikhyath Mondreti
7ffc11a738 v0.5.57: subagents, context menu improvements, bug fixes 2026-01-11 11:38:40 -08:00
Waleed
be578e2ed7 v0.5.56: batch operations, access control and permission groups, billing fixes 2026-01-10 00:31:34 -08:00
Waleed
f415e5edc4 v0.5.55: polling groups, bedrock provider, devcontainer fixes, workflow preview enhancements 2026-01-08 23:36:56 -08:00
Waleed
13a6e6c3fa v0.5.54: seo, model blacklist, helm chart updates, fireflies integration, autoconnect improvements, billing fixes 2026-01-07 16:09:45 -08:00
Waleed
f5ab7f21ae v0.5.53: hotkey improvements, added redis fallback, fixes for workflow tool 2026-01-06 23:34:52 -08:00
Waleed
bfb6fffe38 v0.5.52: new port-based router block, combobox expression and variable support 2026-01-06 16:14:10 -08:00
Waleed
4fbec0a43f v0.5.51: triggers, kb, condition block improvements, supabase and grain integration updates 2026-01-06 14:26:46 -08:00
Waleed
585f5e365b v0.5.50: import improvements, ui upgrades, kb styling and performance improvements 2026-01-05 00:35:55 -08:00
Waleed
3792bdd252 v0.5.49: hitl improvements, new email styles, imap trigger, logs context menu (#2672)
* feat(logs-context-menu): consolidated logs utils and types, added logs record context menu (#2659)

* feat(email): welcome email; improvement(emails): ui/ux (#2658)

* feat(email): welcome email; improvement(emails): ui/ux

* improvement(emails): links, accounts, preview

* refactor(emails): file structure and wrapper components

* added envvar for personal emails sent, added isHosted gate

* fixed failing tests, added env mock

* fix: removed comment

---------

Co-authored-by: waleed <walif6@gmail.com>

* fix(logging): hitl + trigger dev crash protection (#2664)

* hitl gaps

* deal with trigger worker crashes

* cleanup import structure

* feat(imap): added support for imap trigger (#2663)

* feat(tools): added support for imap trigger

* feat(imap): added parity, tested

* ack PR comments

* final cleanup

* feat(i18n): update translations (#2665)

Co-authored-by: waleedlatif1 <waleedlatif1@users.noreply.github.com>

* fix(grain): updated grain trigger to auto-establish trigger (#2666)

Co-authored-by: aadamgough <adam@sim.ai>

* feat(admin): routes to manage deployments (#2667)

* feat(admin): routes to manage deployments

* fix naming of deployed by

* feat(time-picker): added timepicker emcn component, added to playground, added searchable prop for dropdown, added more timezones for schedule, updated license and notice date (#2668)

* feat(time-picker): added timepicker emcn component, added to playground, added searchable prop for dropdown, added more timezones for schedule, updated license and notice date

* removed unused params, cleaned up redundant utils

* improvement(invite): aligned styling (#2669)

* improvement(invite): aligned with rest of app

* fix(invite): error handling

* fix: addressed comments

---------

Co-authored-by: Emir Karabeg <78010029+emir-karabeg@users.noreply.github.com>
Co-authored-by: Vikhyath Mondreti <vikhyathvikku@gmail.com>
Co-authored-by: waleedlatif1 <waleedlatif1@users.noreply.github.com>
Co-authored-by: Adam Gough <77861281+aadamgough@users.noreply.github.com>
Co-authored-by: aadamgough <adam@sim.ai>
2026-01-03 13:19:18 -08:00
Waleed
eb5d1f3e5b v0.5.48: copy-paste workflow blocks, docs updates, mcp tool fixes 2025-12-31 18:00:04 -08:00
Waleed
54ab82c8dd v0.5.47: deploy workflow as mcp, kb chunks tokenizer, UI improvements, jira service management tools 2025-12-30 23:18:58 -08:00
Waleed
f895bf469b v0.5.46: build improvements, greptile, light mode improvements 2025-12-29 02:17:52 -08:00
Waleed
dd3209af06 v0.5.45: light mode fixes, realtime usage indicator, docker build improvements 2025-12-27 19:57:42 -08:00
Waleed
b6ba3b50a7 v0.5.44: keyboard shortcuts, autolayout, light mode, byok, testing improvements 2025-12-26 21:25:19 -08:00
Waleed
b304233062 v0.5.43: export logs, circleback, grain, vertex, code hygiene, schedule improvements 2025-12-23 19:19:18 -08:00
Vikhyath Mondreti
57e4b49bd6 v0.5.42: fix memory migration 2025-12-23 01:24:54 -08:00
Vikhyath Mondreti
e12dd204ed v0.5.41: memory fixes, copilot improvements, knowledgebase improvements, LLM providers standardization 2025-12-23 00:15:18 -08:00
Vikhyath Mondreti
3d9d9cbc54 v0.5.40: supabase ops to allow non-public schemas, jira uuid 2025-12-21 22:28:05 -08:00
Waleed
0f4ec962ad v0.5.39: notion, workflow variables fixes 2025-12-20 20:44:00 -08:00
Waleed
4827866f9a v0.5.38: snap to grid, copilot ux improvements, billing line items 2025-12-20 17:24:38 -08:00
Waleed
3e697d9ed9 v0.5.37: redaction utils consolidation, logs updates, autoconnect improvements, additional kb tag types 2025-12-19 22:31:55 -08:00
Martin Yankov
4431a1a484 fix(helm): add custom egress rules to realtime network policy (#2481)
The realtime service network policy was missing the custom egress rules section
that allows configuration of additional egress rules via values.yaml. This caused
the realtime pods to be unable to connect to external databases (e.g., PostgreSQL
on port 5432) when using external database configurations.

The app network policy already had this section, but the realtime network policy
was missing it, creating an inconsistency and preventing the realtime service
from accessing external databases configured via networkPolicy.egress values.

This fix adds the same custom egress rules template section to the realtime
network policy, matching the app network policy behavior and allowing users to
configure database connectivity via values.yaml.
2025-12-19 18:59:08 -08:00
Waleed
4d1a9a3f22 v0.5.36: hitl improvements, opengraph, slack fixes, one-click unsubscribe, auth checks, new db indexes 2025-12-19 01:27:49 -08:00
Vikhyath Mondreti
eb07a080fb v0.5.35: helm updates, copilot improvements, 404 for docs, salesforce fixes, subflow resize clamping 2025-12-18 16:23:19 -08:00
176 changed files with 4189 additions and 26625 deletions

File diff suppressed because one or more lines are too long

View File

@@ -13,7 +13,6 @@ import {
   AsanaIcon,
   BrainIcon,
   BrowserUseIcon,
-  CalComIcon,
   CalendlyIcon,
   CirclebackIcon,
   ClayIcon,
@@ -142,7 +141,6 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
   arxiv: ArxivIcon,
   asana: AsanaIcon,
   browser_use: BrowserUseIcon,
-  calcom: CalComIcon,
   calendly: CalendlyIcon,
   circleback: CirclebackIcon,
   clay: ClayIcon,

View File

@@ -280,24 +280,14 @@ A quick lookup for everyday actions in the Sim workflow editor. For keyboard sho
   <td>Click clear button in Chat panel</td>
   <td><ActionImage src="/static/quick-reference/clear-chat.png" alt="Clear chat history" /></td>
 </tr>
-<tr>
-  <td>Run from block</td>
-  <td>Hover block → Click play button, or right-click → **Run from block**</td>
-  <td><ActionImage src="/static/quick-reference/run-from-block.png" alt="Run from block" /></td>
-</tr>
-<tr>
-  <td>Run until block</td>
-  <td>Right-click block → **Run until block**</td>
-  <td><ActionImage src="/static/quick-reference/run-until-block.png" alt="Run until block" /></td>
-</tr>
 <tr>
   <td>View execution logs</td>
   <td>Open terminal panel at bottom, or `Mod+L`</td>
   <td><ActionImage src="/static/quick-reference/terminal.png" alt="Execution logs terminal" /></td>
 </tr>
 <tr>
-  <td>Filter logs</td>
-  <td>Click filter icon in terminal Filter by block or status</td>
+  <td>Filter logs by block or status</td>
+  <td>Click block filter in terminal or right-click log entry → **Filter by Block** or **Filter by Status**</td>
   <td><ActionImage src="/static/quick-reference/filter-block.png" alt="Filter logs by block" /></td>
 </tr>
 <tr>
@@ -345,11 +335,6 @@ A quick lookup for everyday actions in the Sim workflow editor. For keyboard sho
   <td>Access previous versions in Deploy tab → **Promote to live**</td>
   <td><ActionImage src="/static/quick-reference/promote-deployment.png" alt="Promote deployment to live" /></td>
 </tr>
-<tr>
-  <td>Add version description</td>
-  <td>Deploy tab → Click description icon → Add or generate description</td>
-  <td><ActionVideo src="quick-reference/deployment-description.mp4" alt="Add deployment version description" /></td>
-</tr>
 <tr>
   <td>Copy API endpoint</td>
   <td>Deploy tab → API → Copy API cURL</td>

View File

@@ -1,515 +0,0 @@
---
title: CalCom
description: Manage Cal.com bookings, event types, schedules, and availability
---
import { BlockInfoCard } from "@/components/ui/block-info-card"
<BlockInfoCard
type="calcom"
color="#FFFFFE"
/>
{/* MANUAL-CONTENT-START:intro */}
[Cal.com](https://cal.com/) is a flexible and open-source scheduling platform that makes it easy to manage appointments, bookings, event types, and team availabilities.
With Cal.com, you can:
- **Automate scheduling**: Allow users to view your available time slots and book meetings automatically, without back-and-forth emails.
- **Manage events**: Create and customize event types, durations, and rules for one-on-one or group meetings.
- **Integrate calendars**: Seamlessly connect with Google, Outlook, Apple, or other calendar providers to avoid double bookings.
- **Handle attendees and guests**: Collect attendee information, manage guests, and send invitations or reminders.
- **Control availability**: Define custom working hours, buffer times, and cancellation/rebooking rules.
- **Power workflows**: Trigger custom actions via webhooks when a booking is created, cancelled, or rescheduled.
In Sim, the Cal.com integration enables your agents to book meetings, check availabilities, manage event types, and automate scheduling tasks programmatically. This helps agents coordinate meetings, send bookings on behalf of users, check schedules, or respond to booking events—all without manual intervention. By connecting Sim with Cal.com, you unlock highly automated and intelligent scheduling workflows that can integrate seamlessly with your broader automation needs.
{/* MANUAL-CONTENT-END */}
## Usage Instructions
Integrate Cal.com into your workflow. Create and manage bookings, event types, schedules, and check availability slots. Supports creating, listing, rescheduling, and canceling bookings, as well as managing event types and schedules. Can also trigger workflows based on Cal.com webhook events (booking created, cancelled, rescheduled). Connect your Cal.com account via OAuth.
## Tools
### `calcom_create_booking`
Create a new booking on Cal.com
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `eventTypeId` | number | Yes | The ID of the event type to book |
| `start` | string | Yes | Start time in UTC ISO 8601 format \(e.g., 2024-01-15T09:00:00Z\) |
| `attendee` | object | Yes | Attendee information object with name, email, timeZone, and optional phoneNumber |
| `guests` | array | No | Array of guest email addresses |
| `items` | string | No | Guest email address |
| `lengthInMinutes` | number | No | Duration of the booking in minutes \(overrides event type default\) |
| `metadata` | object | No | Custom metadata to attach to the booking |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `status` | string | Response status |
| `data` | object | Created booking details |
### `calcom_get_booking`
Get details of a specific booking by its UID
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `bookingUid` | string | Yes | Unique identifier \(UID\) of the booking |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `status` | string | Response status |
| `data` | object | Booking details |
### `calcom_list_bookings`
List all bookings with optional status filter
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `status` | string | No | Filter bookings by status: upcoming, recurring, past, cancelled, or unconfirmed |
| `take` | number | No | Number of bookings to return \(pagination limit\) |
| `skip` | number | No | Number of bookings to skip \(pagination offset\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `status` | string | Response status |
| `data` | array | Array of bookings |
### `calcom_cancel_booking`
Cancel an existing booking
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `bookingUid` | string | Yes | Unique identifier \(UID\) of the booking to cancel |
| `cancellationReason` | string | No | Reason for cancelling the booking |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `status` | string | Response status |
| `data` | object | Cancelled booking details |
| ↳ `status` | string | Booking status \(should be cancelled\) |
### `calcom_reschedule_booking`
Reschedule an existing booking to a new time
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `bookingUid` | string | Yes | Unique identifier \(UID\) of the booking to reschedule |
| `start` | string | Yes | New start time in UTC ISO 8601 format \(e.g., 2024-01-15T09:00:00Z\) |
| `reschedulingReason` | string | No | Reason for rescheduling the booking |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `status` | string | Response status |
| `data` | object | Rescheduled booking details |
| ↳ `uid` | string | Unique identifier for the new booking |
| ↳ `start` | string | New start time in ISO 8601 format |
| ↳ `end` | string | New end time in ISO 8601 format |
### `calcom_confirm_booking`
Confirm a pending booking that requires confirmation
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `bookingUid` | string | Yes | Unique identifier \(UID\) of the booking to confirm |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `status` | string | Response status |
| `data` | object | Confirmed booking details |
| ↳ `status` | string | Booking status \(should be accepted/confirmed\) |
### `calcom_decline_booking`
Decline a pending booking request
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `bookingUid` | string | Yes | Unique identifier \(UID\) of the booking to decline |
| `reason` | string | No | Reason for declining the booking |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `status` | string | Response status |
| `data` | object | Declined booking details |
| ↳ `status` | string | Booking status \(should be cancelled/rejected\) |
### `calcom_create_event_type`
Create a new event type in Cal.com
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `title` | string | Yes | Title of the event type |
| `slug` | string | Yes | Unique slug for the event type URL |
| `lengthInMinutes` | number | Yes | Duration of the event in minutes |
| `description` | string | No | Description of the event type |
| `slotInterval` | number | No | Interval between available booking slots in minutes |
| `minimumBookingNotice` | number | No | Minimum notice required before booking in minutes |
| `beforeEventBuffer` | number | No | Buffer time before the event in minutes |
| `afterEventBuffer` | number | No | Buffer time after the event in minutes |
| `scheduleId` | number | No | ID of the schedule to use for availability |
| `disableGuests` | boolean | No | Whether to disable guests from being added to bookings |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `status` | string | Response status |
| `data` | object | Created event type details |
| ↳ `id` | number | Event type ID |
| ↳ `title` | string | Event type title |
| ↳ `slug` | string | Event type slug |
| ↳ `description` | string | Event type description |
| ↳ `lengthInMinutes` | number | Duration in minutes |
| ↳ `slotInterval` | number | Slot interval in minutes |
| ↳ `minimumBookingNotice` | number | Minimum booking notice in minutes |
| ↳ `beforeEventBuffer` | number | Buffer before event in minutes |
| ↳ `afterEventBuffer` | number | Buffer after event in minutes |
| ↳ `scheduleId` | number | Schedule ID |
| ↳ `disableGuests` | boolean | Whether guests are disabled |
| ↳ `createdAt` | string | ISO timestamp of creation |
| ↳ `updatedAt` | string | ISO timestamp of last update |
### `calcom_get_event_type`
Get detailed information about a specific event type
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `eventTypeId` | number | Yes | Event type ID to retrieve |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `status` | string | Response status |
| `data` | object | Event type details |
| ↳ `id` | number | Event type ID |
| ↳ `title` | string | Event type title |
| ↳ `slug` | string | Event type slug |
| ↳ `description` | string | Event type description |
| ↳ `lengthInMinutes` | number | Duration in minutes |
| ↳ `slotInterval` | number | Slot interval in minutes |
| ↳ `minimumBookingNotice` | number | Minimum booking notice in minutes |
| ↳ `beforeEventBuffer` | number | Buffer before event in minutes |
| ↳ `afterEventBuffer` | number | Buffer after event in minutes |
| ↳ `scheduleId` | number | Schedule ID |
| ↳ `disableGuests` | boolean | Whether guests are disabled |
| ↳ `createdAt` | string | ISO timestamp of creation |
| ↳ `updatedAt` | string | ISO timestamp of last update |
### `calcom_list_event_types`
Retrieve a list of all event types
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `sortCreatedAt` | string | No | Sort by creation date: "asc" or "desc" |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `status` | string | Response status |
| `data` | array | Array of event types |
| ↳ `id` | number | Event type ID |
| ↳ `title` | string | Event type title |
| ↳ `slug` | string | Event type slug |
| ↳ `description` | string | Event type description |
| ↳ `lengthInMinutes` | number | Duration in minutes |
| ↳ `slotInterval` | number | Slot interval in minutes |
| ↳ `minimumBookingNotice` | number | Minimum booking notice in minutes |
| ↳ `beforeEventBuffer` | number | Buffer before event in minutes |
| ↳ `afterEventBuffer` | number | Buffer after event in minutes |
| ↳ `scheduleId` | number | Schedule ID |
| ↳ `disableGuests` | boolean | Whether guests are disabled |
| ↳ `createdAt` | string | ISO timestamp of creation |
| ↳ `updatedAt` | string | ISO timestamp of last update |
### `calcom_update_event_type`
Update an existing event type in Cal.com
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `eventTypeId` | number | Yes | Event type ID to update |
| `title` | string | No | Title of the event type |
| `slug` | string | No | Unique slug for the event type URL |
| `lengthInMinutes` | number | No | Duration of the event in minutes |
| `description` | string | No | Description of the event type |
| `slotInterval` | number | No | Interval between available booking slots in minutes |
| `minimumBookingNotice` | number | No | Minimum notice required before booking in minutes |
| `beforeEventBuffer` | number | No | Buffer time before the event in minutes |
| `afterEventBuffer` | number | No | Buffer time after the event in minutes |
| `scheduleId` | number | No | ID of the schedule to use for availability |
| `disableGuests` | boolean | No | Whether to disable guests from being added to bookings |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `status` | string | Response status |
| `data` | object | Updated event type details |
| ↳ `id` | number | Event type ID |
| ↳ `title` | string | Event type title |
| ↳ `slug` | string | Event type slug |
| ↳ `description` | string | Event type description |
| ↳ `lengthInMinutes` | number | Duration in minutes |
| ↳ `slotInterval` | number | Slot interval in minutes |
| ↳ `minimumBookingNotice` | number | Minimum booking notice in minutes |
| ↳ `beforeEventBuffer` | number | Buffer before event in minutes |
| ↳ `afterEventBuffer` | number | Buffer after event in minutes |
| ↳ `scheduleId` | number | Schedule ID |
| ↳ `disableGuests` | boolean | Whether guests are disabled |
| ↳ `createdAt` | string | ISO timestamp of creation |
| ↳ `updatedAt` | string | ISO timestamp of last update |
### `calcom_delete_event_type`
Delete an event type from Cal.com
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `eventTypeId` | number | Yes | Event type ID to delete |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `deleted` | boolean | Whether the event type was successfully deleted |
| `message` | string | Status message |
### `calcom_create_schedule`
Create a new availability schedule in Cal.com
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `name` | string | Yes | Name of the schedule |
| `timeZone` | string | Yes | Timezone for the schedule \(e.g., America/New_York\) |
| `isDefault` | boolean | Yes | Whether this schedule should be the default |
| `availability` | array | No | Availability intervals for the schedule |
| `items` | object | No | Availability interval |
| `properties` | array | No | Days of the week \(Monday, Tuesday, Wednesday, Thursday, Friday, Saturday, Sunday\) |
| `days` | array | No | Days of the week \(Monday, Tuesday, Wednesday, Thursday, Friday, Saturday, Sunday\) |
| `startTime` | string | No | Start time in HH:MM format |
| `endTime` | string | No | End time in HH:MM format |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `status` | string | Response status |
| `data` | object | Created schedule data |
| ↳ `id` | number | Unique identifier for the schedule |
| ↳ `name` | string | Name of the schedule |
| ↳ `timeZone` | string | Timezone of the schedule |
| ↳ `isDefault` | boolean | Whether this is the default schedule |
| ↳ `availability` | array | Availability intervals |
| ↳ `days` | array | Days of the week |
| ↳ `startTime` | string | Start time in HH:MM format |
| ↳ `endTime` | string | End time in HH:MM format |
### `calcom_get_schedule`
Get a specific schedule by ID from Cal.com
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `scheduleId` | string | Yes | ID of the schedule to retrieve |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `status` | string | Response status |
| `data` | object | Schedule data |
| ↳ `id` | number | Unique identifier for the schedule |
| ↳ `name` | string | Name of the schedule |
| ↳ `timeZone` | string | Timezone of the schedule |
| ↳ `isDefault` | boolean | Whether this is the default schedule |
| ↳ `availability` | array | Availability intervals |
| ↳ `days` | array | Days of the week |
| ↳ `startTime` | string | Start time in HH:MM format |
| ↳ `endTime` | string | End time in HH:MM format |
### `calcom_list_schedules`
List all availability schedules from Cal.com
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `status` | string | Response status |
| `data` | array | Array of schedule objects |
| ↳ `id` | number | Unique identifier for the schedule |
| ↳ `name` | string | Name of the schedule |
| ↳ `timeZone` | string | Timezone of the schedule |
| ↳ `isDefault` | boolean | Whether this is the default schedule |
| ↳ `availability` | array | Availability intervals |
| ↳ `days` | array | Days of the week |
| ↳ `startTime` | string | Start time in HH:MM format |
| ↳ `endTime` | string | End time in HH:MM format |
### `calcom_update_schedule`
Update an existing schedule in Cal.com
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `scheduleId` | string | Yes | ID of the schedule to update |
| `name` | string | No | New name for the schedule |
| `timeZone` | string | No | New timezone for the schedule \(e.g., America/New_York\) |
| `isDefault` | boolean | No | Whether this schedule should be the default |
| `availability` | array | No | New availability intervals for the schedule |
| `items` | object | No | Availability interval |
| `properties` | array | No | Days of the week \(Monday, Tuesday, Wednesday, Thursday, Friday, Saturday, Sunday\) |
| `days` | array | No | Days of the week \(Monday, Tuesday, Wednesday, Thursday, Friday, Saturday, Sunday\) |
| `startTime` | string | No | Start time in HH:MM format |
| `endTime` | string | No | End time in HH:MM format |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `status` | string | Response status |
| `data` | object | Updated schedule data |
| ↳ `id` | number | Unique identifier for the schedule |
| ↳ `name` | string | Name of the schedule |
| ↳ `timeZone` | string | Timezone of the schedule |
| ↳ `isDefault` | boolean | Whether this is the default schedule |
| ↳ `availability` | array | Availability intervals |
| ↳ `days` | array | Days of the week |
| ↳ `startTime` | string | Start time in HH:MM format |
| ↳ `endTime` | string | End time in HH:MM format |
### `calcom_delete_schedule`
Delete a schedule from Cal.com
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `scheduleId` | string | Yes | ID of the schedule to delete |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `status` | string | Response status |
| `data` | object | Deleted schedule data |
| ↳ `id` | number | Unique identifier of the deleted schedule |
| ↳ `name` | string | Name of the deleted schedule |
| ↳ `timeZone` | string | Timezone of the deleted schedule |
| ↳ `isDefault` | boolean | Whether this was the default schedule |
### `calcom_get_default_schedule`
Get the default availability schedule from Cal.com
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `status` | string | Response status |
| `data` | object | Default schedule data |
| ↳ `id` | number | Unique identifier for the schedule |
| ↳ `name` | string | Name of the schedule |
| ↳ `timeZone` | string | Timezone of the schedule |
| ↳ `isDefault` | boolean | Whether this is the default schedule \(always true\) |
| ↳ `availability` | array | Availability intervals |
| ↳ `days` | array | Days of the week |
| ↳ `startTime` | string | Start time in HH:MM format |
| ↳ `endTime` | string | End time in HH:MM format |
### `calcom_get_slots`
Get available booking slots for a Cal.com event type within a time range
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `start` | string | Yes | Start of time range in UTC ISO 8601 format \(e.g., 2024-01-15T00:00:00Z\) |
| `end` | string | Yes | End of time range in UTC ISO 8601 format \(e.g., 2024-01-22T00:00:00Z\) |
| `eventTypeId` | number | No | Event type ID for direct lookup |
| `eventTypeSlug` | string | No | Event type slug \(requires username to be set\) |
| `username` | string | No | Username for personal event types \(required when using eventTypeSlug\) |
| `timeZone` | string | No | Timezone for returned slots \(defaults to UTC\) |
| `duration` | number | No | Slot length in minutes |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `status` | string | Response status |
| `data` | object | Slots data container |
| ↳ `slots` | object | Available time slots grouped by date \(YYYY-MM-DD keys\) |

View File

@@ -9,7 +9,6 @@
   "arxiv",
   "asana",
   "browser_use",
-  "calcom",
   "calendly",
   "circleback",
   "clay",

Binary file not shown. Before: 144 KiB | After: 235 KiB

Binary file not shown. Before: 7.2 KiB | After: 66 KiB

Binary file not shown. Before: 20 KiB

Binary file not shown. Before: 25 KiB

Binary file not shown. Before: 29 KiB | After: 78 KiB

View File

@@ -14,7 +14,7 @@
   --panel-width: 320px; /* PANEL_WIDTH.DEFAULT */
   --toolbar-triggers-height: 300px; /* TOOLBAR_TRIGGERS_HEIGHT.DEFAULT */
   --editor-connections-height: 172px; /* EDITOR_CONNECTIONS_HEIGHT.DEFAULT */
-  --terminal-height: 206px; /* TERMINAL_HEIGHT.DEFAULT */
+  --terminal-height: 155px; /* TERMINAL_HEIGHT.DEFAULT */
 }
 .sidebar-container {

View File

@@ -344,7 +344,7 @@ describe('Schedule PUT API (Reactivate)', () => {
   expect(nextRunAt).toBeGreaterThan(beforeCall)
   expect(nextRunAt).toBeLessThanOrEqual(afterCall + 5 * 60 * 1000 + 1000)
   // Should align with 5-minute intervals (minute divisible by 5)
-  expect(new Date(nextRunAt).getUTCMinutes() % 5).toBe(0)
+  expect(new Date(nextRunAt).getMinutes() % 5).toBe(0)
 })
 it('calculates nextRunAt from daily cron expression', async () => {
@@ -572,7 +572,7 @@
   expect(nextRunAt.getTime()).toBeGreaterThan(beforeCall)
   expect(nextRunAt.getTime()).toBeLessThanOrEqual(beforeCall + 10 * 60 * 1000 + 1000)
   // Should align with 10-minute intervals
-  expect(nextRunAt.getUTCMinutes() % 10).toBe(0)
+  expect(nextRunAt.getMinutes() % 10).toBe(0)
 })
 it('handles hourly schedules with timezone correctly', async () => {
@@ -598,8 +598,8 @@
   // Should be a future date at minute 15
   expect(nextRunAt.getTime()).toBeGreaterThan(beforeCall)
-  expect(nextRunAt.getUTCMinutes()).toBe(15)
-  expect(nextRunAt.getUTCSeconds()).toBe(0)
+  expect(nextRunAt.getMinutes()).toBe(15)
+  expect(nextRunAt.getSeconds()).toBe(0)
 })
 it('handles custom cron expressions with complex patterns and timezone', async () => {
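For context on the assertion change above: the two Date getters read different clocks, so which one a test uses decides whether it depends on the machine's timezone. A small, self-contained illustration:

```typescript
// getUTCMinutes() reads the UTC clock; getMinutes() reads the host timezone.
const nextRunAt = new Date('2024-01-15T09:15:00Z')

console.log(nextRunAt.getUTCMinutes()) // always 15
console.log(nextRunAt.getMinutes()) // 15 only when the host's UTC offset is a whole number of hours (e.g. 45 in UTC+05:30)
```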

View File

@@ -9,24 +9,13 @@ import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/
 const logger = createLogger('WorkflowDeploymentVersionAPI')
-const patchBodySchema = z
-  .object({
-    name: z
-      .string()
-      .trim()
-      .min(1, 'Name cannot be empty')
-      .max(100, 'Name must be 100 characters or less')
-      .optional(),
-    description: z
-      .string()
-      .trim()
-      .max(500, 'Description must be 500 characters or less')
-      .nullable()
-      .optional(),
-  })
-  .refine((data) => data.name !== undefined || data.description !== undefined, {
-    message: 'At least one of name or description must be provided',
-  })
+const patchBodySchema = z.object({
+  name: z
+    .string()
+    .trim()
+    .min(1, 'Name cannot be empty')
+    .max(100, 'Name must be 100 characters or less'),
+})
 export const dynamic = 'force-dynamic'
 export const runtime = 'nodejs'
@@ -99,46 +88,33 @@ export async function PATCH(
   return createErrorResponse(validation.error.errors[0]?.message || 'Invalid request body', 400)
 }
-const { name, description } = validation.data
-const updateData: { name?: string; description?: string | null } = {}
-if (name !== undefined) {
-  updateData.name = name
-}
-if (description !== undefined) {
-  updateData.description = description
-}
+const { name } = validation.data
 const [updated] = await db
   .update(workflowDeploymentVersion)
-  .set(updateData)
+  .set({ name })
   .where(
     and(
       eq(workflowDeploymentVersion.workflowId, id),
       eq(workflowDeploymentVersion.version, versionNum)
     )
   )
-  .returning({
-    id: workflowDeploymentVersion.id,
-    name: workflowDeploymentVersion.name,
-    description: workflowDeploymentVersion.description,
-  })
+  .returning({ id: workflowDeploymentVersion.id, name: workflowDeploymentVersion.name })
 if (!updated) {
   return createErrorResponse('Deployment version not found', 404)
 }
-logger.info(`[${requestId}] Updated deployment version ${version} for workflow ${id}`, {
-  name: updateData.name,
-  description: updateData.description,
-})
-return createSuccessResponse({ name: updated.name, description: updated.description })
+logger.info(
+  `[${requestId}] Renamed deployment version ${version} for workflow ${id} to "${name}"`
+)
+return createSuccessResponse({ name: updated.name })
 } catch (error: any) {
   logger.error(
-    `[${requestId}] Error updating deployment version ${version} for workflow ${id}`,
+    `[${requestId}] Error renaming deployment version ${version} for workflow ${id}`,
     error
   )
-  return createErrorResponse(error.message || 'Failed to update deployment version', 500)
+  return createErrorResponse(error.message || 'Failed to rename deployment version', 500)
 }
 }
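To make the simplified schema above concrete, here is how it behaves on a few inputs; this is plain Zod usage copied from the schema shown in the diff, not code from the route itself.

```typescript
// Whitespace is trimmed before the length checks run.
import { z } from 'zod'

const patchBodySchema = z.object({
  name: z
    .string()
    .trim()
    .min(1, 'Name cannot be empty')
    .max(100, 'Name must be 100 characters or less'),
})

console.log(patchBodySchema.safeParse({ name: '  Release candidate  ' }).success) // true
console.log(patchBodySchema.safeParse({ name: '   ' }).success) // false (empty after trim)
console.log(patchBodySchema.safeParse({}).success) // false (name is required)
```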

View File

@@ -26,7 +26,6 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
   id: workflowDeploymentVersion.id,
   version: workflowDeploymentVersion.version,
   name: workflowDeploymentVersion.name,
-  description: workflowDeploymentVersion.description,
   isActive: workflowDeploymentVersion.isActive,
   createdAt: workflowDeploymentVersion.createdAt,
   createdBy: workflowDeploymentVersion.createdBy,

View File

@@ -1,216 +0,0 @@
import { db, workflow as workflowTable } from '@sim/db'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { v4 as uuidv4 } from 'uuid'
import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { SSE_HEADERS } from '@/lib/core/utils/sse'
import { markExecutionCancelled } from '@/lib/execution/cancellation'
import { LoggingSession } from '@/lib/logs/execution/logging-session'
import { executeWorkflowCore } from '@/lib/workflows/executor/execution-core'
import { createSSECallbacks } from '@/lib/workflows/executor/execution-events'
import { ExecutionSnapshot } from '@/executor/execution/snapshot'
import type { ExecutionMetadata, SerializableExecutionState } from '@/executor/execution/types'
import { hasExecutionResult } from '@/executor/utils/errors'
const logger = createLogger('ExecuteFromBlockAPI')
const ExecuteFromBlockSchema = z.object({
startBlockId: z.string().min(1, 'Start block ID is required'),
sourceSnapshot: z.object({
blockStates: z.record(z.any()),
executedBlocks: z.array(z.string()),
blockLogs: z.array(z.any()),
decisions: z.object({
router: z.record(z.string()),
condition: z.record(z.string()),
}),
completedLoops: z.array(z.string()),
loopExecutions: z.record(z.any()).optional(),
parallelExecutions: z.record(z.any()).optional(),
parallelBlockMapping: z.record(z.any()).optional(),
activeExecutionPath: z.array(z.string()),
}),
input: z.any().optional(),
})
export const runtime = 'nodejs'
export const dynamic = 'force-dynamic'
export async function POST(req: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const requestId = generateRequestId()
const { id: workflowId } = await params
try {
const auth = await checkHybridAuth(req, { requireWorkflowId: false })
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}
const userId = auth.userId
let body: unknown
try {
body = await req.json()
} catch {
return NextResponse.json({ error: 'Invalid JSON body' }, { status: 400 })
}
const validation = ExecuteFromBlockSchema.safeParse(body)
if (!validation.success) {
logger.warn(`[${requestId}] Invalid request body:`, validation.error.errors)
return NextResponse.json(
{
error: 'Invalid request body',
details: validation.error.errors.map((e) => ({
path: e.path.join('.'),
message: e.message,
})),
},
{ status: 400 }
)
}
const { startBlockId, sourceSnapshot, input } = validation.data
const executionId = uuidv4()
const [workflowRecord] = await db
.select({ workspaceId: workflowTable.workspaceId, userId: workflowTable.userId })
.from(workflowTable)
.where(eq(workflowTable.id, workflowId))
.limit(1)
if (!workflowRecord?.workspaceId) {
return NextResponse.json({ error: 'Workflow not found or has no workspace' }, { status: 404 })
}
const workspaceId = workflowRecord.workspaceId
const workflowUserId = workflowRecord.userId
logger.info(`[${requestId}] Starting run-from-block execution`, {
workflowId,
startBlockId,
executedBlocksCount: sourceSnapshot.executedBlocks.length,
})
const loggingSession = new LoggingSession(workflowId, executionId, 'manual', requestId)
const abortController = new AbortController()
let isStreamClosed = false
const stream = new ReadableStream<Uint8Array>({
async start(controller) {
const { sendEvent, onBlockStart, onBlockComplete, onStream } = createSSECallbacks({
executionId,
workflowId,
controller,
isStreamClosed: () => isStreamClosed,
setStreamClosed: () => {
isStreamClosed = true
},
})
const metadata: ExecutionMetadata = {
requestId,
workflowId,
userId,
executionId,
triggerType: 'manual',
workspaceId,
workflowUserId,
useDraftState: true,
isClientSession: true,
startTime: new Date().toISOString(),
}
const snapshot = new ExecutionSnapshot(metadata, {}, input || {}, {})
try {
const startTime = new Date()
sendEvent({
type: 'execution:started',
timestamp: startTime.toISOString(),
executionId,
workflowId,
data: { startTime: startTime.toISOString() },
})
const result = await executeWorkflowCore({
snapshot,
loggingSession,
abortSignal: abortController.signal,
runFromBlock: {
startBlockId,
sourceSnapshot: sourceSnapshot as SerializableExecutionState,
},
callbacks: { onBlockStart, onBlockComplete, onStream },
})
if (result.status === 'cancelled') {
sendEvent({
type: 'execution:cancelled',
timestamp: new Date().toISOString(),
executionId,
workflowId,
data: { duration: result.metadata?.duration || 0 },
})
} else {
sendEvent({
type: 'execution:completed',
timestamp: new Date().toISOString(),
executionId,
workflowId,
data: {
success: result.success,
output: result.output,
duration: result.metadata?.duration || 0,
startTime: result.metadata?.startTime || startTime.toISOString(),
endTime: result.metadata?.endTime || new Date().toISOString(),
},
})
}
} catch (error: unknown) {
const errorMessage = error instanceof Error ? error.message : 'Unknown error'
logger.error(`[${requestId}] Run-from-block execution failed: ${errorMessage}`)
const executionResult = hasExecutionResult(error) ? error.executionResult : undefined
sendEvent({
type: 'execution:error',
timestamp: new Date().toISOString(),
executionId,
workflowId,
data: {
error: executionResult?.error || errorMessage,
duration: executionResult?.metadata?.duration || 0,
},
})
} finally {
if (!isStreamClosed) {
try {
controller.enqueue(new TextEncoder().encode('data: [DONE]\n\n'))
controller.close()
} catch {}
}
}
},
cancel() {
isStreamClosed = true
abortController.abort()
markExecutionCancelled(executionId).catch(() => {})
},
})
return new NextResponse(stream, {
headers: { ...SSE_HEADERS, 'X-Execution-Id': executionId },
})
} catch (error: unknown) {
const errorMessage = error instanceof Error ? error.message : 'Unknown error'
logger.error(`[${requestId}] Failed to start run-from-block execution:`, error)
return NextResponse.json(
{ error: errorMessage || 'Failed to start execution' },
{ status: 500 }
)
}
}
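The handler above streams execution events as server-sent events and terminates the stream with a `data: [DONE]` sentinel. A minimal client-side consumer sketch, assuming the request body matches the schema validated above; the route path used here is an assumption for illustration, not confirmed by this diff:

// Hypothetical consumer of the run-from-block SSE endpoint above.
// Assumes each event arrives as a `data: <json>` line and the stream ends with `data: [DONE]`.
async function consumeRunFromBlock(workflowId: string, body: unknown): Promise<void> {
  const res = await fetch(`/api/workflows/${workflowId}/run-from-block`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(body),
  })
  if (!res.ok || !res.body) throw new Error(`Request failed: ${res.status}`)
  // The route sets this header before streaming begins
  const executionId = res.headers.get('X-Execution-Id')
  console.log('execution id', executionId)
  const reader = res.body.getReader()
  const decoder = new TextDecoder()
  let buffer = ''
  while (true) {
    const { done, value } = await reader.read()
    if (done) break
    if (value) buffer += decoder.decode(value, { stream: true })
    // SSE events are separated by a blank line
    const events = buffer.split('\n\n')
    buffer = events.pop() ?? ''
    for (const event of events) {
      const data = event.replace(/^data: /, '').trim()
      if (data === '[DONE]') return
      const parsed = JSON.parse(data) as { type: string; data?: unknown }
      console.log(parsed.type, parsed.data)
    }
  }
}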

View File

@@ -53,7 +53,6 @@ const ExecuteWorkflowSchema = z.object({
        parallels: z.record(z.any()).optional(),
      })
      .optional(),
  stopAfterBlockId: z.string().optional(),
})
export const runtime = 'nodejs'
@@ -223,7 +222,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
    includeFileBase64,
    base64MaxBytes,
    workflowStateOverride,
    stopAfterBlockId,
  } = validation.data
  // For API key and internal JWT auth, the entire body is the input (except for our control fields)
@@ -239,7 +237,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
      includeFileBase64,
      base64MaxBytes,
      workflowStateOverride,
      stopAfterBlockId: _stopAfterBlockId,
      workflowId: _workflowId, // Also exclude workflowId used for internal JWT auth
      ...rest
    } = body
@@ -437,7 +434,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
      loggingSession,
      includeFileBase64,
      base64MaxBytes,
      stopAfterBlockId,
    })
    const outputWithBase64 = includeFileBase64
@@ -726,7 +722,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
      abortSignal: abortController.signal,
      includeFileBase64,
      base64MaxBytes,
      stopAfterBlockId,
    })
    if (result.status === 'paused') {

View File

@@ -57,6 +57,40 @@ function useSetToggle() {
  )
}
/**
* Generates a unique key for a trace span
*/
function getSpanKey(span: TraceSpan): string {
if (span.id) {
return span.id
}
const name = span.name || 'span'
const start = span.startTime || 'unknown-start'
const end = span.endTime || 'unknown-end'
return `${name}|${start}|${end}`
}
/**
* Merges multiple arrays of trace span children, deduplicating by span key
*/
function mergeTraceSpanChildren(...groups: TraceSpan[][]): TraceSpan[] {
const merged: TraceSpan[] = []
const seen = new Set<string>()
groups.forEach((group) => {
group.forEach((child) => {
const key = getSpanKey(child)
if (seen.has(key)) {
return
}
seen.add(key)
merged.push(child)
})
})
return merged
}
/**
 * Parses a time value to milliseconds
 */
@@ -82,16 +116,34 @@ function hasErrorInTree(span: TraceSpan): boolean {
/**
 * Normalizes and sorts trace spans recursively.
 * Deduplicates children and sorts by start time.
 * Merges children from both span.children and span.output.childTraceSpans,
 * deduplicates them, and sorts by start time.
 */
function normalizeAndSortSpans(spans: TraceSpan[]): TraceSpan[] {
  return spans
    .map((span) => {
      const enrichedSpan: TraceSpan = { ...span }
      // Process and deduplicate children
      const children = Array.isArray(span.children) ? span.children : []
      enrichedSpan.children = children.length > 0 ? normalizeAndSortSpans(children) : undefined
      // Clean output by removing childTraceSpans after extracting
      if (enrichedSpan.output && typeof enrichedSpan.output === 'object') {
        enrichedSpan.output = { ...enrichedSpan.output }
if ('childTraceSpans' in enrichedSpan.output) {
const { childTraceSpans, ...cleanOutput } = enrichedSpan.output as {
childTraceSpans?: TraceSpan[]
} & Record<string, unknown>
enrichedSpan.output = cleanOutput
}
}
// Merge and deduplicate children from both sources
const directChildren = Array.isArray(span.children) ? span.children : []
const outputChildren = Array.isArray(span.output?.childTraceSpans)
? (span.output!.childTraceSpans as TraceSpan[])
: []
const mergedChildren = mergeTraceSpanChildren(directChildren, outputChildren)
enrichedSpan.children =
mergedChildren.length > 0 ? normalizeAndSortSpans(mergedChildren) : undefined
      return enrichedSpan
    })
@@ -521,19 +573,7 @@ const TraceSpanNode = memo(function TraceSpanNode({
    return children.sort((a, b) => parseTime(a.startTime) - parseTime(b.startTime))
  }, [span, spanId, spanStartTime])
  const hasChildren = allChildren.length > 0
  // Hide empty model timing segments for agents without tool calls
const filteredChildren = useMemo(() => {
const isAgent = span.type?.toLowerCase() === 'agent'
const hasToolCalls =
(span.toolCalls?.length ?? 0) > 0 || allChildren.some((c) => c.type?.toLowerCase() === 'tool')
if (isAgent && !hasToolCalls) {
return allChildren.filter((c) => c.type?.toLowerCase() !== 'model')
}
return allChildren
}, [allChildren, span.type, span.toolCalls])
const hasChildren = filteredChildren.length > 0
  const isExpanded = isRootWorkflow || expandedNodes.has(spanId)
  const isToggleable = !isRootWorkflow
@@ -645,7 +685,7 @@ const TraceSpanNode = memo(function TraceSpanNode({
      {/* Nested Children */}
      {hasChildren && (
        <div className='flex min-w-0 flex-col gap-[2px] border-[var(--border)] border-l pl-[10px]'>
          {filteredChildren.map((child, index) => (
          {allChildren.map((child, index) => (
            <div key={child.id || `${spanId}-child-${index}`} className='pl-[6px]'>
              <TraceSpanNode
                span={child}
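The hunk above replaces the single-source child handling with getSpanKey/mergeTraceSpanChildren. A small usage sketch of those helpers with made-up, abbreviated span objects (real TraceSpan values carry more fields than shown):

// Illustrative only: spans are trimmed to the fields the helpers actually read.
const direct = [
  { id: 'a', name: 'model', startTime: '2026-01-27T00:00:00Z', endTime: '2026-01-27T00:00:01Z' },
] as TraceSpan[]
const fromOutput = [
  { id: 'a', name: 'model', startTime: '2026-01-27T00:00:00Z', endTime: '2026-01-27T00:00:01Z' },
  { name: 'tool', startTime: '2026-01-27T00:00:01Z', endTime: '2026-01-27T00:00:02Z' },
] as TraceSpan[]
// 'a' appears in both groups but is kept once; the second span has no id, so getSpanKey
// falls back to its `${name}|${start}|${end}` composite key.
const merged = mergeTraceSpanChildren(direct, fromOutput)
console.log(merged.map(getSpanKey)) // ['a', 'tool|2026-01-27T00:00:01Z|2026-01-27T00:00:02Z']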

View File

@@ -18,7 +18,6 @@ import {
import { ScrollArea } from '@/components/ui/scroll-area'
import { BASE_EXECUTION_CHARGE } from '@/lib/billing/constants'
import { cn } from '@/lib/core/utils/cn'
import { filterHiddenOutputKeys } from '@/lib/logs/execution/trace-spans/trace-spans'
import {
  ExecutionSnapshot,
  FileCards,
@@ -275,13 +274,16 @@ export const LogDetails = memo(function LogDetails({
    return isWorkflowExecutionLog && log?.cost
  }, [log, isWorkflowExecutionLog])
  // Extract and clean the workflow final output (recursively remove hidden keys for cleaner display)
  // Extract and clean the workflow final output (remove childTraceSpans for cleaner display)
  const workflowOutput = useMemo(() => {
    const executionData = log?.executionData as
      | { finalOutput?: Record<string, unknown> }
      | undefined
    if (!executionData?.finalOutput) return null
    return filterHiddenOutputKeys(executionData.finalOutput) as Record<string, unknown>
    const { childTraceSpans, ...cleanOutput } = executionData.finalOutput as {
      childTraceSpans?: unknown
    } & Record<string, unknown>
    return cleanOutput
  }, [log?.executionData])
  useEffect(() => {

View File

@@ -78,7 +78,7 @@ export default function Logs() {
  // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [])
  const [isLive, setIsLive] = useState(true)
  const [isLive, setIsLive] = useState(false)
  const [isVisuallyRefreshing, setIsVisuallyRefreshing] = useState(false)
  const [isExporting, setIsExporting] = useState(false)
  const isSearchOpenRef = useRef<boolean>(false)

View File

@@ -1,13 +1,11 @@
import { memo, useCallback } from 'react'
import { ArrowLeftRight, ArrowUpDown, Circle, CircleOff, LogOut } from 'lucide-react'
import { Button, Copy, PlayOutline, Tooltip, Trash2 } from '@/components/emcn'
import { Button, Copy, Tooltip, Trash2 } from '@/components/emcn'
import { cn } from '@/lib/core/utils/cn'
import { isInputDefinitionTrigger } from '@/lib/workflows/triggers/input-definition-triggers'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
import { useWorkflowExecution } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks'
import { validateTriggerPaste } from '@/app/workspace/[workspaceId]/w/[workflowId]/utils'
import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'
import { useExecutionStore } from '@/stores/execution'
import { useNotificationStore } from '@/stores/notifications'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
@@ -51,7 +49,6 @@ export const ActionBar = memo(
      collaborativeBatchToggleBlockHandles,
    } = useCollaborativeWorkflow()
    const { setPendingSelection } = useWorkflowRegistry()
    const { handleRunFromBlock } = useWorkflowExecution()
    const addNotification = useNotificationStore((s) => s.addNotification)
@@ -100,39 +97,12 @@ export const ActionBar = memo(
      )
    )
    const { activeWorkflowId } = useWorkflowRegistry()
    const { isExecuting, getLastExecutionSnapshot } = useExecutionStore()
    const userPermissions = useUserPermissionsContext()
    const edges = useWorkflowStore((state) => state.edges)
    const isStartBlock = isInputDefinitionTrigger(blockType)
    const isResponseBlock = blockType === 'response'
    const isNoteBlock = blockType === 'note'
    const isSubflowBlock = blockType === 'loop' || blockType === 'parallel'
    const isInsideSubflow = parentId && (parentType === 'loop' || parentType === 'parallel')
const snapshot = activeWorkflowId ? getLastExecutionSnapshot(activeWorkflowId) : null
const incomingEdges = edges.filter((edge) => edge.target === blockId)
const isTriggerBlock = incomingEdges.length === 0
// Check if each source block is either executed OR is a trigger block (triggers don't need prior execution)
const isSourceSatisfied = (sourceId: string) => {
if (snapshot?.executedBlocks.includes(sourceId)) return true
// Check if source is a trigger (has no incoming edges itself)
const sourceIncomingEdges = edges.filter((edge) => edge.target === sourceId)
return sourceIncomingEdges.length === 0
}
// Non-trigger blocks need a snapshot to exist (so upstream outputs are available)
const dependenciesSatisfied =
isTriggerBlock || (snapshot && incomingEdges.every((edge) => isSourceSatisfied(edge.source)))
const canRunFromBlock =
dependenciesSatisfied && !isNoteBlock && !isInsideSubflow && !isExecuting
const handleRunFromBlockClick = useCallback(() => {
if (!activeWorkflowId || !canRunFromBlock) return
handleRunFromBlock(blockId, activeWorkflowId)
}, [blockId, activeWorkflowId, canRunFromBlock, handleRunFromBlock])
    /**
     * Get appropriate tooltip message based on disabled state
@@ -158,35 +128,30 @@ export const ActionBar = memo(
          'dark:border-transparent dark:bg-[var(--surface-4)]'
        )}
      >
        {!isNoteBlock && !isInsideSubflow && (
        {!isNoteBlock && (
          <Tooltip.Root>
            <Tooltip.Trigger asChild>
              <Button
                variant='ghost'
                onClick={(e) => {
                  e.stopPropagation()
                  if (canRunFromBlock && !disabled) {
                  if (!disabled) {
                    handleRunFromBlockClick()
                    collaborativeBatchToggleBlockEnabled([blockId])
                  }
                }}
                className={ACTION_BUTTON_STYLES}
                disabled={disabled || !canRunFromBlock}
                disabled={disabled}
              >
                <PlayOutline className={ICON_SIZE} />
                {isEnabled ? <Circle className={ICON_SIZE} /> : <CircleOff className={ICON_SIZE} />}
              </Button>
            </Tooltip.Trigger>
            <Tooltip.Content side='top'>
              {getTooltipMessage(isEnabled ? 'Disable Block' : 'Enable Block')}
              {(() => {
if (disabled) return getTooltipMessage('Run from block')
if (isExecuting) return 'Execution in progress'
if (!dependenciesSatisfied) return 'Run upstream blocks first'
return 'Run from block'
})()}
            </Tooltip.Content>
          </Tooltip.Root>
        )}
        {!isNoteBlock && (
        {isSubflowBlock && (
          <Tooltip.Root>
            <Tooltip.Trigger asChild>
              <Button
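For reference, the run-from-block gating removed above reduces to a predecessor check over the workflow graph. A standalone sketch of that check, with simplified types and illustrative names:

// Simplified sketch of the removed gating: a block can run "from" itself when every
// incoming edge comes from a block that already executed in the last snapshot or is
// itself a trigger (no incoming edges of its own).
interface Edge { source: string; target: string }

function canRunFromBlock(blockId: string, edges: Edge[], executedBlocks: string[]): boolean {
  const incoming = edges.filter((e) => e.target === blockId)
  if (incoming.length === 0) return true // positional trigger: nothing upstream to satisfy
  return incoming.every((e) => {
    if (executedBlocks.includes(e.source)) return true
    // Sources with no incoming edges are triggers and don't need prior execution
    return edges.every((other) => other.target !== e.source)
  })
}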

View File

@@ -40,16 +40,9 @@ export interface BlockMenuProps {
  onRemoveFromSubflow: () => void
  onOpenEditor: () => void
  onRename: () => void
  onRunFromBlock?: () => void
  onRunUntilBlock?: () => void
  hasClipboard?: boolean
  showRemoveFromSubflow?: boolean
  /** Whether run from block is available (has snapshot, was executed, not inside subflow) */
  canRunFromBlock?: boolean
  disableEdit?: boolean
  isExecuting?: boolean
  /** Whether the selected block is a trigger (has no incoming edges) */
  isPositionalTrigger?: boolean
}
/**
@@ -72,14 +65,9 @@ export function BlockMenu({
  onRemoveFromSubflow,
  onOpenEditor,
  onRename,
  onRunFromBlock,
  onRunUntilBlock,
  hasClipboard = false,
  showRemoveFromSubflow = false,
  canRunFromBlock = false,
  disableEdit = false,
  isExecuting = false,
  isPositionalTrigger = false,
}: BlockMenuProps) {
  const isSingleBlock = selectedBlocks.length === 1
@@ -90,15 +78,10 @@ export function BlockMenu({
    (b) =>
      TriggerUtils.requiresSingleInstance(b.type) || TriggerUtils.isSingleInstanceBlockType(b.type)
  )
  const hasTriggerBlock = selectedBlocks.some((b) => TriggerUtils.isTriggerBlock(b))
  // A block is a trigger if it's explicitly a trigger type OR has no incoming edges (positional trigger)
  const hasTriggerBlock =
    selectedBlocks.some((b) => TriggerUtils.isTriggerBlock(b)) || isPositionalTrigger
  const allNoteBlocks = selectedBlocks.every((b) => b.type === 'note')
  const isSubflow =
    isSingleBlock && (selectedBlocks[0]?.type === 'loop' || selectedBlocks[0]?.type === 'parallel')
  const isInsideSubflow =
    isSingleBlock &&
    (selectedBlocks[0]?.parentType === 'loop' || selectedBlocks[0]?.parentType === 'parallel')
  const canRemoveFromSubflow = showRemoveFromSubflow && !hasTriggerBlock
@@ -220,38 +203,6 @@ export function BlockMenu({
        </PopoverItem>
      )}
{/* Run from/until block - only for single non-note block, not inside subflows */}
{isSingleBlock && !allNoteBlocks && !isInsideSubflow && (
<>
<PopoverDivider />
<PopoverItem
disabled={!canRunFromBlock || isExecuting}
onClick={() => {
if (canRunFromBlock && !isExecuting) {
onRunFromBlock?.()
onClose()
}
}}
>
Run from block
</PopoverItem>
{/* Hide "Run until" for triggers - they're always at the start */}
{!hasTriggerBlock && (
<PopoverItem
disabled={isExecuting}
onClick={() => {
if (!isExecuting) {
onRunUntilBlock?.()
onClose()
}
}}
>
Run until block
</PopoverItem>
)}
</>
)}
      {/* Destructive action */}
      <PopoverDivider />
      <PopoverItem

View File

@@ -1,170 +0,0 @@
'use client'
import { useCallback, useRef, useState } from 'react'
import {
Button,
Modal,
ModalBody,
ModalContent,
ModalFooter,
ModalHeader,
Textarea,
} from '@/components/emcn'
import {
useGenerateVersionDescription,
useUpdateDeploymentVersion,
} from '@/hooks/queries/deployments'
interface VersionDescriptionModalProps {
open: boolean
onOpenChange: (open: boolean) => void
workflowId: string
version: number
versionName: string
currentDescription: string | null | undefined
}
export function VersionDescriptionModal({
open,
onOpenChange,
workflowId,
version,
versionName,
currentDescription,
}: VersionDescriptionModalProps) {
const initialDescriptionRef = useRef(currentDescription || '')
const [description, setDescription] = useState(initialDescriptionRef.current)
const [showUnsavedChangesAlert, setShowUnsavedChangesAlert] = useState(false)
const updateMutation = useUpdateDeploymentVersion()
const generateMutation = useGenerateVersionDescription()
const hasChanges = description.trim() !== initialDescriptionRef.current.trim()
const isGenerating = generateMutation.isPending
const handleCloseAttempt = useCallback(() => {
if (updateMutation.isPending || isGenerating) {
return
}
if (hasChanges) {
setShowUnsavedChangesAlert(true)
} else {
onOpenChange(false)
}
}, [hasChanges, updateMutation.isPending, isGenerating, onOpenChange])
const handleDiscardChanges = useCallback(() => {
setShowUnsavedChangesAlert(false)
setDescription(initialDescriptionRef.current)
onOpenChange(false)
}, [onOpenChange])
const handleGenerateDescription = useCallback(() => {
generateMutation.mutate({
workflowId,
version,
onStreamChunk: (accumulated) => {
setDescription(accumulated)
},
})
}, [workflowId, version, generateMutation])
const handleSave = useCallback(() => {
if (!workflowId) return
updateMutation.mutate(
{
workflowId,
version,
description: description.trim() || null,
},
{
onSuccess: () => {
onOpenChange(false)
},
}
)
}, [workflowId, version, description, updateMutation, onOpenChange])
return (
<>
<Modal open={open} onOpenChange={(openState) => !openState && handleCloseAttempt()}>
<ModalContent className='max-w-[480px]'>
<ModalHeader>
<span>Version Description</span>
</ModalHeader>
<ModalBody className='space-y-[10px]'>
<div className='flex items-center justify-between'>
<p className='text-[12px] text-[var(--text-secondary)]'>
{currentDescription ? 'Edit the' : 'Add a'} description for{' '}
<span className='font-medium text-[var(--text-primary)]'>{versionName}</span>
</p>
<Button
variant='active'
className='-my-1 h-5 px-2 py-0 text-[11px]'
onClick={handleGenerateDescription}
disabled={isGenerating || updateMutation.isPending}
>
{isGenerating ? 'Generating...' : 'Generate'}
</Button>
</div>
<Textarea
placeholder='Describe the changes in this deployment version...'
className='min-h-[120px] resize-none'
value={description}
onChange={(e) => setDescription(e.target.value)}
maxLength={500}
disabled={isGenerating}
/>
<div className='flex items-center justify-between'>
{(updateMutation.error || generateMutation.error) && (
<p className='text-[12px] text-[var(--text-error)]'>
{updateMutation.error?.message || generateMutation.error?.message}
</p>
)}
{!updateMutation.error && !generateMutation.error && <div />}
<p className='text-[11px] text-[var(--text-tertiary)]'>{description.length}/500</p>
</div>
</ModalBody>
<ModalFooter>
<Button
variant='default'
onClick={handleCloseAttempt}
disabled={updateMutation.isPending || isGenerating}
>
Cancel
</Button>
<Button
variant='tertiary'
onClick={handleSave}
disabled={updateMutation.isPending || isGenerating || !hasChanges}
>
{updateMutation.isPending ? 'Saving...' : 'Save'}
</Button>
</ModalFooter>
</ModalContent>
</Modal>
<Modal open={showUnsavedChangesAlert} onOpenChange={setShowUnsavedChangesAlert}>
<ModalContent className='max-w-[400px]'>
<ModalHeader>
<span>Unsaved Changes</span>
</ModalHeader>
<ModalBody>
<p className='text-[14px] text-[var(--text-secondary)]'>
You have unsaved changes. Are you sure you want to discard them?
</p>
</ModalBody>
<ModalFooter>
<Button variant='default' onClick={() => setShowUnsavedChangesAlert(false)}>
Keep Editing
</Button>
<Button variant='destructive' onClick={handleDiscardChanges}>
Discard Changes
</Button>
</ModalFooter>
</ModalContent>
</Modal>
</>
)
}
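The deleted modal above drove its Generate button through useGenerateVersionDescription, feeding each accumulated chunk straight into setDescription. A hedged sketch of that streaming-accumulation pattern; the endpoint path and response format below are assumptions for illustration, not the project's actual API:

// Hypothetical sketch of a streaming "generate description" call: read a text stream
// and report the accumulated description so far via onStreamChunk.
async function generateDescription(
  workflowId: string,
  version: number,
  onStreamChunk: (accumulated: string) => void
): Promise<string> {
  const res = await fetch(`/api/workflows/${workflowId}/deployments/${version}/description`, {
    method: 'POST',
  })
  if (!res.ok || !res.body) throw new Error('Failed to start generation')
  const reader = res.body.getReader()
  const decoder = new TextDecoder()
  let accumulated = ''
  while (true) {
    const { done, value } = await reader.read()
    if (done) break
    if (value) accumulated += decoder.decode(value, { stream: true })
    onStreamChunk(accumulated) // the modal fed this straight into setDescription
  }
  return accumulated
}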

View File

@@ -1,31 +1,26 @@
'use client'
import { useEffect, useRef, useState } from 'react'
import { createLogger } from '@sim/logger'
import clsx from 'clsx'
import { FileText, MoreVertical, Pencil, RotateCcw, SendToBack } from 'lucide-react'
import { MoreVertical, Pencil, RotateCcw, SendToBack } from 'lucide-react'
import {
  Button,
  Popover,
  PopoverContent,
  PopoverItem,
  PopoverTrigger,
  Tooltip,
} from '@/components/emcn'
import { Button, Popover, PopoverContent, PopoverItem, PopoverTrigger } from '@/components/emcn'
import { Skeleton } from '@/components/ui'
import { formatDateTime } from '@/lib/core/utils/formatting'
import type { WorkflowDeploymentVersionResponse } from '@/lib/workflows/persistence/utils'
import { useUpdateDeploymentVersion } from '@/hooks/queries/deployments'
import { VersionDescriptionModal } from './version-description-modal'
const logger = createLogger('Versions')
/** Shared styling constants aligned with terminal component */
const HEADER_TEXT_CLASS = 'font-medium text-[var(--text-tertiary)] text-[12px]'
const ROW_TEXT_CLASS = 'font-medium text-[var(--text-primary)] text-[12px]'
const COLUMN_BASE_CLASS = 'flex-shrink-0'
/** Column width configuration */
const COLUMN_WIDTHS = {
  VERSION: 'w-[180px]',
  DEPLOYED_BY: 'w-[140px]',
  TIMESTAMP: 'flex-1',
  ACTIONS: 'w-[56px]',
  ACTIONS: 'w-[32px]',
} as const
interface VersionsProps {
@@ -36,6 +31,34 @@ interface VersionsProps {
  onSelectVersion: (version: number | null) => void
  onPromoteToLive: (version: number) => void
  onLoadDeployment: (version: number) => void
fetchVersions: () => Promise<void>
}
/**
* Formats a timestamp into a readable string.
* @param value - The date string or Date object to format
* @returns Formatted string like "8:36 PM PT on Oct 11, 2025"
*/
const formatDate = (value: string | Date): string => {
const date = value instanceof Date ? value : new Date(value)
if (Number.isNaN(date.getTime())) {
return '-'
}
const timePart = date.toLocaleTimeString('en-US', {
hour: 'numeric',
minute: '2-digit',
hour12: true,
timeZoneName: 'short',
})
const datePart = date.toLocaleDateString('en-US', {
month: 'short',
day: 'numeric',
year: 'numeric',
})
return `${timePart} on ${datePart}`
}
/**
@@ -50,15 +73,14 @@ export function Versions({
  onSelectVersion,
  onPromoteToLive,
  onLoadDeployment,
  fetchVersions,
}: VersionsProps) {
  const [editingVersion, setEditingVersion] = useState<number | null>(null)
  const [editValue, setEditValue] = useState('')
  const [isRenaming, setIsRenaming] = useState(false)
  const [openDropdown, setOpenDropdown] = useState<number | null>(null)
  const [descriptionModalVersion, setDescriptionModalVersion] = useState<number | null>(null)
  const inputRef = useRef<HTMLInputElement>(null)
  const renameMutation = useUpdateDeploymentVersion()
  useEffect(() => {
    if (editingVersion !== null && inputRef.current) {
      inputRef.current.focus()
@@ -72,8 +94,7 @@ export function Versions({
    setEditValue(currentName || `v${version}`)
  }
  const handleSaveRename = (version: number) => {
  const handleSaveRename = async (version: number) => {
    if (renameMutation.isPending) return
    if (!workflowId || !editValue.trim()) {
      setEditingVersion(null)
      return
@@ -87,21 +108,25 @@ export function Versions({
      return
    }
    renameMutation.mutate(
      {
        workflowId,
        version,
        name: editValue.trim(),
      },
      {
        onSuccess: () => {
          setEditingVersion(null)
        },
        onError: () => {
          // Keep editing state open on error so user can retry
        },
      }
    )
    setIsRenaming(true)
    try {
      const res = await fetch(`/api/workflows/${workflowId}/deployments/${version}`, {
        method: 'PATCH',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ name: editValue.trim() }),
      })
      if (res.ok) {
        await fetchVersions()
        setEditingVersion(null)
      } else {
        logger.error('Failed to rename version')
      }
    } catch (error) {
      logger.error('Error renaming version:', error)
    } finally {
      setIsRenaming(false)
    }
  }
  const handleCancelRename = () => {
@@ -124,16 +149,6 @@ export function Versions({
    onLoadDeployment(version)
  }
const handleOpenDescriptionModal = (version: number) => {
setOpenDropdown(null)
setDescriptionModalVersion(version)
}
const descriptionModalVersionData =
descriptionModalVersion !== null
? versions.find((v) => v.version === descriptionModalVersion)
: null
  if (versionsLoading && versions.length === 0) {
    return (
      <div className='overflow-hidden rounded-[4px] border border-[var(--border)]'>
@@ -164,14 +179,7 @@ export function Versions({
          <div className={clsx(COLUMN_WIDTHS.TIMESTAMP, 'min-w-0')}>
            <Skeleton className='h-[12px] w-[160px]' />
          </div>
          <div className={clsx(COLUMN_WIDTHS.ACTIONS, COLUMN_BASE_CLASS, 'flex justify-end')}>
          <div
className={clsx(
COLUMN_WIDTHS.ACTIONS,
COLUMN_BASE_CLASS,
'flex justify-end gap-[2px]'
)}
>
<Skeleton className='h-[20px] w-[20px] rounded-[4px]' />
            <Skeleton className='h-[20px] w-[20px] rounded-[4px]' />
          </div>
        </div>
@@ -249,7 +257,7 @@ export function Versions({
                      'text-[var(--text-primary)] focus:outline-none focus:ring-0'
                    )}
                    maxLength={100}
                    disabled={renameMutation.isPending}
                    disabled={isRenaming}
                    autoComplete='off'
                    autoCorrect='off'
                    autoCapitalize='off'
@@ -281,40 +289,14 @@ export function Versions({
                  <span
                    className={clsx('block truncate text-[var(--text-tertiary)]', ROW_TEXT_CLASS)}
                  >
                    {formatDateTime(new Date(v.createdAt))}
                    {formatDate(v.createdAt)}
                  </span>
                </div>
                <div
                  className={clsx(COLUMN_WIDTHS.ACTIONS, COLUMN_BASE_CLASS, 'flex justify-end')}
                  className={clsx(
COLUMN_WIDTHS.ACTIONS,
COLUMN_BASE_CLASS,
'flex items-center justify-end gap-[2px]'
)}
                  onClick={(e) => e.stopPropagation()}
                >
<Tooltip.Root>
<Tooltip.Trigger asChild>
<Button
variant='ghost'
className={clsx(
'!p-1',
!v.description &&
'text-[var(--text-quaternary)] hover:text-[var(--text-tertiary)]'
)}
onClick={() => handleOpenDescriptionModal(v.version)}
>
<FileText className='h-3.5 w-3.5' />
</Button>
</Tooltip.Trigger>
<Tooltip.Content side='top' className='max-w-[240px]'>
{v.description ? (
<p className='line-clamp-3 text-[12px]'>{v.description}</p>
) : (
<p className='text-[12px]'>Add description</p>
)}
</Tooltip.Content>
</Tooltip.Root>
                  <Popover
                    open={openDropdown === v.version}
                    onOpenChange={(open) => setOpenDropdown(open ? v.version : null)}
@@ -329,10 +311,6 @@ export function Versions({
                      <Pencil className='h-3 w-3' />
                      <span>Rename</span>
                    </PopoverItem>
<PopoverItem onClick={() => handleOpenDescriptionModal(v.version)}>
<FileText className='h-3 w-3' />
<span>{v.description ? 'Edit description' : 'Add description'}</span>
</PopoverItem>
                    {!v.isActive && (
                      <PopoverItem onClick={() => handlePromote(v.version)}>
                        <RotateCcw className='h-3 w-3' />
@@ -350,20 +328,6 @@ export function Versions({
          )
        })}
      </div>
{workflowId && descriptionModalVersionData && (
<VersionDescriptionModal
key={descriptionModalVersionData.version}
open={descriptionModalVersion !== null}
onOpenChange={(open) => !open && setDescriptionModalVersion(null)}
workflowId={workflowId}
version={descriptionModalVersionData.version}
versionName={
descriptionModalVersionData.name || `v${descriptionModalVersionData.version}`
}
currentDescription={descriptionModalVersionData.description}
/>
)}
    </div>
  )
}
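A quick note on the formatDate helper introduced above: it formats in the runtime's local timezone and locale, so the zone label varies by environment.

// Example output of formatDate; the exact zone label ("PT", "PDT", etc.) depends on
// the runtime's timezone and locale data.
console.log(formatDate('2025-10-11T20:36:00-07:00')) // e.g. "8:36 PM PDT on Oct 11, 2025"
console.log(formatDate('not-a-date')) // "-"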

View File

@@ -32,6 +32,7 @@ interface GeneralDeployProps {
  versionsLoading: boolean
  onPromoteToLive: (version: number) => Promise<void>
  onLoadDeploymentComplete: () => void
  fetchVersions: () => Promise<void>
}
type PreviewMode = 'active' | 'selected'
@@ -47,6 +48,7 @@ export function GeneralDeploy({
  versionsLoading,
  onPromoteToLive,
  onLoadDeploymentComplete,
  fetchVersions,
}: GeneralDeployProps) {
  const [selectedVersion, setSelectedVersion] = useState<number | null>(null)
  const [previewMode, setPreviewMode] = useState<PreviewMode>('active')
@@ -227,6 +229,7 @@ export function GeneralDeploy({
            onSelectVersion={handleSelectVersion}
            onPromoteToLive={handlePromoteToLive}
            onLoadDeployment={handleLoadDeployment}
            fetchVersions={fetchVersions}
          />
        </div>
      </div>

View File

@@ -135,9 +135,11 @@ export function DeployModal({
    refetch: refetchDeploymentInfo,
  } = useDeploymentInfo(workflowId, { enabled: open && isDeployed })
  const { data: versionsData, isLoading: versionsLoading } = useDeploymentVersions(workflowId, {
    enabled: open,
  })
  const {
    data: versionsData,
    isLoading: versionsLoading,
    refetch: refetchVersions,
  } = useDeploymentVersions(workflowId, { enabled: open })
  const {
    isLoading: isLoadingChat,
@@ -448,6 +450,10 @@ export function DeployModal({
    deleteTrigger?.click()
  }, [])
const handleFetchVersions = useCallback(async () => {
await refetchVersions()
}, [refetchVersions])
  const isSubmitting = deployMutation.isPending
  const isUndeploying = undeployMutation.isPending
@@ -506,6 +512,7 @@ export function DeployModal({
              versionsLoading={versionsLoading}
              onPromoteToLive={handlePromoteToLive}
              onLoadDeploymentComplete={handleCloseModal}
              fetchVersions={handleFetchVersions}
            />
          </ModalTabsContent>

View File

@@ -3,9 +3,8 @@
import { useCallback, useRef, useState } from 'react'
import { createLogger } from '@sim/logger'
import clsx from 'clsx'
import { RepeatIcon, SplitIcon } from 'lucide-react'
import { ChevronDown, RepeatIcon, SplitIcon } from 'lucide-react'
import { useShallow } from 'zustand/react/shallow'
import { ChevronDown } from '@/components/emcn'
import {
  FieldItem,
  type SchemaField,
@@ -116,8 +115,9 @@ function ConnectionItem({
      {hasFields && (
        <ChevronDown
          className={clsx(
            'h-[8px] w-[8px] flex-shrink-0 text-[var(--text-tertiary)] transition-transform duration-100 group-hover:text-[var(--text-primary)]',
            !isExpanded && '-rotate-90'
            'h-3.5 w-3.5 flex-shrink-0 transition-transform duration-100',
            'text-[var(--text-secondary)] group-hover:text-[var(--text-primary)]',
            isExpanded && 'rotate-180'
          )}
        />
      )}

View File

@@ -1,5 +1,5 @@
import { memo, useCallback, useEffect, useImperativeHandle, useMemo, useRef, useState } from 'react'
import { Wand2 } from 'lucide-react'
import { Check, Copy, Wand2 } from 'lucide-react'
import { useReactFlow } from 'reactflow'
import { Input } from '@/components/emcn'
import { Button } from '@/components/ui/button'
@@ -40,6 +40,8 @@ interface ShortInputProps {
  disabled?: boolean
  /** Whether the input is read-only */
  readOnly?: boolean
  /** Whether to show a copy button */
  showCopyButton?: boolean
  /** Whether to use webhook URL as value */
  useWebhookUrl?: boolean
  /** Ref to expose wand control handlers to parent */
@@ -57,6 +59,7 @@ interface ShortInputProps {
 * - Handles drag-and-drop for connections and variable references
 * - Provides environment variable and tag autocomplete
 * - Password masking with reveal on focus
 * - Copy to clipboard functionality
 * - Integrates with ReactFlow for zoom control
 */
export const ShortInput = memo(function ShortInput({
@@ -71,12 +74,14 @@ export const ShortInput = memo(function ShortInput({
  previewValue,
  disabled = false,
  readOnly = false,
  showCopyButton = false,
  useWebhookUrl = false,
  wandControlRef,
  hideInternalWand = false,
}: ShortInputProps) {
  const [localContent, setLocalContent] = useState<string>('')
  const [isFocused, setIsFocused] = useState(false)
  const [copied, setCopied] = useState(false)
  const persistSubBlockValueRef = useRef<(value: string) => void>(() => {})
  const justPastedRef = useRef(false)
@@ -273,6 +278,18 @@ export const ShortInput = memo(function ShortInput({
    [reactFlowInstance]
  )
/**
* Handles copying the value to the clipboard.
*/
const handleCopy = useCallback(() => {
const textToCopy = useWebhookUrl ? webhookManagement?.webhookUrl : value?.toString()
if (textToCopy) {
navigator.clipboard.writeText(textToCopy)
setCopied(true)
setTimeout(() => setCopied(false), 2000)
}
}, [useWebhookUrl, webhookManagement?.webhookUrl, value])
  const handleBlur = useCallback(() => {
    setIsFocused(false)
  }, [])
@@ -349,7 +366,10 @@ export const ShortInput = memo(function ShortInput({
    <>
      <Input
        ref={ref as React.RefObject<HTMLInputElement>}
        className='allow-scroll w-full overflow-auto text-transparent caret-foreground [-ms-overflow-style:none] [scrollbar-width:none] placeholder:text-muted-foreground/50 [&::-webkit-scrollbar]:hidden'
        className={cn(
          'allow-scroll w-full overflow-auto text-transparent caret-foreground [-ms-overflow-style:none] [scrollbar-width:none] placeholder:text-muted-foreground/50 [&::-webkit-scrollbar]:hidden',
          showCopyButton && 'pr-14'
        )}
        readOnly={readOnly}
        placeholder={placeholder ?? ''}
        type='text'
@@ -372,7 +392,8 @@ export const ShortInput = memo(function ShortInput({
      <div
        ref={overlayRef}
        className={cn(
          'pointer-events-none absolute inset-0 flex items-center overflow-x-auto bg-transparent px-[8px] py-[6px] pr-3 font-medium font-sans text-foreground text-sm [-ms-overflow-style:none] [scrollbar-width:none] [&::-webkit-scrollbar]:hidden',
          'pointer-events-none absolute inset-0 flex items-center overflow-x-auto bg-transparent px-[8px] py-[6px] font-medium font-sans text-foreground text-sm [-ms-overflow-style:none] [scrollbar-width:none] [&::-webkit-scrollbar]:hidden',
          showCopyButton ? 'pr-14' : 'pr-3',
          (isPreview || disabled) && 'opacity-50'
        )}
      >
@@ -383,6 +404,27 @@ export const ShortInput = memo(function ShortInput({
        }}
      </SubBlockInputController>
{/* Copy Button */}
{showCopyButton && value && (
<div className='pointer-events-none absolute top-0 right-0 bottom-0 z-10 flex w-14 items-center justify-end pr-2 opacity-0 transition-opacity group-hover:opacity-100'>
<Button
type='button'
variant='ghost'
size='icon'
onClick={handleCopy}
disabled={!value}
className='pointer-events-auto h-6 w-6 p-0'
aria-label='Copy value'
>
{copied ? (
<Check className='h-3.5 w-3.5 text-green-500' />
) : (
<Copy className='h-3.5 w-3.5 text-muted-foreground' />
)}
</Button>
</div>
)}
      {/* Wand Button - only show if not hidden by parent */}
      {isWandEnabled && !isPreview && !wandHook.isStreaming && !hideInternalWand && (
        <div className='-translate-y-1/2 absolute top-1/2 right-3 z-10 flex items-center gap-1 opacity-0 transition-opacity group-hover:opacity-100'>
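The copy button added above follows a common transient-feedback pattern: write to the clipboard, flip a flag, reset it after a delay. A minimal standalone hook sketch; the hook name and delay are illustrative, not from the codebase:

import { useCallback, useState } from 'react'

// Minimal sketch of the copy-with-feedback pattern used by handleCopy above.
export function useCopyFeedback(resetAfterMs = 2000) {
  const [copied, setCopied] = useState(false)
  const copy = useCallback(
    (text: string | undefined) => {
      if (!text) return
      navigator.clipboard.writeText(text).catch(() => {})
      setCopied(true)
      setTimeout(() => setCopied(false), resetAfterMs)
    },
    [resetAfterMs]
  )
  return { copied, copy }
}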

View File

@@ -14,8 +14,6 @@ interface KeyboardNavigationHandlerProps {
  flatTagList: Array<{ tag: string; group?: BlockTagGroup }>
  nestedBlockTagGroups: NestedBlockTagGroup[]
  handleTagSelect: (tag: string, group?: BlockTagGroup) => void
  /** Called when entering a folder from root level via keyboard navigation */
  onFolderEnter?: () => void
}
/**
@@ -109,7 +107,6 @@ export const KeyboardNavigationHandler: React.FC<KeyboardNavigationHandlerProps>
  flatTagList,
  nestedBlockTagGroups,
  handleTagSelect,
  onFolderEnter,
}) => {
  const { openFolder, closeFolder, isInFolder, currentFolder, setKeyboardNav } = usePopoverContext()
  const nestedNav = useNestedNavigation()
@@ -254,7 +251,7 @@ export const KeyboardNavigationHandler: React.FC<KeyboardNavigationHandlerProps>
        } else if (currentVisibleIndex < visibleIndices.length - 1) {
          newIndex = visibleIndices[currentVisibleIndex + 1]
        } else {
          newIndex = selectedIndex
          newIndex = visibleIndices[0]
        }
        setSelectedIndex(newIndex)
        scrollIntoView()
@@ -272,7 +269,7 @@ export const KeyboardNavigationHandler: React.FC<KeyboardNavigationHandlerProps>
        } else if (currentVisibleIndex > 0) {
          newIndex = visibleIndices[currentVisibleIndex - 1]
        } else {
          newIndex = selectedIndex
          newIndex = visibleIndices[visibleIndices.length - 1]
        }
        setSelectedIndex(newIndex)
        scrollIntoView()
@@ -298,7 +295,6 @@ export const KeyboardNavigationHandler: React.FC<KeyboardNavigationHandlerProps>
              currentFolderInfo.parentTag,
              currentFolderInfo.group
            )
            onFolderEnter?.()
          }
        }
        break
@@ -350,7 +346,6 @@ export const KeyboardNavigationHandler: React.FC<KeyboardNavigationHandlerProps>
    handleTagSelect,
    nestedNav,
    setKeyboardNav,
    onFolderEnter,
  ])
  return null
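The two changes above make ArrowDown/ArrowUp wrap around the visible items instead of pinning to the current index at either end. The same index arithmetic as a standalone sketch; the helper name is illustrative:

// Illustrative wrap-around step over a list of visible indices, mirroring the
// ArrowDown/ArrowUp behavior introduced above.
function stepIndex(visibleIndices: number[], current: number, direction: 1 | -1): number {
  const pos = visibleIndices.indexOf(current)
  if (pos === -1) return visibleIndices[0] ?? current
  const next = pos + direction
  if (next >= visibleIndices.length) return visibleIndices[0] // wrap to first
  if (next < 0) return visibleIndices[visibleIndices.length - 1] // wrap to last
  return visibleIndices[next]
}

// e.g. stepIndex([2, 5, 9], 9, 1) === 2 and stepIndex([2, 5, 9], 2, -1) === 9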

View File

@@ -444,12 +444,10 @@ interface NestedTagRendererProps {
  nestedTag: NestedTag
  group: NestedBlockTagGroup
  flatTagList: Array<{ tag: string; group?: BlockTagGroup }>
  /** Map from tag string to index for O(1) lookups */
  flatTagIndexMap: Map<string, number>
  selectedIndex: number
  setSelectedIndex: (index: number) => void
  handleTagSelect: (tag: string, blockGroup?: BlockTagGroup) => void
  itemRefs: React.RefObject<Map<string, HTMLElement>>
  itemRefs: React.RefObject<Map<number, HTMLElement>>
  blocks: Record<string, BlockState>
  getMergedSubBlocks: (blockId: string) => Record<string, any>
}
@@ -471,7 +469,6 @@ interface FolderContentsProps extends NestedTagRendererProps {
const FolderContentsInner: React.FC<FolderContentsProps> = ({
  group,
  flatTagList,
  flatTagIndexMap,
  selectedIndex,
  setSelectedIndex,
  handleTagSelect,
@@ -486,7 +483,7 @@ const FolderContentsInner: React.FC<FolderContentsProps> = ({
  const currentNestedTag = nestedPath.length > 0 ? nestedPath[nestedPath.length - 1] : nestedTag
  const parentTagIndex = currentNestedTag.parentTag
    ? (flatTagIndexMap.get(currentNestedTag.parentTag) ?? -1)
    ? flatTagList.findIndex((item) => item.tag === currentNestedTag.parentTag)
    : -1
  return (
@@ -496,6 +493,7 @@ const FolderContentsInner: React.FC<FolderContentsProps> = ({
      <PopoverItem
        active={parentTagIndex === selectedIndex && parentTagIndex >= 0}
        onMouseEnter={() => {
          // Skip selection update during keyboard navigation to prevent scroll-triggered selection changes
          if (isKeyboardNav) return
          setKeyboardNav(false)
          if (parentTagIndex >= 0) setSelectedIndex(parentTagIndex)
@@ -506,8 +504,8 @@ const FolderContentsInner: React.FC<FolderContentsProps> = ({
          handleTagSelect(currentNestedTag.parentTag!, group)
        }}
        ref={(el) => {
          if (el && currentNestedTag.parentTag) {
          if (el && parentTagIndex >= 0) {
            itemRefs.current?.set(currentNestedTag.parentTag, el)
            itemRefs.current?.set(parentTagIndex, el)
          }
        }}
      >
@@ -517,7 +515,7 @@ const FolderContentsInner: React.FC<FolderContentsProps> = ({
      {/* Render leaf children as PopoverItems */}
      {currentNestedTag.children?.map((child) => {
        const childGlobalIndex = flatTagIndexMap.get(child.fullTag) ?? -1
        const childGlobalIndex = flatTagList.findIndex((item) => item.tag === child.fullTag)
        const tagParts = child.fullTag.split('.')
        const outputPath = tagParts.slice(1).join('.')
@@ -552,8 +550,8 @@ const FolderContentsInner: React.FC<FolderContentsProps> = ({
              handleTagSelect(child.fullTag, group)
            }}
            ref={(el) => {
              if (el) {
              if (el && childGlobalIndex >= 0) {
                itemRefs.current?.set(child.fullTag, el)
                itemRefs.current?.set(childGlobalIndex, el)
              }
            }}
          >
@@ -570,7 +568,7 @@ const FolderContentsInner: React.FC<FolderContentsProps> = ({
      {/* Render nested children as clickable folder items */}
      {currentNestedTag.nestedChildren?.map((nestedChild) => {
        const parentGlobalIndex = nestedChild.parentTag
          ? (flatTagIndexMap.get(nestedChild.parentTag) ?? -1)
          ? flatTagList.findIndex((item) => item.tag === nestedChild.parentTag)
          : -1
        return (
@@ -585,11 +583,12 @@ const FolderContentsInner: React.FC<FolderContentsProps> = ({
            onMouseDown={(e) => {
              e.preventDefault()
              e.stopPropagation()
              // Navigate into the subfolder on click
              onNavigateIn(nestedChild)
            }}
            ref={(el) => {
              if (el && nestedChild.parentTag) {
              if (el && parentGlobalIndex >= 0) {
                itemRefs.current?.set(nestedChild.parentTag, el)
                itemRefs.current?.set(parentGlobalIndex, el)
              }
            }}
          >
@@ -606,7 +605,7 @@ const FolderContentsInner: React.FC<FolderContentsProps> = ({
 * Wrapper component that uses shared nested navigation state from context.
 * Handles registration of the base folder and navigation callbacks.
 */
const FolderContents: React.FC<Omit<NestedTagRendererProps, never>> = (props) => {
const FolderContents: React.FC<NestedTagRendererProps> = (props) => {
  const nestedNav = useNestedNavigation()
  const { currentFolder } = usePopoverContext()
@@ -639,7 +638,6 @@ const NestedTagRenderer: React.FC<NestedTagRendererProps> = ({
  nestedTag,
  group,
  flatTagList,
  flatTagIndexMap,
  selectedIndex,
  setSelectedIndex,
  handleTagSelect,
@@ -655,7 +653,7 @@ const NestedTagRenderer: React.FC<NestedTagRendererProps> = ({
  const folderId = `${group.blockId}-${nestedTag.key}`
  const parentGlobalIndex = nestedTag.parentTag
    ? (flatTagIndexMap.get(nestedTag.parentTag) ?? -1)
    ? flatTagList.findIndex((item) => item.tag === nestedTag.parentTag)
    : -1
  return (
@@ -677,8 +675,8 @@ const NestedTagRenderer: React.FC<NestedTagRendererProps> = ({
          }
        }}
        ref={(el) => {
          if (el && nestedTag.parentTag) {
          if (el && parentGlobalIndex >= 0) {
            itemRefs.current?.set(nestedTag.parentTag, el)
            itemRefs.current?.set(parentGlobalIndex, el)
          }
        }}
      >
@@ -686,7 +684,6 @@ const NestedTagRenderer: React.FC<NestedTagRendererProps> = ({
          nestedTag={nestedTag}
          group={group}
          flatTagList={flatTagList}
          flatTagIndexMap={flatTagIndexMap}
          selectedIndex={selectedIndex}
          setSelectedIndex={setSelectedIndex}
          handleTagSelect={handleTagSelect}
@@ -698,7 +695,10 @@ const NestedTagRenderer: React.FC<NestedTagRendererProps> = ({
  )
}
const globalIndex = nestedTag.fullTag ? (flatTagIndexMap.get(nestedTag.fullTag) ?? -1) : -1
// Leaf tag - render as a simple PopoverItem
const globalIndex = nestedTag.fullTag
  ? flatTagList.findIndex((item) => item.tag === nestedTag.fullTag)
  : -1
let tagDescription = ''
@@ -751,8 +751,8 @@ const NestedTagRenderer: React.FC<NestedTagRendererProps> = ({
          }
        }}
        ref={(el) => {
          if (el && nestedTag.fullTag) {
          if (el && globalIndex >= 0) {
            itemRefs.current?.set(nestedTag.fullTag, el)
            itemRefs.current?.set(globalIndex, el)
          }
        }}
      >
@@ -767,7 +767,7 @@ const NestedTagRenderer: React.FC<NestedTagRendererProps> = ({
}
/**
 * Hook to get mouse enter handler that respects keyboard navigation state.
 * Hook to get mouse enter handler that respects keyboard navigation mode.
 * Returns a handler that only updates selection if not in keyboard mode.
 */
const useKeyboardAwareMouseEnter = (
@@ -794,7 +794,7 @@ const VariableTagItem: React.FC<{
  selectedIndex: number
  setSelectedIndex: (index: number) => void
  handleTagSelect: (tag: string) => void
  itemRefs: React.RefObject<Map<string, HTMLElement>>
  itemRefs: React.RefObject<Map<number, HTMLElement>>
  variableInfo: { type: string; id: string } | null
}> = ({
  tag,
@@ -819,8 +819,8 @@ const VariableTagItem: React.FC<{
        handleTagSelect(tag)
      }}
      ref={(el) => {
        if (el) {
        if (el && globalIndex >= 0) {
          itemRefs.current?.set(tag, el)
          itemRefs.current?.set(globalIndex, el)
        }
      }}
    >
@@ -845,7 +845,7 @@ const BlockRootTagItem: React.FC<{
  selectedIndex: number
  setSelectedIndex: (index: number) => void
  handleTagSelect: (tag: string, group?: BlockTagGroup) => void
  itemRefs: React.RefObject<Map<string, HTMLElement>>
  itemRefs: React.RefObject<Map<number, HTMLElement>>
  group: BlockTagGroup
  tagIcon: string | React.ComponentType<{ className?: string }>
  blockColor: string
@@ -875,8 +875,8 @@ const BlockRootTagItem: React.FC<{
        handleTagSelect(rootTag, group)
      }}
      ref={(el) => {
        if (el) {
        if (el && rootTagGlobalIndex >= 0) {
          itemRefs.current?.set(rootTag, el)
          itemRefs.current?.set(rootTagGlobalIndex, el)
        }
      }}
    >
@@ -916,12 +916,16 @@ const TagDropdownBackButton: React.FC = () => {
  const handleBackClick = (e: React.MouseEvent) => {
    e.stopPropagation()
    // Try to navigate back in nested path first
    if (nestedNav?.navigateBack()) {
      // Successfully navigated back one level
      return
    }
    // At root folder level, close the folder
    closeFolder()
  }
  // Just render the back button - the parent tag is rendered as the first item in FolderContentsInner
  return (
    <div
      className={cn(
@@ -982,7 +986,7 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
  inputRef,
}) => {
  const [selectedIndex, setSelectedIndex] = useState(0)
  const itemRefs = useRef<Map<string, HTMLElement>>(new Map())
  const itemRefs = useRef<Map<number, HTMLElement>>(new Map())
  const [nestedPath, setNestedPath] = useState<NestedTag[]>([])
  const baseFolderRef = useRef<{
@@ -994,11 +998,6 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
  const handleTagSelectRef = useRef<((tag: string, group?: BlockTagGroup) => void) | null>(null)
  const scrollAreaRef = useRef<HTMLDivElement>(null)
  const inputValueRef = useRef(inputValue)
  const cursorPositionRef = useRef(cursorPosition)
  inputValueRef.current = inputValue
  cursorPositionRef.current = cursorPosition
  const { blocks, edges, loops, parallels } = useWorkflowStore(
    useShallow((state) => ({
      blocks: state.blocks,
@@ -1701,27 +1700,27 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
    return list
  }, [variableTags, nestedBlockTagGroups])
  /**
   * Map from tag string to its index in flatTagList for O(1) lookups.
   * Replaces O(n) findIndex calls throughout the component.
   */
  const flatTagIndexMap = useMemo(() => {
    const map = new Map<string, number>()
    flatTagList.forEach((item, index) => {
      map.set(item.tag, index)
    })
    return map
  }, [flatTagList])
  useEffect(() => {
    if (!visible || selectedIndex < 0) return
    const element = itemRefs.current.get(selectedIndex)
    if (element) {
      element.scrollIntoView({
        behavior: 'auto',
        block: 'nearest',
      })
    }
  }, [selectedIndex, visible])
  const handleTagSelect = useCallback(
    (tag: string, blockGroup?: BlockTagGroup) => {
      let liveCursor = cursorPositionRef.current
      let liveCursor = cursorPosition
      let liveValue = inputValueRef.current
      let liveValue = inputValue
      if (typeof window !== 'undefined' && document?.activeElement) {
        const activeEl = document.activeElement as HTMLInputElement | HTMLTextAreaElement | null
        if (activeEl && typeof activeEl.selectionStart === 'number') {
          liveCursor = activeEl.selectionStart ?? cursorPositionRef.current
          liveCursor = activeEl.selectionStart ?? cursorPosition
          if ('value' in activeEl && typeof activeEl.value === 'string') {
            liveValue = activeEl.value
          }
@@ -1806,7 +1805,7 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
      onSelect(newValue)
      onClose?.()
    },
    [workflowVariables, onSelect, onClose, getMergedSubBlocks]
    [inputValue, cursorPosition, workflowVariables, onSelect, onClose, getMergedSubBlocks]
  )
  handleTagSelectRef.current = handleTagSelect
@@ -1878,6 +1877,9 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
}, },
registerFolder: (folderId, folderTitle, baseTag, group) => { registerFolder: (folderId, folderTitle, baseTag, group) => {
baseFolderRef.current = { id: folderId, title: folderTitle, baseTag, group } baseFolderRef.current = { id: folderId, title: folderTitle, baseTag, group }
if (scrollAreaRef.current) {
scrollAreaRef.current.scrollTop = 0
}
}, },
}), }),
[nestedPath] [nestedPath]
@@ -1890,9 +1892,13 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
} }
}, [visible]) }, [visible])
useEffect(() => setSelectedIndex(0), [searchTerm])
useEffect(() => {
  setSelectedIndex(0)
}, [flatTagList.length])

useEffect(() => {
  if (selectedIndex >= flatTagList.length) {
    setSelectedIndex(Math.max(0, flatTagList.length - 1))
  }
}, [flatTagList.length, selectedIndex])
useEffect(() => { useEffect(() => {
if (visible) { if (visible) {
@@ -1911,28 +1917,20 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
} }
}, [visible, onClose]) }, [visible, onClose])
/**
* Memoized caret position and side calculation.
* getCaretViewportPosition does DOM manipulation, so we avoid calling it on every render.
*/
const { caretViewport, side } = useMemo(() => {
const inputElement = inputRef?.current
if (!inputElement) {
return { caretViewport: { left: 0, top: 0 }, side: 'bottom' as const }
}
const viewport = getCaretViewportPosition(inputElement, cursorPosition, inputValue)
const margin = 8
const spaceAbove = viewport.top - margin
const spaceBelow = window.innerHeight - viewport.top - margin
const computedSide: 'top' | 'bottom' = spaceBelow >= spaceAbove ? 'bottom' : 'top'
return { caretViewport: viewport, side: computedSide }
}, [cursorPosition, inputValue, inputRef])
if (!visible || tags.length === 0 || flatTagList.length === 0) return null if (!visible || tags.length === 0 || flatTagList.length === 0) return null
const inputElement = inputRef?.current const inputElement = inputRef?.current
let caretViewport = { left: 0, top: 0 }
let side: 'top' | 'bottom' = 'bottom'
if (inputElement) {
caretViewport = getCaretViewportPosition(inputElement, cursorPosition, inputValue)
const margin = 8
const spaceAbove = caretViewport.top - margin
const spaceBelow = window.innerHeight - caretViewport.top - margin
side = spaceBelow >= spaceAbove ? 'bottom' : 'top'
}
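Worked numbers for the side choice, assuming a hypothetical 800px-tall viewport with the caret 600px from the top and the 8px margin used above:

const margin = 8
const caretTop = 600
const viewportHeight = 800
const spaceAbove = caretTop - margin // 592
const spaceBelow = viewportHeight - caretTop - margin // 192
const side = spaceBelow >= spaceAbove ? 'bottom' : 'top' // 'top', since there is more room above the caret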
return ( return (
<NestedNavigationContext.Provider value={nestedNavigationValue}> <NestedNavigationContext.Provider value={nestedNavigationValue}>
@@ -1958,11 +1956,6 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
flatTagList={flatTagList} flatTagList={flatTagList}
nestedBlockTagGroups={nestedBlockTagGroups} nestedBlockTagGroups={nestedBlockTagGroups}
handleTagSelect={handleTagSelect} handleTagSelect={handleTagSelect}
onFolderEnter={() => {
if (scrollAreaRef.current) {
scrollAreaRef.current.scrollTop = 0
}
}}
/> />
<PopoverContent <PopoverContent
maxHeight={240} maxHeight={240}
@@ -1991,7 +1984,7 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
</PopoverSection> </PopoverSection>
{variableTags.map((tag: string) => { {variableTags.map((tag: string) => {
const variableInfo = variableInfoMap?.[tag] || null const variableInfo = variableInfoMap?.[tag] || null
const globalIndex = flatTagIndexMap.get(tag) ?? -1 const globalIndex = flatTagList.findIndex((item) => item.tag === tag)
return ( return (
<VariableTagItem <VariableTagItem
@@ -2034,7 +2027,7 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
const rootTagFromTags = group.tags.find((tag) => tag === normalizedBlockName) const rootTagFromTags = group.tags.find((tag) => tag === normalizedBlockName)
const rootTag = rootTagFromTags || normalizedBlockName const rootTag = rootTagFromTags || normalizedBlockName
const rootTagGlobalIndex = flatTagIndexMap.get(rootTag) ?? -1 const rootTagGlobalIndex = flatTagList.findIndex((item) => item.tag === rootTag)
return ( return (
<div key={group.blockId}> <div key={group.blockId}>
@@ -2061,7 +2054,6 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
nestedTag={nestedTag} nestedTag={nestedTag}
group={group} group={group}
flatTagList={flatTagList} flatTagList={flatTagList}
flatTagIndexMap={flatTagIndexMap}
selectedIndex={selectedIndex} selectedIndex={selectedIndex}
setSelectedIndex={setSelectedIndex} setSelectedIndex={setSelectedIndex}
handleTagSelect={handleTagSelect} handleTagSelect={handleTagSelect}

View File

@@ -180,6 +180,20 @@ function resolveCustomToolFromReference(
return null return null
} }
/**
* Checks if a stored custom tool uses the reference-only format.
*
* @remarks
* Reference-only format means the tool has a customToolId but no inline code/schema,
* requiring resolution from the database at runtime.
*
* @param storedTool - The stored tool to check
* @returns `true` if the tool is a reference-only custom tool, `false` otherwise
*/
function isCustomToolReference(storedTool: StoredTool): boolean {
return storedTool.type === 'custom-tool' && !!storedTool.customToolId && !storedTool.code
}
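A self-contained sketch of the same check, using hypothetical tool values and modeling only the fields the guard reads:

type StoredToolLike = { type: string; customToolId?: string; code?: string }

const isReferenceOnly = (tool: StoredToolLike): boolean =>
  tool.type === 'custom-tool' && !!tool.customToolId && !tool.code

isReferenceOnly({ type: 'custom-tool', customToolId: 'tool_123' }) // true (must be resolved from the database at runtime)
isReferenceOnly({ type: 'custom-tool', customToolId: 'tool_123', code: 'return 1' }) // false (code travels inline)
isReferenceOnly({ type: 'builtin' }) // false (not a custom tool at all)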
/** /**
* Generic sync wrapper that synchronizes store values with local component state. * Generic sync wrapper that synchronizes store values with local component state.
* *
@@ -1141,6 +1155,21 @@ export const ToolInput = memo(function ToolInput({
return filterBlocks(allToolBlocks) return filterBlocks(allToolBlocks)
}, [filterBlocks]) }, [filterBlocks])
const customFilter = useCallback((value: string, search: string) => {
if (!search.trim()) return 1
const normalizedValue = value.toLowerCase()
const normalizedSearch = search.toLowerCase()
if (normalizedValue === normalizedSearch) return 1
if (normalizedValue.startsWith(normalizedSearch)) return 0.8
if (normalizedValue.includes(normalizedSearch)) return 0.6
return 0
}, [])
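For intuition, the same tiered scoring applied to made-up tool names (exact match ranks above prefix, prefix above substring, anything else is filtered out):

const score = (value: string, search: string): number => {
  if (!search.trim()) return 1
  const v = value.toLowerCase()
  const s = search.toLowerCase()
  if (v === s) return 1
  if (v.startsWith(s)) return 0.8
  if (v.includes(s)) return 0.6
  return 0
}

score('Slack', 'slack') // 1 (exact)
score('Slack Message', 'slack') // 0.8 (prefix)
score('Post to Slack', 'slack') // 0.6 (substring)
score('Gmail', 'slack') // 0 (no match)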
const hasBackfilledRef = useRef(false) const hasBackfilledRef = useRef(false)
useEffect(() => { useEffect(() => {
if ( if (

View File

@@ -1,261 +0,0 @@
/**
* @vitest-environment node
*/
import { describe, expect, it } from 'vitest'
import type { SubBlockConfig } from '@/blocks/types'
const isFieldRequired = (config: SubBlockConfig, subBlockValues?: Record<string, any>): boolean => {
if (!config.required) return false
if (typeof config.required === 'boolean') return config.required
const evalCond = (
cond: {
field: string
value: string | number | boolean | Array<string | number | boolean>
not?: boolean
and?: {
field: string
value: string | number | boolean | Array<string | number | boolean> | undefined
not?: boolean
}
},
values: Record<string, any>
): boolean => {
const fieldValue = values[cond.field]?.value
const condValue = cond.value
let match: boolean
if (Array.isArray(condValue)) {
match = condValue.includes(fieldValue)
} else {
match = fieldValue === condValue
}
if (cond.not) match = !match
if (cond.and) {
const andFieldValue = values[cond.and.field]?.value
const andCondValue = cond.and.value
let andMatch: boolean
if (Array.isArray(andCondValue)) {
andMatch = andCondValue.includes(andFieldValue)
} else {
andMatch = andFieldValue === andCondValue
}
if (cond.and.not) andMatch = !andMatch
match = match && andMatch
}
return match
}
const condition = typeof config.required === 'function' ? config.required() : config.required
return evalCond(condition, subBlockValues || {})
}
describe('isFieldRequired', () => {
describe('boolean required', () => {
it.concurrent('returns false when required is not set', () => {
const config = { id: 'test', type: 'short-input' } as SubBlockConfig
expect(isFieldRequired(config, {})).toBe(false)
})
it.concurrent('returns false when required is false', () => {
const config = { id: 'test', type: 'short-input', required: false } as SubBlockConfig
expect(isFieldRequired(config, {})).toBe(false)
})
it.concurrent('returns true when required is true', () => {
const config = { id: 'test', type: 'short-input', required: true } as SubBlockConfig
expect(isFieldRequired(config, {})).toBe(true)
})
})
describe('conditional required - simple value matching', () => {
it.concurrent('returns true when field value matches condition value', () => {
const config = {
id: 'test',
type: 'short-input',
required: { field: 'operation', value: 'create_booking' },
} as SubBlockConfig
const values = { operation: { value: 'create_booking' } }
expect(isFieldRequired(config, values)).toBe(true)
})
it.concurrent('returns false when field value does not match condition value', () => {
const config = {
id: 'test',
type: 'short-input',
required: { field: 'operation', value: 'create_booking' },
} as SubBlockConfig
const values = { operation: { value: 'cancel_booking' } }
expect(isFieldRequired(config, values)).toBe(false)
})
it.concurrent('returns false when field is missing', () => {
const config = {
id: 'test',
type: 'short-input',
required: { field: 'operation', value: 'create_booking' },
} as SubBlockConfig
expect(isFieldRequired(config, {})).toBe(false)
})
it.concurrent('returns false when field value is undefined', () => {
const config = {
id: 'test',
type: 'short-input',
required: { field: 'operation', value: 'create_booking' },
} as SubBlockConfig
const values = { operation: { value: undefined } }
expect(isFieldRequired(config, values)).toBe(false)
})
})
describe('conditional required - array value matching', () => {
it.concurrent('returns true when field value is in condition array', () => {
const config = {
id: 'test',
type: 'short-input',
required: { field: 'operation', value: ['create_booking', 'update_booking'] },
} as SubBlockConfig
const values = { operation: { value: 'create_booking' } }
expect(isFieldRequired(config, values)).toBe(true)
})
it.concurrent('returns false when field value is not in condition array', () => {
const config = {
id: 'test',
type: 'short-input',
required: { field: 'operation', value: ['create_booking', 'update_booking'] },
} as SubBlockConfig
const values = { operation: { value: 'cancel_booking' } }
expect(isFieldRequired(config, values)).toBe(false)
})
})
describe('conditional required - negation', () => {
it.concurrent('returns false when field matches but not is true', () => {
const config = {
id: 'test',
type: 'short-input',
required: { field: 'operation', value: 'create_booking', not: true },
} as SubBlockConfig
const values = { operation: { value: 'create_booking' } }
expect(isFieldRequired(config, values)).toBe(false)
})
it.concurrent('returns true when field does not match and not is true', () => {
const config = {
id: 'test',
type: 'short-input',
required: { field: 'operation', value: 'create_booking', not: true },
} as SubBlockConfig
const values = { operation: { value: 'cancel_booking' } }
expect(isFieldRequired(config, values)).toBe(true)
})
})
describe('conditional required - compound conditions', () => {
it.concurrent('returns true when both conditions match', () => {
const config = {
id: 'test',
type: 'short-input',
required: {
field: 'operation',
value: 'create_booking',
and: { field: 'hasEmail', value: true },
},
} as SubBlockConfig
const values = {
operation: { value: 'create_booking' },
hasEmail: { value: true },
}
expect(isFieldRequired(config, values)).toBe(true)
})
it.concurrent('returns false when first matches but and fails', () => {
const config = {
id: 'test',
type: 'short-input',
required: {
field: 'operation',
value: 'create_booking',
and: { field: 'hasEmail', value: true },
},
} as SubBlockConfig
const values = {
operation: { value: 'create_booking' },
hasEmail: { value: false },
}
expect(isFieldRequired(config, values)).toBe(false)
})
})
})
describe('condition + required equivalence', () => {
const conditionValue = { field: 'operation', value: 'calcom_create_booking' }
const configWithConditionalRequired = {
id: 'attendeeName',
type: 'short-input',
condition: conditionValue,
required: conditionValue,
} as SubBlockConfig
const configWithSimpleRequired = {
id: 'attendeeName',
type: 'short-input',
condition: conditionValue,
required: true,
} as SubBlockConfig
describe('when condition IS met (field is visible)', () => {
const valuesWhenVisible = { operation: { value: 'calcom_create_booking' } }
it.concurrent('conditional required returns true', () => {
expect(isFieldRequired(configWithConditionalRequired, valuesWhenVisible)).toBe(true)
})
it.concurrent('simple required returns true', () => {
expect(isFieldRequired(configWithSimpleRequired, valuesWhenVisible)).toBe(true)
})
it.concurrent('both configs produce the same result', () => {
const conditionalResult = isFieldRequired(configWithConditionalRequired, valuesWhenVisible)
const simpleResult = isFieldRequired(configWithSimpleRequired, valuesWhenVisible)
expect(conditionalResult).toBe(simpleResult)
})
})
describe('when condition is NOT met (field is hidden)', () => {
const valuesWhenHidden = { operation: { value: 'calcom_cancel_booking' } }
it.concurrent('conditional required returns false', () => {
expect(isFieldRequired(configWithConditionalRequired, valuesWhenHidden)).toBe(false)
})
it.concurrent('simple required returns true but field is hidden', () => {
expect(isFieldRequired(configWithSimpleRequired, valuesWhenHidden)).toBe(true)
})
it.concurrent('results differ but field is hidden when condition fails', () => {
const conditionalResult = isFieldRequired(configWithConditionalRequired, valuesWhenHidden)
const simpleResult = isFieldRequired(configWithSimpleRequired, valuesWhenHidden)
expect(conditionalResult).not.toBe(simpleResult)
})
})
describe('practical equivalence for user-facing behavior', () => {
it.concurrent('when field is visible both show required indicator', () => {
const valuesWhenVisible = { operation: { value: 'calcom_create_booking' } }
const showsRequiredIndicatorA = isFieldRequired(
configWithConditionalRequired,
valuesWhenVisible
)
const showsRequiredIndicatorB = isFieldRequired(configWithSimpleRequired, valuesWhenVisible)
expect(showsRequiredIndicatorA).toBe(true)
expect(showsRequiredIndicatorB).toBe(true)
})
})
})

View File

@@ -1,6 +1,6 @@
import { type JSX, type MouseEvent, memo, useCallback, useRef, useState } from 'react' import { type JSX, type MouseEvent, memo, useRef, useState } from 'react'
import { isEqual } from 'lodash' import { isEqual } from 'lodash'
import { AlertTriangle, ArrowLeftRight, ArrowUp, Check, Clipboard } from 'lucide-react' import { AlertTriangle, ArrowLeftRight, ArrowUp } from 'lucide-react'
import { Button, Input, Label, Tooltip } from '@/components/emcn/components' import { Button, Input, Label, Tooltip } from '@/components/emcn/components'
import { cn } from '@/lib/core/utils/cn' import { cn } from '@/lib/core/utils/cn'
import type { FieldDiffStatus } from '@/lib/workflows/diff/types' import type { FieldDiffStatus } from '@/lib/workflows/diff/types'
@@ -44,7 +44,6 @@ import {
} from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components' } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components'
import { useDependsOnGate } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-depends-on-gate' import { useDependsOnGate } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-depends-on-gate'
import type { SubBlockConfig } from '@/blocks/types' import type { SubBlockConfig } from '@/blocks/types'
import { useWebhookManagement } from '@/hooks/use-webhook-management'
/** /**
* Interface for wand control handlers exposed by sub-block inputs * Interface for wand control handlers exposed by sub-block inputs
@@ -196,12 +195,7 @@ const renderLabel = (
disabled?: boolean disabled?: boolean
onToggle?: () => void onToggle?: () => void
}, },
canonicalToggleIsDisabled?: boolean, canonicalToggleIsDisabled?: boolean
copyState?: {
showCopyButton: boolean
copied: boolean
onCopy: () => void
}
): JSX.Element | null => { ): JSX.Element | null => {
if (config.type === 'switch') return null if (config.type === 'switch') return null
if (!config.title) return null if (!config.title) return null
@@ -209,7 +203,6 @@ const renderLabel = (
const required = isFieldRequired(config, subBlockValues) const required = isFieldRequired(config, subBlockValues)
const showWand = wandState?.isWandEnabled && !wandState.isPreview && !wandState.disabled const showWand = wandState?.isWandEnabled && !wandState.isPreview && !wandState.disabled
const showCanonicalToggle = !!canonicalToggle && !wandState?.isPreview const showCanonicalToggle = !!canonicalToggle && !wandState?.isPreview
const showCopy = copyState?.showCopyButton && !wandState?.isPreview
const canonicalToggleDisabledResolved = canonicalToggleIsDisabled ?? canonicalToggle?.disabled const canonicalToggleDisabledResolved = canonicalToggleIsDisabled ?? canonicalToggle?.disabled
return ( return (
@@ -234,27 +227,6 @@ const renderLabel = (
)} )}
</Label> </Label>
<div className='flex items-center gap-[6px]'> <div className='flex items-center gap-[6px]'>
{showCopy && (
<Tooltip.Root>
<Tooltip.Trigger asChild>
<button
type='button'
onClick={copyState.onCopy}
className='-my-1 flex h-5 w-5 items-center justify-center'
aria-label='Copy value'
>
{copyState.copied ? (
<Check className='h-3 w-3 text-green-500' />
) : (
<Clipboard className='h-3 w-3 text-muted-foreground' />
)}
</button>
</Tooltip.Trigger>
<Tooltip.Content side='top'>
<p>{copyState.copied ? 'Copied!' : 'Copy'}</p>
</Tooltip.Content>
</Tooltip.Root>
)}
{showWand && ( {showWand && (
<> <>
{!wandState.isSearchActive ? ( {!wandState.isSearchActive ? (
@@ -413,18 +385,9 @@ function SubBlockComponent({
const [isValidJson, setIsValidJson] = useState(true) const [isValidJson, setIsValidJson] = useState(true)
const [isSearchActive, setIsSearchActive] = useState(false) const [isSearchActive, setIsSearchActive] = useState(false)
const [searchQuery, setSearchQuery] = useState('') const [searchQuery, setSearchQuery] = useState('')
const [copied, setCopied] = useState(false)
const searchInputRef = useRef<HTMLInputElement>(null) const searchInputRef = useRef<HTMLInputElement>(null)
const wandControlRef = useRef<WandControlHandlers | null>(null) const wandControlRef = useRef<WandControlHandlers | null>(null)
// Use webhook management hook when config has useWebhookUrl enabled
const webhookManagement = useWebhookManagement({
blockId,
triggerId: undefined,
isPreview,
useWebhookUrl: config.useWebhookUrl,
})
const handleMouseDown = (e: MouseEvent<HTMLDivElement>): void => { const handleMouseDown = (e: MouseEvent<HTMLDivElement>): void => {
e.stopPropagation() e.stopPropagation()
} }
@@ -435,18 +398,6 @@ function SubBlockComponent({
const isWandEnabled = config.wandConfig?.enabled ?? false const isWandEnabled = config.wandConfig?.enabled ?? false
/**
* Handles copying the webhook URL to clipboard.
*/
const handleCopy = useCallback(() => {
const textToCopy = webhookManagement?.webhookUrl
if (textToCopy) {
navigator.clipboard.writeText(textToCopy)
setCopied(true)
setTimeout(() => setCopied(false), 2000)
}
}, [webhookManagement?.webhookUrl])
/** /**
* Handles wand icon click to activate inline prompt mode. * Handles wand icon click to activate inline prompt mode.
* Focuses the input after a brief delay to ensure DOM is ready. * Focuses the input after a brief delay to ensure DOM is ready.
@@ -531,6 +482,7 @@ function SubBlockComponent({
placeholder={config.placeholder} placeholder={config.placeholder}
password={config.password} password={config.password}
readOnly={config.readOnly} readOnly={config.readOnly}
showCopyButton={config.showCopyButton}
useWebhookUrl={config.useWebhookUrl} useWebhookUrl={config.useWebhookUrl}
config={config} config={config}
isPreview={isPreview} isPreview={isPreview}
@@ -1027,12 +979,7 @@ function SubBlockComponent({
searchInputRef, searchInputRef,
}, },
canonicalToggle, canonicalToggle,
Boolean(canonicalToggle?.disabled || disabled || isPreview), Boolean(canonicalToggle?.disabled || disabled || isPreview)
{
showCopyButton: Boolean(config.showCopyButton && config.useWebhookUrl),
copied,
onCopy: handleCopy,
}
)} )}
{renderInput()} {renderInput()}
</div> </div>

View File

@@ -1,121 +0,0 @@
'use client'
import { memo } from 'react'
import clsx from 'clsx'
import { Filter } from 'lucide-react'
import {
Button,
Popover,
PopoverContent,
PopoverDivider,
PopoverItem,
PopoverScrollArea,
PopoverSection,
PopoverTrigger,
} from '@/components/emcn'
import type {
BlockInfo,
TerminalFilters,
} from '@/app/workspace/[workspaceId]/w/[workflowId]/components/terminal/types'
import { getBlockIcon } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/terminal/utils'
/**
* Props for the FilterPopover component
*/
export interface FilterPopoverProps {
open: boolean
onOpenChange: (open: boolean) => void
filters: TerminalFilters
toggleStatus: (status: 'error' | 'info') => void
toggleBlock: (blockId: string) => void
uniqueBlocks: BlockInfo[]
hasActiveFilters: boolean
}
/**
* Filter popover component used in terminal header and output panel
*/
export const FilterPopover = memo(function FilterPopover({
open,
onOpenChange,
filters,
toggleStatus,
toggleBlock,
uniqueBlocks,
hasActiveFilters,
}: FilterPopoverProps) {
return (
<Popover open={open} onOpenChange={onOpenChange} size='sm'>
<PopoverTrigger asChild>
<Button
variant='ghost'
className='!p-1.5 -m-1.5'
onClick={(e) => e.stopPropagation()}
aria-label='Filters'
>
<Filter
className={clsx('h-3 w-3', hasActiveFilters && 'text-[var(--brand-secondary)]')}
/>
</Button>
</PopoverTrigger>
<PopoverContent
side='top'
align='end'
sideOffset={4}
onClick={(e) => e.stopPropagation()}
minWidth={160}
maxWidth={220}
maxHeight={300}
>
<PopoverSection>Status</PopoverSection>
<PopoverItem
active={filters.statuses.has('error')}
showCheck={filters.statuses.has('error')}
onClick={() => toggleStatus('error')}
>
<div
className='h-[6px] w-[6px] rounded-[2px]'
style={{ backgroundColor: 'var(--text-error)' }}
/>
<span className='flex-1'>Error</span>
</PopoverItem>
<PopoverItem
active={filters.statuses.has('info')}
showCheck={filters.statuses.has('info')}
onClick={() => toggleStatus('info')}
>
<div
className='h-[6px] w-[6px] rounded-[2px]'
style={{ backgroundColor: 'var(--terminal-status-info-color)' }}
/>
<span className='flex-1'>Info</span>
</PopoverItem>
{uniqueBlocks.length > 0 && (
<>
<PopoverDivider className='my-[4px]' />
<PopoverSection className='!mt-0'>Blocks</PopoverSection>
<PopoverScrollArea className='max-h-[100px]'>
{uniqueBlocks.map((block) => {
const BlockIcon = getBlockIcon(block.blockType)
const isSelected = filters.blockIds.has(block.blockId)
return (
<PopoverItem
key={block.blockId}
active={isSelected}
showCheck={isSelected}
onClick={() => toggleBlock(block.blockId)}
>
{BlockIcon && <BlockIcon className='h-3 w-3' />}
<span className='flex-1'>{block.blockName}</span>
</PopoverItem>
)
})}
</PopoverScrollArea>
</>
)}
</PopoverContent>
</Popover>
)
})

View File

@@ -1 +0,0 @@
export { FilterPopover, type FilterPopoverProps } from './filter-popover'

View File

@@ -1,5 +1,2 @@
export { FilterPopover, type FilterPopoverProps } from './filter-popover' export { LogRowContextMenu } from './log-row-context-menu'
export { LogRowContextMenu, type LogRowContextMenuProps } from './log-row-context-menu' export { OutputContextMenu } from './output-context-menu'
export { OutputPanel, type OutputPanelProps } from './output-panel'
export { RunningBadge, StatusDisplay, type StatusDisplayProps } from './status-display'
export { ToggleButton, type ToggleButtonProps } from './toggle-button'

View File

@@ -1,6 +1,6 @@
'use client' 'use client'
import { memo, type RefObject } from 'react' import type { RefObject } from 'react'
import { import {
Popover, Popover,
PopoverAnchor, PopoverAnchor,
@@ -8,13 +8,20 @@ import {
PopoverDivider, PopoverDivider,
PopoverItem, PopoverItem,
} from '@/components/emcn' } from '@/components/emcn'
import type {
ContextMenuPosition,
TerminalFilters,
} from '@/app/workspace/[workspaceId]/w/[workflowId]/components/terminal/types'
import type { ConsoleEntry } from '@/stores/terminal' import type { ConsoleEntry } from '@/stores/terminal'
export interface LogRowContextMenuProps {

interface ContextMenuPosition {
  x: number
  y: number
}

interface TerminalFilters {
  blockIds: Set<string>
  statuses: Set<'error' | 'info'>
  runIds: Set<string>
}

interface LogRowContextMenuProps {
isOpen: boolean isOpen: boolean
position: ContextMenuPosition position: ContextMenuPosition
menuRef: RefObject<HTMLDivElement | null> menuRef: RefObject<HTMLDivElement | null>
@@ -23,16 +30,19 @@ export interface LogRowContextMenuProps {
filters: TerminalFilters filters: TerminalFilters
onFilterByBlock: (blockId: string) => void onFilterByBlock: (blockId: string) => void
onFilterByStatus: (status: 'error' | 'info') => void onFilterByStatus: (status: 'error' | 'info') => void
onFilterByRunId: (runId: string) => void
onCopyRunId: (runId: string) => void onCopyRunId: (runId: string) => void
onClearFilters: () => void
onClearConsole: () => void onClearConsole: () => void
onFixInCopilot: (entry: ConsoleEntry) => void onFixInCopilot: (entry: ConsoleEntry) => void
hasActiveFilters: boolean
} }
/** /**
* Context menu for terminal log rows (left side). * Context menu for terminal log rows (left side).
* Displays filtering options based on the selected row's properties. * Displays filtering options based on the selected row's properties.
*/ */
export const LogRowContextMenu = memo(function LogRowContextMenu({ export function LogRowContextMenu({
isOpen, isOpen,
position, position,
menuRef, menuRef,
@@ -41,15 +51,19 @@ export const LogRowContextMenu = memo(function LogRowContextMenu({
filters, filters,
onFilterByBlock, onFilterByBlock,
onFilterByStatus, onFilterByStatus,
onFilterByRunId,
onCopyRunId, onCopyRunId,
onClearFilters,
onClearConsole, onClearConsole,
onFixInCopilot, onFixInCopilot,
hasActiveFilters,
}: LogRowContextMenuProps) { }: LogRowContextMenuProps) {
const hasRunId = entry?.executionId != null const hasRunId = entry?.executionId != null
const isBlockFiltered = entry ? filters.blockIds.has(entry.blockId) : false const isBlockFiltered = entry ? filters.blockIds.has(entry.blockId) : false
const entryStatus = entry?.success ? 'info' : 'error' const entryStatus = entry?.success ? 'info' : 'error'
const isStatusFiltered = entry ? filters.statuses.has(entryStatus) : false const isStatusFiltered = entry ? filters.statuses.has(entryStatus) : false
const isRunIdFiltered = entry?.executionId ? filters.runIds.has(entry.executionId) : false
return ( return (
<Popover <Popover
@@ -120,11 +134,34 @@ export const LogRowContextMenu = memo(function LogRowContextMenu({
> >
Filter by Status Filter by Status
</PopoverItem> </PopoverItem>
{hasRunId && (
<PopoverItem
showCheck={isRunIdFiltered}
onClick={() => {
onFilterByRunId(entry.executionId!)
onClose()
}}
>
Filter by Run ID
</PopoverItem>
)}
</> </>
)} )}
{/* Clear filters */}
{hasActiveFilters && (
<PopoverItem
onClick={() => {
onClearFilters()
onClose()
}}
>
Clear All Filters
</PopoverItem>
)}
{/* Destructive action */} {/* Destructive action */}
{entry && <PopoverDivider />} {(entry || hasActiveFilters) && <PopoverDivider />}
<PopoverItem <PopoverItem
onClick={() => { onClick={() => {
onClearConsole() onClearConsole()
@@ -136,4 +173,4 @@ export const LogRowContextMenu = memo(function LogRowContextMenu({
</PopoverContent> </PopoverContent>
</Popover> </Popover>
) )
}) }

View File

@@ -1 +0,0 @@
export { LogRowContextMenu, type LogRowContextMenuProps } from './log-row-context-menu'

View File

@@ -1,6 +1,6 @@
'use client' 'use client'
import { memo, type RefObject } from 'react' import type { RefObject } from 'react'
import { import {
Popover, Popover,
PopoverAnchor, PopoverAnchor,
@@ -8,9 +8,13 @@ import {
PopoverDivider, PopoverDivider,
PopoverItem, PopoverItem,
} from '@/components/emcn' } from '@/components/emcn'
import type { ContextMenuPosition } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/terminal/types'
export interface OutputContextMenuProps {

interface ContextMenuPosition {
  x: number
  y: number
}

interface OutputContextMenuProps {
isOpen: boolean isOpen: boolean
position: ContextMenuPosition position: ContextMenuPosition
menuRef: RefObject<HTMLDivElement | null> menuRef: RefObject<HTMLDivElement | null>
@@ -18,8 +22,6 @@ export interface OutputContextMenuProps {
onCopySelection: () => void onCopySelection: () => void
onCopyAll: () => void onCopyAll: () => void
onSearch: () => void onSearch: () => void
structuredView: boolean
onToggleStructuredView: () => void
wrapText: boolean wrapText: boolean
onToggleWrap: () => void onToggleWrap: () => void
openOnRun: boolean openOnRun: boolean
@@ -32,7 +34,7 @@ export interface OutputContextMenuProps {
* Context menu for terminal output panel (right side). * Context menu for terminal output panel (right side).
* Displays copy, search, and display options for the code viewer. * Displays copy, search, and display options for the code viewer.
*/ */
export const OutputContextMenu = memo(function OutputContextMenu({ export function OutputContextMenu({
isOpen, isOpen,
position, position,
menuRef, menuRef,
@@ -40,8 +42,6 @@ export const OutputContextMenu = memo(function OutputContextMenu({
onCopySelection, onCopySelection,
onCopyAll, onCopyAll,
onSearch, onSearch,
structuredView,
onToggleStructuredView,
wrapText, wrapText,
onToggleWrap, onToggleWrap,
openOnRun, openOnRun,
@@ -96,9 +96,6 @@ export const OutputContextMenu = memo(function OutputContextMenu({
{/* Display settings - toggles don't close menu */} {/* Display settings - toggles don't close menu */}
<PopoverDivider /> <PopoverDivider />
<PopoverItem showCheck={structuredView} onClick={onToggleStructuredView}>
Structured View
</PopoverItem>
<PopoverItem showCheck={wrapText} onClick={onToggleWrap}> <PopoverItem showCheck={wrapText} onClick={onToggleWrap}>
Wrap Text Wrap Text
</PopoverItem> </PopoverItem>
@@ -119,4 +116,4 @@ export const OutputContextMenu = memo(function OutputContextMenu({
</PopoverContent> </PopoverContent>
</Popover> </Popover>
) )
}) }

View File

@@ -1,913 +0,0 @@
'use client'
import type React from 'react'
import {
createContext,
memo,
useCallback,
useContext,
useEffect,
useMemo,
useRef,
useState,
} from 'react'
import { List, type RowComponentProps, useListRef } from 'react-window'
import { Badge, ChevronDown } from '@/components/emcn'
import { cn } from '@/lib/core/utils/cn'
type ValueType = 'null' | 'undefined' | 'array' | 'string' | 'number' | 'boolean' | 'object'
type BadgeVariant = 'green' | 'blue' | 'orange' | 'purple' | 'gray' | 'red'
interface NodeEntry {
key: string
value: unknown
path: string
}
/**
* Search context for structured output tree.
*/
interface SearchContextValue {
query: string
pathToMatchIndices: Map<string, number[]>
}
const SearchContext = createContext<SearchContextValue | null>(null)
/**
* Configuration for virtualized rendering.
*/
const CONFIG = {
ROW_HEIGHT: 22,
INDENT_PER_LEVEL: 12,
BASE_PADDING: 20,
MAX_SEARCH_DEPTH: 100,
OVERSCAN_COUNT: 10,
VIRTUALIZATION_THRESHOLD: 200,
} as const
const BADGE_VARIANTS: Record<ValueType, BadgeVariant> = {
string: 'green',
number: 'blue',
boolean: 'orange',
array: 'purple',
null: 'gray',
undefined: 'gray',
object: 'gray',
} as const
/**
* Styling constants matching the original non-virtualized implementation.
*/
const STYLES = {
row: 'group flex min-h-[22px] cursor-pointer items-center gap-[6px] rounded-[8px] px-[6px] -mx-[6px] hover:bg-[var(--surface-6)] dark:hover:bg-[var(--surface-5)]',
chevron:
'h-[8px] w-[8px] flex-shrink-0 text-[var(--text-tertiary)] transition-transform duration-100 group-hover:text-[var(--text-primary)]',
keyName:
'font-medium text-[13px] text-[var(--text-primary)] group-hover:text-[var(--text-primary)]',
badge: 'rounded-[4px] px-[4px] py-[0px] text-[11px]',
summary: 'text-[12px] text-[var(--text-tertiary)]',
indent:
'mt-[2px] ml-[3px] flex min-w-0 flex-col gap-[2px] border-[var(--border)] border-l pl-[9px]',
value: 'min-w-0 py-[2px] text-[13px] text-[var(--text-primary)]',
emptyValue: 'py-[2px] text-[13px] text-[var(--text-tertiary)]',
matchHighlight: 'bg-yellow-200/60 dark:bg-yellow-500/40',
currentMatchHighlight: 'bg-orange-400',
} as const
const EMPTY_MATCH_INDICES: number[] = []
function getTypeLabel(value: unknown): ValueType {
if (value === null) return 'null'
if (value === undefined) return 'undefined'
if (Array.isArray(value)) return 'array'
return typeof value as ValueType
}
function formatPrimitive(value: unknown): string {
if (value === null) return 'null'
if (value === undefined) return 'undefined'
return String(value)
}
function isPrimitive(value: unknown): value is null | undefined | string | number | boolean {
return value === null || value === undefined || typeof value !== 'object'
}
function isEmpty(value: unknown): boolean {
if (Array.isArray(value)) return value.length === 0
if (typeof value === 'object' && value !== null) return Object.keys(value).length === 0
return false
}
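Representative results for these helpers, derived from the definitions above (input values are invented):

getTypeLabel([1, 2]) // 'array'
getTypeLabel(null) // 'null'
getTypeLabel('hi') // 'string'
formatPrimitive(undefined) // 'undefined'
isPrimitive({ a: 1 }) // false (objects and arrays are not primitives here)
isEmpty([]) // true
isEmpty({ a: 1 }) // false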
function extractErrorMessage(data: unknown): string {
if (typeof data === 'string') return data
if (data instanceof Error) return data.message
if (typeof data === 'object' && data !== null && 'message' in data) {
return String((data as { message: unknown }).message)
}
return JSON.stringify(data, null, 2)
}
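Example inputs and the messages they produce (values are hypothetical):

extractErrorMessage('timed out') // 'timed out'
extractErrorMessage(new Error('boom')) // 'boom'
extractErrorMessage({ message: 'bad input', code: 400 }) // 'bad input'
extractErrorMessage({ code: 500 }) // '{\n  "code": 500\n}' (falls back to pretty-printed JSON)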
function buildEntries(value: unknown, basePath: string): NodeEntry[] {
if (Array.isArray(value)) {
return value.map((item, i) => ({ key: String(i), value: item, path: `${basePath}[${i}]` }))
}
return Object.entries(value as Record<string, unknown>).map(([k, v]) => ({
key: k,
value: v,
path: `${basePath}.${k}`,
}))
}
function getCollapsedSummary(value: unknown): string | null {
if (Array.isArray(value)) {
const len = value.length
return `${len} item${len !== 1 ? 's' : ''}`
}
if (typeof value === 'object' && value !== null) {
const count = Object.keys(value).length
return `${count} key${count !== 1 ? 's' : ''}`
}
return null
}
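Collapsed summaries for a few sample values:

getCollapsedSummary([1, 2, 3]) // '3 items'
getCollapsedSummary(['only']) // '1 item'
getCollapsedSummary({ a: 1, b: 2 }) // '2 keys'
getCollapsedSummary('hello') // null (primitives get no summary)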
function computeInitialPaths(data: unknown, isError: boolean): Set<string> {
if (isError) return new Set(['root.error'])
if (!data || typeof data !== 'object') return new Set()
const entries = Array.isArray(data)
? data.map((_, i) => `root[${i}]`)
: Object.keys(data).map((k) => `root.${k}`)
return new Set(entries)
}
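Initial expansion for a few hypothetical payloads:

computeInitialPaths({ output: { ok: true }, logs: [] }, false) // Set { 'root.output', 'root.logs' }
computeInitialPaths(['a', 'b'], false) // Set { 'root[0]', 'root[1]' }
computeInitialPaths('plain string', false) // Set {} (primitive roots start collapsed)
computeInitialPaths({ anything: 1 }, true) // Set { 'root.error' } (error view opens the error node)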
function getAncestorPaths(path: string): string[] {
const ancestors: string[] = []
let current = path
while (current.includes('.') || current.includes('[')) {
const splitPoint = Math.max(current.lastIndexOf('.'), current.lastIndexOf('['))
if (splitPoint <= 0) break
current = current.slice(0, splitPoint)
if (current !== 'root') ancestors.push(current)
}
return ancestors
}
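For example, walking up from a nested match path (the path itself is invented for illustration):

getAncestorPaths('root.data.items[2].name')
// ['root.data.items[2]', 'root.data.items', 'root.data'] (every ancestor except 'root' itself)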
function findTextMatches(text: string, query: string): Array<[number, number]> {
if (!query) return []
const matches: Array<[number, number]> = []
const lowerText = text.toLowerCase()
const lowerQuery = query.toLowerCase()
let pos = 0
while (pos < lowerText.length) {
const idx = lowerText.indexOf(lowerQuery, pos)
if (idx === -1) break
matches.push([idx, idx + query.length])
pos = idx + 1
}
return matches
}
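Sample matches, including the overlap behavior that comes from advancing pos by only one character:

findTextMatches('Hello World', 'o') // [[4, 5], [7, 8]]
findTextMatches('aaaa', 'aa') // [[0, 2], [1, 3], [2, 4]] (overlapping hits are counted)
findTextMatches('abc', '') // [] (an empty query matches nothing)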
function addPrimitiveMatches(value: unknown, path: string, query: string, matches: string[]): void {
const text = formatPrimitive(value)
const count = findTextMatches(text, query).length
for (let i = 0; i < count; i++) {
matches.push(path)
}
}
function collectAllMatchPaths(data: unknown, query: string, basePath: string, depth = 0): string[] {
if (!query || depth > CONFIG.MAX_SEARCH_DEPTH) return []
const matches: string[] = []
if (isPrimitive(data)) {
addPrimitiveMatches(data, `${basePath}.value`, query, matches)
return matches
}
for (const entry of buildEntries(data, basePath)) {
if (isPrimitive(entry.value)) {
addPrimitiveMatches(entry.value, entry.path, query, matches)
} else {
matches.push(...collectAllMatchPaths(entry.value, query, entry.path, depth + 1))
}
}
return matches
}
function buildPathToIndicesMap(matchPaths: string[]): Map<string, number[]> {
const map = new Map<string, number[]>()
matchPaths.forEach((path, globalIndex) => {
const existing = map.get(path)
if (existing) {
existing.push(globalIndex)
} else {
map.set(path, [globalIndex])
}
})
return map
}
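For instance, given three match paths where one path matches twice:

buildPathToIndicesMap(['root.a', 'root.b', 'root.a'])
// Map { 'root.a' => [0, 2], 'root.b' => [1] } (each path keeps its global match indices)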
/**
* Renders text with search highlights using segments.
*/
function renderHighlightedSegments(
text: string,
query: string,
matchIndices: number[],
currentMatchIndex: number,
path: string
): React.ReactNode {
if (!query || matchIndices.length === 0) return text
const textMatches = findTextMatches(text, query)
if (textMatches.length === 0) return text
const segments: React.ReactNode[] = []
let lastEnd = 0
textMatches.forEach(([start, end], i) => {
const globalIndex = matchIndices[i]
const isCurrent = globalIndex === currentMatchIndex
if (start > lastEnd) {
segments.push(<span key={`t-${path}-${start}`}>{text.slice(lastEnd, start)}</span>)
}
segments.push(
<mark
key={`m-${path}-${start}`}
data-search-match
data-match-index={globalIndex}
className={cn(
'rounded-sm',
isCurrent ? STYLES.currentMatchHighlight : STYLES.matchHighlight
)}
>
{text.slice(start, end)}
</mark>
)
lastEnd = end
})
if (lastEnd < text.length) {
segments.push(<span key={`t-${path}-${lastEnd}`}>{text.slice(lastEnd)}</span>)
}
return <>{segments}</>
}
interface HighlightedTextProps {
text: string
matchIndices: number[]
path: string
currentMatchIndex: number
}
/**
* Renders text with search highlights for non-virtualized mode.
* Accepts currentMatchIndex as prop to ensure re-render when it changes.
*/
const HighlightedText = memo(function HighlightedText({
text,
matchIndices,
path,
currentMatchIndex,
}: HighlightedTextProps) {
const searchContext = useContext(SearchContext)
if (!searchContext || matchIndices.length === 0) return <>{text}</>
return (
<>
{renderHighlightedSegments(text, searchContext.query, matchIndices, currentMatchIndex, path)}
</>
)
})
interface StructuredNodeProps {
name: string
value: unknown
path: string
expandedPaths: Set<string>
onToggle: (path: string) => void
wrapText: boolean
currentMatchIndex: number
isError?: boolean
}
/**
* Recursive node component for non-virtualized rendering.
* Preserves exact original styling with border-left tree lines.
*/
const StructuredNode = memo(function StructuredNode({
name,
value,
path,
expandedPaths,
onToggle,
wrapText,
currentMatchIndex,
isError = false,
}: StructuredNodeProps) {
const searchContext = useContext(SearchContext)
const type = getTypeLabel(value)
const isPrimitiveValue = isPrimitive(value)
const isEmptyValue = !isPrimitiveValue && isEmpty(value)
const isExpanded = expandedPaths.has(path)
const handleToggle = useCallback(() => onToggle(path), [onToggle, path])
const handleKeyDown = useCallback(
(e: React.KeyboardEvent) => {
if (e.key === 'Enter' || e.key === ' ') {
e.preventDefault()
handleToggle()
}
},
[handleToggle]
)
const childEntries = useMemo(
() => (isPrimitiveValue || isEmptyValue ? [] : buildEntries(value, path)),
[value, isPrimitiveValue, isEmptyValue, path]
)
const collapsedSummary = useMemo(
() => (isPrimitiveValue ? null : getCollapsedSummary(value)),
[value, isPrimitiveValue]
)
const badgeVariant = isError ? 'red' : BADGE_VARIANTS[type]
const valueText = isPrimitiveValue ? formatPrimitive(value) : ''
const matchIndices = searchContext?.pathToMatchIndices.get(path) ?? EMPTY_MATCH_INDICES
return (
<div className='flex min-w-0 flex-col'>
<div
className={STYLES.row}
onClick={handleToggle}
onKeyDown={handleKeyDown}
role='button'
tabIndex={0}
aria-expanded={isExpanded}
>
<span className={cn(STYLES.keyName, isError && 'text-[var(--text-error)]')}>{name}</span>
<Badge variant={badgeVariant} className={STYLES.badge}>
{type}
</Badge>
{!isExpanded && collapsedSummary && (
<span className={STYLES.summary}>{collapsedSummary}</span>
)}
<ChevronDown className={cn(STYLES.chevron, !isExpanded && '-rotate-90')} />
</div>
{isExpanded && (
<div className={STYLES.indent}>
{isPrimitiveValue ? (
<div
className={cn(
STYLES.value,
wrapText ? '[word-break:break-word]' : 'whitespace-nowrap'
)}
>
<HighlightedText
text={valueText}
matchIndices={matchIndices}
path={path}
currentMatchIndex={currentMatchIndex}
/>
</div>
) : isEmptyValue ? (
<div className={STYLES.emptyValue}>{Array.isArray(value) ? '[]' : '{}'}</div>
) : (
childEntries.map((entry) => (
<StructuredNode
key={entry.path}
name={entry.key}
value={entry.value}
path={entry.path}
expandedPaths={expandedPaths}
onToggle={onToggle}
wrapText={wrapText}
currentMatchIndex={currentMatchIndex}
/>
))
)}
</div>
)}
</div>
)
})
/**
* Flattened row for virtualization.
*/
interface FlatRow {
path: string
key: string
value: unknown
depth: number
type: 'header' | 'value' | 'empty'
valueType: ValueType
isExpanded: boolean
isError: boolean
collapsedSummary: string | null
displayText: string
matchIndices: number[]
}
/**
* Flattens the tree into rows for virtualization.
*/
function flattenTree(
data: unknown,
expandedPaths: Set<string>,
pathToMatchIndices: Map<string, number[]>,
isError: boolean
): FlatRow[] {
const rows: FlatRow[] = []
if (isError) {
const errorText = extractErrorMessage(data)
const isExpanded = expandedPaths.has('root.error')
rows.push({
path: 'root.error',
key: 'error',
value: errorText,
depth: 0,
type: 'header',
valueType: 'string',
isExpanded,
isError: true,
collapsedSummary: null,
displayText: '',
matchIndices: [],
})
if (isExpanded) {
rows.push({
path: 'root.error.value',
key: '',
value: errorText,
depth: 1,
type: 'value',
valueType: 'string',
isExpanded: false,
isError: true,
collapsedSummary: null,
displayText: errorText,
matchIndices: pathToMatchIndices.get('root.error') ?? [],
})
}
return rows
}
function processNode(key: string, value: unknown, path: string, depth: number): void {
const valueType = getTypeLabel(value)
const isPrimitiveValue = isPrimitive(value)
const isEmptyValue = !isPrimitiveValue && isEmpty(value)
const isExpanded = expandedPaths.has(path)
const collapsedSummary = isPrimitiveValue ? null : getCollapsedSummary(value)
rows.push({
path,
key,
value,
depth,
type: 'header',
valueType,
isExpanded,
isError: false,
collapsedSummary,
displayText: '',
matchIndices: [],
})
if (isExpanded) {
if (isPrimitiveValue) {
rows.push({
path: `${path}.value`,
key: '',
value,
depth: depth + 1,
type: 'value',
valueType,
isExpanded: false,
isError: false,
collapsedSummary: null,
displayText: formatPrimitive(value),
matchIndices: pathToMatchIndices.get(path) ?? [],
})
} else if (isEmptyValue) {
rows.push({
path: `${path}.empty`,
key: '',
value,
depth: depth + 1,
type: 'empty',
valueType,
isExpanded: false,
isError: false,
collapsedSummary: null,
displayText: Array.isArray(value) ? '[]' : '{}',
matchIndices: [],
})
} else {
for (const entry of buildEntries(value, path)) {
processNode(entry.key, entry.value, entry.path, depth + 1)
}
}
}
}
if (isPrimitive(data)) {
processNode('value', data, 'root.value', 0)
} else if (data && typeof data === 'object') {
for (const entry of buildEntries(data, 'root')) {
processNode(entry.key, entry.value, entry.path, 0)
}
}
return rows
}
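A tiny example of the flattened output, with fields abbreviated to the ones that drive rendering:

flattenTree({ a: 1 }, new Set(['root.a']), new Map(), false)
// [
//   { path: 'root.a', key: 'a', depth: 0, type: 'header', valueType: 'number', isExpanded: true, ... },
//   { path: 'root.a.value', depth: 1, type: 'value', displayText: '1', ... },
// ]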
/**
* Counts total visible rows for determining virtualization threshold.
*/
function countVisibleRows(data: unknown, expandedPaths: Set<string>, isError: boolean): number {
if (isError) return expandedPaths.has('root.error') ? 2 : 1
let count = 0
function countNode(value: unknown, path: string): void {
count++
if (!expandedPaths.has(path)) return
if (isPrimitive(value) || isEmpty(value)) {
count++
} else {
for (const entry of buildEntries(value, path)) {
countNode(entry.value, entry.path)
}
}
}
if (isPrimitive(data)) {
countNode(data, 'root.value')
} else if (data && typeof data === 'object') {
for (const entry of buildEntries(data, 'root')) {
countNode(entry.value, entry.path)
}
}
return count
}
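Row counts for a small object under different expansion states, which is what decides whether virtualization kicks in:

countVisibleRows({ a: 1, b: { c: 2 } }, new Set(['root.b']), false) // 3 (headers for a, b, and b.c)
countVisibleRows({ a: 1, b: { c: 2 } }, new Set(['root.b', 'root.b.c']), false) // 4 (plus c's value row)
countVisibleRows('boom', new Set(['root.error']), true) // 2 (error header plus its message row)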
interface VirtualizedRowProps {
rows: FlatRow[]
onToggle: (path: string) => void
wrapText: boolean
searchQuery: string
currentMatchIndex: number
}
/**
* Virtualized row component for large data sets.
*/
function VirtualizedRow({ index, style, ...props }: RowComponentProps<VirtualizedRowProps>) {
const { rows, onToggle, wrapText, searchQuery, currentMatchIndex } = props
const row = rows[index]
const paddingLeft = CONFIG.BASE_PADDING + row.depth * CONFIG.INDENT_PER_LEVEL
if (row.type === 'header') {
const badgeVariant = row.isError ? 'red' : BADGE_VARIANTS[row.valueType]
return (
<div style={{ ...style, paddingLeft }} data-row-index={index}>
<div
className={STYLES.row}
onClick={() => onToggle(row.path)}
onKeyDown={(e) => {
if (e.key === 'Enter' || e.key === ' ') {
e.preventDefault()
onToggle(row.path)
}
}}
role='button'
tabIndex={0}
aria-expanded={row.isExpanded}
>
<span className={cn(STYLES.keyName, row.isError && 'text-[var(--text-error)]')}>
{row.key}
</span>
<Badge variant={badgeVariant} className={STYLES.badge}>
{row.valueType}
</Badge>
{!row.isExpanded && row.collapsedSummary && (
<span className={STYLES.summary}>{row.collapsedSummary}</span>
)}
<ChevronDown className={cn(STYLES.chevron, !row.isExpanded && '-rotate-90')} />
</div>
</div>
)
}
if (row.type === 'empty') {
return (
<div style={{ ...style, paddingLeft }} data-row-index={index}>
<div className={STYLES.emptyValue}>{row.displayText}</div>
</div>
)
}
return (
<div style={{ ...style, paddingLeft }} data-row-index={index}>
<div
className={cn(
STYLES.value,
row.isError && 'text-[var(--text-error)]',
wrapText ? '[word-break:break-word]' : 'whitespace-nowrap'
)}
>
{renderHighlightedSegments(
row.displayText,
searchQuery,
row.matchIndices,
currentMatchIndex,
row.path
)}
</div>
</div>
)
}
export interface StructuredOutputProps {
data: unknown
wrapText?: boolean
isError?: boolean
isRunning?: boolean
className?: string
searchQuery?: string
currentMatchIndex?: number
onMatchCountChange?: (count: number) => void
contentRef?: React.RefObject<HTMLDivElement | null>
}
/**
* Renders structured data as nested collapsible blocks.
* Uses virtualization for large data sets (>200 visible rows) while
* preserving exact original styling for smaller data sets.
*/
export const StructuredOutput = memo(function StructuredOutput({
data,
wrapText = true,
isError = false,
isRunning = false,
className,
searchQuery,
currentMatchIndex = 0,
onMatchCountChange,
contentRef,
}: StructuredOutputProps) {
const [expandedPaths, setExpandedPaths] = useState<Set<string>>(() =>
computeInitialPaths(data, isError)
)
const prevDataRef = useRef(data)
const prevIsErrorRef = useRef(isError)
const internalRef = useRef<HTMLDivElement>(null)
const listRef = useListRef(null)
const [containerHeight, setContainerHeight] = useState(400)
const setContainerRef = useCallback(
(node: HTMLDivElement | null) => {
;(internalRef as React.MutableRefObject<HTMLDivElement | null>).current = node
if (contentRef) {
;(contentRef as React.MutableRefObject<HTMLDivElement | null>).current = node
}
},
[contentRef]
)
// Measure container height
useEffect(() => {
const container = internalRef.current?.parentElement
if (!container) return
const updateHeight = () => setContainerHeight(container.clientHeight)
updateHeight()
const resizeObserver = new ResizeObserver(updateHeight)
resizeObserver.observe(container)
return () => resizeObserver.disconnect()
}, [])
// Reset expanded paths when data changes
useEffect(() => {
if (prevDataRef.current !== data || prevIsErrorRef.current !== isError) {
prevDataRef.current = data
prevIsErrorRef.current = isError
setExpandedPaths(computeInitialPaths(data, isError))
}
}, [data, isError])
const allMatchPaths = useMemo(() => {
if (!searchQuery) return []
if (isError) {
const errorText = extractErrorMessage(data)
const count = findTextMatches(errorText, searchQuery).length
return Array(count).fill('root.error') as string[]
}
return collectAllMatchPaths(data, searchQuery, 'root')
}, [data, searchQuery, isError])
useEffect(() => {
onMatchCountChange?.(allMatchPaths.length)
}, [allMatchPaths.length, onMatchCountChange])
const pathToMatchIndices = useMemo(() => buildPathToIndicesMap(allMatchPaths), [allMatchPaths])
// Auto-expand to current match
useEffect(() => {
if (
allMatchPaths.length === 0 ||
currentMatchIndex < 0 ||
currentMatchIndex >= allMatchPaths.length
) {
return
}
const currentPath = allMatchPaths[currentMatchIndex]
const pathsToExpand = [currentPath, ...getAncestorPaths(currentPath)]
setExpandedPaths((prev) => {
if (pathsToExpand.every((p) => prev.has(p))) return prev
const next = new Set(prev)
pathsToExpand.forEach((p) => next.add(p))
return next
})
}, [currentMatchIndex, allMatchPaths])
const handleToggle = useCallback((path: string) => {
setExpandedPaths((prev) => {
const next = new Set(prev)
if (next.has(path)) {
next.delete(path)
} else {
next.add(path)
}
return next
})
}, [])
const rootEntries = useMemo<NodeEntry[]>(() => {
if (isPrimitive(data)) return [{ key: 'value', value: data, path: 'root.value' }]
return buildEntries(data, 'root')
}, [data])
const searchContextValue = useMemo<SearchContextValue | null>(() => {
if (!searchQuery) return null
return { query: searchQuery, pathToMatchIndices }
}, [searchQuery, pathToMatchIndices])
const visibleRowCount = useMemo(
() => countVisibleRows(data, expandedPaths, isError),
[data, expandedPaths, isError]
)
const useVirtualization = visibleRowCount > CONFIG.VIRTUALIZATION_THRESHOLD
const flatRows = useMemo(() => {
if (!useVirtualization) return []
return flattenTree(data, expandedPaths, pathToMatchIndices, isError)
}, [data, expandedPaths, pathToMatchIndices, isError, useVirtualization])
// Scroll to match (virtualized)
useEffect(() => {
if (!useVirtualization || allMatchPaths.length === 0 || !listRef.current) return
const currentPath = allMatchPaths[currentMatchIndex]
const targetPath = currentPath.endsWith('.value') ? currentPath : `${currentPath}.value`
const rowIndex = flatRows.findIndex((r) => r.path === targetPath || r.path === currentPath)
if (rowIndex !== -1) {
listRef.current.scrollToRow({ index: rowIndex, align: 'center' })
}
}, [currentMatchIndex, allMatchPaths, flatRows, listRef, useVirtualization])
// Scroll to match (non-virtualized)
useEffect(() => {
if (useVirtualization || allMatchPaths.length === 0) return
const rafId = requestAnimationFrame(() => {
const match = internalRef.current?.querySelector(
`[data-match-index="${currentMatchIndex}"]`
) as HTMLElement | null
match?.scrollIntoView({ block: 'center', behavior: 'smooth' })
})
return () => cancelAnimationFrame(rafId)
}, [currentMatchIndex, allMatchPaths.length, expandedPaths, useVirtualization])
const containerClass = cn('flex flex-col pl-[20px]', wrapText && 'overflow-x-hidden', className)
const virtualizedContainerClass = cn('relative', wrapText && 'overflow-x-hidden', className)
const listClass = wrapText ? 'overflow-x-hidden' : 'overflow-x-auto'
// Running state
if (isRunning && data === undefined) {
return (
<div ref={setContainerRef} className={containerClass}>
<div className={STYLES.row}>
<span className={STYLES.keyName}>running</span>
<Badge variant='green' className={STYLES.badge}>
Running
</Badge>
</div>
</div>
)
}
// Empty state
if (rootEntries.length === 0 && !isError) {
return (
<div ref={setContainerRef} className={containerClass}>
<span className={STYLES.emptyValue}>null</span>
</div>
)
}
// Virtualized rendering
if (useVirtualization) {
return (
<div
ref={setContainerRef}
className={virtualizedContainerClass}
style={{ height: containerHeight }}
>
<List
listRef={listRef}
defaultHeight={containerHeight}
rowCount={flatRows.length}
rowHeight={CONFIG.ROW_HEIGHT}
rowComponent={VirtualizedRow}
rowProps={{
rows: flatRows,
onToggle: handleToggle,
wrapText,
searchQuery: searchQuery ?? '',
currentMatchIndex,
}}
overscanCount={CONFIG.OVERSCAN_COUNT}
className={listClass}
/>
</div>
)
}
// Non-virtualized rendering (preserves exact original styling)
if (isError) {
return (
<SearchContext.Provider value={searchContextValue}>
<div ref={setContainerRef} className={containerClass}>
<StructuredNode
name='error'
value={extractErrorMessage(data)}
path='root.error'
expandedPaths={expandedPaths}
onToggle={handleToggle}
wrapText={wrapText}
currentMatchIndex={currentMatchIndex}
isError
/>
</div>
</SearchContext.Provider>
)
}
return (
<SearchContext.Provider value={searchContextValue}>
<div ref={setContainerRef} className={containerClass}>
{rootEntries.map((entry) => (
<StructuredNode
key={entry.path}
name={entry.key}
value={entry.value}
path={entry.path}
expandedPaths={expandedPaths}
onToggle={handleToggle}
wrapText={wrapText}
currentMatchIndex={currentMatchIndex}
/>
))}
</div>
</SearchContext.Provider>
)
})

View File

@@ -1,4 +0,0 @@
export { OutputContextMenu, type OutputContextMenuProps } from './components/output-context-menu'
export { StructuredOutput, type StructuredOutputProps } from './components/structured-output'
export type { OutputPanelProps } from './output-panel'
export { OutputPanel } from './output-panel'

View File

@@ -1,643 +0,0 @@
'use client'
import React, { useCallback, useEffect, useMemo, useRef, useState } from 'react'
import clsx from 'clsx'
import {
ArrowDown,
ArrowDownToLine,
ArrowUp,
Check,
Clipboard,
Database,
MoreHorizontal,
Palette,
Pause,
Search,
Trash2,
X,
} from 'lucide-react'
import Link from 'next/link'
import {
Button,
Code,
Input,
Popover,
PopoverContent,
PopoverItem,
PopoverTrigger,
Tooltip,
} from '@/components/emcn'
import { FilterPopover } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/terminal/components/filter-popover'
import { OutputContextMenu } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/terminal/components/output-panel/components/output-context-menu'
import { StructuredOutput } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/terminal/components/output-panel/components/structured-output'
import { ToggleButton } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/terminal/components/toggle-button'
import type {
BlockInfo,
TerminalFilters,
} from '@/app/workspace/[workspaceId]/w/[workflowId]/components/terminal/types'
import { useContextMenu } from '@/app/workspace/[workspaceId]/w/components/sidebar/hooks'
import { useCodeViewerFeatures } from '@/hooks/use-code-viewer'
import type { ConsoleEntry } from '@/stores/terminal'
import { useTerminalStore } from '@/stores/terminal'
interface OutputCodeContentProps {
code: string
language: 'javascript' | 'json'
wrapText: boolean
searchQuery: string | undefined
currentMatchIndex: number
onMatchCountChange: (count: number) => void
contentRef: React.RefObject<HTMLDivElement | null>
}
const OutputCodeContent = React.memo(function OutputCodeContent({
code,
language,
wrapText,
searchQuery,
currentMatchIndex,
onMatchCountChange,
contentRef,
}: OutputCodeContentProps) {
return (
<Code.Viewer
code={code}
showGutter
language={language}
className='m-0 min-h-full rounded-none border-0 bg-[var(--surface-1)] dark:bg-[var(--surface-1)]'
paddingLeft={8}
gutterStyle={{ backgroundColor: 'transparent' }}
wrapText={wrapText}
searchQuery={searchQuery}
currentMatchIndex={currentMatchIndex}
onMatchCountChange={onMatchCountChange}
contentRef={contentRef}
virtualized
showCollapseColumn={language === 'json'}
/>
)
})
/**
* Props for the OutputPanel component
* Store-backed settings (wrapText, openOnRun, structuredView, outputPanelWidth)
* are accessed directly from useTerminalStore to reduce prop drilling.
*/
export interface OutputPanelProps {
selectedEntry: ConsoleEntry
handleOutputPanelResizeMouseDown: (e: React.MouseEvent) => void
handleHeaderClick: () => void
isExpanded: boolean
expandToLastHeight: () => void
showInput: boolean
setShowInput: (show: boolean) => void
hasInputData: boolean
isPlaygroundEnabled: boolean
shouldShowTrainingButton: boolean
isTraining: boolean
handleTrainingClick: (e: React.MouseEvent) => void
showCopySuccess: boolean
handleCopy: () => void
filteredEntries: ConsoleEntry[]
handleExportConsole: (e: React.MouseEvent) => void
hasActiveFilters: boolean
handleClearConsole: (e: React.MouseEvent) => void
shouldShowCodeDisplay: boolean
outputDataStringified: string
outputData: unknown
handleClearConsoleFromMenu: () => void
filters: TerminalFilters
toggleBlock: (blockId: string) => void
toggleStatus: (status: 'error' | 'info') => void
uniqueBlocks: BlockInfo[]
}
/**
* Output panel component that manages its own search state.
* Accesses store-backed settings directly to reduce prop drilling.
*/
export const OutputPanel = React.memo(function OutputPanel({
selectedEntry,
handleOutputPanelResizeMouseDown,
handleHeaderClick,
isExpanded,
expandToLastHeight,
showInput,
setShowInput,
hasInputData,
isPlaygroundEnabled,
shouldShowTrainingButton,
isTraining,
handleTrainingClick,
showCopySuccess,
handleCopy,
filteredEntries,
handleExportConsole,
hasActiveFilters,
handleClearConsole,
shouldShowCodeDisplay,
outputDataStringified,
outputData,
handleClearConsoleFromMenu,
filters,
toggleBlock,
toggleStatus,
uniqueBlocks,
}: OutputPanelProps) {
// Access store-backed settings directly to reduce prop drilling
const outputPanelWidth = useTerminalStore((state) => state.outputPanelWidth)
const wrapText = useTerminalStore((state) => state.wrapText)
const setWrapText = useTerminalStore((state) => state.setWrapText)
const openOnRun = useTerminalStore((state) => state.openOnRun)
const setOpenOnRun = useTerminalStore((state) => state.setOpenOnRun)
const structuredView = useTerminalStore((state) => state.structuredView)
const setStructuredView = useTerminalStore((state) => state.setStructuredView)
const outputContentRef = useRef<HTMLDivElement>(null)
const [filtersOpen, setFiltersOpen] = useState(false)
const [outputOptionsOpen, setOutputOptionsOpen] = useState(false)
const {
isSearchActive: isOutputSearchActive,
searchQuery: outputSearchQuery,
setSearchQuery: setOutputSearchQuery,
matchCount,
currentMatchIndex,
activateSearch: activateOutputSearch,
closeSearch: closeOutputSearch,
goToNextMatch,
goToPreviousMatch,
handleMatchCountChange,
searchInputRef: outputSearchInputRef,
} = useCodeViewerFeatures({
contentRef: outputContentRef,
externalWrapText: wrapText,
onWrapTextChange: setWrapText,
})
// Context menu state for output panel
const [hasSelection, setHasSelection] = useState(false)
const [storedSelectionText, setStoredSelectionText] = useState('')
const {
isOpen: isOutputMenuOpen,
position: outputMenuPosition,
menuRef: outputMenuRef,
handleContextMenu: handleOutputContextMenu,
closeMenu: closeOutputMenu,
} = useContextMenu()
const handleOutputPanelContextMenu = useCallback(
(e: React.MouseEvent) => {
const selection = window.getSelection()
const selectionText = selection?.toString() || ''
setStoredSelectionText(selectionText)
setHasSelection(selectionText.length > 0)
handleOutputContextMenu(e)
},
[handleOutputContextMenu]
)
const handleCopySelection = useCallback(() => {
if (storedSelectionText) {
navigator.clipboard.writeText(storedSelectionText)
}
}, [storedSelectionText])
// Memoized callbacks to avoid inline arrow functions
const handleToggleStructuredView = useCallback(() => {
setStructuredView(!structuredView)
}, [structuredView, setStructuredView])
const handleToggleWrapText = useCallback(() => {
setWrapText(!wrapText)
}, [wrapText, setWrapText])
const handleToggleOpenOnRun = useCallback(() => {
setOpenOnRun(!openOnRun)
}, [openOnRun, setOpenOnRun])
const handleCopyClick = useCallback(
(e: React.MouseEvent) => {
e.stopPropagation()
handleCopy()
},
[handleCopy]
)
const handleSearchClick = useCallback(
(e: React.MouseEvent) => {
e.stopPropagation()
activateOutputSearch()
},
[activateOutputSearch]
)
const handleCloseSearchClick = useCallback(
(e: React.MouseEvent) => {
e.stopPropagation()
closeOutputSearch()
},
[closeOutputSearch]
)
const handleOutputButtonClick = useCallback(
(e: React.MouseEvent) => {
e.stopPropagation()
if (!isExpanded) {
expandToLastHeight()
}
if (showInput) setShowInput(false)
},
[isExpanded, expandToLastHeight, showInput, setShowInput]
)
const handleInputButtonClick = useCallback(
(e: React.MouseEvent) => {
e.stopPropagation()
if (!isExpanded) {
expandToLastHeight()
}
setShowInput(true)
},
[isExpanded, expandToLastHeight, setShowInput]
)
const handleToggleButtonClick = useCallback(
(e: React.MouseEvent) => {
e.stopPropagation()
handleHeaderClick()
},
[handleHeaderClick]
)
/**
* Track text selection state for context menu.
* Skip updates when the context menu is open to prevent the selection
* state from changing mid-click (which would disable the copy button).
*/
useEffect(() => {
const handleSelectionChange = () => {
if (isOutputMenuOpen) return
const selection = window.getSelection()
setHasSelection(Boolean(selection && selection.toString().length > 0))
}
document.addEventListener('selectionchange', handleSelectionChange)
return () => document.removeEventListener('selectionchange', handleSelectionChange)
}, [isOutputMenuOpen])
// Memoize the search query for structured output to avoid re-renders
const structuredSearchQuery = useMemo(
() => (isOutputSearchActive ? outputSearchQuery : undefined),
[isOutputSearchActive, outputSearchQuery]
)
return (
<>
<div
className='absolute top-0 right-0 bottom-0 flex flex-col border-[var(--border)] border-l bg-[var(--surface-1)]'
style={{ width: `${outputPanelWidth}px` }}
>
{/* Horizontal Resize Handle */}
<div
className='-ml-[4px] absolute top-0 bottom-0 left-0 z-20 w-[8px] cursor-ew-resize'
onMouseDown={handleOutputPanelResizeMouseDown}
role='separator'
aria-label='Resize output panel'
aria-orientation='vertical'
/>
{/* Header */}
<div
className='group flex h-[30px] flex-shrink-0 cursor-pointer items-center justify-between bg-[var(--surface-1)] pr-[16px] pl-[10px]'
onClick={handleHeaderClick}
>
<div className='flex items-center'>
<Button
variant='ghost'
className={clsx(
'px-[8px] py-[6px] text-[12px]',
!showInput ? '!text-[var(--text-primary)]' : '!text-[var(--text-tertiary)]'
)}
onClick={handleOutputButtonClick}
aria-label='Show output'
>
Output
</Button>
{hasInputData && (
<Button
variant='ghost'
className={clsx(
'px-[8px] py-[6px] text-[12px]',
showInput ? '!text-[var(--text-primary)]' : '!text-[var(--text-tertiary)]'
)}
onClick={handleInputButtonClick}
aria-label='Show input'
>
Input
</Button>
)}
</div>
<div className='flex flex-shrink-0 items-center gap-[8px]'>
{/* Unified filter popover */}
{filteredEntries.length > 0 && (
<FilterPopover
open={filtersOpen}
onOpenChange={setFiltersOpen}
filters={filters}
toggleStatus={toggleStatus}
toggleBlock={toggleBlock}
uniqueBlocks={uniqueBlocks}
hasActiveFilters={hasActiveFilters}
/>
)}
{isOutputSearchActive ? (
<Tooltip.Root>
<Tooltip.Trigger asChild>
<Button
variant='ghost'
onClick={handleCloseSearchClick}
aria-label='Close search'
className='!p-1.5 -m-1.5'
>
<X className='h-[12px] w-[12px]' />
</Button>
</Tooltip.Trigger>
<Tooltip.Content>
<span>Close search</span>
</Tooltip.Content>
</Tooltip.Root>
) : (
<Tooltip.Root>
<Tooltip.Trigger asChild>
<Button
variant='ghost'
onClick={handleSearchClick}
aria-label='Search in output'
className='!p-1.5 -m-1.5'
>
<Search className='h-[12px] w-[12px]' />
</Button>
</Tooltip.Trigger>
<Tooltip.Content>
<span>Search</span>
</Tooltip.Content>
</Tooltip.Root>
)}
{isPlaygroundEnabled && (
<Tooltip.Root>
<Tooltip.Trigger asChild>
<Link href='/playground'>
<Button
variant='ghost'
aria-label='Component Playground'
className='!p-1.5 -m-1.5'
>
<Palette className='h-[12px] w-[12px]' />
</Button>
</Link>
</Tooltip.Trigger>
<Tooltip.Content>
<span>Component Playground</span>
</Tooltip.Content>
</Tooltip.Root>
)}
{shouldShowTrainingButton && (
<Tooltip.Root>
<Tooltip.Trigger asChild>
<Button
variant='ghost'
onClick={handleTrainingClick}
aria-label={isTraining ? 'Stop training' : 'Train Copilot'}
className={clsx(
'!p-1.5 -m-1.5',
isTraining && 'text-orange-600 dark:text-orange-400'
)}
>
{isTraining ? (
<Pause className='h-[12px] w-[12px]' />
) : (
<Database className='h-[12px] w-[12px]' />
)}
</Button>
</Tooltip.Trigger>
<Tooltip.Content>
<span>{isTraining ? 'Stop Training' : 'Train Copilot'}</span>
</Tooltip.Content>
</Tooltip.Root>
)}
<Tooltip.Root>
<Tooltip.Trigger asChild>
<Button
variant='ghost'
onClick={handleCopyClick}
aria-label='Copy output'
className='!p-1.5 -m-1.5'
>
{showCopySuccess ? (
<Check className='h-[12px] w-[12px]' />
) : (
<Clipboard className='h-[12px] w-[12px]' />
)}
</Button>
</Tooltip.Trigger>
<Tooltip.Content>
<span>{showCopySuccess ? 'Copied' : 'Copy output'}</span>
</Tooltip.Content>
</Tooltip.Root>
{filteredEntries.length > 0 && (
<>
<Tooltip.Root>
<Tooltip.Trigger asChild>
<Button
variant='ghost'
onClick={handleExportConsole}
aria-label='Download console CSV'
className='!p-1.5 -m-1.5'
>
<ArrowDownToLine className='h-3 w-3' />
</Button>
</Tooltip.Trigger>
<Tooltip.Content>
<span>Download CSV</span>
</Tooltip.Content>
</Tooltip.Root>
<Tooltip.Root>
<Tooltip.Trigger asChild>
<Button
variant='ghost'
onClick={handleClearConsole}
aria-label='Clear console'
className='!p-1.5 -m-1.5'
>
<Trash2 className='h-3 w-3' />
</Button>
</Tooltip.Trigger>
<Tooltip.Content>
<Tooltip.Shortcut keys='⌘D'>Clear console</Tooltip.Shortcut>
</Tooltip.Content>
</Tooltip.Root>
</>
)}
<Popover open={outputOptionsOpen} onOpenChange={setOutputOptionsOpen} size='sm'>
<PopoverTrigger asChild>
<Button
variant='ghost'
onClick={(e) => e.stopPropagation()}
aria-label='Terminal options'
className='!p-1.5 -m-1.5'
>
<MoreHorizontal className='h-3.5 w-3.5' />
</Button>
</PopoverTrigger>
<PopoverContent
side='bottom'
align='end'
sideOffset={4}
collisionPadding={0}
onClick={(e) => e.stopPropagation()}
style={{ minWidth: '140px', maxWidth: '160px' }}
className='gap-[2px]'
>
<PopoverItem
active={structuredView}
showCheck={structuredView}
onClick={handleToggleStructuredView}
>
<span>Structured view</span>
</PopoverItem>
<PopoverItem active={wrapText} showCheck={wrapText} onClick={handleToggleWrapText}>
<span>Wrap text</span>
</PopoverItem>
<PopoverItem
active={openOnRun}
showCheck={openOnRun}
onClick={handleToggleOpenOnRun}
>
<span>Open on run</span>
</PopoverItem>
</PopoverContent>
</Popover>
<ToggleButton isExpanded={isExpanded} onClick={handleToggleButtonClick} />
</div>
</div>
{/* Search Overlay */}
{isOutputSearchActive && (
<div
className='absolute top-[30px] right-[8px] z-30 flex h-[34px] items-center gap-[6px] rounded-b-[4px] border border-[var(--border)] border-t-0 bg-[var(--surface-1)] px-[6px] shadow-sm'
onClick={(e) => e.stopPropagation()}
data-toolbar-root
data-search-active='true'
>
<Input
ref={outputSearchInputRef}
type='text'
value={outputSearchQuery}
onChange={(e) => setOutputSearchQuery(e.target.value)}
placeholder='Search...'
className='mr-[2px] h-[23px] w-[94px] text-[12px]'
/>
<span
className={clsx(
'w-[58px] font-medium text-[11px]',
matchCount > 0 ? 'text-[var(--text-secondary)]' : 'text-[var(--text-tertiary)]'
)}
>
{matchCount > 0 ? `${currentMatchIndex + 1}/${matchCount}` : 'No results'}
</span>
<Button
variant='ghost'
onClick={goToPreviousMatch}
aria-label='Previous match'
className='!p-1.5 -m-1.5'
disabled={matchCount === 0}
>
<ArrowUp className='h-[12px] w-[12px]' />
</Button>
<Button
variant='ghost'
onClick={goToNextMatch}
aria-label='Next match'
className='!p-1.5 -m-1.5'
disabled={matchCount === 0}
>
<ArrowDown className='h-[12px] w-[12px]' />
</Button>
<Button
variant='ghost'
onClick={closeOutputSearch}
aria-label='Close search'
className='!p-1.5 -m-1.5'
>
<X className='h-[12px] w-[12px]' />
</Button>
</div>
)}
{/* Content */}
<div
className={clsx('flex-1 overflow-y-auto', !wrapText && 'overflow-x-auto')}
onContextMenu={handleOutputPanelContextMenu}
>
{shouldShowCodeDisplay ? (
<OutputCodeContent
code={selectedEntry.input.code}
language={(selectedEntry.input.language as 'javascript' | 'json') || 'javascript'}
wrapText={wrapText}
searchQuery={structuredSearchQuery}
currentMatchIndex={currentMatchIndex}
onMatchCountChange={handleMatchCountChange}
contentRef={outputContentRef}
/>
) : structuredView ? (
<StructuredOutput
data={outputData}
wrapText={wrapText}
isError={!showInput && Boolean(selectedEntry.error)}
isRunning={!showInput && Boolean(selectedEntry.isRunning)}
className='min-h-full'
searchQuery={structuredSearchQuery}
currentMatchIndex={currentMatchIndex}
onMatchCountChange={handleMatchCountChange}
contentRef={outputContentRef}
/>
) : (
<OutputCodeContent
code={outputDataStringified}
language='json'
wrapText={wrapText}
searchQuery={structuredSearchQuery}
currentMatchIndex={currentMatchIndex}
onMatchCountChange={handleMatchCountChange}
contentRef={outputContentRef}
/>
)}
</div>
</div>
{/* Output Panel Context Menu */}
<OutputContextMenu
isOpen={isOutputMenuOpen}
position={outputMenuPosition}
menuRef={outputMenuRef}
onClose={closeOutputMenu}
onCopySelection={handleCopySelection}
onCopyAll={handleCopy}
onSearch={activateOutputSearch}
structuredView={structuredView}
onToggleStructuredView={handleToggleStructuredView}
wrapText={wrapText}
onToggleWrap={handleToggleWrapText}
openOnRun={openOnRun}
onToggleOpenOnRun={handleToggleOpenOnRun}
onClearConsole={handleClearConsoleFromMenu}
hasSelection={hasSelection}
/>
</>
)
})

View File

@@ -1 +0,0 @@
export { RunningBadge, StatusDisplay, type StatusDisplayProps } from './status-display'

View File

@@ -1,43 +0,0 @@
'use client'
import { memo } from 'react'
import { Badge } from '@/components/emcn'
import { BADGE_STYLE } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/terminal/types'
/**
* Running badge component - displays a consistent "Running" indicator
*/
export const RunningBadge = memo(function RunningBadge() {
return (
<Badge variant='green' className={BADGE_STYLE}>
Running
</Badge>
)
})
/**
* Props for StatusDisplay component
*/
export interface StatusDisplayProps {
isRunning: boolean
isCanceled: boolean
formattedDuration: string
}
/**
* Reusable status display for terminal rows.
* Shows Running badge, 'canceled' text, or formatted duration.
*/
export const StatusDisplay = memo(function StatusDisplay({
isRunning,
isCanceled,
formattedDuration,
}: StatusDisplayProps) {
if (isRunning) {
return <RunningBadge />
}
if (isCanceled) {
return <>canceled</>
}
return <>{formattedDuration}</>
})

View File

@@ -1 +0,0 @@
export { ToggleButton, type ToggleButtonProps } from './toggle-button'

View File

@@ -1,33 +0,0 @@
'use client'
import type React from 'react'
import { memo } from 'react'
import clsx from 'clsx'
import { ChevronDown } from 'lucide-react'
import { Button } from '@/components/emcn'
export interface ToggleButtonProps {
isExpanded: boolean
onClick: (e: React.MouseEvent) => void
}
/**
* Toggle button component for terminal expand/collapse
*/
export const ToggleButton = memo(function ToggleButton({ isExpanded, onClick }: ToggleButtonProps) {
return (
<Button
variant='ghost'
className='!p-1.5 -m-1.5'
onClick={onClick}
aria-label='Toggle terminal'
>
<ChevronDown
className={clsx(
'h-3.5 w-3.5 flex-shrink-0 transition-transform duration-100',
!isExpanded && 'rotate-180'
)}
/>
</Button>
)
})

View File

@@ -1,4 +1,3 @@
export type { SortConfig, SortDirection, SortField, TerminalFilters } from '../types'
export { useOutputPanelResize } from './use-output-panel-resize'
export { useTerminalFilters } from './use-terminal-filters'
export { useTerminalResize } from './use-terminal-resize'

View File

@@ -1,7 +1,9 @@
import { useCallback, useEffect, useState } from 'react'
import { OUTPUT_PANEL_WIDTH, TERMINAL_BLOCK_COLUMN_WIDTH } from '@/stores/constants'
import { OUTPUT_PANEL_WIDTH } from '@/stores/constants'
import { useTerminalStore } from '@/stores/terminal'
const BLOCK_COLUMN_WIDTH = 240
export function useOutputPanelResize() {
const setOutputPanelWidth = useTerminalStore((state) => state.setOutputPanelWidth)
const [isResizing, setIsResizing] = useState(false)
@@ -23,7 +25,7 @@ export function useOutputPanelResize() {
const newWidth = window.innerWidth - e.clientX - panelWidth
const terminalWidth = window.innerWidth - sidebarWidth - panelWidth
const maxWidth = terminalWidth - TERMINAL_BLOCK_COLUMN_WIDTH
const maxWidth = terminalWidth - BLOCK_COLUMN_WIDTH
const clampedWidth = Math.max(OUTPUT_PANEL_WIDTH.MIN, Math.min(newWidth, maxWidth))
setOutputPanelWidth(clampedWidth)

View File

@@ -1,10 +1,26 @@
import { useCallback, useMemo, useState } from 'react'
import type {
SortConfig,
TerminalFilters,
} from '@/app/workspace/[workspaceId]/w/[workflowId]/components/terminal/types'
import type { ConsoleEntry } from '@/stores/terminal'
/**
* Sort configuration
*/
export type SortField = 'timestamp'
export type SortDirection = 'asc' | 'desc'
export interface SortConfig {
field: SortField
direction: SortDirection
}
/**
* Filter configuration state
*/
export interface TerminalFilters {
blockIds: Set<string>
statuses: Set<'error' | 'info'>
runIds: Set<string>
}
/**
* Custom hook to manage terminal filters and sorting.
* Provides filter state, sort state, and filtering/sorting logic for console entries.
@@ -15,6 +31,7 @@ export function useTerminalFilters() {
const [filters, setFilters] = useState<TerminalFilters>({
blockIds: new Set(),
statuses: new Set(),
runIds: new Set(),
})
const [sortConfig, setSortConfig] = useState<SortConfig>({
@@ -52,6 +69,21 @@ export function useTerminalFilters() {
})
}, [])
/**
* Toggles a run ID filter
*/
const toggleRunId = useCallback((runId: string) => {
setFilters((prev) => {
const newRunIds = new Set(prev.runIds)
if (newRunIds.has(runId)) {
newRunIds.delete(runId)
} else {
newRunIds.add(runId)
}
return { ...prev, runIds: newRunIds }
})
}, [])
/**
* Toggles sort direction between ascending and descending
*/
@@ -69,6 +101,7 @@ export function useTerminalFilters() {
setFilters({
blockIds: new Set(),
statuses: new Set(),
runIds: new Set(),
})
}, [])
@@ -76,7 +109,7 @@ export function useTerminalFilters() {
* Checks if any filters are active
*/
const hasActiveFilters = useMemo(() => {
return filters.blockIds.size > 0 || filters.statuses.size > 0
return filters.blockIds.size > 0 || filters.statuses.size > 0 || filters.runIds.size > 0
}, [filters])
/**
@@ -101,6 +134,14 @@ export function useTerminalFilters() {
if (!hasStatus) return false
}
// Run ID filter
if (
filters.runIds.size > 0 &&
(!entry.executionId || !filters.runIds.has(entry.executionId))
) {
return false
}
return true
})
}
@@ -123,6 +164,7 @@ export function useTerminalFilters() {
sortConfig,
toggleBlock,
toggleStatus,
toggleRunId,
toggleSort,
clearFilters,
hasActiveFilters,

View File

@@ -1,64 +0,0 @@
/**
* Terminal filter configuration state
*/
export interface TerminalFilters {
blockIds: Set<string>
statuses: Set<'error' | 'info'>
}
/**
* Context menu position for positioning floating menus
*/
export interface ContextMenuPosition {
x: number
y: number
}
/**
* Sort field options for terminal entries
*/
export type SortField = 'timestamp'
/**
* Sort direction options
*/
export type SortDirection = 'asc' | 'desc'
/**
* Sort configuration for terminal entries
*/
export interface SortConfig {
field: SortField
direction: SortDirection
}
/**
* Status type for console entries
*/
export type EntryStatus = 'error' | 'info'
/**
* Block information for filters
*/
export interface BlockInfo {
blockId: string
blockName: string
blockType: string
}
/**
* Common row styling classes for terminal components
*/
export const ROW_STYLES = {
base: 'group flex cursor-pointer items-center justify-between gap-[8px] rounded-[8px] px-[6px]',
selected: 'bg-[var(--surface-6)] dark:bg-[var(--surface-5)]',
hover: 'hover:bg-[var(--surface-6)] dark:hover:bg-[var(--surface-5)]',
nested:
'mt-[2px] ml-[3px] flex min-w-0 flex-col gap-[2px] border-[var(--border)] border-l pl-[9px]',
iconButton: '!p-1.5 -m-1.5',
} as const
/**
* Common badge styling for status badges
*/
export const BADGE_STYLE = 'rounded-[4px] px-[4px] py-[0px] text-[11px]'

View File

@@ -1,452 +0,0 @@
import type React from 'react'
import { RepeatIcon, SplitIcon } from 'lucide-react'
import { getBlock } from '@/blocks'
import { TERMINAL_BLOCK_COLUMN_WIDTH } from '@/stores/constants'
import type { ConsoleEntry } from '@/stores/terminal'
/**
* Subflow colors matching the subflow tool configs
*/
const SUBFLOW_COLORS = {
loop: '#2FB3FF',
parallel: '#FEE12B',
} as const
/**
* Retrieves the icon component for a given block type
*/
export function getBlockIcon(
blockType: string
): React.ComponentType<{ className?: string }> | null {
const blockConfig = getBlock(blockType)
if (blockConfig?.icon) {
return blockConfig.icon
}
if (blockType === 'loop') {
return RepeatIcon
}
if (blockType === 'parallel') {
return SplitIcon
}
return null
}
/**
* Gets the background color for a block type
*/
export function getBlockColor(blockType: string): string {
const blockConfig = getBlock(blockType)
if (blockConfig?.bgColor) {
return blockConfig.bgColor
}
// Use proper subflow colors matching the toolbar configs
if (blockType === 'loop') {
return SUBFLOW_COLORS.loop
}
if (blockType === 'parallel') {
return SUBFLOW_COLORS.parallel
}
return '#6b7280'
}
/**
* Formats duration from milliseconds to readable format
*/
export function formatDuration(ms?: number): string {
if (ms === undefined || ms === null) return '-'
if (ms < 1000) return `${ms}ms`
return `${(ms / 1000).toFixed(2)}s`
}
/**
* Determines if a keyboard event originated from a text-editable element
*/
export function isEventFromEditableElement(e: KeyboardEvent): boolean {
const target = e.target as HTMLElement | null
if (!target) return false
const isEditable = (el: HTMLElement | null): boolean => {
if (!el) return false
if (el instanceof HTMLInputElement) return true
if (el instanceof HTMLTextAreaElement) return true
if ((el as HTMLElement).isContentEditable) return true
const role = el.getAttribute('role')
if (role === 'textbox' || role === 'combobox') return true
return false
}
let el: HTMLElement | null = target
while (el) {
if (isEditable(el)) return true
el = el.parentElement
}
return false
}
/**
* Checks if a block type is a subflow (loop or parallel)
*/
export function isSubflowBlockType(blockType: string): boolean {
const lower = blockType?.toLowerCase() || ''
return lower === 'loop' || lower === 'parallel'
}
/**
* Node type for the tree structure
*/
export type EntryNodeType = 'block' | 'subflow' | 'iteration'
/**
* Entry node for tree structure - represents a block, subflow, or iteration
*/
export interface EntryNode {
/** The console entry (for blocks) or synthetic entry (for subflows/iterations) */
entry: ConsoleEntry
/** Child nodes */
children: EntryNode[]
/** Node type */
nodeType: EntryNodeType
/** Iteration info for iteration nodes */
iterationInfo?: {
current: number
total?: number
}
}
/**
* Execution group interface for grouping entries by execution
*/
export interface ExecutionGroup {
executionId: string
startTime: string
endTime: string
startTimeMs: number
endTimeMs: number
duration: number
status: 'success' | 'error'
/** Flat list of entries (legacy, kept for filters) */
entries: ConsoleEntry[]
/** Tree structure of entry nodes for nested display */
entryTree: EntryNode[]
}
/**
* Iteration group for grouping blocks within the same iteration
*/
interface IterationGroup {
iterationType: string
iterationCurrent: number
iterationTotal?: number
blocks: ConsoleEntry[]
startTimeMs: number
}
/**
* Builds a tree structure from flat entries.
* Groups iteration entries by (iterationType, iterationCurrent), showing all blocks
* that executed within each iteration.
* Sorts by start time to ensure chronological order.
*/
function buildEntryTree(entries: ConsoleEntry[]): EntryNode[] {
// Separate regular blocks from iteration entries
const regularBlocks: ConsoleEntry[] = []
const iterationEntries: ConsoleEntry[] = []
for (const entry of entries) {
if (entry.iterationType && entry.iterationCurrent !== undefined) {
iterationEntries.push(entry)
} else {
regularBlocks.push(entry)
}
}
// Group iteration entries by (iterationType, iterationCurrent)
const iterationGroupsMap = new Map<string, IterationGroup>()
for (const entry of iterationEntries) {
const key = `${entry.iterationType}-${entry.iterationCurrent}`
let group = iterationGroupsMap.get(key)
const entryStartMs = new Date(entry.startedAt || entry.timestamp).getTime()
if (!group) {
group = {
iterationType: entry.iterationType!,
iterationCurrent: entry.iterationCurrent!,
iterationTotal: entry.iterationTotal,
blocks: [],
startTimeMs: entryStartMs,
}
iterationGroupsMap.set(key, group)
} else {
// Update start time to earliest
if (entryStartMs < group.startTimeMs) {
group.startTimeMs = entryStartMs
}
// Update total if available
if (entry.iterationTotal !== undefined) {
group.iterationTotal = entry.iterationTotal
}
}
group.blocks.push(entry)
}
// Sort blocks within each iteration by start time ascending (oldest first, top-down)
for (const group of iterationGroupsMap.values()) {
group.blocks.sort((a, b) => {
const aStart = new Date(a.startedAt || a.timestamp).getTime()
const bStart = new Date(b.startedAt || b.timestamp).getTime()
return aStart - bStart
})
}
// Group iterations by iterationType to create subflow parents
const subflowGroups = new Map<string, IterationGroup[]>()
for (const group of iterationGroupsMap.values()) {
const type = group.iterationType
let groups = subflowGroups.get(type)
if (!groups) {
groups = []
subflowGroups.set(type, groups)
}
groups.push(group)
}
// Sort iterations within each subflow by iteration number
for (const groups of subflowGroups.values()) {
groups.sort((a, b) => a.iterationCurrent - b.iterationCurrent)
}
// Build subflow nodes with iteration children
const subflowNodes: EntryNode[] = []
for (const [iterationType, iterationGroups] of subflowGroups.entries()) {
// Calculate subflow timing from all its iterations
const firstIteration = iterationGroups[0]
const allBlocks = iterationGroups.flatMap((g) => g.blocks)
const subflowStartMs = Math.min(
...allBlocks.map((b) => new Date(b.startedAt || b.timestamp).getTime())
)
const subflowEndMs = Math.max(
...allBlocks.map((b) => new Date(b.endedAt || b.timestamp).getTime())
)
const totalDuration = allBlocks.reduce((sum, b) => sum + (b.durationMs || 0), 0)
// Create synthetic subflow parent entry
const syntheticSubflow: ConsoleEntry = {
id: `subflow-${iterationType}-${firstIteration.blocks[0]?.executionId || 'unknown'}`,
timestamp: new Date(subflowStartMs).toISOString(),
workflowId: firstIteration.blocks[0]?.workflowId || '',
blockId: `${iterationType}-container`,
blockName: iterationType.charAt(0).toUpperCase() + iterationType.slice(1),
blockType: iterationType,
executionId: firstIteration.blocks[0]?.executionId,
startedAt: new Date(subflowStartMs).toISOString(),
endedAt: new Date(subflowEndMs).toISOString(),
durationMs: totalDuration,
success: !allBlocks.some((b) => b.error),
}
// Build iteration child nodes
const iterationNodes: EntryNode[] = iterationGroups.map((iterGroup) => {
// Create synthetic iteration entry
const iterBlocks = iterGroup.blocks
const iterStartMs = Math.min(
...iterBlocks.map((b) => new Date(b.startedAt || b.timestamp).getTime())
)
const iterEndMs = Math.max(
...iterBlocks.map((b) => new Date(b.endedAt || b.timestamp).getTime())
)
const iterDuration = iterBlocks.reduce((sum, b) => sum + (b.durationMs || 0), 0)
const syntheticIteration: ConsoleEntry = {
id: `iteration-${iterationType}-${iterGroup.iterationCurrent}-${iterBlocks[0]?.executionId || 'unknown'}`,
timestamp: new Date(iterStartMs).toISOString(),
workflowId: iterBlocks[0]?.workflowId || '',
blockId: `iteration-${iterGroup.iterationCurrent}`,
blockName: `Iteration ${iterGroup.iterationCurrent}${iterGroup.iterationTotal !== undefined ? ` / ${iterGroup.iterationTotal}` : ''}`,
blockType: iterationType,
executionId: iterBlocks[0]?.executionId,
startedAt: new Date(iterStartMs).toISOString(),
endedAt: new Date(iterEndMs).toISOString(),
durationMs: iterDuration,
success: !iterBlocks.some((b) => b.error),
iterationCurrent: iterGroup.iterationCurrent,
iterationTotal: iterGroup.iterationTotal,
iterationType: iterationType as 'loop' | 'parallel',
}
// Block nodes within this iteration
const blockNodes: EntryNode[] = iterBlocks.map((block) => ({
entry: block,
children: [],
nodeType: 'block' as const,
}))
return {
entry: syntheticIteration,
children: blockNodes,
nodeType: 'iteration' as const,
iterationInfo: {
current: iterGroup.iterationCurrent,
total: iterGroup.iterationTotal,
},
}
})
subflowNodes.push({
entry: syntheticSubflow,
children: iterationNodes,
nodeType: 'subflow' as const,
})
}
// Build nodes for regular blocks
const regularNodes: EntryNode[] = regularBlocks.map((entry) => ({
entry,
children: [],
nodeType: 'block' as const,
}))
// Combine all nodes and sort by start time ascending (oldest first, top-down)
const allNodes = [...subflowNodes, ...regularNodes]
allNodes.sort((a, b) => {
const aStart = new Date(a.entry.startedAt || a.entry.timestamp).getTime()
const bStart = new Date(b.entry.startedAt || b.entry.timestamp).getTime()
return aStart - bStart
})
return allNodes
}
/**
* Groups console entries by execution ID and builds a tree structure.
* Pre-computes timestamps for efficient sorting.
*/
export function groupEntriesByExecution(entries: ConsoleEntry[]): ExecutionGroup[] {
const groups = new Map<
string,
{ meta: Omit<ExecutionGroup, 'entryTree'>; entries: ConsoleEntry[] }
>()
for (const entry of entries) {
const execId = entry.executionId || entry.id
const entryStartTime = entry.startedAt || entry.timestamp
const entryEndTime = entry.endedAt || entry.timestamp
const entryStartMs = new Date(entryStartTime).getTime()
const entryEndMs = new Date(entryEndTime).getTime()
let group = groups.get(execId)
if (!group) {
group = {
meta: {
executionId: execId,
startTime: entryStartTime,
endTime: entryEndTime,
startTimeMs: entryStartMs,
endTimeMs: entryEndMs,
duration: 0,
status: 'success',
entries: [],
},
entries: [],
}
groups.set(execId, group)
} else {
// Update timing bounds
if (entryStartMs < group.meta.startTimeMs) {
group.meta.startTime = entryStartTime
group.meta.startTimeMs = entryStartMs
}
if (entryEndMs > group.meta.endTimeMs) {
group.meta.endTime = entryEndTime
group.meta.endTimeMs = entryEndMs
}
}
// Check for errors
if (entry.error) {
group.meta.status = 'error'
}
group.entries.push(entry)
}
// Build tree structure for each group
const result: ExecutionGroup[] = []
for (const group of groups.values()) {
group.meta.duration = group.meta.endTimeMs - group.meta.startTimeMs
group.meta.entries = group.entries
result.push({
...group.meta,
entryTree: buildEntryTree(group.entries),
})
}
// Sort by start time descending (newest first)
result.sort((a, b) => b.startTimeMs - a.startTimeMs)
return result
}
/**
* Flattens entry tree into display order for keyboard navigation
*/
export function flattenEntryTree(nodes: EntryNode[]): ConsoleEntry[] {
const result: ConsoleEntry[] = []
for (const node of nodes) {
result.push(node.entry)
if (node.children.length > 0) {
result.push(...flattenEntryTree(node.children))
}
}
return result
}
/**
* Block entry with parent tracking for navigation
*/
export interface NavigableBlockEntry {
entry: ConsoleEntry
executionId: string
/** IDs of parent nodes (subflows, iterations) that contain this block */
parentNodeIds: string[]
}
/**
* Flattens entry tree to only include actual block entries (not subflows/iterations).
* Also tracks parent node IDs for auto-expanding when navigating.
*/
export function flattenBlockEntriesOnly(
nodes: EntryNode[],
executionId: string,
parentIds: string[] = []
): NavigableBlockEntry[] {
const result: NavigableBlockEntry[] = []
for (const node of nodes) {
if (node.nodeType === 'block') {
result.push({
entry: node.entry,
executionId,
parentNodeIds: parentIds,
})
}
if (node.children.length > 0) {
const newParentIds = node.nodeType !== 'block' ? [...parentIds, node.entry.id] : parentIds
result.push(...flattenBlockEntriesOnly(node.children, executionId, newParentIds))
}
}
return result
}
/**
* Terminal height configuration constants
*/
export const TERMINAL_CONFIG = {
NEAR_MIN_THRESHOLD: 40,
BLOCK_COLUMN_WIDTH_PX: TERMINAL_BLOCK_COLUMN_WIDTH,
HEADER_TEXT_CLASS: 'font-medium text-[var(--text-tertiary)] text-[12px]',
} as const

View File

@@ -1,7 +1,6 @@
import { useCallback, useRef, useState } from 'react'
import { createLogger } from '@sim/logger'
import { useQueryClient } from '@tanstack/react-query'
import { readSSEStream } from '@/lib/core/utils/sse'
import type { GenerationType } from '@/blocks/types'
import { subscriptionKeys } from '@/hooks/queries/subscription'
@@ -185,10 +184,52 @@ export function useWand({
throw new Error('Response body is null')
}
const accumulatedContent = await readSSEStream(response.body, {
onChunk: onStreamChunk,
signal: abortControllerRef.current?.signal,
})
const reader = response.body.getReader()
const decoder = new TextDecoder()
let accumulatedContent = ''
try {
while (true) {
const { done, value } = await reader.read()
if (done) break
const chunk = decoder.decode(value)
const lines = chunk.split('\n\n')
for (const line of lines) {
if (line.startsWith('data: ')) {
const lineData = line.substring(6)
if (lineData === '[DONE]') {
continue
}
try {
const data = JSON.parse(lineData)
if (data.error) {
throw new Error(data.error)
}
if (data.chunk) {
accumulatedContent += data.chunk
if (onStreamChunk) {
onStreamChunk(data.chunk)
}
}
if (data.done) {
break
}
} catch (parseError) {
logger.debug('Failed to parse SSE line', { line, parseError })
}
}
}
}
} finally {
reader.releaseLock()
}
if (accumulatedContent) {
onGeneratedContent(accumulatedContent)

View File

@@ -15,16 +15,13 @@ import {
TriggerUtils,
} from '@/lib/workflows/triggers/triggers'
import { useCurrentWorkflow } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-current-workflow'
import { getBlock } from '@/blocks'
import type { SerializableExecutionState } from '@/executor/execution/types'
import type { BlockLog, BlockState, ExecutionResult, StreamingExecution } from '@/executor/types'
import type { BlockLog, ExecutionResult, StreamingExecution } from '@/executor/types'
import { hasExecutionResult } from '@/executor/utils/errors'
import { coerceValue } from '@/executor/utils/start-block'
import { subscriptionKeys } from '@/hooks/queries/subscription'
import { useExecutionStream } from '@/hooks/use-execution-stream'
import { WorkflowValidationError } from '@/serializer'
import { useExecutionStore } from '@/stores/execution'
import { useNotificationStore } from '@/stores/notifications'
import { useVariablesStore } from '@/stores/panel'
import { useEnvironmentStore } from '@/stores/settings/environment'
import { type ConsoleEntry, useTerminalConsoleStore } from '@/stores/terminal'
@@ -84,8 +81,7 @@ export function useWorkflowExecution() {
const queryClient = useQueryClient()
const currentWorkflow = useCurrentWorkflow()
const { activeWorkflowId, workflows } = useWorkflowRegistry()
const { toggleConsole, addConsole, updateConsole, cancelRunningEntries } =
useTerminalConsoleStore()
const { toggleConsole, addConsole } = useTerminalConsoleStore()
const { getAllVariables } = useEnvironmentStore()
const { getVariablesByWorkflowId, variables } = useVariablesStore()
const {
@@ -102,15 +98,11 @@ export function useWorkflowExecution() {
setActiveBlocks,
setBlockRunStatus,
setEdgeRunStatus,
setLastExecutionSnapshot,
getLastExecutionSnapshot,
clearLastExecutionSnapshot,
} = useExecutionStore()
const [executionResult, setExecutionResult] = useState<ExecutionResult | null>(null)
const executionStream = useExecutionStream()
const currentChatExecutionIdRef = useRef<string | null>(null)
const isViewingDiff = useWorkflowDiffStore((state) => state.isShowingDiff)
const addNotification = useNotificationStore((state) => state.addNotification)
/**
* Validates debug state before performing debug operations
@@ -676,8 +668,7 @@ export function useWorkflowExecution() {
onStream?: (se: StreamingExecution) => Promise<void>,
executionId?: string,
onBlockComplete?: (blockId: string, output: any) => Promise<void>,
overrideTriggerType?: 'chat' | 'manual' | 'api',
stopAfterBlockId?: string
overrideTriggerType?: 'chat' | 'manual' | 'api'
): Promise<ExecutionResult | StreamingExecution> => {
// Use diff workflow for execution when available, regardless of canvas view state
const executionWorkflowState = null as {
@@ -876,8 +867,6 @@ export function useWorkflowExecution() {
if (activeWorkflowId) {
logger.info('Using server-side executor')
const executionId = uuidv4()
let executionResult: ExecutionResult = {
success: false,
output: {},
@@ -887,8 +876,6 @@ export function useWorkflowExecution() {
const activeBlocksSet = new Set<string>()
const streamedContent = new Map<string, string>()
const accumulatedBlockLogs: BlockLog[] = []
const accumulatedBlockStates = new Map<string, BlockState>()
const executedBlockIds = new Set<string>()
// Execute the workflow
try {
@@ -900,7 +887,6 @@ export function useWorkflowExecution() {
triggerType: overrideTriggerType || 'manual',
useDraftState: true,
isClientSession: true,
stopAfterBlockId,
workflowStateOverride: executionWorkflowState
? {
blocks: executionWorkflowState.blocks,
@@ -924,49 +910,24 @@ export function useWorkflowExecution() {
incomingEdges.forEach((edge) => {
setEdgeRunStatus(edge.id, 'success')
})
// Add entry to terminal immediately with isRunning=true
const startedAt = new Date().toISOString()
addConsole({
input: {},
output: undefined,
success: undefined,
durationMs: undefined,
startedAt,
endedAt: undefined,
workflowId: activeWorkflowId,
blockId: data.blockId,
executionId,
blockName: data.blockName || 'Unknown Block',
blockType: data.blockType || 'unknown',
isRunning: true,
// Pass through iteration context for subflow grouping
iterationCurrent: data.iterationCurrent,
iterationTotal: data.iterationTotal,
iterationType: data.iterationType,
})
},
onBlockCompleted: (data) => {
logger.info('onBlockCompleted received:', { data })
activeBlocksSet.delete(data.blockId)
// Create a new Set to trigger React re-render
setActiveBlocks(new Set(activeBlocksSet))
// Track successful block execution in run path
setBlockRunStatus(data.blockId, 'success')
executedBlockIds.add(data.blockId)
// Edges already tracked in onBlockStarted, no need to track again
accumulatedBlockStates.set(data.blockId, {
output: data.output,
executed: true,
executionTime: data.durationMs,
})
const isContainerBlock = data.blockType === 'loop' || data.blockType === 'parallel'
if (isContainerBlock) return
const startedAt = new Date(Date.now() - data.durationMs).toISOString()
const endedAt = new Date().toISOString()
// Accumulate block log for the execution result
accumulatedBlockLogs.push({
blockId: data.blockId,
blockName: data.blockName || 'Unknown Block',
@@ -979,23 +940,24 @@ export function useWorkflowExecution() {
endedAt,
})
// Update existing console entry (created in onBlockStarted) with completion data
updateConsole(
data.blockId,
{
input: data.input || {},
replaceOutput: data.output,
success: true,
durationMs: data.durationMs,
endedAt,
isRunning: false,
// Pass through iteration context for subflow grouping
iterationCurrent: data.iterationCurrent,
iterationTotal: data.iterationTotal,
iterationType: data.iterationType,
},
executionId
)
// Add to console
addConsole({
input: data.input || {},
output: data.output,
success: true,
durationMs: data.durationMs,
startedAt,
endedAt,
workflowId: activeWorkflowId,
blockId: data.blockId,
executionId: executionId || uuidv4(),
blockName: data.blockName || 'Unknown Block',
blockType: data.blockType || 'unknown',
// Pass through iteration context for console pills
iterationCurrent: data.iterationCurrent,
iterationTotal: data.iterationTotal,
iterationType: data.iterationType,
})
// Call onBlockComplete callback if provided
if (onBlockComplete) {
@@ -1030,24 +992,25 @@ export function useWorkflowExecution() {
endedAt,
})
// Update existing console entry (created in onBlockStarted) with error data
updateConsole(
data.blockId,
{
input: data.input || {},
replaceOutput: {},
success: false,
error: data.error,
durationMs: data.durationMs,
endedAt,
isRunning: false,
// Pass through iteration context for subflow grouping
iterationCurrent: data.iterationCurrent,
iterationTotal: data.iterationTotal,
iterationType: data.iterationType,
},
executionId
)
// Add error to console
addConsole({
input: data.input || {},
output: {},
success: false,
error: data.error,
durationMs: data.durationMs,
startedAt,
endedAt,
workflowId: activeWorkflowId,
blockId: data.blockId,
executionId: executionId || uuidv4(),
blockName: data.blockName,
blockType: data.blockType,
// Pass through iteration context for console pills
iterationCurrent: data.iterationCurrent,
iterationTotal: data.iterationTotal,
iterationType: data.iterationType,
})
},
onStreamChunk: (data) => {
@@ -1093,53 +1056,6 @@ export function useWorkflowExecution() {
},
logs: accumulatedBlockLogs,
}
// Add trigger block to executed blocks so downstream blocks can use run-from-block
if (data.success && startBlockId) {
executedBlockIds.add(startBlockId)
}
if (data.success && activeWorkflowId) {
if (stopAfterBlockId) {
const existingSnapshot = getLastExecutionSnapshot(activeWorkflowId)
const mergedBlockStates = {
...(existingSnapshot?.blockStates || {}),
...Object.fromEntries(accumulatedBlockStates),
}
const mergedExecutedBlocks = new Set([
...(existingSnapshot?.executedBlocks || []),
...executedBlockIds,
])
const snapshot: SerializableExecutionState = {
blockStates: mergedBlockStates,
executedBlocks: Array.from(mergedExecutedBlocks),
blockLogs: [...(existingSnapshot?.blockLogs || []), ...accumulatedBlockLogs],
decisions: existingSnapshot?.decisions || { router: {}, condition: {} },
completedLoops: existingSnapshot?.completedLoops || [],
activeExecutionPath: Array.from(mergedExecutedBlocks),
}
setLastExecutionSnapshot(activeWorkflowId, snapshot)
logger.info('Merged execution snapshot after run-until-block', {
workflowId: activeWorkflowId,
newBlocksExecuted: executedBlockIds.size,
totalExecutedBlocks: mergedExecutedBlocks.size,
})
} else {
const snapshot: SerializableExecutionState = {
blockStates: Object.fromEntries(accumulatedBlockStates),
executedBlocks: Array.from(executedBlockIds),
blockLogs: accumulatedBlockLogs,
decisions: { router: {}, condition: {} },
completedLoops: [],
activeExecutionPath: Array.from(executedBlockIds),
}
setLastExecutionSnapshot(activeWorkflowId, snapshot)
logger.info('Stored execution snapshot for run-from-block', {
workflowId: activeWorkflowId,
executedBlocksCount: executedBlockIds.size,
})
}
}
},
onExecutionError: (data) => {
@@ -1173,7 +1089,7 @@ export function useWorkflowExecution() {
endedAt: new Date().toISOString(),
workflowId: activeWorkflowId,
blockId: 'validation',
executionId,
executionId: executionId || uuidv4(),
blockName: 'Workflow Validation',
blockType: 'validation',
})
@@ -1442,11 +1358,6 @@ export function useWorkflowExecution() {
// Mark current chat execution as superseded so its cleanup won't affect new executions
currentChatExecutionIdRef.current = null
// Mark all running entries as canceled in the terminal
if (activeWorkflowId) {
cancelRunningEntries(activeWorkflowId)
}
// Reset execution state - this triggers chat stream cleanup via useEffect in chat.tsx
setIsExecuting(false)
setIsDebugging(false)
@@ -1463,334 +1374,8 @@ export function useWorkflowExecution() {
setIsExecuting,
setIsDebugging,
setActiveBlocks,
activeWorkflowId,
cancelRunningEntries,
])
/**
* Handles running workflow from a specific block using cached outputs
*/
const handleRunFromBlock = useCallback(
async (blockId: string, workflowId: string) => {
const snapshot = getLastExecutionSnapshot(workflowId)
const workflowEdges = useWorkflowStore.getState().edges
const incomingEdges = workflowEdges.filter((edge) => edge.target === blockId)
const isTriggerBlock = incomingEdges.length === 0
// Check if each source block is either executed OR is a trigger block (triggers don't need prior execution)
const isSourceSatisfied = (sourceId: string) => {
if (snapshot?.executedBlocks.includes(sourceId)) return true
// Check if source is a trigger (has no incoming edges itself)
const sourceIncomingEdges = workflowEdges.filter((edge) => edge.target === sourceId)
return sourceIncomingEdges.length === 0
}
// Non-trigger blocks need a snapshot to exist (so upstream outputs are available)
if (!snapshot && !isTriggerBlock) {
logger.error('No execution snapshot available for run-from-block', { workflowId, blockId })
return
}
const dependenciesSatisfied =
isTriggerBlock || incomingEdges.every((edge) => isSourceSatisfied(edge.source))
if (!dependenciesSatisfied) {
logger.error('Upstream dependencies not satisfied for run-from-block', {
workflowId,
blockId,
})
return
}
// For trigger blocks, always use empty snapshot to prevent stale data from different
// execution paths from being resolved. For non-trigger blocks, use the existing snapshot.
const emptySnapshot: SerializableExecutionState = {
blockStates: {},
executedBlocks: [],
blockLogs: [],
decisions: { router: {}, condition: {} },
completedLoops: [],
activeExecutionPath: [],
}
const effectiveSnapshot: SerializableExecutionState = isTriggerBlock
? emptySnapshot
: snapshot || emptySnapshot
// Extract mock payload for trigger blocks
let workflowInput: any
if (isTriggerBlock) {
const workflowBlocks = useWorkflowStore.getState().blocks
const mergedStates = mergeSubblockState(workflowBlocks, workflowId)
const candidates = resolveStartCandidates(mergedStates, { execution: 'manual' })
const candidate = candidates.find((c) => c.blockId === blockId)
if (candidate) {
if (triggerNeedsMockPayload(candidate)) {
workflowInput = extractTriggerMockPayload(candidate)
} else if (
candidate.path === StartBlockPath.SPLIT_API ||
candidate.path === StartBlockPath.SPLIT_INPUT ||
candidate.path === StartBlockPath.UNIFIED
) {
const inputFormatValue = candidate.block.subBlocks?.inputFormat?.value
if (Array.isArray(inputFormatValue)) {
const testInput: Record<string, any> = {}
inputFormatValue.forEach((field: any) => {
if (field && typeof field === 'object' && field.name && field.value !== undefined) {
testInput[field.name] = coerceValue(field.type, field.value)
}
})
if (Object.keys(testInput).length > 0) {
workflowInput = testInput
}
}
}
} else {
// Fallback: block is trigger by position but not classified as start candidate
const block = mergedStates[blockId]
if (block) {
const blockConfig = getBlock(block.type)
const hasTriggers = blockConfig?.triggers?.available?.length
if (hasTriggers || block.triggerMode) {
workflowInput = extractTriggerMockPayload({
blockId,
block,
path: StartBlockPath.EXTERNAL_TRIGGER,
})
}
}
}
}
setIsExecuting(true)
const executionId = uuidv4()
const accumulatedBlockLogs: BlockLog[] = []
const accumulatedBlockStates = new Map<string, BlockState>()
const executedBlockIds = new Set<string>()
const activeBlocksSet = new Set<string>()
try {
await executionStream.executeFromBlock({
workflowId,
startBlockId: blockId,
sourceSnapshot: effectiveSnapshot,
input: workflowInput,
callbacks: {
onBlockStarted: (data) => {
activeBlocksSet.add(data.blockId)
setActiveBlocks(new Set(activeBlocksSet))
const incomingEdges = workflowEdges.filter((edge) => edge.target === data.blockId)
incomingEdges.forEach((edge) => {
setEdgeRunStatus(edge.id, 'success')
})
},
onBlockCompleted: (data) => {
activeBlocksSet.delete(data.blockId)
setActiveBlocks(new Set(activeBlocksSet))
setBlockRunStatus(data.blockId, 'success')
executedBlockIds.add(data.blockId)
accumulatedBlockStates.set(data.blockId, {
output: data.output,
executed: true,
executionTime: data.durationMs,
})
const isContainerBlock = data.blockType === 'loop' || data.blockType === 'parallel'
if (isContainerBlock) return
const startedAt = new Date(Date.now() - data.durationMs).toISOString()
const endedAt = new Date().toISOString()
accumulatedBlockLogs.push({
blockId: data.blockId,
blockName: data.blockName || 'Unknown Block',
blockType: data.blockType || 'unknown',
input: data.input || {},
output: data.output,
success: true,
durationMs: data.durationMs,
startedAt,
endedAt,
})
addConsole({
input: data.input || {},
output: data.output,
success: true,
durationMs: data.durationMs,
startedAt,
endedAt,
workflowId,
blockId: data.blockId,
executionId,
blockName: data.blockName || 'Unknown Block',
blockType: data.blockType || 'unknown',
iterationCurrent: data.iterationCurrent,
iterationTotal: data.iterationTotal,
iterationType: data.iterationType,
})
},
onBlockError: (data) => {
activeBlocksSet.delete(data.blockId)
setActiveBlocks(new Set(activeBlocksSet))
setBlockRunStatus(data.blockId, 'error')
const startedAt = new Date(Date.now() - data.durationMs).toISOString()
const endedAt = new Date().toISOString()
accumulatedBlockLogs.push({
blockId: data.blockId,
blockName: data.blockName || 'Unknown Block',
blockType: data.blockType || 'unknown',
input: data.input || {},
output: {},
success: false,
error: data.error,
durationMs: data.durationMs,
startedAt,
endedAt,
})
addConsole({
input: data.input || {},
output: {},
success: false,
error: data.error,
durationMs: data.durationMs,
startedAt,
endedAt,
workflowId,
blockId: data.blockId,
executionId,
blockName: data.blockName,
blockType: data.blockType,
iterationCurrent: data.iterationCurrent,
iterationTotal: data.iterationTotal,
iterationType: data.iterationType,
})
},
onExecutionCompleted: (data) => {
if (data.success) {
// Add the start block (trigger) to executed blocks
executedBlockIds.add(blockId)
const mergedBlockStates: Record<string, BlockState> = {
...effectiveSnapshot.blockStates,
}
for (const [bId, state] of accumulatedBlockStates) {
mergedBlockStates[bId] = state
}
const mergedExecutedBlocks = new Set([
...effectiveSnapshot.executedBlocks,
...executedBlockIds,
])
const updatedSnapshot: SerializableExecutionState = {
...effectiveSnapshot,
blockStates: mergedBlockStates,
executedBlocks: Array.from(mergedExecutedBlocks),
blockLogs: [...effectiveSnapshot.blockLogs, ...accumulatedBlockLogs],
activeExecutionPath: Array.from(mergedExecutedBlocks),
}
setLastExecutionSnapshot(workflowId, updatedSnapshot)
}
},
onExecutionError: (data) => {
const isWorkflowModified =
data.error?.includes('Block not found in workflow') ||
data.error?.includes('Upstream dependency not executed')
if (isWorkflowModified) {
clearLastExecutionSnapshot(workflowId)
addNotification({
level: 'error',
message:
'Workflow was modified. Run the workflow again to enable running from block.',
workflowId,
})
} else {
addNotification({
level: 'error',
message: data.error || 'Run from block failed',
workflowId,
})
}
},
},
})
} catch (error) {
if ((error as Error).name !== 'AbortError') {
logger.error('Run-from-block failed:', error)
}
} finally {
setIsExecuting(false)
setActiveBlocks(new Set())
}
},
[
getLastExecutionSnapshot,
setLastExecutionSnapshot,
clearLastExecutionSnapshot,
setIsExecuting,
setActiveBlocks,
setBlockRunStatus,
setEdgeRunStatus,
addNotification,
addConsole,
executionStream,
]
)
/**
* Handles running workflow until a specific block (stops after that block completes)
*/
const handleRunUntilBlock = useCallback(
async (blockId: string, workflowId: string) => {
if (!workflowId || workflowId !== activeWorkflowId) {
logger.error('Invalid workflow ID for run-until-block', { workflowId, activeWorkflowId })
return
}
logger.info('Starting run-until-block execution', { workflowId, stopAfterBlockId: blockId })
setExecutionResult(null)
setIsExecuting(true)
const executionId = uuidv4()
try {
const result = await executeWorkflow(
undefined,
undefined,
executionId,
undefined,
'manual',
blockId
)
if (result && 'success' in result) {
setExecutionResult(result)
}
} catch (error) {
const errorResult = handleExecutionError(error, { executionId })
return errorResult
} finally {
setIsExecuting(false)
setIsDebugging(false)
setActiveBlocks(new Set())
}
},
[activeWorkflowId, setExecutionResult, setIsExecuting, setIsDebugging, setActiveBlocks]
)
return {
isExecuting,
isDebugging,
@@ -1801,7 +1386,5 @@ export function useWorkflowExecution() {
handleResumeDebug,
handleCancelDebug,
handleCancelExecution,
handleRunFromBlock,
handleRunUntilBlock,
}
}

View File

@@ -47,7 +47,6 @@ import {
useCurrentWorkflow,
useNodeUtilities,
useShiftSelectionLock,
useWorkflowExecution,
} from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks'
import {
calculateContainerDimensions,
@@ -303,8 +302,6 @@ const WorkflowContent = React.memo(() => {
const showTrainingModal = useCopilotTrainingStore((state) => state.showModal)
const { handleRunFromBlock, handleRunUntilBlock } = useWorkflowExecution()
const snapToGridSize = useSnapToGridSize()
const snapToGrid = snapToGridSize > 0
@@ -736,16 +733,13 @@ const WorkflowContent = React.memo(() => {
[collaborativeBatchAddBlocks, setSelectedEdges, setPendingSelection]
)
const { activeBlockIds, pendingBlocks, isDebugging, isExecuting, getLastExecutionSnapshot } =
useExecutionStore(
useShallow((state) => ({
activeBlockIds: state.activeBlockIds,
pendingBlocks: state.pendingBlocks,
isDebugging: state.isDebugging,
isExecuting: state.isExecuting,
getLastExecutionSnapshot: state.getLastExecutionSnapshot,
}))
)
const { activeBlockIds, pendingBlocks, isDebugging } = useExecutionStore(
useShallow((state) => ({
activeBlockIds: state.activeBlockIds,
pendingBlocks: state.pendingBlocks,
isDebugging: state.isDebugging,
}))
)
const [dragStartParentId, setDragStartParentId] = useState<string | null>(null)
@@ -1108,50 +1102,6 @@ const WorkflowContent = React.memo(() => {
} }
}, [contextMenuBlocks]) }, [contextMenuBlocks])
const handleContextRunFromBlock = useCallback(() => {
if (contextMenuBlocks.length !== 1) return
const blockId = contextMenuBlocks[0].id
handleRunFromBlock(blockId, workflowIdParam)
}, [contextMenuBlocks, workflowIdParam, handleRunFromBlock])
const handleContextRunUntilBlock = useCallback(() => {
if (contextMenuBlocks.length !== 1) return
const blockId = contextMenuBlocks[0].id
handleRunUntilBlock(blockId, workflowIdParam)
}, [contextMenuBlocks, workflowIdParam, handleRunUntilBlock])
const runFromBlockState = useMemo(() => {
if (contextMenuBlocks.length !== 1) {
return { canRun: false, reason: undefined }
}
const block = contextMenuBlocks[0]
const snapshot = getLastExecutionSnapshot(workflowIdParam)
const incomingEdges = edges.filter((edge) => edge.target === block.id)
const isTriggerBlock = incomingEdges.length === 0
// Check if each source block is either executed OR is a trigger block (triggers don't need prior execution)
const isSourceSatisfied = (sourceId: string) => {
if (snapshot?.executedBlocks.includes(sourceId)) return true
// Check if source is a trigger (has no incoming edges itself)
const sourceIncomingEdges = edges.filter((edge) => edge.target === sourceId)
return sourceIncomingEdges.length === 0
}
// Non-trigger blocks need a snapshot to exist (so upstream outputs are available)
const dependenciesSatisfied =
isTriggerBlock || (snapshot && incomingEdges.every((edge) => isSourceSatisfied(edge.source)))
const isNoteBlock = block.type === 'note'
const isInsideSubflow =
block.parentId && (block.parentType === 'loop' || block.parentType === 'parallel')
if (isInsideSubflow) return { canRun: false, reason: 'Cannot run from inside subflow' }
if (!dependenciesSatisfied) return { canRun: false, reason: 'Run upstream blocks first' }
if (isNoteBlock) return { canRun: false, reason: undefined }
if (isExecuting) return { canRun: false, reason: undefined }
return { canRun: true, reason: undefined }
}, [contextMenuBlocks, edges, workflowIdParam, getLastExecutionSnapshot, isExecuting])
  const handleContextAddBlock = useCallback(() => {
    useSearchModalStore.getState().open()
  }, [])
@@ -1800,32 +1750,37 @@ const WorkflowContent = React.memo(() => {
    )
  }, [screenToFlowPosition, handleToolbarDrop])
- /** Tracks blocks to pan to after diff updates. */
+ /**
+  * Focus canvas on changed blocks when diff appears.
+  */
  const pendingZoomBlockIdsRef = useRef<Set<string> | null>(null)
- const seenDiffBlocksRef = useRef<Set<string>>(new Set())
+ const prevDiffReadyRef = useRef(false)
- /** Queues newly changed blocks for viewport panning. */
+ // Phase 1: When diff becomes ready, record which blocks we want to zoom to
+ // Phase 2 effect is located after displayNodes is defined (search for "Phase 2")
  useEffect(() => {
-   if (!isDiffReady || !diffAnalysis) {
-     pendingZoomBlockIdsRef.current = null
-     seenDiffBlocksRef.current.clear()
-     return
-   }
-   const newBlocks = new Set<string>()
-   const allBlocks = [...(diffAnalysis.new_blocks || []), ...(diffAnalysis.edited_blocks || [])]
-   for (const id of allBlocks) {
-     if (!seenDiffBlocksRef.current.has(id)) {
-       newBlocks.add(id)
-     }
-     seenDiffBlocksRef.current.add(id)
-   }
-   if (newBlocks.size > 0) {
-     pendingZoomBlockIdsRef.current = newBlocks
-   }
- }, [isDiffReady, diffAnalysis])
+   if (isDiffReady && !prevDiffReadyRef.current && diffAnalysis) {
+     // Diff just became ready - record blocks to zoom to
+     const changedBlockIds = [
+       ...(diffAnalysis.new_blocks || []),
+       ...(diffAnalysis.edited_blocks || []),
+     ]
+     if (changedBlockIds.length > 0) {
+       pendingZoomBlockIdsRef.current = new Set(changedBlockIds)
+     } else {
+       // No specific blocks to focus on, fit all after a frame
+       pendingZoomBlockIdsRef.current = null
+       requestAnimationFrame(() => {
+         fitViewToBounds({ padding: 0.1, duration: 600 })
+       })
+     }
+   } else if (!isDiffReady && prevDiffReadyRef.current) {
+     // Diff was cleared (accepted/rejected) - cancel any pending zoom
+     pendingZoomBlockIdsRef.current = null
+   }
+   prevDiffReadyRef.current = isDiffReady
+ }, [isDiffReady, diffAnalysis, fitViewToBounds])
  /** Displays trigger warning notifications. */
  useEffect(() => {
@@ -2233,12 +2188,18 @@ const WorkflowContent = React.memo(() => {
    })
  }, [derivedNodes, blocks, pendingSelection, clearPendingSelection])
- /** Pans viewport to pending blocks once they have valid dimensions. */
+ // Phase 2: When displayNodes updates, check if pending zoom blocks are ready
+ // (Phase 1 is located earlier in the file where pendingZoomBlockIdsRef is defined)
  useEffect(() => {
    const pendingBlockIds = pendingZoomBlockIdsRef.current
-   if (!pendingBlockIds || pendingBlockIds.size === 0) return
+   if (!pendingBlockIds || pendingBlockIds.size === 0) {
+     return
+   }
+   // Find the nodes we're waiting for
    const pendingNodes = displayNodes.filter((node) => pendingBlockIds.has(node.id))
+   // Check if all expected nodes are present with valid dimensions
    const allNodesReady =
      pendingNodes.length === pendingBlockIds.size &&
      pendingNodes.every(
@@ -2250,20 +2211,16 @@ const WorkflowContent = React.memo(() => {
      )
    if (allNodesReady) {
-     logger.info('Focusing on changed blocks', {
+     logger.info('Diff ready - focusing on changed blocks', {
        changedBlockIds: Array.from(pendingBlockIds),
        foundNodes: pendingNodes.length,
      })
+     // Clear pending state before zooming to prevent re-triggers
      pendingZoomBlockIdsRef.current = null
+     // Use requestAnimationFrame to ensure React has finished rendering
-     const nodesWithAbsolutePositions = pendingNodes.map((node) => ({
-       ...node,
-       position: getNodeAbsolutePosition(node.id),
-     }))
      requestAnimationFrame(() => {
        fitViewToBounds({
-         nodes: nodesWithAbsolutePositions,
+         nodes: pendingNodes,
          duration: 600,
          padding: 0.1,
          minZoom: 0.5,
@@ -2271,7 +2228,7 @@ const WorkflowContent = React.memo(() => {
        })
      })
    }
- }, [displayNodes, fitViewToBounds, getNodeAbsolutePosition])
+ }, [displayNodes, fitViewToBounds])
  /** Handles ActionBar remove-from-subflow events. */
  useEffect(() => {
@@ -2345,12 +2302,33 @@ const WorkflowContent = React.memo(() => {
    window.removeEventListener('remove-from-subflow', handleRemoveFromSubflow as EventListener)
  }, [blocks, edgesForDisplay, getNodeAbsolutePosition, collaborativeBatchUpdateParent])
/** Handles node changes - applies changes and resolves parent-child selection conflicts. */
const onNodesChange = useCallback(
(changes: NodeChange[]) => {
selectedIdsRef.current = null
setDisplayNodes((nds) => {
const updated = applyNodeChanges(changes, nds)
const hasSelectionChange = changes.some((c) => c.type === 'select')
if (!hasSelectionChange) return updated
const resolved = resolveParentChildSelectionConflicts(updated, blocks)
selectedIdsRef.current = resolved.filter((node) => node.selected).map((node) => node.id)
return resolved
})
const selectedIds = selectedIdsRef.current as string[] | null
if (selectedIds !== null) {
syncPanelWithSelection(selectedIds)
}
},
[blocks]
)
  /**
-  * Updates container dimensions in displayNodes during drag or keyboard movement.
+  * Updates container dimensions in displayNodes during drag.
+  * This allows live resizing of containers as their children are dragged.
   */
- const updateContainerDimensionsDuringMove = useCallback(
-   (movedNodeId: string, movedNodePosition: { x: number; y: number }) => {
-     const parentId = blocks[movedNodeId]?.data?.parentId
+ const updateContainerDimensionsDuringDrag = useCallback(
+   (draggedNodeId: string, draggedNodePosition: { x: number; y: number }) => {
+     const parentId = blocks[draggedNodeId]?.data?.parentId
      if (!parentId) return
      setDisplayNodes((currentNodes) => {
@@ -2358,7 +2336,7 @@ const WorkflowContent = React.memo(() => {
        if (childNodes.length === 0) return currentNodes
        const childPositions = childNodes.map((node) => {
-         const nodePosition = node.id === movedNodeId ? movedNodePosition : node.position
+         const nodePosition = node.id === draggedNodeId ? draggedNodePosition : node.position
          const { width, height } = getBlockDimensions(node.id)
          return { x: nodePosition.x, y: nodePosition.y, width, height }
        })
@@ -2389,55 +2367,6 @@ const WorkflowContent = React.memo(() => {
      [blocks, getBlockDimensions]
    )
/** Handles node changes - applies changes and resolves parent-child selection conflicts. */
const onNodesChange = useCallback(
(changes: NodeChange[]) => {
selectedIdsRef.current = null
setDisplayNodes((nds) => {
const updated = applyNodeChanges(changes, nds)
const hasSelectionChange = changes.some((c) => c.type === 'select')
if (!hasSelectionChange) return updated
const resolved = resolveParentChildSelectionConflicts(updated, blocks)
selectedIdsRef.current = resolved.filter((node) => node.selected).map((node) => node.id)
return resolved
})
const selectedIds = selectedIdsRef.current as string[] | null
if (selectedIds !== null) {
syncPanelWithSelection(selectedIds)
}
// Handle position changes (e.g., from keyboard arrow key movement)
// Update container dimensions when child nodes are moved and persist to backend
// Only persist if not in a drag operation (drag-end is handled by onNodeDragStop)
const isInDragOperation =
getDragStartPosition() !== null || multiNodeDragStartRef.current.size > 0
const keyboardPositionUpdates: Array<{ id: string; position: { x: number; y: number } }> = []
for (const change of changes) {
if (
change.type === 'position' &&
!change.dragging &&
'position' in change &&
change.position
) {
updateContainerDimensionsDuringMove(change.id, change.position)
if (!isInDragOperation) {
keyboardPositionUpdates.push({ id: change.id, position: change.position })
}
}
}
// Persist keyboard movements to backend for collaboration sync
if (keyboardPositionUpdates.length > 0) {
collaborativeBatchUpdatePositions(keyboardPositionUpdates)
}
},
[
blocks,
updateContainerDimensionsDuringMove,
collaborativeBatchUpdatePositions,
getDragStartPosition,
]
)
  /**
   * Effect to resize loops when nodes change (add/remove/position change).
   * Runs on structural changes only - not during drag (position-only changes).
@@ -2682,7 +2611,7 @@ const WorkflowContent = React.memo(() => {
        // If the node is inside a container, update container dimensions during drag
        if (currentParentId) {
-         updateContainerDimensionsDuringMove(node.id, node.position)
+         updateContainerDimensionsDuringDrag(node.id, node.position)
        }
        // Check if this is a starter block - starter blocks should never be in containers
@@ -2799,7 +2728,7 @@ const WorkflowContent = React.memo(() => {
      blocks,
      getNodeAbsolutePosition,
      getNodeDepth,
-     updateContainerDimensionsDuringMove,
+     updateContainerDimensionsDuringDrag,
      highlightContainerNode,
    ]
  )
@@ -3489,19 +3418,11 @@ const WorkflowContent = React.memo(() => {
        onRemoveFromSubflow={handleContextRemoveFromSubflow}
        onOpenEditor={handleContextOpenEditor}
        onRename={handleContextRename}
-       onRunFromBlock={handleContextRunFromBlock}
-       onRunUntilBlock={handleContextRunUntilBlock}
        hasClipboard={hasClipboard()}
        showRemoveFromSubflow={contextMenuBlocks.some(
          (b) => b.parentId && (b.parentType === 'loop' || b.parentType === 'parallel')
        )}
-       canRunFromBlock={runFromBlockState.canRun}
        disableEdit={!effectivePermissions.canEdit}
-       isExecuting={isExecuting}
-       isPositionalTrigger={
-         contextMenuBlocks.length === 1 &&
-         edges.filter((e) => e.target === contextMenuBlocks[0]?.id).length === 0
-       }
      />
      <CanvasMenu

View File

@@ -1141,17 +1141,15 @@ function PreviewEditorContent({
    <div className='relative flex h-full w-80 flex-col overflow-hidden border-[var(--border)] border-l bg-[var(--surface-1)]'>
      {/* Header - styled like editor */}
      <div className='mx-[-1px] flex flex-shrink-0 items-center gap-[8px] rounded-b-[4px] border-[var(--border)] border-x border-b bg-[var(--surface-4)] px-[12px] py-[6px]'>
-       {block.type !== 'note' && (
-         <div
-           className='flex h-[18px] w-[18px] flex-shrink-0 items-center justify-center rounded-[4px]'
-           style={{ backgroundColor: blockConfig.bgColor }}
-         >
-           <IconComponent
-             icon={blockConfig.icon}
-             className='h-[12px] w-[12px] text-[var(--white)]'
-           />
-         </div>
-       )}
+       <div
+         className='flex h-[18px] w-[18px] flex-shrink-0 items-center justify-center rounded-[4px]'
+         style={{ backgroundColor: blockConfig.bgColor }}
+       >
+         <IconComponent
+           icon={blockConfig.icon}
+           className='h-[12px] w-[12px] text-[var(--white)]'
+         />
+       </div>
        <span className='min-w-0 flex-1 truncate font-medium text-[14px] text-[var(--text-primary)]'>
          {block.name || blockConfig.name}
        </span>

View File

@@ -411,9 +411,8 @@ function WorkflowPreviewBlockInner({ data }: NodeProps<WorkflowPreviewBlockData>
  const IconComponent = blockConfig.icon
  const isStarterOrTrigger = blockConfig.category === 'triggers' || type === 'starter' || isTrigger
- const isNoteBlock = type === 'note'
- const shouldShowDefaultHandles = !isStarterOrTrigger && !isNoteBlock
+ const shouldShowDefaultHandles = !isStarterOrTrigger
  const hasSubBlocks = visibleSubBlocks.length > 0
  const hasContentBelowHeader =
    type === 'condition'
@@ -575,8 +574,8 @@ function WorkflowPreviewBlockInner({ data }: NodeProps<WorkflowPreviewBlockData>
      </>
    )}
-   {/* Source and error handles for non-condition/router/note blocks */}
-   {type !== 'condition' && type !== 'router_v2' && type !== 'response' && !isNoteBlock && (
+   {/* Source and error handles for non-condition/router blocks */}
+   {type !== 'condition' && type !== 'router_v2' && type !== 'response' && (
      <>
        <Handle
          type='source'

View File

@@ -3,8 +3,6 @@
  import { memo } from 'react'
  import { RepeatIcon, SplitIcon } from 'lucide-react'
  import { Handle, type NodeProps, Position } from 'reactflow'
- import { Badge } from '@/components/emcn'
- import { cn } from '@/lib/core/utils/cn'
  import { HANDLE_POSITIONS } from '@/lib/workflows/blocks/block-dimensions'
  /** Execution status for subflows in preview mode */
@@ -15,8 +13,6 @@ interface WorkflowPreviewSubflowData {
    width?: number
    height?: number
    kind: 'loop' | 'parallel'
-   /** Whether this subflow is enabled */
-   enabled?: boolean
    /** Whether this subflow is selected in preview mode */
    isPreviewSelected?: boolean
    /** Execution status for highlighting the subflow container */
@@ -31,15 +27,7 @@ interface WorkflowPreviewSubflowData {
   * or interactive features.
   */
  function WorkflowPreviewSubflowInner({ data }: NodeProps<WorkflowPreviewSubflowData>) {
-   const {
-     name,
-     width = 500,
-     height = 300,
-     kind,
-     enabled = true,
-     isPreviewSelected = false,
-     executionStatus,
-   } = data
+   const { name, width = 500, height = 300, kind, isPreviewSelected = false, executionStatus } = data
    const isLoop = kind === 'loop'
    const BlockIcon = isLoop ? RepeatIcon : SplitIcon
@@ -96,21 +84,14 @@ function WorkflowPreviewSubflowInner({ data }: NodeProps<WorkflowPreviewSubflowD
        <div className='flex min-w-0 flex-1 items-center gap-[10px]'>
          <div
            className='flex h-[24px] w-[24px] flex-shrink-0 items-center justify-center rounded-[6px]'
-           style={{ backgroundColor: enabled ? blockIconBg : 'var(--surface-4)' }}
+           style={{ backgroundColor: blockIconBg }}
          >
            <BlockIcon className='h-[16px] w-[16px] text-white' />
          </div>
-         <span
-           className={cn(
-             'truncate font-medium text-[16px]',
-             !enabled && 'text-[var(--text-muted)]'
-           )}
-           title={blockName}
-         >
+         <span className='font-medium text-[16px]' title={blockName}>
            {blockName}
          </span>
        </div>
-       {!enabled && <Badge variant='gray-secondary'>disabled</Badge>}
      </div>
      {/* Content area - matches workflow structure */}

View File

@@ -23,7 +23,11 @@ import type { BlockState, WorkflowState } from '@/stores/workflows/workflow/type
  const logger = createLogger('PreviewWorkflow')
- /** Gets block dimensions, using stored values or defaults. */
+ /**
+  * Gets block dimensions for preview purposes.
+  * For containers, uses stored dimensions or defaults.
+  * For regular blocks, uses stored height or estimates based on type.
+  */
  function getPreviewBlockDimensions(block: BlockState): { width: number; height: number } {
    if (block.type === 'loop' || block.type === 'parallel') {
      return {
@@ -46,7 +50,10 @@ function getPreviewBlockDimensions(block: BlockState): { width: number; height:
    return estimateBlockDimensions(block.type)
  }
- /** Calculates container dimensions from child block positions. */
+ /**
+  * Calculates container dimensions based on child block positions and sizes.
+  * Mirrors the logic from useNodeUtilities.calculateLoopDimensions.
+  */
  function calculateContainerDimensions(
    containerId: string,
    blocks: Record<string, BlockState>
@@ -84,7 +91,12 @@ function calculateContainerDimensions(
    return { width, height }
  }
- /** Finds the leftmost block ID, excluding subflow containers. */
+ /**
+  * Finds the leftmost block ID from a workflow state.
+  * Excludes subflow containers (loop/parallel) from consideration.
+  * @param workflowState - The workflow state to search
+  * @returns The ID of the leftmost block, or null if no blocks exist
+  */
  export function getLeftmostBlockId(workflowState: WorkflowState | null | undefined): string | null {
    if (!workflowState?.blocks) return null
@@ -106,7 +118,7 @@ export function getLeftmostBlockId(workflowState: WorkflowState | null | undefin
  /** Execution status for edges/nodes in the preview */
  type ExecutionStatus = 'success' | 'error' | 'not-executed'
- /** Calculates absolute position, handling nested subflows. */
+ /** Calculates absolute position for blocks, handling nested subflows */
  function calculateAbsolutePosition(
    block: BlockState,
    blocks: Record<string, BlockState>
@@ -152,7 +164,10 @@ interface PreviewWorkflowProps {
    lightweight?: boolean
  }
- /** Preview node types using minimal, hook-free components. */
+ /**
+  * Preview node types using minimal components without hooks or store subscriptions.
+  * This prevents interaction issues while allowing canvas panning and node clicking.
+  */
  const previewNodeTypes: NodeTypes = {
    workflowBlock: PreviewBlock,
    noteBlock: PreviewBlock,
@@ -170,7 +185,11 @@ interface FitViewOnChangeProps {
    containerRef: React.RefObject<HTMLDivElement | null>
  }
- /** Calls fitView on node changes or container resize. */
+ /**
+  * Helper component that calls fitView when the set of nodes changes or when the container resizes.
+  * Only triggers on actual node additions/removals, not on selection changes.
+  * Must be rendered inside ReactFlowProvider.
+  */
  function FitViewOnChange({ nodeIds, fitPadding, containerRef }: FitViewOnChangeProps) {
    const { fitView } = useReactFlow()
    const lastNodeIdsRef = useRef<string | null>(null)
@@ -210,7 +229,16 @@ function FitViewOnChange({ nodeIds, fitPadding, containerRef }: FitViewOnChangeP
    return null
  }
- /** Readonly workflow visualization with execution status highlighting. */
+ /**
+  * Readonly workflow component for visualizing workflow state.
+  * Renders blocks, subflows, and edges with execution status highlighting.
+  *
+  * @remarks
+  * - Supports panning and node click interactions
+  * - Shows execution path via green edges for successful paths
+  * - Error edges display red by default, green when error path was taken
+  * - Fits view automatically when nodes change or container resizes
+  */
  export function PreviewWorkflow({
    workflowState,
    className,
@@ -272,58 +300,49 @@ export function PreviewWorkflow({
    return map
  }, [workflowState.blocks, isValidWorkflowState])
- /** Maps base block IDs to execution data, handling parallel iteration variants (blockId₍n₎). */
- const blockExecutionMap = useMemo(() => {
-   if (!executedBlocks) return new Map<string, { status: string }>()
-   const map = new Map<string, { status: string }>()
-   for (const [key, value] of Object.entries(executedBlocks)) {
-     // Extract base ID (remove iteration suffix like ₍0₎)
-     const baseId = key.includes('₍') ? key.split('₍')[0] : key
-     // Keep first match or error status (error takes precedence)
-     const existing = map.get(baseId)
-     if (!existing || value.status === 'error') {
-       map.set(baseId, value)
-     }
-   }
-   return map
- }, [executedBlocks])
- /** Derives subflow status from children. Error takes precedence. */
+ /** Derives subflow execution status from child blocks */
  const getSubflowExecutionStatus = useMemo(() => {
    return (subflowId: string): ExecutionStatus | undefined => {
+     if (!executedBlocks) return undefined
      const childIds = subflowChildrenMap.get(subflowId)
      if (!childIds?.length) return undefined
-     const executedChildren = childIds
-       .map((id) => blockExecutionMap.get(id))
-       .filter((status): status is { status: string } => Boolean(status))
-     if (executedChildren.length === 0) return undefined
-     if (executedChildren.some((s) => s.status === 'error')) return 'error'
-     return 'success'
+     const childStatuses = childIds.map((id) => executedBlocks[id]).filter(Boolean)
+     if (childStatuses.length === 0) return undefined
+     if (childStatuses.some((s) => s.status === 'error')) return 'error'
+     if (childStatuses.some((s) => s.status === 'success')) return 'success'
+     return 'not-executed'
    }
- }, [subflowChildrenMap, blockExecutionMap])
+ }, [executedBlocks, subflowChildrenMap])
- /** Gets block status. Subflows derive status from children. */
+ /** Gets execution status for any block, deriving subflow status from children */
  const getBlockExecutionStatus = useMemo(() => {
    return (blockId: string): { status: string; executed: boolean } | undefined => {
-     const directStatus = blockExecutionMap.get(blockId)
+     if (!executedBlocks) return undefined
+     const directStatus = executedBlocks[blockId]
      if (directStatus) {
        return { status: directStatus.status, executed: true }
      }
      const block = workflowState.blocks?.[blockId]
-     if (block?.type === 'loop' || block?.type === 'parallel') {
+     if (block && (block.type === 'loop' || block.type === 'parallel')) {
        const subflowStatus = getSubflowExecutionStatus(blockId)
        if (subflowStatus) {
          return { status: subflowStatus, executed: true }
        }
+       const incomingEdge = workflowState.edges?.find((e) => e.target === blockId)
+       if (incomingEdge && executedBlocks[incomingEdge.source]?.status === 'success') {
+         return { status: 'not-executed', executed: true }
+       }
      }
      return undefined
    }
- }, [workflowState.blocks, getSubflowExecutionStatus, blockExecutionMap])
+ }, [executedBlocks, workflowState.blocks, workflowState.edges, getSubflowExecutionStatus])
  const edgesStructure = useMemo(() => {
    if (!isValidWorkflowState) return { count: 0, ids: '' }
@@ -361,7 +380,6 @@ export function PreviewWorkflow({
        width: dimensions.width,
        height: dimensions.height,
        kind: block.type as 'loop' | 'parallel',
-       enabled: block.enabled ?? true,
        isPreviewSelected: isSelected,
        executionStatus: subflowExecutionStatus,
        lightweight,
@@ -388,11 +406,9 @@ export function PreviewWorkflow({
      }
    }
-   const nodeType = block.type === 'note' ? 'noteBlock' : 'workflowBlock'
    nodeArray.push({
      id: blockId,
-     type: nodeType,
+     type: 'workflowBlock',
      position: absolutePosition,
      draggable: false,
      zIndex: block.data?.parentId ? 10 : undefined,
@@ -426,29 +442,48 @@ export function PreviewWorkflow({
  const edges: Edge[] = useMemo(() => {
    if (!isValidWorkflowState) return []
-   /** Edge is green if target executed and source condition met by edge type. */
+   /**
+    * Determines edge execution status for visualization.
+    * Error edges turn green when taken (source errored, target executed).
+    * Normal edges turn green when both source succeeded and target executed.
+    */
    const getEdgeExecutionStatus = (edge: {
      source: string
      target: string
      sourceHandle?: string | null
    }): ExecutionStatus | undefined => {
-     if (blockExecutionMap.size === 0) return undefined
-     const targetStatus = getBlockExecutionStatus(edge.target)
-     if (!targetStatus?.executed) return 'not-executed'
+     if (!executedBlocks) return undefined
      const sourceStatus = getBlockExecutionStatus(edge.source)
-     const { sourceHandle } = edge
+     const targetStatus = getBlockExecutionStatus(edge.target)
+     const isErrorEdge = edge.sourceHandle === 'error'
-     if (sourceHandle === 'error') {
-       return sourceStatus?.status === 'error' ? 'success' : 'not-executed'
+     if (isErrorEdge) {
+       return sourceStatus?.status === 'error' && targetStatus?.executed
+         ? 'success'
+         : 'not-executed'
      }
-     if (sourceHandle === 'loop-start-source' || sourceHandle === 'parallel-start-source') {
+     const isSubflowStartEdge =
+       edge.sourceHandle === 'loop-start-source' || edge.sourceHandle === 'parallel-start-source'
+     if (isSubflowStartEdge) {
+       const incomingEdge = workflowState.edges?.find((e) => e.target === edge.source)
+       const incomingSucceeded = incomingEdge
+         ? executedBlocks[incomingEdge.source]?.status === 'success'
+         : false
+       return incomingSucceeded ? 'success' : 'not-executed'
+     }
+     const targetBlock = workflowState.blocks?.[edge.target]
+     const targetIsSubflow =
+       targetBlock && (targetBlock.type === 'loop' || targetBlock.type === 'parallel')
+     if (sourceStatus?.status === 'success' && (targetStatus?.executed || targetIsSubflow)) {
        return 'success'
      }
-     return sourceStatus?.status === 'success' ? 'success' : 'not-executed'
+     return 'not-executed'
    }
    return (workflowState.edges || []).map((edge) => {
@@ -470,8 +505,9 @@ export function PreviewWorkflow({
  }, [
    edgesStructure,
    workflowState.edges,
+   workflowState.blocks,
    isValidWorkflowState,
-   blockExecutionMap,
+   executedBlocks,
    getBlockExecutionStatus,
  ])

View File

@@ -39,8 +39,8 @@ interface WorkflowStackEntry {
  /**
   * Extracts child trace spans from a workflow block's execution data.
-  * Checks `children` property (where trace-spans processing puts them),
-  * with fallback to `output.childTraceSpans` for old stored logs.
+  * Checks both the `children` property (where trace span processing moves them)
+  * and the legacy `output.childTraceSpans` for compatibility.
   */
  function extractChildTraceSpans(blockExecution: BlockExecutionData | undefined): TraceSpan[] {
    if (!blockExecution) return []
@@ -49,7 +49,6 @@ function extractChildTraceSpans(blockExecution: BlockExecutionData | undefined):
      return blockExecution.children
    }
-   // Backward compat: old stored logs may have childTraceSpans in output
    if (blockExecution.output && typeof blockExecution.output === 'object') {
      const output = blockExecution.output as Record<string, unknown>
      if (Array.isArray(output.childTraceSpans)) {

View File

@@ -0,0 +1,241 @@
/**
 * Search utility functions for a tiered matching algorithm.
 * Provides predictable search results, prioritizing exact matches over fuzzy matches.
*/
export interface SearchableItem {
id: string
name: string
description?: string
type: string
aliases?: string[]
[key: string]: any
}
export interface SearchResult<T extends SearchableItem> {
item: T
score: number
matchType: 'exact' | 'prefix' | 'alias' | 'word-boundary' | 'substring' | 'description'
}
const SCORE_EXACT_MATCH = 10000
const SCORE_PREFIX_MATCH = 5000
const SCORE_ALIAS_MATCH = 3000
const SCORE_WORD_BOUNDARY = 1000
const SCORE_SUBSTRING_MATCH = 100
const DESCRIPTION_WEIGHT = 0.3
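// Illustrative tier ordering for the query "mail" (field names below are invented examples;
// scores follow the constants above):
//   name "mail"      -> exact match,        10000
//   name "mailchimp" -> prefix match,        5000
//   name "send mail" -> word-boundary match, 1000
//   name "gmail"     -> substring match,      100
// Matches found only in an item's description are further scaled by DESCRIPTION_WEIGHT (0.3).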
/**
* Calculate match score for a single field
* Returns 0 if no match found
*/
function calculateFieldScore(
query: string,
field: string
): {
score: number
matchType: 'exact' | 'prefix' | 'word-boundary' | 'substring' | null
} {
const normalizedQuery = query.toLowerCase().trim()
const normalizedField = field.toLowerCase().trim()
if (!normalizedQuery || !normalizedField) {
return { score: 0, matchType: null }
}
// Tier 1: Exact match
if (normalizedField === normalizedQuery) {
return { score: SCORE_EXACT_MATCH, matchType: 'exact' }
}
// Tier 2: Prefix match (starts with query)
if (normalizedField.startsWith(normalizedQuery)) {
return { score: SCORE_PREFIX_MATCH, matchType: 'prefix' }
}
// Tier 3: Word boundary match (query matches start of a word)
const words = normalizedField.split(/[\s-_/]+/)
const hasWordBoundaryMatch = words.some((word) => word.startsWith(normalizedQuery))
if (hasWordBoundaryMatch) {
return { score: SCORE_WORD_BOUNDARY, matchType: 'word-boundary' }
}
// Tier 4: Substring match (query appears anywhere)
if (normalizedField.includes(normalizedQuery)) {
return { score: SCORE_SUBSTRING_MATCH, matchType: 'substring' }
}
// No match
return { score: 0, matchType: null }
}
/**
* Check if query matches any alias in the item's aliases array
* Returns the alias score if a match is found, 0 otherwise
*/
function calculateAliasScore(
query: string,
aliases?: string[]
): { score: number; matchType: 'alias' | null } {
if (!aliases || aliases.length === 0) {
return { score: 0, matchType: null }
}
const normalizedQuery = query.toLowerCase().trim()
for (const alias of aliases) {
const normalizedAlias = alias.toLowerCase().trim()
if (normalizedAlias === normalizedQuery) {
return { score: SCORE_ALIAS_MATCH, matchType: 'alias' }
}
if (normalizedAlias.startsWith(normalizedQuery)) {
return { score: SCORE_ALIAS_MATCH * 0.8, matchType: 'alias' }
}
if (normalizedQuery.includes(normalizedAlias) || normalizedAlias.includes(normalizedQuery)) {
return { score: SCORE_ALIAS_MATCH * 0.6, matchType: 'alias' }
}
}
return { score: 0, matchType: null }
}
/**
* Calculate multi-word match score
* Each word in the query must appear somewhere in the field
* Returns a score based on how well the words match
*/
function calculateMultiWordScore(
queryWords: string[],
field: string
): { score: number; matchType: 'word-boundary' | 'substring' | null } {
const normalizedField = field.toLowerCase().trim()
const fieldWords = normalizedField.split(/[\s\-_/:]+/)
let allWordsMatch = true
let totalScore = 0
let hasWordBoundary = false
for (const queryWord of queryWords) {
const wordBoundaryMatch = fieldWords.some((fw) => fw.startsWith(queryWord))
const substringMatch = normalizedField.includes(queryWord)
if (wordBoundaryMatch) {
totalScore += SCORE_WORD_BOUNDARY
hasWordBoundary = true
} else if (substringMatch) {
totalScore += SCORE_SUBSTRING_MATCH
} else {
allWordsMatch = false
break
}
}
if (!allWordsMatch) {
return { score: 0, matchType: null }
}
return {
score: totalScore / queryWords.length,
matchType: hasWordBoundary ? 'word-boundary' : 'substring',
}
}
/**
* Search items using tiered matching algorithm
* Returns items sorted by relevance (highest score first)
*/
export function searchItems<T extends SearchableItem>(
query: string,
items: T[]
): SearchResult<T>[] {
const normalizedQuery = query.trim()
if (!normalizedQuery) {
return []
}
const results: SearchResult<T>[] = []
const queryWords = normalizedQuery.toLowerCase().split(/\s+/).filter(Boolean)
const isMultiWord = queryWords.length > 1
for (const item of items) {
const nameMatch = calculateFieldScore(normalizedQuery, item.name)
const descMatch = item.description
? calculateFieldScore(normalizedQuery, item.description)
: { score: 0, matchType: null }
const aliasMatch = calculateAliasScore(normalizedQuery, item.aliases)
let nameScore = nameMatch.score
let descScore = descMatch.score * DESCRIPTION_WEIGHT
const aliasScore = aliasMatch.score
let bestMatchType = nameMatch.matchType
// For multi-word queries, also try matching each word independently and take the better score
if (isMultiWord) {
const multiWordNameMatch = calculateMultiWordScore(queryWords, item.name)
if (multiWordNameMatch.score > nameScore) {
nameScore = multiWordNameMatch.score
bestMatchType = multiWordNameMatch.matchType
}
if (item.description) {
const multiWordDescMatch = calculateMultiWordScore(queryWords, item.description)
const multiWordDescScore = multiWordDescMatch.score * DESCRIPTION_WEIGHT
if (multiWordDescScore > descScore) {
descScore = multiWordDescScore
}
}
}
const bestScore = Math.max(nameScore, descScore, aliasScore)
if (bestScore > 0) {
let matchType: SearchResult<T>['matchType'] = 'substring'
if (nameScore >= descScore && nameScore >= aliasScore) {
matchType = bestMatchType || 'substring'
} else if (aliasScore >= descScore) {
matchType = 'alias'
} else {
matchType = 'description'
}
results.push({
item,
score: bestScore,
matchType,
})
}
}
results.sort((a, b) => b.score - a.score)
return results
}
/**
* Get a human-readable match type label
*/
export function getMatchTypeLabel(matchType: SearchResult<any>['matchType']): string {
switch (matchType) {
case 'exact':
return 'Exact match'
case 'prefix':
return 'Starts with'
case 'alias':
return 'Similar to'
case 'word-boundary':
return 'Word match'
case 'substring':
return 'Contains'
case 'description':
return 'In description'
default:
return 'Match'
}
}
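// Usage sketch (illustrative only; the items below are invented examples, not app data).
// Name matches outrank description matches, and results come back sorted by score.
const exampleItems: SearchableItem[] = [
  { id: 'mailchimp', name: 'Mailchimp', type: 'tool', aliases: ['email marketing'] },
  { id: 'gmail', name: 'Gmail', type: 'tool', description: 'Send mail from your workflow' },
]
const exampleResults = searchItems('mail', exampleItems)
// -> Mailchimp first (prefix match on name, score 5000),
//    Gmail second (matched only in description, 1000 * 0.3 = 300)
for (const result of exampleResults) {
  console.log(result.item.name, result.score, getMatchTypeLabel(result.matchType))
}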

View File

@@ -176,7 +176,7 @@ function FormattedInput({
    onChange,
    onScroll,
  }: FormattedInputProps) {
-   const handleScroll = (e: { currentTarget: HTMLInputElement }) => {
+   const handleScroll = (e: React.UIEvent<HTMLInputElement>) => {
      onScroll(e.currentTarget.scrollLeft)
    }

View File

@@ -164,7 +164,7 @@ export function InviteModal({ open, onOpenChange, workspaceName }: InviteModalPr
          ...prev,
          {
            email: normalized,
-           permissionType: 'admin',
+           permissionType: 'read',
          },
        ])
      }

View File

@@ -73,12 +73,7 @@ export const Sidebar = memo(function Sidebar() {
  const { data: sessionData, isPending: sessionLoading } = useSession()
  const { canEdit } = useUserPermissionsContext()
- const { config: permissionConfig, filterBlocks } = usePermissionConfig()
+ const { config: permissionConfig } = usePermissionConfig()
- const initializeSearchData = useSearchModalStore((state) => state.initializeData)
- useEffect(() => {
-   initializeSearchData(filterBlocks)
- }, [initializeSearchData, filterBlocks])
  /**
   * Sidebar state from store with hydration tracking to prevent SSR mismatch.

View File

@@ -80,15 +80,6 @@ Example:
        generationType: 'json-object',
      },
    },
-   {
-     id: 'timeout',
-     title: 'Timeout (ms)',
-     type: 'short-input',
-     placeholder: '300000',
-     description:
-       'Request timeout in milliseconds (default: 300000 = 5 minutes, max: 600000 = 10 minutes)',
-     mode: 'advanced',
-   },
  ],
  tools: {
    access: ['http_request'],
@@ -99,7 +90,6 @@ Example:
    headers: { type: 'json', description: 'Request headers' },
    body: { type: 'json', description: 'Request body data' },
    params: { type: 'json', description: 'URL query parameters' },
-   timeout: { type: 'number', description: 'Request timeout in milliseconds' },
  },
  outputs: {
    data: { type: 'json', description: 'API response data (JSON, text, or other formats)' },

View File

@@ -1,668 +0,0 @@
import { CalComIcon } from '@/components/icons'
import type { BlockConfig } from '@/blocks/types'
import { AuthMode } from '@/blocks/types'
import type { ToolResponse } from '@/tools/types'
import { getTrigger } from '@/triggers'
export const CalComBlock: BlockConfig<ToolResponse> = {
type: 'calcom',
name: 'CalCom',
description: 'Manage Cal.com bookings, event types, schedules, and availability',
authMode: AuthMode.OAuth,
triggerAllowed: true,
longDescription:
'Integrate Cal.com into your workflow. Create and manage bookings, event types, schedules, and check availability slots. Supports creating, listing, rescheduling, and canceling bookings, as well as managing event types and schedules. Can also trigger workflows based on Cal.com webhook events (booking created, cancelled, rescheduled). Connect your Cal.com account via OAuth.',
docsLink: 'https://docs.sim.ai/tools/calcom',
category: 'tools',
bgColor: '#FFFFFE',
icon: CalComIcon,
subBlocks: [
{
id: 'operation',
title: 'Operation',
type: 'dropdown',
options: [
{ label: 'Create Booking', id: 'calcom_create_booking' },
{ label: 'Get Booking', id: 'calcom_get_booking' },
{ label: 'List Bookings', id: 'calcom_list_bookings' },
{ label: 'Cancel Booking', id: 'calcom_cancel_booking' },
{ label: 'Reschedule Booking', id: 'calcom_reschedule_booking' },
{ label: 'Confirm Booking', id: 'calcom_confirm_booking' },
{ label: 'Decline Booking', id: 'calcom_decline_booking' },
{ label: 'Create Event Type', id: 'calcom_create_event_type' },
{ label: 'Get Event Type', id: 'calcom_get_event_type' },
{ label: 'List Event Types', id: 'calcom_list_event_types' },
{ label: 'Update Event Type', id: 'calcom_update_event_type' },
{ label: 'Delete Event Type', id: 'calcom_delete_event_type' },
{ label: 'Create Schedule', id: 'calcom_create_schedule' },
{ label: 'Get Schedule', id: 'calcom_get_schedule' },
{ label: 'List Schedules', id: 'calcom_list_schedules' },
{ label: 'Update Schedule', id: 'calcom_update_schedule' },
{ label: 'Delete Schedule', id: 'calcom_delete_schedule' },
{ label: 'Get Default Schedule', id: 'calcom_get_default_schedule' },
{ label: 'Get Available Slots', id: 'calcom_get_slots' },
],
value: () => 'calcom_list_bookings',
},
{
id: 'credential',
title: 'Cal.com Account',
type: 'oauth-input',
serviceId: 'calcom',
placeholder: 'Select Cal.com account',
required: true,
},
// === Create Booking fields ===
{
id: 'eventTypeId',
title: 'Event Type ID',
type: 'short-input',
placeholder: 'Enter event type ID (number)',
condition: {
field: 'operation',
value: ['calcom_create_booking', 'calcom_get_slots'],
},
required: { field: 'operation', value: 'calcom_create_booking' },
},
{
id: 'start',
title: 'Start Time',
type: 'short-input',
placeholder: 'ISO 8601 format (e.g., 2024-01-15T10:00:00Z)',
condition: {
field: 'operation',
value: ['calcom_create_booking', 'calcom_reschedule_booking', 'calcom_get_slots'],
},
required: {
field: 'operation',
value: ['calcom_create_booking', 'calcom_reschedule_booking', 'calcom_get_slots'],
},
wandConfig: {
enabled: true,
prompt: `Generate an ISO 8601 timestamp in UTC based on the user's description.
Format: YYYY-MM-DDTHH:MM:SSZ
Examples:
- "tomorrow at 2pm" -> Tomorrow's date at 14:00:00Z
- "next Monday 9am" -> Next Monday at 09:00:00Z
- "in 3 hours" -> Current time + 3 hours
Return ONLY the timestamp string - no explanations or quotes.`,
placeholder: 'Describe the start time (e.g., "tomorrow at 2pm")...',
generationType: 'timestamp',
},
},
{
id: 'end',
title: 'End Time',
type: 'short-input',
placeholder: 'ISO 8601 format (e.g., 2024-01-15T11:00:00Z)',
condition: { field: 'operation', value: 'calcom_get_slots' },
required: { field: 'operation', value: 'calcom_get_slots' },
wandConfig: {
enabled: true,
prompt: `Generate an ISO 8601 timestamp in UTC based on the user's description.
Format: YYYY-MM-DDTHH:MM:SSZ
Examples:
- "end of tomorrow" -> Tomorrow at 23:59:59Z
- "next Friday" -> Next Friday at 23:59:59Z
- "in 1 week" -> Current date + 7 days
Return ONLY the timestamp string - no explanations or quotes.`,
placeholder: 'Describe the end time (e.g., "end of next week")...',
generationType: 'timestamp',
},
},
{
id: 'attendeeName',
title: 'Attendee Name',
type: 'short-input',
placeholder: 'Enter attendee name',
condition: { field: 'operation', value: 'calcom_create_booking' },
required: true,
},
{
id: 'attendeeEmail',
title: 'Attendee Email',
type: 'short-input',
placeholder: 'Enter attendee email',
condition: { field: 'operation', value: 'calcom_create_booking' },
},
{
id: 'attendeeTimeZone',
title: 'Attendee Time Zone',
type: 'short-input',
placeholder: 'e.g., America/New_York, Europe/London',
condition: { field: 'operation', value: 'calcom_create_booking' },
required: true,
},
{
id: 'attendeePhone',
title: 'Attendee Phone',
type: 'short-input',
placeholder: 'International format (e.g., +1234567890)',
condition: { field: 'operation', value: 'calcom_create_booking' },
},
{
id: 'guests',
title: 'Guests',
type: 'short-input',
placeholder: 'Comma-separated email addresses',
condition: { field: 'operation', value: 'calcom_create_booking' },
},
{
id: 'lengthInMinutes',
title: 'Duration (minutes)',
type: 'short-input',
placeholder: 'Override event duration (optional)',
condition: { field: 'operation', value: 'calcom_create_booking' },
},
{
id: 'metadata',
title: 'Metadata',
type: 'code',
language: 'json',
placeholder: '{"key": "value"}',
condition: { field: 'operation', value: 'calcom_create_booking' },
},
// === Get/Cancel/Reschedule/Confirm/Decline Booking fields ===
{
id: 'bookingUid',
title: 'Booking UID',
type: 'short-input',
placeholder: 'Enter booking UID',
condition: {
field: 'operation',
value: [
'calcom_get_booking',
'calcom_cancel_booking',
'calcom_reschedule_booking',
'calcom_confirm_booking',
'calcom_decline_booking',
],
},
required: {
field: 'operation',
value: [
'calcom_get_booking',
'calcom_cancel_booking',
'calcom_reschedule_booking',
'calcom_confirm_booking',
'calcom_decline_booking',
],
},
},
{
id: 'cancellationReason',
title: 'Cancellation Reason',
type: 'long-input',
placeholder: 'Reason for cancellation (optional)',
rows: 3,
condition: { field: 'operation', value: 'calcom_cancel_booking' },
},
{
id: 'reschedulingReason',
title: 'Rescheduling Reason',
type: 'long-input',
placeholder: 'Reason for rescheduling (optional)',
rows: 3,
condition: { field: 'operation', value: 'calcom_reschedule_booking' },
},
// === List Bookings filters ===
{
id: 'bookingStatus',
title: 'Status',
type: 'dropdown',
options: [
{ label: 'All', id: '' },
{ label: 'Upcoming', id: 'upcoming' },
{ label: 'Recurring', id: 'recurring' },
{ label: 'Past', id: 'past' },
{ label: 'Cancelled', id: 'cancelled' },
{ label: 'Unconfirmed', id: 'unconfirmed' },
],
condition: { field: 'operation', value: 'calcom_list_bookings' },
},
// === Event Type fields ===
{
id: 'eventTypeIdParam',
title: 'Event Type ID',
type: 'short-input',
placeholder: 'Enter event type ID',
condition: {
field: 'operation',
value: ['calcom_get_event_type', 'calcom_update_event_type', 'calcom_delete_event_type'],
},
required: {
field: 'operation',
value: ['calcom_get_event_type', 'calcom_update_event_type', 'calcom_delete_event_type'],
},
},
{
id: 'title',
title: 'Title',
type: 'short-input',
placeholder: 'Event type title',
condition: {
field: 'operation',
value: ['calcom_create_event_type', 'calcom_update_event_type'],
},
required: { field: 'operation', value: 'calcom_create_event_type' },
},
{
id: 'slug',
title: 'Slug',
type: 'short-input',
placeholder: 'URL-friendly identifier (e.g., 30-min-meeting)',
condition: {
field: 'operation',
value: ['calcom_create_event_type', 'calcom_update_event_type'],
},
required: { field: 'operation', value: 'calcom_create_event_type' },
},
{
id: 'eventLength',
title: 'Duration (minutes)',
type: 'short-input',
placeholder: 'Event duration in minutes (e.g., 30)',
condition: {
field: 'operation',
value: ['calcom_create_event_type', 'calcom_update_event_type'],
},
required: { field: 'operation', value: 'calcom_create_event_type' },
},
{
id: 'description',
title: 'Description',
type: 'long-input',
placeholder: 'Event type description',
rows: 3,
condition: {
field: 'operation',
value: ['calcom_create_event_type', 'calcom_update_event_type'],
},
},
{
id: 'slotInterval',
title: 'Slot Interval (minutes)',
type: 'short-input',
placeholder: 'Minutes between available slots',
condition: {
field: 'operation',
value: ['calcom_create_event_type', 'calcom_update_event_type'],
},
},
{
id: 'minimumBookingNotice',
title: 'Minimum Notice (minutes)',
type: 'short-input',
placeholder: 'Minimum advance notice required',
condition: {
field: 'operation',
value: ['calcom_create_event_type', 'calcom_update_event_type'],
},
},
{
id: 'beforeEventBuffer',
title: 'Buffer Before (minutes)',
type: 'short-input',
placeholder: 'Buffer time before event',
condition: {
field: 'operation',
value: ['calcom_create_event_type', 'calcom_update_event_type'],
},
},
{
id: 'afterEventBuffer',
title: 'Buffer After (minutes)',
type: 'short-input',
placeholder: 'Buffer time after event',
condition: {
field: 'operation',
value: ['calcom_create_event_type', 'calcom_update_event_type'],
},
},
{
id: 'eventTypeScheduleId',
title: 'Schedule ID',
type: 'short-input',
placeholder: 'Assign to specific schedule (optional)',
condition: {
field: 'operation',
value: ['calcom_create_event_type', 'calcom_update_event_type'],
},
},
{
id: 'disableGuests',
title: 'Disable Guests',
type: 'switch',
description: 'Prevent attendees from adding guests',
condition: {
field: 'operation',
value: ['calcom_create_event_type', 'calcom_update_event_type'],
},
},
// === Schedule fields ===
{
id: 'scheduleId',
title: 'Schedule ID',
type: 'short-input',
placeholder: 'Enter schedule ID',
condition: {
field: 'operation',
value: ['calcom_get_schedule', 'calcom_update_schedule', 'calcom_delete_schedule'],
},
required: {
field: 'operation',
value: ['calcom_get_schedule', 'calcom_update_schedule', 'calcom_delete_schedule'],
},
},
{
id: 'scheduleName',
title: 'Name',
type: 'short-input',
placeholder: 'Schedule name (e.g., Working Hours)',
condition: {
field: 'operation',
value: ['calcom_create_schedule', 'calcom_update_schedule'],
},
required: { field: 'operation', value: 'calcom_create_schedule' },
},
{
id: 'timeZone',
title: 'Time Zone',
type: 'short-input',
placeholder: 'e.g., America/New_York',
condition: {
field: 'operation',
value: ['calcom_create_schedule', 'calcom_update_schedule', 'calcom_get_slots'],
},
required: { field: 'operation', value: 'calcom_create_schedule' },
},
{
id: 'isDefault',
title: 'Default Schedule',
type: 'switch',
description: 'Set as the default schedule',
condition: {
field: 'operation',
value: ['calcom_create_schedule', 'calcom_update_schedule'],
},
},
{
id: 'availability',
title: 'Availability',
type: 'code',
language: 'json',
placeholder: `[
{
"days": ["Monday", "Tuesday", "Wednesday", "Thursday", "Friday"],
"startTime": "09:00",
"endTime": "17:00"
}
]`,
condition: {
field: 'operation',
value: ['calcom_create_schedule', 'calcom_update_schedule'],
},
wandConfig: {
enabled: true,
prompt: `Generate a Cal.com availability JSON array based on the user's description.
Each availability object has:
- days: Array of weekday names (Monday, Tuesday, Wednesday, Thursday, Friday, Saturday, Sunday)
- startTime: HH:MM format (24-hour)
- endTime: HH:MM format (24-hour)
Example for "9-5 weekdays":
[{"days": ["Monday", "Tuesday", "Wednesday", "Thursday", "Friday"], "startTime": "09:00", "endTime": "17:00"}]
Example for "mornings only, Monday and Wednesday":
[{"days": ["Monday", "Wednesday"], "startTime": "08:00", "endTime": "12:00"}]
Return ONLY valid JSON - no explanations.`,
placeholder: 'Describe your availability (e.g., "9-5 weekdays")...',
generationType: 'json-object',
},
},
// === Slots fields ===
{
id: 'eventTypeSlug',
title: 'Event Type Slug',
type: 'short-input',
placeholder: 'Event type slug (alternative to ID)',
condition: { field: 'operation', value: 'calcom_get_slots' },
},
{
id: 'username',
title: 'Username',
type: 'short-input',
placeholder: 'Cal.com username (required with slug)',
condition: { field: 'operation', value: 'calcom_get_slots' },
},
{
id: 'duration',
title: 'Duration (minutes)',
type: 'short-input',
placeholder: 'Slot duration (optional)',
condition: { field: 'operation', value: 'calcom_get_slots' },
},
// === List Event Types sorting ===
{
id: 'sortCreatedAt',
title: 'Sort by Created',
type: 'dropdown',
options: [
{ label: 'None', id: '' },
{ label: 'Ascending', id: 'asc' },
{ label: 'Descending', id: 'desc' },
],
condition: { field: 'operation', value: 'calcom_list_event_types' },
},
// Trigger SubBlocks
...getTrigger('calcom_booking_created').subBlocks,
...getTrigger('calcom_booking_cancelled').subBlocks,
...getTrigger('calcom_booking_rescheduled').subBlocks,
...getTrigger('calcom_booking_requested').subBlocks,
...getTrigger('calcom_booking_rejected').subBlocks,
...getTrigger('calcom_booking_paid').subBlocks,
...getTrigger('calcom_meeting_ended').subBlocks,
...getTrigger('calcom_recording_ready').subBlocks,
...getTrigger('calcom_webhook').subBlocks,
],
tools: {
access: [
'calcom_create_booking',
'calcom_get_booking',
'calcom_list_bookings',
'calcom_cancel_booking',
'calcom_reschedule_booking',
'calcom_confirm_booking',
'calcom_decline_booking',
'calcom_create_event_type',
'calcom_get_event_type',
'calcom_list_event_types',
'calcom_update_event_type',
'calcom_delete_event_type',
'calcom_create_schedule',
'calcom_get_schedule',
'calcom_list_schedules',
'calcom_update_schedule',
'calcom_delete_schedule',
'calcom_get_default_schedule',
'calcom_get_slots',
],
config: {
tool: (params) => params.operation || 'calcom_list_bookings',
params: (params) => {
const {
operation,
attendeeName,
attendeeEmail,
attendeeTimeZone,
attendeePhone,
guests,
metadata,
availability,
eventTypeIdParam,
eventTypeId,
bookingStatus,
eventLength,
scheduleName,
eventTypeScheduleId,
...rest
} = params
const result: Record<string, unknown> = { ...rest }
if (eventTypeId) {
result.eventTypeId = Number(eventTypeId)
}
if (operation === 'calcom_create_booking') {
result.attendee = {
name: attendeeName,
...(attendeeEmail && { email: attendeeEmail }),
timeZone: attendeeTimeZone,
...(attendeePhone && { phoneNumber: attendeePhone }),
}
result.attendeeName = undefined
result.attendeeEmail = undefined
result.attendeeTimeZone = undefined
result.attendeePhone = undefined
if (guests) {
result.guests = guests.split(',').map((g: string) => g.trim())
}
}
if (metadata) {
try {
result.metadata = typeof metadata === 'string' ? JSON.parse(metadata) : metadata
} catch {
throw new Error('Invalid JSON for metadata')
}
}
if (availability) {
try {
result.availability =
typeof availability === 'string' ? JSON.parse(availability) : availability
} catch {
throw new Error('Invalid JSON for availability')
}
}
if (eventTypeIdParam) {
result.eventTypeId = Number(eventTypeIdParam)
}
if (bookingStatus) {
result.status = bookingStatus
}
if (eventLength) {
result.lengthInMinutes = Number(eventLength)
}
if (scheduleName) {
result.name = scheduleName
}
if (eventTypeScheduleId) {
result.scheduleId = Number(eventTypeScheduleId)
}
return result
},
},
},
inputs: {
operation: { type: 'string', description: 'Operation to perform' },
credential: { type: 'string', description: 'Cal.com OAuth credential' },
eventTypeId: { type: 'number', description: 'Event type ID' },
start: { type: 'string', description: 'Start time (ISO 8601)' },
end: { type: 'string', description: 'End time (ISO 8601)' },
attendeeName: { type: 'string', description: 'Attendee name' },
attendeeEmail: { type: 'string', description: 'Attendee email' },
attendeeTimeZone: { type: 'string', description: 'Attendee time zone' },
attendeePhone: { type: 'string', description: 'Attendee phone number' },
guests: { type: 'string', description: 'Comma-separated guest emails' },
lengthInMinutes: { type: 'number', description: 'Duration override in minutes' },
metadata: { type: 'json', description: 'Custom metadata object' },
bookingUid: { type: 'string', description: 'Booking UID' },
cancellationReason: { type: 'string', description: 'Reason for cancellation' },
reschedulingReason: { type: 'string', description: 'Reason for rescheduling' },
bookingStatus: { type: 'string', description: 'Filter by booking status' },
eventTypeIdParam: { type: 'number', description: 'Event type ID for get/update/delete' },
title: { type: 'string', description: 'Event type title' },
slug: { type: 'string', description: 'URL-friendly slug' },
eventLength: { type: 'number', description: 'Event duration in minutes' },
description: { type: 'string', description: 'Event type description' },
slotInterval: { type: 'number', description: 'Minutes between available slots' },
minimumBookingNotice: { type: 'number', description: 'Minimum advance notice' },
beforeEventBuffer: { type: 'number', description: 'Buffer before event' },
afterEventBuffer: { type: 'number', description: 'Buffer after event' },
eventTypeScheduleId: { type: 'number', description: 'Schedule ID for event type' },
disableGuests: { type: 'boolean', description: 'Disable guest additions' },
sortCreatedAt: { type: 'string', description: 'Sort order for event types' },
scheduleId: { type: 'number', description: 'Schedule ID' },
scheduleName: { type: 'string', description: 'Schedule name' },
timeZone: { type: 'string', description: 'Time zone' },
isDefault: { type: 'boolean', description: 'Set as default schedule' },
availability: { type: 'json', description: 'Availability configuration' },
eventTypeSlug: { type: 'string', description: 'Event type slug' },
username: { type: 'string', description: 'Cal.com username' },
duration: { type: 'number', description: 'Slot duration in minutes' },
},
outputs: {
success: { type: 'boolean', description: 'Whether operation succeeded' },
bookingUid: { type: 'string', description: 'Booking unique identifier' },
bookingId: { type: 'number', description: 'Booking ID' },
status: { type: 'string', description: 'Booking or event status' },
title: { type: 'string', description: 'Booking or event type title' },
startTime: { type: 'string', description: 'Booking start time (ISO 8601)' },
endTime: { type: 'string', description: 'Booking end time (ISO 8601)' },
attendees: { type: 'json', description: 'List of attendees' },
hosts: { type: 'json', description: 'List of hosts' },
location: { type: 'string', description: 'Meeting location' },
meetingUrl: { type: 'string', description: 'Video meeting URL' },
bookings: { type: 'json', description: 'List of bookings' },
eventTypes: { type: 'json', description: 'List of event types' },
schedules: { type: 'json', description: 'List of schedules' },
slots: { type: 'json', description: 'Available time slots' },
id: { type: 'number', description: 'Event type or schedule ID' },
slug: { type: 'string', description: 'Event type slug' },
lengthInMinutes: { type: 'number', description: 'Event duration' },
description: { type: 'string', description: 'Event type description' },
name: { type: 'string', description: 'Schedule name' },
timeZone: { type: 'string', description: 'Schedule time zone' },
isDefault: { type: 'boolean', description: 'Whether schedule is default' },
availability: { type: 'json', description: 'Availability configuration' },
deleted: { type: 'boolean', description: 'Whether deletion succeeded' },
message: { type: 'string', description: 'Status or error message' },
triggerEvent: { type: 'string', description: 'Webhook event type' },
createdAt: { type: 'string', description: 'Webhook event timestamp' },
payload: { type: 'json', description: 'Complete webhook payload data' },
},
triggers: {
enabled: true,
available: [
'calcom_booking_created',
'calcom_booking_cancelled',
'calcom_booking_rescheduled',
'calcom_booking_requested',
'calcom_booking_rejected',
'calcom_booking_paid',
'calcom_meeting_ended',
'calcom_recording_ready',
'calcom_webhook',
],
},
}
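
For orientation, a minimal sketch of what the params mapping above produces for a calcom_create_booking call. The helper and all values below are illustrative stand-ins that mirror the transform in this block config; nothing here is exported by the block itself.

// Sketch only: mirrors the eventTypeId/attendee/guests handling in the params() function above.
function toCreateBookingParams(raw: Record<string, string>) {
  return {
    operation: raw.operation,
    eventTypeId: Number(raw.eventTypeId),
    start: raw.start,
    attendee: {
      name: raw.attendeeName,
      ...(raw.attendeeEmail && { email: raw.attendeeEmail }),
      timeZone: raw.attendeeTimeZone,
    },
    ...(raw.guests && { guests: raw.guests.split(',').map((g) => g.trim()) }),
  }
}

toCreateBookingParams({
  operation: 'calcom_create_booking',
  eventTypeId: '123',
  start: '2026-02-01T09:00:00Z',
  attendeeName: 'Ada Lovelace',
  attendeeEmail: 'ada@example.com',
  attendeeTimeZone: 'Europe/London',
  guests: 'grace@example.com, alan@example.com',
})
// => eventTypeId becomes the number 123, the attendee sub-fields fold into one attendee object,
//    and the comma-separated guests string becomes ['grace@example.com', 'alan@example.com'].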

View File

@@ -9,7 +9,6 @@ import { ApolloBlock } from '@/blocks/blocks/apollo'
import { ArxivBlock } from '@/blocks/blocks/arxiv'
import { AsanaBlock } from '@/blocks/blocks/asana'
import { BrowserUseBlock } from '@/blocks/blocks/browser_use'
-import { CalComBlock } from '@/blocks/blocks/calcom'
import { CalendlyBlock } from '@/blocks/blocks/calendly'
import { ChatTriggerBlock } from '@/blocks/blocks/chat_trigger'
import { CirclebackBlock } from '@/blocks/blocks/circleback'
@@ -166,7 +165,6 @@ export const registry: Record<string, BlockConfig> = {
arxiv: ArxivBlock,
asana: AsanaBlock,
browser_use: BrowserUseBlock,
-calcom: CalComBlock,
calendly: CalendlyBlock,
chat_trigger: ChatTriggerBlock,
circleback: CirclebackBlock,

View File

@@ -37,7 +37,7 @@
.code-editor-theme .token.char,
.code-editor-theme .token.builtin,
.code-editor-theme .token.inserted {
-color: #b45309 !important;
+color: #dc2626 !important;
}
.code-editor-theme .token.operator,
@@ -49,7 +49,7 @@
.code-editor-theme .token.atrule,
.code-editor-theme .token.attr-value,
.code-editor-theme .token.keyword {
-color: #2f55ff !important;
+color: #2563eb !important;
}
.code-editor-theme .token.function,
@@ -119,7 +119,7 @@
.dark .code-editor-theme .token.atrule,
.dark .code-editor-theme .token.attr-value,
.dark .code-editor-theme .token.keyword {
-color: #2fa1ff !important;
+color: #4db8ff !important;
}
.dark .code-editor-theme .token.function,

File diff suppressed because it is too large

View File

@@ -513,10 +513,18 @@ const PopoverContent = React.forwardRef<
return () => window.removeEventListener('keydown', handleKeyDown, true)
}, [context])
-// Note: scrollIntoView for keyboard navigation is intentionally disabled here.
-// Components using Popover (like TagDropdown) should handle their own scroll
-// management to avoid conflicts between the popover's internal selection index
-// and the component's custom navigation state.
+React.useEffect(() => {
+const content = contentRef.current
+if (!content || !context?.isKeyboardNav || context.selectedIndex < 0) return
+const items = content.querySelectorAll<HTMLElement>(
+'[role="menuitem"]:not([aria-disabled="true"])'
+)
+const selectedItem = items[context.selectedIndex]
+if (selectedItem) {
+selectedItem.scrollIntoView({ block: 'nearest', behavior: 'smooth' })
+}
+}, [context?.selectedIndex, context?.isKeyboardNav])
const hasUserWidthConstraint =
maxWidth !== undefined ||
@@ -707,8 +715,7 @@ const PopoverItem = React.forwardRef<HTMLDivElement, PopoverItemProps>(
const handleMouseEnter = (e: React.MouseEvent<HTMLDivElement>) => {
context?.setLastHoveredItem(null)
-// Don't update selection during keyboard navigation to prevent scroll jumps
-if (itemIndex >= 0 && context && !context.isKeyboardNav) {
+if (itemIndex >= 0 && context) {
context.setSelectedIndex(itemIndex)
}
onMouseEnter?.(e)
@@ -889,8 +896,7 @@ const PopoverFolder = React.forwardRef<HTMLDivElement, PopoverFolderProps>(
}
const handleMouseEnter = () => {
-// Don't update selection during keyboard navigation to prevent scroll jumps
-if (itemIndex >= 0 && !isKeyboardNav) {
+if (itemIndex >= 0) {
setSelectedIndex(itemIndex)
}
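
As a side note on the hunk above: the added effect moves scroll management into the popover itself during keyboard navigation. A self-contained sketch of that pattern under assumed names (this helper does not exist in the codebase):

// Sketch: scroll the currently selected, enabled menu item into view.
function scrollSelectedIntoView(container: HTMLElement, selectedIndex: number): void {
  // Same selector shape as the hunk above: enabled items with the menuitem role.
  const items = container.querySelectorAll<HTMLElement>(
    '[role="menuitem"]:not([aria-disabled="true"])'
  )
  // 'nearest' avoids jumping the list when the item is already visible.
  items[selectedIndex]?.scrollIntoView({ block: 'nearest', behavior: 'smooth' })
}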

View File

@@ -21,7 +21,7 @@ export { Loader } from './loader'
export { MoreHorizontal } from './more-horizontal'
export { NoWrap } from './no-wrap'
export { PanelLeft } from './panel-left'
-export { Play, PlayOutline } from './play'
+export { Play } from './play'
export { Redo } from './redo'
export { Rocket } from './rocket'
export { Trash } from './trash'

View File

@@ -1,7 +1,7 @@
import type { SVGProps } from 'react'
/**
-* Play icon component (filled/solid version)
+* Play icon component
* @param props - SVG properties including className, fill, etc.
*/
export function Play(props: SVGProps<SVGSVGElement>) {
@@ -21,27 +21,3 @@ export function Play(props: SVGProps<SVGSVGElement>) {
</svg>
)
}
-/**
-* Play icon component (stroke/outline version, matches lucide style)
-* Uses 24x24 viewBox and strokeWidth 2 for consistency with other icons.
-* @param props - SVG properties including className, stroke, etc.
-*/
-export function PlayOutline(props: SVGProps<SVGSVGElement>) {
-return (
-<svg
-width='24'
-height='24'
-viewBox='0 0 24 24'
-fill='none'
-stroke='currentColor'
-strokeWidth='2'
-strokeLinecap='round'
-strokeLinejoin='round'
-xmlns='http://www.w3.org/2000/svg'
-{...props}
->
-<path d='M14.7175 4.07175C16.6036 5.37051 18.0001 6.39111 19.0000 7.32600C20.0087 8.26733 20.9617 9.25123 21.3031 10.5484C21.5534 11.4996 21.5534 12.5003 21.3031 13.4515C20.9617 14.7487 20.0087 15.7326 19.0000 16.6739C18.0001 17.6088 16.6037 18.6294 14.7176 19.9281C12.9093 21.1827 11.0470 22.2407 9.6333 22.8420C8.2082 23.4482 6.9090 23.7554 5.6463 23.3976C4.6383 23.1346 3.7940 22.6355 3.1138 21.9492C2.1907 21.0179 1.9001 19.7306 1.7248 18.1814C1.5507 16.6436 1.5507 14.6305 1.5508 12.0701V11.9298C1.5507 9.36936 1.5507 7.35626 1.7248 5.81844C1.9001 4.26926 2.1907 2.982 3.1138 2.05063C3.7940 1.36438 4.6383 0.865267 5.6463 0.602306C6.9090 0.244489 8.2082 0.551707 9.6333 1.15785C11.0470 1.75916 12.9092 2.81712 14.7175 4.07175Z' />
-</svg>
-)
-}

File diff suppressed because one or more lines are too long

View File

@@ -33,15 +33,6 @@ export interface DAG {
parallelConfigs: Map<string, SerializedParallel>
}
-export interface DAGBuildOptions {
-/** Trigger block ID to start path construction from */
-triggerBlockId?: string
-/** Saved incoming edges from snapshot for resumption */
-savedIncomingEdges?: Record<string, string[]>
-/** Include all enabled blocks instead of only those reachable from trigger */
-includeAllBlocks?: boolean
-}
export class DAGBuilder {
private pathConstructor = new PathConstructor()
private loopConstructor = new LoopConstructor()
@@ -49,9 +40,11 @@
private nodeConstructor = new NodeConstructor()
private edgeConstructor = new EdgeConstructor()
-build(workflow: SerializedWorkflow, options: DAGBuildOptions = {}): DAG {
-const { triggerBlockId, savedIncomingEdges, includeAllBlocks } = options
+build(
+workflow: SerializedWorkflow,
+triggerBlockId?: string,
+savedIncomingEdges?: Record<string, string[]>
+): DAG {
const dag: DAG = {
nodes: new Map(),
loopConfigs: new Map(),
@@ -60,7 +53,7 @@
this.initializeConfigs(workflow, dag)
-const reachableBlocks = this.pathConstructor.execute(workflow, triggerBlockId, includeAllBlocks)
+const reachableBlocks = this.pathConstructor.execute(workflow, triggerBlockId)
this.loopConstructor.execute(dag, reachableBlocks)
this.parallelConstructor.execute(dag, reachableBlocks)

View File

@@ -207,7 +207,6 @@ export class EdgeConstructor {
for (const connection of workflow.connections) {
let { source, target } = connection
const originalSource = source
-const originalTarget = target
let sourceHandle = this.generateSourceHandle(
source,
target,
@@ -258,12 +257,12 @@
target = sentinelStartId
}
-if (this.edgeCrossesLoopBoundary(source, target, blocksInLoops, dag)) {
-continue
+if (loopSentinelStartId) {
+this.addEdge(dag, loopSentinelStartId, target, EDGE.LOOP_EXIT, targetHandle)
}
-if (loopSentinelStartId && !blocksInLoops.has(originalTarget)) {
-this.addEdge(dag, loopSentinelStartId, target, EDGE.LOOP_EXIT, targetHandle)
+if (this.edgeCrossesLoopBoundary(source, target, blocksInLoops, dag)) {
+continue
}
if (!this.isEdgeReachable(source, target, reachableBlocks, dag)) {

View File

@@ -6,16 +6,7 @@ import type { SerializedBlock, SerializedWorkflow } from '@/serializer/types'
const logger = createLogger('PathConstructor')
export class PathConstructor {
-execute(
-workflow: SerializedWorkflow,
-triggerBlockId?: string,
-includeAllBlocks?: boolean
-): Set<string> {
-// For run-from-block mode, include all enabled blocks regardless of trigger reachability
-if (includeAllBlocks) {
-return this.getAllEnabledBlocks(workflow)
-}
+execute(workflow: SerializedWorkflow, triggerBlockId?: string): Set<string> {
const resolvedTriggerId = this.findTriggerBlock(workflow, triggerBlockId)
if (!resolvedTriggerId) {

View File

@@ -4,7 +4,6 @@ import {
containsUserFileWithMetadata,
hydrateUserFilesWithBase64,
} from '@/lib/uploads/utils/user-file-base64.server'
-import { sanitizeInputFormat, sanitizeTools } from '@/lib/workflows/comparison/normalize'
import {
BlockType,
buildResumeApiUrl,
@@ -35,7 +34,6 @@ import { validateBlockType } from '@/executor/utils/permission-check'
import type { VariableResolver } from '@/executor/variables/resolver'
import type { SerializedBlock } from '@/serializer/types'
import type { SubflowType } from '@/stores/workflows/workflow/types'
-import { SYSTEM_SUBBLOCK_IDS } from '@/triggers/constants'
const logger = createLogger('BlockExecutor')
@@ -89,7 +87,7 @@ export class BlockExecutor {
resolvedInputs = this.resolver.resolveInputs(ctx, node.id, block.config.params, block)
if (blockLog) {
-blockLog.input = this.sanitizeInputsForLog(resolvedInputs)
+blockLog.input = this.parseJsonInputs(resolvedInputs)
}
} catch (error) {
cleanupSelfReference?.()
@@ -152,9 +150,6 @@
blockLog.durationMs = duration
blockLog.success = true
blockLog.output = filterOutputForLog(block.metadata?.id || '', normalizedOutput, { block })
-if (normalizedOutput.childTraceSpans && Array.isArray(normalizedOutput.childTraceSpans)) {
-blockLog.childTraceSpans = normalizedOutput.childTraceSpans
-}
}
this.state.setBlockOutput(node.id, normalizedOutput, duration)
@@ -167,7 +162,7 @@
ctx,
node,
block,
-this.sanitizeInputsForLog(resolvedInputs),
+this.parseJsonInputs(resolvedInputs),
displayOutput,
duration
)
@@ -246,12 +241,8 @@
blockLog.durationMs = duration
blockLog.success = false
blockLog.error = errorMessage
-blockLog.input = this.sanitizeInputsForLog(input)
+blockLog.input = this.parseJsonInputs(input)
blockLog.output = filterOutputForLog(block.metadata?.id || '', errorOutput, { block })
-if (errorOutput.childTraceSpans && Array.isArray(errorOutput.childTraceSpans)) {
-blockLog.childTraceSpans = errorOutput.childTraceSpans
-}
}
logger.error(
@@ -269,7 +260,7 @@
ctx,
node,
block,
-this.sanitizeInputsForLog(input),
+this.parseJsonInputs(input),
displayOutput,
duration
)
@@ -361,41 +352,29 @@
}
/**
-* Sanitizes inputs for log display.
-* - Filters out system fields (UI-only, readonly, internal flags)
-* - Removes UI state from inputFormat items (e.g., collapsed)
-* - Parses JSON strings to objects for readability
+* Parse JSON string inputs to objects for log display only.
+* Attempts to parse any string that looks like JSON.
* Returns a new object - does not mutate the original inputs.
*/
-private sanitizeInputsForLog(inputs: Record<string, any>): Record<string, any> {
-const result: Record<string, any> = {}
+private parseJsonInputs(inputs: Record<string, any>): Record<string, any> {
+let result = inputs
+let hasChanges = false
for (const [key, value] of Object.entries(inputs)) {
-if (SYSTEM_SUBBLOCK_IDS.includes(key) || key === 'triggerMode') {
-continue
-}
-if (key === 'inputFormat' && Array.isArray(value)) {
-result[key] = sanitizeInputFormat(value)
-continue
-}
-if (key === 'tools' && Array.isArray(value)) {
-result[key] = sanitizeTools(value)
-continue
-}
// isJSONString is a quick heuristic (checks for { or [), not a validator.
// Invalid JSON is safely caught below - this just avoids JSON.parse on every string.
-if (typeof value === 'string' && isJSONString(value)) {
-try {
-result[key] = JSON.parse(value.trim())
-} catch {
-// Not valid JSON, keep original string
-result[key] = value
-}
-} else {
-result[key] = value
+if (typeof value !== 'string' || !isJSONString(value)) {
+continue
+}
+try {
+if (!hasChanges) {
+result = { ...inputs }
+hasChanges = true
+}
+result[key] = JSON.parse(value.trim())
+} catch {
+// Not valid JSON, keep original string
}
}
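
A rough, self-contained sketch of the copy-on-write JSON parsing that the parseJsonInputs side of the hunk above performs for log display. isLikelyJson stands in for the repo's isJSONString heuristic and is an assumption, not the real import.

function isLikelyJson(value: string): boolean {
  const trimmed = value.trim()
  return trimmed.startsWith('{') || trimmed.startsWith('[')
}

function parseJsonInputsForLog(inputs: Record<string, unknown>): Record<string, unknown> {
  let result = inputs
  let hasChanges = false
  for (const [key, value] of Object.entries(inputs)) {
    if (typeof value !== 'string' || !isLikelyJson(value)) continue
    try {
      const parsed = JSON.parse(value.trim())
      if (!hasChanges) {
        // Copy lazily so inputs with no JSON strings are returned untouched.
        result = { ...inputs }
        hasChanges = true
      }
      result[key] = parsed
    } catch {
      // Not valid JSON after all - keep the original string.
    }
  }
  return result
}

parseJsonInputsForLog({ url: 'https://example.com', body: '{"limit": 10}' })
// => { url: 'https://example.com', body: { limit: 10 } }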

View File

@@ -77,16 +77,15 @@ export class EdgeManager {
}
}
-if (output.selectedRoute !== EDGE.LOOP_EXIT && output.selectedRoute !== EDGE.PARALLEL_EXIT) {
+// Check if any deactivation targets that previously received an activated edge are now ready
for (const { target } of edgesToDeactivate) {
if (
!readyNodes.includes(target) &&
!activatedTargets.includes(target) &&
this.nodesWithActivatedEdge.has(target) &&
this.isTargetReady(target)
) {
readyNodes.push(target)
}
-}
}
}

View File

@@ -26,7 +26,6 @@ export class ExecutionEngine {
private allowResumeTriggers: boolean
private cancelledFlag = false
private errorFlag = false
-private stoppedEarlyFlag = false
private executionError: Error | null = null
private lastCancellationCheck = 0
private readonly useRedisCancellation: boolean
@@ -106,7 +105,7 @@
this.initializeQueue(triggerBlockId)
while (this.hasWork()) {
-if ((await this.checkCancellation()) || this.errorFlag || this.stoppedEarlyFlag) {
+if ((await this.checkCancellation()) || this.errorFlag) {
break
}
await this.processQueue()
@@ -260,16 +259,6 @@
}
private initializeQueue(triggerBlockId?: string): void {
-if (this.context.runFromBlockContext) {
-const { startBlockId } = this.context.runFromBlockContext
-logger.info('Initializing queue for run-from-block mode', {
-startBlockId,
-dirtySetSize: this.context.runFromBlockContext.dirtySet.size,
-})
-this.addToQueue(startBlockId)
-return
-}
const pendingBlocks = this.context.metadata.pendingBlocks
const remainingEdges = (this.context.metadata as any).remainingEdges
@@ -396,28 +385,11 @@
this.finalOutput = output
}
-if (this.context.stopAfterBlockId === nodeId) {
-// For loop/parallel sentinels, only stop if the subflow has fully exited (all iterations done)
-// shouldContinue: true means more iterations, shouldExit: true means loop is done
-const shouldContinueLoop = output.shouldContinue === true
-if (!shouldContinueLoop) {
-logger.info('Stopping execution after target block', { nodeId })
-this.stoppedEarlyFlag = true
-return
-}
-}
const readyNodes = this.edgeManager.processOutgoingEdges(node, output, false)
logger.info('Processing outgoing edges', {
nodeId,
outgoingEdgesCount: node.outgoingEdges.size,
-outgoingEdges: Array.from(node.outgoingEdges.entries()).map(([id, e]) => ({
-id,
-target: e.target,
-sourceHandle: e.sourceHandle,
-})),
-output,
readyNodesCount: readyNodes.length,
readyNodes,
})

View File

@@ -5,31 +5,17 @@ import { BlockExecutor } from '@/executor/execution/block-executor'
import { EdgeManager } from '@/executor/execution/edge-manager'
import { ExecutionEngine } from '@/executor/execution/engine'
import { ExecutionState } from '@/executor/execution/state'
-import type {
-ContextExtensions,
-SerializableExecutionState,
-WorkflowInput,
-} from '@/executor/execution/types'
+import type { ContextExtensions, WorkflowInput } from '@/executor/execution/types'
import { createBlockHandlers } from '@/executor/handlers/registry'
import { LoopOrchestrator } from '@/executor/orchestrators/loop'
import { NodeExecutionOrchestrator } from '@/executor/orchestrators/node'
import { ParallelOrchestrator } from '@/executor/orchestrators/parallel'
import type { BlockState, ExecutionContext, ExecutionResult } from '@/executor/types'
-import {
-computeExecutionSets,
-type RunFromBlockContext,
-resolveContainerToSentinelStart,
-validateRunFromBlock,
-} from '@/executor/utils/run-from-block'
import {
buildResolutionFromBlock,
buildStartBlockOutput,
resolveExecutorStartBlock,
} from '@/executor/utils/start-block'
-import {
-extractLoopIdFromSentinel,
-extractParallelIdFromSentinel,
-} from '@/executor/utils/subflow-utils'
import { VariableResolver } from '@/executor/variables/resolver'
import type { SerializedWorkflow } from '@/serializer/types'
@@ -62,10 +48,7 @@ export class DAGExecutor {
async execute(workflowId: string, triggerBlockId?: string): Promise<ExecutionResult> {
const savedIncomingEdges = this.contextExtensions.dagIncomingEdges
-const dag = this.dagBuilder.build(this.workflow, {
-triggerBlockId,
-savedIncomingEdges,
-})
+const dag = this.dagBuilder.build(this.workflow, triggerBlockId, savedIncomingEdges)
const { context, state } = this.createExecutionContext(workflowId, triggerBlockId)
const resolver = new VariableResolver(this.workflow, this.workflowVariables, state)
@@ -106,156 +89,17 @@
}
}
/**
* Execute from a specific block using cached outputs for upstream blocks.
*/
async executeFromBlock(
workflowId: string,
startBlockId: string,
sourceSnapshot: SerializableExecutionState
): Promise<ExecutionResult> {
// Build full DAG with all blocks to compute upstream set for snapshot filtering
// includeAllBlocks is needed because the startBlockId might be a trigger not reachable from the main trigger
const dag = this.dagBuilder.build(this.workflow, { includeAllBlocks: true })
const executedBlocks = new Set(sourceSnapshot.executedBlocks)
const validation = validateRunFromBlock(startBlockId, dag, executedBlocks)
if (!validation.valid) {
throw new Error(validation.error)
}
const { dirtySet, upstreamSet, reachableUpstreamSet } = computeExecutionSets(dag, startBlockId)
const effectiveStartBlockId = resolveContainerToSentinelStart(startBlockId, dag) ?? startBlockId
// Extract container IDs from sentinel IDs in reachable upstream set
// Use reachableUpstreamSet (not upstreamSet) to preserve sibling branch outputs
// Example: A->C, B->C where C references A.result || B.result
// When running from A, B's output should be preserved for C to reference
const reachableContainerIds = new Set<string>()
for (const nodeId of reachableUpstreamSet) {
const loopId = extractLoopIdFromSentinel(nodeId)
if (loopId) reachableContainerIds.add(loopId)
const parallelId = extractParallelIdFromSentinel(nodeId)
if (parallelId) reachableContainerIds.add(parallelId)
}
// Filter snapshot to include all blocks reachable from dirty blocks
// This preserves sibling branch outputs that dirty blocks may reference
const filteredBlockStates: Record<string, any> = {}
for (const [blockId, state] of Object.entries(sourceSnapshot.blockStates)) {
if (reachableUpstreamSet.has(blockId) || reachableContainerIds.has(blockId)) {
filteredBlockStates[blockId] = state
}
}
const filteredExecutedBlocks = sourceSnapshot.executedBlocks.filter(
(id) => reachableUpstreamSet.has(id) || reachableContainerIds.has(id)
)
// Filter loop/parallel executions to only include reachable containers
const filteredLoopExecutions: Record<string, any> = {}
if (sourceSnapshot.loopExecutions) {
for (const [loopId, execution] of Object.entries(sourceSnapshot.loopExecutions)) {
if (reachableContainerIds.has(loopId)) {
filteredLoopExecutions[loopId] = execution
}
}
}
const filteredParallelExecutions: Record<string, any> = {}
if (sourceSnapshot.parallelExecutions) {
for (const [parallelId, execution] of Object.entries(sourceSnapshot.parallelExecutions)) {
if (reachableContainerIds.has(parallelId)) {
filteredParallelExecutions[parallelId] = execution
}
}
}
const filteredSnapshot: SerializableExecutionState = {
...sourceSnapshot,
blockStates: filteredBlockStates,
executedBlocks: filteredExecutedBlocks,
loopExecutions: filteredLoopExecutions,
parallelExecutions: filteredParallelExecutions,
}
logger.info('Executing from block', {
workflowId,
startBlockId,
effectiveStartBlockId,
dirtySetSize: dirtySet.size,
upstreamSetSize: upstreamSet.size,
reachableUpstreamSetSize: reachableUpstreamSet.size,
})
// Remove incoming edges from non-dirty sources so convergent blocks don't wait for cached upstream
for (const nodeId of dirtySet) {
const node = dag.nodes.get(nodeId)
if (!node) continue
const nonDirtyIncoming: string[] = []
for (const sourceId of node.incomingEdges) {
if (!dirtySet.has(sourceId)) {
nonDirtyIncoming.push(sourceId)
}
}
for (const sourceId of nonDirtyIncoming) {
node.incomingEdges.delete(sourceId)
}
}
const runFromBlockContext = { startBlockId: effectiveStartBlockId, dirtySet }
const { context, state } = this.createExecutionContext(workflowId, undefined, {
snapshotState: filteredSnapshot,
runFromBlockContext,
})
const resolver = new VariableResolver(this.workflow, this.workflowVariables, state)
const loopOrchestrator = new LoopOrchestrator(dag, state, resolver)
loopOrchestrator.setContextExtensions(this.contextExtensions)
const parallelOrchestrator = new ParallelOrchestrator(dag, state)
parallelOrchestrator.setResolver(resolver)
parallelOrchestrator.setContextExtensions(this.contextExtensions)
const allHandlers = createBlockHandlers()
const blockExecutor = new BlockExecutor(allHandlers, resolver, this.contextExtensions, state)
const edgeManager = new EdgeManager(dag)
loopOrchestrator.setEdgeManager(edgeManager)
const nodeOrchestrator = new NodeExecutionOrchestrator(
dag,
state,
blockExecutor,
loopOrchestrator,
parallelOrchestrator
)
const engine = new ExecutionEngine(context, dag, edgeManager, nodeOrchestrator)
return await engine.run()
}
private createExecutionContext(
workflowId: string,
-triggerBlockId?: string,
-overrides?: {
-snapshotState?: SerializableExecutionState
-runFromBlockContext?: RunFromBlockContext
-}
+triggerBlockId?: string
): { context: ExecutionContext; state: ExecutionState } {
-const snapshotState = overrides?.snapshotState ?? this.contextExtensions.snapshotState
+const snapshotState = this.contextExtensions.snapshotState
const blockStates = snapshotState?.blockStates
? new Map(Object.entries(snapshotState.blockStates))
: new Map<string, BlockState>()
-let executedBlocks = snapshotState?.executedBlocks
+const executedBlocks = snapshotState?.executedBlocks
? new Set(snapshotState.executedBlocks)
: new Set<string>()
-if (overrides?.runFromBlockContext) {
-const { dirtySet } = overrides.runFromBlockContext
-executedBlocks = new Set([...executedBlocks].filter((id) => !dirtySet.has(id)))
-logger.info('Cleared executed status for dirty blocks', {
-dirtySetSize: dirtySet.size,
-remainingExecutedBlocks: executedBlocks.size,
-})
-}
const state = new ExecutionState(blockStates, executedBlocks)
const context: ExecutionContext = {
@@ -265,7 +109,7 @@ export class DAGExecutor {
userId: this.contextExtensions.userId,
isDeployedContext: this.contextExtensions.isDeployedContext,
blockStates: state.getBlockStates(),
-blockLogs: overrides?.runFromBlockContext ? [] : (snapshotState?.blockLogs ?? []),
+blockLogs: snapshotState?.blockLogs ?? [],
metadata: {
...this.contextExtensions.metadata,
startTime: new Date().toISOString(),
@@ -325,8 +169,6 @@
abortSignal: this.contextExtensions.abortSignal,
includeFileBase64: this.contextExtensions.includeFileBase64,
base64MaxBytes: this.contextExtensions.base64MaxBytes,
-runFromBlockContext: overrides?.runFromBlockContext,
-stopAfterBlockId: this.contextExtensions.stopAfterBlockId,
}
if (this.contextExtensions.resumeFromSnapshot) {
@@ -351,15 +193,6 @@
pendingBlocks: context.metadata.pendingBlocks,
skipStarterBlockInit: true,
})
-} else if (overrides?.runFromBlockContext) {
-// In run-from-block mode, initialize the start block only if it's a regular block
-// Skip for sentinels/containers (loop/parallel) which aren't real blocks
-const startBlockId = overrides.runFromBlockContext.startBlockId
-const isRegularBlock = this.workflow.blocks.some((b) => b.id === startBlockId)
-if (isRegularBlock) {
-this.initializeStarterBlock(context, state, startBlockId)
-}
} else {
this.initializeStarterBlock(context, state, triggerBlockId)
}
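
The run-from-block wiring removed above leans on a dirty/upstream split that is easier to see on a toy graph. A hedged sketch with plain objects (not the executor's real DAG types), using the A -> C, B -> C shape from the removed comments:

const edges: Record<string, string[]> = { A: ['C'], B: ['C'], C: ['D'], D: [] }

// Downstream BFS from the start block yields the "dirty" set: blocks that must re-run.
function downstream(start: string): Set<string> {
  const dirty = new Set<string>([start])
  const queue = [start]
  while (queue.length > 0) {
    const node = queue.shift()!
    for (const next of edges[node] ?? []) {
      if (!dirty.has(next)) {
        dirty.add(next)
        queue.push(next)
      }
    }
  }
  return dirty
}

downstream('A') // => Set { 'A', 'C', 'D' }
// B stays out of the dirty set, so C can still reference B's cached output from the
// snapshot - which is why sibling-branch outputs were preserved when filtering it.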

View File

@@ -27,8 +27,6 @@ export interface ParallelScope {
items?: any[]
/** Error message if parallel validation failed (e.g., exceeded max branches) */
validationError?: string
-/** Whether the parallel has an empty distribution and should be skipped */
-isEmpty?: boolean
}
export class ExecutionState implements BlockStateController {

View File

@@ -1,6 +1,5 @@
import type { Edge } from 'reactflow'
import type { BlockLog, BlockState, NormalizedBlockOutput } from '@/executor/types'
-import type { RunFromBlockContext } from '@/executor/utils/run-from-block'
import type { SubflowType } from '@/stores/workflows/workflow/types'
export interface ExecutionMetadata {
@@ -106,17 +105,6 @@ export interface ContextExtensions {
output: { input?: any; output: NormalizedBlockOutput; executionTime: number },
iterationContext?: IterationContext
) => Promise<void>
-/**
-* Run-from-block configuration. When provided, executor runs in partial
-* execution mode starting from the specified block.
-*/
-runFromBlockContext?: RunFromBlockContext
-/**
-* Stop execution after this block completes. Used for "run until block" feature.
-*/
-stopAfterBlockId?: string
}
export interface WorkflowInput {

View File

@@ -118,7 +118,7 @@ describe('WorkflowBlockHandler', () => {
}
await expect(handler.execute(deepContext, mockBlock, inputs)).rejects.toThrow(
-'"child-workflow-id" failed: Maximum workflow nesting depth of 10 exceeded'
+'Error in child workflow "child-workflow-id": Maximum workflow nesting depth of 10 exceeded'
)
})
@@ -132,7 +132,7 @@
})
await expect(handler.execute(mockContext, mockBlock, inputs)).rejects.toThrow(
-'"non-existent-workflow" failed: Child workflow non-existent-workflow not found'
+'Error in child workflow "non-existent-workflow": Child workflow non-existent-workflow not found'
)
})
@@ -142,7 +142,7 @@
mockFetch.mockRejectedValueOnce(new Error('Network error'))
await expect(handler.execute(mockContext, mockBlock, inputs)).rejects.toThrow(
-'"child-workflow-id" failed: Network error'
+'Error in child workflow "child-workflow-id": Network error'
)
})
})
@@ -212,7 +212,7 @@
expect(() =>
(handler as any).mapChildOutputToParent(childResult, 'child-id', 'Child Workflow', 100)
-).toThrow('"Child Workflow" failed: Child workflow failed')
+).toThrow('Error in child workflow "Child Workflow": Child workflow failed')
try {
;(handler as any).mapChildOutputToParent(childResult, 'child-id', 'Child Workflow', 100)

View File

@@ -52,11 +52,6 @@ export class WorkflowBlockHandler implements BlockHandler {
throw new Error('No workflow selected for execution')
}
-// Initialize with registry name, will be updated with loaded workflow name
-const { workflows } = useWorkflowRegistry.getState()
-const workflowMetadata = workflows[workflowId]
-let childWorkflowName = workflowMetadata?.name || workflowId
try {
const currentDepth = (ctx.workflowId?.split('_sub_').length || 1) - 1
if (currentDepth >= DEFAULTS.MAX_WORKFLOW_DEPTH) {
@@ -80,8 +75,9 @@
throw new Error(`Child workflow ${workflowId} not found`)
}
-// Update with loaded workflow name (more reliable than registry)
-childWorkflowName = workflowMetadata?.name || childWorkflow.name || 'Unknown Workflow'
+const { workflows } = useWorkflowRegistry.getState()
+const workflowMetadata = workflows[workflowId]
+const childWorkflowName = workflowMetadata?.name || childWorkflow.name || 'Unknown Workflow'
logger.info(
`Executing child workflow: ${childWorkflowName} (${workflowId}) at depth ${currentDepth}`
@@ -146,6 +142,11 @@
} catch (error: unknown) {
logger.error(`Error executing child workflow ${workflowId}:`, error)
+const { workflows } = useWorkflowRegistry.getState()
+const workflowMetadata = workflows[workflowId]
+const childWorkflowName = workflowMetadata?.name || workflowId
+const originalError = error instanceof Error ? error.message : 'Unknown error'
let childTraceSpans: WorkflowTraceSpan[] = []
let executionResult: ExecutionResult | undefined
@@ -164,11 +165,8 @@
childTraceSpans = error.childTraceSpans
}
-// Build a cleaner error message for nested workflow errors
-const errorMessage = this.buildNestedWorkflowErrorMessage(childWorkflowName, error)
throw new ChildWorkflowError({
-message: errorMessage,
+message: `Error in child workflow "${childWorkflowName}": ${originalError}`,
childWorkflowName,
childTraceSpans,
executionResult,
@@ -177,72 +175,6 @@
}
}
/**
* Builds a cleaner error message for nested workflow errors.
* Parses nested error messages to extract workflow chain and root error.
*/
private buildNestedWorkflowErrorMessage(childWorkflowName: string, error: unknown): string {
const originalError = error instanceof Error ? error.message : 'Unknown error'
// Extract any nested workflow names from the error message
const { chain, rootError } = this.parseNestedWorkflowError(originalError)
// Add current workflow to the beginning of the chain
chain.unshift(childWorkflowName)
// If we have a chain (nested workflows), format nicely
if (chain.length > 1) {
return `Workflow chain: ${chain.join(' → ')} | ${rootError}`
}
// Single workflow failure
return `"${childWorkflowName}" failed: ${rootError}`
}
/**
* Parses a potentially nested workflow error message to extract:
* - The chain of workflow names
* - The actual root error message (preserving the block prefix for the failing block)
*
* Handles formats like:
* - "workflow-name" failed: error
* - [block_type] Block Name: "workflow-name" failed: error
* - Workflow chain: A → B | error
*/
private parseNestedWorkflowError(message: string): { chain: string[]; rootError: string } {
const chain: string[] = []
const remaining = message
// First, check if it's already in chain format
const chainMatch = remaining.match(/^Workflow chain: (.+?) \| (.+)$/)
if (chainMatch) {
const chainPart = chainMatch[1]
const errorPart = chainMatch[2]
chain.push(...chainPart.split(' → ').map((s) => s.trim()))
return { chain, rootError: errorPart }
}
// Extract workflow names from patterns like:
// - "workflow-name" failed:
// - [block_type] Block Name: "workflow-name" failed:
const workflowPattern = /(?:\[[^\]]+\]\s*[^:]+:\s*)?"([^"]+)"\s*failed:\s*/g
let match: RegExpExecArray | null
let lastIndex = 0
match = workflowPattern.exec(remaining)
while (match !== null) {
chain.push(match[1])
lastIndex = match.index + match[0].length
match = workflowPattern.exec(remaining)
}
// The root error is everything after the last match
// Keep the block prefix (e.g., [function] Function 1:) so we know which block failed
const rootError = lastIndex > 0 ? remaining.slice(lastIndex) : remaining
return { chain, rootError: rootError.trim() || 'Unknown error' }
}
private async loadChildWorkflow(workflowId: string) {
const headers = await buildAuthHeaders()
const url = buildAPIUrl(`/api/workflows/${workflowId}`)
@@ -512,7 +444,7 @@
if (!success) {
logger.warn(`Child workflow ${childWorkflowName} failed`)
throw new ChildWorkflowError({
-message: `"${childWorkflowName}" failed: ${childResult.error || 'Child workflow execution failed'}`,
+message: `Error in child workflow "${childWorkflowName}": ${childResult.error || 'Child workflow execution failed'}`,
childWorkflowName,
childTraceSpans: childTraceSpans || [],
})
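
The buildNestedWorkflowErrorMessage/parseNestedWorkflowError pair removed above collapses nested child-workflow failures into a chain. A simplified, hypothetical stand-in (not the handler's actual code) showing the two formats described in its JSDoc:

function buildChainedMessage(childName: string, innerMessage: string): string {
  // Already chained: prepend this workflow to the existing chain.
  const chained = innerMessage.match(/^Workflow chain: (.+?) \| (.+)$/)
  if (chained) {
    return `Workflow chain: ${childName} → ${chained[1]} | ${chained[2]}`
  }
  // Single nested failure: start a chain from the two workflow names.
  const single = innerMessage.match(/^"([^"]+)" failed: (.+)$/)
  if (single) {
    return `Workflow chain: ${childName} → ${single[1]} | ${single[2]}`
  }
  // Plain error: single-workflow format.
  return `"${childName}" failed: ${innerMessage}`
}

buildChainedMessage('Parent Flow', 'Timeout while calling API')
// => '"Parent Flow" failed: Timeout while calling API'
buildChainedMessage('Grandparent Flow', '"Parent Flow" failed: Timeout while calling API')
// => 'Workflow chain: Grandparent Flow → Parent Flow | Timeout while calling API'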

View File

@@ -276,16 +276,7 @@ export class LoopOrchestrator {
scope: LoopScope
): LoopContinuationResult {
const results = scope.allIterationOutputs
-const output = { results }
-this.state.setBlockOutput(loopId, output, DEFAULTS.EXECUTION_TIME)
-// Emit onBlockComplete for the loop container so the UI can track it
-if (this.contextExtensions?.onBlockComplete) {
-this.contextExtensions.onBlockComplete(loopId, 'Loop', 'loop', {
-output,
-executionTime: DEFAULTS.EXECUTION_TIME,
-})
-}
+this.state.setBlockOutput(loopId, { results }, DEFAULTS.EXECUTION_TIME)
return {
shouldContinue: false,
@@ -395,10 +386,10 @@
return true
}
+// forEach: skip if items array is empty
if (scope.loopType === 'forEach') {
if (!scope.items || scope.items.length === 0) {
-logger.info('ForEach loop has empty collection, skipping loop body', { loopId })
-this.state.setBlockOutput(loopId, { results: [] }, DEFAULTS.EXECUTION_TIME)
+logger.info('ForEach loop has empty items, skipping loop body', { loopId })
return false
}
return true
@@ -408,8 +399,6 @@
if (scope.loopType === 'for') {
if (scope.maxIterations === 0) {
logger.info('For loop has 0 iterations, skipping loop body', { loopId })
-// Set empty output for the loop
-this.state.setBlockOutput(loopId, { results: [] }, DEFAULTS.EXECUTION_TIME)
return false
}
return true

View File

@@ -31,18 +31,7 @@ export class NodeExecutionOrchestrator {
throw new Error(`Node not found in DAG: ${nodeId}`)
}
-if (ctx.runFromBlockContext && !ctx.runFromBlockContext.dirtySet.has(nodeId)) {
-const cachedOutput = this.state.getBlockOutput(nodeId) || {}
-logger.debug('Skipping non-dirty block in run-from-block mode', { nodeId })
-return {
-nodeId,
-output: cachedOutput,
-isFinalOutput: false,
-}
-}
-const isDirtyBlock = ctx.runFromBlockContext?.dirtySet.has(nodeId) ?? false
-if (!isDirtyBlock && this.state.hasExecuted(nodeId)) {
+if (this.state.hasExecuted(nodeId)) {
const output = this.state.getBlockOutput(nodeId) || {}
return {
nodeId,
@@ -108,7 +97,7 @@
if (loopId) {
const shouldExecute = await this.loopOrchestrator.evaluateInitialCondition(ctx, loopId)
if (!shouldExecute) {
-logger.info('Loop initial condition false, skipping loop body', { loopId })
+logger.info('While loop initial condition false, skipping loop body', { loopId })
return {
sentinelStart: true,
shouldExit: true,
@@ -169,17 +158,6 @@
this.parallelOrchestrator.initializeParallelScope(ctx, parallelId, nodesInParallel)
}
}
-const scope = this.parallelOrchestrator.getParallelScope(ctx, parallelId)
-if (scope?.isEmpty) {
-logger.info('Parallel has empty distribution, skipping parallel body', { parallelId })
-return {
-sentinelStart: true,
-shouldExit: true,
-selectedRoute: EDGE.PARALLEL_EXIT,
-}
-}
return { sentinelStart: true }
} }

View File

@@ -61,13 +61,11 @@ export class ParallelOrchestrator {
let items: any[] | undefined
let branchCount: number
-let isEmpty = false
try {
-const resolved = this.resolveBranchCount(ctx, parallelConfig, parallelId)
+const resolved = this.resolveBranchCount(ctx, parallelConfig)
branchCount = resolved.branchCount
items = resolved.items
-isEmpty = resolved.isEmpty ?? false
} catch (error) {
const errorMessage = `Parallel Items did not resolve: ${error instanceof Error ? error.message : String(error)}`
logger.error(errorMessage, { parallelId, distribution: parallelConfig.distribution })
// Handle empty distribution - skip parallel body
if (isEmpty || branchCount === 0) {
const scope: ParallelScope = {
parallelId,
totalBranches: 0,
branchOutputs: new Map(),
completedCount: 0,
totalExpectedNodes: 0,
items: [],
isEmpty: true,
}
if (!ctx.parallelExecutions) {
ctx.parallelExecutions = new Map()
}
ctx.parallelExecutions.set(parallelId, scope)
// Set empty output for the parallel
this.state.setBlockOutput(parallelId, { results: [] })
logger.info('Parallel scope initialized with empty distribution, skipping body', {
parallelId,
branchCount: 0,
})
return scope
}
const { entryNodes } = this.expander.expandParallel(this.dag, parallelId, branchCount, items)
const scope: ParallelScope = {
@@ -157,17 +127,15 @@
private resolveBranchCount(
ctx: ExecutionContext,
-config: SerializedParallel,
-parallelId: string
-): { branchCount: number; items?: any[]; isEmpty?: boolean } {
+config: SerializedParallel
+): { branchCount: number; items?: any[] } {
if (config.parallelType === 'count') {
return { branchCount: config.count ?? 1 }
}
const items = this.resolveDistributionItems(ctx, config)
if (items.length === 0) {
-logger.info('Parallel has empty distribution, skipping parallel body', { parallelId })
-return { branchCount: 0, items: [], isEmpty: true }
+return { branchCount: config.count ?? 1 }
}
return { branchCount: items.length, items }
@@ -260,17 +228,9 @@
const branchOutputs = scope.branchOutputs.get(i) || []
results.push(branchOutputs)
}
-const output = { results }
-this.state.setBlockOutput(parallelId, output)
-// Emit onBlockComplete for the parallel container so the UI can track it
-if (this.contextExtensions?.onBlockComplete) {
-this.contextExtensions.onBlockComplete(parallelId, 'Parallel', 'parallel', {
-output,
-executionTime: 0,
-})
-}
+this.state.setBlockOutput(parallelId, {
+results,
+})
return {
allBranchesComplete: true,
results,
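
For quick reference, a hedged sketch of the branch-count resolution contrasted above; the config shape is simplified and 'collection' is an assumed label for the non-count parallel type:

interface ParallelConfigSketch {
  parallelType: 'count' | 'collection'
  count?: number
  items?: unknown[]
}

function resolveBranchCountSketch(config: ParallelConfigSketch): { branchCount: number; items?: unknown[] } {
  // Count-based parallels fan out a fixed number of branches.
  if (config.parallelType === 'count') {
    return { branchCount: config.count ?? 1 }
  }
  // Collection-based parallels fan out one branch per distributed item; an empty
  // collection falls back to the configured count, as on the plus side of the hunk above.
  const items = config.items ?? []
  if (items.length === 0) {
    return { branchCount: config.count ?? 1 }
  }
  return { branchCount: items.length, items }
}

resolveBranchCountSketch({ parallelType: 'count', count: 3 }) // => { branchCount: 3 }
resolveBranchCountSketch({ parallelType: 'collection', items: ['a', 'b'] }) // => { branchCount: 2, items: ['a', 'b'] }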

View File

@@ -1,7 +1,6 @@
import type { TraceSpan } from '@/lib/logs/types'
import type { PermissionGroupConfig } from '@/lib/permission-groups/types'
import type { BlockOutput } from '@/blocks/types'
-import type { RunFromBlockContext } from '@/executor/utils/run-from-block'
import type { SerializedBlock, SerializedWorkflow } from '@/serializer/types'
export interface UserFile {
@@ -114,12 +113,6 @@ export interface BlockLog {
loopId?: string
parallelId?: string
iterationIndex?: number
-/**
-* Child workflow trace spans for nested workflow execution.
-* Stored separately from output to keep output clean for display
-* while preserving data for trace-spans processing.
-*/
-childTraceSpans?: TraceSpan[]
}
export interface ExecutionMetadata {
@@ -257,17 +250,6 @@ export interface ExecutionContext {
* will not have their base64 content fetched.
*/
base64MaxBytes?: number
-/**
-* Context for "run from block" mode. When present, only blocks in dirtySet
-* will be executed; others return cached outputs from the source snapshot.
-*/
-runFromBlockContext?: RunFromBlockContext
-/**
-* Stop execution after this block completes. Used for "run until block" feature.
-*/
-stopAfterBlockId?: string
}
export interface ExecutionResult {

View File

@@ -1,4 +1,3 @@
-import { filterHiddenOutputKeys } from '@/lib/logs/execution/trace-spans/trace-spans'
import { getBlock } from '@/blocks'
import { isHiddenFromDisplay } from '@/blocks/types'
import { isTriggerBehavior, isTriggerInternalKey } from '@/executor/constants'
@@ -8,7 +7,6 @@ import type { SerializedBlock } from '@/serializer/types'
/**
* Filters block output for logging/display purposes.
* Removes internal fields and fields marked with hiddenFromDisplay.
-* Also recursively filters globally hidden keys from nested objects.
*
* @param blockType - The block type string (e.g., 'human_in_the_loop', 'workflow')
* @param output - The raw block output to filter
@@ -46,8 +44,7 @@
continue
}
-// Recursively filter globally hidden keys from nested objects
-filtered[key] = filterHiddenOutputKeys(value)
+filtered[key] = value

return filtered

File diff suppressed because it is too large

View File

@@ -1,219 +0,0 @@
import { LOOP, PARALLEL } from '@/executor/constants'
import type { DAG } from '@/executor/dag/builder'
/**
* Builds the sentinel-start node ID for a loop.
*/
function buildLoopSentinelStartId(loopId: string): string {
return `${LOOP.SENTINEL.PREFIX}${loopId}${LOOP.SENTINEL.START_SUFFIX}`
}
/**
* Builds the sentinel-start node ID for a parallel.
*/
function buildParallelSentinelStartId(parallelId: string): string {
return `${PARALLEL.SENTINEL.PREFIX}${parallelId}${PARALLEL.SENTINEL.START_SUFFIX}`
}
/**
* Checks if a block ID is a loop or parallel container and returns the sentinel-start ID if so.
* Returns null if the block is not a container.
*/
export function resolveContainerToSentinelStart(blockId: string, dag: DAG): string | null {
if (dag.loopConfigs.has(blockId)) {
return buildLoopSentinelStartId(blockId)
}
if (dag.parallelConfigs.has(blockId)) {
return buildParallelSentinelStartId(blockId)
}
return null
}
/**
* Result of validating a block for run-from-block execution.
*/
export interface RunFromBlockValidation {
valid: boolean
error?: string
}
/**
* Context for run-from-block execution mode.
*/
export interface RunFromBlockContext {
/** The block ID to start execution from */
startBlockId: string
/** Set of block IDs that need re-execution (start block + all downstream) */
dirtySet: Set<string>
}
/**
* Result of computing execution sets for run-from-block mode.
*/
export interface ExecutionSets {
/** Blocks that need re-execution (start block + all downstream) */
dirtySet: Set<string>
/** Blocks that are upstream (ancestors) of the start block */
upstreamSet: Set<string>
/** Blocks that are upstream of any dirty block (for snapshot preservation) */
reachableUpstreamSet: Set<string>
}
/**
* Computes the dirty set, upstream set, and reachable upstream set.
* - Dirty set: start block + all blocks reachable via outgoing edges (need re-execution)
* - Upstream set: all blocks reachable via incoming edges from the start block
* - Reachable upstream set: all non-dirty blocks that are upstream of ANY dirty block
* (includes sibling branches that dirty blocks may reference)
*
* For loop/parallel containers, starts from the sentinel-start node and includes
* the container ID itself in the dirty set.
*
* @param dag - The workflow DAG
* @param startBlockId - The block to start execution from
* @returns Object containing dirtySet, upstreamSet, and reachableUpstreamSet
*/
export function computeExecutionSets(dag: DAG, startBlockId: string): ExecutionSets {
const dirty = new Set<string>([startBlockId])
const upstream = new Set<string>()
const sentinelStartId = resolveContainerToSentinelStart(startBlockId, dag)
const traversalStartId = sentinelStartId ?? startBlockId
if (sentinelStartId) {
dirty.add(sentinelStartId)
}
// BFS downstream for dirty set
const downstreamQueue = [traversalStartId]
while (downstreamQueue.length > 0) {
const nodeId = downstreamQueue.shift()!
const node = dag.nodes.get(nodeId)
if (!node) continue
for (const [, edge] of node.outgoingEdges) {
if (!dirty.has(edge.target)) {
dirty.add(edge.target)
downstreamQueue.push(edge.target)
}
}
}
// BFS upstream from start block for upstream set
const upstreamQueue = [traversalStartId]
while (upstreamQueue.length > 0) {
const nodeId = upstreamQueue.shift()!
const node = dag.nodes.get(nodeId)
if (!node) continue
for (const sourceId of node.incomingEdges) {
if (!upstream.has(sourceId)) {
upstream.add(sourceId)
upstreamQueue.push(sourceId)
}
}
}
// Compute reachable upstream: all non-dirty blocks upstream of ANY dirty block
// This handles the case where a dirty block (like C in A->C, B->C) may reference
// sibling branches (like B when running from A)
const reachableUpstream = new Set<string>()
for (const dirtyNodeId of dirty) {
const node = dag.nodes.get(dirtyNodeId)
if (!node) continue
// BFS upstream from this dirty node
const queue = [...node.incomingEdges]
while (queue.length > 0) {
const sourceId = queue.shift()!
if (reachableUpstream.has(sourceId) || dirty.has(sourceId)) continue
reachableUpstream.add(sourceId)
const sourceNode = dag.nodes.get(sourceId)
if (sourceNode) {
queue.push(...sourceNode.incomingEdges)
}
}
}
return { dirtySet: dirty, upstreamSet: upstream, reachableUpstreamSet: reachableUpstream }
}
/**
* Validates that a block can be used as a run-from-block starting point.
*
* Validation rules:
* - Block must exist in the DAG (or be a loop/parallel container)
* - Block cannot be inside a loop (but loop containers are allowed)
* - Block cannot be inside a parallel (but parallel containers are allowed)
* - Block cannot be a sentinel node
* - All upstream dependencies must have been executed (have cached outputs)
*
* @param blockId - The block ID to validate
* @param dag - The workflow DAG
* @param executedBlocks - Set of blocks that were executed in the source run
* @returns Validation result with error message if invalid
*/
export function validateRunFromBlock(
blockId: string,
dag: DAG,
executedBlocks: Set<string>
): RunFromBlockValidation {
const node = dag.nodes.get(blockId)
const isLoopContainer = dag.loopConfigs.has(blockId)
const isParallelContainer = dag.parallelConfigs.has(blockId)
const isContainer = isLoopContainer || isParallelContainer
if (!node && !isContainer) {
return { valid: false, error: `Block not found in workflow: ${blockId}` }
}
if (isContainer) {
const sentinelStartId = resolveContainerToSentinelStart(blockId, dag)
if (!sentinelStartId || !dag.nodes.has(sentinelStartId)) {
return {
valid: false,
error: `Container sentinel not found for: ${blockId}`,
}
}
}
if (node) {
if (node.metadata.isLoopNode) {
return {
valid: false,
error: `Cannot run from block inside loop: ${node.metadata.loopId}`,
}
}
if (node.metadata.isParallelBranch) {
return {
valid: false,
error: `Cannot run from block inside parallel: ${node.metadata.parallelId}`,
}
}
if (node.metadata.isSentinel) {
return { valid: false, error: 'Cannot run from sentinel node' }
}
// Check immediate upstream dependencies were executed
for (const sourceId of node.incomingEdges) {
const sourceNode = dag.nodes.get(sourceId)
// Skip sentinel nodes - they're internal and not in executedBlocks
if (sourceNode?.metadata.isSentinel) continue
// Skip trigger nodes - they're entry points and don't need prior execution
// A trigger node has no incoming edges
if (sourceNode && sourceNode.incomingEdges.size === 0) continue
if (!executedBlocks.has(sourceId)) {
return {
valid: false,
error: `Upstream dependency not executed: ${sourceId}`,
}
}
}
}
return { valid: true }
}
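The dirty-set computation above is a plain BFS over outgoing edges. A self-contained sketch on a toy adjacency map (not the actual DAG type) shows the same idea:

// Toy graph standing in for the DAG's outgoing edges: A -> B -> D, A -> C -> D.
const outgoing = new Map<string, string[]>([
  ['A', ['B', 'C']],
  ['B', ['D']],
  ['C', ['D']],
  ['D', []],
])

// Same idea as the dirty-set BFS above: the start block plus everything downstream.
function dirtySetFrom(start: string): Set<string> {
  const dirty = new Set<string>([start])
  const queue = [start]
  while (queue.length > 0) {
    const node = queue.shift()!
    for (const target of outgoing.get(node) ?? []) {
      if (!dirty.has(target)) {
        dirty.add(target)
        queue.push(target)
      }
    }
  }
  return dirty
}

// dirtySetFrom('C') -> Set { 'C', 'D' }; 'A' and 'B' stay upstream and keep their cached outputs.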

View File

@@ -2,7 +2,6 @@ import { createLogger } from '@sim/logger'
import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query'
import type { WorkflowDeploymentVersionResponse } from '@/lib/workflows/persistence/utils'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { fetchDeploymentVersionState } from './workflows'
const logger = createLogger('DeploymentQueries')
@@ -349,173 +348,6 @@ export function useUndeployWorkflow() {
})
}
/**
* Variables for update deployment version mutation
*/
interface UpdateDeploymentVersionVariables {
workflowId: string
version: number
name?: string
description?: string | null
}
/**
* Response from update deployment version mutation
*/
interface UpdateDeploymentVersionResult {
name: string | null
description: string | null
}
/**
* Mutation hook for updating a deployment version's name or description.
* Invalidates versions query on success.
*/
export function useUpdateDeploymentVersion() {
const queryClient = useQueryClient()
return useMutation({
mutationFn: async ({
workflowId,
version,
name,
description,
}: UpdateDeploymentVersionVariables): Promise<UpdateDeploymentVersionResult> => {
const response = await fetch(`/api/workflows/${workflowId}/deployments/${version}`, {
method: 'PATCH',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({ name, description }),
})
if (!response.ok) {
const errorData = await response.json()
throw new Error(errorData.error || 'Failed to update deployment version')
}
return response.json()
},
onSuccess: (_, variables) => {
logger.info('Deployment version updated', {
workflowId: variables.workflowId,
version: variables.version,
})
queryClient.invalidateQueries({
queryKey: deploymentKeys.versions(variables.workflowId),
})
},
onError: (error) => {
logger.error('Failed to update deployment version', { error })
},
})
}
/**
* Variables for generating a version description
*/
interface GenerateVersionDescriptionVariables {
workflowId: string
version: number
onStreamChunk?: (accumulated: string) => void
}
const VERSION_DESCRIPTION_SYSTEM_PROMPT = `You are a technical writer generating concise deployment version descriptions.
Given a diff of changes between two workflow versions, write a brief, factual description (1-2 sentences, under 300 characters) that states ONLY what changed.
RULES:
- State specific values when provided (e.g. "model changed from X to Y")
- Do NOT wrap your response in quotes
- Do NOT add filler phrases like "streamlining the workflow", "for improved efficiency"
- Do NOT use markdown formatting
- Do NOT include version numbers
- Do NOT start with "This version" or similar phrases
Good examples:
- Changes model in Agent 1 from gpt-4o to claude-sonnet-4-20250514.
- Adds Slack notification block. Updates webhook URL to production endpoint.
- Removes Function block and its connection to Router.
Bad examples:
- "Changes model..." (NO - don't wrap in quotes)
- Changes model, streamlining the workflow. (NO - don't add filler)
Respond with ONLY the plain text description.`
/**
* Hook for generating a version description using AI based on workflow diff
*/
export function useGenerateVersionDescription() {
return useMutation({
mutationFn: async ({
workflowId,
version,
onStreamChunk,
}: GenerateVersionDescriptionVariables): Promise<string> => {
const { generateWorkflowDiffSummary, formatDiffSummaryForDescription } = await import(
'@/lib/workflows/comparison/compare'
)
const currentState = await fetchDeploymentVersionState(workflowId, version)
let previousState = null
if (version > 1) {
try {
previousState = await fetchDeploymentVersionState(workflowId, version - 1)
} catch {
// Previous version may not exist, continue without it
}
}
const diffSummary = generateWorkflowDiffSummary(currentState, previousState)
const diffText = formatDiffSummaryForDescription(diffSummary)
const wandResponse = await fetch('/api/wand', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'Cache-Control': 'no-cache, no-transform',
},
body: JSON.stringify({
prompt: `Generate a deployment version description based on these changes:\n\n${diffText}`,
systemPrompt: VERSION_DESCRIPTION_SYSTEM_PROMPT,
stream: true,
workflowId,
}),
cache: 'no-store',
})
if (!wandResponse.ok) {
const errorText = await wandResponse.text()
throw new Error(errorText || 'Failed to generate description')
}
if (!wandResponse.body) {
throw new Error('Response body is null')
}
const { readSSEStream } = await import('@/lib/core/utils/sse')
const accumulatedContent = await readSSEStream(wandResponse.body, {
onAccumulated: onStreamChunk,
})
if (!accumulatedContent) {
throw new Error('Failed to generate description')
}
return accumulatedContent.trim()
},
onSuccess: (content) => {
logger.info('Generated version description', { length: content.length })
},
onError: (error) => {
logger.error('Failed to generate version description', { error })
},
})
}
/**
* Variables for activate version mutation
*/
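For context, the useUpdateDeploymentVersion hook above would be consumed roughly as follows from a React component (the import path and helper name are assumptions, not from this diff):

// Sketch only: wrapping the mutation hook in a small helper hook.
import { useUpdateDeploymentVersion } from '@/hooks/queries/deployments' // assumed path

function useRenameDeployment(workflowId: string, version: number) {
  const updateVersion = useUpdateDeploymentVersion()
  return (name: string) =>
    updateVersion.mutate(
      { workflowId, version, name, description: null },
      { onSuccess: () => console.log('Deployment version renamed') }
    )
}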

View File

@@ -411,11 +411,7 @@ interface DeploymentVersionStateResponse {
deployedState: WorkflowState
}
/**
* Fetches the deployed state for a specific deployment version.
* Exported for reuse in other query hooks.
*/
export async function fetchDeploymentVersionState(
async function fetchDeploymentVersionState(
workflowId: string,
version: number
): Promise<WorkflowState> {

View File

@@ -1,85 +1,10 @@
import { useCallback, useRef } from 'react'
import { createLogger } from '@sim/logger'
import type { ExecutionEvent } from '@/lib/workflows/executor/execution-events'
import type { SerializableExecutionState } from '@/executor/execution/types'
import type { SubflowType } from '@/stores/workflows/workflow/types'
const logger = createLogger('useExecutionStream')
/**
* Processes SSE events from a response body and invokes appropriate callbacks.
*/
async function processSSEStream(
reader: ReadableStreamDefaultReader<Uint8Array>,
callbacks: ExecutionStreamCallbacks,
logPrefix: string
): Promise<void> {
const decoder = new TextDecoder()
let buffer = ''
try {
while (true) {
const { done, value } = await reader.read()
if (done) break
buffer += decoder.decode(value, { stream: true })
const lines = buffer.split('\n\n')
buffer = lines.pop() || ''
for (const line of lines) {
if (!line.trim() || !line.startsWith('data: ')) continue
const data = line.substring(6).trim()
if (data === '[DONE]') {
logger.info(`${logPrefix} stream completed`)
continue
}
try {
const event = JSON.parse(data) as ExecutionEvent
switch (event.type) {
case 'execution:started':
callbacks.onExecutionStarted?.(event.data)
break
case 'execution:completed':
callbacks.onExecutionCompleted?.(event.data)
break
case 'execution:error':
callbacks.onExecutionError?.(event.data)
break
case 'execution:cancelled':
callbacks.onExecutionCancelled?.(event.data)
break
case 'block:started':
callbacks.onBlockStarted?.(event.data)
break
case 'block:completed':
callbacks.onBlockCompleted?.(event.data)
break
case 'block:error':
callbacks.onBlockError?.(event.data)
break
case 'stream:chunk':
callbacks.onStreamChunk?.(event.data)
break
case 'stream:done':
callbacks.onStreamDone?.(event.data)
break
default:
logger.warn('Unknown event type:', (event as any).type)
}
} catch (error) {
logger.error('Failed to parse SSE event:', error, { data })
}
}
}
} finally {
reader.releaseLock()
}
}
export interface ExecutionStreamCallbacks {
onExecutionStarted?: (data: { startTime: string }) => void
onExecutionCompleted?: (data: {
@@ -143,15 +68,6 @@ export interface ExecuteStreamOptions {
loops?: Record<string, any>
parallels?: Record<string, any>
}
stopAfterBlockId?: string
callbacks?: ExecutionStreamCallbacks
}
export interface ExecuteFromBlockOptions {
workflowId: string
startBlockId: string
sourceSnapshot: SerializableExecutionState
input?: any
callbacks?: ExecutionStreamCallbacks
}
@@ -203,7 +119,91 @@ export function useExecutionStream() {
}
const reader = response.body.getReader()
await processSSEStream(reader, callbacks, 'Execution')
const decoder = new TextDecoder()
let buffer = ''
try {
while (true) {
const { done, value } = await reader.read()
if (done) {
break
}
buffer += decoder.decode(value, { stream: true })
const lines = buffer.split('\n\n')
buffer = lines.pop() || ''
for (const line of lines) {
if (!line.trim() || !line.startsWith('data: ')) {
continue
}
const data = line.substring(6).trim()
if (data === '[DONE]') {
logger.info('Stream completed')
continue
}
try {
const event = JSON.parse(data) as ExecutionEvent
logger.info('📡 SSE Event received:', {
type: event.type,
executionId: event.executionId,
data: event.data,
})
switch (event.type) {
case 'execution:started':
logger.info('🚀 Execution started')
callbacks.onExecutionStarted?.(event.data)
break
case 'execution:completed':
logger.info('✅ Execution completed')
callbacks.onExecutionCompleted?.(event.data)
break
case 'execution:error':
logger.error('❌ Execution error')
callbacks.onExecutionError?.(event.data)
break
case 'execution:cancelled':
logger.warn('🛑 Execution cancelled')
callbacks.onExecutionCancelled?.(event.data)
break
case 'block:started':
logger.info('🔷 Block started:', event.data.blockId)
callbacks.onBlockStarted?.(event.data)
break
case 'block:completed':
logger.info('✓ Block completed:', event.data.blockId)
callbacks.onBlockCompleted?.(event.data)
break
case 'block:error':
logger.error('✗ Block error:', event.data.blockId)
callbacks.onBlockError?.(event.data)
break
case 'stream:chunk':
callbacks.onStreamChunk?.(event.data)
break
case 'stream:done':
logger.info('Stream done:', event.data.blockId)
callbacks.onStreamDone?.(event.data)
break
default:
logger.warn('Unknown event type:', (event as any).type)
}
} catch (error) {
logger.error('Failed to parse SSE event:', error, { data })
}
}
}
} finally {
reader.releaseLock()
}
} catch (error: any) {
if (error.name === 'AbortError') {
logger.info('Execution stream cancelled')
@@ -222,70 +222,6 @@ export function useExecutionStream() {
}
}, [])
const executeFromBlock = useCallback(async (options: ExecuteFromBlockOptions) => {
const { workflowId, startBlockId, sourceSnapshot, input, callbacks = {} } = options
if (abortControllerRef.current) {
abortControllerRef.current.abort()
}
const abortController = new AbortController()
abortControllerRef.current = abortController
currentExecutionRef.current = null
try {
const response = await fetch(`/api/workflows/${workflowId}/execute-from-block`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({ startBlockId, sourceSnapshot, input }),
signal: abortController.signal,
})
if (!response.ok) {
let errorResponse: any
try {
errorResponse = await response.json()
} catch {
throw new Error(`Server error (${response.status}): ${response.statusText}`)
}
const error = new Error(errorResponse.error || 'Failed to start execution')
if (errorResponse && typeof errorResponse === 'object') {
Object.assign(error, { executionResult: errorResponse })
}
throw error
}
if (!response.body) {
throw new Error('No response body')
}
const executionId = response.headers.get('X-Execution-Id')
if (executionId) {
currentExecutionRef.current = { workflowId, executionId }
}
const reader = response.body.getReader()
await processSSEStream(reader, callbacks, 'Run-from-block')
} catch (error: any) {
if (error.name === 'AbortError') {
logger.info('Run-from-block execution cancelled')
callbacks.onExecutionCancelled?.({ duration: 0 })
} else {
logger.error('Run-from-block execution error:', error)
callbacks.onExecutionError?.({
error: error.message || 'Unknown error',
duration: 0,
})
}
throw error
} finally {
abortControllerRef.current = null
currentExecutionRef.current = null
}
}, [])
const cancel = useCallback(() => {
const execution = currentExecutionRef.current
if (execution) {
@@ -303,7 +239,6 @@ export function useExecutionStream() {
return {
execute,
executeFromBlock,
cancel,
}
}
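A rough sketch of how a component would drive a run through this hook (field names not visible in this diff, such as workflowId on the options object, are assumptions):

// Sketch only: consuming the hook and its callbacks.
const { execute, cancel } = useExecutionStream()

await execute({
  workflowId: 'wf_123', // assumed option name
  callbacks: {
    onExecutionStarted: ({ startTime }) => console.log('started at', startTime),
    onExecutionError: ({ error, duration }) => console.error(error, duration),
  },
})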

View File

@@ -452,7 +452,6 @@ export const auth = betterAuth({
'linear',
'shopify',
'trello',
'calcom',
...SSO_TRUSTED_PROVIDERS,
],
},
@@ -2542,55 +2541,6 @@ export const auth = betterAuth({
}
},
},
// Cal.com provider
{
providerId: 'calcom',
clientId: env.CALCOM_CLIENT_ID as string,
authorizationUrl: 'https://app.cal.com/auth/oauth2/authorize',
tokenUrl: 'https://app.cal.com/api/auth/oauth/token',
scopes: [],
responseType: 'code',
pkce: true,
accessType: 'offline',
prompt: 'consent',
redirectURI: `${getBaseUrl()}/api/auth/oauth2/callback/calcom`,
getUserInfo: async (tokens) => {
try {
logger.info('Fetching Cal.com user profile')
const response = await fetch('https://api.cal.com/v2/me', {
headers: {
Authorization: `Bearer ${tokens.accessToken}`,
'cal-api-version': '2024-08-13',
},
})
if (!response.ok) {
logger.error('Failed to fetch Cal.com user info', {
status: response.status,
statusText: response.statusText,
})
throw new Error('Failed to fetch user info')
}
const data = await response.json()
const profile = data.data || data
return {
id: `${profile.id?.toString()}-${crypto.randomUUID()}`,
name: profile.name || 'Cal.com User',
email: profile.email || `${profile.id}@cal.com`,
emailVerified: true,
createdAt: new Date(),
updatedAt: new Date(),
}
} catch (error) {
logger.error('Error in Cal.com getUserInfo:', { error })
return null
}
},
},
],
}),
// Include SSO plugin when enabled

View File

@@ -243,7 +243,6 @@ export const env = createEnv({
WORDPRESS_CLIENT_SECRET: z.string().optional(), // WordPress.com OAuth client secret
SPOTIFY_CLIENT_ID: z.string().optional(), // Spotify OAuth client ID
SPOTIFY_CLIENT_SECRET: z.string().optional(), // Spotify OAuth client secret
CALCOM_CLIENT_ID: z.string().optional(), // Cal.com OAuth client ID
// E2B Remote Code Execution
E2B_ENABLED: z.string().optional(), // Enable E2B remote code execution

View File

@@ -1,4 +1,4 @@
import { createCipheriv, createDecipheriv, randomBytes, timingSafeEqual } from 'crypto'
import { createCipheriv, createDecipheriv, randomBytes } from 'crypto'
import { createLogger } from '@sim/logger'
import { env } from '@/lib/core/config/env'
@@ -82,17 +82,3 @@ export function generatePassword(length = 24): string {
return result
}
/**
* Compares two strings in constant time to prevent timing attacks.
* Used for HMAC signature validation.
* @param a - First string to compare
* @param b - Second string to compare
* @returns True if strings are equal, false otherwise
*/
export function safeCompare(a: string, b: string): boolean {
if (a.length !== b.length) {
return false
}
return timingSafeEqual(Buffer.from(a), Buffer.from(b))
}
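The safeCompare helper above is intended for HMAC signature validation; a typical usage would look roughly like this (function and parameter names are illustrative):

import { createHmac } from 'crypto'

// Sketch only: verify an incoming webhook signature in constant time.
function verifyWebhookSignature(payload: string, receivedSignature: string, secret: string): boolean {
  const expected = createHmac('sha256', secret).update(payload).digest('hex')
  return safeCompare(expected, receivedSignature)
}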

View File

@@ -931,7 +931,7 @@ export async function secureFetchWithPinnedIP(
method: options.method || 'GET',
headers: sanitizedHeaders,
agent,
timeout: options.timeout || 300000, // Default 5 minutes
timeout: options.timeout || 30000,
}
const protocol = isHttps ? https : http
@@ -1011,7 +1011,7 @@ export async function secureFetchWithPinnedIP(
req.on('timeout', () => {
req.destroy()
reject(new Error(`Request timed out after ${requestOptions.timeout}ms`))
reject(new Error('Request timeout'))
})
if (options.body) {
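The timeout change above maps onto Node's socket-timeout pattern; a standalone sketch (host and timeout value are illustrative):

import * as https from 'https'

// Same pattern with node's https.request: set a socket timeout and destroy on expiry.
const req = https.request(
  { hostname: 'example.com', path: '/', method: 'GET', timeout: 30_000 },
  (res) => res.resume()
)
req.on('timeout', () => {
  req.destroy()
  console.error('Request timed out after 30000ms')
})
req.on('error', (err) => console.error(err.message))
req.end()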

View File

@@ -19,68 +19,3 @@ export const SSE_HEADERS = {
export function encodeSSE(data: any): Uint8Array {
return new TextEncoder().encode(`data: ${JSON.stringify(data)}\n\n`)
}
/**
* Options for reading SSE stream
*/
export interface ReadSSEStreamOptions {
onChunk?: (chunk: string) => void
onAccumulated?: (accumulated: string) => void
signal?: AbortSignal
}
/**
* Reads and parses an SSE stream from a Response body.
* Handles the wand API SSE format with data chunks and done signals.
*
* @param body - The ReadableStream body from a fetch Response
* @param options - Callbacks for handling stream data
* @returns The accumulated content from the stream
*/
export async function readSSEStream(
body: ReadableStream<Uint8Array>,
options: ReadSSEStreamOptions = {}
): Promise<string> {
const { onChunk, onAccumulated, signal } = options
const reader = body.getReader()
const decoder = new TextDecoder()
let accumulatedContent = ''
try {
while (true) {
if (signal?.aborted) {
break
}
const { done, value } = await reader.read()
if (done) break
const chunk = decoder.decode(value)
const lines = chunk.split('\n\n')
for (const line of lines) {
if (line.startsWith('data: ')) {
const lineData = line.substring(6)
if (lineData === '[DONE]') continue
try {
const data = JSON.parse(lineData)
if (data.error) throw new Error(data.error)
if (data.chunk) {
accumulatedContent += data.chunk
onChunk?.(data.chunk)
onAccumulated?.(accumulatedContent)
}
if (data.done) break
} catch {
// Skip unparseable lines
}
}
}
}
} finally {
reader.releaseLock()
}
return accumulatedContent
}
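The readSSEStream helper above would be consumed roughly like this; the endpoint and payload mirror the wand API usage seen elsewhere in this diff, but the exact request body is an assumption:

// Sketch only: stream an SSE response and accumulate the generated text.
const response = await fetch('/api/wand', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ prompt: 'Summarize the changes', stream: true }),
})

if (response.body) {
  const text = await readSSEStream(response.body, {
    onAccumulated: (partial) => console.log('so far:', partial.length, 'chars'),
  })
  console.log('final description:', text)
}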

View File

@@ -86,13 +86,7 @@ describe('SnapshotService', () => {
type: 'agent',
position: { x: 100, y: 200 },
subBlocks: {
prompt: {
id: 'prompt',
type: 'short-input',
value: 'Hello world',
},
},
subBlocks: {},
outputs: {},
enabled: true,
horizontalHandles: true,
@@ -110,14 +104,8 @@ describe('SnapshotService', () => {
blocks: {
block1: {
...baseState.blocks.block1,
// Different subBlock value - this is a meaningful change
subBlocks: {
prompt: {
id: 'prompt',
type: 'short-input',
value: 'Different prompt',
},
},
// Different block state - we can change outputs to make it different
outputs: { response: { type: 'string', description: 'different result' } },
},
},
}

Some files were not shown because too many files have changed in this diff