Compare commits

..

65 Commits

Author SHA1 Message Date
Siddharth Ganesan
2e4945508c I hate this 2025-08-30 16:36:45 -07:00
Waleed
1feef4ce4b improvement(tools): update mysql to respect ssl pref (#1205) 2025-08-30 16:36:45 -07:00
Waleed
2404f8af14 feat(parsers): added pptx, md, & html parsers (#1202)
* feat(parsers): added pptx, md, & html parsers

* ack PR comments

* file renaming, reorganization
2025-08-30 16:36:45 -07:00
Waleed
ba72e35d43 fix(deps): downgrade nextjs (#1200) 2025-08-30 16:36:45 -07:00
Waleed
ab52458191 improvement(kb): add fallbacks for kb configs (#1199) 2025-08-30 16:36:45 -07:00
Waleed
ef5e2b699c feat(kb): add adjustable concurrency and batching to uploads and embeddings (#1198) 2025-08-30 16:36:45 -07:00
Waleed
ba45404423 improvement(pg): added wand config for writing sql queries for generic db blocks & supabase postgrest syntax (#1197)
* add parallel ai, postgres, mysql, slight modifications to dark mode styling

* bun install frozen lockfile

* new deps

* improve security, add wand to short input and update wand config
2025-08-30 16:36:44 -07:00
Waleed
4ce2fc760a feat(tools): add parallel ai, postgres, mysql, slight modifications to dark mode styling (#1192)
* add parallel ai, postgres, mysql, slight modifications to dark mode styling

* bun install frozen lockfile

* new deps
2025-08-30 16:36:44 -07:00
Waleed
71e06f2b31 fix(billing-ui): open settings when enterprise sub folks press usage indicator (#1194) 2025-08-30 16:36:44 -07:00
Siddharth Ganesan
ce04d56d79 Use direct fetch (#1193) 2025-08-30 16:36:44 -07:00
Siddharth Ganesan
626e9a37da Fix/wand (#1191)
* Switch to node

* Refactor
2025-08-30 16:36:44 -07:00
Siddharth Ganesan
100ae1d23e Switch to node (#1190) 2025-08-30 16:36:44 -07:00
Vikhyath Mondreti
792df1a9f0 run bun install 2025-08-30 16:36:44 -07:00
Vikhyath Mondreti
d313a0f171 Revert "feat(integrations): added parallel AI, mySQL, and postgres block/tools (#1126)"
This reverts commit 766279bb8b.
2025-08-30 16:36:44 -07:00
Vikhyath Mondreti
29270d15ff change bun install to be based on frozen-lockfile flag
2025-08-30 16:36:44 -07:00
Vikhyath Mondreti
879711d786 revert drizzle-orm version 2025-08-30 16:36:44 -07:00
Vikhyath Mondreti
f98138a550 remove bun lock 2025-08-30 16:36:44 -07:00
Vikhyath Mondreti
888609a93c revert package.json 2025-08-30 16:36:44 -07:00
Siddharth Ganesan
a516325733 Stuff 2025-08-30 15:46:20 -07:00
Siddharth Ganesan
396c9db204 Updates 2025-08-30 13:43:10 -07:00
Siddharth Ganesan
a1acbc9616 Load final state 2025-08-30 11:24:40 -07:00
Siddharth Ganesan
5a74ab28e2 Cleaning 2025-08-30 11:14:41 -07:00
Siddharth Ganesan
cf5532c852 Checkpoint 2025-08-29 18:32:06 -07:00
Siddharth Ganesan
3e6d454de3 Fix execution input 2025-08-29 17:34:31 -07:00
Siddharth Ganesan
4c4b3351e6 Checkpoint 2025-08-29 16:53:36 -07:00
Siddharth Ganesan
0c1ee239fe Broken checkpoint 2025-08-29 15:27:15 -07:00
Siddharth Ganesan
9c065a1c2a Execution dropdown 2025-08-29 14:55:55 -07:00
Siddharth Ganesan
dc92a79f33 Debugger 2025-08-29 12:53:03 -07:00
Siddharth Ganesan
efb0d22d3f Debugger fixes 2025-08-29 12:50:18 -07:00
Siddharth Ganesan
9af445fa25 Updates 2025-08-29 12:21:46 -07:00
Siddharth Ganesan
e09088bafc Updates 2025-08-29 12:07:09 -07:00
Siddharth Ganesan
994c81ba3c Functionality 2025-08-29 11:59:42 -07:00
Siddharth Ganesan
4bba1eb8f6 Startpos stuff 2025-08-29 11:54:06 -07:00
Siddharth Ganesan
de06e8c35c Rollback checkpoint 2025-08-29 11:00:33 -07:00
Siddharth Ganesan
61534b05dd Breakpoint color 2025-08-29 10:25:11 -07:00
Siddharth Ganesan
694538e1ee Lint 2025-08-29 10:25:11 -07:00
Siddharth Ganesan
6df565e4c8 Panel toggle 2025-08-29 10:25:11 -07:00
Siddharth Ganesan
422df2be0f Stuff 2025-08-29 10:25:11 -07:00
Siddharth Ganesan
692b385ece Checkpoint 2025-08-29 10:25:11 -07:00
Siddharth Ganesan
728f5812ac Hide env vars 2025-08-29 10:25:11 -07:00
Siddharth Ganesan
1d51706057 Loops and parallels 2025-08-29 10:25:11 -07:00
Siddharth Ganesan
c166c60d9b Ui updates 2025-08-29 10:25:11 -07:00
Siddharth Ganesan
120b7ffd5c Ui 2025-08-29 10:25:11 -07:00
Siddharth Ganesan
ecc2a55f9e Breakpoint ui 2025-08-29 10:25:11 -07:00
Siddharth Ganesan
f10b7c0493 Stuff 2025-08-29 10:25:11 -07:00
Siddharth Ganesan
5835df3496 Fix resume 2025-08-29 10:25:11 -07:00
Siddharth Ganesan
c4924776b6 Update 2025-08-29 10:25:10 -07:00
Siddharth Ganesan
9444661d98 Updates 2025-08-29 10:25:10 -07:00
Siddharth Ganesan
9137b2eab3 Variable highlighting 2025-08-29 10:25:10 -07:00
Siddharth Ganesan
fa0ef07981 Remove number styling 2025-08-29 10:25:10 -07:00
Siddharth Ganesan
463ba208f4 Fix env vars 2025-08-29 10:25:10 -07:00
Siddharth Ganesan
2cedac5ffb Fix workflow vars 2025-08-29 10:25:10 -07:00
Siddharth Ganesan
9c3f559a91 Fix scoping 2025-08-29 10:25:10 -07:00
Siddharth Ganesan
0d7ab06bd1 Fix 2025-08-29 10:25:10 -07:00
Siddharth Ganesan
7bda4468b8 Ui updates 2025-08-29 10:25:10 -07:00
Siddharth Ganesan
bdb9b866ab UI improvements 2025-08-29 10:25:10 -07:00
Siddharth Ganesan
460d515df2 Execution status 2025-08-29 10:25:10 -07:00
Siddharth Ganesan
7b49515798 Fixes 2025-08-29 10:25:10 -07:00
Siddharth Ganesan
049f188d2e Var improvements 2025-08-29 10:25:10 -07:00
Siddharth Ganesan
6c8a7f0594 Checkpoint 2025-08-29 10:25:10 -07:00
Siddharth Ganesan
3727b5d395 Checkpoint 2025-08-29 10:25:10 -07:00
Siddharth Ganesan
75a3f4cce7 Checkpoint 2025-08-29 10:25:10 -07:00
Siddharth Ganesan
63616a1239 Updates 2025-08-29 10:25:10 -07:00
Siddharth Ganesan
7cb6dfc211 Updates 2025-08-29 10:25:10 -07:00
Siddharth Ganesan
b94d942204 Updates 2025-08-29 10:25:09 -07:00
177 changed files with 9439 additions and 2118 deletions

View File

@@ -33,12 +33,15 @@
"microsoft_planner",
"microsoft_teams",
"mistral_parse",
"mysql",
"notion",
"onedrive",
"openai",
"outlook",
"parallel_ai",
"perplexity",
"pinecone",
"postgresql",
"qdrant",
"reddit",
"s3",

View File

@@ -0,0 +1,180 @@
---
title: MySQL
description: Connect to MySQL database
---
import { BlockInfoCard } from "@/components/ui/block-info-card"
<BlockInfoCard
type="mysql"
color="#E0E0E0"
icon={true}
iconSvg={`<svg className="block-icon"
xmlns='http://www.w3.org/2000/svg'
viewBox='0 0 25.6 25.6'
>
<path
d='M179.076 94.886c-3.568-.1-6.336.268-8.656 1.25-.668.27-1.74.27-1.828 1.116.357.355.4.936.713 1.428.535.893 1.473 2.096 2.32 2.72l2.855 2.053c1.74 1.07 3.703 1.695 5.398 2.766.982.625 1.963 1.428 2.945 2.098.5.357.803.938 1.428 1.16v-.135c-.312-.4-.402-.98-.713-1.428l-1.34-1.293c-1.293-1.74-2.9-3.258-4.64-4.506-1.428-.982-4.55-2.32-5.13-3.97l-.088-.1c.98-.1 2.14-.447 3.078-.715 1.518-.4 2.9-.312 4.46-.713l2.143-.625v-.4c-.803-.803-1.383-1.874-2.23-2.632-2.275-1.963-4.775-3.882-7.363-5.488-1.383-.892-3.168-1.473-4.64-2.23-.537-.268-1.428-.402-1.74-.848-.805-.98-1.25-2.275-1.83-3.436l-3.658-7.763c-.803-1.74-1.295-3.48-2.275-5.086-4.596-7.585-9.594-12.18-17.268-16.687-1.65-.937-3.613-1.34-5.7-1.83l-3.346-.18c-.715-.312-1.428-1.16-2.053-1.562-2.543-1.606-9.102-5.086-10.977-.5-1.205 2.9 1.785 5.755 2.8 7.228.76 1.026 1.74 2.186 2.277 3.346.3.758.4 1.562.713 2.365.713 1.963 1.383 4.15 2.32 5.98.5.937 1.025 1.92 1.65 2.767.357.5.982.714 1.115 1.517-.625.893-.668 2.23-1.025 3.347-1.607 5.042-.982 11.288 1.293 15 .715 1.115 2.4 3.57 4.686 2.632 2.008-.803 1.56-3.346 2.14-5.577.135-.535.045-.892.312-1.25v.1l1.83 3.703c1.383 2.186 3.793 4.462 5.8 5.98 1.07.803 1.918 2.187 3.256 2.677v-.135h-.088c-.268-.4-.67-.58-1.027-.892-.803-.803-1.695-1.785-2.32-2.677-1.873-2.498-3.523-5.265-4.996-8.12-.715-1.383-1.34-2.9-1.918-4.283-.27-.536-.27-1.34-.715-1.606-.67.98-1.65 1.83-2.143 3.034-.848 1.918-.936 4.283-1.248 6.737-.18.045-.1 0-.18.1-1.426-.356-1.918-1.83-2.453-3.078-1.338-3.168-1.562-8.254-.402-11.913.312-.937 1.652-3.882 1.117-4.774-.27-.848-1.16-1.338-1.652-2.008-.58-.848-1.203-1.918-1.605-2.855-1.07-2.5-1.605-5.265-2.766-7.764-.537-1.16-1.473-2.365-2.232-3.435-.848-1.205-1.783-2.053-2.453-3.48-.223-.5-.535-1.294-.178-1.83.088-.357.268-.5.623-.58.58-.5 2.232.134 2.812.4 1.65.67 3.033 1.294 4.416 2.23.625.446 1.295 1.294 2.098 1.518h.938c1.428.312 3.033.1 4.37.5 2.365.76 4.506 1.874 6.426 3.08 5.844 3.703 10.664 8.968 13.92 15.26.535 1.026.758 1.963 1.25 3.034.938 2.187 2.098 4.417 3.033 6.56.938 2.097 1.83 4.24 3.168 5.98.67.937 3.346 1.427 4.55 1.918.893.4 2.275.76 3.08 1.25 1.516.937 3.033 2.008 4.46 3.034.713.534 2.945 1.65 3.078 2.54zm-45.5-38.772a7.09 7.09 0 0 0-1.828.223v.1h.088c.357.714.982 1.205 1.428 1.83l1.027 2.142.088-.1c.625-.446.938-1.16.938-2.23-.268-.312-.312-.625-.535-.937-.268-.446-.848-.67-1.206-1.026z'
transform='matrix(.390229 0 0 .38781 -46.300037 -16.856717)'
fillRule='evenodd'
fill='#00678c'
/>
</svg>`}
/>
{/* MANUAL-CONTENT-START:intro */}
The [MySQL](https://www.mysql.com/) tool enables you to connect to any MySQL database and perform a wide range of database operations directly within your agentic workflows. With secure connection handling and flexible configuration, you can easily manage and interact with your data.
With the MySQL tool, you can:
- **Query data**: Execute SELECT queries to retrieve data from your MySQL tables using the `mysql_query` operation.
- **Insert records**: Add new rows to your tables with the `mysql_insert` operation by specifying the table and data to insert.
- **Update records**: Modify existing data in your tables using the `mysql_update` operation, providing the table, new data, and WHERE conditions.
- **Delete records**: Remove rows from your tables with the `mysql_delete` operation, specifying the table and WHERE conditions.
- **Execute raw SQL**: Run any custom SQL command using the `mysql_execute` operation for advanced use cases.
The MySQL tool is ideal for scenarios where your agents need to interact with structured data—such as automating reporting, syncing data between systems, or powering data-driven workflows. It streamlines database access, making it easy to read, write, and manage your MySQL data programmatically.
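As a rough illustration, here is a minimal TypeScript sketch of a `mysql_query` call. The `runTool` helper is hypothetical (the actual invocation mechanism depends on how your workflow calls tools), and all values are placeholders; parameter names follow the Input tables below.
```typescript
// Hypothetical helper, declared here only so the sketch type-checks.
declare function runTool(
  name: 'mysql_query',
  params: Record<string, unknown>
): Promise<{ message: string; rows: Array<Record<string, unknown>>; rowCount: number }>

const result = await runTool('mysql_query', {
  host: 'db.example.com', // MySQL server hostname or IP address
  port: 3306,             // default MySQL port
  database: 'analytics',
  username: 'reporting_user',
  password: process.env.MYSQL_PASSWORD ?? '',
  ssl: 'preferred',       // 'disabled' | 'required' | 'preferred'
  query: 'SELECT id, email FROM users WHERE active = 1 LIMIT 10',
})

console.log(result.rowCount, result.rows)
```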
{/* MANUAL-CONTENT-END */}
## Usage Instructions
Connect to any MySQL database to execute queries, manage data, and perform database operations. Supports SELECT, INSERT, UPDATE, DELETE operations with secure connection handling.
## Tools
### `mysql_query`
Execute SELECT query on MySQL database
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `host` | string | Yes | MySQL server hostname or IP address |
| `port` | number | Yes | MySQL server port \(default: 3306\) |
| `database` | string | Yes | Database name to connect to |
| `username` | string | Yes | Database username |
| `password` | string | Yes | Database password |
| `ssl` | string | No | SSL connection mode \(disabled, required, preferred\) |
| `query` | string | Yes | SQL SELECT query to execute |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `message` | string | Operation status message |
| `rows` | array | Array of rows returned from the query |
| `rowCount` | number | Number of rows returned |
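For reference, the output above can be modeled with a small TypeScript type. This is a sketch based on the table, not a type exported by the codebase.
```typescript
// Sketch of the mysql_query result shape described above (not an exported type).
interface MySQLQueryOutput {
  message: string                        // operation status message
  rows: Array<Record<string, unknown>>   // rows returned from the SELECT
  rowCount: number                       // number of rows returned
}
```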
### `mysql_insert`
Insert new record into MySQL database
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `host` | string | Yes | MySQL server hostname or IP address |
| `port` | number | Yes | MySQL server port \(default: 3306\) |
| `database` | string | Yes | Database name to connect to |
| `username` | string | Yes | Database username |
| `password` | string | Yes | Database password |
| `ssl` | string | No | SSL connection mode \(disabled, required, preferred\) |
| `table` | string | Yes | Table name to insert into |
| `data` | object | Yes | Data to insert as key-value pairs |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `message` | string | Operation status message |
| `rows` | array | Array of inserted rows |
| `rowCount` | number | Number of rows inserted |
### `mysql_update`
Update existing records in MySQL database
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `host` | string | Yes | MySQL server hostname or IP address |
| `port` | number | Yes | MySQL server port \(default: 3306\) |
| `database` | string | Yes | Database name to connect to |
| `username` | string | Yes | Database username |
| `password` | string | Yes | Database password |
| `ssl` | string | No | SSL connection mode \(disabled, required, preferred\) |
| `table` | string | Yes | Table name to update |
| `data` | object | Yes | Data to update as key-value pairs |
| `where` | string | Yes | WHERE clause condition \(without WHERE keyword\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `message` | string | Operation status message |
| `rows` | array | Array of updated rows |
| `rowCount` | number | Number of rows updated |
### `mysql_delete`
Delete records from MySQL database
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `host` | string | Yes | MySQL server hostname or IP address |
| `port` | number | Yes | MySQL server port \(default: 3306\) |
| `database` | string | Yes | Database name to connect to |
| `username` | string | Yes | Database username |
| `password` | string | Yes | Database password |
| `ssl` | string | No | SSL connection mode \(disabled, required, preferred\) |
| `table` | string | Yes | Table name to delete from |
| `where` | string | Yes | WHERE clause condition \(without WHERE keyword\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `message` | string | Operation status message |
| `rows` | array | Array of deleted rows |
| `rowCount` | number | Number of rows deleted |
### `mysql_execute`
Execute raw SQL query on MySQL database
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `host` | string | Yes | MySQL server hostname or IP address |
| `port` | number | Yes | MySQL server port \(default: 3306\) |
| `database` | string | Yes | Database name to connect to |
| `username` | string | Yes | Database username |
| `password` | string | Yes | Database password |
| `ssl` | string | No | SSL connection mode \(disabled, required, preferred\) |
| `query` | string | Yes | Raw SQL query to execute |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `message` | string | Operation status message |
| `rows` | array | Array of rows returned from the query |
| `rowCount` | number | Number of rows affected |
## Notes
- Category: `tools`
- Type: `mysql`

View File

@@ -0,0 +1,106 @@
---
title: Parallel AI
description: Search with Parallel AI
---
import { BlockInfoCard } from "@/components/ui/block-info-card"
<BlockInfoCard
type="parallel_ai"
color="#E0E0E0"
icon={true}
iconSvg={`<svg className="block-icon"
fill='currentColor'
viewBox='0 0 271 270'
xmlns='http://www.w3.org/2000/svg'
>
<path
d='M267.804 105.65H193.828C194.026 106.814 194.187 107.996 194.349 109.178H76.6703C76.4546 110.736 76.2388 112.312 76.0591 113.87H1.63342C1.27387 116.198 0.950289 118.543 0.698608 120.925H75.3759C75.2501 122.483 75.1602 124.059 75.0703 125.617H195.949C196.003 126.781 196.057 127.962 196.093 129.144H270.68V125.384C270.195 118.651 269.242 112.061 267.804 105.65Z'
fill='#1D1C1A'
/>
<path
d='M195.949 144.401H75.0703C75.1422 145.977 75.2501 147.535 75.3759 149.093H0.698608C0.950289 151.457 1.2559 153.802 1.63342 156.148H76.0591C76.2388 157.724 76.4366 159.282 76.6703 160.84H194.349C194.187 162.022 194.008 163.186 193.828 164.367H267.804C269.242 157.957 270.195 151.367 270.68 144.634V140.874H196.093C196.057 142.055 196.003 143.219 195.949 144.401Z'
fill='#1D1C1A'
/>
<path
d='M190.628 179.642H80.3559C80.7514 181.218 81.1828 182.776 81.6143 184.334H9.30994C10.2448 186.715 11.2515 189.061 12.3121 191.389H83.7536C84.2749 192.965 84.7962 194.523 85.3535 196.08H185.594C185.163 197.262 184.732 198.426 184.282 199.608H254.519C258.6 192.177 261.98 184.316 264.604 176.114H191.455C191.185 177.296 190.898 178.46 190.61 179.642H190.628Z'
fill='#1D1C1A'
/>
<path
d='M177.666 214.883H93.3352C94.1082 216.458 94.9172 218.034 95.7441 219.574H29.8756C31.8351 221.992 33.8666 224.337 35.9699 226.63H99.6632C100.598 228.205 101.551 229.781 102.522 231.321H168.498C167.761 232.503 167.006 233.685 166.233 234.849H226.762C234.474 227.847 241.36 219.95 247.292 211.355H179.356C178.799 212.537 178.26 213.719 177.684 214.883H177.666Z'
fill='#1D1C1A'
/>
<path
d='M154.943 250.106H116.058C117.371 251.699 118.701 253.257 120.067 254.797H73.021C91.6094 264.431 112.715 269.946 135.096 270C135.24 270 135.366 270 135.492 270C135.618 270 135.761 270 135.887 270C164.04 269.911 190.178 261.28 211.805 246.56H157.748C156.813 247.742 155.878 248.924 154.925 250.088L154.943 250.106Z'
fill='#1D1C1A'
/>
<path
d='M116.059 19.9124H154.943C155.896 21.0764 156.831 22.2582 157.766 23.4401H211.823C190.179 8.72065 164.058 0.0895344 135.906 0C135.762 0 135.636 0 135.51 0C135.384 0 135.24 0 135.115 0C112.715 0.0716275 91.6277 5.56904 73.0393 15.2029H120.086C118.719 16.7429 117.389 18.3187 116.077 19.8945L116.059 19.9124Z'
fill='#1D1C1A'
/>
<path
d='M93.3356 55.1532H177.667C178.242 56.3171 178.799 57.499 179.339 58.6808H247.274C241.342 50.0855 234.457 42.1886 226.744 35.187H166.215C166.988 36.351 167.743 37.5328 168.48 38.7147H102.504C101.533 40.2726 100.58 41.8305 99.6456 43.4063H35.9523C33.831 45.6804 31.7996 48.0262 29.858 50.4616H95.7265C94.8996 52.0195 94.1086 53.5774 93.3176 55.1532H93.3356Z'
fill='#1D1C1A'
/>
<path
d='M80.3736 90.3758H190.646C190.933 91.5398 191.221 92.7216 191.491 93.9035H264.64C262.015 85.7021 258.636 77.841 254.555 70.4097H184.318C184.767 71.5736 185.199 72.7555 185.63 73.9373H85.3893C84.832 75.4952 84.2927 77.0531 83.7893 78.6289H12.3479C11.2872 80.9389 10.2805 83.2847 9.3457 85.6842H81.65C81.2186 87.2421 80.7871 88.8 80.3916 90.3758H80.3736Z'
fill='#1D1C1A'
/>
</svg>`}
/>
{/* MANUAL-CONTENT-START:intro */}
[Parallel AI](https://parallel.ai/) is an advanced web search and content extraction platform designed to deliver comprehensive, high-quality results for any query. By leveraging intelligent processing and large-scale data extraction, Parallel AI enables users and agents to access, analyze, and synthesize information from across the web with speed and accuracy.
With Parallel AI, you can:
- **Search the web intelligently**: Retrieve relevant, up-to-date information from a wide range of sources
- **Extract and summarize content**: Get concise, meaningful excerpts from web pages and documents
- **Customize search objectives**: Tailor queries to specific needs or questions for targeted results
- **Process results at scale**: Handle large volumes of search results with advanced processing options
- **Integrate with workflows**: Use Parallel AI within Sim to automate research, content gathering, and knowledge extraction
- **Control output granularity**: Specify the number of results and the amount of content per result
- **Secure API access**: Protect your searches and data with API key authentication
In Sim, the Parallel AI integration empowers your agents to perform web searches and extract content programmatically. This enables powerful automation scenarios such as real-time research, competitive analysis, content monitoring, and knowledge base creation. By connecting Sim with Parallel AI, you unlock the ability for agents to gather, process, and utilize web data as part of your automated workflows.
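As a rough illustration, here is a minimal TypeScript sketch of a `parallel_search` call. The `runTool` helper is hypothetical (Sim's actual invocation mechanism is not part of this page), and the values are placeholders; parameter names follow the Input table below.
```typescript
// Hypothetical helper, declared here only so the sketch type-checks.
declare function runTool(
  name: 'parallel_search',
  params: Record<string, unknown>
): Promise<{ results: unknown[] }>

const search = await runTool('parallel_search', {
  objective: 'Summarize recent developments in serverless Postgres offerings',
  search_queries: 'serverless postgres, managed postgres pricing', // optional, comma-separated
  processor: 'base',                 // 'base' or 'pro'
  max_results: 5,
  max_chars_per_result: 1500,
  apiKey: process.env.PARALLEL_AI_API_KEY ?? '', // Parallel AI API key
})

for (const item of search.results) {
  console.log(item) // each entry contains an excerpt from a relevant page
}
```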
{/* MANUAL-CONTENT-END */}
## Usage Instructions
Search the web using Parallel AI's advanced search capabilities. Get comprehensive results with intelligent processing and content extraction.
## Tools
### `parallel_search`
Search the web using Parallel AI. Provides comprehensive search results with intelligent processing and content extraction.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `objective` | string | Yes | The search objective or question to answer |
| `search_queries` | string | No | Optional comma-separated list of search queries to execute |
| `processor` | string | No | Processing method: base or pro \(default: base\) |
| `max_results` | number | No | Maximum number of results to return \(default: 5\) |
| `max_chars_per_result` | number | No | Maximum characters per result \(default: 1500\) |
| `apiKey` | string | Yes | Parallel AI API Key |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `results` | array | Search results with excerpts from relevant pages |
## Notes
- Category: `tools`
- Type: `parallel_ai`

View File

@@ -0,0 +1,188 @@
---
title: PostgreSQL
description: Connect to PostgreSQL database
---
import { BlockInfoCard } from "@/components/ui/block-info-card"
<BlockInfoCard
type="postgresql"
color="#336791"
icon={true}
iconSvg={`<svg className="block-icon"
viewBox='-4 0 264 264'
xmlns='http://www.w3.org/2000/svg'
preserveAspectRatio='xMinYMin meet'
>
<path d='M255.008 158.086c-1.535-4.649-5.556-7.887-10.756-8.664-2.452-.366-5.26-.21-8.583.475-5.792 1.195-10.089 1.65-13.225 1.738 11.837-19.985 21.462-42.775 27.003-64.228 8.96-34.689 4.172-50.492-1.423-57.64C233.217 10.847 211.614.683 185.552.372c-13.903-.17-26.108 2.575-32.475 4.549-5.928-1.046-12.302-1.63-18.99-1.738-12.537-.2-23.614 2.533-33.079 8.15-5.24-1.772-13.65-4.27-23.362-5.864-22.842-3.75-41.252-.828-54.718 8.685C6.622 25.672-.937 45.684.461 73.634c.444 8.874 5.408 35.874 13.224 61.48 4.492 14.718 9.282 26.94 14.237 36.33 7.027 13.315 14.546 21.156 22.987 23.972 4.731 1.576 13.327 2.68 22.368-4.85 1.146 1.388 2.675 2.767 4.704 4.048 2.577 1.625 5.728 2.953 8.875 3.74 11.341 2.835 21.964 2.126 31.027-1.848.056 1.612.099 3.152.135 4.482.06 2.157.12 4.272.199 6.25.537 13.374 1.447 23.773 4.143 31.049.148.4.347 1.01.557 1.657 1.345 4.118 3.594 11.012 9.316 16.411 5.925 5.593 13.092 7.308 19.656 7.308 3.292 0 6.433-.432 9.188-1.022 9.82-2.105 20.973-5.311 29.041-16.799 7.628-10.86 11.336-27.217 12.007-52.99.087-.729.167-1.425.244-2.088l.16-1.362 1.797.158.463.031c10.002.456 22.232-1.665 29.743-5.154 5.935-2.754 24.954-12.795 20.476-26.351' />
<path
d='M237.906 160.722c-29.74 6.135-31.785-3.934-31.785-3.934 31.4-46.593 44.527-105.736 33.2-120.211-30.904-39.485-84.399-20.811-85.292-20.327l-.287.052c-5.876-1.22-12.451-1.946-19.842-2.067-13.456-.22-23.664 3.528-31.41 9.402 0 0-95.43-39.314-90.991 49.444.944 18.882 27.064 142.873 58.218 105.422 11.387-13.695 22.39-25.274 22.39-25.274 5.464 3.63 12.006 5.482 18.864 4.817l.533-.452c-.166 1.7-.09 3.363.213 5.332-8.026 8.967-5.667 10.541-21.711 13.844-16.235 3.346-6.698 9.302-.471 10.86 7.549 1.887 25.013 4.561 36.813-11.958l-.47 1.885c3.144 2.519 5.352 16.383 4.982 28.952-.37 12.568-.617 21.197 1.86 27.937 2.479 6.74 4.948 21.905 26.04 17.386 17.623-3.777 26.756-13.564 28.027-29.89.901-11.606 2.942-9.89 3.07-20.267l1.637-4.912c1.887-15.733.3-20.809 11.157-18.448l2.64.232c7.99.363 18.45-1.286 24.589-4.139 13.218-6.134 21.058-16.377 8.024-13.686h.002'
fill='#336791'
/>
<path
d='M108.076 81.525c-2.68-.373-5.107-.028-6.335.902-.69.523-.904 1.129-.962 1.546-.154 1.105.62 2.327 1.096 2.957 1.346 1.784 3.312 3.01 5.258 3.28.282.04.563.058.842.058 3.245 0 6.196-2.527 6.456-4.392.325-2.336-3.066-3.893-6.355-4.35M196.86 81.599c-.256-1.831-3.514-2.353-6.606-1.923-3.088.43-6.082 1.824-5.832 3.659.2 1.427 2.777 3.863 5.827 3.863.258 0 .518-.017.78-.054 2.036-.282 3.53-1.575 4.24-2.32 1.08-1.136 1.706-2.402 1.591-3.225'
fill='#FFF'
/>
<path
d='M247.802 160.025c-1.134-3.429-4.784-4.532-10.848-3.28-18.005 3.716-24.453 1.142-26.57-.417 13.995-21.32 25.508-47.092 31.719-71.137 2.942-11.39 4.567-21.968 4.7-30.59.147-9.463-1.465-16.417-4.789-20.665-13.402-17.125-33.072-26.311-56.882-26.563-16.369-.184-30.199 4.005-32.88 5.183-5.646-1.404-11.801-2.266-18.502-2.376-12.288-.199-22.91 2.743-31.704 8.74-3.82-1.422-13.692-4.811-25.765-6.756-20.872-3.36-37.458-.814-49.294 7.571-14.123 10.006-20.643 27.892-19.38 53.16.425 8.501 5.269 34.653 12.913 59.698 10.062 32.964 21 51.625 32.508 55.464 1.347.449 2.9.763 4.613.763 4.198 0 9.345-1.892 14.7-8.33a529.832 529.832 0 0 1 20.261-22.926c4.524 2.428 9.494 3.784 14.577 3.92.01.133.023.266.035.398a117.66 117.66 0 0 0-2.57 3.175c-3.522 4.471-4.255 5.402-15.592 7.736-3.225.666-11.79 2.431-11.916 8.435-.136 6.56 10.125 9.315 11.294 9.607 4.074 1.02 7.999 1.523 11.742 1.523 9.103 0 17.114-2.992 23.516-8.781-.197 23.386.778 46.43 3.586 53.451 2.3 5.748 7.918 19.795 25.664 19.794 2.604 0 5.47-.303 8.623-.979 18.521-3.97 26.564-12.156 29.675-30.203 1.665-9.645 4.522-32.676 5.866-45.03 2.836.885 6.487 1.29 10.434 1.289 8.232 0 17.731-1.749 23.688-4.514 6.692-3.108 18.768-10.734 16.578-17.36zm-44.106-83.48c-.061 3.647-.563 6.958-1.095 10.414-.573 3.717-1.165 7.56-1.314 12.225-.147 4.54.42 9.26.968 13.825 1.108 9.22 2.245 18.712-2.156 28.078a36.508 36.508 0 0 1-1.95-4.009c-.547-1.326-1.735-3.456-3.38-6.404-6.399-11.476-21.384-38.35-13.713-49.316 2.285-3.264 8.084-6.62 22.64-4.813zm-17.644-61.787c21.334.471 38.21 8.452 50.158 23.72 9.164 11.711-.927 64.998-30.14 110.969a171.33 171.33 0 0 0-.886-1.117l-.37-.462c7.549-12.467 6.073-24.802 4.759-35.738-.54-4.488-1.05-8.727-.92-12.709.134-4.22.692-7.84 1.232-11.34.663-4.313 1.338-8.776 1.152-14.037.139-.552.195-1.204.122-1.978-.475-5.045-6.235-20.144-17.975-33.81-6.422-7.475-15.787-15.84-28.574-21.482 5.5-1.14 13.021-2.203 21.442-2.016zM66.674 175.778c-5.9 7.094-9.974 5.734-11.314 5.288-8.73-2.912-18.86-21.364-27.791-50.624-7.728-25.318-12.244-50.777-12.602-57.916-1.128-22.578 4.345-38.313 16.268-46.769 19.404-13.76 51.306-5.524 64.125-1.347-.184.182-.376.352-.558.537-21.036 21.244-20.537 57.54-20.485 59.759-.002.856.07 2.068.168 3.735.362 6.105 1.036 17.467-.764 30.334-1.672 11.957 2.014 23.66 10.111 32.109a36.275 36.275 0 0 0 2.617 2.468c-3.604 3.86-11.437 12.396-19.775 22.426zm22.479-29.993c-6.526-6.81-9.49-16.282-8.133-25.99 1.9-13.592 1.199-25.43.822-31.79-.053-.89-.1-1.67-.127-2.285 3.073-2.725 17.314-10.355 27.47-8.028 4.634 1.061 7.458 4.217 8.632 9.645 6.076 28.103.804 39.816-3.432 49.229-.873 1.939-1.698 3.772-2.402 5.668l-.546 1.466c-1.382 3.706-2.668 7.152-3.465 10.424-6.938-.02-13.687-2.984-18.819-8.34zm1.065 37.9c-2.026-.506-3.848-1.385-4.917-2.114.893-.42 2.482-.992 5.238-1.56 13.337-2.745 15.397-4.683 19.895-10.394 1.031-1.31 2.2-2.794 3.819-4.602l.002-.002c2.411-2.7 3.514-2.242 5.514-1.412 1.621.67 3.2 2.702 3.84 4.938.303 1.056.643 3.06-.47 4.62-9.396 13.156-23.088 12.987-32.921 10.526zm69.799 64.952c-16.316 3.496-22.093-4.829-25.9-14.346-2.457-6.144-3.665-33.85-2.808-64.447.011-.407-.047-.8-.159-1.17a15.444 15.444 0 0 0-.456-2.162c-1.274-4.452-4.379-8.176-8.104-9.72-1.48-.613-4.196-1.738-7.46-.903.696-2.868 1.903-6.107 3.212-9.614l.549-1.475c.618-1.663 1.394-3.386 2.214-5.21 4.433-9.848 10.504-23.337 3.915-53.81-2.468-11.414-10.71-16.988-23.204-15.693-7.49.775-14.343 3.797-17.761 5.53-.735.372-1.407.732-2.035 1.082.954-11.5 4.558-32.992 18.04-46.59 8.489-8.56 19.794-12.788 33.568-12.56 27.14.444 44.544 14.372 54.366 25.979 8.464 10.001 13.047 
20.076 14.876 25.51-13.755-1.399-23.11 1.316-27.852 8.096-10.317 14.748 5.644 43.372 13.315 57.129 1.407 2.521 2.621 4.7 3.003 5.626 2.498 6.054 5.732 10.096 8.093 13.046.724.904 1.426 1.781 1.96 2.547-4.166 1.201-11.649 3.976-10.967 17.847-.55 6.96-4.461 39.546-6.448 51.059-2.623 15.21-8.22 20.875-23.957 24.25zm68.104-77.936c-4.26 1.977-11.389 3.46-18.161 3.779-7.48.35-11.288-.838-12.184-1.569-.42-8.644 2.797-9.547 6.202-10.503.535-.15 1.057-.297 1.561-.473.313.255.656.508 1.032.756 6.012 3.968 16.735 4.396 31.874 1.271l.166-.033c-2.042 1.909-5.536 4.471-10.49 6.772z'
fill='#FFF'
/>
</svg>`}
/>
{/* MANUAL-CONTENT-START:intro */}
The [PostgreSQL](https://www.postgresql.org/) tool enables you to connect to any PostgreSQL database and perform a wide range of database operations directly within your agentic workflows. With secure connection handling and flexible configuration, you can easily manage and interact with your data.
With the PostgreSQL tool, you can:
- **Query data**: Execute SELECT queries to retrieve data from your PostgreSQL tables using the `postgresql_query` operation.
- **Insert records**: Add new rows to your tables with the `postgresql_insert` operation by specifying the table and data to insert.
- **Update records**: Modify existing data in your tables using the `postgresql_update` operation, providing the table, new data, and WHERE conditions.
- **Delete records**: Remove rows from your tables with the `postgresql_delete` operation, specifying the table and WHERE conditions.
- **Execute raw SQL**: Run any custom SQL command using the `postgresql_execute` operation for advanced use cases.
The PostgreSQL tool is ideal for scenarios where your agents need to interact with structured data—such as automating reporting, syncing data between systems, or powering data-driven workflows. It streamlines database access, making it easy to read, write, and manage your PostgreSQL data programmatically.
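For example, a minimal TypeScript sketch of a `postgresql_insert` call, again assuming a hypothetical `runTool` helper (the real invocation mechanism depends on your workflow setup); the values are placeholders and the parameter names match the Input tables below.
```typescript
// Hypothetical helper, declared here only so the sketch type-checks.
declare function runTool(
  name: 'postgresql_insert',
  params: Record<string, unknown>
): Promise<{ message: string; rows: Array<Record<string, unknown>>; rowCount: number }>

const inserted = await runTool('postgresql_insert', {
  host: 'pg.example.com',   // PostgreSQL server hostname or IP address
  port: 5432,               // default PostgreSQL port
  database: 'app',
  username: 'app_writer',
  password: process.env.POSTGRES_PASSWORD ?? '',
  ssl: 'required',          // 'disabled' | 'required' | 'preferred'
  table: 'events',
  data: { type: 'signup', user_id: 42 }, // key-value pairs to insert
})

console.log(inserted.message, inserted.rowCount)
```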
{/* MANUAL-CONTENT-END */}
## Usage Instructions
Connect to any PostgreSQL database to execute queries, manage data, and perform database operations. Supports SELECT, INSERT, UPDATE, DELETE operations with secure connection handling.
## Tools
### `postgresql_query`
Execute a SELECT query on PostgreSQL database
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `host` | string | Yes | PostgreSQL server hostname or IP address |
| `port` | number | Yes | PostgreSQL server port \(default: 5432\) |
| `database` | string | Yes | Database name to connect to |
| `username` | string | Yes | Database username |
| `password` | string | Yes | Database password |
| `ssl` | string | No | SSL connection mode \(disabled, required, preferred\) |
| `query` | string | Yes | SQL SELECT query to execute |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `message` | string | Operation status message |
| `rows` | array | Array of rows returned from the query |
| `rowCount` | number | Number of rows returned |
### `postgresql_insert`
Insert data into PostgreSQL database
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `host` | string | Yes | PostgreSQL server hostname or IP address |
| `port` | number | Yes | PostgreSQL server port \(default: 5432\) |
| `database` | string | Yes | Database name to connect to |
| `username` | string | Yes | Database username |
| `password` | string | Yes | Database password |
| `ssl` | string | No | SSL connection mode \(disabled, required, preferred\) |
| `table` | string | Yes | Table name to insert data into |
| `data` | object | Yes | Data object to insert \(key-value pairs\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `message` | string | Operation status message |
| `rows` | array | Inserted data \(if RETURNING clause used\) |
| `rowCount` | number | Number of rows inserted |
### `postgresql_update`
Update data in PostgreSQL database
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `host` | string | Yes | PostgreSQL server hostname or IP address |
| `port` | number | Yes | PostgreSQL server port \(default: 5432\) |
| `database` | string | Yes | Database name to connect to |
| `username` | string | Yes | Database username |
| `password` | string | Yes | Database password |
| `ssl` | string | No | SSL connection mode \(disabled, required, preferred\) |
| `table` | string | Yes | Table name to update data in |
| `data` | object | Yes | Data object with fields to update \(key-value pairs\) |
| `where` | string | Yes | WHERE clause condition \(without WHERE keyword\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `message` | string | Operation status message |
| `rows` | array | Updated data \(if RETURNING clause used\) |
| `rowCount` | number | Number of rows updated |
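A minimal sketch of the parameters for this operation, with illustrative placeholder values. Note that, per the Input table above, the `where` value is the condition only, without the `WHERE` keyword.
```typescript
// Illustrative parameter object for postgresql_update; all values are placeholders.
const updateParams = {
  host: 'pg.example.com',
  port: 5432,
  database: 'app',
  username: 'app_writer',
  password: process.env.POSTGRES_PASSWORD ?? '',
  ssl: 'required',
  table: 'events',
  data: { processed: true },                       // fields to update, as key-value pairs
  where: "type = 'signup' AND processed = false",  // condition only, without the WHERE keyword
}
```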
### `postgresql_delete`
Delete data from PostgreSQL database
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `host` | string | Yes | PostgreSQL server hostname or IP address |
| `port` | number | Yes | PostgreSQL server port \(default: 5432\) |
| `database` | string | Yes | Database name to connect to |
| `username` | string | Yes | Database username |
| `password` | string | Yes | Database password |
| `ssl` | string | No | SSL connection mode \(disabled, required, preferred\) |
| `table` | string | Yes | Table name to delete data from |
| `where` | string | Yes | WHERE clause condition \(without WHERE keyword\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `message` | string | Operation status message |
| `rows` | array | Deleted data \(if RETURNING clause used\) |
| `rowCount` | number | Number of rows deleted |
### `postgresql_execute`
Execute raw SQL query on PostgreSQL database
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `host` | string | Yes | PostgreSQL server hostname or IP address |
| `port` | number | Yes | PostgreSQL server port \(default: 5432\) |
| `database` | string | Yes | Database name to connect to |
| `username` | string | Yes | Database username |
| `password` | string | Yes | Database password |
| `ssl` | string | No | SSL connection mode \(disabled, required, preferred\) |
| `query` | string | Yes | Raw SQL query to execute |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `message` | string | Operation status message |
| `rows` | array | Array of rows returned from the query |
| `rowCount` | number | Number of rows affected |
## Notes
- Category: `tools`
- Type: `postgresql`

View File

@@ -143,7 +143,6 @@ export const sampleWorkflowState = {
],
loops: {},
parallels: {},
whiles: {},
lastSaved: Date.now(),
isDeployed: false,
}

View File

@@ -420,7 +420,7 @@ export async function executeWorkflowForChat(
// Use deployed state for chat execution (this is the stable, deployed version)
const deployedState = workflowResult[0].deployedState as WorkflowState
const { blocks, edges, loops, parallels, whiles } = deployedState
const { blocks, edges, loops, parallels } = deployedState
// Prepare for execution, similar to use-workflow-execution.ts
const mergedStates = mergeSubblockState(blocks)
@@ -497,7 +497,6 @@ export async function executeWorkflowForChat(
filteredEdges,
loops,
parallels,
whiles,
true // Enable validation during execution
)
@@ -544,6 +543,8 @@ export async function executeWorkflowForChat(
userId: deployment.userId,
workspaceId: '', // TODO: Get from workflow
variables: workflowVariables,
initialInput: { input, conversationId },
executionType: 'chat',
})
const stream = new ReadableStream({

View File

@@ -12,9 +12,9 @@ import {
import { getCopilotModel } from '@/lib/copilot/config'
import type { CopilotProviderConfig } from '@/lib/copilot/types'
import { env } from '@/lib/env'
import { generateChatTitle } from '@/lib/generate-chat-title'
import { createLogger } from '@/lib/logs/console/logger'
import { SIM_AGENT_API_URL_DEFAULT } from '@/lib/sim-agent'
import { generateChatTitle } from '@/lib/sim-agent/utils'
import { createFileContent, isSupportedFileType } from '@/lib/uploads/file-utils'
import { S3_COPILOT_CONFIG } from '@/lib/uploads/setup'
import { downloadFile, getStorageProvider } from '@/lib/uploads/storage-client'

View File

@@ -71,7 +71,6 @@ export async function POST(request: NextRequest) {
edges: checkpointState?.edges || [],
loops: checkpointState?.loops || {},
parallels: checkpointState?.parallels || {},
whiles: checkpointState?.whiles || {},
isDeployed: checkpointState?.isDeployed || false,
deploymentStatuses: checkpointState?.deploymentStatuses || {},
hasActiveWebhook: checkpointState?.hasActiveWebhook || false,

View File

@@ -76,11 +76,9 @@ export async function POST(request: NextRequest) {
logger.info('File parse request received:', { filePath, fileType })
// Handle multiple files
if (Array.isArray(filePath)) {
const results = []
for (const path of filePath) {
// Skip empty or invalid paths
if (!path || (typeof path === 'string' && path.trim() === '')) {
results.push({
success: false,
@@ -91,12 +89,10 @@ export async function POST(request: NextRequest) {
}
const result = await parseFileSingle(path, fileType)
// Add processing time to metadata
if (result.metadata) {
result.metadata.processingTime = Date.now() - startTime
}
// Transform each result to match expected frontend format
if (result.success) {
results.push({
success: true,
@@ -105,7 +101,7 @@ export async function POST(request: NextRequest) {
name: result.filePath.split('/').pop() || 'unknown',
fileType: result.metadata?.fileType || 'application/octet-stream',
size: result.metadata?.size || 0,
binary: false, // We only return text content
binary: false,
},
filePath: result.filePath,
})
@@ -120,15 +116,12 @@ export async function POST(request: NextRequest) {
})
}
// Handle single file
const result = await parseFileSingle(filePath, fileType)
// Add processing time to metadata
if (result.metadata) {
result.metadata.processingTime = Date.now() - startTime
}
// Transform single file result to match expected frontend format
if (result.success) {
return NextResponse.json({
success: true,
@@ -142,8 +135,6 @@ export async function POST(request: NextRequest) {
})
}
// Only return 500 for actual server errors, not file processing failures
// File processing failures (like file not found, parsing errors) should return 200 with success:false
return NextResponse.json(result)
} catch (error) {
logger.error('Error in file parse API:', error)
@@ -164,7 +155,6 @@ export async function POST(request: NextRequest) {
async function parseFileSingle(filePath: string, fileType?: string): Promise<ParseResult> {
logger.info('Parsing file:', filePath)
// Validate that filePath is not empty
if (!filePath || filePath.trim() === '') {
return {
success: false,
@@ -173,7 +163,6 @@ async function parseFileSingle(filePath: string, fileType?: string): Promise<Par
}
}
// Validate path for security before any processing
const pathValidation = validateFilePath(filePath)
if (!pathValidation.isValid) {
return {
@@ -183,49 +172,40 @@ async function parseFileSingle(filePath: string, fileType?: string): Promise<Par
}
}
// Check if this is an external URL
if (filePath.startsWith('http://') || filePath.startsWith('https://')) {
return handleExternalUrl(filePath, fileType)
}
// Check if this is a cloud storage path (S3 or Blob)
const isS3Path = filePath.includes('/api/files/serve/s3/')
const isBlobPath = filePath.includes('/api/files/serve/blob/')
// Use cloud handler if it's a cloud path or we're in cloud mode
if (isS3Path || isBlobPath || isUsingCloudStorage()) {
return handleCloudFile(filePath, fileType)
}
// Use local handler for local files
return handleLocalFile(filePath, fileType)
}
/**
* Validate file path for security
* Validate file path for security - prevents null byte injection and path traversal attacks
*/
function validateFilePath(filePath: string): { isValid: boolean; error?: string } {
// Check for null bytes
if (filePath.includes('\0')) {
return { isValid: false, error: 'Invalid path: null byte detected' }
}
// Check for path traversal attempts
if (filePath.includes('..')) {
return { isValid: false, error: 'Access denied: path traversal detected' }
}
// Check for tilde characters (home directory access)
if (filePath.includes('~')) {
return { isValid: false, error: 'Invalid path: tilde character not allowed' }
}
// Check for absolute paths outside allowed directories
if (filePath.startsWith('/') && !filePath.startsWith('/api/files/serve/')) {
return { isValid: false, error: 'Path outside allowed directory' }
}
// Check for Windows absolute paths
if (/^[A-Za-z]:\\/.test(filePath)) {
return { isValid: false, error: 'Path outside allowed directory' }
}
@@ -260,12 +240,10 @@ async function handleExternalUrl(url: string, fileType?: string): Promise<ParseR
logger.info(`Downloaded file from URL: ${url}, size: ${buffer.length} bytes`)
// Extract filename from URL
const urlPath = new URL(url).pathname
const filename = urlPath.split('/').pop() || 'download'
const extension = path.extname(filename).toLowerCase().substring(1)
// Process the file based on its content type
if (extension === 'pdf') {
return await handlePdfBuffer(buffer, filename, fileType, url)
}
@@ -276,7 +254,6 @@ async function handleExternalUrl(url: string, fileType?: string): Promise<ParseR
return await handleGenericTextBuffer(buffer, filename, extension, fileType, url)
}
// For binary or unknown files
return handleGenericBuffer(buffer, filename, extension, fileType)
} catch (error) {
logger.error(`Error handling external URL ${url}:`, error)
@@ -289,35 +266,29 @@ async function handleExternalUrl(url: string, fileType?: string): Promise<ParseR
}
/**
* Handle file stored in cloud storage (S3 or Azure Blob)
* Handle file stored in cloud storage
*/
async function handleCloudFile(filePath: string, fileType?: string): Promise<ParseResult> {
try {
// Extract the cloud key from the path
let cloudKey: string
if (filePath.includes('/api/files/serve/s3/')) {
cloudKey = decodeURIComponent(filePath.split('/api/files/serve/s3/')[1])
} else if (filePath.includes('/api/files/serve/blob/')) {
cloudKey = decodeURIComponent(filePath.split('/api/files/serve/blob/')[1])
} else if (filePath.startsWith('/api/files/serve/')) {
// Backwards-compatibility: path like "/api/files/serve/<key>"
cloudKey = decodeURIComponent(filePath.substring('/api/files/serve/'.length))
} else {
// Assume raw key provided
cloudKey = filePath
}
logger.info('Extracted cloud key:', cloudKey)
// Download the file from cloud storage - this can throw for access errors
const fileBuffer = await downloadFile(cloudKey)
logger.info(`Downloaded file from cloud storage: ${cloudKey}, size: ${fileBuffer.length} bytes`)
// Extract the filename from the cloud key
const filename = cloudKey.split('/').pop() || cloudKey
const extension = path.extname(filename).toLowerCase().substring(1)
// Process the file based on its content type
if (extension === 'pdf') {
return await handlePdfBuffer(fileBuffer, filename, fileType, filePath)
}
@@ -325,22 +296,19 @@ async function handleCloudFile(filePath: string, fileType?: string): Promise<Par
return await handleCsvBuffer(fileBuffer, filename, fileType, filePath)
}
if (isSupportedFileType(extension)) {
// For other supported types that we have parsers for
return await handleGenericTextBuffer(fileBuffer, filename, extension, fileType, filePath)
}
// For binary or unknown files
return handleGenericBuffer(fileBuffer, filename, extension, fileType)
} catch (error) {
logger.error(`Error handling cloud file ${filePath}:`, error)
// Check if this is a download/access error that should trigger a 500 response
// For download/access errors, throw to trigger 500 response
const errorMessage = (error as Error).message
if (errorMessage.includes('Access denied') || errorMessage.includes('Forbidden')) {
// For access errors, throw to trigger 500 response
throw new Error(`Error accessing file from cloud storage: ${errorMessage}`)
}
// For other errors (parsing, processing), return success:false
// For other errors (parsing, processing), return success:false and an error message
return {
success: false,
error: `Error accessing file from cloud storage: ${errorMessage}`,
@@ -354,28 +322,23 @@ async function handleCloudFile(filePath: string, fileType?: string): Promise<Par
*/
async function handleLocalFile(filePath: string, fileType?: string): Promise<ParseResult> {
try {
// Extract filename from path
const filename = filePath.split('/').pop() || filePath
const fullPath = path.join(UPLOAD_DIR_SERVER, filename)
logger.info('Processing local file:', fullPath)
// Check if file exists
try {
await fsPromises.access(fullPath)
} catch {
throw new Error(`File not found: ${filename}`)
}
// Parse the file directly
const result = await parseFile(fullPath)
// Get file stats for metadata
const stats = await fsPromises.stat(fullPath)
const fileBuffer = await readFile(fullPath)
const hash = createHash('md5').update(fileBuffer).digest('hex')
// Extract file extension for type detection
const extension = path.extname(filename).toLowerCase().substring(1)
return {
@@ -386,7 +349,7 @@ async function handleLocalFile(filePath: string, fileType?: string): Promise<Par
fileType: fileType || getMimeType(extension),
size: stats.size,
hash,
processingTime: 0, // Will be set by caller
processingTime: 0,
},
}
} catch (error) {
@@ -425,15 +388,14 @@ async function handlePdfBuffer(
fileType: fileType || 'application/pdf',
size: fileBuffer.length,
hash: createHash('md5').update(fileBuffer).digest('hex'),
processingTime: 0, // Will be set by caller
processingTime: 0,
},
}
} catch (error) {
logger.error('Failed to parse PDF in memory:', error)
// Create fallback message for PDF parsing failure
const content = createPdfFailureMessage(
0, // We can't determine page count without parsing
0,
fileBuffer.length,
originalPath || filename,
(error as Error).message
@@ -447,7 +409,7 @@ async function handlePdfBuffer(
fileType: fileType || 'application/pdf',
size: fileBuffer.length,
hash: createHash('md5').update(fileBuffer).digest('hex'),
processingTime: 0, // Will be set by caller
processingTime: 0,
},
}
}
@@ -465,7 +427,6 @@ async function handleCsvBuffer(
try {
logger.info(`Parsing CSV in memory: ${filename}`)
// Use the parseBuffer function from our library
const { parseBuffer } = await import('@/lib/file-parsers')
const result = await parseBuffer(fileBuffer, 'csv')
@@ -477,7 +438,7 @@ async function handleCsvBuffer(
fileType: fileType || 'text/csv',
size: fileBuffer.length,
hash: createHash('md5').update(fileBuffer).digest('hex'),
processingTime: 0, // Will be set by caller
processingTime: 0,
},
}
} catch (error) {
@@ -490,7 +451,7 @@ async function handleCsvBuffer(
fileType: 'text/csv',
size: 0,
hash: '',
processingTime: 0, // Will be set by caller
processingTime: 0,
},
}
}
@@ -509,7 +470,6 @@ async function handleGenericTextBuffer(
try {
logger.info(`Parsing text file in memory: ${filename}`)
// Try to use a specialized parser if available
try {
const { parseBuffer, isSupportedFileType } = await import('@/lib/file-parsers')
@@ -524,7 +484,7 @@ async function handleGenericTextBuffer(
fileType: fileType || getMimeType(extension),
size: fileBuffer.length,
hash: createHash('md5').update(fileBuffer).digest('hex'),
processingTime: 0, // Will be set by caller
processingTime: 0,
},
}
}
@@ -532,7 +492,6 @@ async function handleGenericTextBuffer(
logger.warn('Specialized parser failed, falling back to generic parsing:', parserError)
}
// Fallback to generic text parsing
const content = fileBuffer.toString('utf-8')
return {
@@ -543,7 +502,7 @@ async function handleGenericTextBuffer(
fileType: fileType || getMimeType(extension),
size: fileBuffer.length,
hash: createHash('md5').update(fileBuffer).digest('hex'),
processingTime: 0, // Will be set by caller
processingTime: 0,
},
}
} catch (error) {
@@ -556,7 +515,7 @@ async function handleGenericTextBuffer(
fileType: 'text/plain',
size: 0,
hash: '',
processingTime: 0, // Will be set by caller
processingTime: 0,
},
}
}
@@ -584,7 +543,7 @@ function handleGenericBuffer(
fileType: fileType || getMimeType(extension),
size: fileBuffer.length,
hash: createHash('md5').update(fileBuffer).digest('hex'),
processingTime: 0, // Will be set by caller
processingTime: 0,
},
}
}
@@ -594,8 +553,6 @@ function handleGenericBuffer(
*/
async function parseBufferAsPdf(buffer: Buffer) {
try {
// Import parsers dynamically to avoid initialization issues in tests
// First try to use the main PDF parser
try {
const { PdfParser } = await import('@/lib/file-parsers/pdf-parser')
const parser = new PdfParser()
@@ -606,7 +563,6 @@ async function parseBufferAsPdf(buffer: Buffer) {
}
throw new Error('PDF parser does not support buffer parsing')
} catch (error) {
// Fallback to raw PDF parser
logger.warn('Main PDF parser failed, using raw parser for buffer:', error)
const { RawPdfParser } = await import('@/lib/file-parsers/raw-pdf-parser')
const rawParser = new RawPdfParser()
@@ -655,7 +611,7 @@ Please use a PDF viewer for best results.`
}
/**
* Create error message for PDF parsing failure
* Create error message for PDF parsing failure and make it more readable
*/
function createPdfFailureMessage(
pageCount: number,

View File

@@ -2,7 +2,7 @@ import { randomUUID } from 'crypto'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { SUPPORTED_FIELD_TYPES } from '@/lib/constants/knowledge'
import { SUPPORTED_FIELD_TYPES } from '@/lib/knowledge/consts'
import {
cleanupUnusedTagDefinitions,
createOrUpdateTagDefinitionsBulk,

View File

@@ -2,7 +2,7 @@ import { randomUUID } from 'crypto'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { SUPPORTED_FIELD_TYPES } from '@/lib/constants/knowledge'
import { SUPPORTED_FIELD_TYPES } from '@/lib/knowledge/consts'
import { createTagDefinition, getTagDefinitions } from '@/lib/knowledge/tags/service'
import { createLogger } from '@/lib/logs/console/logger'
import { checkKnowledgeBaseAccess } from '@/app/api/knowledge/utils'

View File

@@ -1,6 +1,6 @@
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { TAG_SLOTS } from '@/lib/constants/knowledge'
import { TAG_SLOTS } from '@/lib/knowledge/consts'
import { getDocumentTagDefinitions } from '@/lib/knowledge/tags/service'
import { createLogger } from '@/lib/logs/console/logger'
import { estimateTokenCount } from '@/lib/tokenization/estimators'

View File

@@ -41,6 +41,7 @@ export async function GET(
executionId,
workflowId: workflowLog.workflowId,
workflowState: snapshot.stateData,
executionData: workflowLog.executionData || {},
executionMetadata: {
trigger: workflowLog.trigger,
startedAt: workflowLog.startedAt.toISOString(),

View File

@@ -23,7 +23,6 @@ describe('Scheduled Workflow Execution API Route', () => {
edges: sampleWorkflowState.edges || [],
loops: sampleWorkflowState.loops || {},
parallels: {},
whiles: {},
isFromNormalizedTables: true,
}),
}))

View File

@@ -230,7 +230,6 @@ export async function GET() {
const edges = normalizedData.edges
const loops = normalizedData.loops
const parallels = normalizedData.parallels
const whiles = normalizedData.whiles
logger.info(
`[${requestId}] Loaded scheduled workflow ${schedule.workflowId} from normalized tables`
)
@@ -385,7 +384,6 @@ export async function GET() {
edges,
loops,
parallels,
whiles,
true // Enable validation during execution
)
@@ -401,6 +399,9 @@ export async function GET() {
userId: workflowRecord.userId,
workspaceId: workflowRecord.workspaceId || '',
variables: variables || {},
initialInput: input,
startBlockId: schedule.blockId || undefined,
executionType: 'schedule',
})
const executor = new Executor({
@@ -469,10 +470,19 @@ export async function GET() {
// Create a minimal log entry for early failures
try {
const input = {
workflowId: schedule.workflowId,
_context: {
workflowId: schedule.workflowId,
},
}
await loggingSession.safeStart({
userId: workflowRecord.userId,
workspaceId: workflowRecord.workspaceId || '',
variables: {},
initialInput: input,
startBlockId: schedule.blockId || undefined,
executionType: 'schedule',
})
await loggingSession.safeCompleteWithError({
@@ -588,10 +598,17 @@ export async function GET() {
requestId
)
const input = {
workflowId: schedule.workflowId,
_context: {
workflowId: schedule.workflowId,
},
}
await failureLoggingSession.safeStart({
userId: workflowRecord.userId,
workspaceId: workflowRecord.workspaceId || '',
variables: {},
initialInput: input,
})
await failureLoggingSession.safeCompleteWithError({

View File

@@ -68,7 +68,6 @@ const CreateTemplateSchema = z.object({
edges: z.array(z.any()),
loops: z.record(z.any()),
parallels: z.record(z.any()),
whiles: z.record(z.any()),
}),
})

View File

@@ -0,0 +1,68 @@
import { randomUUID } from 'crypto'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { createLogger } from '@/lib/logs/console/logger'
import { buildDeleteQuery, createMySQLConnection, executeQuery } from '@/app/api/tools/mysql/utils'
const logger = createLogger('MySQLDeleteAPI')
const DeleteSchema = z.object({
host: z.string().min(1, 'Host is required'),
port: z.coerce.number().int().positive('Port must be a positive integer'),
database: z.string().min(1, 'Database name is required'),
username: z.string().min(1, 'Username is required'),
password: z.string().min(1, 'Password is required'),
ssl: z.enum(['disabled', 'required', 'preferred']).default('preferred'),
table: z.string().min(1, 'Table name is required'),
where: z.string().min(1, 'WHERE clause is required'),
})
export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8)
try {
const body = await request.json()
const params = DeleteSchema.parse(body)
logger.info(
`[${requestId}] Deleting data from ${params.table} on ${params.host}:${params.port}/${params.database}`
)
const connection = await createMySQLConnection({
host: params.host,
port: params.port,
database: params.database,
username: params.username,
password: params.password,
ssl: params.ssl,
})
try {
const { query, values } = buildDeleteQuery(params.table, params.where)
const result = await executeQuery(connection, query, values)
logger.info(`[${requestId}] Delete executed successfully, ${result.rowCount} row(s) deleted`)
return NextResponse.json({
message: `Data deleted successfully. ${result.rowCount} row(s) affected.`,
rows: result.rows,
rowCount: result.rowCount,
})
} finally {
await connection.end()
}
} catch (error) {
if (error instanceof z.ZodError) {
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
return NextResponse.json(
{ error: 'Invalid request data', details: error.errors },
{ status: 400 }
)
}
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
logger.error(`[${requestId}] MySQL delete failed:`, error)
return NextResponse.json({ error: `MySQL delete failed: ${errorMessage}` }, { status: 500 })
}
}

View File

@@ -0,0 +1,75 @@
import { randomUUID } from 'crypto'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { createLogger } from '@/lib/logs/console/logger'
import { createMySQLConnection, executeQuery, validateQuery } from '@/app/api/tools/mysql/utils'
const logger = createLogger('MySQLExecuteAPI')
const ExecuteSchema = z.object({
host: z.string().min(1, 'Host is required'),
port: z.coerce.number().int().positive('Port must be a positive integer'),
database: z.string().min(1, 'Database name is required'),
username: z.string().min(1, 'Username is required'),
password: z.string().min(1, 'Password is required'),
ssl: z.enum(['disabled', 'required', 'preferred']).default('preferred'),
query: z.string().min(1, 'Query is required'),
})
export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8)
try {
const body = await request.json()
const params = ExecuteSchema.parse(body)
logger.info(
`[${requestId}] Executing raw SQL on ${params.host}:${params.port}/${params.database}`
)
const validation = validateQuery(params.query)
if (!validation.isValid) {
logger.warn(`[${requestId}] Query validation failed: ${validation.error}`)
return NextResponse.json(
{ error: `Query validation failed: ${validation.error}` },
{ status: 400 }
)
}
const connection = await createMySQLConnection({
host: params.host,
port: params.port,
database: params.database,
username: params.username,
password: params.password,
ssl: params.ssl,
})
try {
const result = await executeQuery(connection, params.query)
logger.info(`[${requestId}] SQL executed successfully, ${result.rowCount} row(s) affected`)
return NextResponse.json({
message: `SQL executed successfully. ${result.rowCount} row(s) affected.`,
rows: result.rows,
rowCount: result.rowCount,
})
} finally {
await connection.end()
}
} catch (error) {
if (error instanceof z.ZodError) {
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
return NextResponse.json(
{ error: 'Invalid request data', details: error.errors },
{ status: 400 }
)
}
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
logger.error(`[${requestId}] MySQL execute failed:`, error)
return NextResponse.json({ error: `MySQL execute failed: ${errorMessage}` }, { status: 500 })
}
}

View File

@@ -0,0 +1,89 @@
import { randomUUID } from 'crypto'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { createLogger } from '@/lib/logs/console/logger'
import { buildInsertQuery, createMySQLConnection, executeQuery } from '@/app/api/tools/mysql/utils'
const logger = createLogger('MySQLInsertAPI')
const InsertSchema = z.object({
host: z.string().min(1, 'Host is required'),
port: z.coerce.number().int().positive('Port must be a positive integer'),
database: z.string().min(1, 'Database name is required'),
username: z.string().min(1, 'Username is required'),
password: z.string().min(1, 'Password is required'),
ssl: z.enum(['disabled', 'required', 'preferred']).default('preferred'),
table: z.string().min(1, 'Table name is required'),
data: z.union([
z
.record(z.unknown())
.refine((obj) => Object.keys(obj).length > 0, 'Data object cannot be empty'),
z
.string()
.min(1)
.transform((str) => {
try {
const parsed = JSON.parse(str)
if (typeof parsed !== 'object' || parsed === null || Array.isArray(parsed)) {
throw new Error('Data must be a JSON object')
}
return parsed
} catch (e) {
const errorMsg = e instanceof Error ? e.message : 'Unknown error'
throw new Error(
`Invalid JSON format in data field: ${errorMsg}. Received: ${str.substring(0, 100)}...`
)
}
}),
]),
})
export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8)
try {
const body = await request.json()
const params = InsertSchema.parse(body)
logger.info(
`[${requestId}] Inserting data into ${params.table} on ${params.host}:${params.port}/${params.database}`
)
const connection = await createMySQLConnection({
host: params.host,
port: params.port,
database: params.database,
username: params.username,
password: params.password,
ssl: params.ssl,
})
try {
const { query, values } = buildInsertQuery(params.table, params.data)
const result = await executeQuery(connection, query, values)
logger.info(`[${requestId}] Insert executed successfully, ${result.rowCount} row(s) inserted`)
return NextResponse.json({
message: `Data inserted successfully. ${result.rowCount} row(s) affected.`,
rows: result.rows,
rowCount: result.rowCount,
})
} finally {
await connection.end()
}
} catch (error) {
if (error instanceof z.ZodError) {
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
return NextResponse.json(
{ error: 'Invalid request data', details: error.errors },
{ status: 400 }
)
}
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
logger.error(`[${requestId}] MySQL insert failed:`, error)
return NextResponse.json({ error: `MySQL insert failed: ${errorMessage}` }, { status: 500 })
}
}
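Because the data field above is a union, the two payload fragments below (hypothetical column values, connection fields omitted) are equivalent once InsertSchema runs; an array or a non-object JSON string is rejected during parsing.

// Both shapes are accepted by InsertSchema's data union (connection fields elided for brevity).
const asObject = { table: 'users', data: { email: 'ada@example.com', active: true } }
const asJsonString = { table: 'users', data: '{"email":"ada@example.com","active":true}' }
// '["a","b"]' or '"just a string"' throws 'Data must be a JSON object' inside the transform,
// which the route then reports as an invalid-request error.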

View File

@@ -0,0 +1,75 @@
import { randomUUID } from 'crypto'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { createLogger } from '@/lib/logs/console/logger'
import { createMySQLConnection, executeQuery, validateQuery } from '@/app/api/tools/mysql/utils'
const logger = createLogger('MySQLQueryAPI')
const QuerySchema = z.object({
host: z.string().min(1, 'Host is required'),
port: z.coerce.number().int().positive('Port must be a positive integer'),
database: z.string().min(1, 'Database name is required'),
username: z.string().min(1, 'Username is required'),
password: z.string().min(1, 'Password is required'),
ssl: z.enum(['disabled', 'required', 'preferred']).default('preferred'),
query: z.string().min(1, 'Query is required'),
})
export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8)
try {
const body = await request.json()
const params = QuerySchema.parse(body)
logger.info(
`[${requestId}] Executing MySQL query on ${params.host}:${params.port}/${params.database}`
)
const validation = validateQuery(params.query)
if (!validation.isValid) {
logger.warn(`[${requestId}] Query validation failed: ${validation.error}`)
return NextResponse.json(
{ error: `Query validation failed: ${validation.error}` },
{ status: 400 }
)
}
const connection = await createMySQLConnection({
host: params.host,
port: params.port,
database: params.database,
username: params.username,
password: params.password,
ssl: params.ssl,
})
try {
const result = await executeQuery(connection, params.query)
logger.info(`[${requestId}] Query executed successfully, returned ${result.rowCount} rows`)
return NextResponse.json({
message: `Query executed successfully. ${result.rowCount} row(s) returned.`,
rows: result.rows,
rowCount: result.rowCount,
})
} finally {
await connection.end()
}
} catch (error) {
if (error instanceof z.ZodError) {
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
return NextResponse.json(
{ error: 'Invalid request data', details: error.errors },
{ status: 400 }
)
}
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
logger.error(`[${requestId}] MySQL query failed:`, error)
return NextResponse.json({ error: `MySQL query failed: ${errorMessage}` }, { status: 500 })
}
}

View File

@@ -0,0 +1,87 @@
import { randomUUID } from 'crypto'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { createLogger } from '@/lib/logs/console/logger'
import { buildUpdateQuery, createMySQLConnection, executeQuery } from '@/app/api/tools/mysql/utils'
const logger = createLogger('MySQLUpdateAPI')
const UpdateSchema = z.object({
host: z.string().min(1, 'Host is required'),
port: z.coerce.number().int().positive('Port must be a positive integer'),
database: z.string().min(1, 'Database name is required'),
username: z.string().min(1, 'Username is required'),
password: z.string().min(1, 'Password is required'),
ssl: z.enum(['disabled', 'required', 'preferred']).default('preferred'),
table: z.string().min(1, 'Table name is required'),
data: z.union([
z
.record(z.unknown())
.refine((obj) => Object.keys(obj).length > 0, 'Data object cannot be empty'),
z
.string()
.min(1)
.transform((str) => {
try {
const parsed = JSON.parse(str)
if (typeof parsed !== 'object' || parsed === null || Array.isArray(parsed)) {
throw new Error('Data must be a JSON object')
}
return parsed
} catch (e) {
const errorMsg = e instanceof Error ? e.message : 'Unknown error'
throw new Error(`Invalid JSON format in data field: ${errorMsg}`)
}
}),
]),
where: z.string().min(1, 'WHERE clause is required'),
})
export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8)
try {
const body = await request.json()
const params = UpdateSchema.parse(body)
logger.info(
`[${requestId}] Updating data in ${params.table} on ${params.host}:${params.port}/${params.database}`
)
const connection = await createMySQLConnection({
host: params.host,
port: params.port,
database: params.database,
username: params.username,
password: params.password,
ssl: params.ssl,
})
try {
const { query, values } = buildUpdateQuery(params.table, params.data, params.where)
const result = await executeQuery(connection, query, values)
logger.info(`[${requestId}] Update executed successfully, ${result.rowCount} row(s) updated`)
return NextResponse.json({
message: `Data updated successfully. ${result.rowCount} row(s) affected.`,
rows: result.rows,
rowCount: result.rowCount,
})
} finally {
await connection.end()
}
} catch (error) {
if (error instanceof z.ZodError) {
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
return NextResponse.json(
{ error: 'Invalid request data', details: error.errors },
{ status: 400 }
)
}
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
logger.error(`[${requestId}] MySQL update failed:`, error)
return NextResponse.json({ error: `MySQL update failed: ${errorMessage}` }, { status: 500 })
}
}
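As an illustration of how the WHERE guard interacts with this route (values hypothetical): validateWhereClause in the utils module further down rejects comment markers and stacked statements, so the first payload below fails before any SQL reaches MySQL, while the second proceeds.

// Rejected: ';' followed by DROP trips validateWhereClause, so the route returns a 500 with
// 'MySQL update failed: WHERE clause contains potentially dangerous operation'.
const blockedWhere = { table: 'users', data: { active: false }, where: '1=1; DROP TABLE users' }
// Accepted: a plain comparison passes the guard; the column values are still bound as '?' placeholders.
const allowedWhere = { table: 'users', data: { active: false }, where: "last_login < '2024-01-01'" }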

View File

@@ -0,0 +1,175 @@
import mysql from 'mysql2/promise'
export interface MySQLConnectionConfig {
host: string
port: number
database: string
username: string
password: string
ssl?: 'disabled' | 'required' | 'preferred'
}
export async function createMySQLConnection(config: MySQLConnectionConfig) {
const connectionConfig: mysql.ConnectionOptions = {
host: config.host,
port: config.port,
database: config.database,
user: config.username,
password: config.password,
}
if (config.ssl === 'disabled') {
// Don't set ssl property at all to disable SSL
} else if (config.ssl === 'required') {
connectionConfig.ssl = { rejectUnauthorized: true }
} else if (config.ssl === 'preferred') {
connectionConfig.ssl = { rejectUnauthorized: false }
}
return mysql.createConnection(connectionConfig)
}
export async function executeQuery(
connection: mysql.Connection,
query: string,
values?: unknown[]
) {
const [rows, fields] = await connection.execute(query, values)
if (Array.isArray(rows)) {
return {
rows: rows as unknown[],
rowCount: rows.length,
fields,
}
}
// Non-SELECT statements resolve to a ResultSetHeader rather than rows; surface affectedRows as the count
return {
rows: [],
rowCount: (rows as mysql.ResultSetHeader).affectedRows || 0,
fields,
}
}
export function validateQuery(query: string): { isValid: boolean; error?: string } {
const trimmedQuery = query.trim().toLowerCase()
const dangerousPatterns = [
/drop\s+database/i,
/drop\s+schema/i,
/drop\s+user/i,
/create\s+user/i,
/grant\s+/i,
/revoke\s+/i,
/alter\s+user/i,
/set\s+global/i,
/set\s+session/i,
/load\s+data/i,
/into\s+outfile/i,
/into\s+dumpfile/i,
/load_file\s*\(/i,
/system\s+/i,
/exec\s+/i,
/execute\s+immediate/i,
/xp_cmdshell/i,
/sp_configure/i,
/information_schema\.tables/i,
/mysql\.user/i,
/mysql\.db/i,
/mysql\.host/i,
/performance_schema/i,
/sys\./i,
]
for (const pattern of dangerousPatterns) {
if (pattern.test(query)) {
return {
isValid: false,
error: `Query contains potentially dangerous operation: ${pattern.source}`,
}
}
}
const allowedStatements = /^(select|insert|update|delete|with|show|describe|explain)\s+/i
if (!allowedStatements.test(trimmedQuery)) {
return {
isValid: false,
error:
'Only SELECT, INSERT, UPDATE, DELETE, WITH, SHOW, DESCRIBE, and EXPLAIN statements are allowed',
}
}
return { isValid: true }
}
export function buildInsertQuery(table: string, data: Record<string, unknown>) {
const sanitizedTable = sanitizeIdentifier(table)
const columns = Object.keys(data)
const values = Object.values(data)
const placeholders = columns.map(() => '?').join(', ')
const query = `INSERT INTO ${sanitizedTable} (${columns.map(sanitizeIdentifier).join(', ')}) VALUES (${placeholders})`
return { query, values }
}
export function buildUpdateQuery(table: string, data: Record<string, unknown>, where: string) {
validateWhereClause(where)
const sanitizedTable = sanitizeIdentifier(table)
const columns = Object.keys(data)
const values = Object.values(data)
const setClause = columns.map((col) => `${sanitizeIdentifier(col)} = ?`).join(', ')
const query = `UPDATE ${sanitizedTable} SET ${setClause} WHERE ${where}`
return { query, values }
}
export function buildDeleteQuery(table: string, where: string) {
validateWhereClause(where)
const sanitizedTable = sanitizeIdentifier(table)
const query = `DELETE FROM ${sanitizedTable} WHERE ${where}`
return { query, values: [] }
}
function validateWhereClause(where: string): void {
const dangerousPatterns = [
/;\s*(drop|delete|insert|update|create|alter|grant|revoke)/i,
/union\s+select/i,
/into\s+outfile/i,
/load_file/i,
/--/,
/\/\*/,
/\*\//,
]
for (const pattern of dangerousPatterns) {
if (pattern.test(where)) {
throw new Error('WHERE clause contains potentially dangerous operation')
}
}
}
export function sanitizeIdentifier(identifier: string): string {
if (identifier.includes('.')) {
const parts = identifier.split('.')
return parts.map((part) => sanitizeSingleIdentifier(part)).join('.')
}
return sanitizeSingleIdentifier(identifier)
}
function sanitizeSingleIdentifier(identifier: string): string {
const cleaned = identifier.replace(/`/g, '')
if (!/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(cleaned)) {
throw new Error(
`Invalid identifier: ${identifier}. Identifiers must start with a letter or underscore and contain only letters, numbers, and underscores.`
)
}
return `\`${cleaned}\``
}
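A minimal sketch of what these helpers produce for a hypothetical table; the identifiers and values below are examples, not from the diff.

const ins = buildInsertQuery('users', { email: 'ada@example.com', active: true })
// ins.query:  INSERT INTO `users` (`email`, `active`) VALUES (?, ?)
// ins.values: ['ada@example.com', true]
const upd = buildUpdateQuery('app.users', { active: false }, "email = 'ada@example.com'")
// upd.query:  UPDATE `app`.`users` SET `active` = ? WHERE email = 'ada@example.com'
// upd.values: [false]
// Column values are parameterized, but the WHERE string is interpolated verbatim once
// validateWhereClause passes, and sanitizeIdentifier('users; drop') throws because the
// cleaned identifier fails the /^[a-zA-Z_][a-zA-Z0-9_]*$/ check.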

View File

@@ -0,0 +1,70 @@
import { randomUUID } from 'crypto'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { createLogger } from '@/lib/logs/console/logger'
import { createPostgresConnection, executeDelete } from '@/app/api/tools/postgresql/utils'
const logger = createLogger('PostgreSQLDeleteAPI')
const DeleteSchema = z.object({
host: z.string().min(1, 'Host is required'),
port: z.coerce.number().int().positive('Port must be a positive integer'),
database: z.string().min(1, 'Database name is required'),
username: z.string().min(1, 'Username is required'),
password: z.string().min(1, 'Password is required'),
ssl: z.enum(['disabled', 'required', 'preferred']).default('preferred'),
table: z.string().min(1, 'Table name is required'),
where: z.string().min(1, 'WHERE clause is required'),
})
export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8)
try {
const body = await request.json()
const params = DeleteSchema.parse(body)
logger.info(
`[${requestId}] Deleting data from ${params.table} on ${params.host}:${params.port}/${params.database}`
)
const sql = createPostgresConnection({
host: params.host,
port: params.port,
database: params.database,
username: params.username,
password: params.password,
ssl: params.ssl,
})
try {
const result = await executeDelete(sql, params.table, params.where)
logger.info(`[${requestId}] Delete executed successfully, ${result.rowCount} row(s) deleted`)
return NextResponse.json({
message: `Data deleted successfully. ${result.rowCount} row(s) affected.`,
rows: result.rows,
rowCount: result.rowCount,
})
} finally {
await sql.end()
}
} catch (error) {
if (error instanceof z.ZodError) {
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
return NextResponse.json(
{ error: 'Invalid request data', details: error.errors },
{ status: 400 }
)
}
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
logger.error(`[${requestId}] PostgreSQL delete failed:`, error)
return NextResponse.json(
{ error: `PostgreSQL delete failed: ${errorMessage}` },
{ status: 500 }
)
}
}

View File

@@ -0,0 +1,82 @@
import { randomUUID } from 'crypto'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { createLogger } from '@/lib/logs/console/logger'
import {
createPostgresConnection,
executeQuery,
validateQuery,
} from '@/app/api/tools/postgresql/utils'
const logger = createLogger('PostgreSQLExecuteAPI')
const ExecuteSchema = z.object({
host: z.string().min(1, 'Host is required'),
port: z.coerce.number().int().positive('Port must be a positive integer'),
database: z.string().min(1, 'Database name is required'),
username: z.string().min(1, 'Username is required'),
password: z.string().min(1, 'Password is required'),
ssl: z.enum(['disabled', 'required', 'preferred']).default('preferred'),
query: z.string().min(1, 'Query is required'),
})
export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8)
try {
const body = await request.json()
const params = ExecuteSchema.parse(body)
logger.info(
`[${requestId}] Executing raw SQL on ${params.host}:${params.port}/${params.database}`
)
const validation = validateQuery(params.query)
if (!validation.isValid) {
logger.warn(`[${requestId}] Query validation failed: ${validation.error}`)
return NextResponse.json(
{ error: `Query validation failed: ${validation.error}` },
{ status: 400 }
)
}
const sql = createPostgresConnection({
host: params.host,
port: params.port,
database: params.database,
username: params.username,
password: params.password,
ssl: params.ssl,
})
try {
const result = await executeQuery(sql, params.query)
logger.info(`[${requestId}] SQL executed successfully, ${result.rowCount} row(s) affected`)
return NextResponse.json({
message: `SQL executed successfully. ${result.rowCount} row(s) affected.`,
rows: result.rows,
rowCount: result.rowCount,
})
} finally {
await sql.end()
}
} catch (error) {
if (error instanceof z.ZodError) {
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
return NextResponse.json(
{ error: 'Invalid request data', details: error.errors },
{ status: 400 }
)
}
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
logger.error(`[${requestId}] PostgreSQL execute failed:`, error)
return NextResponse.json(
{ error: `PostgreSQL execute failed: ${errorMessage}` },
{ status: 500 }
)
}
}

View File

@@ -0,0 +1,92 @@
import { randomUUID } from 'crypto'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { createLogger } from '@/lib/logs/console/logger'
import { createPostgresConnection, executeInsert } from '@/app/api/tools/postgresql/utils'
const logger = createLogger('PostgreSQLInsertAPI')
const InsertSchema = z.object({
host: z.string().min(1, 'Host is required'),
port: z.coerce.number().int().positive('Port must be a positive integer'),
database: z.string().min(1, 'Database name is required'),
username: z.string().min(1, 'Username is required'),
password: z.string().min(1, 'Password is required'),
ssl: z.enum(['disabled', 'required', 'preferred']).default('preferred'),
table: z.string().min(1, 'Table name is required'),
data: z.union([
z
.record(z.unknown())
.refine((obj) => Object.keys(obj).length > 0, 'Data object cannot be empty'),
z
.string()
.min(1)
.transform((str) => {
try {
const parsed = JSON.parse(str)
if (typeof parsed !== 'object' || parsed === null || Array.isArray(parsed)) {
throw new Error('Data must be a JSON object')
}
return parsed
} catch (e) {
const errorMsg = e instanceof Error ? e.message : 'Unknown error'
throw new Error(
`Invalid JSON format in data field: ${errorMsg}. Received: ${str.substring(0, 100)}...`
)
}
}),
]),
})
export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8)
try {
const body = await request.json()
const params = InsertSchema.parse(body)
logger.info(
`[${requestId}] Inserting data into ${params.table} on ${params.host}:${params.port}/${params.database}`
)
const sql = createPostgresConnection({
host: params.host,
port: params.port,
database: params.database,
username: params.username,
password: params.password,
ssl: params.ssl,
})
try {
const result = await executeInsert(sql, params.table, params.data)
logger.info(`[${requestId}] Insert executed successfully, ${result.rowCount} row(s) inserted`)
return NextResponse.json({
message: `Data inserted successfully. ${result.rowCount} row(s) affected.`,
rows: result.rows,
rowCount: result.rowCount,
})
} finally {
await sql.end()
}
} catch (error) {
if (error instanceof z.ZodError) {
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
return NextResponse.json(
{ error: 'Invalid request data', details: error.errors },
{ status: 400 }
)
}
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
logger.error(`[${requestId}] PostgreSQL insert failed:`, error)
return NextResponse.json(
{ error: `PostgreSQL insert failed: ${errorMessage}` },
{ status: 500 }
)
}
}

View File

@@ -0,0 +1,66 @@
import { randomUUID } from 'crypto'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { createLogger } from '@/lib/logs/console/logger'
import { createPostgresConnection, executeQuery } from '@/app/api/tools/postgresql/utils'
const logger = createLogger('PostgreSQLQueryAPI')
const QuerySchema = z.object({
host: z.string().min(1, 'Host is required'),
port: z.coerce.number().int().positive('Port must be a positive integer'),
database: z.string().min(1, 'Database name is required'),
username: z.string().min(1, 'Username is required'),
password: z.string().min(1, 'Password is required'),
ssl: z.enum(['disabled', 'required', 'preferred']).default('preferred'),
query: z.string().min(1, 'Query is required'),
})
export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8)
try {
const body = await request.json()
const params = QuerySchema.parse(body)
logger.info(
`[${requestId}] Executing PostgreSQL query on ${params.host}:${params.port}/${params.database}`
)
const sql = createPostgresConnection({
host: params.host,
port: params.port,
database: params.database,
username: params.username,
password: params.password,
ssl: params.ssl,
})
try {
const result = await executeQuery(sql, params.query)
logger.info(`[${requestId}] Query executed successfully, returned ${result.rowCount} rows`)
return NextResponse.json({
message: `Query executed successfully. ${result.rowCount} row(s) returned.`,
rows: result.rows,
rowCount: result.rowCount,
})
} finally {
await sql.end()
}
} catch (error) {
if (error instanceof z.ZodError) {
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
return NextResponse.json(
{ error: 'Invalid request data', details: error.errors },
{ status: 400 }
)
}
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
logger.error(`[${requestId}] PostgreSQL query failed:`, error)
return NextResponse.json({ error: `PostgreSQL query failed: ${errorMessage}` }, { status: 500 })
}
}

View File

@@ -0,0 +1,89 @@
import { randomUUID } from 'crypto'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { createLogger } from '@/lib/logs/console/logger'
import { createPostgresConnection, executeUpdate } from '@/app/api/tools/postgresql/utils'
const logger = createLogger('PostgreSQLUpdateAPI')
const UpdateSchema = z.object({
host: z.string().min(1, 'Host is required'),
port: z.coerce.number().int().positive('Port must be a positive integer'),
database: z.string().min(1, 'Database name is required'),
username: z.string().min(1, 'Username is required'),
password: z.string().min(1, 'Password is required'),
ssl: z.enum(['disabled', 'required', 'preferred']).default('preferred'),
table: z.string().min(1, 'Table name is required'),
data: z.union([
z
.record(z.unknown())
.refine((obj) => Object.keys(obj).length > 0, 'Data object cannot be empty'),
z
.string()
.min(1)
.transform((str) => {
try {
const parsed = JSON.parse(str)
if (typeof parsed !== 'object' || parsed === null || Array.isArray(parsed)) {
throw new Error('Data must be a JSON object')
}
return parsed
} catch (e) {
const errorMsg = e instanceof Error ? e.message : 'Unknown error'
throw new Error(`Invalid JSON format in data field: ${errorMsg}`)
}
}),
]),
where: z.string().min(1, 'WHERE clause is required'),
})
export async function POST(request: NextRequest) {
const requestId = randomUUID().slice(0, 8)
try {
const body = await request.json()
const params = UpdateSchema.parse(body)
logger.info(
`[${requestId}] Updating data in ${params.table} on ${params.host}:${params.port}/${params.database}`
)
const sql = createPostgresConnection({
host: params.host,
port: params.port,
database: params.database,
username: params.username,
password: params.password,
ssl: params.ssl,
})
try {
const result = await executeUpdate(sql, params.table, params.data, params.where)
logger.info(`[${requestId}] Update executed successfully, ${result.rowCount} row(s) updated`)
return NextResponse.json({
message: `Data updated successfully. ${result.rowCount} row(s) affected.`,
rows: result.rows,
rowCount: result.rowCount,
})
} finally {
await sql.end()
}
} catch (error) {
if (error instanceof z.ZodError) {
logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors })
return NextResponse.json(
{ error: 'Invalid request data', details: error.errors },
{ status: 400 }
)
}
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
logger.error(`[${requestId}] PostgreSQL update failed:`, error)
return NextResponse.json(
{ error: `PostgreSQL update failed: ${errorMessage}` },
{ status: 500 }
)
}
}

View File

@@ -0,0 +1,194 @@
import postgres from 'postgres'
import type { PostgresConnectionConfig } from '@/tools/postgresql/types'
export function createPostgresConnection(config: PostgresConnectionConfig) {
const sslConfig =
config.ssl === 'disabled'
? false
: config.ssl === 'required'
? 'require'
: config.ssl === 'preferred'
? 'prefer'
: 'require'
const sql = postgres({
host: config.host,
port: config.port,
database: config.database,
username: config.username,
password: config.password,
ssl: sslConfig,
connect_timeout: 10, // 10 seconds
idle_timeout: 20, // 20 seconds
max_lifetime: 60 * 30, // 30 minutes
max: 1, // Single connection for tool usage
})
return sql
}
export async function executeQuery(
sql: any,
query: string,
params: unknown[] = []
): Promise<{ rows: unknown[]; rowCount: number }> {
const result = await sql.unsafe(query, params)
return {
rows: Array.isArray(result) ? result : [result],
rowCount: Array.isArray(result) ? result.length : result ? 1 : 0,
}
}
export function validateQuery(query: string): { isValid: boolean; error?: string } {
const trimmedQuery = query.trim().toLowerCase()
// Block dangerous SQL operations
const dangerousPatterns = [
/drop\s+database/i,
/drop\s+schema/i,
/drop\s+user/i,
/create\s+user/i,
/create\s+role/i,
/grant\s+/i,
/revoke\s+/i,
/alter\s+user/i,
/alter\s+role/i,
/set\s+role/i,
/reset\s+role/i,
/copy\s+.*from/i,
/copy\s+.*to/i,
/lo_import/i,
/lo_export/i,
/pg_read_file/i,
/pg_write_file/i,
/pg_ls_dir/i,
/information_schema\.tables/i,
/pg_catalog/i,
/pg_user/i,
/pg_shadow/i,
/pg_roles/i,
/pg_authid/i,
/pg_stat_activity/i,
/dblink/i,
/\\\\copy/i,
]
for (const pattern of dangerousPatterns) {
if (pattern.test(query)) {
return {
isValid: false,
error: `Query contains potentially dangerous operation: ${pattern.source}`,
}
}
}
const allowedStatements = /^(select|insert|update|delete|with|explain|analyze|show)\s+/i
if (!allowedStatements.test(trimmedQuery)) {
return {
isValid: false,
error:
'Only SELECT, INSERT, UPDATE, DELETE, WITH, EXPLAIN, ANALYZE, and SHOW statements are allowed',
}
}
return { isValid: true }
}
export function sanitizeIdentifier(identifier: string): string {
if (identifier.includes('.')) {
const parts = identifier.split('.')
return parts.map((part) => sanitizeSingleIdentifier(part)).join('.')
}
return sanitizeSingleIdentifier(identifier)
}
function validateWhereClause(where: string): void {
const dangerousPatterns = [
/;\s*(drop|delete|insert|update|create|alter|grant|revoke)/i,
/union\s+select/i,
/into\s+outfile/i,
/load_file/i,
/--/,
/\/\*/,
/\*\//,
]
for (const pattern of dangerousPatterns) {
if (pattern.test(where)) {
throw new Error('WHERE clause contains potentially dangerous operation')
}
}
}
function sanitizeSingleIdentifier(identifier: string): string {
const cleaned = identifier.replace(/"/g, '')
if (!/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(cleaned)) {
throw new Error(
`Invalid identifier: ${identifier}. Identifiers must start with a letter or underscore and contain only letters, numbers, and underscores.`
)
}
return `"${cleaned}"`
}
export async function executeInsert(
sql: any,
table: string,
data: Record<string, unknown>
): Promise<{ rows: unknown[]; rowCount: number }> {
const sanitizedTable = sanitizeIdentifier(table)
const columns = Object.keys(data)
const sanitizedColumns = columns.map((col) => sanitizeIdentifier(col))
const placeholders = columns.map((_, index) => `$${index + 1}`)
const values = columns.map((col) => data[col])
const query = `INSERT INTO ${sanitizedTable} (${sanitizedColumns.join(', ')}) VALUES (${placeholders.join(', ')}) RETURNING *`
const result = await sql.unsafe(query, values)
return {
rows: Array.isArray(result) ? result : [result],
rowCount: Array.isArray(result) ? result.length : result ? 1 : 0,
}
}
export async function executeUpdate(
sql: any,
table: string,
data: Record<string, unknown>,
where: string
): Promise<{ rows: unknown[]; rowCount: number }> {
validateWhereClause(where)
const sanitizedTable = sanitizeIdentifier(table)
const columns = Object.keys(data)
const sanitizedColumns = columns.map((col) => sanitizeIdentifier(col))
const setClause = sanitizedColumns.map((col, index) => `${col} = $${index + 1}`).join(', ')
const values = columns.map((col) => data[col])
const query = `UPDATE ${sanitizedTable} SET ${setClause} WHERE ${where} RETURNING *`
const result = await sql.unsafe(query, values)
return {
rows: Array.isArray(result) ? result : [result],
rowCount: Array.isArray(result) ? result.length : result ? 1 : 0,
}
}
export async function executeDelete(
sql: any,
table: string,
where: string
): Promise<{ rows: unknown[]; rowCount: number }> {
validateWhereClause(where)
const sanitizedTable = sanitizeIdentifier(table)
const query = `DELETE FROM ${sanitizedTable} WHERE ${where} RETURNING *`
const result = await sql.unsafe(query, [])
return {
rows: Array.isArray(result) ? result : [result],
rowCount: Array.isArray(result) ? result.length : result ? 1 : 0,
}
}
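For contrast with the MySQL helpers earlier in this diff, a sketch of what these PostgreSQL helpers generate for a hypothetical table and values.

// executeInsert(sql, 'users', { email: 'ada@example.com', active: true }) issues roughly:
//   INSERT INTO "users" ("email", "active") VALUES ($1, $2) RETURNING *
//   with values ['ada@example.com', true]
// Identifiers are double-quoted per PostgreSQL rules (vs. backticks in the MySQL utils), placeholders
// are numbered ($1, $2, ...) instead of '?', and executeUpdate/executeDelete append RETURNING * so the
// returned rows and rowCount reflect the affected records.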

View File

@@ -153,7 +153,6 @@ describe('Webhook Trigger API Route', () => {
edges: [],
loops: {},
parallels: {},
whiles: {},
isFromNormalizedTables: true,
}),
}))

View File

@@ -11,11 +11,7 @@ import type { BlockConfig } from '@/blocks/types'
import { resolveOutputType } from '@/blocks/utils'
import { db } from '@/db'
import { workflow as workflowTable } from '@/db/schema'
import {
generateLoopBlocks,
generateParallelBlocks,
generateWhileBlocks,
} from '@/stores/workflows/workflow/utils'
import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/workflow/utils'
export const dynamic = 'force-dynamic'
@@ -129,7 +125,6 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
edges: currentWorkflowData.edges,
loops: currentWorkflowData.loops || {},
parallels: currentWorkflowData.parallels || {},
whiles: currentWorkflowData.whiles || {},
}
const autoLayoutOptions = {
@@ -171,7 +166,6 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
utilities: {
generateLoopBlocks: generateLoopBlocks.toString(),
generateParallelBlocks: generateParallelBlocks.toString(),
generateWhileBlocks: generateWhileBlocks.toString(),
resolveOutputType: resolveOutputType.toString(),
},
},

View File

@@ -69,7 +69,6 @@ describe('Workflow Deployment API Route', () => {
edges: [],
loops: {},
parallels: {},
whiles: {},
isFromNormalizedTables: true,
}),
}))

View File

@@ -109,7 +109,6 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
edges: normalizedData.edges,
loops: normalizedData.loops,
parallels: normalizedData.parallels,
whiles: normalizedData.whiles,
}
const { hasWorkflowChanged } = await import('@/lib/workflows/utils')
@@ -193,7 +192,6 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
const blocksMap: Record<string, any> = {}
const loops: Record<string, any> = {}
const parallels: Record<string, any> = {}
const whiles: Record<string, any> = {}
// Process blocks
blocks.forEach((block) => {
@@ -208,7 +206,7 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
}
})
// Process subflows (loops, parallels, and whiles)
// Process subflows (loops and parallels)
subflows.forEach((subflow) => {
const config = (subflow.config as any) || {}
if (subflow.type === 'loop') {
@@ -227,13 +225,6 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
distribution: config.distribution || '',
parallelType: config.parallelType || 'count',
}
} else if (subflow.type === 'while') {
whiles[subflow.id] = {
id: subflow.id,
nodes: config.nodes || [],
iterations: config.iterations || 1,
whileType: config.whileType || 'while',
}
}
})
@@ -253,7 +244,6 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
edges: edgesArray,
loops,
parallels,
whiles,
lastSaved: Date.now(),
}

View File

@@ -7,7 +7,7 @@ import { createLogger } from '@/lib/logs/console/logger'
import { getUserEntityPermissions } from '@/lib/permissions/utils'
import { db } from '@/db'
import { workflow, workflowBlocks, workflowEdges, workflowSubflows } from '@/db/schema'
import type { LoopConfig, ParallelConfig, WhileConfig } from '@/stores/workflows/workflow/types'
import type { LoopConfig, ParallelConfig } from '@/stores/workflows/workflow/types'
const logger = createLogger('WorkflowDuplicateAPI')
@@ -209,16 +209,16 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
})
// Update block references in subflow config
let updatedConfig: LoopConfig | ParallelConfig | WhileConfig = subflow.config as
let updatedConfig: LoopConfig | ParallelConfig = subflow.config as
| LoopConfig
| ParallelConfig
| WhileConfig
if (subflow.config && typeof subflow.config === 'object') {
updatedConfig = JSON.parse(JSON.stringify(subflow.config)) as
| LoopConfig
| ParallelConfig
| WhileConfig
// Update the config ID to match the new subflow ID
;(updatedConfig as any).id = newSubflowId
// Update node references in config if they exist

View File

@@ -121,7 +121,6 @@ describe('Workflow Execution API Route', () => {
],
loops: {},
parallels: {},
whiles: {},
isFromNormalizedTables: false, // Changed to false since it's from deployed state
}),
}))
@@ -560,7 +559,6 @@ describe('Workflow Execution API Route', () => {
],
loops: {},
parallels: {},
whiles: {},
isFromNormalizedTables: false, // Changed to false since it's from deployed state
}),
}))

View File

@@ -115,14 +115,13 @@ async function executeWorkflow(workflow: any, requestId: string, input?: any): P
const deployedData = await loadDeployedWorkflowState(workflowId)
// Use deployed data as primary source for API executions
const { blocks, edges, loops, parallels, whiles } = deployedData
const { blocks, edges, loops, parallels } = deployedData
logger.info(`[${requestId}] Using deployed state for workflow execution: ${workflowId}`)
logger.debug(`[${requestId}] Deployed data loaded:`, {
blocksCount: Object.keys(blocks || {}).length,
edgesCount: (edges || []).length,
loopsCount: Object.keys(loops || {}).length,
parallelsCount: Object.keys(parallels || {}).length,
whilesCount: Object.keys(whiles || {}).length,
})
// Use the same execution flow as in scheduled executions
@@ -147,6 +146,8 @@ async function executeWorkflow(workflow: any, requestId: string, input?: any): P
userId: workflow.userId,
workspaceId: workflow.workspaceId,
variables,
initialInput: processedInput || {},
executionType: 'api',
})
// Replace environment variables in the block states
@@ -276,7 +277,6 @@ async function executeWorkflow(workflow: any, requestId: string, input?: any): P
edges,
loops,
parallels,
whiles,
true // Enable validation during execution
)

View File

@@ -4,6 +4,7 @@ import { LoggingSession } from '@/lib/logs/execution/logging-session'
import { buildTraceSpans } from '@/lib/logs/execution/trace-spans/trace-spans'
import { validateWorkflowAccess } from '@/app/api/workflows/middleware'
import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'
import { loadWorkflowStateForExecution } from '@/lib/logs/execution/logging-factory'
const logger = createLogger('WorkflowLogAPI')
@@ -30,6 +31,22 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
success: result.success,
})
// Log current normalized state before starting logging session (what snapshot will save)
try {
const normalizedState = await loadWorkflowStateForExecution(id)
logger.info(`[${requestId}] 🔍 Normalized workflow state at persistence time:`, {
blocks: Object.entries(normalizedState.blocks || {}).map(([bid, b]: [string, any]) => ({
id: bid,
type: (b as any).type,
triggerMode: (b as any).triggerMode,
enabled: (b as any).enabled,
})),
edgesCount: (normalizedState.edges || []).length,
})
} catch (e) {
logger.warn(`[${requestId}] Failed to load normalized state for logging snapshot context`)
}
// Check if this execution is from chat using only the explicit source flag
const isChatExecution = result.metadata?.source === 'chat'

View File

@@ -52,7 +52,6 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
edgesCount: deployedState.edges.length,
loopsCount: Object.keys(deployedState.loops || {}).length,
parallelsCount: Object.keys(deployedState.parallels || {}).length,
whilesCount: Object.keys(deployedState.whiles || {}).length,
})
// Save deployed state to normalized tables
@@ -61,7 +60,6 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
edges: deployedState.edges,
loops: deployedState.loops || {},
parallels: deployedState.parallels || {},
whiles: deployedState.whiles || {},
lastSaved: Date.now(),
isDeployed: workflowData.isDeployed,
deployedAt: workflowData.deployedAt,

View File

@@ -96,7 +96,6 @@ describe('Workflow By ID API Route', () => {
edges: [],
loops: {},
parallels: {},
whiles: {},
isFromNormalizedTables: true,
}
@@ -146,7 +145,6 @@ describe('Workflow By ID API Route', () => {
edges: [],
loops: {},
parallels: {},
whiles: {},
isFromNormalizedTables: true,
}
@@ -243,7 +241,6 @@ describe('Workflow By ID API Route', () => {
edges: [{ id: 'edge-1', source: 'block-1', target: 'block-2' }],
loops: {},
parallels: {},
whiles: {},
isFromNormalizedTables: true,
}

View File

@@ -126,7 +126,6 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
edgesCount: normalizedData.edges.length,
loopsCount: Object.keys(normalizedData.loops).length,
parallelsCount: Object.keys(normalizedData.parallels).length,
whilesCount: Object.keys(normalizedData.whiles).length,
loops: normalizedData.loops,
})
@@ -142,7 +141,6 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
edges: normalizedData.edges,
loops: normalizedData.loops,
parallels: normalizedData.parallels,
whiles: normalizedData.whiles,
lastSaved: Date.now(),
isDeployed: workflowData.isDeployed || false,
deployedAt: workflowData.deployedAt,

View File

@@ -24,7 +24,6 @@ const BlockDataSchema = z.object({
count: z.number().optional(),
loopType: z.enum(['for', 'forEach']).optional(),
parallelType: z.enum(['collection', 'count']).optional(),
whileType: z.enum(['while', 'doWhile']).optional(),
type: z.string().optional(),
})
@@ -88,13 +87,6 @@ const ParallelSchema = z.object({
parallelType: z.enum(['count', 'collection']).optional(),
})
const WhileSchema = z.object({
id: z.string(),
nodes: z.array(z.string()),
iterations: z.number(),
whileType: z.enum(['while', 'doWhile']),
})
const DeploymentStatusSchema = z.object({
id: z.string(),
status: z.enum(['deploying', 'deployed', 'failed', 'stopping', 'stopped']),
@@ -107,7 +99,6 @@ const WorkflowStateSchema = z.object({
edges: z.array(EdgeSchema),
loops: z.record(LoopSchema).optional(),
parallels: z.record(ParallelSchema).optional(),
whiles: z.record(WhileSchema).optional(),
lastSaved: z.number().optional(),
isDeployed: z.boolean().optional(),
deployedAt: z.date().optional(),
@@ -206,7 +197,6 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
edges: state.edges,
loops: state.loops || {},
parallels: state.parallels || {},
whiles: state.whiles || {},
lastSaved: state.lastSaved || Date.now(),
isDeployed: state.isDeployed || false,
deployedAt: state.deployedAt,
@@ -241,9 +231,6 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
success: true,
blocksCount: Object.keys(filteredBlocks).length,
edgesCount: state.edges.length,
loopsCount: Object.keys(state.loops || {}).length,
parallelsCount: Object.keys(state.parallels || {}).length,
whilesCount: Object.keys(state.whiles || {}).length,
},
{ status: 200 }
)

View File

@@ -38,7 +38,7 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
const blocksMap: Record<string, any> = {}
const loops: Record<string, any> = {}
const parallels: Record<string, any> = {}
const whiles: Record<string, any> = {}
// Process blocks
blocks.forEach((block) => {
blocksMap[block.id] = {
@@ -71,13 +71,6 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
distribution: config.distribution || '',
parallelType: config.parallelType || 'count',
}
} else if (subflow.type === 'while') {
whiles[subflow.id] = {
id: subflow.id,
nodes: config.nodes || [],
iterations: config.iterations || 1,
whileType: config.whileType || 'while',
}
}
})
@@ -97,7 +90,6 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
edges: edgesArray,
loops,
parallels,
whiles,
lastSaved: Date.now(),
}

View File

@@ -16,11 +16,7 @@ import type { BlockConfig } from '@/blocks/types'
import { resolveOutputType } from '@/blocks/utils'
import { db } from '@/db'
import { workflowCheckpoints, workflow as workflowTable } from '@/db/schema'
import {
generateLoopBlocks,
generateParallelBlocks,
generateWhileBlocks,
} from '@/stores/workflows/workflow/utils'
import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/workflow/utils'
const SIM_AGENT_API_URL = env.SIM_AGENT_API_URL || SIM_AGENT_API_URL_DEFAULT
@@ -84,7 +80,6 @@ async function createWorkflowCheckpoint(
generateLoopBlocks: generateLoopBlocks.toString(),
generateParallelBlocks: generateParallelBlocks.toString(),
resolveOutputType: resolveOutputType.toString(),
generateWhileBlocks: generateWhileBlocks.toString(),
},
}),
})
@@ -298,7 +293,6 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
generateLoopBlocks: generateLoopBlocks.toString(),
generateParallelBlocks: generateParallelBlocks.toString(),
resolveOutputType: resolveOutputType.toString(),
generateWhileBlocks: generateWhileBlocks.toString(),
},
options: {
generateNewIds: false, // We'll handle ID generation manually for now
@@ -379,7 +373,6 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
edges: [] as any[],
loops: {} as Record<string, any>,
parallels: {} as Record<string, any>,
whiles: {} as Record<string, any>,
lastSaved: Date.now(),
isDeployed: false,
deployedAt: undefined,
@@ -398,10 +391,7 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
// Get block configuration for proper setup
const blockConfig = getBlock(block.type)
if (
!blockConfig &&
(block.type === 'loop' || block.type === 'parallel' || block.type === 'while')
) {
if (!blockConfig && (block.type === 'loop' || block.type === 'parallel')) {
// Handle loop/parallel blocks (they don't have regular block configs)
// Preserve parentId if it exists (though loop/parallel shouldn't have parents)
const containerData = block.data || {}
@@ -424,7 +414,7 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
height: 0,
data: containerData,
}
logger.debug(`[${requestId}] Processed loop/parallel/while block: ${block.id} -> ${newId}`)
logger.debug(`[${requestId}] Processed loop/parallel block: ${block.id} -> ${newId}`)
} else if (blockConfig) {
// Handle regular blocks with proper configuration
const subBlocks: Record<string, any> = {}
@@ -555,17 +545,14 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
// Generate loop and parallel configurations
const loops = generateLoopBlocks(newWorkflowState.blocks)
const parallels = generateParallelBlocks(newWorkflowState.blocks)
const whiles = generateWhileBlocks(newWorkflowState.blocks)
newWorkflowState.loops = loops
newWorkflowState.parallels = parallels
newWorkflowState.whiles = whiles
logger.info(`[${requestId}] Generated workflow state`, {
blocksCount: Object.keys(newWorkflowState.blocks).length,
edgesCount: newWorkflowState.edges.length,
loopsCount: Object.keys(loops).length,
parallelsCount: Object.keys(parallels).length,
whilesCount: Object.keys(whiles).length,
})
// Apply intelligent autolayout if requested
@@ -579,7 +566,6 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
edges: newWorkflowState.edges,
loops: newWorkflowState.loops || {},
parallels: newWorkflowState.parallels || {},
whiles: newWorkflowState.whiles || {},
}
const autoLayoutOptions = {
@@ -622,7 +608,6 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
generateLoopBlocks: generateLoopBlocks.toString(),
generateParallelBlocks: generateParallelBlocks.toString(),
resolveOutputType: resolveOutputType.toString(),
generateWhileBlocks: generateWhileBlocks.toString(),
},
},
})
@@ -700,7 +685,6 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
edgesCount: newWorkflowState.edges.length,
loopsCount: Object.keys(loops).length,
parallelsCount: Object.keys(parallels).length,
whilesCount: Object.keys(whiles).length,
},
errors: [],
warnings,

View File

@@ -4,11 +4,7 @@ import { simAgentClient } from '@/lib/sim-agent'
import { getAllBlocks } from '@/blocks/registry'
import type { BlockConfig } from '@/blocks/types'
import { resolveOutputType } from '@/blocks/utils'
import {
generateLoopBlocks,
generateParallelBlocks,
generateWhileBlocks,
} from '@/stores/workflows/workflow/utils'
import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/workflow/utils'
const logger = createLogger('WorkflowYamlAPI')
@@ -54,7 +50,6 @@ export async function POST(request: NextRequest) {
generateLoopBlocks: generateLoopBlocks.toString(),
generateParallelBlocks: generateParallelBlocks.toString(),
resolveOutputType: resolveOutputType.toString(),
generateWhileBlocks: generateWhileBlocks.toString(),
},
},
})

View File

@@ -10,11 +10,7 @@ import type { BlockConfig } from '@/blocks/types'
import { resolveOutputType } from '@/blocks/utils'
import { db } from '@/db'
import { workflow } from '@/db/schema'
import {
generateLoopBlocks,
generateParallelBlocks,
generateWhileBlocks,
} from '@/stores/workflows/workflow/utils'
import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/workflow/utils'
const logger = createLogger('WorkflowYamlExportAPI')
@@ -148,7 +144,6 @@ export async function GET(request: NextRequest) {
generateLoopBlocks: generateLoopBlocks.toString(),
generateParallelBlocks: generateParallelBlocks.toString(),
resolveOutputType: resolveOutputType.toString(),
generateWhileBlocks: generateWhileBlocks.toString(),
},
},
})

View File

@@ -9,12 +9,10 @@ import { resolveOutputType } from '@/blocks/utils'
import {
convertLoopBlockToLoop,
convertParallelBlockToParallel,
convertWhileBlockToWhile,
findAllDescendantNodes,
findChildNodes,
generateLoopBlocks,
generateParallelBlocks,
generateWhileBlocks,
} from '@/stores/workflows/workflow/utils'
const logger = createLogger('YamlAutoLayoutAPI')
@@ -28,7 +26,6 @@ const AutoLayoutRequestSchema = z.object({
edges: z.array(z.any()),
loops: z.record(z.any()).optional().default({}),
parallels: z.record(z.any()).optional().default({}),
whiles: z.record(z.any()).optional().default({}),
}),
options: z
.object({
@@ -39,7 +36,6 @@ const AutoLayoutRequestSchema = z.object({
horizontal: z.number().optional(),
vertical: z.number().optional(),
layer: z.number().optional(),
while: z.number().optional(),
})
.optional(),
alignment: z.enum(['start', 'center', 'end']).optional(),
@@ -49,12 +45,6 @@ const AutoLayoutRequestSchema = z.object({
y: z.number().optional(),
})
.optional(),
while: z
.object({
x: z.number().optional(),
y: z.number().optional(),
})
.optional(),
})
.optional(),
})
@@ -143,10 +133,8 @@ export async function POST(request: NextRequest) {
resolveOutputType: resolveOutputType.toString(),
convertLoopBlockToLoop: convertLoopBlockToLoop.toString(),
convertParallelBlockToParallel: convertParallelBlockToParallel.toString(),
convertWhileBlockToWhile: convertWhileBlockToWhile.toString(),
findChildNodes: findChildNodes.toString(),
findAllDescendantNodes: findAllDescendantNodes.toString(),
generateWhileBlocks: generateWhileBlocks.toString(),
},
}),
})
@@ -204,7 +192,6 @@ export async function POST(request: NextRequest) {
edges: workflowState.edges || [],
loops: workflowState.loops || {},
parallels: workflowState.parallels || {},
whiles: workflowState.whiles || {},
},
errors: result.errors,
}

View File

@@ -9,12 +9,10 @@ import { resolveOutputType } from '@/blocks/utils'
import {
convertLoopBlockToLoop,
convertParallelBlockToParallel,
convertWhileBlockToWhile,
findAllDescendantNodes,
findChildNodes,
generateLoopBlocks,
generateParallelBlocks,
generateWhileBlocks,
} from '@/stores/workflows/workflow/utils'
const logger = createLogger('YamlDiffCreateAPI')
@@ -132,10 +130,8 @@ export async function POST(request: NextRequest) {
resolveOutputType: resolveOutputType.toString(),
convertLoopBlockToLoop: convertLoopBlockToLoop.toString(),
convertParallelBlockToParallel: convertParallelBlockToParallel.toString(),
convertWhileBlockToWhile: convertWhileBlockToWhile.toString(),
findChildNodes: findChildNodes.toString(),
findAllDescendantNodes: findAllDescendantNodes.toString(),
generateWhileBlocks: generateWhileBlocks.toString(),
},
options,
}),
@@ -172,7 +168,7 @@ export async function POST(request: NextRequest) {
dataKeys: block.data ? Object.keys(block.data) : [],
})
}
if (block.type === 'loop' || block.type === 'parallel' || block.type === 'while') {
if (block.type === 'loop' || block.type === 'parallel') {
logger.info(`[${requestId}] Container block ${blockId} (${block.name}):`, {
type: block.type,
hasData: !!block.data,
@@ -184,10 +180,8 @@ export async function POST(request: NextRequest) {
// Log existing loops/parallels from sim-agent
const loops = result.diff?.proposedState?.loops || result.loops || {}
const parallels = result.diff?.proposedState?.parallels || result.parallels || {}
const whiles = result.diff?.proposedState?.whiles || result.whiles || {}
logger.info(`[${requestId}] Sim agent loops:`, loops)
logger.info(`[${requestId}] Sim agent parallels:`, parallels)
logger.info(`[${requestId}] Sim agent whiles:`, whiles)
}
// Log diff analysis specifically
@@ -213,7 +207,7 @@ export async function POST(request: NextRequest) {
// Find all loop and parallel blocks
const containerBlocks = Object.values(blocks).filter(
(block: any) => block.type === 'loop' || block.type === 'parallel' || block.type === 'while'
(block: any) => block.type === 'loop' || block.type === 'parallel'
)
// For each container, find its children based on loop-start edges
@@ -257,23 +251,17 @@ export async function POST(request: NextRequest) {
// Now regenerate loops and parallels with the fixed relationships
const loops = generateLoopBlocks(result.diff.proposedState.blocks)
const parallels = generateParallelBlocks(result.diff.proposedState.blocks)
const whiles = generateWhileBlocks(result.diff.proposedState.blocks)
result.diff.proposedState.loops = loops
result.diff.proposedState.parallels = parallels
result.diff.proposedState.whiles = whiles
logger.info(`[${requestId}] Regenerated loops and parallels after fixing parent-child:`, {
loopsCount: Object.keys(loops).length,
parallelsCount: Object.keys(parallels).length,
whilesCount: Object.keys(whiles).length,
loops: Object.keys(loops).map((id) => ({
id,
nodes: loops[id].nodes,
})),
whiles: Object.keys(whiles).map((id) => ({
id,
nodes: whiles[id].nodes,
})),
})
}
@@ -321,7 +309,7 @@ export async function POST(request: NextRequest) {
// Generate loops and parallels for the blocks with fixed relationships
const loops = generateLoopBlocks(result.blocks)
const parallels = generateParallelBlocks(result.blocks)
const whiles = generateWhileBlocks(result.blocks)
const transformedResult = {
success: result.success,
diff: {
@@ -330,7 +318,6 @@ export async function POST(request: NextRequest) {
edges: result.edges || [],
loops: loops,
parallels: parallels,
whiles: whiles,
},
diffAnalysis: diffAnalysis,
metadata: result.metadata || {

View File

@@ -9,12 +9,10 @@ import { resolveOutputType } from '@/blocks/utils'
import {
convertLoopBlockToLoop,
convertParallelBlockToParallel,
convertWhileBlockToWhile,
findAllDescendantNodes,
findChildNodes,
generateLoopBlocks,
generateParallelBlocks,
generateWhileBlocks,
} from '@/stores/workflows/workflow/utils'
const logger = createLogger('YamlDiffMergeAPI')
@@ -29,7 +27,6 @@ const MergeDiffRequestSchema = z.object({
edges: z.array(z.any()),
loops: z.record(z.any()).optional(),
parallels: z.record(z.any()).optional(),
whiles: z.record(z.any()).optional(),
}),
diffAnalysis: z.any().optional(),
metadata: z.object({
@@ -106,8 +103,6 @@ export async function POST(request: NextRequest) {
convertParallelBlockToParallel: convertParallelBlockToParallel.toString(),
findChildNodes: findChildNodes.toString(),
findAllDescendantNodes: findAllDescendantNodes.toString(),
generateWhileBlocks: generateWhileBlocks.toString(),
convertWhileBlockToWhile: convertWhileBlockToWhile.toString(),
},
options,
}),
@@ -144,7 +139,7 @@ export async function POST(request: NextRequest) {
dataKeys: block.data ? Object.keys(block.data) : [],
})
}
if (block.type === 'loop' || block.type === 'parallel' || block.type === 'while') {
if (block.type === 'loop' || block.type === 'parallel') {
logger.info(`[${requestId}] Container block ${blockId} (${block.name}):`, {
type: block.type,
hasData: !!block.data,
@@ -156,10 +151,8 @@ export async function POST(request: NextRequest) {
// Log existing loops/parallels from sim-agent
const loops = result.diff?.proposedState?.loops || result.loops || {}
const parallels = result.diff?.proposedState?.parallels || result.parallels || {}
const whiles = result.diff?.proposedState?.whiles || result.whiles || {}
logger.info(`[${requestId}] Sim agent loops:`, loops)
logger.info(`[${requestId}] Sim agent parallels:`, parallels)
logger.info(`[${requestId}] Sim agent whiles:`, whiles)
}
// Post-process the result to ensure loops and parallels are properly generated
@@ -172,16 +165,13 @@ export async function POST(request: NextRequest) {
// Find all loop and parallel blocks
const containerBlocks = Object.values(blocks).filter(
(block: any) => block.type === 'loop' || block.type === 'parallel' || block.type === 'while'
(block: any) => block.type === 'loop' || block.type === 'parallel'
)
// For each container, find its children based on loop-start edges
containerBlocks.forEach((container: any) => {
const childEdges = edges.filter(
(edge: any) =>
edge.source === container.id &&
(edge.sourceHandle === 'loop-start-source' ||
edge.sourceHandle === 'while-start-source')
(edge: any) => edge.source === container.id && edge.sourceHandle === 'loop-start-source'
)
childEdges.forEach((edge: any) => {
@@ -208,23 +198,17 @@ export async function POST(request: NextRequest) {
// Now regenerate loops and parallels with the fixed relationships
const loops = generateLoopBlocks(result.diff.proposedState.blocks)
const parallels = generateParallelBlocks(result.diff.proposedState.blocks)
const whiles = generateWhileBlocks(result.diff.proposedState.blocks)
result.diff.proposedState.loops = loops
result.diff.proposedState.parallels = parallels
result.diff.proposedState.whiles = whiles
logger.info(`[${requestId}] Regenerated loops and parallels after fixing parent-child:`, {
loopsCount: Object.keys(loops).length,
parallelsCount: Object.keys(parallels).length,
whilesCount: Object.keys(whiles).length,
loops: Object.keys(loops).map((id) => ({
id,
nodes: loops[id].nodes,
})),
whiles: Object.keys(whiles).map((id) => ({
id,
nodes: whiles[id].nodes,
})),
})
}
@@ -239,16 +223,13 @@ export async function POST(request: NextRequest) {
// Find all loop and parallel blocks
const containerBlocks = Object.values(blocks).filter(
(block: any) => block.type === 'loop' || block.type === 'parallel' || block.type === 'while'
(block: any) => block.type === 'loop' || block.type === 'parallel'
)
// For each container, find its children based on loop-start edges
containerBlocks.forEach((container: any) => {
const childEdges = edges.filter(
(edge: any) =>
edge.source === container.id &&
(edge.sourceHandle === 'loop-start-source' ||
edge.sourceHandle === 'while-start-source')
(edge: any) => edge.source === container.id && edge.sourceHandle === 'loop-start-source'
)
childEdges.forEach((edge: any) => {
@@ -275,7 +256,7 @@ export async function POST(request: NextRequest) {
// Generate loops and parallels for the blocks with fixed relationships
const loops = generateLoopBlocks(result.blocks)
const parallels = generateParallelBlocks(result.blocks)
const whiles = generateWhileBlocks(result.blocks)
const transformedResult = {
success: result.success,
diff: {
@@ -284,7 +265,6 @@ export async function POST(request: NextRequest) {
edges: result.edges || existingDiff.proposedState.edges || [],
loops: loops,
parallels: parallels,
whiles: whiles,
},
diffAnalysis: diffAnalysis,
metadata: result.metadata || {

View File

@@ -6,11 +6,7 @@ import { SIM_AGENT_API_URL_DEFAULT } from '@/lib/sim-agent'
import { getAllBlocks } from '@/blocks/registry'
import type { BlockConfig } from '@/blocks/types'
import { resolveOutputType } from '@/blocks/utils'
import {
generateLoopBlocks,
generateParallelBlocks,
generateWhileBlocks,
} from '@/stores/workflows/workflow/utils'
import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/workflow/utils'
const logger = createLogger('YamlGenerateAPI')
@@ -64,7 +60,6 @@ export async function POST(request: NextRequest) {
generateLoopBlocks: generateLoopBlocks.toString(),
generateParallelBlocks: generateParallelBlocks.toString(),
resolveOutputType: resolveOutputType.toString(),
generateWhileBlocks: generateWhileBlocks.toString(),
},
}),
})

View File

@@ -6,11 +6,7 @@ import { SIM_AGENT_API_URL_DEFAULT } from '@/lib/sim-agent'
import { getAllBlocks } from '@/blocks/registry'
import type { BlockConfig } from '@/blocks/types'
import { resolveOutputType } from '@/blocks/utils'
- import {
-   generateLoopBlocks,
-   generateParallelBlocks,
-   generateWhileBlocks,
- } from '@/stores/workflows/workflow/utils'
+ import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/workflow/utils'
const logger = createLogger('YamlParseAPI')
@@ -61,7 +57,6 @@ export async function POST(request: NextRequest) {
generateLoopBlocks: generateLoopBlocks.toString(),
generateParallelBlocks: generateParallelBlocks.toString(),
resolveOutputType: resolveOutputType.toString(),
generateWhileBlocks: generateWhileBlocks.toString(),
},
}),
})

View File

@@ -6,11 +6,7 @@ import { SIM_AGENT_API_URL_DEFAULT } from '@/lib/sim-agent'
import { getAllBlocks } from '@/blocks/registry'
import type { BlockConfig } from '@/blocks/types'
import { resolveOutputType } from '@/blocks/utils'
- import {
-   generateLoopBlocks,
-   generateParallelBlocks,
-   generateWhileBlocks,
- } from '@/stores/workflows/workflow/utils'
+ import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/workflow/utils'
const logger = createLogger('YamlToWorkflowAPI')
@@ -69,7 +65,6 @@ export async function POST(request: NextRequest) {
generateLoopBlocks: generateLoopBlocks.toString(),
generateParallelBlocks: generateParallelBlocks.toString(),
resolveOutputType: resolveOutputType.toString(),
generateWhileBlocks: generateWhileBlocks.toString(),
},
options,
}),

View File

@@ -15,7 +15,6 @@
.workflow-container .react-flow__node-loopNode,
.workflow-container .react-flow__node-parallelNode,
.workflow-container .react-flow__node-whileNode,
.workflow-container .react-flow__node-subflowNode {
z-index: -1 !important;
}

View File

@@ -11,7 +11,7 @@ import { createLogger } from '@/lib/logs/console/logger'
import { getAssetUrl } from '@/lib/utils'
import '@/app/globals.css'
- import { SessionProvider } from '@/lib/session-context'
+ import { SessionProvider } from '@/lib/session/session-context'
import { ThemeProvider } from '@/app/theme-provider'
import { ZoomPrevention } from '@/app/zoom-prevention'

View File

@@ -21,6 +21,10 @@ const ACCEPTED_FILE_TYPES = [
'text/csv',
'application/vnd.ms-excel',
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
'text/markdown',
'application/vnd.ms-powerpoint',
'application/vnd.openxmlformats-officedocument.presentationml.presentation',
'text/html',
]
interface FileWithPreview extends File {
@@ -74,7 +78,7 @@ export function UploadModal({
return `File "${file.name}" is too large. Maximum size is 100MB.`
}
if (!ACCEPTED_FILE_TYPES.includes(file.type)) {
- return `File "${file.name}" has an unsupported format. Please use PDF, DOC, DOCX, TXT, CSV, XLS, or XLSX files.`
+ return `File "${file.name}" has an unsupported format. Please use PDF, DOC, DOCX, TXT, CSV, XLS, XLSX, MD, PPT, PPTX, or HTML files.`
}
return null
}
@@ -203,7 +207,8 @@ export function UploadModal({
{isDragging ? 'Drop files here!' : 'Drop files here or click to browse'}
</p>
<p className='text-muted-foreground text-xs'>
- Supports PDF, DOC, DOCX, TXT, CSV, XLS, XLSX (max 100MB each)
+ Supports PDF, DOC, DOCX, TXT, CSV, XLS, XLSX, MD, PPT, PPTX, HTML (max 100MB
+ each)
</p>
</div>
</div>

View File

@@ -29,6 +29,10 @@ const ACCEPTED_FILE_TYPES = [
'text/csv',
'application/vnd.ms-excel',
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
'text/markdown',
'application/vnd.ms-powerpoint',
'application/vnd.openxmlformats-officedocument.presentationml.presentation',
'text/html',
]
interface FileWithPreview extends File {
@@ -168,7 +172,7 @@ export function CreateModal({ open, onOpenChange, onKnowledgeBaseCreated }: Crea
// Check file type
if (!ACCEPTED_FILE_TYPES.includes(file.type)) {
setFileError(
- `File ${file.name} has an unsupported format. Please use PDF, DOC, DOCX, TXT, CSV, XLS, or XLSX.`
+ `File ${file.name} has an unsupported format. Please use PDF, DOC, DOCX, TXT, CSV, XLS, XLSX, MD, PPT, PPTX, or HTML.`
)
hasError = true
continue
@@ -511,7 +515,8 @@ export function CreateModal({ open, onOpenChange, onKnowledgeBaseCreated }: Crea
: 'Drop files here or click to browse'}
</p>
<p className='text-muted-foreground text-xs'>
- Supports PDF, DOC, DOCX, TXT, CSV, XLS, XLSX (max 100MB each)
+ Supports PDF, DOC, DOCX, TXT, CSV, XLS, XLSX, MD, PPT, PPTX, HTML (max
+ 100MB each)
</p>
</div>
</div>
@@ -552,7 +557,8 @@ export function CreateModal({ open, onOpenChange, onKnowledgeBaseCreated }: Crea
: 'Drop more files or click to browse'}
</p>
<p className='text-muted-foreground text-xs'>
- PDF, DOC, DOCX, TXT, CSV, XLS, XLSX (max 100MB each)
+ PDF, DOC, DOCX, TXT, CSV, XLS, XLSX, MD, PPT, PPTX, HTML (max 100MB
+ each)
</p>
</div>
</div>

View File

@@ -25,7 +25,7 @@ import {
TooltipProvider,
TooltipTrigger,
} from '@/components/ui'
- import { MAX_TAG_SLOTS, type TagSlot } from '@/lib/constants/knowledge'
+ import { MAX_TAG_SLOTS, type TagSlot } from '@/lib/knowledge/consts'
import { createLogger } from '@/lib/logs/console/logger'
import { useKnowledgeBaseTagDefinitions } from '@/hooks/use-knowledge-base-tag-definitions'
import { useNextAvailableSlot } from '@/hooks/use-next-available-slot'

View File

@@ -6,7 +6,7 @@ import { Button } from '@/components/ui/button'
import { Collapsible, CollapsibleContent, CollapsibleTrigger } from '@/components/ui/collapsible'
import { Input } from '@/components/ui/input'
import { Label } from '@/components/ui/label'
- import { TAG_SLOTS, type TagSlot } from '@/lib/constants/knowledge'
+ import { TAG_SLOTS, type TagSlot } from '@/lib/knowledge/consts'
import { useKnowledgeBaseTagDefinitions } from '@/hooks/use-knowledge-base-tag-definitions'
export type TagData = {

View File

@@ -16,6 +16,7 @@ import { TraceSpansDisplay } from '@/app/workspace/[workspaceId]/logs/components
import { formatDate } from '@/app/workspace/[workspaceId]/logs/utils/format-date'
import { formatCost } from '@/providers/utils'
import type { WorkflowLog } from '@/stores/logs/filters/types'
import { useParams, useRouter } from 'next/navigation'
interface LogSidebarProps {
log: WorkflowLog | null
@@ -199,6 +200,8 @@ export function Sidebar({
const [isModelsExpanded, setIsModelsExpanded] = useState(false)
const [isFrozenCanvasOpen, setIsFrozenCanvasOpen] = useState(false)
const scrollAreaRef = useRef<HTMLDivElement>(null)
const router = useRouter()
const params = useParams() as { workspaceId?: string }
// Update currentLogId when log changes
useEffect(() => {
@@ -529,15 +532,31 @@ export function Sidebar({
<h3 className='mb-1 font-medium text-muted-foreground text-xs'>
Workflow State
</h3>
- <Button
- variant='outline'
- size='sm'
- onClick={() => setIsFrozenCanvasOpen(true)}
- className='w-full justify-start gap-2'
- >
- <Eye className='h-4 w-4' />
- View Snapshot
- </Button>
+ <div className='flex w-full gap-2'>
+ <Button
+ variant='outline'
+ size='sm'
+ onClick={() => setIsFrozenCanvasOpen(true)}
+ className='flex-1 justify-start gap-2'
+ >
+ <Eye className='h-4 w-4' />
+ View Snapshot
+ </Button>
+ <Button
+ variant='secondary'
+ size='sm'
+ onClick={() => {
+ try {
+ const href = `/workspace/${encodeURIComponent(String(params?.workspaceId || ''))}/w/${encodeURIComponent(String(log.workflowId || ''))}`
+ router.push(href)
+ } catch {}
+ }}
+ className='flex-1 justify-start gap-2'
+ >
+ <Eye className='h-4 w-4' />
+ Open Live Debug
+ </Button>
+ </div>
<p className='mt-1 text-muted-foreground text-xs'>
See the exact workflow state and block inputs/outputs at execution time
</p>

View File

@@ -46,7 +46,6 @@ export function DeployedWorkflowModal({
edges: state.edges,
loops: state.loops,
parallels: state.parallels,
whiles: state.whiles,
}))
const handleRevert = () => {

View File

@@ -8,8 +8,6 @@ import {
Layers,
Play,
RefreshCw,
SkipForward,
StepForward,
Store,
Trash2,
WifiOff,
@@ -44,6 +42,8 @@ import {
getKeyboardShortcutText,
useKeyboardShortcuts,
} from '@/app/workspace/[workspaceId]/w/hooks/use-keyboard-shortcuts'
import { useExecutionStore } from '@/stores/execution/store'
import { useDebugCanvasStore } from '@/stores/execution/debug-canvas/store'
import { useFolderStore } from '@/stores/folders/store'
import { usePanelStore } from '@/stores/panel/store'
import { useGeneralStore } from '@/stores/settings/general/store'
@@ -111,6 +111,9 @@ export function ControlBar({ hasValidationErrors = false }: ControlBarProps) {
const [isExpanded, setIsExpanded] = useState(false)
const [isTemplateModalOpen, setIsTemplateModalOpen] = useState(false)
const [isAutoLayouting, setIsAutoLayouting] = useState(false)
// Remove chat modal state
// const [isChatPromptOpen, setIsChatPromptOpen] = useState(false)
// const [chatPrompt, setChatPrompt] = useState('')
// Delete workflow state - grouped for better organization
const [deleteState, setDeleteState] = useState({
@@ -146,6 +149,13 @@ export function ControlBar({ hasValidationErrors = false }: ControlBarProps) {
}
}, [setActiveTab, isOpen, togglePanel])
const openDebugPanel = useCallback(() => {
setActiveTab('debug')
if (!isOpen) {
togglePanel()
}
}, [setActiveTab, isOpen, togglePanel])
// Shared condition for keyboard shortcut and button disabled state
const isWorkflowBlocked = isExecuting || hasValidationErrors
@@ -808,6 +818,7 @@ export function ControlBar({ hasValidationErrors = false }: ControlBarProps) {
if (isDebugging) {
// Stop debugging
try { useDebugCanvasStore.getState().clear() } catch {}
handleCancelDebug()
} else {
// Check if there are executable blocks before starting debug mode
@@ -819,15 +830,31 @@ export function ControlBar({ hasValidationErrors = false }: ControlBarProps) {
return // Do nothing if no executable blocks
}
- // Start debugging
+ // Determine starter id for focus
+ const starter = Object.values(blocks).find((b) => b.type === 'starter') as any
+ const starterId = starter?.id as string | undefined
+ // Enable debug UI but do NOT start execution
if (!isDebugModeEnabled) {
toggleDebugMode()
}
if (usageExceeded) {
openSubscriptionSettings()
} else {
- openConsolePanel()
- handleRunWorkflow(undefined, true) // Start in debug mode
+ // Activate debug session state so the panel is active
+ const execStore = useExecutionStore.getState()
+ execStore.setIsExecuting(false)
+ execStore.setIsDebugging(true)
+ // Set the Start block as pending - it will execute on first Step
+ execStore.setPendingBlocks(starterId ? [starterId] : [])
+ // Show Debug tab and mark starter as the current block to execute
+ openDebugPanel()
+ if (starterId) {
+ execStore.setActiveBlocks(new Set([starterId]))
+ }
+ // Ensure debug canvas starts in a clean state
+ try { useDebugCanvasStore.getState().clear() } catch {}
}
}
}, [
@@ -838,8 +865,7 @@ export function ControlBar({ hasValidationErrors = false }: ControlBarProps) {
blocks,
handleCancelDebug,
toggleDebugMode,
- handleRunWorkflow,
- openConsolePanel,
+ openDebugPanel,
])
/**
@@ -859,40 +885,7 @@ export function ControlBar({ hasValidationErrors = false }: ControlBarProps) {
return (
<div className='flex items-center gap-1'>
<Tooltip>
<TooltipTrigger asChild>
<Button
onClick={() => {
openConsolePanel()
handleStepDebug()
}}
className={debugButtonClass}
disabled={isControlDisabled}
>
<StepForward className='h-5 w-5' />
<span className='sr-only'>Step Forward</span>
</Button>
</TooltipTrigger>
<TooltipContent>Step Forward</TooltipContent>
</Tooltip>
<Tooltip>
<TooltipTrigger asChild>
<Button
onClick={() => {
openConsolePanel()
handleResumeDebug()
}}
className={debugButtonClass}
disabled={isControlDisabled}
>
<SkipForward className='h-5 w-5' />
<span className='sr-only'>Resume Until End</span>
</Button>
</TooltipTrigger>
<TooltipContent>Resume Until End</TooltipContent>
</Tooltip>
{/* Keep only cancel (X) here; step/resume moved to panel */}
<Tooltip>
<TooltipTrigger asChild>
<Button
@@ -1214,7 +1207,7 @@ export function ControlBar({ hasValidationErrors = false }: ControlBarProps) {
{isExpanded && renderPublishButton()}
{renderDeleteButton()}
{renderDuplicateButton()}
- {!isDebugging && renderDebugModeToggle()}
+ {renderDebugModeToggle()}
{renderDeployButton()}
{isDebugging ? renderDebugControlsBar() : renderRunButton()}
@@ -1226,6 +1219,8 @@ export function ControlBar({ hasValidationErrors = false }: ControlBarProps) {
workflowId={activeWorkflowId}
/>
)}
{/* Removed chat prompt dialog; chat input now lives in DebugPanel */}
</div>
)
}

View File

@@ -83,7 +83,6 @@ export function DiffControls() {
edges: rawState.edges || [],
loops: rawState.loops || {},
parallels: rawState.parallels || {},
whiles: rawState.whiles || {},
lastSaved: rawState.lastSaved || Date.now(),
isDeployed: rawState.isDeployed || false,
deploymentStatuses: rawState.deploymentStatuses || {},
@@ -99,7 +98,6 @@ export function DiffControls() {
edgesCount: workflowState.edges.length,
loopsCount: Object.keys(workflowState.loops).length,
parallelsCount: Object.keys(workflowState.parallels).length,
whilesCount: Object.keys(workflowState.whiles).length,
hasRequiredFields: Object.values(workflowState.blocks).every(
(block) => block.id && block.type && block.name && block.position
),
@@ -148,7 +146,6 @@ export function DiffControls() {
workflowId: activeWorkflowId,
chatId: currentChat.id,
messageId,
whiles: workflowState.whiles,
workflowState: JSON.stringify(workflowState),
}),
})

View File

@@ -10,6 +10,7 @@ import {
import { ScrollArea } from '@/components/ui/scroll-area'
import { Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui/tooltip'
import { useCopilotStore } from '@/stores/copilot/store'
import { useExecutionStore } from '@/stores/execution/store'
import { useChatStore } from '@/stores/panel/chat/store'
import { useConsoleStore } from '@/stores/panel/console/store'
import { usePanelStore } from '@/stores/panel/store'
@@ -17,6 +18,7 @@ import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { Chat } from './components/chat/chat'
import { Console } from './components/console/console'
import { Copilot } from './components/copilot/copilot'
import { DebugPanel } from './components/debug/debug'
import { Variables } from './components/variables/variables'
export function Panel() {
@@ -44,6 +46,9 @@ export function Panel() {
const exportChatCSV = useChatStore((state) => state.exportChatCSV)
const { activeWorkflowId } = useWorkflowRegistry()
// Get debug state
const isDebugging = useExecutionStore((state) => state.isDebugging)
// Copilot store for chat management
const {
chats,
@@ -216,7 +221,11 @@ export function Panel() {
)
// Handle tab clicks - no loading, just switch tabs
- const handleTabClick = async (tab: 'chat' | 'console' | 'variables' | 'copilot') => {
+ const handleTabClick = async (tab: 'chat' | 'console' | 'variables' | 'copilot' | 'debug') => {
// Don't allow clicking debug tab if not debugging
if (tab === 'debug' && !isDebugging) {
return
}
setActiveTab(tab)
if (!isOpen) {
togglePanel()
@@ -284,10 +293,30 @@ export function Panel() {
}
}, [activeWorkflowId, copilotWorkflowId, ensureCopilotDataLoaded])
// When debug mode ends, switch to a different tab if debug was active
useEffect(() => {
if (!isDebugging && activeTab === 'debug') {
setActiveTab('console')
}
}, [isDebugging, activeTab, setActiveTab])
// When debug mode starts, automatically open the debug panel
useEffect(() => {
if (isDebugging) {
setActiveTab('debug')
if (!isOpen) {
togglePanel()
}
}
}, [isDebugging, setActiveTab, isOpen, togglePanel])
return (
<>
{/* Tab Selector - Always visible */}
- <div className='fixed top-[76px] right-4 z-20 flex h-9 w-[308px] items-center gap-1 rounded-[14px] border bg-card px-[2.5px] py-1 shadow-xs'>
+ <div
+ className='fixed top-[76px] right-4 z-20 flex h-9 items-center gap-1 rounded-[14px] border bg-card px-[2.5px] py-1 shadow-xs'
+ style={{ width: isDebugging ? '380px' : '308px' }}
+ >
<button
onClick={() => handleTabClick('chat')}
className={`panel-tab-base inline-flex flex-1 cursor-pointer items-center justify-center rounded-[10px] border border-transparent py-1 font-[450] text-sm outline-none transition-colors duration-200 ${
@@ -320,6 +349,16 @@ export function Panel() {
>
Variables
</button>
{isDebugging && (
<button
onClick={() => handleTabClick('debug')}
className={`panel-tab-base inline-flex flex-1 cursor-pointer items-center justify-center rounded-[10px] border border-transparent py-1 font-[450] text-sm outline-none transition-colors duration-200 ${
isOpen && activeTab === 'debug' ? 'panel-tab-active' : 'panel-tab-inactive'
}`}
>
Debug
</button>
)}
</div>
{/* Panel Content - Only visible when isOpen is true */}
@@ -512,6 +551,9 @@ export function Panel() {
<div style={{ display: activeTab === 'variables' ? 'block' : 'none', height: '100%' }}>
<Variables />
</div>
<div style={{ display: activeTab === 'debug' ? 'block' : 'none', height: '100%' }}>
<DebugPanel />
</div>
</div>
</div>
)}

View File

@@ -12,10 +12,9 @@ import { useWorkflowStore } from '@/stores/workflows/workflow/store'
import 'prismjs/components/prism-javascript'
import 'prismjs/themes/prism.css'
- type IterationType = 'loop' | 'parallel' | 'while'
+ type IterationType = 'loop' | 'parallel'
type LoopType = 'for' | 'forEach'
type ParallelType = 'count' | 'collection'
type WhileType = 'while' | 'doWhile'
interface IterationNodeData {
width?: number
@@ -26,11 +25,9 @@ interface IterationNodeData {
extent?: 'parent'
loopType?: LoopType
parallelType?: ParallelType
whileType?: WhileType
// Common
count?: number
collection?: string | any[] | Record<string, any>
condition?: string
isPreview?: boolean
executionState?: {
currentIteration?: number
@@ -68,12 +65,6 @@ const CONFIG = {
items: 'distribution' as const,
},
},
while: {
typeLabels: { while: 'While Loop', doWhile: 'Do While' },
typeKey: 'whileType' as const,
storeKey: 'whiles' as const,
maxIterations: 100,
},
} as const
export function IterationBadges({ nodeId, data, iterationType }: IterationBadgesProps) {
@@ -86,21 +77,9 @@ export function IterationBadges({ nodeId, data, iterationType }: IterationBadges
// Determine current type and values
const currentType = (data?.[config.typeKey] ||
- (iterationType === 'loop' ? 'for' : iterationType === 'parallel' ? 'count' : 'while')) as any
- const configIterations =
- iterationType === 'loop'
- ? ((nodeConfig as any)?.[CONFIG.loop.configKeys.iterations] ?? data?.count ?? 5)
- : iterationType === 'parallel'
- ? ((nodeConfig as any)?.[CONFIG.parallel.configKeys.iterations] ?? data?.count ?? 5)
- : ((nodeConfig as any)?.iterations ?? data?.count ?? 5)
- const configCollection =
- iterationType === 'loop'
- ? ((nodeConfig as any)?.[CONFIG.loop.configKeys.items] ?? data?.collection ?? '')
- : iterationType === 'parallel'
- ? ((nodeConfig as any)?.[CONFIG.parallel.configKeys.items] ?? data?.collection ?? '')
- : ''
+ (iterationType === 'loop' ? 'for' : 'count')) as any
+ const configIterations = (nodeConfig as any)?.[config.configKeys.iterations] ?? data?.count ?? 5
+ const configCollection = (nodeConfig as any)?.[config.configKeys.items] ?? data?.collection ?? ''
const iterations = configIterations
const collectionString =
@@ -108,10 +87,8 @@ export function IterationBadges({ nodeId, data, iterationType }: IterationBadges
// State management
const [tempInputValue, setTempInputValue] = useState<string | null>(null)
- const isWhile = iterationType === 'while'
- const [whileValue, setWhileValue] = useState<string>(data?.condition || '')
const inputValue = tempInputValue ?? iterations.toString()
- const editorValue = isWhile ? whileValue : collectionString
+ const editorValue = collectionString
const [typePopoverOpen, setTypePopoverOpen] = useState(false)
const [configPopoverOpen, setConfigPopoverOpen] = useState(false)
const [showTagDropdown, setShowTagDropdown] = useState(false)
@@ -123,7 +100,6 @@ export function IterationBadges({ nodeId, data, iterationType }: IterationBadges
const {
collaborativeUpdateLoopType,
collaborativeUpdateParallelType,
collaborativeUpdateWhileType,
collaborativeUpdateIterationCount,
collaborativeUpdateIterationCollection,
} = useCollaborativeWorkflow()
@@ -134,21 +110,12 @@ export function IterationBadges({ nodeId, data, iterationType }: IterationBadges
if (isPreview) return
if (iterationType === 'loop') {
collaborativeUpdateLoopType(nodeId, newType)
- } else if (iterationType === 'parallel') {
- collaborativeUpdateParallelType(nodeId, newType)
} else {
- collaborativeUpdateWhileType(nodeId, newType)
+ collaborativeUpdateParallelType(nodeId, newType)
}
setTypePopoverOpen(false)
},
- [
- nodeId,
- iterationType,
- collaborativeUpdateLoopType,
- collaborativeUpdateParallelType,
- collaborativeUpdateWhileType,
- isPreview,
- ]
+ [nodeId, iterationType, collaborativeUpdateLoopType, collaborativeUpdateParallelType, isPreview]
)
// Handle iterations input change
@@ -174,9 +141,7 @@ export function IterationBadges({ nodeId, data, iterationType }: IterationBadges
if (!Number.isNaN(value)) {
const newValue = Math.min(config.maxIterations, Math.max(1, value))
- if (iterationType === 'loop' || iterationType === 'parallel') {
- collaborativeUpdateIterationCount(nodeId, iterationType, newValue)
- }
+ collaborativeUpdateIterationCount(nodeId, iterationType, newValue)
}
setTempInputValue(null)
setConfigPopoverOpen(false)
@@ -193,11 +158,7 @@ export function IterationBadges({ nodeId, data, iterationType }: IterationBadges
const handleEditorChange = useCallback(
(value: string) => {
if (isPreview) return
- if (iterationType === 'loop' || iterationType === 'parallel') {
- collaborativeUpdateIterationCollection(nodeId, iterationType, value)
- } else if (isWhile) {
- setWhileValue(value)
- }
+ collaborativeUpdateIterationCollection(nodeId, iterationType, value)
const textarea = editorContainerRef.current?.querySelector('textarea')
if (textarea) {
@@ -209,18 +170,14 @@ export function IterationBadges({ nodeId, data, iterationType }: IterationBadges
setShowTagDropdown(triggerCheck.show)
}
},
- [nodeId, iterationType, collaborativeUpdateIterationCollection, isPreview, isWhile]
+ [nodeId, iterationType, collaborativeUpdateIterationCollection, isPreview]
)
// Handle tag selection
const handleTagSelect = useCallback(
(newValue: string) => {
if (isPreview) return
- if (iterationType === 'loop' || iterationType === 'parallel') {
- collaborativeUpdateIterationCollection(nodeId, iterationType, newValue)
- } else if (isWhile) {
- setWhileValue(newValue)
- }
+ collaborativeUpdateIterationCollection(nodeId, iterationType, newValue)
setShowTagDropdown(false)
setTimeout(() => {
@@ -230,7 +187,7 @@ export function IterationBadges({ nodeId, data, iterationType }: IterationBadges
}
}, 0)
},
- [nodeId, iterationType, collaborativeUpdateIterationCollection, isPreview, isWhile]
+ [nodeId, iterationType, collaborativeUpdateIterationCollection, isPreview]
)
// Determine if we're in count mode or collection mode
@@ -266,11 +223,7 @@ export function IterationBadges({ nodeId, data, iterationType }: IterationBadges
<PopoverContent className='w-48 p-3' align='center' onClick={(e) => e.stopPropagation()}>
<div className='space-y-2'>
<div className='font-medium text-muted-foreground text-xs'>
- {iterationType === 'loop'
- ? 'Loop Type'
- : iterationType === 'parallel'
- ? 'Parallel Type'
- : 'While Type'}
+ {iterationType === 'loop' ? 'Loop Type' : 'Parallel Type'}
</div>
<div className='space-y-1'>
{typeOptions.map(([typeValue, typeLabel]) => (
@@ -306,63 +259,24 @@ export function IterationBadges({ nodeId, data, iterationType }: IterationBadges
)}
style={{ pointerEvents: isPreview ? 'none' : 'auto' }}
>
- {isWhile ? 'Condition' : isCountMode ? `Iterations: ${iterations}` : 'Items'}
+ {isCountMode ? `Iterations: ${iterations}` : 'Items'}
{!isPreview && <ChevronDown className='h-3 w-3 text-muted-foreground' />}
</Badge>
</PopoverTrigger>
{!isPreview && (
<PopoverContent
- className={cn('p-3', isWhile || !isCountMode ? 'w-72' : 'w-48')}
+ className={cn('p-3', !isCountMode ? 'w-72' : 'w-48')}
align='center'
onClick={(e) => e.stopPropagation()}
>
<div className='space-y-2'>
<div className='font-medium text-muted-foreground text-xs'>
- {isWhile
- ? 'While Condition'
- : isCountMode
- ? `${iterationType === 'loop' ? 'Loop' : 'Parallel'} Iterations`
- : `${iterationType === 'loop' ? 'Collection' : 'Parallel'} Items`}
+ {isCountMode
+ ? `${iterationType === 'loop' ? 'Loop' : 'Parallel'} Iterations`
+ : `${iterationType === 'loop' ? 'Collection' : 'Parallel'} Items`}
</div>
{isWhile ? (
// Code editor for while condition
<div ref={editorContainerRef} className='relative'>
<div className='relative min-h-[80px] rounded-md border border-input bg-background px-3 pt-2 pb-3 font-mono text-sm'>
{editorValue === '' && (
<div className='pointer-events-none absolute top-[8.5px] left-3 select-none text-muted-foreground/50'>
condition === true
</div>
)}
<Editor
value={editorValue}
onValueChange={handleEditorChange}
highlight={(code) => highlight(code, languages.javascript, 'javascript')}
padding={0}
style={{
fontFamily: 'monospace',
lineHeight: '21px',
}}
className='w-full focus:outline-none'
textareaClassName='focus:outline-none focus:ring-0 bg-transparent resize-none w-full overflow-hidden whitespace-pre-wrap'
/>
</div>
<div className='mt-2 text-[10px] text-muted-foreground'>
Enter a boolean expression.
</div>
{showTagDropdown && (
<TagDropdown
visible={showTagDropdown}
onSelect={handleTagSelect}
blockId={nodeId}
activeSourceBlockId={null}
inputValue={editorValue}
cursorPosition={cursorPosition}
onClose={() => setShowTagDropdown(false)}
/>
)}
</div>
) : isCountMode ? (
{isCountMode ? (
// Number input for count-based mode
<div className='flex items-center gap-2'>
<Input

View File

@@ -136,40 +136,12 @@ describe('SubflowNodeComponent', () => {
}).not.toThrow()
})
it.concurrent('should accept while kind in NodeProps data', () => {
const validProps = {
id: 'test-id-while',
type: 'subflowNode' as const,
data: {
width: 400,
height: 300,
isPreview: false,
kind: 'while' as const,
},
selected: false,
zIndex: 1,
isConnectable: true,
xPos: 0,
yPos: 0,
dragging: false,
}
expect(() => {
const _component: typeof SubflowNodeComponent = SubflowNodeComponent
expect(_component).toBeDefined()
expect(validProps.type).toBe('subflowNode')
}).not.toThrow()
})
it.concurrent('should handle different data configurations', () => {
const configurations = [
{ width: 500, height: 300, isPreview: false, kind: 'loop' as const },
{ width: 800, height: 600, isPreview: true, kind: 'parallel' as const },
{ width: 500, height: 300, isPreview: false, kind: 'while' as const },
{ width: 0, height: 0, isPreview: false, kind: 'loop' as const },
{ width: 0, height: 0, isPreview: false, kind: 'while' as const },
{ kind: 'loop' as const },
{ kind: 'while' as const },
]
configurations.forEach((data) => {
@@ -334,20 +306,10 @@ describe('SubflowNodeComponent', () => {
})
it.concurrent('should generate correct handle IDs for parallel kind', () => {
- type SubflowKind = 'loop' | 'parallel' | 'while'
+ type SubflowKind = 'loop' | 'parallel'
const testHandleGeneration = (kind: SubflowKind) => {
- const startHandleId =
- kind === 'loop'
- ? 'loop-start-source'
- : kind === 'parallel'
- ? 'parallel-start-source'
- : 'while-start-source'
- const endHandleId =
- kind === 'loop'
- ? 'loop-end-source'
- : kind === 'parallel'
- ? 'parallel-end-source'
- : 'while-end-source'
+ const startHandleId = kind === 'loop' ? 'loop-start-source' : 'parallel-start-source'
+ const endHandleId = kind === 'loop' ? 'loop-end-source' : 'parallel-end-source'
return { startHandleId, endHandleId }
}
@@ -356,29 +318,6 @@ describe('SubflowNodeComponent', () => {
expect(result.endHandleId).toBe('parallel-end-source')
})
it.concurrent('should generate correct handle IDs for while kind', () => {
type SubflowKind = 'loop' | 'parallel' | 'while'
const testHandleGeneration = (kind: SubflowKind) => {
const startHandleId =
kind === 'loop'
? 'loop-start-source'
: kind === 'parallel'
? 'parallel-start-source'
: 'while-start-source'
const endHandleId =
kind === 'loop'
? 'loop-end-source'
: kind === 'parallel'
? 'parallel-end-source'
: 'while-end-source'
return { startHandleId, endHandleId }
}
const result = testHandleGeneration('while')
expect(result.startHandleId).toBe('while-start-source')
expect(result.endHandleId).toBe('while-end-source')
})
it.concurrent('should generate correct background colors for loop kind', () => {
const loopData = { ...defaultProps.data, kind: 'loop' as const }
const startBg = loopData.kind === 'loop' ? '#2FB3FF' : '#FEE12B'
@@ -387,41 +326,21 @@ describe('SubflowNodeComponent', () => {
})
it.concurrent('should generate correct background colors for parallel kind', () => {
- type SubflowKind = 'loop' | 'parallel' | 'while'
+ type SubflowKind = 'loop' | 'parallel'
const testBgGeneration = (kind: SubflowKind) => {
- return kind === 'loop' ? '#2FB3FF' : kind === 'parallel' ? '#FEE12B' : '#57D9A3'
+ return kind === 'loop' ? '#2FB3FF' : '#FEE12B'
}
const startBg = testBgGeneration('parallel')
expect(startBg).toBe('#FEE12B')
})
it.concurrent('should generate correct background colors for while kind', () => {
type SubflowKind = 'loop' | 'parallel' | 'while'
const testBgGeneration = (kind: SubflowKind) => {
return kind === 'loop' ? '#2FB3FF' : kind === 'parallel' ? '#FEE12B' : '#57D9A3'
}
const startBg = testBgGeneration('while')
expect(startBg).toBe('#57D9A3')
})
it.concurrent('should demonstrate handle ID generation for any kind', () => {
- type SubflowKind = 'loop' | 'parallel' | 'while'
+ type SubflowKind = 'loop' | 'parallel'
const testKind = (kind: SubflowKind) => {
const data = { kind }
- const startHandleId =
- data.kind === 'loop'
- ? 'loop-start-source'
- : data.kind === 'parallel'
- ? 'parallel-start-source'
- : 'while-start-source'
- const endHandleId =
- data.kind === 'loop'
- ? 'loop-end-source'
- : data.kind === 'parallel'
- ? 'parallel-end-source'
- : 'while-end-source'
+ const startHandleId = data.kind === 'loop' ? 'loop-start-source' : 'parallel-start-source'
+ const endHandleId = data.kind === 'loop' ? 'loop-end-source' : 'parallel-end-source'
return { startHandleId, endHandleId }
}
@@ -432,10 +351,6 @@ describe('SubflowNodeComponent', () => {
const parallelResult = testKind('parallel')
expect(parallelResult.startHandleId).toBe('parallel-start-source')
expect(parallelResult.endHandleId).toBe('parallel-end-source')
const whileResult = testKind('while')
expect(whileResult.startHandleId).toBe('while-start-source')
expect(whileResult.endHandleId).toBe('while-end-source')
})
it.concurrent('should pass correct iterationType to IterationBadges for loop', () => {
@@ -453,49 +368,25 @@ describe('SubflowNodeComponent', () => {
expect(parallelProps.data.kind).toBe('parallel')
})
it.concurrent('should pass correct iterationType to IterationBadges for while', () => {
const whileProps = {
...defaultProps,
data: { ...defaultProps.data, kind: 'while' as const },
}
// Mock IterationBadges should receive the kind as iterationType
expect(whileProps.data.kind).toBe('while')
})
- it.concurrent('should handle loop, parallel, and while kinds in configuration arrays', () => {
- const allKinds = ['loop', 'parallel', 'while'] as const
- allKinds.forEach((kind) => {
+ it.concurrent('should handle both kinds in configuration arrays', () => {
+ const bothKinds = ['loop', 'parallel'] as const
+ bothKinds.forEach((kind) => {
const data = { ...defaultProps.data, kind }
- expect(['loop', 'parallel', 'while']).toContain(data.kind)
+ expect(['loop', 'parallel']).toContain(data.kind)
// Test handle ID generation for both kinds
- const startHandleId =
- data.kind === 'loop'
- ? 'loop-start-source'
- : data.kind === 'parallel'
- ? 'parallel-start-source'
- : 'while-start-source'
- const endHandleId =
- data.kind === 'loop'
- ? 'loop-end-source'
- : data.kind === 'parallel'
- ? 'parallel-end-source'
- : 'while-end-source'
- const startBg =
- data.kind === 'loop' ? '#2FB3FF' : data.kind === 'parallel' ? '#FEE12B' : '#57D9A3'
+ const startHandleId = data.kind === 'loop' ? 'loop-start-source' : 'parallel-start-source'
+ const endHandleId = data.kind === 'loop' ? 'loop-end-source' : 'parallel-end-source'
+ const startBg = data.kind === 'loop' ? '#2FB3FF' : '#FEE12B'
if (kind === 'loop') {
expect(startHandleId).toBe('loop-start-source')
expect(endHandleId).toBe('loop-end-source')
expect(startBg).toBe('#2FB3FF')
- } else if (kind === 'parallel') {
+ } else {
expect(startHandleId).toBe('parallel-start-source')
expect(endHandleId).toBe('parallel-end-source')
expect(startBg).toBe('#FEE12B')
- } else {
- expect(startHandleId).toBe('while-start-source')
- expect(endHandleId).toBe('while-end-source')
- expect(startBg).toBe('#57D9A3')
}
})
})
@@ -542,15 +433,10 @@ describe('SubflowNodeComponent', () => {
...defaultProps,
data: { ...defaultProps.data, kind: 'parallel' as const },
}
const whileProps = {
...defaultProps,
data: { ...defaultProps.data, kind: 'while' as const },
}
// The iterationType should match the kind
expect(loopProps.data.kind).toBe('loop')
expect(parallelProps.data.kind).toBe('parallel')
expect(whileProps.data.kind).toBe('while')
})
})

View File

@@ -26,12 +26,6 @@ const SubflowNodeStyles: React.FC = () => {
100% { box-shadow: 0 0 0 0 rgba(139, 195, 74, 0); }
}
@keyframes while-node-pulse {
0% { box-shadow: 0 0 0 0 rgba(255, 159, 67, 0.3); }
70% { box-shadow: 0 0 0 6px rgba(255, 159, 67, 0); }
100% { box-shadow: 0 0 0 0 rgba(255, 159, 67, 0); }
}
.loop-node-drag-over {
animation: loop-node-pulse 1.2s cubic-bezier(0.4, 0, 0.6, 1) infinite;
border-style: solid !important;
@@ -46,13 +40,6 @@ const SubflowNodeStyles: React.FC = () => {
box-shadow: 0 0 0 8px rgba(139, 195, 74, 0.1);
}
.while-node-drag-over {
animation: while-node-pulse 1.2s cubic-bezier(0.4, 0, 0.6, 1) infinite;
border-style: solid !important;
background-color: rgba(255, 159, 67, 0.08) !important;
box-shadow: 0 0 0 8px rgba(255, 159, 67, 0.1);
}
.react-flow__node-group:hover,
.hover-highlight {
border-color: #1e293b !important;
@@ -82,7 +69,7 @@ export interface SubflowNodeData {
extent?: 'parent'
hasNestedError?: boolean
isPreview?: boolean
- kind: 'loop' | 'parallel' | 'while'
+ kind: 'loop' | 'parallel'
}
export const SubflowNodeComponent = memo(({ data, id }: NodeProps<SubflowNodeData>) => {
@@ -127,26 +114,9 @@ export const SubflowNodeComponent = memo(({ data, id }: NodeProps<SubflowNodeDat
const nestedStyles = getNestedStyles()
- const startHandleId =
- data.kind === 'loop'
- ? 'loop-start-source'
- : data.kind === 'parallel'
- ? 'parallel-start-source'
- : 'while-start-source'
- const endHandleId =
- data.kind === 'loop'
- ? 'loop-end-source'
- : data.kind === 'parallel'
- ? 'parallel-end-source'
- : 'while-end-source'
- const startBg =
- data.kind === 'loop'
- ? '#2FB3FF'
- : data.kind === 'parallel'
- ? '#FEE12B'
- : data.kind === 'while'
- ? '#FF9F43'
- : '#2FB3FF'
+ const startHandleId = data.kind === 'loop' ? 'loop-start-source' : 'parallel-start-source'
+ const endHandleId = data.kind === 'loop' ? 'loop-end-source' : 'parallel-end-source'
+ const startBg = data.kind === 'loop' ? '#2FB3FF' : '#FEE12B'
return (
<>

View File

@@ -1,29 +0,0 @@
import { RefreshCwIcon } from 'lucide-react'
export const WhileTool = {
id: 'while',
type: 'while',
name: 'While',
description: 'While Loop',
icon: RefreshCwIcon,
bgColor: '#CC5500',
data: {
label: 'While',
whileType: 'while' as 'while' | 'doWhile',
condition: '',
width: 500,
height: 300,
extent: 'parent',
executionState: {
currentIteration: 0,
isExecuting: false,
startTime: null,
endTime: null,
},
},
style: {
width: 500,
height: 300,
},
isResizable: true,
}

View File

@@ -106,34 +106,32 @@ export function ActionBar({ blockId, blockType, disabled = false }: ActionBarPro
</Tooltip>
)}
- {/* Remove from subflow - only show when inside loop/parallel/while */}
- {!isStarterBlock &&
- parentId &&
- (parentType === 'loop' || parentType === 'parallel' || parentType === 'while') && (
- <Tooltip>
- <TooltipTrigger asChild>
- <Button
- variant='ghost'
- size='sm'
- onClick={() => {
- if (!disabled && userPermissions.canEdit) {
- window.dispatchEvent(
- new CustomEvent('remove-from-subflow', { detail: { blockId } })
- )
- }
- }}
- className={cn(
- 'text-gray-500',
- (disabled || !userPermissions.canEdit) && 'cursor-not-allowed opacity-50'
- )}
- disabled={disabled || !userPermissions.canEdit}
- >
- <LogOut className='h-4 w-4' />
- </Button>
- </TooltipTrigger>
- <TooltipContent side='right'>{getTooltipMessage('Remove From Subflow')}</TooltipContent>
- </Tooltip>
- )}
+ {/* Remove from subflow - only show when inside loop/parallel */}
+ {!isStarterBlock && parentId && (parentType === 'loop' || parentType === 'parallel') && (
+ <Tooltip>
+ <TooltipTrigger asChild>
+ <Button
+ variant='ghost'
+ size='sm'
+ onClick={() => {
+ if (!disabled && userPermissions.canEdit) {
+ window.dispatchEvent(
+ new CustomEvent('remove-from-subflow', { detail: { blockId } })
+ )
+ }
+ }}
+ className={cn(
+ 'text-gray-500',
+ (disabled || !userPermissions.canEdit) && 'cursor-not-allowed opacity-50'
+ )}
+ disabled={disabled || !userPermissions.canEdit}
+ >
+ <LogOut className='h-4 w-4' />
+ </Button>
+ </TooltipTrigger>
+ <TooltipContent side='right'>{getTooltipMessage('Remove From Subflow')}</TooltipContent>
+ </Tooltip>
+ )}
<Tooltip>
<TooltipTrigger asChild>

View File

@@ -79,7 +79,7 @@ export function ConnectionBlocks({
const blockConfig = getBlock(connection.type)
const displayName = connection.name // Use the actual block name instead of transforming it
- // Handle special blocks that aren't in the registry (loop, parallel, while)
+ // Handle special blocks that aren't in the registry (loop and parallel)
let Icon = blockConfig?.icon
let bgColor = blockConfig?.bgColor || '#6B7280' // Fallback to gray
@@ -90,9 +90,6 @@ export function ConnectionBlocks({
} else if (connection.type === 'parallel') {
Icon = SplitIcon as typeof Icon
bgColor = '#FEE12B' // Yellow color for parallel blocks
} else if (connection.type === 'while') {
Icon = RepeatIcon as typeof Icon
bgColor = '#FF9F43' // Orange color for while blocks
}
}

View File

@@ -385,16 +385,16 @@ export function Code({
<div
className={cn(
- 'group relative min-h-[100px] rounded-md border bg-background font-mono text-sm transition-colors',
+ 'group relative min-h-[100px] rounded-md border border-input bg-background font-mono text-sm transition-colors',
isConnecting && 'ring-2 ring-blue-500 ring-offset-2',
- !isValidJson && 'border-2 border-destructive bg-destructive/10'
+ !isValidJson && 'border-destructive bg-destructive/10'
)}
title={!isValidJson ? 'Invalid JSON' : undefined}
onDragOver={(e) => e.preventDefault()}
onDrop={handleDrop}
>
<div className='absolute top-2 right-3 z-10 flex items-center gap-1 opacity-0 transition-opacity group-hover:opacity-100'>
- {!isCollapsed && !isAiStreaming && !isPreview && (
+ {wandConfig?.enabled && !isCollapsed && !isAiStreaming && !isPreview && (
<Button
variant='ghost'
size='icon'
@@ -486,7 +486,7 @@ export function Code({
outline: 'none',
}}
className={cn(
- 'code-editor-area caret-primary',
+ 'code-editor-area caret-primary dark:caret-white',
'bg-transparent focus:outline-none',
(isCollapsed || isAiStreaming) && 'cursor-not-allowed opacity-50'
)}

View File

@@ -6,7 +6,7 @@ import { Button } from '@/components/ui/button'
import { formatDisplayText } from '@/components/ui/formatted-text'
import { Input } from '@/components/ui/input'
import { checkTagTrigger, TagDropdown } from '@/components/ui/tag-dropdown'
- import { MAX_TAG_SLOTS } from '@/lib/constants/knowledge'
+ import { MAX_TAG_SLOTS } from '@/lib/knowledge/consts'
import { cn } from '@/lib/utils'
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/components/sub-block/hooks/use-sub-block-value'
import type { SubBlockConfig } from '@/blocks/types'

View File

@@ -1,12 +1,16 @@
import { useEffect, useMemo, useRef, useState } from 'react'
import { Wand2 } from 'lucide-react'
import { useReactFlow } from 'reactflow'
import { Button } from '@/components/ui/button'
import { checkEnvVarTrigger, EnvVarDropdown } from '@/components/ui/env-var-dropdown'
import { formatDisplayText } from '@/components/ui/formatted-text'
import { Input } from '@/components/ui/input'
import { checkTagTrigger, TagDropdown } from '@/components/ui/tag-dropdown'
import { createLogger } from '@/lib/logs/console/logger'
import { cn } from '@/lib/utils'
import { WandPromptBar } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/wand-prompt-bar/wand-prompt-bar'
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/components/sub-block/hooks/use-sub-block-value'
import { useWand } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-wand'
import type { SubBlockConfig } from '@/blocks/types'
import { useTagSelection } from '@/hooks/use-tag-selection'
import { useOperationQueueStore } from '@/stores/operation-queue/store'
@@ -40,19 +44,39 @@ export function ShortInput({
previewValue,
disabled = false,
}: ShortInputProps) {
// Local state for immediate UI updates during streaming
const [localContent, setLocalContent] = useState<string>('')
const [isFocused, setIsFocused] = useState(false)
const [showEnvVars, setShowEnvVars] = useState(false)
const [showTags, setShowTags] = useState(false)
const validatePropValue = (value: any): string => {
if (value === undefined || value === null) return ''
if (typeof value === 'string') return value
try {
return String(value)
} catch {
return ''
}
}
const [storeValue, setStoreValue] = useSubBlockValue(blockId, subBlockId)
// Wand functionality (only if wandConfig is enabled)
const wandHook = config.wandConfig?.enabled
? useWand({
wandConfig: config.wandConfig,
currentValue: localContent,
onStreamStart: () => {
// Clear the content when streaming starts
setLocalContent('')
},
onStreamChunk: (chunk) => {
// Update local content with each chunk as it arrives
setLocalContent((current) => current + chunk)
},
onGeneratedContent: (content) => {
// Final content update
setLocalContent(content)
},
})
: null
// State management - useSubBlockValue with explicit streaming control
const [storeValue, setStoreValue] = useSubBlockValue(blockId, subBlockId, false, {
isStreaming: wandHook?.isStreaming || false,
onStreamingEnd: () => {
logger.debug('Wand streaming ended, value persisted', { blockId, subBlockId })
},
})
const [searchTerm, setSearchTerm] = useState('')
const [cursorPosition, setCursorPosition] = useState(0)
const inputRef = useRef<HTMLInputElement>(null)
@@ -65,7 +89,29 @@ export function ShortInput({
const reactFlowInstance = useReactFlow()
// Use preview value when in preview mode, otherwise use store value or prop value
- const value = isPreview ? previewValue : propValue !== undefined ? propValue : storeValue
+ const baseValue = isPreview ? previewValue : propValue !== undefined ? propValue : storeValue
+ // During streaming, use local content; otherwise use base value
+ const value = wandHook?.isStreaming ? localContent : baseValue
// Sync local content with base value when not streaming
useEffect(() => {
if (!wandHook?.isStreaming) {
const baseValueString = baseValue?.toString() ?? ''
if (baseValueString !== localContent) {
setLocalContent(baseValueString)
}
}
}, [baseValue, wandHook?.isStreaming])
// Update store value during streaming (but won't persist until streaming ends)
useEffect(() => {
if (wandHook?.isStreaming && localContent !== '') {
if (!isPreview && !disabled) {
setStoreValue(localContent)
}
}
}, [localContent, wandHook?.isStreaming, isPreview, disabled, setStoreValue])
// Check if this input is API key related
const isApiKeyField = useMemo(() => {
@@ -297,91 +343,130 @@ export function ShortInput({
}
return (
<div className='relative w-full'>
<Input
ref={inputRef}
className={cn(
'allow-scroll w-full overflow-auto text-transparent caret-foreground placeholder:text-muted-foreground/50',
isConnecting &&
config?.connectionDroppable !== false &&
'ring-2 ring-blue-500 ring-offset-2 focus-visible:ring-blue-500'
)}
placeholder={placeholder ?? ''}
type='text'
value={displayValue}
onChange={handleChange}
onFocus={() => {
setIsFocused(true)
<>
<WandPromptBar
isVisible={wandHook?.isPromptVisible || false}
isLoading={wandHook?.isLoading || false}
isStreaming={wandHook?.isStreaming || false}
promptValue={wandHook?.promptInputValue || ''}
onSubmit={(prompt: string) => wandHook?.generateStream({ prompt }) || undefined}
onCancel={
wandHook?.isStreaming
? wandHook?.cancelGeneration
: wandHook?.hidePromptInline || (() => {})
}
onChange={(value: string) => wandHook?.updatePromptValue?.(value)}
placeholder={config.wandConfig?.placeholder || 'Describe what you want to generate...'}
/>
// If this is an API key field, automatically show env vars dropdown
if (isApiKeyField) {
setShowEnvVars(true)
setSearchTerm('')
<div className='group relative w-full'>
<Input
ref={inputRef}
className={cn(
'allow-scroll w-full overflow-auto text-transparent caret-foreground placeholder:text-muted-foreground/50',
isConnecting &&
config?.connectionDroppable !== false &&
'ring-2 ring-blue-500 ring-offset-2 focus-visible:ring-blue-500'
)}
placeholder={placeholder ?? ''}
type='text'
value={displayValue}
onChange={handleChange}
onFocus={() => {
setIsFocused(true)
// Set cursor position to the end of the input
const inputLength = value?.toString().length ?? 0
setCursorPosition(inputLength)
} else {
// If this is an API key field, automatically show env vars dropdown
if (isApiKeyField) {
setShowEnvVars(true)
setSearchTerm('')
// Set cursor position to the end of the input
const inputLength = value?.toString().length ?? 0
setCursorPosition(inputLength)
} else {
setShowEnvVars(false)
setShowTags(false)
setSearchTerm('')
}
}}
onBlur={() => {
setIsFocused(false)
setShowEnvVars(false)
setShowTags(false)
setSearchTerm('')
}
}}
onBlur={() => {
setIsFocused(false)
setShowEnvVars(false)
try {
useOperationQueueStore.getState().flushDebouncedForBlock(blockId)
} catch {}
}}
onDrop={handleDrop}
onDragOver={handleDragOver}
onScroll={handleScroll}
onPaste={handlePaste}
onWheel={handleWheel}
onKeyDown={handleKeyDown}
autoComplete='off'
style={{ overflowX: 'auto' }}
disabled={disabled}
/>
<div
ref={overlayRef}
className='pointer-events-none absolute inset-0 flex items-center overflow-x-auto bg-transparent px-3 text-sm'
style={{ overflowX: 'auto' }}
>
try {
useOperationQueueStore.getState().flushDebouncedForBlock(blockId)
} catch {}
}}
onDrop={handleDrop}
onDragOver={handleDragOver}
onScroll={handleScroll}
onPaste={handlePaste}
onWheel={handleWheel}
onKeyDown={handleKeyDown}
autoComplete='off'
style={{ overflowX: 'auto' }}
disabled={disabled}
/>
<div
className='w-full whitespace-pre'
style={{ scrollbarWidth: 'none', minWidth: 'fit-content' }}
ref={overlayRef}
className='pointer-events-none absolute inset-0 flex items-center overflow-x-auto bg-transparent px-3 text-sm'
style={{ overflowX: 'auto' }}
>
{password && !isFocused
? '•'.repeat(value?.toString().length ?? 0)
: formatDisplayText(value?.toString() ?? '', true)}
<div
className='w-full whitespace-pre'
style={{ scrollbarWidth: 'none', minWidth: 'fit-content' }}
>
{password && !isFocused
? '•'.repeat(value?.toString().length ?? 0)
: formatDisplayText(value?.toString() ?? '', true)}
</div>
</div>
</div>
<EnvVarDropdown
visible={showEnvVars}
onSelect={handleEnvVarSelect}
searchTerm={searchTerm}
inputValue={value?.toString() ?? ''}
cursorPosition={cursorPosition}
onClose={() => {
setShowEnvVars(false)
setSearchTerm('')
}}
/>
<TagDropdown
visible={showTags}
onSelect={handleEnvVarSelect}
blockId={blockId}
activeSourceBlockId={activeSourceBlockId}
inputValue={value?.toString() ?? ''}
cursorPosition={cursorPosition}
onClose={() => {
setShowTags(false)
setActiveSourceBlockId(null)
}}
/>
</div>
{/* Wand Button */}
{wandHook && !isPreview && !wandHook.isStreaming && (
<div className='-translate-y-1/2 absolute top-1/2 right-3 z-10 flex items-center gap-1 opacity-0 transition-opacity group-hover:opacity-100'>
<Button
variant='ghost'
size='icon'
onClick={
wandHook.isPromptVisible ? wandHook.hidePromptInline : wandHook.showPromptInline
}
disabled={wandHook.isLoading || wandHook.isStreaming || disabled}
aria-label='Generate content with AI'
className='h-8 w-8 rounded-full border border-transparent bg-muted/80 text-muted-foreground shadow-sm transition-all duration-200 hover:border-primary/20 hover:bg-muted hover:text-primary hover:shadow'
>
<Wand2 className='h-4 w-4' />
</Button>
</div>
)}
{!wandHook?.isStreaming && (
<>
<EnvVarDropdown
visible={showEnvVars}
onSelect={handleEnvVarSelect}
searchTerm={searchTerm}
inputValue={value?.toString() ?? ''}
cursorPosition={cursorPosition}
onClose={() => {
setShowEnvVars(false)
setSearchTerm('')
}}
/>
<TagDropdown
visible={showTags}
onSelect={handleEnvVarSelect}
blockId={blockId}
activeSourceBlockId={activeSourceBlockId}
inputValue={value?.toString() ?? ''}
cursorPosition={cursorPosition}
onClose={() => {
setShowTags(false)
setActiveSourceBlockId(null)
}}
/>
</>
)}
</div>
</>
)
}

View File

@@ -344,7 +344,13 @@ export function TriggerConfig({
// Check if the trigger is connected
// Both webhook and credential-based triggers now have webhook database entries
- const isTriggerConnected = Boolean(triggerId && actualTriggerId)
+ // In preview, consider it configured if the snapshot contains any trigger fields
+ const isConfiguredInPreview = isPreview && Boolean(
+ (propValue?.triggerPath && propValue.triggerPath.length > 0) ||
+ (propValue?.triggerConfig && Object.keys(propValue.triggerConfig).length > 0) ||
+ propValue?.triggerId
+ )
+ const isTriggerConnected = isConfiguredInPreview || Boolean(triggerId && actualTriggerId)
// Debug logging to help with troubleshooting
useEffect(() => {

View File

@@ -486,10 +486,15 @@ export function SubBlock({
</TooltipContent>
</Tooltip>
)}
- {config.id === 'responseFormat' && !isValidJson && (
+ {config.id === 'responseFormat' && (
<Tooltip>
<TooltipTrigger asChild>
- <AlertTriangle className='h-4 w-4 cursor-pointer text-destructive' />
+ <AlertTriangle
+ className={cn(
+ 'h-4 w-4 cursor-pointer text-destructive',
+ !isValidJson ? 'opacity-100' : 'opacity-0'
+ )}
+ />
</TooltipTrigger>
<TooltipContent side='top'>
<p>Invalid JSON</p>

View File

@@ -13,6 +13,8 @@ import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/provide
import type { BlockConfig, SubBlockConfig, SubBlockType } from '@/blocks/types'
import { useCollaborativeWorkflow } from '@/hooks/use-collaborative-workflow'
import { useExecutionStore } from '@/stores/execution/store'
import { usePanelStore } from '@/stores/panel/store'
import { useGeneralStore } from '@/stores/settings/general/store'
import { useWorkflowDiffStore } from '@/stores/workflow-diff'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
@@ -435,8 +437,12 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
stateToUse = mergedState?.subBlocks || {}
}
- const isAdvancedMode = useWorkflowStore.getState().blocks[blockId]?.advancedMode ?? false
- const isTriggerMode = useWorkflowStore.getState().blocks[blockId]?.triggerMode ?? false
+ const isAdvancedMode = data.isPreview
+ ? ((data.blockState as any)?.advancedMode ?? false)
+ : useWorkflowStore.getState().blocks[blockId]?.advancedMode ?? false
+ const isTriggerMode = data.isPreview
+ ? ((data.blockState as any)?.triggerMode ?? false)
+ : useWorkflowStore.getState().blocks[blockId]?.triggerMode ?? false
const effectiveAdvanced = currentWorkflow.isDiffMode ? displayAdvancedMode : isAdvancedMode
const effectiveTrigger = currentWorkflow.isDiffMode ? displayTriggerMode : isTriggerMode
@@ -580,6 +586,72 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
type === 'schedule' && !isLoadingScheduleInfo && scheduleInfo !== null
const userPermissions = useUserPermissionsContext()
// Debug mode and active selection
const isDebugModeEnabled = useGeneralStore((s) => s.isDebugModeEnabled)
const activeBlockIds = useExecutionStore((s) => s.activeBlockIds)
const panelFocusedBlockId = useExecutionStore((s) => s.panelFocusedBlockId)
const setPanelFocusedBlockId = useExecutionStore((s) => s.setPanelFocusedBlockId)
const executingBlockIds = useExecutionStore((s) => s.executingBlockIds)
const setActiveBlocks = useExecutionStore((s) => s.setActiveBlocks)
const setActiveTab = usePanelStore((s) => s.setActiveTab)
const breakpointId = useExecutionStore((s) => s.breakpointId)
const debugContext = useExecutionStore((s) => s.debugContext)
const startPositionIds = useExecutionStore((s) => s.startPositionIds)
const handleDebugOpen = (e: React.MouseEvent) => {
if (!isDebugModeEnabled) return
e.stopPropagation()
setActiveBlocks(new Set([id]))
setActiveTab('debug')
// Always select this block for the debug panel focus
setPanelFocusedBlockId(id)
}
// In debug mode, use executingBlockIds to detect actual executing blocks (not selection);
// outside debug, fall back to activeBlockIds driven by the executor
const isExecutingNow = isDebugModeEnabled ? executingBlockIds.has(id) : activeBlockIds.has(id)
const isCurrentBlock = isDebugModeEnabled && isPending
const isPanelFocused = isDebugModeEnabled && panelFocusedBlockId === id
// Check if block has errored during debug execution
const hasError =
isDebugModeEnabled && debugContext
? (() => {
// Check direct block state for error
const directState = debugContext.blockStates?.get(id)
if (
directState?.output &&
typeof directState.output === 'object' &&
'error' in directState.output
) {
return true
}
// Check virtual executions for errors (for blocks inside parallels)
for (const [key, state] of debugContext.blockStates?.entries() || []) {
// Check if this is a virtual ID for our block
if (typeof key === 'string' && key.startsWith(`${id}_parallel_`)) {
if (state?.output && typeof state.output === 'object' && 'error' in state.output) {
return true
}
}
}
// Also check block logs for this block
const hasErrorLog = debugContext.blockLogs?.some((log: any) => {
if (log.blockId === id && !log.success) return true
// Check if log is for a virtual version of this block
if (
typeof log.blockId === 'string' &&
log.blockId.startsWith(`${id}_parallel_`) &&
!log.success
) {
return true
}
return false
})
return hasErrorLog || false
})()
: false
return (
<div className='group relative'>
<Card
@@ -589,20 +661,54 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
'transition-block-bg transition-ring',
displayIsWide ? 'w-[480px]' : 'w-[320px]',
!isEnabled && 'shadow-sm',
- isActive && 'animate-pulse-ring ring-2 ring-blue-500',
- isPending && 'ring-2 ring-amber-500',
- // Diff highlighting
- diffStatus === 'new' && 'bg-green-50/50 ring-2 ring-green-500 dark:bg-green-900/10',
- diffStatus === 'edited' && 'bg-orange-50/50 ring-2 ring-orange-500 dark:bg-orange-900/10',
+ // Error state - highest priority (only border, no background)
+ hasError && 'ring-2 ring-red-500',
+ // Panel-focused block highlight (unless errored)
+ !hasError && isPanelFocused && 'bg-blue-50/60 dark:bg-blue-900/5',
+ // Executing blocks match staging: pulsing blue ring
+ !hasError && isExecutingNow && 'animate-pulse-ring ring-2 ring-blue-500',
+ // Pending blocks show blue border when not executing
+ !hasError && !isExecutingNow && isCurrentBlock && 'ring-2 ring-blue-500',
+ // Diff highlighting (only if not in debug error state)
+ !hasError &&
+ diffStatus === 'new' &&
+ 'bg-green-50/50 ring-2 ring-green-500 dark:bg-green-900/10',
+ !hasError &&
+ diffStatus === 'edited' &&
+ 'bg-orange-50/50 ring-2 ring-orange-500 dark:bg-orange-900/10',
// Deleted block highlighting (in original workflow)
isDeletedBlock && 'bg-red-50/50 ring-2 ring-red-500 dark:bg-red-900/10',
'z-[20]'
)}
onClick={handleDebugOpen}
>
- {/* Show debug indicator for pending blocks */}
- {isPending && (
- <div className='-top-6 -translate-x-1/2 absolute left-1/2 z-10 transform rounded-t-md bg-amber-500 px-2 py-0.5 text-white text-xs'>
- Next Step
+ {/* Show error indicator for errored blocks */}
+ {hasError && (
+ <div className='-top-6 -translate-x-1/2 absolute left-1/2 z-10 transform rounded-t-md bg-red-500 px-2 py-0.5 text-white text-xs'>
+ Error
</div>
)}
{/* Show debug indicator for current blocks in debug mode (pending or executing) - but not if errored */}
{!hasError && isDebugModeEnabled && (isPending || executingBlockIds.has(id)) && (
<div className='-top-6 -translate-x-1/2 absolute left-1/2 z-10 transform rounded-t-md bg-blue-500 px-2 py-0.5 text-white text-xs'>
Current
</div>
)}
{/* Bottom indicators: breakpoint and start position side by side */}
{isDebugModeEnabled && (breakpointId === id || startPositionIds.has(id)) && (
<div className='-bottom-6 -translate-x-1/2 absolute left-1/2 z-10 flex transform items-end gap-2'>
{breakpointId === id && (
<div className='rounded-b-md bg-orange-500 px-2 py-0.5 text-white text-xs'>
Breakpoint
</div>
)}
{startPositionIds.has(id) && (
<div className='rounded-b-md bg-purple-600 px-2 py-0.5 text-white text-xs'>
Start Position
</div>
)}
</div>
)}

View File

@@ -58,7 +58,6 @@ export function generateFullWorkflowData() {
edges: workflowState.edges,
loops: workflowState.loops,
parallels: workflowState.parallels,
whiles: workflowState.whiles,
},
subBlockValues,
exportedAt: new Date().toISOString(),

View File

@@ -3,44 +3,39 @@ import type { Edge } from 'reactflow'
import { useWorkflowDiffStore } from '@/stores/workflow-diff/store'
import type { DeploymentStatus } from '@/stores/workflows/registry/types'
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
- import type {
- BlockState,
- Loop,
- Parallel,
- While,
- WorkflowState,
- } from '@/stores/workflows/workflow/types'
+ import type { BlockState, Loop, Parallel, WorkflowState } from '@/stores/workflows/workflow/types'
import { useDebugCanvasStore } from '@/stores/execution/debug-canvas/store'
/**
* Interface for the current workflow abstraction
*/
export interface CurrentWorkflow {
// Current workflow state properties
blocks: Record<string, BlockState>
edges: Edge[]
loops: Record<string, Loop>
parallels: Record<string, Parallel>
whiles: Record<string, While>
lastSaved?: number
isDeployed?: boolean
deployedAt?: Date
deploymentStatuses?: Record<string, DeploymentStatus>
needsRedeployment?: boolean
hasActiveWebhook?: boolean
// Current workflow state properties
blocks: Record<string, BlockState>
edges: Edge[]
loops: Record<string, Loop>
parallels: Record<string, Parallel>
lastSaved?: number
isDeployed?: boolean
deployedAt?: Date
deploymentStatuses?: Record<string, DeploymentStatus>
needsRedeployment?: boolean
hasActiveWebhook?: boolean
// Mode information
isDiffMode: boolean
isNormalMode: boolean
// Mode information
isDiffMode: boolean
isNormalMode: boolean
isDebugCanvasMode?: boolean
// Full workflow state (for cases that need the complete object)
workflowState: WorkflowState
// Full workflow state (for cases that need the complete object)
workflowState: WorkflowState
// Helper methods
getBlockById: (blockId: string) => BlockState | undefined
getBlockCount: () => number
getEdgeCount: () => number
hasBlocks: () => boolean
hasEdges: () => boolean
// Helper methods
getBlockById: (blockId: string) => BlockState | undefined
getBlockCount: () => number
getEdgeCount: () => number
hasBlocks: () => boolean
hasEdges: () => boolean
}
/**
@@ -48,49 +43,91 @@ export interface CurrentWorkflow {
* Automatically handles diff vs normal mode without exposing the complexity to consumers.
*/
export function useCurrentWorkflow(): CurrentWorkflow {
// Get normal workflow state
const normalWorkflow = useWorkflowStore((state) => state.getWorkflowState())
// Get normal workflow state
const normalWorkflow = useWorkflowStore((state) => state.getWorkflowState())
// Get diff state - now including isDiffReady
const { isShowingDiff, isDiffReady, diffWorkflow } = useWorkflowDiffStore()
// Get diff state - now including isDiffReady
const { isShowingDiff, isDiffReady, diffWorkflow } = useWorkflowDiffStore()
// Create the abstracted interface
const currentWorkflow = useMemo((): CurrentWorkflow => {
// Determine which workflow to use - only use diff if it's ready
const hasDiffBlocks =
!!diffWorkflow && Object.keys((diffWorkflow as any).blocks || {}).length > 0
const shouldUseDiff = isShowingDiff && isDiffReady && hasDiffBlocks
const activeWorkflow = shouldUseDiff ? diffWorkflow : normalWorkflow
// Get debug canvas override
const debugCanvas = useDebugCanvasStore((s) => ({ isActive: s.isActive, workflowState: s.workflowState }))
return {
// Current workflow state
blocks: activeWorkflow.blocks,
edges: activeWorkflow.edges,
loops: activeWorkflow.loops || {},
parallels: activeWorkflow.parallels || {},
whiles: activeWorkflow.whiles || {},
lastSaved: activeWorkflow.lastSaved,
isDeployed: activeWorkflow.isDeployed,
deployedAt: activeWorkflow.deployedAt,
deploymentStatuses: activeWorkflow.deploymentStatuses,
needsRedeployment: activeWorkflow.needsRedeployment,
hasActiveWebhook: activeWorkflow.hasActiveWebhook,
// Create the abstracted interface
const currentWorkflow = useMemo((): CurrentWorkflow => {
// Prefer debug canvas if active
const hasDebugCanvas = !!debugCanvas.isActive && !!debugCanvas.workflowState
if (hasDebugCanvas) {
console.log('[useCurrentWorkflow] Using debug canvas state', {
isActive: debugCanvas.isActive,
hasWorkflowState: !!debugCanvas.workflowState,
blockCount: debugCanvas.workflowState ? Object.keys(debugCanvas.workflowState.blocks || {}).length : 0,
edgeCount: debugCanvas.workflowState ? (debugCanvas.workflowState.edges || []).length : 0
})
const activeWorkflow = debugCanvas.workflowState as WorkflowState
return {
blocks: activeWorkflow.blocks,
edges: activeWorkflow.edges,
loops: activeWorkflow.loops || {},
parallels: activeWorkflow.parallels || {},
lastSaved: activeWorkflow.lastSaved,
isDeployed: activeWorkflow.isDeployed,
deployedAt: activeWorkflow.deployedAt,
deploymentStatuses: activeWorkflow.deploymentStatuses,
needsRedeployment: activeWorkflow.needsRedeployment,
hasActiveWebhook: activeWorkflow.hasActiveWebhook,
isDiffMode: false,
isNormalMode: false,
isDebugCanvasMode: true,
workflowState: activeWorkflow,
getBlockById: (blockId: string) => activeWorkflow.blocks[blockId],
getBlockCount: () => Object.keys(activeWorkflow.blocks).length,
getEdgeCount: () => activeWorkflow.edges.length,
hasBlocks: () => Object.keys(activeWorkflow.blocks).length > 0,
hasEdges: () => activeWorkflow.edges.length > 0,
}
}
// Mode information - update to reflect ready state
isDiffMode: shouldUseDiff,
isNormalMode: !shouldUseDiff,
// Determine which workflow to use - only use diff if it's ready
const hasDiffBlocks = !!diffWorkflow && Object.keys((diffWorkflow as any).blocks || {}).length > 0
const shouldUseDiff = isShowingDiff && isDiffReady && hasDiffBlocks
const activeWorkflow = shouldUseDiff ? diffWorkflow : normalWorkflow
console.log('[useCurrentWorkflow] Not using debug canvas', {
debugCanvasIsActive: debugCanvas.isActive,
debugCanvasHasState: !!debugCanvas.workflowState,
usingDiff: shouldUseDiff,
normalBlockCount: Object.keys(normalWorkflow.blocks || {}).length
})
// Full workflow state (for cases that need the complete object)
workflowState: activeWorkflow,
return {
// Current workflow state
blocks: activeWorkflow.blocks,
edges: activeWorkflow.edges,
loops: activeWorkflow.loops || {},
parallels: activeWorkflow.parallels || {},
lastSaved: activeWorkflow.lastSaved,
isDeployed: activeWorkflow.isDeployed,
deployedAt: activeWorkflow.deployedAt,
deploymentStatuses: activeWorkflow.deploymentStatuses,
needsRedeployment: activeWorkflow.needsRedeployment,
hasActiveWebhook: activeWorkflow.hasActiveWebhook,
// Helper methods
getBlockById: (blockId: string) => activeWorkflow.blocks[blockId],
getBlockCount: () => Object.keys(activeWorkflow.blocks).length,
getEdgeCount: () => activeWorkflow.edges.length,
hasBlocks: () => Object.keys(activeWorkflow.blocks).length > 0,
hasEdges: () => activeWorkflow.edges.length > 0,
}
}, [normalWorkflow, isShowingDiff, isDiffReady, diffWorkflow])
// Mode information - update to reflect ready state
isDiffMode: shouldUseDiff,
isNormalMode: !shouldUseDiff,
isDebugCanvasMode: false,
return currentWorkflow
// Full workflow state (for cases that need the complete object)
workflowState: activeWorkflow,
// Helper methods
getBlockById: (blockId: string) => activeWorkflow.blocks[blockId],
getBlockCount: () => Object.keys(activeWorkflow.blocks).length,
getEdgeCount: () => activeWorkflow.edges.length,
hasBlocks: () => Object.keys(activeWorkflow.blocks).length > 0,
hasEdges: () => activeWorkflow.edges.length > 0,
}
}, [normalWorkflow, isShowingDiff, isDiffReady, diffWorkflow, debugCanvas.isActive, debugCanvas.workflowState])
return currentWorkflow
}
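Two notes on the hook above. First, selecting two fields into a fresh object literal from a zustand store (useDebugCanvasStore((s) => ({ ... }))) produces a new reference on every store change, so in practice it usually wants a shallow comparator (e.g. zustand's useShallow); whether that is wired up elsewhere is not visible in this diff. Second, the selection order it implements can be summarized as a pure function. A hedged sketch, reusing only the real WorkflowState type; the other shapes are illustrative:

import type { WorkflowState } from '@/stores/workflows/workflow/types'

interface SelectionInputs {
  debugCanvas: { isActive: boolean; workflowState: WorkflowState | null }
  isShowingDiff: boolean
  isDiffReady: boolean
  diffWorkflow: WorkflowState | null
  normalWorkflow: WorkflowState
}

function selectActiveWorkflow(
  i: SelectionInputs
): { state: WorkflowState; mode: 'debug-canvas' | 'diff' | 'normal' } {
  // 1. The debug canvas overrides everything while a debug session is being inspected.
  if (i.debugCanvas.isActive && i.debugCanvas.workflowState) {
    return { state: i.debugCanvas.workflowState, mode: 'debug-canvas' }
  }
  // 2. The diff view only wins once the diff is ready and actually has blocks.
  const hasDiffBlocks = !!i.diffWorkflow && Object.keys(i.diffWorkflow.blocks || {}).length > 0
  if (i.isShowingDiff && i.isDiffReady && hasDiffBlocks) {
    return { state: i.diffWorkflow!, mode: 'diff' }
  }
  // 3. Otherwise the live workflow store is the source of truth.
  return { state: i.normalWorkflow, mode: 'normal' }
}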

View File

@@ -9,6 +9,7 @@ import { Executor } from '@/executor'
import type { BlockLog, ExecutionResult, StreamingExecution } from '@/executor/types'
import { Serializer } from '@/serializer'
import type { SerializedWorkflow } from '@/serializer/types'
import { useDebugSnapshotStore } from '@/stores/execution/debug-snapshots/store'
import { useExecutionStore } from '@/stores/execution/store'
import { useConsoleStore } from '@/stores/panel/console/store'
import { useVariablesStore } from '@/stores/panel/variables/store'
@@ -62,6 +63,8 @@ export function useWorkflowExecution() {
setExecutor,
setDebugContext,
setActiveBlocks,
setExecutingBlockIds,
startPositionIds,
} = useExecutionStore()
const [executionResult, setExecutionResult] = useState<ExecutionResult | null>(null)
@@ -70,7 +73,7 @@ export function useWorkflowExecution() {
*/
const validateDebugState = useCallback((): DebugValidationResult => {
if (!executor || !debugContext || pendingBlocks.length === 0) {
const missing = []
const missing = [] as string[]
if (!executor) missing.push('executor')
if (!debugContext) missing.push('debugContext')
if (pendingBlocks.length === 0) missing.push('pendingBlocks')
@@ -93,6 +96,7 @@ export function useWorkflowExecution() {
setExecutor(null)
setPendingBlocks([])
setActiveBlocks(new Set())
setExecutingBlockIds(new Set())
// Reset debug mode setting if it was enabled
if (isDebugModeEnabled) {
@@ -105,6 +109,7 @@ export function useWorkflowExecution() {
setExecutor,
setPendingBlocks,
setActiveBlocks,
setExecutingBlockIds,
isDebugModeEnabled,
])
@@ -120,7 +125,7 @@ export function useWorkflowExecution() {
}, [])
/**
* Handles debug session completion
* Handles debug session completion - keep debug session open for inspection
*/
const handleDebugSessionComplete = useCallback(
async (result: ExecutionResult) => {
@@ -130,10 +135,14 @@ export function useWorkflowExecution() {
// Persist logs
await persistLogs(uuidv4(), result)
// Reset debug state
resetDebugState()
// Keep debug mode open for inspection: stop executing, clear pending
setIsExecuting(false)
setPendingBlocks([])
setExecutingBlockIds(new Set())
// Keep debugContext and executor so the panel can inspect state
// Do not reset isDebugging
},
[activeWorkflowId, resetDebugState]
[activeWorkflowId, setIsExecuting, setPendingBlocks, setExecutingBlockIds]
)
/**
@@ -148,16 +157,30 @@ export function useWorkflowExecution() {
// Update debug context and pending blocks
if (result.metadata?.context) {
setDebugContext(result.metadata.context)
// Capture snapshot for revert/backstep
try {
useDebugSnapshotStore.getState().captureFromContext(result.metadata.context as any)
useDebugSnapshotStore
.getState()
.pushFromContext(result.metadata.context as any, result.metadata?.pendingBlocks || [])
} catch {}
}
if (result.metadata?.pendingBlocks) {
setPendingBlocks(result.metadata.pendingBlocks)
// Filter triggers from next pending
const filtered = (result.metadata.pendingBlocks as string[]).filter((id) => {
const block = currentWorkflow.blocks[id]
if (!block) return false
const cfg = getBlock(block.type)
return cfg?.category !== 'triggers'
})
setPendingBlocks(filtered)
}
},
[setDebugContext, setPendingBlocks]
[setDebugContext, setPendingBlocks, currentWorkflow.blocks]
)
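The snapshot calls above (captureFromContext on session start, pushFromContext after each continuation) suggest a stack of debugger snapshots kept for revert/back-step. The real store lives in '@/stores/execution/debug-snapshots/store'; the sketch below is inferred from the call sites only and is not its actual implementation:

import { create } from 'zustand'

interface DebugSnapshot {
  context: unknown
  pendingBlockIds: string[]
  takenAt: number
}

interface DebugSnapshotState {
  snapshots: DebugSnapshot[]
  captureFromContext: (context: unknown) => void
  pushFromContext: (context: unknown, pendingBlockIds: string[]) => void
}

const useDebugSnapshotStoreSketch = create<DebugSnapshotState>((set) => ({
  snapshots: [],
  // Reset the stack to a single snapshot when a fresh debug session starts.
  captureFromContext: (context) =>
    set({ snapshots: [{ context, pendingBlockIds: [], takenAt: Date.now() }] }),
  // Append a snapshot after each step so the user can revert/back-step later.
  pushFromContext: (context, pendingBlockIds) =>
    set((s) => ({
      snapshots: [...s.snapshots, { context, pendingBlockIds, takenAt: Date.now() }],
    })),
}))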
/**
* Handles debug execution errors
* Handles debug execution errors - keep debug open for inspection
*/
const handleDebugExecutionError = useCallback(
async (error: any, operation: string) => {
@@ -176,10 +199,13 @@ export function useWorkflowExecution() {
// Persist logs
await persistLogs(uuidv4(), errorResult)
// Reset debug state
resetDebugState()
// Keep debug session open for inspection
setIsExecuting(false)
setPendingBlocks([])
setExecutingBlockIds(new Set())
// Keep isDebugging, debugContext, and executor intact
},
[debugContext, activeWorkflowId, resetDebugState]
[debugContext, activeWorkflowId, setIsExecuting, setPendingBlocks, setExecutingBlockIds]
)
const persistLogs = async (
@@ -268,8 +294,8 @@ export function useWorkflowExecution() {
const isChatExecution =
workflowInput && typeof workflowInput === 'object' && 'input' in workflowInput
// For chat executions, we'll use a streaming approach
if (isChatExecution) {
// For chat executions, use streaming only when NOT debugging
if (isChatExecution && !enableDebug) {
const stream = new ReadableStream({
async start(controller) {
const encoder = new TextEncoder()
@@ -390,7 +416,7 @@ export function useWorkflowExecution() {
}
try {
const result = await executeWorkflow(workflowInput, onStream, executionId)
const result = await executeWorkflow(workflowInput, onStream, executionId, false)
// Check if execution was cancelled
if (
@@ -448,7 +474,6 @@ export function useWorkflowExecution() {
} catch (error: any) {
controller.error(error)
} finally {
controller.close()
setIsExecuting(false)
setIsDebugging(false)
setActiveBlocks(new Set())
@@ -458,12 +483,23 @@ export function useWorkflowExecution() {
return { success: true, stream }
}
// For manual (non-chat) execution
// For manual (non-streaming) execution including debug and non-chat
const executionId = uuidv4()
try {
const result = await executeWorkflow(workflowInput, undefined, executionId)
const result = await executeWorkflow(workflowInput, undefined, executionId, enableDebug)
if (result && 'metadata' in result && result.metadata?.isDebugSession) {
setDebugContext(result.metadata.context || null)
try {
if (result.metadata?.context) {
useDebugSnapshotStore.getState().captureFromContext(result.metadata.context as any)
useDebugSnapshotStore
.getState()
.pushFromContext(
result.metadata.context as any,
result.metadata?.pendingBlocks || []
)
}
} catch {}
if (result.metadata.pendingBlocks) {
setPendingBlocks(result.metadata.pendingBlocks)
}
@@ -508,13 +544,15 @@ export function useWorkflowExecution() {
setExecutor,
setPendingBlocks,
setActiveBlocks,
startPositionIds,
]
)
const executeWorkflow = async (
workflowInput?: any,
onStream?: (se: StreamingExecution) => Promise<void>,
executionId?: string
executionId?: string,
debugRequested?: boolean
): Promise<ExecutionResult | StreamingExecution> => {
// Use currentWorkflow but check if we're in diff mode
const {
@@ -522,7 +560,6 @@ export function useWorkflowExecution() {
edges: workflowEdges,
loops: workflowLoops,
parallels: workflowParallels,
whiles: workflowWhiles,
} = currentWorkflow
// Filter out blocks without type (these are layout-only blocks)
@@ -603,7 +640,7 @@ export function useWorkflowExecution() {
const envVars = getAllVariables()
const envVarValues = Object.entries(envVars).reduce(
(acc, [key, variable]) => {
acc[key] = variable.value
acc[key] = (variable as any).value
return acc
},
{} as Record<string, string>
@@ -634,8 +671,7 @@ export function useWorkflowExecution() {
filteredStates,
filteredEdges,
workflowLoops,
workflowParallels,
workflowWhiles
workflowParallels
)
// If this is a chat execution, get the selected outputs
@@ -674,7 +710,9 @@ export function useWorkflowExecution() {
setExecutor(newExecutor)
// Execute workflow
return newExecutor.execute(activeWorkflowId || '')
const execResult = await newExecutor.execute(activeWorkflowId || '')
return execResult
}
const handleExecutionError = (error: any) => {
@@ -750,14 +788,33 @@ export function useWorkflowExecution() {
// Validate debug state
const validation = validateDebugState()
if (!validation.isValid) {
resetDebugState()
// Keep session open for inspection; simply stop executing
setIsExecuting(false)
return
}
// Compute executable set without triggers
const nonTriggerPending = pendingBlocks.filter((id) => {
const block = currentWorkflow.blocks[id]
if (!block) return false
const cfg = getBlock(block.type)
return cfg?.category !== 'triggers'
})
if (nonTriggerPending.length === 0) {
// Nothing executable
setIsExecuting(false)
return
}
try {
logger.info('Executing debug step with blocks:', pendingBlocks)
const result = await executor!.continueExecution(pendingBlocks, debugContext!)
logger.info('Executing debug step with blocks:', nonTriggerPending)
// Mark current pending blocks as executing for UI pulse
setExecutingBlockIds(new Set(nonTriggerPending))
const result = await executor!.continueExecution(nonTriggerPending, debugContext!)
logger.info('Debug step execution result:', result)
// Clear executing state after step returns
setExecutingBlockIds(new Set())
if (isDebugSessionComplete(result)) {
await handleDebugSessionComplete(result)
@@ -765,6 +822,7 @@ export function useWorkflowExecution() {
handleDebugSessionContinuation(result)
}
} catch (error: any) {
setExecutingBlockIds(new Set())
await handleDebugExecutionError(error, 'step')
}
}, [
@@ -773,11 +831,13 @@ export function useWorkflowExecution() {
pendingBlocks,
activeWorkflowId,
validateDebugState,
resetDebugState,
setIsExecuting,
setExecutingBlockIds,
isDebugSessionComplete,
handleDebugSessionComplete,
handleDebugSessionContinuation,
handleDebugExecutionError,
currentWorkflow.blocks,
])
/**
@@ -793,7 +853,8 @@ export function useWorkflowExecution() {
// Validate debug state
const validation = validateDebugState()
if (!validation.isValid) {
resetDebugState()
// Keep session open for inspection; simply stop executing
setIsExecuting(false)
return
}
@@ -810,6 +871,14 @@ export function useWorkflowExecution() {
let currentContext = { ...debugContext! }
let currentPendingBlocks = [...pendingBlocks]
// Filter initial pending
currentPendingBlocks = currentPendingBlocks.filter((id) => {
const block = currentWorkflow.blocks[id]
if (!block) return false
const cfg = getBlock(block.type)
return cfg?.category !== 'triggers'
})
logger.info('Starting resume execution with blocks:', currentPendingBlocks)
// Continue execution until there are no more pending blocks
@@ -821,7 +890,9 @@ export function useWorkflowExecution() {
`Resume iteration ${iterationCount + 1}, executing ${currentPendingBlocks.length} blocks`
)
setExecutingBlockIds(new Set(currentPendingBlocks))
currentResult = await executor!.continueExecution(currentPendingBlocks, currentContext)
setExecutingBlockIds(new Set())
logger.info('Resume iteration result:', {
success: currentResult.success,
@@ -837,9 +908,14 @@ export function useWorkflowExecution() {
break
}
// Update pending blocks for next iteration
// Update pending blocks for next iteration, filtered
if (currentResult.metadata?.pendingBlocks) {
currentPendingBlocks = currentResult.metadata.pendingBlocks
currentPendingBlocks = (currentResult.metadata.pendingBlocks as string[]).filter((id) => {
const block = currentWorkflow.blocks[id]
if (!block) return false
const cfg = getBlock(block.type)
return cfg?.category !== 'triggers'
})
} else {
logger.info('No pending blocks in result, ending resume')
break
@@ -866,6 +942,7 @@ export function useWorkflowExecution() {
// Handle completion
await handleDebugSessionComplete(currentResult)
} catch (error: any) {
setExecutingBlockIds(new Set())
await handleDebugExecutionError(error, 'resume')
}
}, [
@@ -874,9 +951,11 @@ export function useWorkflowExecution() {
pendingBlocks,
activeWorkflowId,
validateDebugState,
resetDebugState,
setIsExecuting,
setExecutingBlockIds,
handleDebugSessionComplete,
handleDebugExecutionError,
currentWorkflow.blocks,
])
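The same trigger filter is applied at three points above (session continuation, a single debug step, and each resume iteration). A small helper capturing the rule; this assumes getBlock is exported from '@/blocks' alongside getAllBlocks, as the surrounding code implies:

import { getBlock } from '@/blocks'
import type { BlockState } from '@/stores/workflows/workflow/types'

// Drop trigger-category blocks from a pending set so a debug step never re-executes a trigger.
// Blocks missing from the current workflow are dropped as well.
function filterExecutablePending(
  pendingIds: string[],
  blocks: Record<string, BlockState>
): string[] {
  return pendingIds.filter((id) => {
    const block = blocks[id]
    if (!block) return false
    return getBlock(block.type)?.category !== 'triggers'
  })
}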
/**

View File

@@ -104,7 +104,6 @@ export async function executeWorkflowWithLogging(
edges: workflowEdges,
loops: workflowLoops,
parallels: workflowParallels,
whiles: workflowWhiles,
} = currentWorkflow
// Filter out blocks without type (these are layout-only blocks)
@@ -132,6 +131,17 @@ export async function executeWorkflowWithLogging(
// Merge subblock states from the appropriate store
const mergedStates = mergeSubblockState(validBlocks)
// Log the current workflow state before filtering
logger.info('🔍 Current workflow state before filtering:', {
totalBlocks: Object.keys(mergedStates).length,
blocks: Object.entries(mergedStates).map(([id, block]) => ({
id,
type: block.type,
triggerMode: block.triggerMode,
category: block.type ? getBlock(block.type)?.category : undefined,
})),
})
// Filter out trigger blocks for manual execution
const filteredStates = Object.entries(mergedStates).reduce(
(acc, [id, block]) => {
@@ -143,16 +153,29 @@ export async function executeWorkflowWithLogging(
const blockConfig = getBlock(block.type)
const isTriggerBlock = blockConfig?.category === 'triggers'
const isInTriggerMode = block.triggerMode === true
// Skip trigger blocks during manual execution
if (!isTriggerBlock) {
// Skip trigger blocks AND blocks in trigger mode during manual execution
if (!isTriggerBlock && !isInTriggerMode) {
acc[id] = block
} else {
logger.info(`🚫 Filtering out block ${id} - trigger category: ${isTriggerBlock}, trigger mode: ${isInTriggerMode}`)
}
return acc
},
{} as typeof mergedStates
)
// Log the filtered state that will be used for execution (not snapshots)
logger.info('📦 Filtered workflow state for execution:', {
totalBlocks: Object.keys(filteredStates).length,
blocks: Object.entries(filteredStates).map(([id, block]) => ({
id,
type: block.type,
triggerMode: block.triggerMode,
})),
})
const currentBlockStates = Object.entries(filteredStates).reduce(
(acc, [id, block]) => {
acc[id] = Object.entries(block.subBlocks).reduce(
@@ -202,8 +225,7 @@ export async function executeWorkflowWithLogging(
filteredStates,
filteredEdges,
workflowLoops,
workflowParallels,
workflowWhiles
workflowParallels
)
// If this is a chat execution, get the selected outputs
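The filtering change above tightens manual execution: a block is now excluded when its registered config is in the 'triggers' category or when the block itself has triggerMode enabled. A self-contained restatement of that predicate with simplified stand-in types:

type ManualExecBlock = { type: string; triggerMode?: boolean }
type LookupCategory = (blockType: string) => string | undefined

// A block runs in a manual execution only if it is neither a trigger block nor in trigger mode.
function isManuallyExecutable(block: ManualExecBlock, categoryOf: LookupCategory): boolean {
  const isTriggerBlock = categoryOf(block.type) === 'triggers'
  const isInTriggerMode = block.triggerMode === true
  return !isTriggerBlock && !isInTriggerMode
}

// Examples (illustrative block types):
// isManuallyExecutable({ type: 'webhook' }, () => 'triggers')                 -> false
// isManuallyExecutable({ type: 'agent', triggerMode: true }, () => 'blocks')  -> false
// isManuallyExecutable({ type: 'agent' }, () => 'blocks')                     -> true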

View File

@@ -15,8 +15,7 @@ const isContainerType = (blockType: string): boolean => {
blockType === 'parallel' ||
blockType === 'loopNode' ||
blockType === 'parallelNode' ||
blockType === 'subflowNode' ||
blockType === 'while'
blockType === 'subflowNode'
)
}
@@ -303,7 +302,7 @@ export const calculateRelativePosition = (
* @param getNodes Function to retrieve all nodes from ReactFlow
* @param updateBlockPosition Function to update the position of a block
* @param updateParentId Function to update the parent ID of a block
* @param resizeLoopNodes Function to resize loop or parallel or while nodes after parent update
 * @param resizeLoopNodes Function to resize loop or parallel nodes after parent update
*/
export const updateNodeParent = (
nodeId: string,
@@ -337,7 +336,7 @@ export const updateNodeParent = (
}
/**
* Checks if a point is inside a loop or parallel or while node
* Checks if a point is inside a loop or parallel node
* @param position Position coordinates to check
* @param getNodes Function to retrieve all nodes from ReactFlow
* @returns The smallest container node containing the point, or null if none
@@ -391,7 +390,7 @@ export const isPointInLoopNode = (
}
/**
* Calculates appropriate dimensions for a loop or parallel or while node based on its children
* Calculates appropriate dimensions for a loop or parallel node based on its children
* @param nodeId ID of the container node
* @param getNodes Function to retrieve all nodes from ReactFlow
* @param blocks Block states from workflow store

View File

@@ -47,7 +47,6 @@ export async function applyAutoLayoutToWorkflow(
edges: any[],
loops: Record<string, any> = {},
parallels: Record<string, any> = {},
whiles: Record<string, any> = {},
options: AutoLayoutOptions = {}
): Promise<{
success: boolean
@@ -153,7 +152,7 @@ export async function applyAutoLayoutAndUpdateStore(
const { useWorkflowStore } = await import('@/stores/workflows/workflow/store')
const workflowStore = useWorkflowStore.getState()
const { blocks, edges, loops = {}, parallels = {}, whiles = {} } = workflowStore
const { blocks, edges, loops = {}, parallels = {} } = workflowStore
logger.info('Auto layout store data:', {
workflowId,
@@ -161,7 +160,6 @@ export async function applyAutoLayoutAndUpdateStore(
edgeCount: edges.length,
loopCount: Object.keys(loops).length,
parallelCount: Object.keys(parallels).length,
whileCount: Object.keys(whiles).length,
})
if (Object.keys(blocks).length === 0) {
@@ -176,7 +174,6 @@ export async function applyAutoLayoutAndUpdateStore(
edges,
loops,
parallels,
whiles,
options
)
@@ -268,5 +265,5 @@ export async function applyAutoLayoutToBlocks(
layoutedBlocks?: Record<string, any>
error?: string
}> {
return applyAutoLayoutToWorkflow('preview', blocks, edges, {}, {}, {}, options)
return applyAutoLayoutToWorkflow('preview', blocks, edges, {}, {}, options)
}

View File

@@ -98,7 +98,7 @@ const WorkflowContent = React.memo(() => {
useStreamCleanup(copilotCleanup)
// Extract workflow data from the abstraction
const { blocks, edges, isDiffMode } = currentWorkflow
const { blocks, edges, loops, parallels, isDiffMode } = currentWorkflow
// Get diff analysis for edge reconstruction
const { diffAnalysis, isShowingDiff, isDiffReady } = useWorkflowDiffStore()
@@ -462,8 +462,6 @@ const WorkflowContent = React.memo(() => {
sourceHandle = 'loop-end-source'
} else if (block.type === 'parallel') {
sourceHandle = 'parallel-end-source'
} else if (block.type === 'while') {
sourceHandle = 'while-end-source'
}
return sourceHandle
@@ -483,19 +481,14 @@ const WorkflowContent = React.memo(() => {
if (type === 'connectionBlock') return
// Special handling for container nodes (loop or parallel)
if (type === 'loop' || type === 'parallel' || type === 'while') {
if (type === 'loop' || type === 'parallel') {
// Create a unique ID and name for the container
const id = crypto.randomUUID()
// Auto-number the blocks based on existing blocks of the same type
const existingBlocksOfType = Object.values(blocks).filter((b) => b.type === type)
const blockNumber = existingBlocksOfType.length + 1
const name =
type === 'loop'
? `Loop ${blockNumber}`
: type === 'parallel'
? `Parallel ${blockNumber}`
: `While ${blockNumber}`
const name = type === 'loop' ? `Loop ${blockNumber}` : `Parallel ${blockNumber}`
// Calculate the center position of the viewport
const centerPosition = project({
@@ -622,30 +615,21 @@ const WorkflowContent = React.memo(() => {
// Clear any drag-over styling
document
.querySelectorAll('.loop-node-drag-over, .parallel-node-drag-over, .while-node-drag-over')
.querySelectorAll('.loop-node-drag-over, .parallel-node-drag-over')
.forEach((el) => {
el.classList.remove(
'loop-node-drag-over',
'parallel-node-drag-over',
'while-node-drag-over'
)
el.classList.remove('loop-node-drag-over', 'parallel-node-drag-over')
})
document.body.style.cursor = ''
// Special handling for container nodes (loop or parallel)
if (data.type === 'loop' || data.type === 'parallel' || data.type === 'while') {
if (data.type === 'loop' || data.type === 'parallel') {
// Create a unique ID and name for the container
const id = crypto.randomUUID()
// Auto-number the blocks based on existing blocks of the same type
const existingBlocksOfType = Object.values(blocks).filter((b) => b.type === data.type)
const blockNumber = existingBlocksOfType.length + 1
const name =
data.type === 'loop'
? `Loop ${blockNumber}`
: data.type === 'parallel'
? `Parallel ${blockNumber}`
: `While ${blockNumber}`
const name = data.type === 'loop' ? `Loop ${blockNumber}` : `Parallel ${blockNumber}`
// Check if we're dropping inside another container
if (containerInfo) {
@@ -707,12 +691,7 @@ const WorkflowContent = React.memo(() => {
}
const blockConfig = getBlock(data.type)
if (
!blockConfig &&
data.type !== 'loop' &&
data.type !== 'parallel' &&
data.type !== 'while'
) {
if (!blockConfig && data.type !== 'loop' && data.type !== 'parallel') {
logger.error('Invalid block type:', { data })
return
}
@@ -724,9 +703,7 @@ const WorkflowContent = React.memo(() => {
? `Loop ${Object.values(blocks).filter((b) => b.type === 'loop').length + 1}`
: data.type === 'parallel'
? `Parallel ${Object.values(blocks).filter((b) => b.type === 'parallel').length + 1}`
: data.type === 'while'
? `While ${Object.values(blocks).filter((b) => b.type === 'while').length + 1}`
: `${blockConfig!.name} ${Object.values(blocks).filter((b) => b.type === data.type).length + 1}`
: `${blockConfig!.name} ${Object.values(blocks).filter((b) => b.type === data.type).length + 1}`
if (containerInfo) {
// Calculate position relative to the container node
@@ -785,9 +762,7 @@ const WorkflowContent = React.memo(() => {
const startSourceHandle =
(containerNode?.data as any)?.kind === 'loop'
? 'loop-start-source'
: data.type === 'parallel'
? 'parallel-start-source'
: 'while-start-source'
: 'parallel-start-source'
addEdge({
id: crypto.randomUUID(),
@@ -858,13 +833,9 @@ const WorkflowContent = React.memo(() => {
// Clear any previous highlighting
document
.querySelectorAll('.loop-node-drag-over, .parallel-node-drag-over, .while-node-drag-over')
.querySelectorAll('.loop-node-drag-over, .parallel-node-drag-over')
.forEach((el) => {
el.classList.remove(
'loop-node-drag-over',
'parallel-node-drag-over',
'while-node-drag-over'
)
el.classList.remove('loop-node-drag-over', 'parallel-node-drag-over')
})
// If hovering over a container node, highlight it
@@ -883,11 +854,6 @@ const WorkflowContent = React.memo(() => {
(containerNode.data as any)?.kind === 'parallel'
) {
containerElement.classList.add('parallel-node-drag-over')
} else if (
containerNode?.type === 'subflowNode' &&
(containerNode.data as any)?.kind === 'while'
) {
containerElement.classList.add('while-node-drag-over')
}
document.body.style.cursor = 'copy'
}
@@ -1017,7 +983,7 @@ const WorkflowContent = React.memo(() => {
}
// Handle container nodes differently
if (block.type === 'loop' || block.type === 'parallel' || block.type === 'while') {
if (block.type === 'loop' || block.type === 'parallel') {
const hasNestedError = nestedSubflowErrors.has(block.id)
nodeArray.push({
id: block.id,
@@ -1031,7 +997,7 @@ const WorkflowContent = React.memo(() => {
width: block.data?.width || 500,
height: block.data?.height || 300,
hasNestedError,
kind: block.type === 'loop' ? 'loop' : block.type === 'parallel' ? 'parallel' : 'while',
kind: block.type === 'loop' ? 'loop' : 'parallel',
},
})
return
@@ -1178,8 +1144,7 @@ const WorkflowContent = React.memo(() => {
const sourceParentId =
sourceNode.parentId ||
(connection.sourceHandle === 'loop-start-source' ||
connection.sourceHandle === 'parallel-start-source' ||
connection.sourceHandle === 'while-start-source'
connection.sourceHandle === 'parallel-start-source'
? connection.source
: undefined)
const targetParentId = targetNode.parentId
@@ -1190,8 +1155,7 @@ const WorkflowContent = React.memo(() => {
// Special case for container start source: Always allow connections to nodes within the same container
if (
(connection.sourceHandle === 'loop-start-source' ||
connection.sourceHandle === 'parallel-start-source' ||
connection.sourceHandle === 'while-start-source') &&
connection.sourceHandle === 'parallel-start-source') &&
targetNode.parentId === sourceNode.id
) {
// This is a connection from container start to a node inside the container - always allow
@@ -1258,11 +1222,7 @@ const WorkflowContent = React.memo(() => {
if (potentialParentId) {
const prevElement = document.querySelector(`[data-id="${potentialParentId}"]`)
if (prevElement) {
prevElement.classList.remove(
'loop-node-drag-over',
'parallel-node-drag-over',
'while-node-drag-over'
)
prevElement.classList.remove('loop-node-drag-over', 'parallel-node-drag-over')
}
setPotentialParentId(null)
document.body.style.cursor = ''
@@ -1382,11 +1342,6 @@ const WorkflowContent = React.memo(() => {
(bestContainerMatch.container.data as any)?.kind === 'parallel'
) {
containerElement.classList.add('parallel-node-drag-over')
} else if (
bestContainerMatch.container.type === 'subflowNode' &&
(bestContainerMatch.container.data as any)?.kind === 'while'
) {
containerElement.classList.add('while-node-drag-over')
}
document.body.style.cursor = 'copy'
}
@@ -1395,11 +1350,7 @@ const WorkflowContent = React.memo(() => {
if (potentialParentId) {
const prevElement = document.querySelector(`[data-id="${potentialParentId}"]`)
if (prevElement) {
prevElement.classList.remove(
'loop-node-drag-over',
'parallel-node-drag-over',
'while-node-drag-over'
)
prevElement.classList.remove('loop-node-drag-over', 'parallel-node-drag-over')
}
setPotentialParentId(null)
document.body.style.cursor = ''
@@ -1431,15 +1382,9 @@ const WorkflowContent = React.memo(() => {
const onNodeDragStop = useCallback(
(_event: React.MouseEvent, node: any) => {
// Clear UI effects
document
.querySelectorAll('.loop-node-drag-over, .parallel-node-drag-over, .while-node-drag-over')
.forEach((el) => {
el.classList.remove(
'loop-node-drag-over',
'parallel-node-drag-over',
'while-node-drag-over'
)
})
document.querySelectorAll('.loop-node-drag-over, .parallel-node-drag-over').forEach((el) => {
el.classList.remove('loop-node-drag-over', 'parallel-node-drag-over')
})
document.body.style.cursor = ''
// Emit collaborative position update for the final position
@@ -1532,9 +1477,7 @@ const WorkflowContent = React.memo(() => {
const startSourceHandle =
(containerNode?.data as any)?.kind === 'loop'
? 'loop-start-source'
: (containerNode?.data as any)?.kind === 'parallel'
? 'parallel-start-source'
: 'while-start-source'
: 'parallel-start-source'
addEdge({
id: crypto.randomUUID(),
@@ -1692,6 +1635,8 @@ const WorkflowContent = React.memo(() => {
)
}
const isReadOnly = currentWorkflow.isDebugCanvasMode === true ? true : !effectivePermissions.canEdit
return (
<div className='flex h-screen w-full flex-col overflow-hidden'>
<div className='relative h-full w-full flex-1 transition-all duration-200'>
@@ -1707,11 +1652,11 @@ const WorkflowContent = React.memo(() => {
edges={edgesWithSelection}
onNodesChange={onNodesChange}
onEdgesChange={onEdgesChange}
onConnect={effectivePermissions.canEdit ? onConnect : undefined}
onConnect={isReadOnly ? undefined : onConnect}
nodeTypes={nodeTypes}
edgeTypes={edgeTypes}
onDrop={effectivePermissions.canEdit ? onDrop : undefined}
onDragOver={effectivePermissions.canEdit ? onDragOver : undefined}
onDrop={isReadOnly ? undefined : onDrop}
onDragOver={isReadOnly ? undefined : onDragOver}
fitView
minZoom={0.1}
maxZoom={1.3}
@@ -1731,22 +1676,22 @@ const WorkflowContent = React.memo(() => {
onEdgeClick={onEdgeClick}
elementsSelectable={true}
selectNodesOnDrag={false}
nodesConnectable={effectivePermissions.canEdit}
nodesDraggable={effectivePermissions.canEdit}
nodesConnectable={!isReadOnly}
nodesDraggable={!isReadOnly}
draggable={false}
noWheelClassName='allow-scroll'
edgesFocusable={true}
edgesUpdatable={effectivePermissions.canEdit}
edgesUpdatable={!isReadOnly}
className='workflow-container h-full'
onNodeDrag={effectivePermissions.canEdit ? onNodeDrag : undefined}
onNodeDragStop={effectivePermissions.canEdit ? onNodeDragStop : undefined}
onNodeDragStart={effectivePermissions.canEdit ? onNodeDragStart : undefined}
onNodeDrag={isReadOnly ? undefined : onNodeDrag}
onNodeDragStop={isReadOnly ? undefined : onNodeDragStop}
onNodeDragStart={isReadOnly ? undefined : onNodeDragStart}
snapToGrid={false}
snapGrid={[20, 20]}
elevateEdgesOnSelect={true}
elevateNodesOnSelect={true}
autoPanOnConnect={effectivePermissions.canEdit}
autoPanOnNodeDrag={effectivePermissions.canEdit}
autoPanOnConnect={!isReadOnly}
autoPanOnNodeDrag={!isReadOnly}
>
<Background
color='hsl(var(--workflow-dots))'
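The isReadOnly flag above folds two independent conditions into one gate: the debug canvas forces a read-only surface even for users who can edit, and otherwise the workspace permission decides. A hedged sketch of the gating pattern with placeholder handler names (the real props are the ReactFlow ones shown above):

interface InteractionGate {
  canEdit: boolean
  isDebugCanvasMode?: boolean
}

type Handler = (...args: any[]) => void

// Returns interaction-related ReactFlow props; read-only mode strips all edit handlers.
function interactionProps(
  gate: InteractionGate,
  handlers: { onConnect: Handler; onDrop: Handler; onDragOver: Handler }
) {
  const isReadOnly = gate.isDebugCanvasMode === true || !gate.canEdit
  return {
    onConnect: isReadOnly ? undefined : handlers.onConnect,
    onDrop: isReadOnly ? undefined : handlers.onDrop,
    onDragOver: isReadOnly ? undefined : handlers.onDragOver,
    nodesConnectable: !isReadOnly,
    nodesDraggable: !isReadOnly,
    edgesUpdatable: !isReadOnly,
    autoPanOnConnect: !isReadOnly,
    autoPanOnNodeDrag: !isReadOnly,
  }
}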

View File

@@ -148,7 +148,7 @@ export function SearchModal({
})
)
// Add special blocks (loop, parallel, and while)
// Add special blocks (loop and parallel)
const specialBlocks: BlockItem[] = [
{
id: 'loop',
@@ -166,14 +166,6 @@ export function SearchModal({
bgColor: '#FEE12B',
type: 'parallel',
},
{
id: 'while',
name: 'While',
description: 'While Loop',
icon: RepeatIcon,
bgColor: '#FF9F43',
type: 'while',
},
]
return [...regularBlocks, ...specialBlocks].sort((a, b) => a.name.localeCompare(b.name))

View File

@@ -26,7 +26,7 @@ import {
AlertDialogTitle,
} from '@/components/ui/alert-dialog'
import { ScrollArea } from '@/components/ui/scroll-area'
import { MAX_TAG_SLOTS } from '@/lib/constants/knowledge'
import { MAX_TAG_SLOTS } from '@/lib/knowledge/consts'
import { createLogger } from '@/lib/logs/console/logger'
import { getDocumentIcon } from '@/app/workspace/[workspaceId]/knowledge/components/icons/document-icons'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'

View File

@@ -17,7 +17,7 @@ import {
SelectValue,
} from '@/components/ui'
import { ScrollArea } from '@/components/ui/scroll-area'
import { MAX_TAG_SLOTS, TAG_SLOTS, type TagSlot } from '@/lib/constants/knowledge'
import { MAX_TAG_SLOTS, TAG_SLOTS, type TagSlot } from '@/lib/knowledge/consts'
import { createLogger } from '@/lib/logs/console/logger'
import type { DocumentTag } from '@/app/workspace/[workspaceId]/knowledge/components/document-tag-entry/document-tag-entry'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'

View File

@@ -1,87 +0,0 @@
import { useCallback } from 'react'
import { Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui/tooltip'
import { cn } from '@/lib/utils'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
import { WhileTool } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/while/while-config'
type WhileToolbarItemProps = {
disabled?: boolean
}
// Custom component for the While Tool
export default function WhileToolbarItem({ disabled = false }: WhileToolbarItemProps) {
const userPermissions = useUserPermissionsContext()
const handleDragStart = (e: React.DragEvent) => {
if (disabled) {
e.preventDefault()
return
}
// Only send the essential data for the while node
const simplifiedData = {
type: 'while',
}
e.dataTransfer.setData('application/json', JSON.stringify(simplifiedData))
e.dataTransfer.effectAllowed = 'move'
}
// Handle click to add while block
const handleClick = useCallback(
(e: React.MouseEvent) => {
if (disabled) return
// Dispatch a custom event to be caught by the workflow component
const event = new CustomEvent('add-block-from-toolbar', {
detail: {
type: 'while',
clientX: e.clientX,
clientY: e.clientY,
},
})
window.dispatchEvent(event)
},
[disabled]
)
const blockContent = (
<div
draggable={!disabled}
onDragStart={handleDragStart}
onClick={handleClick}
className={cn(
'group flex h-8 items-center gap-[10px] rounded-[8px] p-2 transition-colors',
disabled
? 'cursor-not-allowed opacity-60'
: 'cursor-pointer hover:bg-muted active:cursor-grabbing'
)}
>
<div
className='relative flex h-6 w-6 shrink-0 items-center justify-center overflow-hidden rounded-[6px]'
style={{ backgroundColor: WhileTool.bgColor }}
>
<WhileTool.icon
className={cn(
'h-[14px] w-[14px] text-white transition-transform duration-200',
!disabled && 'group-hover:scale-110'
)}
/>
</div>
<span className='font-medium text-sm leading-none'>{WhileTool.name}</span>
</div>
)
if (disabled) {
return (
<Tooltip>
<TooltipTrigger asChild>{blockContent}</TooltipTrigger>
<TooltipContent>
{userPermissions.isOfflineMode
? 'Connection lost - please refresh'
: 'Edit permissions required to add blocks'}
</TooltipContent>
</Tooltip>
)
}
return blockContent
}

View File

@@ -7,7 +7,6 @@ import { ScrollArea } from '@/components/ui/scroll-area'
import { ToolbarBlock } from '@/app/workspace/[workspaceId]/w/components/sidebar/components/toolbar/components/toolbar-block/toolbar-block'
import LoopToolbarItem from '@/app/workspace/[workspaceId]/w/components/sidebar/components/toolbar/components/toolbar-loop-block/toolbar-loop-block'
import ParallelToolbarItem from '@/app/workspace/[workspaceId]/w/components/sidebar/components/toolbar/components/toolbar-parallel-block/toolbar-parallel-block'
import WhileToolbarItem from '@/app/workspace/[workspaceId]/w/components/sidebar/components/toolbar/components/toolbar-while-block/toolbar-while-block'
import { getAllBlocks } from '@/blocks'
import type { WorkspaceUserPermissions } from '@/hooks/use-user-permissions'
@@ -55,7 +54,7 @@ export function Toolbar({ userPermissions, isWorkspaceSelectorVisible = false }:
}))
.sort((a, b) => a.name.localeCompare(b.name))
// Create special blocks (loop, parallel, and while) if they match search
// Create special blocks (loop and parallel) if they match search
const specialBlockItems: BlockItem[] = []
if (!searchQuery.trim() || 'loop'.toLowerCase().includes(searchQuery.toLowerCase())) {
@@ -74,14 +73,6 @@ export function Toolbar({ userPermissions, isWorkspaceSelectorVisible = false }:
})
}
if (!searchQuery.trim() || 'while'.toLowerCase().includes(searchQuery.toLowerCase())) {
specialBlockItems.push({
name: 'While',
type: 'while',
isCustom: true,
})
}
// Sort special blocks alphabetically
specialBlockItems.sort((a, b) => a.name.localeCompare(b.name))
@@ -137,7 +128,7 @@ export function Toolbar({ userPermissions, isWorkspaceSelectorVisible = false }:
/>
))}
{/* Special Blocks Section (Loop, Parallel, and While) */}
{/* Special Blocks Section (Loop & Parallel) */}
{specialBlocks.map((block) => {
if (block.type === 'loop') {
return <LoopToolbarItem key={block.type} disabled={!userPermissions.canEdit} />
@@ -145,9 +136,6 @@ export function Toolbar({ userPermissions, isWorkspaceSelectorVisible = false }:
if (block.type === 'parallel') {
return <ParallelToolbarItem key={block.type} disabled={!userPermissions.canEdit} />
}
if (block.type === 'while') {
return <WhileToolbarItem key={block.type} disabled={!userPermissions.canEdit} />
}
return null
})}

View File

@@ -83,14 +83,6 @@ export function WorkflowPreview({
}
}, [workflowState.parallels, isValidWorkflowState])
const whilesStructure = useMemo(() => {
if (!isValidWorkflowState) return { count: 0, ids: '' }
return {
count: Object.keys(workflowState.whiles || {}).length,
ids: Object.keys(workflowState.whiles || {}).join(','),
}
}, [workflowState.whiles, isValidWorkflowState])
const edgesStructure = useMemo(() => {
if (!isValidWorkflowState) return { count: 0, ids: '' }
return {
@@ -174,26 +166,6 @@ export function WorkflowPreview({
return
}
if (block.type === 'while') {
nodeArray.push({
id: block.id,
type: 'subflowNode',
position: absolutePosition,
parentId: block.data?.parentId,
extent: block.data?.extent || undefined,
draggable: false,
data: {
...block.data,
width: block.data?.width || 500,
height: block.data?.height || 300,
state: 'valid',
isPreview: true,
kind: 'while',
},
})
return
}
const blockConfig = getBlock(block.type)
if (!blockConfig) {
logger.error(`No configuration found for block type: ${block.type}`, { blockId })
@@ -257,7 +229,6 @@ export function WorkflowPreview({
blocksStructure,
loopsStructure,
parallelsStructure,
whilesStructure,
showSubBlocks,
workflowState.blocks,
isValidWorkflowState,

View File

@@ -1,4 +1,5 @@
import { task } from '@trigger.dev/sdk'
import { env } from '@/lib/env'
import { processDocumentAsync } from '@/lib/knowledge/documents/service'
import { createLogger } from '@/lib/logs/console/logger'
@@ -25,15 +26,15 @@ export type DocumentProcessingPayload = {
export const processDocument = task({
id: 'knowledge-process-document',
maxDuration: 300,
maxDuration: env.KB_CONFIG_MAX_DURATION || 300,
retry: {
maxAttempts: 3,
factor: 2,
minTimeoutInMs: 1000,
maxTimeoutInMs: 10000,
maxAttempts: env.KB_CONFIG_MAX_ATTEMPTS || 3,
factor: env.KB_CONFIG_RETRY_FACTOR || 2,
minTimeoutInMs: env.KB_CONFIG_MIN_TIMEOUT || 1000,
maxTimeoutInMs: env.KB_CONFIG_MAX_TIMEOUT || 10000,
},
queue: {
concurrencyLimit: 20,
concurrencyLimit: env.KB_CONFIG_CONCURRENCY_LIMIT || 20,
name: 'document-processing-queue',
},
run: async (payload: DocumentProcessingPayload) => {
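One nuance with the || fallbacks above: a configured value of 0 (or any falsy value) silently falls back to the hard-coded default. If zero should ever be a legitimate setting, nullish coalescing is the safer operator. A small illustration with a stand-in env object, not the real '@/lib/env' schema:

// Illustration only: with ||, falsy-but-present values lose to the default; with ??, only null/undefined do.
const fakeEnv: { KB_CONFIG_RETRY_FACTOR?: number } = { KB_CONFIG_RETRY_FACTOR: 0 }

const withOr = fakeEnv.KB_CONFIG_RETRY_FACTOR || 2      // -> 2 (0 is discarded)
const withNullish = fakeEnv.KB_CONFIG_RETRY_FACTOR ?? 2 // -> 0 (0 is respected)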

View File

@@ -99,6 +99,10 @@ export async function executeWebhookJob(payload: WebhookExecutionPayload) {
userId: payload.userId,
workspaceId: '', // TODO: Get from workflow if needed
variables: decryptedEnvVars,
initialInput: payload.body || {},
triggerData: { provider: payload.provider, blockId: payload.blockId },
startBlockId: payload.blockId,
executionType: 'webhook',
})
// Merge subblock states (matching workflow-execution pattern)
@@ -129,7 +133,7 @@ export async function executeWebhookJob(payload: WebhookExecutionPayload) {
edges,
loops || {},
parallels || {},
{} // Enable validation during execution
true // Enable validation during execution
)
// Handle special Airtable case

View File

@@ -118,7 +118,7 @@ export async function executeWorkflowJob(payload: WorkflowExecutionPayload) {
edges,
loops || {},
parallels || {},
{} // Enable validation during execution
true // Enable validation during execution
)
// Create executor and execute

View File

@@ -0,0 +1,387 @@
import { MySQLIcon } from '@/components/icons'
import type { BlockConfig } from '@/blocks/types'
import type { MySQLResponse } from '@/tools/mysql/types'
export const MySQLBlock: BlockConfig<MySQLResponse> = {
type: 'mysql',
name: 'MySQL',
description: 'Connect to MySQL database',
longDescription:
'Connect to any MySQL database to execute queries, manage data, and perform database operations. Supports SELECT, INSERT, UPDATE, DELETE operations with secure connection handling.',
docsLink: 'https://docs.sim.ai/tools/mysql',
category: 'tools',
bgColor: '#E0E0E0',
icon: MySQLIcon,
subBlocks: [
{
id: 'operation',
title: 'Operation',
type: 'dropdown',
layout: 'full',
options: [
{ label: 'Query (SELECT)', id: 'query' },
{ label: 'Insert Data', id: 'insert' },
{ label: 'Update Data', id: 'update' },
{ label: 'Delete Data', id: 'delete' },
{ label: 'Execute Raw SQL', id: 'execute' },
],
value: () => 'query',
},
{
id: 'host',
title: 'Host',
type: 'short-input',
layout: 'full',
placeholder: 'localhost or your.database.host',
required: true,
},
{
id: 'port',
title: 'Port',
type: 'short-input',
layout: 'full',
placeholder: '3306',
value: () => '3306',
required: true,
},
{
id: 'database',
title: 'Database Name',
type: 'short-input',
layout: 'full',
placeholder: 'your_database',
required: true,
},
{
id: 'username',
title: 'Username',
type: 'short-input',
layout: 'full',
placeholder: 'root',
required: true,
},
{
id: 'password',
title: 'Password',
type: 'short-input',
layout: 'full',
password: true,
placeholder: 'Your database password',
required: true,
},
{
id: 'ssl',
title: 'SSL Mode',
type: 'dropdown',
layout: 'full',
options: [
{ label: 'Disabled', id: 'disabled' },
{ label: 'Required', id: 'required' },
{ label: 'Preferred', id: 'preferred' },
],
value: () => 'preferred',
},
// Table field for insert/update/delete operations
{
id: 'table',
title: 'Table Name',
type: 'short-input',
layout: 'full',
placeholder: 'users',
condition: { field: 'operation', value: 'insert' },
required: true,
},
{
id: 'table',
title: 'Table Name',
type: 'short-input',
layout: 'full',
placeholder: 'users',
condition: { field: 'operation', value: 'update' },
required: true,
},
{
id: 'table',
title: 'Table Name',
type: 'short-input',
layout: 'full',
placeholder: 'users',
condition: { field: 'operation', value: 'delete' },
required: true,
},
// SQL Query field
{
id: 'query',
title: 'SQL Query',
type: 'code',
layout: 'full',
placeholder: 'SELECT * FROM users WHERE active = true',
condition: { field: 'operation', value: 'query' },
required: true,
wandConfig: {
enabled: true,
maintainHistory: true,
prompt: `You are an expert MySQL database developer. Write MySQL SQL queries based on the user's request.
### CONTEXT
{context}
### CRITICAL INSTRUCTION
Return ONLY the SQL query. Do not include any explanations, markdown formatting, comments, or additional text. Just the raw SQL query.
### QUERY GUIDELINES
1. **Syntax**: Use MySQL-specific syntax and functions
2. **Performance**: Write efficient queries with proper indexing considerations
3. **Security**: Use parameterized queries when applicable
4. **Readability**: Format queries with proper indentation and spacing
5. **Best Practices**: Follow MySQL naming conventions
### MYSQL FEATURES
- Use MySQL-specific functions (IFNULL, DATE_FORMAT, CONCAT, etc.)
- Leverage MySQL features like GROUP_CONCAT, AUTO_INCREMENT
- Use proper MySQL data types (VARCHAR, DATETIME, DECIMAL, JSON, etc.)
- Include appropriate LIMIT clauses for large result sets
### EXAMPLES
**Simple Select**: "Get all active users"
→ SELECT id, name, email, created_at
FROM users
WHERE active = 1
ORDER BY created_at DESC;
**Complex Join**: "Get users with their order counts and total spent"
→ SELECT
u.id,
u.name,
u.email,
COUNT(o.id) as order_count,
IFNULL(SUM(o.total), 0) as total_spent
FROM users u
LEFT JOIN orders o ON u.id = o.user_id
WHERE u.active = 1
GROUP BY u.id, u.name, u.email
HAVING COUNT(o.id) > 0
ORDER BY total_spent DESC;
**With Subquery**: "Get top 10 products by sales"
→ SELECT
p.id,
p.name,
(SELECT SUM(oi.quantity * oi.price)
FROM order_items oi
JOIN orders o ON oi.order_id = o.id
WHERE oi.product_id = p.id
AND o.created_at >= DATE_SUB(NOW(), INTERVAL 30 DAY)
) as total_sales
FROM products p
WHERE p.active = 1
ORDER BY total_sales DESC
LIMIT 10;
### REMEMBER
Return ONLY the SQL query - no explanations, no markdown, no extra text.`,
placeholder: 'Describe the SQL query you need...',
generationType: 'sql-query',
},
},
{
id: 'query',
title: 'SQL Query',
type: 'code',
layout: 'full',
placeholder: 'SELECT * FROM table_name',
condition: { field: 'operation', value: 'execute' },
required: true,
wandConfig: {
enabled: true,
maintainHistory: true,
prompt: `You are an expert MySQL database developer. Write MySQL SQL queries based on the user's request.
### CONTEXT
{context}
### CRITICAL INSTRUCTION
Return ONLY the SQL query. Do not include any explanations, markdown formatting, comments, or additional text. Just the raw SQL query.
### QUERY GUIDELINES
1. **Syntax**: Use MySQL-specific syntax and functions
2. **Performance**: Write efficient queries with proper indexing considerations
3. **Security**: Use parameterized queries when applicable
4. **Readability**: Format queries with proper indentation and spacing
5. **Best Practices**: Follow MySQL naming conventions
### MYSQL FEATURES
- Use MySQL-specific functions (IFNULL, DATE_FORMAT, CONCAT, etc.)
- Leverage MySQL features like GROUP_CONCAT, AUTO_INCREMENT
- Use proper MySQL data types (VARCHAR, DATETIME, DECIMAL, JSON, etc.)
- Include appropriate LIMIT clauses for large result sets
### EXAMPLES
**Simple Select**: "Get all active users"
→ SELECT id, name, email, created_at
FROM users
WHERE active = 1
ORDER BY created_at DESC;
**Complex Join**: "Get users with their order counts and total spent"
→ SELECT
u.id,
u.name,
u.email,
COUNT(o.id) as order_count,
IFNULL(SUM(o.total), 0) as total_spent
FROM users u
LEFT JOIN orders o ON u.id = o.user_id
WHERE u.active = 1
GROUP BY u.id, u.name, u.email
HAVING COUNT(o.id) > 0
ORDER BY total_spent DESC;
**With Subquery**: "Get top 10 products by sales"
→ SELECT
p.id,
p.name,
(SELECT SUM(oi.quantity * oi.price)
FROM order_items oi
JOIN orders o ON oi.order_id = o.id
WHERE oi.product_id = p.id
AND o.created_at >= DATE_SUB(NOW(), INTERVAL 30 DAY)
) as total_sales
FROM products p
WHERE p.active = 1
ORDER BY total_sales DESC
LIMIT 10;
### REMEMBER
Return ONLY the SQL query - no explanations, no markdown, no extra text.`,
placeholder: 'Describe the SQL query you need...',
generationType: 'sql-query',
},
},
// Data for insert operations
{
id: 'data',
title: 'Data (JSON)',
type: 'code',
layout: 'full',
placeholder: '{\n "name": "John Doe",\n "email": "john@example.com",\n "active": true\n}',
condition: { field: 'operation', value: 'insert' },
required: true,
},
// Set clause for updates
{
id: 'data',
title: 'Update Data (JSON)',
type: 'code',
layout: 'full',
placeholder: '{\n "name": "Jane Doe",\n "email": "jane@example.com"\n}',
condition: { field: 'operation', value: 'update' },
required: true,
},
// Where clause for update/delete
{
id: 'where',
title: 'WHERE Condition',
type: 'short-input',
layout: 'full',
placeholder: 'id = 1',
condition: { field: 'operation', value: 'update' },
required: true,
},
{
id: 'where',
title: 'WHERE Condition',
type: 'short-input',
layout: 'full',
placeholder: 'id = 1',
condition: { field: 'operation', value: 'delete' },
required: true,
},
],
tools: {
access: ['mysql_query', 'mysql_insert', 'mysql_update', 'mysql_delete', 'mysql_execute'],
config: {
tool: (params) => {
switch (params.operation) {
case 'query':
return 'mysql_query'
case 'insert':
return 'mysql_insert'
case 'update':
return 'mysql_update'
case 'delete':
return 'mysql_delete'
case 'execute':
return 'mysql_execute'
default:
throw new Error(`Invalid MySQL operation: ${params.operation}`)
}
},
params: (params) => {
const { operation, data, ...rest } = params
// Parse JSON data if it's a string
let parsedData
if (data && typeof data === 'string' && data.trim()) {
try {
parsedData = JSON.parse(data)
} catch (parseError) {
const errorMsg = parseError instanceof Error ? parseError.message : 'Unknown JSON error'
throw new Error(`Invalid JSON data format: ${errorMsg}. Please check your JSON syntax.`)
}
} else if (data && typeof data === 'object') {
parsedData = data
}
// Build connection config
const connectionConfig = {
host: rest.host,
port: typeof rest.port === 'string' ? Number.parseInt(rest.port, 10) : rest.port || 3306,
database: rest.database,
username: rest.username,
password: rest.password,
ssl: rest.ssl || 'preferred',
}
// Build params object
const result: any = { ...connectionConfig }
if (rest.table) result.table = rest.table
if (rest.query) result.query = rest.query
if (rest.where) result.where = rest.where
if (parsedData !== undefined) result.data = parsedData
return result
},
},
},
inputs: {
operation: { type: 'string', description: 'Database operation to perform' },
host: { type: 'string', description: 'Database host' },
port: { type: 'string', description: 'Database port' },
database: { type: 'string', description: 'Database name' },
username: { type: 'string', description: 'Database username' },
password: { type: 'string', description: 'Database password' },
ssl: { type: 'string', description: 'SSL mode' },
table: { type: 'string', description: 'Table name' },
query: { type: 'string', description: 'SQL query to execute' },
data: { type: 'json', description: 'Data for insert/update operations' },
where: { type: 'string', description: 'WHERE clause for update/delete' },
},
outputs: {
message: {
type: 'string',
description: 'Success or error message describing the operation outcome',
},
rows: {
type: 'array',
description: 'Array of rows returned from the query',
},
rowCount: {
type: 'number',
description: 'Number of rows affected by the operation',
},
},
}
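The params mapper in the tools.config section above does three things: parses stringified JSON from the UI with a readable error, coerces the port to a number, and forwards only the fields relevant to the chosen operation. A standalone restatement of that mapping (the trailing example values are illustrative):

function buildMySQLParams(params: Record<string, any>) {
  // operation only selects the tool id; it is not forwarded to the tool itself.
  const { operation, data, ...rest } = params

  let parsedData: unknown
  if (typeof data === 'string' && data.trim()) {
    try {
      parsedData = JSON.parse(data)
    } catch (e) {
      const errorMsg = e instanceof Error ? e.message : 'Unknown JSON error'
      throw new Error(`Invalid JSON data format: ${errorMsg}. Please check your JSON syntax.`)
    }
  } else if (data && typeof data === 'object') {
    parsedData = data
  }

  return {
    host: rest.host,
    port: typeof rest.port === 'string' ? Number.parseInt(rest.port, 10) : rest.port || 3306,
    database: rest.database,
    username: rest.username,
    password: rest.password,
    ssl: rest.ssl || 'preferred',
    ...(rest.table ? { table: rest.table } : {}),
    ...(rest.query ? { query: rest.query } : {}),
    ...(rest.where ? { where: rest.where } : {}),
    ...(parsedData !== undefined ? { data: parsedData } : {}),
  }
}

// Example: an insert with string JSON coming from the UI
// buildMySQLParams({ operation: 'insert', host: 'localhost', port: '3306', database: 'app',
//   username: 'root', password: 'secret', table: 'users', data: '{"name":"John Doe","active":true}' })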

View File

@@ -0,0 +1,109 @@
import { ParallelIcon } from '@/components/icons'
import type { BlockConfig } from '@/blocks/types'
import type { ToolResponse } from '@/tools/types'
export const ParallelBlock: BlockConfig<ToolResponse> = {
type: 'parallel_ai',
name: 'Parallel AI',
description: 'Search with Parallel AI',
longDescription:
"Search the web using Parallel AI's advanced search capabilities. Get comprehensive results with intelligent processing and content extraction.",
docsLink: 'https://docs.parallel.ai/search-api/search-quickstart',
category: 'tools',
bgColor: '#E0E0E0',
icon: ParallelIcon,
subBlocks: [
{
id: 'objective',
title: 'Search Objective',
type: 'long-input',
layout: 'full',
placeholder: "When was the United Nations established? Prefer UN's websites.",
required: true,
},
{
id: 'search_queries',
title: 'Search Queries',
type: 'long-input',
layout: 'full',
placeholder:
'Enter search queries separated by commas (e.g., "Founding year UN", "Year of founding United Nations")',
required: false,
},
{
id: 'processor',
title: 'Processor',
type: 'dropdown',
layout: 'full',
options: [
{ label: 'Base', id: 'base' },
{ label: 'Pro', id: 'pro' },
],
value: () => 'base',
},
{
id: 'max_results',
title: 'Max Results',
type: 'short-input',
layout: 'half',
placeholder: '5',
},
{
id: 'max_chars_per_result',
title: 'Max Chars',
type: 'short-input',
layout: 'half',
placeholder: '1500',
},
{
id: 'apiKey',
title: 'API Key',
type: 'short-input',
layout: 'full',
placeholder: 'Enter your Parallel AI API key',
password: true,
required: true,
},
],
tools: {
access: ['parallel_search'],
config: {
tool: (params) => {
// Convert search_queries from comma-separated string to array (if provided)
if (params.search_queries && typeof params.search_queries === 'string') {
const queries = params.search_queries
.split(',')
.map((query: string) => query.trim())
.filter((query: string) => query.length > 0)
// Only set if we have actual queries
if (queries.length > 0) {
params.search_queries = queries
} else {
params.search_queries = undefined
}
}
// Convert numeric parameters
if (params.max_results) {
params.max_results = Number(params.max_results)
}
if (params.max_chars_per_result) {
params.max_chars_per_result = Number(params.max_chars_per_result)
}
return 'parallel_search'
},
},
},
inputs: {
objective: { type: 'string', description: 'Search objective or question' },
search_queries: { type: 'string', description: 'Comma-separated search queries' },
processor: { type: 'string', description: 'Processing method' },
max_results: { type: 'number', description: 'Maximum number of results' },
max_chars_per_result: { type: 'number', description: 'Maximum characters per result' },
apiKey: { type: 'string', description: 'Parallel AI API key' },
},
outputs: {
results: { type: 'array', description: 'Search results with excerpts from relevant pages' },
},
}
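The tool selector above doubles as a parameter normalizer: a comma-separated search_queries string becomes a trimmed array (or undefined when empty), and the numeric knobs are coerced from strings. A side-effect-free restatement of that logic; the helper name is illustrative:

function normalizeParallelSearchParams(params: {
  search_queries?: string | string[]
  max_results?: string | number
  max_chars_per_result?: string | number
}) {
  let search_queries: string[] | undefined
  if (typeof params.search_queries === 'string') {
    // Split on commas, trim whitespace, and drop empty entries.
    const queries = params.search_queries
      .split(',')
      .map((q) => q.trim())
      .filter((q) => q.length > 0)
    search_queries = queries.length > 0 ? queries : undefined
  } else {
    search_queries = params.search_queries
  }
  return {
    ...params,
    search_queries,
    max_results: params.max_results ? Number(params.max_results) : undefined,
    max_chars_per_result: params.max_chars_per_result
      ? Number(params.max_chars_per_result)
      : undefined,
  }
}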

View File

@@ -0,0 +1,395 @@
import { PostgresIcon } from '@/components/icons'
import type { BlockConfig } from '@/blocks/types'
import type { PostgresResponse } from '@/tools/postgresql/types'
export const PostgreSQLBlock: BlockConfig<PostgresResponse> = {
type: 'postgresql',
name: 'PostgreSQL',
description: 'Connect to PostgreSQL database',
longDescription:
'Connect to any PostgreSQL database to execute queries, manage data, and perform database operations. Supports SELECT, INSERT, UPDATE, DELETE operations with secure connection handling.',
docsLink: 'https://docs.sim.ai/tools/postgresql',
category: 'tools',
bgColor: '#336791',
icon: PostgresIcon,
subBlocks: [
{
id: 'operation',
title: 'Operation',
type: 'dropdown',
layout: 'full',
options: [
{ label: 'Query (SELECT)', id: 'query' },
{ label: 'Insert Data', id: 'insert' },
{ label: 'Update Data', id: 'update' },
{ label: 'Delete Data', id: 'delete' },
{ label: 'Execute Raw SQL', id: 'execute' },
],
value: () => 'query',
},
{
id: 'host',
title: 'Host',
type: 'short-input',
layout: 'full',
placeholder: 'localhost or your.database.host',
required: true,
},
{
id: 'port',
title: 'Port',
type: 'short-input',
layout: 'full',
placeholder: '5432',
value: () => '5432',
required: true,
},
{
id: 'database',
title: 'Database Name',
type: 'short-input',
layout: 'full',
placeholder: 'your_database',
required: true,
},
{
id: 'username',
title: 'Username',
type: 'short-input',
layout: 'full',
placeholder: 'postgres',
required: true,
},
{
id: 'password',
title: 'Password',
type: 'short-input',
layout: 'full',
password: true,
placeholder: 'Your database password',
required: true,
},
{
id: 'ssl',
title: 'SSL Mode',
type: 'dropdown',
layout: 'full',
options: [
{ label: 'Disabled', id: 'disabled' },
{ label: 'Required', id: 'required' },
{ label: 'Preferred', id: 'preferred' },
],
value: () => 'preferred',
},
// Table field for insert/update/delete operations
{
id: 'table',
title: 'Table Name',
type: 'short-input',
layout: 'full',
placeholder: 'users',
condition: { field: 'operation', value: 'insert' },
required: true,
},
{
id: 'table',
title: 'Table Name',
type: 'short-input',
layout: 'full',
placeholder: 'users',
condition: { field: 'operation', value: 'update' },
required: true,
},
{
id: 'table',
title: 'Table Name',
type: 'short-input',
layout: 'full',
placeholder: 'users',
condition: { field: 'operation', value: 'delete' },
required: true,
},
// SQL Query field
{
id: 'query',
title: 'SQL Query',
type: 'code',
layout: 'full',
placeholder: 'SELECT * FROM users WHERE active = true',
condition: { field: 'operation', value: 'query' },
required: true,
wandConfig: {
enabled: true,
maintainHistory: true,
prompt: `You are an expert PostgreSQL database developer. Write PostgreSQL SQL queries based on the user's request.
### CONTEXT
{context}
### CRITICAL INSTRUCTION
Return ONLY the SQL query. Do not include any explanations, markdown formatting, comments, or additional text. Just the raw SQL query.
### QUERY GUIDELINES
1. **Syntax**: Use PostgreSQL-specific syntax and functions
2. **Performance**: Write efficient queries with proper indexing considerations
3. **Security**: Use parameterized queries when applicable
4. **Readability**: Format queries with proper indentation and spacing
5. **Best Practices**: Follow PostgreSQL naming conventions
### POSTGRESQL FEATURES
- Use PostgreSQL-specific functions (COALESCE, EXTRACT, etc.)
- Leverage advanced features like CTEs, window functions, arrays
- Use proper PostgreSQL data types (TEXT, TIMESTAMPTZ, JSONB, etc.)
- Include appropriate LIMIT clauses for large result sets
### EXAMPLES
**Simple Select**: "Get all active users"
→ SELECT id, name, email, created_at
FROM users
WHERE active = true
ORDER BY created_at DESC;
**Complex Join**: "Get users with their order counts and total spent"
→ SELECT
u.id,
u.name,
u.email,
COUNT(o.id) as order_count,
COALESCE(SUM(o.total), 0) as total_spent
FROM users u
LEFT JOIN orders o ON u.id = o.user_id
WHERE u.active = true
GROUP BY u.id, u.name, u.email
HAVING COUNT(o.id) > 0
ORDER BY total_spent DESC;
**With CTE**: "Get top 10 products by sales"
→ WITH product_sales AS (
SELECT
p.id,
p.name,
SUM(oi.quantity * oi.price) as total_sales
FROM products p
JOIN order_items oi ON p.id = oi.product_id
JOIN orders o ON oi.order_id = o.id
WHERE o.created_at >= CURRENT_DATE - INTERVAL '30 days'
GROUP BY p.id, p.name
)
SELECT * FROM product_sales
ORDER BY total_sales DESC
LIMIT 10;
### REMEMBER
Return ONLY the SQL query - no explanations, no markdown, no extra text.`,
placeholder: 'Describe the SQL query you need...',
generationType: 'sql-query',
},
},
{
id: 'query',
title: 'SQL Query',
type: 'code',
layout: 'full',
placeholder: 'SELECT * FROM table_name',
condition: { field: 'operation', value: 'execute' },
required: true,
wandConfig: {
enabled: true,
maintainHistory: true,
prompt: `You are an expert PostgreSQL database developer. Write PostgreSQL SQL queries based on the user's request.
### CONTEXT
{context}
### CRITICAL INSTRUCTION
Return ONLY the SQL query. Do not include any explanations, markdown formatting, comments, or additional text. Just the raw SQL query.
### QUERY GUIDELINES
1. **Syntax**: Use PostgreSQL-specific syntax and functions
2. **Performance**: Write efficient queries with proper indexing considerations
3. **Security**: Use parameterized queries when applicable
4. **Readability**: Format queries with proper indentation and spacing
5. **Best Practices**: Follow PostgreSQL naming conventions
### POSTGRESQL FEATURES
- Use PostgreSQL-specific functions (COALESCE, EXTRACT, etc.)
- Leverage advanced features like CTEs, window functions, arrays
- Use proper PostgreSQL data types (TEXT, TIMESTAMPTZ, JSONB, etc.)
- Include appropriate LIMIT clauses for large result sets
### EXAMPLES
**Simple Select**: "Get all active users"
→ SELECT id, name, email, created_at
FROM users
WHERE active = true
ORDER BY created_at DESC;
**Complex Join**: "Get users with their order counts and total spent"
→ SELECT
u.id,
u.name,
u.email,
COUNT(o.id) as order_count,
COALESCE(SUM(o.total), 0) as total_spent
FROM users u
LEFT JOIN orders o ON u.id = o.user_id
WHERE u.active = true
GROUP BY u.id, u.name, u.email
HAVING COUNT(o.id) > 0
ORDER BY total_spent DESC;
**With CTE**: "Get top 10 products by sales"
→ WITH product_sales AS (
SELECT
p.id,
p.name,
SUM(oi.quantity * oi.price) as total_sales
FROM products p
JOIN order_items oi ON p.id = oi.product_id
JOIN orders o ON oi.order_id = o.id
WHERE o.created_at >= CURRENT_DATE - INTERVAL '30 days'
GROUP BY p.id, p.name
)
SELECT * FROM product_sales
ORDER BY total_sales DESC
LIMIT 10;
### REMEMBER
Return ONLY the SQL query - no explanations, no markdown, no extra text.`,
placeholder: 'Describe the SQL query you need...',
generationType: 'sql-query',
},
},
// Data for insert operations
{
id: 'data',
title: 'Data (JSON)',
type: 'code',
layout: 'full',
placeholder: '{\n "name": "John Doe",\n "email": "john@example.com",\n "active": true\n}',
condition: { field: 'operation', value: 'insert' },
required: true,
},
// Set clause for updates
{
id: 'data',
title: 'Update Data (JSON)',
type: 'code',
layout: 'full',
placeholder: '{\n "name": "Jane Doe",\n "email": "jane@example.com"\n}',
condition: { field: 'operation', value: 'update' },
required: true,
},
// Where clause for update/delete
{
id: 'where',
title: 'WHERE Condition',
type: 'short-input',
layout: 'full',
placeholder: 'id = 1',
condition: { field: 'operation', value: 'update' },
required: true,
},
{
id: 'where',
title: 'WHERE Condition',
type: 'short-input',
layout: 'full',
placeholder: 'id = 1',
condition: { field: 'operation', value: 'delete' },
required: true,
},
],
tools: {
access: [
'postgresql_query',
'postgresql_insert',
'postgresql_update',
'postgresql_delete',
'postgresql_execute',
],
config: {
tool: (params) => {
switch (params.operation) {
case 'query':
return 'postgresql_query'
case 'insert':
return 'postgresql_insert'
case 'update':
return 'postgresql_update'
case 'delete':
return 'postgresql_delete'
case 'execute':
return 'postgresql_execute'
default:
throw new Error(`Invalid PostgreSQL operation: ${params.operation}`)
}
},
params: (params) => {
const { operation, data, ...rest } = params
// Parse JSON data if it's a string
let parsedData
if (data && typeof data === 'string' && data.trim()) {
try {
parsedData = JSON.parse(data)
} catch (parseError) {
const errorMsg = parseError instanceof Error ? parseError.message : 'Unknown JSON error'
throw new Error(`Invalid JSON data format: ${errorMsg}. Please check your JSON syntax.`)
}
} else if (data && typeof data === 'object') {
parsedData = data
}
// Build connection config
const connectionConfig = {
host: rest.host,
port: typeof rest.port === 'string' ? Number.parseInt(rest.port, 10) : rest.port || 5432,
database: rest.database,
username: rest.username,
password: rest.password,
ssl: rest.ssl || 'preferred',
}
// Build params object
const result: any = { ...connectionConfig }
if (rest.table) result.table = rest.table
if (rest.query) result.query = rest.query
if (rest.where) result.where = rest.where
if (parsedData !== undefined) result.data = parsedData
return result
},
},
},
inputs: {
operation: { type: 'string', description: 'Database operation to perform' },
host: { type: 'string', description: 'Database host' },
port: { type: 'string', description: 'Database port' },
database: { type: 'string', description: 'Database name' },
username: { type: 'string', description: 'Database username' },
password: { type: 'string', description: 'Database password' },
ssl: { type: 'string', description: 'SSL mode' },
table: { type: 'string', description: 'Table name' },
query: { type: 'string', description: 'SQL query to execute' },
data: { type: 'json', description: 'Data for insert/update operations' },
where: { type: 'string', description: 'WHERE clause for update/delete' },
},
outputs: {
message: {
type: 'string',
description: 'Success or error message describing the operation outcome',
},
rows: {
type: 'array',
description: 'Array of rows returned from the query',
},
rowCount: {
type: 'number',
description: 'Number of rows affected by the operation',
},
},
}
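For illustration, the params hook above flattens the subBlock values into the tool payload: the port string is parsed to a number, ssl falls back to 'preferred', and the JSON data field is parsed, throwing a descriptive error on invalid JSON. A hypothetical insert operation would therefore dispatch postgresql_insert with a payload shaped like this (all names and values are illustrative, not part of the block file):

// Hypothetical payload produced by params() for an insert operation
const examplePayload = {
  host: 'localhost',
  port: 5432, // Number.parseInt applied to the '5432' string value
  database: 'your_database',
  username: 'postgres',
  password: '...',
  ssl: 'preferred', // fallback applied when no SSL mode is selected
  table: 'users',
  data: { name: 'John Doe', email: 'john@example.com', active: true }, // parsed from the JSON string
}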

View File

@@ -94,6 +94,66 @@ export const SupabaseBlock: BlockConfig<SupabaseResponse> = {
placeholder: 'id=eq.123',
condition: { field: 'operation', value: 'get_row' },
required: true,
wandConfig: {
enabled: true,
maintainHistory: true,
prompt: `You are an expert in PostgREST API syntax. Generate PostgREST filter expressions based on the user's request.
### CONTEXT
{context}
### CRITICAL INSTRUCTION
Return ONLY the PostgREST filter expression. Do not include any explanations, markdown formatting, or additional text. Just the raw filter expression.
### POSTGREST FILTER SYNTAX
PostgREST uses a specific syntax for filtering data. The format is:
column=operator.value
### OPERATORS
- **eq** - equals: \`id=eq.123\`
- **neq** - not equals: \`status=neq.inactive\`
- **gt** - greater than: \`age=gt.18\`
- **gte** - greater than or equal: \`score=gte.80\`
- **lt** - less than: \`price=lt.100\`
- **lte** - less than or equal: \`rating=lte.5\`
- **like** - pattern matching: \`name=like.*john*\`
- **ilike** - case-insensitive like: \`email=ilike.*@gmail.com\`
- **in** - in list: \`category=in.(tech,science,art)\`
- **is** - is null/not null: \`deleted_at=is.null\`
- **not** - negation: \`not.and=(status.eq.active,verified.eq.true)\`
### COMBINING FILTERS
- **AND**: Use \`&\` or \`and=(...)\`: \`id=eq.123&status=eq.active\`
- **OR**: Use \`or=(...)\`: \`or=(status.eq.active,status.eq.pending)\`
### EXAMPLES
**Simple equality**: "Find user with ID 123"
→ id=eq.123
**Text search**: "Find users with Gmail addresses"
→ email=ilike.*@gmail.com
**Range filter**: "Find products under $50"
→ price=lt.50
**Multiple conditions**: "Find active users over 18"
→ age=gt.18&status=eq.active
**OR condition**: "Find active or pending orders"
→ or=(status.eq.active,status.eq.pending)
**In list**: "Find posts in specific categories"
→ category=in.(tech,science,health)
**Null check**: "Find users without a profile picture"
→ profile_image=is.null
### REMEMBER
Return ONLY the PostgREST filter expression - no explanations, no markdown, no extra text.`,
placeholder: 'Describe the filter condition you need...',
generationType: 'postgrest',
},
},
{
id: 'filter',
@@ -103,6 +163,66 @@ export const SupabaseBlock: BlockConfig<SupabaseResponse> = {
placeholder: 'id=eq.123',
condition: { field: 'operation', value: 'update' },
required: true,
wandConfig: {
enabled: true,
maintainHistory: true,
prompt: `You are an expert in PostgREST API syntax. Generate PostgREST filter expressions based on the user's request.
### CONTEXT
{context}
### CRITICAL INSTRUCTION
Return ONLY the PostgREST filter expression. Do not include any explanations, markdown formatting, or additional text. Just the raw filter expression.
### POSTGREST FILTER SYNTAX
PostgREST uses a specific syntax for filtering data. The format is:
column=operator.value
### OPERATORS
- **eq** - equals: \`id=eq.123\`
- **neq** - not equals: \`status=neq.inactive\`
- **gt** - greater than: \`age=gt.18\`
- **gte** - greater than or equal: \`score=gte.80\`
- **lt** - less than: \`price=lt.100\`
- **lte** - less than or equal: \`rating=lte.5\`
- **like** - pattern matching: \`name=like.*john*\`
- **ilike** - case-insensitive like: \`email=ilike.*@gmail.com\`
- **in** - in list: \`category=in.(tech,science,art)\`
- **is** - is null/not null: \`deleted_at=is.null\`
- **not** - negation: \`not.and=(status.eq.active,verified.eq.true)\`
### COMBINING FILTERS
- **AND**: Use \`&\` or \`and=(...)\`: \`id=eq.123&status=eq.active\`
- **OR**: Use \`or=(...)\`: \`or=(status.eq.active,status.eq.pending)\`
### EXAMPLES
**Simple equality**: "Find user with ID 123"
→ id=eq.123
**Text search**: "Find users with Gmail addresses"
→ email=ilike.*@gmail.com
**Range filter**: "Find products under $50"
→ price=lt.50
**Multiple conditions**: "Find active users over 18"
→ age=gt.18&status=eq.active
**OR condition**: "Find active or pending orders"
→ or=(status.eq.active,status.eq.pending)
**In list**: "Find posts in specific categories"
→ category=in.(tech,science,health)
**Null check**: "Find users without a profile picture"
→ profile_image=is.null
### REMEMBER
Return ONLY the PostgREST filter expression - no explanations, no markdown, no extra text.`,
placeholder: 'Describe the filter condition you need...',
generationType: 'postgrest',
},
},
{
id: 'filter',
@@ -112,6 +232,66 @@ export const SupabaseBlock: BlockConfig<SupabaseResponse> = {
placeholder: 'id=eq.123',
condition: { field: 'operation', value: 'delete' },
required: true,
wandConfig: {
enabled: true,
maintainHistory: true,
prompt: `You are an expert in PostgREST API syntax. Generate PostgREST filter expressions based on the user's request.
### CONTEXT
{context}
### CRITICAL INSTRUCTION
Return ONLY the PostgREST filter expression. Do not include any explanations, markdown formatting, or additional text. Just the raw filter expression.
### POSTGREST FILTER SYNTAX
PostgREST uses a specific syntax for filtering data. The format is:
column=operator.value
### OPERATORS
- **eq** - equals: \`id=eq.123\`
- **neq** - not equals: \`status=neq.inactive\`
- **gt** - greater than: \`age=gt.18\`
- **gte** - greater than or equal: \`score=gte.80\`
- **lt** - less than: \`price=lt.100\`
- **lte** - less than or equal: \`rating=lte.5\`
- **like** - pattern matching: \`name=like.*john*\`
- **ilike** - case-insensitive like: \`email=ilike.*@gmail.com\`
- **in** - in list: \`category=in.(tech,science,art)\`
- **is** - is null/not null: \`deleted_at=is.null\`
- **not** - negation: \`not.and=(status.eq.active,verified.eq.true)\`
### COMBINING FILTERS
- **AND**: Use \`&\` or \`and=(...)\`: \`id=eq.123&status=eq.active\`
- **OR**: Use \`or=(...)\`: \`or=(status.eq.active,status.eq.pending)\`
### EXAMPLES
**Simple equality**: "Find user with ID 123"
→ id=eq.123
**Text search**: "Find users with Gmail addresses"
→ email=ilike.*@gmail.com
**Range filter**: "Find products under $50"
→ price=lt.50
**Multiple conditions**: "Find active users over 18"
→ age=gt.18&status=eq.active
**OR condition**: "Find active or pending orders"
→ or=(status.eq.active,status.eq.pending)
**In list**: "Find posts in specific categories"
→ category=in.(tech,science,health)
**Null check**: "Find users without a profile picture"
→ profile_image=is.null
### REMEMBER
Return ONLY the PostgREST filter expression - no explanations, no markdown, no extra text.`,
placeholder: 'Describe the filter condition you need...',
generationType: 'postgrest',
},
},
// Optional filter for query operation
{
@@ -121,6 +301,66 @@ export const SupabaseBlock: BlockConfig<SupabaseResponse> = {
layout: 'full',
placeholder: 'status=eq.active',
condition: { field: 'operation', value: 'query' },
wandConfig: {
enabled: true,
maintainHistory: true,
prompt: `You are an expert in PostgREST API syntax. Generate PostgREST filter expressions based on the user's request.
### CONTEXT
{context}
### CRITICAL INSTRUCTION
Return ONLY the PostgREST filter expression. Do not include any explanations, markdown formatting, or additional text. Just the raw filter expression.
### POSTGREST FILTER SYNTAX
PostgREST uses a specific syntax for filtering data. The format is:
column=operator.value
### OPERATORS
- **eq** - equals: \`id=eq.123\`
- **neq** - not equals: \`status=neq.inactive\`
- **gt** - greater than: \`age=gt.18\`
- **gte** - greater than or equal: \`score=gte.80\`
- **lt** - less than: \`price=lt.100\`
- **lte** - less than or equal: \`rating=lte.5\`
- **like** - pattern matching: \`name=like.*john*\`
- **ilike** - case-insensitive like: \`email=ilike.*@gmail.com\`
- **in** - in list: \`category=in.(tech,science,art)\`
- **is** - is null/not null: \`deleted_at=is.null\`
- **not** - negation: \`not.and=(status.eq.active,verified.eq.true)\`
### COMBINING FILTERS
- **AND**: Use \`&\` or \`and=(...)\`: \`id=eq.123&status=eq.active\`
- **OR**: Use \`or=(...)\`: \`or=(status.eq.active,status.eq.pending)\`
### EXAMPLES
**Simple equality**: "Find user with ID 123"
→ id=eq.123
**Text search**: "Find users with Gmail addresses"
→ email=ilike.*@gmail.com
**Range filter**: "Find products under $50"
→ price=lt.50
**Multiple conditions**: "Find active users over 18"
→ age=gt.18&status=eq.active
**OR condition**: "Find active or pending orders"
→ or=(status.eq.active,status.eq.pending)
**In list**: "Find posts in specific categories"
→ category=in.(tech,science,health)
**Null check**: "Find users without a profile picture"
→ profile_image=is.null
### REMEMBER
Return ONLY the PostgREST filter expression - no explanations, no markdown, no extra text.`,
placeholder: 'Describe the filter condition...',
generationType: 'postgrest',
},
},
// Optional order by for query operation
{

View File

@@ -40,12 +40,15 @@ import { MicrosoftExcelBlock } from '@/blocks/blocks/microsoft_excel'
import { MicrosoftPlannerBlock } from '@/blocks/blocks/microsoft_planner'
import { MicrosoftTeamsBlock } from '@/blocks/blocks/microsoft_teams'
import { MistralParseBlock } from '@/blocks/blocks/mistral_parse'
import { MySQLBlock } from '@/blocks/blocks/mysql'
import { NotionBlock } from '@/blocks/blocks/notion'
import { OneDriveBlock } from '@/blocks/blocks/onedrive'
import { OpenAIBlock } from '@/blocks/blocks/openai'
import { OutlookBlock } from '@/blocks/blocks/outlook'
import { ParallelBlock } from '@/blocks/blocks/parallel'
import { PerplexityBlock } from '@/blocks/blocks/perplexity'
import { PineconeBlock } from '@/blocks/blocks/pinecone'
import { PostgreSQLBlock } from '@/blocks/blocks/postgresql'
import { QdrantBlock } from '@/blocks/blocks/qdrant'
import { RedditBlock } from '@/blocks/blocks/reddit'
import { ResponseBlock } from '@/blocks/blocks/response'
@@ -113,12 +116,15 @@ export const registry: Record<string, BlockConfig> = {
microsoft_planner: MicrosoftPlannerBlock,
microsoft_teams: MicrosoftTeamsBlock,
mistral_parse: MistralParseBlock,
mysql: MySQLBlock,
notion: NotionBlock,
openai: OpenAIBlock,
outlook: OutlookBlock,
onedrive: OneDriveBlock,
parallel_ai: ParallelBlock,
perplexity: PerplexityBlock,
pinecone: PineconeBlock,
postgresql: PostgreSQLBlock,
qdrant: QdrantBlock,
memory: MemoryBlock,
reddit: RedditBlock,

View File

@@ -4,7 +4,7 @@ import type { ToolResponse } from '@/tools/types'
// Basic types
export type BlockIcon = (props: SVGProps<SVGSVGElement>) => JSX.Element
export type ParamType = 'string' | 'number' | 'boolean' | 'json'
export type PrimitiveValueType = 'string' | 'number' | 'boolean' | 'json' | 'any'
export type PrimitiveValueType = 'string' | 'number' | 'boolean' | 'json' | 'array' | 'any'
// Block classification
export type BlockCategory = 'blocks' | 'tools' | 'triggers'
@@ -17,6 +17,8 @@ export type GenerationType =
| 'json-object'
| 'system-prompt'
| 'custom-tool-schema'
| 'sql-query'
| 'postgrest'
// SubBlock types
export type SubBlockType =

View File

@@ -3345,6 +3345,98 @@ export function MicrosoftPlannerIcon(props: SVGProps<SVGSVGElement>) {
)
}
export function ParallelIcon(props: SVGProps<SVGSVGElement>) {
return (
<svg
{...props}
fill='currentColor'
width='271'
height='270'
viewBox='0 0 271 270'
xmlns='http://www.w3.org/2000/svg'
>
<path
d='M267.804 105.65H193.828C194.026 106.814 194.187 107.996 194.349 109.178H76.6703C76.4546 110.736 76.2388 112.312 76.0591 113.87H1.63342C1.27387 116.198 0.950289 118.543 0.698608 120.925H75.3759C75.2501 122.483 75.1602 124.059 75.0703 125.617H195.949C196.003 126.781 196.057 127.962 196.093 129.144H270.68V125.384C270.195 118.651 269.242 112.061 267.804 105.65Z'
fill='#1D1C1A'
/>
<path
d='M195.949 144.401H75.0703C75.1422 145.977 75.2501 147.535 75.3759 149.093H0.698608C0.950289 151.457 1.2559 153.802 1.63342 156.148H76.0591C76.2388 157.724 76.4366 159.282 76.6703 160.84H194.349C194.187 162.022 194.008 163.186 193.828 164.367H267.804C269.242 157.957 270.195 151.367 270.68 144.634V140.874H196.093C196.057 142.055 196.003 143.219 195.949 144.401Z'
fill='#1D1C1A'
/>
<path
d='M190.628 179.642H80.3559C80.7514 181.218 81.1828 182.776 81.6143 184.334H9.30994C10.2448 186.715 11.2515 189.061 12.3121 191.389H83.7536C84.2749 192.965 84.7962 194.523 85.3535 196.08H185.594C185.163 197.262 184.732 198.426 184.282 199.608H254.519C258.6 192.177 261.98 184.316 264.604 176.114H191.455C191.185 177.296 190.898 178.46 190.61 179.642H190.628Z'
fill='#1D1C1A'
/>
<path
d='M177.666 214.883H93.3352C94.1082 216.458 94.9172 218.034 95.7441 219.574H29.8756C31.8351 221.992 33.8666 224.337 35.9699 226.63H99.6632C100.598 228.205 101.551 229.781 102.522 231.321H168.498C167.761 232.503 167.006 233.685 166.233 234.849H226.762C234.474 227.847 241.36 219.95 247.292 211.355H179.356C178.799 212.537 178.26 213.719 177.684 214.883H177.666Z'
fill='#1D1C1A'
/>
<path
d='M154.943 250.106H116.058C117.371 251.699 118.701 253.257 120.067 254.797H73.021C91.6094 264.431 112.715 269.946 135.096 270C135.24 270 135.366 270 135.492 270C135.618 270 135.761 270 135.887 270C164.04 269.911 190.178 261.28 211.805 246.56H157.748C156.813 247.742 155.878 248.924 154.925 250.088L154.943 250.106Z'
fill='#1D1C1A'
/>
<path
d='M116.059 19.9124H154.943C155.896 21.0764 156.831 22.2582 157.766 23.4401H211.823C190.179 8.72065 164.058 0.0895344 135.906 0C135.762 0 135.636 0 135.51 0C135.384 0 135.24 0 135.115 0C112.715 0.0716275 91.6277 5.56904 73.0393 15.2029H120.086C118.719 16.7429 117.389 18.3187 116.077 19.8945L116.059 19.9124Z'
fill='#1D1C1A'
/>
<path
d='M93.3356 55.1532H177.667C178.242 56.3171 178.799 57.499 179.339 58.6808H247.274C241.342 50.0855 234.457 42.1886 226.744 35.187H166.215C166.988 36.351 167.743 37.5328 168.48 38.7147H102.504C101.533 40.2726 100.58 41.8305 99.6456 43.4063H35.9523C33.831 45.6804 31.7996 48.0262 29.858 50.4616H95.7265C94.8996 52.0195 94.1086 53.5774 93.3176 55.1532H93.3356Z'
fill='#1D1C1A'
/>
<path
d='M80.3736 90.3758H190.646C190.933 91.5398 191.221 92.7216 191.491 93.9035H264.64C262.015 85.7021 258.636 77.841 254.555 70.4097H184.318C184.767 71.5736 185.199 72.7555 185.63 73.9373H85.3893C84.832 75.4952 84.2927 77.0531 83.7893 78.6289H12.3479C11.2872 80.9389 10.2805 83.2847 9.3457 85.6842H81.65C81.2186 87.2421 80.7871 88.8 80.3916 90.3758H80.3736Z'
fill='#1D1C1A'
/>
</svg>
)
}
export function PostgresIcon(props: SVGProps<SVGSVGElement>) {
return (
<svg
{...props}
width='800px'
height='800px'
viewBox='-4 0 264 264'
xmlns='http://www.w3.org/2000/svg'
preserveAspectRatio='xMinYMin meet'
>
<path d='M255.008 158.086c-1.535-4.649-5.556-7.887-10.756-8.664-2.452-.366-5.26-.21-8.583.475-5.792 1.195-10.089 1.65-13.225 1.738 11.837-19.985 21.462-42.775 27.003-64.228 8.96-34.689 4.172-50.492-1.423-57.64C233.217 10.847 211.614.683 185.552.372c-13.903-.17-26.108 2.575-32.475 4.549-5.928-1.046-12.302-1.63-18.99-1.738-12.537-.2-23.614 2.533-33.079 8.15-5.24-1.772-13.65-4.27-23.362-5.864-22.842-3.75-41.252-.828-54.718 8.685C6.622 25.672-.937 45.684.461 73.634c.444 8.874 5.408 35.874 13.224 61.48 4.492 14.718 9.282 26.94 14.237 36.33 7.027 13.315 14.546 21.156 22.987 23.972 4.731 1.576 13.327 2.68 22.368-4.85 1.146 1.388 2.675 2.767 4.704 4.048 2.577 1.625 5.728 2.953 8.875 3.74 11.341 2.835 21.964 2.126 31.027-1.848.056 1.612.099 3.152.135 4.482.06 2.157.12 4.272.199 6.25.537 13.374 1.447 23.773 4.143 31.049.148.4.347 1.01.557 1.657 1.345 4.118 3.594 11.012 9.316 16.411 5.925 5.593 13.092 7.308 19.656 7.308 3.292 0 6.433-.432 9.188-1.022 9.82-2.105 20.973-5.311 29.041-16.799 7.628-10.86 11.336-27.217 12.007-52.99.087-.729.167-1.425.244-2.088l.16-1.362 1.797.158.463.031c10.002.456 22.232-1.665 29.743-5.154 5.935-2.754 24.954-12.795 20.476-26.351' />
<path
d='M237.906 160.722c-29.74 6.135-31.785-3.934-31.785-3.934 31.4-46.593 44.527-105.736 33.2-120.211-30.904-39.485-84.399-20.811-85.292-20.327l-.287.052c-5.876-1.22-12.451-1.946-19.842-2.067-13.456-.22-23.664 3.528-31.41 9.402 0 0-95.43-39.314-90.991 49.444.944 18.882 27.064 142.873 58.218 105.422 11.387-13.695 22.39-25.274 22.39-25.274 5.464 3.63 12.006 5.482 18.864 4.817l.533-.452c-.166 1.7-.09 3.363.213 5.332-8.026 8.967-5.667 10.541-21.711 13.844-16.235 3.346-6.698 9.302-.471 10.86 7.549 1.887 25.013 4.561 36.813-11.958l-.47 1.885c3.144 2.519 5.352 16.383 4.982 28.952-.37 12.568-.617 21.197 1.86 27.937 2.479 6.74 4.948 21.905 26.04 17.386 17.623-3.777 26.756-13.564 28.027-29.89.901-11.606 2.942-9.89 3.07-20.267l1.637-4.912c1.887-15.733.3-20.809 11.157-18.448l2.64.232c7.99.363 18.45-1.286 24.589-4.139 13.218-6.134 21.058-16.377 8.024-13.686h.002'
fill='#336791'
/>
<path
d='M108.076 81.525c-2.68-.373-5.107-.028-6.335.902-.69.523-.904 1.129-.962 1.546-.154 1.105.62 2.327 1.096 2.957 1.346 1.784 3.312 3.01 5.258 3.28.282.04.563.058.842.058 3.245 0 6.196-2.527 6.456-4.392.325-2.336-3.066-3.893-6.355-4.35M196.86 81.599c-.256-1.831-3.514-2.353-6.606-1.923-3.088.43-6.082 1.824-5.832 3.659.2 1.427 2.777 3.863 5.827 3.863.258 0 .518-.017.78-.054 2.036-.282 3.53-1.575 4.24-2.32 1.08-1.136 1.706-2.402 1.591-3.225'
fill='#FFF'
/>
<path
d='M247.802 160.025c-1.134-3.429-4.784-4.532-10.848-3.28-18.005 3.716-24.453 1.142-26.57-.417 13.995-21.32 25.508-47.092 31.719-71.137 2.942-11.39 4.567-21.968 4.7-30.59.147-9.463-1.465-16.417-4.789-20.665-13.402-17.125-33.072-26.311-56.882-26.563-16.369-.184-30.199 4.005-32.88 5.183-5.646-1.404-11.801-2.266-18.502-2.376-12.288-.199-22.91 2.743-31.704 8.74-3.82-1.422-13.692-4.811-25.765-6.756-20.872-3.36-37.458-.814-49.294 7.571-14.123 10.006-20.643 27.892-19.38 53.16.425 8.501 5.269 34.653 12.913 59.698 10.062 32.964 21 51.625 32.508 55.464 1.347.449 2.9.763 4.613.763 4.198 0 9.345-1.892 14.7-8.33a529.832 529.832 0 0 1 20.261-22.926c4.524 2.428 9.494 3.784 14.577 3.92.01.133.023.266.035.398a117.66 117.66 0 0 0-2.57 3.175c-3.522 4.471-4.255 5.402-15.592 7.736-3.225.666-11.79 2.431-11.916 8.435-.136 6.56 10.125 9.315 11.294 9.607 4.074 1.02 7.999 1.523 11.742 1.523 9.103 0 17.114-2.992 23.516-8.781-.197 23.386.778 46.43 3.586 53.451 2.3 5.748 7.918 19.795 25.664 19.794 2.604 0 5.47-.303 8.623-.979 18.521-3.97 26.564-12.156 29.675-30.203 1.665-9.645 4.522-32.676 5.866-45.03 2.836.885 6.487 1.29 10.434 1.289 8.232 0 17.731-1.749 23.688-4.514 6.692-3.108 18.768-10.734 16.578-17.36zm-44.106-83.48c-.061 3.647-.563 6.958-1.095 10.414-.573 3.717-1.165 7.56-1.314 12.225-.147 4.54.42 9.26.968 13.825 1.108 9.22 2.245 18.712-2.156 28.078a36.508 36.508 0 0 1-1.95-4.009c-.547-1.326-1.735-3.456-3.38-6.404-6.399-11.476-21.384-38.35-13.713-49.316 2.285-3.264 8.084-6.62 22.64-4.813zm-17.644-61.787c21.334.471 38.21 8.452 50.158 23.72 9.164 11.711-.927 64.998-30.14 110.969a171.33 171.33 0 0 0-.886-1.117l-.37-.462c7.549-12.467 6.073-24.802 4.759-35.738-.54-4.488-1.05-8.727-.92-12.709.134-4.22.692-7.84 1.232-11.34.663-4.313 1.338-8.776 1.152-14.037.139-.552.195-1.204.122-1.978-.475-5.045-6.235-20.144-17.975-33.81-6.422-7.475-15.787-15.84-28.574-21.482 5.5-1.14 13.021-2.203 21.442-2.016zM66.674 175.778c-5.9 7.094-9.974 5.734-11.314 5.288-8.73-2.912-18.86-21.364-27.791-50.624-7.728-25.318-12.244-50.777-12.602-57.916-1.128-22.578 4.345-38.313 16.268-46.769 19.404-13.76 51.306-5.524 64.125-1.347-.184.182-.376.352-.558.537-21.036 21.244-20.537 57.54-20.485 59.759-.002.856.07 2.068.168 3.735.362 6.105 1.036 17.467-.764 30.334-1.672 11.957 2.014 23.66 10.111 32.109a36.275 36.275 0 0 0 2.617 2.468c-3.604 3.86-11.437 12.396-19.775 22.426zm22.479-29.993c-6.526-6.81-9.49-16.282-8.133-25.99 1.9-13.592 1.199-25.43.822-31.79-.053-.89-.1-1.67-.127-2.285 3.073-2.725 17.314-10.355 27.47-8.028 4.634 1.061 7.458 4.217 8.632 9.645 6.076 28.103.804 39.816-3.432 49.229-.873 1.939-1.698 3.772-2.402 5.668l-.546 1.466c-1.382 3.706-2.668 7.152-3.465 10.424-6.938-.02-13.687-2.984-18.819-8.34zm1.065 37.9c-2.026-.506-3.848-1.385-4.917-2.114.893-.42 2.482-.992 5.238-1.56 13.337-2.745 15.397-4.683 19.895-10.394 1.031-1.31 2.2-2.794 3.819-4.602l.002-.002c2.411-2.7 3.514-2.242 5.514-1.412 1.621.67 3.2 2.702 3.84 4.938.303 1.056.643 3.06-.47 4.62-9.396 13.156-23.088 12.987-32.921 10.526zm69.799 64.952c-16.316 3.496-22.093-4.829-25.9-14.346-2.457-6.144-3.665-33.85-2.808-64.447.011-.407-.047-.8-.159-1.17a15.444 15.444 0 0 0-.456-2.162c-1.274-4.452-4.379-8.176-8.104-9.72-1.48-.613-4.196-1.738-7.46-.903.696-2.868 1.903-6.107 3.212-9.614l.549-1.475c.618-1.663 1.394-3.386 2.214-5.21 4.433-9.848 10.504-23.337 3.915-53.81-2.468-11.414-10.71-16.988-23.204-15.693-7.49.775-14.343 3.797-17.761 5.53-.735.372-1.407.732-2.035 1.082.954-11.5 4.558-32.992 18.04-46.59 8.489-8.56 19.794-12.788 33.568-12.56 27.14.444 44.544 14.372 54.366 25.979 8.464 10.001 13.047 
20.076 14.876 25.51-13.755-1.399-23.11 1.316-27.852 8.096-10.317 14.748 5.644 43.372 13.315 57.129 1.407 2.521 2.621 4.7 3.003 5.626 2.498 6.054 5.732 10.096 8.093 13.046.724.904 1.426 1.781 1.96 2.547-4.166 1.201-11.649 3.976-10.967 17.847-.55 6.96-4.461 39.546-6.448 51.059-2.623 15.21-8.22 20.875-23.957 24.25zm68.104-77.936c-4.26 1.977-11.389 3.46-18.161 3.779-7.48.35-11.288-.838-12.184-1.569-.42-8.644 2.797-9.547 6.202-10.503.535-.15 1.057-.297 1.561-.473.313.255.656.508 1.032.756 6.012 3.968 16.735 4.396 31.874 1.271l.166-.033c-2.042 1.909-5.536 4.471-10.49 6.772z'
fill='#FFF'
/>
</svg>
)
}
export function MySQLIcon(props: SVGProps<SVGSVGElement>) {
return (
<svg
{...props}
xmlns='http://www.w3.org/2000/svg'
width='64'
height='64'
viewBox='0 0 25.6 25.6'
>
<path
d='M179.076 94.886c-3.568-.1-6.336.268-8.656 1.25-.668.27-1.74.27-1.828 1.116.357.355.4.936.713 1.428.535.893 1.473 2.096 2.32 2.72l2.855 2.053c1.74 1.07 3.703 1.695 5.398 2.766.982.625 1.963 1.428 2.945 2.098.5.357.803.938 1.428 1.16v-.135c-.312-.4-.402-.98-.713-1.428l-1.34-1.293c-1.293-1.74-2.9-3.258-4.64-4.506-1.428-.982-4.55-2.32-5.13-3.97l-.088-.1c.98-.1 2.14-.447 3.078-.715 1.518-.4 2.9-.312 4.46-.713l2.143-.625v-.4c-.803-.803-1.383-1.874-2.23-2.632-2.275-1.963-4.775-3.882-7.363-5.488-1.383-.892-3.168-1.473-4.64-2.23-.537-.268-1.428-.402-1.74-.848-.805-.98-1.25-2.275-1.83-3.436l-3.658-7.763c-.803-1.74-1.295-3.48-2.275-5.086-4.596-7.585-9.594-12.18-17.268-16.687-1.65-.937-3.613-1.34-5.7-1.83l-3.346-.18c-.715-.312-1.428-1.16-2.053-1.562-2.543-1.606-9.102-5.086-10.977-.5-1.205 2.9 1.785 5.755 2.8 7.228.76 1.026 1.74 2.186 2.277 3.346.3.758.4 1.562.713 2.365.713 1.963 1.383 4.15 2.32 5.98.5.937 1.025 1.92 1.65 2.767.357.5.982.714 1.115 1.517-.625.893-.668 2.23-1.025 3.347-1.607 5.042-.982 11.288 1.293 15 .715 1.115 2.4 3.57 4.686 2.632 2.008-.803 1.56-3.346 2.14-5.577.135-.535.045-.892.312-1.25v.1l1.83 3.703c1.383 2.186 3.793 4.462 5.8 5.98 1.07.803 1.918 2.187 3.256 2.677v-.135h-.088c-.268-.4-.67-.58-1.027-.892-.803-.803-1.695-1.785-2.32-2.677-1.873-2.498-3.523-5.265-4.996-8.12-.715-1.383-1.34-2.9-1.918-4.283-.27-.536-.27-1.34-.715-1.606-.67.98-1.65 1.83-2.143 3.034-.848 1.918-.936 4.283-1.248 6.737-.18.045-.1 0-.18.1-1.426-.356-1.918-1.83-2.453-3.078-1.338-3.168-1.562-8.254-.402-11.913.312-.937 1.652-3.882 1.117-4.774-.27-.848-1.16-1.338-1.652-2.008-.58-.848-1.203-1.918-1.605-2.855-1.07-2.5-1.605-5.265-2.766-7.764-.537-1.16-1.473-2.365-2.232-3.435-.848-1.205-1.783-2.053-2.453-3.48-.223-.5-.535-1.294-.178-1.83.088-.357.268-.5.623-.58.58-.5 2.232.134 2.812.4 1.65.67 3.033 1.294 4.416 2.23.625.446 1.295 1.294 2.098 1.518h.938c1.428.312 3.033.1 4.37.5 2.365.76 4.506 1.874 6.426 3.08 5.844 3.703 10.664 8.968 13.92 15.26.535 1.026.758 1.963 1.25 3.034.938 2.187 2.098 4.417 3.033 6.56.938 2.097 1.83 4.24 3.168 5.98.67.937 3.346 1.427 4.55 1.918.893.4 2.275.76 3.08 1.25 1.516.937 3.033 2.008 4.46 3.034.713.534 2.945 1.65 3.078 2.54zm-45.5-38.772a7.09 7.09 0 0 0-1.828.223v.1h.088c.357.714.982 1.205 1.428 1.83l1.027 2.142.088-.1c.625-.446.938-1.16.938-2.23-.268-.312-.312-.625-.535-.937-.268-.446-.848-.67-1.206-1.026z'
transform='matrix(.390229 0 0 .38781 -46.300037 -16.856717)'
fillRule='evenodd'
fill='#00678c'
/>
</svg>
)
}
export function OpenRouterIcon(props: SVGProps<SVGSVGElement>) {
return (
<svg

View File

@@ -14,9 +14,9 @@ const Slider = React.forwardRef<
{...props}
>
<SliderPrimitive.Track className='relative h-2 w-full grow overflow-hidden rounded-full bg-secondary'>
<SliderPrimitive.Range className='absolute h-full bg-primary' />
<SliderPrimitive.Range className='absolute h-full bg-primary dark:bg-white' />
</SliderPrimitive.Track>
<SliderPrimitive.Thumb className='block h-5 w-5 rounded-full border-2 border-primary bg-background ring-offset-background transition-colors focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:pointer-events-none disabled:opacity-50' />
<SliderPrimitive.Thumb className='block h-5 w-5 rounded-full border-2 border-primary bg-background ring-offset-background transition-colors focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:pointer-events-none disabled:opacity-50 dark:border-white dark:bg-black' />
</SliderPrimitive.Root>
))
Slider.displayName = SliderPrimitive.Root.displayName

View File

@@ -55,7 +55,6 @@ const BLOCK_COLORS = {
DEFAULT: '#2F55FF',
LOOP: '#2FB3FF',
PARALLEL: '#FEE12B',
WHILE: '#57D9A3',
} as const
const TAG_PREFIXES = {
@@ -295,7 +294,6 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
const blocks = useWorkflowStore((state) => state.blocks)
const loops = useWorkflowStore((state) => state.loops)
const parallels = useWorkflowStore((state) => state.parallels)
const whiles = useWorkflowStore((state) => state.whiles)
const edges = useWorkflowStore((state) => state.edges)
const workflowId = useWorkflowRegistry((state) => state.activeWorkflowId)
@@ -323,11 +321,7 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
const blockConfig = getBlock(sourceBlock.type)
if (!blockConfig) {
if (
sourceBlock.type === 'loop' ||
sourceBlock.type === 'parallel' ||
sourceBlock.type === 'while'
) {
if (sourceBlock.type === 'loop' || sourceBlock.type === 'parallel') {
const mockConfig = {
outputs: {
results: 'array',
@@ -473,26 +467,13 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
}
const serializer = new Serializer()
const serializedWorkflow = serializer.serializeWorkflow(blocks, edges, loops, parallels, whiles)
const serializedWorkflow = serializer.serializeWorkflow(blocks, edges, loops, parallels)
const accessibleBlockIds = BlockPathCalculator.findAllPathNodes(
serializedWorkflow.connections,
blockId
)
// If editing a while block condition, also include children inside the while container
const sourceBlock = blocks[blockId]
if (sourceBlock && sourceBlock.type === 'while') {
const whileCfg = whiles[blockId]
if (whileCfg && Array.isArray(whileCfg.nodes)) {
whileCfg.nodes.forEach((childId: string) => {
if (!accessibleBlockIds.includes(childId)) {
accessibleBlockIds.push(childId)
}
})
}
}
const starterBlock = Object.values(blocks).find((block) => block.type === 'starter')
if (starterBlock && !accessibleBlockIds.includes(starterBlock.id)) {
accessibleBlockIds.push(starterBlock.id)
@@ -570,7 +551,6 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
}
let parallelBlockGroup: BlockTagGroup | null = null
let whileBlockGroup: BlockTagGroup | null = null
const containingParallel = Object.entries(parallels || {}).find(([_, parallel]) =>
parallel.nodes.includes(blockId)
)
@@ -599,27 +579,6 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
}
}
const containingWhile = Object.entries(whiles || {}).find(([_, w]) => w.nodes.includes(blockId))
let containingWhileBlockId: string | null = null
if (containingWhile) {
const [whileId] = containingWhile
containingWhileBlockId = whileId
const contextualTags: string[] = ['index']
const containingWhileBlock = blocks[whileId]
if (containingWhileBlock) {
const whileBlockName = containingWhileBlock.name || containingWhileBlock.type
whileBlockGroup = {
blockName: whileBlockName,
blockId: whileId,
blockType: 'while',
tags: contextualTags,
distance: 0,
}
}
}
const blockTagGroups: BlockTagGroup[] = []
const allBlockTags: string[] = []
@@ -630,16 +589,11 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
const blockConfig = getBlock(accessibleBlock.type)
if (!blockConfig) {
if (
accessibleBlock.type === 'loop' ||
accessibleBlock.type === 'parallel' ||
accessibleBlock.type === 'while'
) {
if (accessibleBlock.type === 'loop' || accessibleBlock.type === 'parallel') {
// Skip this block if it's the containing loop/parallel block - we'll handle it with contextual tags
if (
accessibleBlockId === containingLoopBlockId ||
accessibleBlockId === containingParallelBlockId ||
accessibleBlockId === containingWhileBlockId
accessibleBlockId === containingParallelBlockId
) {
continue
}
@@ -775,9 +729,6 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
if (parallelBlockGroup) {
finalBlockTagGroups.push(parallelBlockGroup)
}
if (whileBlockGroup) {
finalBlockTagGroups.push(whileBlockGroup)
}
blockTagGroups.sort((a, b) => a.distance - b.distance)
finalBlockTagGroups.push(...blockTagGroups)
@@ -789,16 +740,13 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
if (parallelBlockGroup) {
contextualTags.push(...parallelBlockGroup.tags)
}
if (whileBlockGroup) {
contextualTags.push(...whileBlockGroup.tags)
}
return {
tags: [...variableTags, ...contextualTags, ...allBlockTags],
variableInfoMap,
blockTagGroups: finalBlockTagGroups,
}
}, [blocks, edges, loops, parallels, whiles, blockId, activeSourceBlockId, workflowVariables])
}, [blocks, edges, loops, parallels, blockId, activeSourceBlockId, workflowVariables])
const filteredTags = useMemo(() => {
if (!searchTerm) return tags
@@ -858,11 +806,9 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
})
} else {
const path = tagParts.slice(1).join('.')
// Handle contextual tags for loop/parallel/while blocks (single words like 'index', 'currentItem')
// Handle contextual tags for loop/parallel blocks (single words like 'index', 'currentItem')
if (
(group.blockType === 'loop' ||
group.blockType === 'parallel' ||
group.blockType === 'while') &&
(group.blockType === 'loop' || group.blockType === 'parallel') &&
tagParts.length === 1
) {
directTags.push({
@@ -966,9 +912,7 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
}
} else if (
blockGroup &&
(blockGroup.blockType === 'loop' ||
blockGroup.blockType === 'parallel' ||
blockGroup.blockType === 'while')
(blockGroup.blockType === 'loop' || blockGroup.blockType === 'parallel')
) {
if (!tag.includes('.') && ['index', 'currentItem', 'items'].includes(tag)) {
processedTag = `${blockGroup.blockType}.${tag}`
@@ -1339,8 +1283,6 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
blockColor = BLOCK_COLORS.LOOP
} else if (group.blockType === 'parallel') {
blockColor = BLOCK_COLORS.PARALLEL
} else if (group.blockType === 'while') {
blockColor = BLOCK_COLORS.WHILE
}
return (
@@ -1363,9 +1305,7 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
let tagIcon = group.blockName.charAt(0).toUpperCase()
if (
(group.blockType === 'loop' ||
group.blockType === 'parallel' ||
group.blockType === 'while') &&
(group.blockType === 'loop' || group.blockType === 'parallel') &&
!nestedTag.key.includes('.')
) {
if (nestedTag.key === 'index') {

View File

@@ -391,7 +391,6 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
edges: mergedEdges,
loops: workflowState.loops || existing.loops || {},
parallels: workflowState.parallels || existing.parallels || {},
whiles: workflowState.whiles || existing.whiles || {},
lastSaved: workflowState.lastSaved || existing.lastSaved || Date.now(),
isDeployed: workflowState.isDeployed ?? existing.isDeployed ?? false,
deployedAt: workflowState.deployedAt || existing.deployedAt,
@@ -533,7 +532,6 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
edges: mergedEdges,
loops: workflowState.loops || existing.loops || {},
parallels: workflowState.parallels || existing.parallels || {},
whiles: workflowState.whiles || existing.whiles || {},
lastSaved: workflowState.lastSaved || existing.lastSaved || Date.now(),
isDeployed: workflowState.isDeployed ?? existing.isDeployed ?? false,
deployedAt: workflowState.deployedAt || existing.deployedAt,

Some files were not shown because too many files have changed in this diff.